Detailed changes
@@ -19,8 +19,6 @@ rustflags = [
"windows_slim_errors", # This cfg will reduce the size of `windows::core::Error` from 16 bytes to 4 bytes
"-C",
"target-feature=+crt-static", # This fixes the linking issue when compiling livekit on Windows
- "-C",
- "link-arg=-fuse-ld=lld",
]
[env]
@@ -41,5 +41,4 @@ workspace-members = [
"slash_commands_example",
"zed_snippets",
"zed_test_extension",
- "zed_toml",
]
@@ -1,2 +1,5 @@
# Prevent GitHub from displaying comments within JSON files as errors.
*.json linguist-language=JSON-with-Comments
+
+# Ensure the WSL script always has LF line endings, even on Windows
+crates/zed/resources/windows/zed.sh text eol=lf
@@ -14,7 +14,7 @@ body:
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install.
- Any code must be sufficient to reproduce (include context!)
- - Code must as text, not just as a screenshot.
+ - Include code as text, not just as a screenshot.
- Issues with insufficient detail may be summarily closed.
-->
@@ -19,14 +19,27 @@ self-hosted-runner:
- namespace-profile-16x32-ubuntu-2004-arm
- namespace-profile-32x64-ubuntu-2004-arm
# Namespace Ubuntu 22.04 (Everything else)
- - namespace-profile-2x4-ubuntu-2204
- namespace-profile-4x8-ubuntu-2204
- namespace-profile-8x16-ubuntu-2204
- namespace-profile-16x32-ubuntu-2204
- namespace-profile-32x64-ubuntu-2204
+ # Namespace Ubuntu 24.04 (like ubuntu-latest)
+ - namespace-profile-2x4-ubuntu-2404
# Namespace Limited Preview
- namespace-profile-8x16-ubuntu-2004-arm-m4
- namespace-profile-8x32-ubuntu-2004-arm-m4
# Self Hosted Runners
- self-mini-macos
- self-32vcpu-windows-2022
+
+# Disable shellcheck because it doesn't like powershell
+# This should have been triggered with initial rollout of actionlint
+# but https://github.com/zed-industries/zed/pull/36693
+# somehow caused actionlint to actually check those windows jobs
+# where previously they were being skipped. Likely caused by an
+# unknown bug in actionlint where parsing of `runs-on: [ ]`
+# breaks something else. (yuck)
+paths:
+ .github/workflows/{ci,release_nightly}.yml:
+ ignore:
+ - "shellcheck"
@@ -20,167 +20,8 @@ runs:
with:
node-version: "18"
- - name: Configure crash dumps
- shell: powershell
- run: |
- # Record the start time for this CI run
- $runStartTime = Get-Date
- $runStartTimeStr = $runStartTime.ToString("yyyy-MM-dd HH:mm:ss")
- Write-Host "CI run started at: $runStartTimeStr"
-
- # Save the timestamp for later use
- echo "CI_RUN_START_TIME=$($runStartTime.Ticks)" >> $env:GITHUB_ENV
-
- # Create crash dump directory in workspace (non-persistent)
- $dumpPath = "$env:GITHUB_WORKSPACE\crash_dumps"
- New-Item -ItemType Directory -Force -Path $dumpPath | Out-Null
-
- Write-Host "Setting up crash dump detection..."
- Write-Host "Workspace dump path: $dumpPath"
-
- # Note: We're NOT modifying registry on stateful runners
- # Instead, we'll check default Windows crash locations after tests
-
- name: Run tests
shell: powershell
working-directory: ${{ inputs.working-directory }}
run: |
- $env:RUST_BACKTRACE = "full"
-
- # Enable Windows debugging features
- $env:_NT_SYMBOL_PATH = "srv*https://msdl.microsoft.com/download/symbols"
-
- # .NET crash dump environment variables (ephemeral)
- $env:COMPlus_DbgEnableMiniDump = "1"
- $env:COMPlus_DbgMiniDumpType = "4"
- $env:COMPlus_CreateDumpDiagnostics = "1"
-
cargo nextest run --workspace --no-fail-fast
-
- - name: Analyze crash dumps
- if: always()
- shell: powershell
- run: |
- Write-Host "Checking for crash dumps..."
-
- # Get the CI run start time from the environment
- $runStartTime = [DateTime]::new([long]$env:CI_RUN_START_TIME)
- Write-Host "Only analyzing dumps created after: $($runStartTime.ToString('yyyy-MM-dd HH:mm:ss'))"
-
- # Check all possible crash dump locations
- $searchPaths = @(
- "$env:GITHUB_WORKSPACE\crash_dumps",
- "$env:LOCALAPPDATA\CrashDumps",
- "$env:TEMP",
- "$env:GITHUB_WORKSPACE",
- "$env:USERPROFILE\AppData\Local\CrashDumps",
- "C:\Windows\System32\config\systemprofile\AppData\Local\CrashDumps"
- )
-
- $dumps = @()
- foreach ($path in $searchPaths) {
- if (Test-Path $path) {
- Write-Host "Searching in: $path"
- $found = Get-ChildItem "$path\*.dmp" -ErrorAction SilentlyContinue | Where-Object {
- $_.CreationTime -gt $runStartTime
- }
- if ($found) {
- $dumps += $found
- Write-Host " Found $($found.Count) dump(s) from this CI run"
- }
- }
- }
-
- if ($dumps) {
- Write-Host "Found $($dumps.Count) crash dump(s)"
-
- # Install debugging tools if not present
- $cdbPath = "C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\cdb.exe"
- if (-not (Test-Path $cdbPath)) {
- Write-Host "Installing Windows Debugging Tools..."
- $url = "https://go.microsoft.com/fwlink/?linkid=2237387"
- Invoke-WebRequest -Uri $url -OutFile winsdksetup.exe
- Start-Process -Wait winsdksetup.exe -ArgumentList "/features OptionId.WindowsDesktopDebuggers /quiet"
- }
-
- foreach ($dump in $dumps) {
- Write-Host "`n=================================="
- Write-Host "Analyzing crash dump: $($dump.Name)"
- Write-Host "Size: $([math]::Round($dump.Length / 1MB, 2)) MB"
- Write-Host "Time: $($dump.CreationTime)"
- Write-Host "=================================="
-
- # Set symbol path
- $env:_NT_SYMBOL_PATH = "srv*C:\symbols*https://msdl.microsoft.com/download/symbols"
-
- # Run analysis
- $analysisOutput = & $cdbPath -z $dump.FullName -c "!analyze -v; ~*k; lm; q" 2>&1 | Out-String
-
- # Extract key information
- if ($analysisOutput -match "ExceptionCode:\s*([\w]+)") {
- Write-Host "Exception Code: $($Matches[1])"
- if ($Matches[1] -eq "c0000005") {
- Write-Host "Exception Type: ACCESS VIOLATION"
- }
- }
-
- if ($analysisOutput -match "EXCEPTION_RECORD:\s*(.+)") {
- Write-Host "Exception Record: $($Matches[1])"
- }
-
- if ($analysisOutput -match "FAULTING_IP:\s*\n(.+)") {
- Write-Host "Faulting Instruction: $($Matches[1])"
- }
-
- # Save full analysis
- $analysisFile = "$($dump.FullName).analysis.txt"
- $analysisOutput | Out-File -FilePath $analysisFile
- Write-Host "`nFull analysis saved to: $analysisFile"
-
- # Print stack trace section
- Write-Host "`n--- Stack Trace Preview ---"
- $stackSection = $analysisOutput -split "STACK_TEXT:" | Select-Object -Last 1
- $stackLines = $stackSection -split "`n" | Select-Object -First 20
- $stackLines | ForEach-Object { Write-Host $_ }
- Write-Host "--- End Stack Trace Preview ---"
- }
-
- Write-Host "`n⚠️ Crash dumps detected! Download the 'crash-dumps' artifact for detailed analysis."
-
- # Copy dumps to workspace for artifact upload
- $artifactPath = "$env:GITHUB_WORKSPACE\crash_dumps_collected"
- New-Item -ItemType Directory -Force -Path $artifactPath | Out-Null
-
- foreach ($dump in $dumps) {
- $destName = "$($dump.Directory.Name)_$($dump.Name)"
- Copy-Item $dump.FullName -Destination "$artifactPath\$destName"
- if (Test-Path "$($dump.FullName).analysis.txt") {
- Copy-Item "$($dump.FullName).analysis.txt" -Destination "$artifactPath\$destName.analysis.txt"
- }
- }
-
- Write-Host "Copied $($dumps.Count) dump(s) to artifact directory"
- } else {
- Write-Host "No crash dumps from this CI run found"
- }
-
- - name: Upload crash dumps
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: crash-dumps-${{ github.run_id }}-${{ github.run_attempt }}
- path: |
- crash_dumps_collected/*.dmp
- crash_dumps_collected/*.txt
- if-no-files-found: ignore
- retention-days: 7
-
- - name: Check test results
- shell: powershell
- working-directory: ${{ inputs.working-directory }}
- run: |
- # Re-check test results to fail the job if tests failed
- if ($LASTEXITCODE -ne 0) {
- Write-Host "Tests failed with exit code: $LASTEXITCODE"
- exit $LASTEXITCODE
- }
@@ -8,7 +8,7 @@ on:
jobs:
update-collab-staging-tag:
if: github.repository_owner == 'zed-industries'
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -37,7 +37,7 @@ jobs:
run_nix: ${{ steps.filter.outputs.run_nix }}
run_actionlint: ${{ steps.filter.outputs.run_actionlint }}
runs-on:
- - ubuntu-latest
+ - namespace-profile-2x4-ubuntu-2404
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -81,6 +81,7 @@ jobs:
echo "run_license=false" >> "$GITHUB_OUTPUT"
echo "$CHANGED_FILES" | grep -qP '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' && \
+ echo "$GITHUB_REF_NAME" | grep -qvP '^v[0-9]+\.[0-9]+\.[0-9x](-pre)?$' && \
echo "run_nix=true" >> "$GITHUB_OUTPUT" || \
echo "run_nix=false" >> "$GITHUB_OUTPUT"
@@ -237,7 +238,7 @@ jobs:
uses: ./.github/actions/build_docs
actionlint:
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true'
needs: [job_spec]
steps:
@@ -418,7 +419,7 @@ jobs:
if: |
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
- runs-on: [self-hosted, Windows, X64]
+ runs-on: [self-32vcpu-windows-2022]
steps:
- name: Environment Setup
run: |
@@ -458,7 +459,7 @@ jobs:
tests_pass:
name: Tests Pass
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
needs:
- job_spec
- style
@@ -784,7 +785,7 @@ jobs:
bundle-windows-x64:
timeout-minutes: 120
name: Create a Windows installer
- runs-on: [self-hosted, Windows, X64]
+ runs-on: [self-32vcpu-windows-2022]
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
# if: (startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling'))
needs: [windows_tests]
@@ -12,7 +12,7 @@ on:
jobs:
danger:
if: github.repository_owner == 'zed-industries'
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -59,7 +59,7 @@ jobs:
timeout-minutes: 60
name: Run tests on Windows
if: github.repository_owner == 'zed-industries'
- runs-on: [self-hosted, Windows, X64]
+ runs-on: [self-32vcpu-windows-2022]
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -206,9 +206,6 @@ jobs:
runs-on: github-8vcpu-ubuntu-2404
needs: tests
name: Build Zed on FreeBSD
- # env:
- # MYTOKEN : ${{ secrets.MYTOKEN }}
- # MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
@@ -243,7 +240,6 @@ jobs:
bundle-nix:
name: Build and cache Nix package
- if: false
needs: tests
secrets: inherit
uses: ./.github/workflows/nix.yml
@@ -252,7 +248,7 @@ jobs:
timeout-minutes: 60
name: Create a Windows installer
if: github.repository_owner == 'zed-industries'
- runs-on: [self-hosted, Windows, X64]
+ runs-on: [self-32vcpu-windows-2022]
needs: windows-tests
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
@@ -294,7 +290,7 @@ jobs:
update-nightly-tag:
name: Update nightly tag
if: github.repository_owner == 'zed-industries'
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
needs:
- bundle-mac
- bundle-linux-x86
@@ -12,7 +12,7 @@ jobs:
shellcheck:
name: "ShellCheck Scripts"
if: github.repository_owner == 'zed-industries'
- runs-on: ubuntu-latest
+ runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -12,6 +12,19 @@
- Example: avoid `let _ = client.request(...).await?;` - use `client.request(...).await?;` instead
* When implementing async operations that may fail, ensure errors propagate to the UI layer so users get meaningful feedback.
* Never create files with `mod.rs` paths - prefer `src/some_module.rs` instead of `src/some_module/mod.rs`.
+* When creating new crates, prefer specifying the library root path in `Cargo.toml` using `[lib] path = "...rs"` instead of the default `lib.rs`, to maintain consistent and descriptive naming (e.g., `gpui.rs` or `main.rs`).
+* Avoid creative additions unless explicitly requested
+* Use full words for variable names (no abbreviations like "q" for "queue")
+* Use variable shadowing to scope clones in async contexts for clarity, minimizing the lifetime of borrowed references.
+ Example:
+ ```rust
+ executor.spawn({
+ let task_ran = task_ran.clone();
+ async move {
+ *task_ran.borrow_mut() = true;
+ }
+ });
+ ```
# GPUI
@@ -27,6 +27,22 @@ By effectively engaging with the Zed team and community early in your process, w
We plan to set aside time each week to pair program with contributors on promising pull requests in Zed. This will be an experiment. We tend to prefer pairing over async code review on our team, and we'd like to see how well it works in an open source setting. If we're finding it difficult to get on the same page with async review, we may ask you to pair with us if you're open to it. The closer a contribution is to the goals outlined in our roadmap, the more likely we'll be to spend time pairing on it.
+## Mandatory PR contents
+
+Please ensure the PR contains
+
+- Before & after screenshots, if there are visual adjustments introduced.
+
+Examples of visual adjustments: tree-sitter query updates, UI changes, etc.
+
+- A disclosure of the AI assistance usage, if any was used.
+
+Any kind of AI assistance must be disclosed in the PR, along with the extent to which AI assistance was used (e.g. docs only vs. code generation).
+
+If the PR responses are being generated by an AI, disclose that as well.
+
+As a small exception, trivial tab-completion doesn't need to be disclosed, as long as it's limited to single keywords or short phrases.
+
## Tips to improve the chances of your PR getting reviewed and merged
- Discuss your plans ahead of time with the team
@@ -49,6 +65,8 @@ If you would like to add a new icon to the Zed icon theme, [open a Discussion](h
## Bird's-eye view of Zed
+We suggest you keep the [zed glossary](docs/src/development/glossary.md) at your side when starting out. It lists and explains some of the structures and terms you will see throughout the codebase.
+
Zed is made up of several smaller crates - let's go over those you're most likely to interact with:
- [`gpui`](/crates/gpui) is a GPU-accelerated UI framework which provides all of the building blocks for Zed. **We recommend familiarizing yourself with the root level GPUI documentation.**
@@ -7,8 +7,8 @@ name = "acp_thread"
version = "0.1.0"
dependencies = [
"action_log",
- "agent",
"agent-client-protocol",
+ "agent_settings",
"anyhow",
"buffer_diff",
"collections",
@@ -23,13 +23,15 @@ dependencies = [
"language_model",
"markdown",
"parking_lot",
+ "portable-pty",
"project",
"prompt_store",
- "rand 0.8.5",
+ "rand 0.9.1",
"serde",
"serde_json",
"settings",
"smol",
+ "task",
"tempfile",
"terminal",
"ui",
@@ -37,6 +39,27 @@ dependencies = [
"util",
"uuid",
"watch",
+ "which 6.0.3",
+ "workspace-hack",
+]
+
+[[package]]
+name = "acp_tools"
+version = "0.1.0"
+dependencies = [
+ "agent-client-protocol",
+ "collections",
+ "gpui",
+ "language",
+ "markdown",
+ "project",
+ "serde",
+ "serde_json",
+ "settings",
+ "theme",
+ "ui",
+ "util",
+ "workspace",
"workspace-hack",
]
@@ -56,7 +79,7 @@ dependencies = [
"log",
"pretty_assertions",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"serde_json",
"settings",
"text",
@@ -149,7 +172,7 @@ dependencies = [
"pretty_assertions",
"project",
"prompt_store",
- "rand 0.8.5",
+ "rand 0.9.1",
"ref-cast",
"rope",
"schemars",
@@ -167,16 +190,18 @@ dependencies = [
"uuid",
"workspace",
"workspace-hack",
+ "zed_env_vars",
"zstd",
]
[[package]]
name = "agent-client-protocol"
-version = "0.0.26"
+version = "0.2.0-alpha.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "160971bb53ca0b2e70ebc857c21e24eb448745f1396371015f4c59e9a9e51ed0"
+checksum = "6d02292efd75080932b6466471d428c70e2ac06908ae24792fc7c36ecbaf67ca"
dependencies = [
"anyhow",
+ "async-broadcast",
"futures 0.3.31",
"log",
"parking_lot",
@@ -206,6 +231,7 @@ dependencies = [
"collections",
"context_server",
"ctor",
+ "db",
"editor",
"env_logger 0.11.8",
"fs",
@@ -226,7 +252,6 @@ dependencies = [
"open",
"parking_lot",
"paths",
- "portable-pty",
"pretty_assertions",
"project",
"prompt_store",
@@ -239,10 +264,12 @@ dependencies = [
"smol",
"sqlez",
"task",
+ "telemetry",
"tempfile",
"terminal",
"text",
"theme",
+ "thiserror 2.0.12",
"tree-sitter-rust",
"ui",
"unindent",
@@ -250,9 +277,9 @@ dependencies = [
"uuid",
"watch",
"web_search",
- "which 6.0.3",
"workspace-hack",
"worktree",
+ "zed_env_vars",
"zlog",
"zstd",
]
@@ -262,40 +289,37 @@ name = "agent_servers"
version = "0.1.0"
dependencies = [
"acp_thread",
+ "acp_tools",
"action_log",
"agent-client-protocol",
"agent_settings",
- "agentic-coding-protocol",
"anyhow",
+ "client",
"collections",
- "context_server",
"env_logger 0.11.8",
+ "fs",
"futures 0.3.31",
"gpui",
+ "gpui_tokio",
"indoc",
- "itertools 0.14.0",
"language",
"language_model",
"language_models",
"libc",
"log",
"nix 0.29.0",
- "paths",
"project",
- "rand 0.8.5",
- "schemars",
+ "reqwest_client",
"serde",
"serde_json",
"settings",
"smol",
- "strum 0.27.1",
+ "task",
"tempfile",
"thiserror 2.0.12",
"ui",
"util",
- "uuid",
"watch",
- "which 6.0.3",
"workspace-hack",
]
@@ -375,11 +399,12 @@ dependencies = [
"parking_lot",
"paths",
"picker",
+ "postage",
"pretty_assertions",
"project",
"prompt_store",
"proto",
- "rand 0.8.5",
+ "rand 0.9.1",
"release_channel",
"rope",
"rules_library",
@@ -389,6 +414,7 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
+ "shlex",
"smol",
"streaming_diff",
"task",
@@ -414,24 +440,6 @@ dependencies = [
"zed_actions",
]
-[[package]]
-name = "agentic-coding-protocol"
-version = "0.0.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3e6ae951b36fa2f8d9dd6e1af6da2fcaba13d7c866cf6a9e65deda9dc6c5fe4"
-dependencies = [
- "anyhow",
- "chrono",
- "derive_more 2.0.1",
- "futures 0.3.31",
- "log",
- "parking_lot",
- "schemars",
- "semver",
- "serde",
- "serde_json",
-]
-
[[package]]
name = "ahash"
version = "0.7.8"
@@ -498,7 +506,7 @@ dependencies = [
"parking_lot",
"piper",
"polling",
- "regex-automata 0.4.9",
+ "regex-automata",
"rustix-openpty",
"serde",
"signal-hook",
@@ -822,7 +830,7 @@ dependencies = [
"project",
"prompt_store",
"proto",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"rpc",
"serde",
@@ -838,6 +846,7 @@ dependencies = [
"uuid",
"workspace",
"workspace-hack",
+ "zed_env_vars",
]
[[package]]
@@ -847,7 +856,7 @@ dependencies = [
"anyhow",
"async-trait",
"collections",
- "derive_more 0.99.19",
+ "derive_more",
"extension",
"futures 0.3.31",
"gpui",
@@ -910,7 +919,7 @@ dependencies = [
"clock",
"collections",
"ctor",
- "derive_more 0.99.19",
+ "derive_more",
"gpui",
"icons",
"indoc",
@@ -920,7 +929,7 @@ dependencies = [
"parking_lot",
"pretty_assertions",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"serde",
"serde_json",
@@ -947,7 +956,7 @@ dependencies = [
"cloud_llm_client",
"collections",
"component",
- "derive_more 0.99.19",
+ "derive_more",
"diffy",
"editor",
"feature_flags",
@@ -972,7 +981,7 @@ dependencies = [
"pretty_assertions",
"project",
"prompt_store",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"reqwest_client",
"rust-embed",
@@ -1373,10 +1382,11 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "derive_more 0.99.19",
"gpui",
- "parking_lot",
"rodio",
+ "schemars",
+ "serde",
+ "settings",
"util",
"workspace-hack",
]
@@ -2277,7 +2287,7 @@ dependencies = [
[[package]]
name = "blade-graphics"
version = "0.6.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
dependencies = [
"ash",
"ash-window",
@@ -2310,7 +2320,7 @@ dependencies = [
[[package]]
name = "blade-macros"
version = "0.3.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
dependencies = [
"proc-macro2",
"quote",
@@ -2320,7 +2330,7 @@ dependencies = [
[[package]]
name = "blade-util"
version = "0.2.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9"
dependencies = [
"blade-graphics",
"bytemuck",
@@ -2337,19 +2347,6 @@ dependencies = [
"digest",
]
-[[package]]
-name = "blake3"
-version = "1.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0"
-dependencies = [
- "arrayref",
- "arrayvec",
- "cc",
- "cfg-if",
- "constant_time_eq 0.3.1",
-]
-
[[package]]
name = "block"
version = "0.1.6"
@@ -2447,7 +2444,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
dependencies = [
"memchr",
- "regex-automata 0.4.9",
+ "regex-automata",
"serde",
]
@@ -2464,7 +2461,7 @@ dependencies = [
"language",
"log",
"pretty_assertions",
- "rand 0.8.5",
+ "rand 0.9.1",
"rope",
"serde_json",
"sum_tree",
@@ -2885,7 +2882,7 @@ dependencies = [
"language",
"log",
"postage",
- "rand 0.8.5",
+ "rand 0.9.1",
"release_channel",
"rpc",
"settings",
@@ -3056,10 +3053,9 @@ dependencies = [
"clock",
"cloud_api_client",
"cloud_llm_client",
- "cocoa 0.26.0",
"collections",
"credentials_provider",
- "derive_more 0.99.19",
+ "derive_more",
"feature_flags",
"fs",
"futures 0.3.31",
@@ -3069,10 +3065,11 @@ dependencies = [
"http_client_tls",
"httparse",
"log",
+ "objc2-foundation",
"parking_lot",
"paths",
"postage",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"release_channel",
"rpc",
@@ -3321,7 +3318,7 @@ dependencies = [
"prometheus",
"prompt_store",
"prost 0.9.0",
- "rand 0.8.5",
+ "rand 0.9.1",
"recent_projects",
"release_channel",
"remote",
@@ -3491,7 +3488,7 @@ name = "command_palette_hooks"
version = "0.1.0"
dependencies = [
"collections",
- "derive_more 0.99.19",
+ "derive_more",
"gpui",
"workspace-hack",
]
@@ -3501,6 +3498,7 @@ name = "component"
version = "0.1.0"
dependencies = [
"collections",
+ "documented",
"gpui",
"inventory",
"parking_lot",
@@ -3563,12 +3561,6 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
-[[package]]
-name = "constant_time_eq"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
-
[[package]]
name = "context_server"
version = "0.1.0"
@@ -3872,7 +3864,7 @@ dependencies = [
"jni",
"js-sys",
"libc",
- "mach2",
+ "mach2 0.4.2",
"ndk",
"ndk-context",
"num-derive",
@@ -4022,7 +4014,7 @@ checksum = "031ed29858d90cfdf27fe49fae28028a1f20466db97962fa2f4ea34809aeebf3"
dependencies = [
"cfg-if",
"libc",
- "mach2",
+ "mach2 0.4.2",
]
[[package]]
@@ -4034,7 +4026,7 @@ dependencies = [
"cfg-if",
"crash-context",
"libc",
- "mach2",
+ "mach2 0.4.2",
"parking_lot",
]
@@ -4042,14 +4034,17 @@ dependencies = [
name = "crashes"
version = "0.1.0"
dependencies = [
+ "bincode",
"crash-handler",
"log",
+ "mach2 0.5.0",
"minidumper",
"paths",
"release_channel",
"serde",
"serde_json",
"smol",
+ "system_specs",
"workspace-hack",
]
@@ -4475,6 +4470,7 @@ dependencies = [
"tempfile",
"util",
"workspace-hack",
+ "zed_env_vars",
]
[[package]]
@@ -4651,27 +4647,6 @@ dependencies = [
"syn 2.0.101",
]
-[[package]]
-name = "derive_more"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
-dependencies = [
- "derive_more-impl",
-]
-
-[[package]]
-name = "derive_more-impl"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.101",
- "unicode-xid",
-]
-
[[package]]
name = "derive_refineable"
version = "0.1.0"
@@ -4692,7 +4667,6 @@ dependencies = [
"component",
"ctor",
"editor",
- "futures 0.3.31",
"gpui",
"indoc",
"language",
@@ -4701,7 +4675,7 @@ dependencies = [
"markdown",
"pretty_assertions",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"serde",
"serde_json",
"settings",
@@ -4741,7 +4715,7 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b545b8c50194bdd008283985ab0b31dba153cfd5b3066a92770634fbc0d7d291"
dependencies = [
- "nu-ansi-term 0.50.1",
+ "nu-ansi-term",
]
[[package]]
@@ -5073,7 +5047,7 @@ dependencies = [
"postage",
"pretty_assertions",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"release_channel",
"rpc",
@@ -5568,7 +5542,7 @@ dependencies = [
"parking_lot",
"paths",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"release_channel",
"remote",
"reqwest_client",
@@ -5641,8 +5615,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
dependencies = [
"bit-set 0.5.3",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
]
[[package]]
@@ -5652,8 +5626,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set 0.8.0",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
]
[[package]]
@@ -5730,14 +5704,10 @@ dependencies = [
name = "feedback"
version = "0.1.0"
dependencies = [
- "client",
"editor",
"gpui",
- "human_bytes",
"menu",
- "release_channel",
- "serde",
- "sysinfo",
+ "system_specs",
"ui",
"urlencoding",
"util",
@@ -6172,17 +6142,6 @@ dependencies = [
"futures-util",
]
-[[package]]
-name = "futures-batch"
-version = "0.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f444c45a1cb86f2a7e301469fd50a82084a60dadc25d94529a8312276ecb71a"
-dependencies = [
- "futures 0.3.31",
- "futures-timer",
- "pin-utils",
-]
-
[[package]]
name = "futures-channel"
version = "0.3.31"
@@ -6278,12 +6237,6 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
-[[package]]
-name = "futures-timer"
-version = "3.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
-
[[package]]
name = "futures-util"
version = "0.3.31"
@@ -6413,7 +6366,7 @@ dependencies = [
"askpass",
"async-trait",
"collections",
- "derive_more 0.99.19",
+ "derive_more",
"futures 0.3.31",
"git2",
"gpui",
@@ -6421,7 +6374,7 @@ dependencies = [
"log",
"parking_lot",
"pretty_assertions",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"rope",
"schemars",
@@ -7307,8 +7260,8 @@ dependencies = [
"aho-corasick",
"bstr",
"log",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
]
[[package]]
@@ -7443,7 +7396,7 @@ dependencies = [
"core-video",
"cosmic-text",
"ctor",
- "derive_more 0.99.19",
+ "derive_more",
"embed-resource",
"env_logger 0.11.8",
"etagere",
@@ -7474,7 +7427,7 @@ dependencies = [
"pathfinder_geometry",
"postage",
"profiling",
- "rand 0.8.5",
+ "rand 0.9.1",
"raw-window-handle",
"refineable",
"reqwest_client",
@@ -7531,6 +7484,7 @@ dependencies = [
name = "gpui_tokio"
version = "0.1.0"
dependencies = [
+ "anyhow",
"gpui",
"tokio",
"util",
@@ -7967,7 +7921,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bytes 1.10.1",
- "derive_more 0.99.19",
+ "derive_more",
"futures 0.3.31",
"http 1.3.1",
"http-body 1.0.1",
@@ -8312,7 +8266,7 @@ dependencies = [
"globset",
"log",
"memchr",
- "regex-automata 0.4.9",
+ "regex-automata",
"same-file",
"walkdir",
"winapi-util",
@@ -8479,6 +8433,7 @@ dependencies = [
"theme",
"ui",
"util",
+ "util_macros",
"workspace",
"workspace-hack",
"zed_actions",
@@ -8910,7 +8865,7 @@ dependencies = [
"percent-encoding",
"referencing",
"regex",
- "regex-syntax 0.8.5",
+ "regex-syntax",
"reqwest 0.12.15 (registry+https://github.com/rust-lang/crates.io-index)",
"serde",
"serde_json",
@@ -8963,6 +8918,44 @@ dependencies = [
"uuid",
]
+[[package]]
+name = "keymap_editor"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "collections",
+ "command_palette",
+ "component",
+ "db",
+ "editor",
+ "fs",
+ "fuzzy",
+ "gpui",
+ "itertools 0.14.0",
+ "language",
+ "log",
+ "menu",
+ "notifications",
+ "paths",
+ "project",
+ "search",
+ "serde",
+ "serde_json",
+ "settings",
+ "telemetry",
+ "tempfile",
+ "theme",
+ "tree-sitter-json",
+ "tree-sitter-rust",
+ "ui",
+ "ui_input",
+ "util",
+ "vim",
+ "workspace",
+ "workspace-hack",
+ "zed_actions",
+]
+
[[package]]
name = "khronos-egl"
version = "6.0.0"
@@ -9047,7 +9040,7 @@ dependencies = [
"parking_lot",
"postage",
"pretty_assertions",
- "rand 0.8.5",
+ "rand 0.9.1",
"regex",
"rpc",
"schemars",
@@ -9120,6 +9113,7 @@ dependencies = [
"icons",
"image",
"log",
+ "open_router",
"parking_lot",
"proto",
"schemars",
@@ -9188,6 +9182,19 @@ dependencies = [
"x_ai",
]
+[[package]]
+name = "language_onboarding"
+version = "0.1.0"
+dependencies = [
+ "db",
+ "editor",
+ "gpui",
+ "project",
+ "ui",
+ "workspace",
+ "workspace-hack",
+]
+
[[package]]
name = "language_selector"
version = "0.1.0"
@@ -9215,6 +9222,7 @@ dependencies = [
"anyhow",
"client",
"collections",
+ "command_palette_hooks",
"copilot",
"editor",
"futures 0.3.31",
@@ -9223,6 +9231,7 @@ dependencies = [
"language",
"lsp",
"project",
+ "proto",
"release_channel",
"serde_json",
"settings",
@@ -9248,7 +9257,6 @@ dependencies = [
"chrono",
"collections",
"dap",
- "feature_flags",
"futures 0.3.31",
"gpui",
"http_client",
@@ -9484,6 +9492,21 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "line_ending_selector"
+version = "0.1.0"
+dependencies = [
+ "editor",
+ "gpui",
+ "language",
+ "picker",
+ "project",
+ "ui",
+ "util",
+ "workspace",
+ "workspace-hack",
+]
+
[[package]]
name = "link-cplusplus"
version = "1.0.10"
@@ -9615,6 +9638,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
+ "audio",
"collections",
"core-foundation 0.10.0",
"core-video",
@@ -9637,6 +9661,7 @@ dependencies = [
"scap",
"serde",
"serde_json",
+ "settings",
"sha2",
"simplelog",
"smallvec",
@@ -9709,7 +9734,7 @@ dependencies = [
"lazy_static",
"proc-macro2",
"quote",
- "regex-syntax 0.8.5",
+ "regex-syntax",
"rustc_version",
"syn 2.0.101",
]
@@ -9781,7 +9806,7 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=39f629bdd03d59abd786ed9fc27e8bca02c0c0ec#39f629bdd03d59abd786ed9fc27e8bca02c0c0ec"
+source = "git+https://github.com/zed-industries/lsp-types?rev=0874f8742fe55b4dc94308c1e3c0069710d8eeaf#0874f8742fe55b4dc94308c1e3c0069710d8eeaf"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -9867,6 +9892,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "mach2"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "malloc_buf"
version = "0.0.6"
@@ -9915,9 +9949,11 @@ dependencies = [
"editor",
"fs",
"gpui",
+ "html5ever 0.27.0",
"language",
"linkify",
"log",
+ "markup5ever_rcdom",
"pretty_assertions",
"pulldown-cmark 0.12.2",
"settings",
@@ -9978,11 +10014,11 @@ dependencies = [
[[package]]
name = "matchers"
-version = "0.1.0"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
dependencies = [
- "regex-automata 0.1.10",
+ "regex-automata",
]
[[package]]
@@ -10203,7 +10239,7 @@ dependencies = [
"goblin",
"libc",
"log",
- "mach2",
+ "mach2 0.4.2",
"memmap2",
"memoffset",
"minidump-common",
@@ -10346,7 +10382,7 @@ dependencies = [
"parking_lot",
"pretty_assertions",
"project",
- "rand 0.8.5",
+ "rand 0.9.1",
"rope",
"serde",
"settings",
@@ -10683,16 +10719,6 @@ dependencies = [
"winapi",
]
-[[package]]
-name = "nu-ansi-term"
-version = "0.46.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
-dependencies = [
- "overload",
- "winapi",
-]
-
[[package]]
name = "nu-ansi-term"
version = "0.50.1"
@@ -11191,6 +11217,8 @@ dependencies = [
"schemars",
"serde",
"serde_json",
+ "strum 0.27.1",
+ "thiserror 2.0.12",
"workspace-hack",
]
@@ -11386,12 +11414,6 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
-[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
[[package]]
name = "p256"
version = "0.11.1"
@@ -11614,6 +11636,12 @@ dependencies = [
"hmac",
]
+[[package]]
+name = "pciid-parser"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0008e816fcdaf229cdd540e9b6ca2dc4a10d65c31624abb546c6420a02846e61"
+
[[package]]
name = "pem"
version = "3.0.5"
@@ -1,6 +1,7 @@
[workspace]
resolver = "2"
members = [
+ "crates/acp_tools",
"crates/acp_thread",
"crates/action_log",
"crates/activity_indicator",
@@ -53,6 +54,8 @@ members = [
"crates/deepseek",
"crates/diagnostics",
"crates/docs_preprocessor",
+ "crates/edit_prediction",
+ "crates/edit_prediction_button",
"crates/editor",
"crates/eval",
"crates/explorer_command_injector",
@@ -81,20 +84,21 @@ members = [
"crates/http_client_tls",
"crates/icons",
"crates/image_viewer",
- "crates/edit_prediction",
- "crates/edit_prediction_button",
"crates/inspector_ui",
"crates/install_cli",
"crates/jj",
"crates/jj_ui",
"crates/journal",
+ "crates/keymap_editor",
"crates/language",
"crates/language_extension",
"crates/language_model",
"crates/language_models",
+ "crates/language_onboarding",
"crates/language_selector",
"crates/language_tools",
"crates/languages",
+ "crates/line_ending_selector",
"crates/livekit_api",
"crates/livekit_client",
"crates/lmstudio",
@@ -129,6 +133,7 @@ members = [
"crates/refineable",
"crates/refineable/derive_refineable",
"crates/release_channel",
+ "crates/scheduler",
"crates/remote",
"crates/remote_server",
"crates/repl",
@@ -139,12 +144,12 @@ members = [
"crates/rules_library",
"crates/schema_generator",
"crates/search",
- "crates/semantic_index",
"crates/semantic_version",
"crates/session",
"crates/settings",
"crates/settings_profile_selector",
"crates/settings_ui",
+ "crates/settings_ui_macros",
"crates/snippet",
"crates/snippet_provider",
"crates/snippets_ui",
@@ -157,6 +162,7 @@ members = [
"crates/supermaven",
"crates/supermaven_api",
"crates/svg_preview",
+ "crates/system_specs",
"crates/tab_switcher",
"crates/task",
"crates/tasks_ui",
@@ -189,6 +195,7 @@ members = [
"crates/x_ai",
"crates/zed",
"crates/zed_actions",
+ "crates/zed_env_vars",
"crates/zeta",
"crates/zeta_cli",
"crates/zlog",
@@ -205,7 +212,6 @@ members = [
"extensions/slash-commands-example",
"extensions/snippets",
"extensions/test-extension",
- "extensions/toml",
#
# Tooling
@@ -226,6 +232,7 @@ edition = "2024"
# Workspace member crates
#
+acp_tools = { path = "crates/acp_tools" }
acp_thread = { path = "crates/acp_thread" }
action_log = { path = "crates/action_log" }
agent = { path = "crates/agent" }
@@ -294,9 +301,7 @@ git_hosting_providers = { path = "crates/git_hosting_providers" }
git_ui = { path = "crates/git_ui" }
go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
-gpui = { path = "crates/gpui", default-features = false, features = [
- "http_client",
-] }
+gpui = { path = "crates/gpui", default-features = false }
gpui_macros = { path = "crates/gpui_macros" }
gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" }
@@ -311,13 +316,16 @@ install_cli = { path = "crates/install_cli" }
jj = { path = "crates/jj" }
jj_ui = { path = "crates/jj_ui" }
journal = { path = "crates/journal" }
+keymap_editor = { path = "crates/keymap_editor" }
language = { path = "crates/language" }
language_extension = { path = "crates/language_extension" }
language_model = { path = "crates/language_model" }
language_models = { path = "crates/language_models" }
+language_onboarding = { path = "crates/language_onboarding" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
languages = { path = "crates/languages" }
+line_ending_selector = { path = "crates/line_ending_selector" }
livekit_api = { path = "crates/livekit_api" }
livekit_client = { path = "crates/livekit_client" }
lmstudio = { path = "crates/lmstudio" }
@@ -355,6 +363,7 @@ proto = { path = "crates/proto" }
recent_projects = { path = "crates/recent_projects" }
refineable = { path = "crates/refineable" }
release_channel = { path = "crates/release_channel" }
+scheduler = { path = "crates/scheduler" }
remote = { path = "crates/remote" }
remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" }
@@ -365,11 +374,11 @@ rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
rules_library = { path = "crates/rules_library" }
search = { path = "crates/search" }
-semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" }
session = { path = "crates/session" }
settings = { path = "crates/settings" }
settings_ui = { path = "crates/settings_ui" }
+settings_ui_macros = { path = "crates/settings_ui_macros" }
snippet = { path = "crates/snippet" }
snippet_provider = { path = "crates/snippet_provider" }
snippets_ui = { path = "crates/snippets_ui" }
@@ -381,6 +390,7 @@ streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" }
supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" }
+system_specs = { path = "crates/system_specs" }
tab_switcher = { path = "crates/tab_switcher" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
@@ -414,6 +424,7 @@ worktree = { path = "crates/worktree" }
x_ai = { path = "crates/x_ai" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
+zed_env_vars = { path = "crates/zed_env_vars" }
zeta = { path = "crates/zeta" }
zlog = { path = "crates/zlog" }
zlog_settings = { path = "crates/zlog_settings" }
@@ -422,8 +433,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates
#
-agentic-coding-protocol = "0.0.10"
-agent-client-protocol = "0.0.26"
+agent-client-protocol = { version = "0.2.0-alpha.6", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
any_vec = "0.14"
@@ -437,6 +447,7 @@ async-fs = "2.1"
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
async-recursion = "1.0.0"
async-tar = "0.5.0"
+async-task = "4.7"
async-trait = "0.1"
async-tungstenite = "0.29.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
@@ -450,10 +461,11 @@ aws-sdk-bedrockruntime = { version = "1.80.0", features = [
aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
+bincode = "1.2.1"
bitflags = "2.6.0"
-blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
-blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
-blade-util = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
+blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
+blade-macros = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
+blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -493,6 +505,7 @@ handlebars = "4.3"
heck = "0.5"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
+human_bytes = "0.4.1"
html5ever = "0.27.0"
http = "1.1"
http-body = "1.0"
@@ -514,7 +527,8 @@ libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "0874f8742fe55b4dc94308c1e3c0069710d8eeaf" }
+mach2 = "0.5"
markup5ever_rcdom = "0.3.0"
metal = "0.29"
minidumper = "0.8"
@@ -525,12 +539,39 @@ nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c80421
nix = "0.29"
num-format = "0.4.4"
objc = "0.2"
+objc2-foundation = { version = "0.3", default-features = false, features = [
+ "NSArray",
+ "NSAttributedString",
+ "NSBundle",
+ "NSCoder",
+ "NSData",
+ "NSDate",
+ "NSDictionary",
+ "NSEnumerator",
+ "NSError",
+ "NSGeometry",
+ "NSNotification",
+ "NSNull",
+ "NSObjCRuntime",
+ "NSObject",
+ "NSProcessInfo",
+ "NSRange",
+ "NSRunLoop",
+ "NSString",
+ "NSURL",
+ "NSUndoManager",
+ "NSValue",
+ "objc2-core-foundation",
+ "std",
+] }
open = "5.0.0"
ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
+parking = "2.0"
parking_lot = "0.12.1"
partial-json-fixer = "0.5.3"
parse_int = "0.9"
+pciid-parser = "0.8.0"
pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
@@ -549,7 +590,7 @@ prost-build = "0.9"
prost-types = "0.9"
pulldown-cmark = { version = "0.12.0", default-features = false }
quote = "1.0.9"
-rand = "0.8.5"
+rand = "0.9"
rayon = "1.8"
ref-cast = "1.0.24"
regex = "1.5"
@@ -581,6 +622,7 @@ serde_json_lenient = { version = "0.2", features = [
"preserve_order",
"raw_value",
] }
+serde_path_to_error = "0.1.17"
serde_repr = "0.1"
serde_urlencoded = "0.7"
sha2 = "0.10"
@@ -617,7 +659,7 @@ tower-http = "0.4.4"
tree-sitter = { version = "0.25.6", features = ["wasm"] }
tree-sitter-bash = "0.25.0"
tree-sitter-c = "0.23"
-tree-sitter-cpp = "0.23"
+tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
tree-sitter-css = "0.23"
tree-sitter-diff = "0.1.0"
tree-sitter-elixir = "0.3"
@@ -684,6 +726,7 @@ features = [
"Win32_Graphics_Dxgi_Common",
"Win32_Graphics_Gdi",
"Win32_Graphics_Imaging",
+ "Win32_Graphics_Hlsl",
"Win32_Networking_WinSock",
"Win32_Security",
"Win32_Security_Credentials",
@@ -801,54 +844,32 @@ unexpected_cfgs = { level = "allow" }
dbg_macro = "deny"
todo = "deny"
-# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so
-# warning on this rule produces a lot of noise.
-single_range_in_vec_init = "allow"
+# This is not a style lint, see https://github.com/rust-lang/rust-clippy/pull/15454
+# Remove when the lint gets promoted to `suspicious`.
+declare_interior_mutable_const = "deny"
-# These are all of the rules that currently have violations in the Zed
-# codebase.
+redundant_clone = "deny"
+
+# We currently do not restrict any style rules
+# as it slows down shipping code to Zed.
#
-# We'll want to drive this list down by either:
-# 1. fixing violations of the rule and begin enforcing it
-# 2. deciding we want to allow the rule permanently, at which point
-# we should codify that separately above.
+# Running ./script/clippy can take several minutes, and so it's
+# common to skip that step and let CI do it. Any unexpected failures
+# (which also take minutes to discover) thus require switching back
+# to an old branch, manual fixing, and re-pushing.
#
-# This list shouldn't be added to; it should only get shorter.
-# =============================================================================
-
-# There are a bunch of rules currently failing in the `style` group, so
-# allow all of those, for now.
+# In the future we could improve this by either making sure
+# Zed can surface clippy errors in diagnostics (in addition to the
+# rust-analyzer errors), or by having CI fix style nits automatically.
style = { level = "allow", priority = -1 }
-# Temporary list of style lints that we've fixed so far.
-comparison_to_empty = "warn"
-iter_cloned_collect = "warn"
-iter_next_slice = "warn"
-iter_nth = "warn"
-iter_nth_zero = "warn"
-iter_skip_next = "warn"
-module_inception = { level = "deny" }
-question_mark = { level = "deny" }
-redundant_closure = { level = "deny" }
-declare_interior_mutable_const = { level = "deny" }
-collapsible_if = { level = "warn"}
-collapsible_else_if = { level = "warn" }
-needless_borrow = { level = "warn"}
-needless_return = { level = "warn" }
-unnecessary_mut_passed = {level = "warn"}
-unnecessary_map_or = { level = "warn" }
-unused_unit = "warn"
-
# Individual rules that have violations in the codebase:
type_complexity = "allow"
-# We often return trait objects from `new` functions.
-new_ret_no_self = { level = "allow" }
-# We have a few `next` functions that differ in lifetimes
-# compared to Iterator::next. Yet, clippy complains about those.
-should_implement_trait = { level = "allow" }
let_underscore_future = "allow"
-# It doesn't make sense to implement `Default` unilaterally.
-new_without_default = "allow"
+
+# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so
+# warning on this rule produces a lot of noise.
+single_range_in_vec_init = "allow"
# in Rust it can be very tedious to reduce argument count without
# running afoul of the borrow checker.
@@ -857,6 +878,9 @@ too_many_arguments = "allow"
# We often have large enum variants yet we rarely actually bother with splitting them up.
large_enum_variant = "allow"
+# Boolean expressions can be hard to read; requiring only the minimal form gets in the way
+nonminimal_bool = "allow"
+
[workspace.metadata.cargo-machete]
ignored = [
"bindgen",
@@ -0,0 +1,2 @@
+postgrest_llm: postgrest crates/collab/postgrest_llm.conf
+website: cd ../zed.dev; npm run dev -- --port=3000
@@ -0,0 +1,3 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M7.37288 4.48506L7.43539 10.6638C7.43539 10.9365 7.54373 11.1981 7.73655 11.3909C7.92938 11.5837 8.19092 11.6921 8.46362 11.6921C8.73632 11.6921 8.99785 11.5837 9.19068 11.3909C9.38351 11.1981 9.49184 10.9366 9.49184 10.6638L9.42933 4.48506C9.42933 3.93975 9.2127 3.41678 8.82711 3.03119C8.44152 2.6456 7.91855 2.42898 7.37324 2.42898C6.82794 2.42898 6.30496 2.6456 5.91937 3.03119C5.53378 3.41678 5.31716 3.93975 5.31716 4.48506L5.37968 10.6384C5.37636 11.0455 5.45368 11.4492 5.60718 11.8263C5.76067 12.2034 5.98731 12.5463 6.27401 12.8354C6.56071 13.1244 6.9018 13.3538 7.27761 13.5104C7.65341 13.667 8.0565 13.7476 8.46362 13.7476C8.87073 13.7476 9.27382 13.667 9.64963 13.5104C10.0254 13.3538 10.3665 13.1244 10.6532 12.8354C10.9399 12.5463 11.1666 12.2034 11.3201 11.8263C11.4736 11.4492 11.5509 11.0455 11.5476 10.6384L11.485 4.48506" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -1 +1,4 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M12.286 6H7.048C6.469 6 6 6.469 6 7.048v5.238c0 .578.469 1.047 1.048 1.047h5.238c.578 0 1.047-.469 1.047-1.047V7.048c0-.579-.469-1.048-1.047-1.048Z"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M3.714 10a1.05 1.05 0 0 1-1.047-1.048V3.714a1.05 1.05 0 0 1 1.047-1.047h5.238A1.05 1.05 0 0 1 10 3.714"/></svg>
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M12.486 6.2H7.24795C6.66895 6.2 6.19995 6.669 6.19995 7.248V12.486C6.19995 13.064 6.66895 13.533 7.24795 13.533H12.486C13.064 13.533 13.533 13.064 13.533 12.486V7.248C13.533 6.669 13.064 6.2 12.486 6.2Z" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M3.91712 10.203C3.63951 10.2022 3.37351 10.0915 3.1773 9.89511C2.98109 9.69872 2.87064 9.43261 2.87012 9.155V3.917C2.87091 3.63956 2.98147 3.37371 3.17765 3.17753C3.37383 2.98135 3.63968 2.87079 3.91712 2.87H9.15512C9.43273 2.87053 9.69883 2.98097 9.89523 3.17718C10.0916 3.37339 10.2023 3.63939 10.2031 3.917" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-list-filter-icon lucide-list-filter"><path d="M3 6h18"/><path d="M7 12h10"/><path d="M10 18h4"/></svg>
@@ -1 +1,3 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2.667 8h8M2.667 4h10.666M2.667 12H8"/></svg>
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M2.66699 8H10.667M2.66699 4H13.333M2.66699 12H7.99999" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1,3 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M13.333 10H8M13.333 6H2.66701" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1,6 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8.5 5.50621L10.5941 3.41227C10.8585 3.14798 11.217 2.99953 11.5908 2.99957C11.9646 2.99962 12.3231 3.14816 12.5874 3.41252C12.8517 3.67688 13.0001 4.03541 13.0001 4.40922C13.0001 4.78304 12.8515 5.14152 12.5872 5.40582L10.493 7.5" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M5.50789 8.5L3.92098 10.0869C3.80488 10.2027 3.71903 10.3452 3.67097 10.5019L3.01047 12.678C2.99754 12.7212 2.99657 12.7672 3.00764 12.8109C3.01872 12.8547 3.04143 12.8946 3.07337 12.9265C3.1053 12.9584 3.14528 12.981 3.18905 12.992C3.23282 13.003 3.27875 13.002 3.32197 12.989L5.49849 12.329C5.65508 12.2813 5.79758 12.196 5.91349 12.0805L7.49184 10.5019" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M9 5L11 7" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M3 3L13 13" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 12.375H13" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M3 11.125L6.75003 7.375L3 3.62497" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -1,3 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M9.95231 10.2159C10.0803 9.58974 9.95231 9.57261 10.9111 8.46959C11.4686 7.82822 11.8699 7.09214 11.8699 6.27818C11.8699 5.28184 11.4658 4.32631 10.7467 3.62179C10.0275 2.91728 9.05201 2.52148 8.03492 2.52148C7.01782 2.52148 6.04239 2.91728 5.32319 3.62179C4.604 4.32631 4.19995 5.28184 4.19995 6.27818C4.19995 6.9043 4.32779 7.65565 5.1587 8.46959C6.11744 9.59098 5.98965 9.58974 6.11748 10.2159M9.95231 10.2159V12.2989C9.95231 12.9504 9.41327 13.4786 8.7482 13.4786H7.32165C6.65658 13.4786 6.11744 12.9504 6.11744 12.2989L6.11748 10.2159M9.95231 10.2159H8.03492H6.11748" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M9.9526 10.2625C10.0833 9.62316 9.9526 9.60566 10.9315 8.47946C11.5008 7.82461 11.9105 7.07306 11.9105 6.242C11.9105 5.22472 11.4979 4.2491 10.7637 3.52978C10.0294 2.81046 9.03338 2.40634 7.99491 2.40634C6.95644 2.40634 5.96051 2.81046 5.22619 3.52978C4.49189 4.2491 4.07935 5.22472 4.07935 6.242C4.07935 6.88128 4.20987 7.64842 5.05825 8.47946C6.03714 9.62442 5.90666 9.62316 6.03718 10.2625M9.9526 10.2625V12.3893C9.9526 13.0544 9.40223 13.5937 8.72319 13.5937H7.26665C6.58761 13.5937 6.03714 13.0544 6.03714 12.3893L6.03718 10.2625M9.9526 10.2625H7.99491H6.03718" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
@@ -0,0 +1,3 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8 2C11.3137 2 14 4.68629 14 8C14 11.3137 11.3137 14 8 14C4.68629 14 2 11.3137 2 8C2 4.68629 4.68629 2 8 2ZM10.4238 5.57617C10.1895 5.34187 9.81049 5.3419 9.57617 5.57617L8 7.15234L6.42383 5.57617C6.18953 5.34187 5.81049 5.3419 5.57617 5.57617C5.34186 5.81049 5.34186 6.18951 5.57617 6.42383L7.15234 8L5.57617 9.57617C5.34186 9.81049 5.34186 10.1895 5.57617 10.4238C5.81049 10.6581 6.18954 10.6581 6.42383 10.4238L8 8.84766L9.57617 10.4238C9.81049 10.6581 10.1895 10.6581 10.4238 10.4238C10.6581 10.1895 10.658 9.81048 10.4238 9.57617L8.84766 8L10.4238 6.42383C10.6581 6.18954 10.658 5.81048 10.4238 5.57617Z" fill="black"/>
+</svg>
@@ -0,0 +1,27 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M11 8.75V10.5C8.93097 10.5 8.06903 10.5 6 10.5V10L11 6V5.5H6V7.25" stroke="black" stroke-width="1.5"/>
+<path d="M2 8.5C2.27614 8.5 2.5 8.27614 2.5 8C2.5 7.72386 2.27614 7.5 2 7.5C1.72386 7.5 1.5 7.72386 1.5 8C1.5 8.27614 1.72386 8.5 2 8.5Z" fill="black"/>
+<path opacity="0.6" d="M2.99976 6.33002C3.2759 6.33002 3.49976 6.10616 3.49976 5.83002C3.49976 5.55387 3.2759 5.33002 2.99976 5.33002C2.72361 5.33002 2.49976 5.55387 2.49976 5.83002C2.49976 6.10616 2.72361 6.33002 2.99976 6.33002Z" fill="black"/>
+<path opacity="0.6" d="M2.99976 10.66C3.2759 10.66 3.49976 10.4361 3.49976 10.16C3.49976 9.88383 3.2759 9.65997 2.99976 9.65997C2.72361 9.65997 2.49976 9.88383 2.49976 10.16C2.49976 10.4361 2.72361 10.66 2.99976 10.66Z" fill="black"/>
+<path d="M15 8.5C15.2761 8.5 15.5 8.27614 15.5 8C15.5 7.72386 15.2761 7.5 15 7.5C14.7239 7.5 14.5 7.72386 14.5 8C14.5 8.27614 14.7239 8.5 15 8.5Z" fill="black"/>
+<path opacity="0.6" d="M14 6.33002C14.2761 6.33002 14.5 6.10616 14.5 5.83002C14.5 5.55387 14.2761 5.33002 14 5.33002C13.7239 5.33002 13.5 5.55387 13.5 5.83002C13.5 6.10616 13.7239 6.33002 14 6.33002Z" fill="black"/>
+<path opacity="0.6" d="M14 10.66C14.2761 10.66 14.5 10.4361 14.5 10.16C14.5 9.88383 14.2761 9.65997 14 9.65997C13.7239 9.65997 13.5 9.88383 13.5 10.16C13.5 10.4361 13.7239 10.66 14 10.66Z" fill="black"/>
+<path d="M8.49219 2C8.76833 2 8.99219 1.77614 8.99219 1.5C8.99219 1.22386 8.76833 1 8.49219 1C8.21605 1 7.99219 1.22386 7.99219 1.5C7.99219 1.77614 8.21605 2 8.49219 2Z" fill="black"/>
+<path opacity="0.6" d="M6 3C6.27614 3 6.5 2.77614 6.5 2.5C6.5 2.22386 6.27614 2 6 2C5.72386 2 5.5 2.22386 5.5 2.5C5.5 2.77614 5.72386 3 6 3Z" fill="black"/>
+<path d="M4 4C4.27614 4 4.5 3.77614 4.5 3.5C4.5 3.22386 4.27614 3 4 3C3.72386 3 3.5 3.22386 3.5 3.5C3.5 3.77614 3.72386 4 4 4Z" fill="black"/>
+<path d="M3.99976 13C4.2759 13 4.49976 12.7761 4.49976 12.5C4.49976 12.2239 4.2759 12 3.99976 12C3.72361 12 3.49976 12.2239 3.49976 12.5C3.49976 12.7761 3.72361 13 3.99976 13Z" fill="black"/>
+<path opacity="0.2" d="M2 12.5C2.27614 12.5 2.5 12.2761 2.5 12C2.5 11.7239 2.27614 11.5 2 11.5C1.72386 11.5 1.5 11.7239 1.5 12C1.5 12.2761 1.72386 12.5 2 12.5Z" fill="black"/>
+<path opacity="0.2" d="M2 4.5C2.27614 4.5 2.5 4.27614 2.5 4C2.5 3.72386 2.27614 3.5 2 3.5C1.72386 3.5 1.5 3.72386 1.5 4C1.5 4.27614 1.72386 4.5 2 4.5Z" fill="black"/>
+<path opacity="0.2" d="M15 12.5C15.2761 12.5 15.5 12.2761 15.5 12C15.5 11.7239 15.2761 11.5 15 11.5C14.7239 11.5 14.5 11.7239 14.5 12C14.5 12.2761 14.7239 12.5 15 12.5Z" fill="black"/>
+<path opacity="0.2" d="M15 4.5C15.2761 4.5 15.5 4.27614 15.5 4C15.5 3.72386 15.2761 3.5 15 3.5C14.7239 3.5 14.5 3.72386 14.5 4C14.5 4.27614 14.7239 4.5 15 4.5Z" fill="black"/>
+<path opacity="0.5" d="M3.99976 15C4.2759 15 4.49976 14.7761 4.49976 14.5C4.49976 14.2239 4.2759 14 3.99976 14C3.72361 14 3.49976 14.2239 3.49976 14.5C3.49976 14.7761 3.72361 15 3.99976 15Z" fill="black"/>
+<path opacity="0.5" d="M4 2C4.27614 2 4.5 1.77614 4.5 1.5C4.5 1.22386 4.27614 1 4 1C3.72386 1 3.5 1.22386 3.5 1.5C3.5 1.77614 3.72386 2 4 2Z" fill="black"/>
+<path opacity="0.5" d="M13 15C13.2761 15 13.5 14.7761 13.5 14.5C13.5 14.2239 13.2761 14 13 14C12.7239 14 12.5 14.2239 12.5 14.5C12.5 14.7761 12.7239 15 13 15Z" fill="black"/>
+<path opacity="0.5" d="M13 2C13.2761 2 13.5 1.77614 13.5 1.5C13.5 1.22386 13.2761 1 13 1C12.7239 1 12.5 1.22386 12.5 1.5C12.5 1.77614 12.7239 2 13 2Z" fill="black"/>
+<path d="M13 4C13.2761 4 13.5 3.77614 13.5 3.5C13.5 3.22386 13.2761 3 13 3C12.7239 3 12.5 3.22386 12.5 3.5C12.5 3.77614 12.7239 4 13 4Z" fill="black"/>
+<path d="M13 13C13.2761 13 13.5 12.7761 13.5 12.5C13.5 12.2239 13.2761 12 13 12C12.7239 12 12.5 12.2239 12.5 12.5C12.5 12.7761 12.7239 13 13 13Z" fill="black"/>
+<path opacity="0.6" d="M11 3C11.2761 3 11.5 2.77614 11.5 2.5C11.5 2.22386 11.2761 2 11 2C10.7239 2 10.5 2.22386 10.5 2.5C10.5 2.77614 10.7239 3 11 3Z" fill="black"/>
+<path d="M8.5 15C8.77614 15 9 14.7761 9 14.5C9 14.2239 8.77614 14 8.5 14C8.22386 14 8 14.2239 8 14.5C8 14.7761 8.22386 15 8.5 15Z" fill="black"/>
+<path opacity="0.6" d="M6 14C6.27614 14 6.5 13.7761 6.5 13.5C6.5 13.2239 6.27614 13 6 13C5.72386 13 5.5 13.2239 5.5 13.5C5.5 13.7761 5.72386 14 6 14Z" fill="black"/>
+<path opacity="0.6" d="M11 14C11.2761 14 11.5 13.7761 11.5 13.5C11.5 13.2239 11.2761 13 11 13C10.7239 13 10.5 13.2239 10.5 13.5C10.5 13.7761 10.7239 14 11 14Z" fill="black"/>
+</svg>
@@ -1,5 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8 2.93652L6.9243 6.20697C6.86924 6.37435 6.77565 6.52646 6.65105 6.65105C6.52646 6.77565 6.37435 6.86924 6.20697 6.9243L2.93652 8L6.20697 9.0757C6.37435 9.13076 6.52646 9.22435 6.65105 9.34895C6.77565 9.47354 6.86924 9.62565 6.9243 9.79306L8 13.0635L9.0757 9.79306C9.13076 9.62565 9.22435 9.47354 9.34895 9.34895C9.47354 9.22435 9.62565 9.13076 9.79306 9.0757L13.0635 8L9.79306 6.9243C9.62565 6.86924 9.47354 6.77565 9.34895 6.65105C9.22435 6.52646 9.13076 6.37435 9.0757 6.20697L8 2.93652Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M3.33334 2V4.66666M2 3.33334H4.66666" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M12.6665 11.3333V14M11.3333 12.6666H13.9999" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M3.33334 2V4.66666M2 3.33334H4.66666" stroke="black" stroke-opacity="0.75" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M12.6665 11.3333V14M11.3333 12.6666H13.9999" stroke="black" stroke-opacity="0.75" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
@@ -0,0 +1,1257 @@
+<svg width="515" height="126" viewBox="0 0 515 126" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_2906_6463)">
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 0.390625H0.390625V12.1094H12.1094V0.390625ZM0 0V12.5H12.5V0H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 0.390625H12.8906V12.1094H24.6094V0.390625ZM12.5 0V12.5H25V0H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 0.390625H25.3906V12.1094H37.1094V0.390625ZM25 0V12.5H37.5V0H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 0.390625H37.8906V12.1094H49.6094V0.390625ZM37.5 0V12.5H50V0H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 0.390625H50.3906V12.1094H62.1094V0.390625ZM50 0V12.5H62.5V0H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 0.390625H62.8906V12.1094H74.6094V0.390625ZM62.5 0V12.5H75V0H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 0.390625H75.3906V12.1094H87.1094V0.390625ZM75 0V12.5H87.5V0H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 0.390625H87.8906V12.1094H99.6094V0.390625ZM87.5 0V12.5H100V0H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 0.390625H100.391V12.1094H112.109V0.390625ZM100 0V12.5H112.5V0H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 0.390625H112.891V12.1094H124.609V0.390625ZM112.5 0V12.5H125V0H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 0.390625H125.391V12.1094H137.109V0.390625ZM125 0V12.5H137.5V0H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 0.390625H137.891V12.1094H149.609V0.390625ZM137.5 0V12.5H150V0H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 0.390625H150.391V12.1094H162.109V0.390625ZM150 0V12.5H162.5V0H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 0.390625H162.891V12.1094H174.609V0.390625ZM162.5 0V12.5H175V0H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 0.390625H175.391V12.1094H187.109V0.390625ZM175 0V12.5H187.5V0H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 0.390625H187.891V12.1094H199.609V0.390625ZM187.5 0V12.5H200V0H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 0.390625H200.391V12.1094H212.109V0.390625ZM200 0V12.5H212.5V0H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 0.390625H212.891V12.1094H224.609V0.390625ZM212.5 0V12.5H225V0H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 0.390625H225.391V12.1094H237.109V0.390625ZM225 0V12.5H237.5V0H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 0.390625H237.891V12.1094H249.609V0.390625ZM237.5 0V12.5H250V0H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 0.390625H250.391V12.1094H262.109V0.390625ZM250 0V12.5H262.5V0H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 0.390625H262.891V12.1094H274.609V0.390625ZM262.5 0V12.5H275V0H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 0.390625H275.391V12.1094H287.109V0.390625ZM275 0V12.5H287.5V0H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 0.390625H287.891V12.1094H299.609V0.390625ZM287.5 0V12.5H300V0H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 0.390625H300.391V12.1094H312.109V0.390625ZM300 0V12.5H312.5V0H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 0.390625H312.891V12.1094H324.609V0.390625ZM312.5 0V12.5H325V0H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 0.390625H325.391V12.1094H337.109V0.390625ZM325 0V12.5H337.5V0H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 0.390625H337.891V12.1094H349.609V0.390625ZM337.5 0V12.5H350V0H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 0.390625H350.391V12.1094H362.109V0.390625ZM350 0V12.5H362.5V0H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 0.390625H362.891V12.1094H374.609V0.390625ZM362.5 0V12.5H375V0H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 0.390625H375.391V12.1094H387.109V0.390625ZM375 0V12.5H387.5V0H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 0.390625H387.891V12.1094H399.609V0.390625ZM387.5 0V12.5H400V0H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 0.390625H400.391V12.1094H412.109V0.390625ZM400 0V12.5H412.5V0H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 0.390625H412.891V12.1094H424.609V0.390625ZM412.5 0V12.5H425V0H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 0.390625H425.391V12.1094H437.109V0.390625ZM425 0V12.5H437.5V0H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 0.390625H437.891V12.1094H449.609V0.390625ZM437.5 0V12.5H450V0H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 0.390625H450.391V12.1094H462.109V0.390625ZM450 0V12.5H462.5V0H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 0.390625H462.891V12.1094H474.609V0.390625ZM462.5 0V12.5H475V0H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 0.390625H475.391V12.1094H487.109V0.390625ZM475 0V12.5H487.5V0H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 0.390625H487.891V12.1094H499.609V0.390625ZM487.5 0V12.5H500V0H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 0.390625H500.391V12.1094H512.109V0.390625ZM500 0V12.5H512.5V0H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 0.390625H512.891V12.1094H524.609V0.390625ZM512.5 0V12.5H525V0H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 0.390625H525.391V12.1094H537.109V0.390625ZM525 0V12.5H537.5V0H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 0.390625H537.891V12.1094H549.609V0.390625ZM537.5 0V12.5H550V0H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 0.390625H550.391V12.1094H562.109V0.390625ZM550 0V12.5H562.5V0H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 0.390625H562.891V12.1094H574.609V0.390625ZM562.5 0V12.5H575V0H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 0.390625H575.391V12.1094H587.109V0.390625ZM575 0V12.5H587.5V0H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 0.390625H587.891V12.1094H599.609V0.390625ZM587.5 0V12.5H600V0H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 12.8906H0.390625V24.6094H12.1094V12.8906ZM0 12.5V25H12.5V12.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 12.8906H12.8906V24.6094H24.6094V12.8906ZM12.5 12.5V25H25V12.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 12.8906H25.3906V24.6094H37.1094V12.8906ZM25 12.5V25H37.5V12.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 12.8906H37.8906V24.6094H49.6094V12.8906ZM37.5 12.5V25H50V12.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 12.8906H50.3906V24.6094H62.1094V12.8906ZM50 12.5V25H62.5V12.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 12.8906H62.8906V24.6094H74.6094V12.8906ZM62.5 12.5V25H75V12.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 12.8906H75.3906V24.6094H87.1094V12.8906ZM75 12.5V25H87.5V12.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 12.8906H87.8906V24.6094H99.6094V12.8906ZM87.5 12.5V25H100V12.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 12.8906H100.391V24.6094H112.109V12.8906ZM100 12.5V25H112.5V12.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 12.8906H112.891V24.6094H124.609V12.8906ZM112.5 12.5V25H125V12.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 12.8906H125.391V24.6094H137.109V12.8906ZM125 12.5V25H137.5V12.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 12.8906H137.891V24.6094H149.609V12.8906ZM137.5 12.5V25H150V12.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 12.8906H150.391V24.6094H162.109V12.8906ZM150 12.5V25H162.5V12.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 12.8906H162.891V24.6094H174.609V12.8906ZM162.5 12.5V25H175V12.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 12.8906H175.391V24.6094H187.109V12.8906ZM175 12.5V25H187.5V12.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 12.8906H187.891V24.6094H199.609V12.8906ZM187.5 12.5V25H200V12.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 12.8906H200.391V24.6094H212.109V12.8906ZM200 12.5V25H212.5V12.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 12.8906H212.891V24.6094H224.609V12.8906ZM212.5 12.5V25H225V12.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 12.8906H225.391V24.6094H237.109V12.8906ZM225 12.5V25H237.5V12.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 12.8906H237.891V24.6094H249.609V12.8906ZM237.5 12.5V25H250V12.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 12.8906H250.391V24.6094H262.109V12.8906ZM250 12.5V25H262.5V12.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 12.8906H262.891V24.6094H274.609V12.8906ZM262.5 12.5V25H275V12.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 12.8906H275.391V24.6094H287.109V12.8906ZM275 12.5V25H287.5V12.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 12.8906H287.891V24.6094H299.609V12.8906ZM287.5 12.5V25H300V12.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 12.8906H300.391V24.6094H312.109V12.8906ZM300 12.5V25H312.5V12.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 12.8906H312.891V24.6094H324.609V12.8906ZM312.5 12.5V25H325V12.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 12.8906H325.391V24.6094H337.109V12.8906ZM325 12.5V25H337.5V12.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 12.8906H337.891V24.6094H349.609V12.8906ZM337.5 12.5V25H350V12.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 12.8906H350.391V24.6094H362.109V12.8906ZM350 12.5V25H362.5V12.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 12.8906H362.891V24.6094H374.609V12.8906ZM362.5 12.5V25H375V12.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 12.8906H375.391V24.6094H387.109V12.8906ZM375 12.5V25H387.5V12.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 12.8906H387.891V24.6094H399.609V12.8906ZM387.5 12.5V25H400V12.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 12.8906H400.391V24.6094H412.109V12.8906ZM400 12.5V25H412.5V12.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 12.8906H412.891V24.6094H424.609V12.8906ZM412.5 12.5V25H425V12.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 12.8906H425.391V24.6094H437.109V12.8906ZM425 12.5V25H437.5V12.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 12.8906H437.891V24.6094H449.609V12.8906ZM437.5 12.5V25H450V12.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 12.8906H450.391V24.6094H462.109V12.8906ZM450 12.5V25H462.5V12.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 12.8906H462.891V24.6094H474.609V12.8906ZM462.5 12.5V25H475V12.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 12.8906H475.391V24.6094H487.109V12.8906ZM475 12.5V25H487.5V12.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 12.8906H487.891V24.6094H499.609V12.8906ZM487.5 12.5V25H500V12.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 12.8906H500.391V24.6094H512.109V12.8906ZM500 12.5V25H512.5V12.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 12.8906H512.891V24.6094H524.609V12.8906ZM512.5 12.5V25H525V12.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 12.8906H525.391V24.6094H537.109V12.8906ZM525 12.5V25H537.5V12.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 12.8906H537.891V24.6094H549.609V12.8906ZM537.5 12.5V25H550V12.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 12.8906H550.391V24.6094H562.109V12.8906ZM550 12.5V25H562.5V12.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 12.8906H562.891V24.6094H574.609V12.8906ZM562.5 12.5V25H575V12.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 12.8906H575.391V24.6094H587.109V12.8906ZM575 12.5V25H587.5V12.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 12.8906H587.891V24.6094H599.609V12.8906ZM587.5 12.5V25H600V12.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 25.3906H0.390625V37.1094H12.1094V25.3906ZM0 25V37.5H12.5V25H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 25.3906H12.8906V37.1094H24.6094V25.3906ZM12.5 25V37.5H25V25H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 25.3906H25.3906V37.1094H37.1094V25.3906ZM25 25V37.5H37.5V25H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 25.3906H37.8906V37.1094H49.6094V25.3906ZM37.5 25V37.5H50V25H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 25.3906H50.3906V37.1094H62.1094V25.3906ZM50 25V37.5H62.5V25H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 25.3906H62.8906V37.1094H74.6094V25.3906ZM62.5 25V37.5H75V25H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 25.3906H75.3906V37.1094H87.1094V25.3906ZM75 25V37.5H87.5V25H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 25.3906H87.8906V37.1094H99.6094V25.3906ZM87.5 25V37.5H100V25H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 25.3906H100.391V37.1094H112.109V25.3906ZM100 25V37.5H112.5V25H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 25.3906H112.891V37.1094H124.609V25.3906ZM112.5 25V37.5H125V25H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 25.3906H125.391V37.1094H137.109V25.3906ZM125 25V37.5H137.5V25H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 25.3906H137.891V37.1094H149.609V25.3906ZM137.5 25V37.5H150V25H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 25.3906H150.391V37.1094H162.109V25.3906ZM150 25V37.5H162.5V25H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 25.3906H162.891V37.1094H174.609V25.3906ZM162.5 25V37.5H175V25H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 25.3906H175.391V37.1094H187.109V25.3906ZM175 25V37.5H187.5V25H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 25.3906H187.891V37.1094H199.609V25.3906ZM187.5 25V37.5H200V25H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 25.3906H200.391V37.1094H212.109V25.3906ZM200 25V37.5H212.5V25H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 25.3906H212.891V37.1094H224.609V25.3906ZM212.5 25V37.5H225V25H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 25.3906H225.391V37.1094H237.109V25.3906ZM225 25V37.5H237.5V25H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 25.3906H237.891V37.1094H249.609V25.3906ZM237.5 25V37.5H250V25H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 25.3906H250.391V37.1094H262.109V25.3906ZM250 25V37.5H262.5V25H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 25.3906H262.891V37.1094H274.609V25.3906ZM262.5 25V37.5H275V25H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 25.3906H275.391V37.1094H287.109V25.3906ZM275 25V37.5H287.5V25H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 25.3906H287.891V37.1094H299.609V25.3906ZM287.5 25V37.5H300V25H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 25.3906H300.391V37.1094H312.109V25.3906ZM300 25V37.5H312.5V25H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 25.3906H312.891V37.1094H324.609V25.3906ZM312.5 25V37.5H325V25H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 25.3906H325.391V37.1094H337.109V25.3906ZM325 25V37.5H337.5V25H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 25.3906H337.891V37.1094H349.609V25.3906ZM337.5 25V37.5H350V25H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 25.3906H350.391V37.1094H362.109V25.3906ZM350 25V37.5H362.5V25H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 25.3906H362.891V37.1094H374.609V25.3906ZM362.5 25V37.5H375V25H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 25.3906H375.391V37.1094H387.109V25.3906ZM375 25V37.5H387.5V25H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 25.3906H387.891V37.1094H399.609V25.3906ZM387.5 25V37.5H400V25H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 25.3906H400.391V37.1094H412.109V25.3906ZM400 25V37.5H412.5V25H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 25.3906H412.891V37.1094H424.609V25.3906ZM412.5 25V37.5H425V25H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 25.3906H425.391V37.1094H437.109V25.3906ZM425 25V37.5H437.5V25H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 25.3906H437.891V37.1094H449.609V25.3906ZM437.5 25V37.5H450V25H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 25.3906H450.391V37.1094H462.109V25.3906ZM450 25V37.5H462.5V25H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 25.3906H462.891V37.1094H474.609V25.3906ZM462.5 25V37.5H475V25H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 25.3906H475.391V37.1094H487.109V25.3906ZM475 25V37.5H487.5V25H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 25.3906H487.891V37.1094H499.609V25.3906ZM487.5 25V37.5H500V25H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 25.3906H500.391V37.1094H512.109V25.3906ZM500 25V37.5H512.5V25H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 25.3906H512.891V37.1094H524.609V25.3906ZM512.5 25V37.5H525V25H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 25.3906H525.391V37.1094H537.109V25.3906ZM525 25V37.5H537.5V25H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 25.3906H537.891V37.1094H549.609V25.3906ZM537.5 25V37.5H550V25H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 25.3906H550.391V37.1094H562.109V25.3906ZM550 25V37.5H562.5V25H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 25.3906H562.891V37.1094H574.609V25.3906ZM562.5 25V37.5H575V25H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 25.3906H575.391V37.1094H587.109V25.3906ZM575 25V37.5H587.5V25H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 25.3906H587.891V37.1094H599.609V25.3906ZM587.5 25V37.5H600V25H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 37.8906H0.390625V49.6094H12.1094V37.8906ZM0 37.5V50H12.5V37.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 37.8906H12.8906V49.6094H24.6094V37.8906ZM12.5 37.5V50H25V37.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 37.8906H25.3906V49.6094H37.1094V37.8906ZM25 37.5V50H37.5V37.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 37.8906H37.8906V49.6094H49.6094V37.8906ZM37.5 37.5V50H50V37.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 37.8906H50.3906V49.6094H62.1094V37.8906ZM50 37.5V50H62.5V37.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 37.8906H62.8906V49.6094H74.6094V37.8906ZM62.5 37.5V50H75V37.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 37.8906H75.3906V49.6094H87.1094V37.8906ZM75 37.5V50H87.5V37.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 37.8906H87.8906V49.6094H99.6094V37.8906ZM87.5 37.5V50H100V37.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 37.8906H100.391V49.6094H112.109V37.8906ZM100 37.5V50H112.5V37.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 37.8906H112.891V49.6094H124.609V37.8906ZM112.5 37.5V50H125V37.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 37.8906H125.391V49.6094H137.109V37.8906ZM125 37.5V50H137.5V37.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 37.8906H137.891V49.6094H149.609V37.8906ZM137.5 37.5V50H150V37.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 37.8906H150.391V49.6094H162.109V37.8906ZM150 37.5V50H162.5V37.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 37.8906H162.891V49.6094H174.609V37.8906ZM162.5 37.5V50H175V37.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 37.8906H175.391V49.6094H187.109V37.8906ZM175 37.5V50H187.5V37.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 37.8906H187.891V49.6094H199.609V37.8906ZM187.5 37.5V50H200V37.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 37.8906H200.391V49.6094H212.109V37.8906ZM200 37.5V50H212.5V37.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 37.8906H212.891V49.6094H224.609V37.8906ZM212.5 37.5V50H225V37.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 37.8906H225.391V49.6094H237.109V37.8906ZM225 37.5V50H237.5V37.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 37.8906H237.891V49.6094H249.609V37.8906ZM237.5 37.5V50H250V37.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 37.8906H250.391V49.6094H262.109V37.8906ZM250 37.5V50H262.5V37.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 37.8906H262.891V49.6094H274.609V37.8906ZM262.5 37.5V50H275V37.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 37.8906H275.391V49.6094H287.109V37.8906ZM275 37.5V50H287.5V37.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 37.8906H287.891V49.6094H299.609V37.8906ZM287.5 37.5V50H300V37.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 37.8906H300.391V49.6094H312.109V37.8906ZM300 37.5V50H312.5V37.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 37.8906H312.891V49.6094H324.609V37.8906ZM312.5 37.5V50H325V37.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 37.8906H325.391V49.6094H337.109V37.8906ZM325 37.5V50H337.5V37.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 37.8906H337.891V49.6094H349.609V37.8906ZM337.5 37.5V50H350V37.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 37.8906H350.391V49.6094H362.109V37.8906ZM350 37.5V50H362.5V37.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 37.8906H362.891V49.6094H374.609V37.8906ZM362.5 37.5V50H375V37.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 37.8906H375.391V49.6094H387.109V37.8906ZM375 37.5V50H387.5V37.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 37.8906H387.891V49.6094H399.609V37.8906ZM387.5 37.5V50H400V37.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 37.8906H400.391V49.6094H412.109V37.8906ZM400 37.5V50H412.5V37.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 37.8906H412.891V49.6094H424.609V37.8906ZM412.5 37.5V50H425V37.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 37.8906H425.391V49.6094H437.109V37.8906ZM425 37.5V50H437.5V37.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 37.8906H437.891V49.6094H449.609V37.8906ZM437.5 37.5V50H450V37.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 37.8906H450.391V49.6094H462.109V37.8906ZM450 37.5V50H462.5V37.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 37.8906H462.891V49.6094H474.609V37.8906ZM462.5 37.5V50H475V37.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 37.8906H475.391V49.6094H487.109V37.8906ZM475 37.5V50H487.5V37.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 37.8906H487.891V49.6094H499.609V37.8906ZM487.5 37.5V50H500V37.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 37.8906H500.391V49.6094H512.109V37.8906ZM500 37.5V50H512.5V37.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 37.8906H512.891V49.6094H524.609V37.8906ZM512.5 37.5V50H525V37.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 37.8906H525.391V49.6094H537.109V37.8906ZM525 37.5V50H537.5V37.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 37.8906H537.891V49.6094H549.609V37.8906ZM537.5 37.5V50H550V37.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 37.8906H550.391V49.6094H562.109V37.8906ZM550 37.5V50H562.5V37.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 37.8906H562.891V49.6094H574.609V37.8906ZM562.5 37.5V50H575V37.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 37.8906H575.391V49.6094H587.109V37.8906ZM575 37.5V50H587.5V37.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 37.8906H587.891V49.6094H599.609V37.8906ZM587.5 37.5V50H600V37.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 50.3906H0.390625V62.1094H12.1094V50.3906ZM0 50V62.5H12.5V50H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 50.3906H12.8906V62.1094H24.6094V50.3906ZM12.5 50V62.5H25V50H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 50.3906H25.3906V62.1094H37.1094V50.3906ZM25 50V62.5H37.5V50H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 50.3906H37.8906V62.1094H49.6094V50.3906ZM37.5 50V62.5H50V50H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 50.3906H50.3906V62.1094H62.1094V50.3906ZM50 50V62.5H62.5V50H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 50.3906H62.8906V62.1094H74.6094V50.3906ZM62.5 50V62.5H75V50H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 50.3906H75.3906V62.1094H87.1094V50.3906ZM75 50V62.5H87.5V50H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 50.3906H87.8906V62.1094H99.6094V50.3906ZM87.5 50V62.5H100V50H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 50.3906H100.391V62.1094H112.109V50.3906ZM100 50V62.5H112.5V50H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 50.3906H112.891V62.1094H124.609V50.3906ZM112.5 50V62.5H125V50H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 50.3906H125.391V62.1094H137.109V50.3906ZM125 50V62.5H137.5V50H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 50.3906H137.891V62.1094H149.609V50.3906ZM137.5 50V62.5H150V50H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 50.3906H150.391V62.1094H162.109V50.3906ZM150 50V62.5H162.5V50H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 50.3906H162.891V62.1094H174.609V50.3906ZM162.5 50V62.5H175V50H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 50.3906H175.391V62.1094H187.109V50.3906ZM175 50V62.5H187.5V50H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 50.3906H187.891V62.1094H199.609V50.3906ZM187.5 50V62.5H200V50H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 50.3906H200.391V62.1094H212.109V50.3906ZM200 50V62.5H212.5V50H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 50.3906H212.891V62.1094H224.609V50.3906ZM212.5 50V62.5H225V50H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 50.3906H225.391V62.1094H237.109V50.3906ZM225 50V62.5H237.5V50H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 50.3906H237.891V62.1094H249.609V50.3906ZM237.5 50V62.5H250V50H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 50.3906H250.391V62.1094H262.109V50.3906ZM250 50V62.5H262.5V50H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 50.3906H262.891V62.1094H274.609V50.3906ZM262.5 50V62.5H275V50H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 50.3906H275.391V62.1094H287.109V50.3906ZM275 50V62.5H287.5V50H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 50.3906H287.891V62.1094H299.609V50.3906ZM287.5 50V62.5H300V50H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 50.3906H300.391V62.1094H312.109V50.3906ZM300 50V62.5H312.5V50H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 50.3906H312.891V62.1094H324.609V50.3906ZM312.5 50V62.5H325V50H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 50.3906H325.391V62.1094H337.109V50.3906ZM325 50V62.5H337.5V50H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 50.3906H337.891V62.1094H349.609V50.3906ZM337.5 50V62.5H350V50H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 50.3906H350.391V62.1094H362.109V50.3906ZM350 50V62.5H362.5V50H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 50.3906H362.891V62.1094H374.609V50.3906ZM362.5 50V62.5H375V50H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 50.3906H375.391V62.1094H387.109V50.3906ZM375 50V62.5H387.5V50H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 50.3906H387.891V62.1094H399.609V50.3906ZM387.5 50V62.5H400V50H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 50.3906H400.391V62.1094H412.109V50.3906ZM400 50V62.5H412.5V50H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 50.3906H412.891V62.1094H424.609V50.3906ZM412.5 50V62.5H425V50H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 50.3906H425.391V62.1094H437.109V50.3906ZM425 50V62.5H437.5V50H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 50.3906H437.891V62.1094H449.609V50.3906ZM437.5 50V62.5H450V50H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 50.3906H450.391V62.1094H462.109V50.3906ZM450 50V62.5H462.5V50H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 50.3906H462.891V62.1094H474.609V50.3906ZM462.5 50V62.5H475V50H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 50.3906H475.391V62.1094H487.109V50.3906ZM475 50V62.5H487.5V50H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 50.3906H487.891V62.1094H499.609V50.3906ZM487.5 50V62.5H500V50H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 50.3906H500.391V62.1094H512.109V50.3906ZM500 50V62.5H512.5V50H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 50.3906H512.891V62.1094H524.609V50.3906ZM512.5 50V62.5H525V50H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 50.3906H525.391V62.1094H537.109V50.3906ZM525 50V62.5H537.5V50H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 50.3906H537.891V62.1094H549.609V50.3906ZM537.5 50V62.5H550V50H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 50.3906H550.391V62.1094H562.109V50.3906ZM550 50V62.5H562.5V50H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 50.3906H562.891V62.1094H574.609V50.3906ZM562.5 50V62.5H575V50H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 50.3906H575.391V62.1094H587.109V50.3906ZM575 50V62.5H587.5V50H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 50.3906H587.891V62.1094H599.609V50.3906ZM587.5 50V62.5H600V50H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 62.8906H0.390625V74.6094H12.1094V62.8906ZM0 62.5V75H12.5V62.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 62.8906H12.8906V74.6094H24.6094V62.8906ZM12.5 62.5V75H25V62.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 62.8906H25.3906V74.6094H37.1094V62.8906ZM25 62.5V75H37.5V62.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 62.8906H37.8906V74.6094H49.6094V62.8906ZM37.5 62.5V75H50V62.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 62.8906H50.3906V74.6094H62.1094V62.8906ZM50 62.5V75H62.5V62.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 62.8906H62.8906V74.6094H74.6094V62.8906ZM62.5 62.5V75H75V62.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 62.8906H75.3906V74.6094H87.1094V62.8906ZM75 62.5V75H87.5V62.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 62.8906H87.8906V74.6094H99.6094V62.8906ZM87.5 62.5V75H100V62.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 62.8906H100.391V74.6094H112.109V62.8906ZM100 62.5V75H112.5V62.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 62.8906H112.891V74.6094H124.609V62.8906ZM112.5 62.5V75H125V62.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 62.8906H125.391V74.6094H137.109V62.8906ZM125 62.5V75H137.5V62.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 62.8906H137.891V74.6094H149.609V62.8906ZM137.5 62.5V75H150V62.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 62.8906H150.391V74.6094H162.109V62.8906ZM150 62.5V75H162.5V62.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 62.8906H162.891V74.6094H174.609V62.8906ZM162.5 62.5V75H175V62.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 62.8906H175.391V74.6094H187.109V62.8906ZM175 62.5V75H187.5V62.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 62.8906H187.891V74.6094H199.609V62.8906ZM187.5 62.5V75H200V62.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 62.8906H200.391V74.6094H212.109V62.8906ZM200 62.5V75H212.5V62.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 62.8906H212.891V74.6094H224.609V62.8906ZM212.5 62.5V75H225V62.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 62.8906H225.391V74.6094H237.109V62.8906ZM225 62.5V75H237.5V62.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 62.8906H237.891V74.6094H249.609V62.8906ZM237.5 62.5V75H250V62.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 62.8906H250.391V74.6094H262.109V62.8906ZM250 62.5V75H262.5V62.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 62.8906H262.891V74.6094H274.609V62.8906ZM262.5 62.5V75H275V62.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 62.8906H275.391V74.6094H287.109V62.8906ZM275 62.5V75H287.5V62.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 62.8906H287.891V74.6094H299.609V62.8906ZM287.5 62.5V75H300V62.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 62.8906H300.391V74.6094H312.109V62.8906ZM300 62.5V75H312.5V62.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 62.8906H312.891V74.6094H324.609V62.8906ZM312.5 62.5V75H325V62.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 62.8906H325.391V74.6094H337.109V62.8906ZM325 62.5V75H337.5V62.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 62.8906H337.891V74.6094H349.609V62.8906ZM337.5 62.5V75H350V62.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 62.8906H350.391V74.6094H362.109V62.8906ZM350 62.5V75H362.5V62.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 62.8906H362.891V74.6094H374.609V62.8906ZM362.5 62.5V75H375V62.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 62.8906H375.391V74.6094H387.109V62.8906ZM375 62.5V75H387.5V62.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 62.8906H387.891V74.6094H399.609V62.8906ZM387.5 62.5V75H400V62.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 62.8906H400.391V74.6094H412.109V62.8906ZM400 62.5V75H412.5V62.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 62.8906H412.891V74.6094H424.609V62.8906ZM412.5 62.5V75H425V62.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 62.8906H425.391V74.6094H437.109V62.8906ZM425 62.5V75H437.5V62.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 62.8906H437.891V74.6094H449.609V62.8906ZM437.5 62.5V75H450V62.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 62.8906H450.391V74.6094H462.109V62.8906ZM450 62.5V75H462.5V62.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 62.8906H462.891V74.6094H474.609V62.8906ZM462.5 62.5V75H475V62.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 62.8906H475.391V74.6094H487.109V62.8906ZM475 62.5V75H487.5V62.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 62.8906H487.891V74.6094H499.609V62.8906ZM487.5 62.5V75H500V62.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 62.8906H500.391V74.6094H512.109V62.8906ZM500 62.5V75H512.5V62.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 62.8906H512.891V74.6094H524.609V62.8906ZM512.5 62.5V75H525V62.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 62.8906H525.391V74.6094H537.109V62.8906ZM525 62.5V75H537.5V62.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 62.8906H537.891V74.6094H549.609V62.8906ZM537.5 62.5V75H550V62.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 62.8906H550.391V74.6094H562.109V62.8906ZM550 62.5V75H562.5V62.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 62.8906H562.891V74.6094H574.609V62.8906ZM562.5 62.5V75H575V62.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 62.8906H575.391V74.6094H587.109V62.8906ZM575 62.5V75H587.5V62.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 62.8906H587.891V74.6094H599.609V62.8906ZM587.5 62.5V75H600V62.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 75.3906H0.390625V87.1094H12.1094V75.3906ZM0 75V87.5H12.5V75H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 75.3906H12.8906V87.1094H24.6094V75.3906ZM12.5 75V87.5H25V75H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 75.3906H25.3906V87.1094H37.1094V75.3906ZM25 75V87.5H37.5V75H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 75.3906H37.8906V87.1094H49.6094V75.3906ZM37.5 75V87.5H50V75H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 75.3906H50.3906V87.1094H62.1094V75.3906ZM50 75V87.5H62.5V75H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 75.3906H62.8906V87.1094H74.6094V75.3906ZM62.5 75V87.5H75V75H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 75.3906H75.3906V87.1094H87.1094V75.3906ZM75 75V87.5H87.5V75H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 75.3906H87.8906V87.1094H99.6094V75.3906ZM87.5 75V87.5H100V75H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 75.3906H100.391V87.1094H112.109V75.3906ZM100 75V87.5H112.5V75H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 75.3906H112.891V87.1094H124.609V75.3906ZM112.5 75V87.5H125V75H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 75.3906H125.391V87.1094H137.109V75.3906ZM125 75V87.5H137.5V75H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 75.3906H137.891V87.1094H149.609V75.3906ZM137.5 75V87.5H150V75H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 75.3906H150.391V87.1094H162.109V75.3906ZM150 75V87.5H162.5V75H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 75.3906H162.891V87.1094H174.609V75.3906ZM162.5 75V87.5H175V75H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 75.3906H175.391V87.1094H187.109V75.3906ZM175 75V87.5H187.5V75H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 75.3906H187.891V87.1094H199.609V75.3906ZM187.5 75V87.5H200V75H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 75.3906H200.391V87.1094H212.109V75.3906ZM200 75V87.5H212.5V75H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 75.3906H212.891V87.1094H224.609V75.3906ZM212.5 75V87.5H225V75H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 75.3906H225.391V87.1094H237.109V75.3906ZM225 75V87.5H237.5V75H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 75.3906H237.891V87.1094H249.609V75.3906ZM237.5 75V87.5H250V75H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 75.3906H250.391V87.1094H262.109V75.3906ZM250 75V87.5H262.5V75H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 75.3906H262.891V87.1094H274.609V75.3906ZM262.5 75V87.5H275V75H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 75.3906H275.391V87.1094H287.109V75.3906ZM275 75V87.5H287.5V75H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 75.3906H287.891V87.1094H299.609V75.3906ZM287.5 75V87.5H300V75H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 75.3906H300.391V87.1094H312.109V75.3906ZM300 75V87.5H312.5V75H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 75.3906H312.891V87.1094H324.609V75.3906ZM312.5 75V87.5H325V75H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 75.3906H325.391V87.1094H337.109V75.3906ZM325 75V87.5H337.5V75H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 75.3906H337.891V87.1094H349.609V75.3906ZM337.5 75V87.5H350V75H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 75.3906H350.391V87.1094H362.109V75.3906ZM350 75V87.5H362.5V75H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 75.3906H362.891V87.1094H374.609V75.3906ZM362.5 75V87.5H375V75H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 75.3906H375.391V87.1094H387.109V75.3906ZM375 75V87.5H387.5V75H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 75.3906H387.891V87.1094H399.609V75.3906ZM387.5 75V87.5H400V75H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 75.3906H400.391V87.1094H412.109V75.3906ZM400 75V87.5H412.5V75H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 75.3906H412.891V87.1094H424.609V75.3906ZM412.5 75V87.5H425V75H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 75.3906H425.391V87.1094H437.109V75.3906ZM425 75V87.5H437.5V75H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 75.3906H437.891V87.1094H449.609V75.3906ZM437.5 75V87.5H450V75H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 75.3906H450.391V87.1094H462.109V75.3906ZM450 75V87.5H462.5V75H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 75.3906H462.891V87.1094H474.609V75.3906ZM462.5 75V87.5H475V75H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 75.3906H475.391V87.1094H487.109V75.3906ZM475 75V87.5H487.5V75H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 75.3906H487.891V87.1094H499.609V75.3906ZM487.5 75V87.5H500V75H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 75.3906H500.391V87.1094H512.109V75.3906ZM500 75V87.5H512.5V75H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 75.3906H512.891V87.1094H524.609V75.3906ZM512.5 75V87.5H525V75H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 75.3906H525.391V87.1094H537.109V75.3906ZM525 75V87.5H537.5V75H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 75.3906H537.891V87.1094H549.609V75.3906ZM537.5 75V87.5H550V75H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 75.3906H550.391V87.1094H562.109V75.3906ZM550 75V87.5H562.5V75H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 75.3906H562.891V87.1094H574.609V75.3906ZM562.5 75V87.5H575V75H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 75.3906H575.391V87.1094H587.109V75.3906ZM575 75V87.5H587.5V75H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 75.3906H587.891V87.1094H599.609V75.3906ZM587.5 75V87.5H600V75H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 87.8906H0.390625V99.6094H12.1094V87.8906ZM0 87.5V100H12.5V87.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 87.8906H12.8906V99.6094H24.6094V87.8906ZM12.5 87.5V100H25V87.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 87.8906H25.3906V99.6094H37.1094V87.8906ZM25 87.5V100H37.5V87.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 87.8906H37.8906V99.6094H49.6094V87.8906ZM37.5 87.5V100H50V87.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 87.8906H50.3906V99.6094H62.1094V87.8906ZM50 87.5V100H62.5V87.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 87.8906H62.8906V99.6094H74.6094V87.8906ZM62.5 87.5V100H75V87.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 87.8906H75.3906V99.6094H87.1094V87.8906ZM75 87.5V100H87.5V87.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 87.8906H87.8906V99.6094H99.6094V87.8906ZM87.5 87.5V100H100V87.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 87.8906H100.391V99.6094H112.109V87.8906ZM100 87.5V100H112.5V87.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 87.8906H112.891V99.6094H124.609V87.8906ZM112.5 87.5V100H125V87.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 87.8906H125.391V99.6094H137.109V87.8906ZM125 87.5V100H137.5V87.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 87.8906H137.891V99.6094H149.609V87.8906ZM137.5 87.5V100H150V87.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 87.8906H150.391V99.6094H162.109V87.8906ZM150 87.5V100H162.5V87.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 87.8906H162.891V99.6094H174.609V87.8906ZM162.5 87.5V100H175V87.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 87.8906H175.391V99.6094H187.109V87.8906ZM175 87.5V100H187.5V87.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 87.8906H187.891V99.6094H199.609V87.8906ZM187.5 87.5V100H200V87.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 87.8906H200.391V99.6094H212.109V87.8906ZM200 87.5V100H212.5V87.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 87.8906H212.891V99.6094H224.609V87.8906ZM212.5 87.5V100H225V87.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 87.8906H225.391V99.6094H237.109V87.8906ZM225 87.5V100H237.5V87.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 87.8906H237.891V99.6094H249.609V87.8906ZM237.5 87.5V100H250V87.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 87.8906H250.391V99.6094H262.109V87.8906ZM250 87.5V100H262.5V87.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 87.8906H262.891V99.6094H274.609V87.8906ZM262.5 87.5V100H275V87.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 87.8906H275.391V99.6094H287.109V87.8906ZM275 87.5V100H287.5V87.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 87.8906H287.891V99.6094H299.609V87.8906ZM287.5 87.5V100H300V87.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 87.8906H300.391V99.6094H312.109V87.8906ZM300 87.5V100H312.5V87.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 87.8906H312.891V99.6094H324.609V87.8906ZM312.5 87.5V100H325V87.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 87.8906H325.391V99.6094H337.109V87.8906ZM325 87.5V100H337.5V87.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 87.8906H337.891V99.6094H349.609V87.8906ZM337.5 87.5V100H350V87.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 87.8906H350.391V99.6094H362.109V87.8906ZM350 87.5V100H362.5V87.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 87.8906H362.891V99.6094H374.609V87.8906ZM362.5 87.5V100H375V87.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 87.8906H375.391V99.6094H387.109V87.8906ZM375 87.5V100H387.5V87.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 87.8906H387.891V99.6094H399.609V87.8906ZM387.5 87.5V100H400V87.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 87.8906H400.391V99.6094H412.109V87.8906ZM400 87.5V100H412.5V87.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 87.8906H412.891V99.6094H424.609V87.8906ZM412.5 87.5V100H425V87.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 87.8906H425.391V99.6094H437.109V87.8906ZM425 87.5V100H437.5V87.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 87.8906H437.891V99.6094H449.609V87.8906ZM437.5 87.5V100H450V87.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 87.8906H450.391V99.6094H462.109V87.8906ZM450 87.5V100H462.5V87.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 87.8906H462.891V99.6094H474.609V87.8906ZM462.5 87.5V100H475V87.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 87.8906H475.391V99.6094H487.109V87.8906ZM475 87.5V100H487.5V87.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 87.8906H487.891V99.6094H499.609V87.8906ZM487.5 87.5V100H500V87.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 87.8906H500.391V99.6094H512.109V87.8906ZM500 87.5V100H512.5V87.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 87.8906H512.891V99.6094H524.609V87.8906ZM512.5 87.5V100H525V87.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 87.8906H525.391V99.6094H537.109V87.8906ZM525 87.5V100H537.5V87.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 87.8906H537.891V99.6094H549.609V87.8906ZM537.5 87.5V100H550V87.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 87.8906H550.391V99.6094H562.109V87.8906ZM550 87.5V100H562.5V87.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 87.8906H562.891V99.6094H574.609V87.8906ZM562.5 87.5V100H575V87.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 87.8906H575.391V99.6094H587.109V87.8906ZM575 87.5V100H587.5V87.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 87.8906H587.891V99.6094H599.609V87.8906ZM587.5 87.5V100H600V87.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 100.391H0.390625V112.109H12.1094V100.391ZM0 100V112.5H12.5V100H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 100.391H12.8906V112.109H24.6094V100.391ZM12.5 100V112.5H25V100H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 100.391H25.3906V112.109H37.1094V100.391ZM25 100V112.5H37.5V100H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 100.391H37.8906V112.109H49.6094V100.391ZM37.5 100V112.5H50V100H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 100.391H50.3906V112.109H62.1094V100.391ZM50 100V112.5H62.5V100H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 100.391H62.8906V112.109H74.6094V100.391ZM62.5 100V112.5H75V100H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 100.391H75.3906V112.109H87.1094V100.391ZM75 100V112.5H87.5V100H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 100.391H87.8906V112.109H99.6094V100.391ZM87.5 100V112.5H100V100H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 100.391H100.391V112.109H112.109V100.391ZM100 100V112.5H112.5V100H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 100.391H112.891V112.109H124.609V100.391ZM112.5 100V112.5H125V100H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 100.391H125.391V112.109H137.109V100.391ZM125 100V112.5H137.5V100H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 100.391H137.891V112.109H149.609V100.391ZM137.5 100V112.5H150V100H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 100.391H150.391V112.109H162.109V100.391ZM150 100V112.5H162.5V100H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 100.391H162.891V112.109H174.609V100.391ZM162.5 100V112.5H175V100H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 100.391H175.391V112.109H187.109V100.391ZM175 100V112.5H187.5V100H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 100.391H187.891V112.109H199.609V100.391ZM187.5 100V112.5H200V100H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 100.391H200.391V112.109H212.109V100.391ZM200 100V112.5H212.5V100H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 100.391H212.891V112.109H224.609V100.391ZM212.5 100V112.5H225V100H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 100.391H225.391V112.109H237.109V100.391ZM225 100V112.5H237.5V100H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 100.391H237.891V112.109H249.609V100.391ZM237.5 100V112.5H250V100H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 100.391H250.391V112.109H262.109V100.391ZM250 100V112.5H262.5V100H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 100.391H262.891V112.109H274.609V100.391ZM262.5 100V112.5H275V100H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 100.391H275.391V112.109H287.109V100.391ZM275 100V112.5H287.5V100H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 100.391H287.891V112.109H299.609V100.391ZM287.5 100V112.5H300V100H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 100.391H300.391V112.109H312.109V100.391ZM300 100V112.5H312.5V100H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 100.391H312.891V112.109H324.609V100.391ZM312.5 100V112.5H325V100H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 100.391H325.391V112.109H337.109V100.391ZM325 100V112.5H337.5V100H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 100.391H337.891V112.109H349.609V100.391ZM337.5 100V112.5H350V100H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 100.391H350.391V112.109H362.109V100.391ZM350 100V112.5H362.5V100H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 100.391H362.891V112.109H374.609V100.391ZM362.5 100V112.5H375V100H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 100.391H375.391V112.109H387.109V100.391ZM375 100V112.5H387.5V100H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 100.391H387.891V112.109H399.609V100.391ZM387.5 100V112.5H400V100H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 100.391H400.391V112.109H412.109V100.391ZM400 100V112.5H412.5V100H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 100.391H412.891V112.109H424.609V100.391ZM412.5 100V112.5H425V100H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 100.391H425.391V112.109H437.109V100.391ZM425 100V112.5H437.5V100H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 100.391H437.891V112.109H449.609V100.391ZM437.5 100V112.5H450V100H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 100.391H450.391V112.109H462.109V100.391ZM450 100V112.5H462.5V100H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 100.391H462.891V112.109H474.609V100.391ZM462.5 100V112.5H475V100H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 100.391H475.391V112.109H487.109V100.391ZM475 100V112.5H487.5V100H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 100.391H487.891V112.109H499.609V100.391ZM487.5 100V112.5H500V100H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 100.391H500.391V112.109H512.109V100.391ZM500 100V112.5H512.5V100H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 100.391H512.891V112.109H524.609V100.391ZM512.5 100V112.5H525V100H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 100.391H525.391V112.109H537.109V100.391ZM525 100V112.5H537.5V100H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 100.391H537.891V112.109H549.609V100.391ZM537.5 100V112.5H550V100H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 100.391H550.391V112.109H562.109V100.391ZM550 100V112.5H562.5V100H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 100.391H562.891V112.109H574.609V100.391ZM562.5 100V112.5H575V100H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 100.391H575.391V112.109H587.109V100.391ZM575 100V112.5H587.5V100H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 100.391H587.891V112.109H599.609V100.391ZM587.5 100V112.5H600V100H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 112.891H0.390625V124.609H12.1094V112.891ZM0 112.5V125H12.5V112.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 112.891H12.8906V124.609H24.6094V112.891ZM12.5 112.5V125H25V112.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 112.891H25.3906V124.609H37.1094V112.891ZM25 112.5V125H37.5V112.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 112.891H37.8906V124.609H49.6094V112.891ZM37.5 112.5V125H50V112.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 112.891H50.3906V124.609H62.1094V112.891ZM50 112.5V125H62.5V112.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 112.891H62.8906V124.609H74.6094V112.891ZM62.5 112.5V125H75V112.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 112.891H75.3906V124.609H87.1094V112.891ZM75 112.5V125H87.5V112.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 112.891H87.8906V124.609H99.6094V112.891ZM87.5 112.5V125H100V112.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 112.891H100.391V124.609H112.109V112.891ZM100 112.5V125H112.5V112.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 112.891H112.891V124.609H124.609V112.891ZM112.5 112.5V125H125V112.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 112.891H125.391V124.609H137.109V112.891ZM125 112.5V125H137.5V112.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 112.891H137.891V124.609H149.609V112.891ZM137.5 112.5V125H150V112.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 112.891H150.391V124.609H162.109V112.891ZM150 112.5V125H162.5V112.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 112.891H162.891V124.609H174.609V112.891ZM162.5 112.5V125H175V112.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 112.891H175.391V124.609H187.109V112.891ZM175 112.5V125H187.5V112.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 112.891H187.891V124.609H199.609V112.891ZM187.5 112.5V125H200V112.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 112.891H200.391V124.609H212.109V112.891ZM200 112.5V125H212.5V112.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 112.891H212.891V124.609H224.609V112.891ZM212.5 112.5V125H225V112.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 112.891H225.391V124.609H237.109V112.891ZM225 112.5V125H237.5V112.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 112.891H237.891V124.609H249.609V112.891ZM237.5 112.5V125H250V112.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 112.891H250.391V124.609H262.109V112.891ZM250 112.5V125H262.5V112.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 112.891H262.891V124.609H274.609V112.891ZM262.5 112.5V125H275V112.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 112.891H275.391V124.609H287.109V112.891ZM275 112.5V125H287.5V112.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 112.891H287.891V124.609H299.609V112.891ZM287.5 112.5V125H300V112.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 112.891H300.391V124.609H312.109V112.891ZM300 112.5V125H312.5V112.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 112.891H312.891V124.609H324.609V112.891ZM312.5 112.5V125H325V112.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 112.891H325.391V124.609H337.109V112.891ZM325 112.5V125H337.5V112.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 112.891H337.891V124.609H349.609V112.891ZM337.5 112.5V125H350V112.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 112.891H350.391V124.609H362.109V112.891ZM350 112.5V125H362.5V112.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 112.891H362.891V124.609H374.609V112.891ZM362.5 112.5V125H375V112.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 112.891H375.391V124.609H387.109V112.891ZM375 112.5V125H387.5V112.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 112.891H387.891V124.609H399.609V112.891ZM387.5 112.5V125H400V112.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 112.891H400.391V124.609H412.109V112.891ZM400 112.5V125H412.5V112.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 112.891H412.891V124.609H424.609V112.891ZM412.5 112.5V125H425V112.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 112.891H425.391V124.609H437.109V112.891ZM425 112.5V125H437.5V112.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 112.891H437.891V124.609H449.609V112.891ZM437.5 112.5V125H450V112.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 112.891H450.391V124.609H462.109V112.891ZM450 112.5V125H462.5V112.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 112.891H462.891V124.609H474.609V112.891ZM462.5 112.5V125H475V112.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 112.891H475.391V124.609H487.109V112.891ZM475 112.5V125H487.5V112.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 112.891H487.891V124.609H499.609V112.891ZM487.5 112.5V125H500V112.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 112.891H500.391V124.609H512.109V112.891ZM500 112.5V125H512.5V112.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 112.891H512.891V124.609H524.609V112.891ZM512.5 112.5V125H525V112.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 112.891H525.391V124.609H537.109V112.891ZM525 112.5V125H537.5V112.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 112.891H537.891V124.609H549.609V112.891ZM537.5 112.5V125H550V112.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 112.891H550.391V124.609H562.109V112.891ZM550 112.5V125H562.5V112.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 112.891H562.891V124.609H574.609V112.891ZM562.5 112.5V125H575V112.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 112.891H575.391V124.609H587.109V112.891ZM575 112.5V125H587.5V112.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 112.891H587.891V124.609H599.609V112.891ZM587.5 112.5V125H600V112.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 125.391H0.390625V137.109H12.1094V125.391ZM0 125V137.5H12.5V125H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 125.391H12.8906V137.109H24.6094V125.391ZM12.5 125V137.5H25V125H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 125.391H25.3906V137.109H37.1094V125.391ZM25 125V137.5H37.5V125H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 125.391H37.8906V137.109H49.6094V125.391ZM37.5 125V137.5H50V125H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 125.391H50.3906V137.109H62.1094V125.391ZM50 125V137.5H62.5V125H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 125.391H62.8906V137.109H74.6094V125.391ZM62.5 125V137.5H75V125H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 125.391H75.3906V137.109H87.1094V125.391ZM75 125V137.5H87.5V125H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 125.391H87.8906V137.109H99.6094V125.391ZM87.5 125V137.5H100V125H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 125.391H100.391V137.109H112.109V125.391ZM100 125V137.5H112.5V125H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 125.391H112.891V137.109H124.609V125.391ZM112.5 125V137.5H125V125H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 125.391H125.391V137.109H137.109V125.391ZM125 125V137.5H137.5V125H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 125.391H137.891V137.109H149.609V125.391ZM137.5 125V137.5H150V125H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 125.391H150.391V137.109H162.109V125.391ZM150 125V137.5H162.5V125H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 125.391H162.891V137.109H174.609V125.391ZM162.5 125V137.5H175V125H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 125.391H175.391V137.109H187.109V125.391ZM175 125V137.5H187.5V125H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 125.391H187.891V137.109H199.609V125.391ZM187.5 125V137.5H200V125H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 125.391H200.391V137.109H212.109V125.391ZM200 125V137.5H212.5V125H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 125.391H212.891V137.109H224.609V125.391ZM212.5 125V137.5H225V125H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 125.391H225.391V137.109H237.109V125.391ZM225 125V137.5H237.5V125H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 125.391H237.891V137.109H249.609V125.391ZM237.5 125V137.5H250V125H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 125.391H250.391V137.109H262.109V125.391ZM250 125V137.5H262.5V125H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 125.391H262.891V137.109H274.609V125.391ZM262.5 125V137.5H275V125H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 125.391H275.391V137.109H287.109V125.391ZM275 125V137.5H287.5V125H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 125.391H287.891V137.109H299.609V125.391ZM287.5 125V137.5H300V125H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 125.391H300.391V137.109H312.109V125.391ZM300 125V137.5H312.5V125H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 125.391H312.891V137.109H324.609V125.391ZM312.5 125V137.5H325V125H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 125.391H325.391V137.109H337.109V125.391ZM325 125V137.5H337.5V125H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 125.391H337.891V137.109H349.609V125.391ZM337.5 125V137.5H350V125H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 125.391H350.391V137.109H362.109V125.391ZM350 125V137.5H362.5V125H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 125.391H362.891V137.109H374.609V125.391ZM362.5 125V137.5H375V125H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 125.391H375.391V137.109H387.109V125.391ZM375 125V137.5H387.5V125H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 125.391H387.891V137.109H399.609V125.391ZM387.5 125V137.5H400V125H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 125.391H400.391V137.109H412.109V125.391ZM400 125V137.5H412.5V125H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 125.391H412.891V137.109H424.609V125.391ZM412.5 125V137.5H425V125H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 125.391H425.391V137.109H437.109V125.391ZM425 125V137.5H437.5V125H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 125.391H437.891V137.109H449.609V125.391ZM437.5 125V137.5H450V125H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 125.391H450.391V137.109H462.109V125.391ZM450 125V137.5H462.5V125H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 125.391H462.891V137.109H474.609V125.391ZM462.5 125V137.5H475V125H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 125.391H475.391V137.109H487.109V125.391ZM475 125V137.5H487.5V125H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 125.391H487.891V137.109H499.609V125.391ZM487.5 125V137.5H500V125H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 125.391H500.391V137.109H512.109V125.391ZM500 125V137.5H512.5V125H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 125.391H512.891V137.109H524.609V125.391ZM512.5 125V137.5H525V125H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 125.391H525.391V137.109H537.109V125.391ZM525 125V137.5H537.5V125H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 125.391H537.891V137.109H549.609V125.391ZM537.5 125V137.5H550V125H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 125.391H550.391V137.109H562.109V125.391ZM550 125V137.5H562.5V125H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 125.391H562.891V137.109H574.609V125.391ZM562.5 125V137.5H575V125H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 125.391H575.391V137.109H587.109V125.391ZM575 125V137.5H587.5V125H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 125.391H587.891V137.109H599.609V125.391ZM587.5 125V137.5H600V125H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 137.891H0.390625V149.609H12.1094V137.891ZM0 137.5V150H12.5V137.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 137.891H12.8906V149.609H24.6094V137.891ZM12.5 137.5V150H25V137.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 137.891H25.3906V149.609H37.1094V137.891ZM25 137.5V150H37.5V137.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 137.891H37.8906V149.609H49.6094V137.891ZM37.5 137.5V150H50V137.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 137.891H50.3906V149.609H62.1094V137.891ZM50 137.5V150H62.5V137.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 137.891H62.8906V149.609H74.6094V137.891ZM62.5 137.5V150H75V137.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 137.891H75.3906V149.609H87.1094V137.891ZM75 137.5V150H87.5V137.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 137.891H87.8906V149.609H99.6094V137.891ZM87.5 137.5V150H100V137.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 137.891H100.391V149.609H112.109V137.891ZM100 137.5V150H112.5V137.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 137.891H112.891V149.609H124.609V137.891ZM112.5 137.5V150H125V137.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 137.891H125.391V149.609H137.109V137.891ZM125 137.5V150H137.5V137.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 137.891H137.891V149.609H149.609V137.891ZM137.5 137.5V150H150V137.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 137.891H150.391V149.609H162.109V137.891ZM150 137.5V150H162.5V137.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 137.891H162.891V149.609H174.609V137.891ZM162.5 137.5V150H175V137.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 137.891H175.391V149.609H187.109V137.891ZM175 137.5V150H187.5V137.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 137.891H187.891V149.609H199.609V137.891ZM187.5 137.5V150H200V137.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 137.891H200.391V149.609H212.109V137.891ZM200 137.5V150H212.5V137.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 137.891H212.891V149.609H224.609V137.891ZM212.5 137.5V150H225V137.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 137.891H225.391V149.609H237.109V137.891ZM225 137.5V150H237.5V137.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 137.891H237.891V149.609H249.609V137.891ZM237.5 137.5V150H250V137.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 137.891H250.391V149.609H262.109V137.891ZM250 137.5V150H262.5V137.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 137.891H262.891V149.609H274.609V137.891ZM262.5 137.5V150H275V137.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 137.891H275.391V149.609H287.109V137.891ZM275 137.5V150H287.5V137.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 137.891H287.891V149.609H299.609V137.891ZM287.5 137.5V150H300V137.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 137.891H300.391V149.609H312.109V137.891ZM300 137.5V150H312.5V137.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 137.891H312.891V149.609H324.609V137.891ZM312.5 137.5V150H325V137.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 137.891H325.391V149.609H337.109V137.891ZM325 137.5V150H337.5V137.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 137.891H337.891V149.609H349.609V137.891ZM337.5 137.5V150H350V137.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 137.891H350.391V149.609H362.109V137.891ZM350 137.5V150H362.5V137.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 137.891H362.891V149.609H374.609V137.891ZM362.5 137.5V150H375V137.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 137.891H375.391V149.609H387.109V137.891ZM375 137.5V150H387.5V137.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 137.891H387.891V149.609H399.609V137.891ZM387.5 137.5V150H400V137.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 137.891H400.391V149.609H412.109V137.891ZM400 137.5V150H412.5V137.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 137.891H412.891V149.609H424.609V137.891ZM412.5 137.5V150H425V137.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 137.891H425.391V149.609H437.109V137.891ZM425 137.5V150H437.5V137.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 137.891H437.891V149.609H449.609V137.891ZM437.5 137.5V150H450V137.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 137.891H450.391V149.609H462.109V137.891ZM450 137.5V150H462.5V137.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 137.891H462.891V149.609H474.609V137.891ZM462.5 137.5V150H475V137.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 137.891H475.391V149.609H487.109V137.891ZM475 137.5V150H487.5V137.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 137.891H487.891V149.609H499.609V137.891ZM487.5 137.5V150H500V137.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 137.891H500.391V149.609H512.109V137.891ZM500 137.5V150H512.5V137.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 137.891H512.891V149.609H524.609V137.891ZM512.5 137.5V150H525V137.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 137.891H525.391V149.609H537.109V137.891ZM525 137.5V150H537.5V137.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 137.891H537.891V149.609H549.609V137.891ZM537.5 137.5V150H550V137.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 137.891H550.391V149.609H562.109V137.891ZM550 137.5V150H562.5V137.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 137.891H562.891V149.609H574.609V137.891ZM562.5 137.5V150H575V137.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 137.891H575.391V149.609H587.109V137.891ZM575 137.5V150H587.5V137.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 137.891H587.891V149.609H599.609V137.891ZM587.5 137.5V150H600V137.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 150.391H0.390625V162.109H12.1094V150.391ZM0 150V162.5H12.5V150H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 150.391H12.8906V162.109H24.6094V150.391ZM12.5 150V162.5H25V150H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 150.391H25.3906V162.109H37.1094V150.391ZM25 150V162.5H37.5V150H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 150.391H37.8906V162.109H49.6094V150.391ZM37.5 150V162.5H50V150H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 150.391H50.3906V162.109H62.1094V150.391ZM50 150V162.5H62.5V150H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 150.391H62.8906V162.109H74.6094V150.391ZM62.5 150V162.5H75V150H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 150.391H75.3906V162.109H87.1094V150.391ZM75 150V162.5H87.5V150H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 150.391H87.8906V162.109H99.6094V150.391ZM87.5 150V162.5H100V150H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 150.391H100.391V162.109H112.109V150.391ZM100 150V162.5H112.5V150H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 150.391H112.891V162.109H124.609V150.391ZM112.5 150V162.5H125V150H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 150.391H125.391V162.109H137.109V150.391ZM125 150V162.5H137.5V150H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 150.391H137.891V162.109H149.609V150.391ZM137.5 150V162.5H150V150H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 150.391H150.391V162.109H162.109V150.391ZM150 150V162.5H162.5V150H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 150.391H162.891V162.109H174.609V150.391ZM162.5 150V162.5H175V150H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 150.391H175.391V162.109H187.109V150.391ZM175 150V162.5H187.5V150H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 150.391H187.891V162.109H199.609V150.391ZM187.5 150V162.5H200V150H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 150.391H200.391V162.109H212.109V150.391ZM200 150V162.5H212.5V150H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 150.391H212.891V162.109H224.609V150.391ZM212.5 150V162.5H225V150H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 150.391H225.391V162.109H237.109V150.391ZM225 150V162.5H237.5V150H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 150.391H237.891V162.109H249.609V150.391ZM237.5 150V162.5H250V150H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 150.391H250.391V162.109H262.109V150.391ZM250 150V162.5H262.5V150H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 150.391H262.891V162.109H274.609V150.391ZM262.5 150V162.5H275V150H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 150.391H275.391V162.109H287.109V150.391ZM275 150V162.5H287.5V150H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 150.391H287.891V162.109H299.609V150.391ZM287.5 150V162.5H300V150H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 150.391H300.391V162.109H312.109V150.391ZM300 150V162.5H312.5V150H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 150.391H312.891V162.109H324.609V150.391ZM312.5 150V162.5H325V150H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 150.391H325.391V162.109H337.109V150.391ZM325 150V162.5H337.5V150H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 150.391H337.891V162.109H349.609V150.391ZM337.5 150V162.5H350V150H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 150.391H350.391V162.109H362.109V150.391ZM350 150V162.5H362.5V150H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 150.391H362.891V162.109H374.609V150.391ZM362.5 150V162.5H375V150H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 150.391H375.391V162.109H387.109V150.391ZM375 150V162.5H387.5V150H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 150.391H387.891V162.109H399.609V150.391ZM387.5 150V162.5H400V150H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 150.391H400.391V162.109H412.109V150.391ZM400 150V162.5H412.5V150H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 150.391H412.891V162.109H424.609V150.391ZM412.5 150V162.5H425V150H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 150.391H425.391V162.109H437.109V150.391ZM425 150V162.5H437.5V150H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 150.391H437.891V162.109H449.609V150.391ZM437.5 150V162.5H450V150H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 150.391H450.391V162.109H462.109V150.391ZM450 150V162.5H462.5V150H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 150.391H462.891V162.109H474.609V150.391ZM462.5 150V162.5H475V150H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 150.391H475.391V162.109H487.109V150.391ZM475 150V162.5H487.5V150H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 150.391H487.891V162.109H499.609V150.391ZM487.5 150V162.5H500V150H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 150.391H500.391V162.109H512.109V150.391ZM500 150V162.5H512.5V150H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 150.391H512.891V162.109H524.609V150.391ZM512.5 150V162.5H525V150H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 150.391H525.391V162.109H537.109V150.391ZM525 150V162.5H537.5V150H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 150.391H537.891V162.109H549.609V150.391ZM537.5 150V162.5H550V150H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 150.391H550.391V162.109H562.109V150.391ZM550 150V162.5H562.5V150H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 150.391H562.891V162.109H574.609V150.391ZM562.5 150V162.5H575V150H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 150.391H575.391V162.109H587.109V150.391ZM575 150V162.5H587.5V150H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 150.391H587.891V162.109H599.609V150.391ZM587.5 150V162.5H600V150H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 162.891H0.390625V174.609H12.1094V162.891ZM0 162.5V175H12.5V162.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 162.891H12.8906V174.609H24.6094V162.891ZM12.5 162.5V175H25V162.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 162.891H25.3906V174.609H37.1094V162.891ZM25 162.5V175H37.5V162.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 162.891H37.8906V174.609H49.6094V162.891ZM37.5 162.5V175H50V162.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 162.891H50.3906V174.609H62.1094V162.891ZM50 162.5V175H62.5V162.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 162.891H62.8906V174.609H74.6094V162.891ZM62.5 162.5V175H75V162.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 162.891H75.3906V174.609H87.1094V162.891ZM75 162.5V175H87.5V162.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 162.891H87.8906V174.609H99.6094V162.891ZM87.5 162.5V175H100V162.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 162.891H100.391V174.609H112.109V162.891ZM100 162.5V175H112.5V162.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 162.891H112.891V174.609H124.609V162.891ZM112.5 162.5V175H125V162.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 162.891H125.391V174.609H137.109V162.891ZM125 162.5V175H137.5V162.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 162.891H137.891V174.609H149.609V162.891ZM137.5 162.5V175H150V162.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 162.891H150.391V174.609H162.109V162.891ZM150 162.5V175H162.5V162.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 162.891H162.891V174.609H174.609V162.891ZM162.5 162.5V175H175V162.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 162.891H175.391V174.609H187.109V162.891ZM175 162.5V175H187.5V162.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 162.891H187.891V174.609H199.609V162.891ZM187.5 162.5V175H200V162.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 162.891H200.391V174.609H212.109V162.891ZM200 162.5V175H212.5V162.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 162.891H212.891V174.609H224.609V162.891ZM212.5 162.5V175H225V162.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 162.891H225.391V174.609H237.109V162.891ZM225 162.5V175H237.5V162.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 162.891H237.891V174.609H249.609V162.891ZM237.5 162.5V175H250V162.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 162.891H250.391V174.609H262.109V162.891ZM250 162.5V175H262.5V162.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 162.891H262.891V174.609H274.609V162.891ZM262.5 162.5V175H275V162.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 162.891H275.391V174.609H287.109V162.891ZM275 162.5V175H287.5V162.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 162.891H287.891V174.609H299.609V162.891ZM287.5 162.5V175H300V162.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 162.891H300.391V174.609H312.109V162.891ZM300 162.5V175H312.5V162.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 162.891H312.891V174.609H324.609V162.891ZM312.5 162.5V175H325V162.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 162.891H325.391V174.609H337.109V162.891ZM325 162.5V175H337.5V162.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 162.891H337.891V174.609H349.609V162.891ZM337.5 162.5V175H350V162.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 162.891H350.391V174.609H362.109V162.891ZM350 162.5V175H362.5V162.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 162.891H362.891V174.609H374.609V162.891ZM362.5 162.5V175H375V162.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 162.891H375.391V174.609H387.109V162.891ZM375 162.5V175H387.5V162.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 162.891H387.891V174.609H399.609V162.891ZM387.5 162.5V175H400V162.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 162.891H400.391V174.609H412.109V162.891ZM400 162.5V175H412.5V162.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 162.891H412.891V174.609H424.609V162.891ZM412.5 162.5V175H425V162.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 162.891H425.391V174.609H437.109V162.891ZM425 162.5V175H437.5V162.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 162.891H437.891V174.609H449.609V162.891ZM437.5 162.5V175H450V162.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 162.891H450.391V174.609H462.109V162.891ZM450 162.5V175H462.5V162.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 162.891H462.891V174.609H474.609V162.891ZM462.5 162.5V175H475V162.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 162.891H475.391V174.609H487.109V162.891ZM475 162.5V175H487.5V162.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 162.891H487.891V174.609H499.609V162.891ZM487.5 162.5V175H500V162.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 162.891H500.391V174.609H512.109V162.891ZM500 162.5V175H512.5V162.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 162.891H512.891V174.609H524.609V162.891ZM512.5 162.5V175H525V162.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 162.891H525.391V174.609H537.109V162.891ZM525 162.5V175H537.5V162.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 162.891H537.891V174.609H549.609V162.891ZM537.5 162.5V175H550V162.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 162.891H550.391V174.609H562.109V162.891ZM550 162.5V175H562.5V162.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 162.891H562.891V174.609H574.609V162.891ZM562.5 162.5V175H575V162.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 162.891H575.391V174.609H587.109V162.891ZM575 162.5V175H587.5V162.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 162.891H587.891V174.609H599.609V162.891ZM587.5 162.5V175H600V162.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 175.391H0.390625V187.109H12.1094V175.391ZM0 175V187.5H12.5V175H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 175.391H12.8906V187.109H24.6094V175.391ZM12.5 175V187.5H25V175H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 175.391H25.3906V187.109H37.1094V175.391ZM25 175V187.5H37.5V175H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 175.391H37.8906V187.109H49.6094V175.391ZM37.5 175V187.5H50V175H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 175.391H50.3906V187.109H62.1094V175.391ZM50 175V187.5H62.5V175H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 175.391H62.8906V187.109H74.6094V175.391ZM62.5 175V187.5H75V175H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 175.391H75.3906V187.109H87.1094V175.391ZM75 175V187.5H87.5V175H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 175.391H87.8906V187.109H99.6094V175.391ZM87.5 175V187.5H100V175H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 175.391H100.391V187.109H112.109V175.391ZM100 175V187.5H112.5V175H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 175.391H112.891V187.109H124.609V175.391ZM112.5 175V187.5H125V175H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 175.391H125.391V187.109H137.109V175.391ZM125 175V187.5H137.5V175H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 175.391H137.891V187.109H149.609V175.391ZM137.5 175V187.5H150V175H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 175.391H150.391V187.109H162.109V175.391ZM150 175V187.5H162.5V175H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 175.391H162.891V187.109H174.609V175.391ZM162.5 175V187.5H175V175H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 175.391H175.391V187.109H187.109V175.391ZM175 175V187.5H187.5V175H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 175.391H187.891V187.109H199.609V175.391ZM187.5 175V187.5H200V175H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 175.391H200.391V187.109H212.109V175.391ZM200 175V187.5H212.5V175H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 175.391H212.891V187.109H224.609V175.391ZM212.5 175V187.5H225V175H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 175.391H225.391V187.109H237.109V175.391ZM225 175V187.5H237.5V175H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 175.391H237.891V187.109H249.609V175.391ZM237.5 175V187.5H250V175H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 175.391H250.391V187.109H262.109V175.391ZM250 175V187.5H262.5V175H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 175.391H262.891V187.109H274.609V175.391ZM262.5 175V187.5H275V175H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 175.391H275.391V187.109H287.109V175.391ZM275 175V187.5H287.5V175H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 175.391H287.891V187.109H299.609V175.391ZM287.5 175V187.5H300V175H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 175.391H300.391V187.109H312.109V175.391ZM300 175V187.5H312.5V175H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 175.391H312.891V187.109H324.609V175.391ZM312.5 175V187.5H325V175H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 175.391H325.391V187.109H337.109V175.391ZM325 175V187.5H337.5V175H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 175.391H337.891V187.109H349.609V175.391ZM337.5 175V187.5H350V175H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 175.391H350.391V187.109H362.109V175.391ZM350 175V187.5H362.5V175H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 175.391H362.891V187.109H374.609V175.391ZM362.5 175V187.5H375V175H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 175.391H375.391V187.109H387.109V175.391ZM375 175V187.5H387.5V175H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 175.391H387.891V187.109H399.609V175.391ZM387.5 175V187.5H400V175H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 175.391H400.391V187.109H412.109V175.391ZM400 175V187.5H412.5V175H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 175.391H412.891V187.109H424.609V175.391ZM412.5 175V187.5H425V175H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 175.391H425.391V187.109H437.109V175.391ZM425 175V187.5H437.5V175H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 175.391H437.891V187.109H449.609V175.391ZM437.5 175V187.5H450V175H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 175.391H450.391V187.109H462.109V175.391ZM450 175V187.5H462.5V175H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 175.391H462.891V187.109H474.609V175.391ZM462.5 175V187.5H475V175H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 175.391H475.391V187.109H487.109V175.391ZM475 175V187.5H487.5V175H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 175.391H487.891V187.109H499.609V175.391ZM487.5 175V187.5H500V175H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 175.391H500.391V187.109H512.109V175.391ZM500 175V187.5H512.5V175H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 175.391H512.891V187.109H524.609V175.391ZM512.5 175V187.5H525V175H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 175.391H525.391V187.109H537.109V175.391ZM525 175V187.5H537.5V175H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 175.391H537.891V187.109H549.609V175.391ZM537.5 175V187.5H550V175H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 175.391H550.391V187.109H562.109V175.391ZM550 175V187.5H562.5V175H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 175.391H562.891V187.109H574.609V175.391ZM562.5 175V187.5H575V175H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 175.391H575.391V187.109H587.109V175.391ZM575 175V187.5H587.5V175H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 175.391H587.891V187.109H599.609V175.391ZM587.5 175V187.5H600V175H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 187.891H0.390625V199.609H12.1094V187.891ZM0 187.5V200H12.5V187.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 187.891H12.8906V199.609H24.6094V187.891ZM12.5 187.5V200H25V187.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 187.891H25.3906V199.609H37.1094V187.891ZM25 187.5V200H37.5V187.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 187.891H37.8906V199.609H49.6094V187.891ZM37.5 187.5V200H50V187.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 187.891H50.3906V199.609H62.1094V187.891ZM50 187.5V200H62.5V187.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 187.891H62.8906V199.609H74.6094V187.891ZM62.5 187.5V200H75V187.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 187.891H75.3906V199.609H87.1094V187.891ZM75 187.5V200H87.5V187.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 187.891H87.8906V199.609H99.6094V187.891ZM87.5 187.5V200H100V187.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 187.891H100.391V199.609H112.109V187.891ZM100 187.5V200H112.5V187.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 187.891H112.891V199.609H124.609V187.891ZM112.5 187.5V200H125V187.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 187.891H125.391V199.609H137.109V187.891ZM125 187.5V200H137.5V187.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 187.891H137.891V199.609H149.609V187.891ZM137.5 187.5V200H150V187.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 187.891H150.391V199.609H162.109V187.891ZM150 187.5V200H162.5V187.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 187.891H162.891V199.609H174.609V187.891ZM162.5 187.5V200H175V187.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 187.891H175.391V199.609H187.109V187.891ZM175 187.5V200H187.5V187.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 187.891H187.891V199.609H199.609V187.891ZM187.5 187.5V200H200V187.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 187.891H200.391V199.609H212.109V187.891ZM200 187.5V200H212.5V187.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 187.891H212.891V199.609H224.609V187.891ZM212.5 187.5V200H225V187.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 187.891H225.391V199.609H237.109V187.891ZM225 187.5V200H237.5V187.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 187.891H237.891V199.609H249.609V187.891ZM237.5 187.5V200H250V187.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 187.891H250.391V199.609H262.109V187.891ZM250 187.5V200H262.5V187.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 187.891H262.891V199.609H274.609V187.891ZM262.5 187.5V200H275V187.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 187.891H275.391V199.609H287.109V187.891ZM275 187.5V200H287.5V187.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 187.891H287.891V199.609H299.609V187.891ZM287.5 187.5V200H300V187.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 187.891H300.391V199.609H312.109V187.891ZM300 187.5V200H312.5V187.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 187.891H312.891V199.609H324.609V187.891ZM312.5 187.5V200H325V187.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 187.891H325.391V199.609H337.109V187.891ZM325 187.5V200H337.5V187.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 187.891H337.891V199.609H349.609V187.891ZM337.5 187.5V200H350V187.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 187.891H350.391V199.609H362.109V187.891ZM350 187.5V200H362.5V187.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 187.891H362.891V199.609H374.609V187.891ZM362.5 187.5V200H375V187.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 187.891H375.391V199.609H387.109V187.891ZM375 187.5V200H387.5V187.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 187.891H387.891V199.609H399.609V187.891ZM387.5 187.5V200H400V187.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 187.891H400.391V199.609H412.109V187.891ZM400 187.5V200H412.5V187.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 187.891H412.891V199.609H424.609V187.891ZM412.5 187.5V200H425V187.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 187.891H425.391V199.609H437.109V187.891ZM425 187.5V200H437.5V187.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 187.891H437.891V199.609H449.609V187.891ZM437.5 187.5V200H450V187.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 187.891H450.391V199.609H462.109V187.891ZM450 187.5V200H462.5V187.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 187.891H462.891V199.609H474.609V187.891ZM462.5 187.5V200H475V187.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 187.891H475.391V199.609H487.109V187.891ZM475 187.5V200H487.5V187.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 187.891H487.891V199.609H499.609V187.891ZM487.5 187.5V200H500V187.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 187.891H500.391V199.609H512.109V187.891ZM500 187.5V200H512.5V187.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 187.891H512.891V199.609H524.609V187.891ZM512.5 187.5V200H525V187.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 187.891H525.391V199.609H537.109V187.891ZM525 187.5V200H537.5V187.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 187.891H537.891V199.609H549.609V187.891ZM537.5 187.5V200H550V187.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 187.891H550.391V199.609H562.109V187.891ZM550 187.5V200H562.5V187.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 187.891H562.891V199.609H574.609V187.891ZM562.5 187.5V200H575V187.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 187.891H575.391V199.609H587.109V187.891ZM575 187.5V200H587.5V187.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 187.891H587.891V199.609H599.609V187.891ZM587.5 187.5V200H600V187.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 200.391H0.390625V212.109H12.1094V200.391ZM0 200V212.5H12.5V200H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 200.391H12.8906V212.109H24.6094V200.391ZM12.5 200V212.5H25V200H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 200.391H25.3906V212.109H37.1094V200.391ZM25 200V212.5H37.5V200H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 200.391H37.8906V212.109H49.6094V200.391ZM37.5 200V212.5H50V200H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 200.391H50.3906V212.109H62.1094V200.391ZM50 200V212.5H62.5V200H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 200.391H62.8906V212.109H74.6094V200.391ZM62.5 200V212.5H75V200H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 200.391H75.3906V212.109H87.1094V200.391ZM75 200V212.5H87.5V200H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 200.391H87.8906V212.109H99.6094V200.391ZM87.5 200V212.5H100V200H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 200.391H100.391V212.109H112.109V200.391ZM100 200V212.5H112.5V200H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 200.391H112.891V212.109H124.609V200.391ZM112.5 200V212.5H125V200H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 200.391H125.391V212.109H137.109V200.391ZM125 200V212.5H137.5V200H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 200.391H137.891V212.109H149.609V200.391ZM137.5 200V212.5H150V200H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 200.391H150.391V212.109H162.109V200.391ZM150 200V212.5H162.5V200H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 200.391H162.891V212.109H174.609V200.391ZM162.5 200V212.5H175V200H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 200.391H175.391V212.109H187.109V200.391ZM175 200V212.5H187.5V200H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 200.391H187.891V212.109H199.609V200.391ZM187.5 200V212.5H200V200H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 200.391H200.391V212.109H212.109V200.391ZM200 200V212.5H212.5V200H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 200.391H212.891V212.109H224.609V200.391ZM212.5 200V212.5H225V200H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 200.391H225.391V212.109H237.109V200.391ZM225 200V212.5H237.5V200H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 200.391H237.891V212.109H249.609V200.391ZM237.5 200V212.5H250V200H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 200.391H250.391V212.109H262.109V200.391ZM250 200V212.5H262.5V200H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 200.391H262.891V212.109H274.609V200.391ZM262.5 200V212.5H275V200H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 200.391H275.391V212.109H287.109V200.391ZM275 200V212.5H287.5V200H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 200.391H287.891V212.109H299.609V200.391ZM287.5 200V212.5H300V200H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 200.391H300.391V212.109H312.109V200.391ZM300 200V212.5H312.5V200H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 200.391H312.891V212.109H324.609V200.391ZM312.5 200V212.5H325V200H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 200.391H325.391V212.109H337.109V200.391ZM325 200V212.5H337.5V200H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 200.391H337.891V212.109H349.609V200.391ZM337.5 200V212.5H350V200H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 200.391H350.391V212.109H362.109V200.391ZM350 200V212.5H362.5V200H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 200.391H362.891V212.109H374.609V200.391ZM362.5 200V212.5H375V200H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 200.391H375.391V212.109H387.109V200.391ZM375 200V212.5H387.5V200H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 200.391H387.891V212.109H399.609V200.391ZM387.5 200V212.5H400V200H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 200.391H400.391V212.109H412.109V200.391ZM400 200V212.5H412.5V200H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 200.391H412.891V212.109H424.609V200.391ZM412.5 200V212.5H425V200H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 200.391H425.391V212.109H437.109V200.391ZM425 200V212.5H437.5V200H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 200.391H437.891V212.109H449.609V200.391ZM437.5 200V212.5H450V200H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 200.391H450.391V212.109H462.109V200.391ZM450 200V212.5H462.5V200H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 200.391H462.891V212.109H474.609V200.391ZM462.5 200V212.5H475V200H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 200.391H475.391V212.109H487.109V200.391ZM475 200V212.5H487.5V200H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 200.391H487.891V212.109H499.609V200.391ZM487.5 200V212.5H500V200H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 200.391H500.391V212.109H512.109V200.391ZM500 200V212.5H512.5V200H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 200.391H512.891V212.109H524.609V200.391ZM512.5 200V212.5H525V200H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 200.391H525.391V212.109H537.109V200.391ZM525 200V212.5H537.5V200H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 200.391H537.891V212.109H549.609V200.391ZM537.5 200V212.5H550V200H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 200.391H550.391V212.109H562.109V200.391ZM550 200V212.5H562.5V200H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 200.391H562.891V212.109H574.609V200.391ZM562.5 200V212.5H575V200H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 200.391H575.391V212.109H587.109V200.391ZM575 200V212.5H587.5V200H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 200.391H587.891V212.109H599.609V200.391ZM587.5 200V212.5H600V200H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 212.891H0.390625V224.609H12.1094V212.891ZM0 212.5V225H12.5V212.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 212.891H12.8906V224.609H24.6094V212.891ZM12.5 212.5V225H25V212.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 212.891H25.3906V224.609H37.1094V212.891ZM25 212.5V225H37.5V212.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 212.891H37.8906V224.609H49.6094V212.891ZM37.5 212.5V225H50V212.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 212.891H50.3906V224.609H62.1094V212.891ZM50 212.5V225H62.5V212.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 212.891H62.8906V224.609H74.6094V212.891ZM62.5 212.5V225H75V212.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 212.891H75.3906V224.609H87.1094V212.891ZM75 212.5V225H87.5V212.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 212.891H87.8906V224.609H99.6094V212.891ZM87.5 212.5V225H100V212.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 212.891H100.391V224.609H112.109V212.891ZM100 212.5V225H112.5V212.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 212.891H112.891V224.609H124.609V212.891ZM112.5 212.5V225H125V212.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 212.891H125.391V224.609H137.109V212.891ZM125 212.5V225H137.5V212.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 212.891H137.891V224.609H149.609V212.891ZM137.5 212.5V225H150V212.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 212.891H150.391V224.609H162.109V212.891ZM150 212.5V225H162.5V212.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 212.891H162.891V224.609H174.609V212.891ZM162.5 212.5V225H175V212.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 212.891H175.391V224.609H187.109V212.891ZM175 212.5V225H187.5V212.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 212.891H187.891V224.609H199.609V212.891ZM187.5 212.5V225H200V212.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 212.891H200.391V224.609H212.109V212.891ZM200 212.5V225H212.5V212.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 212.891H212.891V224.609H224.609V212.891ZM212.5 212.5V225H225V212.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 212.891H225.391V224.609H237.109V212.891ZM225 212.5V225H237.5V212.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 212.891H237.891V224.609H249.609V212.891ZM237.5 212.5V225H250V212.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 212.891H250.391V224.609H262.109V212.891ZM250 212.5V225H262.5V212.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 212.891H262.891V224.609H274.609V212.891ZM262.5 212.5V225H275V212.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 212.891H275.391V224.609H287.109V212.891ZM275 212.5V225H287.5V212.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 212.891H287.891V224.609H299.609V212.891ZM287.5 212.5V225H300V212.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 212.891H300.391V224.609H312.109V212.891ZM300 212.5V225H312.5V212.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 212.891H312.891V224.609H324.609V212.891ZM312.5 212.5V225H325V212.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 212.891H325.391V224.609H337.109V212.891ZM325 212.5V225H337.5V212.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 212.891H337.891V224.609H349.609V212.891ZM337.5 212.5V225H350V212.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 212.891H350.391V224.609H362.109V212.891ZM350 212.5V225H362.5V212.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 212.891H362.891V224.609H374.609V212.891ZM362.5 212.5V225H375V212.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 212.891H375.391V224.609H387.109V212.891ZM375 212.5V225H387.5V212.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 212.891H387.891V224.609H399.609V212.891ZM387.5 212.5V225H400V212.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 212.891H400.391V224.609H412.109V212.891ZM400 212.5V225H412.5V212.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 212.891H412.891V224.609H424.609V212.891ZM412.5 212.5V225H425V212.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 212.891H425.391V224.609H437.109V212.891ZM425 212.5V225H437.5V212.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 212.891H437.891V224.609H449.609V212.891ZM437.5 212.5V225H450V212.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 212.891H450.391V224.609H462.109V212.891ZM450 212.5V225H462.5V212.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 212.891H462.891V224.609H474.609V212.891ZM462.5 212.5V225H475V212.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 212.891H475.391V224.609H487.109V212.891ZM475 212.5V225H487.5V212.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 212.891H487.891V224.609H499.609V212.891ZM487.5 212.5V225H500V212.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 212.891H500.391V224.609H512.109V212.891ZM500 212.5V225H512.5V212.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 212.891H512.891V224.609H524.609V212.891ZM512.5 212.5V225H525V212.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 212.891H525.391V224.609H537.109V212.891ZM525 212.5V225H537.5V212.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 212.891H537.891V224.609H549.609V212.891ZM537.5 212.5V225H550V212.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 212.891H550.391V224.609H562.109V212.891ZM550 212.5V225H562.5V212.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 212.891H562.891V224.609H574.609V212.891ZM562.5 212.5V225H575V212.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 212.891H575.391V224.609H587.109V212.891ZM575 212.5V225H587.5V212.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 212.891H587.891V224.609H599.609V212.891ZM587.5 212.5V225H600V212.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 225.391H0.390625V237.109H12.1094V225.391ZM0 225V237.5H12.5V225H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 225.391H12.8906V237.109H24.6094V225.391ZM12.5 225V237.5H25V225H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 225.391H25.3906V237.109H37.1094V225.391ZM25 225V237.5H37.5V225H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 225.391H37.8906V237.109H49.6094V225.391ZM37.5 225V237.5H50V225H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 225.391H50.3906V237.109H62.1094V225.391ZM50 225V237.5H62.5V225H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 225.391H62.8906V237.109H74.6094V225.391ZM62.5 225V237.5H75V225H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 225.391H75.3906V237.109H87.1094V225.391ZM75 225V237.5H87.5V225H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 225.391H87.8906V237.109H99.6094V225.391ZM87.5 225V237.5H100V225H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 225.391H100.391V237.109H112.109V225.391ZM100 225V237.5H112.5V225H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 225.391H112.891V237.109H124.609V225.391ZM112.5 225V237.5H125V225H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 225.391H125.391V237.109H137.109V225.391ZM125 225V237.5H137.5V225H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 225.391H137.891V237.109H149.609V225.391ZM137.5 225V237.5H150V225H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 225.391H150.391V237.109H162.109V225.391ZM150 225V237.5H162.5V225H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 225.391H162.891V237.109H174.609V225.391ZM162.5 225V237.5H175V225H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 225.391H175.391V237.109H187.109V225.391ZM175 225V237.5H187.5V225H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 225.391H187.891V237.109H199.609V225.391ZM187.5 225V237.5H200V225H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 225.391H200.391V237.109H212.109V225.391ZM200 225V237.5H212.5V225H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 225.391H212.891V237.109H224.609V225.391ZM212.5 225V237.5H225V225H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 225.391H225.391V237.109H237.109V225.391ZM225 225V237.5H237.5V225H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 225.391H237.891V237.109H249.609V225.391ZM237.5 225V237.5H250V225H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 225.391H250.391V237.109H262.109V225.391ZM250 225V237.5H262.5V225H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 225.391H262.891V237.109H274.609V225.391ZM262.5 225V237.5H275V225H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 225.391H275.391V237.109H287.109V225.391ZM275 225V237.5H287.5V225H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 225.391H287.891V237.109H299.609V225.391ZM287.5 225V237.5H300V225H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 225.391H300.391V237.109H312.109V225.391ZM300 225V237.5H312.5V225H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 225.391H312.891V237.109H324.609V225.391ZM312.5 225V237.5H325V225H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 225.391H325.391V237.109H337.109V225.391ZM325 225V237.5H337.5V225H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 225.391H337.891V237.109H349.609V225.391ZM337.5 225V237.5H350V225H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 225.391H350.391V237.109H362.109V225.391ZM350 225V237.5H362.5V225H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 225.391H362.891V237.109H374.609V225.391ZM362.5 225V237.5H375V225H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 225.391H375.391V237.109H387.109V225.391ZM375 225V237.5H387.5V225H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 225.391H387.891V237.109H399.609V225.391ZM387.5 225V237.5H400V225H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 225.391H400.391V237.109H412.109V225.391ZM400 225V237.5H412.5V225H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 225.391H412.891V237.109H424.609V225.391ZM412.5 225V237.5H425V225H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 225.391H425.391V237.109H437.109V225.391ZM425 225V237.5H437.5V225H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 225.391H437.891V237.109H449.609V225.391ZM437.5 225V237.5H450V225H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 225.391H450.391V237.109H462.109V225.391ZM450 225V237.5H462.5V225H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 225.391H462.891V237.109H474.609V225.391ZM462.5 225V237.5H475V225H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 225.391H475.391V237.109H487.109V225.391ZM475 225V237.5H487.5V225H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 225.391H487.891V237.109H499.609V225.391ZM487.5 225V237.5H500V225H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 225.391H500.391V237.109H512.109V225.391ZM500 225V237.5H512.5V225H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 225.391H512.891V237.109H524.609V225.391ZM512.5 225V237.5H525V225H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 225.391H525.391V237.109H537.109V225.391ZM525 225V237.5H537.5V225H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 225.391H537.891V237.109H549.609V225.391ZM537.5 225V237.5H550V225H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 225.391H550.391V237.109H562.109V225.391ZM550 225V237.5H562.5V225H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 225.391H562.891V237.109H574.609V225.391ZM562.5 225V237.5H575V225H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 225.391H575.391V237.109H587.109V225.391ZM575 225V237.5H587.5V225H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 225.391H587.891V237.109H599.609V225.391ZM587.5 225V237.5H600V225H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 237.891H0.390625V249.609H12.1094V237.891ZM0 237.5V250H12.5V237.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 237.891H12.8906V249.609H24.6094V237.891ZM12.5 237.5V250H25V237.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 237.891H25.3906V249.609H37.1094V237.891ZM25 237.5V250H37.5V237.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 237.891H37.8906V249.609H49.6094V237.891ZM37.5 237.5V250H50V237.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 237.891H50.3906V249.609H62.1094V237.891ZM50 237.5V250H62.5V237.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 237.891H62.8906V249.609H74.6094V237.891ZM62.5 237.5V250H75V237.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 237.891H75.3906V249.609H87.1094V237.891ZM75 237.5V250H87.5V237.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 237.891H87.8906V249.609H99.6094V237.891ZM87.5 237.5V250H100V237.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 237.891H100.391V249.609H112.109V237.891ZM100 237.5V250H112.5V237.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 237.891H112.891V249.609H124.609V237.891ZM112.5 237.5V250H125V237.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 237.891H125.391V249.609H137.109V237.891ZM125 237.5V250H137.5V237.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 237.891H137.891V249.609H149.609V237.891ZM137.5 237.5V250H150V237.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 237.891H150.391V249.609H162.109V237.891ZM150 237.5V250H162.5V237.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 237.891H162.891V249.609H174.609V237.891ZM162.5 237.5V250H175V237.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 237.891H175.391V249.609H187.109V237.891ZM175 237.5V250H187.5V237.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 237.891H187.891V249.609H199.609V237.891ZM187.5 237.5V250H200V237.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 237.891H200.391V249.609H212.109V237.891ZM200 237.5V250H212.5V237.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 237.891H212.891V249.609H224.609V237.891ZM212.5 237.5V250H225V237.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 237.891H225.391V249.609H237.109V237.891ZM225 237.5V250H237.5V237.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 237.891H237.891V249.609H249.609V237.891ZM237.5 237.5V250H250V237.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 237.891H250.391V249.609H262.109V237.891ZM250 237.5V250H262.5V237.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 237.891H262.891V249.609H274.609V237.891ZM262.5 237.5V250H275V237.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 237.891H275.391V249.609H287.109V237.891ZM275 237.5V250H287.5V237.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 237.891H287.891V249.609H299.609V237.891ZM287.5 237.5V250H300V237.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 237.891H300.391V249.609H312.109V237.891ZM300 237.5V250H312.5V237.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 237.891H312.891V249.609H324.609V237.891ZM312.5 237.5V250H325V237.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 237.891H325.391V249.609H337.109V237.891ZM325 237.5V250H337.5V237.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 237.891H337.891V249.609H349.609V237.891ZM337.5 237.5V250H350V237.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 237.891H350.391V249.609H362.109V237.891ZM350 237.5V250H362.5V237.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 237.891H362.891V249.609H374.609V237.891ZM362.5 237.5V250H375V237.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 237.891H375.391V249.609H387.109V237.891ZM375 237.5V250H387.5V237.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 237.891H387.891V249.609H399.609V237.891ZM387.5 237.5V250H400V237.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 237.891H400.391V249.609H412.109V237.891ZM400 237.5V250H412.5V237.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 237.891H412.891V249.609H424.609V237.891ZM412.5 237.5V250H425V237.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 237.891H425.391V249.609H437.109V237.891ZM425 237.5V250H437.5V237.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 237.891H437.891V249.609H449.609V237.891ZM437.5 237.5V250H450V237.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 237.891H450.391V249.609H462.109V237.891ZM450 237.5V250H462.5V237.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 237.891H462.891V249.609H474.609V237.891ZM462.5 237.5V250H475V237.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 237.891H475.391V249.609H487.109V237.891ZM475 237.5V250H487.5V237.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 237.891H487.891V249.609H499.609V237.891ZM487.5 237.5V250H500V237.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 237.891H500.391V249.609H512.109V237.891ZM500 237.5V250H512.5V237.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 237.891H512.891V249.609H524.609V237.891ZM512.5 237.5V250H525V237.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 237.891H525.391V249.609H537.109V237.891ZM525 237.5V250H537.5V237.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 237.891H537.891V249.609H549.609V237.891ZM537.5 237.5V250H550V237.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 237.891H550.391V249.609H562.109V237.891ZM550 237.5V250H562.5V237.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 237.891H562.891V249.609H574.609V237.891ZM562.5 237.5V250H575V237.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 237.891H575.391V249.609H587.109V237.891ZM575 237.5V250H587.5V237.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 237.891H587.891V249.609H599.609V237.891ZM587.5 237.5V250H600V237.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 250.391H0.390625V262.109H12.1094V250.391ZM0 250V262.5H12.5V250H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 250.391H12.8906V262.109H24.6094V250.391ZM12.5 250V262.5H25V250H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 250.391H25.3906V262.109H37.1094V250.391ZM25 250V262.5H37.5V250H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 250.391H37.8906V262.109H49.6094V250.391ZM37.5 250V262.5H50V250H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 250.391H50.3906V262.109H62.1094V250.391ZM50 250V262.5H62.5V250H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 250.391H62.8906V262.109H74.6094V250.391ZM62.5 250V262.5H75V250H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 250.391H75.3906V262.109H87.1094V250.391ZM75 250V262.5H87.5V250H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 250.391H87.8906V262.109H99.6094V250.391ZM87.5 250V262.5H100V250H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 250.391H100.391V262.109H112.109V250.391ZM100 250V262.5H112.5V250H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 250.391H112.891V262.109H124.609V250.391ZM112.5 250V262.5H125V250H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 250.391H125.391V262.109H137.109V250.391ZM125 250V262.5H137.5V250H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 250.391H137.891V262.109H149.609V250.391ZM137.5 250V262.5H150V250H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 250.391H150.391V262.109H162.109V250.391ZM150 250V262.5H162.5V250H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 250.391H162.891V262.109H174.609V250.391ZM162.5 250V262.5H175V250H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 250.391H175.391V262.109H187.109V250.391ZM175 250V262.5H187.5V250H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 250.391H187.891V262.109H199.609V250.391ZM187.5 250V262.5H200V250H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 250.391H200.391V262.109H212.109V250.391ZM200 250V262.5H212.5V250H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 250.391H212.891V262.109H224.609V250.391ZM212.5 250V262.5H225V250H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 250.391H225.391V262.109H237.109V250.391ZM225 250V262.5H237.5V250H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 250.391H237.891V262.109H249.609V250.391ZM237.5 250V262.5H250V250H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 250.391H250.391V262.109H262.109V250.391ZM250 250V262.5H262.5V250H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 250.391H262.891V262.109H274.609V250.391ZM262.5 250V262.5H275V250H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 250.391H275.391V262.109H287.109V250.391ZM275 250V262.5H287.5V250H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 250.391H287.891V262.109H299.609V250.391ZM287.5 250V262.5H300V250H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 250.391H300.391V262.109H312.109V250.391ZM300 250V262.5H312.5V250H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 250.391H312.891V262.109H324.609V250.391ZM312.5 250V262.5H325V250H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 250.391H325.391V262.109H337.109V250.391ZM325 250V262.5H337.5V250H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 250.391H337.891V262.109H349.609V250.391ZM337.5 250V262.5H350V250H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 250.391H350.391V262.109H362.109V250.391ZM350 250V262.5H362.5V250H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 250.391H362.891V262.109H374.609V250.391ZM362.5 250V262.5H375V250H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 250.391H375.391V262.109H387.109V250.391ZM375 250V262.5H387.5V250H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 250.391H387.891V262.109H399.609V250.391ZM387.5 250V262.5H400V250H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 250.391H400.391V262.109H412.109V250.391ZM400 250V262.5H412.5V250H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 250.391H412.891V262.109H424.609V250.391ZM412.5 250V262.5H425V250H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 250.391H425.391V262.109H437.109V250.391ZM425 250V262.5H437.5V250H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 250.391H437.891V262.109H449.609V250.391ZM437.5 250V262.5H450V250H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 250.391H450.391V262.109H462.109V250.391ZM450 250V262.5H462.5V250H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 250.391H462.891V262.109H474.609V250.391ZM462.5 250V262.5H475V250H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 250.391H475.391V262.109H487.109V250.391ZM475 250V262.5H487.5V250H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 250.391H487.891V262.109H499.609V250.391ZM487.5 250V262.5H500V250H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 250.391H500.391V262.109H512.109V250.391ZM500 250V262.5H512.5V250H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 250.391H512.891V262.109H524.609V250.391ZM512.5 250V262.5H525V250H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 250.391H525.391V262.109H537.109V250.391ZM525 250V262.5H537.5V250H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 250.391H537.891V262.109H549.609V250.391ZM537.5 250V262.5H550V250H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 250.391H550.391V262.109H562.109V250.391ZM550 250V262.5H562.5V250H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 250.391H562.891V262.109H574.609V250.391ZM562.5 250V262.5H575V250H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 250.391H575.391V262.109H587.109V250.391ZM575 250V262.5H587.5V250H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 250.391H587.891V262.109H599.609V250.391ZM587.5 250V262.5H600V250H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 262.891H0.390625V274.609H12.1094V262.891ZM0 262.5V275H12.5V262.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 262.891H12.8906V274.609H24.6094V262.891ZM12.5 262.5V275H25V262.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 262.891H25.3906V274.609H37.1094V262.891ZM25 262.5V275H37.5V262.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 262.891H37.8906V274.609H49.6094V262.891ZM37.5 262.5V275H50V262.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 262.891H50.3906V274.609H62.1094V262.891ZM50 262.5V275H62.5V262.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 262.891H62.8906V274.609H74.6094V262.891ZM62.5 262.5V275H75V262.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 262.891H75.3906V274.609H87.1094V262.891ZM75 262.5V275H87.5V262.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 262.891H87.8906V274.609H99.6094V262.891ZM87.5 262.5V275H100V262.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 262.891H100.391V274.609H112.109V262.891ZM100 262.5V275H112.5V262.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 262.891H112.891V274.609H124.609V262.891ZM112.5 262.5V275H125V262.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 262.891H125.391V274.609H137.109V262.891ZM125 262.5V275H137.5V262.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 262.891H137.891V274.609H149.609V262.891ZM137.5 262.5V275H150V262.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 262.891H150.391V274.609H162.109V262.891ZM150 262.5V275H162.5V262.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 262.891H162.891V274.609H174.609V262.891ZM162.5 262.5V275H175V262.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 262.891H175.391V274.609H187.109V262.891ZM175 262.5V275H187.5V262.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 262.891H187.891V274.609H199.609V262.891ZM187.5 262.5V275H200V262.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 262.891H200.391V274.609H212.109V262.891ZM200 262.5V275H212.5V262.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 262.891H212.891V274.609H224.609V262.891ZM212.5 262.5V275H225V262.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 262.891H225.391V274.609H237.109V262.891ZM225 262.5V275H237.5V262.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 262.891H237.891V274.609H249.609V262.891ZM237.5 262.5V275H250V262.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 262.891H250.391V274.609H262.109V262.891ZM250 262.5V275H262.5V262.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 262.891H262.891V274.609H274.609V262.891ZM262.5 262.5V275H275V262.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 262.891H275.391V274.609H287.109V262.891ZM275 262.5V275H287.5V262.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 262.891H287.891V274.609H299.609V262.891ZM287.5 262.5V275H300V262.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 262.891H300.391V274.609H312.109V262.891ZM300 262.5V275H312.5V262.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 262.891H312.891V274.609H324.609V262.891ZM312.5 262.5V275H325V262.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 262.891H325.391V274.609H337.109V262.891ZM325 262.5V275H337.5V262.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 262.891H337.891V274.609H349.609V262.891ZM337.5 262.5V275H350V262.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 262.891H350.391V274.609H362.109V262.891ZM350 262.5V275H362.5V262.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 262.891H362.891V274.609H374.609V262.891ZM362.5 262.5V275H375V262.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 262.891H375.391V274.609H387.109V262.891ZM375 262.5V275H387.5V262.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 262.891H387.891V274.609H399.609V262.891ZM387.5 262.5V275H400V262.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 262.891H400.391V274.609H412.109V262.891ZM400 262.5V275H412.5V262.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 262.891H412.891V274.609H424.609V262.891ZM412.5 262.5V275H425V262.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 262.891H425.391V274.609H437.109V262.891ZM425 262.5V275H437.5V262.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 262.891H437.891V274.609H449.609V262.891ZM437.5 262.5V275H450V262.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 262.891H450.391V274.609H462.109V262.891ZM450 262.5V275H462.5V262.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 262.891H462.891V274.609H474.609V262.891ZM462.5 262.5V275H475V262.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 262.891H475.391V274.609H487.109V262.891ZM475 262.5V275H487.5V262.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 262.891H487.891V274.609H499.609V262.891ZM487.5 262.5V275H500V262.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 262.891H500.391V274.609H512.109V262.891ZM500 262.5V275H512.5V262.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 262.891H512.891V274.609H524.609V262.891ZM512.5 262.5V275H525V262.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 262.891H525.391V274.609H537.109V262.891ZM525 262.5V275H537.5V262.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 262.891H537.891V274.609H549.609V262.891ZM537.5 262.5V275H550V262.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 262.891H550.391V274.609H562.109V262.891ZM550 262.5V275H562.5V262.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 262.891H562.891V274.609H574.609V262.891ZM562.5 262.5V275H575V262.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 262.891H575.391V274.609H587.109V262.891ZM575 262.5V275H587.5V262.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 262.891H587.891V274.609H599.609V262.891ZM587.5 262.5V275H600V262.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 275.391H0.390625V287.109H12.1094V275.391ZM0 275V287.5H12.5V275H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 275.391H12.8906V287.109H24.6094V275.391ZM12.5 275V287.5H25V275H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 275.391H25.3906V287.109H37.1094V275.391ZM25 275V287.5H37.5V275H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 275.391H37.8906V287.109H49.6094V275.391ZM37.5 275V287.5H50V275H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 275.391H50.3906V287.109H62.1094V275.391ZM50 275V287.5H62.5V275H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 275.391H62.8906V287.109H74.6094V275.391ZM62.5 275V287.5H75V275H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 275.391H75.3906V287.109H87.1094V275.391ZM75 275V287.5H87.5V275H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 275.391H87.8906V287.109H99.6094V275.391ZM87.5 275V287.5H100V275H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 275.391H100.391V287.109H112.109V275.391ZM100 275V287.5H112.5V275H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 275.391H112.891V287.109H124.609V275.391ZM112.5 275V287.5H125V275H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 275.391H125.391V287.109H137.109V275.391ZM125 275V287.5H137.5V275H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 275.391H137.891V287.109H149.609V275.391ZM137.5 275V287.5H150V275H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 275.391H150.391V287.109H162.109V275.391ZM150 275V287.5H162.5V275H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 275.391H162.891V287.109H174.609V275.391ZM162.5 275V287.5H175V275H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 275.391H175.391V287.109H187.109V275.391ZM175 275V287.5H187.5V275H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 275.391H187.891V287.109H199.609V275.391ZM187.5 275V287.5H200V275H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 275.391H200.391V287.109H212.109V275.391ZM200 275V287.5H212.5V275H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 275.391H212.891V287.109H224.609V275.391ZM212.5 275V287.5H225V275H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 275.391H225.391V287.109H237.109V275.391ZM225 275V287.5H237.5V275H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 275.391H237.891V287.109H249.609V275.391ZM237.5 275V287.5H250V275H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 275.391H250.391V287.109H262.109V275.391ZM250 275V287.5H262.5V275H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 275.391H262.891V287.109H274.609V275.391ZM262.5 275V287.5H275V275H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 275.391H275.391V287.109H287.109V275.391ZM275 275V287.5H287.5V275H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 275.391H287.891V287.109H299.609V275.391ZM287.5 275V287.5H300V275H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 275.391H300.391V287.109H312.109V275.391ZM300 275V287.5H312.5V275H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 275.391H312.891V287.109H324.609V275.391ZM312.5 275V287.5H325V275H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 275.391H325.391V287.109H337.109V275.391ZM325 275V287.5H337.5V275H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 275.391H337.891V287.109H349.609V275.391ZM337.5 275V287.5H350V275H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 275.391H350.391V287.109H362.109V275.391ZM350 275V287.5H362.5V275H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 275.391H362.891V287.109H374.609V275.391ZM362.5 275V287.5H375V275H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 275.391H375.391V287.109H387.109V275.391ZM375 275V287.5H387.5V275H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 275.391H387.891V287.109H399.609V275.391ZM387.5 275V287.5H400V275H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 275.391H400.391V287.109H412.109V275.391ZM400 275V287.5H412.5V275H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 275.391H412.891V287.109H424.609V275.391ZM412.5 275V287.5H425V275H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 275.391H425.391V287.109H437.109V275.391ZM425 275V287.5H437.5V275H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 275.391H437.891V287.109H449.609V275.391ZM437.5 275V287.5H450V275H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 275.391H450.391V287.109H462.109V275.391ZM450 275V287.5H462.5V275H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 275.391H462.891V287.109H474.609V275.391ZM462.5 275V287.5H475V275H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 275.391H475.391V287.109H487.109V275.391ZM475 275V287.5H487.5V275H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 275.391H487.891V287.109H499.609V275.391ZM487.5 275V287.5H500V275H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 275.391H500.391V287.109H512.109V275.391ZM500 275V287.5H512.5V275H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 275.391H512.891V287.109H524.609V275.391ZM512.5 275V287.5H525V275H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 275.391H525.391V287.109H537.109V275.391ZM525 275V287.5H537.5V275H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 275.391H537.891V287.109H549.609V275.391ZM537.5 275V287.5H550V275H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 275.391H550.391V287.109H562.109V275.391ZM550 275V287.5H562.5V275H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 275.391H562.891V287.109H574.609V275.391ZM562.5 275V287.5H575V275H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 275.391H575.391V287.109H587.109V275.391ZM575 275V287.5H587.5V275H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 275.391H587.891V287.109H599.609V275.391ZM587.5 275V287.5H600V275H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 287.891H0.390625V299.609H12.1094V287.891ZM0 287.5V300H12.5V287.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 287.891H12.8906V299.609H24.6094V287.891ZM12.5 287.5V300H25V287.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 287.891H25.3906V299.609H37.1094V287.891ZM25 287.5V300H37.5V287.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 287.891H37.8906V299.609H49.6094V287.891ZM37.5 287.5V300H50V287.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 287.891H50.3906V299.609H62.1094V287.891ZM50 287.5V300H62.5V287.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 287.891H62.8906V299.609H74.6094V287.891ZM62.5 287.5V300H75V287.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 287.891H75.3906V299.609H87.1094V287.891ZM75 287.5V300H87.5V287.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 287.891H87.8906V299.609H99.6094V287.891ZM87.5 287.5V300H100V287.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 287.891H100.391V299.609H112.109V287.891ZM100 287.5V300H112.5V287.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 287.891H112.891V299.609H124.609V287.891ZM112.5 287.5V300H125V287.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 287.891H125.391V299.609H137.109V287.891ZM125 287.5V300H137.5V287.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 287.891H137.891V299.609H149.609V287.891ZM137.5 287.5V300H150V287.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 287.891H150.391V299.609H162.109V287.891ZM150 287.5V300H162.5V287.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 287.891H162.891V299.609H174.609V287.891ZM162.5 287.5V300H175V287.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 287.891H175.391V299.609H187.109V287.891ZM175 287.5V300H187.5V287.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 287.891H187.891V299.609H199.609V287.891ZM187.5 287.5V300H200V287.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 287.891H200.391V299.609H212.109V287.891ZM200 287.5V300H212.5V287.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 287.891H212.891V299.609H224.609V287.891ZM212.5 287.5V300H225V287.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 287.891H225.391V299.609H237.109V287.891ZM225 287.5V300H237.5V287.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 287.891H237.891V299.609H249.609V287.891ZM237.5 287.5V300H250V287.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 287.891H250.391V299.609H262.109V287.891ZM250 287.5V300H262.5V287.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 287.891H262.891V299.609H274.609V287.891ZM262.5 287.5V300H275V287.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 287.891H275.391V299.609H287.109V287.891ZM275 287.5V300H287.5V287.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 287.891H287.891V299.609H299.609V287.891ZM287.5 287.5V300H300V287.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 287.891H300.391V299.609H312.109V287.891ZM300 287.5V300H312.5V287.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 287.891H312.891V299.609H324.609V287.891ZM312.5 287.5V300H325V287.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 287.891H325.391V299.609H337.109V287.891ZM325 287.5V300H337.5V287.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 287.891H337.891V299.609H349.609V287.891ZM337.5 287.5V300H350V287.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 287.891H350.391V299.609H362.109V287.891ZM350 287.5V300H362.5V287.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 287.891H362.891V299.609H374.609V287.891ZM362.5 287.5V300H375V287.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 287.891H375.391V299.609H387.109V287.891ZM375 287.5V300H387.5V287.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 287.891H387.891V299.609H399.609V287.891ZM387.5 287.5V300H400V287.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 287.891H400.391V299.609H412.109V287.891ZM400 287.5V300H412.5V287.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 287.891H412.891V299.609H424.609V287.891ZM412.5 287.5V300H425V287.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 287.891H425.391V299.609H437.109V287.891ZM425 287.5V300H437.5V287.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 287.891H437.891V299.609H449.609V287.891ZM437.5 287.5V300H450V287.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 287.891H450.391V299.609H462.109V287.891ZM450 287.5V300H462.5V287.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 287.891H462.891V299.609H474.609V287.891ZM462.5 287.5V300H475V287.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 287.891H475.391V299.609H487.109V287.891ZM475 287.5V300H487.5V287.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 287.891H487.891V299.609H499.609V287.891ZM487.5 287.5V300H500V287.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 287.891H500.391V299.609H512.109V287.891ZM500 287.5V300H512.5V287.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 287.891H512.891V299.609H524.609V287.891ZM512.5 287.5V300H525V287.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 287.891H525.391V299.609H537.109V287.891ZM525 287.5V300H537.5V287.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 287.891H537.891V299.609H549.609V287.891ZM537.5 287.5V300H550V287.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 287.891H550.391V299.609H562.109V287.891ZM550 287.5V300H562.5V287.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 287.891H562.891V299.609H574.609V287.891ZM562.5 287.5V300H575V287.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 287.891H575.391V299.609H587.109V287.891ZM575 287.5V300H587.5V287.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 287.891H587.891V299.609H599.609V287.891ZM587.5 287.5V300H600V287.5H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 300.391H0.390625V312.109H12.1094V300.391ZM0 300V312.5H12.5V300H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 300.391H12.8906V312.109H24.6094V300.391ZM12.5 300V312.5H25V300H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 300.391H25.3906V312.109H37.1094V300.391ZM25 300V312.5H37.5V300H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 300.391H37.8906V312.109H49.6094V300.391ZM37.5 300V312.5H50V300H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 300.391H50.3906V312.109H62.1094V300.391ZM50 300V312.5H62.5V300H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 300.391H62.8906V312.109H74.6094V300.391ZM62.5 300V312.5H75V300H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 300.391H75.3906V312.109H87.1094V300.391ZM75 300V312.5H87.5V300H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 300.391H87.8906V312.109H99.6094V300.391ZM87.5 300V312.5H100V300H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 300.391H100.391V312.109H112.109V300.391ZM100 300V312.5H112.5V300H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 300.391H112.891V312.109H124.609V300.391ZM112.5 300V312.5H125V300H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 300.391H125.391V312.109H137.109V300.391ZM125 300V312.5H137.5V300H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 300.391H137.891V312.109H149.609V300.391ZM137.5 300V312.5H150V300H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 300.391H150.391V312.109H162.109V300.391ZM150 300V312.5H162.5V300H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 300.391H162.891V312.109H174.609V300.391ZM162.5 300V312.5H175V300H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 300.391H175.391V312.109H187.109V300.391ZM175 300V312.5H187.5V300H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 300.391H187.891V312.109H199.609V300.391ZM187.5 300V312.5H200V300H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 300.391H200.391V312.109H212.109V300.391ZM200 300V312.5H212.5V300H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 300.391H212.891V312.109H224.609V300.391ZM212.5 300V312.5H225V300H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 300.391H225.391V312.109H237.109V300.391ZM225 300V312.5H237.5V300H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 300.391H237.891V312.109H249.609V300.391ZM237.5 300V312.5H250V300H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 300.391H250.391V312.109H262.109V300.391ZM250 300V312.5H262.5V300H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 300.391H262.891V312.109H274.609V300.391ZM262.5 300V312.5H275V300H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 300.391H275.391V312.109H287.109V300.391ZM275 300V312.5H287.5V300H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 300.391H287.891V312.109H299.609V300.391ZM287.5 300V312.5H300V300H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 300.391H300.391V312.109H312.109V300.391ZM300 300V312.5H312.5V300H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 300.391H312.891V312.109H324.609V300.391ZM312.5 300V312.5H325V300H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 300.391H325.391V312.109H337.109V300.391ZM325 300V312.5H337.5V300H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 300.391H337.891V312.109H349.609V300.391ZM337.5 300V312.5H350V300H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 300.391H350.391V312.109H362.109V300.391ZM350 300V312.5H362.5V300H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 300.391H362.891V312.109H374.609V300.391ZM362.5 300V312.5H375V300H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 300.391H375.391V312.109H387.109V300.391ZM375 300V312.5H387.5V300H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 300.391H387.891V312.109H399.609V300.391ZM387.5 300V312.5H400V300H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 300.391H400.391V312.109H412.109V300.391ZM400 300V312.5H412.5V300H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 300.391H412.891V312.109H424.609V300.391ZM412.5 300V312.5H425V300H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 300.391H425.391V312.109H437.109V300.391ZM425 300V312.5H437.5V300H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 300.391H437.891V312.109H449.609V300.391ZM437.5 300V312.5H450V300H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 300.391H450.391V312.109H462.109V300.391ZM450 300V312.5H462.5V300H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 300.391H462.891V312.109H474.609V300.391ZM462.5 300V312.5H475V300H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 300.391H475.391V312.109H487.109V300.391ZM475 300V312.5H487.5V300H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 300.391H487.891V312.109H499.609V300.391ZM487.5 300V312.5H500V300H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 300.391H500.391V312.109H512.109V300.391ZM500 300V312.5H512.5V300H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 300.391H512.891V312.109H524.609V300.391ZM512.5 300V312.5H525V300H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 300.391H525.391V312.109H537.109V300.391ZM525 300V312.5H537.5V300H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 300.391H537.891V312.109H549.609V300.391ZM537.5 300V312.5H550V300H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 300.391H550.391V312.109H562.109V300.391ZM550 300V312.5H562.5V300H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 300.391H562.891V312.109H574.609V300.391ZM562.5 300V312.5H575V300H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 300.391H575.391V312.109H587.109V300.391ZM575 300V312.5H587.5V300H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 300.391H587.891V312.109H599.609V300.391ZM587.5 300V312.5H600V300H587.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M12.1094 312.891H0.390625V324.609H12.1094V312.891ZM0 312.5V325H12.5V312.5H0Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M24.6094 312.891H12.8906V324.609H24.6094V312.891ZM12.5 312.5V325H25V312.5H12.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M37.1094 312.891H25.3906V324.609H37.1094V312.891ZM25 312.5V325H37.5V312.5H25Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M49.6094 312.891H37.8906V324.609H49.6094V312.891ZM37.5 312.5V325H50V312.5H37.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M62.1094 312.891H50.3906V324.609H62.1094V312.891ZM50 312.5V325H62.5V312.5H50Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M74.6094 312.891H62.8906V324.609H74.6094V312.891ZM62.5 312.5V325H75V312.5H62.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M87.1094 312.891H75.3906V324.609H87.1094V312.891ZM75 312.5V325H87.5V312.5H75Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M99.6094 312.891H87.8906V324.609H99.6094V312.891ZM87.5 312.5V325H100V312.5H87.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M112.109 312.891H100.391V324.609H112.109V312.891ZM100 312.5V325H112.5V312.5H100Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M124.609 312.891H112.891V324.609H124.609V312.891ZM112.5 312.5V325H125V312.5H112.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M137.109 312.891H125.391V324.609H137.109V312.891ZM125 312.5V325H137.5V312.5H125Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M149.609 312.891H137.891V324.609H149.609V312.891ZM137.5 312.5V325H150V312.5H137.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M162.109 312.891H150.391V324.609H162.109V312.891ZM150 312.5V325H162.5V312.5H150Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M174.609 312.891H162.891V324.609H174.609V312.891ZM162.5 312.5V325H175V312.5H162.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M187.109 312.891H175.391V324.609H187.109V312.891ZM175 312.5V325H187.5V312.5H175Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M199.609 312.891H187.891V324.609H199.609V312.891ZM187.5 312.5V325H200V312.5H187.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M212.109 312.891H200.391V324.609H212.109V312.891ZM200 312.5V325H212.5V312.5H200Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M224.609 312.891H212.891V324.609H224.609V312.891ZM212.5 312.5V325H225V312.5H212.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M237.109 312.891H225.391V324.609H237.109V312.891ZM225 312.5V325H237.5V312.5H225Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M249.609 312.891H237.891V324.609H249.609V312.891ZM237.5 312.5V325H250V312.5H237.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M262.109 312.891H250.391V324.609H262.109V312.891ZM250 312.5V325H262.5V312.5H250Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M274.609 312.891H262.891V324.609H274.609V312.891ZM262.5 312.5V325H275V312.5H262.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M287.109 312.891H275.391V324.609H287.109V312.891ZM275 312.5V325H287.5V312.5H275Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M299.609 312.891H287.891V324.609H299.609V312.891ZM287.5 312.5V325H300V312.5H287.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M312.109 312.891H300.391V324.609H312.109V312.891ZM300 312.5V325H312.5V312.5H300Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M324.609 312.891H312.891V324.609H324.609V312.891ZM312.5 312.5V325H325V312.5H312.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M337.109 312.891H325.391V324.609H337.109V312.891ZM325 312.5V325H337.5V312.5H325Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M349.609 312.891H337.891V324.609H349.609V312.891ZM337.5 312.5V325H350V312.5H337.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M362.109 312.891H350.391V324.609H362.109V312.891ZM350 312.5V325H362.5V312.5H350Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M374.609 312.891H362.891V324.609H374.609V312.891ZM362.5 312.5V325H375V312.5H362.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M387.109 312.891H375.391V324.609H387.109V312.891ZM375 312.5V325H387.5V312.5H375Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M399.609 312.891H387.891V324.609H399.609V312.891ZM387.5 312.5V325H400V312.5H387.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M412.109 312.891H400.391V324.609H412.109V312.891ZM400 312.5V325H412.5V312.5H400Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M424.609 312.891H412.891V324.609H424.609V312.891ZM412.5 312.5V325H425V312.5H412.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M437.109 312.891H425.391V324.609H437.109V312.891ZM425 312.5V325H437.5V312.5H425Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M449.609 312.891H437.891V324.609H449.609V312.891ZM437.5 312.5V325H450V312.5H437.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M462.109 312.891H450.391V324.609H462.109V312.891ZM450 312.5V325H462.5V312.5H450Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M474.609 312.891H462.891V324.609H474.609V312.891ZM462.5 312.5V325H475V312.5H462.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M487.109 312.891H475.391V324.609H487.109V312.891ZM475 312.5V325H487.5V312.5H475Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M499.609 312.891H487.891V324.609H499.609V312.891ZM487.5 312.5V325H500V312.5H487.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M512.109 312.891H500.391V324.609H512.109V312.891ZM500 312.5V325H512.5V312.5H500Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M524.609 312.891H512.891V324.609H524.609V312.891ZM512.5 312.5V325H525V312.5H512.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M537.109 312.891H525.391V324.609H537.109V312.891ZM525 312.5V325H537.5V312.5H525Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M549.609 312.891H537.891V324.609H549.609V312.891ZM537.5 312.5V325H550V312.5H537.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M562.109 312.891H550.391V324.609H562.109V312.891ZM550 312.5V325H562.5V312.5H550Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M574.609 312.891H562.891V324.609H574.609V312.891ZM562.5 312.5V325H575V312.5H562.5Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M587.109 312.891H575.391V324.609H587.109V312.891ZM575 312.5V325H587.5V312.5H575Z" fill="black"/>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M599.609 312.891H587.891V324.609H599.609V312.891ZM587.5 312.5V325H600V312.5H587.5Z" fill="black"/>
+</g>
+<defs>
+<clipPath id="clip0_2906_6463">
+<rect width="515" height="126" fill="white"/>
+</clipPath>
+</defs>
+</svg>
@@ -0,0 +1 @@
@@ -0,0 +1,46 @@
+<svg width="257" height="47" viewBox="0 0 257 47" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_100_26)">
+<path d="M119.922 24.4481L109.212 5.8996C107.081 2.20815 103.26 0 98.9973 0C94.7394 0 90.9279 2.19855 88.7918 5.8804L66.6239 44.2734C66.3646 44.7198 66.3646 45.2671 66.6239 45.7135C66.8831 46.1599 67.3583 46.4336 67.8719 46.4336H73.7715C74.2852 46.4336 74.7604 46.1599 75.0196 45.7135L76.9302 42.4013L77.5158 41.4652C77.5158 41.4652 77.535 41.4364 77.5398 41.422L84.6923 29.0324C84.7211 28.9844 84.7499 28.9316 84.7691 28.874C84.8363 28.7203 84.9035 28.5763 84.9851 28.4419L95.6946 9.89347C96.3811 8.70299 97.6148 7.98774 98.9925 7.98774C100.37 7.98774 101.604 8.69819 102.29 9.89347L113 28.4419C113.686 29.6324 113.691 31.0581 113 32.2486C112.313 33.4391 111.08 34.1543 109.702 34.1543H102.232C101.718 34.1543 101.243 34.4279 100.984 34.8744L98.0318 39.9819C97.7726 40.4283 97.7726 40.9756 98.0318 41.422C98.291 41.8684 98.7662 42.1421 99.2799 42.1421H109.404C113.965 42.1421 118.079 39.7275 120.133 35.844C122.039 32.2438 121.957 27.9859 119.912 24.4433L119.922 24.4481Z" fill="white"/>
+<path d="M82.8564 34.1538H104.17L103.872 42.1416H79.9042C79.3905 42.1416 78.9153 41.8679 78.6561 41.4215C78.3969 40.9751 78.3969 40.4278 78.6561 39.9814L81.6083 34.8739C81.8675 34.4274 82.3427 34.1538 82.8564 34.1538Z" fill="url(#paint0_linear_100_26)"/>
+<path d="M43.7123 25.1535C43.7123 24.9039 43.6451 24.659 43.5155 24.443L32.8972 6.14897C30.7131 2.36152 26.7288 -0.000244141 22.5093 -0.000244141C22.3701 -0.000244141 22.2309 -0.000244141 22.0869 0.00935651C18.0162 0.158167 14.368 2.36152 12.323 5.89936L1.61829 24.4478C-1.31951 29.5314 -0.181833 35.6374 4.44568 39.6361C6.31301 41.2538 8.78518 42.1418 11.4062 42.1418H31.3851C31.8987 42.1418 32.374 41.8682 32.6332 41.4218L35.5806 36.3142C35.8398 35.8678 35.8398 35.3206 35.5806 34.8741C35.3214 34.4277 34.8461 34.1541 34.3325 34.1541H11.8334C10.4557 34.1541 9.22201 33.4436 8.53556 32.2532C7.84431 31.0627 7.84431 29.6418 8.53556 28.4465L19.2451 9.89803C19.9315 8.70755 21.1652 7.9923 22.5429 7.9923C23.9206 7.9923 25.1495 8.70275 25.8407 9.89803L41.1346 36.4198C41.461 36.9863 42.1282 37.2599 42.7619 37.0919C43.3955 36.9191 43.8276 36.343 43.8228 35.6902L43.7171 25.1583L43.7123 25.1535Z" fill="url(#paint1_linear_100_26)"/>
+<path d="M4.44544 39.6362C-0.182077 35.6376 -1.31975 29.5315 1.61805 24.448L8.53532 28.4467C7.84407 29.6419 7.84407 31.0628 8.53532 32.2533C9.22176 33.4438 10.4554 34.1543 11.8331 34.1543H34.3323C34.8459 34.1543 35.3211 34.4279 35.5804 34.8743C35.8396 35.3207 35.8396 35.868 35.5804 36.3144L32.633 41.422C32.3737 41.8684 31.8985 42.142 31.3849 42.142H11.4059C8.78493 42.142 6.31276 41.2539 4.44544 39.6362Z" fill="white"/>
+<path d="M74.6308 34.8696C74.3716 34.4231 73.8964 34.1495 73.3827 34.1495H51.2532C49.8755 34.1495 48.6418 33.4391 47.9554 32.2486C47.2642 31.0581 47.2642 29.6372 47.9554 28.4419L58.6649 9.89347C59.3514 8.70299 60.5851 7.98774 61.9628 7.98774C63.3404 7.98774 64.5693 8.69819 65.2606 9.89347L65.899 10.9975C66.1582 11.444 66.6335 11.7176 67.1471 11.7176C67.6607 11.7176 68.1408 11.4392 68.3952 10.9927L71.2322 6.01961C71.5298 5.49637 71.4722 4.84353 71.0882 4.3827C68.7648 1.59851 65.4238 0 61.9291 0C61.7899 0 61.6507 0 61.5067 0.00960065C57.436 0.158411 53.7878 2.36176 51.7429 5.8996L41.0333 24.4481C38.0955 29.5316 39.2332 35.6376 43.8607 39.6363C45.728 41.254 48.2002 42.1421 50.8212 42.1421H70.4257C70.9394 42.1421 71.4146 41.8684 71.6738 41.422L74.626 36.3145C74.8852 35.868 74.8852 35.3208 74.626 34.8744L74.6308 34.8696Z" fill="url(#paint2_linear_100_26)"/>
+</g>
@@ -41,7 +41,7 @@
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
- "ctrl-shift-i": "edit_prediction::ToggleMenu",
+ "ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
"ctrl-alt-l": "lsp_tool::ToggleMenu"
}
},
@@ -64,8 +64,8 @@
"ctrl-k": "editor::CutToEndOfLine",
"ctrl-k ctrl-q": "editor::Rewrap",
"ctrl-k q": "editor::Rewrap",
- "ctrl-backspace": "editor::DeleteToPreviousWordStart",
- "ctrl-delete": "editor::DeleteToNextWordEnd",
+ "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "ctrl-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
"cut": "editor::Cut",
"shift-delete": "editor::Cut",
"ctrl-x": "editor::Cut",
@@ -121,7 +121,7 @@
"alt-g m": "git::OpenModifiedFiles",
"menu": "editor::OpenContextMenu",
"shift-f10": "editor::OpenContextMenu",
- "ctrl-shift-e": "editor::ToggleEditPrediction",
+ "ctrl-alt-shift-e": "editor::ToggleEditPrediction",
"f9": "editor::ToggleBreakpoint",
"shift-f9": "editor::EditLogBreakpoint"
}
@@ -131,14 +131,14 @@
"bindings": {
"shift-enter": "editor::Newline",
"enter": "editor::Newline",
- "ctrl-enter": "editor::NewlineAbove",
- "ctrl-shift-enter": "editor::NewlineBelow",
+ "ctrl-enter": "editor::NewlineBelow",
+ "ctrl-shift-enter": "editor::NewlineAbove",
"ctrl-k ctrl-z": "editor::ToggleSoftWrap",
"ctrl-k z": "editor::ToggleSoftWrap",
"find": "buffer_search::Deploy",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": "buffer_search::DeployReplace",
- "ctrl->": "assistant::QuoteSelection",
+ "ctrl->": "agent::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor",
"ctrl-alt-e": "editor::SelectEnclosingSymbol",
"ctrl-shift-backspace": "editor::GoToPreviousChange",
@@ -171,6 +171,7 @@
"context": "Markdown",
"bindings": {
"copy": "markdown::Copy",
+ "ctrl-insert": "markdown::Copy",
"ctrl-c": "markdown::Copy"
}
},
@@ -241,7 +242,7 @@
"ctrl-shift-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
- "ctrl->": "assistant::QuoteSelection",
+ "ctrl->": "agent::QuoteSelection",
"ctrl-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-enter": "agent::ContinueThread",
@@ -259,6 +260,7 @@
"context": "AgentPanel > Markdown",
"bindings": {
"copy": "markdown::CopyAsMarkdown",
+ "ctrl-insert": "markdown::CopyAsMarkdown",
"ctrl-c": "markdown::CopyAsMarkdown"
}
},
@@ -327,7 +329,7 @@
}
},
{
- "context": "AcpThread > Editor",
+ "context": "AcpThread > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
@@ -336,6 +338,16 @@
"ctrl-shift-n": "agent::RejectAll"
}
},
+ {
+ "context": "AcpThread > Editor && use_modifier_to_send",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "agent::Chat",
+ "shift-ctrl-r": "agent::OpenAgentDiff",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll"
+ }
+ },
{
"context": "ThreadHistory",
"bindings": {
@@ -571,7 +583,7 @@
"ctrl-n": "workspace::NewFile",
"shift-new": "workspace::NewWindow",
"ctrl-shift-n": "workspace::NewWindow",
- "ctrl-`": "terminal_panel::ToggleFocus",
+ "ctrl-`": "terminal_panel::Toggle",
"f10": ["app_menu::OpenApplicationMenu", "Zed"],
"alt-1": ["workspace::ActivatePane", 0],
"alt-2": ["workspace::ActivatePane", 1],
@@ -616,6 +628,7 @@
"alt-save": "workspace::SaveAll",
"ctrl-alt-s": "workspace::SaveAll",
"ctrl-k m": "language_selector::Toggle",
+ "ctrl-k ctrl-m": "toolchain::AddToolchain",
"escape": "workspace::Unfollow",
"ctrl-k ctrl-left": "workspace::ActivatePaneLeft",
"ctrl-k ctrl-right": "workspace::ActivatePaneRight",
@@ -846,7 +859,7 @@
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }],
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
"alt-ctrl-r": "project_panel::RevealInFileManager",
- "ctrl-shift-enter": "project_panel::OpenWithSystem",
+ "ctrl-shift-enter": "workspace::OpenWithSystem",
"alt-d": "project_panel::CompareMarkedFiles",
"shift-find": "project_panel::NewSearchInDirectory",
"ctrl-alt-shift-f": "project_panel::NewSearchInDirectory",
@@ -1016,6 +1029,13 @@
"tab": "channel_modal::ToggleMode"
}
},
+ {
+ "context": "ToolchainSelector",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-a": "toolchain::AddToolchain"
+ }
+ },
{
"context": "FileFinder || (FileFinder > Picker > Editor)",
"bindings": {
@@ -1185,9 +1205,16 @@
"ctrl-1": "onboarding::ActivateBasicsPage",
"ctrl-2": "onboarding::ActivateEditingPage",
"ctrl-3": "onboarding::ActivateAISetupPage",
- "ctrl-escape": "onboarding::Finish",
- "alt-tab": "onboarding::SignIn",
+ "ctrl-enter": "onboarding::Finish",
+ "alt-shift-l": "onboarding::SignIn",
"alt-shift-a": "onboarding::OpenAccount"
}
+ },
+ {
+ "context": "InvalidBuffer",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-enter": "workspace::OpenWithSystem"
+ }
}
]
@@ -70,9 +70,9 @@
"cmd-k q": "editor::Rewrap",
"cmd-backspace": "editor::DeleteToBeginningOfLine",
"cmd-delete": "editor::DeleteToEndOfLine",
- "alt-backspace": "editor::DeleteToPreviousWordStart",
- "ctrl-w": "editor::DeleteToPreviousWordStart",
- "alt-delete": "editor::DeleteToNextWordEnd",
+ "alt-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "ctrl-w": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "alt-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
"cmd-x": "editor::Cut",
"cmd-c": "editor::Copy",
"cmd-v": "editor::Paste",
@@ -162,7 +162,7 @@
"cmd-alt-f": "buffer_search::DeployReplace",
"cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }],
"cmd-e": ["buffer_search::Deploy", { "focus": false }],
- "cmd->": "assistant::QuoteSelection",
+ "cmd->": "agent::QuoteSelection",
"cmd-<": "assistant::InsertIntoEditor",
"cmd-alt-e": "editor::SelectEnclosingSymbol",
"alt-enter": "editor::OpenSelectionsInMultibuffer"
@@ -281,7 +281,7 @@
"cmd-shift-i": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
- "cmd->": "assistant::QuoteSelection",
+ "cmd->": "agent::QuoteSelection",
"cmd-alt-e": "agent::RemoveAllContext",
"cmd-shift-e": "project_panel::ToggleFocus",
"cmd-ctrl-b": "agent::ToggleBurnMode",
@@ -379,7 +379,7 @@
}
},
{
- "context": "AcpThread > Editor",
+ "context": "AcpThread > Editor && !use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
@@ -388,6 +388,16 @@
"cmd-shift-n": "agent::RejectAll"
}
},
+ {
+ "context": "AcpThread > Editor && use_modifier_to_send",
+ "use_key_equivalents": true,
+ "bindings": {
+ "cmd-enter": "agent::Chat",
+ "shift-ctrl-r": "agent::OpenAgentDiff",
+ "cmd-shift-y": "agent::KeepAll",
+ "cmd-shift-n": "agent::RejectAll"
+ }
+ },
{
"context": "ThreadHistory",
"bindings": {
@@ -639,7 +649,7 @@
"alt-shift-enter": "toast::RunAction",
"cmd-shift-s": "workspace::SaveAs",
"cmd-shift-n": "workspace::NewWindow",
- "ctrl-`": "terminal_panel::ToggleFocus",
+ "ctrl-`": "terminal_panel::Toggle",
"cmd-1": ["workspace::ActivatePane", 0],
"cmd-2": ["workspace::ActivatePane", 1],
"cmd-3": ["workspace::ActivatePane", 2],
@@ -680,6 +690,7 @@
"cmd-?": "agent::ToggleFocus",
"cmd-alt-s": "workspace::SaveAll",
"cmd-k m": "language_selector::Toggle",
+ "cmd-k cmd-m": "toolchain::AddToolchain",
"escape": "workspace::Unfollow",
"cmd-k cmd-left": "workspace::ActivatePaneLeft",
"cmd-k cmd-right": "workspace::ActivatePaneRight",
@@ -905,7 +916,7 @@
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }],
"cmd-delete": ["project_panel::Delete", { "skip_prompt": false }],
"alt-cmd-r": "project_panel::RevealInFileManager",
- "ctrl-shift-enter": "project_panel::OpenWithSystem",
+ "ctrl-shift-enter": "workspace::OpenWithSystem",
"alt-d": "project_panel::CompareMarkedFiles",
"cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
"cmd-alt-shift-f": "project_panel::NewSearchInDirectory",
@@ -1084,6 +1095,13 @@
"tab": "channel_modal::ToggleMode"
}
},
+ {
+ "context": "ToolchainSelector",
+ "use_key_equivalents": true,
+ "bindings": {
+ "cmd-shift-a": "toolchain::AddToolchain"
+ }
+ },
{
"context": "FileFinder || (FileFinder > Picker > Editor)",
"use_key_equivalents": true,
@@ -1291,5 +1309,12 @@
"alt-tab": "onboarding::SignIn",
"alt-shift-a": "onboarding::OpenAccount"
}
+ },
+ {
+ "context": "InvalidBuffer",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-enter": "workspace::OpenWithSystem"
+ }
}
]
@@ -0,0 +1,1231 @@
+[
+ // Standard Windows bindings
+ {
+ "use_key_equivalents": true,
+ "bindings": {
+ "home": "menu::SelectFirst",
+ "shift-pageup": "menu::SelectFirst",
+ "pageup": "menu::SelectFirst",
+ "end": "menu::SelectLast",
+ "shift-pagedown": "menu::SelectLast",
+ "pagedown": "menu::SelectLast",
+ "ctrl-n": "menu::SelectNext",
+ "tab": "menu::SelectNext",
+ "down": "menu::SelectNext",
+ "ctrl-p": "menu::SelectPrevious",
+ "shift-tab": "menu::SelectPrevious",
+ "up": "menu::SelectPrevious",
+ "enter": "menu::Confirm",
+ "ctrl-enter": "menu::SecondaryConfirm",
+ "ctrl-escape": "menu::Cancel",
+ "ctrl-c": "menu::Cancel",
+ "escape": "menu::Cancel",
+ "shift-alt-enter": "menu::Restart",
+ "alt-enter": ["picker::ConfirmInput", { "secondary": false }],
+ "ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
+ "ctrl-shift-w": "workspace::CloseWindow",
+ "shift-escape": "workspace::ToggleZoom",
+ "ctrl-o": "workspace::Open",
+ "ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
+ "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
+ "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
+ "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
+ "ctrl-,": "zed::OpenSettings",
+ "ctrl-q": "zed::Quit",
+ "f4": "debugger::Start",
+ "shift-f5": "debugger::Stop",
+ "ctrl-shift-f5": "debugger::RerunSession",
+ "f6": "debugger::Pause",
+ "f7": "debugger::StepOver",
+ "ctrl-f11": "debugger::StepInto",
+ "shift-f11": "debugger::StepOut",
+ "f11": "zed::ToggleFullScreen",
+ "ctrl-shift-i": "edit_prediction::ToggleMenu",
+ "shift-alt-l": "lsp_tool::ToggleMenu"
+ }
+ },
+ {
+ "context": "Picker || menu",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "menu::SelectPrevious",
+ "down": "menu::SelectNext"
+ }
+ },
+ {
+ "context": "Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "editor::Cancel",
+ "shift-backspace": "editor::Backspace",
+ "backspace": "editor::Backspace",
+ "delete": "editor::Delete",
+ "tab": "editor::Tab",
+ "shift-tab": "editor::Backtab",
+ "ctrl-k": "editor::CutToEndOfLine",
+ "ctrl-k ctrl-q": "editor::Rewrap",
+ "ctrl-k q": "editor::Rewrap",
+ "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "ctrl-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
+ "shift-delete": "editor::Cut",
+ "ctrl-x": "editor::Cut",
+ "ctrl-insert": "editor::Copy",
+ "ctrl-c": "editor::Copy",
+ "shift-insert": "editor::Paste",
+ "ctrl-v": "editor::Paste",
+ "ctrl-z": "editor::Undo",
+ "ctrl-y": "editor::Redo",
+ "ctrl-shift-z": "editor::Redo",
+ "up": "editor::MoveUp",
+ "ctrl-up": "editor::LineUp",
+ "ctrl-down": "editor::LineDown",
+ "pageup": "editor::MovePageUp",
+ "alt-pageup": "editor::PageUp",
+ "shift-pageup": "editor::SelectPageUp",
+ "home": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
+ "down": "editor::MoveDown",
+ "pagedown": "editor::MovePageDown",
+ "alt-pagedown": "editor::PageDown",
+ "shift-pagedown": "editor::SelectPageDown",
+ "end": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": true }],
+ "left": "editor::MoveLeft",
+ "right": "editor::MoveRight",
+ "ctrl-left": "editor::MoveToPreviousWordStart",
+ "ctrl-right": "editor::MoveToNextWordEnd",
+ "ctrl-home": "editor::MoveToBeginning",
+ "ctrl-end": "editor::MoveToEnd",
+ "shift-up": "editor::SelectUp",
+ "shift-down": "editor::SelectDown",
+ "shift-left": "editor::SelectLeft",
+ "shift-right": "editor::SelectRight",
+ "ctrl-shift-left": "editor::SelectToPreviousWordStart",
+ "ctrl-shift-right": "editor::SelectToNextWordEnd",
+ "ctrl-shift-home": "editor::SelectToBeginning",
+ "ctrl-shift-end": "editor::SelectToEnd",
+ "ctrl-a": "editor::SelectAll",
+ "ctrl-l": "editor::SelectLine",
+ "shift-alt-f": "editor::Format",
+ "shift-alt-o": "editor::OrganizeImports",
+ "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
+ "shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
+ "ctrl-alt-space": "editor::ShowCharacterPalette",
+ "ctrl-;": "editor::ToggleLineNumbers",
+ "ctrl-'": "editor::ToggleSelectedDiffHunks",
+ "ctrl-\"": "editor::ExpandAllDiffHunks",
+ "ctrl-i": "editor::ShowSignatureHelp",
+ "alt-g b": "git::Blame",
+ "alt-g m": "git::OpenModifiedFiles",
+ "menu": "editor::OpenContextMenu",
+ "shift-f10": "editor::OpenContextMenu",
+ "ctrl-shift-e": "editor::ToggleEditPrediction",
+ "f9": "editor::ToggleBreakpoint",
+ "shift-f9": "editor::EditLogBreakpoint"
+ }
+ },
+ {
+ "context": "Editor && mode == full",
+ "use_key_equivalents": true,
+ "bindings": {
+ "shift-enter": "editor::Newline",
+ "enter": "editor::Newline",
+ "ctrl-enter": "editor::NewlineBelow",
+ "ctrl-shift-enter": "editor::NewlineAbove",
+ "ctrl-k ctrl-z": "editor::ToggleSoftWrap",
+ "ctrl-k z": "editor::ToggleSoftWrap",
+ "ctrl-f": "buffer_search::Deploy",
+ "ctrl-h": "buffer_search::DeployReplace",
+ "ctrl-shift-.": "assistant::QuoteSelection",
+ "ctrl-shift-,": "assistant::InsertIntoEditor",
+ "shift-alt-e": "editor::SelectEnclosingSymbol",
+ "ctrl-shift-backspace": "editor::GoToPreviousChange",
+ "ctrl-shift-alt-backspace": "editor::GoToNextChange",
+ "alt-enter": "editor::OpenSelectionsInMultibuffer"
+ }
+ },
+ {
+ "context": "Editor && mode == full && edit_prediction",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-]": "editor::NextEditPrediction",
+ "alt-[": "editor::PreviousEditPrediction"
+ }
+ },
+ {
+ "context": "Editor && !edit_prediction",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-\\": "editor::ShowEditPrediction"
+ }
+ },
+ {
+ "context": "Editor && mode == auto_height",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ "shift-enter": "editor::Newline",
+ "ctrl-shift-enter": "editor::NewlineBelow"
+ }
+ },
+ {
+ "context": "Markdown",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-c": "markdown::Copy"
+ }
+ },
+ {
+ "context": "Editor && jupyter && !ContextEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-enter": "repl::Run",
+ "ctrl-alt-enter": "repl::RunInPlace"
+ }
+ },
+ {
+ "context": "Editor && !agent_diff",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-k ctrl-r": "git::Restore",
+ "alt-y": "git::StageAndNext",
+ "shift-alt-y": "git::UnstageAndNext"
+ }
+ },
+ {
+ "context": "Editor && editor_agent_diff",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-y": "agent::Keep",
+ "ctrl-n": "agent::Reject",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll",
+ "ctrl-shift-r": "agent::OpenAgentDiff"
+ }
+ },
+ {
+ "context": "AgentDiff",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-y": "agent::Keep",
+ "ctrl-n": "agent::Reject",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll"
+ }
+ },
+ {
+ "context": "ContextEditor > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "assistant::Assist",
+ "ctrl-s": "workspace::Save",
+ "ctrl-shift-,": "assistant::InsertIntoEditor",
+ "shift-enter": "assistant::Split",
+ "ctrl-r": "assistant::CycleMessageRole",
+ "enter": "assistant::ConfirmCommand",
+ "alt-enter": "editor::Newline",
+ "ctrl-k c": "assistant::CopyCode",
+ "ctrl-g": "search::SelectNextMatch",
+ "ctrl-shift-g": "search::SelectPreviousMatch",
+ "ctrl-k l": "agent::OpenRulesLibrary"
+ }
+ },
+ {
+ "context": "AgentPanel",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-n": "agent::NewThread",
+ "shift-alt-n": "agent::NewTextThread",
+ "ctrl-shift-h": "agent::OpenHistory",
+ "shift-alt-c": "agent::OpenSettings",
+ "shift-alt-p": "agent::OpenRulesLibrary",
+ "ctrl-i": "agent::ToggleProfileSelector",
+ "shift-alt-/": "agent::ToggleModelSelector",
+ "ctrl-shift-a": "agent::ToggleContextPicker",
+ "ctrl-shift-j": "agent::ToggleNavigationMenu",
+ "ctrl-shift-i": "agent::ToggleOptionsMenu",
+ // "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
+ "shift-alt-escape": "agent::ExpandMessageEditor",
+ "ctrl-shift-.": "assistant::QuoteSelection",
+ "shift-alt-e": "agent::RemoveAllContext",
+ "ctrl-shift-e": "project_panel::ToggleFocus",
+ "ctrl-shift-enter": "agent::ContinueThread",
+ "super-ctrl-b": "agent::ToggleBurnMode",
+ "alt-enter": "agent::ContinueWithBurnMode"
+ }
+ },
+ {
+ "context": "AgentPanel > NavigationMenu",
+ "use_key_equivalents": true,
+ "bindings": {
+ "shift-backspace": "agent::DeleteRecentlyOpenThread"
+ }
+ },
+ {
+ "context": "AgentPanel > Markdown",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-c": "markdown::CopyAsMarkdown"
+ }
+ },
+ {
+ "context": "AgentPanel && prompt_editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-n": "agent::NewTextThread",
+ "ctrl-alt-t": "agent::NewThread"
+ }
+ },
+ {
+ "context": "AgentPanel && external_agent_thread",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-n": "agent::NewExternalAgentThread",
+ "ctrl-alt-t": "agent::NewThread"
+ }
+ },
+ {
+ "context": "MessageEditor && !Picker > Editor && !use_modifier_to_send",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "agent::Chat",
+ "ctrl-enter": "agent::ChatWithFollow",
+ "ctrl-i": "agent::ToggleProfileSelector",
+ "ctrl-shift-r": "agent::OpenAgentDiff",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll"
+ }
+ },
+ {
+ "context": "MessageEditor && !Picker > Editor && use_modifier_to_send",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "agent::Chat",
+ "enter": "editor::Newline",
+ "ctrl-i": "agent::ToggleProfileSelector",
+ "ctrl-shift-r": "agent::OpenAgentDiff",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll"
+ }
+ },
+ {
+ "context": "EditMessageEditor > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel",
+ "enter": "menu::Confirm",
+ "alt-enter": "editor::Newline"
+ }
+ },
+ {
+ "context": "AgentFeedbackMessageEditor > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel",
+ "enter": "menu::Confirm",
+ "alt-enter": "editor::Newline"
+ }
+ },
+ {
+ "context": "ContextStrip",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "agent::FocusUp",
+ "right": "agent::FocusRight",
+ "left": "agent::FocusLeft",
+ "down": "agent::FocusDown",
+ "backspace": "agent::RemoveFocusedContext",
+ "enter": "agent::AcceptSuggestedContext"
+ }
+ },
+ {
+ "context": "AcpThread > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "agent::Chat",
+ "ctrl-shift-r": "agent::OpenAgentDiff",
+ "ctrl-shift-y": "agent::KeepAll",
+ "ctrl-shift-n": "agent::RejectAll"
+ }
+ },
+ {
+ "context": "ThreadHistory",
+ "use_key_equivalents": true,
+ "bindings": {
+ "backspace": "agent::RemoveSelectedThread"
+ }
+ },
+ {
+ "context": "PromptLibrary",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-n": "rules_library::NewRule",
+ "ctrl-shift-s": "rules_library::ToggleDefaultRule"
+ }
+ },
+ {
+ "context": "BufferSearchBar",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "buffer_search::Dismiss",
+ "tab": "buffer_search::FocusEditor",
+ "enter": "search::SelectNextMatch",
+ "shift-enter": "search::SelectPreviousMatch",
+ "alt-enter": "search::SelectAllMatches",
+ "ctrl-f": "search::FocusSearch",
+ "ctrl-h": "search::ToggleReplace",
+ "ctrl-l": "search::ToggleSelection"
+ }
+ },
+ {
+ "context": "BufferSearchBar && in_replace > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "search::ReplaceNext",
+ "ctrl-enter": "search::ReplaceAll"
+ }
+ },
+ {
+ "context": "BufferSearchBar && !in_replace > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "search::PreviousHistoryQuery",
+ "down": "search::NextHistoryQuery"
+ }
+ },
+ {
+ "context": "ProjectSearchBar",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "project_search::ToggleFocus",
+ "ctrl-shift-f": "search::FocusSearch",
+ "ctrl-shift-h": "search::ToggleReplace",
+ "alt-r": "search::ToggleRegex" // vscode
+ }
+ },
+ {
+ "context": "ProjectSearchBar > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "search::PreviousHistoryQuery",
+ "down": "search::NextHistoryQuery"
+ }
+ },
+ {
+ "context": "ProjectSearchBar && in_replace > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "search::ReplaceNext",
+ "ctrl-alt-enter": "search::ReplaceAll"
+ }
+ },
+ {
+ "context": "ProjectSearchView",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "project_search::ToggleFocus",
+ "ctrl-shift-h": "search::ToggleReplace",
+ "alt-r": "search::ToggleRegex" // vscode
+ }
+ },
+ {
+ "context": "Pane",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-1": ["pane::ActivateItem", 0],
+ "alt-2": ["pane::ActivateItem", 1],
+ "alt-3": ["pane::ActivateItem", 2],
+ "alt-4": ["pane::ActivateItem", 3],
+ "alt-5": ["pane::ActivateItem", 4],
+ "alt-6": ["pane::ActivateItem", 5],
+ "alt-7": ["pane::ActivateItem", 6],
+ "alt-8": ["pane::ActivateItem", 7],
+ "alt-9": ["pane::ActivateItem", 8],
+ "alt-0": "pane::ActivateLastItem",
+ "ctrl-pageup": "pane::ActivatePreviousItem",
+ "ctrl-pagedown": "pane::ActivateNextItem",
+ "ctrl-shift-pageup": "pane::SwapItemLeft",
+ "ctrl-shift-pagedown": "pane::SwapItemRight",
+ "ctrl-f4": ["pane::CloseActiveItem", { "close_pinned": false }],
+ "ctrl-w": ["pane::CloseActiveItem", { "close_pinned": false }],
+ "ctrl-shift-alt-t": ["pane::CloseOtherItems", { "close_pinned": false }],
+ "ctrl-shift-alt-w": "workspace::CloseInactiveTabsAndPanes",
+ "ctrl-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
+ "ctrl-k t": ["pane::CloseItemsToTheRight", { "close_pinned": false }],
+ "ctrl-k u": ["pane::CloseCleanItems", { "close_pinned": false }],
+ "ctrl-k w": ["pane::CloseAllItems", { "close_pinned": false }],
+ "ctrl-k ctrl-w": "workspace::CloseAllItemsAndPanes",
+ "back": "pane::GoBack",
+ "alt--": "pane::GoBack",
+ "alt-=": "pane::GoForward",
+ "forward": "pane::GoForward",
+ "f3": "search::SelectNextMatch",
+ "shift-f3": "search::SelectPreviousMatch",
+ "ctrl-shift-f": "project_search::ToggleFocus",
+ "shift-alt-h": "search::ToggleReplace",
+ "alt-l": "search::ToggleSelection",
+ "alt-enter": "search::SelectAllMatches",
+ "alt-c": "search::ToggleCaseSensitive",
+ "alt-w": "search::ToggleWholeWord",
+ "alt-f": "project_search::ToggleFilters",
+ "alt-r": "search::ToggleRegex",
+ // "ctrl-shift-alt-x": "search::ToggleRegex",
+ "ctrl-k shift-enter": "pane::TogglePinTab"
+ }
+ },
+ // Bindings from VS Code
+ {
+ "context": "Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-[": "editor::Outdent",
+ "ctrl-]": "editor::Indent",
+ "ctrl-shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above
+ "ctrl-shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below
+ "ctrl-shift-k": "editor::DeleteLine",
+ "alt-up": "editor::MoveLineUp",
+ "alt-down": "editor::MoveLineDown",
+ "shift-alt-up": "editor::DuplicateLineUp",
+ "shift-alt-down": "editor::DuplicateLineDown",
+ "shift-alt-right": "editor::SelectLargerSyntaxNode", // Expand Selection
+ "shift-alt-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
+ "ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
+ "ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
+ "ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
+ "ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch
+ "ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
+ "ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
+ "ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
+ "ctrl-k ctrl-i": "editor::Hover",
+ "ctrl-k ctrl-b": "editor::BlameHover",
+ "ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
+ "f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
+ "shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
+ "f2": "editor::Rename",
+ "f12": "editor::GoToDefinition",
+ "alt-f12": "editor::GoToDefinitionSplit",
+ "ctrl-shift-f10": "editor::GoToDefinitionSplit",
+ "ctrl-f12": "editor::GoToImplementation",
+ "shift-f12": "editor::GoToTypeDefinition",
+ "ctrl-alt-f12": "editor::GoToTypeDefinitionSplit",
+ "shift-alt-f12": "editor::FindAllReferences",
+ "ctrl-m": "editor::MoveToEnclosingBracket", // from jetbrains
+ "ctrl-shift-\\": "editor::MoveToEnclosingBracket",
+ "ctrl-shift-[": "editor::Fold",
+ "ctrl-shift-]": "editor::UnfoldLines",
+ "ctrl-k ctrl-l": "editor::ToggleFold",
+ "ctrl-k ctrl-[": "editor::FoldRecursive",
+ "ctrl-k ctrl-]": "editor::UnfoldRecursive",
+ "ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
+ "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
+ "ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
+ "ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
+ "ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
+ "ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
+ "ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
+ "ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
+ "ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
+ "ctrl-k ctrl-0": "editor::FoldAll",
+ "ctrl-k ctrl-j": "editor::UnfoldAll",
+ "ctrl-space": "editor::ShowCompletions",
+ "ctrl-shift-space": "editor::ShowWordCompletions",
+ "ctrl-.": "editor::ToggleCodeActions",
+ "ctrl-k r": "editor::RevealInFileManager",
+ "ctrl-k p": "editor::CopyPath",
+ "ctrl-\\": "pane::SplitRight",
+ "ctrl-shift-alt-c": "editor::DisplayCursorNames",
+ "alt-.": "editor::GoToHunk",
+ "alt-,": "editor::GoToPreviousHunk"
+ }
+ },
+ {
+ "context": "Editor && extension == md",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-k v": "markdown::OpenPreviewToTheSide",
+ "ctrl-shift-v": "markdown::OpenPreview"
+ }
+ },
+ {
+ "context": "Editor && extension == svg",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-k v": "svg::OpenPreviewToTheSide",
+ "ctrl-shift-v": "svg::OpenPreview"
+ }
+ },
+ {
+ "context": "Editor && mode == full",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-o": "outline::Toggle",
+ "ctrl-g": "go_to_line::Toggle"
+ }
+ },
+ {
+ "context": "Workspace",
+ "use_key_equivalents": true,
+ "bindings": {
+ // Change the default action on `menu::Confirm` by setting the parameter
+ // "ctrl-alt-o": ["projects::OpenRecent", { "create_new_window": true }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
+ // Change to open path modal for existing remote connection by setting the parameter
+ // "ctrl-shift-alt-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
+ "ctrl-shift-alt-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
+ "shift-alt-b": "branches::OpenRecent",
+ "shift-alt-enter": "toast::RunAction",
+ "ctrl-shift-`": "workspace::NewTerminal",
+ "ctrl-s": "workspace::Save",
+ "ctrl-k ctrl-shift-s": "workspace::SaveWithoutFormat",
+ "ctrl-shift-s": "workspace::SaveAs",
+ "ctrl-n": "workspace::NewFile",
+ "ctrl-shift-n": "workspace::NewWindow",
+ "ctrl-`": "terminal_panel::Toggle",
+ "f10": ["app_menu::OpenApplicationMenu", "Zed"],
+ "alt-1": ["workspace::ActivatePane", 0],
+ "alt-2": ["workspace::ActivatePane", 1],
+ "alt-3": ["workspace::ActivatePane", 2],
+ "alt-4": ["workspace::ActivatePane", 3],
+ "alt-5": ["workspace::ActivatePane", 4],
+ "alt-6": ["workspace::ActivatePane", 5],
+ "alt-7": ["workspace::ActivatePane", 6],
+ "alt-8": ["workspace::ActivatePane", 7],
+ "alt-9": ["workspace::ActivatePane", 8],
+ "ctrl-alt-b": "workspace::ToggleRightDock",
+ "ctrl-b": "workspace::ToggleLeftDock",
+ "ctrl-j": "workspace::ToggleBottomDock",
+ "ctrl-shift-y": "workspace::CloseAllDocks",
+ "alt-r": "workspace::ResetActiveDockSize",
+ // For 0px parameter, uses UI font size value.
+ "shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
+ "shift-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
+ "shift-alt-0": "workspace::ResetOpenDocksSize",
+ "ctrl-shift-alt--": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
+ "ctrl-shift-alt-=": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
+ "ctrl-shift-f": "pane::DeploySearch",
+ "ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
+ "ctrl-shift-t": "pane::ReopenClosedItem",
+ "ctrl-k ctrl-s": "zed::OpenKeymapEditor",
+ "ctrl-k ctrl-t": "theme_selector::Toggle",
+ "ctrl-alt-super-p": "settings_profile_selector::Toggle",
+ "ctrl-t": "project_symbols::Toggle",
+ "ctrl-p": "file_finder::Toggle",
+ "ctrl-tab": "tab_switcher::Toggle",
+ "ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
+ "ctrl-e": "file_finder::Toggle",
+ "f1": "command_palette::Toggle",
+ "ctrl-shift-p": "command_palette::Toggle",
+ "ctrl-shift-m": "diagnostics::Deploy",
+ "ctrl-shift-e": "project_panel::ToggleFocus",
+ "ctrl-shift-b": "outline_panel::ToggleFocus",
+ "ctrl-shift-g": "git_panel::ToggleFocus",
+ "ctrl-shift-d": "debug_panel::ToggleFocus",
+ "ctrl-shift-/": "agent::ToggleFocus",
+ "ctrl-k s": "workspace::SaveAll",
+ "ctrl-k m": "language_selector::Toggle",
+ "ctrl-m ctrl-m": "toolchain::AddToolchain",
+ "escape": "workspace::Unfollow",
+ "ctrl-k ctrl-left": "workspace::ActivatePaneLeft",
+ "ctrl-k ctrl-right": "workspace::ActivatePaneRight",
+ "ctrl-k ctrl-up": "workspace::ActivatePaneUp",
+ "ctrl-k ctrl-down": "workspace::ActivatePaneDown",
+ "ctrl-k shift-left": "workspace::SwapPaneLeft",
+ "ctrl-k shift-right": "workspace::SwapPaneRight",
+ "ctrl-k shift-up": "workspace::SwapPaneUp",
+ "ctrl-k shift-down": "workspace::SwapPaneDown",
+ "ctrl-shift-x": "zed::Extensions",
+ "ctrl-shift-r": "task::Rerun",
+ "alt-t": "task::Rerun",
+ "shift-alt-t": "task::Spawn",
+ "shift-alt-r": ["task::Spawn", { "reveal_target": "center" }],
+ // also possible to spawn tasks by name:
+ // "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
+ // or by tag:
+ // "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
+ "f5": "debugger::Rerun",
+ "ctrl-f4": "workspace::CloseActiveDock",
+ "ctrl-w": "workspace::CloseActiveDock"
+ }
+ },
+ {
+ "context": "Workspace && debugger_running",
+ "use_key_equivalents": true,
+ "bindings": {
+ "f5": "zed::NoAction"
+ }
+ },
+ {
+ "context": "Workspace && debugger_stopped",
+ "use_key_equivalents": true,
+ "bindings": {
+ "f5": "debugger::Continue"
+ }
+ },
+ {
+ "context": "ApplicationMenu",
+ "use_key_equivalents": true,
+ "bindings": {
+ "f10": "menu::Cancel",
+ "left": "app_menu::ActivateMenuLeft",
+ "right": "app_menu::ActivateMenuRight"
+ }
+ },
+ // Bindings from Sublime Text
+ {
+ "context": "Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-u": "editor::UndoSelection",
+ "ctrl-shift-u": "editor::RedoSelection",
+ "ctrl-shift-j": "editor::JoinLines",
+ "ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
+ "shift-alt-h": "editor::DeleteToPreviousSubwordStart",
+ "ctrl-alt-delete": "editor::DeleteToNextSubwordEnd",
+ "shift-alt-d": "editor::DeleteToNextSubwordEnd",
+ "ctrl-alt-left": "editor::MoveToPreviousSubwordStart",
+ "ctrl-alt-right": "editor::MoveToNextSubwordEnd",
+ "ctrl-shift-alt-left": "editor::SelectToPreviousSubwordStart",
+ "ctrl-shift-alt-right": "editor::SelectToNextSubwordEnd"
+ }
+ },
+ // Bindings from Atom
+ {
+ "context": "Pane",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-k up": "pane::SplitUp",
+ "ctrl-k down": "pane::SplitDown",
+ "ctrl-k left": "pane::SplitLeft",
+ "ctrl-k right": "pane::SplitRight"
+ }
+ },
+ // Bindings that should be unified with bindings for more general actions
+ {
+ "context": "Editor && renaming",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "editor::ConfirmRename"
+ }
+ },
+ {
+ "context": "Editor && showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "editor::ConfirmCompletion",
+ "shift-enter": "editor::ConfirmCompletionReplace",
+ "tab": "editor::ComposeCompletion"
+ }
+ },
+ // Bindings for accepting edit predictions
+ //
+ // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
+ // because alt-tab may not be available, as it is often used for window switching.
+ {
+ "context": "Editor && edit_prediction",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-tab": "editor::AcceptEditPrediction",
+ "alt-l": "editor::AcceptEditPrediction",
+ "tab": "editor::AcceptEditPrediction",
+ "alt-right": "editor::AcceptPartialEditPrediction"
+ }
+ },
+ {
+ "context": "Editor && edit_prediction_conflict",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-tab": "editor::AcceptEditPrediction",
+ "alt-l": "editor::AcceptEditPrediction",
+ "alt-right": "editor::AcceptPartialEditPrediction"
+ }
+ },
+ {
+ "context": "Editor && showing_code_actions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "editor::ConfirmCodeAction"
+ }
+ },
+ {
+ "context": "Editor && (showing_code_actions || showing_completions)",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-p": "editor::ContextMenuPrevious",
+ "up": "editor::ContextMenuPrevious",
+ "ctrl-n": "editor::ContextMenuNext",
+ "down": "editor::ContextMenuNext",
+ "pageup": "editor::ContextMenuFirst",
+ "pagedown": "editor::ContextMenuLast"
+ }
+ },
+ {
+ "context": "Editor && showing_signature_help && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "editor::SignatureHelpPrevious",
+ "down": "editor::SignatureHelpNext"
+ }
+ },
+ // Custom bindings
+ {
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-alt-f": "workspace::FollowNextCollaborator",
+ // Only available in debug builds: opens an element inspector for development.
+ "shift-alt-i": "dev::ToggleInspector"
+ }
+ },
+ {
+ "context": "!Terminal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-c": "collab_panel::ToggleFocus"
+ }
+ },
+ {
+ "context": "!ContextEditor > Editor && mode == full",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-enter": "editor::OpenExcerpts",
+ "shift-enter": "editor::ExpandExcerpts",
+ "ctrl-alt-enter": "editor::OpenExcerptsSplit",
+ "ctrl-shift-e": "pane::RevealInProjectPanel",
+ "ctrl-f8": "editor::GoToHunk",
+ "ctrl-shift-f8": "editor::GoToPreviousHunk",
+ "ctrl-enter": "assistant::InlineAssist",
+ "ctrl-shift-;": "editor::ToggleInlayHints"
+ }
+ },
+ {
+ "context": "PromptEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-[": "agent::CyclePreviousInlineAssist",
+ "ctrl-]": "agent::CycleNextInlineAssist",
+ "shift-alt-e": "agent::RemoveAllContext"
+ }
+ },
+ {
+ "context": "Prompt",
+ "use_key_equivalents": true,
+ "bindings": {
+ "left": "menu::SelectPrevious",
+ "right": "menu::SelectNext",
+ "h": "menu::SelectPrevious",
+ "l": "menu::SelectNext"
+ }
+ },
+ {
+ "context": "ProjectSearchBar && !in_replace",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "project_search::SearchInNew"
+ }
+ },
+ {
+ "context": "OutlinePanel && not_editing",
+ "use_key_equivalents": true,
+ "bindings": {
+ "left": "outline_panel::CollapseSelectedEntry",
+ "right": "outline_panel::ExpandSelectedEntry",
+ "shift-alt-c": "outline_panel::CopyPath",
+ "ctrl-shift-alt-c": "workspace::CopyRelativePath",
+ "ctrl-alt-r": "outline_panel::RevealInFileManager",
+ "space": "outline_panel::OpenSelectedEntry",
+ "shift-down": "menu::SelectNext",
+ "shift-up": "menu::SelectPrevious",
+ "alt-enter": "editor::OpenExcerpts",
+ "ctrl-alt-enter": "editor::OpenExcerptsSplit"
+ }
+ },
+ {
+ "context": "ProjectPanel",
+ "use_key_equivalents": true,
+ "bindings": {
+ "left": "project_panel::CollapseSelectedEntry",
+ "right": "project_panel::ExpandSelectedEntry",
+ "ctrl-n": "project_panel::NewFile",
+ "alt-n": "project_panel::NewDirectory",
+ "ctrl-x": "project_panel::Cut",
+ "ctrl-insert": "project_panel::Copy",
+ "ctrl-c": "project_panel::Copy",
+ "shift-insert": "project_panel::Paste",
+ "ctrl-v": "project_panel::Paste",
+ "shift-alt-c": "project_panel::CopyPath",
+ "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath",
+ "enter": "project_panel::Rename",
+ "f2": "project_panel::Rename",
+ "backspace": ["project_panel::Trash", { "skip_prompt": false }],
+ "delete": ["project_panel::Trash", { "skip_prompt": false }],
+ "shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
+ "ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }],
+ "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
+ "ctrl-alt-r": "project_panel::RevealInFileManager",
+ "ctrl-shift-enter": "project_panel::OpenWithSystem",
+ "alt-d": "project_panel::CompareMarkedFiles",
+ "ctrl-k ctrl-shift-f": "project_panel::NewSearchInDirectory",
+ "shift-down": "menu::SelectNext",
+ "shift-up": "menu::SelectPrevious",
+ "escape": "menu::Cancel"
+ }
+ },
+ {
+ "context": "ProjectPanel && not_editing",
+ "use_key_equivalents": true,
+ "bindings": {
+ "space": "project_panel::Open"
+ }
+ },
+ {
+ "context": "GitPanel && ChangesList",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "menu::SelectPrevious",
+ "down": "menu::SelectNext",
+ "enter": "menu::Confirm",
+ "alt-y": "git::StageFile",
+ "shift-alt-y": "git::UnstageFile",
+ "space": "git::ToggleStaged",
+ "shift-space": "git::StageRange",
+ "tab": "git_panel::FocusEditor",
+ "shift-tab": "git_panel::FocusEditor",
+ "escape": "git_panel::ToggleFocus",
+ "alt-enter": "menu::SecondaryConfirm",
+ "delete": ["git::RestoreFile", { "skip_prompt": false }],
+ "backspace": ["git::RestoreFile", { "skip_prompt": false }],
+ "shift-delete": ["git::RestoreFile", { "skip_prompt": false }],
+ "ctrl-backspace": ["git::RestoreFile", { "skip_prompt": false }],
+ "ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }]
+ }
+ },
+ {
+ "context": "GitPanel && CommitEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "git::Cancel"
+ }
+ },
+ {
+ "context": "GitCommit > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel",
+ "enter": "editor::Newline",
+ "ctrl-enter": "git::Commit",
+ "ctrl-shift-enter": "git::Amend",
+ "alt-l": "git::GenerateCommitMessage"
+ }
+ },
+ {
+ "context": "GitPanel",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-g ctrl-g": "git::Fetch",
+ "ctrl-g up": "git::Push",
+ "ctrl-g down": "git::Pull",
+ "ctrl-g shift-up": "git::ForcePush",
+ "ctrl-g d": "git::Diff",
+ "ctrl-g backspace": "git::RestoreTrackedFiles",
+ "ctrl-g shift-backspace": "git::TrashUntrackedFiles",
+ "ctrl-space": "git::StageAll",
+ "ctrl-shift-space": "git::UnstageAll",
+ "ctrl-enter": "git::Commit",
+ "ctrl-shift-enter": "git::Amend"
+ }
+ },
+ {
+ "context": "GitDiff > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "git::Commit",
+ "ctrl-shift-enter": "git::Amend",
+ "ctrl-space": "git::StageAll",
+ "ctrl-shift-space": "git::UnstageAll"
+ }
+ },
+ {
+ "context": "AskPass > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "menu::Confirm"
+ }
+ },
+ {
+ "context": "CommitEditor > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "git_panel::FocusChanges",
+ "tab": "git_panel::FocusChanges",
+ "shift-tab": "git_panel::FocusChanges",
+ "enter": "editor::Newline",
+ "ctrl-enter": "git::Commit",
+ "ctrl-shift-enter": "git::Amend",
+ "alt-up": "git_panel::FocusChanges",
+ "alt-l": "git::GenerateCommitMessage"
+ }
+ },
+ {
+ "context": "DebugPanel",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-t": "debugger::ToggleThreadPicker",
+ "ctrl-i": "debugger::ToggleSessionPicker",
+ "shift-alt-escape": "debugger::ToggleExpandItem"
+ }
+ },
+ {
+ "context": "VariableList",
+ "use_key_equivalents": true,
+ "bindings": {
+ "left": "variable_list::CollapseSelectedEntry",
+ "right": "variable_list::ExpandSelectedEntry",
+ "enter": "variable_list::EditVariable",
+ "ctrl-c": "variable_list::CopyVariableValue",
+ "ctrl-alt-c": "variable_list::CopyVariableName",
+ "delete": "variable_list::RemoveWatch",
+ "backspace": "variable_list::RemoveWatch",
+ "alt-enter": "variable_list::AddWatch"
+ }
+ },
+ {
+ "context": "BreakpointList",
+ "use_key_equivalents": true,
+ "bindings": {
+ "space": "debugger::ToggleEnableBreakpoint",
+ "backspace": "debugger::UnsetBreakpoint",
+ "left": "debugger::PreviousBreakpointProperty",
+ "right": "debugger::NextBreakpointProperty"
+ }
+ },
+ {
+ "context": "CollabPanel && not_editing",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-backspace": "collab_panel::Remove",
+ "space": "menu::Confirm"
+ }
+ },
+ {
+ "context": "CollabPanel",
+ "use_key_equivalents": true,
+ "bindings": {
+ "alt-up": "collab_panel::MoveChannelUp",
+ "alt-down": "collab_panel::MoveChannelDown"
+ }
+ },
+ {
+ "context": "(CollabPanel && editing) > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "space": "collab_panel::InsertSpace"
+ }
+ },
+ {
+ "context": "ChannelModal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "tab": "channel_modal::ToggleMode"
+ }
+ },
+ {
+ "context": "Picker > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel",
+ "up": "menu::SelectPrevious",
+ "down": "menu::SelectNext",
+ "tab": "picker::ConfirmCompletion",
+ "alt-enter": ["picker::ConfirmInput", { "secondary": false }]
+ }
+ },
+ {
+ "context": "ChannelModal > Picker > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "tab": "channel_modal::ToggleMode"
+ }
+ },
+ {
+ "context": "ToolchainSelector",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-a": "toolchain::AddToolchain"
+ }
+ },
+ {
+ "context": "FileFinder || (FileFinder > Picker > Editor)",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-p": "file_finder::Toggle",
+ "ctrl-shift-a": "file_finder::ToggleSplitMenu",
+ "ctrl-shift-i": "file_finder::ToggleFilterMenu"
+ }
+ },
+ {
+ "context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-p": "file_finder::SelectPrevious",
+ "ctrl-j": "pane::SplitDown",
+ "ctrl-k": "pane::SplitUp",
+ "ctrl-h": "pane::SplitLeft",
+ "ctrl-l": "pane::SplitRight"
+ }
+ },
+ {
+ "context": "TabSwitcher",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-shift-tab": "menu::SelectPrevious",
+ "ctrl-up": "menu::SelectPrevious",
+ "ctrl-down": "menu::SelectNext",
+ "ctrl-backspace": "tab_switcher::CloseSelectedItem"
+ }
+ },
+ {
+ "context": "Terminal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-alt-space": "terminal::ShowCharacterPalette",
+ "ctrl-insert": "terminal::Copy",
+ "ctrl-shift-c": "terminal::Copy",
+ "shift-insert": "terminal::Paste",
+ "ctrl-shift-v": "terminal::Paste",
+ "ctrl-enter": "assistant::InlineAssist",
+ "alt-b": ["terminal::SendText", "\u001bb"],
+ "alt-f": ["terminal::SendText", "\u001bf"],
+ "alt-.": ["terminal::SendText", "\u001b."],
+ "ctrl-delete": ["terminal::SendText", "\u001bd"],
+ // Overrides for conflicting keybindings
+ "ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
+ "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
+ "ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
+ "ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
+ "ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
+ "ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
+ "ctrl-shift-a": "editor::SelectAll",
+ "ctrl-shift-f": "buffer_search::Deploy",
+ "ctrl-shift-l": "terminal::Clear",
+ "ctrl-shift-w": "pane::CloseActiveItem",
+ "up": ["terminal::SendKeystroke", "up"],
+ "pageup": ["terminal::SendKeystroke", "pageup"],
+ "down": ["terminal::SendKeystroke", "down"],
+ "pagedown": ["terminal::SendKeystroke", "pagedown"],
+ "escape": ["terminal::SendKeystroke", "escape"],
+ "enter": ["terminal::SendKeystroke", "enter"],
+ "shift-pageup": "terminal::ScrollPageUp",
+ "shift-pagedown": "terminal::ScrollPageDown",
+ "shift-up": "terminal::ScrollLineUp",
+ "shift-down": "terminal::ScrollLineDown",
+ "shift-home": "terminal::ScrollToTop",
+ "shift-end": "terminal::ScrollToBottom",
+ "ctrl-shift-space": "terminal::ToggleViMode",
+ "ctrl-shift-r": "terminal::RerunTask",
+ "ctrl-alt-r": "terminal::RerunTask",
+ "alt-t": "terminal::RerunTask"
+ }
+ },
+ {
+ "context": "ZedPredictModal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel"
+ }
+ },
+ {
+ "context": "ConfigureContextServerModal > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel",
+ "enter": "editor::Newline",
+ "ctrl-enter": "menu::Confirm"
+ }
+ },
+ {
+ "context": "OnboardingAiConfigurationModal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "escape": "menu::Cancel"
+ }
+ },
+ {
+ "context": "Diagnostics",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
+ }
+ },
+ {
+ "context": "DebugConsole > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "menu::Confirm",
+ "alt-enter": "console::WatchExpression"
+ }
+ },
+ {
+ "context": "RunModal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-tab": "pane::ActivateNextItem",
+ "ctrl-shift-tab": "pane::ActivatePreviousItem"
+ }
+ },
+ {
+ "context": "MarkdownPreview",
+ "use_key_equivalents": true,
+ "bindings": {
+ "pageup": "markdown::MovePageUp",
+ "pagedown": "markdown::MovePageDown"
+ }
+ },
+ {
+ "context": "KeymapEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-f": "search::FocusSearch",
+ "alt-f": "keymap_editor::ToggleKeystrokeSearch",
+ "alt-c": "keymap_editor::ToggleConflictFilter",
+ "enter": "keymap_editor::EditBinding",
+ "alt-enter": "keymap_editor::CreateBinding",
+ "ctrl-c": "keymap_editor::CopyAction",
+ "ctrl-shift-c": "keymap_editor::CopyContext",
+ "ctrl-t": "keymap_editor::ShowMatchingKeybinds"
+ }
+ },
+ {
+ "context": "KeystrokeInput",
+ "use_key_equivalents": true,
+ "bindings": {
+ "enter": "keystroke_input::StartRecording",
+ "escape escape escape": "keystroke_input::StopRecording",
+ "delete": "keystroke_input::ClearKeystrokes"
+ }
+ },
+ {
+ "context": "KeybindEditorModal",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "menu::Confirm",
+ "escape": "menu::Cancel"
+ }
+ },
+ {
+ "context": "KeybindEditorModal > Editor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "up": "menu::SelectPrevious",
+ "down": "menu::SelectNext"
+ }
+ },
+ {
+ "context": "Onboarding",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-1": "onboarding::ActivateBasicsPage",
+ "ctrl-2": "onboarding::ActivateEditingPage",
+ "ctrl-3": "onboarding::ActivateAISetupPage",
+ "ctrl-escape": "onboarding::Finish",
+ "alt-tab": "onboarding::SignIn",
+ "shift-alt-a": "onboarding::OpenAccount"
+ }
+ }
+]
@@ -17,8 +17,8 @@
"bindings": {
"ctrl-i": "agent::ToggleFocus",
"ctrl-shift-i": "agent::ToggleFocus",
- "ctrl-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode
- "ctrl-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode
+ "ctrl-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode
+ "ctrl-l": "agent::QuoteSelection", // In cursor uses "Agent" mode
"ctrl-k": "assistant::InlineAssist",
"ctrl-shift-k": "assistant::InsertIntoEditor"
}
@@ -38,10 +38,11 @@
"alt-;": ["editor::ToggleComments", { "advance_downwards": false }],
"ctrl-x ctrl-;": "editor::ToggleComments",
"alt-.": "editor::GoToDefinition", // xref-find-definitions
+ "alt-?": "editor::FindAllReferences", // xref-find-references
"alt-,": "pane::GoBack", // xref-pop-marker-stack
"ctrl-x h": "editor::SelectAll", // mark-whole-buffer
"ctrl-d": "editor::Delete", // delete-char
- "alt-d": "editor::DeleteToNextWordEnd", // kill-word
+ "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word
"ctrl-k": "editor::KillRingCut", // kill-line
"ctrl-w": "editor::Cut", // kill-region
"alt-w": "editor::Copy", // kill-ring-save
@@ -125,7 +125,7 @@
{
"context": "Workspace || Editor",
"bindings": {
- "alt-f12": "terminal_panel::ToggleFocus",
+ "alt-f12": "terminal_panel::Toggle",
"ctrl-shift-k": "git::Push"
}
},
@@ -50,8 +50,8 @@
"ctrl-k ctrl-u": "editor::ConvertToUpperCase",
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
- "ctrl-backspace": "editor::DeleteToPreviousWordStart",
- "ctrl-delete": "editor::DeleteToNextWordEnd",
+ "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "ctrl-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
"alt-right": "editor::MoveToNextSubwordEnd",
"alt-left": "editor::MoveToPreviousSubwordStart",
"alt-shift-right": "editor::SelectToNextSubwordEnd",
@@ -17,8 +17,8 @@
"bindings": {
"cmd-i": "agent::ToggleFocus",
"cmd-shift-i": "agent::ToggleFocus",
- "cmd-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode
- "cmd-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode
+ "cmd-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode
+ "cmd-l": "agent::QuoteSelection", // In cursor uses "Agent" mode
"cmd-k": "assistant::InlineAssist",
"cmd-shift-k": "assistant::InsertIntoEditor"
}
@@ -38,10 +38,11 @@
"alt-;": ["editor::ToggleComments", { "advance_downwards": false }],
"ctrl-x ctrl-;": "editor::ToggleComments",
"alt-.": "editor::GoToDefinition", // xref-find-definitions
+ "alt-?": "editor::FindAllReferences", // xref-find-references
"alt-,": "pane::GoBack", // xref-pop-marker-stack
"ctrl-x h": "editor::SelectAll", // mark-whole-buffer
"ctrl-d": "editor::Delete", // delete-char
- "alt-d": "editor::DeleteToNextWordEnd", // kill-word
+ "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word
"ctrl-k": "editor::KillRingCut", // kill-line
"ctrl-w": "editor::Cut", // kill-region
"alt-w": "editor::Copy", // kill-ring-save
@@ -127,7 +127,7 @@
{
"context": "Workspace || Editor",
"bindings": {
- "alt-f12": "terminal_panel::ToggleFocus",
+ "alt-f12": "terminal_panel::Toggle",
"cmd-shift-k": "git::Push"
}
},
@@ -52,8 +52,8 @@
"cmd-k cmd-l": "editor::ConvertToLowerCase",
"cmd-shift-j": "editor::JoinLines",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
- "ctrl-backspace": "editor::DeleteToPreviousWordStart",
- "ctrl-delete": "editor::DeleteToNextWordEnd",
+ "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "ctrl-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
"ctrl-right": "editor::MoveToNextSubwordEnd",
"ctrl-left": "editor::MoveToPreviousSubwordStart",
"ctrl-shift-right": "editor::SelectToNextSubwordEnd",
@@ -21,10 +21,10 @@
{
"context": "Editor",
"bindings": {
- "alt-backspace": "editor::DeleteToPreviousWordStart",
- "alt-shift-backspace": "editor::DeleteToNextWordEnd",
- "alt-delete": "editor::DeleteToNextWordEnd",
- "alt-shift-delete": "editor::DeleteToNextWordEnd",
+ "alt-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
+ "alt-shift-backspace": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
+ "alt-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
+ "alt-shift-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }],
"ctrl-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-delete": "editor::DeleteToNextSubwordEnd",
"alt-left": ["editor::MoveToPreviousWordStart", { "stop_at_soft_wraps": true }],
@@ -32,34 +32,6 @@
"(": "vim::SentenceBackward",
")": "vim::SentenceForward",
"|": "vim::GoToColumn",
- "] ]": "vim::NextSectionStart",
- "] [": "vim::NextSectionEnd",
- "[ [": "vim::PreviousSectionStart",
- "[ ]": "vim::PreviousSectionEnd",
- "] m": "vim::NextMethodStart",
- "] shift-m": "vim::NextMethodEnd",
- "[ m": "vim::PreviousMethodStart",
- "[ shift-m": "vim::PreviousMethodEnd",
- "[ *": "vim::PreviousComment",
- "[ /": "vim::PreviousComment",
- "] *": "vim::NextComment",
- "] /": "vim::NextComment",
- "[ -": "vim::PreviousLesserIndent",
- "[ +": "vim::PreviousGreaterIndent",
- "[ =": "vim::PreviousSameIndent",
- "] -": "vim::NextLesserIndent",
- "] +": "vim::NextGreaterIndent",
- "] =": "vim::NextSameIndent",
- "] b": "pane::ActivateNextItem",
- "[ b": "pane::ActivatePreviousItem",
- "] shift-b": "pane::ActivateLastItem",
- "[ shift-b": ["pane::ActivateItem", 0],
- "] space": "vim::InsertEmptyLineBelow",
- "[ space": "vim::InsertEmptyLineAbove",
- "[ e": "editor::MoveLineUp",
- "] e": "editor::MoveLineDown",
- "[ f": "workspace::FollowNextCollaborator",
- "] f": "workspace::FollowNextCollaborator",
// Word motions
"w": "vim::NextWordStart",
@@ -83,10 +55,6 @@
"n": "vim::MoveToNextMatch",
"shift-n": "vim::MoveToPreviousMatch",
"%": "vim::Matching",
- "] }": ["vim::UnmatchedForward", { "char": "}" }],
- "[ {": ["vim::UnmatchedBackward", { "char": "{" }],
- "] )": ["vim::UnmatchedForward", { "char": ")" }],
- "[ (": ["vim::UnmatchedBackward", { "char": "(" }],
"f": ["vim::PushFindForward", { "before": false, "multiline": false }],
"t": ["vim::PushFindForward", { "before": true, "multiline": false }],
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": false }],
@@ -219,6 +187,46 @@
".": "vim::Repeat"
}
},
+ {
+ "context": "vim_mode == normal || vim_mode == visual || vim_mode == operator",
+ "bindings": {
+ "] ]": "vim::NextSectionStart",
+ "] [": "vim::NextSectionEnd",
+ "[ [": "vim::PreviousSectionStart",
+ "[ ]": "vim::PreviousSectionEnd",
+ "] m": "vim::NextMethodStart",
+ "] shift-m": "vim::NextMethodEnd",
+ "[ m": "vim::PreviousMethodStart",
+ "[ shift-m": "vim::PreviousMethodEnd",
+ "[ *": "vim::PreviousComment",
+ "[ /": "vim::PreviousComment",
+ "] *": "vim::NextComment",
+ "] /": "vim::NextComment",
+ "[ -": "vim::PreviousLesserIndent",
+ "[ +": "vim::PreviousGreaterIndent",
+ "[ =": "vim::PreviousSameIndent",
+ "] -": "vim::NextLesserIndent",
+ "] +": "vim::NextGreaterIndent",
+ "] =": "vim::NextSameIndent",
+ "] b": "pane::ActivateNextItem",
+ "[ b": "pane::ActivatePreviousItem",
+ "] shift-b": "pane::ActivateLastItem",
+ "[ shift-b": ["pane::ActivateItem", 0],
+ "] space": "vim::InsertEmptyLineBelow",
+ "[ space": "vim::InsertEmptyLineAbove",
+ "[ e": "editor::MoveLineUp",
+ "] e": "editor::MoveLineDown",
+ "[ f": "workspace::FollowNextCollaborator",
+ "] f": "workspace::FollowNextCollaborator",
+ "] }": ["vim::UnmatchedForward", { "char": "}" }],
+ "[ {": ["vim::UnmatchedBackward", { "char": "{" }],
+ "] )": ["vim::UnmatchedForward", { "char": ")" }],
+ "[ (": ["vim::UnmatchedBackward", { "char": "(" }],
+ // tree-sitter related commands
+ "[ x": "vim::SelectLargerSyntaxNode",
+ "] x": "vim::SelectSmallerSyntaxNode"
+ }
+ },
{
"context": "vim_mode == normal",
"bindings": {
@@ -249,9 +257,6 @@
"g w": "vim::PushRewrap",
"g q": "vim::PushRewrap",
"insert": "vim::InsertBefore",
- // tree-sitter related commands
- "[ x": "vim::SelectLargerSyntaxNode",
- "] x": "vim::SelectSmallerSyntaxNode",
"] d": "editor::GoToDiagnostic",
"[ d": "editor::GoToPreviousDiagnostic",
"] c": "editor::GoToHunk",
@@ -317,10 +322,7 @@
"g w": "vim::Rewrap",
"g ?": "vim::ConvertToRot13",
// "g ?": "vim::ConvertToRot47",
- "\"": "vim::PushRegister",
- // tree-sitter related commands
- "[ x": "editor::SelectLargerSyntaxNode",
- "] x": "editor::SelectSmallerSyntaxNode"
+ "\"": "vim::PushRegister"
}
},
{
@@ -337,7 +339,7 @@
"ctrl-x ctrl-z": "editor::Cancel",
"ctrl-x ctrl-e": "vim::LineDown",
"ctrl-x ctrl-y": "vim::LineUp",
- "ctrl-w": "editor::DeleteToPreviousWordStart",
+ "ctrl-w": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }],
"ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-t": "vim::Indent",
"ctrl-d": "vim::Outdent",
@@ -354,6 +356,15 @@
"ctrl-s": "editor::ShowSignatureHelp"
}
},
+ {
+ "context": "showing_completions",
+ "bindings": {
+ "ctrl-d": "vim::ScrollDown",
+ "ctrl-u": "vim::ScrollUp",
+ "ctrl-e": "vim::LineDown",
+ "ctrl-y": "vim::LineUp"
+ }
+ },
{
"context": "(vim_mode == normal || vim_mode == helix_normal) && !menu",
"bindings": {
@@ -388,6 +399,9 @@
"ctrl-[": "editor::Cancel",
";": "vim::HelixCollapseSelection",
":": "command_palette::Toggle",
+ "m": "vim::PushHelixMatch",
+ "]": ["vim::PushHelixNext", { "around": true }],
+ "[": ["vim::PushHelixPrevious", { "around": true }],
"left": "vim::WrappingLeft",
"right": "vim::WrappingRight",
"h": "vim::WrappingLeft",
@@ -410,13 +424,6 @@
"insert": "vim::InsertBefore",
"alt-.": "vim::RepeatFind",
"alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }],
- // tree-sitter related commands
- "[ x": "editor::SelectLargerSyntaxNode",
- "] x": "editor::SelectSmallerSyntaxNode",
- "] d": "editor::GoToDiagnostic",
- "[ d": "editor::GoToPreviousDiagnostic",
- "] c": "editor::GoToHunk",
- "[ c": "editor::GoToPreviousHunk",
// Goto mode
"g n": "pane::ActivateNextItem",
"g p": "pane::ActivatePreviousItem",
@@ -428,12 +435,14 @@
"g h": "vim::StartOfLine",
"g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s"
"g e": "vim::EndOfDocument",
+ "g .": "vim::HelixGotoLastModification", // go to last modification
"g r": "editor::FindAllReferences", // zed specific
"g t": "vim::WindowTop",
"g c": "vim::WindowMiddle",
"g b": "vim::WindowBottom",
- "x": "editor::SelectLine",
+ "shift-r": "editor::Paste",
+ "x": "vim::HelixSelectLine",
"shift-x": "editor::SelectLine",
"%": "editor::SelectAll",
// Window mode
@@ -458,9 +467,6 @@
"space c": "editor::ToggleComments",
"space y": "editor::Copy",
"space p": "editor::Paste",
- // Match mode
- "m m": "vim::Matching",
- "m i w": ["workspace::SendKeystrokes", "v i w"],
"shift-u": "editor::Redo",
"ctrl-c": "editor::ToggleComments",
"d": "vim::HelixDelete",
@@ -529,7 +535,7 @@
}
},
{
- "context": "vim_operator == a || vim_operator == i || vim_operator == cs",
+ "context": "vim_operator == a || vim_operator == i || vim_operator == cs || vim_operator == helix_next || vim_operator == helix_previous",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignore_punctuation": true }],
@@ -566,6 +572,48 @@
"e": "vim::EntireFile"
}
},
+ {
+ "context": "vim_operator == helix_m",
+ "bindings": {
+ "m": "vim::Matching"
+ }
+ },
+ {
+ "context": "vim_operator == helix_next",
+ "bindings": {
+ "z": "vim::NextSectionStart",
+ "shift-z": "vim::NextSectionEnd",
+ "*": "vim::NextComment",
+ "/": "vim::NextComment",
+ "-": "vim::NextLesserIndent",
+ "+": "vim::NextGreaterIndent",
+ "=": "vim::NextSameIndent",
+ "b": "pane::ActivateNextItem",
+ "shift-b": "pane::ActivateLastItem",
+ "x": "editor::SelectSmallerSyntaxNode",
+ "d": "editor::GoToDiagnostic",
+ "c": "editor::GoToHunk",
+ "space": "vim::InsertEmptyLineBelow"
+ }
+ },
+ {
+ "context": "vim_operator == helix_previous",
+ "bindings": {
+ "z": "vim::PreviousSectionStart",
+ "shift-z": "vim::PreviousSectionEnd",
+ "*": "vim::PreviousComment",
+ "/": "vim::PreviousComment",
+ "-": "vim::PreviousLesserIndent",
+ "+": "vim::PreviousGreaterIndent",
+ "=": "vim::PreviousSameIndent",
+ "b": "pane::ActivatePreviousItem",
+ "shift-b": ["pane::ActivateItem", 0],
+ "x": "editor::SelectLargerSyntaxNode",
+ "d": "editor::GoToPreviousDiagnostic",
+ "c": "editor::GoToPreviousHunk",
+ "space": "vim::InsertEmptyLineAbove"
+ }
+ },
{
"context": "vim_operator == c",
"bindings": {
@@ -819,7 +867,7 @@
"v": "project_panel::OpenPermanent",
"p": "project_panel::Open",
"x": "project_panel::RevealInFileManager",
- "s": "project_panel::OpenWithSystem",
+ "s": "workspace::OpenWithSystem",
"z d": "project_panel::CompareMarkedFiles",
"] c": "project_panel::SelectNextGitEntry",
"[ c": "project_panel::SelectPrevGitEntry",
@@ -172,7 +172,7 @@ The user has specified the following rules that should be applied:
Rules title: {{title}}
{{/if}}
``````
-{{contents}}}
+{{contents}}
``````
{{/each}}
{{/if}}
@@ -162,6 +162,12 @@
// 2. Always quit the application
// "on_last_window_closed": "quit_app",
"on_last_window_closed": "platform_default",
+ // Whether to show padding for zoomed panels.
+ // When enabled, zoomed center panels (e.g. code editor) will have padding all around,
+ // while zoomed bottom/left/right panels will have padding to the top/right/left (respectively).
+ //
+ // Default: true
+ "zoomed_padding": true,
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
@@ -182,8 +188,8 @@
// 4. A box drawn around the following character
// "hollow"
//
- // Default: not set, defaults to "bar"
- "cursor_shape": null,
+ // Default: "bar"
+ "cursor_shape": "bar",
// Determines when the mouse cursor should be hidden in an editor or input box.
//
// 1. Never hide the mouse cursor:
@@ -217,9 +223,25 @@
"current_line_highlight": "all",
// Whether to highlight all occurrences of the selected text in an editor.
"selection_highlight": true,
+ // Whether the text selection should have rounded corners.
+ "rounded_selection": true,
// The debounce delay before querying highlights from the language
// server based on the current cursor location.
"lsp_highlight_debounce": 75,
+ // The minimum APCA perceptual contrast between foreground and background colors.
+ // APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
+ // especially for dark mode. Values range from 0 to 106.
+ //
+ // Based on APCA Readability Criterion (ARC) Bronze Simple Mode:
+ // https://readtech.org/ARC/tests/bronze-simple-mode/
+ // - 0: No contrast adjustment
+ // - 45: Minimum for large fluent text (36px+)
+ // - 60: Minimum for other content text
+ // - 75: Minimum for body text
+ // - 90: Preferred for body text
+ //
+ // This only affects text drawn over highlight backgrounds in the editor.
+ "minimum_contrast_for_highlights": 45,
// Whether to pop the completions menu while typing in an editor without
// explicitly requesting it.
"show_completions_on_input": true,
@@ -260,8 +282,8 @@
// - "warning"
// - "info"
// - "hint"
- // - null — allow all diagnostics (default)
- "diagnostics_max_severity": null,
+ // - "all" — allow all diagnostics (default)
+ "diagnostics_max_severity": "all",
// Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
@@ -273,6 +295,8 @@
"redact_private_values": false,
// The default number of lines to expand excerpts in the multibuffer by.
"expand_excerpt_lines": 5,
+ // The default number of context lines shown in multibuffer excerpts.
+ "excerpt_context_lines": 2,
// Globs to match against file paths to determine if a file is private.
"private_files": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"],
// Whether to use additional LSP queries to format (and amend) the code after
@@ -357,6 +381,8 @@
// Whether to show code action buttons in the editor toolbar.
"code_actions": false
},
+ // Whether to allow windows to tab together based on the user’s tabbing preference (macOS only).
+ "use_system_window_tabs": false,
// Titlebar related settings
"title_bar": {
// Whether to show the branch icon beside branch switcher in the titlebar.
@@ -647,6 +673,8 @@
// "never"
"show": "always"
},
+ // Whether to enable drag-and-drop operations in the project panel.
+ "drag_and_drop": true,
// Whether to hide the root entry when only one folder is open in the window.
"hide_root": false
},
@@ -934,7 +962,7 @@
// Show git status colors in the editor tabs.
"git_status": false,
// Position of the close button on the editor tabs.
- // One of: ["right", "left", "hidden"]
+ // One of: ["right", "left"]
"close_position": "right",
// Whether to show the file icon for a tab.
"file_icons": false,
@@ -1133,11 +1161,6 @@
// The minimum severity of the diagnostics to show inline.
// Inherits editor's diagnostics' max severity settings when `null`.
"max_severity": null
- },
- "cargo": {
- // When enabled, Zed disables rust-analyzer's check on save and starts to query
- // Cargo diagnostics separately.
- "fetch_cargo_diagnostics": false
}
},
// Files or globs of files that will be excluded by Zed entirely. They will be skipped during file
@@ -1503,6 +1526,11 @@
//
// Default: fallback
"words": "fallback",
+ // Minimum number of characters required to automatically trigger word-based completions.
+ // Before that value, it's still possible to trigger the words-based completion manually with the corresponding editor command.
+ //
+ // Default: 3
+ "words_min_length": 3,
// Whether to fetch LSP completions or not.
//
// Default: true
@@ -1575,7 +1603,7 @@
"ensure_final_newline_on_save": false
},
"Elixir": {
- "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
+ "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."]
},
"Elm": {
"tab_size": 4
@@ -1600,7 +1628,7 @@
}
},
"HEEX": {
- "language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
+ "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."]
},
"HTML": {
"prettier": {
@@ -1629,6 +1657,9 @@
"allowed": true
}
},
+ "Kotlin": {
+ "language_servers": ["kotlin-language-server", "!kotlin-lsp", "..."]
+ },
"LaTeX": {
"formatter": "language_server",
"language_servers": ["texlab", "..."],
@@ -1642,9 +1673,6 @@
"use_on_type_format": false,
"allow_rewrap": "anywhere",
"soft_wrap": "editor_width",
- "completions": {
- "words": "disabled"
- },
"prettier": {
"allowed": true
}
@@ -1658,9 +1686,6 @@
}
},
"Plain Text": {
- "completions": {
- "words": "disabled"
- },
"allow_rewrap": "anywhere"
},
"Python": {
@@ -1751,7 +1776,7 @@
"api_url": "http://localhost:1234/api/v0"
},
"deepseek": {
- "api_url": "https://api.deepseek.com"
+ "api_url": "https://api.deepseek.com/v1"
},
"mistral": {
"api_url": "https://api.mistral.ai/v1"
@@ -1899,7 +1924,10 @@
"debugger": {
"stepping_granularity": "line",
"save_breakpoints": true,
+ "timeout": 2000,
"dock": "bottom",
+ "log_dap_communications": true,
+ "format_dap_log_messages": true,
"button": true
},
// Configures any number of settings profiles that are temporarily applied on
@@ -43,8 +43,8 @@
// "args": ["--login"]
// }
// }
- "shell": "system",
+ "shell": "system"
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
- "tags": []
+ // "tags": []
}
]
@@ -93,7 +93,7 @@
"terminal.ansi.bright_cyan": "#4c806fff",
"terminal.ansi.dim_cyan": "#cbf2e4ff",
"terminal.ansi.white": "#bfbdb6ff",
- "terminal.ansi.bright_white": "#bfbdb6ff",
+ "terminal.ansi.bright_white": "#fafafaff",
"terminal.ansi.dim_white": "#787876ff",
"link_text.hover": "#5ac1feff",
"conflict": "#feb454ff",
@@ -479,7 +479,7 @@
"terminal.ansi.bright_cyan": "#ace0cbff",
"terminal.ansi.dim_cyan": "#2a5f4aff",
"terminal.ansi.white": "#fcfcfcff",
- "terminal.ansi.bright_white": "#fcfcfcff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#bcbec0ff",
"link_text.hover": "#3b9ee5ff",
"conflict": "#f1ad49ff",
@@ -865,7 +865,7 @@
"terminal.ansi.bright_cyan": "#4c806fff",
"terminal.ansi.dim_cyan": "#cbf2e4ff",
"terminal.ansi.white": "#cccac2ff",
- "terminal.ansi.bright_white": "#cccac2ff",
+ "terminal.ansi.bright_white": "#fafafaff",
"terminal.ansi.dim_white": "#898a8aff",
"link_text.hover": "#72cffeff",
"conflict": "#fecf72ff",
@@ -94,7 +94,7 @@
"terminal.ansi.bright_cyan": "#45603eff",
"terminal.ansi.dim_cyan": "#c7dfbdff",
"terminal.ansi.white": "#fbf1c7ff",
- "terminal.ansi.bright_white": "#fbf1c7ff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#83a598ff",
"version_control.added": "#b7bb26ff",
@@ -494,7 +494,7 @@
"terminal.ansi.bright_cyan": "#45603eff",
"terminal.ansi.dim_cyan": "#c7dfbdff",
"terminal.ansi.white": "#fbf1c7ff",
- "terminal.ansi.bright_white": "#fbf1c7ff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#83a598ff",
"version_control.added": "#b7bb26ff",
@@ -894,7 +894,7 @@
"terminal.ansi.bright_cyan": "#45603eff",
"terminal.ansi.dim_cyan": "#c7dfbdff",
"terminal.ansi.white": "#fbf1c7ff",
- "terminal.ansi.bright_white": "#fbf1c7ff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#83a598ff",
"version_control.added": "#b7bb26ff",
@@ -1294,7 +1294,7 @@
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#fbf1c7ff",
- "terminal.ansi.bright_white": "#fbf1c7ff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
@@ -1694,7 +1694,7 @@
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#f9f5d7ff",
- "terminal.ansi.bright_white": "#f9f5d7ff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
@@ -2094,7 +2094,7 @@
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#f2e5bcff",
- "terminal.ansi.bright_white": "#f2e5bcff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
@@ -93,7 +93,7 @@
"terminal.ansi.bright_cyan": "#3a565bff",
"terminal.ansi.dim_cyan": "#b9d9dfff",
"terminal.ansi.white": "#dce0e5ff",
- "terminal.ansi.bright_white": "#dce0e5ff",
+ "terminal.ansi.bright_white": "#fafafaff",
"terminal.ansi.dim_white": "#575d65ff",
"link_text.hover": "#74ade8ff",
"version_control.added": "#27a657ff",
@@ -468,7 +468,7 @@
"terminal.bright_foreground": "#242529ff",
"terminal.dim_foreground": "#fafafaff",
"terminal.ansi.black": "#242529ff",
- "terminal.ansi.bright_black": "#242529ff",
+ "terminal.ansi.bright_black": "#747579ff",
"terminal.ansi.dim_black": "#97979aff",
"terminal.ansi.red": "#d36151ff",
"terminal.ansi.bright_red": "#f0b0a4ff",
@@ -489,7 +489,7 @@
"terminal.ansi.bright_cyan": "#a3bedaff",
"terminal.ansi.dim_cyan": "#254058ff",
"terminal.ansi.white": "#fafafaff",
- "terminal.ansi.bright_white": "#fafafaff",
+ "terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#aaaaaaff",
"link_text.hover": "#5c78e2ff",
"version_control.added": "#27a657ff",
@@ -18,8 +18,8 @@ test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot"]
[dependencies]
action_log.workspace = true
agent-client-protocol.workspace = true
-agent.workspace = true
anyhow.workspace = true
+agent_settings.workspace = true
buffer_diff.workspace = true
collections.workspace = true
editor.workspace = true
@@ -31,18 +31,21 @@ language.workspace = true
language_model.workspace = true
markdown.workspace = true
parking_lot = { workspace = true, optional = true }
+portable-pty.workspace = true
project.workspace = true
prompt_store.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
+task.workspace = true
terminal.workspace = true
ui.workspace = true
url.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
+which.workspace = true
workspace-hack.workspace = true
[dev-dependencies]
@@ -3,13 +3,20 @@ mod diff;
mod mention;
mod terminal;
+use agent_settings::AgentSettings;
+use collections::HashSet;
pub use connection::*;
pub use diff::*;
+use futures::future::Shared;
+use language::language_settings::FormatOnSave;
pub use mention::*;
+use project::lsp_store::{FormatTrigger, LspFormatTarget};
+use serde::{Deserialize, Serialize};
+use settings::Settings as _;
pub use terminal::*;
use action_log::ActionLog;
-use agent_client_protocol as acp;
+use agent_client_protocol::{self as acp};
use anyhow::{Context as _, Result, anyhow};
use editor::Bias;
use futures::{FutureExt, channel::oneshot, future::BoxFuture};
@@ -27,7 +34,8 @@ use std::rc::Rc;
use std::time::{Duration, Instant};
use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
use ui::App;
-use util::ResultExt;
+use util::{ResultExt, get_system_shell};
+use uuid::Uuid;
#[derive(Debug)]
pub struct UserMessage {
@@ -177,38 +185,46 @@ impl ToolCall {
tool_call: acp::ToolCall,
status: ToolCallStatus,
language_registry: Arc<LanguageRegistry>,
+ terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App,
- ) -> Self {
- Self {
+ ) -> Result<Self> {
+ let title = if let Some((first_line, _)) = tool_call.title.split_once("\n") {
+ first_line.to_owned() + "…"
+ } else {
+ tool_call.title
+ };
+ let mut content = Vec::with_capacity(tool_call.content.len());
+ for item in tool_call.content {
+ content.push(ToolCallContent::from_acp(
+ item,
+ language_registry.clone(),
+ terminals,
+ cx,
+ )?);
+ }
+
+ let result = Self {
id: tool_call.id,
- label: cx.new(|cx| {
- Markdown::new(
- tool_call.title.into(),
- Some(language_registry.clone()),
- None,
- cx,
- )
- }),
+ label: cx
+ .new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)),
kind: tool_call.kind,
- content: tool_call
- .content
- .into_iter()
- .map(|content| ToolCallContent::from_acp(content, language_registry.clone(), cx))
- .collect(),
+ content,
locations: tool_call.locations,
resolved_locations: Vec::default(),
status,
raw_input: tool_call.raw_input,
raw_output: tool_call.raw_output,
- }
+ };
+ Ok(result)
}
fn update_fields(
&mut self,
fields: acp::ToolCallUpdateFields,
language_registry: Arc<LanguageRegistry>,
+ terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App,
- ) {
+ ) -> Result<()> {
let acp::ToolCallUpdateFields {
kind,
status,
@@ -229,15 +245,31 @@ impl ToolCall {
if let Some(title) = title {
self.label.update(cx, |label, cx| {
- label.replace(title, cx);
+ if let Some((first_line, _)) = title.split_once("\n") {
+ label.replace(first_line.to_owned() + "…", cx)
+ } else {
+ label.replace(title, cx);
+ }
});
}
if let Some(content) = content {
- self.content = content
- .into_iter()
- .map(|chunk| ToolCallContent::from_acp(chunk, language_registry.clone(), cx))
- .collect();
+ let new_content_len = content.len();
+ let mut content = content.into_iter();
+
+ // Reuse existing content if we can
+ for (old, new) in self.content.iter_mut().zip(content.by_ref()) {
+ old.update_from_acp(new, language_registry.clone(), terminals, cx)?;
+ }
+ for new in content {
+ self.content.push(ToolCallContent::from_acp(
+ new,
+ language_registry.clone(),
+ terminals,
+ cx,
+ )?)
+ }
+ self.content.truncate(new_content_len);
}
if let Some(locations) = locations {
@@ -259,6 +291,7 @@ impl ToolCall {
}
self.raw_output = Some(raw_output);
}
+ Ok(())
}
pub fn diffs(&self) -> impl Iterator<Item = &Entity<Diff>> {
@@ -297,11 +330,9 @@ impl ToolCall {
) -> Option<AgentLocation> {
let buffer = project
.update(cx, |project, cx| {
- if let Some(path) = project.project_path_for_absolute_path(&location.path, cx) {
- Some(project.open_buffer(path, cx))
- } else {
- None
- }
+ project
+ .project_path_for_absolute_path(&location.path, cx)
+ .map(|path| project.open_buffer(path, cx))
})
.ok()??;
let buffer = buffer.await.log_err()?;
@@ -467,7 +498,7 @@ impl ContentBlock {
fn block_string_contents(&self, block: acp::ContentBlock) -> String {
match block {
- acp::ContentBlock::Text(text_content) => text_content.text.clone(),
+ acp::ContentBlock::Text(text_content) => text_content.text,
acp::ContentBlock::ResourceLink(resource_link) => {
Self::resource_link_md(&resource_link.uri)
}
@@ -496,7 +527,7 @@ impl ContentBlock {
"`Image`".into()
}
- fn to_markdown<'a>(&'a self, cx: &'a App) -> &'a str {
+ pub fn to_markdown<'a>(&'a self, cx: &'a App) -> &'a str {
match self {
ContentBlock::Empty => "",
ContentBlock::Markdown { markdown } => markdown.read(cx).source(),
@@ -531,13 +562,16 @@ impl ToolCallContent {
pub fn from_acp(
content: acp::ToolCallContent,
language_registry: Arc<LanguageRegistry>,
+ terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App,
- ) -> Self {
+ ) -> Result<Self> {
match content {
- acp::ToolCallContent::Content { content } => {
- Self::ContentBlock(ContentBlock::new(content, &language_registry, cx))
- }
- acp::ToolCallContent::Diff { diff } => Self::Diff(cx.new(|cx| {
+ acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new(
+ content,
+ &language_registry,
+ cx,
+ ))),
+ acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
Diff::finalized(
diff.path,
diff.old_text,
@@ -545,8 +579,37 @@ impl ToolCallContent {
language_registry,
cx,
)
- })),
+ }))),
+ acp::ToolCallContent::Terminal { terminal_id } => terminals
+ .get(&terminal_id)
+ .cloned()
+ .map(Self::Terminal)
+ .ok_or_else(|| anyhow::anyhow!("Terminal with id `{}` not found", terminal_id)),
+ }
+ }
+
+ pub fn update_from_acp(
+ &mut self,
+ new: acp::ToolCallContent,
+ language_registry: Arc<LanguageRegistry>,
+ terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
+ cx: &mut App,
+ ) -> Result<()> {
+ let needs_update = match (&self, &new) {
+ (Self::Diff(old_diff), acp::ToolCallContent::Diff { diff: new_diff }) => {
+ old_diff.read(cx).needs_update(
+ new_diff.old_text.as_deref().unwrap_or(""),
+ &new_diff.new_text,
+ cx,
+ )
+ }
+ _ => true,
+ };
+
+ if needs_update {
+ *self = Self::from_acp(new, language_registry, terminals, cx)?;
}
+ Ok(())
}
pub fn to_markdown(&self, cx: &App) -> String {
@@ -664,6 +727,43 @@ impl PlanEntry {
}
}
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct TokenUsage {
+ pub max_tokens: u64,
+ pub used_tokens: u64,
+}
+
+impl TokenUsage {
+ pub fn ratio(&self) -> TokenUsageRatio {
+ #[cfg(debug_assertions)]
+ let warning_threshold: f32 = std::env::var("ZED_THREAD_WARNING_THRESHOLD")
+ .unwrap_or("0.8".to_string())
+ .parse()
+ .unwrap();
+ #[cfg(not(debug_assertions))]
+ let warning_threshold: f32 = 0.8;
+
+ // When the maximum is unknown because there is no selected model,
+ // avoid showing the token limit warning.
+ if self.max_tokens == 0 {
+ TokenUsageRatio::Normal
+ } else if self.used_tokens >= self.max_tokens {
+ TokenUsageRatio::Exceeded
+ } else if self.used_tokens as f32 / self.max_tokens as f32 >= warning_threshold {
+ TokenUsageRatio::Warning
+ } else {
+ TokenUsageRatio::Normal
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum TokenUsageRatio {
+ Normal,
+ Warning,
+ Exceeded,
+}
+
#[derive(Debug, Clone)]
pub struct RetryStatus {
pub last_error: SharedString,
@@ -683,24 +783,33 @@ pub struct AcpThread {
send_task: Option<Task<()>>,
connection: Rc<dyn AgentConnection>,
session_id: acp::SessionId,
+ token_usage: Option<TokenUsage>,
+ prompt_capabilities: acp::PromptCapabilities,
+ _observe_prompt_capabilities: Task<anyhow::Result<()>>,
+ determine_shell: Shared<Task<String>>,
+ terminals: HashMap<acp::TerminalId, Entity<Terminal>>,
}
#[derive(Debug)]
pub enum AcpThreadEvent {
NewEntry,
TitleUpdated,
+ TokenUsageUpdated,
EntryUpdated(usize),
EntriesRemoved(Range<usize>),
ToolAuthorizationRequired,
Retry(RetryStatus),
Stopped,
Error,
- ServerExited(ExitStatus),
+ LoadError(LoadError),
+ PromptCapabilitiesUpdated,
+ Refusal,
+ AvailableCommandsUpdated(Vec<acp::AvailableCommand>),
}
impl EventEmitter<AcpThreadEvent> for AcpThread {}
-#[derive(PartialEq, Eq)]
+#[derive(PartialEq, Eq, Debug)]
pub enum ThreadStatus {
Idle,
WaitingForToolConfirmation,
@@ -710,20 +819,33 @@ pub enum ThreadStatus {
#[derive(Debug, Clone)]
pub enum LoadError {
Unsupported {
- error_message: SharedString,
- upgrade_message: SharedString,
- upgrade_command: String,
+ command: SharedString,
+ current_version: SharedString,
+ minimum_version: SharedString,
+ },
+ FailedToInstall(SharedString),
+ Exited {
+ status: ExitStatus,
},
- Exited(i32),
Other(SharedString),
}
impl Display for LoadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
- LoadError::Unsupported { error_message, .. } => write!(f, "{}", error_message),
- LoadError::Exited(status) => write!(f, "Server exited with status {}", status),
- LoadError::Other(msg) => write!(f, "{}", msg),
+ LoadError::Unsupported {
+ command: path,
+ current_version,
+ minimum_version,
+ } => {
+ write!(
+ f,
+ "version {current_version} from {path} is not supported (need at least {minimum_version})"
+ )
+ }
+ LoadError::FailedToInstall(msg) => write!(f, "Failed to install: {msg}"),
+ LoadError::Exited { status } => write!(f, "Server exited with status {status}"),
+ LoadError::Other(msg) => write!(f, "{msg}"),
}
}
}
@@ -737,7 +859,34 @@ impl AcpThread {
project: Entity<Project>,
action_log: Entity<ActionLog>,
session_id: acp::SessionId,
+ mut prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
+ cx: &mut Context<Self>,
) -> Self {
+ let prompt_capabilities = *prompt_capabilities_rx.borrow();
+ let task = cx.spawn::<_, anyhow::Result<()>>(async move |this, cx| {
+ loop {
+ let caps = prompt_capabilities_rx.recv().await?;
+ this.update(cx, |this, cx| {
+ this.prompt_capabilities = caps;
+ cx.emit(AcpThreadEvent::PromptCapabilitiesUpdated);
+ })?;
+ }
+ });
+
+ let determine_shell = cx
+ .background_spawn(async move {
+ if cfg!(windows) {
+ return get_system_shell();
+ }
+
+ if which::which("bash").is_ok() {
+ "bash".into()
+ } else {
+ get_system_shell()
+ }
+ })
+ .shared();
+
Self {
action_log,
shared_buffers: Default::default(),
@@ -748,9 +897,18 @@ impl AcpThread {
send_task: None,
connection,
session_id,
+ token_usage: None,
+ prompt_capabilities,
+ _observe_prompt_capabilities: task,
+ terminals: HashMap::default(),
+ determine_shell,
}
}
+ pub fn prompt_capabilities(&self) -> acp::PromptCapabilities {
+ self.prompt_capabilities
+ }
+
pub fn connection(&self) -> &Rc<dyn AgentConnection> {
&self.connection
}
@@ -787,6 +945,10 @@ impl AcpThread {
}
}
+ pub fn token_usage(&self) -> Option<&TokenUsage> {
+ self.token_usage.as_ref()
+ }
+
pub fn has_pending_edit_tool_calls(&self) -> bool {
for entry in self.entries.iter().rev() {
match entry {
@@ -842,6 +1004,9 @@ impl AcpThread {
acp::SessionUpdate::Plan(plan) => {
self.update_plan(plan, cx);
}
+ acp::SessionUpdate::AvailableCommandsUpdate { available_commands } => {
+ cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands))
+ }
}
Ok(())
}
@@ -931,10 +1096,24 @@ impl AcpThread {
cx.emit(AcpThreadEvent::NewEntry);
}
- pub fn update_title(&mut self, title: SharedString, cx: &mut Context<Self>) -> Result<()> {
- self.title = title;
- cx.emit(AcpThreadEvent::TitleUpdated);
- Ok(())
+ pub fn can_set_title(&mut self, cx: &mut Context<Self>) -> bool {
+ self.connection.set_title(&self.session_id, cx).is_some()
+ }
+
+ pub fn set_title(&mut self, title: SharedString, cx: &mut Context<Self>) -> Task<Result<()>> {
+ if title != self.title {
+ self.title = title.clone();
+ cx.emit(AcpThreadEvent::TitleUpdated);
+ if let Some(set_title) = self.connection.set_title(&self.session_id, cx) {
+ return set_title.run(title, cx);
+ }
+ }
+ Task::ready(Ok(()))
+ }
+
+ pub fn update_token_usage(&mut self, usage: Option<TokenUsage>, cx: &mut Context<Self>) {
+ self.token_usage = usage;
+ cx.emit(AcpThreadEvent::TokenUsageUpdated);
}
pub fn update_retry_status(&mut self, status: RetryStatus, cx: &mut Context<Self>) {
@@ -949,27 +1128,28 @@ impl AcpThread {
let update = update.into();
let languages = self.project.read(cx).languages().clone();
- let (ix, current_call) = self
- .tool_call_mut(update.id())
+ let ix = self
+ .index_for_tool_call(update.id())
.context("Tool call not found")?;
+ let AgentThreadEntry::ToolCall(call) = &mut self.entries[ix] else {
+ unreachable!()
+ };
+
match update {
ToolCallUpdate::UpdateFields(update) => {
let location_updated = update.fields.locations.is_some();
- current_call.update_fields(update.fields, languages, cx);
+ call.update_fields(update.fields, languages, &self.terminals, cx)?;
if location_updated {
- self.resolve_locations(update.id.clone(), cx);
+ self.resolve_locations(update.id, cx);
}
}
ToolCallUpdate::UpdateDiff(update) => {
- current_call.content.clear();
- current_call
- .content
- .push(ToolCallContent::Diff(update.diff));
+ call.content.clear();
+ call.content.push(ToolCallContent::Diff(update.diff));
}
ToolCallUpdate::UpdateTerminal(update) => {
- current_call.content.clear();
- current_call
- .content
+ call.content.clear();
+ call.content
.push(ToolCallContent::Terminal(update.terminal));
}
}
@@ -992,21 +1172,30 @@ impl AcpThread {
/// Fails if id does not match an existing entry.
pub fn upsert_tool_call_inner(
&mut self,
- tool_call_update: acp::ToolCallUpdate,
+ update: acp::ToolCallUpdate,
status: ToolCallStatus,
cx: &mut Context<Self>,
) -> Result<(), acp::Error> {
let language_registry = self.project.read(cx).languages().clone();
- let id = tool_call_update.id.clone();
+ let id = update.id.clone();
+
+ if let Some(ix) = self.index_for_tool_call(&id) {
+ let AgentThreadEntry::ToolCall(call) = &mut self.entries[ix] else {
+ unreachable!()
+ };
- if let Some((ix, current_call)) = self.tool_call_mut(&id) {
- current_call.update_fields(tool_call_update.fields, language_registry, cx);
- current_call.status = status;
+ call.update_fields(update.fields, language_registry, &self.terminals, cx)?;
+ call.status = status;
cx.emit(AcpThreadEvent::EntryUpdated(ix));
} else {
- let call =
- ToolCall::from_acp(tool_call_update.try_into()?, status, language_registry, cx);
+ let call = ToolCall::from_acp(
+ update.try_into()?,
+ status,
+ language_registry,
+ &self.terminals,
+ cx,
+ )?;
self.push_entry(AgentThreadEntry::ToolCall(call), cx);
};
@@ -1014,6 +1203,22 @@ impl AcpThread {
Ok(())
}
+ fn index_for_tool_call(&self, id: &acp::ToolCallId) -> Option<usize> {
+ self.entries
+ .iter()
+ .enumerate()
+ .rev()
+ .find_map(|(index, entry)| {
+ if let AgentThreadEntry::ToolCall(tool_call) = entry
+ && &tool_call.id == id
+ {
+ Some(index)
+ } else {
+ None
+ }
+ })
+ }
+
fn tool_call_mut(&mut self, id: &acp::ToolCallId) -> Option<(usize, &mut ToolCall)> {
// The tool call we are looking for is typically the last one, or very close to the end.
// At the moment, it doesn't seem like a hashmap would be a good fit for this use case.
@@ -1032,6 +1237,22 @@ impl AcpThread {
})
}
+ pub fn tool_call(&mut self, id: &acp::ToolCallId) -> Option<(usize, &ToolCall)> {
+ self.entries
+ .iter()
+ .enumerate()
+ .rev()
+ .find_map(|(index, tool_call)| {
+ if let AgentThreadEntry::ToolCall(tool_call) = tool_call
+ && &tool_call.id == id
+ {
+ Some((index, tool_call))
+ } else {
+ None
+ }
+ })
+ }
+
pub fn resolve_locations(&mut self, id: acp::ToolCallId, cx: &mut Context<Self>) {
let project = self.project.clone();
let Some((_, tool_call)) = self.tool_call_mut(&id) else {
@@ -1083,9 +1304,29 @@ impl AcpThread {
tool_call: acp::ToolCallUpdate,
options: Vec<acp::PermissionOption>,
cx: &mut Context<Self>,
- ) -> Result<oneshot::Receiver<acp::PermissionOptionId>, acp::Error> {
+ ) -> Result<BoxFuture<'static, acp::RequestPermissionOutcome>> {
let (tx, rx) = oneshot::channel();
+ if AgentSettings::get_global(cx).always_allow_tool_actions {
+ // Don't use AllowAlways, because then if you were to turn off always_allow_tool_actions,
+ // some tools would (incorrectly) continue to auto-accept.
+ if let Some(allow_once_option) = options.iter().find_map(|option| {
+ if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) {
+ Some(option.id.clone())
+ } else {
+ None
+ }
+ }) {
+ self.upsert_tool_call_inner(tool_call, ToolCallStatus::Pending, cx)?;
+ return Ok(async {
+ acp::RequestPermissionOutcome::Selected {
+ option_id: allow_once_option,
+ }
+ }
+ .boxed());
+ }
+ }
+
let status = ToolCallStatus::WaitingForConfirmation {
options,
respond_tx: tx,
@@ -1093,7 +1334,16 @@ impl AcpThread {
self.upsert_tool_call_inner(tool_call, status, cx)?;
cx.emit(AcpThreadEvent::ToolAuthorizationRequired);
- Ok(rx)
+
+ let fut = async {
+ match rx.await {
+ Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option },
+ Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled,
+ }
+ }
+ .boxed();
+
+ Ok(fut)
}
pub fn authorize_tool_call(
@@ -1216,11 +1466,7 @@ impl AcpThread {
};
let git_store = self.project.read(cx).git_store().clone();
- let message_id = if self
- .connection
- .session_editor(&self.session_id, cx)
- .is_some()
- {
+ let message_id = if self.connection.truncate(&self.session_id, cx).is_some() {
Some(UserMessageId::new())
} else {
None
@@ -1258,6 +1504,10 @@ impl AcpThread {
})
}
+ pub fn can_resume(&self, cx: &App) -> bool {
+ self.connection.resume(&self.session_id, cx).is_some()
+ }
+
pub fn resume(&mut self, cx: &mut Context<Self>) -> BoxFuture<'static, Result<()>> {
self.run_turn(cx, async move |this, cx| {
this.update(cx, |this, cx| {
@@ -1304,7 +1554,7 @@ impl AcpThread {
let canceled = matches!(
result,
Ok(Ok(acp::PromptResponse {
- stop_reason: acp::StopReason::Canceled
+ stop_reason: acp::StopReason::Cancelled
}))
);
@@ -1317,6 +1567,44 @@ impl AcpThread {
this.send_task.take();
}
+ // Handle refusal - distinguish between user prompt and tool call refusals
+ if let Ok(Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::Refusal,
+ })) = result
+ {
+ if let Some((user_msg_ix, _)) = this.last_user_message() {
+ // Check if there's a completed tool call with results after the last user message
+ // This indicates the refusal is in response to tool output, not the user's prompt
+ let has_completed_tool_call_after_user_msg =
+ this.entries.iter().skip(user_msg_ix + 1).any(|entry| {
+ if let AgentThreadEntry::ToolCall(tool_call) = entry {
+ // Check if the tool call has completed and has output
+ matches!(tool_call.status, ToolCallStatus::Completed)
+ && tool_call.raw_output.is_some()
+ } else {
+ false
+ }
+ });
+
+ if has_completed_tool_call_after_user_msg {
+ // Refusal is due to tool output - don't truncate, just notify
+ // The model refused based on what the tool returned
+ cx.emit(AcpThreadEvent::Refusal);
+ } else {
+ // User prompt was refused - truncate back to before the user message
+ let range = user_msg_ix..this.entries.len();
+ if range.start < range.end {
+ this.entries.truncate(user_msg_ix);
+ cx.emit(AcpThreadEvent::EntriesRemoved(range));
+ }
+ cx.emit(AcpThreadEvent::Refusal);
+ }
+ } else {
+ // No user message found, treat as general refusal
+ cx.emit(AcpThreadEvent::Refusal);
+ }
+ }
+
cx.emit(AcpThreadEvent::Stopped);
Ok(())
}
@@ -1355,7 +1643,7 @@ impl AcpThread {
/// Rewinds this thread to before the entry at `index`, removing it and all
/// subsequent entries while reverting any changes made from that point.
pub fn rewind(&mut self, id: UserMessageId, cx: &mut Context<Self>) -> Task<Result<()>> {
- let Some(session_editor) = self.connection.session_editor(&self.session_id, cx) else {
+ let Some(truncate) = self.connection.truncate(&self.session_id, cx) else {
return Task::ready(Err(anyhow!("not supported")));
};
let Some(message) = self.user_message(&id) else {
@@ -1375,8 +1663,7 @@ impl AcpThread {
.await?;
}
- cx.update(|cx| session_editor.truncate(id.clone(), cx))?
- .await?;
+ cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
this.update(cx, |this, cx| {
if let Some((ix, _)) = this.user_message_mut(&id) {
let range = ix..this.entries.len();
@@ -1582,42 +1869,198 @@ impl AcpThread {
.collect::<Vec<_>>()
})
.await;
- cx.update(|cx| {
- project.update(cx, |project, cx| {
- project.set_agent_location(
- Some(AgentLocation {
- buffer: buffer.downgrade(),
- position: edits
- .last()
- .map(|(range, _)| range.end)
- .unwrap_or(Anchor::MIN),
- }),
- cx,
- );
- });
+ project.update(cx, |project, cx| {
+ project.set_agent_location(
+ Some(AgentLocation {
+ buffer: buffer.downgrade(),
+ position: edits
+ .last()
+ .map(|(range, _)| range.end)
+ .unwrap_or(Anchor::MIN),
+ }),
+ cx,
+ );
+ })?;
+
+ let format_on_save = cx.update(|cx| {
action_log.update(cx, |action_log, cx| {
action_log.buffer_read(buffer.clone(), cx);
});
- buffer.update(cx, |buffer, cx| {
+
+ let format_on_save = buffer.update(cx, |buffer, cx| {
buffer.edit(edits, None, cx);
+
+ let settings = language::language_settings::language_settings(
+ buffer.language().map(|l| l.name()),
+ buffer.file(),
+ cx,
+ );
+
+ settings.format_on_save != FormatOnSave::Off
});
action_log.update(cx, |action_log, cx| {
action_log.buffer_edited(buffer.clone(), cx);
});
+ format_on_save
})?;
+
+ if format_on_save {
+ let format_task = project.update(cx, |project, cx| {
+ project.format(
+ HashSet::from_iter([buffer.clone()]),
+ LspFormatTarget::Buffers,
+ false,
+ FormatTrigger::Save,
+ cx,
+ )
+ })?;
+ format_task.await.log_err();
+
+ action_log.update(cx, |action_log, cx| {
+ action_log.buffer_edited(buffer.clone(), cx);
+ })?;
+ }
+
project
.update(cx, |project, cx| project.save_buffer(buffer, cx))?
.await
})
}
+ pub fn create_terminal(
+ &self,
+ mut command: String,
+ args: Vec<String>,
+ extra_env: Vec<acp::EnvVariable>,
+ cwd: Option<PathBuf>,
+ output_byte_limit: Option<u64>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Terminal>>> {
+ for arg in args {
+ command.push(' ');
+ command.push_str(&arg);
+ }
+
+ let shell_command = if cfg!(windows) {
+ format!("$null | & {{{}}}", command.replace("\"", "'"))
+ } else if let Some(cwd) = cwd.as_ref().and_then(|cwd| cwd.as_os_str().to_str()) {
+ // Make sure once we're *inside* the shell, we cd into `cwd`
+ format!("(cd {cwd}; {}) </dev/null", command)
+ } else {
+ format!("({}) </dev/null", command)
+ };
+ let args = vec!["-c".into(), shell_command];
+
+ let env = match &cwd {
+ Some(dir) => self.project.update(cx, |project, cx| {
+ project.directory_environment(dir.as_path().into(), cx)
+ }),
+ None => Task::ready(None).shared(),
+ };
+
+ let env = cx.spawn(async move |_, _| {
+ let mut env = env.await.unwrap_or_default();
+ if cfg!(unix) {
+ env.insert("PAGER".into(), "cat".into());
+ }
+ for var in extra_env {
+ env.insert(var.name, var.value);
+ }
+ env
+ });
+
+ let project = self.project.clone();
+ let language_registry = project.read(cx).languages().clone();
+ let determine_shell = self.determine_shell.clone();
+
+ let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into());
+ let terminal_task = cx.spawn({
+ let terminal_id = terminal_id.clone();
+ async move |_this, cx| {
+ let program = determine_shell.await;
+ let env = env.await;
+ let terminal = project
+ .update(cx, |project, cx| {
+ project.create_terminal_task(
+ task::SpawnInTerminal {
+ command: Some(program),
+ args,
+ cwd: cwd.clone(),
+ env,
+ ..Default::default()
+ },
+ cx,
+ )
+ })?
+ .await?;
+
+ cx.new(|cx| {
+ Terminal::new(
+ terminal_id,
+ command,
+ cwd,
+ output_byte_limit.map(|l| l as usize),
+ terminal,
+ language_registry,
+ cx,
+ )
+ })
+ }
+ });
+
+ cx.spawn(async move |this, cx| {
+ let terminal = terminal_task.await?;
+ this.update(cx, |this, _cx| {
+ this.terminals.insert(terminal_id, terminal.clone());
+ terminal
+ })
+ })
+ }
+
+ pub fn kill_terminal(
+ &mut self,
+ terminal_id: acp::TerminalId,
+ cx: &mut Context<Self>,
+ ) -> Result<()> {
+ self.terminals
+ .get(&terminal_id)
+ .context("Terminal not found")?
+ .update(cx, |terminal, cx| {
+ terminal.kill(cx);
+ });
+
+ Ok(())
+ }
+
+ pub fn release_terminal(
+ &mut self,
+ terminal_id: acp::TerminalId,
+ cx: &mut Context<Self>,
+ ) -> Result<()> {
+ self.terminals
+ .remove(&terminal_id)
+ .context("Terminal not found")?
+ .update(cx, |terminal, cx| {
+ terminal.kill(cx);
+ });
+
+ Ok(())
+ }
+
+ pub fn terminal(&self, terminal_id: acp::TerminalId) -> Result<Entity<Terminal>> {
+ self.terminals
+ .get(&terminal_id)
+ .context("Terminal not found")
+ .cloned()
+ }
+
pub fn to_markdown(&self, cx: &App) -> String {
self.entries.iter().map(|e| e.to_markdown(cx)).collect()
}
- pub fn emit_server_exited(&mut self, status: ExitStatus, cx: &mut Context<Self>) {
- cx.emit(AcpThreadEvent::ServerExited(status));
+ pub fn emit_load_error(&mut self, error: LoadError, cx: &mut Context<Self>) {
+ cx.emit(AcpThreadEvent::LoadError(error));
}
}
@@ -1671,7 +2114,7 @@ mod tests {
use gpui::{App, AsyncApp, TestAppContext, WeakEntity};
use indoc::indoc;
use project::{FakeFs, Fs};
- use rand::Rng as _;
+ use rand::{distr, prelude::*};
use serde_json::json;
use settings::SettingsStore;
use smol::stream::StreamExt as _;
@@ -2263,6 +2706,273 @@ mod tests {
assert_eq!(fs.files(), vec![Path::new(path!("/test/file-0"))]);
}
+ #[gpui::test]
+ async fn test_tool_result_refusal(cx: &mut TestAppContext) {
+ use std::sync::atomic::AtomicUsize;
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, None, cx).await;
+
+ // Create a connection that simulates refusal after tool result
+ let prompt_count = Arc::new(AtomicUsize::new(0));
+ let connection = Rc::new(FakeAgentConnection::new().on_user_message({
+ let prompt_count = prompt_count.clone();
+ move |_request, thread, mut cx| {
+ let count = prompt_count.fetch_add(1, SeqCst);
+ async move {
+ if count == 0 {
+ // First prompt: Generate a tool call with result
+ thread.update(&mut cx, |thread, cx| {
+ thread
+ .handle_session_update(
+ acp::SessionUpdate::ToolCall(acp::ToolCall {
+ id: acp::ToolCallId("tool1".into()),
+ title: "Test Tool".into(),
+ kind: acp::ToolKind::Fetch,
+ status: acp::ToolCallStatus::Completed,
+ content: vec![],
+ locations: vec![],
+ raw_input: Some(serde_json::json!({"query": "test"})),
+ raw_output: Some(
+ serde_json::json!({"result": "inappropriate content"}),
+ ),
+ }),
+ cx,
+ )
+ .unwrap();
+ })?;
+
+ // Now return refusal because of the tool result
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::Refusal,
+ })
+ } else {
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::EndTurn,
+ })
+ }
+ }
+ .boxed_local()
+ }
+ }));
+
+ let thread = cx
+ .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
+ .await
+ .unwrap();
+
+ // Track if we see a Refusal event
+ let saw_refusal_event = Arc::new(std::sync::Mutex::new(false));
+ let saw_refusal_event_captured = saw_refusal_event.clone();
+ thread.update(cx, |_thread, cx| {
+ cx.subscribe(
+ &thread,
+ move |_thread, _event_thread, event: &AcpThreadEvent, _cx| {
+ if matches!(event, AcpThreadEvent::Refusal) {
+ *saw_refusal_event_captured.lock().unwrap() = true;
+ }
+ },
+ )
+ .detach();
+ });
+
+ // Send a user message - this will trigger tool call and then refusal
+ let send_task = thread.update(cx, |thread, cx| {
+ thread.send(
+ vec![acp::ContentBlock::Text(acp::TextContent {
+ text: "Hello".into(),
+ annotations: None,
+ })],
+ cx,
+ )
+ });
+ cx.background_executor.spawn(send_task).detach();
+ cx.run_until_parked();
+
+ // Verify that:
+ // 1. A Refusal event WAS emitted (because it's a tool result refusal, not user prompt)
+ // 2. The user message was NOT truncated
+ assert!(
+ *saw_refusal_event.lock().unwrap(),
+ "Refusal event should be emitted for tool result refusals"
+ );
+
+ thread.read_with(cx, |thread, _| {
+ let entries = thread.entries();
+ assert!(entries.len() >= 2, "Should have user message and tool call");
+
+ // Verify user message is still there
+ assert!(
+ matches!(entries[0], AgentThreadEntry::UserMessage(_)),
+ "User message should not be truncated"
+ );
+
+ // Verify tool call is there with result
+ if let AgentThreadEntry::ToolCall(tool_call) = &entries[1] {
+ assert!(
+ tool_call.raw_output.is_some(),
+ "Tool call should have output"
+ );
+ } else {
+ panic!("Expected tool call at index 1");
+ }
+ });
+ }
+
+ #[gpui::test]
+ async fn test_user_prompt_refusal_emits_event(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, None, cx).await;
+
+ let refuse_next = Arc::new(AtomicBool::new(false));
+ let connection = Rc::new(FakeAgentConnection::new().on_user_message({
+ let refuse_next = refuse_next.clone();
+ move |_request, _thread, _cx| {
+ if refuse_next.load(SeqCst) {
+ async move {
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::Refusal,
+ })
+ }
+ .boxed_local()
+ } else {
+ async move {
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::EndTurn,
+ })
+ }
+ .boxed_local()
+ }
+ }
+ }));
+
+ let thread = cx
+ .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
+ .await
+ .unwrap();
+
+ // Track if we see a Refusal event
+ let saw_refusal_event = Arc::new(std::sync::Mutex::new(false));
+ let saw_refusal_event_captured = saw_refusal_event.clone();
+ thread.update(cx, |_thread, cx| {
+ cx.subscribe(
+ &thread,
+ move |_thread, _event_thread, event: &AcpThreadEvent, _cx| {
+ if matches!(event, AcpThreadEvent::Refusal) {
+ *saw_refusal_event_captured.lock().unwrap() = true;
+ }
+ },
+ )
+ .detach();
+ });
+
+ // Send a message that will be refused
+ refuse_next.store(true, SeqCst);
+ cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["hello".into()], cx)))
+ .await
+ .unwrap();
+
+ // Verify that a Refusal event WAS emitted for user prompt refusal
+ assert!(
+ *saw_refusal_event.lock().unwrap(),
+ "Refusal event should be emitted for user prompt refusals"
+ );
+
+ // Verify the message was truncated (user prompt refusal)
+ thread.read_with(cx, |thread, cx| {
+ assert_eq!(thread.to_markdown(cx), "");
+ });
+ }
+
+ #[gpui::test]
+ async fn test_refusal(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(path!("/"), json!({})).await;
+ let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await;
+
+ let refuse_next = Arc::new(AtomicBool::new(false));
+ let connection = Rc::new(FakeAgentConnection::new().on_user_message({
+ let refuse_next = refuse_next.clone();
+ move |request, thread, mut cx| {
+ let refuse_next = refuse_next.clone();
+ async move {
+ if refuse_next.load(SeqCst) {
+ return Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::Refusal,
+ });
+ }
+
+ let acp::ContentBlock::Text(content) = &request.prompt[0] else {
+ panic!("expected text content block");
+ };
+ thread.update(&mut cx, |thread, cx| {
+ thread
+ .handle_session_update(
+ acp::SessionUpdate::AgentMessageChunk {
+ content: content.text.to_uppercase().into(),
+ },
+ cx,
+ )
+ .unwrap();
+ })?;
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::EndTurn,
+ })
+ }
+ .boxed_local()
+ }
+ }));
+ let thread = cx
+ .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
+ .await
+ .unwrap();
+
+ cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["hello".into()], cx)))
+ .await
+ .unwrap();
+ thread.read_with(cx, |thread, cx| {
+ assert_eq!(
+ thread.to_markdown(cx),
+ indoc! {"
+ ## User
+
+ hello
+
+ ## Assistant
+
+ HELLO
+
+ "}
+ );
+ });
+
+ // Simulate refusing the second message. The message should be truncated
+ // when a user prompt is refused.
+ refuse_next.store(true, SeqCst);
+ cx.update(|cx| thread.update(cx, |thread, cx| thread.send(vec!["world".into()], cx)))
+ .await
+ .unwrap();
+ thread.read_with(cx, |thread, cx| {
+ assert_eq!(
+ thread.to_markdown(cx),
+ indoc! {"
+ ## User
+
+ hello
+
+ ## Assistant
+
+ HELLO
+
+ "}
+ );
+ });
+ }
+
async fn run_until_first_tool_call(
thread: &Entity<AcpThread>,
cx: &mut TestAppContext,
@@ -10,7 +10,7 @@ use std::{any::Any, error::Error, fmt, path::Path, rc::Rc, sync::Arc};
use ui::{App, IconName};
use uuid::Uuid;
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct UserMessageId(Arc<str>);
impl UserMessageId {
@@ -41,18 +41,26 @@ pub trait AgentConnection {
fn resume(
&self,
_session_id: &acp::SessionId,
- _cx: &mut App,
+ _cx: &App,
) -> Option<Rc<dyn AgentSessionResume>> {
None
}
fn cancel(&self, session_id: &acp::SessionId, cx: &mut App);
- fn session_editor(
+ fn truncate(
&self,
_session_id: &acp::SessionId,
- _cx: &mut App,
- ) -> Option<Rc<dyn AgentSessionEditor>> {
+ _cx: &App,
+ ) -> Option<Rc<dyn AgentSessionTruncate>> {
+ None
+ }
+
+ fn set_title(
+ &self,
+ _session_id: &acp::SessionId,
+ _cx: &App,
+ ) -> Option<Rc<dyn AgentSessionSetTitle>> {
None
}
@@ -64,6 +72,9 @@ pub trait AgentConnection {
None
}
+ fn telemetry(&self) -> Option<Rc<dyn AgentTelemetry>> {
+ None
+ }
fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
}
@@ -73,14 +84,31 @@ impl dyn AgentConnection {
}
}
-pub trait AgentSessionEditor {
- fn truncate(&self, message_id: UserMessageId, cx: &mut App) -> Task<Result<()>>;
+pub trait AgentSessionTruncate {
+ fn run(&self, message_id: UserMessageId, cx: &mut App) -> Task<Result<()>>;
}
pub trait AgentSessionResume {
fn run(&self, cx: &mut App) -> Task<Result<acp::PromptResponse>>;
}
+pub trait AgentSessionSetTitle {
+ fn run(&self, title: SharedString, cx: &mut App) -> Task<Result<()>>;
+}
+
+pub trait AgentTelemetry {
+ /// The name of the agent used for telemetry.
+ fn agent_name(&self) -> String;
+
+ /// A representation of the current thread state that can be serialized for
+ /// storage with telemetry events.
+ fn thread_data(
+ &self,
+ session_id: &acp::SessionId,
+ cx: &mut App,
+ ) -> Task<Result<serde_json::Value>>;
+}
+
#[derive(Debug)]
pub struct AuthRequired {
pub description: Option<String>,
@@ -298,13 +326,19 @@ mod test_support {
) -> Task<gpui::Result<Entity<AcpThread>>> {
let session_id = acp::SessionId(self.sessions.lock().len().to_string().into());
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let thread = cx.new(|_cx| {
+ let thread = cx.new(|cx| {
AcpThread::new(
"Test",
self.clone(),
project,
action_log,
session_id.clone(),
+ watch::Receiver::constant(acp::PromptCapabilities {
+ image: true,
+ audio: true,
+ embedded_context: true,
+ }),
+ cx,
)
});
self.sessions.lock().insert(
@@ -358,14 +392,15 @@ mod test_support {
};
let task = cx.spawn(async move |cx| {
if let Some((tool_call, options)) = permission_request {
- let permission = thread.update(cx, |thread, cx| {
- thread.request_tool_call_authorization(
- tool_call.clone().into(),
- options.clone(),
- cx,
- )
- })?;
- permission?.await?;
+ thread
+ .update(cx, |thread, cx| {
+ thread.request_tool_call_authorization(
+ tool_call.clone().into(),
+ options.clone(),
+ cx,
+ )
+ })??
+ .await;
}
thread.update(cx, |thread, cx| {
thread.handle_session_update(update.clone(), cx).unwrap();
@@ -393,15 +428,15 @@ mod test_support {
.response_tx
.take()
{
- end_turn_tx.send(acp::StopReason::Canceled).unwrap();
+ end_turn_tx.send(acp::StopReason::Cancelled).unwrap();
}
}
- fn session_editor(
+ fn truncate(
&self,
_session_id: &agent_client_protocol::SessionId,
- _cx: &mut App,
- ) -> Option<Rc<dyn AgentSessionEditor>> {
+ _cx: &App,
+ ) -> Option<Rc<dyn AgentSessionTruncate>> {
Some(Rc::new(StubAgentSessionEditor))
}
@@ -412,8 +447,8 @@ mod test_support {
struct StubAgentSessionEditor;
- impl AgentSessionEditor for StubAgentSessionEditor {
- fn truncate(&self, _: UserMessageId, _: &mut App) -> Task<Result<()>> {
+ impl AgentSessionTruncate for StubAgentSessionEditor {
+ fn run(&self, _: UserMessageId, _: &mut App) -> Task<Result<()>> {
Task::ready(Ok(()))
}
}
@@ -1,6 +1,6 @@
use anyhow::Result;
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{MultiBuffer, PathKey};
+use editor::{MultiBuffer, PathKey, multibuffer_context_lines};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task};
use itertools::Itertools;
use language::{
@@ -28,57 +28,46 @@ impl Diff {
cx: &mut Context<Self>,
) -> Self {
let multibuffer = cx.new(|_cx| MultiBuffer::without_headers(Capability::ReadOnly));
-
let new_buffer = cx.new(|cx| Buffer::local(new_text, cx));
- let old_buffer = cx.new(|cx| Buffer::local(old_text.unwrap_or("".into()), cx));
- let new_buffer_snapshot = new_buffer.read(cx).text_snapshot();
- let buffer_diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot, cx));
-
+ let base_text = old_text.clone().unwrap_or(String::new()).into();
let task = cx.spawn({
let multibuffer = multibuffer.clone();
let path = path.clone();
+ let buffer = new_buffer.clone();
async move |_, cx| {
let language = language_registry
.language_for_file_path(&path)
.await
.log_err();
- new_buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?;
-
- let old_buffer_snapshot = old_buffer.update(cx, |buffer, cx| {
- buffer.set_language(language, cx);
- buffer.snapshot()
- })?;
+ buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?;
- buffer_diff
- .update(cx, |diff, cx| {
- diff.set_base_text(
- old_buffer_snapshot,
- Some(language_registry),
- new_buffer_snapshot,
- cx,
- )
- })?
- .await?;
+ let diff = build_buffer_diff(
+ old_text.unwrap_or("".into()).into(),
+ &buffer,
+ Some(language_registry.clone()),
+ cx,
+ )
+ .await?;
multibuffer
.update(cx, |multibuffer, cx| {
let hunk_ranges = {
- let buffer = new_buffer.read(cx);
- let diff = buffer_diff.read(cx);
+ let buffer = buffer.read(cx);
+ let diff = diff.read(cx);
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
.map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
.collect::<Vec<_>>()
};
multibuffer.set_excerpts_for_path(
- PathKey::for_buffer(&new_buffer, cx),
- new_buffer.clone(),
+ PathKey::for_buffer(&buffer, cx),
+ buffer.clone(),
hunk_ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
- multibuffer.add_diff(buffer_diff, cx);
+ multibuffer.add_diff(diff, cx);
})
.log_err();
@@ -89,23 +78,26 @@ impl Diff {
Self::Finalized(FinalizedDiff {
multibuffer,
path,
+ base_text,
+ new_buffer,
_update_diff: task,
})
}
pub fn new(buffer: Entity<Buffer>, cx: &mut Context<Self>) -> Self {
- let buffer_snapshot = buffer.read(cx).snapshot();
- let base_text = buffer_snapshot.text();
- let language_registry = buffer.read(cx).language_registry();
- let text_snapshot = buffer.read(cx).text_snapshot();
+ let buffer_text_snapshot = buffer.read(cx).text_snapshot();
+ let base_text_snapshot = buffer.read(cx).snapshot();
+ let base_text = base_text_snapshot.text();
+ debug_assert_eq!(buffer_text_snapshot.text(), base_text);
let buffer_diff = cx.new(|cx| {
- let mut diff = BufferDiff::new(&text_snapshot, cx);
- let _ = diff.set_base_text(
- buffer_snapshot.clone(),
- language_registry,
- text_snapshot,
- cx,
- );
+ let mut diff = BufferDiff::new_unchanged(&buffer_text_snapshot, base_text_snapshot);
+ let snapshot = diff.snapshot(cx);
+ let secondary_diff = cx.new(|cx| {
+ let mut diff = BufferDiff::new(&buffer_text_snapshot, cx);
+ diff.set_snapshot(snapshot, &buffer_text_snapshot, cx);
+ diff
+ });
+ diff.set_secondary_diff(secondary_diff);
diff
});
@@ -123,7 +115,7 @@ impl Diff {
diff.update(cx);
}
}),
- buffer,
+ new_buffer: buffer,
diff: buffer_diff,
revealed_ranges: Vec::new(),
update_diff: Task::ready(Ok(())),
@@ -158,9 +150,9 @@ impl Diff {
.map(|buffer| buffer.read(cx).text())
.join("\n");
let path = match self {
- Diff::Pending(PendingDiff { buffer, .. }) => {
- buffer.read(cx).file().map(|file| file.path().as_ref())
- }
+ Diff::Pending(PendingDiff {
+ new_buffer: buffer, ..
+ }) => buffer.read(cx).file().map(|file| file.path().as_ref()),
Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_path()),
};
format!(
@@ -173,12 +165,33 @@ impl Diff {
pub fn has_revealed_range(&self, cx: &App) -> bool {
self.multibuffer().read(cx).excerpt_paths().next().is_some()
}
+
+ pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool {
+ match self {
+ Diff::Pending(PendingDiff {
+ base_text,
+ new_buffer,
+ ..
+ }) => {
+ base_text.as_str() != old_text
+ || !new_buffer.read(cx).as_rope().chunks().equals_str(new_text)
+ }
+ Diff::Finalized(FinalizedDiff {
+ base_text,
+ new_buffer,
+ ..
+ }) => {
+ base_text.as_str() != old_text
+ || !new_buffer.read(cx).as_rope().chunks().equals_str(new_text)
+ }
+ }
+ }
}
pub struct PendingDiff {
multibuffer: Entity<MultiBuffer>,
base_text: Arc<String>,
- buffer: Entity<Buffer>,
+ new_buffer: Entity<Buffer>,
diff: Entity<BufferDiff>,
revealed_ranges: Vec<Range<Anchor>>,
_subscription: Subscription,
@@ -187,7 +200,7 @@ pub struct PendingDiff {
impl PendingDiff {
pub fn update(&mut self, cx: &mut Context<Diff>) {
- let buffer = self.buffer.clone();
+ let buffer = self.new_buffer.clone();
let buffer_diff = self.diff.clone();
let base_text = self.base_text.clone();
self.update_diff = cx.spawn(async move |diff, cx| {
@@ -204,7 +217,10 @@ impl PendingDiff {
)
.await?;
buffer_diff.update(cx, |diff, cx| {
- diff.set_snapshot(diff_snapshot, &text_snapshot, cx)
+ diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx);
+ diff.secondary_diff().unwrap().update(cx, |diff, cx| {
+ diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx);
+ });
})?;
diff.update(cx, |diff, cx| {
if let Diff::Pending(diff) = diff {
@@ -222,10 +238,10 @@ impl PendingDiff {
fn finalize(&self, cx: &mut Context<Diff>) -> FinalizedDiff {
let ranges = self.excerpt_ranges(cx);
let base_text = self.base_text.clone();
- let language_registry = self.buffer.read(cx).language_registry().clone();
+ let language_registry = self.new_buffer.read(cx).language_registry();
let path = self
- .buffer
+ .new_buffer
.read(cx)
.file()
.map(|file| file.path().as_ref())
@@ -234,12 +250,12 @@ impl PendingDiff {
// Replace the buffer in the multibuffer with the snapshot
let buffer = cx.new(|cx| {
- let language = self.buffer.read(cx).language().cloned();
+ let language = self.new_buffer.read(cx).language().cloned();
let buffer = TextBuffer::new_normalized(
0,
cx.entity_id().as_non_zero_u64().into(),
- self.buffer.read(cx).line_ending(),
- self.buffer.read(cx).as_rope().clone(),
+ self.new_buffer.read(cx).line_ending(),
+ self.new_buffer.read(cx).as_rope().clone(),
);
let mut buffer = Buffer::build(buffer, None, Capability::ReadWrite);
buffer.set_language(language, cx);
@@ -248,7 +264,6 @@ impl PendingDiff {
let buffer_diff = cx.spawn({
let buffer = buffer.clone();
- let language_registry = language_registry.clone();
async move |_this, cx| {
build_buffer_diff(base_text, &buffer, language_registry, cx).await
}
@@ -264,7 +279,7 @@ impl PendingDiff {
path_key,
buffer,
ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
multibuffer.add_diff(buffer_diff.clone(), cx);
@@ -276,7 +291,9 @@ impl PendingDiff {
FinalizedDiff {
path,
+ base_text: self.base_text.clone(),
multibuffer: self.multibuffer.clone(),
+ new_buffer: self.new_buffer.clone(),
_update_diff: update_diff,
}
}
@@ -285,10 +302,10 @@ impl PendingDiff {
let ranges = self.excerpt_ranges(cx);
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_excerpts_for_path(
- PathKey::for_buffer(&self.buffer, cx),
- self.buffer.clone(),
+ PathKey::for_buffer(&self.new_buffer, cx),
+ self.new_buffer.clone(),
ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
let end = multibuffer.len(cx);
@@ -298,7 +315,7 @@ impl PendingDiff {
}
fn excerpt_ranges(&self, cx: &App) -> Vec<Range<Point>> {
- let buffer = self.buffer.read(cx);
+ let buffer = self.new_buffer.read(cx);
let diff = self.diff.read(cx);
let mut ranges = diff
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
@@ -332,6 +349,8 @@ impl PendingDiff {
pub struct FinalizedDiff {
path: PathBuf,
+ base_text: Arc<String>,
+ new_buffer: Entity<Buffer>,
multibuffer: Entity<MultiBuffer>,
_update_diff: Task<Result<()>>,
}
@@ -385,3 +404,21 @@ async fn build_buffer_diff(
diff
})
}
+
+#[cfg(test)]
+mod tests {
+ use gpui::{AppContext as _, TestAppContext};
+ use language::Buffer;
+
+ use crate::Diff;
+
+ #[gpui::test]
+ async fn test_pending_diff(cx: &mut TestAppContext) {
+ let buffer = cx.new(|cx| Buffer::local("hello!", cx));
+ let _diff = cx.new(|cx| Diff::new(buffer.clone(), cx));
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_text("HELLO!", cx);
+ });
+ cx.run_until_parked();
+ }
+}
@@ -1,32 +1,33 @@
-use agent::ThreadId;
+use agent_client_protocol as acp;
use anyhow::{Context as _, Result, bail};
use file_icons::FileIcons;
use prompt_store::{PromptId, UserPromptId};
use serde::{Deserialize, Serialize};
use std::{
fmt,
- ops::Range,
+ ops::RangeInclusive,
path::{Path, PathBuf},
str::FromStr,
};
use ui::{App, IconName, SharedString};
use url::Url;
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub enum MentionUri {
File {
abs_path: PathBuf,
},
+ PastedImage,
Directory {
abs_path: PathBuf,
},
Symbol {
- path: PathBuf,
+ abs_path: PathBuf,
name: String,
- line_range: Range<u32>,
+ line_range: RangeInclusive<u32>,
},
Thread {
- id: ThreadId,
+ id: acp::SessionId,
name: String,
},
TextThread {
@@ -38,8 +39,9 @@ pub enum MentionUri {
name: String,
},
Selection {
- path: PathBuf,
- line_range: Range<u32>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ abs_path: Option<PathBuf>,
+ line_range: RangeInclusive<u32>,
},
Fetch {
url: Url,
@@ -48,36 +50,44 @@ pub enum MentionUri {
impl MentionUri {
pub fn parse(input: &str) -> Result<Self> {
+ fn parse_line_range(fragment: &str) -> Result<RangeInclusive<u32>> {
+ let range = fragment
+ .strip_prefix("L")
+ .context("Line range must start with \"L\"")?;
+ let (start, end) = range
+ .split_once(":")
+ .context("Line range must use colon as separator")?;
+ let range = start
+ .parse::<u32>()
+ .context("Parsing line range start")?
+ .checked_sub(1)
+ .context("Line numbers should be 1-based")?
+ ..=end
+ .parse::<u32>()
+ .context("Parsing line range end")?
+ .checked_sub(1)
+ .context("Line numbers should be 1-based")?;
+ Ok(range)
+ }
+
let url = url::Url::parse(input)?;
let path = url.path();
match url.scheme() {
"file" => {
let path = url.to_file_path().ok().context("Extracting file path")?;
if let Some(fragment) = url.fragment() {
- let range = fragment
- .strip_prefix("L")
- .context("Line range must start with \"L\"")?;
- let (start, end) = range
- .split_once(":")
- .context("Line range must use colon as separator")?;
- let line_range = start
- .parse::<u32>()
- .context("Parsing line range start")?
- .checked_sub(1)
- .context("Line numbers should be 1-based")?
- ..end
- .parse::<u32>()
- .context("Parsing line range end")?
- .checked_sub(1)
- .context("Line numbers should be 1-based")?;
+ let line_range = parse_line_range(fragment)?;
if let Some(name) = single_query_param(&url, "symbol")? {
Ok(Self::Symbol {
name,
- path,
+ abs_path: path,
line_range,
})
} else {
- Ok(Self::Selection { path, line_range })
+ Ok(Self::Selection {
+ abs_path: Some(path),
+ line_range,
+ })
}
} else if input.ends_with("/") {
Ok(Self::Directory { abs_path: path })
@@ -89,7 +99,7 @@ impl MentionUri {
if let Some(thread_id) = path.strip_prefix("/agent/thread/") {
let name = single_query_param(&url, "name")?.context("Missing thread name")?;
Ok(Self::Thread {
- id: thread_id.into(),
+ id: acp::SessionId(thread_id.into()),
name,
})
} else if let Some(path) = path.strip_prefix("/agent/text-thread/") {
@@ -105,6 +115,17 @@ impl MentionUri {
id: rule_id.into(),
name,
})
+ } else if path.starts_with("/agent/pasted-image") {
+ Ok(Self::PastedImage)
+ } else if path.starts_with("/agent/untitled-buffer") {
+ let fragment = url
+ .fragment()
+ .context("Missing fragment for untitled buffer selection")?;
+ let line_range = parse_line_range(fragment)?;
+ Ok(Self::Selection {
+ abs_path: None,
+ line_range,
+ })
} else {
bail!("invalid zed url: {:?}", input);
}
@@ -121,13 +142,16 @@ impl MentionUri {
.unwrap_or_default()
.to_string_lossy()
.into_owned(),
+ MentionUri::PastedImage => "Image".to_string(),
MentionUri::Symbol { name, .. } => name.clone(),
MentionUri::Thread { name, .. } => name.clone(),
MentionUri::TextThread { name, .. } => name.clone(),
MentionUri::Rule { name, .. } => name.clone(),
MentionUri::Selection {
- path, line_range, ..
- } => selection_name(path, line_range),
+ abs_path: path,
+ line_range,
+ ..
+ } => selection_name(path.as_deref(), line_range),
MentionUri::Fetch { url } => url.to_string(),
}
}
@@ -137,6 +161,7 @@ impl MentionUri {
MentionUri::File { abs_path } => {
FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into())
}
+ MentionUri::PastedImage => IconName::Image.path().into(),
MentionUri::Directory { .. } => FileIcons::get_folder_icon(false, cx)
.unwrap_or_else(|| IconName::Folder.path().into()),
MentionUri::Symbol { .. } => IconName::Code.path().into(),
@@ -157,29 +182,40 @@ impl MentionUri {
MentionUri::File { abs_path } => {
Url::from_file_path(abs_path).expect("mention path should be absolute")
}
+ MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
MentionUri::Directory { abs_path } => {
Url::from_directory_path(abs_path).expect("mention path should be absolute")
}
MentionUri::Symbol {
- path,
+ abs_path,
name,
line_range,
} => {
- let mut url = Url::from_file_path(path).expect("mention path should be absolute");
+ let mut url =
+ Url::from_file_path(abs_path).expect("mention path should be absolute");
url.query_pairs_mut().append_pair("symbol", name);
url.set_fragment(Some(&format!(
"L{}:{}",
- line_range.start + 1,
- line_range.end + 1
+ line_range.start() + 1,
+ line_range.end() + 1
)));
url
}
- MentionUri::Selection { path, line_range } => {
- let mut url = Url::from_file_path(path).expect("mention path should be absolute");
+ MentionUri::Selection {
+ abs_path: path,
+ line_range,
+ } => {
+ let mut url = if let Some(path) = path {
+ Url::from_file_path(path).expect("mention path should be absolute")
+ } else {
+ let mut url = Url::parse("zed:///").unwrap();
+ url.set_path("/agent/untitled-buffer");
+ url
+ };
url.set_fragment(Some(&format!(
"L{}:{}",
- line_range.start + 1,
- line_range.end + 1
+ line_range.start() + 1,
+ line_range.end() + 1
)));
url
}
@@ -191,7 +227,10 @@ impl MentionUri {
}
MentionUri::TextThread { path, name } => {
let mut url = Url::parse("zed:///").unwrap();
- url.set_path(&format!("/agent/text-thread/{}", path.to_string_lossy()));
+ url.set_path(&format!(
+ "/agent/text-thread/{}",
+ path.to_string_lossy().trim_start_matches('/')
+ ));
url.query_pairs_mut().append_pair("name", name);
url
}
@@ -237,12 +276,14 @@ fn single_query_param(url: &Url, name: &'static str) -> Result<Option<String>> {
}
}
-pub fn selection_name(path: &Path, line_range: &Range<u32>) -> String {
+pub fn selection_name(path: Option<&Path>, line_range: &RangeInclusive<u32>) -> String {
format!(
"{} ({}:{})",
- path.file_name().unwrap_or_default().display(),
- line_range.start + 1,
- line_range.end + 1
+ path.and_then(|path| path.file_name())
+ .unwrap_or("Untitled".as_ref())
+ .display(),
+ *line_range.start() + 1,
+ *line_range.end() + 1
)
}
@@ -302,14 +343,14 @@ mod tests {
let parsed = MentionUri::parse(symbol_uri).unwrap();
match &parsed {
MentionUri::Symbol {
- path,
+ abs_path: path,
name,
line_range,
} => {
assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs"));
assert_eq!(name, "MySymbol");
- assert_eq!(line_range.start, 9);
- assert_eq!(line_range.end, 19);
+ assert_eq!(line_range.start(), &9);
+ assert_eq!(line_range.end(), &19);
}
_ => panic!("Expected Symbol variant"),
}
@@ -321,16 +362,39 @@ mod tests {
let selection_uri = uri!("file:///path/to/file.rs#L5:15");
let parsed = MentionUri::parse(selection_uri).unwrap();
match &parsed {
- MentionUri::Selection { path, line_range } => {
- assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs"));
- assert_eq!(line_range.start, 4);
- assert_eq!(line_range.end, 14);
+ MentionUri::Selection {
+ abs_path: path,
+ line_range,
+ } => {
+ assert_eq!(
+ path.as_ref().unwrap().to_str().unwrap(),
+ path!("/path/to/file.rs")
+ );
+ assert_eq!(line_range.start(), &4);
+ assert_eq!(line_range.end(), &14);
}
_ => panic!("Expected Selection variant"),
}
assert_eq!(parsed.to_uri().to_string(), selection_uri);
}
+ #[test]
+ fn test_parse_untitled_selection_uri() {
+ let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10");
+ let parsed = MentionUri::parse(selection_uri).unwrap();
+ match &parsed {
+ MentionUri::Selection {
+ abs_path: None,
+ line_range,
+ } => {
+ assert_eq!(line_range.start(), &0);
+ assert_eq!(line_range.end(), &9);
+ }
+ _ => panic!("Expected Selection variant without path"),
+ }
+ assert_eq!(parsed.to_uri().to_string(), selection_uri);
+ }
+
#[test]
fn test_parse_thread_uri() {
let thread_uri = "zed:///agent/thread/session123?name=Thread+name";
@@ -1,34 +1,43 @@
-use gpui::{App, AppContext, Context, Entity};
+use agent_client_protocol as acp;
+
+use futures::{FutureExt as _, future::Shared};
+use gpui::{App, AppContext, Context, Entity, Task};
use language::LanguageRegistry;
use markdown::Markdown;
use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant};
pub struct Terminal {
+ id: acp::TerminalId,
command: Entity<Markdown>,
working_dir: Option<PathBuf>,
terminal: Entity<terminal::Terminal>,
started_at: Instant,
output: Option<TerminalOutput>,
+ output_byte_limit: Option<usize>,
+ _output_task: Shared<Task<acp::TerminalExitStatus>>,
}
pub struct TerminalOutput {
pub ended_at: Instant,
pub exit_status: Option<ExitStatus>,
- pub was_content_truncated: bool,
+ pub content: String,
pub original_content_len: usize,
pub content_line_count: usize,
- pub finished_with_empty_output: bool,
}
impl Terminal {
pub fn new(
+ id: acp::TerminalId,
command: String,
working_dir: Option<PathBuf>,
+ output_byte_limit: Option<usize>,
terminal: Entity<terminal::Terminal>,
language_registry: Arc<LanguageRegistry>,
cx: &mut Context<Self>,
) -> Self {
+ let command_task = terminal.read(cx).wait_for_completed_task(cx);
Self {
+ id,
command: cx.new(|cx| {
Markdown::new(
format!("```\n{}\n```", command).into(),
@@ -41,27 +50,93 @@ impl Terminal {
terminal,
started_at: Instant::now(),
output: None,
+ output_byte_limit,
+ _output_task: cx
+ .spawn(async move |this, cx| {
+ let exit_status = command_task.await;
+
+ this.update(cx, |this, cx| {
+ let (content, original_content_len) = this.truncated_output(cx);
+ let content_line_count = this.terminal.read(cx).total_lines();
+
+ this.output = Some(TerminalOutput {
+ ended_at: Instant::now(),
+ exit_status,
+ content,
+ original_content_len,
+ content_line_count,
+ });
+ cx.notify();
+ })
+ .ok();
+
+ let exit_status = exit_status.map(portable_pty::ExitStatus::from);
+
+ acp::TerminalExitStatus {
+ exit_code: exit_status.as_ref().map(|e| e.exit_code()),
+ signal: exit_status.and_then(|e| e.signal().map(Into::into)),
+ }
+ })
+ .shared(),
}
}
- pub fn finish(
- &mut self,
- exit_status: Option<ExitStatus>,
- original_content_len: usize,
- truncated_content_len: usize,
- content_line_count: usize,
- finished_with_empty_output: bool,
- cx: &mut Context<Self>,
- ) {
- self.output = Some(TerminalOutput {
- ended_at: Instant::now(),
- exit_status,
- was_content_truncated: truncated_content_len < original_content_len,
- original_content_len,
- content_line_count,
- finished_with_empty_output,
+ pub fn id(&self) -> &acp::TerminalId {
+ &self.id
+ }
+
+ pub fn wait_for_exit(&self) -> Shared<Task<acp::TerminalExitStatus>> {
+ self._output_task.clone()
+ }
+
+ pub fn kill(&mut self, cx: &mut App) {
+ self.terminal.update(cx, |terminal, _cx| {
+ terminal.kill_active_task();
});
- cx.notify();
+ }
+
+ pub fn current_output(&self, cx: &App) -> acp::TerminalOutputResponse {
+ if let Some(output) = self.output.as_ref() {
+ let exit_status = output.exit_status.map(portable_pty::ExitStatus::from);
+
+ acp::TerminalOutputResponse {
+ output: output.content.clone(),
+ truncated: output.original_content_len > output.content.len(),
+ exit_status: Some(acp::TerminalExitStatus {
+ exit_code: exit_status.as_ref().map(|e| e.exit_code()),
+ signal: exit_status.and_then(|e| e.signal().map(Into::into)),
+ }),
+ }
+ } else {
+ let (current_content, original_len) = self.truncated_output(cx);
+
+ acp::TerminalOutputResponse {
+ truncated: current_content.len() < original_len,
+ output: current_content,
+ exit_status: None,
+ }
+ }
+ }
+
+ fn truncated_output(&self, cx: &App) -> (String, usize) {
+ let terminal = self.terminal.read(cx);
+ let mut content = terminal.get_content();
+
+ let original_content_len = content.len();
+
+ if let Some(limit) = self.output_byte_limit
+ && content.len() > limit
+ {
+ let mut end_ix = limit.min(content.len());
+ while !content.is_char_boundary(end_ix) {
+ end_ix -= 1;
+ }
+ // Don't truncate mid-line, clear the remainder of the last line
+ end_ix = content[..end_ix].rfind('\n').unwrap_or(end_ix);
+ content.truncate(end_ix);
+ }
+
+ (content, original_content_len)
}
pub fn command(&self) -> &Entity<Markdown> {
@@ -0,0 +1,29 @@
+[package]
+name = "acp_tools"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/acp_tools.rs"
+doctest = false
+
+[dependencies]
+agent-client-protocol.workspace = true
+collections.workspace = true
+gpui.workspace = true
+language.workspace = true
+markdown.workspace = true
+project.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+settings.workspace = true
+theme.workspace = true
+ui.workspace = true
+util.workspace = true
+workspace-hack.workspace = true
+workspace.workspace = true
@@ -0,0 +1,494 @@
+use std::{
+ cell::RefCell,
+ collections::HashSet,
+ fmt::Display,
+ rc::{Rc, Weak},
+ sync::Arc,
+};
+
+use agent_client_protocol as acp;
+use collections::HashMap;
+use gpui::{
+ App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState,
+ StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*,
+};
+use language::LanguageRegistry;
+use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
+use project::Project;
+use settings::Settings;
+use theme::ThemeSettings;
+use ui::prelude::*;
+use util::ResultExt as _;
+use workspace::{Item, Workspace};
+
+actions!(dev, [OpenAcpLogs]);
+
+pub fn init(cx: &mut App) {
+ cx.observe_new(
+ |workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| {
+ workspace.register_action(|workspace, _: &OpenAcpLogs, window, cx| {
+ let acp_tools =
+ Box::new(cx.new(|cx| AcpTools::new(workspace.project().clone(), cx)));
+ workspace.add_item_to_active_pane(acp_tools, None, true, window, cx);
+ });
+ },
+ )
+ .detach();
+}
+
+struct GlobalAcpConnectionRegistry(Entity<AcpConnectionRegistry>);
+
+impl Global for GlobalAcpConnectionRegistry {}
+
+#[derive(Default)]
+pub struct AcpConnectionRegistry {
+ active_connection: RefCell<Option<ActiveConnection>>,
+}
+
+struct ActiveConnection {
+ server_name: SharedString,
+ connection: Weak<acp::ClientSideConnection>,
+}
+
+impl AcpConnectionRegistry {
+ pub fn default_global(cx: &mut App) -> Entity<Self> {
+ if cx.has_global::<GlobalAcpConnectionRegistry>() {
+ cx.global::<GlobalAcpConnectionRegistry>().0.clone()
+ } else {
+ let registry = cx.new(|_cx| AcpConnectionRegistry::default());
+ cx.set_global(GlobalAcpConnectionRegistry(registry.clone()));
+ registry
+ }
+ }
+
+ pub fn set_active_connection(
+ &self,
+ server_name: impl Into<SharedString>,
+ connection: &Rc<acp::ClientSideConnection>,
+ cx: &mut Context<Self>,
+ ) {
+ self.active_connection.replace(Some(ActiveConnection {
+ server_name: server_name.into(),
+ connection: Rc::downgrade(connection),
+ }));
+ cx.notify();
+ }
+}
+
+struct AcpTools {
+ project: Entity<Project>,
+ focus_handle: FocusHandle,
+ expanded: HashSet<usize>,
+ watched_connection: Option<WatchedConnection>,
+ connection_registry: Entity<AcpConnectionRegistry>,
+ _subscription: Subscription,
+}
+
+struct WatchedConnection {
+ server_name: SharedString,
+ messages: Vec<WatchedConnectionMessage>,
+ list_state: ListState,
+ connection: Weak<acp::ClientSideConnection>,
+ incoming_request_methods: HashMap<i32, Arc<str>>,
+ outgoing_request_methods: HashMap<i32, Arc<str>>,
+ _task: Task<()>,
+}
+
+impl AcpTools {
+ fn new(project: Entity<Project>, cx: &mut Context<Self>) -> Self {
+ let connection_registry = AcpConnectionRegistry::default_global(cx);
+
+ let subscription = cx.observe(&connection_registry, |this, _, cx| {
+ this.update_connection(cx);
+ cx.notify();
+ });
+
+ let mut this = Self {
+ project,
+ focus_handle: cx.focus_handle(),
+ expanded: HashSet::default(),
+ watched_connection: None,
+ connection_registry,
+ _subscription: subscription,
+ };
+ this.update_connection(cx);
+ this
+ }
+
+ fn update_connection(&mut self, cx: &mut Context<Self>) {
+ let active_connection = self.connection_registry.read(cx).active_connection.borrow();
+ let Some(active_connection) = active_connection.as_ref() else {
+ return;
+ };
+
+ if let Some(watched_connection) = self.watched_connection.as_ref() {
+ if Weak::ptr_eq(
+ &watched_connection.connection,
+ &active_connection.connection,
+ ) {
+ return;
+ }
+ }
+
+ if let Some(connection) = active_connection.connection.upgrade() {
+ let mut receiver = connection.subscribe();
+ let task = cx.spawn(async move |this, cx| {
+ while let Ok(message) = receiver.recv().await {
+ this.update(cx, |this, cx| {
+ this.push_stream_message(message, cx);
+ })
+ .ok();
+ }
+ });
+
+ self.watched_connection = Some(WatchedConnection {
+ server_name: active_connection.server_name.clone(),
+ messages: vec![],
+ list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)),
+ connection: active_connection.connection.clone(),
+ incoming_request_methods: HashMap::default(),
+ outgoing_request_methods: HashMap::default(),
+ _task: task,
+ });
+ }
+ }
+
+ fn push_stream_message(&mut self, stream_message: acp::StreamMessage, cx: &mut Context<Self>) {
+ let Some(connection) = self.watched_connection.as_mut() else {
+ return;
+ };
+ let language_registry = self.project.read(cx).languages().clone();
+ let index = connection.messages.len();
+
+ let (request_id, method, message_type, params) = match stream_message.message {
+ acp::StreamMessageContent::Request { id, method, params } => {
+ let method_map = match stream_message.direction {
+ acp::StreamMessageDirection::Incoming => {
+ &mut connection.incoming_request_methods
+ }
+ acp::StreamMessageDirection::Outgoing => {
+ &mut connection.outgoing_request_methods
+ }
+ };
+
+ method_map.insert(id, method.clone());
+ (Some(id), method.into(), MessageType::Request, Ok(params))
+ }
+ acp::StreamMessageContent::Response { id, result } => {
+ let method_map = match stream_message.direction {
+ acp::StreamMessageDirection::Incoming => {
+ &mut connection.outgoing_request_methods
+ }
+ acp::StreamMessageDirection::Outgoing => {
+ &mut connection.incoming_request_methods
+ }
+ };
+
+ if let Some(method) = method_map.remove(&id) {
+ (Some(id), method.into(), MessageType::Response, result)
+ } else {
+ (
+ Some(id),
+ "[unrecognized response]".into(),
+ MessageType::Response,
+ result,
+ )
+ }
+ }
+ acp::StreamMessageContent::Notification { method, params } => {
+ (None, method.into(), MessageType::Notification, Ok(params))
+ }
+ };
+
+ let message = WatchedConnectionMessage {
+ name: method,
+ message_type,
+ request_id,
+ direction: stream_message.direction,
+ collapsed_params_md: match params.as_ref() {
+ Ok(params) => params
+ .as_ref()
+ .map(|params| collapsed_params_md(params, &language_registry, cx)),
+ Err(err) => {
+ if let Ok(err) = &serde_json::to_value(err) {
+ Some(collapsed_params_md(&err, &language_registry, cx))
+ } else {
+ None
+ }
+ }
+ },
+
+ expanded_params_md: None,
+ params,
+ };
+
+ connection.messages.push(message);
+ connection.list_state.splice(index..index, 1);
+ cx.notify();
+ }
+
+ fn render_message(
+ &mut self,
+ index: usize,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> AnyElement {
+ let Some(connection) = self.watched_connection.as_ref() else {
+ return Empty.into_any();
+ };
+
+ let Some(message) = connection.messages.get(index) else {
+ return Empty.into_any();
+ };
+
+ let base_size = TextSize::Editor.rems(cx);
+
+ let theme_settings = ThemeSettings::get_global(cx);
+ let text_style = window.text_style();
+
+ let colors = cx.theme().colors();
+ let expanded = self.expanded.contains(&index);
+
+ v_flex()
+ .w_full()
+ .px_4()
+ .py_3()
+ .border_color(colors.border)
+ .border_b_1()
+ .gap_2()
+ .items_start()
+ .font_buffer(cx)
+ .text_size(base_size)
+ .id(index)
+ .group("message")
+ .hover(|this| this.bg(colors.element_background.opacity(0.5)))
+ .on_click(cx.listener(move |this, _, _, cx| {
+ if this.expanded.contains(&index) {
+ this.expanded.remove(&index);
+ } else {
+ this.expanded.insert(index);
+ let Some(connection) = &mut this.watched_connection else {
+ return;
+ };
+ let Some(message) = connection.messages.get_mut(index) else {
+ return;
+ };
+ message.expanded(this.project.read(cx).languages().clone(), cx);
+ connection.list_state.scroll_to_reveal_item(index);
+ }
+ cx.notify()
+ }))
+ .child(
+ h_flex()
+ .w_full()
+ .gap_2()
+ .items_center()
+ .flex_shrink_0()
+ .child(match message.direction {
+ acp::StreamMessageDirection::Incoming => {
+ ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error)
+ }
+ acp::StreamMessageDirection::Outgoing => {
+ ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success)
+ }
+ })
+ .child(
+ Label::new(message.name.clone())
+ .buffer_font(cx)
+ .color(Color::Muted),
+ )
+ .child(div().flex_1())
+ .child(
+ div()
+ .child(ui::Chip::new(message.message_type.to_string()))
+ .visible_on_hover("message"),
+ )
+ .children(
+ message
+ .request_id
+ .map(|req_id| div().child(ui::Chip::new(req_id.to_string()))),
+ ),
+ )
+ // I'm aware using markdown is a hack. Trying to get something working for the demo.
+ // Will clean up soon!
+ .when_some(
+ if expanded {
+ message.expanded_params_md.clone()
+ } else {
+ message.collapsed_params_md.clone()
+ },
+ |this, params| {
+ this.child(
+ div().pl_6().w_full().child(
+ MarkdownElement::new(
+ params,
+ MarkdownStyle {
+ base_text_style: text_style,
+ selection_background_color: colors.element_selection_background,
+ syntax: cx.theme().syntax().clone(),
+ code_block_overflow_x_scroll: true,
+ code_block: StyleRefinement {
+ text: Some(TextStyleRefinement {
+ font_family: Some(
+ theme_settings.buffer_font.family.clone(),
+ ),
+ font_size: Some((base_size * 0.8).into()),
+ ..Default::default()
+ }),
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ )
+ .code_block_renderer(
+ CodeBlockRenderer::Default {
+ copy_button: false,
+ copy_button_on_hover: expanded,
+ border: false,
+ },
+ ),
+ ),
+ )
+ },
+ )
+ .into_any()
+ }
+}
+
+struct WatchedConnectionMessage {
+ name: SharedString,
+ request_id: Option<i32>,
+ direction: acp::StreamMessageDirection,
+ message_type: MessageType,
+ params: Result<Option<serde_json::Value>, acp::Error>,
+ collapsed_params_md: Option<Entity<Markdown>>,
+ expanded_params_md: Option<Entity<Markdown>>,
+}
+
+impl WatchedConnectionMessage {
+ fn expanded(&mut self, language_registry: Arc<LanguageRegistry>, cx: &mut App) {
+ let params_md = match &self.params {
+ Ok(Some(params)) => Some(expanded_params_md(params, &language_registry, cx)),
+ Err(err) => {
+ if let Some(err) = &serde_json::to_value(err).log_err() {
+ Some(expanded_params_md(&err, &language_registry, cx))
+ } else {
+ None
+ }
+ }
+ _ => None,
+ };
+ self.expanded_params_md = params_md;
+ }
+}
+
+fn collapsed_params_md(
+ params: &serde_json::Value,
+ language_registry: &Arc<LanguageRegistry>,
+ cx: &mut App,
+) -> Entity<Markdown> {
+ let params_json = serde_json::to_string(params).unwrap_or_default();
+ let mut spaced_out_json = String::with_capacity(params_json.len() + params_json.len() / 4);
+
+ for ch in params_json.chars() {
+ match ch {
+ '{' => spaced_out_json.push_str("{ "),
+ '}' => spaced_out_json.push_str(" }"),
+ ':' => spaced_out_json.push_str(": "),
+ ',' => spaced_out_json.push_str(", "),
+ c => spaced_out_json.push(c),
+ }
+ }
+
+ let params_md = format!("```json\n{}\n```", spaced_out_json);
+ cx.new(|cx| Markdown::new(params_md.into(), Some(language_registry.clone()), None, cx))
+}
+
+fn expanded_params_md(
+ params: &serde_json::Value,
+ language_registry: &Arc<LanguageRegistry>,
+ cx: &mut App,
+) -> Entity<Markdown> {
+ let params_json = serde_json::to_string_pretty(params).unwrap_or_default();
+ let params_md = format!("```json\n{}\n```", params_json);
+ cx.new(|cx| Markdown::new(params_md.into(), Some(language_registry.clone()), None, cx))
+}
+
+enum MessageType {
+ Request,
+ Response,
+ Notification,
+}
+
+impl Display for MessageType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ MessageType::Request => write!(f, "Request"),
+ MessageType::Response => write!(f, "Response"),
+ MessageType::Notification => write!(f, "Notification"),
+ }
+ }
+}
+
+enum AcpToolsEvent {}
+
+impl EventEmitter<AcpToolsEvent> for AcpTools {}
+
+impl Item for AcpTools {
+ type Event = AcpToolsEvent;
+
+ fn tab_content_text(&self, _detail: usize, _cx: &App) -> ui::SharedString {
+ format!(
+ "ACP: {}",
+ self.watched_connection
+ .as_ref()
+ .map_or("Disconnected", |connection| &connection.server_name)
+ )
+ .into()
+ }
+
+ fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
+ Some(ui::Icon::new(IconName::Thread))
+ }
+}
+
+impl Focusable for AcpTools {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl Render for AcpTools {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ v_flex()
+ .track_focus(&self.focus_handle)
+ .size_full()
+ .bg(cx.theme().colors().editor_background)
+ .child(match self.watched_connection.as_ref() {
+ Some(connection) => {
+ if connection.messages.is_empty() {
+ h_flex()
+ .size_full()
+ .justify_center()
+ .items_center()
+ .child("No messages recorded yet")
+ .into_any()
+ } else {
+ list(
+ connection.list_state.clone(),
+ cx.processor(Self::render_message),
+ )
+ .with_sizing_behavior(gpui::ListSizingBehavior::Auto)
+ .flex_grow()
+ .into_any()
+ }
+ }
+ None => h_flex()
+ .size_full()
+ .justify_center()
+ .items_center()
+ .child("No active connection")
+ .into_any(),
+ })
+ }
+}
@@ -161,7 +161,7 @@ impl ActionLog {
diff_base,
last_seen_base,
unreviewed_edits,
- snapshot: text_snapshot.clone(),
+ snapshot: text_snapshot,
status,
version: buffer.read(cx).version(),
diff,
@@ -190,7 +190,7 @@ impl ActionLog {
cx: &mut Context<Self>,
) {
match event {
- BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
+ BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
BufferEvent::FileHandleChanged => {
self.handle_buffer_file_changed(buffer, cx);
}
@@ -264,15 +264,14 @@ impl ActionLog {
if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
cx.update(|cx| {
let mut old_head = buffer_repo.read(cx).head_commit.clone();
- Some(cx.subscribe(git_diff, move |_, event, cx| match event {
- buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
+ Some(cx.subscribe(git_diff, move |_, event, cx| {
+ if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
let new_head = buffer_repo.read(cx).head_commit.clone();
if new_head != old_head {
old_head = new_head;
git_diff_updates_tx.send(()).ok();
}
}
- _ => {}
}))
})?
} else {
@@ -462,7 +461,7 @@ impl ActionLog {
anyhow::Ok((
tracked_buffer.diff.clone(),
buffer.read(cx).language().cloned(),
- buffer.read(cx).language_registry().clone(),
+ buffer.read(cx).language_registry(),
))
})??;
let diff_snapshot = BufferDiff::update_diff(
@@ -530,12 +529,12 @@ impl ActionLog {
/// Mark a buffer as created by agent, so we can refresh it in the context
pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
- self.track_buffer_internal(buffer.clone(), true, cx);
+ self.track_buffer_internal(buffer, true, cx);
}
/// Mark a buffer as edited by agent, so we can refresh it in the context
pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
- let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
+ let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
if let TrackedBufferStatus::Deleted = tracked_buffer.status {
tracked_buffer.status = TrackedBufferStatus::Modified;
}
@@ -2219,7 +2218,7 @@ mod tests {
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
for _ in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..25 => {
action_log.update(cx, |log, cx| {
let range = buffer.read(cx).random_byte_range(0, &mut rng);
@@ -2238,7 +2237,7 @@ mod tests {
.unwrap();
}
_ => {
- let is_agent_edit = rng.gen_bool(0.5);
+ let is_agent_edit = rng.random_bool(0.5);
if is_agent_edit {
log::info!("agent edit");
} else {
@@ -2253,7 +2252,7 @@ mod tests {
}
}
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
quiesce(&action_log, &buffer, cx);
}
}
@@ -2426,7 +2425,7 @@ mod tests {
assert_eq!(
unreviewed_hunks(&action_log, cx),
vec![(
- buffer.clone(),
+ buffer,
vec![
HunkStatus {
range: Point::new(6, 0)..Point::new(7, 0),
@@ -1,11 +1,10 @@
use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage, VersionCheckType};
use editor::Editor;
-use extension_host::ExtensionStore;
+use extension_host::{ExtensionOperation, ExtensionStore};
use futures::StreamExt;
use gpui::{
- Animation, AnimationExt as _, App, Context, CursorStyle, Entity, EventEmitter,
- InteractiveElement as _, ParentElement as _, Render, SharedString, StatefulInteractiveElement,
- Styled, Transformation, Window, actions, percentage,
+ App, Context, CursorStyle, Entity, EventEmitter, InteractiveElement as _, ParentElement as _,
+ Render, SharedString, StatefulInteractiveElement, Styled, Window, actions,
};
use language::{
BinaryStatus, LanguageRegistry, LanguageServerId, LanguageServerName,
@@ -25,7 +24,10 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
-use ui::{ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*};
+use ui::{
+ ButtonLike, CommonAnimationExt, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip,
+ prelude::*,
+};
use util::truncate_and_trailoff;
use workspace::{StatusItemView, Workspace, item::ItemHandle};
@@ -82,7 +84,6 @@ impl ActivityIndicator {
) -> Entity<ActivityIndicator> {
let project = workspace.project().clone();
let auto_updater = AutoUpdater::get(cx);
- let workspace_handle = cx.entity();
let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(async move |this, cx| {
@@ -100,29 +101,10 @@ impl ActivityIndicator {
})
.detach();
- cx.subscribe_in(
- &workspace_handle,
- window,
- |activity_indicator, _, event, window, cx| match event {
- workspace::Event::ClearActivityIndicator { .. } => {
- if activity_indicator.statuses.pop().is_some() {
- activity_indicator.dismiss_error_message(
- &DismissErrorMessage,
- window,
- cx,
- );
- cx.notify();
- }
- }
- _ => {}
- },
- )
- .detach();
-
cx.subscribe(
&project.read(cx).lsp_store(),
- |activity_indicator, _, event, cx| match event {
- LspStoreEvent::LanguageServerUpdate { name, message, .. } => {
+ |activity_indicator, _, event, cx| {
+ if let LspStoreEvent::LanguageServerUpdate { name, message, .. } = event {
if let proto::update_language_server::Variant::StatusUpdate(status_update) =
message
{
@@ -191,7 +173,6 @@ impl ActivityIndicator {
}
cx.notify()
}
- _ => {}
},
)
.detach();
@@ -206,9 +187,10 @@ impl ActivityIndicator {
cx.subscribe(
&project.read(cx).git_store().clone(),
- |_, _, event: &GitStoreEvent, cx| match event {
- project::git_store::GitStoreEvent::JobsUpdated => cx.notify(),
- _ => {}
+ |_, _, event: &GitStoreEvent, cx| {
+ if let project::git_store::GitStoreEvent::JobsUpdated = event {
+ cx.notify()
+ }
},
)
.detach();
@@ -230,7 +212,8 @@ impl ActivityIndicator {
server_name,
status,
} => {
- let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
+ let create_buffer =
+ project.update(cx, |project, cx| project.create_buffer(false, cx));
let status = status.clone();
let server_name = server_name.clone();
cx.spawn_in(window, async move |workspace, cx| {
@@ -410,13 +393,7 @@ impl ActivityIndicator {
icon: Some(
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(percentage(delta)))
- },
- )
+ .with_rotate_animation(2)
.into_any_element(),
),
message,
@@ -438,11 +415,7 @@ impl ActivityIndicator {
icon: Some(
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
+ .with_rotate_animation(2)
.into_any_element(),
),
message: format!("Debug: {}", session.read(cx).adapter()),
@@ -465,11 +438,7 @@ impl ActivityIndicator {
icon: Some(
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
+ .with_rotate_animation(2)
.into_any_element(),
),
message: job_info.message.into(),
@@ -676,8 +645,9 @@ impl ActivityIndicator {
}
// Show any application auto-update info.
- if let Some(updater) = &self.auto_updater {
- return match &updater.read(cx).status() {
+ self.auto_updater
+ .as_ref()
+ .and_then(|updater| match &updater.read(cx).status() {
AutoUpdateStatus::Checking => Some(Content {
icon: Some(
Icon::new(IconName::Download)
@@ -733,28 +703,49 @@ impl ActivityIndicator {
tooltip_message: None,
}),
AutoUpdateStatus::Idle => None,
- };
- }
-
- if let Some(extension_store) =
- ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
- && let Some(extension_id) = extension_store.outstanding_operations().keys().next()
- {
- return Some(Content {
- icon: Some(
- Icon::new(IconName::Download)
- .size(IconSize::Small)
- .into_any_element(),
- ),
- message: format!("Updating {extension_id} extension…"),
- on_click: Some(Arc::new(|this, window, cx| {
- this.dismiss_error_message(&DismissErrorMessage, window, cx)
- })),
- tooltip_message: None,
- });
- }
+ })
+ .or_else(|| {
+ if let Some(extension_store) =
+ ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
+ && let Some((extension_id, operation)) =
+ extension_store.outstanding_operations().iter().next()
+ {
+ let (message, icon, rotate) = match operation {
+ ExtensionOperation::Install => (
+ format!("Installing {extension_id} extension…"),
+ IconName::LoadCircle,
+ true,
+ ),
+ ExtensionOperation::Upgrade => (
+ format!("Updating {extension_id} extension…"),
+ IconName::Download,
+ false,
+ ),
+ ExtensionOperation::Remove => (
+ format!("Removing {extension_id} extension…"),
+ IconName::LoadCircle,
+ true,
+ ),
+ };
- None
+ Some(Content {
+ icon: Some(Icon::new(icon).size(IconSize::Small).map(|this| {
+ if rotate {
+ this.with_rotate_animation(3).into_any_element()
+ } else {
+ this.into_any_element()
+ }
+ })),
+ message,
+ on_click: Some(Arc::new(|this, window, cx| {
+ this.dismiss_error_message(&Default::default(), window, cx)
+ })),
+ tooltip_message: None,
+ })
+ } else {
+ None
+ }
+ })
}
fn version_tooltip_message(version: &VersionCheckType) -> String {
@@ -63,6 +63,7 @@ time.workspace = true
util.workspace = true
uuid.workspace = true
workspace-hack.workspace = true
+zed_env_vars.workspace = true
zstd.workspace = true
[dev-dependencies]
@@ -132,7 +132,7 @@ mod tests {
});
let tool_set = default_tool_set(cx);
- let profile = AgentProfile::new(id.clone(), tool_set);
+ let profile = AgentProfile::new(id, tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
@@ -169,7 +169,7 @@ mod tests {
});
let tool_set = default_tool_set(cx);
- let profile = AgentProfile::new(id.clone(), tool_set);
+ let profile = AgentProfile::new(id, tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
@@ -202,7 +202,7 @@ mod tests {
});
let tool_set = default_tool_set(cx);
- let profile = AgentProfile::new(id.clone(), tool_set);
+ let profile = AgentProfile::new(id, tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
@@ -362,7 +362,7 @@ impl Display for DirectoryContext {
let mut is_first = true;
for descendant in &self.descendants {
if !is_first {
- write!(f, "\n")?;
+ writeln!(f)?;
} else {
is_first = false;
}
@@ -650,7 +650,7 @@ impl TextThreadContextHandle {
impl Display for TextThreadContext {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
// TODO: escape title?
- write!(f, "<text_thread title=\"{}\">\n", self.title)?;
+ writeln!(f, "<text_thread title=\"{}\">", self.title)?;
write!(f, "{}", self.text.trim())?;
write!(f, "\n</text_thread>")
}
@@ -716,7 +716,7 @@ impl RulesContextHandle {
impl Display for RulesContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(title) = &self.title {
- write!(f, "Rules title: {}\n", title)?;
+ writeln!(f, "Rules title: {}", title)?;
}
let code_block = MarkdownCodeBlock {
tag: "",
@@ -86,15 +86,13 @@ impl Tool for ContextServerTool {
) -> ToolResult {
if let Some(server) = self.store.read(cx).get_running_server(&self.server_id) {
let tool_name = self.tool.name.clone();
- let server_clone = server.clone();
- let input_clone = input.clone();
cx.spawn(async move |_cx| {
- let Some(protocol) = server_clone.client() else {
+ let Some(protocol) = server.client() else {
bail!("Context server not initialized");
};
- let arguments = if let serde_json::Value::Object(map) = input_clone {
+ let arguments = if let serde_json::Value::Object(map) = input {
Some(map.into_iter().collect())
} else {
None
@@ -254,10 +254,9 @@ impl HistoryStore {
}
pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
- self.recently_opened_entries.retain(|entry| match entry {
- HistoryEntryId::Thread(thread_id) if thread_id == &id => false,
- _ => true,
- });
+ self.recently_opened_entries.retain(
+ |entry| !matches!(entry, HistoryEntryId::Thread(thread_id) if thread_id == &id),
+ );
self.save_recently_opened_entries(cx);
}
@@ -9,7 +9,10 @@ use crate::{
tool_use::{PendingToolUse, ToolUse, ToolUseMetadata, ToolUseState},
};
use action_log::ActionLog;
-use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_PROMPT};
+use agent_settings::{
+ AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_DETAILED_PROMPT,
+ SUMMARIZE_THREAD_PROMPT,
+};
use anyhow::{Result, anyhow};
use assistant_tool::{AnyToolCard, Tool, ToolWorkingSet};
use chrono::{DateTime, Utc};
@@ -107,7 +110,7 @@ impl std::fmt::Display for PromptId {
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
-pub struct MessageId(pub(crate) usize);
+pub struct MessageId(pub usize);
impl MessageId {
fn post_inc(&mut self) -> Self {
@@ -178,7 +181,7 @@ impl Message {
}
}
- pub fn to_string(&self) -> String {
+ pub fn to_message_content(&self) -> String {
let mut result = String::new();
if !self.loaded_context.text.is_empty() {
@@ -384,7 +387,6 @@ pub struct Thread {
cumulative_token_usage: TokenUsage,
exceeded_window_error: Option<ExceededWindowError>,
tool_use_limit_reached: bool,
- feedback: Option<ThreadFeedback>,
retry_state: Option<RetryState>,
message_feedback: HashMap<MessageId, ThreadFeedback>,
last_received_chunk_at: Option<Instant>,
@@ -484,14 +486,13 @@ impl Thread {
cumulative_token_usage: TokenUsage::default(),
exceeded_window_error: None,
tool_use_limit_reached: false,
- feedback: None,
retry_state: None,
message_feedback: HashMap::default(),
last_error_context: None,
last_received_chunk_at: None,
request_callback: None,
remaining_turns: u32::MAX,
- configured_model: configured_model.clone(),
+ configured_model,
profile: AgentProfile::new(profile_id, tools),
}
}
@@ -529,7 +530,7 @@ impl Thread {
.and_then(|model| {
let model = SelectedModel {
provider: model.provider.clone().into(),
- model: model.model.clone().into(),
+ model: model.model.into(),
};
registry.select_model(&model, cx)
})
@@ -609,7 +610,6 @@ impl Thread {
cumulative_token_usage: serialized.cumulative_token_usage,
exceeded_window_error: None,
tool_use_limit_reached: serialized.tool_use_limit_reached,
- feedback: None,
message_feedback: HashMap::default(),
last_error_context: None,
last_received_chunk_at: None,
@@ -1643,17 +1643,15 @@ impl Thread {
};
self.tool_use
- .request_tool_use(tool_message_id, tool_use, tool_use_metadata.clone(), cx);
+ .request_tool_use(tool_message_id, tool_use, tool_use_metadata, cx);
- let pending_tool_use = self.tool_use.insert_tool_output(
- tool_use_id.clone(),
+ self.tool_use.insert_tool_output(
+ tool_use_id,
tool_name,
tool_output,
self.configured_model.as_ref(),
self.completion_mode,
- );
-
- pending_tool_use
+ )
}
pub fn stream_completion(
@@ -2427,12 +2425,10 @@ impl Thread {
return;
}
- let added_user_message = include_str!("./prompts/summarize_thread_detailed_prompt.txt");
-
let request = self.to_summarize_request(
&model,
CompletionIntent::ThreadContextSummarization,
- added_user_message.into(),
+ SUMMARIZE_THREAD_DETAILED_PROMPT.into(),
cx,
);
@@ -2788,10 +2784,6 @@ impl Thread {
cx.emit(ThreadEvent::CancelEditing);
}
- pub fn feedback(&self) -> Option<ThreadFeedback> {
- self.feedback
- }
-
pub fn message_feedback(&self, message_id: MessageId) -> Option<ThreadFeedback> {
self.message_feedback.get(&message_id).copied()
}
@@ -2824,7 +2816,7 @@ impl Thread {
let message_content = self
.message(message_id)
- .map(|msg| msg.to_string())
+ .map(|msg| msg.to_message_content())
.unwrap_or_default();
cx.background_spawn(async move {
@@ -2853,52 +2845,6 @@ impl Thread {
})
}
- pub fn report_feedback(
- &mut self,
- feedback: ThreadFeedback,
- cx: &mut Context<Self>,
- ) -> Task<Result<()>> {
- let last_assistant_message_id = self
- .messages
- .iter()
- .rev()
- .find(|msg| msg.role == Role::Assistant)
- .map(|msg| msg.id);
-
- if let Some(message_id) = last_assistant_message_id {
- self.report_message_feedback(message_id, feedback, cx)
- } else {
- let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx);
- let serialized_thread = self.serialize(cx);
- let thread_id = self.id().clone();
- let client = self.project.read(cx).client();
- self.feedback = Some(feedback);
- cx.notify();
-
- cx.background_spawn(async move {
- let final_project_snapshot = final_project_snapshot.await;
- let serialized_thread = serialized_thread.await?;
- let thread_data = serde_json::to_value(serialized_thread)
- .unwrap_or_else(|_| serde_json::Value::Null);
-
- let rating = match feedback {
- ThreadFeedback::Positive => "positive",
- ThreadFeedback::Negative => "negative",
- };
- telemetry::event!(
- "Assistant Thread Rated",
- rating,
- thread_id,
- thread_data,
- final_project_snapshot
- );
- client.telemetry().flush_events().await;
-
- Ok(())
- })
- }
- }
-
/// Create a snapshot of the current project state including git information and unsaved buffers.
fn project_snapshot(
project: Entity<Project>,
@@ -3242,7 +3188,7 @@ impl Thread {
self.configured_model.as_ref(),
self.completion_mode,
);
- self.tool_finished(tool_use_id.clone(), None, true, window, cx);
+ self.tool_finished(tool_use_id, None, true, window, cx);
}
}
@@ -3874,7 +3820,7 @@ fn main() {{
AgentSettings {
model_parameters: vec![LanguageModelParameters {
provider: Some(model.provider_id().0.to_string().into()),
- model: Some(model.id().0.clone()),
+ model: Some(model.id().0),
temperature: Some(0.66),
}],
..AgentSettings::get_global(cx).clone()
@@ -3894,7 +3840,7 @@ fn main() {{
AgentSettings {
model_parameters: vec![LanguageModelParameters {
provider: None,
- model: Some(model.id().0.clone()),
+ model: Some(model.id().0),
temperature: Some(0.66),
}],
..AgentSettings::get_global(cx).clone()
@@ -3934,7 +3880,7 @@ fn main() {{
AgentSettings {
model_parameters: vec![LanguageModelParameters {
provider: Some("anthropic".into()),
- model: Some(model.id().0.clone()),
+ model: Some(model.id().0),
temperature: Some(0.66),
}],
..AgentSettings::get_global(cx).clone()
@@ -41,8 +41,7 @@ use std::{
};
use util::ResultExt as _;
-pub static ZED_STATELESS: std::sync::LazyLock<bool> =
- std::sync::LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty()));
+use zed_env_vars::ZED_STATELESS;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DataType {
@@ -74,7 +73,7 @@ impl Column for DataType {
}
}
-const RULES_FILE_NAMES: [&'static str; 9] = [
+const RULES_FILE_NAMES: [&str; 9] = [
".rules",
".cursorrules",
".windsurfrules",
@@ -893,8 +892,19 @@ impl ThreadsDatabase {
let needs_migration_from_heed = mdb_path.exists();
- let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) {
+ let connection = if *ZED_STATELESS {
Connection::open_memory(Some("THREAD_FALLBACK_DB"))
+ } else if cfg!(any(feature = "test-support", test)) {
+ // rust stores the name of the test on the current thread.
+ // We use this to automatically create a database that will
+ // be shared within the test (for the test_retrieve_old_thread)
+ // but not with concurrent tests.
+ let thread = std::thread::current();
+ let test_name = thread.name();
+ Connection::open_memory(Some(&format!(
+ "THREAD_FALLBACK_{}",
+ test_name.unwrap_or_default()
+ )))
} else {
Connection::open_file(&sqlite_path.to_string_lossy())
};
@@ -8,6 +8,10 @@ license = "GPL-3.0-or-later"
[lib]
path = "src/agent2.rs"
+[features]
+test-support = ["db/test-support"]
+e2e = []
+
[lints]
workspace = true
@@ -23,9 +27,11 @@ assistant_context.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
chrono.workspace = true
+client.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
context_server.workspace = true
+db.workspace = true
fs.workspace = true
futures.workspace = true
git.workspace = true
@@ -42,7 +48,6 @@ log.workspace = true
open.workspace = true
parking_lot.workspace = true
paths.workspace = true
-portable-pty.workspace = true
project.workspace = true
prompt_store.workspace = true
rust-embed.workspace = true
@@ -53,23 +58,28 @@ settings.workspace = true
smol.workspace = true
sqlez.workspace = true
task.workspace = true
+telemetry.workspace = true
terminal.workspace = true
+thiserror.workspace = true
text.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
web_search.workspace = true
-which.workspace = true
workspace-hack.workspace = true
+zed_env_vars.workspace = true
zstd.workspace = true
[dev-dependencies]
agent = { workspace = true, "features" = ["test-support"] }
+agent_servers = { workspace = true, "features" = ["test-support"] }
+assistant_context = { workspace = true, "features" = ["test-support"] }
ctor.workspace = true
client = { workspace = true, "features" = ["test-support"] }
clock = { workspace = true, "features" = ["test-support"] }
context_server = { workspace = true, "features" = ["test-support"] }
+db = { workspace = true, "features" = ["test-support"] }
editor = { workspace = true, "features" = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, "features" = ["test-support"] }
@@ -1,8 +1,8 @@
use crate::{
- ContextServerRegistry, Thread, ThreadEvent, ToolCallAuthorization, UserMessageContent,
- templates::Templates,
+ ContextServerRegistry, Thread, ThreadEvent, ThreadsDatabase, ToolCallAuthorization,
+ UserMessageContent, templates::Templates,
};
-use crate::{HistoryStore, ThreadsDatabase};
+use crate::{HistoryStore, TerminalHandle, ThreadEnvironment, TitleUpdated, TokenUsageUpdated};
use acp_thread::{AcpThread, AgentModelSelector};
use action_log::ActionLog;
use agent_client_protocol as acp;
@@ -10,7 +10,8 @@ use agent_settings::AgentSettings;
use anyhow::{Context as _, Result, anyhow};
use collections::{HashSet, IndexMap};
use fs::Fs;
-use futures::channel::mpsc;
+use futures::channel::{mpsc, oneshot};
+use futures::future::Shared;
use futures::{StreamExt, future};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
@@ -23,12 +24,12 @@ use prompt_store::{
use settings::update_settings_file;
use std::any::Any;
use std::collections::HashMap;
-use std::path::Path;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use util::ResultExt;
-const RULES_FILE_NAMES: [&'static str; 9] = [
+const RULES_FILE_NAMES: [&str; 9] = [
".rules",
".cursorrules",
".windsurfrules",
@@ -61,16 +62,19 @@ pub struct LanguageModels {
model_list: acp_thread::AgentModelList,
refresh_models_rx: watch::Receiver<()>,
refresh_models_tx: watch::Sender<()>,
+ _authenticate_all_providers_task: Task<()>,
}
impl LanguageModels {
- fn new(cx: &App) -> Self {
+ fn new(cx: &mut App) -> Self {
let (refresh_models_tx, refresh_models_rx) = watch::channel(());
+
let mut this = Self {
models: HashMap::default(),
model_list: acp_thread::AgentModelList::Grouped(IndexMap::default()),
refresh_models_rx,
refresh_models_tx,
+ _authenticate_all_providers_task: Self::authenticate_all_language_model_providers(cx),
};
this.refresh_list(cx);
this
@@ -90,7 +94,7 @@ impl LanguageModels {
let mut recommended = Vec::new();
for provider in &providers {
for model in provider.recommended_models(cx) {
- recommended_models.insert(model.id());
+ recommended_models.insert((model.provider_id(), model.id()));
recommended.push(Self::map_language_model_to_info(&model, provider));
}
}
@@ -107,7 +111,7 @@ impl LanguageModels {
for model in provider.provided_models(cx) {
let model_info = Self::map_language_model_to_info(&model, &provider);
let model_id = model_info.id.clone();
- if !recommended_models.contains(&model.id()) {
+ if !recommended_models.contains(&(model.provider_id(), model.id())) {
provider_models.push(model_info);
}
models.insert(model_id, model);
@@ -150,6 +154,52 @@ impl LanguageModels {
fn model_id(model: &Arc<dyn LanguageModel>) -> acp_thread::AgentModelId {
acp_thread::AgentModelId(format!("{}/{}", model.provider_id().0, model.id().0).into())
}
+
+ fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> {
+ let authenticate_all_providers = LanguageModelRegistry::global(cx)
+ .read(cx)
+ .providers()
+ .iter()
+ .map(|provider| (provider.id(), provider.name(), provider.authenticate(cx)))
+ .collect::<Vec<_>>();
+
+ cx.background_spawn(async move {
+ for (provider_id, provider_name, authenticate_task) in authenticate_all_providers {
+ if let Err(err) = authenticate_task.await {
+ if matches!(err, language_model::AuthenticateError::CredentialsNotFound) {
+ // Since we're authenticating these providers in the
+ // background for the purposes of populating the
+ // language selector, we don't care about providers
+ // where the credentials are not found.
+ } else {
+ // Some providers have noisy failure states that we
+ // don't want to spam the logs with every time the
+ // language model selector is initialized.
+ //
+ // Ideally these should have more clear failure modes
+ // that we know are safe to ignore here, like what we do
+ // with `CredentialsNotFound` above.
+ match provider_id.0.as_ref() {
+ "lmstudio" | "ollama" => {
+ // LM Studio and Ollama both make fetch requests to the local APIs to determine if they are "authenticated".
+ //
+ // These fail noisily, so we don't log them.
+ }
+ "copilot_chat" => {
+ // Copilot Chat returns an error if Copilot is not enabled, so we don't log those errors.
+ }
+ _ => {
+ log::error!(
+ "Failed to authenticate provider: {}: {err}",
+ provider_name.0
+ );
+ }
+ }
+ }
+ }
+ }
+ })
+ }
}
pub struct NativeAgent {
@@ -180,7 +230,7 @@ impl NativeAgent {
fs: Arc<dyn Fs>,
cx: &mut AsyncApp,
) -> Result<Entity<NativeAgent>> {
- log::info!("Creating new NativeAgent");
+ log::debug!("Creating new NativeAgent");
let project_context = cx
.update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))?
@@ -227,34 +277,46 @@ impl NativeAgent {
cx: &mut Context<Self>,
) -> Entity<AcpThread> {
let connection = Rc::new(NativeAgentConnection(cx.entity()));
- let registry = LanguageModelRegistry::read_global(cx);
- let summarization_model = registry.thread_summary_model().map(|c| c.model);
-
- thread_handle.update(cx, |thread, cx| {
- thread.set_summarization_model(summarization_model, cx);
- thread.add_default_tools(cx)
- });
let thread = thread_handle.read(cx);
let session_id = thread.id().clone();
let title = thread.title();
let project = thread.project.clone();
let action_log = thread.action_log.clone();
- let acp_thread = cx.new(|_cx| {
+ let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone();
+ let acp_thread = cx.new(|cx| {
acp_thread::AcpThread::new(
title,
connection,
project.clone(),
action_log.clone(),
session_id.clone(),
+ prompt_capabilities_rx,
+ cx,
+ )
+ });
+
+ let registry = LanguageModelRegistry::read_global(cx);
+ let summarization_model = registry.thread_summary_model().map(|c| c.model);
+
+ thread_handle.update(cx, |thread, cx| {
+ thread.set_summarization_model(summarization_model, cx);
+ thread.add_default_tools(
+ Rc::new(AcpThreadEnvironment {
+ acp_thread: acp_thread.downgrade(),
+ }) as _,
+ cx,
)
});
+
let subscriptions = vec![
cx.observe_release(&acp_thread, |this, acp_thread, _cx| {
this.sessions.remove(acp_thread.session_id());
}),
+ cx.subscribe(&thread_handle, Self::handle_thread_title_updated),
+ cx.subscribe(&thread_handle, Self::handle_thread_token_usage_updated),
cx.observe(&thread_handle, move |this, thread, cx| {
- this.save_thread(thread.clone(), cx)
+ this.save_thread(thread, cx)
}),
];
@@ -440,6 +502,43 @@ impl NativeAgent {
})
}
+ fn handle_thread_title_updated(
+ &mut self,
+ thread: Entity<Thread>,
+ _: &TitleUpdated,
+ cx: &mut Context<Self>,
+ ) {
+ let session_id = thread.read(cx).id();
+ let Some(session) = self.sessions.get(session_id) else {
+ return;
+ };
+ let thread = thread.downgrade();
+ let acp_thread = session.acp_thread.clone();
+ cx.spawn(async move |_, cx| {
+ let title = thread.read_with(cx, |thread, _| thread.title())?;
+ let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?;
+ task.await
+ })
+ .detach_and_log_err(cx);
+ }
+
+ fn handle_thread_token_usage_updated(
+ &mut self,
+ thread: Entity<Thread>,
+ usage: &TokenUsageUpdated,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(session) = self.sessions.get(thread.read(cx).id()) else {
+ return;
+ };
+ session
+ .acp_thread
+ .update(cx, |acp_thread, cx| {
+ acp_thread.update_token_usage(usage.0.clone(), cx);
+ })
+ .ok();
+ }
+
fn handle_project_event(
&mut self,
_project: Entity<Project>,
@@ -481,8 +580,8 @@ impl NativeAgent {
self.models.refresh_list(cx);
let registry = LanguageModelRegistry::read_global(cx);
- let default_model = registry.default_model().map(|m| m.model.clone());
- let summarization_model = registry.thread_summary_model().map(|m| m.model.clone());
+ let default_model = registry.default_model().map(|m| m.model);
+ let summarization_model = registry.thread_summary_model().map(|m| m.model);
for session in self.sessions.values_mut() {
session.thread.update(cx, |thread, cx| {
@@ -536,7 +635,33 @@ impl NativeAgent {
})
}
+ pub fn thread_summary(
+ &mut self,
+ id: acp::SessionId,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<SharedString>> {
+ let thread = self.open_thread(id.clone(), cx);
+ cx.spawn(async move |this, cx| {
+ let acp_thread = thread.await?;
+ let result = this
+ .update(cx, |this, cx| {
+ this.sessions
+ .get(&id)
+ .unwrap()
+ .thread
+ .update(cx, |thread, cx| thread.summary(cx))
+ })?
+ .await?;
+ drop(acp_thread);
+ Ok(result)
+ })
+ }
+
fn save_thread(&mut self, thread: Entity<Thread>, cx: &mut Context<Self>) {
+ if thread.read(cx).is_empty() {
+ return;
+ }
+
let database_future = ThreadsDatabase::connect(cx);
let (id, db_thread) =
thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx)));
@@ -645,18 +770,15 @@ impl NativeAgentConnection {
options,
response,
}) => {
- let recv = acp_thread.update(cx, |thread, cx| {
+ let outcome_task = acp_thread.update(cx, |thread, cx| {
thread.request_tool_call_authorization(tool_call, options, cx)
- })?;
+ })??;
cx.background_spawn(async move {
- if let Some(recv) = recv.log_err()
- && let Some(option) = recv
- .await
- .context("authorization sender was dropped")
- .log_err()
+ if let acp::RequestPermissionOutcome::Selected { option_id } =
+ outcome_task.await
{
response
- .send(option)
+ .send(option_id)
.map(|_| anyhow!("authorization receiver was dropped"))
.log_err();
}
@@ -673,10 +795,6 @@ impl NativeAgentConnection {
thread.update_tool_call(update, cx)
})??;
}
- ThreadEvent::TitleUpdate(title) => {
- acp_thread
- .update(cx, |thread, cx| thread.update_title(title, cx))??;
- }
ThreadEvent::Retry(status) => {
acp_thread.update(cx, |thread, cx| {
thread.update_retry_status(status, cx)
@@ -695,7 +813,7 @@ impl NativeAgentConnection {
}
}
- log::info!("Response stream completed");
+ log::debug!("Response stream completed");
anyhow::Ok(acp::PromptResponse {
stop_reason: acp::StopReason::EndTurn,
})
@@ -720,7 +838,7 @@ impl AgentModelSelector for NativeAgentConnection {
model_id: acp_thread::AgentModelId,
cx: &mut App,
) -> Task<Result<()>> {
- log::info!("Setting model for session {}: {}", session_id, model_id);
+ log::debug!("Setting model for session {}: {}", session_id, model_id);
let Some(thread) = self
.0
.read(cx)
@@ -791,12 +909,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
cx: &mut App,
) -> Task<Result<Entity<acp_thread::AcpThread>>> {
let agent = self.0.clone();
- log::info!("Creating new thread for project at: {:?}", cwd);
+ log::debug!("Creating new thread for project at: {:?}", cwd);
cx.spawn(async move |cx| {
log::debug!("Starting thread creation in async context");
- let action_log = cx.new(|_cx| ActionLog::new(project.clone()))?;
// Create Thread
let thread = agent.update(
cx,
@@ -812,20 +929,16 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
.models
.model_from_id(&LanguageModels::model_id(&default_model.model))
});
-
- let thread = cx.new(|cx| {
+ Ok(cx.new(|cx| {
Thread::new(
project.clone(),
agent.project_context.clone(),
agent.context_server_registry.clone(),
- action_log.clone(),
agent.templates.clone(),
default_model,
cx,
)
- });
-
- Ok(thread)
+ }))
},
)??;
agent.update(cx, |agent, cx| agent.register_session(thread, cx))
@@ -861,7 +974,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
.into_iter()
.map(Into::into)
.collect::<Vec<_>>();
- log::info!("Converted prompt to message: {} chars", content.len());
+ log::debug!("Converted prompt to message: {} chars", content.len());
log::debug!("Message id: {:?}", id);
log::debug!("Message content: {:?}", content);
@@ -872,7 +985,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
fn resume(
&self,
session_id: &acp::SessionId,
- _cx: &mut App,
+ _cx: &App,
) -> Option<Rc<dyn acp_thread::AgentSessionResume>> {
Some(Rc::new(NativeAgentSessionResume {
connection: self.clone(),
@@ -889,32 +1002,83 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
});
}
- fn session_editor(
+ fn truncate(
&self,
session_id: &agent_client_protocol::SessionId,
- cx: &mut App,
- ) -> Option<Rc<dyn acp_thread::AgentSessionEditor>> {
- self.0.update(cx, |agent, _cx| {
- agent
- .sessions
- .get(session_id)
- .map(|session| Rc::new(NativeAgentSessionEditor(session.thread.clone())) as _)
+ cx: &App,
+ ) -> Option<Rc<dyn acp_thread::AgentSessionTruncate>> {
+ self.0.read_with(cx, |agent, _cx| {
+ agent.sessions.get(session_id).map(|session| {
+ Rc::new(NativeAgentSessionTruncate {
+ thread: session.thread.clone(),
+ acp_thread: session.acp_thread.clone(),
+ }) as _
+ })
})
}
+ fn set_title(
+ &self,
+ session_id: &acp::SessionId,
+ _cx: &App,
+ ) -> Option<Rc<dyn acp_thread::AgentSessionSetTitle>> {
+ Some(Rc::new(NativeAgentSessionSetTitle {
+ connection: self.clone(),
+ session_id: session_id.clone(),
+ }) as _)
+ }
+
+ fn telemetry(&self) -> Option<Rc<dyn acp_thread::AgentTelemetry>> {
+ Some(Rc::new(self.clone()) as Rc<dyn acp_thread::AgentTelemetry>)
+ }
+
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
self
}
}
-struct NativeAgentSessionEditor(Entity<Thread>);
+impl acp_thread::AgentTelemetry for NativeAgentConnection {
+ fn agent_name(&self) -> String {
+ "Zed".into()
+ }
-impl acp_thread::AgentSessionEditor for NativeAgentSessionEditor {
- fn truncate(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task<Result<()>> {
- Task::ready(
- self.0
- .update(cx, |thread, cx| thread.truncate(message_id, cx)),
- )
+ fn thread_data(
+ &self,
+ session_id: &acp::SessionId,
+ cx: &mut App,
+ ) -> Task<Result<serde_json::Value>> {
+ let Some(session) = self.0.read(cx).sessions.get(session_id) else {
+ return Task::ready(Err(anyhow!("Session not found")));
+ };
+
+ let task = session.thread.read(cx).to_db(cx);
+ cx.background_spawn(async move {
+ serde_json::to_value(task.await).context("Failed to serialize thread")
+ })
+ }
+}
+
+struct NativeAgentSessionTruncate {
+ thread: Entity<Thread>,
+ acp_thread: WeakEntity<AcpThread>,
+}
+
+impl acp_thread::AgentSessionTruncate for NativeAgentSessionTruncate {
+ fn run(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task<Result<()>> {
+ match self.thread.update(cx, |thread, cx| {
+ thread.truncate(message_id.clone(), cx)?;
+ Ok(thread.latest_token_usage())
+ }) {
+ Ok(usage) => {
+ self.acp_thread
+ .update(cx, |thread, cx| {
+ thread.update_token_usage(usage, cx);
+ })
+ .ok();
+ Task::ready(Ok(()))
+ }
+ Err(error) => Task::ready(Err(error)),
+ }
}
}
@@ -932,14 +1096,97 @@ impl acp_thread::AgentSessionResume for NativeAgentSessionResume {
}
}
+struct NativeAgentSessionSetTitle {
+ connection: NativeAgentConnection,
+ session_id: acp::SessionId,
+}
+
+impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle {
+ fn run(&self, title: SharedString, cx: &mut App) -> Task<Result<()>> {
+ let Some(session) = self.connection.0.read(cx).sessions.get(&self.session_id) else {
+ return Task::ready(Err(anyhow!("session not found")));
+ };
+ let thread = session.thread.clone();
+ thread.update(cx, |thread, cx| thread.set_title(title, cx));
+ Task::ready(Ok(()))
+ }
+}
+
+pub struct AcpThreadEnvironment {
+ acp_thread: WeakEntity<AcpThread>,
+}
+
+impl ThreadEnvironment for AcpThreadEnvironment {
+ fn create_terminal(
+ &self,
+ command: String,
+ cwd: Option<PathBuf>,
+ output_byte_limit: Option<u64>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<Rc<dyn TerminalHandle>>> {
+ let task = self.acp_thread.update(cx, |thread, cx| {
+ thread.create_terminal(command, vec![], vec![], cwd, output_byte_limit, cx)
+ });
+
+ let acp_thread = self.acp_thread.clone();
+ cx.spawn(async move |cx| {
+ let terminal = task?.await?;
+
+ let (drop_tx, drop_rx) = oneshot::channel();
+ let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone())?;
+
+ cx.spawn(async move |cx| {
+ drop_rx.await.ok();
+ acp_thread.update(cx, |thread, cx| thread.release_terminal(terminal_id, cx))
+ })
+ .detach();
+
+ let handle = AcpTerminalHandle {
+ terminal,
+ _drop_tx: Some(drop_tx),
+ };
+
+ Ok(Rc::new(handle) as _)
+ })
+ }
+}
+
+pub struct AcpTerminalHandle {
+ terminal: Entity<acp_thread::Terminal>,
+ _drop_tx: Option<oneshot::Sender<()>>,
+}
+
+impl TerminalHandle for AcpTerminalHandle {
+ fn id(&self, cx: &AsyncApp) -> Result<acp::TerminalId> {
+ self.terminal.read_with(cx, |term, _cx| term.id().clone())
+ }
+
+ fn wait_for_exit(&self, cx: &AsyncApp) -> Result<Shared<Task<acp::TerminalExitStatus>>> {
+ self.terminal
+ .read_with(cx, |term, _cx| term.wait_for_exit())
+ }
+
+ fn current_output(&self, cx: &AsyncApp) -> Result<acp::TerminalOutputResponse> {
+ self.terminal
+ .read_with(cx, |term, cx| term.current_output(cx))
+ }
+}
+
#[cfg(test)]
mod tests {
+ use crate::HistoryEntryId;
+
use super::*;
- use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelId, AgentModelInfo};
+ use acp_thread::{
+ AgentConnection, AgentModelGroupName, AgentModelId, AgentModelInfo, MentionUri,
+ };
use fs::FakeFs;
use gpui::TestAppContext;
+ use indoc::indoc;
+ use language_model::fake_provider::FakeLanguageModel;
use serde_json::json;
use settings::SettingsStore;
+ use util::path;
#[gpui::test]
async fn test_maintaining_project_context(cx: &mut TestAppContext) {
@@ -954,7 +1201,7 @@ mod tests {
.await;
let project = Project::test(fs.clone(), [], cx).await;
let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
- let history_store = cx.new(|cx| HistoryStore::new(context_store, [], cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
let agent = NativeAgent::new(
project.clone(),
history_store,
@@ -1012,7 +1259,7 @@ mod tests {
fs.insert_tree("/", json!({ "a": {} })).await;
let project = Project::test(fs.clone(), [], cx).await;
let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
- let history_store = cx.new(|cx| HistoryStore::new(context_store, [], cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
let connection = NativeAgentConnection(
NativeAgent::new(
project.clone(),
@@ -1068,7 +1315,7 @@ mod tests {
let project = Project::test(fs.clone(), [], cx).await;
let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
- let history_store = cx.new(|cx| HistoryStore::new(context_store, [], cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
// Create the agent and connection
let agent = NativeAgent::new(
@@ -1124,6 +1371,158 @@ mod tests {
);
}
+ #[gpui::test]
+ #[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows
+ async fn test_save_load_thread(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/",
+ json!({
+ "a": {
+ "b.md": "Lorem"
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await;
+ let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
+ let agent = NativeAgent::new(
+ project.clone(),
+ history_store.clone(),
+ Templates::new(),
+ None,
+ fs.clone(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ let connection = Rc::new(NativeAgentConnection(agent.clone()));
+
+ let acp_thread = cx
+ .update(|cx| {
+ connection
+ .clone()
+ .new_thread(project.clone(), Path::new(""), cx)
+ })
+ .await
+ .unwrap();
+ let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone());
+ let thread = agent.read_with(cx, |agent, _| {
+ agent.sessions.get(&session_id).unwrap().thread.clone()
+ });
+
+ // Ensure empty threads are not saved, even if they get mutated.
+ let model = Arc::new(FakeLanguageModel::default());
+ let summary_model = Arc::new(FakeLanguageModel::default());
+ thread.update(cx, |thread, cx| {
+ thread.set_model(model.clone(), cx);
+ thread.set_summarization_model(Some(summary_model.clone()), cx);
+ });
+ cx.run_until_parked();
+ assert_eq!(history_entries(&history_store, cx), vec![]);
+
+ let send = acp_thread.update(cx, |thread, cx| {
+ thread.send(
+ vec![
+ "What does ".into(),
+ acp::ContentBlock::ResourceLink(acp::ResourceLink {
+ name: "b.md".into(),
+ uri: MentionUri::File {
+ abs_path: path!("/a/b.md").into(),
+ }
+ .to_uri()
+ .to_string(),
+ annotations: None,
+ description: None,
+ mime_type: None,
+ size: None,
+ title: None,
+ }),
+ " mean?".into(),
+ ],
+ cx,
+ )
+ });
+ let send = cx.foreground_executor().spawn(send);
+ cx.run_until_parked();
+
+ model.send_last_completion_stream_text_chunk("Lorem.");
+ model.end_last_completion_stream();
+ cx.run_until_parked();
+ summary_model.send_last_completion_stream_text_chunk("Explaining /a/b.md");
+ summary_model.end_last_completion_stream();
+
+ send.await.unwrap();
+ acp_thread.read_with(cx, |thread, cx| {
+ assert_eq!(
+ thread.to_markdown(cx),
+ indoc! {"
+ ## User
+
+ What does [@b.md](file:///a/b.md) mean?
+
+ ## Assistant
+
+ Lorem.
+
+ "}
+ )
+ });
+
+ cx.run_until_parked();
+
+ // Drop the ACP thread, which should cause the session to be dropped as well.
+ cx.update(|_| {
+ drop(thread);
+ drop(acp_thread);
+ });
+ agent.read_with(cx, |agent, _| {
+ assert_eq!(agent.sessions.keys().cloned().collect::<Vec<_>>(), []);
+ });
+
+ // Ensure the thread can be reloaded from disk.
+ assert_eq!(
+ history_entries(&history_store, cx),
+ vec![(
+ HistoryEntryId::AcpThread(session_id.clone()),
+ "Explaining /a/b.md".into()
+ )]
+ );
+ let acp_thread = agent
+ .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx))
+ .await
+ .unwrap();
+ acp_thread.read_with(cx, |thread, cx| {
+ assert_eq!(
+ thread.to_markdown(cx),
+ indoc! {"
+ ## User
+
+ What does [@b.md](file:///a/b.md) mean?
+
+ ## Assistant
+
+ Lorem.
+
+ "}
+ )
+ });
+ }
+
+ fn history_entries(
+ history: &Entity<HistoryStore>,
+ cx: &mut TestAppContext,
+ ) -> Vec<(HistoryEntryId, String)> {
+ history.read_with(cx, |history, _| {
+ history
+ .entries()
+ .map(|e| (e.id(), e.title().to_string()))
+ .collect::<Vec<_>>()
+ })
+ }
+
fn init_test(cx: &mut TestAppContext) {
env_logger::try_init().ok();
cx.update(|cx| {
@@ -1,5 +1,6 @@
use crate::{AgentMessage, AgentMessageContent, UserMessage, UserMessageContent};
-use agent::thread_store;
+use acp_thread::UserMessageId;
+use agent::{thread::DetailedSummaryState, thread_store};
use agent_client_protocol as acp;
use agent_settings::{AgentProfileId, CompletionMode};
use anyhow::{Result, anyhow};
@@ -17,9 +18,10 @@ use sqlez::{
};
use std::sync::Arc;
use ui::{App, SharedString};
+use zed_env_vars::ZED_STATELESS;
pub type DbMessage = crate::Message;
-pub type DbSummary = agent::thread::DetailedSummaryState;
+pub type DbSummary = DetailedSummaryState;
pub type DbLanguageModel = thread_store::SerializedLanguageModel;
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -36,13 +38,13 @@ pub struct DbThread {
pub messages: Vec<DbMessage>,
pub updated_at: DateTime<Utc>,
#[serde(default)]
- pub summary: DbSummary,
+ pub detailed_summary: Option<SharedString>,
#[serde(default)]
pub initial_project_snapshot: Option<Arc<agent::thread::ProjectSnapshot>>,
#[serde(default)]
pub cumulative_token_usage: language_model::TokenUsage,
#[serde(default)]
- pub request_token_usage: Vec<language_model::TokenUsage>,
+ pub request_token_usage: HashMap<acp_thread::UserMessageId, language_model::TokenUsage>,
#[serde(default)]
pub model: Option<DbLanguageModel>,
#[serde(default)]
@@ -67,7 +69,10 @@ impl DbThread {
fn upgrade_from_agent_1(thread: agent::SerializedThread) -> Result<Self> {
let mut messages = Vec::new();
- for msg in thread.messages {
+ let mut request_token_usage = HashMap::default();
+
+ let mut last_user_message_id = None;
+ for (ix, msg) in thread.messages.into_iter().enumerate() {
let message = match msg.role {
language_model::Role::User => {
let mut content = Vec::new();
@@ -93,9 +98,12 @@ impl DbThread {
content.push(UserMessageContent::Text(msg.context));
}
+ let id = UserMessageId::new();
+ last_user_message_id = Some(id.clone());
+
crate::Message::User(UserMessage {
// MessageId from old format can't be meaningfully converted, so generate a new one
- id: acp_thread::UserMessageId::new(),
+ id,
content,
})
}
@@ -154,6 +162,12 @@ impl DbThread {
);
}
+ if let Some(last_user_message_id) = &last_user_message_id
+ && let Some(token_usage) = thread.request_token_usage.get(ix).copied()
+ {
+ request_token_usage.insert(last_user_message_id.clone(), token_usage);
+ }
+
crate::Message::Agent(AgentMessage {
content,
tool_results,
@@ -172,10 +186,15 @@ impl DbThread {
title: thread.summary,
messages,
updated_at: thread.updated_at,
- summary: thread.detailed_summary_state,
+ detailed_summary: match thread.detailed_summary_state {
+ DetailedSummaryState::NotGenerated | DetailedSummaryState::Generating { .. } => {
+ None
+ }
+ DetailedSummaryState::Generated { text, .. } => Some(text),
+ },
initial_project_snapshot: thread.initial_project_snapshot,
cumulative_token_usage: thread.cumulative_token_usage,
- request_token_usage: thread.request_token_usage,
+ request_token_usage,
model: thread.model,
completion_mode: thread.completion_mode,
profile: thread.profile,
@@ -183,9 +202,6 @@ impl DbThread {
}
}
-pub static ZED_STATELESS: std::sync::LazyLock<bool> =
- std::sync::LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty()));
-
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DataType {
#[serde(rename = "json")]
@@ -248,8 +264,19 @@ impl ThreadsDatabase {
}
pub fn new(executor: BackgroundExecutor) -> Result<Self> {
- let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) {
+ let connection = if *ZED_STATELESS {
Connection::open_memory(Some("THREAD_FALLBACK_DB"))
+ } else if cfg!(any(feature = "test-support", test)) {
+ // rust stores the name of the test on the current thread.
+ // We use this to automatically create a database that will
+ // be shared within the test (for the test_retrieve_old_thread)
+ // but not with concurrent tests.
+ let thread = std::thread::current();
+ let test_name = thread.name();
+ Connection::open_memory(Some(&format!(
+ "THREAD_FALLBACK_{}",
+ test_name.unwrap_or_default()
+ )))
} else {
let threads_dir = paths::data_dir().join("threads");
std::fs::create_dir_all(&threads_dir)?;
@@ -269,7 +296,7 @@ impl ThreadsDatabase {
.map_err(|e| anyhow!("Failed to create threads table: {}", e))?;
let db = Self {
- executor: executor.clone(),
+ executor,
connection: Arc::new(Mutex::new(connection)),
};
@@ -307,7 +334,7 @@ impl ThreadsDatabase {
INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?)
"})?;
- insert((id.0.clone(), title, updated_at, data_type, data))?;
+ insert((id.0, title, updated_at, data_type, data))?;
Ok(())
}
@@ -416,7 +443,7 @@ mod tests {
let client = Client::new(clock, http_client, cx);
agent::init(cx);
agent_settings::init(cx);
- language_model::init(client.clone(), cx);
+ language_model::init(client, cx);
});
}
@@ -1,17 +1,20 @@
use crate::{DbThreadMetadata, ThreadsDatabase};
+use acp_thread::MentionUri;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result, anyhow};
-use assistant_context::SavedContextMetadata;
+use assistant_context::{AssistantContext, SavedContextMetadata};
use chrono::{DateTime, Utc};
+use db::kvp::KEY_VALUE_STORE;
use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools;
use paths::contexts_dir;
use serde::{Deserialize, Serialize};
use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration};
+use ui::ElementId;
use util::ResultExt as _;
const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;
-const NAVIGATION_HISTORY_PATH: &str = "agent-navigation-history.json";
+const RECENTLY_OPENED_THREADS_KEY: &str = "recent-agent-threads";
const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50);
const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
@@ -37,6 +40,19 @@ impl HistoryEntry {
}
}
+ pub fn mention_uri(&self) -> MentionUri {
+ match self {
+ HistoryEntry::AcpThread(thread) => MentionUri::Thread {
+ id: thread.id.clone(),
+ name: thread.title.to_string(),
+ },
+ HistoryEntry::TextThread(context) => MentionUri::TextThread {
+ path: context.path.as_ref().to_owned(),
+ name: context.title.to_string(),
+ },
+ }
+ }
+
pub fn title(&self) -> &SharedString {
match self {
HistoryEntry::AcpThread(thread) if thread.title.is_empty() => DEFAULT_TITLE,
@@ -47,22 +63,30 @@ impl HistoryEntry {
}
/// Generic identifier for a history entry.
-#[derive(Clone, PartialEq, Eq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum HistoryEntryId {
AcpThread(acp::SessionId),
TextThread(Arc<Path>),
}
-#[derive(Serialize, Deserialize)]
+impl Into<ElementId> for HistoryEntryId {
+ fn into(self) -> ElementId {
+ match self {
+ HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()),
+ HistoryEntryId::TextThread(path) => ElementId::Path(path),
+ }
+ }
+}
+
+#[derive(Serialize, Deserialize, Debug)]
enum SerializedRecentOpen {
- Thread(String),
- ContextName(String),
- /// Old format which stores the full path
- Context(String),
+ AcpThread(String),
+ TextThread(String),
}
pub struct HistoryStore {
threads: Vec<DbThreadMetadata>,
+ entries: Vec<HistoryEntry>,
context_store: Entity<assistant_context::ContextStore>,
recently_opened_entries: VecDeque<HistoryEntryId>,
_subscriptions: Vec<gpui::Subscription>,
@@ -72,35 +96,37 @@ pub struct HistoryStore {
impl HistoryStore {
pub fn new(
context_store: Entity<assistant_context::ContextStore>,
- initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
cx: &mut Context<Self>,
) -> Self {
- let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())];
+ let subscriptions = vec![cx.observe(&context_store, |this, _, cx| this.update_entries(cx))];
cx.spawn(async move |this, cx| {
- let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
- this.update(cx, |this, _| {
- this.recently_opened_entries
- .extend(
- entries.into_iter().take(
- MAX_RECENTLY_OPENED_ENTRIES
- .saturating_sub(this.recently_opened_entries.len()),
- ),
- );
+ let entries = Self::load_recently_opened_entries(cx).await;
+ this.update(cx, |this, cx| {
+ if let Some(entries) = entries.log_err() {
+ this.recently_opened_entries = entries;
+ }
+
+ this.reload(cx);
})
- .ok()
+ .ok();
})
.detach();
Self {
context_store,
- recently_opened_entries: initial_recent_entries.into_iter().collect(),
+ recently_opened_entries: VecDeque::default(),
threads: Vec::default(),
+ entries: Vec::default(),
_subscriptions: subscriptions,
_save_recently_opened_entries_task: Task::ready(()),
}
}
+ pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> {
+ self.threads.iter().find(|thread| &thread.id == session_id)
+ }
+
pub fn delete_thread(
&mut self,
id: acp::SessionId,
@@ -124,6 +150,16 @@ impl HistoryStore {
})
}
+ pub fn load_text_thread(
+ &self,
+ path: Arc<Path>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<AssistantContext>>> {
+ self.context_store.update(cx, |context_store, cx| {
+ context_store.open_local_context(path, cx)
+ })
+ }
+
pub fn reload(&self, cx: &mut Context<Self>) {
let database_future = ThreadsDatabase::connect(cx);
cx.spawn(async move |this, cx| {
@@ -134,21 +170,31 @@ impl HistoryStore {
.await?;
this.update(cx, |this, cx| {
+ if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES {
+ for thread in threads
+ .iter()
+ .take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len())
+ .rev()
+ {
+ this.push_recently_opened_entry(
+ HistoryEntryId::AcpThread(thread.id.clone()),
+ cx,
+ )
+ }
+ }
this.threads = threads;
- cx.notify();
+ this.update_entries(cx);
})
})
.detach_and_log_err(cx);
}
- pub fn entries(&self, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
- let mut history_entries = Vec::new();
-
+ fn update_entries(&mut self, cx: &mut Context<Self>) {
#[cfg(debug_assertions)]
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
- return history_entries;
+ return;
}
-
+ let mut history_entries = Vec::new();
history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread));
history_entries.extend(
self.context_store
@@ -159,11 +205,12 @@ impl HistoryStore {
);
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
- history_entries
+ self.entries = history_entries;
+ cx.notify()
}
- pub fn recent_entries(&self, limit: usize, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
- self.entries(cx).into_iter().take(limit).collect()
+ pub fn is_empty(&self, _cx: &App) -> bool {
+ self.entries.is_empty()
}
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
@@ -215,58 +262,51 @@ impl HistoryStore {
.iter()
.filter_map(|entry| match entry {
HistoryEntryId::TextThread(path) => path.file_name().map(|file| {
- SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
+ SerializedRecentOpen::TextThread(file.to_string_lossy().to_string())
}),
- HistoryEntryId::AcpThread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
+ HistoryEntryId::AcpThread(id) => {
+ Some(SerializedRecentOpen::AcpThread(id.to_string()))
+ }
})
.collect::<Vec<_>>();
self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| {
+ let content = serde_json::to_string(&serialized_entries).unwrap();
cx.background_executor()
.timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE)
.await;
- cx.background_spawn(async move {
- let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
- let content = serde_json::to_string(&serialized_entries)?;
- std::fs::write(path, content)?;
- anyhow::Ok(())
- })
- .await
- .log_err();
+
+ if cfg!(any(feature = "test-support", test)) {
+ return;
+ }
+ KEY_VALUE_STORE
+ .write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content)
+ .await
+ .log_err();
});
}
- fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
+ fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<VecDeque<HistoryEntryId>>> {
cx.background_spawn(async move {
- let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
- let contents = match smol::fs::read_to_string(path).await {
- Ok(it) => it,
- Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
- return Ok(Vec::new());
- }
- Err(e) => {
- return Err(e)
- .context("deserializing persisted agent panel navigation history");
- }
- };
- let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
+ if cfg!(any(feature = "test-support", test)) {
+ anyhow::bail!("history store does not persist in tests");
+ }
+ let json = KEY_VALUE_STORE
+ .read_kvp(RECENTLY_OPENED_THREADS_KEY)?
+ .unwrap_or("[]".to_string());
+ let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&json)
.context("deserializing persisted agent panel navigation history")?
.into_iter()
.take(MAX_RECENTLY_OPENED_ENTRIES)
.flat_map(|entry| match entry {
- SerializedRecentOpen::Thread(id) => Some(HistoryEntryId::AcpThread(
+ SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread(
acp::SessionId(id.as_str().into()),
)),
- SerializedRecentOpen::ContextName(file_name) => Some(
+ SerializedRecentOpen::TextThread(file_name) => Some(
HistoryEntryId::TextThread(contexts_dir().join(file_name).into()),
),
- SerializedRecentOpen::Context(path) => {
- Path::new(&path).file_name().map(|file_name| {
- HistoryEntryId::TextThread(contexts_dir().join(file_name).into())
- })
- }
})
- .collect::<Vec<_>>();
+ .collect();
Ok(entries)
})
}
@@ -281,10 +321,9 @@ impl HistoryStore {
}
pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context<Self>) {
- self.recently_opened_entries.retain(|entry| match entry {
- HistoryEntryId::AcpThread(thread_id) if thread_id == &id => false,
- _ => true,
- });
+ self.recently_opened_entries.retain(
+ |entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id),
+ );
self.save_recently_opened_entries(cx);
}
@@ -311,4 +350,8 @@ impl HistoryStore {
.retain(|old_entry| old_entry != entry);
self.save_recently_opened_entries(cx);
}
+
+ pub fn entries(&self) -> impl Iterator<Item = HistoryEntry> {
+ self.entries.iter().cloned()
+ }
}
@@ -1,10 +1,9 @@
use std::{any::Any, path::Path, rc::Rc, sync::Arc};
-use agent_servers::AgentServer;
+use agent_servers::{AgentServer, AgentServerDelegate};
use anyhow::Result;
use fs::Fs;
-use gpui::{App, Entity, Task};
-use project::Project;
+use gpui::{App, Entity, SharedString, Task};
use prompt_store::PromptStore;
use crate::{HistoryStore, NativeAgent, NativeAgentConnection, templates::Templates};
@@ -22,34 +21,34 @@ impl NativeAgentServer {
}
impl AgentServer for NativeAgentServer {
- fn name(&self) -> &'static str {
- "Native Agent"
+ fn telemetry_id(&self) -> &'static str {
+ "zed"
}
- fn empty_state_headline(&self) -> &'static str {
- "Native Agent"
- }
-
- fn empty_state_message(&self) -> &'static str {
- "How can I help you today?"
+ fn name(&self) -> SharedString {
+ "Zed Agent".into()
}
fn logo(&self) -> ui::IconName {
- // Using the ZedAssistant icon as it's the native built-in agent
- ui::IconName::ZedAssistant
+ ui::IconName::ZedAgent
}
fn connect(
&self,
- _root_dir: &Path,
- project: &Entity<Project>,
+ _root_dir: Option<&Path>,
+ delegate: AgentServerDelegate,
cx: &mut App,
- ) -> Task<Result<Rc<dyn acp_thread::AgentConnection>>> {
- log::info!(
+ ) -> Task<
+ Result<(
+ Rc<dyn acp_thread::AgentConnection>,
+ Option<task::SpawnInTerminal>,
+ )>,
+ > {
+ log::debug!(
"NativeAgentServer::connect called for path: {:?}",
_root_dir
);
- let project = project.clone();
+ let project = delegate.project().clone();
let fs = self.fs.clone();
let history = self.history.clone();
let prompt_store = PromptStore::global(cx);
@@ -64,9 +63,12 @@ impl AgentServer for NativeAgentServer {
// Create the connection wrapper
let connection = NativeAgentConnection(agent);
- log::info!("NativeAgentServer connection established successfully");
+ log::debug!("NativeAgentServer connection established successfully");
- Ok(Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>)
+ Ok((
+ Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>,
+ None,
+ ))
})
}
@@ -74,3 +76,52 @@ impl AgentServer for NativeAgentServer {
self
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use assistant_context::ContextStore;
+ use gpui::AppContext;
+
+ agent_servers::e2e_tests::common_e2e_tests!(
+ async |fs, project, cx| {
+ let auth = cx.update(|cx| {
+ prompt_store::init(cx);
+ terminal::init(cx);
+
+ let registry = language_model::LanguageModelRegistry::read_global(cx);
+ let auth = registry
+ .provider(&language_model::ANTHROPIC_PROVIDER_ID)
+ .unwrap()
+ .authenticate(cx);
+
+ cx.spawn(async move |_| auth.await)
+ });
+
+ auth.await.unwrap();
+
+ cx.update(|cx| {
+ let registry = language_model::LanguageModelRegistry::global(cx);
+
+ registry.update(cx, |registry, cx| {
+ registry.select_default_model(
+ Some(&language_model::SelectedModel {
+ provider: language_model::ANTHROPIC_PROVIDER_ID,
+ model: language_model::LanguageModelId("claude-sonnet-4-latest".into()),
+ }),
+ cx,
+ );
+ });
+ });
+
+ let history = cx.update(|cx| {
+ let context_store = cx.new(move |cx| ContextStore::fake(project.clone(), cx));
+ cx.new(move |cx| HistoryStore::new(context_store, cx))
+ });
+
+ NativeAgentServer::new(fs.clone(), history)
+ },
+ allow_option_id = "allow"
+ );
+}
@@ -1,30 +1,40 @@
use super::*;
use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelList, UserMessageId};
-use action_log::ActionLog;
use agent_client_protocol::{self as acp};
use agent_settings::AgentProfileId;
use anyhow::Result;
use client::{Client, UserStore};
+use cloud_llm_client::CompletionIntent;
+use collections::IndexMap;
+use context_server::{ContextServer, ContextServerCommand, ContextServerId};
use fs::{FakeFs, Fs};
-use futures::{StreamExt, channel::mpsc::UnboundedReceiver};
+use futures::{
+ StreamExt,
+ channel::{
+ mpsc::{self, UnboundedReceiver},
+ oneshot,
+ },
+};
use gpui::{
App, AppContext, Entity, Task, TestAppContext, UpdateGlobal, http_client::FakeHttpClient,
};
use indoc::indoc;
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId,
- LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequestMessage,
- LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, StopReason,
- fake_provider::FakeLanguageModel,
+ LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolSchemaFormat,
+ LanguageModelToolUse, MessageContent, Role, StopReason, fake_provider::FakeLanguageModel,
};
use pretty_assertions::assert_eq;
-use project::Project;
+use project::{
+ Project, context_server_store::ContextServerStore, project_settings::ProjectSettings,
+};
use prompt_store::ProjectContext;
use reqwest_client::ReqwestClient;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::json;
-use settings::SettingsStore;
+use settings::{Settings, SettingsStore};
use std::{path::Path, rc::Rc, sync::Arc, time::Duration};
use util::path;
@@ -32,17 +42,22 @@ mod test_tools;
use test_tools::*;
#[gpui::test]
-#[ignore = "can't run on CI yet"]
async fn test_echo(cx: &mut TestAppContext) {
- let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await;
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
let events = thread
.update(cx, |thread, cx| {
thread.send(UserMessageId::new(), ["Testing: Reply with 'Hello'"], cx)
})
- .unwrap()
- .collect()
- .await;
+ .unwrap();
+ cx.run_until_parked();
+ fake_model.send_last_completion_stream_text_chunk("Hello");
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn));
+ fake_model.end_last_completion_stream();
+
+ let events = events.collect().await;
thread.update(cx, |thread, _cx| {
assert_eq!(
thread.last_message().unwrap().to_markdown(),
@@ -57,9 +72,9 @@ async fn test_echo(cx: &mut TestAppContext) {
}
#[gpui::test]
-#[ignore = "can't run on CI yet"]
async fn test_thinking(cx: &mut TestAppContext) {
- let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4Thinking).await;
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
let events = thread
.update(cx, |thread, cx| {
@@ -74,9 +89,18 @@ async fn test_thinking(cx: &mut TestAppContext) {
cx,
)
})
- .unwrap()
- .collect()
- .await;
+ .unwrap();
+ cx.run_until_parked();
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking {
+ text: "Think".to_string(),
+ signature: None,
+ });
+ fake_model.send_last_completion_stream_text_chunk("Hello");
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn));
+ fake_model.end_last_completion_stream();
+
+ let events = events.collect().await;
thread.update(cx, |thread, _cx| {
assert_eq!(
thread.last_message().unwrap().to_markdown(),
@@ -210,7 +234,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
let tool_use = LanguageModelToolUse {
id: "tool_1".into(),
- name: EchoTool.name().into(),
+ name: EchoTool::name().into(),
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
@@ -223,7 +247,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
let completion = fake_model.pending_completions().pop().unwrap();
let tool_result = LanguageModelToolResult {
tool_use_id: "tool_1".into(),
- tool_name: EchoTool.name().into(),
+ tool_name: EchoTool::name().into(),
is_error: false,
content: "test".into(),
output: Some("test".into()),
@@ -271,7 +295,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
}
#[gpui::test]
-#[ignore = "can't run on CI yet"]
+#[cfg_attr(not(feature = "e2e"), ignore)]
async fn test_basic_tool_calls(cx: &mut TestAppContext) {
let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await;
@@ -293,7 +317,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) {
// Test a tool calls that's likely to complete *after* streaming stops.
let events = thread
.update(cx, |thread, cx| {
- thread.remove_tool(&AgentTool::name(&EchoTool));
+ thread.remove_tool(&EchoTool::name());
thread.add_tool(DelayTool);
thread.send(
UserMessageId::new(),
@@ -331,7 +355,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) {
}
#[gpui::test]
-#[ignore = "can't run on CI yet"]
+#[cfg_attr(not(feature = "e2e"), ignore)]
async fn test_streaming_tool_calls(cx: &mut TestAppContext) {
let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await;
@@ -397,7 +421,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: "tool_id_1".into(),
- name: ToolRequiringPermission.name().into(),
+ name: ToolRequiringPermission::name().into(),
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
@@ -406,7 +430,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: "tool_id_2".into(),
- name: ToolRequiringPermission.name().into(),
+ name: ToolRequiringPermission::name().into(),
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
@@ -437,17 +461,17 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
vec![
language_model::MessageContent::ToolResult(LanguageModelToolResult {
tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(),
- tool_name: ToolRequiringPermission.name().into(),
+ tool_name: ToolRequiringPermission::name().into(),
is_error: false,
content: "Allowed".into(),
output: Some("Allowed".into())
}),
language_model::MessageContent::ToolResult(LanguageModelToolResult {
tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(),
- tool_name: ToolRequiringPermission.name().into(),
+ tool_name: ToolRequiringPermission::name().into(),
is_error: true,
content: "Permission to run tool denied by user".into(),
- output: None
+ output: Some("Permission to run tool denied by user".into())
})
]
);
@@ -456,7 +480,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: "tool_id_3".into(),
- name: ToolRequiringPermission.name().into(),
+ name: ToolRequiringPermission::name().into(),
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
@@ -478,7 +502,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
vec![language_model::MessageContent::ToolResult(
LanguageModelToolResult {
tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(),
- tool_name: ToolRequiringPermission.name().into(),
+ tool_name: ToolRequiringPermission::name().into(),
is_error: false,
content: "Allowed".into(),
output: Some("Allowed".into())
@@ -490,7 +514,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: "tool_id_4".into(),
- name: ToolRequiringPermission.name().into(),
+ name: ToolRequiringPermission::name().into(),
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
@@ -505,7 +529,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
vec![language_model::MessageContent::ToolResult(
LanguageModelToolResult {
tool_use_id: "tool_id_4".into(),
- tool_name: ToolRequiringPermission.name().into(),
+ tool_name: ToolRequiringPermission::name().into(),
is_error: false,
content: "Allowed".into(),
output: Some("Allowed".into())
@@ -557,7 +581,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
cx.run_until_parked();
let tool_use = LanguageModelToolUse {
id: "tool_id_1".into(),
- name: EchoTool.name().into(),
+ name: EchoTool::name().into(),
raw_input: "{}".into(),
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
is_input_complete: true,
@@ -570,7 +594,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
let completion = fake_model.pending_completions().pop().unwrap();
let tool_result = LanguageModelToolResult {
tool_use_id: "tool_id_1".into(),
- tool_name: EchoTool.name().into(),
+ tool_name: EchoTool::name().into(),
is_error: false,
content: "def".into(),
output: Some("def".into()),
@@ -650,15 +674,6 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
"}
)
});
-
- // Ensure we error if calling resume when tool use limit was *not* reached.
- let error = thread
- .update(cx, |thread, cx| thread.resume(cx))
- .unwrap_err();
- assert_eq!(
- error.to_string(),
- "can only resume after tool use limit is reached"
- )
}
#[gpui::test]
@@ -676,14 +691,14 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
let tool_use = LanguageModelToolUse {
id: "tool_id_1".into(),
- name: EchoTool.name().into(),
+ name: EchoTool::name().into(),
raw_input: "{}".into(),
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
is_input_complete: true,
};
let tool_result = LanguageModelToolResult {
tool_use_id: "tool_id_1".into(),
- tool_name: EchoTool.name().into(),
+ tool_name: EchoTool::name().into(),
is_error: false,
content: "def".into(),
output: Some("def".into()),
@@ -794,7 +809,7 @@ async fn next_tool_call_authorization(
}
#[gpui::test]
-#[ignore = "can't run on CI yet"]
+#[cfg_attr(not(feature = "e2e"), ignore)]
async fn test_concurrent_tool_calls(cx: &mut TestAppContext) {
let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await;
@@ -860,14 +875,14 @@ async fn test_profiles(cx: &mut TestAppContext) {
"test-1": {
"name": "Test Profile 1",
"tools": {
- EchoTool.name(): true,
- DelayTool.name(): true,
+ EchoTool::name(): true,
+ DelayTool::name(): true,
}
},
"test-2": {
"name": "Test Profile 2",
"tools": {
- InfiniteTool.name(): true,
+ InfiniteTool::name(): true,
}
}
}
@@ -896,7 +911,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
.iter()
.map(|tool| tool.name.clone())
.collect();
- assert_eq!(tool_names, vec![DelayTool.name(), EchoTool.name()]);
+ assert_eq!(tool_names, vec![DelayTool::name(), EchoTool::name()]);
fake_model.end_last_completion_stream();
// Switch to test-2 profile, and verify that it has only the infinite tool.
@@ -915,11 +930,340 @@ async fn test_profiles(cx: &mut TestAppContext) {
.iter()
.map(|tool| tool.name.clone())
.collect();
- assert_eq!(tool_names, vec![InfiniteTool.name()]);
+ assert_eq!(tool_names, vec![InfiniteTool::name()]);
}
#[gpui::test]
-#[ignore = "can't run on CI yet"]
+async fn test_mcp_tools(cx: &mut TestAppContext) {
+ let ThreadTest {
+ model,
+ thread,
+ context_server_store,
+ fs,
+ ..
+ } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ // Override profiles and wait for settings to be loaded.
+ fs.insert_file(
+ paths::settings_file(),
+ json!({
+ "agent": {
+ "always_allow_tool_actions": true,
+ "profiles": {
+ "test": {
+ "name": "Test Profile",
+ "enable_all_context_servers": true,
+ "tools": {
+ EchoTool::name(): true,
+ }
+ },
+ }
+ }
+ })
+ .to_string()
+ .into_bytes(),
+ )
+ .await;
+ cx.run_until_parked();
+ thread.update(cx, |thread, _| {
+ thread.set_profile(AgentProfileId("test".into()))
+ });
+
+ let mut mcp_tool_calls = setup_context_server(
+ "test_server",
+ vec![context_server::types::Tool {
+ name: "echo".into(),
+ description: None,
+ input_schema: serde_json::to_value(
+ EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
+ )
+ .unwrap(),
+ output_schema: None,
+ annotations: None,
+ }],
+ &context_server_store,
+ cx,
+ );
+
+ let events = thread.update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Hey"], cx).unwrap()
+ });
+ cx.run_until_parked();
+
+ // Simulate the model calling the MCP tool.
+ let completion = fake_model.pending_completions().pop().unwrap();
+ assert_eq!(tool_names_for_completion(&completion), vec!["echo"]);
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: "tool_1".into(),
+ name: "echo".into(),
+ raw_input: json!({"text": "test"}).to_string(),
+ input: json!({"text": "test"}),
+ is_input_complete: true,
+ },
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let (tool_call_params, tool_call_response) = mcp_tool_calls.next().await.unwrap();
+ assert_eq!(tool_call_params.name, "echo");
+ assert_eq!(tool_call_params.arguments, Some(json!({"text": "test"})));
+ tool_call_response
+ .send(context_server::types::CallToolResponse {
+ content: vec![context_server::types::ToolResponseContent::Text {
+ text: "test".into(),
+ }],
+ is_error: None,
+ meta: None,
+ structured_content: None,
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ assert_eq!(tool_names_for_completion(&completion), vec!["echo"]);
+ fake_model.send_last_completion_stream_text_chunk("Done!");
+ fake_model.end_last_completion_stream();
+ events.collect::<Vec<_>>().await;
+
+ // Send again after adding the echo tool, ensuring the name collision is resolved.
+ let events = thread.update(cx, |thread, cx| {
+ thread.add_tool(EchoTool);
+ thread.send(UserMessageId::new(), ["Go"], cx).unwrap()
+ });
+ cx.run_until_parked();
+ let completion = fake_model.pending_completions().pop().unwrap();
+ assert_eq!(
+ tool_names_for_completion(&completion),
+ vec!["echo", "test_server_echo"]
+ );
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: "tool_2".into(),
+ name: "test_server_echo".into(),
+ raw_input: json!({"text": "mcp"}).to_string(),
+ input: json!({"text": "mcp"}),
+ is_input_complete: true,
+ },
+ ));
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: "tool_3".into(),
+ name: "echo".into(),
+ raw_input: json!({"text": "native"}).to_string(),
+ input: json!({"text": "native"}),
+ is_input_complete: true,
+ },
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let (tool_call_params, tool_call_response) = mcp_tool_calls.next().await.unwrap();
+ assert_eq!(tool_call_params.name, "echo");
+ assert_eq!(tool_call_params.arguments, Some(json!({"text": "mcp"})));
+ tool_call_response
+ .send(context_server::types::CallToolResponse {
+ content: vec![context_server::types::ToolResponseContent::Text { text: "mcp".into() }],
+ is_error: None,
+ meta: None,
+ structured_content: None,
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ // Ensure the tool results were inserted with the correct names.
+ let completion = fake_model.pending_completions().pop().unwrap();
+ assert_eq!(
+ completion.messages.last().unwrap().content,
+ vec![
+ MessageContent::ToolResult(LanguageModelToolResult {
+ tool_use_id: "tool_3".into(),
+ tool_name: "echo".into(),
+ is_error: false,
+ content: "native".into(),
+ output: Some("native".into()),
+ },),
+ MessageContent::ToolResult(LanguageModelToolResult {
+ tool_use_id: "tool_2".into(),
+ tool_name: "test_server_echo".into(),
+ is_error: false,
+ content: "mcp".into(),
+ output: Some("mcp".into()),
+ },),
+ ]
+ );
+ fake_model.end_last_completion_stream();
+ events.collect::<Vec<_>>().await;
+}
+
+#[gpui::test]
+async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
+ let ThreadTest {
+ model,
+ thread,
+ context_server_store,
+ fs,
+ ..
+ } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ // Set up a profile with all tools enabled
+ fs.insert_file(
+ paths::settings_file(),
+ json!({
+ "agent": {
+ "profiles": {
+ "test": {
+ "name": "Test Profile",
+ "enable_all_context_servers": true,
+ "tools": {
+ EchoTool::name(): true,
+ DelayTool::name(): true,
+ WordListTool::name(): true,
+ ToolRequiringPermission::name(): true,
+ InfiniteTool::name(): true,
+ }
+ },
+ }
+ }
+ })
+ .to_string()
+ .into_bytes(),
+ )
+ .await;
+ cx.run_until_parked();
+
+ thread.update(cx, |thread, _| {
+ thread.set_profile(AgentProfileId("test".into()));
+ thread.add_tool(EchoTool);
+ thread.add_tool(DelayTool);
+ thread.add_tool(WordListTool);
+ thread.add_tool(ToolRequiringPermission);
+ thread.add_tool(InfiniteTool);
+ });
+
+ // Set up multiple context servers with some overlapping tool names
+ let _server1_calls = setup_context_server(
+ "xxx",
+ vec![
+ context_server::types::Tool {
+ name: "echo".into(), // Conflicts with native EchoTool
+ description: None,
+ input_schema: serde_json::to_value(
+ EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
+ )
+ .unwrap(),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "unique_tool_1".into(),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ ],
+ &context_server_store,
+ cx,
+ );
+
+ let _server2_calls = setup_context_server(
+ "yyy",
+ vec![
+ context_server::types::Tool {
+ name: "echo".into(), // Also conflicts with native EchoTool
+ description: None,
+ input_schema: serde_json::to_value(
+ EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
+ )
+ .unwrap(),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "unique_tool_2".into(),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ ],
+ &context_server_store,
+ cx,
+ );
+ let _server3_calls = setup_context_server(
+ "zzz",
+ vec![
+ context_server::types::Tool {
+ name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ context_server::types::Tool {
+ name: "c".repeat(MAX_TOOL_NAME_LENGTH + 1),
+ description: None,
+ input_schema: json!({"type": "object", "properties": {}}),
+ output_schema: None,
+ annotations: None,
+ },
+ ],
+ &context_server_store,
+ cx,
+ );
+
+ thread
+ .update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Go"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+ let completion = fake_model.pending_completions().pop().unwrap();
+ assert_eq!(
+ tool_names_for_completion(&completion),
+ vec![
+ "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
+ "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc",
+ "delay",
+ "echo",
+ "infinite",
+ "tool_requiring_permission",
+ "unique_tool_1",
+ "unique_tool_2",
+ "word_list",
+ "xxx_echo",
+ "y_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
+ "yyy_echo",
+ "z_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
+ ]
+ );
+}
+
+#[gpui::test]
+#[cfg_attr(not(feature = "e2e"), ignore)]
async fn test_cancellation(cx: &mut TestAppContext) {
let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await;
@@ -975,7 +1319,7 @@ async fn test_cancellation(cx: &mut TestAppContext) {
assert!(
matches!(
last_event,
- Some(Ok(ThreadEvent::Stop(acp::StopReason::Canceled)))
+ Some(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled)))
),
"unexpected event {last_event:?}"
);
@@ -1029,7 +1373,7 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) {
fake_model.end_last_completion_stream();
let events_1 = events_1.collect::<Vec<_>>().await;
- assert_eq!(stop_events(events_1), vec![acp::StopReason::Canceled]);
+ assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]);
let events_2 = events_2.collect::<Vec<_>>().await;
assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]);
}
@@ -1117,7 +1461,7 @@ async fn test_refusal(cx: &mut TestAppContext) {
}
#[gpui::test]
-async fn test_truncate(cx: &mut TestAppContext) {
+async fn test_truncate_first_message(cx: &mut TestAppContext) {
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
let fake_model = model.as_fake();
@@ -1137,9 +1481,18 @@ async fn test_truncate(cx: &mut TestAppContext) {
Hello
"}
);
+ assert_eq!(thread.latest_token_usage(), None);
});
fake_model.send_last_completion_stream_text_chunk("Hey!");
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+ language_model::TokenUsage {
+ input_tokens: 32_000,
+ output_tokens: 16_000,
+ cache_creation_input_tokens: 0,
+ cache_read_input_tokens: 0,
+ },
+ ));
cx.run_until_parked();
thread.read_with(cx, |thread, _| {
assert_eq!(
@@ -1154,6 +1507,13 @@ async fn test_truncate(cx: &mut TestAppContext) {
Hey!
"}
);
+ assert_eq!(
+ thread.latest_token_usage(),
+ Some(acp_thread::TokenUsage {
+ used_tokens: 32_000 + 16_000,
+ max_tokens: 1_000_000,
+ })
+ );
});
thread
@@ -1162,6 +1522,7 @@ async fn test_truncate(cx: &mut TestAppContext) {
cx.run_until_parked();
thread.read_with(cx, |thread, _| {
assert_eq!(thread.to_markdown(), "");
+ assert_eq!(thread.latest_token_usage(), None);
});
// Ensure we can still send a new message after truncation.
@@ -1182,6 +1543,14 @@ async fn test_truncate(cx: &mut TestAppContext) {
});
cx.run_until_parked();
fake_model.send_last_completion_stream_text_chunk("Ahoy!");
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+ language_model::TokenUsage {
+ input_tokens: 40_000,
+ output_tokens: 20_000,
+ cache_creation_input_tokens: 0,
+ cache_read_input_tokens: 0,
+ },
+ ));
cx.run_until_parked();
thread.read_with(cx, |thread, _| {
assert_eq!(
@@ -1196,7 +1565,124 @@ async fn test_truncate(cx: &mut TestAppContext) {
Ahoy!
"}
);
+
+ assert_eq!(
+ thread.latest_token_usage(),
+ Some(acp_thread::TokenUsage {
+ used_tokens: 40_000 + 20_000,
+ max_tokens: 1_000_000,
+ })
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_truncate_second_message(cx: &mut TestAppContext) {
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ thread
+ .update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Message 1"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+ fake_model.send_last_completion_stream_text_chunk("Message 1 response");
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+ language_model::TokenUsage {
+ input_tokens: 32_000,
+ output_tokens: 16_000,
+ cache_creation_input_tokens: 0,
+ cache_read_input_tokens: 0,
+ },
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let assert_first_message_state = |cx: &mut TestAppContext| {
+ thread.clone().read_with(cx, |thread, _| {
+ assert_eq!(
+ thread.to_markdown(),
+ indoc! {"
+ ## User
+
+ Message 1
+
+ ## Assistant
+
+ Message 1 response
+ "}
+ );
+
+ assert_eq!(
+ thread.latest_token_usage(),
+ Some(acp_thread::TokenUsage {
+ used_tokens: 32_000 + 16_000,
+ max_tokens: 1_000_000,
+ })
+ );
+ });
+ };
+
+ assert_first_message_state(cx);
+
+ let second_message_id = UserMessageId::new();
+ thread
+ .update(cx, |thread, cx| {
+ thread.send(second_message_id.clone(), ["Message 2"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ fake_model.send_last_completion_stream_text_chunk("Message 2 response");
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+ language_model::TokenUsage {
+ input_tokens: 40_000,
+ output_tokens: 20_000,
+ cache_creation_input_tokens: 0,
+ cache_read_input_tokens: 0,
+ },
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ thread.read_with(cx, |thread, _| {
+ assert_eq!(
+ thread.to_markdown(),
+ indoc! {"
+ ## User
+
+ Message 1
+
+ ## Assistant
+
+ Message 1 response
+
+ ## User
+
+ Message 2
+
+ ## Assistant
+
+ Message 2 response
+ "}
+ );
+
+ assert_eq!(
+ thread.latest_token_usage(),
+ Some(acp_thread::TokenUsage {
+ used_tokens: 40_000 + 20_000,
+ max_tokens: 1_000_000,
+ })
+ );
});
+
+ thread
+ .update(cx, |thread, cx| thread.truncate(second_message_id, cx))
+ .unwrap();
+ cx.run_until_parked();
+
+ assert_first_message_state(cx);
}
#[gpui::test]
@@ -1227,6 +1713,7 @@ async fn test_title_generation(cx: &mut TestAppContext) {
summary_model.send_last_completion_stream_text_chunk("oodnight Moon");
summary_model.end_last_completion_stream();
send.collect::<Vec<_>>().await;
+ cx.run_until_parked();
thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world"));
// Send another message, ensuring no title is generated this time.
@@ -1244,6 +1731,81 @@ async fn test_title_generation(cx: &mut TestAppContext) {
thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world"));
}
+#[gpui::test]
+async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ let _events = thread
+ .update(cx, |thread, cx| {
+ thread.add_tool(ToolRequiringPermission);
+ thread.add_tool(EchoTool);
+ thread.send(UserMessageId::new(), ["Hey!"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let permission_tool_use = LanguageModelToolUse {
+ id: "tool_id_1".into(),
+ name: ToolRequiringPermission::name().into(),
+ raw_input: "{}".into(),
+ input: json!({}),
+ is_input_complete: true,
+ };
+ let echo_tool_use = LanguageModelToolUse {
+ id: "tool_id_2".into(),
+ name: EchoTool::name().into(),
+ raw_input: json!({"text": "test"}).to_string(),
+ input: json!({"text": "test"}),
+ is_input_complete: true,
+ };
+ fake_model.send_last_completion_stream_text_chunk("Hi!");
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ permission_tool_use,
+ ));
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
+ echo_tool_use.clone(),
+ ));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ // Ensure pending tools are skipped when building a request.
+ let request = thread
+ .read_with(cx, |thread, cx| {
+ thread.build_completion_request(CompletionIntent::EditFile, cx)
+ })
+ .unwrap();
+ assert_eq!(
+ request.messages[1..],
+ vec![
+ LanguageModelRequestMessage {
+ role: Role::User,
+ content: vec!["Hey!".into()],
+ cache: true
+ },
+ LanguageModelRequestMessage {
+ role: Role::Assistant,
+ content: vec![
+ MessageContent::Text("Hi!".into()),
+ MessageContent::ToolUse(echo_tool_use.clone())
+ ],
+ cache: false
+ },
+ LanguageModelRequestMessage {
+ role: Role::User,
+ content: vec![MessageContent::ToolResult(LanguageModelToolResult {
+ tool_use_id: echo_tool_use.id.clone(),
+ tool_name: echo_tool_use.name,
+ is_error: false,
+ content: "test".into(),
+ output: Some("test".into())
+ })],
+ cache: false
+ },
+ ],
+ );
+}
+
#[gpui::test]
async fn test_agent_connection(cx: &mut TestAppContext) {
cx.update(settings::init);
@@ -1259,7 +1821,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let client = Client::new(clock, http_client, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
- language_models::init(user_store.clone(), client.clone(), cx);
+ language_models::init(user_store, client.clone(), cx);
Project::init_settings(cx);
LanguageModelRegistry::test(cx);
agent_settings::init(cx);
@@ -1272,7 +1834,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await;
let cwd = Path::new("/test");
let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
- let history_store = cx.new(|cx| HistoryStore::new(context_store, [], cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
// Create agent and connection
let agent = NativeAgent::new(
@@ -1395,7 +1957,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
LanguageModelToolUse {
id: "1".into(),
- name: ThinkingTool.name().into(),
+ name: ThinkingTool::name().into(),
raw_input: input.to_string(),
input,
is_input_complete: false,
@@ -1536,6 +2098,7 @@ async fn test_send_retry_on_error(cx: &mut TestAppContext) {
.unwrap();
cx.run_until_parked();
+ fake_model.send_last_completion_stream_text_chunk("Hey,");
fake_model.send_last_completion_stream_error(LanguageModelCompletionError::ServerOverloaded {
provider: LanguageModelProviderName::new("Anthropic"),
retry_after: Some(Duration::from_secs(3)),
@@ -1545,8 +2108,9 @@ async fn test_send_retry_on_error(cx: &mut TestAppContext) {
cx.executor().advance_clock(Duration::from_secs(3));
cx.run_until_parked();
- fake_model.send_last_completion_stream_text_chunk("Hey!");
+ fake_model.send_last_completion_stream_text_chunk("there!");
fake_model.end_last_completion_stream();
+ cx.run_until_parked();
let mut retry_events = Vec::new();
while let Some(Ok(event)) = events.next().await {
@@ -16,15 +16,19 @@ impl AgentTool for EchoTool {
type Input = EchoToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "echo".into()
+ fn name() -> &'static str {
+ "echo"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"Echo".into()
}
@@ -51,11 +55,15 @@ impl AgentTool for DelayTool {
type Input = DelayToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "delay".into()
+ fn name() -> &'static str {
+ "delay"
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
format!("Delay {}ms", input.ms).into()
} else {
@@ -63,7 +71,7 @@ impl AgentTool for DelayTool {
}
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
@@ -92,15 +100,19 @@ impl AgentTool for ToolRequiringPermission {
type Input = ToolRequiringPermissionInput;
type Output = String;
- fn name(&self) -> SharedString {
- "tool_requiring_permission".into()
+ fn name() -> &'static str {
+ "tool_requiring_permission"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"This tool requires permission".into()
}
@@ -127,15 +139,19 @@ impl AgentTool for InfiniteTool {
type Input = InfiniteToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "infinite".into()
+ fn name() -> &'static str {
+ "infinite"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"Infinite Tool".into()
}
@@ -178,15 +194,19 @@ impl AgentTool for WordListTool {
type Input = WordListInput;
type Output = String;
- fn name(&self) -> SharedString {
- "word_list".into()
+ fn name() -> &'static str {
+ "word_list"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"List of random words".into()
}
@@ -6,14 +6,18 @@ use crate::{
};
use acp_thread::{MentionUri, UserMessageId};
use action_log::ActionLog;
-use agent::thread::{DetailedSummaryState, GitState, ProjectSnapshot, WorktreeSnapshot};
+use agent::thread::{GitState, ProjectSnapshot, WorktreeSnapshot};
use agent_client_protocol as acp;
-use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_PROMPT};
+use agent_settings::{
+ AgentProfileId, AgentProfileSettings, AgentSettings, CompletionMode,
+ SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT,
+};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::adapt_schema_to_format;
use chrono::{DateTime, Utc};
-use cloud_llm_client::{CompletionIntent, CompletionRequestStatus};
-use collections::IndexMap;
+use client::{ModelRequestUsage, RequestUsage};
+use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
+use collections::{HashMap, HashSet, IndexMap};
use fs::Fs;
use futures::{
FutureExt,
@@ -22,10 +26,12 @@ use futures::{
stream::FuturesUnordered,
};
use git::repository::DiffType;
-use gpui::{App, AppContext, AsyncApp, Context, Entity, SharedString, Task, WeakEntity};
+use gpui::{
+ App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity,
+};
use language_model::{
- LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelImage,
- LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
+ LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
@@ -41,15 +47,18 @@ use settings::{Settings, update_settings_file};
use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
+ ops::RangeInclusive,
path::Path,
+ rc::Rc,
sync::Arc,
time::{Duration, Instant},
};
-use std::{fmt::Write, ops::Range};
-use util::{ResultExt, markdown::MarkdownCodeBlock};
+use std::{fmt::Write, path::PathBuf};
+use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock};
use uuid::Uuid;
const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user";
+pub const MAX_TOOL_NAME_LENGTH: usize = 64;
/// The ID of the user prompt that initiated a request.
///
@@ -115,7 +124,7 @@ impl Message {
match self {
Message::User(message) => message.to_markdown(),
Message::Agent(message) => message.to_markdown(),
- Message::Resume => "[resumed after tool use limit was reached]".into(),
+ Message::Resume => "[resume]\n".into(),
}
}
@@ -155,9 +164,9 @@ impl UserMessage {
}
UserMessageContent::Mention { uri, content } => {
if !content.is_empty() {
- let _ = write!(&mut markdown, "{}\n\n{}\n", uri.as_link(), content);
+ let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content);
} else {
- let _ = write!(&mut markdown, "{}\n", uri.as_link());
+ let _ = writeln!(&mut markdown, "{}", uri.as_link());
}
}
}
@@ -180,6 +189,7 @@ impl UserMessage {
const OPEN_FILES_TAG: &str = "<files>";
const OPEN_DIRECTORIES_TAG: &str = "<directories>";
const OPEN_SYMBOLS_TAG: &str = "<symbols>";
+ const OPEN_SELECTIONS_TAG: &str = "<selections>";
const OPEN_THREADS_TAG: &str = "<threads>";
const OPEN_FETCH_TAG: &str = "<fetched_urls>";
const OPEN_RULES_TAG: &str =
@@ -188,6 +198,7 @@ impl UserMessage {
let mut file_context = OPEN_FILES_TAG.to_string();
let mut directory_context = OPEN_DIRECTORIES_TAG.to_string();
let mut symbol_context = OPEN_SYMBOLS_TAG.to_string();
+ let mut selection_context = OPEN_SELECTIONS_TAG.to_string();
let mut thread_context = OPEN_THREADS_TAG.to_string();
let mut fetch_context = OPEN_FETCH_TAG.to_string();
let mut rules_context = OPEN_RULES_TAG.to_string();
@@ -204,7 +215,7 @@ impl UserMessage {
match uri {
MentionUri::File { abs_path } => {
write!(
- &mut symbol_context,
+ &mut file_context,
"\n{}",
MarkdownCodeBlock {
tag: &codeblock_tag(abs_path, None),
@@ -213,17 +224,19 @@ impl UserMessage {
)
.ok();
}
+ MentionUri::PastedImage => {
+ debug_panic!("pasted image URI should not be used in mention content")
+ }
MentionUri::Directory { .. } => {
write!(&mut directory_context, "\n{}\n", content).ok();
}
MentionUri::Symbol {
- path, line_range, ..
- }
- | MentionUri::Selection {
- path, line_range, ..
+ abs_path: path,
+ line_range,
+ ..
} => {
write!(
- &mut rules_context,
+ &mut symbol_context,
"\n{}",
MarkdownCodeBlock {
tag: &codeblock_tag(path, Some(line_range)),
@@ -232,6 +245,24 @@ impl UserMessage {
)
.ok();
}
+ MentionUri::Selection {
+ abs_path: path,
+ line_range,
+ ..
+ } => {
+ write!(
+ &mut selection_context,
+ "\n{}",
+ MarkdownCodeBlock {
+ tag: &codeblock_tag(
+ path.as_deref().unwrap_or("Untitled".as_ref()),
+ Some(line_range)
+ ),
+ text: content
+ }
+ )
+ .ok();
+ }
MentionUri::Thread { .. } => {
write!(&mut thread_context, "\n{}\n", content).ok();
}
@@ -284,6 +315,13 @@ impl UserMessage {
.push(language_model::MessageContent::Text(symbol_context));
}
+ if selection_context.len() > OPEN_SELECTIONS_TAG.len() {
+ selection_context.push_str("</selections>\n");
+ message
+ .content
+ .push(language_model::MessageContent::Text(selection_context));
+ }
+
if thread_context.len() > OPEN_THREADS_TAG.len() {
thread_context.push_str("</threads>\n");
message
@@ -319,7 +357,7 @@ impl UserMessage {
}
}
-fn codeblock_tag(full_path: &Path, line_range: Option<&Range<u32>>) -> String {
+fn codeblock_tag(full_path: &Path, line_range: Option<&RangeInclusive<u32>>) -> String {
let mut result = String::new();
if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) {
@@ -329,10 +367,10 @@ fn codeblock_tag(full_path: &Path, line_range: Option<&Range<u32>>) -> String {
let _ = write!(result, "{}", full_path.display());
if let Some(range) = line_range {
- if range.start == range.end {
- let _ = write!(result, ":{}", range.start + 1);
+ if range.start() == range.end() {
+ let _ = write!(result, ":{}", range.start() + 1);
} else {
- let _ = write!(result, ":{}-{}", range.start + 1, range.end + 1);
+ let _ = write!(result, ":{}-{}", range.start() + 1, range.end() + 1);
}
}
@@ -411,24 +449,33 @@ impl AgentMessage {
cache: false,
};
for chunk in &self.content {
- let chunk = match chunk {
+ match chunk {
AgentMessageContent::Text(text) => {
- language_model::MessageContent::Text(text.clone())
+ assistant_message
+ .content
+ .push(language_model::MessageContent::Text(text.clone()));
}
AgentMessageContent::Thinking { text, signature } => {
- language_model::MessageContent::Thinking {
- text: text.clone(),
- signature: signature.clone(),
- }
+ assistant_message
+ .content
+ .push(language_model::MessageContent::Thinking {
+ text: text.clone(),
+ signature: signature.clone(),
+ });
}
AgentMessageContent::RedactedThinking(value) => {
- language_model::MessageContent::RedactedThinking(value.clone())
+ assistant_message.content.push(
+ language_model::MessageContent::RedactedThinking(value.clone()),
+ );
}
- AgentMessageContent::ToolUse(value) => {
- language_model::MessageContent::ToolUse(value.clone())
+ AgentMessageContent::ToolUse(tool_use) => {
+ if self.tool_results.contains_key(&tool_use.id) {
+ assistant_message
+ .content
+ .push(language_model::MessageContent::ToolUse(tool_use.clone()));
+ }
}
};
- assistant_message.content.push(chunk);
}
let mut user_message = LanguageModelRequestMessage {
@@ -438,11 +485,15 @@ impl AgentMessage {
};
for tool_result in self.tool_results.values() {
+ let mut tool_result = tool_result.clone();
+ // Surprisingly, the API fails if we return an empty string here.
+ // It thinks we are sending a tool use without a tool result.
+ if tool_result.content.is_empty() {
+ tool_result.content = "<Tool returned an empty string>".into();
+ }
user_message
.content
- .push(language_model::MessageContent::ToolResult(
- tool_result.clone(),
- ));
+ .push(language_model::MessageContent::ToolResult(tool_result));
}
let mut messages = Vec::new();
@@ -473,6 +524,22 @@ pub enum AgentMessageContent {
ToolUse(LanguageModelToolUse),
}
+pub trait TerminalHandle {
+ fn id(&self, cx: &AsyncApp) -> Result<acp::TerminalId>;
+ fn current_output(&self, cx: &AsyncApp) -> Result<acp::TerminalOutputResponse>;
+ fn wait_for_exit(&self, cx: &AsyncApp) -> Result<Shared<Task<acp::TerminalExitStatus>>>;
+}
+
+pub trait ThreadEnvironment {
+ fn create_terminal(
+ &self,
+ command: String,
+ cwd: Option<PathBuf>,
+ output_byte_limit: Option<u64>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<Rc<dyn TerminalHandle>>>;
+}
+
#[derive(Debug)]
pub enum ThreadEvent {
UserMessage(UserMessage),
@@ -481,11 +548,18 @@ pub enum ThreadEvent {
ToolCall(acp::ToolCall),
ToolCallUpdate(acp_thread::ToolCallUpdate),
ToolCallAuthorization(ToolCallAuthorization),
- TitleUpdate(SharedString),
Retry(acp_thread::RetryStatus),
Stop(acp::StopReason),
}
+#[derive(Debug)]
+pub struct NewTerminal {
+ pub command: String,
+ pub output_byte_limit: Option<u64>,
+ pub cwd: Option<PathBuf>,
+ pub response: oneshot::Sender<Result<Entity<acp_thread::Terminal>>>,
+}
+
#[derive(Debug)]
pub struct ToolCallAuthorization {
pub tool_call: acp::ToolCallUpdate,
@@ -493,13 +567,23 @@ pub struct ToolCallAuthorization {
pub response: oneshot::Sender<acp::PermissionOptionId>,
}
+#[derive(Debug, thiserror::Error)]
+enum CompletionError {
+ #[error("max tokens")]
+ MaxTokens,
+ #[error("refusal")]
+ Refusal,
+ #[error(transparent)]
+ Other(#[from] anyhow::Error),
+}
+
pub struct Thread {
id: acp::SessionId,
prompt_id: PromptId,
updated_at: DateTime<Utc>,
title: Option<SharedString>,
- #[allow(unused)]
- summary: DetailedSummaryState,
+ pending_title_generation: Option<Task<()>>,
+ summary: Option<SharedString>,
messages: Vec<Message>,
completion_mode: CompletionMode,
/// Holds the task that handles agent interaction until the end of the turn.
@@ -509,8 +593,7 @@ pub struct Thread {
pending_message: Option<AgentMessage>,
tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
tool_use_limit_reached: bool,
- #[allow(unused)]
- request_token_usage: Vec<TokenUsage>,
+ request_token_usage: HashMap<UserMessageId, language_model::TokenUsage>,
#[allow(unused)]
cumulative_token_usage: TokenUsage,
#[allow(unused)]
@@ -521,34 +604,48 @@ pub struct Thread {
templates: Arc<Templates>,
model: Option<Arc<dyn LanguageModel>>,
summarization_model: Option<Arc<dyn LanguageModel>>,
+ prompt_capabilities_tx: watch::Sender<acp::PromptCapabilities>,
+ pub(crate) prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
pub(crate) project: Entity<Project>,
pub(crate) action_log: Entity<ActionLog>,
}
impl Thread {
+ fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities {
+ let image = model.map_or(true, |model| model.supports_images());
+ acp::PromptCapabilities {
+ image,
+ audio: false,
+ embedded_context: true,
+ }
+ }
+
pub fn new(
project: Entity<Project>,
project_context: Entity<ProjectContext>,
context_server_registry: Entity<ContextServerRegistry>,
- action_log: Entity<ActionLog>,
templates: Arc<Templates>,
model: Option<Arc<dyn LanguageModel>>,
cx: &mut Context<Self>,
) -> Self {
let profile_id = AgentSettings::get_global(cx).default_profile.clone();
+ let action_log = cx.new(|_cx| ActionLog::new(project.clone()));
+ let (prompt_capabilities_tx, prompt_capabilities_rx) =
+ watch::channel(Self::prompt_capabilities(model.as_deref()));
Self {
id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()),
prompt_id: PromptId::new(),
updated_at: Utc::now(),
title: None,
- summary: DetailedSummaryState::default(),
+ pending_title_generation: None,
+ summary: None,
messages: Vec::new(),
completion_mode: AgentSettings::get_global(cx).preferred_completion_mode,
running_turn: None,
pending_message: None,
tools: BTreeMap::default(),
tool_use_limit_reached: false,
- request_token_usage: Vec::new(),
+ request_token_usage: HashMap::default(),
cumulative_token_usage: TokenUsage::default(),
initial_project_snapshot: {
let project_snapshot = Self::project_snapshot(project.clone(), cx);
@@ -562,6 +659,8 @@ impl Thread {
templates,
model,
summarization_model: None,
+ prompt_capabilities_tx,
+ prompt_capabilities_rx,
project,
action_log,
}
@@ -612,7 +711,20 @@ impl Thread {
stream: &ThreadEventStream,
cx: &mut Context<Self>,
) {
- let Some(tool) = self.tools.get(tool_use.name.as_ref()) else {
+ let tool = self.tools.get(tool_use.name.as_ref()).cloned().or_else(|| {
+ self.context_server_registry
+ .read(cx)
+ .servers()
+ .find_map(|(_, tools)| {
+ if let Some(tool) = tools.get(tool_use.name.as_ref()) {
+ Some(tool.clone())
+ } else {
+ None
+ }
+ })
+ });
+
+ let Some(tool) = tool else {
stream
.0
.unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall {
@@ -629,7 +741,7 @@ impl Thread {
return;
};
- let title = tool.initial_title(tool_use.input.clone());
+ let title = tool.initial_title(tool_use.input.clone(), cx);
let kind = tool.kind();
stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone());
@@ -649,7 +761,17 @@ impl Thread {
stream.update_tool_call_fields(
&tool_use.id,
acp::ToolCallUpdateFields {
- status: Some(acp::ToolCallStatus::Completed),
+ status: Some(
+ tool_result
+ .as_ref()
+ .map_or(acp::ToolCallStatus::Failed, |result| {
+ if result.is_error {
+ acp::ToolCallStatus::Failed
+ } else {
+ acp::ToolCallStatus::Completed
+ }
+ }),
+ ),
raw_output: output,
..Default::default()
},
@@ -675,13 +797,15 @@ impl Thread {
.and_then(|model| {
let model = SelectedModel {
provider: model.provider.clone().into(),
- model: model.model.clone().into(),
+ model: model.model.into(),
};
registry.select_model(&model, cx)
})
.or_else(|| registry.default_model())
.map(|model| model.model)
});
+ let (prompt_capabilities_tx, prompt_capabilities_rx) =
+ watch::channel(Self::prompt_capabilities(model.as_deref()));
Self {
id,
@@ -691,7 +815,8 @@ impl Thread {
} else {
Some(db_thread.title.clone())
},
- summary: db_thread.summary,
+ pending_title_generation: None,
+ summary: db_thread.detailed_summary,
messages: db_thread.messages,
completion_mode: db_thread.completion_mode.unwrap_or_default(),
running_turn: None,
@@ -710,16 +835,18 @@ impl Thread {
project,
action_log,
updated_at: db_thread.updated_at,
+ prompt_capabilities_tx,
+ prompt_capabilities_rx,
}
}
pub fn to_db(&self, cx: &App) -> Task<DbThread> {
let initial_project_snapshot = self.initial_project_snapshot.clone();
let mut thread = DbThread {
- title: self.title.clone().unwrap_or_default(),
+ title: self.title(),
messages: self.messages.clone(),
updated_at: self.updated_at,
- summary: self.summary.clone(),
+ detailed_summary: self.summary.clone(),
initial_project_snapshot: None,
cumulative_token_usage: self.cumulative_token_usage,
request_token_usage: self.request_token_usage.clone(),
@@ -866,15 +993,30 @@ impl Thread {
&self.action_log
}
+ pub fn is_empty(&self) -> bool {
+ self.messages.is_empty() && self.title.is_none()
+ }
+
pub fn model(&self) -> Option<&Arc<dyn LanguageModel>> {
self.model.as_ref()
}
pub fn set_model(&mut self, model: Arc<dyn LanguageModel>, cx: &mut Context<Self>) {
+ let old_usage = self.latest_token_usage();
self.model = Some(model);
+ let new_caps = Self::prompt_capabilities(self.model.as_deref());
+ let new_usage = self.latest_token_usage();
+ if old_usage != new_usage {
+ cx.emit(TokenUsageUpdated(new_usage));
+ }
+ self.prompt_capabilities_tx.send(new_caps).log_err();
cx.notify()
}
+ pub fn summarization_model(&self) -> Option<&Arc<dyn LanguageModel>> {
+ self.summarization_model.as_ref()
+ }
+
pub fn set_summarization_model(
&mut self,
model: Option<Arc<dyn LanguageModel>>,
@@ -889,7 +1031,12 @@ impl Thread {
}
pub fn set_completion_mode(&mut self, mode: CompletionMode, cx: &mut Context<Self>) {
+ let old_usage = self.latest_token_usage();
self.completion_mode = mode;
+ let new_usage = self.latest_token_usage();
+ if old_usage != new_usage {
+ cx.emit(TokenUsageUpdated(new_usage));
+ }
cx.notify()
}
@@ -902,7 +1049,11 @@ impl Thread {
}
}
- pub fn add_default_tools(&mut self, cx: &mut Context<Self>) {
+ pub fn add_default_tools(
+ &mut self,
+ environment: Rc<dyn ThreadEnvironment>,
+ cx: &mut Context<Self>,
+ ) {
let language_registry = self.project.read(cx).languages().clone();
self.add_tool(CopyPathTool::new(self.project.clone()));
self.add_tool(CreateDirectoryTool::new(self.project.clone()));
@@ -911,7 +1062,11 @@ impl Thread {
self.action_log.clone(),
));
self.add_tool(DiagnosticsTool::new(self.project.clone()));
- self.add_tool(EditFileTool::new(cx.weak_entity(), language_registry));
+ self.add_tool(EditFileTool::new(
+ self.project.clone(),
+ cx.weak_entity(),
+ language_registry,
+ ));
self.add_tool(FetchTool::new(self.project.read(cx).client().http_client()));
self.add_tool(FindPathTool::new(self.project.clone()));
self.add_tool(GrepTool::new(self.project.clone()));
@@ -923,13 +1078,13 @@ impl Thread {
self.project.clone(),
self.action_log.clone(),
));
- self.add_tool(TerminalTool::new(self.project.clone(), cx));
+ self.add_tool(TerminalTool::new(self.project.clone(), environment));
self.add_tool(ThinkingTool);
- self.add_tool(WebSearchTool); // TODO: Enable this only if it's a zed model.
+ self.add_tool(WebSearchTool);
}
- pub fn add_tool(&mut self, tool: impl AgentTool) {
- self.tools.insert(tool.name(), tool.erase());
+ pub fn add_tool<T: AgentTool>(&mut self, tool: T) {
+ self.tools.insert(T::name().into(), tool.erase());
}
pub fn remove_tool(&mut self, name: &str) -> bool {
@@ -951,6 +1106,17 @@ impl Thread {
self.flush_pending_message(cx);
}
+ fn update_token_usage(&mut self, update: language_model::TokenUsage, cx: &mut Context<Self>) {
+ let Some(last_user_message) = self.last_user_message() else {
+ return;
+ };
+
+ self.request_token_usage
+ .insert(last_user_message.id.clone(), update);
+ cx.emit(TokenUsageUpdated(self.latest_token_usage()));
+ cx.notify();
+ }
+
pub fn truncate(&mut self, message_id: UserMessageId, cx: &mut Context<Self>) -> Result<()> {
self.cancel(cx);
let Some(position) = self.messages.iter().position(
@@ -958,24 +1124,39 @@ impl Thread {
) else {
return Err(anyhow!("Message not found"));
};
- self.messages.truncate(position);
+
+ for message in self.messages.drain(position..) {
+ match message {
+ Message::User(message) => {
+ self.request_token_usage.remove(&message.id);
+ }
+ Message::Agent(_) | Message::Resume => {}
+ }
+ }
+ self.summary = None;
cx.notify();
Ok(())
}
+ pub fn latest_token_usage(&self) -> Option<acp_thread::TokenUsage> {
+ let last_user_message = self.last_user_message()?;
+ let tokens = self.request_token_usage.get(&last_user_message.id)?;
+ let model = self.model.clone()?;
+
+ Some(acp_thread::TokenUsage {
+ max_tokens: model.max_token_count_for_mode(self.completion_mode.into()),
+ used_tokens: tokens.total_tokens(),
+ })
+ }
+
pub fn resume(
&mut self,
cx: &mut Context<Self>,
) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
- anyhow::ensure!(
- self.tool_use_limit_reached,
- "can only resume after tool use limit is reached"
- );
-
self.messages.push(Message::Resume);
cx.notify();
- log::info!("Total messages in thread: {}", self.messages.len());
+ log::debug!("Total messages in thread: {}", self.messages.len());
self.run_turn(cx)
}
@@ -993,7 +1174,7 @@ impl Thread {
{
let model = self.model().context("No language model configured")?;
- log::info!("Thread::send called with model: {:?}", model.name());
+ log::info!("Thread::send called with model: {}", model.name().0);
self.advance_prompt_id();
let content = content.into_iter().map(Into::into).collect::<Vec<_>>();
@@ -1003,7 +1184,7 @@ impl Thread {
.push(Message::User(UserMessage { id, content }));
cx.notify();
- log::info!("Total messages in thread: {}", self.messages.len());
+ log::debug!("Total messages in thread: {}", self.messages.len());
self.run_turn(cx)
}
@@ -1014,108 +1195,43 @@ impl Thread {
self.cancel(cx);
let model = self.model.clone().context("No language model configured")?;
+ let profile = AgentSettings::get_global(cx)
+ .profiles
+ .get(&self.profile_id)
+ .context("Profile not found")?;
let (events_tx, events_rx) = mpsc::unbounded::<Result<ThreadEvent>>();
let event_stream = ThreadEventStream(events_tx);
let message_ix = self.messages.len().saturating_sub(1);
self.tool_use_limit_reached = false;
+ self.summary = None;
self.running_turn = Some(RunningTurn {
event_stream: event_stream.clone(),
+ tools: self.enabled_tools(profile, &model, cx),
_task: cx.spawn(async move |this, cx| {
- log::info!("Starting agent turn execution");
- let turn_result: Result<StopReason> = async {
- let mut completion_intent = CompletionIntent::UserPrompt;
- loop {
- log::debug!(
- "Building completion request with intent: {:?}",
- completion_intent
- );
- let request = this.update(cx, |this, cx| {
- this.build_completion_request(completion_intent, cx)
- })??;
-
- log::info!("Calling model.stream_completion");
-
- let mut tool_use_limit_reached = false;
- let mut refused = false;
- let mut reached_max_tokens = false;
- let mut tool_uses = Self::stream_completion_with_retries(
- this.clone(),
- model.clone(),
- request,
- &event_stream,
- &mut tool_use_limit_reached,
- &mut refused,
- &mut reached_max_tokens,
- cx,
- )
- .await?;
-
- if refused {
- return Ok(StopReason::Refusal);
- } else if reached_max_tokens {
- return Ok(StopReason::MaxTokens);
- }
+ log::debug!("Starting agent turn execution");
- let end_turn = tool_uses.is_empty();
- while let Some(tool_result) = tool_uses.next().await {
- log::info!("Tool finished {:?}", tool_result);
-
- event_stream.update_tool_call_fields(
- &tool_result.tool_use_id,
- acp::ToolCallUpdateFields {
- status: Some(if tool_result.is_error {
- acp::ToolCallStatus::Failed
- } else {
- acp::ToolCallStatus::Completed
- }),
- raw_output: tool_result.output.clone(),
- ..Default::default()
- },
- );
- this.update(cx, |this, _cx| {
- this.pending_message()
- .tool_results
- .insert(tool_result.tool_use_id.clone(), tool_result);
- })
- .ok();
- }
-
- if tool_use_limit_reached {
- log::info!("Tool use limit reached, completing turn");
- this.update(cx, |this, _cx| this.tool_use_limit_reached = true)?;
- return Err(language_model::ToolUseLimitReachedError.into());
- } else if end_turn {
- log::info!("No tool uses found, completing turn");
- return Ok(StopReason::EndTurn);
- } else {
- this.update(cx, |this, cx| this.flush_pending_message(cx))?;
- completion_intent = CompletionIntent::ToolResults;
- }
- }
- }
- .await;
+ let turn_result = Self::run_turn_internal(&this, model, &event_stream, cx).await;
_ = this.update(cx, |this, cx| this.flush_pending_message(cx));
match turn_result {
- Ok(reason) => {
- log::info!("Turn execution completed: {:?}", reason);
-
- let update_title = this
- .update(cx, |this, cx| this.update_title(&event_stream, cx))
- .ok()
- .flatten();
- if let Some(update_title) = update_title {
- update_title.await.context("update title failed").log_err();
- }
-
- event_stream.send_stop(reason);
- if reason == StopReason::Refusal {
- _ = this.update(cx, |this, _| this.messages.truncate(message_ix));
- }
+ Ok(()) => {
+ log::debug!("Turn execution completed");
+ event_stream.send_stop(acp::StopReason::EndTurn);
}
Err(error) => {
log::error!("Turn execution failed: {:?}", error);
- event_stream.send_error(error);
+ match error.downcast::<CompletionError>() {
+ Ok(CompletionError::Refusal) => {
+ event_stream.send_stop(acp::StopReason::Refusal);
+ _ = this.update(cx, |this, _| this.messages.truncate(message_ix));
+ }
+ Ok(CompletionError::MaxTokens) => {
+ event_stream.send_stop(acp::StopReason::MaxTokens);
+ }
+ Ok(CompletionError::Other(error)) | Err(error) => {
+ event_stream.send_error(error);
+ }
+ }
}
}
@@ -1125,130 +1241,154 @@ impl Thread {
Ok(events_rx)
}
- async fn stream_completion_with_retries(
- this: WeakEntity<Self>,
+ async fn run_turn_internal(
+ this: &WeakEntity<Self>,
model: Arc<dyn LanguageModel>,
- request: LanguageModelRequest,
event_stream: &ThreadEventStream,
- tool_use_limit_reached: &mut bool,
- refusal: &mut bool,
- max_tokens_reached: &mut bool,
cx: &mut AsyncApp,
- ) -> Result<FuturesUnordered<Task<LanguageModelToolResult>>> {
- log::debug!("Stream completion started successfully");
+ ) -> Result<()> {
+ let mut attempt = 0;
+ let mut intent = CompletionIntent::UserPrompt;
+ loop {
+ let request =
+ this.update(cx, |this, cx| this.build_completion_request(intent, cx))??;
+
+ telemetry::event!(
+ "Agent Thread Completion",
+ thread_id = this.read_with(cx, |this, _| this.id.to_string())?,
+ prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?,
+ model = model.telemetry_id(),
+ model_provider = model.provider_id().to_string(),
+ attempt
+ );
- let mut attempt = None;
- 'retry: loop {
- let mut events = model.stream_completion(request.clone(), cx).await?;
- let mut tool_uses = FuturesUnordered::new();
+ log::debug!("Calling model.stream_completion, attempt {}", attempt);
+ let mut events = model
+ .stream_completion(request, cx)
+ .await
+ .map_err(|error| anyhow!(error))?;
+ let mut tool_results = FuturesUnordered::new();
+ let mut error = None;
while let Some(event) = events.next().await {
+ log::trace!("Received completion event: {:?}", event);
match event {
- Ok(LanguageModelCompletionEvent::StatusUpdate(
- CompletionRequestStatus::ToolUseLimitReached,
- )) => {
- *tool_use_limit_reached = true;
- }
- Ok(LanguageModelCompletionEvent::Stop(StopReason::Refusal)) => {
- *refusal = true;
- return Ok(FuturesUnordered::default());
- }
- Ok(LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)) => {
- *max_tokens_reached = true;
- return Ok(FuturesUnordered::default());
- }
- Ok(LanguageModelCompletionEvent::Stop(
- StopReason::ToolUse | StopReason::EndTurn,
- )) => break,
Ok(event) => {
- log::trace!("Received completion event: {:?}", event);
- this.update(cx, |this, cx| {
- tool_uses.extend(this.handle_streamed_completion_event(
- event,
- event_stream,
- cx,
- ));
- })
- .ok();
+ tool_results.extend(this.update(cx, |this, cx| {
+ this.handle_completion_event(event, event_stream, cx)
+ })??);
}
- Err(error) => {
- let completion_mode =
- this.read_with(cx, |thread, _cx| thread.completion_mode())?;
- if completion_mode == CompletionMode::Normal {
- return Err(error.into());
- }
-
- let Some(strategy) = Self::retry_strategy_for(&error) else {
- return Err(error.into());
- };
-
- let max_attempts = match &strategy {
- RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts,
- RetryStrategy::Fixed { max_attempts, .. } => *max_attempts,
- };
+ Err(err) => {
+ error = Some(err);
+ break;
+ }
+ }
+ }
- let attempt = attempt.get_or_insert(0u8);
+ let end_turn = tool_results.is_empty();
+ while let Some(tool_result) = tool_results.next().await {
+ log::debug!("Tool finished {:?}", tool_result);
- *attempt += 1;
+ event_stream.update_tool_call_fields(
+ &tool_result.tool_use_id,
+ acp::ToolCallUpdateFields {
+ status: Some(if tool_result.is_error {
+ acp::ToolCallStatus::Failed
+ } else {
+ acp::ToolCallStatus::Completed
+ }),
+ raw_output: tool_result.output.clone(),
+ ..Default::default()
+ },
+ );
+ this.update(cx, |this, _cx| {
+ this.pending_message()
+ .tool_results
+ .insert(tool_result.tool_use_id.clone(), tool_result);
+ })?;
+ }
- let attempt = *attempt;
- if attempt > max_attempts {
- return Err(error.into());
+ this.update(cx, |this, cx| {
+ this.flush_pending_message(cx);
+ if this.title.is_none() && this.pending_title_generation.is_none() {
+ this.generate_title(cx);
+ }
+ })?;
+
+ if let Some(error) = error {
+ attempt += 1;
+ let retry =
+ this.update(cx, |this, _| this.handle_completion_error(error, attempt))??;
+ let timer = cx.background_executor().timer(retry.duration);
+ event_stream.send_retry(retry);
+ timer.await;
+ this.update(cx, |this, _cx| {
+ if let Some(Message::Agent(message)) = this.messages.last() {
+ if message.tool_results.is_empty() {
+ intent = CompletionIntent::UserPrompt;
+ this.messages.push(Message::Resume);
}
-
- let delay = match &strategy {
- RetryStrategy::ExponentialBackoff { initial_delay, .. } => {
- let delay_secs =
- initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32);
- Duration::from_secs(delay_secs)
- }
- RetryStrategy::Fixed { delay, .. } => *delay,
- };
- log::debug!("Retry attempt {attempt} with delay {delay:?}");
-
- event_stream.send_retry(acp_thread::RetryStatus {
- last_error: error.to_string().into(),
- attempt: attempt as usize,
- max_attempts: max_attempts as usize,
- started_at: Instant::now(),
- duration: delay,
- });
-
- cx.background_executor().timer(delay).await;
- continue 'retry;
}
- }
+ })?;
+ } else if this.read_with(cx, |this, _| this.tool_use_limit_reached)? {
+ return Err(language_model::ToolUseLimitReachedError.into());
+ } else if end_turn {
+ return Ok(());
+ } else {
+ intent = CompletionIntent::ToolResults;
+ attempt = 0;
}
-
- return Ok(tool_uses);
}
}
- pub fn build_system_message(&self, cx: &App) -> LanguageModelRequestMessage {
- log::debug!("Building system message");
- let prompt = SystemPromptTemplate {
- project: self.project_context.read(cx),
- available_tools: self.tools.keys().cloned().collect(),
+ fn handle_completion_error(
+ &mut self,
+ error: LanguageModelCompletionError,
+ attempt: u8,
+ ) -> Result<acp_thread::RetryStatus> {
+ if self.completion_mode == CompletionMode::Normal {
+ return Err(anyhow!(error));
}
- .render(&self.templates)
- .context("failed to build system prompt")
- .expect("Invalid template");
- log::debug!("System message built");
- LanguageModelRequestMessage {
- role: Role::System,
- content: vec![prompt.into()],
- cache: true,
+
+ let Some(strategy) = Self::retry_strategy_for(&error) else {
+ return Err(anyhow!(error));
+ };
+
+ let max_attempts = match &strategy {
+ RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts,
+ RetryStrategy::Fixed { max_attempts, .. } => *max_attempts,
+ };
+
+ if attempt > max_attempts {
+ return Err(anyhow!(error));
}
+
+ let delay = match &strategy {
+ RetryStrategy::ExponentialBackoff { initial_delay, .. } => {
+ let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32);
+ Duration::from_secs(delay_secs)
+ }
+ RetryStrategy::Fixed { delay, .. } => *delay,
+ };
+ log::debug!("Retry attempt {attempt} with delay {delay:?}");
+
+ Ok(acp_thread::RetryStatus {
+ last_error: error.to_string().into(),
+ attempt: attempt as usize,
+ max_attempts: max_attempts as usize,
+ started_at: Instant::now(),
+ duration: delay,
+ })
}
/// A helper method that's called on every streamed completion event.
- /// Returns an optional tool result task, which the main agentic loop in
- /// send will send back to the model when it resolves.
- fn handle_streamed_completion_event(
+ /// Returns an optional tool result task, which the main agentic loop will
+ /// send back to the model when it resolves.
+ fn handle_completion_event(
&mut self,
event: LanguageModelCompletionEvent,
event_stream: &ThreadEventStream,
cx: &mut Context<Self>,
- ) -> Option<Task<LanguageModelToolResult>> {
+ ) -> Result<Option<Task<LanguageModelToolResult>>> {
log::trace!("Handling streamed completion event: {:?}", event);
use LanguageModelCompletionEvent::*;
@@ -16,6 +16,29 @@ mod terminal_tool;
mod thinking_tool;
mod web_search_tool;
+/// A list of all built in tool names, for use in deduplicating MCP tool names
+pub fn default_tool_names() -> impl Iterator<Item = &'static str> {
+ [
+ CopyPathTool::name(),
+ CreateDirectoryTool::name(),
+ DeletePathTool::name(),
+ DiagnosticsTool::name(),
+ EditFileTool::name(),
+ FetchTool::name(),
+ FindPathTool::name(),
+ GrepTool::name(),
+ ListDirectoryTool::name(),
+ MovePathTool::name(),
+ NowTool::name(),
+ OpenTool::name(),
+ ReadFileTool::name(),
+ TerminalTool::name(),
+ ThinkingTool::name(),
+ WebSearchTool::name(),
+ ]
+ .into_iter()
+}
+
pub use context_server_registry::*;
pub use copy_path_tool::*;
pub use create_directory_tool::*;
@@ -33,3 +56,5 @@ pub use read_file_tool::*;
pub use terminal_tool::*;
pub use thinking_tool::*;
pub use web_search_tool::*;
+
+use crate::AgentTool;
@@ -145,7 +145,7 @@ impl AnyAgentTool for ContextServerTool {
ToolKind::Other
}
- fn initial_title(&self, _input: serde_json::Value) -> SharedString {
+ fn initial_title(&self, _input: serde_json::Value, _cx: &mut App) -> SharedString {
format!("Run MCP tool `{}`", self.tool.name).into()
}
@@ -169,22 +169,23 @@ impl AnyAgentTool for ContextServerTool {
fn run(
self: Arc<Self>,
input: serde_json::Value,
- _event_stream: ToolCallEventStream,
+ event_stream: ToolCallEventStream,
cx: &mut App,
) -> Task<Result<AgentToolOutput>> {
let Some(server) = self.store.read(cx).get_running_server(&self.server_id) else {
return Task::ready(Err(anyhow!("Context server not found")));
};
let tool_name = self.tool.name.clone();
- let server_clone = server.clone();
- let input_clone = input.clone();
+ let authorize = event_stream.authorize(self.initial_title(input.clone(), cx), cx);
cx.spawn(async move |_cx| {
- let Some(protocol) = server_clone.client() else {
+ authorize.await?;
+
+ let Some(protocol) = server.client() else {
bail!("Context server not initialized");
};
- let arguments = if let serde_json::Value::Object(map) = input_clone {
+ let arguments = if let serde_json::Value::Object(map) = input {
Some(map.into_iter().collect())
} else {
None
@@ -1,23 +1,18 @@
use crate::{AgentTool, ToolCallEventStream};
use agent_client_protocol::ToolKind;
use anyhow::{Context as _, Result, anyhow};
-use gpui::{App, AppContext, Entity, SharedString, Task};
+use gpui::{App, AppContext, Entity, Task};
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use util::markdown::MarkdownInlineCode;
-/// Copies a file or directory in the project, and returns confirmation that the
-/// copy succeeded.
-///
+/// Copies a file or directory in the project, and returns confirmation that the copy succeeded.
/// Directory contents will be copied recursively (like `cp -r`).
///
-/// This tool should be used when it's desirable to create a copy of a file or
-/// directory without modifying the original. It's much more efficient than
-/// doing this by separately reading and then writing the file or directory's
-/// contents, so this tool should be preferred over that approach whenever
-/// copying is the goal.
+/// This tool should be used when it's desirable to create a copy of a file or directory without modifying the original.
+/// It's much more efficient than doing this by separately reading and then writing the file or directory's contents, so this tool should be preferred over that approach whenever copying is the goal.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CopyPathToolInput {
/// The source path of the file or directory to copy.
@@ -33,12 +28,10 @@ pub struct CopyPathToolInput {
/// You can copy the first file by providing a source_path of "directory1/a/something.txt"
/// </example>
pub source_path: String,
-
/// The destination path where the file or directory should be copied to.
///
/// <example>
- /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt",
- /// provide a destination_path of "directory2/b/copy.txt"
+ /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt", provide a destination_path of "directory2/b/copy.txt"
/// </example>
pub destination_path: String,
}
@@ -57,15 +50,19 @@ impl AgentTool for CopyPathTool {
type Input = CopyPathToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "copy_path".into()
+ fn name() -> &'static str {
+ "copy_path"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Move
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> ui::SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> ui::SharedString {
if let Ok(input) = input {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
@@ -9,12 +9,9 @@ use util::markdown::MarkdownInlineCode;
use crate::{AgentTool, ToolCallEventStream};
-/// Creates a new directory at the specified path within the project. Returns
-/// confirmation that the directory was created.
+/// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created.
///
-/// This tool creates a directory and all necessary parent directories (similar
-/// to `mkdir -p`). It should be used whenever you need to create new
-/// directories within the project.
+/// This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CreateDirectoryToolInput {
/// The path of the new directory.
@@ -44,15 +41,19 @@ impl AgentTool for CreateDirectoryTool {
type Input = CreateDirectoryToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "create_directory".into()
+ fn name() -> &'static str {
+ "create_directory"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Read
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
format!("Create directory {}", MarkdownInlineCode(&input.path)).into()
} else {
@@ -9,8 +9,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
-/// Deletes the file or directory (and the directory's contents, recursively) at
-/// the specified path in the project, and returns confirmation of the deletion.
+/// Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DeletePathToolInput {
/// The path of the file or directory to delete.
@@ -45,15 +44,19 @@ impl AgentTool for DeletePathTool {
type Input = DeletePathToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "delete_path".into()
+ fn name() -> &'static str {
+ "delete_path"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Delete
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
format!("Delete “`{}`”", input.path).into()
} else {
@@ -63,15 +63,19 @@ impl AgentTool for DiagnosticsTool {
type Input = DiagnosticsToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "diagnostics".into()
+ fn name() -> &'static str {
+ "diagnostics"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Read
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Some(path) = input.ok().and_then(|input| match input.path {
Some(path) if !path.is_empty() => Some(path),
_ => None,
@@ -34,25 +34,21 @@ const DEFAULT_UI_TEXT: &str = "Editing file";
/// - Use the `list_directory` tool to verify the parent directory exists and is the correct location
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct EditFileToolInput {
- /// A one-line, user-friendly markdown description of the edit. This will be
- /// shown in the UI and also passed to another model to perform the edit.
+ /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit.
///
- /// Be terse, but also descriptive in what you want to achieve with this
- /// edit. Avoid generic instructions.
+ /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions.
///
/// NEVER mention the file path in this description.
///
/// <example>Fix API endpoint URLs</example>
/// <example>Update copyright year in `page_footer`</example>
///
- /// Make sure to include this field before all the others in the input object
- /// so that we can display it immediately.
+ /// Make sure to include this field before all the others in the input object so that we can display it immediately.
pub display_description: String,
/// The full path of the file to create or modify in the project.
///
- /// WARNING: When specifying which file path need changing, you MUST
- /// start each path with one of the project's root directories.
+ /// WARNING: When specifying which file path need changing, you MUST start each path with one of the project's root directories.
///
/// The following examples assume we have two root directories in the project:
/// - /a/b/backend
@@ -61,22 +57,19 @@ pub struct EditFileToolInput {
/// <example>
/// `backend/src/main.rs`
///
- /// Notice how the file path starts with `backend`. Without that, the path
- /// would be ambiguous and the call would fail!
+ /// Notice how the file path starts with `backend`. Without that, the path would be ambiguous and the call would fail!
/// </example>
///
/// <example>
/// `frontend/db.js`
/// </example>
pub path: PathBuf,
-
/// The mode of operation on the file. Possible values:
/// - 'edit': Make granular edits to an existing file.
/// - 'create': Create a new file if it doesn't exist.
/// - 'overwrite': Replace the entire contents of an existing file.
///
- /// When a file already exists or you just created it, prefer editing
- /// it as opposed to recreating it from scratch.
+ /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch.
pub mode: EditFileMode,
}
@@ -90,6 +83,7 @@ struct EditFileToolPartialInput {
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
+#[schemars(inline)]
pub enum EditFileMode {
Edit,
Create,
@@ -126,11 +120,17 @@ impl From<EditFileToolOutput> for LanguageModelToolResultContent {
pub struct EditFileTool {
thread: WeakEntity<Thread>,
language_registry: Arc<LanguageRegistry>,
+ project: Entity<Project>,
}
impl EditFileTool {
- pub fn new(thread: WeakEntity<Thread>, language_registry: Arc<LanguageRegistry>) -> Self {
+ pub fn new(
+ project: Entity<Project>,
+ thread: WeakEntity<Thread>,
+ language_registry: Arc<LanguageRegistry>,
+ ) -> Self {
Self {
+ project,
thread,
language_registry,
}
@@ -193,30 +193,58 @@ impl AgentTool for EditFileTool {
type Input = EditFileToolInput;
type Output = EditFileToolOutput;
- fn name(&self) -> SharedString {
- "edit_file".into()
+ fn name() -> &'static str {
+ "edit_file"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Edit
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ cx: &mut App,
+ ) -> SharedString {
match input {
- Ok(input) => input.display_description.into(),
+ Ok(input) => self
+ .project
+ .read(cx)
+ .find_project_path(&input.path, cx)
+ .and_then(|project_path| {
+ self.project
+ .read(cx)
+ .short_full_path_for_project_path(&project_path, cx)
+ })
+ .unwrap_or(Path::new(&input.path).into())
+ .to_string_lossy()
+ .to_string()
+ .into(),
Err(raw_input) => {
if let Some(input) =
serde_json::from_value::<EditFileToolPartialInput>(raw_input).ok()
{
+ let path = input.path.trim();
+ if !path.is_empty() {
+ return self
+ .project
+ .read(cx)
+ .find_project_path(&input.path, cx)
+ .and_then(|project_path| {
+ self.project
+ .read(cx)
+ .short_full_path_for_project_path(&project_path, cx)
+ })
+ .unwrap_or(Path::new(&input.path).into())
+ .to_string_lossy()
+ .to_string()
+ .into();
+ }
+
let description = input.display_description.trim();
if !description.is_empty() {
return description.to_string().into();
}
-
- let path = input.path.trim().to_string();
- if !path.is_empty() {
- return path.into();
- }
}
DEFAULT_UI_TEXT.into()
@@ -280,6 +308,13 @@ impl AgentTool for EditFileTool {
let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?;
event_stream.update_diff(diff.clone());
+ let _finalize_diff = util::defer({
+ let diff = diff.downgrade();
+ let mut cx = cx.clone();
+ move || {
+ diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok();
+ }
+ });
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
let old_text = cx
@@ -396,8 +431,6 @@ impl AgentTool for EditFileTool {
})
.await;
- diff.update(cx, |diff, cx| diff.finalize(cx)).ok();
-
let input_path = input.path.display();
if unified_diff.is_empty() {
anyhow::ensure!(
@@ -427,7 +460,7 @@ impl AgentTool for EditFileTool {
Ok(EditFileToolOutput {
input_path: input.path,
- new_text: new_text.clone(),
+ new_text,
old_text,
diff: unified_diff,
edit_agent_output,
@@ -524,7 +557,6 @@ fn resolve_path(
mod tests {
use super::*;
use crate::{ContextServerRegistry, Templates};
- use action_log::ActionLog;
use client::TelemetrySettings;
use fs::Fs;
use gpui::{TestAppContext, UpdateGlobal};
@@ -542,16 +574,14 @@ mod tests {
fs.insert_tree("/root", json!({})).await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
- project,
+ project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log,
Templates::new(),
Some(model),
cx,
@@ -564,11 +594,12 @@ mod tests {
path: "root/nonexistent_file.txt".into(),
mode: EditFileMode::Edit,
};
- Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
- input,
- ToolCallEventStream::test().0,
- cx,
- )
+ Arc::new(EditFileTool::new(
+ project,
+ thread.downgrade(),
+ language_registry,
+ ))
+ .run(input, ToolCallEventStream::test().0, cx)
})
.await;
assert_eq!(
@@ -655,8 +686,7 @@ mod tests {
mode: mode.clone(),
};
- let result = cx.update(|cx| resolve_path(&input, project, cx));
- result
+ cx.update(|cx| resolve_path(&input, project, cx))
}
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
@@ -743,16 +773,14 @@ mod tests {
}
});
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
- project,
+ project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
@@ -782,6 +810,7 @@ mod tests {
mode: EditFileMode::Overwrite,
};
Arc::new(EditFileTool::new(
+ project.clone(),
thread.downgrade(),
language_registry.clone(),
))
@@ -809,7 +838,9 @@ mod tests {
"Code should be formatted when format_on_save is enabled"
);
- let stale_buffer_count = action_log.read_with(cx, |log, cx| log.stale_buffers(cx).count());
+ let stale_buffer_count = thread
+ .read_with(cx, |thread, _cx| thread.action_log.clone())
+ .read_with(cx, |log, cx| log.stale_buffers(cx).count());
assert_eq!(
stale_buffer_count, 0,
@@ -838,11 +869,12 @@ mod tests {
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
- Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
- input,
- ToolCallEventStream::test().0,
- cx,
- )
+ Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ))
+ .run(input, ToolCallEventStream::test().0, cx)
});
// Stream the unformatted content
@@ -887,14 +919,12 @@ mod tests {
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
- project,
+ project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
@@ -925,6 +955,7 @@ mod tests {
mode: EditFileMode::Overwrite,
};
Arc::new(EditFileTool::new(
+ project.clone(),
thread.downgrade(),
language_registry.clone(),
))
@@ -976,11 +1007,12 @@ mod tests {
path: "root/src/main.rs".into(),
mode: EditFileMode::Overwrite,
};
- Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
- input,
- ToolCallEventStream::test().0,
- cx,
- )
+ Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ))
+ .run(input, ToolCallEventStream::test().0, cx)
});
// Stream the content with trailing whitespace
@@ -1016,20 +1048,22 @@ mod tests {
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
- project,
+ project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ));
fs.insert_tree("/root", json!({})).await;
// Test 1: Path with .zed component should require confirmation
@@ -1154,20 +1188,22 @@ mod tests {
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
- project,
+ project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ));
// Test global config paths - these should require confirmation if they exist and are outside the project
let test_cases = vec![
@@ -1262,7 +1298,6 @@ mod tests {
)
.await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
@@ -1271,13 +1306,16 @@ mod tests {
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry.clone(),
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ));
// Test files in different worktrees
let test_cases = vec![
@@ -1344,7 +1382,6 @@ mod tests {
.await;
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
@@ -1353,13 +1390,16 @@ mod tests {
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry.clone(),
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ));
// Test edge cases
let test_cases = vec![
@@ -1429,7 +1469,6 @@ mod tests {
.await;
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
@@ -1438,13 +1477,16 @@ mod tests {
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry.clone(),
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ language_registry,
+ ));
// Test different EditFileMode values
let modes = vec![
@@ -1511,7 +1553,6 @@ mod tests {
let fs = project::FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
@@ -1520,54 +1561,178 @@ mod tests {
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
- action_log.clone(),
Templates::new(),
Some(model.clone()),
cx,
)
});
- let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
+ let tool = Arc::new(EditFileTool::new(
+ project,
+ thread.downgrade(),
+ language_registry,
+ ));
- assert_eq!(
- tool.initial_title(Err(json!({
- "path": "src/main.rs",
- "display_description": "",
- "old_string": "old code",
- "new_string": "new code"
- }))),
- "src/main.rs"
- );
- assert_eq!(
- tool.initial_title(Err(json!({
- "path": "",
- "display_description": "Fix error handling",
- "old_string": "old code",
- "new_string": "new code"
- }))),
- "Fix error handling"
- );
- assert_eq!(
- tool.initial_title(Err(json!({
- "path": "src/main.rs",
- "display_description": "Fix error handling",
- "old_string": "old code",
- "new_string": "new code"
- }))),
- "Fix error handling"
- );
- assert_eq!(
- tool.initial_title(Err(json!({
- "path": "",
- "display_description": "",
- "old_string": "old code",
- "new_string": "new code"
- }))),
- DEFAULT_UI_TEXT
- );
- assert_eq!(
- tool.initial_title(Err(serde_json::Value::Null)),
- DEFAULT_UI_TEXT
- );
+ cx.update(|cx| {
+ // ...
+ assert_eq!(
+ tool.initial_title(
+ Err(json!({
+ "path": "src/main.rs",
+ "display_description": "",
+ "old_string": "old code",
+ "new_string": "new code"
+ })),
+ cx
+ ),
+ "src/main.rs"
+ );
+ assert_eq!(
+ tool.initial_title(
+ Err(json!({
+ "path": "",
+ "display_description": "Fix error handling",
+ "old_string": "old code",
+ "new_string": "new code"
+ })),
+ cx
+ ),
+ "Fix error handling"
+ );
+ assert_eq!(
+ tool.initial_title(
+ Err(json!({
+ "path": "src/main.rs",
+ "display_description": "Fix error handling",
+ "old_string": "old code",
+ "new_string": "new code"
+ })),
+ cx
+ ),
+ "src/main.rs"
+ );
+ assert_eq!(
+ tool.initial_title(
+ Err(json!({
+ "path": "",
+ "display_description": "",
+ "old_string": "old code",
+ "new_string": "new code"
+ })),
+ cx
+ ),
+ DEFAULT_UI_TEXT
+ );
+ assert_eq!(
+ tool.initial_title(Err(serde_json::Value::Null), cx),
+ DEFAULT_UI_TEXT
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_diff_finalization(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = project::FakeFs::new(cx.executor());
+ fs.insert_tree("/", json!({"main.rs": ""})).await;
+
+ let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await;
+ let languages = project.read_with(cx, |project, _cx| project.languages().clone());
+ let context_server_registry =
+ cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
+ let model = Arc::new(FakeLanguageModel::default());
+ let thread = cx.new(|cx| {
+ Thread::new(
+ project.clone(),
+ cx.new(|_cx| ProjectContext::default()),
+ context_server_registry.clone(),
+ Templates::new(),
+ Some(model.clone()),
+ cx,
+ )
+ });
+
+ // Ensure the diff is finalized after the edit completes.
+ {
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ languages.clone(),
+ ));
+ let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
+ let edit = cx.update(|cx| {
+ tool.run(
+ EditFileToolInput {
+ display_description: "Edit file".into(),
+ path: path!("/main.rs").into(),
+ mode: EditFileMode::Edit,
+ },
+ stream_tx,
+ cx,
+ )
+ });
+ stream_rx.expect_update_fields().await;
+ let diff = stream_rx.expect_diff().await;
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_))));
+ cx.run_until_parked();
+ model.end_last_completion_stream();
+ edit.await.unwrap();
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_))));
+ }
+
+ // Ensure the diff is finalized if an error occurs while editing.
+ {
+ model.forbid_requests();
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ languages.clone(),
+ ));
+ let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
+ let edit = cx.update(|cx| {
+ tool.run(
+ EditFileToolInput {
+ display_description: "Edit file".into(),
+ path: path!("/main.rs").into(),
+ mode: EditFileMode::Edit,
+ },
+ stream_tx,
+ cx,
+ )
+ });
+ stream_rx.expect_update_fields().await;
+ let diff = stream_rx.expect_diff().await;
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_))));
+ edit.await.unwrap_err();
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_))));
+ model.allow_requests();
+ }
+
+ // Ensure the diff is finalized if the tool call gets dropped.
+ {
+ let tool = Arc::new(EditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ languages.clone(),
+ ));
+ let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
+ let edit = cx.update(|cx| {
+ tool.run(
+ EditFileToolInput {
+ display_description: "Edit file".into(),
+ path: path!("/main.rs").into(),
+ mode: EditFileMode::Edit,
+ },
+ stream_tx,
+ cx,
+ )
+ });
+ stream_rx.expect_update_fields().await;
+ let diff = stream_rx.expect_diff().await;
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_))));
+ drop(edit);
+ cx.run_until_parked();
+ diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_))));
+ }
}
fn init_test(cx: &mut TestAppContext) {
@@ -118,15 +118,19 @@ impl AgentTool for FetchTool {
type Input = FetchToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "fetch".into()
+ fn name() -> &'static str {
+ "fetch"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Fetch
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
match input {
Ok(input) => format!("Fetch {}", MarkdownEscaped(&input.url)).into(),
Err(_) => "Fetch URL".into(),
@@ -136,12 +140,17 @@ impl AgentTool for FetchTool {
fn run(
self: Arc<Self>,
input: Self::Input,
- _event_stream: ToolCallEventStream,
+ event_stream: ToolCallEventStream,
cx: &mut App,
) -> Task<Result<Self::Output>> {
+ let authorize = event_stream.authorize(input.url.clone(), cx);
+
let text = cx.background_spawn({
let http_client = self.http_client.clone();
- async move { Self::build_message(http_client, &input.url).await }
+ async move {
+ authorize.await?;
+ Self::build_message(http_client, &input.url).await
+ }
});
cx.foreground_executor().spawn(async move {
@@ -31,7 +31,6 @@ pub struct FindPathToolInput {
/// You can get back the first two paths by providing a glob of "*thing*.txt"
/// </example>
pub glob: String,
-
/// Optional starting position for paginated results (0-based).
/// When not provided, starts from the beginning.
#[serde(default)]
@@ -86,15 +85,19 @@ impl AgentTool for FindPathTool {
type Input = FindPathToolInput;
type Output = FindPathToolOutput;
- fn name(&self) -> SharedString {
- "find_path".into()
+ fn name() -> &'static str {
+ "find_path"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Search
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
let mut title = "Find paths".to_string();
if let Ok(input) = input {
title.push_str(&format!(" matching “`{}`”", input.glob));
@@ -116,7 +119,7 @@ impl AgentTool for FindPathTool {
..cmp::min(input.offset + RESULTS_PER_PAGE, matches.len())];
event_stream.update_fields(acp::ToolCallUpdateFields {
- title: Some(if paginated_matches.len() == 0 {
+ title: Some(if paginated_matches.is_empty() {
"No matches".into()
} else if paginated_matches.len() == 1 {
"1 match".into()
@@ -166,16 +169,17 @@ fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Resu
.collect();
cx.background_spawn(async move {
- Ok(snapshots
- .iter()
- .flat_map(|snapshot| {
+ let mut results = Vec::new();
+ for snapshot in snapshots {
+ for entry in snapshot.entries(false, 0) {
let root_name = PathBuf::from(snapshot.root_name());
- snapshot
- .entries(false, 0)
- .map(move |entry| root_name.join(&entry.path))
- .filter(|path| path_matcher.is_match(&path))
- })
- .collect())
+ if path_matcher.is_match(root_name.join(&entry.path)) {
+ results.push(snapshot.abs_path().join(entry.path.as_ref()));
+ }
+ }
+ }
+
+ Ok(results)
})
}
@@ -216,8 +220,8 @@ mod test {
assert_eq!(
matches,
&[
- PathBuf::from("root/apple/banana/carrot"),
- PathBuf::from("root/apple/bandana/carbonara")
+ PathBuf::from(path!("/root/apple/banana/carrot")),
+ PathBuf::from(path!("/root/apple/bandana/carbonara"))
]
);
@@ -228,8 +232,8 @@ mod test {
assert_eq!(
matches,
&[
- PathBuf::from("root/apple/banana/carrot"),
- PathBuf::from("root/apple/bandana/carbonara")
+ PathBuf::from(path!("/root/apple/banana/carrot")),
+ PathBuf::from(path!("/root/apple/bandana/carbonara"))
]
);
}
@@ -27,8 +27,7 @@ use util::paths::PathMatcher;
/// - DO NOT use HTML entities solely to escape characters in the tool parameters.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct GrepToolInput {
- /// A regex pattern to search for in the entire project. Note that the regex
- /// will be parsed by the Rust `regex` crate.
+ /// A regex pattern to search for in the entire project. Note that the regex will be parsed by the Rust `regex` crate.
///
/// Do NOT specify a path here! This will only be matched against the code **content**.
pub regex: String,
@@ -68,15 +67,19 @@ impl AgentTool for GrepTool {
type Input = GrepToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "grep".into()
+ fn name() -> &'static str {
+ "grep"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Search
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
match input {
Ok(input) => {
let page = input.page();
@@ -318,7 +321,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
serde_json::json!({
@@ -403,7 +406,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
serde_json::json!({
@@ -478,7 +481,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
// Create test file with syntax structures
fs.insert_tree(
@@ -763,7 +766,7 @@ mod tests {
if cfg!(windows) {
result.replace("root\\", "root/")
} else {
- result.to_string()
+ result
}
}
Err(e) => panic!("Failed to run grep tool: {}", e),
@@ -10,14 +10,12 @@ use std::fmt::Write;
use std::{path::Path, sync::Arc};
use util::markdown::MarkdownInlineCode;
-/// Lists files and directories in a given path. Prefer the `grep` or
-/// `find_path` tools when searching the codebase.
+/// Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ListDirectoryToolInput {
/// The fully-qualified path of the directory to list in the project.
///
- /// This path should never be absolute, and the first component
- /// of the path should always be a root directory in a project.
+ /// This path should never be absolute, and the first component of the path should always be a root directory in a project.
///
/// <example>
/// If the project has the following root directories:
@@ -53,15 +51,19 @@ impl AgentTool for ListDirectoryTool {
type Input = ListDirectoryToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "list_directory".into()
+ fn name() -> &'static str {
+ "list_directory"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Read
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
let path = MarkdownInlineCode(&input.path);
format!("List the {path} directory's contents").into()
@@ -8,14 +8,11 @@ use serde::{Deserialize, Serialize};
use std::{path::Path, sync::Arc};
use util::markdown::MarkdownInlineCode;
-/// Moves or rename a file or directory in the project, and returns confirmation
-/// that the move succeeded.
+/// Moves or rename a file or directory in the project, and returns confirmation that the move succeeded.
///
-/// If the source and destination directories are the same, but the filename is
-/// different, this performs a rename. Otherwise, it performs a move.
+/// If the source and destination directories are the same, but the filename is different, this performs a rename. Otherwise, it performs a move.
///
-/// This tool should be used when it's desirable to move or rename a file or
-/// directory without changing its contents at all.
+/// This tool should be used when it's desirable to move or rename a file or directory without changing its contents at all.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct MovePathToolInput {
/// The source path of the file or directory to move/rename.
@@ -55,15 +52,19 @@ impl AgentTool for MovePathTool {
type Input = MovePathToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "move_path".into()
+ fn name() -> &'static str {
+ "move_path"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Move
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
let src = MarkdownInlineCode(&input.source_path);
let dest = MarkdownInlineCode(&input.destination_path);
@@ -11,6 +11,7 @@ use crate::{AgentTool, ToolCallEventStream};
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
+#[schemars(inline)]
pub enum Timezone {
/// Use UTC for the datetime.
Utc,
@@ -32,15 +33,19 @@ impl AgentTool for NowTool {
type Input = NowToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "now".into()
+ fn name() -> &'static str {
+ "now"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Other
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"Get current time".into()
}
@@ -8,19 +8,15 @@ use serde::{Deserialize, Serialize};
use std::{path::PathBuf, sync::Arc};
use util::markdown::MarkdownEscaped;
-/// This tool opens a file or URL with the default application associated with
-/// it on the user's operating system:
+/// This tool opens a file or URL with the default application associated with it on the user's operating system:
///
/// - On macOS, it's equivalent to the `open` command
/// - On Windows, it's equivalent to `start`
/// - On Linux, it uses something like `xdg-open`, `gio open`, `gnome-open`, `kde-open`, `wslview` as appropriate
///
-/// For example, it can open a web browser with a URL, open a PDF file with the
-/// default PDF viewer, etc.
+/// For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc.
///
-/// You MUST ONLY use this tool when the user has explicitly requested opening
-/// something. You MUST NEVER assume that the user would like for you to use
-/// this tool.
+/// You MUST ONLY use this tool when the user has explicitly requested opening something. You MUST NEVER assume that the user would like for you to use this tool.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
pub struct OpenToolInput {
/// The path or URL to open with the default application.
@@ -41,15 +37,19 @@ impl AgentTool for OpenTool {
type Input = OpenToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "open".into()
+ fn name() -> &'static str {
+ "open"
}
- fn kind(&self) -> ToolKind {
+ fn kind() -> ToolKind {
ToolKind::Execute
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
format!("Open `{}`", MarkdownEscaped(&input.path_or_url)).into()
} else {
@@ -65,7 +65,7 @@ impl AgentTool for OpenTool {
) -> Task<Result<Self::Output>> {
// If path_or_url turns out to be a path in the project, make it absolute.
let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx);
- let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())), cx);
+ let authorize = event_stream.authorize(self.initial_title(Ok(input.clone()), cx), cx);
cx.background_spawn(async move {
authorize.await?;
@@ -11,6 +11,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;
+use util::markdown::MarkdownCodeBlock;
use crate::{AgentTool, ToolCallEventStream};
@@ -21,8 +22,7 @@ use crate::{AgentTool, ToolCallEventStream};
pub struct ReadFileToolInput {
/// The relative path of the file to read.
///
- /// This path should never be absolute, and the first component
- /// of the path should always be a root directory in a project.
+ /// This path should never be absolute, and the first component of the path should always be a root directory in a project.
///
/// <example>
/// If the project has the following root directories:
@@ -34,11 +34,9 @@ pub struct ReadFileToolInput {
/// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`.
/// </example>
pub path: String,
-
/// Optional line number to start reading on (1-based index)
#[serde(default)]
pub start_line: Option<u32>,
-
/// Optional line number to end reading on (1-based index, inclusive)
#[serde(default)]
pub end_line: Option<u32>,
@@ -62,31 +60,34 @@ impl AgentTool for ReadFileTool {
type Input = ReadFileToolInput;
type Output = LanguageModelToolResultContent;
- fn name(&self) -> SharedString {
- "read_file".into()
+ fn name() -> &'static str {
+ "read_file"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Read
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
- if let Ok(input) = input {
- let path = &input.path;
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ cx: &mut App,
+ ) -> SharedString {
+ if let Ok(input) = input
+ && let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx)
+ && let Some(path) = self
+ .project
+ .read(cx)
+ .short_full_path_for_project_path(&project_path, cx)
+ {
match (input.start_line, input.end_line) {
(Some(start), Some(end)) => {
- format!(
- "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))",
- path, start, end, path, start, end
- )
+ format!("Read file `{}` (lines {}-{})", path.display(), start, end,)
}
(Some(start), None) => {
- format!(
- "[Read file `{}` (from line {})](@selection:{}:({}-{}))",
- path, start, path, start, start
- )
+ format!("Read file `{}` (from line {})", path.display(), start)
}
- _ => format!("[Read file `{}`](@file:{})", path, path),
+ _ => format!("Read file `{}`", path.display()),
}
.into()
} else {
@@ -103,6 +104,12 @@ impl AgentTool for ReadFileTool {
let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!("Path {} not found in project", &input.path)));
};
+ let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else {
+ return Task::ready(Err(anyhow!(
+ "Failed to convert {} to absolute path",
+ &input.path
+ )));
+ };
// Error out if this path is either excluded or private in global settings
let global_settings = WorktreeSettings::get_global(cx);
@@ -138,6 +145,14 @@ impl AgentTool for ReadFileTool {
let file_path = input.path.clone();
+ event_stream.update_fields(ToolCallUpdateFields {
+ locations: Some(vec![acp::ToolCallLocation {
+ path: abs_path,
+ line: input.start_line.map(|line| line.saturating_sub(1)),
+ }]),
+ ..Default::default()
+ });
+
if image_store::is_image_file(&self.project, &project_path, cx) {
return cx.spawn(async move |cx| {
let image_entity: Entity<ImageItem> = cx
@@ -246,21 +261,25 @@ impl AgentTool for ReadFileTool {
};
project.update(cx, |project, cx| {
- if let Some(abs_path) = project.absolute_path(&project_path, cx) {
- project.set_agent_location(
- Some(AgentLocation {
- buffer: buffer.downgrade(),
- position: anchor.unwrap_or(text::Anchor::MIN),
- }),
- cx,
- );
+ project.set_agent_location(
+ Some(AgentLocation {
+ buffer: buffer.downgrade(),
+ position: anchor.unwrap_or(text::Anchor::MIN),
+ }),
+ cx,
+ );
+ if let Ok(LanguageModelToolResultContent::Text(text)) = &result {
+ let markdown = MarkdownCodeBlock {
+ tag: &input.path,
+ text,
+ }
+ .to_string();
event_stream.update_fields(ToolCallUpdateFields {
- locations: Some(vec![acp::ToolCallLocation {
- path: abs_path,
- line: input.start_line.map(|line| line.saturating_sub(1)),
+ content: Some(vec![acp::ToolCallContent::Content {
+ content: markdown.into(),
}]),
..Default::default()
- });
+ })
}
})?;
@@ -1,19 +1,19 @@
use agent_client_protocol as acp;
use anyhow::Result;
-use futures::{FutureExt as _, future::Shared};
-use gpui::{App, AppContext, Entity, SharedString, Task};
-use project::{Project, terminals::TerminalKind};
+use gpui::{App, Entity, SharedString, Task};
+use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{
path::{Path, PathBuf},
+ rc::Rc,
sync::Arc,
};
-use util::{ResultExt, get_system_shell, markdown::MarkdownInlineCode};
+use util::markdown::MarkdownInlineCode;
-use crate::{AgentTool, ToolCallEventStream};
+use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream};
-const COMMAND_OUTPUT_LIMIT: usize = 16 * 1024;
+const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024;
/// Executes a shell one-liner and returns the combined output.
///
@@ -36,28 +36,14 @@ pub struct TerminalToolInput {
pub struct TerminalTool {
project: Entity<Project>,
- determine_shell: Shared<Task<String>>,
+ environment: Rc<dyn ThreadEnvironment>,
}
impl TerminalTool {
- pub fn new(project: Entity<Project>, cx: &mut App) -> Self {
- let determine_shell = cx.background_spawn(async move {
- if cfg!(windows) {
- return get_system_shell();
- }
-
- if which::which("bash").is_ok() {
- log::info!("agent selected bash for terminal tool");
- "bash".into()
- } else {
- let shell = get_system_shell();
- log::info!("agent selected {shell} for terminal tool");
- shell
- }
- });
+ pub fn new(project: Entity<Project>, environment: Rc<dyn ThreadEnvironment>) -> Self {
Self {
project,
- determine_shell: determine_shell.shared(),
+ environment,
}
}
}
@@ -66,15 +52,19 @@ impl AgentTool for TerminalTool {
type Input = TerminalToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "terminal".into()
+ fn name() -> &'static str {
+ "terminal"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Execute
}
- fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
if let Ok(input) = input {
let mut lines = input.command.lines();
let first_line = lines.next().unwrap_or_default();
@@ -102,128 +92,49 @@ impl AgentTool for TerminalTool {
event_stream: ToolCallEventStream,
cx: &mut App,
) -> Task<Result<Self::Output>> {
- let language_registry = self.project.read(cx).languages().clone();
let working_dir = match working_dir(&input, &self.project, cx) {
Ok(dir) => dir,
Err(err) => return Task::ready(Err(err)),
};
- let program = self.determine_shell.clone();
- let command = if cfg!(windows) {
- format!("$null | & {{{}}}", input.command.replace("\"", "'"))
- } else if let Some(cwd) = working_dir
- .as_ref()
- .and_then(|cwd| cwd.as_os_str().to_str())
- {
- // Make sure once we're *inside* the shell, we cd into `cwd`
- format!("(cd {cwd}; {}) </dev/null", input.command)
- } else {
- format!("({}) </dev/null", input.command)
- };
- let args = vec!["-c".into(), command];
-
- let env = match &working_dir {
- Some(dir) => self.project.update(cx, |project, cx| {
- project.directory_environment(dir.as_path().into(), cx)
- }),
- None => Task::ready(None).shared(),
- };
- let env = cx.spawn(async move |_| {
- let mut env = env.await.unwrap_or_default();
- if cfg!(unix) {
- env.insert("PAGER".into(), "cat".into());
- }
- env
- });
-
- let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())), cx);
-
- cx.spawn({
- async move |cx| {
- authorize.await?;
-
- let program = program.await;
- let env = env.await;
- let terminal = self
- .project
- .update(cx, |project, cx| {
- project.create_terminal(
- TerminalKind::Task(task::SpawnInTerminal {
- command: Some(program),
- args,
- cwd: working_dir.clone(),
- env,
- ..Default::default()
- }),
- cx,
- )
- })?
- .await?;
- let acp_terminal = cx.new(|cx| {
- acp_thread::Terminal::new(
- input.command.clone(),
- working_dir.clone(),
- terminal.clone(),
- language_registry,
- cx,
- )
- })?;
- event_stream.update_terminal(acp_terminal.clone());
-
- let exit_status = terminal
- .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
- .await;
- let (content, content_line_count) = terminal.read_with(cx, |terminal, _| {
- (terminal.get_content(), terminal.total_lines())
- })?;
+ let authorize = event_stream.authorize(self.initial_title(Ok(input.clone()), cx), cx);
+ cx.spawn(async move |cx| {
+ authorize.await?;
+
+ let terminal = self
+ .environment
+ .create_terminal(
+ input.command.clone(),
+ working_dir,
+ Some(COMMAND_OUTPUT_LIMIT),
+ cx,
+ )
+ .await?;
- let (processed_content, finished_with_empty_output) = process_content(
- &content,
- &input.command,
- exit_status.map(portable_pty::ExitStatus::from),
- );
+ let terminal_id = terminal.id(cx)?;
+ event_stream.update_fields(acp::ToolCallUpdateFields {
+ content: Some(vec![acp::ToolCallContent::Terminal { terminal_id }]),
+ ..Default::default()
+ });
- acp_terminal
- .update(cx, |terminal, cx| {
- terminal.finish(
- exit_status,
- content.len(),
- processed_content.len(),
- content_line_count,
- finished_with_empty_output,
- cx,
- );
- })
- .log_err();
+ let exit_status = terminal.wait_for_exit(cx)?.await;
+ let output = terminal.current_output(cx)?;
- Ok(processed_content)
- }
+ Ok(process_content(output, &input.command, exit_status))
})
}
}
fn process_content(
- content: &str,
+ output: acp::TerminalOutputResponse,
command: &str,
- exit_status: Option<portable_pty::ExitStatus>,
-) -> (String, bool) {
- let should_truncate = content.len() > COMMAND_OUTPUT_LIMIT;
-
- let content = if should_truncate {
- let mut end_ix = COMMAND_OUTPUT_LIMIT.min(content.len());
- while !content.is_char_boundary(end_ix) {
- end_ix -= 1;
- }
- // Don't truncate mid-line, clear the remainder of the last line
- end_ix = content[..end_ix].rfind('\n').unwrap_or(end_ix);
- &content[..end_ix]
- } else {
- content
- };
- let content = content.trim();
+ exit_status: acp::TerminalExitStatus,
+) -> String {
+ let content = output.output.trim();
let is_empty = content.is_empty();
+
let content = format!("```\n{content}\n```");
- let content = if should_truncate {
+ let content = if output.truncated {
format!(
"Command output too long. The first {} bytes:\n\n{content}",
content.len(),
@@ -232,24 +143,21 @@ fn process_content(
content
};
- let content = match exit_status {
- Some(exit_status) if exit_status.success() => {
+ let content = match exit_status.exit_code {
+ Some(0) => {
if is_empty {
"Command executed successfully.".to_string()
} else {
- content.to_string()
+ content
}
}
- Some(exit_status) => {
+ Some(exit_code) => {
if is_empty {
- format!(
- "Command \"{command}\" failed with exit code {}.",
- exit_status.exit_code()
- )
+ format!("Command \"{command}\" failed with exit code {}.", exit_code)
} else {
format!(
"Command \"{command}\" failed with exit code {}.\n\n{content}",
- exit_status.exit_code()
+ exit_code
)
}
}
@@ -260,7 +168,7 @@ fn process_content(
)
}
};
- (content, is_empty)
+ content
}
fn working_dir(
@@ -303,169 +211,3 @@ fn working_dir(
anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees.");
}
}
-
-#[cfg(test)]
-mod tests {
- use agent_settings::AgentSettings;
- use editor::EditorSettings;
- use fs::RealFs;
- use gpui::{BackgroundExecutor, TestAppContext};
- use pretty_assertions::assert_eq;
- use serde_json::json;
- use settings::{Settings, SettingsStore};
- use terminal::terminal_settings::TerminalSettings;
- use theme::ThemeSettings;
- use util::test::TempTree;
-
- use crate::ThreadEvent;
-
- use super::*;
-
- fn init_test(executor: &BackgroundExecutor, cx: &mut TestAppContext) {
- zlog::init_test();
-
- executor.allow_parking();
- cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- ThemeSettings::register(cx);
- TerminalSettings::register(cx);
- EditorSettings::register(cx);
- AgentSettings::register(cx);
- });
- }
-
- #[gpui::test]
- async fn test_interactive_command(executor: BackgroundExecutor, cx: &mut TestAppContext) {
- if cfg!(windows) {
- return;
- }
-
- init_test(&executor, cx);
-
- let fs = Arc::new(RealFs::new(None, executor));
- let tree = TempTree::new(json!({
- "project": {},
- }));
- let project: Entity<Project> =
- Project::test(fs, [tree.path().join("project").as_path()], cx).await;
-
- let input = TerminalToolInput {
- command: "cat".to_owned(),
- cd: tree
- .path()
- .join("project")
- .as_path()
- .to_string_lossy()
- .to_string(),
- };
- let (event_stream_tx, mut event_stream_rx) = ToolCallEventStream::test();
- let result = cx
- .update(|cx| Arc::new(TerminalTool::new(project, cx)).run(input, event_stream_tx, cx));
-
- let auth = event_stream_rx.expect_authorization().await;
- auth.response.send(auth.options[0].id.clone()).unwrap();
- event_stream_rx.expect_terminal().await;
- assert_eq!(result.await.unwrap(), "Command executed successfully.");
- }
-
- #[gpui::test]
- async fn test_working_directory(executor: BackgroundExecutor, cx: &mut TestAppContext) {
- if cfg!(windows) {
- return;
- }
-
- init_test(&executor, cx);
-
- let fs = Arc::new(RealFs::new(None, executor));
- let tree = TempTree::new(json!({
- "project": {},
- "other-project": {},
- }));
- let project: Entity<Project> =
- Project::test(fs, [tree.path().join("project").as_path()], cx).await;
-
- let check = |input, expected, cx: &mut TestAppContext| {
- let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
- let result = cx.update(|cx| {
- Arc::new(TerminalTool::new(project.clone(), cx)).run(input, stream_tx, cx)
- });
- cx.run_until_parked();
- let event = stream_rx.try_next();
- if let Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(auth)))) = event {
- auth.response.send(auth.options[0].id.clone()).unwrap();
- }
-
- cx.spawn(async move |_| {
- let output = result.await;
- assert_eq!(output.ok(), expected);
- })
- };
-
- check(
- TerminalToolInput {
- command: "pwd".into(),
- cd: ".".into(),
- },
- Some(format!(
- "```\n{}\n```",
- tree.path().join("project").display()
- )),
- cx,
- )
- .await;
-
- check(
- TerminalToolInput {
- command: "pwd".into(),
- cd: "other-project".into(),
- },
- None, // other-project is a dir, but *not* a worktree (yet)
- cx,
- )
- .await;
-
- // Absolute path above the worktree root
- check(
- TerminalToolInput {
- command: "pwd".into(),
- cd: tree.path().to_string_lossy().into(),
- },
- None,
- cx,
- )
- .await;
-
- project
- .update(cx, |project, cx| {
- project.create_worktree(tree.path().join("other-project"), true, cx)
- })
- .await
- .unwrap();
-
- check(
- TerminalToolInput {
- command: "pwd".into(),
- cd: "other-project".into(),
- },
- Some(format!(
- "```\n{}\n```",
- tree.path().join("other-project").display()
- )),
- cx,
- )
- .await;
-
- check(
- TerminalToolInput {
- command: "pwd".into(),
- cd: ".".into(),
- },
- None,
- cx,
- )
- .await;
- }
-}
@@ -11,8 +11,7 @@ use crate::{AgentTool, ToolCallEventStream};
/// Use this tool when you need to work through complex problems, develop strategies, or outline approaches before taking action.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ThinkingToolInput {
- /// Content to think about. This should be a description of what to think about or
- /// a problem to solve.
+ /// Content to think about. This should be a description of what to think about or a problem to solve.
content: String,
}
@@ -22,15 +21,19 @@ impl AgentTool for ThinkingTool {
type Input = ThinkingToolInput;
type Output = String;
- fn name(&self) -> SharedString {
- "thinking".into()
+ fn name() -> &'static str {
+ "thinking"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Think
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"Thinking".into()
}
@@ -14,7 +14,7 @@ use ui::prelude::*;
use web_search::WebSearchRegistry;
/// Search the web for information using your query.
-/// Use this when you need real-time information, facts, or data that might not be in your training. \
+/// Use this when you need real-time information, facts, or data that might not be in your training.
/// Results will include snippets and links from relevant web pages.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct WebSearchToolInput {
@@ -40,15 +40,19 @@ impl AgentTool for WebSearchTool {
type Input = WebSearchToolInput;
type Output = WebSearchToolOutput;
- fn name(&self) -> SharedString {
- "web_search".into()
+ fn name() -> &'static str {
+ "web_search"
}
- fn kind(&self) -> acp::ToolKind {
+ fn kind() -> acp::ToolKind {
acp::ToolKind::Fetch
}
- fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
"Searching the Web".into()
}
@@ -6,7 +6,7 @@ publish.workspace = true
license = "GPL-3.0-or-later"
[features]
-test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support"]
+test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support", "dep:env_logger", "client/test-support", "dep:gpui_tokio", "reqwest_client/test-support"]
e2e = []
[lints]
@@ -17,37 +17,36 @@ path = "src/agent_servers.rs"
doctest = false
[dependencies]
+acp_tools.workspace = true
acp_thread.workspace = true
action_log.workspace = true
agent-client-protocol.workspace = true
agent_settings.workspace = true
-agentic-coding-protocol.workspace = true
anyhow.workspace = true
+client = { workspace = true, optional = true }
collections.workspace = true
-context_server.workspace = true
+env_logger = { workspace = true, optional = true }
+fs.workspace = true
futures.workspace = true
gpui.workspace = true
+gpui_tokio = { workspace = true, optional = true }
indoc.workspace = true
-itertools.workspace = true
+language.workspace = true
language_model.workspace = true
language_models.workspace = true
log.workspace = true
-paths.workspace = true
project.workspace = true
-rand.workspace = true
-schemars.workspace = true
+reqwest_client = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
-strum.workspace = true
+task.workspace = true
tempfile.workspace = true
thiserror.workspace = true
ui.workspace = true
util.workspace = true
-uuid.workspace = true
watch.workspace = true
-which.workspace = true
workspace-hack.workspace = true
[target.'cfg(unix)'.dependencies]
@@ -55,8 +54,12 @@ libc.workspace = true
nix.workspace = true
[dev-dependencies]
+client = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
+fs.workspace = true
language.workspace = true
indoc.workspace = true
acp_thread = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
+gpui_tokio.workspace = true
+reqwest_client = { workspace = true, features = ["test-support"] }
@@ -1,34 +1,501 @@
-use std::{path::Path, rc::Rc};
-
-use crate::AgentServerCommand;
use acp_thread::AgentConnection;
-use anyhow::Result;
-use gpui::AsyncApp;
+use acp_tools::AcpConnectionRegistry;
+use action_log::ActionLog;
+use agent_client_protocol::{self as acp, Agent as _, ErrorCode};
+use anyhow::anyhow;
+use collections::HashMap;
+use futures::AsyncBufReadExt as _;
+use futures::io::BufReader;
+use project::Project;
+use project::agent_server_store::AgentServerCommand;
+use serde::Deserialize;
+
+use std::path::PathBuf;
+use std::{any::Any, cell::RefCell};
+use std::{path::Path, rc::Rc};
use thiserror::Error;
-mod v0;
-mod v1;
+use anyhow::{Context as _, Result};
+use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntity};
+
+use acp_thread::{AcpThread, AuthRequired, LoadError};
#[derive(Debug, Error)]
#[error("Unsupported version")]
pub struct UnsupportedVersion;
+pub struct AcpConnection {
+ server_name: SharedString,
+ connection: Rc<acp::ClientSideConnection>,
+ sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
+ auth_methods: Vec<acp::AuthMethod>,
+ agent_capabilities: acp::AgentCapabilities,
+ root_dir: PathBuf,
+ _io_task: Task<Result<()>>,
+ _wait_task: Task<Result<()>>,
+ _stderr_task: Task<Result<()>>,
+}
+
+pub struct AcpSession {
+ thread: WeakEntity<AcpThread>,
+ suppress_abort_err: bool,
+}
+
pub async fn connect(
- server_name: &'static str,
+ server_name: SharedString,
command: AgentServerCommand,
root_dir: &Path,
+ is_remote: bool,
cx: &mut AsyncApp,
) -> Result<Rc<dyn AgentConnection>> {
- let conn = v1::AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await;
-
- match conn {
- Ok(conn) => Ok(Rc::new(conn) as _),
- Err(err) if err.is::<UnsupportedVersion>() => {
- // Consider re-using initialize response and subprocess when adding another version here
- let conn: Rc<dyn AgentConnection> =
- Rc::new(v0::AcpConnection::stdio(server_name, command, root_dir, cx).await?);
- Ok(conn)
+ let conn = AcpConnection::stdio(server_name, command.clone(), root_dir, is_remote, cx).await?;
+ Ok(Rc::new(conn) as _)
+}
+
+const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
+
+impl AcpConnection {
+ pub async fn stdio(
+ server_name: SharedString,
+ command: AgentServerCommand,
+ root_dir: &Path,
+ is_remote: bool,
+ cx: &mut AsyncApp,
+ ) -> Result<Self> {
+ let mut child = util::command::new_smol_command(command.path);
+ child
+ .args(command.args.iter().map(|arg| arg.as_str()))
+ .envs(command.env.iter().flatten())
+ .stdin(std::process::Stdio::piped())
+ .stdout(std::process::Stdio::piped())
+ .stderr(std::process::Stdio::piped())
+ .kill_on_drop(true);
+ if !is_remote {
+ child.current_dir(root_dir);
+ }
+ let mut child = child.spawn()?;
+
+ let stdout = child.stdout.take().context("Failed to take stdout")?;
+ let stdin = child.stdin.take().context("Failed to take stdin")?;
+ let stderr = child.stderr.take().context("Failed to take stderr")?;
+ log::trace!("Spawned (pid: {})", child.id());
+
+ let sessions = Rc::new(RefCell::new(HashMap::default()));
+
+ let client = ClientDelegate {
+ sessions: sessions.clone(),
+ cx: cx.clone(),
+ };
+ let (connection, io_task) = acp::ClientSideConnection::new(client, stdin, stdout, {
+ let foreground_executor = cx.foreground_executor().clone();
+ move |fut| {
+ foreground_executor.spawn(fut).detach();
+ }
+ });
+
+ let io_task = cx.background_spawn(io_task);
+
+ let stderr_task = cx.background_spawn(async move {
+ let mut stderr = BufReader::new(stderr);
+ let mut line = String::new();
+ while let Ok(n) = stderr.read_line(&mut line).await
+ && n > 0
+ {
+ log::warn!("agent stderr: {}", &line);
+ line.clear();
+ }
+ Ok(())
+ });
+
+ let wait_task = cx.spawn({
+ let sessions = sessions.clone();
+ async move |cx| {
+ let status = child.status().await?;
+
+ for session in sessions.borrow().values() {
+ session
+ .thread
+ .update(cx, |thread, cx| {
+ thread.emit_load_error(LoadError::Exited { status }, cx)
+ })
+ .ok();
+ }
+
+ anyhow::Ok(())
+ }
+ });
+
+ let connection = Rc::new(connection);
+
+ cx.update(|cx| {
+ AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| {
+ registry.set_active_connection(server_name.clone(), &connection, cx)
+ });
+ })?;
+
+ let response = connection
+ .initialize(acp::InitializeRequest {
+ protocol_version: acp::VERSION,
+ client_capabilities: acp::ClientCapabilities {
+ fs: acp::FileSystemCapability {
+ read_text_file: true,
+ write_text_file: true,
+ },
+ terminal: true,
+ },
+ })
+ .await?;
+
+ if response.protocol_version < MINIMUM_SUPPORTED_VERSION {
+ return Err(UnsupportedVersion.into());
+ }
+
+ Ok(Self {
+ auth_methods: response.auth_methods,
+ root_dir: root_dir.to_owned(),
+ connection,
+ server_name,
+ sessions,
+ agent_capabilities: response.agent_capabilities,
+ _io_task: io_task,
+ _wait_task: wait_task,
+ _stderr_task: stderr_task,
+ })
+ }
+
+ pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities {
+ &self.agent_capabilities.prompt_capabilities
+ }
+
+ pub fn root_dir(&self) -> &Path {
+ &self.root_dir
+ }
+}
+
+impl AgentConnection for AcpConnection {
+ fn new_thread(
+ self: Rc<Self>,
+ project: Entity<Project>,
+ cwd: &Path,
+ cx: &mut App,
+ ) -> Task<Result<Entity<AcpThread>>> {
+ let conn = self.connection.clone();
+ let sessions = self.sessions.clone();
+ let cwd = cwd.to_path_buf();
+ let context_server_store = project.read(cx).context_server_store().read(cx);
+ let mcp_servers = if project.read(cx).is_local() {
+ context_server_store
+ .configured_server_ids()
+ .iter()
+ .filter_map(|id| {
+ let configuration = context_server_store.configuration_for_server(id)?;
+ let command = configuration.command();
+ Some(acp::McpServer {
+ name: id.0.to_string(),
+ command: command.path.clone(),
+ args: command.args.clone(),
+ env: if let Some(env) = command.env.as_ref() {
+ env.iter()
+ .map(|(name, value)| acp::EnvVariable {
+ name: name.clone(),
+ value: value.clone(),
+ })
+ .collect()
+ } else {
+ vec![]
+ },
+ })
+ })
+ .collect()
+ } else {
+ // In SSH projects, the external agent is running on the remote
+ // machine, and currently we only run MCP servers on the local
+ // machine. So don't pass any MCP servers to the agent in that case.
+ Vec::new()
+ };
+
+ cx.spawn(async move |cx| {
+ let response = conn
+ .new_session(acp::NewSessionRequest { mcp_servers, cwd })
+ .await
+ .map_err(|err| {
+ if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
+ let mut error = AuthRequired::new();
+
+ if err.message != acp::ErrorCode::AUTH_REQUIRED.message {
+ error = error.with_description(err.message);
+ }
+
+ anyhow!(error)
+ } else {
+ anyhow!(err)
+ }
+ })?;
+
+ let session_id = response.session_id;
+ let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
+ let thread = cx.new(|cx| {
+ AcpThread::new(
+ self.server_name.clone(),
+ self.clone(),
+ project,
+ action_log,
+ session_id.clone(),
+ // ACP doesn't currently support per-session prompt capabilities or changing capabilities dynamically.
+ watch::Receiver::constant(self.agent_capabilities.prompt_capabilities),
+ cx,
+ )
+ })?;
+
+ let session = AcpSession {
+ thread: thread.downgrade(),
+ suppress_abort_err: false,
+ };
+ sessions.borrow_mut().insert(session_id, session);
+
+ Ok(thread)
+ })
+ }
+
+ fn auth_methods(&self) -> &[acp::AuthMethod] {
+ &self.auth_methods
+ }
+
+ fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
+ let conn = self.connection.clone();
+ cx.foreground_executor().spawn(async move {
+ let result = conn
+ .authenticate(acp::AuthenticateRequest {
+ method_id: method_id.clone(),
+ })
+ .await?;
+
+ Ok(result)
+ })
+ }
+
+ fn prompt(
+ &self,
+ _id: Option<acp_thread::UserMessageId>,
+ params: acp::PromptRequest,
+ cx: &mut App,
+ ) -> Task<Result<acp::PromptResponse>> {
+ let conn = self.connection.clone();
+ let sessions = self.sessions.clone();
+ let session_id = params.session_id.clone();
+ cx.foreground_executor().spawn(async move {
+ let result = conn.prompt(params).await;
+
+ let mut suppress_abort_err = false;
+
+ if let Some(session) = sessions.borrow_mut().get_mut(&session_id) {
+ suppress_abort_err = session.suppress_abort_err;
+ session.suppress_abort_err = false;
+ }
+
+ match result {
+ Ok(response) => Ok(response),
+ Err(err) => {
+ if err.code != ErrorCode::INTERNAL_ERROR.code {
+ anyhow::bail!(err)
+ }
+
+ let Some(data) = &err.data else {
+ anyhow::bail!(err)
+ };
+
+ // Temporary workaround until the following PR is generally available:
+ // https://github.com/google-gemini/gemini-cli/pull/6656
+
+ #[derive(Deserialize)]
+ #[serde(deny_unknown_fields)]
+ struct ErrorDetails {
+ details: Box<str>,
+ }
+
+ match serde_json::from_value(data.clone()) {
+ Ok(ErrorDetails { details }) => {
+ if suppress_abort_err
+ && (details.contains("This operation was aborted")
+ || details.contains("The user aborted a request"))
+ {
+ Ok(acp::PromptResponse {
+ stop_reason: acp::StopReason::Cancelled,
+ })
+ } else {
+ Err(anyhow!(details))
+ }
+ }
+ Err(_) => Err(anyhow!(err)),
+ }
+ }
+ }
+ })
+ }
+
+ fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) {
+ if let Some(session) = self.sessions.borrow_mut().get_mut(session_id) {
+ session.suppress_abort_err = true;
}
- Err(err) => Err(err),
+ let conn = self.connection.clone();
+ let params = acp::CancelNotification {
+ session_id: session_id.clone(),
+ };
+ cx.foreground_executor()
+ .spawn(async move { conn.cancel(params).await })
+ .detach();
+ }
+
+ fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+ self
+ }
+}
+
+struct ClientDelegate {
+ sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
+ cx: AsyncApp,
+}
+
+impl acp::Client for ClientDelegate {
+ async fn request_permission(
+ &self,
+ arguments: acp::RequestPermissionRequest,
+ ) -> Result<acp::RequestPermissionResponse, acp::Error> {
+ let cx = &mut self.cx.clone();
+
+ let task = self
+ .session_thread(&arguments.session_id)?
+ .update(cx, |thread, cx| {
+ thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx)
+ })??;
+
+ let outcome = task.await;
+
+ Ok(acp::RequestPermissionResponse { outcome })
+ }
+
+ async fn write_text_file(
+ &self,
+ arguments: acp::WriteTextFileRequest,
+ ) -> Result<(), acp::Error> {
+ let cx = &mut self.cx.clone();
+ let task = self
+ .session_thread(&arguments.session_id)?
+ .update(cx, |thread, cx| {
+ thread.write_text_file(arguments.path, arguments.content, cx)
+ })?;
+
+ task.await?;
+
+ Ok(())
+ }
+
+ async fn read_text_file(
+ &self,
+ arguments: acp::ReadTextFileRequest,
+ ) -> Result<acp::ReadTextFileResponse, acp::Error> {
+ let task = self.session_thread(&arguments.session_id)?.update(
+ &mut self.cx.clone(),
+ |thread, cx| {
+ thread.read_text_file(arguments.path, arguments.line, arguments.limit, false, cx)
+ },
+ )?;
+
+ let content = task.await?;
+
+ Ok(acp::ReadTextFileResponse { content })
+ }
+
+ async fn session_notification(
+ &self,
+ notification: acp::SessionNotification,
+ ) -> Result<(), acp::Error> {
+ self.session_thread(¬ification.session_id)?
+ .update(&mut self.cx.clone(), |thread, cx| {
+ thread.handle_session_update(notification.update, cx)
+ })??;
+
+ Ok(())
+ }
+
+ async fn create_terminal(
+ &self,
+ args: acp::CreateTerminalRequest,
+ ) -> Result<acp::CreateTerminalResponse, acp::Error> {
+ let terminal = self
+ .session_thread(&args.session_id)?
+ .update(&mut self.cx.clone(), |thread, cx| {
+ thread.create_terminal(
+ args.command,
+ args.args,
+ args.env,
+ args.cwd,
+ args.output_byte_limit,
+ cx,
+ )
+ })?
+ .await?;
+ Ok(
+ terminal.read_with(&self.cx, |terminal, _| acp::CreateTerminalResponse {
+ terminal_id: terminal.id().clone(),
+ })?,
+ )
+ }
+
+ async fn kill_terminal(&self, args: acp::KillTerminalRequest) -> Result<(), acp::Error> {
+ self.session_thread(&args.session_id)?
+ .update(&mut self.cx.clone(), |thread, cx| {
+ thread.kill_terminal(args.terminal_id, cx)
+ })??;
+
+ Ok(())
+ }
+
+ async fn release_terminal(&self, args: acp::ReleaseTerminalRequest) -> Result<(), acp::Error> {
+ self.session_thread(&args.session_id)?
+ .update(&mut self.cx.clone(), |thread, cx| {
+ thread.release_terminal(args.terminal_id, cx)
+ })??;
+
+ Ok(())
+ }
+
+ async fn terminal_output(
+ &self,
+ args: acp::TerminalOutputRequest,
+ ) -> Result<acp::TerminalOutputResponse, acp::Error> {
+ self.session_thread(&args.session_id)?
+ .read_with(&mut self.cx.clone(), |thread, cx| {
+ let out = thread
+ .terminal(args.terminal_id)?
+ .read(cx)
+ .current_output(cx);
+
+ Ok(out)
+ })?
+ }
+
+ async fn wait_for_terminal_exit(
+ &self,
+ args: acp::WaitForTerminalExitRequest,
+ ) -> Result<acp::WaitForTerminalExitResponse, acp::Error> {
+ let exit_status = self
+ .session_thread(&args.session_id)?
+ .update(&mut self.cx.clone(), |thread, cx| {
+ anyhow::Ok(thread.terminal(args.terminal_id)?.read(cx).wait_for_exit())
+ })??
+ .await;
+
+ Ok(acp::WaitForTerminalExitResponse { exit_status })
+ }
+}
+
+impl ClientDelegate {
+ fn session_thread(&self, session_id: &acp::SessionId) -> Result<WeakEntity<AcpThread>> {
+ let sessions = self.sessions.borrow();
+ sessions
+ .get(session_id)
+ .context("Failed to get session")
+ .map(|session| session.thread.clone())
}
}
@@ -1,516 +0,0 @@
-// Translates old acp agents into the new schema
-use action_log::ActionLog;
-use agent_client_protocol as acp;
-use agentic_coding_protocol::{self as acp_old, AgentRequest as _};
-use anyhow::{Context as _, Result, anyhow};
-use futures::channel::oneshot;
-use gpui::{AppContext as _, AsyncApp, Entity, Task, WeakEntity};
-use project::Project;
-use std::{any::Any, cell::RefCell, path::Path, rc::Rc};
-use ui::App;
-use util::ResultExt as _;
-
-use crate::AgentServerCommand;
-use acp_thread::{AcpThread, AgentConnection, AuthRequired};
-
-#[derive(Clone)]
-struct OldAcpClientDelegate {
- thread: Rc<RefCell<WeakEntity<AcpThread>>>,
- cx: AsyncApp,
- next_tool_call_id: Rc<RefCell<u64>>,
- // sent_buffer_versions: HashMap<Entity<Buffer>, HashMap<u64, BufferSnapshot>>,
-}
-
-impl OldAcpClientDelegate {
- fn new(thread: Rc<RefCell<WeakEntity<AcpThread>>>, cx: AsyncApp) -> Self {
- Self {
- thread,
- cx,
- next_tool_call_id: Rc::new(RefCell::new(0)),
- }
- }
-}
-
-impl acp_old::Client for OldAcpClientDelegate {
- async fn stream_assistant_message_chunk(
- &self,
- params: acp_old::StreamAssistantMessageChunkParams,
- ) -> Result<(), acp_old::Error> {
- let cx = &mut self.cx.clone();
-
- cx.update(|cx| {
- self.thread
- .borrow()
- .update(cx, |thread, cx| match params.chunk {
- acp_old::AssistantMessageChunk::Text { text } => {
- thread.push_assistant_content_block(text.into(), false, cx)
- }
- acp_old::AssistantMessageChunk::Thought { thought } => {
- thread.push_assistant_content_block(thought.into(), true, cx)
- }
- })
- .log_err();
- })?;
-
- Ok(())
- }
-
- async fn request_tool_call_confirmation(
- &self,
- request: acp_old::RequestToolCallConfirmationParams,
- ) -> Result<acp_old::RequestToolCallConfirmationResponse, acp_old::Error> {
- let cx = &mut self.cx.clone();
-
- let old_acp_id = *self.next_tool_call_id.borrow() + 1;
- self.next_tool_call_id.replace(old_acp_id);
-
- let tool_call = into_new_tool_call(
- acp::ToolCallId(old_acp_id.to_string().into()),
- request.tool_call,
- );
-
- let mut options = match request.confirmation {
- acp_old::ToolCallConfirmation::Edit { .. } => vec![(
- acp_old::ToolCallConfirmationOutcome::AlwaysAllow,
- acp::PermissionOptionKind::AllowAlways,
- "Always Allow Edits".to_string(),
- )],
- acp_old::ToolCallConfirmation::Execute { root_command, .. } => vec![(
- acp_old::ToolCallConfirmationOutcome::AlwaysAllow,
- acp::PermissionOptionKind::AllowAlways,
- format!("Always Allow {}", root_command),
- )],
- acp_old::ToolCallConfirmation::Mcp {
- server_name,
- tool_name,
- ..
- } => vec![
- (
- acp_old::ToolCallConfirmationOutcome::AlwaysAllowMcpServer,
- acp::PermissionOptionKind::AllowAlways,
- format!("Always Allow {}", server_name),
- ),
- (
- acp_old::ToolCallConfirmationOutcome::AlwaysAllowTool,
- acp::PermissionOptionKind::AllowAlways,
- format!("Always Allow {}", tool_name),
- ),
- ],
- acp_old::ToolCallConfirmation::Fetch { .. } => vec![(
- acp_old::ToolCallConfirmationOutcome::AlwaysAllow,
- acp::PermissionOptionKind::AllowAlways,
- "Always Allow".to_string(),
- )],
- acp_old::ToolCallConfirmation::Other { .. } => vec![(
- acp_old::ToolCallConfirmationOutcome::AlwaysAllow,
- acp::PermissionOptionKind::AllowAlways,
- "Always Allow".to_string(),
- )],
- };
-
- options.extend([
- (
- acp_old::ToolCallConfirmationOutcome::Allow,
- acp::PermissionOptionKind::AllowOnce,
- "Allow".to_string(),
- ),
- (
- acp_old::ToolCallConfirmationOutcome::Reject,
- acp::PermissionOptionKind::RejectOnce,
- "Reject".to_string(),
- ),
- ]);
-
- let mut outcomes = Vec::with_capacity(options.len());
- let mut acp_options = Vec::with_capacity(options.len());
-
- for (index, (outcome, kind, label)) in options.into_iter().enumerate() {
- outcomes.push(outcome);
- acp_options.push(acp::PermissionOption {
- id: acp::PermissionOptionId(index.to_string().into()),
- name: label,
- kind,
- })
- }
-
- let response = cx
- .update(|cx| {
- self.thread.borrow().update(cx, |thread, cx| {
- thread.request_tool_call_authorization(tool_call.into(), acp_options, cx)
- })
- })??
- .context("Failed to update thread")?
- .await;
-
- let outcome = match response {
- Ok(option_id) => outcomes[option_id.0.parse::<usize>().unwrap_or(0)],
- Err(oneshot::Canceled) => acp_old::ToolCallConfirmationOutcome::Cancel,
- };
-
- Ok(acp_old::RequestToolCallConfirmationResponse {
- id: acp_old::ToolCallId(old_acp_id),
- outcome: outcome,
- })
- }
-
- async fn push_tool_call(
- &self,
- request: acp_old::PushToolCallParams,
- ) -> Result<acp_old::PushToolCallResponse, acp_old::Error> {
- let cx = &mut self.cx.clone();
-
- let old_acp_id = *self.next_tool_call_id.borrow() + 1;
- self.next_tool_call_id.replace(old_acp_id);
-
- cx.update(|cx| {
- self.thread.borrow().update(cx, |thread, cx| {
- thread.upsert_tool_call(
- into_new_tool_call(acp::ToolCallId(old_acp_id.to_string().into()), request),
- cx,
- )
- })
- })??
- .context("Failed to update thread")?;
-
- Ok(acp_old::PushToolCallResponse {
- id: acp_old::ToolCallId(old_acp_id),
- })
- }
-
- async fn update_tool_call(
- &self,
- request: acp_old::UpdateToolCallParams,
- ) -> Result<(), acp_old::Error> {
- let cx = &mut self.cx.clone();
-
- cx.update(|cx| {
- self.thread.borrow().update(cx, |thread, cx| {
- thread.update_tool_call(
- acp::ToolCallUpdate {
- id: acp::ToolCallId(request.tool_call_id.0.to_string().into()),
- fields: acp::ToolCallUpdateFields {
- status: Some(into_new_tool_call_status(request.status)),
- content: Some(
- request
- .content
- .into_iter()
- .map(into_new_tool_call_content)
- .collect::<Vec<_>>(),
- ),
- ..Default::default()
- },
- },
- cx,
- )
- })
- })?
- .context("Failed to update thread")??;
-
- Ok(())
- }
-
- async fn update_plan(&self, request: acp_old::UpdatePlanParams) -> Result<(), acp_old::Error> {
- let cx = &mut self.cx.clone();
-
- cx.update(|cx| {
- self.thread.borrow().update(cx, |thread, cx| {
- thread.update_plan(
- acp::Plan {
- entries: request
- .entries
- .into_iter()
- .map(into_new_plan_entry)
- .collect(),
- },
- cx,
- )
- })
- })?
- .context("Failed to update thread")?;
-
- Ok(())
- }
-
- async fn read_text_file(
- &self,
- acp_old::ReadTextFileParams { path, line, limit }: acp_old::ReadTextFileParams,
- ) -> Result<acp_old::ReadTextFileResponse, acp_old::Error> {
- let content = self
- .cx
- .update(|cx| {
- self.thread.borrow().update(cx, |thread, cx| {
- thread.read_text_file(path, line, limit, false, cx)
- })
- })?
- .context("Failed to update thread")?
- .await?;
- Ok(acp_old::ReadTextFileResponse { content })
- }
-
- async fn write_text_file(
- &self,
- acp_old::WriteTextFileParams { path, content }: acp_old::WriteTextFileParams,
- ) -> Result<(), acp_old::Error> {
- self.cx
- .update(|cx| {
- self.thread
- .borrow()
- .update(cx, |thread, cx| thread.write_text_file(path, content, cx))
- })?
- .context("Failed to update thread")?
- .await?;
-
- Ok(())
- }
-}
-
-fn into_new_tool_call(id: acp::ToolCallId, request: acp_old::PushToolCallParams) -> acp::ToolCall {
- acp::ToolCall {
- id: id,
- title: request.label,
- kind: acp_kind_from_old_icon(request.icon),
- status: acp::ToolCallStatus::InProgress,
- content: request
- .content
- .into_iter()
- .map(into_new_tool_call_content)
- .collect(),
- locations: request
- .locations
- .into_iter()
- .map(into_new_tool_call_location)
- .collect(),
- raw_input: None,
- raw_output: None,
- }
-}
-
-fn acp_kind_from_old_icon(icon: acp_old::Icon) -> acp::ToolKind {
- match icon {
- acp_old::Icon::FileSearch => acp::ToolKind::Search,
- acp_old::Icon::Folder => acp::ToolKind::Search,
- acp_old::Icon::Globe => acp::ToolKind::Search,
- acp_old::Icon::Hammer => acp::ToolKind::Other,
- acp_old::Icon::LightBulb => acp::ToolKind::Think,
- acp_old::Icon::Pencil => acp::ToolKind::Edit,
- acp_old::Icon::Regex => acp::ToolKind::Search,
- acp_old::Icon::Terminal => acp::ToolKind::Execute,
- }
-}
-
-fn into_new_tool_call_status(status: acp_old::ToolCallStatus) -> acp::ToolCallStatus {
- match status {
- acp_old::ToolCallStatus::Running => acp::ToolCallStatus::InProgress,
- acp_old::ToolCallStatus::Finished => acp::ToolCallStatus::Completed,
- acp_old::ToolCallStatus::Error => acp::ToolCallStatus::Failed,
- }
-}
-
-fn into_new_tool_call_content(content: acp_old::ToolCallContent) -> acp::ToolCallContent {
- match content {
- acp_old::ToolCallContent::Markdown { markdown } => markdown.into(),
- acp_old::ToolCallContent::Diff { diff } => acp::ToolCallContent::Diff {
- diff: into_new_diff(diff),
- },
- }
-}
-
-fn into_new_diff(diff: acp_old::Diff) -> acp::Diff {
- acp::Diff {
- path: diff.path,
- old_text: diff.old_text,
- new_text: diff.new_text,
- }
-}
-
-fn into_new_tool_call_location(location: acp_old::ToolCallLocation) -> acp::ToolCallLocation {
- acp::ToolCallLocation {
- path: location.path,
- line: location.line,
- }
-}
-
-fn into_new_plan_entry(entry: acp_old::PlanEntry) -> acp::PlanEntry {
- acp::PlanEntry {
- content: entry.content,
- priority: into_new_plan_priority(entry.priority),
- status: into_new_plan_status(entry.status),
- }
-}
-
-fn into_new_plan_priority(priority: acp_old::PlanEntryPriority) -> acp::PlanEntryPriority {
- match priority {
- acp_old::PlanEntryPriority::Low => acp::PlanEntryPriority::Low,
- acp_old::PlanEntryPriority::Medium => acp::PlanEntryPriority::Medium,
- acp_old::PlanEntryPriority::High => acp::PlanEntryPriority::High,
- }
-}
-
-fn into_new_plan_status(status: acp_old::PlanEntryStatus) -> acp::PlanEntryStatus {
- match status {
- acp_old::PlanEntryStatus::Pending => acp::PlanEntryStatus::Pending,
- acp_old::PlanEntryStatus::InProgress => acp::PlanEntryStatus::InProgress,
- acp_old::PlanEntryStatus::Completed => acp::PlanEntryStatus::Completed,
- }
-}
-
-pub struct AcpConnection {
- pub name: &'static str,
- pub connection: acp_old::AgentConnection,
- pub _child_status: Task<Result<()>>,
- pub current_thread: Rc<RefCell<WeakEntity<AcpThread>>>,
-}
-
-impl AcpConnection {
- pub fn stdio(
- name: &'static str,
- command: AgentServerCommand,
- root_dir: &Path,
- cx: &mut AsyncApp,
- ) -> Task<Result<Self>> {
- let root_dir = root_dir.to_path_buf();
-
- cx.spawn(async move |cx| {
- let mut child = util::command::new_smol_command(&command.path)
- .args(command.args.iter())
- .current_dir(root_dir)
- .stdin(std::process::Stdio::piped())
- .stdout(std::process::Stdio::piped())
- .stderr(std::process::Stdio::inherit())
- .kill_on_drop(true)
- .spawn()?;
-
- let stdin = child.stdin.take().unwrap();
- let stdout = child.stdout.take().unwrap();
- log::trace!("Spawned (pid: {})", child.id());
-
- let foreground_executor = cx.foreground_executor().clone();
-
- let thread_rc = Rc::new(RefCell::new(WeakEntity::new_invalid()));
-
- let (connection, io_fut) = acp_old::AgentConnection::connect_to_agent(
- OldAcpClientDelegate::new(thread_rc.clone(), cx.clone()),
- stdin,
- stdout,
- move |fut| foreground_executor.spawn(fut).detach(),
- );
-
- let io_task = cx.background_spawn(async move {
- io_fut.await.log_err();
- });
-
- let child_status = cx.background_spawn(async move {
- let result = match child.status().await {
- Err(e) => Err(anyhow!(e)),
- Ok(result) if result.success() => Ok(()),
- Ok(result) => Err(anyhow!(result)),
- };
- drop(io_task);
- result
- });
-
- Ok(Self {
- name,
- connection,
- _child_status: child_status,
- current_thread: thread_rc,
- })
- })
- }
-}
-
-impl AgentConnection for AcpConnection {
- fn new_thread(
- self: Rc<Self>,
- project: Entity<Project>,
- _cwd: &Path,
- cx: &mut App,
- ) -> Task<Result<Entity<AcpThread>>> {
- let task = self.connection.request_any(
- acp_old::InitializeParams {
- protocol_version: acp_old::ProtocolVersion::latest(),
- }
- .into_any(),
- );
- let current_thread = self.current_thread.clone();
- cx.spawn(async move |cx| {
- let result = task.await?;
- let result = acp_old::InitializeParams::response_from_any(result)?;
-
- if !result.is_authenticated {
- anyhow::bail!(AuthRequired::new())
- }
-
- cx.update(|cx| {
- let thread = cx.new(|cx| {
- let session_id = acp::SessionId("acp-old-no-id".into());
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
- AcpThread::new(self.name, self.clone(), project, action_log, session_id)
- });
- current_thread.replace(thread.downgrade());
- thread
- })
- })
- }
-
- fn auth_methods(&self) -> &[acp::AuthMethod] {
- &[]
- }
-
- fn authenticate(&self, _method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
- let task = self
- .connection
- .request_any(acp_old::AuthenticateParams.into_any());
- cx.foreground_executor().spawn(async move {
- task.await?;
- Ok(())
- })
- }
-
- fn prompt(
- &self,
- _id: Option<acp_thread::UserMessageId>,
- params: acp::PromptRequest,
- cx: &mut App,
- ) -> Task<Result<acp::PromptResponse>> {
- let chunks = params
- .prompt
- .into_iter()
- .filter_map(|block| match block {
- acp::ContentBlock::Text(text) => {
- Some(acp_old::UserMessageChunk::Text { text: text.text })
- }
- acp::ContentBlock::ResourceLink(link) => Some(acp_old::UserMessageChunk::Path {
- path: link.uri.into(),
- }),
- _ => None,
- })
- .collect();
-
- let task = self
- .connection
- .request_any(acp_old::SendUserMessageParams { chunks }.into_any());
- cx.foreground_executor().spawn(async move {
- task.await?;
- anyhow::Ok(acp::PromptResponse {
- stop_reason: acp::StopReason::EndTurn,
- })
- })
- }
-
- fn cancel(&self, _session_id: &acp::SessionId, cx: &mut App) {
- let task = self
- .connection
- .request_any(acp_old::CancelSendMessageParams.into_any());
- cx.foreground_executor()
- .spawn(async move {
- task.await?;
- anyhow::Ok(())
- })
- .detach_and_log_err(cx)
- }
-
- fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
- self
- }
-}
@@ -1,309 +0,0 @@
-use action_log::ActionLog;
-use agent_client_protocol::{self as acp, Agent as _};
-use anyhow::anyhow;
-use collections::HashMap;
-use futures::AsyncBufReadExt as _;
-use futures::channel::oneshot;
-use futures::io::BufReader;
-use project::Project;
-use std::path::Path;
-use std::rc::Rc;
-use std::{any::Any, cell::RefCell};
-
-use anyhow::{Context as _, Result};
-use gpui::{App, AppContext as _, AsyncApp, Entity, Task, WeakEntity};
-
-use crate::{AgentServerCommand, acp::UnsupportedVersion};
-use acp_thread::{AcpThread, AgentConnection, AuthRequired};
-
-pub struct AcpConnection {
- server_name: &'static str,
- connection: Rc<acp::ClientSideConnection>,
- sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
- auth_methods: Vec<acp::AuthMethod>,
- _io_task: Task<Result<()>>,
-}
-
-pub struct AcpSession {
- thread: WeakEntity<AcpThread>,
-}
-
-const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
-
-impl AcpConnection {
- pub async fn stdio(
- server_name: &'static str,
- command: AgentServerCommand,
- root_dir: &Path,
- cx: &mut AsyncApp,
- ) -> Result<Self> {
- let mut child = util::command::new_smol_command(&command.path)
- .args(command.args.iter().map(|arg| arg.as_str()))
- .envs(command.env.iter().flatten())
- .current_dir(root_dir)
- .stdin(std::process::Stdio::piped())
- .stdout(std::process::Stdio::piped())
- .stderr(std::process::Stdio::piped())
- .kill_on_drop(true)
- .spawn()?;
-
- let stdout = child.stdout.take().context("Failed to take stdout")?;
- let stdin = child.stdin.take().context("Failed to take stdin")?;
- let stderr = child.stderr.take().context("Failed to take stderr")?;
- log::trace!("Spawned (pid: {})", child.id());
-
- let sessions = Rc::new(RefCell::new(HashMap::default()));
-
- let client = ClientDelegate {
- sessions: sessions.clone(),
- cx: cx.clone(),
- };
- let (connection, io_task) = acp::ClientSideConnection::new(client, stdin, stdout, {
- let foreground_executor = cx.foreground_executor().clone();
- move |fut| {
- foreground_executor.spawn(fut).detach();
- }
- });
-
- let io_task = cx.background_spawn(io_task);
-
- cx.background_spawn(async move {
- let mut stderr = BufReader::new(stderr);
- let mut line = String::new();
- while let Ok(n) = stderr.read_line(&mut line).await
- && n > 0
- {
- log::warn!("agent stderr: {}", &line);
- line.clear();
- }
- })
- .detach();
-
- cx.spawn({
- let sessions = sessions.clone();
- async move |cx| {
- let status = child.status().await?;
-
- for session in sessions.borrow().values() {
- session
- .thread
- .update(cx, |thread, cx| thread.emit_server_exited(status, cx))
- .ok();
- }
-
- anyhow::Ok(())
- }
- })
- .detach();
-
- let response = connection
- .initialize(acp::InitializeRequest {
- protocol_version: acp::VERSION,
- client_capabilities: acp::ClientCapabilities {
- fs: acp::FileSystemCapability {
- read_text_file: true,
- write_text_file: true,
- },
- },
- })
- .await?;
-
- if response.protocol_version < MINIMUM_SUPPORTED_VERSION {
- return Err(UnsupportedVersion.into());
- }
-
- Ok(Self {
- auth_methods: response.auth_methods,
- connection: connection.into(),
- server_name,
- sessions,
- _io_task: io_task,
- })
- }
-}
-
-impl AgentConnection for AcpConnection {
- fn new_thread(
- self: Rc<Self>,
- project: Entity<Project>,
- cwd: &Path,
- cx: &mut App,
- ) -> Task<Result<Entity<AcpThread>>> {
- let conn = self.connection.clone();
- let sessions = self.sessions.clone();
- let cwd = cwd.to_path_buf();
- cx.spawn(async move |cx| {
- let response = conn
- .new_session(acp::NewSessionRequest {
- mcp_servers: vec![],
- cwd,
- })
- .await
- .map_err(|err| {
- if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
- let mut error = AuthRequired::new();
-
- if err.message != acp::ErrorCode::AUTH_REQUIRED.message {
- error = error.with_description(err.message);
- }
-
- anyhow!(error)
- } else {
- anyhow!(err)
- }
- })?;
-
- let session_id = response.session_id;
- let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
- let thread = cx.new(|_cx| {
- AcpThread::new(
- self.server_name,
- self.clone(),
- project,
- action_log,
- session_id.clone(),
- )
- })?;
-
- let session = AcpSession {
- thread: thread.downgrade(),
- };
- sessions.borrow_mut().insert(session_id, session);
-
- Ok(thread)
- })
- }
-
- fn auth_methods(&self) -> &[acp::AuthMethod] {
- &self.auth_methods
- }
-
- fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
- let conn = self.connection.clone();
- cx.foreground_executor().spawn(async move {
- let result = conn
- .authenticate(acp::AuthenticateRequest {
- method_id: method_id.clone(),
- })
- .await?;
-
- Ok(result)
- })
- }
-
- fn prompt(
- &self,
- _id: Option<acp_thread::UserMessageId>,
- params: acp::PromptRequest,
- cx: &mut App,
- ) -> Task<Result<acp::PromptResponse>> {
- let conn = self.connection.clone();
- cx.foreground_executor().spawn(async move {
- let response = conn.prompt(params).await?;
- Ok(response)
- })
- }
-
- fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) {
- let conn = self.connection.clone();
- let params = acp::CancelNotification {
- session_id: session_id.clone(),
- };
- cx.foreground_executor()
- .spawn(async move { conn.cancel(params).await })
- .detach();
- }
-
- fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
- self
- }
-}
-
-struct ClientDelegate {
- sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
- cx: AsyncApp,
-}
-
-impl acp::Client for ClientDelegate {
- async fn request_permission(
- &self,
- arguments: acp::RequestPermissionRequest,
- ) -> Result<acp::RequestPermissionResponse, acp::Error> {
- let cx = &mut self.cx.clone();
- let rx = self
- .sessions
- .borrow()
- .get(&arguments.session_id)
- .context("Failed to get session")?
- .thread
- .update(cx, |thread, cx| {
- thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx)
- })?;
-
- let result = rx?.await;
-
- let outcome = match result {
- Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option },
- Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Canceled,
- };
-
- Ok(acp::RequestPermissionResponse { outcome })
- }
-
- async fn write_text_file(
- &self,
- arguments: acp::WriteTextFileRequest,
- ) -> Result<(), acp::Error> {
- let cx = &mut self.cx.clone();
- let task = self
- .sessions
- .borrow()
- .get(&arguments.session_id)
- .context("Failed to get session")?
- .thread
- .update(cx, |thread, cx| {
- thread.write_text_file(arguments.path, arguments.content, cx)
- })?;
-
- task.await?;
-
- Ok(())
- }
-
- async fn read_text_file(
- &self,
- arguments: acp::ReadTextFileRequest,
- ) -> Result<acp::ReadTextFileResponse, acp::Error> {
- let cx = &mut self.cx.clone();
- let task = self
- .sessions
- .borrow()
- .get(&arguments.session_id)
- .context("Failed to get session")?
- .thread
- .update(cx, |thread, cx| {
- thread.read_text_file(arguments.path, arguments.line, arguments.limit, false, cx)
- })?;
-
- let content = task.await?;
-
- Ok(acp::ReadTextFileResponse { content })
- }
-
- async fn session_notification(
- &self,
- notification: acp::SessionNotification,
- ) -> Result<(), acp::Error> {
- let cx = &mut self.cx.clone();
- let sessions = self.sessions.borrow();
- let session = sessions
- .get(¬ification.session_id)
- .context("Failed to get session")?;
-
- session.thread.update(cx, |thread, cx| {
- thread.handle_session_update(notification.update, cx)
- })??;
-
- Ok(())
- }
-}
@@ -1,46 +1,62 @@
mod acp;
mod claude;
+mod custom;
mod gemini;
-mod settings;
-#[cfg(test)]
-mod e2e_tests;
+#[cfg(any(test, feature = "test-support"))]
+pub mod e2e_tests;
pub use claude::*;
+pub use custom::*;
pub use gemini::*;
-pub use settings::*;
+use project::agent_server_store::AgentServerStore;
use acp_thread::AgentConnection;
use anyhow::Result;
-use collections::HashMap;
-use gpui::{App, AsyncApp, Entity, SharedString, Task};
+use gpui::{App, Entity, SharedString, Task};
use project::Project;
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use std::{
- any::Any,
- path::{Path, PathBuf},
- rc::Rc,
- sync::Arc,
-};
-use util::ResultExt as _;
+use std::{any::Any, path::Path, rc::Rc};
-pub fn init(cx: &mut App) {
- settings::init(cx);
+pub use acp::AcpConnection;
+
+pub struct AgentServerDelegate {
+ store: Entity<AgentServerStore>,
+ project: Entity<Project>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available: Option<watch::Sender<Option<String>>>,
+}
+
+impl AgentServerDelegate {
+ pub fn new(
+ store: Entity<AgentServerStore>,
+ project: Entity<Project>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_tx: Option<watch::Sender<Option<String>>>,
+ ) -> Self {
+ Self {
+ store,
+ project,
+ status_tx,
+ new_version_available: new_version_tx,
+ }
+ }
+
+ pub fn project(&self) -> &Entity<Project> {
+ &self.project
+ }
}
pub trait AgentServer: Send {
fn logo(&self) -> ui::IconName;
- fn name(&self) -> &'static str;
- fn empty_state_headline(&self) -> &'static str;
- fn empty_state_message(&self) -> &'static str;
+ fn name(&self) -> SharedString;
+ fn telemetry_id(&self) -> &'static str;
fn connect(
&self,
- root_dir: &Path,
- project: &Entity<Project>,
+ root_dir: Option<&Path>,
+ delegate: AgentServerDelegate,
cx: &mut App,
- ) -> Task<Result<Rc<dyn AgentConnection>>>;
+ ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>;
fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
}
@@ -50,136 +66,3 @@ impl dyn AgentServer {
self.into_any().downcast().ok()
}
}
-
-impl std::fmt::Debug for AgentServerCommand {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let filtered_env = self.env.as_ref().map(|env| {
- env.iter()
- .map(|(k, v)| {
- (
- k,
- if util::redact::should_redact(k) {
- "[REDACTED]"
- } else {
- v
- },
- )
- })
- .collect::<Vec<_>>()
- });
-
- f.debug_struct("AgentServerCommand")
- .field("path", &self.path)
- .field("args", &self.args)
- .field("env", &filtered_env)
- .finish()
- }
-}
-
-pub enum AgentServerVersion {
- Supported,
- Unsupported {
- error_message: SharedString,
- upgrade_message: SharedString,
- upgrade_command: String,
- },
-}
-
-#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)]
-pub struct AgentServerCommand {
- #[serde(rename = "command")]
- pub path: PathBuf,
- #[serde(default)]
- pub args: Vec<String>,
- pub env: Option<HashMap<String, String>>,
-}
-
-impl AgentServerCommand {
- pub(crate) async fn resolve(
- path_bin_name: &'static str,
- extra_args: &[&'static str],
- fallback_path: Option<&Path>,
- settings: Option<AgentServerSettings>,
- project: &Entity<Project>,
- cx: &mut AsyncApp,
- ) -> Option<Self> {
- if let Some(agent_settings) = settings {
- Some(Self {
- path: agent_settings.command.path,
- args: agent_settings
- .command
- .args
- .into_iter()
- .chain(extra_args.iter().map(|arg| arg.to_string()))
- .collect(),
- env: agent_settings.command.env,
- })
- } else {
- match find_bin_in_path(path_bin_name, project, cx).await {
- Some(path) => Some(Self {
- path,
- args: extra_args.iter().map(|arg| arg.to_string()).collect(),
- env: None,
- }),
- None => fallback_path.and_then(|path| {
- if path.exists() {
- Some(Self {
- path: path.to_path_buf(),
- args: extra_args.iter().map(|arg| arg.to_string()).collect(),
- env: None,
- })
- } else {
- None
- }
- }),
- }
- }
- }
-}
-
-async fn find_bin_in_path(
- bin_name: &'static str,
- project: &Entity<Project>,
- cx: &mut AsyncApp,
-) -> Option<PathBuf> {
- let (env_task, root_dir) = project
- .update(cx, |project, cx| {
- let worktree = project.visible_worktrees(cx).next();
- match worktree {
- Some(worktree) => {
- let env_task = project.environment().update(cx, |env, cx| {
- env.get_worktree_environment(worktree.clone(), cx)
- });
-
- let path = worktree.read(cx).abs_path();
- (env_task, path)
- }
- None => {
- let path: Arc<Path> = paths::home_dir().as_path().into();
- let env_task = project.environment().update(cx, |env, cx| {
- env.get_directory_environment(path.clone(), cx)
- });
- (env_task, path)
- }
- }
- })
- .log_err()?;
-
- cx.background_executor()
- .spawn(async move {
- let which_result = if cfg!(windows) {
- which::which(bin_name)
- } else {
- let env = env_task.await.unwrap_or_default();
- let shell_path = env.get("PATH").cloned();
- which::which_in(bin_name, shell_path.as_ref(), root_dir.as_ref())
- };
-
- if let Err(which::Error::CannotFindBinaryPath) = which_result {
- return None;
- }
-
- which_result.log_err()
- })
- .await
-}
@@ -1,53 +1,29 @@
-mod mcp_server;
-pub mod tools;
-
-use action_log::ActionLog;
-use collections::HashMap;
-use context_server::listener::McpServerTool;
-use language_models::provider::anthropic::AnthropicLanguageModelProvider;
-use project::Project;
-use settings::SettingsStore;
-use smol::process::Child;
-use std::any::Any;
-use std::cell::RefCell;
-use std::fmt::Display;
use std::path::Path;
use std::rc::Rc;
-use uuid::Uuid;
+use std::{any::Any, path::PathBuf};
-use agent_client_protocol as acp;
-use anyhow::{Context as _, Result, anyhow};
-use futures::channel::oneshot;
-use futures::{AsyncBufReadExt, AsyncWriteExt};
-use futures::{
- AsyncRead, AsyncWrite, FutureExt, StreamExt,
- channel::mpsc::{self, UnboundedReceiver, UnboundedSender},
- io::BufReader,
- select_biased,
-};
-use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity};
-use serde::{Deserialize, Serialize};
-use util::{ResultExt, debug_panic};
+use anyhow::{Context as _, Result};
+use gpui::{App, SharedString, Task};
+use project::agent_server_store::CLAUDE_CODE_NAME;
-use crate::claude::mcp_server::{ClaudeZedMcpServer, McpConfig};
-use crate::claude::tools::ClaudeTool;
-use crate::{AgentServer, AgentServerCommand, AllAgentServersSettings};
-use acp_thread::{AcpThread, AgentConnection, AuthRequired};
+use crate::{AgentServer, AgentServerDelegate};
+use acp_thread::AgentConnection;
#[derive(Clone)]
pub struct ClaudeCode;
-impl AgentServer for ClaudeCode {
- fn name(&self) -> &'static str {
- "Claude Code"
- }
+pub struct AgentServerLoginCommand {
+ pub path: PathBuf,
+ pub arguments: Vec<String>,
+}
- fn empty_state_headline(&self) -> &'static str {
- self.name()
+impl AgentServer for ClaudeCode {
+ fn telemetry_id(&self) -> &'static str {
+ "claude-code"
}
- fn empty_state_message(&self) -> &'static str {
- "How can I help you today?"
+ fn name(&self) -> SharedString {
+ "Claude Code".into()
}
fn logo(&self) -> ui::IconName {
@@ -56,1060 +32,37 @@ impl AgentServer for ClaudeCode {
fn connect(
&self,
- _root_dir: &Path,
- _project: &Entity<Project>,
- _cx: &mut App,
- ) -> Task<Result<Rc<dyn AgentConnection>>> {
- let connection = ClaudeAgentConnection {
- sessions: Default::default(),
- };
-
- Task::ready(Ok(Rc::new(connection) as _))
- }
-
- fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
- self
- }
-}
-
-struct ClaudeAgentConnection {
- sessions: Rc<RefCell<HashMap<acp::SessionId, ClaudeAgentSession>>>,
-}
-
-impl AgentConnection for ClaudeAgentConnection {
- fn new_thread(
- self: Rc<Self>,
- project: Entity<Project>,
- cwd: &Path,
+ root_dir: Option<&Path>,
+ delegate: AgentServerDelegate,
cx: &mut App,
- ) -> Task<Result<Entity<AcpThread>>> {
- let cwd = cwd.to_owned();
- cx.spawn(async move |cx| {
- let settings = cx.read_global(|settings: &SettingsStore, _| {
- settings.get::<AllAgentServersSettings>(None).claude.clone()
- })?;
-
- let Some(command) = AgentServerCommand::resolve(
- "claude",
- &[],
- Some(&util::paths::home_dir().join(".claude/local/claude")),
- settings,
- &project,
- cx,
- )
- .await
- else {
- anyhow::bail!("Failed to find claude binary");
- };
-
- let api_key =
- cx.update(AnthropicLanguageModelProvider::api_key)?
- .await
- .map_err(|err| {
- if err.is::<language_model::AuthenticateError>() {
- anyhow!(AuthRequired::new().with_language_model_provider(
- language_model::ANTHROPIC_PROVIDER_ID
- ))
- } else {
- anyhow!(err)
- }
- })?;
-
- let (mut thread_tx, thread_rx) = watch::channel(WeakEntity::new_invalid());
- let fs = project.read_with(cx, |project, _cx| project.fs().clone())?;
- let permission_mcp_server = ClaudeZedMcpServer::new(thread_rx.clone(), fs, cx).await?;
+ ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
+ let name = self.name();
+ let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
+ let is_remote = delegate.project.read(cx).is_via_remote_server();
+ let store = delegate.store.downgrade();
- let mut mcp_servers = HashMap::default();
- mcp_servers.insert(
- mcp_server::SERVER_NAME.to_string(),
- permission_mcp_server.server_config()?,
- );
- let mcp_config = McpConfig { mcp_servers };
-
- let mcp_config_file = tempfile::NamedTempFile::new()?;
- let (mcp_config_file, mcp_config_path) = mcp_config_file.into_parts();
-
- let mut mcp_config_file = smol::fs::File::from(mcp_config_file);
- mcp_config_file
- .write_all(serde_json::to_string(&mcp_config)?.as_bytes())
- .await?;
- mcp_config_file.flush().await?;
-
- let (incoming_message_tx, mut incoming_message_rx) = mpsc::unbounded();
- let (outgoing_tx, outgoing_rx) = mpsc::unbounded();
-
- let session_id = acp::SessionId(Uuid::new_v4().to_string().into());
-
- log::trace!("Starting session with id: {}", session_id);
-
- let mut child = spawn_claude(
- &command,
- ClaudeSessionMode::Start,
- session_id.clone(),
- api_key,
- &mcp_config_path,
- &cwd,
- )?;
-
- let stdout = child.stdout.take().context("Failed to take stdout")?;
- let stdin = child.stdin.take().context("Failed to take stdin")?;
- let stderr = child.stderr.take().context("Failed to take stderr")?;
-
- let pid = child.id();
- log::trace!("Spawned (pid: {})", pid);
-
- cx.background_spawn(async move {
- let mut stderr = BufReader::new(stderr);
- let mut line = String::new();
- while let Ok(n) = stderr.read_line(&mut line).await
- && n > 0
- {
- log::warn!("agent stderr: {}", &line);
- line.clear();
- }
- })
- .detach();
-
- cx.background_spawn(async move {
- let mut outgoing_rx = Some(outgoing_rx);
-
- ClaudeAgentSession::handle_io(
- outgoing_rx.take().unwrap(),
- incoming_message_tx.clone(),
- stdin,
- stdout,
- )
+ cx.spawn(async move |cx| {
+ let (command, root_dir, login) = store
+ .update(cx, |store, cx| {
+ let agent = store
+ .get_external_agent(&CLAUDE_CODE_NAME.into())
+ .context("Claude Code is not registered")?;
+ anyhow::Ok(agent.get_command(
+ root_dir.as_deref(),
+ Default::default(),
+ delegate.status_tx,
+ delegate.new_version_available,
+ &mut cx.to_async(),
+ ))
+ })??
.await?;
-
- log::trace!("Stopped (pid: {})", pid);
-
- drop(mcp_config_path);
- anyhow::Ok(())
- })
- .detach();
-
- let turn_state = Rc::new(RefCell::new(TurnState::None));
-
- let handler_task = cx.spawn({
- let turn_state = turn_state.clone();
- let mut thread_rx = thread_rx.clone();
- async move |cx| {
- while let Some(message) = incoming_message_rx.next().await {
- ClaudeAgentSession::handle_message(
- thread_rx.clone(),
- message,
- turn_state.clone(),
- cx,
- )
- .await
- }
-
- if let Some(status) = child.status().await.log_err()
- && let Some(thread) = thread_rx.recv().await.ok()
- {
- thread
- .update(cx, |thread, cx| {
- thread.emit_server_exited(status, cx);
- })
- .ok();
- }
- }
- });
-
- let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
- let thread = cx.new(|_cx| {
- AcpThread::new(
- "Claude Code",
- self.clone(),
- project,
- action_log,
- session_id.clone(),
- )
- })?;
-
- thread_tx.send(thread.downgrade())?;
-
- let session = ClaudeAgentSession {
- outgoing_tx,
- turn_state,
- _handler_task: handler_task,
- _mcp_server: Some(permission_mcp_server),
- };
-
- self.sessions.borrow_mut().insert(session_id, session);
-
- Ok(thread)
+ let connection =
+ crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
+ Ok((connection, login))
})
}
- fn auth_methods(&self) -> &[acp::AuthMethod] {
- &[]
- }
-
- fn authenticate(&self, _: acp::AuthMethodId, _cx: &mut App) -> Task<Result<()>> {
- Task::ready(Err(anyhow!("Authentication not supported")))
- }
-
- fn prompt(
- &self,
- _id: Option<acp_thread::UserMessageId>,
- params: acp::PromptRequest,
- cx: &mut App,
- ) -> Task<Result<acp::PromptResponse>> {
- let sessions = self.sessions.borrow();
- let Some(session) = sessions.get(¶ms.session_id) else {
- return Task::ready(Err(anyhow!(
- "Attempted to send message to nonexistent session {}",
- params.session_id
- )));
- };
-
- let (end_tx, end_rx) = oneshot::channel();
- session.turn_state.replace(TurnState::InProgress { end_tx });
-
- let mut content = String::new();
- for chunk in params.prompt {
- match chunk {
- acp::ContentBlock::Text(text_content) => {
- content.push_str(&text_content.text);
- }
- acp::ContentBlock::ResourceLink(resource_link) => {
- content.push_str(&format!("@{}", resource_link.uri));
- }
- acp::ContentBlock::Audio(_)
- | acp::ContentBlock::Image(_)
- | acp::ContentBlock::Resource(_) => {
- // TODO
- }
- }
- }
-
- if let Err(err) = session.outgoing_tx.unbounded_send(SdkMessage::User {
- message: Message {
- role: Role::User,
- content: Content::UntaggedText(content),
- id: None,
- model: None,
- stop_reason: None,
- stop_sequence: None,
- usage: None,
- },
- session_id: Some(params.session_id.to_string()),
- }) {
- return Task::ready(Err(anyhow!(err)));
- }
-
- cx.foreground_executor().spawn(async move { end_rx.await? })
- }
-
- fn cancel(&self, session_id: &acp::SessionId, _cx: &mut App) {
- let sessions = self.sessions.borrow();
- let Some(session) = sessions.get(session_id) else {
- log::warn!("Attempted to cancel nonexistent session {}", session_id);
- return;
- };
-
- let request_id = new_request_id();
-
- let turn_state = session.turn_state.take();
- let TurnState::InProgress { end_tx } = turn_state else {
- // Already canceled or idle, put it back
- session.turn_state.replace(turn_state);
- return;
- };
-
- session.turn_state.replace(TurnState::CancelRequested {
- end_tx,
- request_id: request_id.clone(),
- });
-
- session
- .outgoing_tx
- .unbounded_send(SdkMessage::ControlRequest {
- request_id,
- request: ControlRequest::Interrupt,
- })
- .log_err();
- }
-
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
self
}
}
-
-#[derive(Clone, Copy)]
-enum ClaudeSessionMode {
- Start,
- #[expect(dead_code)]
- Resume,
-}
-
-fn spawn_claude(
- command: &AgentServerCommand,
- mode: ClaudeSessionMode,
- session_id: acp::SessionId,
- api_key: language_models::provider::anthropic::ApiKey,
- mcp_config_path: &Path,
- root_dir: &Path,
-) -> Result<Child> {
- let child = util::command::new_smol_command(&command.path)
- .args([
- "--input-format",
- "stream-json",
- "--output-format",
- "stream-json",
- "--print",
- "--verbose",
- "--mcp-config",
- mcp_config_path.to_string_lossy().as_ref(),
- "--permission-prompt-tool",
- &format!(
- "mcp__{}__{}",
- mcp_server::SERVER_NAME,
- mcp_server::PermissionTool::NAME,
- ),
- "--allowedTools",
- &format!(
- "mcp__{}__{},mcp__{}__{}",
- mcp_server::SERVER_NAME,
- mcp_server::EditTool::NAME,
- mcp_server::SERVER_NAME,
- mcp_server::ReadTool::NAME
- ),
- "--disallowedTools",
- "Read,Edit",
- ])
- .args(match mode {
- ClaudeSessionMode::Start => ["--session-id".to_string(), session_id.to_string()],
- ClaudeSessionMode::Resume => ["--resume".to_string(), session_id.to_string()],
- })
- .args(command.args.iter().map(|arg| arg.as_str()))
- .envs(command.env.iter().flatten())
- .env("ANTHROPIC_API_KEY", api_key.key)
- .current_dir(root_dir)
- .stdin(std::process::Stdio::piped())
- .stdout(std::process::Stdio::piped())
- .stderr(std::process::Stdio::piped())
- .kill_on_drop(true)
- .spawn()?;
-
- Ok(child)
-}
-
-struct ClaudeAgentSession {
- outgoing_tx: UnboundedSender<SdkMessage>,
- turn_state: Rc<RefCell<TurnState>>,
- _mcp_server: Option<ClaudeZedMcpServer>,
- _handler_task: Task<()>,
-}
-
-#[derive(Debug, Default)]
-enum TurnState {
- #[default]
- None,
- InProgress {
- end_tx: oneshot::Sender<Result<acp::PromptResponse>>,
- },
- CancelRequested {
- end_tx: oneshot::Sender<Result<acp::PromptResponse>>,
- request_id: String,
- },
- CancelConfirmed {
- end_tx: oneshot::Sender<Result<acp::PromptResponse>>,
- },
-}
-
-impl TurnState {
- fn is_canceled(&self) -> bool {
- matches!(self, TurnState::CancelConfirmed { .. })
- }
-
- fn end_tx(self) -> Option<oneshot::Sender<Result<acp::PromptResponse>>> {
- match self {
- TurnState::None => None,
- TurnState::InProgress { end_tx, .. } => Some(end_tx),
- TurnState::CancelRequested { end_tx, .. } => Some(end_tx),
- TurnState::CancelConfirmed { end_tx } => Some(end_tx),
- }
- }
-
- fn confirm_cancellation(self, id: &str) -> Self {
- match self {
- TurnState::CancelRequested { request_id, end_tx } if request_id == id => {
- TurnState::CancelConfirmed { end_tx }
- }
- _ => self,
- }
- }
-}
-
-impl ClaudeAgentSession {
- async fn handle_message(
- mut thread_rx: watch::Receiver<WeakEntity<AcpThread>>,
- message: SdkMessage,
- turn_state: Rc<RefCell<TurnState>>,
- cx: &mut AsyncApp,
- ) {
- match message {
- // we should only be sending these out, they don't need to be in the thread
- SdkMessage::ControlRequest { .. } => {}
- SdkMessage::User {
- message,
- session_id: _,
- } => {
- let Some(thread) = thread_rx
- .recv()
- .await
- .log_err()
- .and_then(|entity| entity.upgrade())
- else {
- log::error!("Received an SDK message but thread is gone");
- return;
- };
-
- for chunk in message.content.chunks() {
- match chunk {
- ContentChunk::Text { text } | ContentChunk::UntaggedText(text) => {
- if !turn_state.borrow().is_canceled() {
- thread
- .update(cx, |thread, cx| {
- thread.push_user_content_block(None, text.into(), cx)
- })
- .log_err();
- }
- }
- ContentChunk::ToolResult {
- content,
- tool_use_id,
- } => {
- let content = content.to_string();
- thread
- .update(cx, |thread, cx| {
- thread.update_tool_call(
- acp::ToolCallUpdate {
- id: acp::ToolCallId(tool_use_id.into()),
- fields: acp::ToolCallUpdateFields {
- status: if turn_state.borrow().is_canceled() {
- // Do not set to completed if turn was canceled
- None
- } else {
- Some(acp::ToolCallStatus::Completed)
- },
- content: (!content.is_empty())
- .then(|| vec![content.into()]),
- ..Default::default()
- },
- },
- cx,
- )
- })
- .log_err();
- }
- ContentChunk::Thinking { .. }
- | ContentChunk::RedactedThinking
- | ContentChunk::ToolUse { .. } => {
- debug_panic!(
- "Should not get {:?} with role: assistant. should we handle this?",
- chunk
- );
- }
-
- ContentChunk::Image
- | ContentChunk::Document
- | ContentChunk::WebSearchToolResult => {
- thread
- .update(cx, |thread, cx| {
- thread.push_assistant_content_block(
- format!("Unsupported content: {:?}", chunk).into(),
- false,
- cx,
- )
- })
- .log_err();
- }
- }
- }
- }
- SdkMessage::Assistant {
- message,
- session_id: _,
- } => {
- let Some(thread) = thread_rx
- .recv()
- .await
- .log_err()
- .and_then(|entity| entity.upgrade())
- else {
- log::error!("Received an SDK message but thread is gone");
- return;
- };
-
- for chunk in message.content.chunks() {
- match chunk {
- ContentChunk::Text { text } | ContentChunk::UntaggedText(text) => {
- thread
- .update(cx, |thread, cx| {
- thread.push_assistant_content_block(text.into(), false, cx)
- })
- .log_err();
- }
- ContentChunk::Thinking { thinking } => {
- thread
- .update(cx, |thread, cx| {
- thread.push_assistant_content_block(thinking.into(), true, cx)
- })
- .log_err();
- }
- ContentChunk::RedactedThinking => {
- thread
- .update(cx, |thread, cx| {
- thread.push_assistant_content_block(
- "[REDACTED]".into(),
- true,
- cx,
- )
- })
- .log_err();
- }
- ContentChunk::ToolUse { id, name, input } => {
- let claude_tool = ClaudeTool::infer(&name, input);
-
- thread
- .update(cx, |thread, cx| {
- if let ClaudeTool::TodoWrite(Some(params)) = claude_tool {
- thread.update_plan(
- acp::Plan {
- entries: params
- .todos
- .into_iter()
- .map(Into::into)
- .collect(),
- },
- cx,
- )
- } else {
- thread.upsert_tool_call(
- claude_tool.as_acp(acp::ToolCallId(id.into())),
- cx,
- )?;
- }
- anyhow::Ok(())
- })
- .log_err();
- }
- ContentChunk::ToolResult { .. } | ContentChunk::WebSearchToolResult => {
- debug_panic!(
- "Should not get tool results with role: assistant. should we handle this?"
- );
- }
- ContentChunk::Image | ContentChunk::Document => {
- thread
- .update(cx, |thread, cx| {
- thread.push_assistant_content_block(
- format!("Unsupported content: {:?}", chunk).into(),
- false,
- cx,
- )
- })
- .log_err();
- }
- }
- }
- }
- SdkMessage::Result {
- is_error,
- subtype,
- result,
- ..
- } => {
- let turn_state = turn_state.take();
- let was_canceled = turn_state.is_canceled();
- let Some(end_turn_tx) = turn_state.end_tx() else {
- debug_panic!("Received `SdkMessage::Result` but there wasn't an active turn");
- return;
- };
-
- if is_error || (!was_canceled && subtype == ResultErrorType::ErrorDuringExecution) {
- end_turn_tx
- .send(Err(anyhow!(
- "Error: {}",
- result.unwrap_or_else(|| subtype.to_string())
- )))
- .ok();
- } else {
- let stop_reason = match subtype {
- ResultErrorType::Success => acp::StopReason::EndTurn,
- ResultErrorType::ErrorMaxTurns => acp::StopReason::MaxTurnRequests,
- ResultErrorType::ErrorDuringExecution => acp::StopReason::Canceled,
- };
- end_turn_tx
- .send(Ok(acp::PromptResponse { stop_reason }))
- .ok();
- }
- }
- SdkMessage::ControlResponse { response } => {
- if matches!(response.subtype, ResultErrorType::Success) {
- let new_state = turn_state.take().confirm_cancellation(&response.request_id);
- turn_state.replace(new_state);
- } else {
- log::error!("Control response error: {:?}", response);
- }
- }
- SdkMessage::System { .. } => {}
- }
- }
-
- async fn handle_io(
- mut outgoing_rx: UnboundedReceiver<SdkMessage>,
- incoming_tx: UnboundedSender<SdkMessage>,
- mut outgoing_bytes: impl Unpin + AsyncWrite,
- incoming_bytes: impl Unpin + AsyncRead,
- ) -> Result<UnboundedReceiver<SdkMessage>> {
- let mut output_reader = BufReader::new(incoming_bytes);
- let mut outgoing_line = Vec::new();
- let mut incoming_line = String::new();
- loop {
- select_biased! {
- message = outgoing_rx.next() => {
- if let Some(message) = message {
- outgoing_line.clear();
- serde_json::to_writer(&mut outgoing_line, &message)?;
- log::trace!("send: {}", String::from_utf8_lossy(&outgoing_line));
- outgoing_line.push(b'\n');
- outgoing_bytes.write_all(&outgoing_line).await.ok();
- } else {
- break;
- }
- }
- bytes_read = output_reader.read_line(&mut incoming_line).fuse() => {
- if bytes_read? == 0 {
- break
- }
- log::trace!("recv: {}", &incoming_line);
- match serde_json::from_str::<SdkMessage>(&incoming_line) {
- Ok(message) => {
- incoming_tx.unbounded_send(message).log_err();
- }
- Err(error) => {
- log::error!("failed to parse incoming message: {error}. Raw: {incoming_line}");
- }
- }
- incoming_line.clear();
- }
- }
- }
-
- Ok(outgoing_rx)
- }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct Message {
- role: Role,
- content: Content,
- #[serde(skip_serializing_if = "Option::is_none")]
- id: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- model: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- stop_reason: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- stop_sequence: Option<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- usage: Option<Usage>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(untagged)]
-enum Content {
- UntaggedText(String),
- Chunks(Vec<ContentChunk>),
-}
-
-impl Content {
- pub fn chunks(self) -> impl Iterator<Item = ContentChunk> {
- match self {
- Self::Chunks(chunks) => chunks.into_iter(),
- Self::UntaggedText(text) => vec![ContentChunk::Text { text: text.clone() }].into_iter(),
- }
- }
-}
-
-impl Display for Content {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- Content::UntaggedText(txt) => write!(f, "{}", txt),
- Content::Chunks(chunks) => {
- for chunk in chunks {
- write!(f, "{}", chunk)?;
- }
- Ok(())
- }
- }
- }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(tag = "type", rename_all = "snake_case")]
-enum ContentChunk {
- Text {
- text: String,
- },
- ToolUse {
- id: String,
- name: String,
- input: serde_json::Value,
- },
- ToolResult {
- content: Content,
- tool_use_id: String,
- },
- Thinking {
- thinking: String,
- },
- RedactedThinking,
- // TODO
- Image,
- Document,
- WebSearchToolResult,
- #[serde(untagged)]
- UntaggedText(String),
-}
-
-impl Display for ContentChunk {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- ContentChunk::Text { text } => write!(f, "{}", text),
- ContentChunk::Thinking { thinking } => write!(f, "Thinking: {}", thinking),
- ContentChunk::RedactedThinking => write!(f, "Thinking: [REDACTED]"),
- ContentChunk::UntaggedText(text) => write!(f, "{}", text),
- ContentChunk::ToolResult { content, .. } => write!(f, "{}", content),
- ContentChunk::Image
- | ContentChunk::Document
- | ContentChunk::ToolUse { .. }
- | ContentChunk::WebSearchToolResult => {
- write!(f, "\n{:?}\n", &self)
- }
- }
- }
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct Usage {
- input_tokens: u32,
- cache_creation_input_tokens: u32,
- cache_read_input_tokens: u32,
- output_tokens: u32,
- service_tier: String,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
-enum Role {
- System,
- Assistant,
- User,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct MessageParam {
- role: Role,
- content: String,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(tag = "type", rename_all = "snake_case")]
-enum SdkMessage {
- // An assistant message
- Assistant {
- message: Message, // from Anthropic SDK
- #[serde(skip_serializing_if = "Option::is_none")]
- session_id: Option<String>,
- },
- // A user message
- User {
- message: Message, // from Anthropic SDK
- #[serde(skip_serializing_if = "Option::is_none")]
- session_id: Option<String>,
- },
- // Emitted as the last message in a conversation
- Result {
- subtype: ResultErrorType,
- duration_ms: f64,
- duration_api_ms: f64,
- is_error: bool,
- num_turns: i32,
- #[serde(skip_serializing_if = "Option::is_none")]
- result: Option<String>,
- session_id: String,
- total_cost_usd: f64,
- },
- // Emitted as the first message at the start of a conversation
- System {
- cwd: String,
- session_id: String,
- tools: Vec<String>,
- model: String,
- mcp_servers: Vec<McpServer>,
- #[serde(rename = "apiKeySource")]
- api_key_source: String,
- #[serde(rename = "permissionMode")]
- permission_mode: PermissionMode,
- },
- /// Messages used to control the conversation, outside of chat messages to the model
- ControlRequest {
- request_id: String,
- request: ControlRequest,
- },
- /// Response to a control request
- ControlResponse { response: ControlResponse },
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(tag = "subtype", rename_all = "snake_case")]
-enum ControlRequest {
- /// Cancel the current conversation
- Interrupt,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct ControlResponse {
- request_id: String,
- subtype: ResultErrorType,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
-#[serde(rename_all = "snake_case")]
-enum ResultErrorType {
- Success,
- ErrorMaxTurns,
- ErrorDuringExecution,
-}
-
-impl Display for ResultErrorType {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- ResultErrorType::Success => write!(f, "success"),
- ResultErrorType::ErrorMaxTurns => write!(f, "error_max_turns"),
- ResultErrorType::ErrorDuringExecution => write!(f, "error_during_execution"),
- }
- }
-}
-
-fn new_request_id() -> String {
- use rand::Rng;
- // In the Claude Code TS SDK they just generate a random 12 character string,
- // `Math.random().toString(36).substring(2, 15)`
- rand::thread_rng()
- .sample_iter(&rand::distributions::Alphanumeric)
- .take(12)
- .map(char::from)
- .collect()
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct McpServer {
- name: String,
- status: String,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-enum PermissionMode {
- Default,
- AcceptEdits,
- BypassPermissions,
- Plan,
-}
-
-#[cfg(test)]
-pub(crate) mod tests {
- use super::*;
- use crate::e2e_tests;
- use gpui::TestAppContext;
- use serde_json::json;
-
- crate::common_e2e_tests!(ClaudeCode, allow_option_id = "allow");
-
- pub fn local_command() -> AgentServerCommand {
- AgentServerCommand {
- path: "claude".into(),
- args: vec![],
- env: None,
- }
- }
-
- #[gpui::test]
- #[cfg_attr(not(feature = "e2e"), ignore)]
- async fn test_todo_plan(cx: &mut TestAppContext) {
- let fs = e2e_tests::init_test(cx).await;
- let project = Project::test(fs, [], cx).await;
- let thread =
- e2e_tests::new_test_thread(ClaudeCode, project.clone(), "/private/tmp", cx).await;
-
- thread
- .update(cx, |thread, cx| {
- thread.send_raw(
- "Create a todo plan for initializing a new React app. I'll follow it myself, do not execute on it.",
- cx,
- )
- })
- .await
- .unwrap();
-
- let mut entries_len = 0;
-
- thread.read_with(cx, |thread, _| {
- entries_len = thread.plan().entries.len();
- assert!(thread.plan().entries.len() > 0, "Empty plan");
- });
-
- thread
- .update(cx, |thread, cx| {
- thread.send_raw(
- "Mark the first entry status as in progress without acting on it.",
- cx,
- )
- })
- .await
- .unwrap();
-
- thread.read_with(cx, |thread, _| {
- assert!(matches!(
- thread.plan().entries[0].status,
- acp::PlanEntryStatus::InProgress
- ));
- assert_eq!(thread.plan().entries.len(), entries_len);
- });
-
- thread
- .update(cx, |thread, cx| {
- thread.send_raw(
- "Now mark the first entry as completed without acting on it.",
- cx,
- )
- })
- .await
- .unwrap();
-
- thread.read_with(cx, |thread, _| {
- assert!(matches!(
- thread.plan().entries[0].status,
- acp::PlanEntryStatus::Completed
- ));
- assert_eq!(thread.plan().entries.len(), entries_len);
- });
- }
-
- #[test]
- fn test_deserialize_content_untagged_text() {
- let json = json!("Hello, world!");
- let content: Content = serde_json::from_value(json).unwrap();
- match content {
- Content::UntaggedText(text) => assert_eq!(text, "Hello, world!"),
- _ => panic!("Expected UntaggedText variant"),
- }
- }
-
- #[test]
- fn test_deserialize_content_chunks() {
- let json = json!([
- {
- "type": "text",
- "text": "Hello"
- },
- {
- "type": "tool_use",
- "id": "tool_123",
- "name": "calculator",
- "input": {"operation": "add", "a": 1, "b": 2}
- }
- ]);
- let content: Content = serde_json::from_value(json).unwrap();
- match content {
- Content::Chunks(chunks) => {
- assert_eq!(chunks.len(), 2);
- match &chunks[0] {
- ContentChunk::Text { text } => assert_eq!(text, "Hello"),
- _ => panic!("Expected Text chunk"),
- }
- match &chunks[1] {
- ContentChunk::ToolUse { id, name, input } => {
- assert_eq!(id, "tool_123");
- assert_eq!(name, "calculator");
- assert_eq!(input["operation"], "add");
- assert_eq!(input["a"], 1);
- assert_eq!(input["b"], 2);
- }
- _ => panic!("Expected ToolUse chunk"),
- }
- }
- _ => panic!("Expected Chunks variant"),
- }
- }
-
- #[test]
- fn test_deserialize_tool_result_untagged_text() {
- let json = json!({
- "type": "tool_result",
- "content": "Result content",
- "tool_use_id": "tool_456"
- });
- let chunk: ContentChunk = serde_json::from_value(json).unwrap();
- match chunk {
- ContentChunk::ToolResult {
- content,
- tool_use_id,
- } => {
- match content {
- Content::UntaggedText(text) => assert_eq!(text, "Result content"),
- _ => panic!("Expected UntaggedText content"),
- }
- assert_eq!(tool_use_id, "tool_456");
- }
- _ => panic!("Expected ToolResult variant"),
- }
- }
-
- #[test]
- fn test_deserialize_tool_result_chunks() {
- let json = json!({
- "type": "tool_result",
- "content": [
- {
- "type": "text",
- "text": "Processing complete"
- },
- {
- "type": "text",
- "text": "Result: 42"
- }
- ],
- "tool_use_id": "tool_789"
- });
- let chunk: ContentChunk = serde_json::from_value(json).unwrap();
- match chunk {
- ContentChunk::ToolResult {
- content,
- tool_use_id,
- } => {
- match content {
- Content::Chunks(chunks) => {
- assert_eq!(chunks.len(), 2);
- match &chunks[0] {
- ContentChunk::Text { text } => assert_eq!(text, "Processing complete"),
- _ => panic!("Expected Text chunk"),
- }
- match &chunks[1] {
- ContentChunk::Text { text } => assert_eq!(text, "Result: 42"),
- _ => panic!("Expected Text chunk"),
- }
- }
- _ => panic!("Expected Chunks content"),
- }
- assert_eq!(tool_use_id, "tool_789");
- }
- _ => panic!("Expected ToolResult variant"),
- }
- }
-}
@@ -1,352 +0,0 @@
-use std::path::PathBuf;
-use std::sync::Arc;
-
-use crate::claude::tools::{ClaudeTool, EditToolParams, ReadToolParams};
-use acp_thread::AcpThread;
-use agent_client_protocol as acp;
-use agent_settings::AgentSettings;
-use anyhow::{Context, Result};
-use collections::HashMap;
-use context_server::listener::{McpServerTool, ToolResponse};
-use context_server::types::{
- Implementation, InitializeParams, InitializeResponse, ProtocolVersion, ServerCapabilities,
- ToolAnnotations, ToolResponseContent, ToolsCapabilities, requests,
-};
-use gpui::{App, AsyncApp, Task, WeakEntity};
-use project::Fs;
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use settings::{Settings as _, update_settings_file};
-use util::debug_panic;
-
-pub struct ClaudeZedMcpServer {
- server: context_server::listener::McpServer,
-}
-
-pub const SERVER_NAME: &str = "zed";
-
-impl ClaudeZedMcpServer {
- pub async fn new(
- thread_rx: watch::Receiver<WeakEntity<AcpThread>>,
- fs: Arc<dyn Fs>,
- cx: &AsyncApp,
- ) -> Result<Self> {
- let mut mcp_server = context_server::listener::McpServer::new(cx).await?;
- mcp_server.handle_request::<requests::Initialize>(Self::handle_initialize);
-
- mcp_server.add_tool(PermissionTool {
- thread_rx: thread_rx.clone(),
- fs: fs.clone(),
- });
- mcp_server.add_tool(ReadTool {
- thread_rx: thread_rx.clone(),
- });
- mcp_server.add_tool(EditTool {
- thread_rx: thread_rx.clone(),
- });
-
- Ok(Self { server: mcp_server })
- }
-
- pub fn server_config(&self) -> Result<McpServerConfig> {
- #[cfg(not(test))]
- let zed_path = std::env::current_exe()
- .context("finding current executable path for use in mcp_server")?;
-
- #[cfg(test)]
- let zed_path = crate::e2e_tests::get_zed_path();
-
- Ok(McpServerConfig {
- command: zed_path,
- args: vec![
- "--nc".into(),
- self.server.socket_path().display().to_string(),
- ],
- env: None,
- })
- }
-
- fn handle_initialize(_: InitializeParams, cx: &App) -> Task<Result<InitializeResponse>> {
- cx.foreground_executor().spawn(async move {
- Ok(InitializeResponse {
- protocol_version: ProtocolVersion("2025-06-18".into()),
- capabilities: ServerCapabilities {
- experimental: None,
- logging: None,
- completions: None,
- prompts: None,
- resources: None,
- tools: Some(ToolsCapabilities {
- list_changed: Some(false),
- }),
- },
- server_info: Implementation {
- name: SERVER_NAME.into(),
- version: "0.1.0".into(),
- },
- meta: None,
- })
- })
- }
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct McpConfig {
- pub mcp_servers: HashMap<String, McpServerConfig>,
-}
-
-#[derive(Serialize, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct McpServerConfig {
- pub command: PathBuf,
- pub args: Vec<String>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub env: Option<HashMap<String, String>>,
-}
-
-// Tools
-
-#[derive(Clone)]
-pub struct PermissionTool {
- fs: Arc<dyn Fs>,
- thread_rx: watch::Receiver<WeakEntity<AcpThread>>,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct PermissionToolParams {
- tool_name: String,
- input: serde_json::Value,
- tool_use_id: Option<String>,
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct PermissionToolResponse {
- behavior: PermissionToolBehavior,
- updated_input: serde_json::Value,
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "snake_case")]
-enum PermissionToolBehavior {
- Allow,
- Deny,
-}
-
-impl McpServerTool for PermissionTool {
- type Input = PermissionToolParams;
- type Output = ();
-
- const NAME: &'static str = "Confirmation";
-
- fn description(&self) -> &'static str {
- "Request permission for tool calls"
- }
-
- async fn run(
- &self,
- input: Self::Input,
- cx: &mut AsyncApp,
- ) -> Result<ToolResponse<Self::Output>> {
- if agent_settings::AgentSettings::try_read_global(cx, |settings| {
- settings.always_allow_tool_actions
- })
- .unwrap_or(false)
- {
- let response = PermissionToolResponse {
- behavior: PermissionToolBehavior::Allow,
- updated_input: input.input,
- };
-
- return Ok(ToolResponse {
- content: vec![ToolResponseContent::Text {
- text: serde_json::to_string(&response)?,
- }],
- structured_content: (),
- });
- }
-
- let mut thread_rx = self.thread_rx.clone();
- let Some(thread) = thread_rx.recv().await?.upgrade() else {
- anyhow::bail!("Thread closed");
- };
-
- let claude_tool = ClaudeTool::infer(&input.tool_name, input.input.clone());
- let tool_call_id = acp::ToolCallId(input.tool_use_id.context("Tool ID required")?.into());
-
- const ALWAYS_ALLOW: &'static str = "always_allow";
- const ALLOW: &'static str = "allow";
- const REJECT: &'static str = "reject";
-
- let chosen_option = thread
- .update(cx, |thread, cx| {
- thread.request_tool_call_authorization(
- claude_tool.as_acp(tool_call_id).into(),
- vec![
- acp::PermissionOption {
- id: acp::PermissionOptionId(ALWAYS_ALLOW.into()),
- name: "Always Allow".into(),
- kind: acp::PermissionOptionKind::AllowAlways,
- },
- acp::PermissionOption {
- id: acp::PermissionOptionId(ALLOW.into()),
- name: "Allow".into(),
- kind: acp::PermissionOptionKind::AllowOnce,
- },
- acp::PermissionOption {
- id: acp::PermissionOptionId(REJECT.into()),
- name: "Reject".into(),
- kind: acp::PermissionOptionKind::RejectOnce,
- },
- ],
- cx,
- )
- })??
- .await?;
-
- let response = match chosen_option.0.as_ref() {
- ALWAYS_ALLOW => {
- cx.update(|cx| {
- update_settings_file::<AgentSettings>(self.fs.clone(), cx, |settings, _| {
- settings.set_always_allow_tool_actions(true);
- });
- })?;
-
- PermissionToolResponse {
- behavior: PermissionToolBehavior::Allow,
- updated_input: input.input,
- }
- }
- ALLOW => PermissionToolResponse {
- behavior: PermissionToolBehavior::Allow,
- updated_input: input.input,
- },
- REJECT => PermissionToolResponse {
- behavior: PermissionToolBehavior::Deny,
- updated_input: input.input,
- },
- opt => {
- debug_panic!("Unexpected option: {}", opt);
- PermissionToolResponse {
- behavior: PermissionToolBehavior::Deny,
- updated_input: input.input,
- }
- }
- };
-
- Ok(ToolResponse {
- content: vec![ToolResponseContent::Text {
- text: serde_json::to_string(&response)?,
- }],
- structured_content: (),
- })
- }
-}
-
-#[derive(Clone)]
-pub struct ReadTool {
- thread_rx: watch::Receiver<WeakEntity<AcpThread>>,
-}
-
-impl McpServerTool for ReadTool {
- type Input = ReadToolParams;
- type Output = ();
-
- const NAME: &'static str = "Read";
-
- fn description(&self) -> &'static str {
- "Read the contents of a file. In sessions with mcp__zed__Read always use it instead of Read as it contains the most up-to-date contents."
- }
-
- fn annotations(&self) -> ToolAnnotations {
- ToolAnnotations {
- title: Some("Read file".to_string()),
- read_only_hint: Some(true),
- destructive_hint: Some(false),
- open_world_hint: Some(false),
- idempotent_hint: None,
- }
- }
-
- async fn run(
- &self,
- input: Self::Input,
- cx: &mut AsyncApp,
- ) -> Result<ToolResponse<Self::Output>> {
- let mut thread_rx = self.thread_rx.clone();
- let Some(thread) = thread_rx.recv().await?.upgrade() else {
- anyhow::bail!("Thread closed");
- };
-
- let content = thread
- .update(cx, |thread, cx| {
- thread.read_text_file(input.abs_path, input.offset, input.limit, false, cx)
- })?
- .await?;
-
- Ok(ToolResponse {
- content: vec![ToolResponseContent::Text { text: content }],
- structured_content: (),
- })
- }
-}
-
-#[derive(Clone)]
-pub struct EditTool {
- thread_rx: watch::Receiver<WeakEntity<AcpThread>>,
-}
-
-impl McpServerTool for EditTool {
- type Input = EditToolParams;
- type Output = ();
-
- const NAME: &'static str = "Edit";
-
- fn description(&self) -> &'static str {
- "Edits a file. In sessions with mcp__zed__Edit always use it instead of Edit as it will show the diff to the user better."
- }
-
- fn annotations(&self) -> ToolAnnotations {
- ToolAnnotations {
- title: Some("Edit file".to_string()),
- read_only_hint: Some(false),
- destructive_hint: Some(false),
- open_world_hint: Some(false),
- idempotent_hint: Some(false),
- }
- }
-
- async fn run(
- &self,
- input: Self::Input,
- cx: &mut AsyncApp,
- ) -> Result<ToolResponse<Self::Output>> {
- let mut thread_rx = self.thread_rx.clone();
- let Some(thread) = thread_rx.recv().await?.upgrade() else {
- anyhow::bail!("Thread closed");
- };
-
- let content = thread
- .update(cx, |thread, cx| {
- thread.read_text_file(input.abs_path.clone(), None, None, true, cx)
- })?
- .await?;
-
- let new_content = content.replace(&input.old_text, &input.new_text);
- if new_content == content {
- return Err(anyhow::anyhow!("The old_text was not found in the content"));
- }
-
- thread
- .update(cx, |thread, cx| {
- thread.write_text_file(input.abs_path, new_content, cx)
- })?
- .await?;
-
- Ok(ToolResponse {
- content: vec![],
- structured_content: (),
- })
- }
-}
@@ -1,661 +0,0 @@
-use std::path::PathBuf;
-
-use agent_client_protocol as acp;
-use itertools::Itertools;
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use util::ResultExt;
-
-pub enum ClaudeTool {
- Task(Option<TaskToolParams>),
- NotebookRead(Option<NotebookReadToolParams>),
- NotebookEdit(Option<NotebookEditToolParams>),
- Edit(Option<EditToolParams>),
- MultiEdit(Option<MultiEditToolParams>),
- ReadFile(Option<ReadToolParams>),
- Write(Option<WriteToolParams>),
- Ls(Option<LsToolParams>),
- Glob(Option<GlobToolParams>),
- Grep(Option<GrepToolParams>),
- Terminal(Option<BashToolParams>),
- WebFetch(Option<WebFetchToolParams>),
- WebSearch(Option<WebSearchToolParams>),
- TodoWrite(Option<TodoWriteToolParams>),
- ExitPlanMode(Option<ExitPlanModeToolParams>),
- Other {
- name: String,
- input: serde_json::Value,
- },
-}
-
-impl ClaudeTool {
- pub fn infer(tool_name: &str, input: serde_json::Value) -> Self {
- match tool_name {
- // Known tools
- "mcp__zed__Read" => Self::ReadFile(serde_json::from_value(input).log_err()),
- "mcp__zed__Edit" => Self::Edit(serde_json::from_value(input).log_err()),
- "MultiEdit" => Self::MultiEdit(serde_json::from_value(input).log_err()),
- "Write" => Self::Write(serde_json::from_value(input).log_err()),
- "LS" => Self::Ls(serde_json::from_value(input).log_err()),
- "Glob" => Self::Glob(serde_json::from_value(input).log_err()),
- "Grep" => Self::Grep(serde_json::from_value(input).log_err()),
- "Bash" => Self::Terminal(serde_json::from_value(input).log_err()),
- "WebFetch" => Self::WebFetch(serde_json::from_value(input).log_err()),
- "WebSearch" => Self::WebSearch(serde_json::from_value(input).log_err()),
- "TodoWrite" => Self::TodoWrite(serde_json::from_value(input).log_err()),
- "exit_plan_mode" => Self::ExitPlanMode(serde_json::from_value(input).log_err()),
- "Task" => Self::Task(serde_json::from_value(input).log_err()),
- "NotebookRead" => Self::NotebookRead(serde_json::from_value(input).log_err()),
- "NotebookEdit" => Self::NotebookEdit(serde_json::from_value(input).log_err()),
- // Inferred from name
- _ => {
- let tool_name = tool_name.to_lowercase();
-
- if tool_name.contains("edit") || tool_name.contains("write") {
- Self::Edit(None)
- } else if tool_name.contains("terminal") {
- Self::Terminal(None)
- } else {
- Self::Other {
- name: tool_name.to_string(),
- input,
- }
- }
- }
- }
- }
-
- pub fn label(&self) -> String {
- match &self {
- Self::Task(Some(params)) => params.description.clone(),
- Self::Task(None) => "Task".into(),
- Self::NotebookRead(Some(params)) => {
- format!("Read Notebook {}", params.notebook_path.display())
- }
- Self::NotebookRead(None) => "Read Notebook".into(),
- Self::NotebookEdit(Some(params)) => {
- format!("Edit Notebook {}", params.notebook_path.display())
- }
- Self::NotebookEdit(None) => "Edit Notebook".into(),
- Self::Terminal(Some(params)) => format!("`{}`", params.command),
- Self::Terminal(None) => "Terminal".into(),
- Self::ReadFile(_) => "Read File".into(),
- Self::Ls(Some(params)) => {
- format!("List Directory {}", params.path.display())
- }
- Self::Ls(None) => "List Directory".into(),
- Self::Edit(Some(params)) => {
- format!("Edit {}", params.abs_path.display())
- }
- Self::Edit(None) => "Edit".into(),
- Self::MultiEdit(Some(params)) => {
- format!("Multi Edit {}", params.file_path.display())
- }
- Self::MultiEdit(None) => "Multi Edit".into(),
- Self::Write(Some(params)) => {
- format!("Write {}", params.file_path.display())
- }
- Self::Write(None) => "Write".into(),
- Self::Glob(Some(params)) => {
- format!("Glob `{params}`")
- }
- Self::Glob(None) => "Glob".into(),
- Self::Grep(Some(params)) => format!("`{params}`"),
- Self::Grep(None) => "Grep".into(),
- Self::WebFetch(Some(params)) => format!("Fetch {}", params.url),
- Self::WebFetch(None) => "Fetch".into(),
- Self::WebSearch(Some(params)) => format!("Web Search: {}", params),
- Self::WebSearch(None) => "Web Search".into(),
- Self::TodoWrite(Some(params)) => format!(
- "Update TODOs: {}",
- params.todos.iter().map(|todo| &todo.content).join(", ")
- ),
- Self::TodoWrite(None) => "Update TODOs".into(),
- Self::ExitPlanMode(_) => "Exit Plan Mode".into(),
- Self::Other { name, .. } => name.clone(),
- }
- }
- pub fn content(&self) -> Vec<acp::ToolCallContent> {
- match &self {
- Self::Other { input, .. } => vec![
- format!(
- "```json\n{}```",
- serde_json::to_string_pretty(&input).unwrap_or("{}".to_string())
- )
- .into(),
- ],
- Self::Task(Some(params)) => vec![params.prompt.clone().into()],
- Self::NotebookRead(Some(params)) => {
- vec![params.notebook_path.display().to_string().into()]
- }
- Self::NotebookEdit(Some(params)) => vec![params.new_source.clone().into()],
- Self::Terminal(Some(params)) => vec![
- format!(
- "`{}`\n\n{}",
- params.command,
- params.description.as_deref().unwrap_or_default()
- )
- .into(),
- ],
- Self::ReadFile(Some(params)) => vec![params.abs_path.display().to_string().into()],
- Self::Ls(Some(params)) => vec![params.path.display().to_string().into()],
- Self::Glob(Some(params)) => vec![params.to_string().into()],
- Self::Grep(Some(params)) => vec![format!("`{params}`").into()],
- Self::WebFetch(Some(params)) => vec![params.prompt.clone().into()],
- Self::WebSearch(Some(params)) => vec![params.to_string().into()],
- Self::ExitPlanMode(Some(params)) => vec![params.plan.clone().into()],
- Self::Edit(Some(params)) => vec![acp::ToolCallContent::Diff {
- diff: acp::Diff {
- path: params.abs_path.clone(),
- old_text: Some(params.old_text.clone()),
- new_text: params.new_text.clone(),
- },
- }],
- Self::Write(Some(params)) => vec![acp::ToolCallContent::Diff {
- diff: acp::Diff {
- path: params.file_path.clone(),
- old_text: None,
- new_text: params.content.clone(),
- },
- }],
- Self::MultiEdit(Some(params)) => {
- // todo: show multiple edits in a multibuffer?
- params
- .edits
- .first()
- .map(|edit| {
- vec![acp::ToolCallContent::Diff {
- diff: acp::Diff {
- path: params.file_path.clone(),
- old_text: Some(edit.old_string.clone()),
- new_text: edit.new_string.clone(),
- },
- }]
- })
- .unwrap_or_default()
- }
- Self::TodoWrite(Some(_)) => {
- // These are mapped to plan updates later
- vec![]
- }
- Self::Task(None)
- | Self::NotebookRead(None)
- | Self::NotebookEdit(None)
- | Self::Terminal(None)
- | Self::ReadFile(None)
- | Self::Ls(None)
- | Self::Glob(None)
- | Self::Grep(None)
- | Self::WebFetch(None)
- | Self::WebSearch(None)
- | Self::TodoWrite(None)
- | Self::ExitPlanMode(None)
- | Self::Edit(None)
- | Self::Write(None)
- | Self::MultiEdit(None) => vec![],
- }
- }
-
- pub fn kind(&self) -> acp::ToolKind {
- match self {
- Self::Task(_) => acp::ToolKind::Think,
- Self::NotebookRead(_) => acp::ToolKind::Read,
- Self::NotebookEdit(_) => acp::ToolKind::Edit,
- Self::Edit(_) => acp::ToolKind::Edit,
- Self::MultiEdit(_) => acp::ToolKind::Edit,
- Self::Write(_) => acp::ToolKind::Edit,
- Self::ReadFile(_) => acp::ToolKind::Read,
- Self::Ls(_) => acp::ToolKind::Search,
- Self::Glob(_) => acp::ToolKind::Search,
- Self::Grep(_) => acp::ToolKind::Search,
- Self::Terminal(_) => acp::ToolKind::Execute,
- Self::WebSearch(_) => acp::ToolKind::Search,
- Self::WebFetch(_) => acp::ToolKind::Fetch,
- Self::TodoWrite(_) => acp::ToolKind::Think,
- Self::ExitPlanMode(_) => acp::ToolKind::Think,
- Self::Other { .. } => acp::ToolKind::Other,
- }
- }
-
- pub fn locations(&self) -> Vec<acp::ToolCallLocation> {
- match &self {
- Self::Edit(Some(EditToolParams { abs_path, .. })) => vec![acp::ToolCallLocation {
- path: abs_path.clone(),
- line: None,
- }],
- Self::MultiEdit(Some(MultiEditToolParams { file_path, .. })) => {
- vec![acp::ToolCallLocation {
- path: file_path.clone(),
- line: None,
- }]
- }
- Self::Write(Some(WriteToolParams { file_path, .. })) => {
- vec![acp::ToolCallLocation {
- path: file_path.clone(),
- line: None,
- }]
- }
- Self::ReadFile(Some(ReadToolParams {
- abs_path, offset, ..
- })) => vec![acp::ToolCallLocation {
- path: abs_path.clone(),
- line: *offset,
- }],
- Self::NotebookRead(Some(NotebookReadToolParams { notebook_path, .. })) => {
- vec![acp::ToolCallLocation {
- path: notebook_path.clone(),
- line: None,
- }]
- }
- Self::NotebookEdit(Some(NotebookEditToolParams { notebook_path, .. })) => {
- vec![acp::ToolCallLocation {
- path: notebook_path.clone(),
- line: None,
- }]
- }
- Self::Glob(Some(GlobToolParams {
- path: Some(path), ..
- })) => vec![acp::ToolCallLocation {
- path: path.clone(),
- line: None,
- }],
- Self::Ls(Some(LsToolParams { path, .. })) => vec![acp::ToolCallLocation {
- path: path.clone(),
- line: None,
- }],
- Self::Grep(Some(GrepToolParams {
- path: Some(path), ..
- })) => vec![acp::ToolCallLocation {
- path: PathBuf::from(path),
- line: None,
- }],
- Self::Task(_)
- | Self::NotebookRead(None)
- | Self::NotebookEdit(None)
- | Self::Edit(None)
- | Self::MultiEdit(None)
- | Self::Write(None)
- | Self::ReadFile(None)
- | Self::Ls(None)
- | Self::Glob(_)
- | Self::Grep(_)
- | Self::Terminal(_)
- | Self::WebFetch(_)
- | Self::WebSearch(_)
- | Self::TodoWrite(_)
- | Self::ExitPlanMode(_)
- | Self::Other { .. } => vec![],
- }
- }
-
- pub fn as_acp(&self, id: acp::ToolCallId) -> acp::ToolCall {
- acp::ToolCall {
- id,
- kind: self.kind(),
- status: acp::ToolCallStatus::InProgress,
- title: self.label(),
- content: self.content(),
- locations: self.locations(),
- raw_input: None,
- raw_output: None,
- }
- }
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct EditToolParams {
- /// The absolute path to the file to read.
- pub abs_path: PathBuf,
- /// The old text to replace (must be unique in the file)
- pub old_text: String,
- /// The new text.
- pub new_text: String,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct ReadToolParams {
- /// The absolute path to the file to read.
- pub abs_path: PathBuf,
- /// Which line to start reading from. Omit to start from the beginning.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub offset: Option<u32>,
- /// How many lines to read. Omit for the whole file.
- #[serde(skip_serializing_if = "Option::is_none")]
- pub limit: Option<u32>,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct WriteToolParams {
- /// Absolute path for new file
- pub file_path: PathBuf,
- /// File content
- pub content: String,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct BashToolParams {
- /// Shell command to execute
- pub command: String,
- /// 5-10 word description of what command does
- #[serde(skip_serializing_if = "Option::is_none")]
- pub description: Option<String>,
- /// Timeout in ms (max 600000ms/10min, default 120000ms)
- #[serde(skip_serializing_if = "Option::is_none")]
- pub timeout: Option<u32>,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct GlobToolParams {
- /// Glob pattern like **/*.js or src/**/*.ts
- pub pattern: String,
- /// Directory to search in (omit for current directory)
- #[serde(skip_serializing_if = "Option::is_none")]
- pub path: Option<PathBuf>,
-}
-
-impl std::fmt::Display for GlobToolParams {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- if let Some(path) = &self.path {
- write!(f, "{}", path.display())?;
- }
- write!(f, "{}", self.pattern)
- }
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct LsToolParams {
- /// Absolute path to directory
- pub path: PathBuf,
- /// Array of glob patterns to ignore
- #[serde(default, skip_serializing_if = "Vec::is_empty")]
- pub ignore: Vec<String>,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct GrepToolParams {
- /// Regex pattern to search for
- pub pattern: String,
- /// File/directory to search (defaults to current directory)
- #[serde(skip_serializing_if = "Option::is_none")]
- pub path: Option<String>,
- /// "content" (shows lines), "files_with_matches" (default), "count"
- #[serde(skip_serializing_if = "Option::is_none")]
- pub output_mode: Option<GrepOutputMode>,
- /// Filter files with glob pattern like "*.js"
- #[serde(skip_serializing_if = "Option::is_none")]
- pub glob: Option<String>,
- /// File type filter like "js", "py", "rust"
- #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
- pub file_type: Option<String>,
- /// Case insensitive search
- #[serde(rename = "-i", default, skip_serializing_if = "is_false")]
- pub case_insensitive: bool,
- /// Show line numbers (content mode only)
- #[serde(rename = "-n", default, skip_serializing_if = "is_false")]
- pub line_numbers: bool,
- /// Lines after match (content mode only)
- #[serde(rename = "-A", skip_serializing_if = "Option::is_none")]
- pub after_context: Option<u32>,
- /// Lines before match (content mode only)
- #[serde(rename = "-B", skip_serializing_if = "Option::is_none")]
- pub before_context: Option<u32>,
- /// Lines before and after match (content mode only)
- #[serde(rename = "-C", skip_serializing_if = "Option::is_none")]
- pub context: Option<u32>,
- /// Enable multiline/cross-line matching
- #[serde(default, skip_serializing_if = "is_false")]
- pub multiline: bool,
- /// Limit output to first N results
- #[serde(skip_serializing_if = "Option::is_none")]
- pub head_limit: Option<u32>,
-}
-
-impl std::fmt::Display for GrepToolParams {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "grep")?;
-
- // Boolean flags
- if self.case_insensitive {
- write!(f, " -i")?;
- }
- if self.line_numbers {
- write!(f, " -n")?;
- }
-
- // Context options
- if let Some(after) = self.after_context {
- write!(f, " -A {}", after)?;
- }
- if let Some(before) = self.before_context {
- write!(f, " -B {}", before)?;
- }
- if let Some(context) = self.context {
- write!(f, " -C {}", context)?;
- }
-
- // Output mode
- if let Some(mode) = &self.output_mode {
- match mode {
- GrepOutputMode::FilesWithMatches => write!(f, " -l")?,
- GrepOutputMode::Count => write!(f, " -c")?,
- GrepOutputMode::Content => {} // Default mode
- }
- }
-
- // Head limit
- if let Some(limit) = self.head_limit {
- write!(f, " | head -{}", limit)?;
- }
-
- // Glob pattern
- if let Some(glob) = &self.glob {
- write!(f, " --include=\"{}\"", glob)?;
- }
-
- // File type
- if let Some(file_type) = &self.file_type {
- write!(f, " --type={}", file_type)?;
- }
-
- // Multiline
- if self.multiline {
- write!(f, " -P")?; // Perl-compatible regex for multiline
- }
-
- // Pattern (escaped if contains special characters)
- write!(f, " \"{}\"", self.pattern)?;
-
- // Path
- if let Some(path) = &self.path {
- write!(f, " {}", path)?;
- }
-
- Ok(())
- }
-}
-
-#[derive(Default, Deserialize, Serialize, JsonSchema, strum::Display, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum TodoPriority {
- High,
- #[default]
- Medium,
- Low,
-}
-
-impl Into<acp::PlanEntryPriority> for TodoPriority {
- fn into(self) -> acp::PlanEntryPriority {
- match self {
- TodoPriority::High => acp::PlanEntryPriority::High,
- TodoPriority::Medium => acp::PlanEntryPriority::Medium,
- TodoPriority::Low => acp::PlanEntryPriority::Low,
- }
- }
-}
-
-#[derive(Deserialize, Serialize, JsonSchema, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum TodoStatus {
- Pending,
- InProgress,
- Completed,
-}
-
-impl Into<acp::PlanEntryStatus> for TodoStatus {
- fn into(self) -> acp::PlanEntryStatus {
- match self {
- TodoStatus::Pending => acp::PlanEntryStatus::Pending,
- TodoStatus::InProgress => acp::PlanEntryStatus::InProgress,
- TodoStatus::Completed => acp::PlanEntryStatus::Completed,
- }
- }
-}
-
-#[derive(Deserialize, Serialize, JsonSchema, Debug)]
-pub struct Todo {
- /// Task description
- pub content: String,
- /// Current status of the todo
- pub status: TodoStatus,
- /// Priority level of the todo
- #[serde(default)]
- pub priority: TodoPriority,
-}
-
-impl Into<acp::PlanEntry> for Todo {
- fn into(self) -> acp::PlanEntry {
- acp::PlanEntry {
- content: self.content,
- priority: self.priority.into(),
- status: self.status.into(),
- }
- }
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct TodoWriteToolParams {
- pub todos: Vec<Todo>,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct ExitPlanModeToolParams {
- /// Implementation plan in markdown format
- pub plan: String,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct TaskToolParams {
- /// Short 3-5 word description of task
- pub description: String,
- /// Detailed task for agent to perform
- pub prompt: String,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct NotebookReadToolParams {
- /// Absolute path to .ipynb file
- pub notebook_path: PathBuf,
- /// Specific cell ID to read
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cell_id: Option<String>,
-}
-
-#[derive(Deserialize, Serialize, JsonSchema, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum CellType {
- Code,
- Markdown,
-}
-
-#[derive(Deserialize, Serialize, JsonSchema, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum EditMode {
- Replace,
- Insert,
- Delete,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct NotebookEditToolParams {
- /// Absolute path to .ipynb file
- pub notebook_path: PathBuf,
- /// New cell content
- pub new_source: String,
- /// Cell ID to edit
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cell_id: Option<String>,
- /// Type of cell (code or markdown)
- #[serde(skip_serializing_if = "Option::is_none")]
- pub cell_type: Option<CellType>,
- /// Edit operation mode
- #[serde(skip_serializing_if = "Option::is_none")]
- pub edit_mode: Option<EditMode>,
-}
-
-#[derive(Deserialize, Serialize, JsonSchema, Debug)]
-pub struct MultiEditItem {
- /// The text to search for and replace
- pub old_string: String,
- /// The replacement text
- pub new_string: String,
- /// Whether to replace all occurrences or just the first
- #[serde(default, skip_serializing_if = "is_false")]
- pub replace_all: bool,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct MultiEditToolParams {
- /// Absolute path to file
- pub file_path: PathBuf,
- /// List of edits to apply
- pub edits: Vec<MultiEditItem>,
-}
-
-fn is_false(v: &bool) -> bool {
- !*v
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum GrepOutputMode {
- Content,
- FilesWithMatches,
- Count,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct WebFetchToolParams {
- /// Valid URL to fetch
- #[serde(rename = "url")]
- pub url: String,
- /// What to extract from content
- pub prompt: String,
-}
-
-#[derive(Deserialize, JsonSchema, Debug)]
-pub struct WebSearchToolParams {
- /// Search query (min 2 chars)
- pub query: String,
- /// Only include these domains
- #[serde(default, skip_serializing_if = "Vec::is_empty")]
- pub allowed_domains: Vec<String>,
- /// Exclude these domains
- #[serde(default, skip_serializing_if = "Vec::is_empty")]
- pub blocked_domains: Vec<String>,
-}
-
-impl std::fmt::Display for WebSearchToolParams {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, "\"{}\"", self.query)?;
-
- if !self.allowed_domains.is_empty() {
- write!(f, " (allowed: {})", self.allowed_domains.join(", "))?;
- }
-
- if !self.blocked_domains.is_empty() {
- write!(f, " (blocked: {})", self.blocked_domains.join(", "))?;
- }
-
- Ok(())
- }
-}
@@ -0,0 +1,70 @@
+use crate::AgentServerDelegate;
+use acp_thread::AgentConnection;
+use anyhow::{Context as _, Result};
+use gpui::{App, SharedString, Task};
+use project::agent_server_store::ExternalAgentServerName;
+use std::{path::Path, rc::Rc};
+use ui::IconName;
+
+/// A generic agent server implementation for custom user-defined agents
+pub struct CustomAgentServer {
+ name: SharedString,
+}
+
+impl CustomAgentServer {
+ pub fn new(name: SharedString) -> Self {
+ Self { name }
+ }
+}
+
+impl crate::AgentServer for CustomAgentServer {
+ fn telemetry_id(&self) -> &'static str {
+ "custom"
+ }
+
+ fn name(&self) -> SharedString {
+ self.name.clone()
+ }
+
+ fn logo(&self) -> IconName {
+ IconName::Terminal
+ }
+
+ fn connect(
+ &self,
+ root_dir: Option<&Path>,
+ delegate: AgentServerDelegate,
+ cx: &mut App,
+ ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
+ let name = self.name();
+ let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
+ let is_remote = delegate.project.read(cx).is_via_remote_server();
+ let store = delegate.store.downgrade();
+
+ cx.spawn(async move |cx| {
+ let (command, root_dir, login) = store
+ .update(cx, |store, cx| {
+ let agent = store
+ .get_external_agent(&ExternalAgentServerName(name.clone()))
+ .with_context(|| {
+ format!("Custom agent server `{}` is not registered", name)
+ })?;
+ anyhow::Ok(agent.get_command(
+ root_dir.as_deref(),
+ Default::default(),
+ delegate.status_tx,
+ delegate.new_version_available,
+ &mut cx.to_async(),
+ ))
+ })??
+ .await?;
+ let connection =
+ crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
+ Ok((connection, login))
+ })
+ }
+
+ fn into_any(self: Rc<Self>) -> Rc<dyn std::any::Any> {
+ self
+ }
+}
@@ -1,24 +1,33 @@
+use crate::{AgentServer, AgentServerDelegate};
+use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
+use agent_client_protocol as acp;
+use futures::{FutureExt, StreamExt, channel::mpsc, select};
+use gpui::{AppContext, Entity, TestAppContext};
+use indoc::indoc;
+#[cfg(test)]
+use project::agent_server_store::{AgentServerCommand, CustomAgentServerSettings};
+use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
use std::{
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
-
-use crate::{AgentServer, AgentServerSettings, AllAgentServersSettings};
-use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
-use agent_client_protocol as acp;
-
-use futures::{FutureExt, StreamExt, channel::mpsc, select};
-use gpui::{Entity, TestAppContext};
-use indoc::indoc;
-use project::{FakeFs, Project};
-use settings::{Settings, SettingsStore};
use util::path;
-pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
- let fs = init_test(cx).await;
- let project = Project::test(fs, [], cx).await;
- let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
+pub async fn test_basic<T, F>(server: F, cx: &mut TestAppContext)
+where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as Arc<dyn fs::Fs>;
+ let project = Project::test(fs.clone(), [], cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ "/private/tmp",
+ cx,
+ )
+ .await;
thread
.update(cx, |thread, cx| thread.send_raw("Hello from Zed!", cx))
@@ -42,8 +51,12 @@ pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppCont
});
}
-pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
- let _fs = init_test(cx).await;
+pub async fn test_path_mentions<T, F>(server: F, cx: &mut TestAppContext)
+where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as _;
let tempdir = tempfile::tempdir().unwrap();
std::fs::write(
@@ -56,7 +69,13 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes
)
.expect("failed to write file");
let project = Project::example([tempdir.path()], &mut cx.to_async()).await;
- let thread = new_test_thread(server, project.clone(), tempdir.path(), cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ tempdir.path(),
+ cx,
+ )
+ .await;
thread
.update(cx, |thread, cx| {
thread.send(
@@ -110,15 +129,25 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes
drop(tempdir);
}
-pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
- let _fs = init_test(cx).await;
+pub async fn test_tool_call<T, F>(server: F, cx: &mut TestAppContext)
+where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as _;
let tempdir = tempfile::tempdir().unwrap();
let foo_path = tempdir.path().join("foo");
std::fs::write(&foo_path, "Lorem ipsum dolor").expect("failed to write file");
let project = Project::example([tempdir.path()], &mut cx.to_async()).await;
- let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ "/private/tmp",
+ cx,
+ )
+ .await;
thread
.update(cx, |thread, cx| {
@@ -152,14 +181,23 @@ pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestApp
drop(tempdir);
}
-pub async fn test_tool_call_with_permission(
- server: impl AgentServer + 'static,
+pub async fn test_tool_call_with_permission<T, F>(
+ server: F,
allow_option_id: acp::PermissionOptionId,
cx: &mut TestAppContext,
-) {
- let fs = init_test(cx).await;
- let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
- let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
+) where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as Arc<dyn fs::Fs>;
+ let project = Project::test(fs.clone(), [path!("/private/tmp").as_ref()], cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ "/private/tmp",
+ cx,
+ )
+ .await;
let full_turn = thread.update(cx, |thread, cx| {
thread.send_raw(
r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#,
@@ -247,11 +285,21 @@ pub async fn test_tool_call_with_permission(
});
}
-pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
- let fs = init_test(cx).await;
-
- let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
- let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
+pub async fn test_cancel<T, F>(server: F, cx: &mut TestAppContext)
+where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as Arc<dyn fs::Fs>;
+
+ let project = Project::test(fs.clone(), [path!("/private/tmp").as_ref()], cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ "/private/tmp",
+ cx,
+ )
+ .await;
let _ = thread.update(cx, |thread, cx| {
thread.send_raw(
r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#,
@@ -316,10 +364,20 @@ pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppCon
});
}
-pub async fn test_thread_drop(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
- let fs = init_test(cx).await;
- let project = Project::test(fs, [], cx).await;
- let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
+pub async fn test_thread_drop<T, F>(server: F, cx: &mut TestAppContext)
+where
+ T: AgentServer + 'static,
+ F: AsyncFn(&Arc<dyn fs::Fs>, &Entity<Project>, &mut TestAppContext) -> T,
+{
+ let fs = init_test(cx).await as Arc<dyn fs::Fs>;
+ let project = Project::test(fs.clone(), [], cx).await;
+ let thread = new_test_thread(
+ server(&fs, &project, cx).await,
+ project.clone(),
+ "/private/tmp",
+ cx,
+ )
+ .await;
thread
.update(cx, |thread, cx| thread.send_raw("Hello from test!", cx))
@@ -386,27 +444,43 @@ macro_rules! common_e2e_tests {
}
};
}
+pub use common_e2e_tests;
// Helpers
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
+ use settings::Settings;
+
env_logger::try_init().ok();
cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
+ let settings_store = settings::SettingsStore::test(cx);
cx.set_global(settings_store);
Project::init_settings(cx);
language::init(cx);
- crate::settings::init(cx);
-
- crate::AllAgentServersSettings::override_global(
+ gpui_tokio::init(cx);
+ let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
+ cx.set_http_client(Arc::new(http_client));
+ client::init_settings(cx);
+ let client = client::Client::production(cx);
+ let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
+ language_model::init(client.clone(), cx);
+ language_models::init(user_store, client, cx);
+ agent_settings::init(cx);
+ AllAgentServersSettings::register(cx);
+
+ #[cfg(test)]
+ AllAgentServersSettings::override_global(
AllAgentServersSettings {
- claude: Some(AgentServerSettings {
- command: crate::claude::tests::local_command(),
- }),
- gemini: Some(AgentServerSettings {
- command: crate::gemini::tests::local_command(),
+ claude: Some(CustomAgentServerSettings {
+ command: AgentServerCommand {
+ path: "claude-code-acp".into(),
+ args: vec![],
+ env: None,
+ },
}),
+ gemini: Some(crate::gemini::tests::local_command().into()),
+ custom: collections::HashMap::default(),
},
cx,
);
@@ -423,17 +497,17 @@ pub async fn new_test_thread(
current_dir: impl AsRef<Path>,
cx: &mut TestAppContext,
) -> Entity<AcpThread> {
- let connection = cx
- .update(|cx| server.connect(current_dir.as_ref(), &project, cx))
- .await
- .unwrap();
+ let store = project.read_with(cx, |project, _| project.agent_server_store().clone());
+ let delegate = AgentServerDelegate::new(store, project.clone(), None, None);
- let thread = cx
- .update(|cx| connection.new_thread(project.clone(), current_dir.as_ref(), cx))
+ let (connection, _) = cx
+ .update(|cx| server.connect(Some(current_dir.as_ref()), delegate, cx))
.await
.unwrap();
- thread
+ cx.update(|cx| connection.new_thread(project.clone(), current_dir.as_ref(), cx))
+ .await
+ .unwrap()
}
pub async fn run_until_first_tool_call(
@@ -1,32 +1,24 @@
use std::rc::Rc;
use std::{any::Any, path::Path};
-use crate::{AgentServer, AgentServerCommand};
-use acp_thread::{AgentConnection, LoadError};
-use anyhow::Result;
-use gpui::{Entity, Task};
-use project::Project;
-use settings::SettingsStore;
-use ui::App;
-
-use crate::AllAgentServersSettings;
+use crate::{AgentServer, AgentServerDelegate};
+use acp_thread::AgentConnection;
+use anyhow::{Context as _, Result};
+use collections::HashMap;
+use gpui::{App, SharedString, Task};
+use language_models::provider::google::GoogleLanguageModelProvider;
+use project::agent_server_store::GEMINI_NAME;
#[derive(Clone)]
pub struct Gemini;
-const ACP_ARG: &str = "--experimental-acp";
-
impl AgentServer for Gemini {
- fn name(&self) -> &'static str {
- "Gemini"
- }
-
- fn empty_state_headline(&self) -> &'static str {
- "Welcome to Gemini"
+ fn telemetry_id(&self) -> &'static str {
+ "gemini-cli"
}
- fn empty_state_message(&self) -> &'static str {
- "Ask questions, edit files, run commands.\nBe specific for the best results."
+ fn name(&self) -> SharedString {
+ "Gemini CLI".into()
}
fn logo(&self) -> ui::IconName {
@@ -35,55 +27,37 @@ impl AgentServer for Gemini {
fn connect(
&self,
- root_dir: &Path,
- project: &Entity<Project>,
+ root_dir: Option<&Path>,
+ delegate: AgentServerDelegate,
cx: &mut App,
- ) -> Task<Result<Rc<dyn AgentConnection>>> {
- let project = project.clone();
- let root_dir = root_dir.to_path_buf();
- let server_name = self.name();
- cx.spawn(async move |cx| {
- let settings = cx.read_global(|settings: &SettingsStore, _| {
- settings.get::<AllAgentServersSettings>(None).gemini.clone()
- })?;
-
- let Some(command) =
- AgentServerCommand::resolve("gemini", &[ACP_ARG], None, settings, &project, cx).await
- else {
- anyhow::bail!("Failed to find gemini binary");
- };
+ ) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
+ let name = self.name();
+ let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
+ let is_remote = delegate.project.read(cx).is_via_remote_server();
+ let store = delegate.store.downgrade();
- let result = crate::acp::connect(server_name, command.clone(), &root_dir, cx).await;
- if result.is_err() {
- let version_fut = util::command::new_smol_command(&command.path)
- .args(command.args.iter())
- .arg("--version")
- .kill_on_drop(true)
- .output();
-
- let help_fut = util::command::new_smol_command(&command.path)
- .args(command.args.iter())
- .arg("--help")
- .kill_on_drop(true)
- .output();
-
- let (version_output, help_output) = futures::future::join(version_fut, help_fut).await;
-
- let current_version = String::from_utf8(version_output?.stdout)?;
- let supported = String::from_utf8(help_output?.stdout)?.contains(ACP_ARG);
-
- if !supported {
- return Err(LoadError::Unsupported {
- error_message: format!(
- "Your installed version of Gemini {} doesn't support the Agentic Coding Protocol (ACP).",
- current_version
- ).into(),
- upgrade_message: "Upgrade Gemini to Latest".into(),
- upgrade_command: "npm install -g @google/gemini-cli@latest".into(),
- }.into())
- }
+ cx.spawn(async move |cx| {
+ let mut extra_env = HashMap::default();
+ if let Some(api_key) = cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() {
+ extra_env.insert("GEMINI_API_KEY".into(), api_key.key);
}
- result
+ let (command, root_dir, login) = store
+ .update(cx, |store, cx| {
+ let agent = store
+ .get_external_agent(&GEMINI_NAME.into())
+ .context("Gemini CLI is not registered")?;
+ anyhow::Ok(agent.get_command(
+ root_dir.as_deref(),
+ extra_env,
+ delegate.status_tx,
+ delegate.new_version_available,
+ &mut cx.to_async(),
+ ))
+ })??
+ .await?;
+ let connection =
+ crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
+ Ok((connection, login))
})
}
@@ -94,11 +68,12 @@ impl AgentServer for Gemini {
#[cfg(test)]
pub(crate) mod tests {
+ use project::agent_server_store::AgentServerCommand;
+
use super::*;
- use crate::AgentServerCommand;
use std::path::Path;
- crate::common_e2e_tests!(Gemini, allow_option_id = "proceed_once");
+ crate::common_e2e_tests!(async |_, _, _| Gemini, allow_option_id = "proceed_once");
pub fn local_command() -> AgentServerCommand {
let cli_path = Path::new(env!("CARGO_MANIFEST_DIR"))
@@ -1,45 +0,0 @@
-use crate::AgentServerCommand;
-use anyhow::Result;
-use gpui::App;
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
-
-pub fn init(cx: &mut App) {
- AllAgentServersSettings::register(cx);
-}
-
-#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug)]
-pub struct AllAgentServersSettings {
- pub gemini: Option<AgentServerSettings>,
- pub claude: Option<AgentServerSettings>,
-}
-
-#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug)]
-pub struct AgentServerSettings {
- #[serde(flatten)]
- pub command: AgentServerCommand,
-}
-
-impl settings::Settings for AllAgentServersSettings {
- const KEY: Option<&'static str> = Some("agent_servers");
-
- type FileContent = Self;
-
- fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
- let mut settings = AllAgentServersSettings::default();
-
- for AllAgentServersSettings { gemini, claude } in sources.defaults_and_customizations() {
- if gemini.is_some() {
- settings.gemini = gemini.clone();
- }
- if claude.is_some() {
- settings.claude = claude.clone();
- }
- }
-
- Ok(settings)
- }
-
- fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
-}
@@ -8,13 +8,15 @@ use gpui::{App, Pixels, SharedString};
use language_model::LanguageModel;
use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use std::borrow::Cow;
pub use crate::agent_profile::*;
pub const SUMMARIZE_THREAD_PROMPT: &str =
include_str!("../../agent/src/prompts/summarize_thread_prompt.txt");
+pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
+ include_str!("../../agent/src/prompts/summarize_thread_detailed_prompt.txt");
pub fn init(cx: &mut App) {
AgentSettings::register(cx);
@@ -221,7 +223,8 @@ impl AgentSettingsContent {
}
}
-#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
+#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default, SettingsUi, SettingsKey)]
+#[settings_key(key = "agent", fallback_key = "assistant")]
pub struct AgentSettingsContent {
/// Whether the Agent is enabled.
///
@@ -350,18 +353,19 @@ impl JsonSchema for LanguageModelProviderSetting {
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
json_schema!({
"enum": [
- "anthropic",
"amazon-bedrock",
+ "anthropic",
+ "copilot_chat",
+ "deepseek",
"google",
"lmstudio",
+ "mistral",
"ollama",
"openai",
- "zed.dev",
- "copilot_chat",
- "deepseek",
"openrouter",
- "mistral",
- "vercel"
+ "vercel",
+ "x_ai",
+ "zed.dev"
]
})
}
@@ -396,10 +400,6 @@ pub struct ContextServerPresetContent {
}
impl Settings for AgentSettings {
- const KEY: Option<&'static str> = Some("agent");
-
- const FALLBACK_KEY: Option<&'static str> = Some("assistant");
-
const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
type FileContent = AgentSettingsContent;
@@ -503,9 +503,8 @@ impl Settings for AgentSettings {
}
}
- debug_assert_eq!(
- sources.default.always_allow_tool_actions.unwrap_or(false),
- false,
+ debug_assert!(
+ !sources.default.always_allow_tool_actions.unwrap_or(false),
"For security, agent.always_allow_tool_actions should always be false in default.json. If it's true, that is a bug that should be fixed!"
);
@@ -67,6 +67,7 @@ ordered-float.workspace = true
parking_lot.workspace = true
paths.workspace = true
picker.workspace = true
+postage.workspace = true
project.workspace = true
prompt_store.workspace = true
proto.workspace = true
@@ -79,6 +80,7 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
+shlex.workspace = true
smol.workspace = true
streaming_diff.workspace = true
task.workspace = true
@@ -104,9 +106,11 @@ zed_actions.workspace = true
[dev-dependencies]
acp_thread = { workspace = true, features = ["test-support"] }
agent = { workspace = true, features = ["test-support"] }
+agent2 = { workspace = true, features = ["test-support"] }
assistant_context = { workspace = true, features = ["test-support"] }
assistant_tools.workspace = true
buffer_diff = { workspace = true, features = ["test-support"] }
+db = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
indoc.workspace = true
@@ -1,16 +1,22 @@
+use std::cell::{Cell, RefCell};
use std::ops::Range;
+use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use acp_thread::MentionUri;
+use agent_client_protocol as acp;
+use agent2::{HistoryEntry, HistoryStore};
use anyhow::Result;
use editor::{CompletionProvider, Editor, ExcerptId};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{App, Entity, Task, WeakEntity};
use language::{Buffer, CodeLabel, HighlightId};
use lsp::CompletionContext;
+use project::lsp_store::CompletionDocumentation;
use project::{
- Completion, CompletionIntent, CompletionResponse, Project, ProjectPath, Symbol, WorktreeId,
+ Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project,
+ ProjectPath, Symbol, WorktreeId,
};
use prompt_store::PromptStore;
use rope::Point;
@@ -18,25 +24,21 @@ use text::{Anchor, ToPoint as _};
use ui::prelude::*;
use workspace::Workspace;
-use agent::thread_store::{TextThreadStore, ThreadStore};
-
-use crate::acp::message_editor::MessageEditor;
+use crate::AgentPanel;
+use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
use crate::context_picker::file_context_picker::{FileMatch, search_files};
use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules};
use crate::context_picker::symbol_context_picker::SymbolMatch;
use crate::context_picker::symbol_context_picker::search_symbols;
-use crate::context_picker::thread_context_picker::{
- ThreadContextEntry, ThreadMatch, search_threads,
-};
use crate::context_picker::{
- ContextPickerAction, ContextPickerEntry, ContextPickerMode, RecentEntry,
- available_context_picker_entries, recent_context_picker_entries, selection_ranges,
+ ContextPickerAction, ContextPickerEntry, ContextPickerMode, selection_ranges,
};
pub(crate) enum Match {
File(FileMatch),
Symbol(SymbolMatch),
- Thread(ThreadMatch),
+ Thread(HistoryEntry),
+ RecentThread(HistoryEntry),
Fetch(SharedString),
Rules(RulesContextEntry),
Entry(EntryMatch),
@@ -53,6 +55,7 @@ impl Match {
Match::File(file) => file.mat.score,
Match::Entry(mode) => mode.mat.as_ref().map(|mat| mat.score).unwrap_or(1.),
Match::Thread(_) => 1.,
+ Match::RecentThread(_) => 1.,
Match::Symbol(_) => 1.,
Match::Rules(_) => 1.,
Match::Fetch(_) => 1.,
@@ -60,209 +63,31 @@ impl Match {
}
}
-fn search(
- mode: Option<ContextPickerMode>,
- query: String,
- cancellation_flag: Arc<AtomicBool>,
- recent_entries: Vec<RecentEntry>,
- prompt_store: Option<Entity<PromptStore>>,
- thread_store: WeakEntity<ThreadStore>,
- text_thread_context_store: WeakEntity<assistant_context::ContextStore>,
- workspace: Entity<Workspace>,
- cx: &mut App,
-) -> Task<Vec<Match>> {
- match mode {
- Some(ContextPickerMode::File) => {
- let search_files_task =
- search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
- cx.background_spawn(async move {
- search_files_task
- .await
- .into_iter()
- .map(Match::File)
- .collect()
- })
- }
-
- Some(ContextPickerMode::Symbol) => {
- let search_symbols_task =
- search_symbols(query.clone(), cancellation_flag.clone(), &workspace, cx);
- cx.background_spawn(async move {
- search_symbols_task
- .await
- .into_iter()
- .map(Match::Symbol)
- .collect()
- })
- }
-
- Some(ContextPickerMode::Thread) => {
- if let Some((thread_store, context_store)) = thread_store
- .upgrade()
- .zip(text_thread_context_store.upgrade())
- {
- let search_threads_task = search_threads(
- query.clone(),
- cancellation_flag.clone(),
- thread_store,
- context_store,
- cx,
- );
- cx.background_spawn(async move {
- search_threads_task
- .await
- .into_iter()
- .map(Match::Thread)
- .collect()
- })
- } else {
- Task::ready(Vec::new())
- }
- }
-
- Some(ContextPickerMode::Fetch) => {
- if !query.is_empty() {
- Task::ready(vec![Match::Fetch(query.into())])
- } else {
- Task::ready(Vec::new())
- }
- }
-
- Some(ContextPickerMode::Rules) => {
- if let Some(prompt_store) = prompt_store.as_ref() {
- let search_rules_task =
- search_rules(query.clone(), cancellation_flag.clone(), prompt_store, cx);
- cx.background_spawn(async move {
- search_rules_task
- .await
- .into_iter()
- .map(Match::Rules)
- .collect::<Vec<_>>()
- })
- } else {
- Task::ready(Vec::new())
- }
- }
-
- None => {
- if query.is_empty() {
- let mut matches = recent_entries
- .into_iter()
- .map(|entry| match entry {
- RecentEntry::File {
- project_path,
- path_prefix,
- } => Match::File(FileMatch {
- mat: fuzzy::PathMatch {
- score: 1.,
- positions: Vec::new(),
- worktree_id: project_path.worktree_id.to_usize(),
- path: project_path.path,
- path_prefix,
- is_dir: false,
- distance_to_relative_ancestor: 0,
- },
- is_recent: true,
- }),
- RecentEntry::Thread(thread_context_entry) => Match::Thread(ThreadMatch {
- thread: thread_context_entry,
- is_recent: true,
- }),
- })
- .collect::<Vec<_>>();
-
- matches.extend(
- available_context_picker_entries(
- &prompt_store,
- &Some(thread_store.clone()),
- &workspace,
- cx,
- )
- .into_iter()
- .map(|mode| {
- Match::Entry(EntryMatch {
- entry: mode,
- mat: None,
- })
- }),
- );
-
- Task::ready(matches)
- } else {
- let executor = cx.background_executor().clone();
-
- let search_files_task =
- search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
-
- let entries = available_context_picker_entries(
- &prompt_store,
- &Some(thread_store.clone()),
- &workspace,
- cx,
- );
- let entry_candidates = entries
- .iter()
- .enumerate()
- .map(|(ix, entry)| StringMatchCandidate::new(ix, entry.keyword()))
- .collect::<Vec<_>>();
-
- cx.background_spawn(async move {
- let mut matches = search_files_task
- .await
- .into_iter()
- .map(Match::File)
- .collect::<Vec<_>>();
-
- let entry_matches = fuzzy::match_strings(
- &entry_candidates,
- &query,
- false,
- true,
- 100,
- &Arc::new(AtomicBool::default()),
- executor,
- )
- .await;
-
- matches.extend(entry_matches.into_iter().map(|mat| {
- Match::Entry(EntryMatch {
- entry: entries[mat.candidate_id],
- mat: Some(mat),
- })
- }));
-
- matches.sort_by(|a, b| {
- b.score()
- .partial_cmp(&a.score())
- .unwrap_or(std::cmp::Ordering::Equal)
- });
-
- matches
- })
- }
- }
- }
-}
-
pub struct ContextPickerCompletionProvider {
- workspace: WeakEntity<Workspace>,
- thread_store: WeakEntity<ThreadStore>,
- text_thread_store: WeakEntity<TextThreadStore>,
message_editor: WeakEntity<MessageEditor>,
+ workspace: WeakEntity<Workspace>,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
}
impl ContextPickerCompletionProvider {
pub fn new(
- workspace: WeakEntity<Workspace>,
- thread_store: WeakEntity<ThreadStore>,
- text_thread_store: WeakEntity<TextThreadStore>,
message_editor: WeakEntity<MessageEditor>,
+ workspace: WeakEntity<Workspace>,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
) -> Self {
Self {
- workspace,
- thread_store,
- text_thread_store,
message_editor,
+ workspace,
+ history_store,
+ prompt_store,
+ prompt_capabilities,
+ available_commands,
}
}
@@ -275,7 +100,7 @@ impl ContextPickerCompletionProvider {
) -> Option<Completion> {
match entry {
ContextPickerEntry::Mode(mode) => Some(Completion {
- replace_range: source_range.clone(),
+ replace_range: source_range,
new_text: format!("@{} ", mode.keyword()),
label: CodeLabel::plain(mode.label().to_string(), None),
icon_path: Some(mode.icon().path().into()),
@@ -288,83 +113,19 @@ impl ContextPickerCompletionProvider {
confirm: Some(Arc::new(|_, _, _| true)),
}),
ContextPickerEntry::Action(action) => {
- let (new_text, on_action) = match action {
- ContextPickerAction::AddSelections => {
- const PLACEHOLDER: &str = "selection ";
- let selections = selection_ranges(workspace, cx)
- .into_iter()
- .enumerate()
- .map(|(ix, (buffer, range))| {
- (
- buffer,
- range,
- (PLACEHOLDER.len() * ix)..(PLACEHOLDER.len() * (ix + 1) - 1),
- )
- })
- .collect::<Vec<_>>();
-
- let new_text: String = PLACEHOLDER.repeat(selections.len());
-
- let callback = Arc::new({
- let source_range = source_range.clone();
- move |_, window: &mut Window, cx: &mut App| {
- let selections = selections.clone();
- let message_editor = message_editor.clone();
- let source_range = source_range.clone();
- window.defer(cx, move |window, cx| {
- message_editor
- .update(cx, |message_editor, cx| {
- message_editor.confirm_mention_for_selection(
- source_range,
- selections,
- window,
- cx,
- )
- })
- .ok();
- });
- false
- }
- });
-
- (new_text, callback)
- }
- };
-
- Some(Completion {
- replace_range: source_range.clone(),
- new_text,
- label: CodeLabel::plain(action.label().to_string(), None),
- icon_path: Some(action.icon().path().into()),
- documentation: None,
- source: project::CompletionSource::Custom,
- insert_text_mode: None,
- // This ensures that when a user accepts this completion, the
- // completion menu will still be shown after "@category " is
- // inserted
- confirm: Some(on_action),
- })
+ Self::completion_for_action(action, source_range, message_editor, workspace, cx)
}
}
}
fn completion_for_thread(
- thread_entry: ThreadContextEntry,
+ thread_entry: HistoryEntry,
source_range: Range<Anchor>,
recent: bool,
editor: WeakEntity<MessageEditor>,
cx: &mut App,
) -> Completion {
- let uri = match &thread_entry {
- ThreadContextEntry::Thread { id, title } => MentionUri::Thread {
- id: id.clone(),
- name: title.to_string(),
- },
- ThreadContextEntry::Context { path, title } => MentionUri::TextThread {
- path: path.to_path_buf(),
- name: title.to_string(),
- },
- };
+ let uri = thread_entry.mention_uri();
let icon_for_completion = if recent {
IconName::HistoryRerun.path().into()
@@ -382,7 +143,7 @@ impl ContextPickerCompletionProvider {
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
- icon_path: Some(icon_for_completion.clone()),
+ icon_path: Some(icon_for_completion),
confirm: Some(confirm_completion_callback(
thread_entry.title().clone(),
source_range.start,
@@ -413,9 +174,9 @@ impl ContextPickerCompletionProvider {
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
- icon_path: Some(icon_path.clone()),
+ icon_path: Some(icon_path),
confirm: Some(confirm_completion_callback(
- rule.title.clone(),
+ rule.title,
source_range.start,
new_text_len - 1,
editor,
@@ -455,7 +216,7 @@ impl ContextPickerCompletionProvider {
let completion_icon_path = if is_recent {
IconName::HistoryRerun.path().into()
} else {
- crease_icon_path.clone()
+ crease_icon_path
};
let new_text = format!("{} ", uri.as_link());
@@ -491,9 +252,9 @@ impl ContextPickerCompletionProvider {
let abs_path = project.read(cx).absolute_path(&symbol.path, cx)?;
let uri = MentionUri::Symbol {
- path: abs_path,
+ abs_path,
name: symbol.name.clone(),
- line_range: symbol.range.start.0.row..symbol.range.end.0.row,
+ line_range: symbol.range.start.0.row..=symbol.range.end.0.row,
};
let new_text = format!("{} ", uri.as_link());
let new_text_len = new_text.len();
@@ -504,10 +265,10 @@ impl ContextPickerCompletionProvider {
label,
documentation: None,
source: project::CompletionSource::Custom,
- icon_path: Some(icon_path.clone()),
+ icon_path: Some(icon_path),
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
- symbol.name.clone().into(),
+ symbol.name.into(),
source_range.start,
new_text_len - 1,
message_editor,
@@ -522,7 +283,7 @@ impl ContextPickerCompletionProvider {
message_editor: WeakEntity<MessageEditor>,
cx: &mut App,
) -> Option<Completion> {
- let new_text = format!("@fetch {} ", url_to_fetch.clone());
+ let new_text = format!("@fetch {} ", url_to_fetch);
let url_to_fetch = url::Url::parse(url_to_fetch.as_ref())
.or_else(|_| url::Url::parse(&format!("https://{url_to_fetch}")))
.ok()?;
@@ -536,7 +297,7 @@ impl ContextPickerCompletionProvider {
label: CodeLabel::plain(url_to_fetch.to_string(), None),
documentation: None,
source: project::CompletionSource::Custom,
- icon_path: Some(icon_path.clone()),
+ icon_path: Some(icon_path),
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
url_to_fetch.to_string().into(),
@@ -547,6 +308,355 @@ impl ContextPickerCompletionProvider {
)),
})
}
+
+ pub(crate) fn completion_for_action(
+ action: ContextPickerAction,
+ source_range: Range<Anchor>,
+ message_editor: WeakEntity<MessageEditor>,
+ workspace: &Entity<Workspace>,
+ cx: &mut App,
+ ) -> Option<Completion> {
+ let (new_text, on_action) = match action {
+ ContextPickerAction::AddSelections => {
+ const PLACEHOLDER: &str = "selection ";
+ let selections = selection_ranges(workspace, cx)
+ .into_iter()
+ .enumerate()
+ .map(|(ix, (buffer, range))| {
+ (
+ buffer,
+ range,
+ (PLACEHOLDER.len() * ix)..(PLACEHOLDER.len() * (ix + 1) - 1),
+ )
+ })
+ .collect::<Vec<_>>();
+
+ let new_text: String = PLACEHOLDER.repeat(selections.len());
+
+ let callback = Arc::new({
+ let source_range = source_range.clone();
+ move |_, window: &mut Window, cx: &mut App| {
+ let selections = selections.clone();
+ let message_editor = message_editor.clone();
+ let source_range = source_range.clone();
+ window.defer(cx, move |window, cx| {
+ message_editor
+ .update(cx, |message_editor, cx| {
+ message_editor.confirm_mention_for_selection(
+ source_range,
+ selections,
+ window,
+ cx,
+ )
+ })
+ .ok();
+ });
+ false
+ }
+ });
+
+ (new_text, callback)
+ }
+ };
+
+ Some(Completion {
+ replace_range: source_range,
+ new_text,
+ label: CodeLabel::plain(action.label().to_string(), None),
+ icon_path: Some(action.icon().path().into()),
+ documentation: None,
+ source: project::CompletionSource::Custom,
+ insert_text_mode: None,
+ // This ensures that when a user accepts this completion, the
+ // completion menu will still be shown after "@category " is
+ // inserted
+ confirm: Some(on_action),
+ })
+ }
+
+ fn search_slash_commands(
+ &self,
+ query: String,
+ cx: &mut App,
+ ) -> Task<Vec<acp::AvailableCommand>> {
+ let commands = self.available_commands.borrow().clone();
+ if commands.is_empty() {
+ return Task::ready(Vec::new());
+ }
+
+ cx.spawn(async move |cx| {
+ let candidates = commands
+ .iter()
+ .enumerate()
+ .map(|(id, command)| StringMatchCandidate::new(id, &command.name))
+ .collect::<Vec<_>>();
+
+ let matches = fuzzy::match_strings(
+ &candidates,
+ &query,
+ false,
+ true,
+ 100,
+ &Arc::new(AtomicBool::default()),
+ cx.background_executor().clone(),
+ )
+ .await;
+
+ matches
+ .into_iter()
+ .map(|mat| commands[mat.candidate_id].clone())
+ .collect()
+ })
+ }
+
+ fn search_mentions(
+ &self,
+ mode: Option<ContextPickerMode>,
+ query: String,
+ cancellation_flag: Arc<AtomicBool>,
+ cx: &mut App,
+ ) -> Task<Vec<Match>> {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return Task::ready(Vec::default());
+ };
+ match mode {
+ Some(ContextPickerMode::File) => {
+ let search_files_task = search_files(query, cancellation_flag, &workspace, cx);
+ cx.background_spawn(async move {
+ search_files_task
+ .await
+ .into_iter()
+ .map(Match::File)
+ .collect()
+ })
+ }
+
+ Some(ContextPickerMode::Symbol) => {
+ let search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx);
+ cx.background_spawn(async move {
+ search_symbols_task
+ .await
+ .into_iter()
+ .map(Match::Symbol)
+ .collect()
+ })
+ }
+
+ Some(ContextPickerMode::Thread) => {
+ let search_threads_task =
+ search_threads(query, cancellation_flag, &self.history_store, cx);
+ cx.background_spawn(async move {
+ search_threads_task
+ .await
+ .into_iter()
+ .map(Match::Thread)
+ .collect()
+ })
+ }
+
+ Some(ContextPickerMode::Fetch) => {
+ if !query.is_empty() {
+ Task::ready(vec![Match::Fetch(query.into())])
+ } else {
+ Task::ready(Vec::new())
+ }
+ }
+
+ Some(ContextPickerMode::Rules) => {
+ if let Some(prompt_store) = self.prompt_store.as_ref() {
+ let search_rules_task =
+ search_rules(query, cancellation_flag, prompt_store, cx);
+ cx.background_spawn(async move {
+ search_rules_task
+ .await
+ .into_iter()
+ .map(Match::Rules)
+ .collect::<Vec<_>>()
+ })
+ } else {
+ Task::ready(Vec::new())
+ }
+ }
+
+ None if query.is_empty() => {
+ let mut matches = self.recent_context_picker_entries(&workspace, cx);
+
+ matches.extend(
+ self.available_context_picker_entries(&workspace, cx)
+ .into_iter()
+ .map(|mode| {
+ Match::Entry(EntryMatch {
+ entry: mode,
+ mat: None,
+ })
+ }),
+ );
+
+ Task::ready(matches)
+ }
+ None => {
+ let executor = cx.background_executor().clone();
+
+ let search_files_task =
+ search_files(query.clone(), cancellation_flag, &workspace, cx);
+
+ let entries = self.available_context_picker_entries(&workspace, cx);
+ let entry_candidates = entries
+ .iter()
+ .enumerate()
+ .map(|(ix, entry)| StringMatchCandidate::new(ix, entry.keyword()))
+ .collect::<Vec<_>>();
+
+ cx.background_spawn(async move {
+ let mut matches = search_files_task
+ .await
+ .into_iter()
+ .map(Match::File)
+ .collect::<Vec<_>>();
+
+ let entry_matches = fuzzy::match_strings(
+ &entry_candidates,
+ &query,
+ false,
+ true,
+ 100,
+ &Arc::new(AtomicBool::default()),
+ executor,
+ )
+ .await;
+
+ matches.extend(entry_matches.into_iter().map(|mat| {
+ Match::Entry(EntryMatch {
+ entry: entries[mat.candidate_id],
+ mat: Some(mat),
+ })
+ }));
+
+ matches.sort_by(|a, b| {
+ b.score()
+ .partial_cmp(&a.score())
+ .unwrap_or(std::cmp::Ordering::Equal)
+ });
+
+ matches
+ })
+ }
+ }
+ }
+
+ fn recent_context_picker_entries(
+ &self,
+ workspace: &Entity<Workspace>,
+ cx: &mut App,
+ ) -> Vec<Match> {
+ let mut recent = Vec::with_capacity(6);
+
+ let mut mentions = self
+ .message_editor
+ .read_with(cx, |message_editor, _cx| message_editor.mentions())
+ .unwrap_or_default();
+ let workspace = workspace.read(cx);
+ let project = workspace.project().read(cx);
+
+ if let Some(agent_panel) = workspace.panel::<AgentPanel>(cx)
+ && let Some(thread) = agent_panel.read(cx).active_agent_thread(cx)
+ {
+ let thread = thread.read(cx);
+ mentions.insert(MentionUri::Thread {
+ id: thread.session_id().clone(),
+ name: thread.title().into(),
+ });
+ }
+
+ recent.extend(
+ workspace
+ .recent_navigation_history_iter(cx)
+ .filter(|(_, abs_path)| {
+ abs_path.as_ref().is_none_or(|path| {
+ !mentions.contains(&MentionUri::File {
+ abs_path: path.clone(),
+ })
+ })
+ })
+ .take(4)
+ .filter_map(|(project_path, _)| {
+ project
+ .worktree_for_id(project_path.worktree_id, cx)
+ .map(|worktree| {
+ let path_prefix = worktree.read(cx).root_name().into();
+ Match::File(FileMatch {
+ mat: fuzzy::PathMatch {
+ score: 1.,
+ positions: Vec::new(),
+ worktree_id: project_path.worktree_id.to_usize(),
+ path: project_path.path,
+ path_prefix,
+ is_dir: false,
+ distance_to_relative_ancestor: 0,
+ },
+ is_recent: true,
+ })
+ })
+ }),
+ );
+
+ if self.prompt_capabilities.get().embedded_context {
+ const RECENT_COUNT: usize = 2;
+ let threads = self
+ .history_store
+ .read(cx)
+ .recently_opened_entries(cx)
+ .into_iter()
+ .filter(|thread| !mentions.contains(&thread.mention_uri()))
+ .take(RECENT_COUNT)
+ .collect::<Vec<_>>();
+
+ recent.extend(threads.into_iter().map(Match::RecentThread));
+ }
+
+ recent
+ }
+
+ fn available_context_picker_entries(
+ &self,
+ workspace: &Entity<Workspace>,
+ cx: &mut App,
+ ) -> Vec<ContextPickerEntry> {
+ let embedded_context = self.prompt_capabilities.get().embedded_context;
+ let mut entries = if embedded_context {
+ vec![
+ ContextPickerEntry::Mode(ContextPickerMode::File),
+ ContextPickerEntry::Mode(ContextPickerMode::Symbol),
+ ContextPickerEntry::Mode(ContextPickerMode::Thread),
+ ]
+ } else {
+ // File is always available, but we don't need a mode entry
+ vec![]
+ };
+
+ let has_selection = workspace
+ .read(cx)
+ .active_item(cx)
+ .and_then(|item| item.downcast::<Editor>())
+ .is_some_and(|editor| {
+ editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx))
+ });
+ if has_selection {
+ entries.push(ContextPickerEntry::Action(
+ ContextPickerAction::AddSelections,
+ ));
+ }
+
+ if embedded_context {
+ if self.prompt_store.is_some() {
+ entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules));
+ }
+
+ entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch));
+ }
+
+ entries
+ }
}
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
@@ -581,7 +691,11 @@ impl CompletionProvider for ContextPickerCompletionProvider {
let offset_to_line = buffer.point_to_offset(line_start);
let mut lines = buffer.text_for_range(line_start..position).lines();
let line = lines.next()?;
- MentionCompletion::try_parse(line, offset_to_line)
+ ContextCompletion::try_parse(
+ line,
+ offset_to_line,
+ self.prompt_capabilities.get().embedded_context,
+ )
});
let Some(state) = state else {
return Task::ready(Ok(Vec::new()));
@@ -593,124 +707,175 @@ impl CompletionProvider for ContextPickerCompletionProvider {
let project = workspace.read(cx).project().clone();
let snapshot = buffer.read(cx).snapshot();
- let source_range = snapshot.anchor_before(state.source_range.start)
- ..snapshot.anchor_after(state.source_range.end);
+ let source_range = snapshot.anchor_before(state.source_range().start)
+ ..snapshot.anchor_after(state.source_range().end);
- let thread_store = self.thread_store.clone();
- let text_thread_store = self.text_thread_store.clone();
let editor = self.message_editor.clone();
- let Ok((exclude_paths, exclude_threads)) =
- self.message_editor.update(cx, |message_editor, _cx| {
- message_editor.mentioned_path_and_threads()
- })
- else {
- return Task::ready(Ok(Vec::new()));
- };
-
- let MentionCompletion { mode, argument, .. } = state;
- let query = argument.unwrap_or_else(|| "".to_string());
-
- let recent_entries = recent_context_picker_entries(
- Some(thread_store.clone()),
- Some(text_thread_store.clone()),
- workspace.clone(),
- &exclude_paths,
- &exclude_threads,
- cx,
- );
-
- let prompt_store = thread_store
- .read_with(cx, |thread_store, _cx| thread_store.prompt_store().clone())
- .ok()
- .flatten();
- let search_task = search(
- mode,
- query,
- Arc::<AtomicBool>::default(),
- recent_entries,
- prompt_store,
- thread_store.clone(),
- text_thread_store.clone(),
- workspace.clone(),
- cx,
- );
-
- cx.spawn(async move |_, cx| {
- let matches = search_task.await;
-
- let completions = cx.update(|cx| {
- matches
- .into_iter()
- .filter_map(|mat| match mat {
- Match::File(FileMatch { mat, is_recent }) => {
- let project_path = ProjectPath {
- worktree_id: WorktreeId::from_usize(mat.worktree_id),
- path: mat.path.clone(),
+ match state {
+ ContextCompletion::SlashCommand(SlashCommandCompletion {
+ command, argument, ..
+ }) => {
+ let search_task = self.search_slash_commands(command.unwrap_or_default(), cx);
+ cx.background_spawn(async move {
+ let completions = search_task
+ .await
+ .into_iter()
+ .map(|command| {
+ let new_text = if let Some(argument) = argument.as_ref() {
+ format!("/{} {}", command.name, argument)
+ } else {
+ format!("/{} ", command.name)
};
- Self::completion_for_path(
- project_path,
- &mat.path_prefix,
- is_recent,
- mat.is_dir,
- source_range.clone(),
- editor.clone(),
- project.clone(),
- cx,
- )
- }
-
- Match::Symbol(SymbolMatch { symbol, .. }) => Self::completion_for_symbol(
- symbol,
- source_range.clone(),
- editor.clone(),
- workspace.clone(),
- cx,
- ),
-
- Match::Thread(ThreadMatch {
- thread, is_recent, ..
- }) => Some(Self::completion_for_thread(
- thread,
- source_range.clone(),
- is_recent,
- editor.clone(),
- cx,
- )),
-
- Match::Rules(user_rules) => Some(Self::completion_for_rules(
- user_rules,
- source_range.clone(),
- editor.clone(),
- cx,
- )),
+ let is_missing_argument = argument.is_none() && command.input.is_some();
+ Completion {
+ replace_range: source_range.clone(),
+ new_text,
+ label: CodeLabel::plain(command.name.to_string(), None),
+ documentation: Some(CompletionDocumentation::MultiLinePlainText(
+ command.description.into(),
+ )),
+ source: project::CompletionSource::Custom,
+ icon_path: None,
+ insert_text_mode: None,
+ confirm: Some(Arc::new({
+ let editor = editor.clone();
+ move |intent, _window, cx| {
+ if !is_missing_argument {
+ cx.defer({
+ let editor = editor.clone();
+ move |cx| {
+ editor
+ .update(cx, |_editor, cx| {
+ match intent {
+ CompletionIntent::Complete
+ | CompletionIntent::CompleteWithInsert
+ | CompletionIntent::CompleteWithReplace => {
+ if !is_missing_argument {
+ cx.emit(MessageEditorEvent::Send);
+ }
+ }
+ CompletionIntent::Compose => {}
+ }
+ })
+ .ok();
+ }
+ });
+ }
+ is_missing_argument
+ }
+ })),
+ }
+ })
+ .collect();
+
+ Ok(vec![CompletionResponse {
+ completions,
+ display_options: CompletionDisplayOptions {
+ dynamic_width: true,
+ },
+                        // Since this does its own filtering (see `filter_completions()`, which returns false),
+ // there is no benefit to computing whether this set of completions is incomplete.
+ is_incomplete: true,
+ }])
+ })
+ }
+ ContextCompletion::Mention(MentionCompletion { mode, argument, .. }) => {
+ let query = argument.unwrap_or_default();
+ let search_task =
+ self.search_mentions(mode, query, Arc::<AtomicBool>::default(), cx);
- Match::Fetch(url) => Self::completion_for_fetch(
- source_range.clone(),
- url,
- editor.clone(),
- cx,
- ),
+ cx.spawn(async move |_, cx| {
+ let matches = search_task.await;
- Match::Entry(EntryMatch { entry, .. }) => Self::completion_for_entry(
- entry,
- source_range.clone(),
- editor.clone(),
- &workspace,
- cx,
- ),
- })
- .collect()
- })?;
-
- Ok(vec![CompletionResponse {
- completions,
- // Since this does its own filtering (see `filter_completions()` returns false),
- // there is no benefit to computing whether this set of completions is incomplete.
- is_incomplete: true,
- }])
- })
+ let completions = cx.update(|cx| {
+ matches
+ .into_iter()
+ .filter_map(|mat| match mat {
+ Match::File(FileMatch { mat, is_recent }) => {
+ let project_path = ProjectPath {
+ worktree_id: WorktreeId::from_usize(mat.worktree_id),
+ path: mat.path.clone(),
+ };
+
+ Self::completion_for_path(
+ project_path,
+ &mat.path_prefix,
+ is_recent,
+ mat.is_dir,
+ source_range.clone(),
+ editor.clone(),
+ project.clone(),
+ cx,
+ )
+ }
+
+ Match::Symbol(SymbolMatch { symbol, .. }) => {
+ Self::completion_for_symbol(
+ symbol,
+ source_range.clone(),
+ editor.clone(),
+ workspace.clone(),
+ cx,
+ )
+ }
+
+ Match::Thread(thread) => Some(Self::completion_for_thread(
+ thread,
+ source_range.clone(),
+ false,
+ editor.clone(),
+ cx,
+ )),
+
+ Match::RecentThread(thread) => Some(Self::completion_for_thread(
+ thread,
+ source_range.clone(),
+ true,
+ editor.clone(),
+ cx,
+ )),
+
+ Match::Rules(user_rules) => Some(Self::completion_for_rules(
+ user_rules,
+ source_range.clone(),
+ editor.clone(),
+ cx,
+ )),
+
+ Match::Fetch(url) => Self::completion_for_fetch(
+ source_range.clone(),
+ url,
+ editor.clone(),
+ cx,
+ ),
+
+ Match::Entry(EntryMatch { entry, .. }) => {
+ Self::completion_for_entry(
+ entry,
+ source_range.clone(),
+ editor.clone(),
+ &workspace,
+ cx,
+ )
+ }
+ })
+ .collect()
+ })?;
+
+ Ok(vec![CompletionResponse {
+ completions,
+ display_options: CompletionDisplayOptions {
+ dynamic_width: true,
+ },
+                            // Since this does its own filtering (see `filter_completions()`, which returns false),
+ // there is no benefit to computing whether this set of completions is incomplete.
+ is_incomplete: true,
+ }])
+ })
+ }
+ }
}
fn is_completion_trigger(
@@ -1,15 +1,21 @@
-use std::ops::Range;
+use std::{
+ cell::{Cell, RefCell},
+ ops::Range,
+ rc::Rc,
+};
use acp_thread::{AcpThread, AgentThreadEntry};
-use agent::{TextThreadStore, ThreadStore};
+use agent_client_protocol::{self as acp, ToolCallId};
+use agent2::HistoryStore;
use collections::HashMap;
use editor::{Editor, EditorMode, MinimapVisibility};
use gpui::{
- AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, Focusable,
- TextStyleRefinement, WeakEntity, Window,
+ AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
+ ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
};
use language::language_settings::SoftWrap;
use project::Project;
+use prompt_store::PromptStore;
use settings::Settings as _;
use terminal_view::TerminalView;
use theme::ThemeSettings;
@@ -21,27 +27,33 @@ use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
pub struct EntryViewState {
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
- thread_store: Entity<ThreadStore>,
- text_thread_store: Entity<TextThreadStore>,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
entries: Vec<Entry>,
- prevent_slash_commands: bool,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ agent_name: SharedString,
}
impl EntryViewState {
pub fn new(
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
- thread_store: Entity<ThreadStore>,
- text_thread_store: Entity<TextThreadStore>,
- prevent_slash_commands: bool,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ agent_name: SharedString,
) -> Self {
Self {
workspace,
project,
- thread_store,
- text_thread_store,
+ history_store,
+ prompt_store,
entries: Vec::new(),
- prevent_slash_commands,
+ prompt_capabilities,
+ available_commands,
+ agent_name,
}
}
@@ -77,10 +89,12 @@ impl EntryViewState {
let mut editor = MessageEditor::new(
self.workspace.clone(),
self.project.clone(),
- self.thread_store.clone(),
- self.text_thread_store.clone(),
+ self.history_store.clone(),
+ self.prompt_store.clone(),
+ self.prompt_capabilities.clone(),
+ self.available_commands.clone(),
+ self.agent_name.clone(),
"Edit message - @ to include context",
- self.prevent_slash_commands,
editor::EditorMode::AutoHeight {
min_lines: 1,
max_lines: None,
@@ -105,6 +119,7 @@ impl EntryViewState {
}
}
AgentThreadEntry::ToolCall(tool_call) => {
+ let id = tool_call.id.clone();
let terminals = tool_call.terminals().cloned().collect::<Vec<_>>();
let diffs = tool_call.diffs().cloned().collect::<Vec<_>>();
@@ -118,29 +133,64 @@ impl EntryViewState {
views
};
+ let is_tool_call_completed =
+ matches!(tool_call.status, acp_thread::ToolCallStatus::Completed);
+
for terminal in terminals {
- views.entry(terminal.entity_id()).or_insert_with(|| {
- create_terminal(
- self.workspace.clone(),
- self.project.clone(),
- terminal.clone(),
- window,
- cx,
- )
- .into_any()
- });
+ match views.entry(terminal.entity_id()) {
+ collections::hash_map::Entry::Vacant(entry) => {
+ let element = create_terminal(
+ self.workspace.clone(),
+ self.project.clone(),
+ terminal.clone(),
+ window,
+ cx,
+ )
+ .into_any();
+ cx.emit(EntryViewEvent {
+ entry_index: index,
+ view_event: ViewEvent::NewTerminal(id.clone()),
+ });
+ entry.insert(element);
+ }
+ collections::hash_map::Entry::Occupied(_entry) => {
+ if is_tool_call_completed && terminal.read(cx).output().is_none() {
+ cx.emit(EntryViewEvent {
+ entry_index: index,
+ view_event: ViewEvent::TerminalMovedToBackground(id.clone()),
+ });
+ }
+ }
+ }
}
for diff in diffs {
- views
- .entry(diff.entity_id())
- .or_insert_with(|| create_editor_diff(diff.clone(), window, cx).into_any());
+ views.entry(diff.entity_id()).or_insert_with(|| {
+ let element = create_editor_diff(diff.clone(), window, cx).into_any();
+ cx.emit(EntryViewEvent {
+ entry_index: index,
+ view_event: ViewEvent::NewDiff(id.clone()),
+ });
+ element
+ });
}
}
- AgentThreadEntry::AssistantMessage(_) => {
- if index == self.entries.len() {
- self.entries.push(Entry::empty())
- }
+ AgentThreadEntry::AssistantMessage(message) => {
+ let entry = if let Some(Entry::AssistantMessage(entry)) =
+ self.entries.get_mut(index)
+ {
+ entry
+ } else {
+ self.set_entry(
+ index,
+ Entry::AssistantMessage(AssistantMessageEntry::default()),
+ );
+ let Some(Entry::AssistantMessage(entry)) = self.entries.get_mut(index) else {
+ unreachable!()
+ };
+ entry
+ };
+ entry.sync(message);
}
};
}
@@ -157,10 +207,10 @@ impl EntryViewState {
self.entries.drain(range);
}
- pub fn settings_changed(&mut self, cx: &mut App) {
+ pub fn agent_font_size_changed(&mut self, cx: &mut App) {
for entry in self.entries.iter() {
match entry {
- Entry::UserMessage { .. } => {}
+ Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {}
Entry::Content(response_views) => {
for view in response_views.values() {
if let Ok(diff_editor) = view.clone().downcast::<Editor>() {
@@ -186,19 +236,50 @@ pub struct EntryViewEvent {
}
pub enum ViewEvent {
+ NewDiff(ToolCallId),
+ NewTerminal(ToolCallId),
+ TerminalMovedToBackground(ToolCallId),
MessageEditorEvent(Entity<MessageEditor>, MessageEditorEvent),
}
+#[derive(Default, Debug)]
+pub struct AssistantMessageEntry {
+ scroll_handles_by_chunk_index: HashMap<usize, ScrollHandle>,
+}
+
+impl AssistantMessageEntry {
+ pub fn scroll_handle_for_chunk(&self, ix: usize) -> Option<ScrollHandle> {
+ self.scroll_handles_by_chunk_index.get(&ix).cloned()
+ }
+
+ pub fn sync(&mut self, message: &acp_thread::AssistantMessage) {
+ if let Some(acp_thread::AssistantMessageChunk::Thought { .. }) = message.chunks.last() {
+ let ix = message.chunks.len() - 1;
+ let handle = self.scroll_handles_by_chunk_index.entry(ix).or_default();
+ handle.scroll_to_bottom();
+ }
+ }
+}
+
+#[derive(Debug)]
pub enum Entry {
UserMessage(Entity<MessageEditor>),
+ AssistantMessage(AssistantMessageEntry),
Content(HashMap<EntityId, AnyEntity>),
}
impl Entry {
+ pub fn focus_handle(&self, cx: &App) -> Option<FocusHandle> {
+ match self {
+ Self::UserMessage(editor) => Some(editor.read(cx).focus_handle(cx)),
+ Self::AssistantMessage(_) | Self::Content(_) => None,
+ }
+ }
+
pub fn message_editor(&self) -> Option<&Entity<MessageEditor>> {
match self {
Self::UserMessage(editor) => Some(editor),
- Entry::Content(_) => None,
+ Self::AssistantMessage(_) | Self::Content(_) => None,
}
}
@@ -219,6 +300,16 @@ impl Entry {
.map(|entity| entity.downcast::<TerminalView>().unwrap())
}
+ pub fn scroll_handle_for_assistant_message_chunk(
+ &self,
+ chunk_ix: usize,
+ ) -> Option<ScrollHandle> {
+ match self {
+ Self::AssistantMessage(message) => message.scroll_handle_for_chunk(chunk_ix),
+ Self::UserMessage(_) | Self::Content(_) => None,
+ }
+ }
+
fn content_map(&self) -> Option<&HashMap<EntityId, AnyEntity>> {
match self {
Self::Content(map) => Some(map),
@@ -234,7 +325,7 @@ impl Entry {
pub fn has_content(&self) -> bool {
match self {
Self::Content(map) => !map.is_empty(),
- Self::UserMessage(_) => false,
+ Self::UserMessage(_) | Self::AssistantMessage(_) => false,
}
}
}
@@ -312,9 +403,10 @@ mod tests {
use std::{path::Path, rc::Rc};
use acp_thread::{AgentConnection, StubAgentConnection};
- use agent::{TextThreadStore, ThreadStore};
use agent_client_protocol as acp;
use agent_settings::AgentSettings;
+ use agent2::HistoryStore;
+ use assistant_context::ContextStore;
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
use editor::{EditorSettings, RowInfo};
use fs::FakeFs;
@@ -377,16 +469,18 @@ mod tests {
connection.send_update(session_id, acp::SessionUpdate::ToolCall(tool_call), cx)
});
- let thread_store = cx.new(|cx| ThreadStore::fake(project.clone(), cx));
- let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+ let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx));
+ let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
let view_state = cx.new(|_cx| {
EntryViewState::new(
workspace.downgrade(),
project.clone(),
- thread_store,
- text_thread_store,
- false,
+ history_store,
+ None,
+ Default::default(),
+ Default::default(),
+ "Test Agent".into(),
)
});
@@ -1,88 +1,97 @@
use crate::{
- acp::completion_provider::ContextPickerCompletionProvider,
- context_picker::fetch_context_picker::fetch_url_content,
+ acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion},
+ context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content},
};
use acp_thread::{MentionUri, selection_name};
-use agent::{TextThreadStore, ThreadId, ThreadStore};
use agent_client_protocol as acp;
-use anyhow::{Context as _, Result, anyhow};
+use agent_servers::{AgentServer, AgentServerDelegate};
+use agent2::HistoryStore;
+use anyhow::{Result, anyhow};
use assistant_slash_commands::codeblock_fence_for_path;
use collections::{HashMap, HashSet};
use editor::{
- Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement,
- EditorEvent, EditorMode, EditorStyle, ExcerptId, FoldPlaceholder, MultiBuffer,
- SemanticsProvider, ToOffset,
+ Addon, Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement,
+ EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, InlayId,
+ MultiBuffer, ToOffset,
actions::Paste,
- display_map::{Crease, CreaseId, FoldId},
+ display_map::{Crease, CreaseId, FoldId, Inlay},
};
use futures::{
- FutureExt as _, TryFutureExt as _,
- future::{Shared, join_all, try_join_all},
+ FutureExt as _,
+ future::{Shared, join_all},
};
use gpui::{
- AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable,
- HighlightStyle, Image, ImageFormat, Img, Subscription, Task, TextStyle, UnderlineStyle,
- WeakEntity,
+ Animation, AnimationExt as _, AppContext, ClipboardEntry, Context, Entity, EntityId,
+ EventEmitter, FocusHandle, Focusable, Image, ImageFormat, Img, KeyContext, SharedString,
+ Subscription, Task, TextStyle, WeakEntity, pulsating_between,
};
-use language::{Buffer, Language};
+use language::{Buffer, Language, language_settings::InlayHintKind};
use language_model::LanguageModelImage;
-use project::{Project, ProjectPath, Worktree};
+use postage::stream::Stream as _;
+use project::{
+ CompletionIntent, InlayHint, InlayHintLabel, Project, ProjectItem, ProjectPath, Worktree,
+};
+use prompt_store::{PromptId, PromptStore};
use rope::Point;
use settings::Settings;
use std::{
- cell::Cell,
+ cell::{Cell, RefCell},
ffi::OsStr,
- fmt::{Display, Write},
- ops::Range,
+ fmt::Write,
+ ops::{Range, RangeInclusive},
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::Duration,
};
-use text::{OffsetRangeExt, ToOffset as _};
+use text::OffsetRangeExt;
use theme::ThemeSettings;
use ui::{
- ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Icon, IconName,
- IconSize, InteractiveElement, IntoElement, Label, LabelCommon, LabelSize, ParentElement,
- Render, SelectableButton, SharedString, Styled, TextSize, TintColor, Toggleable, Window, div,
- h_flex, px,
+ ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Element as _,
+ FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label,
+ LabelCommon, LabelSize, ParentElement, Render, SelectableButton, Styled, TextSize, TintColor,
+ Toggleable, Window, div, h_flex,
};
-use url::Url;
-use util::ResultExt;
+use util::{ResultExt, debug_panic};
use workspace::{Workspace, notifications::NotifyResultExt as _};
use zed_actions::agent::Chat;
-const PARSE_SLASH_COMMAND_DEBOUNCE: Duration = Duration::from_millis(50);
-
pub struct MessageEditor {
mention_set: MentionSet,
editor: Entity<Editor>,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
- thread_store: Entity<ThreadStore>,
- text_thread_store: Entity<TextThreadStore>,
- prevent_slash_commands: bool,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ agent_name: SharedString,
_subscriptions: Vec<Subscription>,
_parse_slash_command_task: Task<()>,
}
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
pub enum MessageEditorEvent {
Send,
Cancel,
Focus,
+ LostFocus,
}
impl EventEmitter<MessageEditorEvent> for MessageEditor {}
+const COMMAND_HINT_INLAY_ID: usize = 0;
+
impl MessageEditor {
pub fn new(
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
- thread_store: Entity<ThreadStore>,
- text_thread_store: Entity<TextThreadStore>,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
+ prompt_capabilities: Rc<Cell<acp::PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ agent_name: SharedString,
placeholder: impl Into<Arc<str>>,
- prevent_slash_commands: bool,
mode: EditorMode,
window: &mut Window,
cx: &mut Context<Self>,
@@ -94,15 +103,14 @@ impl MessageEditor {
},
None,
);
- let completion_provider = ContextPickerCompletionProvider::new(
- workspace.clone(),
- thread_store.downgrade(),
- text_thread_store.downgrade(),
+ let completion_provider = Rc::new(ContextPickerCompletionProvider::new(
cx.weak_entity(),
- );
- let semantics_provider = Rc::new(SlashCommandSemanticsProvider {
- range: Cell::new(None),
- });
+ workspace.clone(),
+ history_store.clone(),
+ prompt_store.clone(),
+ prompt_capabilities.clone(),
+ available_commands.clone(),
+ ));
let mention_set = MentionSet::default();
let editor = cx.new(|cx| {
let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx));
@@ -113,54 +121,145 @@ impl MessageEditor {
editor.set_show_indent_guides(false, cx);
editor.set_soft_wrap();
editor.set_use_modal_editing(true);
- editor.set_completion_provider(Some(Rc::new(completion_provider)));
+ editor.set_completion_provider(Some(completion_provider.clone()));
editor.set_context_menu_options(ContextMenuOptions {
min_entries_visible: 12,
max_entries_visible: 12,
placement: Some(ContextMenuPlacement::Above),
});
- if prevent_slash_commands {
- editor.set_semantics_provider(Some(semantics_provider.clone()));
- }
+ editor.register_addon(MessageEditorAddon::new());
editor
});
- cx.on_focus(&editor.focus_handle(cx), window, |_, _, cx| {
+ cx.on_focus_in(&editor.focus_handle(cx), window, |_, _, cx| {
cx.emit(MessageEditorEvent::Focus)
})
.detach();
+ cx.on_focus_out(&editor.focus_handle(cx), window, |_, _, _, cx| {
+ cx.emit(MessageEditorEvent::LostFocus)
+ })
+ .detach();
+ let mut has_hint = false;
let mut subscriptions = Vec::new();
- if prevent_slash_commands {
- subscriptions.push(cx.subscribe_in(&editor, window, {
- let semantics_provider = semantics_provider.clone();
- move |this, editor, event, window, cx| match event {
- EditorEvent::Edited { .. } => {
- this.highlight_slash_command(
- semantics_provider.clone(),
- editor.clone(),
- window,
+
+ subscriptions.push(cx.subscribe_in(&editor, window, {
+ move |this, editor, event, window, cx| {
+ if let EditorEvent::Edited { .. } = event {
+ let snapshot = editor.update(cx, |editor, cx| {
+ let new_hints = this
+ .command_hint(editor.buffer(), cx)
+ .into_iter()
+ .collect::<Vec<_>>();
+ let has_new_hint = !new_hints.is_empty();
+ editor.splice_inlays(
+ if has_hint {
+ &[InlayId::Hint(COMMAND_HINT_INLAY_ID)]
+ } else {
+ &[]
+ },
+ new_hints,
cx,
);
- }
- _ => {}
+ has_hint = has_new_hint;
+
+ editor.snapshot(window, cx)
+ });
+ this.mention_set.remove_invalid(snapshot);
+
+ cx.notify();
}
- }));
- }
+ }
+ }));
Self {
editor,
project,
mention_set,
- thread_store,
- text_thread_store,
workspace,
- prevent_slash_commands,
+ history_store,
+ prompt_store,
+ prompt_capabilities,
+ available_commands,
+ agent_name,
_subscriptions: subscriptions,
_parse_slash_command_task: Task::ready(()),
}
}
+ fn command_hint(&self, buffer: &Entity<MultiBuffer>, cx: &App) -> Option<Inlay> {
+ let available_commands = self.available_commands.borrow();
+ if available_commands.is_empty() {
+ return None;
+ }
+
+ let snapshot = buffer.read(cx).snapshot(cx);
+ let parsed_command = SlashCommandCompletion::try_parse(&snapshot.text(), 0)?;
+ if parsed_command.argument.is_some() {
+ return None;
+ }
+
+ let command_name = parsed_command.command?;
+ let available_command = available_commands
+ .iter()
+ .find(|command| command.name == command_name)?;
+
+ let acp::AvailableCommandInput::Unstructured { mut hint } =
+ available_command.input.clone()?;
+
+ let mut hint_pos = parsed_command.source_range.end + 1;
+ if hint_pos > snapshot.len() {
+ hint_pos = snapshot.len();
+ hint.insert(0, ' ');
+ }
+
+ let hint_pos = snapshot.anchor_after(hint_pos);
+
+ Some(Inlay::hint(
+ COMMAND_HINT_INLAY_ID,
+ hint_pos,
+ &InlayHint {
+ position: hint_pos.text_anchor,
+ label: InlayHintLabel::String(hint),
+ kind: Some(InlayHintKind::Parameter),
+ padding_left: false,
+ padding_right: false,
+ tooltip: None,
+ resolve_state: project::ResolveState::Resolved,
+ },
+ ))
+ }
+
+ pub fn insert_thread_summary(
+ &mut self,
+ thread: agent2::DbThreadMetadata,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let start = self.editor.update(cx, |editor, cx| {
+ editor.set_text(format!("{}\n", thread.title), window, cx);
+ editor
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .anchor_before(Point::zero())
+ .text_anchor
+ });
+
+ self.confirm_mention_completion(
+ thread.title.clone(),
+ start,
+ thread.title.len(),
+ MentionUri::Thread {
+ id: thread.id.clone(),
+ name: thread.title.to_string(),
+ },
+ window,
+ cx,
+ )
+ .detach();
+ }
+
#[cfg(test)]
pub(crate) fn editor(&self) -> &Entity<Editor> {
&self.editor
@@ -175,26 +274,15 @@ impl MessageEditor {
self.editor.read(cx).is_empty(cx)
}
- pub fn mentioned_path_and_threads(&self) -> (HashSet<PathBuf>, HashSet<ThreadId>) {
- let mut excluded_paths = HashSet::default();
- let mut excluded_threads = HashSet::default();
-
- for uri in self.mention_set.uri_by_crease_id.values() {
- match uri {
- MentionUri::File { abs_path, .. } => {
- excluded_paths.insert(abs_path.clone());
- }
- MentionUri::Thread { id, .. } => {
- excluded_threads.insert(id.clone());
- }
- _ => {}
- }
- }
-
- (excluded_paths, excluded_threads)
+ pub fn mentions(&self) -> HashSet<MentionUri> {
+ self.mention_set
+ .mentions
+ .values()
+ .map(|(uri, _)| uri.clone())
+ .collect()
}
- pub fn confirm_completion(
+ pub fn confirm_mention_completion(
&mut self,
crease_text: SharedString,
start: text::Anchor,
@@ -209,106 +297,178 @@ impl MessageEditor {
let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot.as_singleton() else {
return Task::ready(());
};
- let Some(anchor) = snapshot
+ let Some(start_anchor) = snapshot
.buffer_snapshot
.anchor_in_excerpt(*excerpt_id, start)
else {
return Task::ready(());
};
-
- if let MentionUri::File { abs_path, .. } = &mention_uri {
- let extension = abs_path
- .extension()
- .and_then(OsStr::to_str)
- .unwrap_or_default();
-
- if Img::extensions().contains(&extension) && !extension.contains("svg") {
- let project = self.project.clone();
- let Some(project_path) = project
- .read(cx)
- .project_path_for_absolute_path(abs_path, cx)
- else {
- return Task::ready(());
- };
- let image = cx
- .spawn(async move |_, cx| {
- let image = project
- .update(cx, |project, cx| project.open_image(project_path, cx))
- .map_err(|e| e.to_string())?
- .await
- .map_err(|e| e.to_string())?;
- image
- .read_with(cx, |image, _cx| image.image.clone())
- .map_err(|e| e.to_string())
- })
- .shared();
- let Some(crease_id) = insert_crease_for_image(
- *excerpt_id,
- start,
- content_len,
- Some(abs_path.as_path().into()),
- image.clone(),
- self.editor.clone(),
- window,
- cx,
- ) else {
- return Task::ready(());
- };
- return self.confirm_mention_for_image(
- crease_id,
- anchor,
- Some(abs_path.clone()),
- image,
- window,
- cx,
- );
- }
- }
-
- let Some(crease_id) = crate::context_picker::insert_crease_for_mention(
- *excerpt_id,
- start,
- content_len,
- crease_text.clone(),
- mention_uri.icon_path(cx),
- self.editor.clone(),
- window,
- cx,
- ) else {
+ let end_anchor = snapshot
+ .buffer_snapshot
+ .anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot) + content_len + 1);
+
+ let crease = if let MentionUri::File { abs_path } = &mention_uri
+ && let Some(extension) = abs_path.extension()
+ && let Some(extension) = extension.to_str()
+ && Img::extensions().contains(&extension)
+ && !extension.contains("svg")
+ {
+ let Some(project_path) = self
+ .project
+ .read(cx)
+ .project_path_for_absolute_path(&abs_path, cx)
+ else {
+ log::error!("project path not found");
+ return Task::ready(());
+ };
+ let image = self
+ .project
+ .update(cx, |project, cx| project.open_image(project_path, cx));
+ let image = cx
+ .spawn(async move |_, cx| {
+ let image = image.await.map_err(|e| e.to_string())?;
+ let image = image
+ .update(cx, |image, _| image.image.clone())
+ .map_err(|e| e.to_string())?;
+ Ok(image)
+ })
+ .shared();
+ insert_crease_for_mention(
+ *excerpt_id,
+ start,
+ content_len,
+ mention_uri.name().into(),
+ IconName::Image.path().into(),
+ Some(image),
+ self.editor.clone(),
+ window,
+ cx,
+ )
+ } else {
+ insert_crease_for_mention(
+ *excerpt_id,
+ start,
+ content_len,
+ crease_text,
+ mention_uri.icon_path(cx),
+ None,
+ self.editor.clone(),
+ window,
+ cx,
+ )
+ };
+ let Some((crease_id, tx)) = crease else {
return Task::ready(());
};
- match mention_uri {
- MentionUri::Fetch { url } => {
- self.confirm_mention_for_fetch(crease_id, anchor, url, window, cx)
- }
- MentionUri::Directory { abs_path } => {
- self.confirm_mention_for_directory(crease_id, anchor, abs_path, window, cx)
+ let task = match mention_uri.clone() {
+ MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx),
+ MentionUri::Directory { abs_path } => self.confirm_mention_for_directory(abs_path, cx),
+ MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx),
+ MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx),
+ MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx),
+ MentionUri::Symbol {
+ abs_path,
+ line_range,
+ ..
+ } => self.confirm_mention_for_symbol(abs_path, line_range, cx),
+ MentionUri::Rule { id, .. } => self.confirm_mention_for_rule(id, cx),
+ MentionUri::PastedImage => {
+ debug_panic!("pasted image URI should not be included in completions");
+ Task::ready(Err(anyhow!(
+ "pasted imaged URI should not be included in completions"
+ )))
}
- MentionUri::Thread { id, name } => {
- self.confirm_mention_for_thread(crease_id, anchor, id, name, window, cx)
+ MentionUri::Selection { .. } => {
+ // Handled elsewhere
+ debug_panic!("unexpected selection URI");
+ Task::ready(Err(anyhow!("unexpected selection URI")))
}
- MentionUri::TextThread { path, name } => {
- self.confirm_mention_for_text_thread(crease_id, anchor, path, name, window, cx)
+ };
+ let task = cx
+ .spawn(async move |_, _| task.await.map_err(|e| e.to_string()))
+ .shared();
+ self.mention_set
+ .mentions
+ .insert(crease_id, (mention_uri, task.clone()));
+
+ // Notify the user if we failed to load the mentioned context
+ cx.spawn_in(window, async move |this, cx| {
+ let result = task.await.notify_async_err(cx);
+ drop(tx);
+ if result.is_none() {
+ this.update(cx, |this, cx| {
+ this.editor.update(cx, |editor, cx| {
+ // Remove mention
+ editor.edit([(start_anchor..end_anchor, "")], cx);
+ });
+ this.mention_set.mentions.remove(&crease_id);
+ })
+ .ok();
}
- MentionUri::File { .. }
- | MentionUri::Symbol { .. }
- | MentionUri::Rule { .. }
- | MentionUri::Selection { .. } => {
- self.mention_set.insert_uri(crease_id, mention_uri.clone());
- Task::ready(())
+ })
+ }
+
+ fn confirm_mention_for_file(
+ &mut self,
+ abs_path: PathBuf,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Mention>> {
+ let Some(project_path) = self
+ .project
+ .read(cx)
+ .project_path_for_absolute_path(&abs_path, cx)
+ else {
+ return Task::ready(Err(anyhow!("project path not found")));
+ };
+ let extension = abs_path
+ .extension()
+ .and_then(OsStr::to_str)
+ .unwrap_or_default();
+
+ if Img::extensions().contains(&extension) && !extension.contains("svg") {
+ if !self.prompt_capabilities.get().image {
+ return Task::ready(Err(anyhow!("This model does not support images yet")));
}
+ let task = self
+ .project
+ .update(cx, |project, cx| project.open_image(project_path, cx));
+ return cx.spawn(async move |_, cx| {
+ let image = task.await?;
+ let image = image.update(cx, |image, _| image.image.clone())?;
+ let format = image.format;
+ let image = cx
+ .update(|cx| LanguageModelImage::from_image(image, cx))?
+ .await;
+ if let Some(image) = image {
+ Ok(Mention::Image(MentionImage {
+ data: image.source,
+ format,
+ }))
+ } else {
+ Err(anyhow!("Failed to convert image"))
+ }
+ });
}
+
+ let buffer = self
+ .project
+ .update(cx, |project, cx| project.open_buffer(project_path, cx));
+ cx.spawn(async move |_, cx| {
+ let buffer = buffer.await?;
+ let mention = buffer.update(cx, |buffer, cx| Mention::Text {
+ content: buffer.text(),
+ tracked_buffers: vec![cx.entity()],
+ })?;
+ anyhow::Ok(mention)
+ })
}
fn confirm_mention_for_directory(
&mut self,
- crease_id: CreaseId,
- anchor: Anchor,
abs_path: PathBuf,
- window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<()> {
+ ) -> Task<Result<Mention>> {
fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<(Arc<Path>, PathBuf)> {
let mut files = Vec::new();
@@ -323,27 +483,23 @@ impl MessageEditor {
files
}
- let uri = MentionUri::Directory {
- abs_path: abs_path.clone(),
- };
let Some(project_path) = self
.project
.read(cx)
.project_path_for_absolute_path(&abs_path, cx)
else {
- return Task::ready(());
+ return Task::ready(Err(anyhow!("project path not found")));
};
let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else {
- return Task::ready(());
+ return Task::ready(Err(anyhow!("project entry not found")));
};
- let Some(worktree) = self.project.read(cx).worktree_for_entry(entry.id, cx) else {
- return Task::ready(());
+ let directory_path = entry.path.clone();
+ let worktree_id = project_path.worktree_id;
+ let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) else {
+ return Task::ready(Err(anyhow!("worktree not found")));
};
let project = self.project.clone();
- let task = cx.spawn(async move |_, cx| {
- let directory_path = entry.path.clone();
-
- let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?;
+ cx.spawn(async move |_, cx| {
let file_paths = worktree.read_with(cx, |worktree, _cx| {
collect_files_in_path(worktree, &directory_path)
})?;
@@ -370,106 +526,103 @@ impl MessageEditor {
let rope = buffer
.read_with(cx, |buffer, _cx| buffer.as_rope().clone())
.log_err()?;
- Some(rope)
+ Some((rope, buffer))
});
cx.background_spawn(async move {
- let rope = rope_task.await?;
- Some((rel_path, full_path, rope.to_string()))
+ let (rope, buffer) = rope_task.await?;
+ Some((rel_path, full_path, rope.to_string(), buffer))
})
}))
})?;
let contents = cx
.background_spawn(async move {
- let contents = descendants_future.await.into_iter().flatten();
- contents.collect()
+ let (contents, tracked_buffers) = descendants_future
+ .await
+ .into_iter()
+ .flatten()
+ .map(|(rel_path, full_path, rope, buffer)| {
+ ((rel_path, full_path, rope), buffer)
+ })
+ .unzip();
+ Mention::Text {
+ content: render_directory_contents(contents),
+ tracked_buffers,
+ }
})
.await;
anyhow::Ok(contents)
- });
- let task = cx
- .spawn(async move |_, _| {
- task.await
- .map(|contents| DirectoryContents(contents).to_string())
- .map_err(|e| e.to_string())
- })
- .shared();
-
- self.mention_set
- .directories
- .insert(abs_path.clone(), task.clone());
-
- let editor = self.editor.clone();
- cx.spawn_in(window, async move |this, cx| {
- if task.await.notify_async_err(cx).is_some() {
- this.update(cx, |this, _| {
- this.mention_set.insert_uri(crease_id, uri);
- })
- .ok();
- } else {
- editor
- .update(cx, |editor, cx| {
- editor.display_map.update(cx, |display_map, cx| {
- display_map.unfold_intersecting(vec![anchor..anchor], true, cx);
- });
- editor.remove_creases([crease_id], cx);
- })
- .ok();
- this.update(cx, |this, _cx| {
- this.mention_set.directories.remove(&abs_path);
- })
- .ok();
- }
})
}
fn confirm_mention_for_fetch(
&mut self,
- crease_id: CreaseId,
- anchor: Anchor,
url: url::Url,
- window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<()> {
- let Some(http_client) = self
+ ) -> Task<Result<Mention>> {
+ let http_client = match self
.workspace
- .update(cx, |workspace, _cx| workspace.client().http_client())
- .ok()
- else {
- return Task::ready(());
+ .update(cx, |workspace, _| workspace.client().http_client())
+ {
+ Ok(http_client) => http_client,
+ Err(e) => return Task::ready(Err(e)),
};
-
- let url_string = url.to_string();
- let fetch = cx
- .background_executor()
- .spawn(async move {
- fetch_url_content(http_client, url_string)
- .map_err(|e| e.to_string())
- .await
+ cx.background_executor().spawn(async move {
+ let content = fetch_url_content(http_client, url.to_string()).await?;
+ Ok(Mention::Text {
+ content,
+ tracked_buffers: Vec::new(),
})
- .shared();
- self.mention_set
- .add_fetch_result(url.clone(), fetch.clone());
+ })
+ }
- cx.spawn_in(window, async move |this, cx| {
- let fetch = fetch.await.notify_async_err(cx);
- this.update(cx, |this, cx| {
- if fetch.is_some() {
- this.mention_set
- .insert_uri(crease_id, MentionUri::Fetch { url });
- } else {
- // Remove crease if we failed to fetch
- this.editor.update(cx, |editor, cx| {
- editor.display_map.update(cx, |display_map, cx| {
- display_map.unfold_intersecting(vec![anchor..anchor], true, cx);
- });
- editor.remove_creases([crease_id], cx);
- });
- this.mention_set.fetch_results.remove(&url);
+ fn confirm_mention_for_symbol(
+ &mut self,
+ abs_path: PathBuf,
+ line_range: RangeInclusive<u32>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Mention>> {
+ let Some(project_path) = self
+ .project
+ .read(cx)
+ .project_path_for_absolute_path(&abs_path, cx)
+ else {
+ return Task::ready(Err(anyhow!("project path not found")));
+ };
+ let buffer = self
+ .project
+ .update(cx, |project, cx| project.open_buffer(project_path, cx));
+ cx.spawn(async move |_, cx| {
+ let buffer = buffer.await?;
+ let mention = buffer.update(cx, |buffer, cx| {
+ let start = Point::new(*line_range.start(), 0).min(buffer.max_point());
+ let end = Point::new(*line_range.end() + 1, 0).min(buffer.max_point());
+ let content = buffer.text_for_range(start..end).collect();
+ Mention::Text {
+ content,
+ tracked_buffers: vec![cx.entity()],
}
+ })?;
+ anyhow::Ok(mention)
+ })
+ }
+
+ fn confirm_mention_for_rule(
+ &mut self,
+ id: PromptId,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Mention>> {
+ let Some(prompt_store) = self.prompt_store.clone() else {
+ return Task::ready(Err(anyhow!("missing prompt store")));
+ };
+ let prompt = prompt_store.read(cx).load(id, cx);
+ cx.spawn(async move |_, _| {
+ let prompt = prompt.await?;
+ Ok(Mention::Text {
+ content: prompt,
+ tracked_buffers: Vec::new(),
})
- .ok();
})
}
@@ -494,21 +647,24 @@ impl MessageEditor {
let range = snapshot.anchor_after(offset + range_to_fold.start)
..snapshot.anchor_after(offset + range_to_fold.end);
- let path = buffer
+ let abs_path = buffer
.read(cx)
- .file()
- .map_or(PathBuf::from("untitled"), |file| file.path().to_path_buf());
+ .project_path(cx)
+ .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx));
let snapshot = buffer.read(cx).snapshot();
+ let text = snapshot
+ .text_for_range(selection_range.clone())
+ .collect::<String>();
let point_range = selection_range.to_point(&snapshot);
- let line_range = point_range.start.row..point_range.end.row;
+ let line_range = point_range.start.row..=point_range.end.row;
let uri = MentionUri::Selection {
- path: path.clone(),
+ abs_path: abs_path.clone(),
line_range: line_range.clone(),
};
let crease = crate::context_picker::crease_for_mention(
- selection_name(&path, &line_range).into(),
+ selection_name(abs_path.as_deref(), &line_range).into(),
uri.icon_path(cx),
range,
self.editor.downgrade(),
@@ -520,160 +676,154 @@ impl MessageEditor {
crease_ids.first().copied().unwrap()
});
- self.mention_set
- .insert_uri(crease_id, MentionUri::Selection { path, line_range });
+ self.mention_set.mentions.insert(
+ crease_id,
+ (
+ uri,
+ Task::ready(Ok(Mention::Text {
+ content: text,
+ tracked_buffers: vec![buffer],
+ }))
+ .shared(),
+ ),
+ );
}
}
fn confirm_mention_for_thread(
&mut self,
- crease_id: CreaseId,
- anchor: Anchor,
- id: ThreadId,
- name: String,
- window: &mut Window,
+ id: acp::SessionId,
cx: &mut Context<Self>,
- ) -> Task<()> {
- let uri = MentionUri::Thread {
- id: id.clone(),
- name,
- };
- let open_task = self.thread_store.update(cx, |thread_store, cx| {
- thread_store.open_thread(&id, window, cx)
- });
- let task = cx
- .spawn(async move |_, cx| {
- let thread = open_task.await.map_err(|e| e.to_string())?;
- let content = thread
- .read_with(cx, |thread, _cx| thread.latest_detailed_summary_or_text())
- .map_err(|e| e.to_string())?;
- Ok(content)
+ ) -> Task<Result<Mention>> {
+ let server = Rc::new(agent2::NativeAgentServer::new(
+ self.project.read(cx).fs().clone(),
+ self.history_store.clone(),
+ ));
+ let delegate = AgentServerDelegate::new(
+ self.project.read(cx).agent_server_store().clone(),
+ self.project.clone(),
+ None,
+ None,
+ );
+ let connection = server.connect(None, delegate, cx);
+ cx.spawn(async move |_, cx| {
+ let (agent, _) = connection.await?;
+ let agent = agent.downcast::<agent2::NativeAgentConnection>().unwrap();
+ let summary = agent
+ .0
+ .update(cx, |agent, cx| agent.thread_summary(id, cx))?
+ .await?;
+ anyhow::Ok(Mention::Text {
+ content: summary.to_string(),
+ tracked_buffers: Vec::new(),
})
- .shared();
-
- self.mention_set.insert_thread(id.clone(), task.clone());
-
- let editor = self.editor.clone();
- cx.spawn_in(window, async move |this, cx| {
- if task.await.notify_async_err(cx).is_some() {
- this.update(cx, |this, _| {
- this.mention_set.insert_uri(crease_id, uri);
- })
- .ok();
- } else {
- editor
- .update(cx, |editor, cx| {
- editor.display_map.update(cx, |display_map, cx| {
- display_map.unfold_intersecting(vec![anchor..anchor], true, cx);
- });
- editor.remove_creases([crease_id], cx);
- })
- .ok();
- this.update(cx, |this, _| {
- this.mention_set.thread_summaries.remove(&id);
- })
- .ok();
- }
})
}
fn confirm_mention_for_text_thread(
&mut self,
- crease_id: CreaseId,
- anchor: Anchor,
path: PathBuf,
- name: String,
- window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<()> {
- let uri = MentionUri::TextThread {
- path: path.clone(),
- name,
- };
- let context = self.text_thread_store.update(cx, |text_thread_store, cx| {
- text_thread_store.open_local_context(path.as_path().into(), cx)
+ ) -> Task<Result<Mention>> {
+ let context = self.history_store.update(cx, |text_thread_store, cx| {
+ text_thread_store.load_text_thread(path.as_path().into(), cx)
});
- let task = cx
- .spawn(async move |_, cx| {
- let context = context.await.map_err(|e| e.to_string())?;
- let xml = context
- .update(cx, |context, cx| context.to_xml(cx))
- .map_err(|e| e.to_string())?;
- Ok(xml)
+ cx.spawn(async move |_, cx| {
+ let context = context.await?;
+ let xml = context.update(cx, |context, cx| context.to_xml(cx))?;
+ Ok(Mention::Text {
+ content: xml,
+ tracked_buffers: Vec::new(),
})
- .shared();
-
- self.mention_set
- .insert_text_thread(path.clone(), task.clone());
+ })
+ }
- let editor = self.editor.clone();
- cx.spawn_in(window, async move |this, cx| {
- if task.await.notify_async_err(cx).is_some() {
- this.update(cx, |this, _| {
- this.mention_set.insert_uri(crease_id, uri);
- })
- .ok();
- } else {
- editor
- .update(cx, |editor, cx| {
- editor.display_map.update(cx, |display_map, cx| {
- display_map.unfold_intersecting(vec![anchor..anchor], true, cx);
- });
- editor.remove_creases([crease_id], cx);
- })
- .ok();
- this.update(cx, |this, _| {
- this.mention_set.text_thread_summaries.remove(&path);
- })
- .ok();
+ fn validate_slash_commands(
+ text: &str,
+ available_commands: &[acp::AvailableCommand],
+ agent_name: &str,
+ ) -> Result<()> {
+ if let Some(parsed_command) = SlashCommandCompletion::try_parse(text, 0) {
+ if let Some(command_name) = parsed_command.command {
+ // Check if this command is in the list of available commands from the server
+ let is_supported = available_commands
+ .iter()
+ .any(|cmd| cmd.name == command_name);
+
+ if !is_supported {
+ return Err(anyhow!(
+ "The /{} command is not supported by {}.\n\nAvailable commands: {}",
+ command_name,
+ agent_name,
+ if available_commands.is_empty() {
+ "none".to_string()
+ } else {
+ available_commands
+ .iter()
+ .map(|cmd| format!("/{}", cmd.name))
+ .collect::<Vec<_>>()
+ .join(", ")
+ }
+ ));
+ }
}
- })
+ }
+ Ok(())
}
pub fn contents(
&self,
- window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<acp::ContentBlock>>> {
- let contents =
- self.mention_set
- .contents(self.project.clone(), self.thread_store.clone(), window, cx);
+ ) -> Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>> {
+ // Check for unsupported slash commands before spawning async task
+ let text = self.editor.read(cx).text(cx);
+ let available_commands = self.available_commands.borrow().clone();
+ if let Err(err) =
+ Self::validate_slash_commands(&text, &available_commands, &self.agent_name)
+ {
+ return Task::ready(Err(err));
+ }
+
+ let contents = self
+ .mention_set
+ .contents(&self.prompt_capabilities.get(), cx);
let editor = self.editor.clone();
- let prevent_slash_commands = self.prevent_slash_commands;
cx.spawn(async move |_, cx| {
let contents = contents.await?;
+ let mut all_tracked_buffers = Vec::new();
- editor.update(cx, |editor, cx| {
+ let result = editor.update(cx, |editor, cx| {
let mut ix = 0;
let mut chunks: Vec<acp::ContentBlock> = Vec::new();
let text = editor.text(cx);
editor.display_map.update(cx, |map, cx| {
let snapshot = map.snapshot(cx);
for (crease_id, crease) in snapshot.crease_snapshot.creases() {
- // Skip creases that have been edited out of the message buffer.
- if !crease.range().start.is_valid(&snapshot.buffer_snapshot) {
- continue;
- }
-
- let Some(mention) = contents.get(&crease_id) else {
+ let Some((uri, mention)) = contents.get(&crease_id) else {
continue;
};
let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot);
if crease_range.start > ix {
- let chunk = if prevent_slash_commands
- && ix == 0
- && parse_slash_command(&text[ix..]).is_some()
- {
- format!(" {}", &text[ix..crease_range.start]).into()
- } else {
- text[ix..crease_range.start].into()
- };
+ //todo(): Custom slash command ContentBlock?
+ // let chunk = if prevent_slash_commands
+ // && ix == 0
+ // && parse_slash_command(&text[ix..]).is_some()
+ // {
+ // format!(" {}", &text[ix..crease_range.start]).into()
+ // } else {
+ // text[ix..crease_range.start].into()
+ // };
+ let chunk = text[ix..crease_range.start].into();
chunks.push(chunk);
}
let chunk = match mention {
- Mention::Text { uri, content } => {
+ Mention::Text {
+ content,
+ tracked_buffers,
+ } => {
+ all_tracked_buffers.extend(tracked_buffers.iter().cloned());
acp::ContentBlock::Resource(acp::EmbeddedResource {
annotations: None,
resource: acp::EmbeddedResourceResource::TextResourceContents(
@@ -73,11 +73,8 @@ impl AcpModelPickerDelegate {
this.update_in(cx, |this, window, cx| {
this.delegate.models = models.ok();
this.delegate.selected_model = selected_model.ok();
- this.delegate.update_matches(this.query(cx), window, cx)
- })?
- .await;
-
- Ok(())
+ this.refresh(window, cx)
+ })
}
refresh(&this, &session_id, cx).await.log_err();
@@ -195,8 +192,10 @@ impl PickerDelegate for AcpModelPickerDelegate {
}
}
- fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
- cx.emit(DismissEvent);
+ fn dismissed(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+ cx.defer_in(window, |picker, window, cx| {
+ picker.set_query("", window, cx);
+ });
}
fn render_match(
@@ -36,6 +36,14 @@ impl AcpModelSelectorPopover {
pub fn toggle(&self, window: &mut Window, cx: &mut Context<Self>) {
self.menu_handle.toggle(window, cx);
}
+
+ pub fn active_model_name(&self, cx: &App) -> Option<SharedString> {
+ self.selector
+ .read(cx)
+ .delegate
+ .active_model()
+ .map(|model| model.name.clone())
+ }
}
impl Render for AcpModelSelectorPopover {
@@ -1,19 +1,20 @@
-use crate::RemoveSelectedThread;
+use crate::acp::AcpThreadView;
+use crate::{AgentPanel, RemoveSelectedThread};
use agent2::{HistoryEntry, HistoryStore};
use chrono::{Datelike as _, Local, NaiveDate, TimeDelta};
use editor::{Editor, EditorEvent};
-use fuzzy::{StringMatch, StringMatchCandidate};
+use fuzzy::StringMatchCandidate;
use gpui::{
- App, Empty, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Stateful, Task,
- UniformListScrollHandle, Window, uniform_list,
+ App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Stateful, Task,
+ UniformListScrollHandle, WeakEntity, Window, uniform_list,
};
-use std::{fmt::Display, ops::Range, sync::Arc};
+use std::{fmt::Display, ops::Range};
+use text::Bias;
use time::{OffsetDateTime, UtcOffset};
use ui::{
HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Scrollbar, ScrollbarState,
Tooltip, prelude::*,
};
-use util::ResultExt;
pub struct AcpThreadHistory {
pub(crate) history_store: Entity<HistoryStore>,
@@ -21,38 +22,38 @@ pub struct AcpThreadHistory {
selected_index: usize,
hovered_index: Option<usize>,
search_editor: Entity<Editor>,
- all_entries: Arc<Vec<HistoryEntry>>,
- // When the search is empty, we display date separators between history entries
- // This vector contains an enum of either a separator or an actual entry
- separated_items: Vec<ListItemType>,
- // Maps entry indexes to list item indexes
- separated_item_indexes: Vec<u32>,
- _separated_items_task: Option<Task<()>>,
- search_state: SearchState,
+ search_query: SharedString,
+
+ visible_items: Vec<ListItemType>,
+
scrollbar_visibility: bool,
scrollbar_state: ScrollbarState,
local_timezone: UtcOffset,
- _subscriptions: Vec<gpui::Subscription>,
-}
-enum SearchState {
- Empty,
- Searching {
- query: SharedString,
- _task: Task<()>,
- },
- Searched {
- query: SharedString,
- matches: Vec<StringMatch>,
- },
+ _update_task: Task<()>,
+ _subscriptions: Vec<gpui::Subscription>,
}
enum ListItemType {
BucketSeparator(TimeBucket),
Entry {
- index: usize,
+ entry: HistoryEntry,
format: EntryTimeFormat,
},
+ SearchResult {
+ entry: HistoryEntry,
+ positions: Vec<usize>,
+ },
+}
+
+impl ListItemType {
+ fn history_entry(&self) -> Option<&HistoryEntry> {
+ match self {
+ ListItemType::Entry { entry, .. } => Some(entry),
+ ListItemType::SearchResult { entry, .. } => Some(entry),
+ _ => None,
+ }
+ }
}
pub enum ThreadHistoryEvent {
@@ -77,12 +78,15 @@ impl AcpThreadHistory {
cx.subscribe(&search_editor, |this, search_editor, event, cx| {
if let EditorEvent::BufferEdited = event {
let query = search_editor.read(cx).text(cx);
- this.search(query.into(), cx);
+ if this.search_query != query {
+ this.search_query = query.into();
+ this.update_visible_items(false, cx);
+ }
}
});
let history_store_subscription = cx.observe(&history_store, |this, _, cx| {
- this.update_all_entries(cx);
+ this.update_visible_items(true, cx);
});
let scroll_handle = UniformListScrollHandle::default();
@@ -93,10 +97,7 @@ impl AcpThreadHistory {
scroll_handle,
selected_index: 0,
hovered_index: None,
- search_state: SearchState::Empty,
- all_entries: Default::default(),
- separated_items: Default::default(),
- separated_item_indexes: Default::default(),
+ visible_items: Default::default(),
search_editor,
scrollbar_visibility: true,
scrollbar_state,
@@ -104,29 +105,61 @@ impl AcpThreadHistory {
chrono::Local::now().offset().local_minus_utc(),
)
.unwrap(),
+ search_query: SharedString::default(),
_subscriptions: vec![search_editor_subscription, history_store_subscription],
- _separated_items_task: None,
+ _update_task: Task::ready(()),
};
- this.update_all_entries(cx);
+ this.update_visible_items(false, cx);
this
}
- fn update_all_entries(&mut self, cx: &mut Context<Self>) {
- let new_entries: Arc<Vec<HistoryEntry>> = self
+ fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context<Self>) {
+ let entries = self
.history_store
- .update(cx, |store, cx| store.entries(cx))
- .into();
+ .update(cx, |store, _| store.entries().collect());
+ let new_list_items = if self.search_query.is_empty() {
+ self.add_list_separators(entries, cx)
+ } else {
+ self.filter_search_results(entries, cx)
+ };
+ let selected_history_entry = if preserve_selected_item {
+ self.selected_history_entry().cloned()
+ } else {
+ None
+ };
- self._separated_items_task.take();
+ self._update_task = cx.spawn(async move |this, cx| {
+ let new_visible_items = new_list_items.await;
+ this.update(cx, |this, cx| {
+ let new_selected_index = if let Some(history_entry) = selected_history_entry {
+ let history_entry_id = history_entry.id();
+ new_visible_items
+ .iter()
+ .position(|visible_entry| {
+ visible_entry
+ .history_entry()
+ .is_some_and(|entry| entry.id() == history_entry_id)
+ })
+ .unwrap_or(0)
+ } else {
+ 0
+ };
- let mut items = Vec::with_capacity(new_entries.len() + 1);
- let mut indexes = Vec::with_capacity(new_entries.len() + 1);
+ this.visible_items = new_visible_items;
+ this.set_selected_index(new_selected_index, Bias::Right, cx);
+ cx.notify();
+ })
+ .ok();
+ });
+ }
- let bg_task = cx.background_spawn(async move {
+ fn add_list_separators(&self, entries: Vec<HistoryEntry>, cx: &App) -> Task<Vec<ListItemType>> {
+ cx.background_spawn(async move {
+ let mut items = Vec::with_capacity(entries.len() + 1);
let mut bucket = None;
let today = Local::now().naive_local().date();
- for (index, entry) in new_entries.iter().enumerate() {
+ for entry in entries.into_iter() {
let entry_date = entry
.updated_at()
.with_timezone(&Local)
@@ -139,75 +172,33 @@ impl AcpThreadHistory {
items.push(ListItemType::BucketSeparator(entry_bucket));
}
- indexes.push(items.len() as u32);
items.push(ListItemType::Entry {
- index,
+ entry,
format: entry_bucket.into(),
});
}
- (new_entries, items, indexes)
- });
-
- let task = cx.spawn(async move |this, cx| {
- let (new_entries, items, indexes) = bg_task.await;
- this.update(cx, |this, cx| {
- let previously_selected_entry =
- this.all_entries.get(this.selected_index).map(|e| e.id());
-
- this.all_entries = new_entries;
- this.separated_items = items;
- this.separated_item_indexes = indexes;
-
- match &this.search_state {
- SearchState::Empty => {
- if this.selected_index >= this.all_entries.len() {
- this.set_selected_entry_index(
- this.all_entries.len().saturating_sub(1),
- cx,
- );
- } else if let Some(prev_id) = previously_selected_entry
- && let Some(new_ix) = this
- .all_entries
- .iter()
- .position(|probe| probe.id() == prev_id)
- {
- this.set_selected_entry_index(new_ix, cx);
- }
- }
- SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => {
- this.search(query.clone(), cx);
- }
- }
-
- cx.notify();
- })
- .log_err();
- });
- self._separated_items_task = Some(task);
+ items
+ })
}
- fn search(&mut self, query: SharedString, cx: &mut Context<Self>) {
- if query.is_empty() {
- self.search_state = SearchState::Empty;
- cx.notify();
- return;
- }
-
- let all_entries = self.all_entries.clone();
-
- let fuzzy_search_task = cx.background_spawn({
- let query = query.clone();
+ fn filter_search_results(
+ &self,
+ entries: Vec<HistoryEntry>,
+ cx: &App,
+ ) -> Task<Vec<ListItemType>> {
+ let query = self.search_query.clone();
+ cx.background_spawn({
let executor = cx.background_executor().clone();
async move {
- let mut candidates = Vec::with_capacity(all_entries.len());
+ let mut candidates = Vec::with_capacity(entries.len());
- for (idx, entry) in all_entries.iter().enumerate() {
+ for (idx, entry) in entries.iter().enumerate() {
candidates.push(StringMatchCandidate::new(idx, entry.title()));
}
const MAX_MATCHES: usize = 100;
- fuzzy::match_strings(
+ let matches = fuzzy::match_strings(
&candidates,
&query,
false,
@@ -216,74 +207,61 @@ impl AcpThreadHistory {
&Default::default(),
executor,
)
- .await
- }
- });
+ .await;
- let task = cx.spawn({
- let query = query.clone();
- async move |this, cx| {
- let matches = fuzzy_search_task.await;
-
- this.update(cx, |this, cx| {
- let SearchState::Searching {
- query: current_query,
- _task,
- } = &this.search_state
- else {
- return;
- };
-
- if &query == current_query {
- this.search_state = SearchState::Searched {
- query: query.clone(),
- matches,
- };
-
- this.set_selected_entry_index(0, cx);
- cx.notify();
- };
- })
- .log_err();
+ matches
+ .into_iter()
+ .map(|search_match| ListItemType::SearchResult {
+ entry: entries[search_match.candidate_id].clone(),
+ positions: search_match.positions,
+ })
+ .collect()
}
- });
-
- self.search_state = SearchState::Searching { query, _task: task };
- cx.notify();
+ })
}
- fn matched_count(&self) -> usize {
- match &self.search_state {
- SearchState::Empty => self.all_entries.len(),
- SearchState::Searching { .. } => 0,
- SearchState::Searched { matches, .. } => matches.len(),
- }
+ fn search_produced_no_matches(&self) -> bool {
+ self.visible_items.is_empty() && !self.search_query.is_empty()
}
- fn list_item_count(&self) -> usize {
- match &self.search_state {
- SearchState::Empty => self.separated_items.len(),
- SearchState::Searching { .. } => 0,
- SearchState::Searched { matches, .. } => matches.len(),
- }
+ fn selected_history_entry(&self) -> Option<&HistoryEntry> {
+ self.get_history_entry(self.selected_index)
}
- fn search_produced_no_matches(&self) -> bool {
- match &self.search_state {
- SearchState::Empty => false,
- SearchState::Searching { .. } => false,
- SearchState::Searched { matches, .. } => matches.is_empty(),
- }
+ fn get_history_entry(&self, visible_items_ix: usize) -> Option<&HistoryEntry> {
+ self.visible_items.get(visible_items_ix)?.history_entry()
}
- fn get_match(&self, ix: usize) -> Option<&HistoryEntry> {
- match &self.search_state {
- SearchState::Empty => self.all_entries.get(ix),
- SearchState::Searching { .. } => None,
- SearchState::Searched { matches, .. } => matches
- .get(ix)
- .and_then(|m| self.all_entries.get(m.candidate_id)),
+ fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context<Self>) {
+ if self.visible_items.len() == 0 {
+ self.selected_index = 0;
+ return;
}
+ while matches!(
+ self.visible_items.get(index),
+ None | Some(ListItemType::BucketSeparator(..))
+ ) {
+ index = match bias {
+ Bias::Left => {
+ if index == 0 {
+ self.visible_items.len() - 1
+ } else {
+ index - 1
+ }
+ }
+ Bias::Right => {
+ if index >= self.visible_items.len() - 1 {
+ 0
+ } else {
+ index + 1
+ }
+ }
+ };
+ }
+ self.selected_index = index;
+ self.scroll_handle
+ .scroll_to_item(index, ScrollStrategy::Top);
+ cx.notify()
}
pub fn select_previous(
@@ -292,13 +270,10 @@ impl AcpThreadHistory {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let count = self.matched_count();
- if count > 0 {
- if self.selected_index == 0 {
- self.set_selected_entry_index(count - 1, cx);
- } else {
- self.set_selected_entry_index(self.selected_index - 1, cx);
- }
+ if self.selected_index == 0 {
+ self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
+ } else {
+ self.set_selected_index(self.selected_index - 1, Bias::Left, cx);
}
}
@@ -308,13 +283,10 @@ impl AcpThreadHistory {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let count = self.matched_count();
- if count > 0 {
- if self.selected_index == count - 1 {
- self.set_selected_entry_index(0, cx);
- } else {
- self.set_selected_entry_index(self.selected_index + 1, cx);
- }
+ if self.selected_index == self.visible_items.len() - 1 {
+ self.set_selected_index(0, Bias::Right, cx);
+ } else {
+ self.set_selected_index(self.selected_index + 1, Bias::Right, cx);
}
}
@@ -324,35 +296,47 @@ impl AcpThreadHistory {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let count = self.matched_count();
- if count > 0 {
- self.set_selected_entry_index(0, cx);
- }
+ self.set_selected_index(0, Bias::Right, cx);
}
fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
- let count = self.matched_count();
- if count > 0 {
- self.set_selected_entry_index(count - 1, cx);
- }
+ self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx);
}
- fn set_selected_entry_index(&mut self, entry_index: usize, cx: &mut Context<Self>) {
- self.selected_index = entry_index;
+ fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
+ self.confirm_entry(self.selected_index, cx);
+ }
- let scroll_ix = match self.search_state {
- SearchState::Empty | SearchState::Searching { .. } => self
- .separated_item_indexes
- .get(entry_index)
- .map(|ix| *ix as usize)
- .unwrap_or(entry_index + 1),
- SearchState::Searched { .. } => entry_index,
+ fn confirm_entry(&mut self, ix: usize, cx: &mut Context<Self>) {
+ let Some(entry) = self.get_history_entry(ix) else {
+ return;
};
+ cx.emit(ThreadHistoryEvent::Open(entry.clone()));
+ }
- self.scroll_handle
- .scroll_to_item(scroll_ix, ScrollStrategy::Top);
+ fn remove_selected_thread(
+ &mut self,
+ _: &RemoveSelectedThread,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.remove_thread(self.selected_index, cx)
+ }
- cx.notify();
+ fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context<Self>) {
+ let Some(entry) = self.get_history_entry(visible_item_ix) else {
+ return;
+ };
+
+ let task = match entry {
+ HistoryEntry::AcpThread(thread) => self
+ .history_store
+ .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)),
+ HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| {
+ this.delete_text_thread(context.path.clone(), cx)
+ }),
+ };
+ task.detach_and_log_err(cx);
}
fn render_scrollbar(&self, cx: &mut Context<Self>) -> Option<Stateful<Div>> {
@@ -392,91 +376,33 @@ impl AcpThreadHistory {
)
}
- fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
- self.confirm_entry(self.selected_index, cx);
- }
-
- fn confirm_entry(&mut self, ix: usize, cx: &mut Context<Self>) {
- let Some(entry) = self.get_match(ix) else {
- return;
- };
- cx.emit(ThreadHistoryEvent::Open(entry.clone()));
- }
-
- fn remove_selected_thread(
- &mut self,
- _: &RemoveSelectedThread,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.remove_thread(self.selected_index, cx)
- }
-
- fn remove_thread(&mut self, ix: usize, cx: &mut Context<Self>) {
- let Some(entry) = self.get_match(ix) else {
- return;
- };
-
- let task = match entry {
- HistoryEntry::AcpThread(thread) => self
- .history_store
- .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)),
- HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| {
- this.delete_text_thread(context.path.clone(), cx)
- }),
- };
- task.detach_and_log_err(cx);
- }
-
- fn list_items(
+ fn render_list_items(
&mut self,
range: Range<usize>,
_window: &mut Window,
cx: &mut Context<Self>,
) -> Vec<AnyElement> {
- match &self.search_state {
- SearchState::Empty => self
- .separated_items
- .get(range)
- .iter()
- .flat_map(|items| {
- items
- .iter()
- .map(|item| self.render_list_item(item, vec![], cx))
- })
- .collect(),
- SearchState::Searched { matches, .. } => matches[range]
- .iter()
- .filter_map(|m| {
- let entry = self.all_entries.get(m.candidate_id)?;
- Some(self.render_history_entry(
- entry,
- EntryTimeFormat::DateAndTime,
- m.candidate_id,
- m.positions.clone(),
- cx,
- ))
- })
- .collect(),
- SearchState::Searching { .. } => {
- vec![]
- }
- }
+ self.visible_items
+ .get(range.clone())
+ .into_iter()
+ .flatten()
+ .enumerate()
+ .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx))
+ .collect()
}
- fn render_list_item(
- &self,
- item: &ListItemType,
- highlight_positions: Vec<usize>,
- cx: &Context<Self>,
- ) -> AnyElement {
+ fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context<Self>) -> AnyElement {
match item {
- ListItemType::Entry { index, format } => match self.all_entries.get(*index) {
- Some(entry) => self
- .render_history_entry(entry, *format, *index, highlight_positions, cx)
- .into_any(),
- None => Empty.into_any_element(),
- },
+ ListItemType::Entry { entry, format } => self
+ .render_history_entry(entry, *format, ix, Vec::default(), cx)
+ .into_any(),
+ ListItemType::SearchResult { entry, positions } => self.render_history_entry(
+ entry,
+ EntryTimeFormat::DateAndTime,
+ ix,
+ positions.clone(),
+ cx,
+ ),
ListItemType::BucketSeparator(bucket) => div()
.px(DynamicSpacing::Base06.rems(cx))
.pt_2()
@@ -494,12 +420,12 @@ impl AcpThreadHistory {
&self,
entry: &HistoryEntry,
format: EntryTimeFormat,
- list_entry_ix: usize,
+ ix: usize,
highlight_positions: Vec<usize>,
cx: &Context<Self>,
) -> AnyElement {
- let selected = list_entry_ix == self.selected_index;
- let hovered = Some(list_entry_ix) == self.hovered_index;
+ let selected = ix == self.selected_index;
+ let hovered = Some(ix) == self.hovered_index;
let timestamp = entry.updated_at().timestamp();
let thread_timestamp = format.format_timestamp(timestamp, self.local_timezone);
@@ -507,7 +433,7 @@ impl AcpThreadHistory {
.w_full()
.pb_1()
.child(
- ListItem::new(list_entry_ix)
+ ListItem::new(ix)
.rounded()
.toggle_state(selected)
.spacing(ListItemSpacing::Sparse)
@@ -529,14 +455,14 @@ impl AcpThreadHistory {
)
.on_hover(cx.listener(move |this, is_hovered, _window, cx| {
if *is_hovered {
- this.hovered_index = Some(list_entry_ix);
- } else if this.hovered_index == Some(list_entry_ix) {
+ this.hovered_index = Some(ix);
+ } else if this.hovered_index == Some(ix) {
this.hovered_index = None;
}
cx.notify();
}))
- .end_slot::<IconButton>(if hovered || selected {
+ .end_slot::<IconButton>(if hovered {
Some(
IconButton::new("delete", IconName::Trash)
.shape(IconButtonShape::Square)
@@ -545,16 +471,14 @@ impl AcpThreadHistory {
.tooltip(move |window, cx| {
Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx)
})
- .on_click(cx.listener(move |this, _, _, cx| {
- this.remove_thread(list_entry_ix, cx)
- })),
+ .on_click(
+ cx.listener(move |this, _, _, cx| this.remove_thread(ix, cx)),
+ ),
)
} else {
None
})
- .on_click(
- cx.listener(move |this, _, _, cx| this.confirm_entry(list_entry_ix, cx)),
- ),
+ .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))),
)
.into_any_element()
}
@@ -577,7 +501,7 @@ impl Render for AcpThreadHistory {
.on_action(cx.listener(Self::select_last))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::remove_selected_thread))
- .when(!self.all_entries.is_empty(), |parent| {
+ .when(!self.history_store.read(cx).is_empty(cx), |parent| {
parent.child(
h_flex()
.h(px(41.)) // Match the toolbar perfectly
@@ -603,7 +527,7 @@ impl Render for AcpThreadHistory {
.overflow_hidden()
.flex_grow();
- if self.all_entries.is_empty() {
+ if self.history_store.read(cx).is_empty(cx) {
view.justify_center()
.child(
h_flex().w_full().justify_center().child(
@@ -622,9 +546,9 @@ impl Render for AcpThreadHistory {
.child(
uniform_list(
"thread-history",
- self.list_item_count(),
+ self.visible_items.len(),
cx.processor(|this, range: Range<usize>, window, cx| {
- this.list_items(range, window, cx)
+ this.render_list_items(range, window, cx)
}),
)
.p_1()
@@ -639,6 +563,141 @@ impl Render for AcpThreadHistory {
}
}
+#[derive(IntoElement)]
+pub struct AcpHistoryEntryElement {
+ entry: HistoryEntry,
+ thread_view: WeakEntity<AcpThreadView>,
+ selected: bool,
+ hovered: bool,
+ on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
+}
+
+impl AcpHistoryEntryElement {
+ pub fn new(entry: HistoryEntry, thread_view: WeakEntity<AcpThreadView>) -> Self {
+ Self {
+ entry,
+ thread_view,
+ selected: false,
+ hovered: false,
+ on_hover: Box::new(|_, _, _| {}),
+ }
+ }
+
+ pub fn hovered(mut self, hovered: bool) -> Self {
+ self.hovered = hovered;
+ self
+ }
+
+ pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self {
+ self.on_hover = Box::new(on_hover);
+ self
+ }
+}
+
+impl RenderOnce for AcpHistoryEntryElement {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ let id = self.entry.id();
+ let title = self.entry.title();
+ let timestamp = self.entry.updated_at();
+
+ let formatted_time = {
+ let now = chrono::Utc::now();
+ let duration = now.signed_duration_since(timestamp);
+
+ if duration.num_days() > 0 {
+ format!("{}d", duration.num_days())
+ } else if duration.num_hours() > 0 {
+ format!("{}h ago", duration.num_hours())
+ } else if duration.num_minutes() > 0 {
+ format!("{}m ago", duration.num_minutes())
+ } else {
+ "Just now".to_string()
+ }
+ };
+
+ ListItem::new(id)
+ .rounded()
+ .toggle_state(self.selected)
+ .spacing(ListItemSpacing::Sparse)
+ .start_slot(
+ h_flex()
+ .w_full()
+ .gap_2()
+ .justify_between()
+ .child(Label::new(title).size(LabelSize::Small).truncate())
+ .child(
+ Label::new(formatted_time)
+ .color(Color::Muted)
+ .size(LabelSize::XSmall),
+ ),
+ )
+ .on_hover(self.on_hover)
+ .end_slot::<IconButton>(if self.hovered || self.selected {
+ Some(
+ IconButton::new("delete", IconName::Trash)
+ .shape(IconButtonShape::Square)
+ .icon_size(IconSize::XSmall)
+ .icon_color(Color::Muted)
+ .tooltip(move |window, cx| {
+ Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx)
+ })
+ .on_click({
+ let thread_view = self.thread_view.clone();
+ let entry = self.entry.clone();
+
+ move |_event, _window, cx| {
+ if let Some(thread_view) = thread_view.upgrade() {
+ thread_view.update(cx, |thread_view, cx| {
+ thread_view.delete_history_entry(entry.clone(), cx);
+ });
+ }
+ }
+ }),
+ )
+ } else {
+ None
+ })
+ .on_click({
+ let thread_view = self.thread_view.clone();
+ let entry = self.entry;
+
+ move |_event, window, cx| {
+ if let Some(workspace) = thread_view
+ .upgrade()
+ .and_then(|view| view.read(cx).workspace().upgrade())
+ {
+ match &entry {
+ HistoryEntry::AcpThread(thread_metadata) => {
+ if let Some(panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.load_agent_thread(
+ thread_metadata.clone(),
+ window,
+ cx,
+ );
+ });
+ }
+ }
+ HistoryEntry::TextThread(context) => {
+ if let Some(panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel
+ .open_saved_prompt_editor(
+ context.path.clone(),
+ window,
+ cx,
+ )
+ .detach_and_log_err(cx);
+ });
+ }
+ }
+ }
+ }
+ }
+ })
+ }
+}
+
#[derive(Clone, Copy)]
pub enum EntryTimeFormat {
DateAndTime,
@@ -5,26 +5,26 @@ use acp_thread::{
};
use acp_thread::{AgentConnection, Plan};
use action_log::ActionLog;
-use agent::{TextThreadStore, ThreadStore};
-use agent_client_protocol::{self as acp};
-use agent_servers::{AgentServer, ClaudeCode};
+use agent_client_protocol::{self as acp, PromptCapabilities};
+use agent_servers::{AgentServer, AgentServerDelegate};
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting};
-use agent2::DbThreadMetadata;
-use anyhow::bail;
+use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
+use anyhow::{Context as _, Result, anyhow, bail};
use audio::{Audio, Sound};
use buffer_diff::BufferDiff;
use client::zed_urls;
use collections::{HashMap, HashSet};
use editor::scroll::Autoscroll;
-use editor::{Editor, EditorMode, MultiBuffer, PathKey, SelectionEffects};
+use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects};
use file_icons::FileIcons;
use fs::Fs;
+use futures::FutureExt as _;
use gpui::{
Action, Animation, AnimationExt, AnyView, App, BorderStyle, ClickEvent, ClipboardItem,
- EdgesRefinement, Empty, Entity, FocusHandle, Focusable, Hsla, Length, ListOffset, ListState,
- MouseButton, PlatformDisplay, SharedString, Stateful, StyleRefinement, Subscription, Task,
- TextStyle, TextStyleRefinement, Transformation, UnderlineStyle, WeakEntity, Window,
- WindowHandle, div, linear_color_stop, linear_gradient, list, percentage, point, prelude::*,
+ CursorStyle, EdgesRefinement, ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, Length,
+ ListOffset, ListState, MouseButton, PlatformDisplay, SharedString, Stateful, StyleRefinement,
+ Subscription, Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, Window,
+ WindowHandle, div, ease_in_out, linear_color_stop, linear_gradient, list, point, prelude::*,
pulsating_between,
};
use language::Buffer;
@@ -32,17 +32,20 @@ use language::Buffer;
use language_model::LanguageModelRegistry;
use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
use project::{Project, ProjectEntryId};
-use prompt_store::PromptId;
+use prompt_store::{PromptId, PromptStore};
use rope::Point;
use settings::{Settings as _, SettingsStore};
+use std::cell::{Cell, RefCell};
+use std::path::Path;
use std::sync::Arc;
use std::time::Instant;
-use std::{collections::BTreeMap, process::ExitStatus, rc::Rc, time::Duration};
+use std::{collections::BTreeMap, rc::Rc, time::Duration};
+use terminal_view::terminal_panel::TerminalPanel;
use text::Anchor;
-use theme::ThemeSettings;
+use theme::{AgentFontSize, ThemeSettings};
use ui::{
- Callout, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding, PopoverMenuHandle,
- Scrollbar, ScrollbarState, Tooltip, prelude::*,
+ Callout, CommonAnimationExt, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding,
+ PopoverMenuHandle, Scrollbar, ScrollbarState, SpinnerLabel, TintColor, Tooltip, prelude::*,
};
use util::{ResultExt, size::format_file_size, time::duration_alt_display};
use workspace::{CollaboratorId, Workspace};
@@ -55,25 +58,37 @@ use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent};
use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
use crate::agent_diff::AgentDiff;
use crate::profile_selector::{ProfileProvider, ProfileSelector};
-use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip};
+
+use crate::ui::preview::UsageCallout;
+use crate::ui::{
+ AgentNotification, AgentNotificationEvent, BurnModeTooltip, UnavailableEditingTooltip,
+};
use crate::{
AgentDiffPane, AgentPanel, ContinueThread, ContinueWithBurnMode, ExpandMessageEditor, Follow,
- KeepAll, OpenAgentDiff, RejectAll, ToggleBurnMode, ToggleProfileSelector,
+ KeepAll, OpenAgentDiff, OpenHistory, RejectAll, ToggleBurnMode, ToggleProfileSelector,
};
-const RESPONSE_PADDING_X: Pixels = px(19.);
pub const MIN_EDITOR_LINES: usize = 4;
pub const MAX_EDITOR_LINES: usize = 8;
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum ThreadFeedback {
+ Positive,
+ Negative,
+}
+
+#[derive(Debug)]
enum ThreadError {
PaymentRequired,
ModelRequestLimitReached(cloud_llm_client::Plan),
ToolUseLimitReached,
+ Refusal,
+ AuthenticationRequired(SharedString),
Other(SharedString),
}
impl ThreadError {
- fn from_err(error: anyhow::Error) -> Self {
+ fn from_err(error: anyhow::Error, agent: &Rc<dyn AgentServer>) -> Self {
if error.is::<language_model::PaymentRequiredError>() {
Self::PaymentRequired
} else if error.is::<language_model::ToolUseLimitReachedError>() {
@@ -82,8 +97,22 @@ impl ThreadError {
error.downcast_ref::<language_model::ModelRequestLimitReachedError>()
{
Self::ModelRequestLimitReached(error.plan)
+ } else if let Some(acp_error) = error.downcast_ref::<acp::Error>()
+ && acp_error.code == acp::ErrorCode::AUTH_REQUIRED.code
+ {
+ Self::AuthenticationRequired(acp_error.message.clone().into())
} else {
- Self::Other(error.to_string().into())
+ let string = error.to_string();
+ // TODO: we should have Gemini return better errors here.
+ if agent.clone().downcast::<agent_servers::Gemini>().is_some()
+ && string.contains("Could not load the default credentials")
+ || string.contains("API key not valid")
+ || string.contains("Request had invalid authentication credentials")
+ {
+ Self::AuthenticationRequired(string.into())
+ } else {
+ Self::Other(error.to_string().into())
+ }
}
}
}
@@ -106,19 +135,146 @@ impl ProfileProvider for Entity<agent2::Thread> {
}
}
+#[derive(Default)]
+struct ThreadFeedbackState {
+ feedback: Option<ThreadFeedback>,
+ comments_editor: Option<Entity<Editor>>,
+}
+
+impl ThreadFeedbackState {
+ pub fn submit(
+ &mut self,
+ thread: Entity<AcpThread>,
+ feedback: ThreadFeedback,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let Some(telemetry) = thread.read(cx).connection().telemetry() else {
+ return;
+ };
+
+ if self.feedback == Some(feedback) {
+ return;
+ }
+
+ self.feedback = Some(feedback);
+ match feedback {
+ ThreadFeedback::Positive => {
+ self.comments_editor = None;
+ }
+ ThreadFeedback::Negative => {
+ self.comments_editor = Some(Self::build_feedback_comments_editor(window, cx));
+ }
+ }
+ let session_id = thread.read(cx).session_id().clone();
+ let agent_name = telemetry.agent_name();
+ let task = telemetry.thread_data(&session_id, cx);
+ let rating = match feedback {
+ ThreadFeedback::Positive => "positive",
+ ThreadFeedback::Negative => "negative",
+ };
+ cx.background_spawn(async move {
+ let thread = task.await?;
+ telemetry::event!(
+ "Agent Thread Rated",
+ session_id = session_id,
+ rating = rating,
+ agent = agent_name,
+ thread = thread
+ );
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ pub fn submit_comments(&mut self, thread: Entity<AcpThread>, cx: &mut App) {
+ let Some(telemetry) = thread.read(cx).connection().telemetry() else {
+ return;
+ };
+
+ let Some(comments) = self
+ .comments_editor
+ .as_ref()
+ .map(|editor| editor.read(cx).text(cx))
+ .filter(|text| !text.trim().is_empty())
+ else {
+ return;
+ };
+
+ self.comments_editor.take();
+
+ let session_id = thread.read(cx).session_id().clone();
+ let agent_name = telemetry.agent_name();
+ let task = telemetry.thread_data(&session_id, cx);
+ cx.background_spawn(async move {
+ let thread = task.await?;
+ telemetry::event!(
+ "Agent Thread Feedback Comments",
+ session_id = session_id,
+ comments = comments,
+ agent = agent_name,
+ thread = thread
+ );
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ pub fn clear(&mut self) {
+ *self = Self::default()
+ }
+
+ pub fn dismiss_comments(&mut self) {
+ self.comments_editor.take();
+ }
+
+ fn build_feedback_comments_editor(window: &mut Window, cx: &mut App) -> Entity<Editor> {
+ let buffer = cx.new(|cx| {
+ let empty_string = String::new();
+ MultiBuffer::singleton(cx.new(|cx| Buffer::local(empty_string, cx)), cx)
+ });
+
+ let editor = cx.new(|cx| {
+ let mut editor = Editor::new(
+ editor::EditorMode::AutoHeight {
+ min_lines: 1,
+ max_lines: Some(4),
+ },
+ buffer,
+ None,
+ window,
+ cx,
+ );
+ editor.set_placeholder_text(
+ "What went wrong? Share your feedback so we can improve.",
+ cx,
+ );
+ editor
+ });
+
+ editor.read(cx).focus_handle(cx).focus(window);
+ editor
+ }
+}
+
pub struct AcpThreadView {
agent: Rc<dyn AgentServer>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
thread_state: ThreadState,
+ login: Option<task::SpawnInTerminal>,
+ history_store: Entity<HistoryStore>,
+ hovered_recent_history_item: Option<usize>,
entry_view_state: Entity<EntryViewState>,
message_editor: Entity<MessageEditor>,
+ focus_handle: FocusHandle,
model_selector: Option<Entity<AcpModelSelectorPopover>>,
profile_selector: Option<Entity<ProfileSelector>>,
notifications: Vec<WindowHandle<AgentNotification>>,
notification_subscriptions: HashMap<WindowHandle<AgentNotification>, Vec<Subscription>>,
thread_retry_status: Option<RetryStatus>,
thread_error: Option<ThreadError>,
+ thread_feedback: ThreadFeedbackState,
list_state: ListState,
scrollbar_state: ScrollbarState,
auth_task: Option<Task<()>>,
@@ -127,59 +283,86 @@ pub struct AcpThreadView {
edits_expanded: bool,
plan_expanded: bool,
editor_expanded: bool,
- terminal_expanded: bool,
+ should_be_following: bool,
editing_message: Option<usize>,
+ prompt_capabilities: Rc<Cell<PromptCapabilities>>,
+ available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
+ is_loading_contents: bool,
+ new_server_version_available: Option<SharedString>,
_cancel_task: Option<Task<()>>,
- _subscriptions: [Subscription; 3],
+ _subscriptions: [Subscription; 4],
}
enum ThreadState {
- Loading {
- _task: Task<()>,
- },
+ Loading(Entity<LoadingView>),
Ready {
thread: Entity<AcpThread>,
- _subscription: [Subscription; 2],
+ title_editor: Option<Entity<Editor>>,
+ _subscriptions: Vec<Subscription>,
},
LoadError(LoadError),
Unauthenticated {
connection: Rc<dyn AgentConnection>,
description: Option<Entity<Markdown>>,
configuration_view: Option<AnyView>,
+ pending_auth_method: Option<acp::AuthMethodId>,
_subscription: Option<Subscription>,
},
- ServerExited {
- status: ExitStatus,
- },
+}
+
+struct LoadingView {
+ title: SharedString,
+ _load_task: Task<()>,
+ _update_title_task: Task<anyhow::Result<()>>,
}
impl AcpThreadView {
pub fn new(
agent: Rc<dyn AgentServer>,
resume_thread: Option<DbThreadMetadata>,
+ summarize_thread: Option<DbThreadMetadata>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
- thread_store: Entity<ThreadStore>,
- text_thread_store: Entity<TextThreadStore>,
+ history_store: Entity<HistoryStore>,
+ prompt_store: Option<Entity<PromptStore>>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
- let prevent_slash_commands = agent.clone().downcast::<ClaudeCode>().is_some();
+ let prompt_capabilities = Rc::new(Cell::new(acp::PromptCapabilities::default()));
+ let available_commands = Rc::new(RefCell::new(vec![]));
+
+ let placeholder = if agent.name() == "Zed Agent" {
+ format!("Message the {} — @ to include context", agent.name())
+ } else if agent.name() == "Claude Code" || !available_commands.borrow().is_empty() {
+ format!(
+ "Message {} — @ to include context, / for commands",
+ agent.name()
+ )
+ } else {
+ format!("Message {} — @ to include context", agent.name())
+ };
+
let message_editor = cx.new(|cx| {
- MessageEditor::new(
+ let mut editor = MessageEditor::new(
workspace.clone(),
project.clone(),
- thread_store.clone(),
- text_thread_store.clone(),
- "Message the agent - @ to include context",
- prevent_slash_commands,
+ history_store.clone(),
+ prompt_store.clone(),
+ prompt_capabilities.clone(),
+ available_commands.clone(),
+ agent.name(),
+ placeholder,
editor::EditorMode::AutoHeight {
min_lines: MIN_EDITOR_LINES,
max_lines: Some(MAX_EDITOR_LINES),
},
window,
cx,
- )
+ );
+ if let Some(entry) = summarize_thread {
+ editor.insert_thread_summary(entry, window, cx);
+ }
+ editor
});
let list_state = ListState::new(0, gpui::ListAlignment::Bottom, px(2048.0));
@@ -188,14 +371,17 @@ impl AcpThreadView {
EntryViewState::new(
workspace.clone(),
project.clone(),
- thread_store.clone(),
- text_thread_store.clone(),
- prevent_slash_commands,
+ history_store.clone(),
+ prompt_store.clone(),
+ prompt_capabilities.clone(),
+ available_commands.clone(),
+ agent.name(),
)
});
let subscriptions = [
- cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
+ cx.observe_global_in::<SettingsStore>(window, Self::agent_font_size_changed),
+ cx.observe_global_in::<AgentFontSize>(window, Self::agent_font_size_changed),
cx.subscribe_in(&message_editor, window, Self::handle_message_editor_event),
cx.subscribe_in(&entry_view_state, window, Self::handle_entry_view_event),
];
@@ -206,6 +392,7 @@ impl AcpThreadView {
project: project.clone(),
entry_view_state,
thread_state: Self::initial_state(agent, resume_thread, workspace, project, window, cx),
+ login: None,
message_editor,
model_selector: None,
profile_selector: None,
@@ -215,19 +402,41 @@ impl AcpThreadView {
scrollbar_state: ScrollbarState::new(list_state).parent_entity(&cx.entity()),
thread_retry_status: None,
thread_error: None,
+ thread_feedback: Default::default(),
auth_task: None,
expanded_tool_calls: HashSet::default(),
expanded_thinking_blocks: HashSet::default(),
editing_message: None,
edits_expanded: false,
plan_expanded: false,
+ prompt_capabilities,
+ available_commands,
editor_expanded: false,
- terminal_expanded: true,
+ should_be_following: false,
+ history_store,
+ hovered_recent_history_item: None,
+ is_loading_contents: false,
_subscriptions: subscriptions,
_cancel_task: None,
+ focus_handle: cx.focus_handle(),
+ new_server_version_available: None,
}
}
+ fn reset(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.thread_state = Self::initial_state(
+ self.agent.clone(),
+ None,
+ self.workspace.clone(),
+ self.project.clone(),
+ window,
+ cx,
+ );
+ self.available_commands.replace(vec![]);
+ self.new_server_version_available.take();
+ cx.notify();
+ }
+
fn initial_state(
agent: Rc<dyn AgentServer>,
resume_thread: Option<DbThreadMetadata>,
@@ -236,20 +445,54 @@ impl AcpThreadView {
window: &mut Window,
cx: &mut Context<Self>,
) -> ThreadState {
- let root_dir = project
- .read(cx)
- .visible_worktrees(cx)
- .next()
- .map(|worktree| worktree.read(cx).abs_path())
- .unwrap_or_else(|| paths::home_dir().as_path().into());
+ if project.read(cx).is_via_collab()
+ && agent.clone().downcast::<NativeAgentServer>().is_none()
+ {
+ return ThreadState::LoadError(LoadError::Other(
+ "External agents are not yet supported in shared projects.".into(),
+ ));
+ }
+ let mut worktrees = project.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
+ // Pick the first non-single-file worktree for the root directory if there are any,
+ // and otherwise the parent of a single-file worktree, falling back to $HOME if there are no visible worktrees.
+ worktrees.sort_by(|l, r| {
+ l.read(cx)
+ .is_single_file()
+ .cmp(&r.read(cx).is_single_file())
+ });
+ let root_dir = worktrees
+ .into_iter()
+ .filter_map(|worktree| {
+ if worktree.read(cx).is_single_file() {
+ Some(worktree.read(cx).abs_path().parent()?.into())
+ } else {
+ Some(worktree.read(cx).abs_path())
+ }
+ })
+ .next();
+ let (status_tx, mut status_rx) = watch::channel("Loading…".into());
+ let (new_version_available_tx, mut new_version_available_rx) = watch::channel(None);
+ let delegate = AgentServerDelegate::new(
+ project.read(cx).agent_server_store().clone(),
+ project.clone(),
+ Some(status_tx),
+ Some(new_version_available_tx),
+ );
- let connect_task = agent.connect(&root_dir, &project, cx);
+ let connect_task = agent.connect(root_dir.as_deref(), delegate, cx);
let load_task = cx.spawn_in(window, async move |this, cx| {
let connection = match connect_task.await {
- Ok(connection) => connection,
+ Ok((connection, login)) => {
+ this.update(cx, |this, _| this.login = login).ok();
+ connection
+ }
Err(err) => {
- this.update(cx, |this, cx| {
- this.handle_load_error(err, cx);
+ this.update_in(cx, |this, window, cx| {
+ if err.downcast_ref::<LoadError>().is_some() {
+ this.handle_load_error(err, window, cx);
+ } else {
+ this.handle_thread_error(err, cx);
+ }
cx.notify();
})
.log_err();
@@ -260,7 +503,7 @@ impl AcpThreadView {
let result = if let Some(native_agent) = connection
.clone()
.downcast::<agent2::NativeAgentConnection>()
- && let Some(resume) = resume_thread
+ && let Some(resume) = resume_thread.clone()
{
cx.update(|_, cx| {
native_agent
@@ -269,6 +512,14 @@ impl AcpThreadView {
})
.log_err()
} else {
+ let root_dir = if let Some(acp_agent) = connection
+ .clone()
+ .downcast::<agent_servers::AcpConnection>()
+ {
+ acp_agent.root_dir().into()
+ } else {
+ root_dir.unwrap_or(paths::home_dir().as_path().into())
+ };
cx.update(|_, cx| {
connection
.clone()
@@ -298,21 +549,31 @@ impl AcpThreadView {
this.update_in(cx, |this, window, cx| {
match result {
Ok(thread) => {
- let thread_subscription =
- cx.subscribe_in(&thread, window, Self::handle_thread_event);
-
let action_log = thread.read(cx).action_log().clone();
- let action_log_subscription =
- cx.observe(&action_log, |_, _, cx| cx.notify());
+
+ this.prompt_capabilities
+ .set(thread.read(cx).prompt_capabilities());
let count = thread.read(cx).entries().len();
- this.list_state.splice(0..0, count);
this.entry_view_state.update(cx, |view_state, cx| {
for ix in 0..count {
view_state.sync_entry(ix, &thread, window, cx);
}
+ this.list_state.splice_focusable(
+ 0..0,
+ (0..count).map(|ix| view_state.entry(ix)?.focus_handle(cx)),
+ );
});
+ if let Some(resume) = resume_thread {
+ this.history_store.update(cx, |history, cx| {
+ history.push_recently_opened_entry(
+ HistoryEntryId::AcpThread(resume.id),
+ cx,
+ );
+ });
+ }
+
AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx);
this.model_selector =
@@ -333,10 +594,33 @@ impl AcpThreadView {
})
});
+ let mut subscriptions = vec![
+ cx.subscribe_in(&thread, window, Self::handle_thread_event),
+ cx.observe(&action_log, |_, _, cx| cx.notify()),
+ ];
+
+ let title_editor =
+ if thread.update(cx, |thread, cx| thread.can_set_title(cx)) {
+ let editor = cx.new(|cx| {
+ let mut editor = Editor::single_line(window, cx);
+ editor.set_text(thread.read(cx).title(), window, cx);
+ editor
+ });
+ subscriptions.push(cx.subscribe_in(
+ &editor,
+ window,
+ Self::handle_title_editor_event,
+ ));
+ Some(editor)
+ } else {
+ None
+ };
this.thread_state = ThreadState::Ready {
thread,
- _subscription: [thread_subscription, action_log_subscription],
+ title_editor,
+ _subscriptions: subscriptions,
};
+ this.message_editor.focus_handle(cx).focus(window);
this.profile_selector = this.as_native_thread(cx).map(|thread| {
cx.new(|cx| {
@@ -352,14 +636,45 @@ impl AcpThreadView {
cx.notify();
}
Err(err) => {
- this.handle_load_error(err, cx);
+ this.handle_load_error(err, window, cx);
}
};
})
.log_err();
});
- ThreadState::Loading { _task: load_task }
+ cx.spawn(async move |this, cx| {
+ while let Ok(new_version) = new_version_available_rx.recv().await {
+ if let Some(new_version) = new_version {
+ this.update(cx, |this, cx| {
+ this.new_server_version_available = Some(new_version.into());
+ cx.notify();
+ })
+ .log_err();
+ }
+ }
+ })
+ .detach();
+
+ let loading_view = cx.new(|cx| {
+ let update_title_task = cx.spawn(async move |this, cx| {
+ loop {
+ let status = status_rx.recv().await?;
+ this.update(cx, |this: &mut LoadingView, cx| {
+ this.title = status;
+ cx.notify();
+ })?;
+ }
+ });
+
+ LoadingView {
+ title: "Loading…".into(),
+ _load_task: load_task,
+ _update_title_task: update_title_task,
+ }
+ });
+
+ ThreadState::Loading(loading_view)
}
fn handle_auth_required(
@@ -380,17 +695,13 @@ impl AcpThreadView {
move |_, ev, window, cx| {
if let language_model::Event::ProviderStateChanged(updated_provider_id) = &ev
&& &provider_id == updated_provider_id
+ && LanguageModelRegistry::global(cx)
+ .read(cx)
+ .provider(&provider_id)
+ .map_or(false, |provider| provider.is_authenticated(cx))
{
this.update(cx, |this, cx| {
- this.thread_state = Self::initial_state(
- agent.clone(),
- None,
- this.workspace.clone(),
- this.project.clone(),
- window,
- cx,
- );
- cx.notify();
+ this.reset(window, cx);
})
.ok();
}
@@ -399,7 +710,7 @@ impl AcpThreadView {
let view = registry.read(cx).provider(&provider_id).map(|provider| {
provider.configuration_view(
- language_model::ConfigurationViewTargetAgent::Other(agent_name),
+ language_model::ConfigurationViewTargetAgent::Other(agent_name.clone()),
window,
cx,
)
@@ -412,6 +723,7 @@ impl AcpThreadView {
this.update(cx, |this, cx| {
this.thread_state = ThreadState::Unauthenticated {
+ pending_auth_method: None,
connection,
configuration_view,
description: err
@@ -420,37 +732,64 @@ impl AcpThreadView {
.map(|desc| cx.new(|cx| Markdown::new(desc.into(), None, None, cx))),
_subscription: subscription,
};
+ if this.message_editor.focus_handle(cx).is_focused(window) {
+ this.focus_handle.focus(window)
+ }
cx.notify();
})
.ok();
}
- fn handle_load_error(&mut self, err: anyhow::Error, cx: &mut Context<Self>) {
+ fn handle_load_error(
+ &mut self,
+ err: anyhow::Error,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
if let Some(load_err) = err.downcast_ref::<LoadError>() {
self.thread_state = ThreadState::LoadError(load_err.clone());
} else {
self.thread_state = ThreadState::LoadError(LoadError::Other(err.to_string().into()))
}
+ if self.message_editor.focus_handle(cx).is_focused(window) {
+ self.focus_handle.focus(window)
+ }
cx.notify();
}
+ pub fn workspace(&self) -> &WeakEntity<Workspace> {
+ &self.workspace
+ }
+
pub fn thread(&self) -> Option<&Entity<AcpThread>> {
match &self.thread_state {
ThreadState::Ready { thread, .. } => Some(thread),
ThreadState::Unauthenticated { .. }
| ThreadState::Loading { .. }
- | ThreadState::LoadError(..)
- | ThreadState::ServerExited { .. } => None,
+ | ThreadState::LoadError { .. } => None,
}
}
pub fn title(&self, cx: &App) -> SharedString {
match &self.thread_state {
- ThreadState::Ready { thread, .. } => thread.read(cx).title(),
- ThreadState::Loading { .. } => "Loading…".into(),
- ThreadState::LoadError(_) => "Failed to load".into(),
- ThreadState::Unauthenticated { .. } => "Authentication Required".into(),
- ThreadState::ServerExited { .. } => "Server exited unexpectedly".into(),
+ ThreadState::Ready { .. } | ThreadState::Unauthenticated { .. } => "New Thread".into(),
+ ThreadState::Loading(loading_view) => loading_view.read(cx).title.clone(),
+ ThreadState::LoadError(error) => match error {
+ LoadError::Unsupported { .. } => format!("Upgrade {}", self.agent.name()).into(),
+ LoadError::FailedToInstall(_) => {
+ format!("Failed to Install {}", self.agent.name()).into()
+ }
+ LoadError::Exited { .. } => format!("{} Exited", self.agent.name()).into(),
+ LoadError::Other(_) => format!("Error Loading {}", self.agent.name()).into(),
+ },
+ }
+ }
+
+ pub fn title_editor(&self) -> Option<Entity<Editor>> {
+ if let ThreadState::Ready { title_editor, .. } = &self.thread_state {
+ title_editor.clone()
+ } else {
+ None
}
}
@@ -498,6 +837,35 @@ impl AcpThreadView {
cx.notify();
}
+ pub fn handle_title_editor_event(
+ &mut self,
+ title_editor: &Entity<Editor>,
+ event: &EditorEvent,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(thread) = self.thread() else { return };
+
+ match event {
+ EditorEvent::BufferEdited => {
+ let new_title = title_editor.read(cx).text(cx);
+ thread.update(cx, |thread, cx| {
+ thread
+ .set_title(new_title.into(), cx)
+ .detach_and_log_err(cx);
+ })
+ }
+ EditorEvent::Blurred => {
+ if title_editor.read(cx).text(cx).is_empty() {
+ title_editor.update(cx, |editor, cx| {
+ editor.set_text("New Thread", window, cx);
+ });
+ }
+ }
+ _ => {}
+ }
+ }
+
pub fn handle_message_editor_event(
&mut self,
_: &Entity<MessageEditor>,
@@ -511,6 +879,7 @@ impl AcpThreadView {
MessageEditorEvent::Focus => {
self.cancel_editing(&Default::default(), window, cx);
}
+ MessageEditorEvent::LostFocus => {}
}
}
@@ -522,9 +891,40 @@ impl AcpThreadView {
cx: &mut Context<Self>,
) {
match &event.view_event {
+ ViewEvent::NewDiff(tool_call_id) => {
+ if AgentSettings::get_global(cx).expand_edit_card {
+ self.expanded_tool_calls.insert(tool_call_id.clone());
+ }
+ }
+ ViewEvent::NewTerminal(tool_call_id) => {
+ if AgentSettings::get_global(cx).expand_terminal_card {
+ self.expanded_tool_calls.insert(tool_call_id.clone());
+ }
+ }
+ ViewEvent::TerminalMovedToBackground(tool_call_id) => {
+ self.expanded_tool_calls.remove(tool_call_id);
+ }
ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Focus) => {
- self.editing_message = Some(event.entry_index);
- cx.notify();
+ if let Some(thread) = self.thread()
+ && let Some(AgentThreadEntry::UserMessage(user_message)) =
+ thread.read(cx).entries().get(event.entry_index)
+ && user_message.id.is_some()
+ {
+ self.editing_message = Some(event.entry_index);
+ cx.notify();
+ }
+ }
+ ViewEvent::MessageEditorEvent(editor, MessageEditorEvent::LostFocus) => {
+ if let Some(thread) = self.thread()
+ && let Some(AgentThreadEntry::UserMessage(user_message)) =
+ thread.read(cx).entries().get(event.entry_index)
+ && user_message.id.is_some()
+ {
+ if editor.read(cx).text(cx).as_str() == user_message.content.to_markdown(cx) {
+ self.editing_message = None;
+ cx.notify();
+ }
+ }
}
ViewEvent::MessageEditorEvent(editor, MessageEditorEvent::Send) => {
self.regenerate(event.entry_index, editor, window, cx);
@@ -540,6 +940,9 @@ impl AcpThreadView {
let Some(thread) = self.thread() else {
return;
};
+ if !thread.read(cx).can_resume(cx) {
+ return;
+ }
let task = thread.update(cx, |thread, cx| thread.resume(cx));
cx.spawn(async move |this, cx| {
@@ -555,16 +958,61 @@ impl AcpThreadView {
}
fn send(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- if let Some(thread) = self.thread()
- && thread.read(cx).status() != ThreadStatus::Idle
- {
+ let Some(thread) = self.thread() else { return };
+
+ if self.is_loading_contents {
+ return;
+ }
+
+ self.history_store.update(cx, |history, cx| {
+ history.push_recently_opened_entry(
+ HistoryEntryId::AcpThread(thread.read(cx).session_id().clone()),
+ cx,
+ );
+ });
+
+ if thread.read(cx).status() != ThreadStatus::Idle {
self.stop_current_and_send_new_message(window, cx);
return;
}
+ let text = self.message_editor.read(cx).text(cx);
+ let text = text.trim();
+ if text == "/login" || text == "/logout" {
+ let ThreadState::Ready { thread, .. } = &self.thread_state else {
+ return;
+ };
+
+ let connection = thread.read(cx).connection().clone();
+ if !connection
+ .auth_methods()
+ .iter()
+ .any(|method| method.id.0.as_ref() == "claude-login")
+ {
+ return;
+ };
+ let this = cx.weak_entity();
+ let agent = self.agent.clone();
+ window.defer(cx, |window, cx| {
+ Self::handle_auth_required(
+ this,
+ AuthRequired {
+ description: None,
+ provider_id: None,
+ },
+ agent,
+ connection,
+ window,
+ cx,
+ );
+ });
+ cx.notify();
+ return;
+ }
+
let contents = self
.message_editor
- .update(cx, |message_editor, cx| message_editor.contents(window, cx));
+ .update(cx, |message_editor, cx| message_editor.contents(cx));
self.send_impl(contents, window, cx)
}
@@ -577,7 +1025,7 @@ impl AcpThreadView {
let contents = self
.message_editor
- .update(cx, |message_editor, cx| message_editor.contents(window, cx));
+ .update(cx, |message_editor, cx| message_editor.contents(cx));
cx.spawn_in(window, async move |this, cx| {
cancelled.await;
@@ -592,18 +1040,38 @@ impl AcpThreadView {
fn send_impl(
&mut self,
- contents: Task<anyhow::Result<Vec<acp::ContentBlock>>>,
+ contents: Task<Result<(Vec<acp::ContentBlock>, Vec<Entity<Buffer>>)>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
+ let agent_telemetry_id = self.agent.telemetry_id();
+
self.thread_error.take();
self.editing_message.take();
+ self.thread_feedback.clear();
- let Some(thread) = self.thread().cloned() else {
+ let Some(thread) = self.thread() else {
return;
};
+ let thread = thread.downgrade();
+ if self.should_be_following {
+ self.workspace
+ .update(cx, |workspace, cx| {
+ workspace.follow(CollaboratorId::Agent, window, cx);
+ })
+ .ok();
+ }
+
+ self.is_loading_contents = true;
+ let guard = cx.new(|_| ());
+ cx.observe_release(&guard, |this, _guard, cx| {
+ this.is_loading_contents = false;
+ cx.notify();
+ })
+ .detach();
+
let task = cx.spawn_in(window, async move |this, cx| {
- let contents = contents.await?;
+ let (contents, tracked_buffers) = contents.await?;
if contents.is_empty() {
return Ok(());
@@ -616,7 +1084,18 @@ impl AcpThreadView {
message_editor.clear(window, cx);
});
})?;
- let send = thread.update(cx, |thread, cx| thread.send(contents, cx))?;
+ let send = thread.update(cx, |thread, cx| {
+ thread.action_log().update(cx, |action_log, cx| {
+ for buffer in tracked_buffers {
+ action_log.buffer_read(buffer, cx)
+ }
+ });
+ drop(guard);
+
+ telemetry::event!("Agent Message Sent", agent = agent_telemetry_id);
+
+ thread.send(contents, cx)
+ })?;
send.await
});
@@ -23,9 +23,8 @@ use gpui::{
AbsoluteLength, Animation, AnimationExt, AnyElement, App, ClickEvent, ClipboardEntry,
ClipboardItem, DefiniteLength, EdgesRefinement, Empty, Entity, EventEmitter, Focusable, Hsla,
ListAlignment, ListOffset, ListState, MouseButton, PlatformDisplay, ScrollHandle, Stateful,
- StyleRefinement, Subscription, Task, TextStyle, TextStyleRefinement, Transformation,
- UnderlineStyle, WeakEntity, WindowHandle, linear_color_stop, linear_gradient, list, percentage,
- pulsating_between,
+ StyleRefinement, Subscription, Task, TextStyle, TextStyleRefinement, UnderlineStyle,
+ WeakEntity, WindowHandle, linear_color_stop, linear_gradient, list, pulsating_between,
};
use language::{Buffer, Language, LanguageRegistry};
use language_model::{
@@ -46,8 +45,8 @@ use std::time::Duration;
use text::ToPoint;
use theme::ThemeSettings;
use ui::{
- Banner, Disclosure, KeyBinding, PopoverMenuHandle, Scrollbar, ScrollbarState, TextSize,
- Tooltip, prelude::*,
+ Banner, CommonAnimationExt, Disclosure, KeyBinding, PopoverMenuHandle, Scrollbar,
+ ScrollbarState, TextSize, Tooltip, prelude::*,
};
use util::ResultExt as _;
use util::markdown::MarkdownCodeBlock;
@@ -491,7 +490,7 @@ fn render_markdown_code_block(
.on_click({
let active_thread = active_thread.clone();
let parsed_markdown = parsed_markdown.clone();
- let code_block_range = metadata.content_range.clone();
+ let code_block_range = metadata.content_range;
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
this.copied_code_block_ids.insert((message_id, ix));
@@ -532,7 +531,6 @@ fn render_markdown_code_block(
"Expand Code"
}))
.on_click({
- let active_thread = active_thread.clone();
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
this.toggle_codeblock_expanded(message_id, ix);
@@ -780,13 +778,11 @@ impl ActiveThread {
let list_state = ListState::new(0, ListAlignment::Bottom, px(2048.));
- let workspace_subscription = if let Some(workspace) = workspace.upgrade() {
- Some(cx.observe_release(&workspace, |this, _, cx| {
+ let workspace_subscription = workspace.upgrade().map(|workspace| {
+ cx.observe_release(&workspace, |this, _, cx| {
this.dismiss_notifications(cx);
- }))
- } else {
- None
- };
+ })
+ });
let mut this = Self {
language_registry,
@@ -916,7 +912,7 @@ impl ActiveThread {
) {
let rendered = self
.rendered_tool_uses
- .entry(tool_use_id.clone())
+ .entry(tool_use_id)
.or_insert_with(|| RenderedToolUse {
label: cx.new(|cx| {
Markdown::new("".into(), Some(self.language_registry.clone()), None, cx)
@@ -1005,8 +1001,22 @@ impl ActiveThread {
// Don't notify for intermediate tool use
}
Ok(StopReason::Refusal) => {
+ let model_name = self
+ .thread
+ .read(cx)
+ .configured_model()
+ .map(|configured| configured.model.name().0.to_string())
+ .unwrap_or_else(|| "The model".to_string());
+ let refusal_message = format!(
+ "{} refused to respond to this prompt. This can happen when a model believes the prompt violates its content policy or safety guidelines, so rephrasing it can sometimes address the issue.",
+ model_name
+ );
+ self.last_error = Some(ThreadError::Message {
+ header: SharedString::from("Request Refused"),
+ message: SharedString::from(refusal_message),
+ });
self.notify_with_sound(
- "Language model refused to respond",
+ format!("{} refused to respond", model_name),
IconName::Warning,
window,
cx,
@@ -1218,7 +1228,7 @@ impl ActiveThread {
match AgentSettings::get_global(cx).notify_when_agent_waiting {
NotifyWhenAgentWaiting::PrimaryScreen => {
if let Some(primary) = cx.primary_display() {
- self.pop_up(icon, caption.into(), title.clone(), window, primary, cx);
+ self.pop_up(icon, caption.into(), title, window, primary, cx);
}
}
NotifyWhenAgentWaiting::AllScreens => {
@@ -1373,12 +1383,12 @@ impl ActiveThread {
editor.focus_handle(cx).focus(window);
editor.move_to_end(&editor::actions::MoveToEnd, window, cx);
});
- let buffer_edited_subscription = cx.subscribe(&editor, |this, _, event, cx| match event {
- EditorEvent::BufferEdited => {
- this.update_editing_message_token_count(true, cx);
- }
- _ => {}
- });
+ let buffer_edited_subscription =
+ cx.subscribe(&editor, |this, _, event: &EditorEvent, cx| {
+ if event == &EditorEvent::BufferEdited {
+ this.update_editing_message_token_count(true, cx);
+ }
+ });
let context_picker_menu_handle = PopoverMenuHandle::default();
let context_strip = cx.new(|cx| {
@@ -1598,11 +1608,6 @@ impl ActiveThread {
return;
};
- if model.provider.must_accept_terms(cx) {
- cx.notify();
- return;
- }
-
let edited_text = state.editor.read(cx).text(cx);
let creases = state.editor.update(cx, extract_message_creases);
@@ -1765,7 +1770,7 @@ impl ActiveThread {
.thread
.read(cx)
.message(message_id)
- .map(|msg| msg.to_string())
+ .map(|msg| msg.to_message_content())
.unwrap_or_default();
telemetry::event!(
@@ -2112,7 +2117,7 @@ impl ActiveThread {
.gap_1()
.children(message_content)
.when_some(editing_message_state, |this, state| {
- let focus_handle = state.editor.focus_handle(cx).clone();
+ let focus_handle = state.editor.focus_handle(cx);
this.child(
h_flex()
@@ -2173,7 +2178,6 @@ impl ActiveThread {
.icon_color(Color::Muted)
.icon_size(IconSize::Small)
.tooltip({
- let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Regenerate",
@@ -2312,7 +2316,7 @@ impl ActiveThread {
.into_any_element()
} else if let Some(error) = error {
restore_checkpoint_button
- .tooltip(Tooltip::text(error.to_string()))
+ .tooltip(Tooltip::text(error))
.into_any_element()
} else {
restore_checkpoint_button.into_any_element()
@@ -2353,7 +2357,6 @@ impl ActiveThread {
this.submit_feedback_message(message_id, cx);
cx.notify();
}))
- .on_action(cx.listener(Self::confirm_editing_message))
.mb_2()
.mx_4()
.p_2()
@@ -2657,15 +2660,7 @@ impl ActiveThread {
Icon::new(IconName::ArrowCircle)
.color(Color::Accent)
.size(IconSize::Small)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(
- percentage(delta),
- ))
- },
- )
+ .with_rotate_animation(2)
}),
),
)
@@ -2841,17 +2836,11 @@ impl ActiveThread {
}
ToolUseStatus::Pending
| ToolUseStatus::InputStillStreaming
- | ToolUseStatus::Running => {
- let icon = Icon::new(IconName::ArrowCircle)
- .color(Color::Accent)
- .size(IconSize::Small);
- icon.with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
- .into_any_element()
- }
+ | ToolUseStatus::Running => Icon::new(IconName::ArrowCircle)
+ .color(Color::Accent)
+ .size(IconSize::Small)
+ .with_rotate_animation(2)
+ .into_any_element(),
ToolUseStatus::Finished(_) => div().w_0().into_any_element(),
ToolUseStatus::Error(_) => {
let icon = Icon::new(IconName::Close)
@@ -2940,15 +2929,7 @@ impl ActiveThread {
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
.color(Color::Accent)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(percentage(
- delta,
- )))
- },
- ),
+ .with_rotate_animation(2),
)
.child(
Label::new("Running…")
@@ -3604,7 +3585,7 @@ pub(crate) fn open_active_thread_as_markdown(
}
let buffer = project.update(cx, |project, cx| {
- project.create_local_buffer(&markdown, Some(markdown_language), cx)
+ project.create_local_buffer(&markdown, Some(markdown_language), true, cx)
});
let buffer =
cx.new(|cx| MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone()));
@@ -3,19 +3,21 @@ mod configure_context_server_modal;
mod manage_profiles_modal;
mod tool_picker;
-use std::{sync::Arc, time::Duration};
+use std::{ops::Range, sync::Arc};
use agent_settings::AgentSettings;
+use anyhow::Result;
use assistant_tool::{ToolSource, ToolWorkingSet};
use cloud_llm_client::Plan;
use collections::HashMap;
use context_server::ContextServerId;
+use editor::{Editor, SelectionEffects, scroll::Autoscroll};
use extension::ExtensionManifest;
use extension_host::ExtensionStore;
use fs::Fs;
use gpui::{
- Action, Animation, AnimationExt as _, AnyView, App, Corner, Entity, EventEmitter, FocusHandle,
- Focusable, ScrollHandle, Subscription, Task, Transformation, WeakEntity, percentage,
+ Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
+ Hsla, ScrollHandle, Subscription, Task, WeakEntity,
};
use language::LanguageRegistry;
use language_model::{
@@ -23,29 +25,36 @@ use language_model::{
};
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
+ agent_server_store::{
+ AgentServerCommand, AgentServerStore, AllAgentServersSettings, CLAUDE_CODE_NAME,
+ CustomAgentServerSettings, GEMINI_NAME,
+ },
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
project_settings::{ContextServerSettings, ProjectSettings},
};
-use settings::{Settings, update_settings_file};
+use settings::{Settings, SettingsStore, update_settings_file};
use ui::{
- Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
- Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*,
+ Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
+ Indicator, PopoverMenu, Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip,
+ prelude::*,
};
use util::ResultExt as _;
-use workspace::Workspace;
+use workspace::{Workspace, create_and_open_local_file};
use zed_actions::ExtensionCategoryFilter;
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal;
use crate::{
- AddContextServer,
+ AddContextServer, ExternalAgent, NewExternalAgentThread,
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
+ placeholder_command,
};
pub struct AgentConfiguration {
fs: Arc<dyn Fs>,
language_registry: Arc<LanguageRegistry>,
+ agent_server_store: Entity<AgentServerStore>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
@@ -56,11 +65,13 @@ pub struct AgentConfiguration {
_registry_subscription: Subscription,
scroll_handle: ScrollHandle,
scrollbar_state: ScrollbarState,
+ _check_for_gemini: Task<()>,
}
impl AgentConfiguration {
pub fn new(
fs: Arc<dyn Fs>,
+ agent_server_store: Entity<AgentServerStore>,
context_server_store: Entity<ContextServerStore>,
tools: Entity<ToolWorkingSet>,
language_registry: Arc<LanguageRegistry>,
@@ -93,27 +104,21 @@ impl AgentConfiguration {
let scroll_handle = ScrollHandle::new();
let scrollbar_state = ScrollbarState::new(scroll_handle.clone());
- let mut expanded_provider_configurations = HashMap::default();
- if LanguageModelRegistry::read_global(cx)
- .provider(&ZED_CLOUD_PROVIDER_ID)
- .is_some_and(|cloud_provider| cloud_provider.must_accept_terms(cx))
- {
- expanded_provider_configurations.insert(ZED_CLOUD_PROVIDER_ID, true);
- }
-
let mut this = Self {
fs,
language_registry,
workspace,
focus_handle,
configuration_views_by_provider: HashMap::default(),
+ agent_server_store,
context_server_store,
expanded_context_server_tools: HashMap::default(),
- expanded_provider_configurations,
+ expanded_provider_configurations: HashMap::default(),
tools,
_registry_subscription: registry_subscription,
scroll_handle,
scrollbar_state,
+ _check_for_gemini: Task::ready(()),
};
this.build_provider_configuration_views(window, cx);
this
@@ -165,8 +170,8 @@ impl AgentConfiguration {
provider: &Arc<dyn LanguageModelProvider>,
cx: &mut Context<Self>,
) -> impl IntoElement + use<> {
- let provider_id = provider.id().0.clone();
- let provider_name = provider.name().0.clone();
+ let provider_id = provider.id().0;
+ let provider_name = provider.name().0;
let provider_id_string = SharedString::from(format!("provider-disclosure-{provider_id}"));
let configuration_view = self
@@ -192,7 +197,7 @@ impl AgentConfiguration {
let is_signed_in = self
.workspace
.read_with(cx, |workspace, _| {
- workspace.client().status().borrow().is_connected()
+ !workspace.client().status().borrow().is_signed_out()
})
.unwrap_or(false);
@@ -219,7 +224,6 @@ impl AgentConfiguration {
.child(
h_flex()
.id(provider_id_string.clone())
- .cursor_pointer()
.px_2()
.py_0p5()
.w_full()
@@ -239,10 +243,7 @@ impl AgentConfiguration {
h_flex()
.w_full()
.gap_1()
- .child(
- Label::new(provider_name.clone())
- .size(LabelSize::Large),
- )
+ .child(Label::new(provider_name.clone()))
.map(|this| {
if is_zed_provider && is_signed_in {
this.child(
@@ -269,7 +270,7 @@ impl AgentConfiguration {
.closed_icon(IconName::ChevronDown),
)
.on_click(cx.listener({
- let provider_id = provider.id().clone();
+ let provider_id = provider.id();
move |this, _event, _window, _cx| {
let is_expanded = this
.expanded_provider_configurations
@@ -287,7 +288,7 @@ impl AgentConfiguration {
"Start New Thread",
)
.icon_position(IconPosition::Start)
- .icon(IconName::Plus)
+ .icon(IconName::Thread)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
@@ -337,6 +338,7 @@ impl AgentConfiguration {
.gap_0p5()
.child(
h_flex()
+ .pr_1()
.w_full()
.gap_2()
.justify_between()
@@ -386,7 +388,7 @@ impl AgentConfiguration {
),
)
.child(
- Label::new("Add at least one provider to use AI-powered features.")
+ Label::new("Add at least one provider to use AI-powered features with Zed's native agent.")
.color(Color::Muted),
),
),
@@ -527,6 +529,14 @@ impl AgentConfiguration {
}
}
+    /// Background fill for card-style rows (MCP servers, external agents):
+    /// the theme background dimmed to 25% opacity so cards sit visually
+    /// behind the section chrome.
+    fn card_item_bg_color(&self, cx: &mut Context<Self>) -> Hsla {
+        cx.theme().colors().background.opacity(0.25)
+    }
+
+    /// Border color for card-style rows: the theme border softened to 60%
+    /// opacity, matching the subdued card background above.
+    fn card_item_border_color(&self, cx: &mut Context<Self>) -> Hsla {
+        cx.theme().colors().border.opacity(0.6)
+    }
+
fn render_context_servers_section(
&mut self,
window: &mut Window,
@@ -544,7 +554,12 @@ impl AgentConfiguration {
v_flex()
.gap_0p5()
.child(Headline::new("Model Context Protocol (MCP) Servers"))
- .child(Label::new("Connect to context servers through the Model Context Protocol, either using Zed extensions or directly.").color(Color::Muted)),
+ .child(
+ Label::new(
+ "All context servers connected through the Model Context Protocol.",
+ )
+ .color(Color::Muted),
+ ),
)
.children(
context_server_ids.into_iter().map(|context_server_id| {
@@ -554,7 +569,7 @@ impl AgentConfiguration {
.child(
h_flex()
.justify_between()
- .gap_2()
+ .gap_1p5()
.child(
h_flex().w_full().child(
Button::new("add-context-server", "Add Custom Server")
@@ -645,8 +660,6 @@ impl AgentConfiguration {
.map_or([].as_slice(), |tools| tools.as_slice());
let tool_count = tools.len();
- let border_color = cx.theme().colors().border.opacity(0.6);
-
let (source_icon, source_tooltip) = if is_from_extension {
(
IconName::ZedMcpExtension,
@@ -664,10 +677,9 @@ impl AgentConfiguration {
Icon::new(IconName::LoadCircle)
.size(IconSize::XSmall)
.color(Color::Accent)
- .with_animation(
- SharedString::from(format!("{}-starting", context_server_id.0.clone(),)),
- Animation::new(Duration::from_secs(3)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
+ .with_keyed_rotate_animation(
+ SharedString::from(format!("{}-starting", context_server_id.0)),
+ 3,
)
.into_any_element(),
"Server is starting.",
@@ -789,8 +801,8 @@ impl AgentConfiguration {
.id(item_id.clone())
.border_1()
.rounded_md()
- .border_color(border_color)
- .bg(cx.theme().colors().background.opacity(0.2))
+ .border_color(self.card_item_border_color(cx))
+ .bg(self.card_item_bg_color(cx))
.overflow_hidden()
.child(
h_flex()
@@ -798,7 +810,11 @@ impl AgentConfiguration {
.justify_between()
.when(
error.is_some() || are_tools_expanded && tool_count >= 1,
- |element| element.border_b_1().border_color(border_color),
+ |element| {
+ element
+ .border_b_1()
+ .border_color(self.card_item_border_color(cx))
+ },
)
.child(
h_flex()
@@ -865,7 +881,6 @@ impl AgentConfiguration {
.on_click({
let context_server_manager =
self.context_server_store.clone();
- let context_server_id = context_server_id.clone();
let fs = self.fs.clone();
move |state, _window, cx| {
@@ -958,7 +973,7 @@ impl AgentConfiguration {
}
parent.child(v_flex().py_1p5().px_1().gap_1().children(
- tools.into_iter().enumerate().map(|(ix, tool)| {
+ tools.iter().enumerate().map(|(ix, tool)| {
h_flex()
.id(("tool-item", ix))
.px_1()
@@ -981,6 +996,149 @@ impl AgentConfiguration {
))
})
}
+
+    /// Renders the "External Agents" settings section: the built-in Gemini
+    /// CLI and Claude Code entries, followed by any user-defined ACP agents
+    /// from `AllAgentServersSettings.custom`, plus an "Add Agent" button that
+    /// opens settings.json with a new agent stub (see
+    /// `open_new_agent_servers_entry_in_settings_editor`).
+    fn render_agent_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
+        let custom_settings = cx
+            .global::<SettingsStore>()
+            .get::<AllAgentServersSettings>(None)
+            .custom
+            .clone();
+        // Built-in agents are rendered explicitly below; filter them out of
+        // the user-defined list so they don't appear twice.
+        let user_defined_agents = self
+            .agent_server_store
+            .read(cx)
+            .external_agents()
+            .filter(|name| name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME)
+            .cloned()
+            .collect::<Vec<_>>();
+        let user_defined_agents = user_defined_agents
+            .into_iter()
+            .map(|name| {
+                self.render_agent_server(
+                    IconName::Ai,
+                    name.clone(),
+                    ExternalAgent::Custom {
+                        name: name.clone().into(),
+                        // Fall back to a placeholder command when the agent
+                        // has no entry in settings (e.g. settings were edited
+                        // since the store was populated).
+                        command: custom_settings
+                            .get(&name.0)
+                            .map(|settings| settings.command.clone())
+                            .unwrap_or(placeholder_command()),
+                    },
+                    cx,
+                )
+                .into_any_element()
+            })
+            .collect::<Vec<_>>();
+
+        v_flex()
+            .border_b_1()
+            .border_color(cx.theme().colors().border)
+            .child(
+                v_flex()
+                    .p(DynamicSpacing::Base16.rems(cx))
+                    .pr(DynamicSpacing::Base20.rems(cx))
+                    .gap_2()
+                    .child(
+                        v_flex()
+                            .gap_0p5()
+                            .child(
+                                h_flex()
+                                    .pr_1()
+                                    .w_full()
+                                    .gap_2()
+                                    .justify_between()
+                                    .child(Headline::new("External Agents"))
+                                    .child(
+                                        Button::new("add-agent", "Add Agent")
+                                            .icon_position(IconPosition::Start)
+                                            .icon(IconName::Plus)
+                                            .icon_size(IconSize::Small)
+                                            .icon_color(Color::Muted)
+                                            .label_size(LabelSize::Small)
+                                            .on_click(
+                                                move |_, window, cx| {
+                                                    if let Some(workspace) = window.root().flatten() {
+                                                        let workspace = workspace.downgrade();
+                                                        window
+                                                            .spawn(cx, async |cx| {
+                                                                open_new_agent_servers_entry_in_settings_editor(
+                                                                    workspace,
+                                                                    cx,
+                                                                ).await
+                                                            })
+                                                            .detach_and_log_err(cx);
+                                                    }
+                                                }
+                                            ),
+                                    )
+                            )
+                            .child(
+                                Label::new(
+                                    "All agents connected through the Agent Client Protocol.",
+                                )
+                                .color(Color::Muted),
+                            ),
+                    )
+                    .child(self.render_agent_server(
+                        IconName::AiGemini,
+                        "Gemini CLI",
+                        ExternalAgent::Gemini,
+                        cx,
+                    ))
+                    .child(self.render_agent_server(
+                        IconName::AiClaude,
+                        "Claude Code",
+                        ExternalAgent::ClaudeCode,
+                        cx,
+                    ))
+                    .children(user_defined_agents),
+            )
+    }
+
+    /// Renders one external-agent card: icon and name on the left, a
+    /// "Start New Thread" button on the right that dispatches
+    /// `NewExternalAgentThread` for the given `agent`.
+    fn render_agent_server(
+        &self,
+        icon: IconName,
+        name: impl Into<SharedString>,
+        agent: ExternalAgent,
+        cx: &mut Context<Self>,
+    ) -> impl IntoElement {
+        let name = name.into();
+        h_flex()
+            .p_1()
+            .pl_2()
+            .gap_1p5()
+            .justify_between()
+            .border_1()
+            .rounded_md()
+            .border_color(self.card_item_border_color(cx))
+            .bg(self.card_item_bg_color(cx))
+            .overflow_hidden()
+            .child(
+                h_flex()
+                    .gap_1p5()
+                    .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
+                    .child(Label::new(name.clone())),
+            )
+            .child(
+                Button::new(
+                    // Unique element id per agent so GPUI can track state.
+                    SharedString::from(format!("start_acp_thread-{name}")),
+                    "Start New Thread",
+                )
+                .label_size(LabelSize::Small)
+                .icon(IconName::Thread)
+                .icon_position(IconPosition::Start)
+                .icon_size(IconSize::XSmall)
+                .icon_color(Color::Muted)
+                .on_click(move |_, window, cx| {
+                    window.dispatch_action(
+                        NewExternalAgentThread {
+                            agent: Some(agent.clone()),
+                        }
+                        .boxed_clone(),
+                        cx,
+                    );
+                }),
+            )
+    }
}
impl Render for AgentConfiguration {
@@ -1000,6 +1158,7 @@ impl Render for AgentConfiguration {
.size_full()
.overflow_y_scroll()
.child(self.render_general_settings_section(cx))
+ .child(self.render_agent_servers_section(cx))
.child(self.render_context_servers_section(window, cx))
.child(self.render_provider_configuration_section(cx)),
)
@@ -1075,7 +1234,6 @@ fn show_unable_to_uninstall_extension_with_context_server(
cx,
move |this, _cx| {
let workspace_handle = workspace_handle.clone();
- let context_server_id = context_server_id.clone();
this.icon(ToastIcon::new(IconName::Warning).color(Color::Warning))
.dismiss_button(true)
@@ -1119,3 +1277,109 @@ fn show_unable_to_uninstall_extension_with_context_server(
workspace.toggle_status_toast(status_toast, cx);
}
+
+/// Opens the user's settings.json in an editor, inserts a stub custom agent
+/// entry (named `your_agent`, or `your_agent_N` if taken) under the
+/// agent-servers settings, and selects the inserted name so the user can
+/// type over it immediately.
+async fn open_new_agent_servers_entry_in_settings_editor(
+    workspace: WeakEntity<Workspace>,
+    cx: &mut AsyncWindowContext,
+) -> Result<()> {
+    let settings_editor = workspace
+        .update_in(cx, |_, window, cx| {
+            create_and_open_local_file(paths::settings_file(), window, cx, || {
+                settings::initial_user_settings_content().as_ref().into()
+            })
+        })?
+        .await?
+        .downcast::<Editor>()
+        .unwrap();
+
+    settings_editor
+        .downgrade()
+        .update_in(cx, |item, window, cx| {
+            let text = item.buffer().read(cx).snapshot(cx).text();
+
+            let settings = cx.global::<SettingsStore>();
+
+            let mut unique_server_name = None;
+            let edits = settings.edits_for_update::<AllAgentServersSettings>(&text, |file| {
+                // Pick the first "your_agent"/"your_agent_N" name not already
+                // present in the user's custom agents (bounded at u8::MAX
+                // attempts).
+                let server_name: Option<SharedString> = (0..u8::MAX)
+                    .map(|i| {
+                        if i == 0 {
+                            "your_agent".into()
+                        } else {
+                            format!("your_agent_{}", i).into()
+                        }
+                    })
+                    .find(|name| !file.custom.contains_key(name));
+                if let Some(server_name) = server_name {
+                    unique_server_name = Some(server_name.clone());
+                    file.custom.insert(
+                        server_name,
+                        CustomAgentServerSettings {
+                            command: AgentServerCommand {
+                                path: "path_to_executable".into(),
+                                args: vec![],
+                                env: Some(HashMap::default()),
+                            },
+                        },
+                    );
+                }
+            });
+
+            if edits.is_empty() {
+                return;
+            }
+
+            // Capture edit ranges before `edit` consumes them; the first
+            // range anchors the search for the inserted name below.
+            let ranges = edits
+                .iter()
+                .map(|(range, _)| range.clone())
+                .collect::<Vec<_>>();
+
+            item.edit(edits, cx);
+            if let Some((unique_server_name, buffer)) =
+                unique_server_name.zip(item.buffer().read(cx).as_singleton())
+            {
+                let snapshot = buffer.read(cx).snapshot();
+                if let Some(range) =
+                    find_text_in_buffer(&unique_server_name, ranges[0].start, &snapshot)
+                {
+                    // Select the inserted name so the user can rename it
+                    // immediately; autoscroll brings it into view.
+                    item.change_selections(
+                        SelectionEffects::scroll(Autoscroll::newest()),
+                        window,
+                        cx,
+                        |selections| {
+                            selections.select_ranges(vec![range]);
+                        },
+                    );
+                }
+            }
+        })
+}
+
+/// Finds the first occurrence of `text` in `snapshot` at or after byte
+/// offset `start`, returning the matched byte range, or `None` if the text
+/// does not occur before the end of the buffer.
+///
+/// The previous one-pass scan reset its match counter to zero on any
+/// mismatch without re-examining the current character, so needles with a
+/// repeated prefix could be missed (e.g. finding "aab" in "aaab"). It also
+/// advanced the offset by 1 per character, which miscounts multi-byte UTF-8
+/// text relative to the byte offsets `chars_at` and the returned range use.
+/// This version restarts the comparison one character past each failed
+/// attempt and advances offsets by `char::len_utf8`.
+fn find_text_in_buffer(
+    text: &str,
+    start: usize,
+    snapshot: &language::BufferSnapshot,
+) -> Option<Range<usize>> {
+    let needle: Vec<char> = text.chars().collect();
+    if needle.is_empty() {
+        // An empty needle trivially matches at the starting position.
+        return Some(start..start);
+    }
+
+    // Byte offset at which the current match attempt begins.
+    let mut attempt_start = start;
+    'attempts: loop {
+        let mut chars = snapshot.chars_at(attempt_start);
+        let mut end = attempt_start;
+        for &expected in &needle {
+            match chars.next() {
+                // Still matching: extend the candidate range by this char's
+                // UTF-8 width so the returned range is in byte offsets.
+                Some(c) if c == expected => end += c.len_utf8(),
+                // Mismatch: retry one character past the current attempt's
+                // start, preserving matches that overlap a failed prefix.
+                Some(_) => {
+                    let first = snapshot.chars_at(attempt_start).next()?;
+                    attempt_start += first.len_utf8();
+                    continue 'attempts;
+                }
+                // Ran out of buffer: the needle cannot occur any further.
+                None => return None,
+            }
+        }
+        return Some(attempt_start..end);
+    }
+}
@@ -668,10 +668,10 @@ mod tests {
);
let parsed_model = model_input.parse(cx).unwrap();
- assert_eq!(parsed_model.capabilities.tools, true);
- assert_eq!(parsed_model.capabilities.images, false);
- assert_eq!(parsed_model.capabilities.parallel_tool_calls, false);
- assert_eq!(parsed_model.capabilities.prompt_cache_key, false);
+ assert!(parsed_model.capabilities.tools);
+ assert!(!parsed_model.capabilities.images);
+ assert!(!parsed_model.capabilities.parallel_tool_calls);
+ assert!(!parsed_model.capabilities.prompt_cache_key);
});
}
@@ -693,10 +693,10 @@ mod tests {
model_input.capabilities.supports_prompt_cache_key = ToggleState::Unselected;
let parsed_model = model_input.parse(cx).unwrap();
- assert_eq!(parsed_model.capabilities.tools, false);
- assert_eq!(parsed_model.capabilities.images, false);
- assert_eq!(parsed_model.capabilities.parallel_tool_calls, false);
- assert_eq!(parsed_model.capabilities.prompt_cache_key, false);
+ assert!(!parsed_model.capabilities.tools);
+ assert!(!parsed_model.capabilities.images);
+ assert!(!parsed_model.capabilities.parallel_tool_calls);
+ assert!(!parsed_model.capabilities.prompt_cache_key);
});
}
@@ -719,10 +719,10 @@ mod tests {
let parsed_model = model_input.parse(cx).unwrap();
assert_eq!(parsed_model.name, "somemodel");
- assert_eq!(parsed_model.capabilities.tools, true);
- assert_eq!(parsed_model.capabilities.images, false);
- assert_eq!(parsed_model.capabilities.parallel_tool_calls, true);
- assert_eq!(parsed_model.capabilities.prompt_cache_key, false);
+ assert!(parsed_model.capabilities.tools);
+ assert!(!parsed_model.capabilities.images);
+ assert!(parsed_model.capabilities.parallel_tool_calls);
+ assert!(!parsed_model.capabilities.prompt_cache_key);
});
}
@@ -1,16 +1,14 @@
use std::{
path::PathBuf,
sync::{Arc, Mutex},
- time::Duration,
};
use anyhow::{Context as _, Result};
use context_server::{ContextServerCommand, ContextServerId};
use editor::{Editor, EditorElement, EditorStyle};
use gpui::{
- Animation, AnimationExt as _, AsyncWindowContext, DismissEvent, Entity, EventEmitter,
- FocusHandle, Focusable, Task, TextStyle, TextStyleRefinement, Transformation, UnderlineStyle,
- WeakEntity, percentage, prelude::*,
+ AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task,
+ TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*,
};
use language::{Language, LanguageRegistry};
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
@@ -24,7 +22,9 @@ use project::{
};
use settings::{Settings as _, update_settings_file};
use theme::ThemeSettings;
-use ui::{KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*};
+use ui::{
+ CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*,
+};
use util::ResultExt as _;
use workspace::{ModalView, Workspace};
@@ -251,6 +251,7 @@ pub struct ConfigureContextServerModal {
workspace: WeakEntity<Workspace>,
source: ConfigurationSource,
state: State,
+ original_server_id: Option<ContextServerId>,
}
impl ConfigureContextServerModal {
@@ -261,7 +262,6 @@ impl ConfigureContextServerModal {
_cx: &mut Context<Workspace>,
) {
workspace.register_action({
- let language_registry = language_registry.clone();
move |_workspace, _: &AddContextServer, window, cx| {
let workspace_handle = cx.weak_entity();
let language_registry = language_registry.clone();
@@ -349,6 +349,11 @@ impl ConfigureContextServerModal {
context_server_store,
workspace: workspace_handle,
state: State::Idle,
+ original_server_id: match &target {
+ ConfigurationTarget::Existing { id, .. } => Some(id.clone()),
+ ConfigurationTarget::Extension { id, .. } => Some(id.clone()),
+ ConfigurationTarget::New => None,
+ },
source: ConfigurationSource::from_target(
target,
language_registry,
@@ -416,9 +421,19 @@ impl ConfigureContextServerModal {
// When we write the settings to the file, the context server will be restarted.
workspace.update(cx, |workspace, cx| {
let fs = workspace.app_state().fs.clone();
- update_settings_file::<ProjectSettings>(fs.clone(), cx, |project_settings, _| {
- project_settings.context_servers.insert(id.0, settings);
- });
+ let original_server_id = self.original_server_id.clone();
+ update_settings_file::<ProjectSettings>(
+ fs.clone(),
+ cx,
+ move |project_settings, _| {
+ if let Some(original_id) = original_server_id {
+ if original_id != id {
+ project_settings.context_servers.remove(&original_id.0);
+ }
+ }
+ project_settings.context_servers.insert(id.0, settings);
+ },
+ );
});
} else if let Some(existing_server) = existing_server {
self.context_server_store
@@ -487,7 +502,7 @@ impl ConfigureContextServerModal {
}
fn render_modal_description(&self, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
- const MODAL_DESCRIPTION: &'static str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables.";
+ const MODAL_DESCRIPTION: &str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables.";
if let ConfigurationSource::Extension {
installation_instructions: Some(installation_instructions),
@@ -639,11 +654,7 @@ impl ConfigureContextServerModal {
Icon::new(IconName::ArrowCircle)
.size(IconSize::XSmall)
.color(Color::Info)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
+ .with_rotate_animation(2)
.into_any_element(),
)
.child(
@@ -464,7 +464,7 @@ impl ManageProfilesModal {
},
))
.child(ListSeparator)
- .child(h_flex().p_2().child(mode.name_editor.clone()))
+ .child(h_flex().p_2().child(mode.name_editor))
}
fn render_view_profile(
@@ -10,12 +10,12 @@ use editor::{
Direction, Editor, EditorEvent, EditorSettings, MultiBuffer, MultiBufferSnapshot,
SelectionEffects, ToPoint,
actions::{GoToHunk, GoToPreviousHunk},
+ multibuffer_context_lines,
scroll::Autoscroll,
};
use gpui::{
- Action, Animation, AnimationExt, AnyElement, AnyView, App, AppContext, Empty, Entity,
- EventEmitter, FocusHandle, Focusable, Global, SharedString, Subscription, Task, Transformation,
- WeakEntity, Window, percentage, prelude::*,
+ Action, AnyElement, AnyView, App, AppContext, Empty, Entity, EventEmitter, FocusHandle,
+ Focusable, Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*,
};
use language::{Buffer, Capability, DiskState, OffsetRangeExt, Point};
@@ -28,9 +28,8 @@ use std::{
collections::hash_map::Entry,
ops::Range,
sync::Arc,
- time::Duration,
};
-use ui::{IconButtonShape, KeyBinding, Tooltip, prelude::*, vertical_divider};
+use ui::{CommonAnimationExt, IconButtonShape, KeyBinding, Tooltip, prelude::*, vertical_divider};
use util::ResultExt;
use workspace::{
Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
@@ -185,7 +184,7 @@ impl AgentDiffPane {
let focus_handle = cx.focus_handle();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
- let project = thread.project(cx).clone();
+ let project = thread.project(cx);
let editor = cx.new(|cx| {
let mut editor =
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
@@ -196,7 +195,7 @@ impl AgentDiffPane {
editor
});
- let action_log = thread.action_log(cx).clone();
+ let action_log = thread.action_log(cx);
let mut this = Self {
_subscriptions: vec![
@@ -257,7 +256,7 @@ impl AgentDiffPane {
path_key.clone(),
buffer.clone(),
diff_hunk_ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
multibuffer.add_diff(diff_handle, cx);
@@ -322,16 +321,14 @@ impl AgentDiffPane {
}
fn handle_native_thread_event(&mut self, event: &ThreadEvent, cx: &mut Context<Self>) {
- match event {
- ThreadEvent::SummaryGenerated => self.update_title(cx),
- _ => {}
+ if let ThreadEvent::SummaryGenerated = event {
+ self.update_title(cx)
}
}
fn handle_acp_thread_event(&mut self, event: &AcpThreadEvent, cx: &mut Context<Self>) {
- match event {
- AcpThreadEvent::TitleUpdated => self.update_title(cx),
- _ => {}
+ if let AcpThreadEvent::TitleUpdated = event {
+ self.update_title(cx)
}
}
@@ -1085,11 +1082,7 @@ impl Render for AgentDiffToolbar {
Icon::new(IconName::LoadCircle)
.size(IconSize::Small)
.color(Color::Accent)
- .with_animation(
- "load_circle",
- Animation::new(Duration::from_secs(3)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- ),
+ .with_rotate_animation(3),
)
.into_any();
@@ -1314,7 +1307,7 @@ impl AgentDiff {
let entity = cx.new(|_cx| Self::default());
let global = AgentDiffGlobal(entity.clone());
cx.set_global(global);
- entity.clone()
+ entity
})
}
@@ -1336,7 +1329,7 @@ impl AgentDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let action_log = thread.action_log(cx).clone();
+ let action_log = thread.action_log(cx);
let action_log_subscription = cx.observe_in(&action_log, window, {
let workspace = workspace.clone();
@@ -1524,12 +1517,18 @@ impl AgentDiff {
self.update_reviewing_editors(workspace, window, cx);
}
}
- AcpThreadEvent::Stopped | AcpThreadEvent::Error | AcpThreadEvent::ServerExited(_) => {
+ AcpThreadEvent::Stopped
+ | AcpThreadEvent::Error
+ | AcpThreadEvent::LoadError(_)
+ | AcpThreadEvent::Refusal => {
self.update_reviewing_editors(workspace, window, cx);
}
AcpThreadEvent::TitleUpdated
+ | AcpThreadEvent::TokenUsageUpdated
| AcpThreadEvent::EntriesRemoved(_)
| AcpThreadEvent::ToolAuthorizationRequired
+ | AcpThreadEvent::PromptCapabilitiesUpdated
+ | AcpThreadEvent::AvailableCommandsUpdated(_)
| AcpThreadEvent::Retry(_) => {}
}
}
@@ -1541,15 +1540,11 @@ impl AgentDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
- match event {
- workspace::Event::ItemAdded { item } => {
- if let Some(editor) = item.downcast::<Editor>()
- && let Some(buffer) = Self::full_editor_buffer(editor.read(cx), cx)
- {
- self.register_editor(workspace.downgrade(), buffer.clone(), editor, window, cx);
- }
- }
- _ => {}
+ if let workspace::Event::ItemAdded { item } = event
+ && let Some(editor) = item.downcast::<Editor>()
+ && let Some(buffer) = Self::full_editor_buffer(editor.read(cx), cx)
+ {
+ self.register_editor(workspace.downgrade(), buffer, editor, window, cx);
}
}
@@ -1648,7 +1643,7 @@ impl AgentDiff {
continue;
};
- for (weak_editor, _) in buffer_editors {
+ for weak_editor in buffer_editors.keys() {
let Some(editor) = weak_editor.upgrade() else {
continue;
};
@@ -66,10 +66,8 @@ impl AgentModelSelector {
fs.clone(),
cx,
move |settings, _cx| {
- settings.set_inline_assistant_model(
- provider.clone(),
- model_id.clone(),
- );
+ settings
+ .set_inline_assistant_model(provider.clone(), model_id);
},
);
}
@@ -4,12 +4,19 @@ use std::rc::Rc;
use std::sync::Arc;
use std::time::Duration;
+use acp_thread::AcpThread;
use agent2::{DbThreadMetadata, HistoryEntry};
use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use project::agent_server_store::{
+ AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, GEMINI_NAME,
+};
use serde::{Deserialize, Serialize};
+use zed_actions::OpenBrowser;
+use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
use crate::agent_diff::AgentDiffThread;
+use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
use crate::{
AddContextServer, AgentDiffPane, ContinueThread, ContinueWithBurnMode,
DeleteRecentlyOpenThread, ExpandMessageEditor, Follow, InlineAssistant, NewTextThread,
@@ -24,12 +31,13 @@ use crate::{
slash_command::SlashCommandCompletionProvider,
text_thread_editor::{
AgentPanelDelegate, TextThreadEditor, humanize_token_count, make_lsp_adapter_delegate,
- render_remaining_tokens,
},
thread_history::{HistoryEntryElement, ThreadHistory},
ui::{AgentOnboardingModal, EndTrialUpsell},
};
-use crate::{ExternalAgent, NewExternalAgentThread};
+use crate::{
+ ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
+};
use agent::{
Thread, ThreadError, ThreadEvent, ThreadId, ThreadSummary, TokenUsageRatio,
context_store::ContextStore,
@@ -45,7 +53,7 @@ use assistant_tool::ToolWorkingSet;
use client::{UserStore, zed_urls};
use cloud_llm_client::{CompletionIntent, Plan, UsageLimit};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
-use feature_flags::{self, AcpFeatureFlag, ClaudeCodeFeatureFlag, FeatureFlagAppExt};
+use feature_flags::{self, ClaudeCodeFeatureFlag, FeatureFlagAppExt, GeminiAndNativeFeatureFlag};
use fs::Fs;
use gpui::{
Action, Animation, AnimationExt as _, AnyElement, App, AsyncWindowContext, ClipboardItem,
@@ -53,20 +61,18 @@ use gpui::{
Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between,
};
use language::LanguageRegistry;
-use language_model::{
- ConfigurationError, ConfiguredModel, LanguageModelProviderTosView, LanguageModelRegistry,
-};
+use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry};
use project::{DisableAiSettings, Project, ProjectPath, Worktree};
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
use rules_library::{RulesLibrary, open_rules_library};
use search::{BufferSearchBar, buffer_search};
-use settings::{Settings, update_settings_file};
+use settings::{Settings, SettingsStore, update_settings_file};
use theme::ThemeSettings;
use time::UtcOffset;
use ui::utils::WithRemSize;
use ui::{
- Banner, Callout, ContextMenu, ContextMenuEntry, Divider, ElevationIndex, KeyBinding,
- PopoverMenu, PopoverMenuHandle, ProgressBar, Tab, Tooltip, prelude::*,
+ Banner, Callout, ContextMenu, ContextMenuEntry, ElevationIndex, KeyBinding, PopoverMenu,
+ PopoverMenuHandle, ProgressBar, Tab, Tooltip, prelude::*,
};
use util::ResultExt as _;
use workspace::{
@@ -75,13 +81,16 @@ use workspace::{
};
use zed_actions::{
DecreaseBufferFontSize, IncreaseBufferFontSize, ResetBufferFontSize,
- agent::{OpenOnboardingModal, OpenSettings, ResetOnboarding, ToggleModelSelector},
+ agent::{
+ OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetOnboarding,
+ ToggleModelSelector,
+ },
assistant::{OpenRulesLibrary, ToggleFocus},
};
const AGENT_PANEL_KEY: &str = "agent_panel";
-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
struct SerializedAgentPanel {
width: Option<Pixels>,
selected_agent: Option<AgentType>,
@@ -97,6 +106,16 @@ pub fn init(cx: &mut App) {
workspace.focus_panel::<AgentPanel>(window, cx);
}
})
+ .register_action(
+ |workspace, action: &NewNativeAgentThreadFromSummary, window, cx| {
+ if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.new_native_agent_thread_from_summary(action, window, cx)
+ });
+ workspace.focus_panel::<AgentPanel>(window, cx);
+ }
+ },
+ )
.register_action(|workspace, _: &OpenHistory, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
@@ -119,7 +138,7 @@ pub fn init(cx: &mut App) {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
panel.update(cx, |panel, cx| {
- panel.external_thread(action.agent, None, window, cx)
+ panel.external_thread(action.agent.clone(), None, None, window, cx)
});
}
})
@@ -189,6 +208,12 @@ pub fn init(cx: &mut App) {
.register_action(|workspace, _: &OpenOnboardingModal, window, cx| {
AgentOnboardingModal::toggle(workspace, window, cx)
})
+ .register_action(|workspace, _: &OpenAcpOnboardingModal, window, cx| {
+ AcpOnboardingModal::toggle(workspace, window, cx)
+ })
+ .register_action(|workspace, _: &OpenClaudeCodeOnboardingModal, window, cx| {
+ ClaudeCodeOnboardingModal::toggle(workspace, window, cx)
+ })
.register_action(|_workspace, _: &ResetOnboarding, window, cx| {
window.dispatch_action(workspace::RestoreBanner.boxed_clone(), cx);
window.refresh();
@@ -230,7 +255,8 @@ enum WhichFontSize {
None,
}
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+// TODO unify this with ExternalAgent
+#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
pub enum AgentType {
#[default]
Zed,
@@ -238,24 +264,40 @@ pub enum AgentType {
Gemini,
ClaudeCode,
NativeAgent,
+ Custom {
+ name: SharedString,
+ command: AgentServerCommand,
+ },
}
impl AgentType {
- fn label(self) -> impl Into<SharedString> {
+ fn label(&self) -> SharedString {
match self {
- Self::Zed | Self::TextThread => "Zed Agent",
- Self::NativeAgent => "Agent 2",
- Self::Gemini => "Google Gemini",
- Self::ClaudeCode => "Claude Code",
+ Self::Zed | Self::TextThread => "Zed Agent".into(),
+ Self::NativeAgent => "Agent 2".into(),
+ Self::Gemini => "Gemini CLI".into(),
+ Self::ClaudeCode => "Claude Code".into(),
+ Self::Custom { name, .. } => name.into(),
}
}
- fn icon(self) -> IconName {
+ fn icon(&self) -> Option<IconName> {
match self {
- Self::Zed | Self::TextThread => IconName::AiZed,
- Self::NativeAgent => IconName::ZedAssistant,
- Self::Gemini => IconName::AiGemini,
- Self::ClaudeCode => IconName::AiClaude,
+ Self::Zed | Self::NativeAgent | Self::TextThread => None,
+ Self::Gemini => Some(IconName::AiGemini),
+ Self::ClaudeCode => Some(IconName::AiClaude),
+ Self::Custom { .. } => Some(IconName::Terminal),
+ }
+ }
+}
+
+impl From<ExternalAgent> for AgentType {
+ fn from(value: ExternalAgent) -> Self {
+ match value {
+ ExternalAgent::Gemini => Self::Gemini,
+ ExternalAgent::ClaudeCode => Self::ClaudeCode,
+ ExternalAgent::Custom { name, command } => Self::Custom { name, command },
+ ExternalAgent::NativeAgent => Self::NativeAgent,
}
}
}
@@ -354,7 +396,7 @@ impl ActiveView {
Self::Thread {
change_title_editor: editor,
thread: active_thread,
- message_editor: message_editor,
+ message_editor,
_subscriptions: subscriptions,
}
}
@@ -509,7 +551,7 @@ pub struct AgentPanel {
impl AgentPanel {
fn serialize(&mut self, cx: &mut Context<Self>) {
let width = self.width;
- let selected_agent = self.selected_agent;
+ let selected_agent = self.selected_agent.clone();
self.pending_serialization = Some(cx.background_spawn(async move {
KEY_VALUE_STORE
.write_kvp(
@@ -523,6 +565,7 @@ impl AgentPanel {
anyhow::Ok(())
}));
}
+
pub fn load(
workspace: WeakEntity<Workspace>,
prompt_builder: Arc<PromptBuilder>,
@@ -567,7 +610,7 @@ impl AgentPanel {
.log_err()
.flatten()
{
- Some(serde_json::from_str::<SerializedAgentPanel>(&panel)?)
+ serde_json::from_str::<SerializedAgentPanel>(&panel).log_err()
} else {
None
};
@@ -587,11 +630,15 @@ impl AgentPanel {
panel.update(cx, |panel, cx| {
panel.width = serialized_panel.width.map(|w| w.round());
if let Some(selected_agent) = serialized_panel.selected_agent {
- panel.selected_agent = selected_agent;
+ panel.selected_agent = selected_agent.clone();
panel.new_agent_thread(selected_agent, window, cx);
}
cx.notify();
});
+ } else {
+ panel.update(cx, |panel, cx| {
+ panel.new_agent_thread(AgentType::NativeAgent, window, cx);
+ });
}
panel
})?;
@@ -648,8 +695,7 @@ impl AgentPanel {
)
});
- let acp_history_store =
- cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), [], cx));
+ let acp_history_store = cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), cx));
let acp_history = cx.new(|cx| AcpThreadHistory::new(acp_history_store.clone(), window, cx));
cx.subscribe_in(
&acp_history,
@@ -659,6 +705,7 @@ impl AgentPanel {
this.external_thread(
Some(crate::ExternalAgent::NativeAgent),
Some(thread.clone()),
+ None,
window,
cx,
);
@@ -726,7 +773,7 @@ impl AgentPanel {
let assistant_navigation_menu =
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
if let Some(panel) = panel.upgrade() {
- if cx.has_flag::<AcpFeatureFlag>() {
+ if cx.has_flag::<GeminiAndNativeFeatureFlag>() {
menu = Self::populate_recently_opened_menu_section_new(menu, panel, cx);
} else {
menu = Self::populate_recently_opened_menu_section_old(menu, panel, cx);
@@ -756,25 +803,25 @@ impl AgentPanel {
.ok();
});
- let _default_model_subscription = cx.subscribe(
- &LanguageModelRegistry::global(cx),
- |this, _, event: &language_model::Event, cx| match event {
- language_model::Event::DefaultModelChanged => match &this.active_view {
- ActiveView::Thread { thread, .. } => {
- thread
- .read(cx)
- .thread()
- .clone()
- .update(cx, |thread, cx| thread.get_or_init_configured_model(cx));
+ let _default_model_subscription =
+ cx.subscribe(
+ &LanguageModelRegistry::global(cx),
+ |this, _, event: &language_model::Event, cx| {
+ if let language_model::Event::DefaultModelChanged = event {
+ match &this.active_view {
+ ActiveView::Thread { thread, .. } => {
+ thread.read(cx).thread().clone().update(cx, |thread, cx| {
+ thread.get_or_init_configured_model(cx)
+ });
+ }
+ ActiveView::ExternalAgentThread { .. }
+ | ActiveView::TextThread { .. }
+ | ActiveView::History
+ | ActiveView::Configuration => {}
+ }
}
- ActiveView::ExternalAgentThread { .. }
- | ActiveView::TextThread { .. }
- | ActiveView::History
- | ActiveView::Configuration => {}
},
- _ => {}
- },
- );
+ );
let onboarding = cx.new(|cx| {
AgentPanelOnboarding::new(
@@ -881,7 +928,20 @@ impl AgentPanel {
}
}
+ fn active_thread_view(&self) -> Option<&Entity<AcpThreadView>> {
+ match &self.active_view {
+ ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view),
+ ActiveView::Thread { .. }
+ | ActiveView::TextThread { .. }
+ | ActiveView::History
+ | ActiveView::Configuration => None,
+ }
+ }
+
fn new_thread(&mut self, action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
+ if cx.has_flag::<GeminiAndNativeFeatureFlag>() {
+ return self.new_agent_thread(AgentType::NativeAgent, window, cx);
+ }
// Preserve chat box text when using creating new thread
let preserved_text = self
.active_message_editor()
@@ -954,13 +1014,38 @@ impl AgentPanel {
message_editor.focus_handle(cx).focus(window);
- let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
+ let thread_view = ActiveView::thread(active_thread, message_editor, window, cx);
self.set_active_view(thread_view, window, cx);
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
}
+ fn new_native_agent_thread_from_summary(
+ &mut self,
+ action: &NewNativeAgentThreadFromSummary,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(thread) = self
+ .acp_history_store
+ .read(cx)
+ .thread_from_session_id(&action.from_session_id)
+ else {
+ return;
+ };
+
+ self.external_thread(
+ Some(ExternalAgent::NativeAgent),
+ None,
+ Some(thread.clone()),
+ window,
+ cx,
+ );
+ }
+
fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ telemetry::event!("Agent Thread Started", agent = "zed-text");
+
let context = self
.context_store
.update(cx, |context_store, cx| context_store.create(cx));
@@ -982,6 +1067,11 @@ impl AgentPanel {
editor
});
+ if self.selected_agent != AgentType::TextThread {
+ self.selected_agent = AgentType::TextThread;
+ self.serialize(cx);
+ }
+
self.set_active_view(
ActiveView::prompt_editor(
context_editor.clone(),
@@ -1001,12 +1091,14 @@ impl AgentPanel {
&mut self,
agent_choice: Option<crate::ExternalAgent>,
resume_thread: Option<DbThreadMetadata>,
+ summarize_thread: Option<DbThreadMetadata>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let workspace = self.workspace.clone();
let project = self.project.clone();
let fs = self.fs.clone();
+ let is_via_collab = self.project.read(cx).is_via_collab();
const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent";
@@ -1015,21 +1107,22 @@ impl AgentPanel {
agent: crate::ExternalAgent,
}
- let thread_store = self.thread_store.clone();
- let text_thread_store = self.context_store.clone();
let history = self.acp_history_store.clone();
cx.spawn_in(window, async move |this, cx| {
let ext_agent = match agent_choice {
Some(agent) => {
- cx.background_spawn(async move {
- if let Some(serialized) =
- serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
- {
- KEY_VALUE_STORE
- .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
- .await
- .log_err();
+ cx.background_spawn({
+ let agent = agent.clone();
+ async move {
+ if let Some(serialized) =
+ serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
+ {
+ KEY_VALUE_STORE
+ .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
+ .await
+ .log_err();
+ }
}
})
.detach();
@@ -1037,26 +1130,34 @@ impl AgentPanel {
agent
}
None => {
- cx.background_spawn(async move {
- KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY)
- })
- .await
- .log_err()
- .flatten()
- .and_then(|value| {
- serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
- })
- .unwrap_or_default()
- .agent
+ if is_via_collab {
+ ExternalAgent::NativeAgent
+ } else {
+ cx.background_spawn(async move {
+ KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY)
+ })
+ .await
+ .log_err()
+ .flatten()
+ .and_then(|value| {
+ serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
+ })
+ .unwrap_or_default()
+ .agent
+ }
}
};
+ telemetry::event!("Agent Thread Started", agent = ext_agent.name());
+
let server = ext_agent.server(fs, history);
this.update_in(cx, |this, window, cx| {
match ext_agent {
- crate::ExternalAgent::Gemini | crate::ExternalAgent::NativeAgent => {
- if !cx.has_flag::<AcpFeatureFlag>() {
+ crate::ExternalAgent::Gemini
+ | crate::ExternalAgent::NativeAgent
+ | crate::ExternalAgent::Custom { .. } => {
+ if !cx.has_flag::<GeminiAndNativeFeatureFlag>() {
return;
}
}
@@ -1067,14 +1168,21 @@ impl AgentPanel {
}
}
+ let selected_agent = ext_agent.into();
+ if this.selected_agent != selected_agent {
+ this.selected_agent = selected_agent;
+ this.serialize(cx);
+ }
+
let thread_view = cx.new(|cx| {
crate::acp::AcpThreadView::new(
server,
resume_thread,
+ summarize_thread,
workspace.clone(),
project,
- thread_store.clone(),
- text_thread_store.clone(),
+ this.acp_history_store.clone(),
+ this.prompt_store.clone(),
window,
cx,
)
@@ -1161,9 +1269,15 @@ impl AgentPanel {
cx,
)
});
+
+ if self.selected_agent != AgentType::TextThread {
+ self.selected_agent = AgentType::TextThread;
+ self.serialize(cx);
+ }
+
self.set_active_view(
ActiveView::prompt_editor(
- editor.clone(),
+ editor,
self.history_store.clone(),
self.acp_history_store.clone(),
self.language_registry.clone(),
@@ -1236,7 +1350,7 @@ impl AgentPanel {
});
message_editor.focus_handle(cx).focus(window);
- let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
+ let thread_view = ActiveView::thread(active_thread, message_editor, window, cx);
self.set_active_view(thread_view, window, cx);
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
}
@@ -1393,6 +1507,7 @@ impl AgentPanel {
}
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let agent_server_store = self.project.read(cx).agent_server_store().clone();
let context_server_store = self.project.read(cx).context_server_store();
let tools = self.thread_store.read(cx).tools();
let fs = self.fs.clone();
@@ -1401,6 +1516,7 @@ impl AgentPanel {
self.configuration = Some(cx.new(|cx| {
AgentConfiguration::new(
fs,
+ agent_server_store,
context_server_store,
tools,
self.language_registry.clone(),
@@ -1497,6 +1613,14 @@ impl AgentPanel {
_ => None,
}
}
+ pub(crate) fn active_agent_thread(&self, cx: &App) -> Option<Entity<AcpThread>> {
+ match &self.active_view {
+ ActiveView::ExternalAgentThread { thread_view, .. } => {
+ thread_view.read(cx).thread().cloned()
+ }
+ _ => None,
+ }
+ }
pub(crate) fn delete_thread(
&mut self,
@@ -1517,7 +1641,7 @@ impl AgentPanel {
return;
}
- let model = thread_state.configured_model().map(|cm| cm.model.clone());
+ let model = thread_state.configured_model().map(|cm| cm.model);
if let Some(model) = model {
thread.update(cx, |active_thread, cx| {
active_thread.thread().update(cx, |thread, cx| {
@@ -1589,17 +1713,14 @@ impl AgentPanel {
let current_is_special = current_is_history || current_is_config;
let new_is_special = new_is_history || new_is_config;
- match &self.active_view {
- ActiveView::Thread { thread, .. } => {
- let thread = thread.read(cx);
- if thread.is_empty() {
- let id = thread.thread().read(cx).id().clone();
- self.history_store.update(cx, |store, cx| {
- store.remove_recently_opened_thread(id, cx);
- });
- }
+ if let ActiveView::Thread { thread, .. } = &self.active_view {
+ let thread = thread.read(cx);
+ if thread.is_empty() {
+ let id = thread.thread().read(cx).id().clone();
+ self.history_store.update(cx, |store, cx| {
+ store.remove_recently_opened_thread(id, cx);
+ });
}
- _ => {}
}
match &new_view {
@@ -1612,6 +1733,14 @@ impl AgentPanel {
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx)
}
+ });
+ self.acp_history_store.update(cx, |store, cx| {
+ if let Some(path) = context_editor.read(cx).context().read(cx).path() {
+ store.push_recently_opened_entry(
+ agent2::HistoryEntryId::TextThread(path.clone()),
+ cx,
+ )
+ }
})
}
ActiveView::ExternalAgentThread { .. } => {}
@@ -1667,7 +1796,7 @@ impl AgentPanel {
.open_thread_by_id(&id, window, cx)
.detach_and_log_err(cx),
HistoryEntryId::Context(path) => this
- .open_saved_prompt_editor(path.clone(), window, cx)
+ .open_saved_prompt_editor(path, window, cx)
.detach_and_log_err(cx),
})
.ok();
@@ -1729,6 +1858,7 @@ impl AgentPanel {
agent2::HistoryEntry::AcpThread(entry) => this.external_thread(
Some(ExternalAgent::NativeAgent),
Some(entry.clone()),
+ None,
window,
cx,
),
@@ -1762,21 +1892,8 @@ impl AgentPanel {
menu
}
- pub fn set_selected_agent(
- &mut self,
- agent: AgentType,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- if self.selected_agent != agent {
- self.selected_agent = agent;
- self.serialize(cx);
- }
- self.new_agent_thread(agent, window, cx);
- }
-
pub fn selected_agent(&self) -> AgentType {
- self.selected_agent
+ self.selected_agent.clone()
}
pub fn new_agent_thread(
@@ -1798,17 +1915,51 @@ impl AgentPanel {
AgentType::TextThread => {
window.dispatch_action(NewTextThread.boxed_clone(), cx);
}
- AgentType::NativeAgent => {
- self.external_thread(Some(crate::ExternalAgent::NativeAgent), None, window, cx)
- }
+ AgentType::NativeAgent => self.external_thread(
+ Some(crate::ExternalAgent::NativeAgent),
+ None,
+ None,
+ window,
+ cx,
+ ),
AgentType::Gemini => {
- self.external_thread(Some(crate::ExternalAgent::Gemini), None, window, cx)
+ self.external_thread(Some(crate::ExternalAgent::Gemini), None, None, window, cx)
}
AgentType::ClaudeCode => {
- self.external_thread(Some(crate::ExternalAgent::ClaudeCode), None, window, cx)
+ self.selected_agent = AgentType::ClaudeCode;
+ self.serialize(cx);
+ self.external_thread(
+ Some(crate::ExternalAgent::ClaudeCode),
+ None,
+ None,
+ window,
+ cx,
+ )
}
+ AgentType::Custom { name, command } => self.external_thread(
+ Some(crate::ExternalAgent::Custom { name, command }),
+ None,
+ None,
+ window,
+ cx,
+ ),
}
}
+
+ pub fn load_agent_thread(
+ &mut self,
+ thread: DbThreadMetadata,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.external_thread(
+ Some(ExternalAgent::NativeAgent),
+ Some(thread),
+ None,
+ window,
+ cx,
+ );
+ }
}
impl Focusable for AgentPanel {
@@ -1817,7 +1968,7 @@ impl Focusable for AgentPanel {
ActiveView::Thread { message_editor, .. } => message_editor.focus_handle(cx),
ActiveView::ExternalAgentThread { thread_view, .. } => thread_view.focus_handle(cx),
ActiveView::History => {
- if cx.has_flag::<feature_flags::AcpFeatureFlag>() {
+ if cx.has_flag::<feature_flags::GeminiAndNativeFeatureFlag>() {
self.acp_history.focus_handle(cx)
} else {
self.history.focus_handle(cx)
@@ -1944,11 +2095,13 @@ impl AgentPanel {
};
match state {
- ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT.clone())
+ ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT)
.truncate()
+ .color(Color::Muted)
.into_any_element(),
ThreadSummary::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER)
.truncate()
+ .color(Color::Muted)
.into_any_element(),
ThreadSummary::Ready(_) => div()
.w_full()
@@ -1980,9 +2133,33 @@ impl AgentPanel {
}
}
ActiveView::ExternalAgentThread { thread_view } => {
- Label::new(thread_view.read(cx).title(cx))
- .truncate()
- .into_any_element()
+ if let Some(title_editor) = thread_view.read(cx).title_editor() {
+ div()
+ .w_full()
+ .on_action({
+ let thread_view = thread_view.downgrade();
+ move |_: &menu::Confirm, window, cx| {
+ if let Some(thread_view) = thread_view.upgrade() {
+ thread_view.focus_handle(cx).focus(window);
+ }
+ }
+ })
+ .on_action({
+ let thread_view = thread_view.downgrade();
+ move |_: &editor::actions::Cancel, window, cx| {
+ if let Some(thread_view) = thread_view.upgrade() {
+ thread_view.focus_handle(cx).focus(window);
+ }
+ }
+ })
+ .child(title_editor)
+ .into_any_element()
+ } else {
+ Label::new(thread_view.read(cx).title(cx))
+ .color(Color::Muted)
+ .truncate()
+ .into_any_element()
+ }
}
ActiveView::TextThread {
title_editor,
@@ -1993,6 +2170,7 @@ impl AgentPanel {
match summary {
ContextSummary::Pending => Label::new(ContextSummary::DEFAULT)
+ .color(Color::Muted)
.truncate()
.into_any_element(),
ContextSummary::Content(summary) => {
@@ -2004,6 +2182,7 @@ impl AgentPanel {
} else {
Label::new(LOADING_SUMMARY_PLACEHOLDER)
.truncate()
+ .color(Color::Muted)
.into_any_element()
}
}
@@ -2064,6 +2243,8 @@ impl AgentPanel {
"Enable Full Screen"
};
+ let selected_agent = self.selected_agent.clone();
+
PopoverMenu::new("agent-options-menu")
.trigger_with_tooltip(
IconButton::new("agent-options-menu", IconName::Ellipsis)
@@ -2084,7 +2265,6 @@ impl AgentPanel {
.anchor(Corner::TopRight)
.with_handle(self.agent_panel_menu_handle.clone())
.menu({
- let focus_handle = focus_handle.clone();
move |window, cx| {
Some(ContextMenu::build(window, cx, |mut menu, _window, _| {
menu = menu.context(focus_handle.clone());
@@ -2144,20 +2324,29 @@ impl AgentPanel {
.action("Settings", Box::new(OpenSettings))
.separator()
.action(full_screen_label, Box::new(ToggleZoom));
+
+ if selected_agent == AgentType::Gemini {
+ menu = menu.action("Reauthenticate", Box::new(ReauthenticateAgent))
+ }
+
menu
}))
}
})
}
- fn render_recent_entries_menu(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render_recent_entries_menu(
+ &self,
+ icon: IconName,
+ corner: Corner,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
let focus_handle = self.focus_handle(cx);
PopoverMenu::new("agent-nav-menu")
.trigger_with_tooltip(
- IconButton::new("agent-nav-menu", IconName::MenuAlt).icon_size(IconSize::Small),
+ IconButton::new("agent-nav-menu", icon).icon_size(IconSize::Small),
{
- let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Toggle Recent Threads",
@@ -2169,11 +2358,13 @@ impl AgentPanel {
}
},
)
- .anchor(Corner::TopLeft)
+ .anchor(corner)
.with_handle(self.assistant_navigation_menu_handle.clone())
.menu({
let menu = self.assistant_navigation_menu.clone();
move |window, cx| {
+ telemetry::event!("View Thread History Clicked");
+
if let Some(menu) = menu.as_ref() {
menu.update(cx, |_, cx| {
cx.defer_in(window, |menu, window, cx| {
@@ -2195,8 +2386,6 @@ impl AgentPanel {
this.go_back(&workspace::GoBack, window, cx);
}))
.tooltip({
- let focus_handle = focus_handle.clone();
-
move |window, cx| {
Tooltip::for_action_in("Go Back", &workspace::GoBack, &focus_handle, window, cx)
}
@@ -2222,7 +2411,6 @@ impl AgentPanel {
.anchor(Corner::TopRight)
.with_handle(self.new_thread_menu_handle.clone())
.menu({
- let focus_handle = focus_handle.clone();
move |window, cx| {
let active_thread = active_thread.clone();
Some(ContextMenu::build(window, cx, |mut menu, _window, cx| {
@@ -2296,7 +2484,9 @@ impl AgentPanel {
.pl(DynamicSpacing::Base04.rems(cx))
.child(self.render_toolbar_back_button(cx))
.into_any_element(),
- _ => self.render_recent_entries_menu(cx).into_any_element(),
+ _ => self
+ .render_recent_entries_menu(IconName::MenuAlt, Corner::TopLeft, cx)
+ .into_any_element(),
})
.child(self.render_title_view(window, cx)),
)
@@ -2319,11 +2509,14 @@ impl AgentPanel {
}
fn render_toolbar_new(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let agent_server_store = self.project.read(cx).agent_server_store().clone();
let focus_handle = self.focus_handle(cx);
let active_thread = match &self.active_view {
- ActiveView::Thread { thread, .. } => Some(thread.read(cx).thread().clone()),
- ActiveView::ExternalAgentThread { .. }
+ ActiveView::ExternalAgentThread { thread_view } => {
+ thread_view.read(cx).as_native_thread(cx)
+ }
+ ActiveView::Thread { .. }
| ActiveView::TextThread { .. }
| ActiveView::History
| ActiveView::Configuration => None,
@@ -2348,10 +2541,16 @@ impl AgentPanel {
.anchor(Corner::TopLeft)
.with_handle(self.new_thread_menu_handle.clone())
.menu({
- let focus_handle = focus_handle.clone();
let workspace = self.workspace.clone();
+ let is_via_collab = workspace
+ .update(cx, |workspace, cx| {
+ workspace.project().read(cx).is_via_collab()
+ })
+ .unwrap_or_default();
move |window, cx| {
+ telemetry::event!("New Thread Clicked");
+
let active_thread = active_thread.clone();
Some(ContextMenu::build(window, cx, |mut menu, _window, cx| {
menu = menu
@@ -2361,15 +2560,15 @@ impl AgentPanel {
let thread = active_thread.read(cx);
if !thread.is_empty() {
- let thread_id = thread.id().clone();
+ let session_id = thread.id().clone();
this.item(
ContextMenuEntry::new("New From Summary")
.icon(IconName::ThreadFromSummary)
.icon_color(Color::Muted)
.handler(move |window, cx| {
window.dispatch_action(
- Box::new(NewThread {
- from_thread_id: Some(thread_id.clone()),
+ Box::new(NewNativeAgentThreadFromSummary {
+ from_session_id: session_id.clone(),
}),
cx,
);
@@ -2381,9 +2580,9 @@ impl AgentPanel {
})
.item(
ContextMenuEntry::new("New Thread")
+ .action(NewThread::default().boxed_clone())
.icon(IconName::Thread)
.icon_color(Color::Muted)
- .action(NewThread::default().boxed_clone())
.handler({
let workspace = workspace.clone();
move |window, cx| {
@@ -2393,8 +2592,8 @@ impl AgentPanel {
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
- panel.set_selected_agent(
- AgentType::Zed,
+ panel.new_agent_thread(
+ AgentType::NativeAgent,
window,
cx,
);
@@ -2419,7 +2618,7 @@ impl AgentPanel {
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
- panel.set_selected_agent(
+ panel.new_agent_thread(
AgentType::TextThread,
window,
cx,
@@ -2431,38 +2630,14 @@ impl AgentPanel {
}
}),
)
- .item(
- ContextMenuEntry::new("New Native Agent Thread")
- .icon(IconName::ZedAssistant)
- .icon_color(Color::Muted)
- .handler({
- let workspace = workspace.clone();
- move |window, cx| {
- if let Some(workspace) = workspace.upgrade() {
- workspace.update(cx, |workspace, cx| {
- if let Some(panel) =
- workspace.panel::<AgentPanel>(cx)
- {
- panel.update(cx, |panel, cx| {
- panel.set_selected_agent(
- AgentType::NativeAgent,
- window,
- cx,
- );
- });
- }
- });
- }
- }
- }),
- )
.separator()
.header("External Agents")
- .when(cx.has_flag::<AcpFeatureFlag>(), |menu| {
+ .when(cx.has_flag::<GeminiAndNativeFeatureFlag>(), |menu| {
menu.item(
- ContextMenuEntry::new("New Gemini Thread")
+ ContextMenuEntry::new("New Gemini CLI Thread")
.icon(IconName::AiGemini)
.icon_color(Color::Muted)
+ .disabled(is_via_collab)
.handler({
let workspace = workspace.clone();
move |window, cx| {
@@ -34,12 +34,13 @@ use client::Client;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt as _;
use fs::Fs;
-use gpui::{Action, App, Entity, actions};
+use gpui::{Action, App, Entity, SharedString, actions};
use language::LanguageRegistry;
use language_model::{
ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry,
};
use project::DisableAiSettings;
+use project::agent_server_store::AgentServerCommand;
use prompt_store::PromptBuilder;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -128,6 +129,12 @@ actions!(
]
);
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Action)]
+#[action(namespace = agent)]
+#[action(deprecated_aliases = ["assistant::QuoteSelection"])]
+/// Quotes the current selection in the agent panel's message editor.
+pub struct QuoteSelection;
+
/// Creates a new conversation thread, optionally based on an existing thread.
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
@@ -146,25 +153,57 @@ pub struct NewExternalAgentThread {
agent: Option<ExternalAgent>,
}
-#[derive(Default, Debug, Clone, Copy, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = agent)]
+#[serde(deny_unknown_fields)]
+pub struct NewNativeAgentThreadFromSummary {
+ from_session_id: agent_client_protocol::SessionId,
+}
+
+// TODO unify this with AgentType
+#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
enum ExternalAgent {
#[default]
Gemini,
ClaudeCode,
NativeAgent,
+ Custom {
+ name: SharedString,
+ command: AgentServerCommand,
+ },
+}
+
+fn placeholder_command() -> AgentServerCommand {
+ AgentServerCommand {
+ path: "/placeholder".into(),
+ args: vec![],
+ env: None,
+ }
}
impl ExternalAgent {
+ fn name(&self) -> &'static str {
+ match self {
+ Self::NativeAgent => "zed",
+ Self::Gemini => "gemini-cli",
+ Self::ClaudeCode => "claude-code",
+ Self::Custom { .. } => "custom",
+ }
+ }
+
pub fn server(
&self,
fs: Arc<dyn fs::Fs>,
history: Entity<agent2::HistoryStore>,
) -> Rc<dyn agent_servers::AgentServer> {
match self {
- ExternalAgent::Gemini => Rc::new(agent_servers::Gemini),
- ExternalAgent::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
- ExternalAgent::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)),
+ Self::Gemini => Rc::new(agent_servers::Gemini),
+ Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
+ Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)),
+ Self::Custom { name, command: _ } => {
+ Rc::new(agent_servers::CustomAgentServer::new(name.clone()))
+ }
}
}
}
@@ -240,12 +279,7 @@ pub fn init(
client.telemetry().clone(),
cx,
);
- terminal_inline_assistant::init(
- fs.clone(),
- prompt_builder.clone(),
- client.telemetry().clone(),
- cx,
- );
+ terminal_inline_assistant::init(fs.clone(), prompt_builder, client.telemetry().clone(), cx);
cx.observe_new(move |workspace, window, cx| {
ConfigureContextServerModal::register(workspace, language_registry.clone(), window, cx)
})
@@ -310,8 +344,7 @@ fn update_command_palette_filter(cx: &mut App) {
];
filter.show_action_types(edit_prediction_actions.iter());
- filter
- .show_action_types([TypeId::of::<zed_actions::OpenZedPredictOnboarding>()].iter());
+ filter.show_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
}
});
}
@@ -391,7 +424,6 @@ fn register_slash_commands(cx: &mut App) {
slash_command_registry.register_command(assistant_slash_commands::FetchSlashCommand, true);
cx.observe_flag::<assistant_slash_commands::StreamingExampleSlashCommandFeatureFlag, _>({
- let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry.register_command(
@@ -1129,7 +1129,7 @@ mod tests {
)
});
- let chunks_tx = simulate_response_stream(codegen.clone(), cx);
+ let chunks_tx = simulate_response_stream(&codegen, cx);
let mut new_text = concat!(
" let mut x = 0;\n",
@@ -1139,7 +1139,7 @@ mod tests {
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
- let len = rng.gen_range(1..=max_len);
+ let len = rng.random_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
new_text = suffix;
@@ -1196,7 +1196,7 @@ mod tests {
)
});
- let chunks_tx = simulate_response_stream(codegen.clone(), cx);
+ let chunks_tx = simulate_response_stream(&codegen, cx);
cx.background_executor.run_until_parked();
@@ -1208,7 +1208,7 @@ mod tests {
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
- let len = rng.gen_range(1..=max_len);
+ let len = rng.random_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
new_text = suffix;
@@ -1265,7 +1265,7 @@ mod tests {
)
});
- let chunks_tx = simulate_response_stream(codegen.clone(), cx);
+ let chunks_tx = simulate_response_stream(&codegen, cx);
cx.background_executor.run_until_parked();
@@ -1277,7 +1277,7 @@ mod tests {
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
- let len = rng.gen_range(1..=max_len);
+ let len = rng.random_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
new_text = suffix;
@@ -1334,7 +1334,7 @@ mod tests {
)
});
- let chunks_tx = simulate_response_stream(codegen.clone(), cx);
+ let chunks_tx = simulate_response_stream(&codegen, cx);
let new_text = concat!(
"func main() {\n",
"\tx := 0\n",
@@ -1391,7 +1391,7 @@ mod tests {
)
});
- let chunks_tx = simulate_response_stream(codegen.clone(), cx);
+ let chunks_tx = simulate_response_stream(&codegen, cx);
chunks_tx
.unbounded_send("let mut x = 0;\nx += 1;".to_string())
.unwrap();
@@ -1473,7 +1473,7 @@ mod tests {
}
fn simulate_response_stream(
- codegen: Entity<CodegenAlternative>,
+ codegen: &Entity<CodegenAlternative>,
cx: &mut TestAppContext,
) -> mpsc::UnboundedSender<String> {
let (chunks_tx, chunks_rx) = mpsc::unbounded();
@@ -385,12 +385,11 @@ impl ContextPicker {
}
pub fn select_first(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- match &self.mode {
- ContextPickerState::Default(entity) => entity.update(cx, |entity, cx| {
+ // Other variants already select their first entry on open automatically
+ if let ContextPickerState::Default(entity) = &self.mode {
+ entity.update(cx, |entity, cx| {
entity.select_first(&Default::default(), window, cx)
- }),
- // Other variants already select their first entry on open automatically
- _ => {}
+ })
}
}
@@ -819,13 +818,8 @@ pub fn crease_for_mention(
let render_trailer = move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
- Crease::inline(
- range,
- placeholder.clone(),
- fold_toggle("mention"),
- render_trailer,
- )
- .with_metadata(CreaseMetadata { icon_path, label })
+ Crease::inline(range, placeholder, fold_toggle("mention"), render_trailer)
+ .with_metadata(CreaseMetadata { icon_path, label })
}
fn render_fold_icon_button(
@@ -993,7 +987,8 @@ impl MentionLink {
.read(cx)
.project()
.read(cx)
- .entry_for_path(&project_path, cx)?;
+ .entry_for_path(&project_path, cx)?
+ .clone();
Some(MentionLink::File(project_path, entry))
}
Self::SYMBOL => {
@@ -13,7 +13,10 @@ use http_client::HttpClientWithUrl;
use itertools::Itertools;
use language::{Buffer, CodeLabel, HighlightId};
use lsp::CompletionContext;
-use project::{Completion, CompletionIntent, CompletionResponse, ProjectPath, Symbol, WorktreeId};
+use project::{
+ Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, ProjectPath,
+ Symbol, WorktreeId,
+};
use prompt_store::PromptStore;
use rope::Point;
use text::{Anchor, OffsetRangeExt, ToPoint};
@@ -79,8 +82,7 @@ fn search(
) -> Task<Vec<Match>> {
match mode {
Some(ContextPickerMode::File) => {
- let search_files_task =
- search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
+ let search_files_task = search_files(query, cancellation_flag, &workspace, cx);
cx.background_spawn(async move {
search_files_task
.await
@@ -91,8 +93,7 @@ fn search(
}
Some(ContextPickerMode::Symbol) => {
- let search_symbols_task =
- search_symbols(query.clone(), cancellation_flag.clone(), &workspace, cx);
+ let search_symbols_task = search_symbols(query, cancellation_flag, &workspace, cx);
cx.background_spawn(async move {
search_symbols_task
.await
@@ -108,13 +109,8 @@ fn search(
.and_then(|t| t.upgrade())
.zip(text_thread_context_store.as_ref().and_then(|t| t.upgrade()))
{
- let search_threads_task = search_threads(
- query.clone(),
- cancellation_flag.clone(),
- thread_store,
- context_store,
- cx,
- );
+ let search_threads_task =
+ search_threads(query, cancellation_flag, thread_store, context_store, cx);
cx.background_spawn(async move {
search_threads_task
.await
@@ -137,8 +133,7 @@ fn search(
Some(ContextPickerMode::Rules) => {
if let Some(prompt_store) = prompt_store.as_ref() {
- let search_rules_task =
- search_rules(query.clone(), cancellation_flag.clone(), prompt_store, cx);
+ let search_rules_task = search_rules(query, cancellation_flag, prompt_store, cx);
cx.background_spawn(async move {
search_rules_task
.await
@@ -196,7 +191,7 @@ fn search(
let executor = cx.background_executor().clone();
let search_files_task =
- search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
+ search_files(query.clone(), cancellation_flag, &workspace, cx);
let entries =
available_context_picker_entries(&prompt_store, &thread_store, &workspace, cx);
@@ -283,7 +278,7 @@ impl ContextPickerCompletionProvider {
) -> Option<Completion> {
match entry {
ContextPickerEntry::Mode(mode) => Some(Completion {
- replace_range: source_range.clone(),
+ replace_range: source_range,
new_text: format!("@{} ", mode.keyword()),
label: CodeLabel::plain(mode.label().to_string(), None),
icon_path: Some(mode.icon().path().into()),
@@ -330,9 +325,6 @@ impl ContextPickerCompletionProvider {
);
let callback = Arc::new({
- let context_store = context_store.clone();
- let selections = selections.clone();
- let selection_infos = selection_infos.clone();
move |_, window: &mut Window, cx: &mut App| {
context_store.update(cx, |context_store, cx| {
for (buffer, range) in &selections {
@@ -441,7 +433,7 @@ impl ContextPickerCompletionProvider {
excerpt_id,
source_range.start,
new_text_len - 1,
- editor.clone(),
+ editor,
context_store.clone(),
move |window, cx| match &thread_entry {
ThreadContextEntry::Thread { id, .. } => {
@@ -510,7 +502,7 @@ impl ContextPickerCompletionProvider {
excerpt_id,
source_range.start,
new_text_len - 1,
- editor.clone(),
+ editor,
context_store.clone(),
move |_, cx| {
let user_prompt_id = rules.prompt_id;
@@ -547,7 +539,7 @@ impl ContextPickerCompletionProvider {
excerpt_id,
source_range.start,
new_text_len - 1,
- editor.clone(),
+ editor,
context_store.clone(),
move |_, cx| {
let context_store = context_store.clone();
@@ -704,16 +696,16 @@ impl ContextPickerCompletionProvider {
excerpt_id,
source_range.start,
new_text_len - 1,
- editor.clone(),
+ editor,
context_store.clone(),
move |_, cx| {
let symbol = symbol.clone();
let context_store = context_store.clone();
let workspace = workspace.clone();
let result = super::symbol_context_picker::add_symbol(
- symbol.clone(),
+ symbol,
false,
- workspace.clone(),
+ workspace,
context_store.downgrade(),
cx,
);
@@ -908,6 +900,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
Ok(vec![CompletionResponse {
completions,
+ display_options: CompletionDisplayOptions::default(),
// Since this does its own filtering (see `filter_completions()` returns false),
// there is no benefit to computing whether this set of completions is incomplete.
is_incomplete: true,
@@ -1162,7 +1155,7 @@ mod tests {
impl Focusable for AtMentionEditor {
fn focus_handle(&self, cx: &App) -> FocusHandle {
- self.0.read(cx).focus_handle(cx).clone()
+ self.0.read(cx).focus_handle(cx)
}
}
@@ -1480,7 +1473,7 @@ mod tests {
let completions = editor.current_completions().expect("Missing completions");
completions
.into_iter()
- .map(|completion| completion.label.text.to_string())
+ .map(|completion| completion.label.text)
.collect::<Vec<_>>()
}
@@ -144,7 +144,8 @@ impl InlineAssistant {
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
return;
};
- let enabled = AgentSettings::get_global(cx).enabled;
+ let enabled = !DisableAiSettings::get_global(cx).disable_ai
+ && AgentSettings::get_global(cx).enabled;
terminal_panel.update(cx, |terminal_panel, cx| {
terminal_panel.set_assistant_enabled(enabled, cx)
});
@@ -1532,13 +1533,11 @@ impl InlineAssistant {
.and_then(|item| item.act_as::<Editor>(cx))
{
Some(InlineAssistTarget::Editor(workspace_editor))
- } else if let Some(terminal_view) = workspace
- .active_item(cx)
- .and_then(|item| item.act_as::<TerminalView>(cx))
- {
- Some(InlineAssistTarget::Terminal(terminal_view))
} else {
- None
+ workspace
+ .active_item(cx)
+ .and_then(|item| item.act_as::<TerminalView>(cx))
+ .map(InlineAssistTarget::Terminal)
}
}
}
@@ -1693,7 +1692,7 @@ impl InlineAssist {
}),
range,
codegen: codegen.clone(),
- workspace: workspace.clone(),
+ workspace,
_subscriptions: vec![
window.on_focus_in(&prompt_editor_focus_handle, cx, move |_, cx| {
InlineAssistant::update_global(cx, |this, cx| {
@@ -93,8 +93,8 @@ impl<T: 'static> Render for PromptEditor<T> {
};
let bottom_padding = match &self.mode {
- PromptEditorMode::Buffer { .. } => Pixels::from(0.),
- PromptEditorMode::Terminal { .. } => Pixels::from(8.0),
+ PromptEditorMode::Buffer { .. } => rems_from_px(2.0),
+ PromptEditorMode::Terminal { .. } => rems_from_px(8.0),
};
buttons.extend(self.render_buttons(window, cx));
@@ -334,7 +334,7 @@ impl<T: 'static> PromptEditor<T> {
EditorEvent::Edited { .. } => {
if let Some(workspace) = window.root::<Workspace>().flatten() {
workspace.update(cx, |workspace, cx| {
- let is_via_ssh = workspace.project().read(cx).is_via_ssh();
+ let is_via_ssh = workspace.project().read(cx).is_via_remote_server();
workspace
.client()
@@ -762,20 +762,22 @@ impl<T: 'static> PromptEditor<T> {
)
}
- fn render_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
- let font_size = TextSize::Default.rems(cx);
- let line_height = font_size.to_pixels(window.rem_size()) * 1.3;
+ fn render_editor(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
+ let colors = cx.theme().colors();
div()
.key_context("InlineAssistEditor")
.size_full()
.p_2()
.pl_1()
- .bg(cx.theme().colors().editor_background)
+ .bg(colors.editor_background)
.child({
let settings = ThemeSettings::get_global(cx);
+ let font_size = settings.buffer_font_size(cx);
+ let line_height = font_size * 1.2;
+
let text_style = TextStyle {
- color: cx.theme().colors().editor_foreground,
+ color: colors.editor_foreground,
font_family: settings.buffer_font.family.clone(),
font_features: settings.buffer_font.features.clone(),
font_size: font_size.into(),
@@ -786,7 +788,7 @@ impl<T: 'static> PromptEditor<T> {
EditorElement::new(
&self.editor,
EditorStyle {
- background: cx.theme().colors().editor_background,
+ background: colors.editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
@@ -1229,27 +1231,27 @@ pub enum GenerationMode {
impl GenerationMode {
fn start_label(self) -> &'static str {
match self {
- GenerationMode::Generate { .. } => "Generate",
+ GenerationMode::Generate => "Generate",
GenerationMode::Transform => "Transform",
}
}
fn tooltip_interrupt(self) -> &'static str {
match self {
- GenerationMode::Generate { .. } => "Interrupt Generation",
+ GenerationMode::Generate => "Interrupt Generation",
GenerationMode::Transform => "Interrupt Transform",
}
}
fn tooltip_restart(self) -> &'static str {
match self {
- GenerationMode::Generate { .. } => "Restart Generation",
+ GenerationMode::Generate => "Restart Generation",
GenerationMode::Transform => "Restart Transform",
}
}
fn tooltip_accept(self) -> &'static str {
match self {
- GenerationMode::Generate { .. } => "Accept Generation",
+ GenerationMode::Generate => "Accept Generation",
GenerationMode::Transform => "Accept Transform",
}
}
@@ -93,7 +93,7 @@ impl LanguageModelPickerDelegate {
let entries = models.entries();
Self {
- on_model_changed: on_model_changed.clone(),
+ on_model_changed,
all_models: Arc::new(models),
selected_index: Self::get_active_model_index(&entries, get_active_model(cx)),
filtered_entries: entries,
@@ -514,7 +514,7 @@ impl PickerDelegate for LanguageModelPickerDelegate {
.pl_0p5()
.gap_1p5()
.w(px(240.))
- .child(Label::new(model_info.model.name().0.clone()).truncate()),
+ .child(Label::new(model_info.model.name().0).truncate()),
)
.end_slot(div().pr_3().when(is_selected, |this| {
this.child(
@@ -117,7 +117,7 @@ pub(crate) fn create_editor(
let mut editor = Editor::new(
editor::EditorMode::AutoHeight {
min_lines,
- max_lines: max_lines,
+ max_lines,
},
buffer,
None,
@@ -125,6 +125,7 @@ pub(crate) fn create_editor(
cx,
);
editor.set_placeholder_text("Message the agent – @ to include context", cx);
+ editor.disable_word_completions();
editor.set_show_indent_guides(false, cx);
editor.set_soft_wrap();
editor.set_use_modal_editing(true);
@@ -215,9 +216,10 @@ impl MessageEditor {
let subscriptions = vec![
cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event),
- cx.subscribe(&editor, |this, _, event, cx| match event {
- EditorEvent::BufferEdited => this.handle_message_changed(cx),
- _ => {}
+ cx.subscribe(&editor, |this, _, event: &EditorEvent, cx| {
+ if event == &EditorEvent::BufferEdited {
+ this.handle_message_changed(cx)
+ }
}),
cx.observe(&context_store, |this, _, cx| {
// When context changes, reload it for token counting.
@@ -247,7 +249,7 @@ impl MessageEditor {
editor: editor.clone(),
project: thread.read(cx).project().clone(),
thread,
- incompatible_tools_state: incompatible_tools.clone(),
+ incompatible_tools_state: incompatible_tools,
workspace,
context_store,
prompt_store,
@@ -377,18 +379,13 @@ impl MessageEditor {
}
fn send_to_model(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let Some(ConfiguredModel { model, provider }) = self
+ let Some(ConfiguredModel { model, .. }) = self
.thread
.update(cx, |thread, cx| thread.get_or_init_configured_model(cx))
else {
return;
};
- if provider.must_accept_terms(cx) {
- cx.notify();
- return;
- }
-
let (user_message, user_message_creases) = self.editor.update(cx, |editor, cx| {
let creases = extract_message_creases(editor, cx);
let text = editor.text(cx);
@@ -838,7 +835,6 @@ impl MessageEditor {
.child(self.profile_selector.clone())
.child(self.model_selector.clone())
.map({
- let focus_handle = focus_handle.clone();
move |parent| {
if is_generating {
parent
@@ -1132,7 +1128,7 @@ impl MessageEditor {
)
.when(is_edit_changes_expanded, |parent| {
parent.child(
- v_flex().children(changed_buffers.into_iter().enumerate().flat_map(
+ v_flex().children(changed_buffers.iter().enumerate().flat_map(
|(index, (buffer, _diff))| {
let file = buffer.read(cx).file()?;
let path = file.path();
@@ -1605,7 +1601,8 @@ pub fn extract_message_creases(
.collect::<HashMap<_, _>>();
// Filter the addon's list of creases based on what the editor reports,
// since the addon might have removed creases in it.
- let creases = editor.display_map.update(cx, |display_map, cx| {
+
+ editor.display_map.update(cx, |display_map, cx| {
display_map
.snapshot(cx)
.crease_snapshot
@@ -1629,8 +1626,7 @@ pub fn extract_message_creases(
}
})
.collect()
- });
- creases
+ })
}
impl EventEmitter<MessageEditorEvent> for MessageEditor {}
@@ -1682,7 +1678,7 @@ impl Render for MessageEditor {
let has_history = self
.history_store
.as_ref()
- .and_then(|hs| hs.update(cx, |hs, cx| hs.entries(cx).len() > 0).ok())
+ .and_then(|hs| hs.update(cx, |hs, cx| !hs.entries(cx).is_empty()).ok())
.unwrap_or(false)
|| self
.thread
@@ -1695,7 +1691,7 @@ impl Render for MessageEditor {
!has_history && is_signed_out && has_configured_providers,
|this| this.child(cx.new(ApiKeysWithProviders::new)),
)
- .when(changed_buffers.len() > 0, |parent| {
+ .when(!changed_buffers.is_empty(), |parent| {
parent.child(self.render_edits_bar(&changed_buffers, window, cx))
})
.child(self.render_editor(window, cx))
@@ -1800,7 +1796,7 @@ impl AgentPreview for MessageEditor {
.bg(cx.theme().colors().panel_background)
.border_1()
.border_color(cx.theme().colors().border)
- .child(default_message_editor.clone())
+ .child(default_message_editor)
.into_any_element(),
)])
.into_any_element(),
@@ -137,12 +137,11 @@ impl ProfileSelector {
entry.handler({
let fs = self.fs.clone();
let provider = self.provider.clone();
- let profile_id = profile_id.clone();
move |_window, cx| {
update_settings_file::<AgentSettings>(fs.clone(), cx, {
let profile_id = profile_id.clone();
move |settings, _cx| {
- settings.set_profile(profile_id.clone());
+ settings.set_profile(profile_id);
}
});
@@ -175,7 +174,6 @@ impl Render for ProfileSelector {
PopoverMenu::new("profile-selector")
.trigger_with_tooltip(trigger_button, {
- let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
"Toggle Profile Menu",
@@ -7,7 +7,10 @@ use fuzzy::{StringMatchCandidate, match_strings};
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window};
use language::{Anchor, Buffer, ToPoint};
use parking_lot::Mutex;
-use project::{CompletionIntent, CompletionSource, lsp_store::CompletionDocumentation};
+use project::{
+ CompletionDisplayOptions, CompletionIntent, CompletionSource,
+ lsp_store::CompletionDocumentation,
+};
use rope::Point;
use std::{
ops::Range,
@@ -88,8 +91,6 @@ impl SlashCommandCompletionProvider {
.map(|(editor, workspace)| {
let command_name = mat.string.clone();
let command_range = command_range.clone();
- let editor = editor.clone();
- let workspace = workspace.clone();
Arc::new(
move |intent: CompletionIntent,
window: &mut Window,
@@ -135,6 +136,7 @@ impl SlashCommandCompletionProvider {
vec![project::CompletionResponse {
completions,
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}]
})
@@ -158,7 +160,7 @@ impl SlashCommandCompletionProvider {
if let Some(command) = self.slash_commands.command(command_name, cx) {
let completions = command.complete_argument(
arguments,
- new_cancel_flag.clone(),
+ new_cancel_flag,
self.workspace.clone(),
window,
cx,
@@ -239,6 +241,7 @@ impl SlashCommandCompletionProvider {
Ok(vec![project::CompletionResponse {
completions,
+ display_options: CompletionDisplayOptions::default(),
// TODO: Could have slash commands indicate whether their completions are incomplete.
is_incomplete: true,
}])
@@ -246,6 +249,7 @@ impl SlashCommandCompletionProvider {
} else {
Task::ready(Ok(vec![project::CompletionResponse {
completions: Vec::new(),
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: true,
}]))
}
@@ -307,6 +311,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
else {
return Task::ready(Ok(vec![project::CompletionResponse {
completions: Vec::new(),
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}]));
};
@@ -327,9 +327,7 @@ where
};
let picker_view = cx.new(|cx| {
- let picker =
- Picker::uniform_list(delegate, window, cx).max_height(Some(rems(20.).into()));
- picker
+ Picker::uniform_list(delegate, window, cx).max_height(Some(rems(20.).into()))
});
let handle = self
@@ -2,10 +2,11 @@ use anyhow::Result;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
/// Settings for slash commands.
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
+#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "slash_commands")]
pub struct SlashCommandSettings {
/// Settings for the `/cargo-workspace` slash command.
#[serde(default)]
@@ -21,8 +22,6 @@ pub struct CargoWorkspaceCommandSettings {
}
impl Settings for SlashCommandSettings {
- const KEY: Option<&'static str> = Some("slash_commands");
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut App) -> Result<Self> {
@@ -432,7 +432,7 @@ impl TerminalInlineAssist {
terminal: terminal.downgrade(),
prompt_editor: Some(prompt_editor.clone()),
codegen: codegen.clone(),
- workspace: workspace.clone(),
+ workspace,
context_store,
prompt_store,
_subscriptions: vec![
@@ -1,4 +1,5 @@
use crate::{
+ QuoteSelection,
language_model_selector::{LanguageModelSelector, language_model_selector},
ui::BurnModeTooltip,
};
@@ -24,8 +25,8 @@ use gpui::{
Action, Animation, AnimationExt, AnyElement, AnyView, App, ClipboardEntry, ClipboardItem,
Empty, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, Global, InteractiveElement,
IntoElement, ParentElement, Pixels, Render, RenderImage, SharedString, Size,
- StatefulInteractiveElement, Styled, Subscription, Task, Transformation, WeakEntity, actions,
- div, img, percentage, point, prelude::*, pulsating_between, size,
+ StatefulInteractiveElement, Styled, Subscription, Task, WeakEntity, actions, div, img, point,
+ prelude::*, pulsating_between, size,
};
use language::{
BufferSnapshot, LspAdapterDelegate, ToOffset,
@@ -52,8 +53,8 @@ use std::{
};
use text::SelectionGoal;
use ui::{
- ButtonLike, Disclosure, ElevationIndex, KeyBinding, PopoverMenuHandle, TintColor, Tooltip,
- prelude::*,
+ ButtonLike, CommonAnimationExt, Disclosure, ElevationIndex, KeyBinding, PopoverMenuHandle,
+ TintColor, Tooltip, prelude::*,
};
use util::{ResultExt, maybe};
use workspace::{
@@ -89,8 +90,6 @@ actions!(
CycleMessageRole,
/// Inserts the selected text into the active editor.
InsertIntoEditor,
- /// Quotes the current selection in the assistant conversation.
- QuoteSelection,
/// Splits the conversation at the current cursor position.
Split,
]
@@ -191,7 +190,6 @@ pub struct TextThreadEditor {
invoked_slash_command_creases: HashMap<InvokedSlashCommandId, CreaseId>,
_subscriptions: Vec<Subscription>,
last_error: Option<AssistError>,
- show_accept_terms: bool,
pub(crate) slash_menu_handle:
PopoverMenuHandle<Picker<slash_command_picker::SlashCommandDelegate>>,
// dragged_file_worktrees is used to keep references to worktrees that were added
@@ -290,7 +288,6 @@ impl TextThreadEditor {
invoked_slash_command_creases: HashMap::default(),
_subscriptions,
last_error: None,
- show_accept_terms: false,
slash_menu_handle: Default::default(),
dragged_file_worktrees: Vec::new(),
language_model_selector: cx.new(|cx| {
@@ -364,24 +361,12 @@ impl TextThreadEditor {
if self.sending_disabled(cx) {
return;
}
+ telemetry::event!("Agent Message Sent", agent = "zed-text");
self.send_to_model(window, cx);
}
fn send_to_model(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let provider = LanguageModelRegistry::read_global(cx)
- .default_model()
- .map(|default| default.provider);
- if provider
- .as_ref()
- .is_some_and(|provider| provider.must_accept_terms(cx))
- {
- self.show_accept_terms = true;
- cx.notify();
- return;
- }
-
self.last_error = None;
-
if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) {
let new_selection = {
let cursor = user_message
@@ -540,7 +525,7 @@ impl TextThreadEditor {
let context = self.context.read(cx);
let sections = context
.slash_command_output_sections()
- .into_iter()
+ .iter()
.filter(|section| section.is_valid(context.buffer().read(cx)))
.cloned()
.collect::<Vec<_>>();
@@ -1076,15 +1061,7 @@ impl TextThreadEditor {
Icon::new(IconName::ArrowCircle)
.size(IconSize::XSmall)
.color(Color::Info)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(
- percentage(delta),
- ))
- },
- )
+ .with_rotate_animation(2)
.into_any_element(),
);
note = Some(Self::esc_kbd(cx).into_any_element());
@@ -1237,7 +1214,7 @@ impl TextThreadEditor {
let mut new_blocks = vec![];
let mut block_index_to_message = vec![];
for message in self.context.read(cx).messages(cx) {
- if let Some(_) = blocks_to_remove.remove(&message.id) {
+ if blocks_to_remove.remove(&message.id).is_some() {
// This is an old message that we might modify.
let Some((meta, block_id)) = old_blocks.get_mut(&message.id) else {
debug_assert!(
@@ -1275,7 +1252,7 @@ impl TextThreadEditor {
context_editor_view: &Entity<TextThreadEditor>,
cx: &mut Context<Workspace>,
) -> Option<(String, bool)> {
- const CODE_FENCE_DELIMITER: &'static str = "```";
+ const CODE_FENCE_DELIMITER: &str = "```";
let context_editor = context_editor_view.read(cx).editor.clone();
context_editor.update(cx, |context_editor, cx| {
@@ -1739,7 +1716,7 @@ impl TextThreadEditor {
render_slash_command_output_toggle,
|_, _, _, _| Empty.into_any(),
)
- .with_metadata(metadata.crease.clone())
+ .with_metadata(metadata.crease)
}),
cx,
);
@@ -1810,7 +1787,7 @@ impl TextThreadEditor {
.filter_map(|(anchor, render_image)| {
const MAX_HEIGHT_IN_LINES: u32 = 8;
let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap();
- let image = render_image.clone();
+ let image = render_image;
anchor.is_valid(&buffer).then(|| BlockProperties {
placement: BlockPlacement::Above(anchor),
height: Some(MAX_HEIGHT_IN_LINES),
@@ -1872,8 +1849,55 @@ impl TextThreadEditor {
.update(cx, |context, cx| context.summarize(true, cx));
}
+ fn render_remaining_tokens(&self, cx: &App) -> Option<impl IntoElement + use<>> {
+ let (token_count_color, token_count, max_token_count, tooltip) =
+ match token_state(&self.context, cx)? {
+ TokenState::NoTokensLeft {
+ max_token_count,
+ token_count,
+ } => (
+ Color::Error,
+ token_count,
+ max_token_count,
+ Some("Token Limit Reached"),
+ ),
+ TokenState::HasMoreTokens {
+ max_token_count,
+ token_count,
+ over_warn_threshold,
+ } => {
+ let (color, tooltip) = if over_warn_threshold {
+ (Color::Warning, Some("Token Limit is Close to Exhaustion"))
+ } else {
+ (Color::Muted, None)
+ };
+ (color, token_count, max_token_count, tooltip)
+ }
+ };
+
+ Some(
+ h_flex()
+ .id("token-count")
+ .gap_0p5()
+ .child(
+ Label::new(humanize_token_count(token_count))
+ .size(LabelSize::Small)
+ .color(token_count_color),
+ )
+ .child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
+ .child(
+ Label::new(humanize_token_count(max_token_count))
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ .when_some(tooltip, |element, tooltip| {
+ element.tooltip(Tooltip::text(tooltip))
+ }),
+ )
+ }
+
fn render_send_button(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let focus_handle = self.focus_handle(cx).clone();
+ let focus_handle = self.focus_handle(cx);
let (style, tooltip) = match token_state(&self.context, cx) {
Some(TokenState::NoTokensLeft { .. }) => (
@@ -1931,7 +1955,6 @@ impl TextThreadEditor {
ConfigurationError::NoProvider
| ConfigurationError::ModelNotFound
| ConfigurationError::ProviderNotAuthenticated(_) => true,
- ConfigurationError::ProviderPendingTermsAcceptance(_) => self.show_accept_terms,
}
}
@@ -2015,7 +2038,7 @@ impl TextThreadEditor {
None => IconName::Ai,
};
- let focus_handle = self.editor().focus_handle(cx).clone();
+ let focus_handle = self.editor().focus_handle(cx);
PickerPopoverMenu::new(
self.language_model_selector.clone(),
@@ -2161,8 +2184,8 @@ impl TextThreadEditor {
/// Returns the contents of the *outermost* fenced code block that contains the given offset.
fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option<Range<usize>> {
- const CODE_BLOCK_NODE: &'static str = "fenced_code_block";
- const CODE_BLOCK_CONTENT: &'static str = "code_fence_content";
+ const CODE_BLOCK_NODE: &str = "fenced_code_block";
+ const CODE_BLOCK_CONTENT: &str = "code_fence_content";
let layer = snapshot.syntax_layers().next()?;
@@ -2436,9 +2459,14 @@ impl Render for TextThreadEditor {
)
.child(
h_flex()
- .gap_1()
- .child(self.render_language_model_selector(window, cx))
- .child(self.render_send_button(window, cx)),
+ .gap_2p5()
+ .children(self.render_remaining_tokens(cx))
+ .child(
+ h_flex()
+ .gap_1()
+ .child(self.render_language_model_selector(window, cx))
+ .child(self.render_send_button(window, cx)),
+ ),
),
)
}
@@ -2726,58 +2754,6 @@ impl FollowableItem for TextThreadEditor {
}
}
-pub fn render_remaining_tokens(
- context_editor: &Entity<TextThreadEditor>,
- cx: &App,
-) -> Option<impl IntoElement + use<>> {
- let context = &context_editor.read(cx).context;
-
- let (token_count_color, token_count, max_token_count, tooltip) = match token_state(context, cx)?
- {
- TokenState::NoTokensLeft {
- max_token_count,
- token_count,
- } => (
- Color::Error,
- token_count,
- max_token_count,
- Some("Token Limit Reached"),
- ),
- TokenState::HasMoreTokens {
- max_token_count,
- token_count,
- over_warn_threshold,
- } => {
- let (color, tooltip) = if over_warn_threshold {
- (Color::Warning, Some("Token Limit is Close to Exhaustion"))
- } else {
- (Color::Muted, None)
- };
- (color, token_count, max_token_count, tooltip)
- }
- };
-
- Some(
- h_flex()
- .id("token-count")
- .gap_0p5()
- .child(
- Label::new(humanize_token_count(token_count))
- .size(LabelSize::Small)
- .color(token_count_color),
- )
- .child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
- .child(
- Label::new(humanize_token_count(max_token_count))
- .size(LabelSize::Small)
- .color(Color::Muted),
- )
- .when_some(tooltip, |element, tooltip| {
- element.tooltip(Tooltip::text(tooltip))
- }),
- )
-}
-
enum PendingSlashCommand {}
fn invoked_slash_command_fold_placeholder(
@@ -2806,11 +2782,7 @@ fn invoked_slash_command_fold_placeholder(
.child(Label::new(format!("/{}", command.name)))
.map(|parent| match &command.status {
InvokedSlashCommandStatus::Running(_) => {
- parent.child(Icon::new(IconName::ArrowCircle).with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(4)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- ))
+ parent.child(Icon::new(IconName::ArrowCircle).with_rotate_animation(4))
}
InvokedSlashCommandStatus::Error(message) => parent.child(
Label::new(format!("error: {message}"))
@@ -3129,7 +3101,7 @@ mod tests {
let context_editor = window
.update(&mut cx, |_, window, cx| {
cx.new(|cx| {
- let editor = TextThreadEditor::for_context(
+ TextThreadEditor::for_context(
context.clone(),
fs,
workspace.downgrade(),
@@ -3137,8 +3109,7 @@ mod tests {
None,
window,
cx,
- );
- editor
+ )
})
})
.unwrap();
@@ -14,13 +14,11 @@ pub struct IncompatibleToolsState {
impl IncompatibleToolsState {
pub fn new(thread: Entity<Thread>, cx: &mut Context<Self>) -> Self {
- let _tool_working_set_subscription =
- cx.subscribe(&thread, |this, _, event, _| match event {
- ThreadEvent::ProfileChanged => {
- this.cache.clear();
- }
- _ => {}
- });
+ let _tool_working_set_subscription = cx.subscribe(&thread, |this, _, event, _| {
+ if let ThreadEvent::ProfileChanged = event {
+ this.cache.clear();
+ }
+ });
Self {
cache: HashMap::default(),
@@ -1,14 +1,18 @@
+mod acp_onboarding_modal;
mod agent_notification;
mod burn_mode_tooltip;
+mod claude_code_onboarding_modal;
mod context_pill;
mod end_trial_upsell;
-// mod new_thread_button;
mod onboarding_modal;
pub mod preview;
+mod unavailable_editing_tooltip;
+pub use acp_onboarding_modal::*;
pub use agent_notification::*;
pub use burn_mode_tooltip::*;
+pub use claude_code_onboarding_modal::*;
pub use context_pill::*;
pub use end_trial_upsell::*;
-// pub use new_thread_button::*;
pub use onboarding_modal::*;
+pub use unavailable_editing_tooltip::*;
@@ -0,0 +1,246 @@
+use client::zed_urls;
+use gpui::{
+ ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render,
+ linear_color_stop, linear_gradient,
+};
+use ui::{TintColor, Vector, VectorName, prelude::*};
+use workspace::{ModalView, Workspace};
+
+use crate::agent_panel::{AgentPanel, AgentType};
+
+macro_rules! acp_onboarding_event {
+ ($name:expr) => {
+ telemetry::event!($name, source = "ACP Onboarding");
+ };
+ ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => {
+ telemetry::event!($name, source = "ACP Onboarding", $($key $(= $value)?),+);
+ };
+}
+
+pub struct AcpOnboardingModal {
+ focus_handle: FocusHandle,
+ workspace: Entity<Workspace>,
+}
+
+impl AcpOnboardingModal {
+ pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context<Workspace>) {
+ let workspace_entity = cx.entity();
+ workspace.toggle_modal(window, cx, |_window, cx| Self {
+ workspace: workspace_entity,
+ focus_handle: cx.focus_handle(),
+ });
+ }
+
+ fn open_panel(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
+ self.workspace.update(cx, |workspace, cx| {
+ workspace.focus_panel::<AgentPanel>(window, cx);
+
+ if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.new_agent_thread(AgentType::Gemini, window, cx);
+ });
+ }
+ });
+
+ cx.emit(DismissEvent);
+
+ acp_onboarding_event!("Open Panel Clicked");
+ }
+
+ fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
+ cx.open_url(&zed_urls::external_agents_docs(cx));
+ cx.notify();
+
+ acp_onboarding_event!("Documentation Link Clicked");
+ }
+
+ fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
+ cx.emit(DismissEvent);
+ }
+}
+
+impl EventEmitter<DismissEvent> for AcpOnboardingModal {}
+
+impl Focusable for AcpOnboardingModal {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl ModalView for AcpOnboardingModal {}
+
+impl Render for AcpOnboardingModal {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let illustration_element = |label: bool, opacity: f32| {
+ h_flex()
+ .px_1()
+ .py_0p5()
+ .gap_1()
+ .rounded_sm()
+ .bg(cx.theme().colors().element_active.opacity(0.05))
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .border_dashed()
+ .child(
+ Icon::new(IconName::Stop)
+ .size(IconSize::Small)
+ .color(Color::Custom(cx.theme().colors().text_muted.opacity(0.15))),
+ )
+ .map(|this| {
+ if label {
+ this.child(
+ Label::new("Your Agent Here")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ } else {
+ this.child(
+ div().w_16().h_1().rounded_full().bg(cx
+ .theme()
+ .colors()
+ .element_active
+ .opacity(0.6)),
+ )
+ }
+ })
+ .opacity(opacity)
+ };
+
+ let illustration = h_flex()
+ .relative()
+ .h(rems_from_px(126.))
+ .bg(cx.theme().colors().editor_background)
+ .border_b_1()
+ .border_color(cx.theme().colors().border_variant)
+ .justify_center()
+ .gap_8()
+ .rounded_t_md()
+ .overflow_hidden()
+ .child(
+ div().absolute().inset_0().w(px(515.)).h(px(126.)).child(
+ Vector::new(VectorName::AcpGrid, rems_from_px(515.), rems_from_px(126.))
+ .color(ui::Color::Custom(cx.theme().colors().text.opacity(0.02))),
+ ),
+ )
+ .child(div().absolute().inset_0().size_full().bg(linear_gradient(
+ 0.,
+ linear_color_stop(
+ cx.theme().colors().elevated_surface_background.opacity(0.1),
+ 0.9,
+ ),
+ linear_color_stop(
+ cx.theme().colors().elevated_surface_background.opacity(0.),
+ 0.,
+ ),
+ )))
+ .child(
+ div()
+ .absolute()
+ .inset_0()
+ .size_full()
+ .bg(gpui::black().opacity(0.15)),
+ )
+ .child(
+ Vector::new(
+ VectorName::AcpLogoSerif,
+ rems_from_px(257.),
+ rems_from_px(47.),
+ )
+ .color(ui::Color::Custom(cx.theme().colors().text.opacity(0.8))),
+ )
+ .child(
+ v_flex()
+ .gap_1p5()
+ .child(illustration_element(false, 0.15))
+ .child(illustration_element(true, 0.3))
+ .child(
+ h_flex()
+ .pl_1()
+ .pr_2()
+ .py_0p5()
+ .gap_1()
+ .rounded_sm()
+ .bg(cx.theme().colors().element_active.opacity(0.2))
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::AiGemini)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new("New Gemini CLI Thread").size(LabelSize::Small)),
+ )
+ .child(illustration_element(true, 0.3))
+ .child(illustration_element(false, 0.15)),
+ );
+
+ let heading = v_flex()
+ .w_full()
+ .gap_1()
+ .child(
+ Label::new("Now Available")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Headline::new("Bring Your Own Agent to Zed").size(HeadlineSize::Large));
+
+ let copy = "Bring the agent of your choice to Zed via our new Agent Client Protocol (ACP), starting with Google's Gemini CLI integration.";
+
+ let open_panel_button = Button::new("open-panel", "Start with Gemini CLI")
+ .icon_size(IconSize::Indicator)
+ .style(ButtonStyle::Tinted(TintColor::Accent))
+ .full_width()
+ .on_click(cx.listener(Self::open_panel));
+
+ let docs_button = Button::new("add-other-agents", "Add Other Agents")
+ .icon(IconName::ArrowUpRight)
+ .icon_size(IconSize::Indicator)
+ .icon_color(Color::Muted)
+ .full_width()
+ .on_click(cx.listener(Self::view_docs));
+
+ let close_button = h_flex().absolute().top_2().right_2().child(
+ IconButton::new("cancel", IconName::Close).on_click(cx.listener(
+ |_, _: &ClickEvent, _window, cx| {
+ acp_onboarding_event!("Canceled", trigger = "X click");
+ cx.emit(DismissEvent);
+ },
+ )),
+ );
+
+ v_flex()
+ .id("acp-onboarding")
+ .key_context("AcpOnboardingModal")
+ .relative()
+ .w(rems(34.))
+ .h_full()
+ .elevation_3(cx)
+ .track_focus(&self.focus_handle(cx))
+ .overflow_hidden()
+ .on_action(cx.listener(Self::cancel))
+ .on_action(cx.listener(|_, _: &menu::Cancel, _window, cx| {
+ acp_onboarding_event!("Canceled", trigger = "Action");
+ cx.emit(DismissEvent);
+ }))
+ .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| {
+ this.focus_handle.focus(window);
+ }))
+ .child(illustration)
+ .child(
+ v_flex()
+ .p_4()
+ .gap_2()
+ .child(heading)
+ .child(Label::new(copy).color(Color::Muted))
+ .child(
+ v_flex()
+ .w_full()
+ .mt_2()
+ .gap_1()
+ .child(open_panel_button)
+ .child(docs_button),
+ ),
+ )
+ .child(close_button)
+ }
+}
@@ -62,6 +62,8 @@ impl AgentNotification {
app_id: Some(app_id.to_owned()),
window_min_size: None,
window_decorations: Some(WindowDecorations::Client),
+ tabbing_identifier: None,
+ ..Default::default()
}
}
}
@@ -0,0 +1,254 @@
+use client::zed_urls;
+use gpui::{
+ ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render,
+ linear_color_stop, linear_gradient,
+};
+use ui::{TintColor, Vector, VectorName, prelude::*};
+use workspace::{ModalView, Workspace};
+
+use crate::agent_panel::{AgentPanel, AgentType};
+
+macro_rules! claude_code_onboarding_event {
+ ($name:expr) => {
+ telemetry::event!($name, source = "ACP Claude Code Onboarding");
+ };
+ ($name:expr, $($key:ident $(= $value:expr)?),+ $(,)?) => {
+ telemetry::event!($name, source = "ACP Claude Code Onboarding", $($key $(= $value)?),+);
+ };
+}
+
+pub struct ClaudeCodeOnboardingModal {
+ focus_handle: FocusHandle,
+ workspace: Entity<Workspace>,
+}
+
+impl ClaudeCodeOnboardingModal {
+ pub fn toggle(workspace: &mut Workspace, window: &mut Window, cx: &mut Context<Workspace>) {
+ let workspace_entity = cx.entity();
+ workspace.toggle_modal(window, cx, |_window, cx| Self {
+ workspace: workspace_entity,
+ focus_handle: cx.focus_handle(),
+ });
+ }
+
+ fn open_panel(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context<Self>) {
+ self.workspace.update(cx, |workspace, cx| {
+ workspace.focus_panel::<AgentPanel>(window, cx);
+
+ if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.new_agent_thread(AgentType::ClaudeCode, window, cx);
+ });
+ }
+ });
+
+ cx.emit(DismissEvent);
+
+ claude_code_onboarding_event!("Open Panel Clicked");
+ }
+
+ fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
+ cx.open_url(&zed_urls::external_agents_docs(cx));
+ cx.notify();
+
+ claude_code_onboarding_event!("Documentation Link Clicked");
+ }
+
+ fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
+ cx.emit(DismissEvent);
+ }
+}
+
+impl EventEmitter<DismissEvent> for ClaudeCodeOnboardingModal {}
+
+impl Focusable for ClaudeCodeOnboardingModal {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl ModalView for ClaudeCodeOnboardingModal {}
+
+impl Render for ClaudeCodeOnboardingModal {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let illustration_element = |icon: IconName, label: Option<SharedString>, opacity: f32| {
+ h_flex()
+ .px_1()
+ .py_0p5()
+ .gap_1()
+ .rounded_sm()
+ .bg(cx.theme().colors().element_active.opacity(0.05))
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .border_dashed()
+ .child(
+ Icon::new(icon)
+ .size(IconSize::Small)
+ .color(Color::Custom(cx.theme().colors().text_muted.opacity(0.15))),
+ )
+ .map(|this| {
+ if let Some(label_text) = label {
+ this.child(
+ Label::new(label_text)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ } else {
+ this.child(
+ div().w_16().h_1().rounded_full().bg(cx
+ .theme()
+ .colors()
+ .element_active
+ .opacity(0.6)),
+ )
+ }
+ })
+ .opacity(opacity)
+ };
+
+ let illustration = h_flex()
+ .relative()
+ .h(rems_from_px(126.))
+ .bg(cx.theme().colors().editor_background)
+ .border_b_1()
+ .border_color(cx.theme().colors().border_variant)
+ .justify_center()
+ .gap_8()
+ .rounded_t_md()
+ .overflow_hidden()
+ .child(
+ div().absolute().inset_0().w(px(515.)).h(px(126.)).child(
+ Vector::new(VectorName::AcpGrid, rems_from_px(515.), rems_from_px(126.))
+ .color(ui::Color::Custom(cx.theme().colors().text.opacity(0.02))),
+ ),
+ )
+ .child(div().absolute().inset_0().size_full().bg(linear_gradient(
+ 0.,
+ linear_color_stop(
+ cx.theme().colors().elevated_surface_background.opacity(0.1),
+ 0.9,
+ ),
+ linear_color_stop(
+ cx.theme().colors().elevated_surface_background.opacity(0.),
+ 0.,
+ ),
+ )))
+ .child(
+ div()
+ .absolute()
+ .inset_0()
+ .size_full()
+ .bg(gpui::black().opacity(0.15)),
+ )
+ .child(
+ Vector::new(
+ VectorName::AcpLogoSerif,
+ rems_from_px(257.),
+ rems_from_px(47.),
+ )
+ .color(ui::Color::Custom(cx.theme().colors().text.opacity(0.8))),
+ )
+ .child(
+ v_flex()
+ .gap_1p5()
+ .child(illustration_element(IconName::Stop, None, 0.15))
+ .child(illustration_element(
+ IconName::AiGemini,
+ Some("New Gemini CLI Thread".into()),
+ 0.3,
+ ))
+ .child(
+ h_flex()
+ .pl_1()
+ .pr_2()
+ .py_0p5()
+ .gap_1()
+ .rounded_sm()
+ .bg(cx.theme().colors().element_active.opacity(0.2))
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::AiClaude)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new("New Claude Code Thread").size(LabelSize::Small)),
+ )
+ .child(illustration_element(
+ IconName::Stop,
+ Some("Your Agent Here".into()),
+ 0.3,
+ ))
+ .child(illustration_element(IconName::Stop, None, 0.15)),
+ );
+
+ let heading = v_flex()
+ .w_full()
+ .gap_1()
+ .child(
+ Label::new("Beta Release")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Headline::new("Claude Code: Natively in Zed").size(HeadlineSize::Large));
+
+ let copy = "Powered by the Agent Client Protocol, you can now run Claude Code as\na first-class citizen in Zed's agent panel.";
+
+ let open_panel_button = Button::new("open-panel", "Start with Claude Code")
+ .icon_size(IconSize::Indicator)
+ .style(ButtonStyle::Tinted(TintColor::Accent))
+ .full_width()
+ .on_click(cx.listener(Self::open_panel));
+
+ let docs_button = Button::new("add-other-agents", "Add Other Agents")
+ .icon(IconName::ArrowUpRight)
+ .icon_size(IconSize::Indicator)
+ .icon_color(Color::Muted)
+ .full_width()
+ .on_click(cx.listener(Self::view_docs));
+
+ let close_button = h_flex().absolute().top_2().right_2().child(
+ IconButton::new("cancel", IconName::Close).on_click(cx.listener(
+ |_, _: &ClickEvent, _window, cx| {
+ claude_code_onboarding_event!("Canceled", trigger = "X click");
+ cx.emit(DismissEvent);
+ },
+ )),
+ );
+
+ v_flex()
+ .id("acp-onboarding")
+ .key_context("AcpOnboardingModal")
+ .relative()
+ .w(rems(34.))
+ .h_full()
+ .elevation_3(cx)
+ .track_focus(&self.focus_handle(cx))
+ .overflow_hidden()
+ .on_action(cx.listener(Self::cancel))
+ .on_action(cx.listener(|_, _: &menu::Cancel, _window, cx| {
+ claude_code_onboarding_event!("Canceled", trigger = "Action");
+ cx.emit(DismissEvent);
+ }))
+ .on_any_mouse_down(cx.listener(|this, _: &MouseDownEvent, window, _cx| {
+ this.focus_handle.focus(window);
+ }))
+ .child(illustration)
+ .child(
+ v_flex()
+ .p_4()
+ .gap_2()
+ .child(heading)
+ .child(Label::new(copy).color(Color::Muted))
+ .child(
+ v_flex()
+ .w_full()
+ .mt_2()
+ .gap_1()
+ .child(open_panel_button)
+ .child(docs_button),
+ ),
+ )
+ .child(close_button)
+ }
+}
@@ -499,7 +499,7 @@ impl AddedContext {
let thread = handle.thread.clone();
Some(Rc::new(move |_, cx| {
let text = thread.read(cx).latest_detailed_summary_or_text();
- ContextPillHover::new_text(text.clone(), cx).into()
+ ContextPillHover::new_text(text, cx).into()
}))
},
handle: AgentContextHandle::Thread(handle),
@@ -574,7 +574,7 @@ impl AddedContext {
.unwrap_or_else(|| "Unnamed Rule".into());
Some(AddedContext {
kind: ContextKind::Rules,
- name: title.clone(),
+ name: title,
parent: None,
tooltip: None,
icon_path: None,
@@ -1,75 +0,0 @@
-use gpui::{ClickEvent, ElementId, IntoElement, ParentElement, Styled};
-use ui::prelude::*;
-
-#[derive(IntoElement)]
-pub struct NewThreadButton {
- id: ElementId,
- label: SharedString,
- icon: IconName,
- keybinding: Option<ui::KeyBinding>,
- on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
-}
-
-impl NewThreadButton {
- fn new(id: impl Into<ElementId>, label: impl Into<SharedString>, icon: IconName) -> Self {
- Self {
- id: id.into(),
- label: label.into(),
- icon,
- keybinding: None,
- on_click: None,
- }
- }
-
- fn keybinding(mut self, keybinding: Option<ui::KeyBinding>) -> Self {
- self.keybinding = keybinding;
- self
- }
-
- fn on_click<F>(mut self, handler: F) -> Self
- where
- F: Fn(&mut Window, &mut App) + 'static,
- {
- self.on_click = Some(Box::new(
- move |_: &ClickEvent, window: &mut Window, cx: &mut App| handler(window, cx),
- ));
- self
- }
-}
-
-impl RenderOnce for NewThreadButton {
- fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
- h_flex()
- .id(self.id)
- .w_full()
- .py_1p5()
- .px_2()
- .gap_1()
- .justify_between()
- .rounded_md()
- .border_1()
- .border_color(cx.theme().colors().border.opacity(0.4))
- .bg(cx.theme().colors().element_active.opacity(0.2))
- .hover(|style| {
- style
- .bg(cx.theme().colors().element_hover)
- .border_color(cx.theme().colors().border)
- })
- .child(
- h_flex()
- .gap_1p5()
- .child(
- Icon::new(self.icon)
- .size(IconSize::XSmall)
- .color(Color::Muted),
- )
- .child(Label::new(self.label).size(LabelSize::Small)),
- )
- .when_some(self.keybinding, |this, keybinding| {
- this.child(keybinding.size(rems_from_px(10.)))
- })
- .when_some(self.on_click, |this, on_click| {
- this.on_click(move |event, window, cx| on_click(event, window, cx))
- })
- }
-}
@@ -86,23 +86,18 @@ impl RenderOnce for UsageCallout {
(IconName::Warning, Severity::Warning)
};
- div()
- .border_t_1()
- .border_color(cx.theme().colors().border)
- .child(
- Callout::new()
- .icon(icon)
- .severity(severity)
- .icon(icon)
- .title(title)
- .description(message)
- .actions_slot(
- Button::new("upgrade", button_text)
- .label_size(LabelSize::Small)
- .on_click(move |_, _, cx| {
- cx.open_url(&url);
- }),
- ),
+ Callout::new()
+ .icon(icon)
+ .severity(severity)
+ .icon(icon)
+ .title(title)
+ .description(message)
+ .actions_slot(
+ Button::new("upgrade", button_text)
+ .label_size(LabelSize::Small)
+ .on_click(move |_, _, cx| {
+ cx.open_url(&url);
+ }),
)
.into_any_element()
}
@@ -0,0 +1,29 @@
+use gpui::{Context, IntoElement, Render, Window};
+use ui::{prelude::*, tooltip_container};
+
+pub struct UnavailableEditingTooltip {
+ agent_name: SharedString,
+}
+
+impl UnavailableEditingTooltip {
+ pub fn new(agent_name: SharedString) -> Self {
+ Self { agent_name }
+ }
+}
+
+impl Render for UnavailableEditingTooltip {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ tooltip_container(window, cx, |this, _, _| {
+ this.child(Label::new("Unavailable Editing")).child(
+ div().max_w_64().child(
+ Label::new(format!(
+ "Editing previous messages is not available for {} yet.",
+ self.agent_name
+ ))
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ })
+ }
+}
@@ -33,7 +33,7 @@ impl ApiKeysWithProviders {
.filter(|provider| {
provider.is_authenticated(cx) && provider.id() != ZED_CLOUD_PROVIDER_ID
})
- .map(|provider| (provider.icon(), provider.name().0.clone()))
+ .map(|provider| (provider.icon(), provider.name().0))
.collect()
}
}
@@ -50,7 +50,7 @@ impl AgentPanelOnboarding {
.filter(|provider| {
provider.is_authenticated(cx) && provider.id() != ZED_CLOUD_PROVIDER_ID
})
- .map(|provider| (provider.icon(), provider.name().0.clone()))
+ .map(|provider| (provider.icon(), provider.name().0))
.collect()
}
}
@@ -74,7 +74,7 @@ impl Render for AgentPanelOnboarding {
}),
)
.map(|this| {
- if enrolled_in_trial || is_pro_user || self.configured_providers.len() >= 1 {
+ if enrolled_in_trial || is_pro_user || !self.configured_providers.is_empty() {
this
} else {
this.child(ApiKeysWithoutProviders::new())
@@ -19,7 +19,7 @@ use std::sync::Arc;
use client::{Client, UserStore, zed_urls};
use gpui::{AnyElement, Entity, IntoElement, ParentElement};
-use ui::{Divider, RegisterComponent, TintColor, Tooltip, prelude::*};
+use ui::{Divider, RegisterComponent, Tooltip, prelude::*};
#[derive(PartialEq)]
pub enum SignInStatus {
@@ -43,12 +43,10 @@ impl From<client::Status> for SignInStatus {
#[derive(RegisterComponent, IntoElement)]
pub struct ZedAiOnboarding {
pub sign_in_status: SignInStatus,
- pub has_accepted_terms_of_service: bool,
pub plan: Option<Plan>,
pub account_too_young: bool,
pub continue_with_zed_ai: Arc<dyn Fn(&mut Window, &mut App)>,
pub sign_in: Arc<dyn Fn(&mut Window, &mut App)>,
- pub accept_terms_of_service: Arc<dyn Fn(&mut Window, &mut App)>,
pub dismiss_onboarding: Option<Arc<dyn Fn(&mut Window, &mut App)>>,
}
@@ -64,17 +62,9 @@ impl ZedAiOnboarding {
Self {
sign_in_status: status.into(),
- has_accepted_terms_of_service: store.has_accepted_terms_of_service(),
plan: store.plan(),
account_too_young: store.account_too_young(),
continue_with_zed_ai,
- accept_terms_of_service: Arc::new({
- let store = user_store.clone();
- move |_window, cx| {
- let task = store.update(cx, |store, cx| store.accept_terms_of_service(cx));
- task.detach_and_log_err(cx);
- }
- }),
sign_in: Arc::new(move |_window, cx| {
cx.spawn({
let client = client.clone();
@@ -94,42 +84,6 @@ impl ZedAiOnboarding {
self
}
- fn render_accept_terms_of_service(&self) -> AnyElement {
- v_flex()
- .gap_1()
- .w_full()
- .child(Headline::new("Accept Terms of Service"))
- .child(
- Label::new("We don’t sell your data, track you across the web, or compromise your privacy.")
- .color(Color::Muted)
- .mb_2(),
- )
- .child(
- Button::new("terms_of_service", "Review Terms of Service")
- .full_width()
- .style(ButtonStyle::Outlined)
- .icon(IconName::ArrowUpRight)
- .icon_color(Color::Muted)
- .icon_size(IconSize::Small)
- .on_click(move |_, _window, cx| {
- telemetry::event!("Review Terms of Service Clicked");
- cx.open_url(&zed_urls::terms_of_service(cx))
- }),
- )
- .child(
- Button::new("accept_terms", "Accept")
- .full_width()
- .style(ButtonStyle::Tinted(TintColor::Accent))
- .on_click({
- let callback = self.accept_terms_of_service.clone();
- move |_, window, cx| {
- telemetry::event!("Terms of Service Accepted");
- (callback)(window, cx)}
- }),
- )
- .into_any_element()
- }
-
fn render_sign_in_disclaimer(&self, _cx: &mut App) -> AnyElement {
let signing_in = matches!(self.sign_in_status, SignInStatus::SigningIn);
let plan_definitions = PlanDefinitions;
@@ -359,14 +313,10 @@ impl ZedAiOnboarding {
impl RenderOnce for ZedAiOnboarding {
fn render(self, _window: &mut ui::Window, cx: &mut App) -> impl IntoElement {
if matches!(self.sign_in_status, SignInStatus::SignedIn) {
- if self.has_accepted_terms_of_service {
- match self.plan {
- None | Some(Plan::ZedFree) => self.render_free_plan_state(cx),
- Some(Plan::ZedProTrial) => self.render_trial_state(cx),
- Some(Plan::ZedPro) => self.render_pro_plan_state(cx),
- }
- } else {
- self.render_accept_terms_of_service()
+ match self.plan {
+ None | Some(Plan::ZedFree) => self.render_free_plan_state(cx),
+ Some(Plan::ZedProTrial) => self.render_trial_state(cx),
+ Some(Plan::ZedPro) => self.render_pro_plan_state(cx),
}
} else {
self.render_sign_in_disclaimer(cx)
@@ -390,18 +340,15 @@ impl Component for ZedAiOnboarding {
fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
fn onboarding(
sign_in_status: SignInStatus,
- has_accepted_terms_of_service: bool,
plan: Option<Plan>,
account_too_young: bool,
) -> AnyElement {
ZedAiOnboarding {
sign_in_status,
- has_accepted_terms_of_service,
plan,
account_too_young,
continue_with_zed_ai: Arc::new(|_, _| {}),
sign_in: Arc::new(|_, _| {}),
- accept_terms_of_service: Arc::new(|_, _| {}),
dismiss_onboarding: None,
}
.into_any_element()
@@ -415,27 +362,23 @@ impl Component for ZedAiOnboarding {
.children(vec![
single_example(
"Not Signed-in",
- onboarding(SignInStatus::SignedOut, false, None, false),
- ),
- single_example(
- "Not Accepted ToS",
- onboarding(SignInStatus::SignedIn, false, None, false),
+ onboarding(SignInStatus::SignedOut, None, false),
),
single_example(
"Young Account",
- onboarding(SignInStatus::SignedIn, true, None, true),
+ onboarding(SignInStatus::SignedIn, None, true),
),
single_example(
"Free Plan",
- onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedFree), false),
+ onboarding(SignInStatus::SignedIn, Some(Plan::ZedFree), false),
),
single_example(
"Pro Trial",
- onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedProTrial), false),
+ onboarding(SignInStatus::SignedIn, Some(Plan::ZedProTrial), false),
),
single_example(
"Pro Plan",
- onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedPro), false),
+ onboarding(SignInStatus::SignedIn, Some(Plan::ZedPro), false),
),
])
.into_any_element(),
@@ -1,22 +1,19 @@
-use std::{sync::Arc, time::Duration};
+use std::sync::Arc;
use client::{Client, UserStore, zed_urls};
use cloud_llm_client::Plan;
-use gpui::{
- Animation, AnimationExt, AnyElement, App, Entity, IntoElement, RenderOnce, Transformation,
- Window, percentage,
-};
-use ui::{Divider, Vector, VectorName, prelude::*};
+use gpui::{AnyElement, App, Entity, IntoElement, RenderOnce, Window};
+use ui::{CommonAnimationExt, Divider, Vector, VectorName, prelude::*};
use crate::{SignInStatus, YoungAccountBanner, plan_definitions::PlanDefinitions};
#[derive(IntoElement, RegisterComponent)]
pub struct AiUpsellCard {
- pub sign_in_status: SignInStatus,
- pub sign_in: Arc<dyn Fn(&mut Window, &mut App)>,
- pub account_too_young: bool,
- pub user_plan: Option<Plan>,
- pub tab_index: Option<isize>,
+ sign_in_status: SignInStatus,
+ sign_in: Arc<dyn Fn(&mut Window, &mut App)>,
+ account_too_young: bool,
+ user_plan: Option<Plan>,
+ tab_index: Option<isize>,
}
impl AiUpsellCard {
@@ -43,6 +40,11 @@ impl AiUpsellCard {
tab_index: None,
}
}
+
+ pub fn tab_index(mut self, tab_index: Option<isize>) -> Self {
+ self.tab_index = tab_index;
+ self
+ }
}
impl RenderOnce for AiUpsellCard {
@@ -84,10 +86,16 @@ impl RenderOnce for AiUpsellCard {
)
.child(plan_definitions.free_plan());
- let grid_bg = h_flex().absolute().inset_0().w_full().h(px(240.)).child(
- Vector::new(VectorName::Grid, rems_from_px(500.), rems_from_px(240.))
- .color(Color::Custom(cx.theme().colors().border.opacity(0.05))),
- );
+ let grid_bg = h_flex()
+ .absolute()
+ .inset_0()
+ .w_full()
+ .h(px(240.))
+ .bg(gpui::pattern_slash(
+ cx.theme().colors().border.opacity(0.1),
+ 2.,
+ 25.,
+ ));
let gradient_bg = div()
.absolute()
@@ -142,11 +150,7 @@ impl RenderOnce for AiUpsellCard {
rems_from_px(72.),
)
.color(Color::Custom(cx.theme().colors().text_accent.alpha(0.3)))
- .with_animation(
- "loading_stamp",
- Animation::new(Duration::from_secs(10)).repeat(),
- |this, delta| this.transform(Transformation::rotate(percentage(delta))),
- ),
+ .with_rotate_animation(10),
);
let pro_trial_stamp = div()
@@ -1,6 +1,7 @@
use std::sync::Arc;
use client::{Client, UserStore};
+use cloud_llm_client::Plan;
use gpui::{Entity, IntoElement, ParentElement};
use ui::prelude::*;
@@ -35,6 +36,8 @@ impl EditPredictionOnboarding {
impl Render for EditPredictionOnboarding {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let is_free_plan = self.user_store.read(cx).plan() == Some(Plan::ZedFree);
+
let github_copilot = v_flex()
.gap_1()
.child(Label::new(if self.copilot_is_configured {
@@ -67,7 +70,8 @@ impl Render for EditPredictionOnboarding {
self.continue_with_zed_ai.clone(),
cx,
))
- .child(ui::Divider::horizontal())
- .child(github_copilot)
+ .when(is_free_plan, |this| {
+ this.child(ui::Divider::horizontal()).child(github_copilot)
+ })
}
}
@@ -6,7 +6,7 @@ pub struct YoungAccountBanner;
impl RenderOnce for YoungAccountBanner {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
- const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, we cannot offer plans to GitHub accounts created fewer than 30 days ago. To request an exception, reach out to billing-support@zed.dev.";
+ const YOUNG_ACCOUNT_DISCLAIMER: &str = "To prevent abuse of our service, GitHub accounts created fewer than 30 days ago are not eligible for free plan usage or Pro plan free trial. To request an exception, reach out to billing-support@zed.dev.";
let label = div()
.w_full()
@@ -363,17 +363,15 @@ pub async fn complete(
api_url: &str,
api_key: &str,
request: Request,
+ beta_headers: String,
) -> Result<Response, AnthropicError> {
let uri = format!("{api_url}/v1/messages");
- let beta_headers = Model::from_id(&request.model)
- .map(|model| model.beta_headers())
- .unwrap_or_else(|_| Model::DEFAULT_BETA_HEADERS.join(","));
let request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", beta_headers)
- .header("X-Api-Key", api_key)
+ .header("X-Api-Key", api_key.trim())
.header("Content-Type", "application/json");
let serialized_request =
@@ -409,8 +407,9 @@ pub async fn stream_completion(
api_url: &str,
api_key: &str,
request: Request,
+ beta_headers: String,
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
- stream_completion_with_rate_limit_info(client, api_url, api_key, request)
+ stream_completion_with_rate_limit_info(client, api_url, api_key, request, beta_headers)
.await
.map(|output| output.0)
}
@@ -506,6 +505,7 @@ pub async fn stream_completion_with_rate_limit_info(
api_url: &str,
api_key: &str,
request: Request,
+ beta_headers: String,
) -> Result<
(
BoxStream<'static, Result<Event, AnthropicError>>,
@@ -518,15 +518,13 @@ pub async fn stream_completion_with_rate_limit_info(
stream: true,
};
let uri = format!("{api_url}/v1/messages");
- let beta_headers = Model::from_id(&request.base.model)
- .map(|model| model.beta_headers())
- .unwrap_or_else(|_| Model::DEFAULT_BETA_HEADERS.join(","));
+
let request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", beta_headers)
- .header("X-Api-Key", api_key)
+ .header("X-Api-Key", api_key.trim())
.header("Content-Type", "application/json");
let serialized_request =
serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?;
@@ -50,8 +50,9 @@ text.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
-workspace-hack.workspace = true
workspace.workspace = true
+workspace-hack.workspace = true
+zed_env_vars.workspace = true
[dev-dependencies]
indoc.workspace = true
@@ -590,7 +590,7 @@ impl From<&Message> for MessageMetadata {
impl MessageMetadata {
pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
- let result = match &self.cache {
+ match &self.cache {
Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
cached_at,
Range {
@@ -599,8 +599,7 @@ impl MessageMetadata {
},
),
_ => false,
- };
- result
+ }
}
}
@@ -2081,15 +2080,12 @@ impl AssistantContext {
match event {
LanguageModelCompletionEvent::StatusUpdate(status_update) => {
- match status_update {
- CompletionRequestStatus::UsageUpdated { amount, limit } => {
- this.update_model_request_usage(
- amount as u32,
- limit,
- cx,
- );
- }
- _ => {}
+ if let CompletionRequestStatus::UsageUpdated { amount, limit } = status_update {
+ this.update_model_request_usage(
+ amount as u32,
+ limit,
+ cx,
+ );
}
}
LanguageModelCompletionEvent::StartMessage { .. } => {}
@@ -2286,7 +2282,7 @@ impl AssistantContext {
let mut contents = self.contents(cx).peekable();
fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
- let text: String = buffer.text_for_range(range.clone()).collect();
+ let text: String = buffer.text_for_range(range).collect();
if text.trim().is_empty() {
None
} else {
@@ -764,7 +764,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
let network = Arc::new(Mutex::new(Network::new(rng.clone())));
let mut contexts = Vec::new();
- let num_peers = rng.gen_range(min_peers..=max_peers);
+ let num_peers = rng.random_range(min_peers..=max_peers);
let context_id = ContextId::new();
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
for i in 0..num_peers {
@@ -806,10 +806,10 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
|| !network.lock().is_idle()
|| network.lock().contains_disconnected_peers()
{
- let context_index = rng.gen_range(0..contexts.len());
+ let context_index = rng.random_range(0..contexts.len());
let context = &contexts[context_index];
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..=29 if mutation_count > 0 => {
log::info!("Context {}: edit buffer", context_index);
context.update(cx, |context, cx| {
@@ -874,10 +874,10 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
merge_same_roles: true,
})];
- let num_sections = rng.gen_range(0..=3);
+ let num_sections = rng.random_range(0..=3);
let mut section_start = 0;
for _ in 0..num_sections {
- let mut section_end = rng.gen_range(section_start..=output_text.len());
+ let mut section_end = rng.random_range(section_start..=output_text.len());
while !output_text.is_char_boundary(section_end) {
section_end += 1;
}
@@ -924,7 +924,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
75..=84 if mutation_count > 0 => {
context.update(cx, |context, cx| {
if let Some(message) = context.messages(cx).choose(&mut rng) {
- let new_status = match rng.gen_range(0..3) {
+ let new_status = match rng.random_range(0..3) {
0 => MessageStatus::Done,
1 => MessageStatus::Pending,
_ => MessageStatus::Error(SharedString::from("Random error")),
@@ -971,7 +971,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
network.lock().broadcast(replica_id, ops_to_send);
context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx));
- } else if rng.gen_bool(0.1) && replica_id != 0 {
+ } else if rng.random_bool(0.1) && replica_id != 0 {
log::info!("Context {}: disconnecting", context_index);
network.lock().disconnect_peer(replica_id);
} else if network.lock().has_unreceived(replica_id) {
@@ -1321,7 +1321,7 @@ fn test_summarize_error(
fn setup_context_editor_with_fake_model(
cx: &mut TestAppContext,
) -> (Entity<AssistantContext>, Arc<FakeLanguageModel>) {
- let registry = Arc::new(LanguageRegistry::test(cx.executor().clone()));
+ let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let fake_provider = Arc::new(FakeLanguageModelProvider::default());
let fake_model = Arc::new(fake_provider.test_model());
@@ -1376,7 +1376,7 @@ fn messages_cache(
context
.read(cx)
.messages(cx)
- .map(|message| (message.id, message.cache.clone()))
+ .map(|message| (message.id, message.cache))
.collect()
}
@@ -1436,6 +1436,6 @@ impl SlashCommand for FakeSlashCommand {
sections: vec![],
run_commands_in_text: false,
}
- .to_event_stream()))
+ .into_event_stream()))
}
}
@@ -24,6 +24,7 @@ use rpc::AnyProtoClient;
use std::sync::LazyLock;
use std::{cmp::Reverse, ffi::OsStr, mem, path::Path, sync::Arc, time::Duration};
use util::{ResultExt, TryFutureExt};
+use zed_env_vars::ZED_STATELESS;
pub(crate) fn init(client: &AnyProtoClient) {
client.add_entity_message_handler(ContextStore::handle_advertise_contexts);
@@ -788,8 +789,6 @@ impl ContextStore {
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(async move |this, cx| {
- pub static ZED_STATELESS: LazyLock<bool> =
- LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty()));
if *ZED_STATELESS {
return Ok(());
}
@@ -862,7 +861,7 @@ impl ContextStore {
ContextServerStatus::Running => {
self.load_context_server_slash_commands(
server_id.clone(),
- context_server_store.clone(),
+ context_server_store,
cx,
);
}
@@ -161,7 +161,7 @@ impl SlashCommandOutput {
}
/// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
- pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
+ pub fn into_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
self.ensure_valid_section_ranges();
let mut events = Vec::new();
@@ -363,7 +363,7 @@ mod tests {
run_commands_in_text: false,
};
- let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = output.clone().into_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
@@ -386,7 +386,7 @@ mod tests {
);
let new_output =
- SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ SlashCommandOutput::from_event_stream(output.clone().into_event_stream())
.await
.unwrap();
@@ -415,7 +415,7 @@ mod tests {
run_commands_in_text: false,
};
- let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = output.clone().into_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
@@ -452,7 +452,7 @@ mod tests {
);
let new_output =
- SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ SlashCommandOutput::from_event_stream(output.clone().into_event_stream())
.await
.unwrap();
@@ -493,7 +493,7 @@ mod tests {
run_commands_in_text: false,
};
- let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
+ let events = output.clone().into_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
@@ -562,7 +562,7 @@ mod tests {
);
let new_output =
- SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
+ SlashCommandOutput::from_event_stream(output.clone().into_event_stream())
.await
.unwrap();
@@ -166,7 +166,7 @@ impl SlashCommand for ExtensionSlashCommand {
.collect(),
run_commands_in_text: false,
}
- .to_event_stream())
+ .into_event_stream())
})
}
}
@@ -150,7 +150,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
}],
run_commands_in_text: false,
}
- .to_event_stream())
+ .into_event_stream())
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
@@ -191,7 +191,7 @@ impl SlashCommand for ContextServerSlashCommand {
text: prompt,
run_commands_in_text: false,
}
- .to_event_stream())
+ .into_event_stream())
})
} else {
Task::ready(Err(anyhow!("Context server not found")))
@@ -85,7 +85,7 @@ impl SlashCommand for DefaultSlashCommand {
text,
run_commands_in_text: true,
}
- .to_event_stream())
+ .into_event_stream())
})
}
}
@@ -118,7 +118,7 @@ impl SlashCommand for DeltaSlashCommand {
}
anyhow::ensure!(changes_detected, "no new changes detected");
- Ok(output.to_event_stream())
+ Ok(output.into_event_stream())
})
}
}
@@ -44,7 +44,7 @@ impl DiagnosticsSlashCommand {
score: 0.,
positions: Vec::new(),
worktree_id: entry.worktree_id.to_usize(),
- path: entry.path.clone(),
+ path: entry.path,
path_prefix: path_prefix.clone(),
is_dir: false, // Diagnostics can't be produced for directories
distance_to_relative_ancestor: 0,
@@ -189,7 +189,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
window.spawn(cx, async move |_| {
task.await?
- .map(|output| output.to_event_stream())
+ .map(|output| output.into_event_stream())
.context("No diagnostics found")
})
}
@@ -177,7 +177,7 @@ impl SlashCommand for FetchSlashCommand {
}],
run_commands_in_text: false,
}
- .to_event_stream())
+ .into_event_stream())
})
}
}
@@ -223,7 +223,7 @@ fn collect_files(
cx: &mut App,
) -> impl Stream<Item = Result<SlashCommandEvent>> + use<> {
let Ok(matchers) = glob_inputs
- .into_iter()
+ .iter()
.map(|glob_input| {
custom_path_matcher::PathMatcher::new(&[glob_input.to_owned()])
.with_context(|| format!("invalid path {glob_input}"))
@@ -371,7 +371,7 @@ fn collect_files(
&mut output,
)
.log_err();
- let mut buffer_events = output.to_event_stream();
+ let mut buffer_events = output.into_event_stream();
while let Some(event) = buffer_events.next().await {
events_tx.unbounded_send(event)?;
}
@@ -379,7 +379,7 @@ fn collect_files(
}
}
- while let Some(_) = directory_stack.pop() {
+ while directory_stack.pop().is_some() {
events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection))?;
}
}
@@ -491,8 +491,8 @@ mod custom_path_matcher {
impl PathMatcher {
pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
let globs = globs
- .into_iter()
- .map(|glob| Glob::new(&SanitizedPath::from(glob).to_glob_string()))
+ .iter()
+ .map(|glob| Glob::new(&SanitizedPath::new(glob).to_glob_string()))
.collect::<Result<Vec<_>, _>>()?;
let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
let sources_with_trailing_slash = globs
@@ -66,6 +66,6 @@ impl SlashCommand for NowSlashCommand {
}],
run_commands_in_text: false,
}
- .to_event_stream()))
+ .into_event_stream()))
}
}
@@ -80,7 +80,7 @@ impl SlashCommand for PromptSlashCommand {
};
let store = PromptStore::global(cx);
- let title = SharedString::from(title.clone());
+ let title = SharedString::from(title);
let prompt = cx.spawn({
let title = title.clone();
async move |cx| {
@@ -117,7 +117,7 @@ impl SlashCommand for PromptSlashCommand {
}],
run_commands_in_text: true,
}
- .to_event_stream())
+ .into_event_stream())
})
}
}
@@ -92,7 +92,7 @@ impl SlashCommand for OutlineSlashCommand {
text: outline_text,
run_commands_in_text: false,
}
- .to_event_stream())
+ .into_event_stream())
})
});
@@ -157,7 +157,7 @@ impl SlashCommand for TabSlashCommand {
for (full_path, buffer, _) in tab_items_search.await? {
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
}
- Ok(output.to_event_stream())
+ Ok(output.into_event_stream())
})
}
}
@@ -156,13 +156,13 @@ fn resolve_context_server_tool_name_conflicts(
if duplicated_tool_names.is_empty() {
return context_server_tools
- .into_iter()
+ .iter()
.map(|tool| (resolve_tool_name(tool).into(), tool.clone()))
.collect();
}
context_server_tools
- .into_iter()
+ .iter()
.filter_map(|tool| {
let mut tool_name = resolve_tool_name(tool);
if !duplicated_tool_names.contains(&tool_name) {
@@ -72,11 +72,10 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
register_web_search_tool(&LanguageModelRegistry::global(cx), cx);
cx.subscribe(
&LanguageModelRegistry::global(cx),
- move |registry, event, cx| match event {
- language_model::Event::DefaultModelChanged => {
+ move |registry, event, cx| {
+ if let language_model::Event::DefaultModelChanged = event {
register_web_search_tool(®istry, cx);
}
- _ => {}
},
)
.detach();
@@ -35,7 +35,7 @@ impl Tool for DeletePathTool {
}
fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity<Project>, _: &App) -> bool {
- false
+ true
}
fn may_perform_edits(&self) -> bool {
@@ -1315,17 +1315,17 @@ mod tests {
#[gpui::test(iterations = 100)]
async fn test_random_indents(mut rng: StdRng) {
- let len = rng.gen_range(1..=100);
+ let len = rng.random_range(1..=100);
let new_text = util::RandomCharIter::new(&mut rng)
.with_simple_text()
.take(len)
.collect::<String>();
let new_text = new_text
.split('\n')
- .map(|line| format!("{}{}", " ".repeat(rng.gen_range(0..=8)), line))
+ .map(|line| format!("{}{}", " ".repeat(rng.random_range(0..=8)), line))
.collect::<Vec<_>>()
.join("\n");
- let delta = IndentDelta::Spaces(rng.gen_range(-4..=4));
+ let delta = IndentDelta::Spaces(rng.random_range(-4i8..=4i8) as isize);
let chunks = to_random_chunks(&mut rng, &new_text);
let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
@@ -1357,7 +1357,7 @@ mod tests {
}
fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec<String> {
- let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
+ let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50));
let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
chunk_indices.sort();
chunk_indices.push(input.len());
@@ -204,7 +204,7 @@ mod tests {
}
fn parse_random_chunks(input: &str, parser: &mut CreateFileParser, rng: &mut StdRng) -> String {
- let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
+ let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50));
let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
chunk_indices.sort();
chunk_indices.push(input.len());
@@ -996,7 +996,7 @@ mod tests {
}
fn parse_random_chunks(input: &str, parser: &mut EditParser, rng: &mut StdRng) -> Vec<Edit> {
- let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
+ let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50));
let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
chunk_indices.sort();
chunk_indices.push(input.len());
@@ -1153,8 +1153,7 @@ impl EvalInput {
.expect("Conversation must end with an edit_file tool use")
.clone();
- let edit_file_input: EditFileToolInput =
- serde_json::from_value(tool_use.input.clone()).unwrap();
+ let edit_file_input: EditFileToolInput = serde_json::from_value(tool_use.input).unwrap();
EvalInput {
conversation,
@@ -1400,7 +1399,7 @@ fn eval(
}
fn run_eval(eval: EvalInput, tx: mpsc::Sender<Result<EvalOutput>>) {
- let dispatcher = gpui::TestDispatcher::new(StdRng::from_entropy());
+ let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng());
let mut cx = TestAppContext::build(dispatcher, None);
let output = cx.executor().block_test(async {
let test = EditAgentTest::new(&mut cx).await;
@@ -1460,7 +1459,7 @@ impl EditAgentTest {
async fn new(cx: &mut TestAppContext) -> Self {
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
cx.update(|cx| {
settings::init(cx);
gpui_tokio::init(cx);
@@ -1475,7 +1474,7 @@ impl EditAgentTest {
Project::init_settings(cx);
language::init(cx);
language_model::init(client.clone(), cx);
- language_models::init(user_store.clone(), client.clone(), cx);
+ language_models::init(user_store, client.clone(), cx);
crate::init(client.http_client(), cx);
});
@@ -1708,7 +1707,7 @@ async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) ->
};
if let Some(retry_after) = retry_delay {
- let jitter = retry_after.mul_f64(rand::thread_rng().gen_range(0.0..1.0));
+ let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0));
eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}");
Timer::after(retry_after + jitter).await;
} else {
@@ -319,7 +319,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
assert_eq!(push(&mut finder, ""), None);
assert_eq!(finish(finder), None);
}
@@ -333,7 +333,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
// Push partial query
assert_eq!(push(&mut finder, "This"), None);
@@ -365,7 +365,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
// Push a fuzzy query that should match the first function
assert_eq!(
@@ -391,7 +391,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
// No match initially
assert_eq!(push(&mut finder, "Lin"), None);
@@ -420,7 +420,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
// Push text in small chunks across line boundaries
assert_eq!(push(&mut finder, "jumps "), None); // No newline yet
@@ -458,7 +458,7 @@ mod tests {
);
let snapshot = buffer.snapshot();
- let mut finder = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut finder = StreamingFuzzyMatcher::new(snapshot);
assert_eq!(
push(&mut finder, "impl Debug for User {\n"),
@@ -711,7 +711,7 @@ mod tests {
"Expected to match `second_function` based on the line hint"
);
- let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut matcher = StreamingFuzzyMatcher::new(snapshot);
matcher.push(query, None);
matcher.finish();
let best_match = matcher.select_best_match();
@@ -727,7 +727,7 @@ mod tests {
let buffer = TextBuffer::new(0, BufferId::new(1).unwrap(), text.clone());
let snapshot = buffer.snapshot();
- let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
+ let mut matcher = StreamingFuzzyMatcher::new(snapshot);
// Split query into random chunks
let chunks = to_random_chunks(rng, query);
@@ -771,7 +771,7 @@ mod tests {
}
fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec<String> {
- let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
+ let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50));
let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
chunk_indices.sort();
chunk_indices.push(input.len());
@@ -794,10 +794,8 @@ mod tests {
fn finish(mut finder: StreamingFuzzyMatcher) -> Option<String> {
let snapshot = finder.snapshot.clone();
let matches = finder.finish();
- if let Some(range) = matches.first() {
- Some(snapshot.text_for_range(range.clone()).collect::<String>())
- } else {
- None
- }
+ matches
+ .first()
+ .map(|range| snapshot.text_for_range(range.clone()).collect::<String>())
}
}
@@ -11,11 +11,13 @@ use assistant_tool::{
AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus,
};
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey};
+use editor::{
+ Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey, multibuffer_context_lines,
+};
use futures::StreamExt;
use gpui::{
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
- TextStyleRefinement, Transformation, WeakEntity, percentage, pulsating_between, px,
+ TextStyleRefinement, WeakEntity, pulsating_between, px,
};
use indoc::formatdoc;
use language::{
@@ -42,7 +44,7 @@ use std::{
time::Duration,
};
use theme::ThemeSettings;
-use ui::{Disclosure, Tooltip, prelude::*};
+use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*};
use util::ResultExt;
use workspace::Workspace;
@@ -376,7 +378,7 @@ impl Tool for EditFileTool {
let output = EditFileToolOutput {
original_path: project_path.path.to_path_buf(),
- new_text: new_text.clone(),
+ new_text,
old_text,
raw_output: Some(agent_output),
};
@@ -474,7 +476,7 @@ impl Tool for EditFileTool {
PathKey::for_buffer(&buffer, cx),
buffer,
diff_hunk_ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
multibuffer.add_diff(buffer_diff, cx);
@@ -643,7 +645,7 @@ impl EditFileToolCard {
diff
});
- self.buffer = Some(buffer.clone());
+ self.buffer = Some(buffer);
self.base_text = Some(base_text.into());
self.buffer_diff = Some(buffer_diff.clone());
@@ -703,7 +705,7 @@ impl EditFileToolCard {
PathKey::for_buffer(buffer, cx),
buffer.clone(),
ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
let end = multibuffer.len(cx);
@@ -776,7 +778,6 @@ impl EditFileToolCard {
let buffer_diff = cx.spawn({
let buffer = buffer.clone();
- let language_registry = language_registry.clone();
async move |_this, cx| {
build_buffer_diff(base_text, &buffer, &language_registry, cx).await
}
@@ -792,7 +793,7 @@ impl EditFileToolCard {
path_key,
buffer,
ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
multibuffer.add_diff(buffer_diff.clone(), cx);
@@ -863,7 +864,6 @@ impl ToolCard for EditFileToolCard {
)
.on_click({
let path = self.path.clone();
- let workspace = workspace.clone();
move |_, window, cx| {
workspace
.update(cx, {
@@ -939,11 +939,7 @@ impl ToolCard for EditFileToolCard {
Icon::new(IconName::ArrowCircle)
.size(IconSize::XSmall)
.color(Color::Info)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- ),
+ .with_rotate_animation(2),
)
})
.when_some(error_message, |header, error_message| {
@@ -1356,8 +1352,7 @@ mod tests {
mode: mode.clone(),
};
- let result = cx.update(|cx| resolve_path(&input, project, cx));
- result
+ cx.update(|cx| resolve_path(&input, project, cx))
}
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
@@ -118,7 +118,7 @@ impl Tool for FetchTool {
}
fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity<Project>, _: &App) -> bool {
- false
+ true
}
fn may_perform_edits(&self) -> bool {
@@ -234,7 +234,7 @@ impl ToolCard for FindPathToolCard {
workspace: WeakEntity<Workspace>,
cx: &mut Context<Self>,
) -> impl IntoElement {
- let matches_label: SharedString = if self.paths.len() == 0 {
+ let matches_label: SharedString = if self.paths.is_empty() {
"No matches".into()
} else if self.paths.len() == 1 {
"1 match".into()
@@ -435,8 +435,8 @@ mod test {
assert_eq!(
matches,
&[
- PathBuf::from("root/apple/banana/carrot"),
- PathBuf::from("root/apple/bandana/carbonara")
+ PathBuf::from(path!("root/apple/banana/carrot")),
+ PathBuf::from(path!("root/apple/bandana/carbonara"))
]
);
@@ -447,8 +447,8 @@ mod test {
assert_eq!(
matches,
&[
- PathBuf::from("root/apple/banana/carrot"),
- PathBuf::from("root/apple/bandana/carbonara")
+ PathBuf::from(path!("root/apple/banana/carrot")),
+ PathBuf::from(path!("root/apple/bandana/carbonara"))
]
);
}
@@ -327,7 +327,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
serde_json::json!({
@@ -415,7 +415,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
serde_json::json!({
@@ -494,7 +494,7 @@ mod tests {
init_test(cx);
cx.executor().allow_parking();
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
// Create test file with syntax structures
fs.insert_tree(
@@ -68,7 +68,7 @@ impl Tool for ReadFileTool {
}
fn icon(&self) -> IconName {
- IconName::ToolRead
+ IconName::ToolSearch
}
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
@@ -8,14 +8,14 @@ use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{Tool, ToolCard, ToolResult, ToolUseStatus};
use futures::{FutureExt as _, future::Shared};
use gpui::{
- Animation, AnimationExt, AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task,
- TextStyleRefinement, Transformation, WeakEntity, Window, percentage,
+ AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task, TextStyleRefinement,
+ WeakEntity, Window,
};
use language::LineEnding;
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
-use project::{Project, terminals::TerminalKind};
+use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
@@ -28,7 +28,7 @@ use std::{
};
use terminal_view::TerminalView;
use theme::ThemeSettings;
-use ui::{Disclosure, Tooltip, prelude::*};
+use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*};
use util::{
ResultExt, get_system_shell, markdown::MarkdownInlineCode, size::format_file_size,
time::duration_alt_display,
@@ -59,12 +59,9 @@ impl TerminalTool {
}
if which::which("bash").is_ok() {
- log::info!("agent selected bash for terminal tool");
"bash".into()
} else {
- let shell = get_system_shell();
- log::info!("agent selected {shell} for terminal tool");
- shell
+ get_system_shell()
}
});
Self {
@@ -216,21 +213,20 @@ impl Tool for TerminalTool {
async move |cx| {
let program = program.await;
let env = env.await;
- let terminal = project
+ project
.update(cx, |project, cx| {
- project.create_terminal(
- TerminalKind::Task(task::SpawnInTerminal {
+ project.create_terminal_task(
+ task::SpawnInTerminal {
command: Some(program),
args,
cwd,
env,
..Default::default()
- }),
+ },
cx,
)
})?
- .await;
- terminal
+ .await
}
});
@@ -353,7 +349,7 @@ fn process_content(
if is_empty {
"Command executed successfully.".to_string()
} else {
- content.to_string()
+ content
}
}
Some(exit_status) => {
@@ -526,11 +522,7 @@ impl ToolCard for TerminalToolCard {
Icon::new(IconName::ArrowCircle)
.size(IconSize::XSmall)
.color(Color::Info)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- ),
+ .with_rotate_animation(2),
)
})
.when(tool_failed || command_failed, |header| {
@@ -101,14 +101,11 @@ impl RenderOnce for ToolCallCardHeader {
})
.when_some(secondary_text, |this, secondary_text| {
this.child(bullet_divider())
- .child(div().text_size(font_size).child(secondary_text.clone()))
+ .child(div().text_size(font_size).child(secondary_text))
})
.when_some(code_path, |this, code_path| {
- this.child(bullet_divider()).child(
- Label::new(code_path.clone())
- .size(LabelSize::Small)
- .inline_code(cx),
- )
+ this.child(bullet_divider())
+ .child(Label::new(code_path).size(LabelSize::Small).inline_code(cx))
})
.with_animation(
"loading-label",
@@ -193,10 +193,7 @@ impl ToolCard for WebSearchToolCard {
)
}
})
- .on_click({
- let url = url.clone();
- move |_, _, cx| cx.open_url(&url)
- })
+ .on_click(move |_, _, cx| cx.open_url(&url))
}))
.into_any(),
),
@@ -15,9 +15,10 @@ doctest = false
[dependencies]
anyhow.workspace = true
collections.workspace = true
-derive_more.workspace = true
gpui.workspace = true
-parking_lot.workspace = true
+settings.workspace = true
+schemars.workspace = true
+serde.workspace = true
rodio = { workspace = true, features = [ "wav", "playback", "tracing" ] }
util.workspace = true
workspace-hack.workspace = true
@@ -1,54 +0,0 @@
-use std::{io::Cursor, sync::Arc};
-
-use anyhow::{Context as _, Result};
-use collections::HashMap;
-use gpui::{App, AssetSource, Global};
-use rodio::{Decoder, Source, source::Buffered};
-
-type Sound = Buffered<Decoder<Cursor<Vec<u8>>>>;
-
-pub struct SoundRegistry {
- cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
- assets: Box<dyn AssetSource>,
-}
-
-struct GlobalSoundRegistry(Arc<SoundRegistry>);
-
-impl Global for GlobalSoundRegistry {}
-
-impl SoundRegistry {
- pub fn new(source: impl AssetSource) -> Arc<Self> {
- Arc::new(Self {
- cache: Default::default(),
- assets: Box::new(source),
- })
- }
-
- pub fn global(cx: &App) -> Arc<Self> {
- cx.global::<GlobalSoundRegistry>().0.clone()
- }
-
- pub(crate) fn set_global(source: impl AssetSource, cx: &mut App) {
- cx.set_global(GlobalSoundRegistry(SoundRegistry::new(source)));
- }
-
- pub fn get(&self, name: &str) -> Result<impl Source<Item = f32> + use<>> {
- if let Some(wav) = self.cache.lock().get(name) {
- return Ok(wav.clone());
- }
-
- let path = format!("sounds/{}.wav", name);
- let bytes = self
- .assets
- .load(&path)?
- .map(anyhow::Ok)
- .with_context(|| format!("No asset available for path {path}"))??
- .into_owned();
- let cursor = Cursor::new(bytes);
- let source = Decoder::new(cursor)?.buffered();
-
- self.cache.lock().insert(name.to_string(), source.clone());
-
- Ok(source)
- }
-}
@@ -1,16 +1,19 @@
-use assets::SoundRegistry;
-use derive_more::{Deref, DerefMut};
-use gpui::{App, AssetSource, BorrowAppContext, Global};
-use rodio::{OutputStream, OutputStreamBuilder};
+use anyhow::{Context as _, Result, anyhow};
+use collections::HashMap;
+use gpui::{App, BorrowAppContext, Global};
+use rodio::{Decoder, OutputStream, OutputStreamBuilder, Source, source::Buffered};
+use settings::Settings;
+use std::io::Cursor;
use util::ResultExt;
-mod assets;
+mod audio_settings;
+pub use audio_settings::AudioSettings;
-pub fn init(source: impl AssetSource, cx: &mut App) {
- SoundRegistry::set_global(source, cx);
- cx.set_global(GlobalAudio(Audio::new()));
+pub fn init(cx: &mut App) {
+ AudioSettings::register(cx);
}
+#[derive(Copy, Clone, Eq, Hash, PartialEq)]
pub enum Sound {
Joined,
Leave,
@@ -38,18 +41,12 @@ impl Sound {
#[derive(Default)]
pub struct Audio {
output_handle: Option<OutputStream>,
+ source_cache: HashMap<Sound, Buffered<Decoder<Cursor<Vec<u8>>>>>,
}
-#[derive(Deref, DerefMut)]
-struct GlobalAudio(Audio);
-
-impl Global for GlobalAudio {}
+impl Global for Audio {}
impl Audio {
- pub fn new() -> Self {
- Self::default()
- }
-
fn ensure_output_exists(&mut self) -> Option<&OutputStream> {
if self.output_handle.is_none() {
self.output_handle = OutputStreamBuilder::open_default_stream().log_err();
@@ -58,26 +55,51 @@ impl Audio {
self.output_handle.as_ref()
}
- pub fn play_sound(sound: Sound, cx: &mut App) {
- if !cx.has_global::<GlobalAudio>() {
- return;
- }
+ pub fn play_source(
+ source: impl rodio::Source + Send + 'static,
+ cx: &mut App,
+ ) -> anyhow::Result<()> {
+ cx.update_default_global(|this: &mut Self, _cx| {
+ let output_handle = this
+ .ensure_output_exists()
+ .ok_or_else(|| anyhow!("Could not open audio output"))?;
+ output_handle.mixer().add(source);
+ Ok(())
+ })
+ }
- cx.update_global::<GlobalAudio, _>(|this, cx| {
+ pub fn play_sound(sound: Sound, cx: &mut App) {
+ cx.update_default_global(|this: &mut Self, cx| {
+ let source = this.sound_source(sound, cx).log_err()?;
let output_handle = this.ensure_output_exists()?;
- let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
output_handle.mixer().add(source);
Some(())
});
}
pub fn end_call(cx: &mut App) {
- if !cx.has_global::<GlobalAudio>() {
- return;
- }
-
- cx.update_global::<GlobalAudio, _>(|this, _| {
+ cx.update_default_global(|this: &mut Self, _cx| {
this.output_handle.take();
});
}
+
+ fn sound_source(&mut self, sound: Sound, cx: &App) -> Result<impl Source + use<>> {
+ if let Some(wav) = self.source_cache.get(&sound) {
+ return Ok(wav.clone());
+ }
+
+ let path = format!("sounds/{}.wav", sound.file());
+ let bytes = cx
+ .asset_source()
+ .load(&path)?
+ .map(anyhow::Ok)
+ .with_context(|| format!("No asset available for path {path}"))??
+ .into_owned();
+ let cursor = Cursor::new(bytes);
+ let source = Decoder::new(cursor)?.buffered();
+
+ self.source_cache.insert(sound, source.clone());
+
+ Ok(source)
+ }
}
@@ -0,0 +1,32 @@
+use anyhow::Result;
+use gpui::App;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)]
+pub struct AudioSettings {
+ /// Opt into the new audio system.
+ #[serde(rename = "experimental.rodio_audio", default)]
+ pub rodio_audio: bool, // default is false
+}
+
+/// Configuration of audio in Zed.
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[serde(default)]
+#[settings_key(key = "audio")]
+pub struct AudioSettingsContent {
+ /// Whether to use the experimental audio system
+ #[serde(rename = "experimental.rodio_audio", default)]
+ pub rodio_audio: bool,
+}
+
+impl Settings for AudioSettings {
+ type FileContent = AudioSettingsContent;
+
+ fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut App) -> Result<Self> {
+ sources.json_merge()
+ }
+
+ fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
+}
@@ -10,7 +10,7 @@ use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources, SettingsStore};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsStore, SettingsUi};
use smol::{fs, io::AsyncReadExt};
use smol::{fs::File, process::Command};
use std::{
@@ -118,14 +118,13 @@ struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
///
/// Default: true
-#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
+#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi, SettingsKey)]
#[serde(transparent)]
+#[settings_key(key = "auto_update")]
struct AutoUpdateSettingContent(bool);
impl Settings for AutoUpdateSetting {
- const KEY: Option<&'static str> = Some("auto_update");
-
- type FileContent = Option<AutoUpdateSettingContent>;
+ type FileContent = AutoUpdateSettingContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
let auto_update = [
@@ -135,17 +134,19 @@ impl Settings for AutoUpdateSetting {
sources.user,
]
.into_iter()
- .find_map(|value| value.copied().flatten())
- .unwrap_or(sources.default.ok_or_else(Self::missing_default)?);
+ .find_map(|value| value.copied())
+ .unwrap_or(*sources.default);
Ok(Self(auto_update.0))
}
fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
- vscode.enum_setting("update.mode", current, |s| match s {
+ let mut cur = &mut Some(*current);
+ vscode.enum_setting("update.mode", &mut cur, |s| match s {
"none" | "manual" => Some(AutoUpdateSettingContent(false)),
_ => Some(AutoUpdateSettingContent(true)),
});
+ *current = cur.unwrap();
}
}
@@ -128,23 +128,20 @@ mod windows_impl {
#[test]
fn test_parse_args() {
// launch can be specified via two separate arguments
- assert_eq!(parse_args(["--launch".into(), "true".into()]).launch, true);
- assert_eq!(
- parse_args(["--launch".into(), "false".into()]).launch,
- false
- );
+ assert!(parse_args(["--launch".into(), "true".into()]).launch);
+ assert!(!parse_args(["--launch".into(), "false".into()]).launch);
// launch can be specified via one single argument
- assert_eq!(parse_args(["--launch=true".into()]).launch, true);
- assert_eq!(parse_args(["--launch=false".into()]).launch, false);
+ assert!(parse_args(["--launch=true".into()]).launch);
+ assert!(!parse_args(["--launch=false".into()]).launch);
// launch defaults to true on no arguments
- assert_eq!(parse_args([]).launch, true);
+ assert!(parse_args([]).launch);
// launch defaults to true on invalid arguments
- assert_eq!(parse_args(["--launch".into()]).launch, true);
- assert_eq!(parse_args(["--launch=".into()]).launch, true);
- assert_eq!(parse_args(["--launch=invalid".into()]).launch, true);
+ assert!(parse_args(["--launch".into()]).launch);
+ assert!(parse_args(["--launch=".into()]).launch);
+ assert!(parse_args(["--launch=invalid".into()]).launch);
}
}
}
@@ -16,7 +16,7 @@ use crate::windows_impl::WM_JOB_UPDATED;
type Job = fn(&Path) -> Result<()>;
#[cfg(not(test))]
-pub(crate) const JOBS: [Job; 6] = [
+pub(crate) const JOBS: &[Job] = &[
// Delete old files
|app_dir| {
let zed_executable = app_dir.join("Zed.exe");
@@ -32,6 +32,12 @@ pub(crate) const JOBS: [Job; 6] = [
std::fs::remove_file(&zed_cli)
.context(format!("Failed to remove old file {}", zed_cli.display()))
},
+ |app_dir| {
+ let zed_wsl = app_dir.join("bin\\zed");
+ log::info!("Removing old file: {}", zed_wsl.display());
+ std::fs::remove_file(&zed_wsl)
+ .context(format!("Failed to remove old file {}", zed_wsl.display()))
+ },
// Copy new files
|app_dir| {
let zed_executable_source = app_dir.join("install\\Zed.exe");
@@ -65,6 +71,22 @@ pub(crate) const JOBS: [Job; 6] = [
zed_cli_dest.display()
))
},
+ |app_dir| {
+ let zed_wsl_source = app_dir.join("install\\bin\\zed");
+ let zed_wsl_dest = app_dir.join("bin\\zed");
+ log::info!(
+ "Copying new file {} to {}",
+ zed_wsl_source.display(),
+ zed_wsl_dest.display()
+ );
+ std::fs::copy(&zed_wsl_source, &zed_wsl_dest)
+ .map(|_| ())
+ .context(format!(
+ "Failed to copy new file {} to {}",
+ zed_wsl_source.display(),
+ zed_wsl_dest.display()
+ ))
+ },
// Clean up installer folder and updates folder
|app_dir| {
let updates_folder = app_dir.join("updates");
@@ -85,16 +107,12 @@ pub(crate) const JOBS: [Job; 6] = [
];
#[cfg(test)]
-pub(crate) const JOBS: [Job; 2] = [
+pub(crate) const JOBS: &[Job] = &[
|_| {
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
- "err" => Err(std::io::Error::new(
- std::io::ErrorKind::Other,
- "Simulated error",
- ))
- .context("Anyhow!"),
+ "err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
@@ -105,11 +123,7 @@ pub(crate) const JOBS: [Job; 2] = [
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
- "err" => Err(std::io::Error::new(
- std::io::ErrorKind::Other,
- "Simulated error",
- ))
- .context("Anyhow!"),
+ "err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
@@ -88,10 +88,7 @@ fn view_release_notes_locally(
.update_in(cx, |workspace, window, cx| {
let project = workspace.project().clone();
let buffer = project.update(cx, |project, cx| {
- let buffer = project.create_local_buffer("", markdown, cx);
- project
- .mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
- buffer
+ project.create_local_buffer("", markdown, false, cx)
});
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, body.release_notes)], None, cx)
@@ -114,7 +111,7 @@ fn view_release_notes_locally(
cx,
);
workspace.add_item_to_active_pane(
- Box::new(markdown_preview.clone()),
+ Box::new(markdown_preview),
None,
true,
window,
@@ -3,6 +3,7 @@ mod models;
use anyhow::{Context, Error, Result, anyhow};
use aws_sdk_bedrockruntime as bedrock;
pub use aws_sdk_bedrockruntime as bedrock_client;
+use aws_sdk_bedrockruntime::types::InferenceConfiguration;
pub use aws_sdk_bedrockruntime::types::{
AnyToolChoice as BedrockAnyToolChoice, AutoToolChoice as BedrockAutoToolChoice,
ContentBlock as BedrockInnerContent, Tool as BedrockTool, ToolChoice as BedrockToolChoice,
@@ -17,7 +18,8 @@ pub use bedrock::types::{
ConverseOutput as BedrockResponse, ConverseStreamOutput as BedrockStreamingResponse,
ImageBlock as BedrockImageBlock, Message as BedrockMessage,
ReasoningContentBlock as BedrockThinkingBlock, ReasoningTextBlock as BedrockThinkingTextBlock,
- ResponseStream as BedrockResponseStream, ToolResultBlock as BedrockToolResultBlock,
+ ResponseStream as BedrockResponseStream, SystemContentBlock as BedrockSystemContentBlock,
+ ToolResultBlock as BedrockToolResultBlock,
ToolResultContentBlock as BedrockToolResultContentBlock,
ToolResultStatus as BedrockToolResultStatus, ToolUseBlock as BedrockToolUseBlock,
};
@@ -58,6 +60,20 @@ pub async fn stream_completion(
response = response.set_tool_config(request.tools);
}
+ let inference_config = InferenceConfiguration::builder()
+ .max_tokens(request.max_tokens as i32)
+ .set_temperature(request.temperature)
+ .set_top_p(request.top_p)
+ .build();
+
+ response = response.inference_config(inference_config);
+
+ if let Some(system) = request.system {
+ if !system.is_empty() {
+ response = response.system(BedrockSystemContentBlock::Text(system));
+ }
+ }
+
let output = response
.send()
.await
@@ -151,12 +151,12 @@ impl Model {
pub fn id(&self) -> &str {
match self {
- Model::ClaudeSonnet4 => "claude-4-sonnet",
- Model::ClaudeSonnet4Thinking => "claude-4-sonnet-thinking",
- Model::ClaudeOpus4 => "claude-4-opus",
- Model::ClaudeOpus4_1 => "claude-4-opus-1",
- Model::ClaudeOpus4Thinking => "claude-4-opus-thinking",
- Model::ClaudeOpus4_1Thinking => "claude-4-opus-1-thinking",
+ Model::ClaudeSonnet4 => "claude-sonnet-4",
+ Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking",
+ Model::ClaudeOpus4 => "claude-opus-4",
+ Model::ClaudeOpus4_1 => "claude-opus-4-1",
+ Model::ClaudeOpus4Thinking => "claude-opus-4-thinking",
+ Model::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking",
Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2",
Model::Claude3_5Sonnet => "claude-3-5-sonnet",
Model::Claude3Opus => "claude-3-opus",
@@ -359,14 +359,12 @@ impl Model {
pub fn max_output_tokens(&self) -> u64 {
match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
- Self::Claude3_7Sonnet
- | Self::Claude3_7SonnetThinking
- | Self::ClaudeSonnet4
- | Self::ClaudeSonnet4Thinking
- | Self::ClaudeOpus4
- | Model::ClaudeOpus4Thinking
+ Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
+ Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
+ Self::ClaudeOpus4
+ | Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
- | Model::ClaudeOpus4_1Thinking => 128_000,
+ | Self::ClaudeOpus4_1Thinking => 32_000,
Self::Claude3_5SonnetV2 | Self::PalmyraWriterX4 | Self::PalmyraWriterX5 => 8_192,
Self::Custom {
max_output_tokens, ..
@@ -784,10 +782,10 @@ mod tests {
);
// Test thinking models have different friendly IDs but same request IDs
- assert_eq!(Model::ClaudeSonnet4.id(), "claude-4-sonnet");
+ assert_eq!(Model::ClaudeSonnet4.id(), "claude-sonnet-4");
assert_eq!(
Model::ClaudeSonnet4Thinking.id(),
- "claude-4-sonnet-thinking"
+ "claude-sonnet-4-thinking"
);
assert_eq!(
Model::ClaudeSonnet4.request_id(),
@@ -162,6 +162,22 @@ impl BufferDiffSnapshot {
}
}
+ fn unchanged(
+ buffer: &text::BufferSnapshot,
+ base_text: language::BufferSnapshot,
+ ) -> BufferDiffSnapshot {
+ debug_assert_eq!(buffer.text(), base_text.text());
+ BufferDiffSnapshot {
+ inner: BufferDiffInner {
+ base_text,
+ hunks: SumTree::new(buffer),
+ pending_hunks: SumTree::new(buffer),
+ base_text_exists: false,
+ },
+ secondary_diff: None,
+ }
+ }
+
fn new_with_base_text(
buffer: text::BufferSnapshot,
base_text: Option<Arc<String>>,
@@ -175,12 +191,8 @@ impl BufferDiffSnapshot {
if let Some(text) = &base_text {
let base_text_rope = Rope::from(text.as_str());
base_text_pair = Some((text.clone(), base_text_rope.clone()));
- let snapshot = language::Buffer::build_snapshot(
- base_text_rope,
- language.clone(),
- language_registry.clone(),
- cx,
- );
+ let snapshot =
+ language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx);
base_text_snapshot = cx.background_spawn(snapshot);
base_text_exists = true;
} else {
@@ -217,7 +229,10 @@ impl BufferDiffSnapshot {
cx: &App,
) -> impl Future<Output = Self> + use<> {
let base_text_exists = base_text.is_some();
- let base_text_pair = base_text.map(|text| (text, base_text_snapshot.as_rope().clone()));
+ let base_text_pair = base_text.map(|text| {
+ debug_assert_eq!(&*text, &base_text_snapshot.text());
+ (text, base_text_snapshot.as_rope().clone())
+ });
cx.background_executor()
.spawn_labeled(*CALCULATE_DIFF_TASK, async move {
Self {
@@ -877,6 +892,18 @@ impl BufferDiff {
}
}
+ pub fn new_unchanged(
+ buffer: &text::BufferSnapshot,
+ base_text: language::BufferSnapshot,
+ ) -> Self {
+ debug_assert_eq!(buffer.text(), base_text.text());
+ BufferDiff {
+ buffer_id: buffer.remote_id(),
+ inner: BufferDiffSnapshot::unchanged(buffer, base_text).inner,
+ secondary_diff: None,
+ }
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn new_with_base_text(
base_text: &str,
@@ -957,7 +984,7 @@ impl BufferDiff {
.buffer_range
.start;
let end = self
- .hunks_intersecting_range_rev(range.clone(), buffer)
+ .hunks_intersecting_range_rev(range, buffer)
.next()?
.buffer_range
.end;
@@ -1441,7 +1468,7 @@ mod tests {
.unindent();
let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
- let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text.clone(), cx);
+ let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
let mut uncommitted_diff =
BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
uncommitted_diff.secondary_diff = Some(Box::new(unstaged_diff));
@@ -2017,10 +2044,10 @@ mod tests {
#[gpui::test(iterations = 100)]
async fn test_staging_and_unstaging_hunks(cx: &mut TestAppContext, mut rng: StdRng) {
fn gen_line(rng: &mut StdRng) -> String {
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
"\n".to_owned()
} else {
- let c = rng.gen_range('A'..='Z');
+ let c = rng.random_range('A'..='Z');
format!("{c}{c}{c}\n")
}
}
@@ -2028,8 +2055,8 @@ mod tests {
fn gen_working_copy(rng: &mut StdRng, head: &str) -> String {
let mut old_lines = {
let mut old_lines = Vec::new();
- let mut old_lines_iter = head.lines();
- while let Some(line) = old_lines_iter.next() {
+ let old_lines_iter = head.lines();
+ for line in old_lines_iter {
assert!(!line.ends_with("\n"));
old_lines.push(line.to_owned());
}
@@ -2039,7 +2066,7 @@ mod tests {
old_lines.into_iter()
};
let mut result = String::new();
- let unchanged_count = rng.gen_range(0..=old_lines.len());
+ let unchanged_count = rng.random_range(0..=old_lines.len());
result +=
&old_lines
.by_ref()
@@ -2049,14 +2076,14 @@ mod tests {
s
});
while old_lines.len() > 0 {
- let deleted_count = rng.gen_range(0..=old_lines.len());
+ let deleted_count = rng.random_range(0..=old_lines.len());
let _advance = old_lines
.by_ref()
.take(deleted_count)
.map(|line| line.len() + 1)
.sum::<usize>();
let minimum_added = if deleted_count == 0 { 1 } else { 0 };
- let added_count = rng.gen_range(minimum_added..=5);
+ let added_count = rng.random_range(minimum_added..=5);
let addition = (0..added_count).map(|_| gen_line(rng)).collect::<String>();
result += &addition;
@@ -2065,7 +2092,8 @@ mod tests {
if blank_lines == old_lines.len() {
break;
};
- let unchanged_count = rng.gen_range((blank_lines + 1).max(1)..=old_lines.len());
+ let unchanged_count =
+ rng.random_range((blank_lines + 1).max(1)..=old_lines.len());
result += &old_lines.by_ref().take(unchanged_count).fold(
String::new(),
|mut s, line| {
@@ -2122,7 +2150,7 @@ mod tests {
)
});
let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
- let mut index_text = if rng.r#gen() {
+ let mut index_text = if rng.random() {
Rope::from(head_text.as_str())
} else {
working_copy.as_rope().clone()
@@ -2133,12 +2161,12 @@ mod tests {
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx)
.collect::<Vec<_>>()
});
- if hunks.len() == 0 {
+ if hunks.is_empty() {
return;
}
for _ in 0..operations {
- let i = rng.gen_range(0..hunks.len());
+ let i = rng.random_range(0..hunks.len());
let hunk = &mut hunks[i];
let hunk_to_change = hunk.clone();
let stage = match hunk.secondary_status {
@@ -1161,7 +1161,7 @@ impl Room {
let request = self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
- is_ssh_project: project.read(cx).is_via_ssh(),
+ is_ssh_project: project.read(cx).is_via_remote_server(),
});
cx.spawn(async move |this, cx| {
@@ -2,7 +2,7 @@ use anyhow::Result;
use gpui::App;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Deserialize, Debug)]
pub struct CallSettings {
@@ -11,7 +11,8 @@ pub struct CallSettings {
}
/// Configuration of voice calls in Zed.
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "calls")]
pub struct CallSettingsContent {
/// Whether the microphone should be muted when joining a channel or a call.
///
@@ -25,8 +26,6 @@ pub struct CallSettingsContent {
}
impl Settings for CallSettings {
- const KEY: Option<&'static str> = Some("calls");
-
type FileContent = CallSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -135,7 +135,7 @@ impl ChannelBuffer {
}
}
- for (_, old_collaborator) in &self.collaborators {
+ for old_collaborator in self.collaborators.values() {
if !new_collaborators.contains_key(&old_collaborator.peer_id) {
self.buffer.update(cx, |buffer, cx| {
buffer.remove_peer(old_collaborator.replica_id, cx)
@@ -129,7 +129,7 @@ impl ChannelChat {
loaded_all_messages: false,
next_pending_message_id: 0,
last_acknowledged_id: None,
- rng: StdRng::from_entropy(),
+ rng: StdRng::from_os_rng(),
first_loaded_message_id: None,
_subscription: subscription.set_entity(&cx.entity(), &cx.to_async()),
}
@@ -183,7 +183,7 @@ impl ChannelChat {
let channel_id = self.channel_id;
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
- let nonce = self.rng.r#gen();
+ let nonce = self.rng.random();
self.insert_messages(
SumTree::from_item(
ChannelMessage {
@@ -257,7 +257,7 @@ impl ChannelChat {
cx,
);
- let nonce: u128 = self.rng.r#gen();
+ let nonce: u128 = self.rng.random();
let request = self.rpc.request(proto::UpdateChannelMessage {
channel_id: self.channel_id.0,
@@ -1073,7 +1073,7 @@ impl ChannelStore {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
- for (_, buffer) in &this.opened_buffers {
+ for buffer in this.opened_buffers.values() {
if let OpenEntityHandle::Open(buffer) = &buffer
&& let Some(buffer) = buffer.upgrade()
{
@@ -438,7 +438,7 @@ fn init_test(cx: &mut App) -> Entity<ChannelStore> {
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_404_response();
- let client = Client::new(clock, http.clone(), cx);
+ let client = Client::new(clock, http, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
client::init(&client, cx);
@@ -14,6 +14,7 @@ pub enum CliRequest {
paths: Vec<String>,
urls: Vec<String>,
diff_paths: Vec<[String; 2]>,
+ wsl: Option<String>,
wait: bool,
open_new_workspace: Option<bool>,
env: Option<HashMap<String, String>>,
@@ -6,7 +6,6 @@
use anyhow::{Context as _, Result};
use clap::Parser;
use cli::{CliRequest, CliResponse, IpcHandshake, ipc::IpcOneShotServer};
-use collections::HashMap;
use parking_lot::Mutex;
use std::{
env, fs, io,
@@ -85,6 +84,18 @@ struct Args {
/// Run zed in dev-server mode
#[arg(long)]
dev_server_token: Option<String>,
+ /// The username and WSL distribution to use when opening paths. If not specified,
+ /// Zed will attempt to open the paths directly.
+ ///
+ /// The username is optional, and if not specified, the default user for the distribution
+ /// will be used.
+ ///
+ /// Example: `me@Ubuntu` or `Ubuntu`.
+ ///
+ /// WARN: You should not fill in this field by hand.
+ #[cfg(target_os = "windows")]
+ #[arg(long, value_name = "USER@DISTRO")]
+ wsl: Option<String>,
/// Not supported in Zed CLI, only supported on Zed binary
/// Will attempt to give the correct command to run
#[arg(long)]
@@ -129,14 +140,41 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string()))
}
-fn main() -> Result<()> {
- #[cfg(all(not(debug_assertions), target_os = "windows"))]
- unsafe {
- use ::windows::Win32::System::Console::{ATTACH_PARENT_PROCESS, AttachConsole};
+fn parse_path_in_wsl(source: &str, wsl: &str) -> Result<String> {
+ let mut command = util::command::new_std_command("wsl.exe");
- let _ = AttachConsole(ATTACH_PARENT_PROCESS);
+ let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') {
+ if user.is_empty() {
+ anyhow::bail!("user is empty in wsl argument");
+ }
+ (Some(user), distro)
+ } else {
+ (None, wsl)
+ };
+
+ if let Some(user) = user {
+ command.arg("--user").arg(user);
}
+ let output = command
+ .arg("--distribution")
+ .arg(distro_name)
+ .arg("wslpath")
+ .arg("-m")
+ .arg(source)
+ .output()?;
+
+ let result = String::from_utf8_lossy(&output.stdout);
+ let prefix = format!("//wsl.localhost/{}", distro_name);
+
+ Ok(result
+ .trim()
+ .strip_prefix(&prefix)
+ .unwrap_or(&result)
+ .to_string())
+}
+
+fn main() -> Result<()> {
#[cfg(unix)]
util::prevent_root_execution();
@@ -223,6 +261,8 @@ fn main() -> Result<()> {
let env = {
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
{
+ use collections::HashMap;
+
// On Linux, the desktop entry uses `cli` to spawn `zed`.
// We need to handle env vars correctly since std::env::vars() may not contain
// project-specific vars (e.g. those set by direnv).
@@ -235,8 +275,19 @@ fn main() -> Result<()> {
}
}
- #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
- Some(std::env::vars().collect::<HashMap<_, _>>())
+ #[cfg(target_os = "windows")]
+ {
+ // On Windows, by default, a child process inherits a copy of the environment block of the parent process.
+ // So we don't need to pass env vars explicitly.
+ None
+ }
+
+ #[cfg(not(any(target_os = "linux", target_os = "freebsd", target_os = "windows")))]
+ {
+ use collections::HashMap;
+
+ Some(std::env::vars().collect::<HashMap<_, _>>())
+ }
};
let exit_status = Arc::new(Mutex::new(None));
@@ -253,6 +304,11 @@ fn main() -> Result<()> {
]);
}
+ #[cfg(target_os = "windows")]
+ let wsl = args.wsl.as_ref();
+ #[cfg(not(target_os = "windows"))]
+ let wsl = None;
+
for path in args.paths_with_position.iter() {
if path.starts_with("zed://")
|| path.starts_with("http://")
@@ -271,8 +327,10 @@ fn main() -> Result<()> {
paths.push(tmp_file.path().to_string_lossy().to_string());
let (tmp_file, _) = tmp_file.keep()?;
anonymous_fd_tmp_files.push((file, tmp_file));
+ } else if let Some(wsl) = wsl {
+ urls.push(format!("file://{}", parse_path_in_wsl(path, wsl)?));
} else {
- paths.push(parse_path_with_position(path)?)
+ paths.push(parse_path_with_position(path)?);
}
}
@@ -288,10 +346,16 @@ fn main() -> Result<()> {
let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
let (tx, rx) = (handshake.requests, handshake.responses);
+ #[cfg(target_os = "windows")]
+ let wsl = args.wsl;
+ #[cfg(not(target_os = "windows"))]
+ let wsl = None;
+
tx.send(CliRequest::Open {
paths,
urls,
diff_paths,
+ wsl,
wait: args.wait,
open_new_workspace,
env,
@@ -494,11 +558,11 @@ mod linux {
Ok(Fork::Parent(_)) => Ok(()),
Ok(Fork::Child) => {
unsafe { std::env::set_var(FORCE_CLI_MODE_ENV_VAR_NAME, "") };
- if let Err(_) = fork::setsid() {
+ if fork::setsid().is_err() {
eprintln!("failed to setsid: {}", std::io::Error::last_os_error());
process::exit(1);
}
- if let Err(_) = fork::close_fd() {
+ if fork::close_fd().is_err() {
eprintln!("failed to close_fd: {}", std::io::Error::last_os_error());
}
let error =
@@ -534,8 +598,8 @@ mod flatpak {
use std::process::Command;
use std::{env, process};
- const EXTRA_LIB_ENV_NAME: &'static str = "ZED_FLATPAK_LIB_PATH";
- const NO_ESCAPE_ENV_NAME: &'static str = "ZED_FLATPAK_NO_ESCAPE";
+ const EXTRA_LIB_ENV_NAME: &str = "ZED_FLATPAK_LIB_PATH";
+ const NO_ESCAPE_ENV_NAME: &str = "ZED_FLATPAK_NO_ESCAPE";
/// Adds bundled libraries to LD_LIBRARY_PATH if running under flatpak
pub fn ld_extra_libs() {
@@ -644,15 +708,15 @@ mod windows {
Storage::FileSystem::{
CreateFileW, FILE_FLAGS_AND_ATTRIBUTES, FILE_SHARE_MODE, OPEN_EXISTING, WriteFile,
},
- System::Threading::CreateMutexW,
+ System::Threading::{CREATE_NEW_PROCESS_GROUP, CreateMutexW},
},
core::HSTRING,
};
use crate::{Detect, InstalledApp};
- use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
+ use std::{io, os::windows::process::CommandExt};
fn check_single_instance() -> bool {
let mutex = unsafe {
@@ -691,6 +755,7 @@ mod windows {
fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
if check_single_instance() {
std::process::Command::new(self.0.clone())
+ .creation_flags(CREATE_NEW_PROCESS_GROUP.0)
.arg(ipc_url)
.spawn()?;
} else {
@@ -926,7 +991,7 @@ mod mac_os {
fn path(&self) -> PathBuf {
match self {
- Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed").clone(),
+ Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed"),
Bundle::LocalPath { executable, .. } => executable.clone(),
}
}
@@ -75,7 +75,7 @@ util = { workspace = true, features = ["test-support"] }
windows.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
-cocoa.workspace = true
+objc2-foundation.workspace = true
[target.'cfg(any(target_os = "windows", target_os = "macos"))'.dependencies]
tokio-native-tls = "0.3"
@@ -31,7 +31,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use std::{
any::TypeId,
convert::TryFrom,
@@ -66,6 +66,8 @@ pub static IMPERSONATE_LOGIN: LazyLock<Option<String>> = LazyLock::new(|| {
.and_then(|s| if s.is_empty() { None } else { Some(s) })
});
+pub static USE_WEB_LOGIN: LazyLock<bool> = LazyLock::new(|| std::env::var("ZED_WEB_LOGIN").is_ok());
+
pub static ADMIN_API_TOKEN: LazyLock<Option<String>> = LazyLock::new(|| {
std::env::var("ZED_ADMIN_API_TOKEN")
.ok()
@@ -94,7 +96,8 @@ actions!(
]
);
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct ClientSettingsContent {
server_url: Option<String>,
}
@@ -105,8 +108,6 @@ pub struct ClientSettings {
}
impl Settings for ClientSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = ClientSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -120,7 +121,8 @@ impl Settings for ClientSettings {
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}
-#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
+#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct ProxySettingsContent {
proxy: Option<String>,
}
@@ -131,8 +133,6 @@ pub struct ProxySettings {
}
impl Settings for ProxySettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = ProxySettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -181,7 +181,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
});
cx.on_action({
- let client = client.clone();
+ let client = client;
move |_: &Reconnect, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
@@ -285,6 +285,7 @@ pub enum Status {
},
ConnectionLost,
Reauthenticating,
+ Reauthenticated,
Reconnecting,
ReconnectionError {
next_reconnection: Instant,
@@ -296,6 +297,21 @@ impl Status {
matches!(self, Self::Connected { .. })
}
+ pub fn was_connected(&self) -> bool {
+ matches!(
+ self,
+ Self::ConnectionLost
+ | Self::Reauthenticating
+ | Self::Reauthenticated
+ | Self::Reconnecting
+ )
+ }
+
+ /// Returns whether the client is currently connected or was connected at some point.
+ pub fn is_or_was_connected(&self) -> bool {
+ self.is_connected() || self.was_connected()
+ }
+
pub fn is_signing_in(&self) -> bool {
matches!(
self,
@@ -509,7 +525,8 @@ pub struct TelemetrySettings {
}
/// Control what info is collected by Zed.
-#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "telemetry")]
pub struct TelemetrySettingsContent {
/// Send debug info like crash reports.
///
@@ -522,8 +539,6 @@ pub struct TelemetrySettingsContent {
}
impl settings::Settings for TelemetrySettings {
- const KEY: Option<&'static str> = Some("telemetry");
-
type FileContent = TelemetrySettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -673,7 +688,7 @@ impl Client {
#[cfg(any(test, feature = "test-support"))]
let mut rng = StdRng::seed_from_u64(0);
#[cfg(not(any(test, feature = "test-support")))]
- let mut rng = StdRng::from_entropy();
+ let mut rng = StdRng::from_os_rng();
let mut delay = INITIAL_RECONNECTION_DELAY;
loop {
@@ -703,8 +718,9 @@ impl Client {
},
cx,
);
- let jitter =
- Duration::from_millis(rng.gen_range(0..delay.as_millis() as u64));
+ let jitter = Duration::from_millis(
+ rng.random_range(0..delay.as_millis() as u64),
+ );
cx.background_executor().timer(delay + jitter).await;
delay = cmp::min(delay * 2, MAX_RECONNECTION_DELAY);
} else {
@@ -791,7 +807,7 @@ impl Client {
Arc::new(move |subscriber, envelope, client, cx| {
let subscriber = subscriber.downcast::<E>().unwrap();
let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
- handler(subscriber, *envelope, client.clone(), cx).boxed_local()
+ handler(subscriber, *envelope, client, cx).boxed_local()
}),
);
if prev_handler.is_some() {
@@ -855,11 +871,13 @@ impl Client {
try_provider: bool,
cx: &AsyncApp,
) -> Result<Credentials> {
- if self.status().borrow().is_signed_out() {
+ let is_reauthenticating = if self.status().borrow().is_signed_out() {
self.set_status(Status::Authenticating, cx);
+ false
} else {
self.set_status(Status::Reauthenticating, cx);
- }
+ true
+ };
let mut credentials = None;
@@ -917,7 +935,14 @@ impl Client {
self.cloud_client
.set_credentials(credentials.user_id as u32, credentials.access_token.clone());
self.state.write().credentials = Some(credentials.clone());
- self.set_status(Status::Authenticated, cx);
+ self.set_status(
+ if is_reauthenticating {
+ Status::Reauthenticated
+ } else {
+ Status::Authenticated
+ },
+ cx,
+ );
Ok(credentials)
}
@@ -1029,11 +1054,12 @@ impl Client {
Status::SignedOut | Status::Authenticated => true,
Status::ConnectionError
| Status::ConnectionLost
- | Status::Authenticating { .. }
+ | Status::Authenticating
| Status::AuthenticationError
- | Status::Reauthenticating { .. }
+ | Status::Reauthenticating
+ | Status::Reauthenticated
| Status::ReconnectionError { .. } => false,
- Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => {
+ Status::Connected { .. } | Status::Connecting | Status::Reconnecting => {
return ConnectionResult::Result(Ok(()));
}
Status::UpgradeRequired => {
@@ -1290,19 +1316,21 @@ impl Client {
"http" => Http,
_ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
};
- let rpc_host = rpc_url
- .host_str()
- .zip(rpc_url.port_or_known_default())
- .context("missing host in rpc url")?;
-
- let stream = {
- let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
- let _guard = handle.enter();
- match proxy {
- Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?,
- None => Box::new(TcpStream::connect(rpc_host).await?),
+
+ let stream = gpui_tokio::Tokio::spawn_result(cx, {
+ let rpc_url = rpc_url.clone();
+ async move {
+ let rpc_host = rpc_url
+ .host_str()
+ .zip(rpc_url.port_or_known_default())
+ .context("missing host in rpc url")?;
+ Ok(match proxy {
+ Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?,
+ None => Box::new(TcpStream::connect(rpc_host).await?),
+ })
}
- };
+ })?
+ .await?;
log::info!("connected to rpc endpoint {}", rpc_url);
@@ -1390,11 +1418,13 @@ impl Client {
if let Some((login, token)) =
IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref())
{
- eprintln!("authenticate as admin {login}, {token}");
+ if !*USE_WEB_LOGIN {
+ eprintln!("authenticate as admin {login}, {token}");
- return this
- .authenticate_as_admin(http, login.clone(), token.clone())
- .await;
+ return this
+ .authenticate_as_admin(http, login.clone(), token.clone())
+ .await;
+ }
}
// Start an HTTP server to receive the redirect from Zed's sign-in page.
@@ -1664,21 +1694,10 @@ impl Client {
);
cx.spawn(async move |_| match future.await {
Ok(()) => {
- log::debug!(
- "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
- client_id,
- original_sender_id,
- type_name
- );
+ log::debug!("rpc message handled. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}");
}
Err(error) => {
- log::error!(
- "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
- client_id,
- original_sender_id,
- type_name,
- error
- );
+ log::error!("error handling message. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}, error:{error:#}");
}
})
.detach();
@@ -1902,10 +1921,7 @@ mod tests {
assert!(matches!(status.next().await, Some(Status::Connecting)));
executor.advance_clock(CONNECTION_TIMEOUT);
- assert!(matches!(
- status.next().await,
- Some(Status::ConnectionError { .. })
- ));
+ assert!(matches!(status.next().await, Some(Status::ConnectionError)));
auth_and_connect.await.into_response().unwrap_err();
// Allow the connection to be established.
@@ -1929,10 +1945,7 @@ mod tests {
})
});
executor.advance_clock(2 * INITIAL_RECONNECTION_DELAY);
- assert!(matches!(
- status.next().await,
- Some(Status::Reconnecting { .. })
- ));
+ assert!(matches!(status.next().await, Some(Status::Reconnecting)));
executor.advance_clock(CONNECTION_TIMEOUT);
assert!(matches!(
@@ -2048,10 +2061,7 @@ mod tests {
assert_eq!(*auth_count.lock(), 1);
assert_eq!(*dropped_auth_count.lock(), 0);
- let _authenticate = cx.spawn({
- let client = client.clone();
- |cx| async move { client.connect(false, &cx).await }
- });
+ let _authenticate = cx.spawn(|cx| async move { client.connect(false, &cx).await });
executor.run_until_parked();
assert_eq!(*auth_count.lock(), 2);
assert_eq!(*dropped_auth_count.lock(), 1);
@@ -76,7 +76,7 @@ static ZED_CLIENT_CHECKSUM_SEED: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
pub static MINIDUMP_ENDPOINT: LazyLock<Option<String>> = LazyLock::new(|| {
option_env!("ZED_MINIDUMP_ENDPOINT")
- .map(|s| s.to_owned())
+ .map(str::to_string)
.or_else(|| env::var("ZED_MINIDUMP_ENDPOINT").ok())
});
@@ -84,6 +84,10 @@ static DOTNET_PROJECT_FILES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(r"^(global\.json|Directory\.Build\.props|.*\.(csproj|fsproj|vbproj|sln))$").unwrap()
});
+#[cfg(target_os = "macos")]
+static MACOS_VERSION_REGEX: LazyLock<Regex> =
+ LazyLock::new(|| Regex::new(r"(\s*\(Build [^)]*[0-9]\))").unwrap());
+
pub fn os_name() -> String {
#[cfg(target_os = "macos")]
{
@@ -108,19 +112,16 @@ pub fn os_name() -> String {
pub fn os_version() -> String {
#[cfg(target_os = "macos")]
{
- use cocoa::base::nil;
- use cocoa::foundation::NSProcessInfo;
-
- unsafe {
- let process_info = cocoa::foundation::NSProcessInfo::processInfo(nil);
- let version = process_info.operatingSystemVersion();
- gpui::SemanticVersion::new(
- version.majorVersion as usize,
- version.minorVersion as usize,
- version.patchVersion as usize,
- )
+ use objc2_foundation::NSProcessInfo;
+ let process_info = NSProcessInfo::processInfo();
+ let version_nsstring = unsafe { process_info.operatingSystemVersionString() };
+ // "Version 15.6.1 (Build 24G90)" -> "15.6.1 (Build 24G90)"
+ let version_string = version_nsstring.to_string().replace("Version ", "");
+ // "15.6.1 (Build 24G90)" -> "15.6.1"
+ // "26.0.0 (Build 25A5349a)" -> unchanged (Beta or Rapid Security Response; ends with letter)
+ MACOS_VERSION_REGEX
+ .replace_all(&version_string, "")
.to_string()
- }
}
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
{
@@ -739,7 +740,7 @@ mod tests {
);
// Third scan of worktree does not double report, as we already reported
- test_project_discovery_helper(telemetry.clone(), vec!["package.json"], None, worktree_id);
+ test_project_discovery_helper(telemetry, vec!["package.json"], None, worktree_id);
}
#[gpui::test]
@@ -751,7 +752,7 @@ mod tests {
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
test_project_discovery_helper(
- telemetry.clone(),
+ telemetry,
vec!["package.json", "pnpm-lock.yaml"],
Some(vec!["node", "pnpm"]),
1,
@@ -767,7 +768,7 @@ mod tests {
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
test_project_discovery_helper(
- telemetry.clone(),
+ telemetry,
vec!["package.json", "yarn.lock"],
Some(vec!["node", "yarn"]),
1,
@@ -786,7 +787,7 @@ mod tests {
// project type for the same worktree multiple times
test_project_discovery_helper(
- telemetry.clone().clone(),
+ telemetry.clone(),
vec!["global.json"],
Some(vec!["dotnet"]),
1,
@@ -1,5 +1,5 @@
use super::{Client, Status, TypedEnvelope, proto};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result};
use chrono::{DateTime, Utc};
use cloud_api_client::websocket_protocol::MessageToClient;
use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo};
@@ -41,16 +41,11 @@ impl std::fmt::Display for ChannelId {
pub struct ProjectId(pub u64);
impl ProjectId {
- pub fn to_proto(&self) -> u64 {
+ pub fn to_proto(self) -> u64 {
self.0
}
}
-#[derive(
- Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
-)]
-pub struct DevServerProjectId(pub u64);
-
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ParticipantIndex(pub u32);
@@ -116,7 +111,6 @@ pub struct UserStore {
edit_prediction_usage: Option<EditPredictionUsage>,
plan_info: Option<PlanInfo>,
current_user: watch::Receiver<Option<Arc<User>>>,
- accepted_tos_at: Option<Option<cloud_api_client::Timestamp>>,
contacts: Vec<Arc<Contact>>,
incoming_contact_requests: Vec<Arc<User>>,
outgoing_contact_requests: Vec<Arc<User>>,
@@ -194,7 +188,6 @@ impl UserStore {
plan_info: None,
model_request_usage: None,
edit_prediction_usage: None,
- accepted_tos_at: None,
contacts: Default::default(),
incoming_contact_requests: Default::default(),
participant_indices: Default::default(),
@@ -223,7 +216,9 @@ impl UserStore {
return Ok(());
};
match status {
- Status::Authenticated | Status::Connected { .. } => {
+ Status::Authenticated
+ | Status::Reauthenticated
+ | Status::Connected { .. } => {
if let Some(user_id) = client.user_id() {
let response = client
.cloud_client()
@@ -271,7 +266,6 @@ impl UserStore {
Status::SignedOut => {
current_user_tx.send(None).await.ok();
this.update(cx, |this, cx| {
- this.accepted_tos_at = None;
cx.emit(Event::PrivateUserInfoUpdated);
cx.notify();
this.clear_contacts()
@@ -791,19 +785,6 @@ impl UserStore {
.set_authenticated_user_info(Some(response.user.metrics_id.clone()), staff);
}
- let accepted_tos_at = {
- #[cfg(debug_assertions)]
- if std::env::var("ZED_IGNORE_ACCEPTED_TOS").is_ok() {
- None
- } else {
- response.user.accepted_tos_at
- }
-
- #[cfg(not(debug_assertions))]
- response.user.accepted_tos_at
- };
-
- self.accepted_tos_at = Some(accepted_tos_at);
self.model_request_usage = Some(ModelRequestUsage(RequestUsage {
limit: response.plan.usage.model_requests.limit,
amount: response.plan.usage.model_requests.used as i32,
@@ -846,32 +827,6 @@ impl UserStore {
self.current_user.clone()
}
- pub fn has_accepted_terms_of_service(&self) -> bool {
- self.accepted_tos_at
- .is_some_and(|accepted_tos_at| accepted_tos_at.is_some())
- }
-
- pub fn accept_terms_of_service(&self, cx: &Context<Self>) -> Task<Result<()>> {
- if self.current_user().is_none() {
- return Task::ready(Err(anyhow!("no current user")));
- };
-
- let client = self.client.clone();
- cx.spawn(async move |this, cx| -> anyhow::Result<()> {
- let client = client.upgrade().context("client not found")?;
- let response = client
- .cloud_client()
- .accept_terms_of_service()
- .await
- .context("error accepting tos")?;
- this.update(cx, |this, cx| {
- this.accepted_tos_at = Some(response.user.accepted_tos_at);
- cx.emit(Event::PrivateUserInfoUpdated);
- })?;
- Ok(())
- })
- }
-
fn load_users(
&self,
request: impl RequestMessage<Response = UsersResponse>,
@@ -43,3 +43,11 @@ pub fn ai_privacy_and_security(cx: &App) -> String {
server_url = server_url(cx)
)
}
+
+/// Returns the URL to Zed AI's external agents documentation.
+pub fn external_agents_docs(cx: &App) -> String {
+ format!(
+ "{server_url}/docs/ai/external-agents",
+ server_url = server_url(cx)
+ )
+}
@@ -102,13 +102,7 @@ impl CloudApiClient {
let credentials = credentials.as_ref().context("no credentials provided")?;
let authorization_header = format!("{} {}", credentials.user_id, credentials.access_token);
- Ok(cx.spawn(async move |cx| {
- let handle = cx
- .update(|cx| Tokio::handle(cx))
- .ok()
- .context("failed to get Tokio handle")?;
- let _guard = handle.enter();
-
+ Ok(Tokio::spawn_result(cx, async move {
let ws = WebSocket::connect(connect_url)
.with_request(
request::Builder::new()
@@ -121,34 +115,6 @@ impl CloudApiClient {
}))
}
- pub async fn accept_terms_of_service(&self) -> Result<AcceptTermsOfServiceResponse> {
- let request = self.build_request(
- Request::builder().method(Method::POST).uri(
- self.http_client
- .build_zed_cloud_url("/client/terms_of_service/accept", &[])?
- .as_ref(),
- ),
- AsyncBody::default(),
- )?;
-
- let mut response = self.http_client.send(request).await?;
-
- if !response.status().is_success() {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
-
- anyhow::bail!(
- "Failed to accept terms of service.\nStatus: {:?}\nBody: {body}",
- response.status()
- )
- }
-
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
-
- Ok(serde_json::from_str(&body)?)
- }
-
pub async fn create_llm_token(
&self,
system_id: Option<String>,
@@ -175,6 +175,7 @@ CREATE TABLE "language_servers" (
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"name" VARCHAR NOT NULL,
"capabilities" TEXT NOT NULL,
+ "worktree_id" BIGINT,
PRIMARY KEY (project_id, id)
);
@@ -0,0 +1,3 @@
+alter table billing_subscriptions
+ alter column stripe_subscription_id drop not null,
+ alter column stripe_subscription_status drop not null;
@@ -0,0 +1,4 @@
+alter table billing_subscriptions
+ add column orb_subscription_status text,
+ add column orb_current_billing_period_start_date timestamp without time zone,
+ add column orb_current_billing_period_end_date timestamp without time zone;
@@ -0,0 +1,2 @@
+ALTER TABLE language_servers
+ ADD COLUMN worktree_id BIGINT;
@@ -280,7 +280,7 @@ pub async fn post_hang(
service = "client",
version = %report.app_version.unwrap_or_default().to_string(),
os_name = %report.os_name,
- os_version = report.os_version.unwrap_or_default().to_string(),
+ os_version = report.os_version.unwrap_or_default(),
incident_id = %incident_id,
installation_id = %report.installation_id.unwrap_or_default(),
backtrace = %backtrace,
@@ -227,7 +227,7 @@ pub async fn verify_access_token(
#[cfg(test)]
mod test {
- use rand::thread_rng;
+ use rand::prelude::*;
use scrypt::password_hash::{PasswordHasher, SaltString};
use sea_orm::EntityTrait;
@@ -236,7 +236,7 @@ mod test {
#[gpui::test]
async fn test_verify_access_token(cx: &mut gpui::TestAppContext) {
- let test_db = crate::db::TestDb::sqlite(cx.executor().clone());
+ let test_db = crate::db::TestDb::sqlite(cx.executor());
let db = test_db.db();
let user = db
@@ -358,9 +358,42 @@ mod test {
None,
None,
params,
- &SaltString::generate(thread_rng()),
+ &SaltString::generate(PasswordHashRngCompat::new()),
)
.map_err(anyhow::Error::new)?
.to_string())
}
+
+ // TODO: remove once we password_hash v0.6 is released.
+ struct PasswordHashRngCompat(rand::rngs::ThreadRng);
+
+ impl PasswordHashRngCompat {
+ fn new() -> Self {
+ Self(rand::rng())
+ }
+ }
+
+ impl scrypt::password_hash::rand_core::RngCore for PasswordHashRngCompat {
+ fn next_u32(&mut self) -> u32 {
+ self.0.next_u32()
+ }
+
+ fn next_u64(&mut self) -> u64 {
+ self.0.next_u64()
+ }
+
+ fn fill_bytes(&mut self, dest: &mut [u8]) {
+ self.0.fill_bytes(dest);
+ }
+
+ fn try_fill_bytes(
+ &mut self,
+ dest: &mut [u8],
+ ) -> Result<(), scrypt::password_hash::rand_core::Error> {
+ self.fill_bytes(dest);
+ Ok(())
+ }
+ }
+
+ impl scrypt::password_hash::rand_core::CryptoRng for PasswordHashRngCompat {}
}
@@ -256,7 +256,7 @@ impl Database {
let test_options = self.test_options.as_ref().unwrap();
test_options.executor.simulate_random_delay().await;
let fail_probability = *test_options.query_failure_probability.lock();
- if test_options.executor.rng().gen_bool(fail_probability) {
+ if test_options.executor.rng().random_bool(fail_probability) {
return Err(anyhow!("simulated query failure"))?;
}
@@ -685,7 +685,7 @@ impl LocalSettingsKind {
}
}
- pub fn to_proto(&self) -> proto::LocalSettingsKind {
+ pub fn to_proto(self) -> proto::LocalSettingsKind {
match self {
Self::Settings => proto::LocalSettingsKind::Settings,
Self::Tasks => proto::LocalSettingsKind::Tasks,
@@ -694,6 +694,7 @@ impl Database {
project_id: ActiveValue::set(project_id),
id: ActiveValue::set(server.id as i64),
name: ActiveValue::set(server.name.clone()),
+ worktree_id: ActiveValue::set(server.worktree_id.map(|id| id as i64)),
capabilities: ActiveValue::set(update.capabilities.clone()),
})
.on_conflict(
@@ -704,6 +705,7 @@ impl Database {
.update_columns([
language_server::Column::Name,
language_server::Column::Capabilities,
+ language_server::Column::WorktreeId,
])
.to_owned(),
)
@@ -1065,7 +1067,7 @@ impl Database {
server: proto::LanguageServer {
id: language_server.id as u64,
name: language_server.name,
- worktree_id: None,
+ worktree_id: language_server.worktree_id.map(|id| id as u64),
},
capabilities: language_server.capabilities,
})
@@ -809,7 +809,7 @@ impl Database {
server: proto::LanguageServer {
id: language_server.id as u64,
name: language_server.name,
- worktree_id: None,
+ worktree_id: language_server.worktree_id.map(|id| id as u64),
},
capabilities: language_server.capabilities,
})
@@ -10,6 +10,7 @@ pub struct Model {
pub id: i64,
pub name: String,
pub capabilities: String,
+ pub worktree_id: Option<i64>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -75,10 +75,10 @@ impl TestDb {
static LOCK: Mutex<()> = Mutex::new(());
let _guard = LOCK.lock();
- let mut rng = StdRng::from_entropy();
+ let mut rng = StdRng::from_os_rng();
let url = format!(
"postgres://postgres@localhost/zed-test-{}",
- rng.r#gen::<u128>()
+ rng.random::<u128>()
);
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_io()
@@ -8,7 +8,7 @@ use time::{Duration, OffsetDateTime, PrimitiveDateTime};
// SQLite does not support array arguments, so we only test this against a real postgres instance
#[gpui::test]
async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
- let test_db = TestDb::postgres(cx.executor().clone());
+ let test_db = TestDb::postgres(cx.executor());
let db = test_db.db();
let provider = "test_model";
@@ -38,7 +38,7 @@ async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
- let test_db = TestDb::postgres(cx.executor().clone());
+ let test_db = TestDb::postgres(cx.executor());
let db = test_db.db();
let model = "test_model";
@@ -310,7 +310,7 @@ impl Server {
let mut server = Self {
id: parking_lot::Mutex::new(id),
peer: Peer::new(id.0 as u32),
- app_state: app_state.clone(),
+ app_state,
connection_pool: Default::default(),
handlers: Default::default(),
teardown: watch::channel(false).0,
@@ -400,6 +400,8 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::SaveBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::BlameBuffer>)
.add_request_handler(multi_lsp_query)
+ .add_request_handler(lsp_query)
+ .add_message_handler(broadcast_project_message_from_host::<proto::LspQueryResponse>)
.add_request_handler(forward_mutating_project_request::<proto::RestartLanguageServers>)
.add_request_handler(forward_mutating_project_request::<proto::StopLanguageServers>)
.add_request_handler(forward_mutating_project_request::<proto::LinkedEditingRange>)
@@ -474,7 +476,9 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
.add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
- .add_message_handler(update_context);
+ .add_message_handler(update_context)
+ .add_request_handler(forward_mutating_project_request::<proto::ToggleLspLogs>)
+ .add_message_handler(broadcast_project_message_from_host::<proto::LanguageServerLog>);
Arc::new(server)
}
@@ -910,7 +914,9 @@ impl Server {
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
+ // todo(lsp) remove after Zed Stable hits v0.204.x
multi_lsp_query_request=field::Empty,
+ lsp_query_request=field::Empty,
release_channel=field::Empty,
{ TOTAL_DURATION_MS }=field::Empty,
{ PROCESSING_DURATION_MS }=field::Empty,
@@ -1386,9 +1392,7 @@ async fn create_room(
let live_kit = live_kit?;
let user_id = session.user_id().to_string();
- let token = live_kit
- .room_token(&livekit_room, &user_id.to_string())
- .trace_err()?;
+ let token = live_kit.room_token(&livekit_room, &user_id).trace_err()?;
Some(proto::LiveKitConnectionInfo {
server_url: live_kit.url().into(),
@@ -2015,9 +2019,9 @@ async fn join_project(
.unzip();
response.send(proto::JoinProjectResponse {
project_id: project.id.0 as u64,
- worktrees: worktrees.clone(),
+ worktrees,
replica_id: replica_id.0 as u32,
- collaborators: collaborators.clone(),
+ collaborators,
language_servers,
language_server_capabilities,
role: project.role.into(),
@@ -2358,6 +2362,7 @@ where
Ok(())
}
+// todo(lsp) remove after Zed Stable hits v0.204.x
async fn multi_lsp_query(
request: MultiLspQuery,
response: Response<MultiLspQuery>,
@@ -2368,6 +2373,21 @@ async fn multi_lsp_query(
forward_mutating_project_request(request, response, session).await
}
+async fn lsp_query(
+ request: proto::LspQuery,
+ response: Response<proto::LspQuery>,
+ session: MessageContext,
+) -> Result<()> {
+ let (name, should_write) = request.query_name_and_write_permissions();
+ tracing::Span::current().record("lsp_query_request", name);
+ tracing::info!("lsp_query message received");
+ if should_write {
+ forward_mutating_project_request(request, response, session).await
+ } else {
+ forward_read_only_project_request(request, response, session).await
+ }
+}
+
/// Notify other participants that a new buffer has been created
async fn create_buffer_for_peer(
request: proto::CreateBufferForPeer,
@@ -15,13 +15,14 @@ use editor::{
},
};
use fs::Fs;
-use futures::{StreamExt, lock::Mutex};
+use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex};
use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
use indoc::indoc;
use language::{
FakeLspAdapter,
language_settings::{AllLanguageSettings, InlayHintSettings},
};
+use lsp::LSP_REQUEST_TIMEOUT;
use project::{
ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro},
@@ -368,7 +369,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -487,7 +488,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -614,7 +615,7 @@ async fn test_collaborating_with_code_actions(
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.range.start, lsp::Position::new(0, 0));
assert_eq!(params.range.end, lsp::Position::new(0, 0));
@@ -636,7 +637,7 @@ async fn test_collaborating_with_code_actions(
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.range.start, lsp::Position::new(1, 31));
assert_eq!(params.range.end, lsp::Position::new(1, 31));
@@ -648,7 +649,7 @@ async fn test_collaborating_with_code_actions(
changes: Some(
[
(
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(1, 22),
@@ -658,7 +659,7 @@ async fn test_collaborating_with_code_actions(
)],
),
(
- lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(0, 0),
@@ -720,7 +721,7 @@ async fn test_collaborating_with_code_actions(
changes: Some(
[
(
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(1, 22),
@@ -730,7 +731,7 @@ async fn test_collaborating_with_code_actions(
)],
),
(
- lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(0, 0),
@@ -948,14 +949,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
changes: Some(
[
(
- lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
"THREE".to_string(),
)],
),
(
- lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
vec![
lsp::TextEdit::new(
lsp::Range::new(
@@ -1017,6 +1018,211 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
})
}
+#[gpui::test]
+async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
+ let mut server = TestServer::start(cx_a.executor()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+ cx_b.update(editor::init);
+
+ let command_name = "test_command";
+ let capabilities = lsp::ServerCapabilities {
+ code_lens_provider: Some(lsp::CodeLensOptions {
+ resolve_provider: None,
+ }),
+ execute_command_provider: Some(lsp::ExecuteCommandOptions {
+ commands: vec![command_name.to_string()],
+ ..lsp::ExecuteCommandOptions::default()
+ }),
+ ..lsp::ServerCapabilities::default()
+ };
+ client_a.language_registry().add(rust_lang());
+ let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ capabilities: capabilities.clone(),
+ ..FakeLspAdapter::default()
+ },
+ );
+ client_b.language_registry().add(rust_lang());
+ client_b.language_registry().register_fake_lsp_adapter(
+ "Rust",
+ FakeLspAdapter {
+ capabilities,
+ ..FakeLspAdapter::default()
+ },
+ );
+
+ client_a
+ .fs()
+ .insert_tree(
+ path!("/dir"),
+ json!({
+ "one.rs": "const ONE: usize = 1;"
+ }),
+ )
+ .await;
+ let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+ let editor_b = workspace_b
+ .update_in(cx_b, |workspace, window, cx| {
+ workspace.open_path((worktree_id, "one.rs"), None, true, window, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+ let (lsp_store_b, buffer_b) = editor_b.update(cx_b, |editor, cx| {
+ let lsp_store = editor.project().unwrap().read(cx).lsp_store();
+ let buffer = editor.buffer().read(cx).as_singleton().unwrap();
+ (lsp_store, buffer)
+ });
+ let fake_language_server = fake_language_servers.next().await.unwrap();
+ cx_a.run_until_parked();
+ cx_b.run_until_parked();
+
+ let long_request_time = LSP_REQUEST_TIMEOUT / 2;
+ let (request_started_tx, mut request_started_rx) = mpsc::unbounded();
+ let requests_started = Arc::new(AtomicUsize::new(0));
+ let requests_completed = Arc::new(AtomicUsize::new(0));
+ let _lens_requests = fake_language_server
+ .set_request_handler::<lsp::request::CodeLensRequest, _, _>({
+ let request_started_tx = request_started_tx.clone();
+ let requests_started = requests_started.clone();
+ let requests_completed = requests_completed.clone();
+ move |params, cx| {
+ let mut request_started_tx = request_started_tx.clone();
+ let requests_started = requests_started.clone();
+ let requests_completed = requests_completed.clone();
+ async move {
+ assert_eq!(
+ params.text_document.uri.as_str(),
+ uri!("file:///dir/one.rs")
+ );
+ requests_started.fetch_add(1, atomic::Ordering::Release);
+ request_started_tx.send(()).await.unwrap();
+ cx.background_executor().timer(long_request_time).await;
+ let i = requests_completed.fetch_add(1, atomic::Ordering::Release) + 1;
+ Ok(Some(vec![lsp::CodeLens {
+ range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 9)),
+ command: Some(lsp::Command {
+ title: format!("LSP Command {i}"),
+ command: command_name.to_string(),
+ arguments: None,
+ }),
+ data: None,
+ }]))
+ }
+ }
+ });
+
+ // Move cursor to a location, this should trigger the code lens call.
+ editor_b.update_in(cx_b, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([7..7])
+ });
+ });
+ let () = request_started_rx.next().await.unwrap();
+ assert_eq!(
+ requests_started.load(atomic::Ordering::Acquire),
+ 1,
+ "Selection change should have initiated the first request"
+ );
+ assert_eq!(
+ requests_completed.load(atomic::Ordering::Acquire),
+ 0,
+ "Slow requests should be running still"
+ );
+ let _first_task = lsp_store_b.update(cx_b, |lsp_store, cx| {
+ lsp_store
+ .forget_code_lens_task(buffer_b.read(cx).remote_id())
+ .expect("Should have the fetch task started")
+ });
+
+ editor_b.update_in(cx_b, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([1..1])
+ });
+ });
+ let () = request_started_rx.next().await.unwrap();
+ assert_eq!(
+ requests_started.load(atomic::Ordering::Acquire),
+ 2,
+ "Selection change should have initiated the second request"
+ );
+ assert_eq!(
+ requests_completed.load(atomic::Ordering::Acquire),
+ 0,
+ "Slow requests should be running still"
+ );
+ let _second_task = lsp_store_b.update(cx_b, |lsp_store, cx| {
+ lsp_store
+ .forget_code_lens_task(buffer_b.read(cx).remote_id())
+ .expect("Should have the fetch task started for the 2nd time")
+ });
+
+ editor_b.update_in(cx_b, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([2..2])
+ });
+ });
+ let () = request_started_rx.next().await.unwrap();
+ assert_eq!(
+ requests_started.load(atomic::Ordering::Acquire),
+ 3,
+ "Selection change should have initiated the third request"
+ );
+ assert_eq!(
+ requests_completed.load(atomic::Ordering::Acquire),
+ 0,
+ "Slow requests should be running still"
+ );
+
+ _first_task.await.unwrap();
+ _second_task.await.unwrap();
+ cx_b.run_until_parked();
+ assert_eq!(
+ requests_started.load(atomic::Ordering::Acquire),
+ 3,
+ "No selection changes should trigger no more code lens requests"
+ );
+ assert_eq!(
+ requests_completed.load(atomic::Ordering::Acquire),
+ 3,
+ "After enough time, all 3 LSP requests should have been served by the language server"
+ );
+ let resulting_lens_actions = editor_b
+ .update(cx_b, |editor, cx| {
+ let lsp_store = editor.project().unwrap().read(cx).lsp_store();
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.code_lens_actions(&buffer_b, cx)
+ })
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(
+ resulting_lens_actions.len(),
+ 1,
+ "Should have fetched one code lens action, but got: {resulting_lens_actions:?}"
+ );
+ assert_eq!(
+ resulting_lens_actions.first().unwrap().lsp_action.title(),
+ "LSP Command 3",
+ "Only the final code lens action should be in the data"
+ )
+}
+
#[gpui::test(iterations = 10)]
async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
@@ -1368,7 +1574,7 @@ async fn test_on_input_format_from_host_to_guest(
|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -1511,7 +1717,7 @@ async fn test_on_input_format_from_guest_to_host(
.set_request_handler::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -1695,7 +1901,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
let edits_made = task_edits_made.load(atomic::Ordering::Acquire);
Ok(Some(vec![lsp::InlayHint {
@@ -1945,7 +2151,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
let other_hints = task_other_hints.load(atomic::Ordering::Acquire);
let character = if other_hints { 0 } else { 2 };
@@ -2126,7 +2332,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
requests_made.fetch_add(1, atomic::Ordering::Release);
Ok(vec![lsp::ColorInformation {
@@ -2415,11 +2621,11 @@ async fn test_lsp_pull_diagnostics(
let requests_made = closure_diagnostics_pulls_made.clone();
let diagnostics_pulls_result_ids = closure_diagnostics_pulls_result_ids.clone();
async move {
- let message = if lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+ let message = if lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
== params.text_document.uri
{
expected_pull_diagnostic_main_message.to_string()
- } else if lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap()
+ } else if lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap()
== params.text_document.uri
{
expected_pull_diagnostic_lib_message.to_string()
@@ -2511,7 +2717,7 @@ async fn test_lsp_pull_diagnostics(
items: vec![
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
- uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
@@ -2540,7 +2746,7 @@ async fn test_lsp_pull_diagnostics(
),
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
- uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
@@ -2615,7 +2821,7 @@ async fn test_lsp_pull_diagnostics(
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range {
start: lsp::Position {
@@ -2636,7 +2842,7 @@ async fn test_lsp_pull_diagnostics(
);
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range {
start: lsp::Position {
@@ -2664,7 +2870,7 @@ async fn test_lsp_pull_diagnostics(
items: vec![
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
- uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
@@ -2696,7 +2902,7 @@ async fn test_lsp_pull_diagnostics(
),
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
- uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
@@ -2845,7 +3051,7 @@ async fn test_lsp_pull_diagnostics(
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
items: vec![lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
- uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
version: None,
full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
result_id: Some(format!(
@@ -2908,7 +3114,7 @@ async fn test_lsp_pull_diagnostics(
{
assert!(
- diagnostics_pulls_result_ids.lock().await.len() > 0,
+ !diagnostics_pulls_result_ids.lock().await.is_empty(),
"Initial diagnostics pulls should report None at least"
);
assert_eq!(
@@ -3219,16 +3425,16 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
assert_eq!(
entries,
vec![
- Some(blame_entry("1b1b1b", 0..1)),
- Some(blame_entry("0d0d0d", 1..2)),
- Some(blame_entry("3a3a3a", 2..3)),
- Some(blame_entry("4c4c4c", 3..4)),
+ Some((buffer_id_b, blame_entry("1b1b1b", 0..1))),
+ Some((buffer_id_b, blame_entry("0d0d0d", 1..2))),
+ Some((buffer_id_b, blame_entry("3a3a3a", 2..3))),
+ Some((buffer_id_b, blame_entry("4c4c4c", 3..4))),
]
);
blame.update(cx, |blame, _| {
- for (idx, entry) in entries.iter().flatten().enumerate() {
- let details = blame.details_for_entry(entry).unwrap();
+ for (idx, (buffer, entry)) in entries.iter().flatten().enumerate() {
+ let details = blame.details_for_entry(*buffer, entry).unwrap();
assert_eq!(details.message, format!("message for idx-{}", idx));
assert_eq!(
details.permalink.unwrap().to_string(),
@@ -3268,9 +3474,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
entries,
vec![
None,
- Some(blame_entry("0d0d0d", 1..2)),
- Some(blame_entry("3a3a3a", 2..3)),
- Some(blame_entry("4c4c4c", 3..4)),
+ Some((buffer_id_b, blame_entry("0d0d0d", 1..2))),
+ Some((buffer_id_b, blame_entry("3a3a3a", 2..3))),
+ Some((buffer_id_b, blame_entry("4c4c4c", 3..4))),
]
);
});
@@ -3305,8 +3511,8 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
vec![
None,
None,
- Some(blame_entry("3a3a3a", 2..3)),
- Some(blame_entry("4c4c4c", 3..4)),
+ Some((buffer_id_b, blame_entry("3a3a3a", 2..3))),
+ Some((buffer_id_b, blame_entry("4c4c4c", 3..4))),
]
);
});
@@ -3593,7 +3799,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
let abs_path = project_a.read_with(cx_a, |project, cx| {
project
.absolute_path(&project_path, cx)
- .map(|path_buf| Arc::from(path_buf.to_owned()))
+ .map(Arc::from)
.unwrap()
});
@@ -3647,20 +3853,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
let breakpoints_a = editor_a.update(cx_a, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
assert_eq!(1, breakpoints_a.len());
@@ -3680,20 +3882,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
let breakpoints_a = editor_a.update(cx_a, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
assert_eq!(1, breakpoints_a.len());
@@ -3713,20 +3911,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
let breakpoints_a = editor_a.update(cx_a, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
assert_eq!(1, breakpoints_a.len());
@@ -3746,20 +3940,16 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
let breakpoints_a = editor_a.update(cx_a, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
editor
.breakpoint_store()
- .clone()
.unwrap()
.read(cx)
.all_source_breakpoints(cx)
- .clone()
});
assert_eq!(0, breakpoints_a.len());
@@ -3850,7 +4040,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
|params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.position, lsp::Position::new(0, 0));
Ok(Some(ExpandedMacro {
@@ -3885,7 +4075,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
|params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.position,
@@ -970,7 +970,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
// the follow.
workspace_b.update_in(cx_b, |workspace, window, cx| {
workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, window, cx);
+ pane.activate_previous_item(&Default::default(), window, cx);
});
});
executor.run_until_parked();
@@ -1073,7 +1073,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
// Client A cycles through some tabs.
workspace_a.update_in(cx_a, |workspace, window, cx| {
workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, window, cx);
+ pane.activate_previous_item(&Default::default(), window, cx);
});
});
executor.run_until_parked();
@@ -1117,7 +1117,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
workspace_a.update_in(cx_a, |workspace, window, cx| {
workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, window, cx);
+ pane.activate_previous_item(&Default::default(), window, cx);
});
});
executor.run_until_parked();
@@ -1164,7 +1164,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
workspace_a.update_in(cx_a, |workspace, window, cx| {
workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, window, cx);
+ pane.activate_previous_item(&Default::default(), window, cx);
});
});
executor.run_until_parked();
@@ -2098,7 +2098,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
share_workspace(&workspace, cx_a).await.unwrap();
let buffer = workspace.update(cx_a, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
- project.create_local_buffer(&sample_text(26, 5, 'a'), None, cx)
+ project.create_local_buffer(&sample_text(26, 5, 'a'), None, false, cx)
})
});
let multibuffer = cx_a.new(|cx| {
@@ -2506,7 +2506,7 @@ async fn test_propagate_saves_and_fs_changes(
});
let new_buffer_a = project_a
- .update(cx_a, |p, cx| p.create_buffer(cx))
+ .update(cx_a, |p, cx| p.create_buffer(false, cx))
.await
.unwrap();
@@ -3208,7 +3208,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -3237,7 +3237,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -3266,7 +3266,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -3295,7 +3295,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
project_b
@@ -3304,7 +3304,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
project_b
@@ -3313,7 +3313,7 @@ async fn test_fs_operations(
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -4075,7 +4075,7 @@ async fn test_collaborating_with_diagnostics(
.await;
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4095,7 +4095,7 @@ async fn test_collaborating_with_diagnostics(
.unwrap();
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::ERROR),
@@ -4169,7 +4169,7 @@ async fn test_collaborating_with_diagnostics(
// Simulate a language server reporting more errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -4265,7 +4265,7 @@ async fn test_collaborating_with_diagnostics(
// Simulate a language server reporting no errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: Vec::new(),
},
@@ -4372,7 +4372,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
for file_name in file_names {
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
+ uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4838,7 +4838,7 @@ async fn test_definition(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
),
)))
@@ -4850,6 +4850,7 @@ async fn test_definition(
let definitions_1 = project_b
.update(cx_b, |p, cx| p.definitions(&buffer_b, 23, cx))
.await
+ .unwrap()
.unwrap();
cx_b.read(|cx| {
assert_eq!(
@@ -4875,7 +4876,7 @@ async fn test_definition(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
),
)))
@@ -4885,6 +4886,7 @@ async fn test_definition(
let definitions_2 = project_b
.update(cx_b, |p, cx| p.definitions(&buffer_b, 33, cx))
.await
+ .unwrap()
.unwrap();
cx_b.read(|cx| {
assert_eq!(definitions_2.len(), 1);
@@ -4912,7 +4914,7 @@ async fn test_definition(
);
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)),
),
)))
@@ -4922,6 +4924,7 @@ async fn test_definition(
let type_definitions = project_b
.update(cx_b, |p, cx| p.type_definitions(&buffer_b, 7, cx))
.await
+ .unwrap()
.unwrap();
cx_b.read(|cx| {
assert_eq!(
@@ -4970,7 +4973,7 @@ async fn test_references(
"Rust",
FakeLspAdapter {
name: "my-fake-lsp-adapter",
- capabilities: capabilities,
+ capabilities,
..FakeLspAdapter::default()
},
);
@@ -5046,21 +5049,21 @@ async fn test_references(
lsp_response_tx
.unbounded_send(Ok(Some(vec![
lsp::Location {
- uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
},
lsp::Location {
- uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
},
lsp::Location {
- uri: lsp::Url::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
},
])))
.unwrap();
- let references = references.await.unwrap();
+ let references = references.await.unwrap().unwrap();
executor.run_until_parked();
project_b.read_with(cx_b, |project, cx| {
// User is informed that a request is no longer pending.
@@ -5104,7 +5107,7 @@ async fn test_references(
lsp_response_tx
.unbounded_send(Err(anyhow!("can't find references")))
.unwrap();
- assert_eq!(references.await.unwrap(), []);
+ assert_eq!(references.await.unwrap().unwrap(), []);
// User is informed that the request is no longer pending.
executor.run_until_parked();
@@ -5505,7 +5508,8 @@ async fn test_lsp_hover(
// Request hover information as the guest.
let mut hovers = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
- .await;
+ .await
+ .unwrap();
assert_eq!(
hovers.len(),
2,
@@ -5621,7 +5625,7 @@ async fn test_project_symbols(
lsp::SymbolInformation {
name: "TWO".into(),
location: lsp::Location {
- uri: lsp::Url::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
},
kind: lsp::SymbolKind::CONSTANT,
@@ -5733,7 +5737,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path(path!("/root/b.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/root/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
),
)))
@@ -5742,7 +5746,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
let definitions;
let buffer_b2;
- if rng.r#gen() {
+ if rng.random() {
cx_a.run_until_parked();
cx_b.run_until_parked();
definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
@@ -5764,7 +5768,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
}
- let definitions = definitions.await.unwrap();
+ let definitions = definitions.await.unwrap().unwrap();
assert_eq!(
definitions.len(),
1,
@@ -84,7 +84,7 @@ impl RandomizedTest for RandomChannelBufferTest {
}
loop {
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
0..=29 => {
let channel_name = client.channel_store().read_with(cx, |store, cx| {
store.ordered_channels().find_map(|(_, channel)| {
@@ -266,7 +266,7 @@ impl RandomizedTest for RandomChannelBufferTest {
"client {user_id} has different text than client {prev_user_id} for channel {channel_name}",
);
} else {
- prev_text = Some((user_id, text.clone()));
+ prev_text = Some((user_id, text));
}
// Assert that all clients and the server agree about who is present in the
@@ -17,7 +17,7 @@ use project::{
DEFAULT_COMPLETION_CONTEXT, Project, ProjectPath, search::SearchQuery, search::SearchResult,
};
use rand::{
- distributions::{Alphanumeric, DistString},
+ distr::{self, SampleString},
prelude::*,
};
use serde::{Deserialize, Serialize};
@@ -168,19 +168,19 @@ impl RandomizedTest for ProjectCollaborationTest {
) -> ClientOperation {
let call = cx.read(ActiveCall::global);
loop {
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
// Mutate the call
0..=29 => {
// Respond to an incoming call
if call.read_with(cx, |call, _| call.incoming().borrow().is_some()) {
- break if rng.gen_bool(0.7) {
+ break if rng.random_bool(0.7) {
ClientOperation::AcceptIncomingCall
} else {
ClientOperation::RejectIncomingCall
};
}
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
// Invite a contact to the current call
0..=70 => {
let available_contacts =
@@ -212,7 +212,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
// Mutate projects
- 30..=59 => match rng.gen_range(0..100_u32) {
+ 30..=59 => match rng.random_range(0..100_u32) {
// Open a new project
0..=70 => {
// Open a remote project
@@ -270,7 +270,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
// Mutate project worktrees
- 81.. => match rng.gen_range(0..100_u32) {
+ 81.. => match rng.random_range(0..100_u32) {
// Add a worktree to a local project
0..=50 => {
let Some(project) = client.local_projects().choose(rng).cloned() else {
@@ -279,7 +279,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let project_root_name = root_name_for_project(&project, cx);
let mut paths = client.fs().paths(false);
paths.remove(0);
- let new_root_path = if paths.is_empty() || rng.r#gen() {
+ let new_root_path = if paths.is_empty() || rng.random() {
Path::new(path!("/")).join(plan.next_root_dir_name())
} else {
paths.choose(rng).unwrap().clone()
@@ -309,7 +309,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.choose(rng)
});
let Some(worktree) = worktree else { continue };
- let is_dir = rng.r#gen::<bool>();
+ let is_dir = rng.random::<bool>();
let mut full_path =
worktree.read_with(cx, |w, _| PathBuf::from(w.root_name()));
full_path.push(gen_file_name(rng));
@@ -334,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let project_root_name = root_name_for_project(&project, cx);
let is_local = project.read_with(cx, |project, _| project.is_local());
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
// Manipulate an existing buffer
0..=70 => {
let Some(buffer) = client
@@ -349,7 +349,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let full_path = buffer
.read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
// Close the buffer
0..=15 => {
break ClientOperation::CloseBuffer {
@@ -360,7 +360,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
// Save the buffer
16..=29 if buffer.read_with(cx, |b, _| b.is_dirty()) => {
- let detach = rng.gen_bool(0.3);
+ let detach = rng.random_bool(0.3);
break ClientOperation::SaveBuffer {
project_root_name,
is_local,
@@ -383,17 +383,17 @@ impl RandomizedTest for ProjectCollaborationTest {
_ => {
let offset = buffer.read_with(cx, |buffer, _| {
buffer.clip_offset(
- rng.gen_range(0..=buffer.len()),
+ rng.random_range(0..=buffer.len()),
language::Bias::Left,
)
});
- let detach = rng.r#gen();
+ let detach = rng.random();
break ClientOperation::RequestLspDataInBuffer {
project_root_name,
full_path,
offset,
is_local,
- kind: match rng.gen_range(0..5_u32) {
+ kind: match rng.random_range(0..5_u32) {
0 => LspRequestKind::Rename,
1 => LspRequestKind::Highlights,
2 => LspRequestKind::Definition,
@@ -407,8 +407,8 @@ impl RandomizedTest for ProjectCollaborationTest {
}
71..=80 => {
- let query = rng.gen_range('a'..='z').to_string();
- let detach = rng.gen_bool(0.3);
+ let query = rng.random_range('a'..='z').to_string();
+ let detach = rng.random_bool(0.3);
break ClientOperation::SearchProject {
project_root_name,
is_local,
@@ -460,7 +460,7 @@ impl RandomizedTest for ProjectCollaborationTest {
// Create or update a file or directory
96.. => {
- let is_dir = rng.r#gen::<bool>();
+ let is_dir = rng.random::<bool>();
let content;
let mut path;
let dir_paths = client.fs().directories(false);
@@ -470,11 +470,11 @@ impl RandomizedTest for ProjectCollaborationTest {
path = dir_paths.choose(rng).unwrap().clone();
path.push(gen_file_name(rng));
} else {
- content = Alphanumeric.sample_string(rng, 16);
+ content = distr::Alphanumeric.sample_string(rng, 16);
// Create a new file or overwrite an existing file
let file_paths = client.fs().files();
- if file_paths.is_empty() || rng.gen_bool(0.5) {
+ if file_paths.is_empty() || rng.random_bool(0.5) {
path = dir_paths.choose(rng).unwrap().clone();
path.push(gen_file_name(rng));
path.set_extension("rs");
@@ -643,7 +643,7 @@ impl RandomizedTest for ProjectCollaborationTest {
);
let project = project.await?;
- client.dev_server_projects_mut().push(project.clone());
+ client.dev_server_projects_mut().push(project);
}
ClientOperation::CreateWorktreeEntry {
@@ -1090,7 +1090,7 @@ impl RandomizedTest for ProjectCollaborationTest {
move |_, cx| {
let background = cx.background_executor();
let mut rng = background.rng();
- let count = rng.gen_range::<usize, _>(1..3);
+ let count = rng.random_range::<usize, _>(1..3);
let files = fs.as_fake().files();
let files = (0..count)
.map(|_| files.choose(&mut rng).unwrap().clone())
@@ -1101,7 +1101,7 @@ impl RandomizedTest for ProjectCollaborationTest {
files
.into_iter()
.map(|file| lsp::Location {
- uri: lsp::Url::from_file_path(file).unwrap(),
+ uri: lsp::Uri::from_file_path(file).unwrap(),
range: Default::default(),
})
.collect(),
@@ -1117,12 +1117,12 @@ impl RandomizedTest for ProjectCollaborationTest {
let background = cx.background_executor();
let mut rng = background.rng();
- let highlight_count = rng.gen_range(1..=5);
+ let highlight_count = rng.random_range(1..=5);
for _ in 0..highlight_count {
- let start_row = rng.gen_range(0..100);
- let start_column = rng.gen_range(0..100);
- let end_row = rng.gen_range(0..100);
- let end_column = rng.gen_range(0..100);
+ let start_row = rng.random_range(0..100);
+ let start_column = rng.random_range(0..100);
+ let end_row = rng.random_range(0..100);
+ let end_column = rng.random_range(0..100);
let start = PointUtf16::new(start_row, start_column);
let end = PointUtf16::new(end_row, end_column);
let range =
@@ -1219,8 +1219,8 @@ impl RandomizedTest for ProjectCollaborationTest {
guest_project.remote_id(),
);
assert_eq!(
- guest_snapshot.entries(false, 0).collect::<Vec<_>>(),
- host_snapshot.entries(false, 0).collect::<Vec<_>>(),
+ guest_snapshot.entries(false, 0).map(null_out_entry_size).collect::<Vec<_>>(),
+ host_snapshot.entries(false, 0).map(null_out_entry_size).collect::<Vec<_>>(),
"{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}",
client.username,
host_snapshot.abs_path(),
@@ -1248,6 +1248,18 @@ impl RandomizedTest for ProjectCollaborationTest {
);
}
});
+
+ // A hack to work around a hack in
+ // https://github.com/zed-industries/zed/pull/16696 that wasn't
+ // detected until we upgraded the rng crate. This whole crate is
+ // going away with DeltaDB soon, so we hold our nose and
+ // continue.
+ fn null_out_entry_size(entry: &project::Entry) -> project::Entry {
+ project::Entry {
+ size: 0,
+ ..entry.clone()
+ }
+ }
}
let buffers = client.buffers().clone();
@@ -1422,7 +1434,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
.filter(|path| path.starts_with(repo_path))
.collect::<Vec<_>>();
- let count = rng.gen_range(0..=paths.len());
+ let count = rng.random_range(0..=paths.len());
paths.shuffle(rng);
paths.truncate(count);
@@ -1434,13 +1446,13 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
let repo_path = client.fs().directories(false).choose(rng).unwrap().clone();
- match rng.gen_range(0..100_u32) {
+ match rng.random_range(0..100_u32) {
0..=25 => {
let file_paths = generate_file_paths(&repo_path, rng, client);
let contents = file_paths
.into_iter()
- .map(|path| (path, Alphanumeric.sample_string(rng, 16)))
+ .map(|path| (path, distr::Alphanumeric.sample_string(rng, 16)))
.collect();
GitOperation::WriteGitIndex {
@@ -1449,7 +1461,8 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
}
}
26..=63 => {
- let new_branch = (rng.gen_range(0..10) > 3).then(|| Alphanumeric.sample_string(rng, 8));
+ let new_branch =
+ (rng.random_range(0..10) > 3).then(|| distr::Alphanumeric.sample_string(rng, 8));
GitOperation::WriteGitBranch {
repo_path,
@@ -1596,7 +1609,7 @@ fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option<Entity
fn gen_file_name(rng: &mut StdRng) -> String {
let mut name = String::new();
for _ in 0..10 {
- let letter = rng.gen_range('a'..='z');
+ let letter = rng.random_range('a'..='z');
name.push(letter);
}
name
@@ -1604,7 +1617,7 @@ fn gen_file_name(rng: &mut StdRng) -> String {
fn gen_status(rng: &mut StdRng) -> FileStatus {
fn gen_tracked_status(rng: &mut StdRng) -> TrackedStatus {
- match rng.gen_range(0..3) {
+ match rng.random_range(0..3) {
0 => TrackedStatus {
index_status: StatusCode::Unmodified,
worktree_status: StatusCode::Unmodified,
@@ -1626,7 +1639,7 @@ fn gen_status(rng: &mut StdRng) -> FileStatus {
}
fn gen_unmerged_status_code(rng: &mut StdRng) -> UnmergedStatusCode {
- match rng.gen_range(0..3) {
+ match rng.random_range(0..3) {
0 => UnmergedStatusCode::Updated,
1 => UnmergedStatusCode::Added,
2 => UnmergedStatusCode::Deleted,
@@ -1634,7 +1647,7 @@ fn gen_status(rng: &mut StdRng) -> FileStatus {
}
}
- match rng.gen_range(0..2) {
+ match rng.random_range(0..2) {
0 => FileStatus::Unmerged(UnmergedStatus {
first_head: gen_unmerged_status_code(rng),
second_head: gen_unmerged_status_code(rng),
@@ -208,9 +208,9 @@ pub fn save_randomized_test_plan() {
impl<T: RandomizedTest> TestPlan<T> {
pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc<Mutex<Self>> {
- let allow_server_restarts = rng.gen_bool(0.7);
- let allow_client_reconnection = rng.gen_bool(0.7);
- let allow_client_disconnection = rng.gen_bool(0.1);
+ let allow_server_restarts = rng.random_bool(0.7);
+ let allow_client_reconnection = rng.random_bool(0.7);
+ let allow_client_disconnection = rng.random_bool(0.1);
let mut users = Vec::new();
for ix in 0..max_peers() {
@@ -407,7 +407,7 @@ impl<T: RandomizedTest> TestPlan<T> {
}
Some(loop {
- break match self.rng.gen_range(0..100) {
+ break match self.rng.random_range(0..100) {
0..=29 if clients.len() < self.users.len() => {
let user = self
.users
@@ -421,13 +421,13 @@ impl<T: RandomizedTest> TestPlan<T> {
}
}
30..=34 if clients.len() > 1 && self.allow_client_disconnection => {
- let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
+ let (client, cx) = &clients[self.rng.random_range(0..clients.len())];
let user_id = client.current_user_id(cx);
self.operation_ix += 1;
ServerOperation::RemoveConnection { user_id }
}
35..=39 if clients.len() > 1 && self.allow_client_reconnection => {
- let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
+ let (client, cx) = &clients[self.rng.random_range(0..clients.len())];
let user_id = client.current_user_id(cx);
self.operation_ix += 1;
ServerOperation::BounceConnection { user_id }
@@ -439,12 +439,12 @@ impl<T: RandomizedTest> TestPlan<T> {
_ if !clients.is_empty() => {
let count = self
.rng
- .gen_range(1..10)
+ .random_range(1..10)
.min(self.max_operations - self.operation_ix);
let batch_id = util::post_inc(&mut self.next_batch_id);
let mut user_ids = (0..count)
.map(|_| {
- let ix = self.rng.gen_range(0..clients.len());
+ let ix = self.rng.random_range(0..clients.len());
let (client, cx) = &clients[ix];
client.current_user_id(cx)
})
@@ -453,7 +453,7 @@ impl<T: RandomizedTest> TestPlan<T> {
ServerOperation::MutateClients {
user_ids,
batch_id,
- quiesce: self.rng.gen_bool(0.7),
+ quiesce: self.rng.random_bool(0.7),
}
}
_ => continue,
@@ -26,7 +26,7 @@ use project::{
debugger::session::ThreadId,
lsp_store::{FormatTrigger, LspFormatTarget},
};
-use remote::SshRemoteClient;
+use remote::RemoteClient;
use remote_server::{HeadlessAppState, HeadlessProject};
use rpc::proto;
use serde_json::json;
@@ -59,7 +59,7 @@ async fn test_sharing_an_ssh_remote_project(
.await;
// Set up project on remote FS
- let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
@@ -101,7 +101,7 @@ async fn test_sharing_an_ssh_remote_project(
)
});
- let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let client_ssh = RemoteClient::fake_client(opts, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project(path!("/code/project1"), client_ssh, cx_a)
.await;
@@ -235,7 +235,7 @@ async fn test_ssh_collaboration_git_branches(
.await;
// Set up project on remote FS
- let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree("/project", serde_json::json!({ ".git":{} }))
@@ -268,7 +268,7 @@ async fn test_ssh_collaboration_git_branches(
)
});
- let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let client_ssh = RemoteClient::fake_client(opts, cx_a).await;
let (project_a, _) = client_a
.build_ssh_project("/project", client_ssh, cx_a)
.await;
@@ -420,7 +420,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
- let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
let buffer_text = "let one = \"two\"";
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
@@ -473,7 +473,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
)
});
- let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let client_ssh = RemoteClient::fake_client(opts, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project(path!("/project"), client_ssh, cx_a)
.await;
@@ -602,7 +602,7 @@ async fn test_remote_server_debugger(
release_channel::init(SemanticVersion::default(), cx);
dap_adapters::init(cx);
});
- let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
@@ -633,7 +633,7 @@ async fn test_remote_server_debugger(
)
});
- let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let client_ssh = RemoteClient::fake_client(opts, cx_a).await;
let mut server = TestServer::start(server_cx.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
cx_a.update(|cx| {
@@ -711,7 +711,7 @@ async fn test_slow_adapter_startup_retries(
release_channel::init(SemanticVersion::default(), cx);
dap_adapters::init(cx);
});
- let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
+ let (opts, server_ssh) = RemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
@@ -742,7 +742,7 @@ async fn test_slow_adapter_startup_retries(
)
});
- let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
+ let client_ssh = RemoteClient::fake_client(opts, cx_a).await;
let mut server = TestServer::start(server_cx.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
cx_a.update(|cx| {
@@ -26,7 +26,7 @@ use node_runtime::NodeRuntime;
use notifications::NotificationStore;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
-use remote::SshRemoteClient;
+use remote::RemoteClient;
use rpc::{
RECEIVE_TIMEOUT,
proto::{self, ChannelRole},
@@ -370,8 +370,8 @@ impl TestServer {
let client = TestClient {
app_state,
username: name.to_string(),
- channel_store: cx.read(ChannelStore::global).clone(),
- notification_store: cx.read(NotificationStore::global).clone(),
+ channel_store: cx.read(ChannelStore::global),
+ notification_store: cx.read(NotificationStore::global),
state: Default::default(),
};
client.wait_for_current_user(cx).await;
@@ -765,11 +765,11 @@ impl TestClient {
pub async fn build_ssh_project(
&self,
root_path: impl AsRef<Path>,
- ssh: Entity<SshRemoteClient>,
+ ssh: Entity<RemoteClient>,
cx: &mut TestAppContext,
) -> (Entity<Project>, WorktreeId) {
let project = cx.update(|cx| {
- Project::ssh(
+ Project::remote(
ssh,
self.client().clone(),
self.app_state.node_runtime.clone(),
@@ -897,7 +897,7 @@ impl TestClient {
let window = cx.update(|cx| cx.active_window().unwrap().downcast::<Workspace>().unwrap());
let entity = window.root(cx).unwrap();
- let cx = VisualTestContext::from_window(*window.deref(), cx).as_mut();
+ let cx = VisualTestContext::from_window(*window.deref(), cx).into_mut();
// it might be nice to try and cleanup these at the end of each test.
(entity, cx)
}
@@ -66,7 +66,7 @@ impl ChannelView {
channel_id,
link_position,
pane.clone(),
- workspace.clone(),
+ workspace,
window,
cx,
);
@@ -1038,7 +1038,7 @@ impl Render for ChatPanel {
.cloned();
el.when_some(reply_message, |el, reply_message| {
- let user_being_replied_to = reply_message.sender.clone();
+ let user_being_replied_to = reply_message.sender;
el.child(
h_flex()
@@ -12,7 +12,9 @@ use language::{
Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry, ToOffset,
language_settings::SoftWrap,
};
-use project::{Completion, CompletionResponse, CompletionSource, search::SearchQuery};
+use project::{
+ Completion, CompletionDisplayOptions, CompletionResponse, CompletionSource, search::SearchQuery,
+};
use settings::Settings;
use std::{
ops::Range,
@@ -275,6 +277,7 @@ impl MessageEditor {
Task::ready(Ok(vec![CompletionResponse {
completions: Vec::new(),
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}]))
}
@@ -317,6 +320,7 @@ impl MessageEditor {
CompletionResponse {
is_incomplete: completions.len() >= LIMIT,
+ display_options: CompletionDisplayOptions::default(),
completions,
}
}
@@ -397,11 +401,10 @@ impl MessageEditor {
) -> Option<(Anchor, String, &'static [StringMatchCandidate])> {
static EMOJI_FUZZY_MATCH_CANDIDATES: LazyLock<Vec<StringMatchCandidate>> =
LazyLock::new(|| {
- let emojis = emojis::iter()
+ emojis::iter()
.flat_map(|s| s.shortcodes())
.map(|emoji| StringMatchCandidate::new(0, emoji))
- .collect::<Vec<_>>();
- emojis
+ .collect::<Vec<_>>()
});
let end_offset = end_anchor.to_offset(buffer.read(cx));
@@ -1821,10 +1821,10 @@ impl CollabPanel {
}
fn select_channel_editor(&mut self) {
- self.selection = self.entries.iter().position(|entry| match entry {
- ListEntry::ChannelEditor { .. } => true,
- _ => false,
- });
+ self.selection = self
+ .entries
+ .iter()
+ .position(|entry| matches!(entry, ListEntry::ChannelEditor { .. }));
}
fn new_subchannel(
@@ -2507,7 +2507,7 @@ impl CollabPanel {
let button = match section {
Section::ActiveCall => channel_link.map(|channel_link| {
- let channel_link_copy = channel_link.clone();
+ let channel_link_copy = channel_link;
IconButton::new("channel-link", IconName::Copy)
.icon_size(IconSize::Small)
.size(ButtonSize::None)
@@ -2691,7 +2691,7 @@ impl CollabPanel {
h_flex()
.w_full()
.justify_between()
- .child(Label::new(github_login.clone()))
+ .child(Label::new(github_login))
.child(h_flex().children(controls)),
)
.start_slot(Avatar::new(user.avatar_uri.clone()))
@@ -2905,6 +2905,8 @@ impl CollabPanel {
h_flex().absolute().right(rems(0.)).h_full().child(
h_flex()
.h_full()
+ .bg(cx.theme().colors().background)
+ .rounded_l_sm()
.gap_1()
.px_1()
.child(
@@ -2920,8 +2922,7 @@ impl CollabPanel {
.on_click(cx.listener(move |this, _, window, cx| {
this.join_channel_chat(channel_id, window, cx)
}))
- .tooltip(Tooltip::text("Open channel chat"))
- .visible_on_hover(""),
+ .tooltip(Tooltip::text("Open channel chat")),
)
.child(
IconButton::new("channel_notes", IconName::Reader)
@@ -2936,9 +2937,9 @@ impl CollabPanel {
.on_click(cx.listener(move |this, _, window, cx| {
this.open_channel_notes(channel_id, window, cx)
}))
- .tooltip(Tooltip::text("Open channel notes"))
- .visible_on_hover(""),
- ),
+ .tooltip(Tooltip::text("Open channel notes")),
+ )
+ .visible_on_hover(""),
),
)
.tooltip({
@@ -3046,7 +3047,7 @@ impl Render for CollabPanel {
.on_action(cx.listener(CollabPanel::move_channel_down))
.track_focus(&self.focus_handle)
.size_full()
- .child(if !self.client.status().borrow().is_connected() {
+ .child(if !self.client.status().borrow().is_or_was_connected() {
self.render_signed_out(cx)
} else {
self.render_signed_in(window, cx)
@@ -3125,7 +3126,7 @@ impl Panel for CollabPanel {
impl Focusable for CollabPanel {
fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
- self.filter_editor.focus_handle(cx).clone()
+ self.filter_editor.focus_handle(cx)
}
}
@@ -66,5 +66,7 @@ fn notification_window_options(
app_id: Some(app_id.to_owned()),
window_min_size: None,
window_decorations: Some(WindowDecorations::Client),
+ tabbing_identifier: None,
+ ..Default::default()
}
}
@@ -289,7 +289,7 @@ impl NotificationPanel {
.gap_1()
.size_full()
.overflow_hidden()
- .child(Label::new(text.clone()))
+ .child(Label::new(text))
.child(
h_flex()
.child(
@@ -1,7 +1,7 @@
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use workspace::dock::DockPosition;
#[derive(Deserialize, Debug)]
@@ -27,7 +27,8 @@ pub struct ChatPanelSettings {
pub default_width: Pixels,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "chat_panel")]
pub struct ChatPanelSettingsContent {
/// When to show the panel button in the status bar.
///
@@ -43,6 +44,23 @@ pub struct ChatPanelSettingsContent {
pub default_width: Option<f32>,
}
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "collaboration_panel")]
+pub struct PanelSettingsContent {
+ /// Whether to show the panel button in the status bar.
+ ///
+ /// Default: true
+ pub button: Option<bool>,
+ /// Where to dock the panel.
+ ///
+ /// Default: left
+ pub dock: Option<DockPosition>,
+ /// Default width of the panel in pixels.
+ ///
+ /// Default: 240
+ pub default_width: Option<f32>,
+}
+
#[derive(Deserialize, Debug)]
pub struct NotificationPanelSettings {
pub button: bool,
@@ -50,23 +68,25 @@ pub struct NotificationPanelSettings {
pub default_width: Pixels,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-pub struct PanelSettingsContent {
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "notification_panel")]
+pub struct NotificationPanelSettingsContent {
/// Whether to show the panel button in the status bar.
///
/// Default: true
pub button: Option<bool>,
/// Where to dock the panel.
///
- /// Default: left
+ /// Default: right
pub dock: Option<DockPosition>,
/// Default width of the panel in pixels.
///
- /// Default: 240
+ /// Default: 300
pub default_width: Option<f32>,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "message_editor")]
pub struct MessageEditorSettings {
/// Whether to automatically replace emoji shortcodes with emoji characters.
/// For example: typing `:wave:` gets replaced with `👋`.
@@ -76,8 +96,6 @@ pub struct MessageEditorSettings {
}
impl Settings for CollaborationPanelSettings {
- const KEY: Option<&'static str> = Some("collaboration_panel");
-
type FileContent = PanelSettingsContent;
fn load(
@@ -91,8 +109,6 @@ impl Settings for CollaborationPanelSettings {
}
impl Settings for ChatPanelSettings {
- const KEY: Option<&'static str> = Some("chat_panel");
-
type FileContent = ChatPanelSettingsContent;
fn load(
@@ -106,9 +122,7 @@ impl Settings for ChatPanelSettings {
}
impl Settings for NotificationPanelSettings {
- const KEY: Option<&'static str> = Some("notification_panel");
-
- type FileContent = PanelSettingsContent;
+ type FileContent = NotificationPanelSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
@@ -121,8 +135,6 @@ impl Settings for NotificationPanelSettings {
}
impl Settings for MessageEditorSettings {
- const KEY: Option<&'static str> = Some("message_editor");
-
type FileContent = MessageEditorSettings;
fn load(
@@ -206,7 +206,7 @@ impl CommandPaletteDelegate {
if parse_zed_link(&query, cx).is_some() {
intercept_results = vec![CommandInterceptResult {
action: OpenZedUrl { url: query.clone() }.boxed_clone(),
- string: query.clone(),
+ string: query,
positions: vec![],
}]
}
@@ -1,7 +1,10 @@
use anyhow::Result;
use db::{
- define_connection, query,
- sqlez::{bindable::Column, statement::Statement},
+ query,
+ sqlez::{
+ bindable::Column, domain::Domain, statement::Statement,
+ thread_safe_connection::ThreadSafeConnection,
+ },
sqlez_macros::sql,
};
use serde::{Deserialize, Serialize};
@@ -50,8 +53,11 @@ impl Column for SerializedCommandInvocation {
}
}
-define_connection!(pub static ref COMMAND_PALETTE_HISTORY: CommandPaletteDB<()> =
- &[sql!(
+pub struct CommandPaletteDB(ThreadSafeConnection);
+
+impl Domain for CommandPaletteDB {
+ const NAME: &str = stringify!(CommandPaletteDB);
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE IF NOT EXISTS command_invocations(
id INTEGER PRIMARY KEY AUTOINCREMENT,
command_name TEXT NOT NULL,
@@ -59,7 +65,9 @@ define_connection!(pub static ref COMMAND_PALETTE_HISTORY: CommandPaletteDB<()>
last_invoked INTEGER DEFAULT (unixepoch()) NOT NULL
) STRICT;
)];
-);
+}
+
+db::static_connection!(COMMAND_PALETTE_HISTORY, CommandPaletteDB, []);
impl CommandPaletteDB {
pub async fn write_command_invocation(
@@ -76,7 +76,7 @@ impl CommandPaletteFilter {
}
/// Hides all actions with the given types.
- pub fn hide_action_types(&mut self, action_types: &[TypeId]) {
+ pub fn hide_action_types<'a>(&mut self, action_types: impl IntoIterator<Item = &'a TypeId>) {
for action_type in action_types {
self.hidden_action_types.insert(*action_type);
self.shown_action_types.remove(action_type);
@@ -84,7 +84,7 @@ impl CommandPaletteFilter {
}
/// Shows all actions with the given types.
- pub fn show_action_types<'a>(&mut self, action_types: impl Iterator<Item = &'a TypeId>) {
+ pub fn show_action_types<'a>(&mut self, action_types: impl IntoIterator<Item = &'a TypeId>) {
for action_type in action_types {
self.shown_action_types.insert(*action_type);
self.hidden_action_types.remove(action_type);
@@ -20,5 +20,8 @@ strum.workspace = true
theme.workspace = true
workspace-hack.workspace = true
+[dev-dependencies]
+documented.workspace = true
+
[features]
default = []
@@ -227,6 +227,8 @@ pub trait Component {
/// Example:
///
/// ```
+ /// use documented::Documented;
+ ///
/// /// This is a doc comment.
/// #[derive(Documented)]
/// struct MyComponent;
@@ -42,7 +42,7 @@ impl RenderOnce for ComponentExample {
div()
.text_size(rems(0.875))
.text_color(cx.theme().colors().text_muted)
- .child(description.clone()),
+ .child(description),
)
}),
)
@@ -25,7 +25,7 @@ use crate::{
};
const JSON_RPC_VERSION: &str = "2.0";
-const REQUEST_TIMEOUT: Duration = Duration::from_secs(60);
+const DEFAULT_REQUEST_TIMEOUT: Duration = Duration::from_secs(60);
// Standard JSON-RPC error codes
pub const PARSE_ERROR: i32 = -32700;
@@ -60,6 +60,7 @@ pub(crate) struct Client {
executor: BackgroundExecutor,
#[allow(dead_code)]
transport: Arc<dyn Transport>,
+ request_timeout: Option<Duration>,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -67,11 +68,7 @@ pub(crate) struct Client {
pub(crate) struct ContextServerId(pub Arc<str>);
fn is_null_value<T: Serialize>(value: &T) -> bool {
- if let Ok(Value::Null) = serde_json::to_value(value) {
- true
- } else {
- false
- }
+ matches!(serde_json::to_value(value), Ok(Value::Null))
}
#[derive(Serialize, Deserialize)]
@@ -147,6 +144,7 @@ pub struct ModelContextServerBinary {
pub executable: PathBuf,
pub args: Vec<String>,
pub env: Option<HashMap<String, String>>,
+ pub timeout: Option<u64>,
}
impl Client {
@@ -161,7 +159,7 @@ impl Client {
working_directory: &Option<PathBuf>,
cx: AsyncApp,
) -> Result<Self> {
- log::info!(
+ log::debug!(
"starting context server (executable={:?}, args={:?})",
binary.executable,
&binary.args
@@ -173,8 +171,9 @@ impl Client {
.map(|name| name.to_string_lossy().to_string())
.unwrap_or_else(String::new);
+ let timeout = binary.timeout.map(Duration::from_millis);
let transport = Arc::new(StdioTransport::new(binary, working_directory, &cx)?);
- Self::new(server_id, server_name.into(), transport, cx)
+ Self::new(server_id, server_name.into(), transport, timeout, cx)
}
/// Creates a new Client instance for a context server.
@@ -182,6 +181,7 @@ impl Client {
server_id: ContextServerId,
server_name: Arc<str>,
transport: Arc<dyn Transport>,
+ request_timeout: Option<Duration>,
cx: AsyncApp,
) -> Result<Self> {
let (outbound_tx, outbound_rx) = channel::unbounded::<String>();
@@ -241,6 +241,7 @@ impl Client {
io_tasks: Mutex::new(Some((input_task, output_task))),
output_done_rx: Mutex::new(Some(output_done_rx)),
transport,
+ request_timeout,
})
}
@@ -295,7 +296,7 @@ impl Client {
/// Continuously reads and logs any error messages from the server.
async fn handle_err(transport: Arc<dyn Transport>) -> anyhow::Result<()> {
while let Some(err) = transport.receive_err().next().await {
- log::warn!("context server stderr: {}", err.trim());
+ log::debug!("context server stderr: {}", err.trim());
}
Ok(())
@@ -331,8 +332,13 @@ impl Client {
method: &str,
params: impl Serialize,
) -> Result<T> {
- self.request_with(method, params, None, Some(REQUEST_TIMEOUT))
- .await
+ self.request_with(
+ method,
+ params,
+ None,
+ self.request_timeout.or(Some(DEFAULT_REQUEST_TIMEOUT)),
+ )
+ .await
}
pub async fn request_with<T: DeserializeOwned>(
@@ -34,6 +34,8 @@ pub struct ContextServerCommand {
pub path: PathBuf,
pub args: Vec<String>,
pub env: Option<HashMap<String, String>>,
+ /// Timeout for tool calls in milliseconds. Defaults to 60000 (60 seconds) if not specified.
+ pub timeout: Option<u64>,
}
impl std::fmt::Debug for ContextServerCommand {
@@ -123,6 +125,7 @@ impl ContextServer {
executable: Path::new(&command.path).to_path_buf(),
args: command.args.clone(),
env: command.env.clone(),
+ timeout: command.timeout,
},
working_directory,
cx.clone(),
@@ -131,13 +134,14 @@ impl ContextServer {
client::ContextServerId(self.id.0.clone()),
self.id().0,
transport.clone(),
+ None,
cx.clone(),
)?,
})
}
async fn initialize(&self, client: Client) -> Result<()> {
- log::info!("starting context server {}", self.id);
+ log::debug!("starting context server {}", self.id);
let protocol = crate::protocol::ModelContextProtocol::new(client);
let client_info = types::Implementation {
name: "Zed".to_string(),
@@ -14,6 +14,7 @@ use serde::de::DeserializeOwned;
use serde_json::{json, value::RawValue};
use smol::stream::StreamExt;
use std::{
+ any::TypeId,
cell::RefCell,
path::{Path, PathBuf},
rc::Rc,
@@ -77,7 +78,7 @@ impl McpServer {
socket_path,
_server_task: server_task,
tools,
- handlers: handlers,
+ handlers,
})
})
}
@@ -87,23 +88,30 @@ impl McpServer {
settings.inline_subschemas = true;
let mut generator = settings.into_generator();
- let output_schema = generator.root_schema_for::<T::Output>();
- let unit_schema = generator.root_schema_for::<T::Output>();
+ let input_schema = generator.root_schema_for::<T::Input>();
+
+ let description = input_schema
+ .get("description")
+ .and_then(|desc| desc.as_str())
+ .map(|desc| desc.to_string());
+ debug_assert!(
+ description.is_some(),
+ "Input schema struct must include a doc comment for the tool description"
+ );
let registered_tool = RegisteredTool {
tool: Tool {
name: T::NAME.into(),
- description: Some(tool.description().into()),
- input_schema: generator.root_schema_for::<T::Input>().into(),
- output_schema: if output_schema == unit_schema {
+ description,
+ input_schema: input_schema.into(),
+ output_schema: if TypeId::of::<T::Output>() == TypeId::of::<()>() {
None
} else {
- Some(output_schema.into())
+ Some(generator.root_schema_for::<T::Output>().into())
},
annotations: Some(tool.annotations()),
},
handler: Box::new({
- let tool = tool.clone();
move |input_value, cx| {
let input = match input_value {
Some(input) => serde_json::from_value(input),
@@ -399,8 +407,6 @@ pub trait McpServerTool {
const NAME: &'static str;
- fn description(&self) -> &'static str;
-
fn annotations(&self) -> ToolAnnotations {
ToolAnnotations {
title: None,
@@ -418,6 +424,7 @@ pub trait McpServerTool {
) -> impl Future<Output = Result<ToolResponse<Self::Output>>>;
}
+#[derive(Debug)]
pub struct ToolResponse<T> {
pub content: Vec<ToolResponseContent>,
pub structured_content: T,
@@ -1,6 +1,6 @@
use anyhow::Context as _;
use collections::HashMap;
-use futures::{Stream, StreamExt as _, lock::Mutex};
+use futures::{FutureExt, Stream, StreamExt as _, future::BoxFuture, lock::Mutex};
use gpui::BackgroundExecutor;
use std::{pin::Pin, sync::Arc};
@@ -14,9 +14,12 @@ pub fn create_fake_transport(
executor: BackgroundExecutor,
) -> FakeTransport {
let name = name.into();
- FakeTransport::new(executor).on_request::<crate::types::requests::Initialize>(move |_params| {
- create_initialize_response(name.clone())
- })
+ FakeTransport::new(executor).on_request::<crate::types::requests::Initialize, _>(
+ move |_params| {
+ let name = name.clone();
+ async move { create_initialize_response(name.clone()) }
+ },
+ )
}
fn create_initialize_response(server_name: String) -> InitializeResponse {
@@ -32,8 +35,10 @@ fn create_initialize_response(server_name: String) -> InitializeResponse {
}
pub struct FakeTransport {
- request_handlers:
- HashMap<&'static str, Arc<dyn Fn(serde_json::Value) -> serde_json::Value + Send + Sync>>,
+ request_handlers: HashMap<
+ &'static str,
+ Arc<dyn Send + Sync + Fn(serde_json::Value) -> BoxFuture<'static, serde_json::Value>>,
+ >,
tx: futures::channel::mpsc::UnboundedSender<String>,
rx: Arc<Mutex<futures::channel::mpsc::UnboundedReceiver<String>>>,
executor: BackgroundExecutor,
@@ -50,18 +55,25 @@ impl FakeTransport {
}
}
- pub fn on_request<T: crate::types::Request>(
+ pub fn on_request<T, Fut>(
mut self,
- handler: impl Fn(T::Params) -> T::Response + Send + Sync + 'static,
- ) -> Self {
+ handler: impl 'static + Send + Sync + Fn(T::Params) -> Fut,
+ ) -> Self
+ where
+ T: crate::types::Request,
+ Fut: 'static + Send + Future<Output = T::Response>,
+ {
self.request_handlers.insert(
T::METHOD,
Arc::new(move |value| {
- let params = value.get("params").expect("Missing parameters").clone();
+ let params = value
+ .get("params")
+ .cloned()
+ .unwrap_or(serde_json::Value::Null);
let params: T::Params =
serde_json::from_value(params).expect("Invalid parameters received");
let response = handler(params);
- serde_json::to_value(response).unwrap()
+ async move { serde_json::to_value(response.await).unwrap() }.boxed()
}),
);
self
@@ -77,7 +89,7 @@ impl Transport for FakeTransport {
if let Some(method) = msg.get("method") {
let method = method.as_str().expect("Invalid method received");
if let Some(handler) = self.request_handlers.get(method) {
- let payload = handler(msg);
+ let payload = handler(msg).await;
let response = serde_json::json!({
"jsonrpc": "2.0",
"id": id,
@@ -711,6 +711,16 @@ pub enum ToolResponseContent {
Resource { resource: ResourceContents },
}
+impl ToolResponseContent {
+ pub fn text(&self) -> Option<&str> {
+ if let ToolResponseContent::Text { text } = self {
+ Some(text)
+ } else {
+ None
+ }
+ }
+}
+
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListToolsResponse {
@@ -81,10 +81,7 @@ pub fn init(
};
copilot_chat::init(fs.clone(), http.clone(), configuration, cx);
- let copilot = cx.new({
- let node_runtime = node_runtime.clone();
- move |cx| Copilot::start(new_server_id, fs, node_runtime, cx)
- });
+ let copilot = cx.new(move |cx| Copilot::start(new_server_id, fs, node_runtime, cx));
Copilot::set_global(copilot.clone(), cx);
cx.observe(&copilot, |copilot, cx| {
copilot.update(cx, |copilot, cx| copilot.update_action_visibilities(cx));
@@ -129,7 +126,7 @@ impl CopilotServer {
fn as_authenticated(&mut self) -> Result<&mut RunningCopilotServer> {
let server = self.as_running()?;
anyhow::ensure!(
- matches!(server.sign_in_status, SignInStatus::Authorized { .. }),
+ matches!(server.sign_in_status, SignInStatus::Authorized),
"must sign in before using copilot"
);
Ok(server)
@@ -200,7 +197,7 @@ impl Status {
}
struct RegisteredBuffer {
- uri: lsp::Url,
+ uri: lsp::Uri,
language_id: String,
snapshot: BufferSnapshot,
snapshot_version: i32,
@@ -581,12 +578,12 @@ impl Copilot {
pub(crate) fn sign_in(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
if let CopilotServer::Running(server) = &mut self.server {
let task = match &server.sign_in_status {
- SignInStatus::Authorized { .. } => Task::ready(Ok(())).shared(),
+ SignInStatus::Authorized => Task::ready(Ok(())).shared(),
SignInStatus::SigningIn { task, .. } => {
cx.notify();
task.clone()
}
- SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized { .. } => {
+ SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized => {
let lsp = server.lsp.clone();
let task = cx
.spawn(async move |this, cx| {
@@ -730,7 +727,7 @@ impl Copilot {
..
}) = &mut self.server
{
- if !matches!(status, SignInStatus::Authorized { .. }) {
+ if !matches!(status, SignInStatus::Authorized) {
return;
}
@@ -1012,8 +1009,8 @@ impl Copilot {
CopilotServer::Error(error) => Status::Error(error.clone()),
CopilotServer::Running(RunningCopilotServer { sign_in_status, .. }) => {
match sign_in_status {
- SignInStatus::Authorized { .. } => Status::Authorized,
- SignInStatus::Unauthorized { .. } => Status::Unauthorized,
+ SignInStatus::Authorized => Status::Authorized,
+ SignInStatus::Unauthorized => Status::Unauthorized,
SignInStatus::SigningIn { prompt, .. } => Status::SigningIn {
prompt: prompt.clone(),
},
@@ -1098,7 +1095,7 @@ impl Copilot {
_ => {
filter.hide_action_types(&signed_in_actions);
filter.hide_action_types(&auth_actions);
- filter.show_action_types(no_auth_actions.iter());
+ filter.show_action_types(&no_auth_actions);
}
}
}
@@ -1111,9 +1108,9 @@ fn id_for_language(language: Option<&Arc<Language>>) -> String {
.unwrap_or_else(|| "plaintext".to_string())
}
-fn uri_for_buffer(buffer: &Entity<Buffer>, cx: &App) -> Result<lsp::Url, ()> {
+fn uri_for_buffer(buffer: &Entity<Buffer>, cx: &App) -> Result<lsp::Uri, ()> {
if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
- lsp::Url::from_file_path(file.abs_path(cx))
+ lsp::Uri::from_file_path(file.abs_path(cx))
} else {
format!("buffer://{}", buffer.entity_id())
.parse()
@@ -1204,7 +1201,7 @@ mod tests {
let (copilot, mut lsp) = Copilot::fake(cx);
let buffer_1 = cx.new(|cx| Buffer::local("Hello", cx));
- let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
+ let buffer_1_uri: lsp::Uri = format!("buffer://{}", buffer_1.entity_id().as_u64())
.parse()
.unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
@@ -1222,7 +1219,7 @@ mod tests {
);
let buffer_2 = cx.new(|cx| Buffer::local("Goodbye", cx));
- let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
+ let buffer_2_uri: lsp::Uri = format!("buffer://{}", buffer_2.entity_id().as_u64())
.parse()
.unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
@@ -1273,7 +1270,7 @@ mod tests {
text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
}
);
- let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap();
+ let buffer_1_uri = lsp::Uri::from_file_path(path!("/root/child/buffer-1")).unwrap();
assert_eq!(
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
.await,
@@ -62,12 +62,6 @@ impl CopilotChatConfiguration {
}
}
-// Copilot's base model; defined by Microsoft in premium requests table
-// This will be moved to the front of the Copilot model list, and will be used for
-// 'fast' requests (e.g. title generation)
-// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
-const DEFAULT_MODEL_ID: &str = "gpt-4.1";
-
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
@@ -101,22 +95,39 @@ where
Ok(models)
}
-#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct Model {
+ billing: ModelBilling,
capabilities: ModelCapabilities,
id: String,
name: String,
policy: Option<ModelPolicy>,
vendor: ModelVendor,
+ is_chat_default: bool,
+ // The model with this value true is selected by VSCode copilot if a premium request limit is
+ // reached. Zed does not currently implement this behaviour
+ is_chat_fallback: bool,
model_picker_enabled: bool,
}
+#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
+struct ModelBilling {
+ is_premium: bool,
+ multiplier: f64,
+ // List of plans a model is restricted to
+ // Field is not present if a model is available for all plans
+ #[serde(default)]
+ restricted_to: Option<Vec<String>>,
+}
+
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
family: String,
#[serde(default)]
limits: ModelLimits,
supports: ModelSupportedFeatures,
+ #[serde(rename = "type")]
+ model_type: String,
}
#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
@@ -153,6 +164,8 @@ pub enum ModelVendor {
OpenAI,
Google,
Anthropic,
+ #[serde(rename = "xAI")]
+ XAI,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
@@ -602,6 +615,7 @@ async fn get_models(
.into_iter()
.filter(|model| {
model.model_picker_enabled
+ && model.capabilities.model_type.as_str() == "chat"
&& model
.policy
.as_ref()
@@ -610,9 +624,7 @@ async fn get_models(
.dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
.collect();
- if let Some(default_model_position) =
- models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
- {
+ if let Some(default_model_position) = models.iter().position(|model| model.is_chat_default) {
let default_model = models.remove(default_model_position);
models.insert(0, default_model);
}
@@ -630,7 +642,9 @@ async fn request_models(
.uri(models_url.as_ref())
.header("Authorization", format!("Bearer {}", api_token))
.header("Content-Type", "application/json")
- .header("Copilot-Integration-Id", "vscode-chat");
+ .header("Copilot-Integration-Id", "vscode-chat")
+ .header("Editor-Version", "vscode/1.103.2")
+ .header("x-github-api-version", "2025-05-01");
let request = request_builder.body(AsyncBody::empty())?;
@@ -801,6 +815,10 @@ mod tests {
let json = r#"{
"data": [
{
+ "billing": {
+ "is_premium": false,
+ "multiplier": 0
+ },
"capabilities": {
"family": "gpt-4",
"limits": {
@@ -814,6 +832,8 @@ mod tests {
"type": "chat"
},
"id": "gpt-4",
+ "is_chat_default": false,
+ "is_chat_fallback": false,
"model_picker_enabled": false,
"name": "GPT 4",
"object": "model",
@@ -825,6 +845,16 @@ mod tests {
"some-unknown-field": 123
},
{
+ "billing": {
+ "is_premium": true,
+ "multiplier": 1,
+ "restricted_to": [
+ "pro",
+ "pro_plus",
+ "business",
+ "enterprise"
+ ]
+ },
"capabilities": {
"family": "claude-3.7-sonnet",
"limits": {
@@ -848,6 +878,8 @@ mod tests {
"type": "chat"
},
"id": "claude-3.7-sonnet",
+ "is_chat_default": false,
+ "is_chat_fallback": false,
"model_picker_enabled": true,
"name": "Claude 3.7 Sonnet",
"object": "model",
@@ -301,6 +301,7 @@ mod tests {
init_test(cx, |settings| {
settings.defaults.completions = Some(CompletionSettings {
words: WordsCompletionMode::Disabled,
+ words_min_length: 0,
lsp: true,
lsp_fetch_timeout_ms: 0,
lsp_insert_mode: LspInsertMode::Insert,
@@ -533,6 +534,7 @@ mod tests {
init_test(cx, |settings| {
settings.defaults.completions = Some(CompletionSettings {
words: WordsCompletionMode::Disabled,
+ words_min_length: 0,
lsp: true,
lsp_fetch_timeout_ms: 0,
lsp_insert_mode: LspInsertMode::Insert,
@@ -1083,7 +1085,7 @@ mod tests {
let replace_range_marker: TextRangeMarker = ('<', '>').into();
let (_, mut marked_ranges) = marked_text_ranges_by(
marked_string,
- vec![complete_from_marker.clone(), replace_range_marker.clone()],
+ vec![complete_from_marker, replace_range_marker.clone()],
);
let replace_range =
@@ -102,7 +102,7 @@ pub struct GetCompletionsDocument {
pub tab_size: u32,
pub indent_size: u32,
pub insert_spaces: bool,
- pub uri: lsp::Url,
+ pub uri: lsp::Uri,
pub relative_path: String,
pub position: lsp::Position,
pub version: usize,
@@ -6,6 +6,7 @@ edition.workspace = true
license = "GPL-3.0-or-later"
[dependencies]
+bincode.workspace = true
crash-handler.workspace = true
log.workspace = true
minidumper.workspace = true
@@ -14,8 +15,12 @@ release_channel.workspace = true
smol.workspace = true
serde.workspace = true
serde_json.workspace = true
+system_specs.workspace = true
workspace-hack.workspace = true
+[target.'cfg(target_os = "macos")'.dependencies]
+mach2.workspace = true
+
[lints]
workspace = true
@@ -4,6 +4,8 @@ use minidumper::{Client, LoopAction, MinidumpBinary};
use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
use serde::{Deserialize, Serialize};
+#[cfg(target_os = "macos")]
+use std::sync::atomic::AtomicU32;
use std::{
env,
fs::{self, File},
@@ -26,6 +28,9 @@ pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false);
const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60);
const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
+#[cfg(target_os = "macos")]
+static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0);
+
pub async fn init(crash_init: InitCrashHandler) {
if *RELEASE_CHANNEL == ReleaseChannel::Dev && env::var("ZED_GENERATE_MINIDUMPS").is_err() {
return;
@@ -74,6 +79,9 @@ pub async fn init(crash_init: InitCrashHandler) {
.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
.is_ok()
{
+ #[cfg(target_os = "macos")]
+ suspend_all_other_threads();
+
client.ping().unwrap();
client.request_dump(crash_context).is_ok()
} else {
@@ -98,9 +106,28 @@ pub async fn init(crash_init: InitCrashHandler) {
}
}
+#[cfg(target_os = "macos")]
+unsafe fn suspend_all_other_threads() {
+ let task = unsafe { mach2::traps::current_task() };
+ let mut threads: mach2::mach_types::thread_act_array_t = std::ptr::null_mut();
+ let mut count = 0;
+ unsafe {
+ mach2::task::task_threads(task, &raw mut threads, &raw mut count);
+ }
+ let current = unsafe { mach2::mach_init::mach_thread_self() };
+ let panic_thread = PANIC_THREAD_ID.load(Ordering::SeqCst);
+ for i in 0..count {
+ let t = unsafe { *threads.add(i as usize) };
+ if t != current && t != panic_thread {
+ unsafe { mach2::thread_act::thread_suspend(t) };
+ }
+ }
+}
+
pub struct CrashServer {
initialization_params: OnceLock<InitCrashHandler>,
panic_info: OnceLock<CrashPanic>,
+ active_gpu: OnceLock<system_specs::GpuSpecs>,
has_connection: Arc<AtomicBool>,
}
@@ -108,6 +135,9 @@ pub struct CrashServer {
pub struct CrashInfo {
pub init: InitCrashHandler,
pub panic: Option<CrashPanic>,
+ pub minidump_error: Option<String>,
+ pub gpus: Vec<system_specs::GpuInfo>,
+ pub active_gpu: Option<system_specs::GpuSpecs>,
}
#[derive(Debug, Deserialize, Serialize, Clone)]
@@ -116,7 +146,6 @@ pub struct InitCrashHandler {
pub zed_version: String,
pub release_channel: String,
pub commit_sha: String,
- // pub gpu: String,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
@@ -142,16 +171,26 @@ impl minidumper::ServerHandler for CrashServer {
}
fn on_minidump_created(&self, result: Result<MinidumpBinary, minidumper::Error>) -> LoopAction {
- match result {
+ let minidump_error = match result {
Ok(mut md_bin) => {
use io::Write;
let _ = md_bin.file.flush();
- info!("wrote minidump to disk {:?}", md_bin.path);
+ None
}
- Err(e) => {
- info!("failed to write minidump: {:#}", e);
+ Err(e) => Some(format!("{e:?}")),
+ };
+
+ #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+ let gpus = vec![];
+
+ #[cfg(any(target_os = "linux", target_os = "freebsd"))]
+ let gpus = match system_specs::read_gpu_info_from_sys_class_drm() {
+ Ok(gpus) => gpus,
+ Err(err) => {
+ log::warn!("Failed to collect GPU information for crash report: {err}");
+ vec![]
}
- }
+ };
let crash_info = CrashInfo {
init: self
@@ -160,6 +199,9 @@ impl minidumper::ServerHandler for CrashServer {
.expect("not initialized")
.clone(),
panic: self.panic_info.get().cloned(),
+ minidump_error,
+ active_gpu: self.active_gpu.get().cloned(),
+ gpus,
};
let crash_data_path = paths::logs_dir()
@@ -185,6 +227,13 @@ impl minidumper::ServerHandler for CrashServer {
serde_json::from_slice::<CrashPanic>(&buffer).expect("invalid panic data");
self.panic_info.set(panic_data).expect("already panicked");
}
+ 3 => {
+ let gpu_specs: system_specs::GpuSpecs =
+ bincode::deserialize(&buffer).expect("gpu specs");
+ self.active_gpu
+ .set(gpu_specs)
+ .expect("already set active gpu");
+ }
_ => {
panic!("invalid message kind");
}
@@ -218,6 +267,13 @@ pub fn handle_panic(message: String, span: Option<&Location>) {
)
.ok();
log::error!("triggering a crash to generate a minidump...");
+
+ #[cfg(target_os = "macos")]
+ PANIC_THREAD_ID.store(
+ unsafe { mach2::mach_init::mach_thread_self() },
+ Ordering::SeqCst,
+ );
+
#[cfg(target_os = "linux")]
CrashHandler.simulate_signal(crash_handler::Signal::Trap as u32);
#[cfg(not(target_os = "linux"))]
@@ -254,6 +310,7 @@ pub fn crash_server(socket: &Path) {
initialization_params: OnceLock::new(),
panic_info: OnceLock::new(),
has_connection,
+ active_gpu: OnceLock::new(),
}),
&shutdown,
Some(CRASH_HANDLER_PING_TIMEOUT),
@@ -23,7 +23,7 @@ impl SessionId {
Self(client_id as u32)
}
- pub fn to_proto(&self) -> u64 {
+ pub fn to_proto(self) -> u64 {
self.0 as u64
}
}
@@ -2,9 +2,9 @@ use dap_types::SteppingGranularity;
use gpui::{App, Global};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum DebugPanelDockPosition {
Left,
@@ -12,12 +12,17 @@ pub enum DebugPanelDockPosition {
Right,
}
-#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy)]
+#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy, SettingsUi, SettingsKey)]
#[serde(default)]
+// todo(settings_ui) @ben: I'm pretty sure not having the fields be optional here is a bug,
+// it means the defaults will override previously set values if a single key is missing
+#[settings_ui(group = "Debugger")]
+#[settings_key(key = "debugger")]
pub struct DebuggerSettings {
/// Determines the stepping granularity.
///
/// Default: line
+ #[settings_ui(skip)]
pub stepping_granularity: SteppingGranularity,
/// Whether the breakpoints should be reused across Zed sessions.
///
@@ -60,8 +65,6 @@ impl Default for DebuggerSettings {
}
impl Settings for DebuggerSettings {
- const KEY: Option<&'static str> = Some("debugger");
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -234,11 +234,12 @@ impl PythonDebugAdapter {
.await
.map_err(|e| format!("{e:#?}"))?
.success();
+
if !did_succeed {
return Err("Failed to create base virtual environment".into());
}
- const DIR: &'static str = if cfg!(target_os = "windows") {
+ const DIR: &str = if cfg!(target_os = "windows") {
"Scripts"
} else {
"bin"
@@ -27,6 +27,7 @@ sqlez.workspace = true
sqlez_macros.workspace = true
util.workspace = true
workspace-hack.workspace = true
+zed_env_vars.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
@@ -17,9 +17,10 @@ use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
use std::future::Future;
use std::path::Path;
+use std::sync::atomic::AtomicBool;
use std::sync::{LazyLock, atomic::Ordering};
-use std::{env, sync::atomic::AtomicBool};
use util::{ResultExt, maybe};
+use zed_env_vars::ZED_STATELESS;
const CONNECTION_INITIALIZE_QUERY: &str = sql!(
PRAGMA foreign_keys=TRUE;
@@ -36,9 +37,6 @@ const FALLBACK_DB_NAME: &str = "FALLBACK_MEMORY_DB";
const DB_FILE_NAME: &str = "db.sqlite";
-pub static ZED_STATELESS: LazyLock<bool> =
- LazyLock::new(|| env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty()));
-
pub static ALL_FILE_DB_FAILED: LazyLock<AtomicBool> = LazyLock::new(|| AtomicBool::new(false));
/// Open or create a database at the given directory path.
@@ -74,7 +72,7 @@ pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, scope: &str) -> Threa
}
async fn open_main_db<M: Migrator>(db_path: &Path) -> Option<ThreadSafeConnection> {
- log::info!("Opening database {}", db_path.display());
+ log::trace!("Opening database {}", db_path.display());
ThreadSafeConnection::builder::<M>(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
@@ -110,11 +108,14 @@ pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection {
}
/// Implements a basic DB wrapper for a given domain
+///
+/// Arguments:
+/// - static variable name for connection
+/// - type of connection wrapper
+/// - dependencies, whose migrations should be run prior to this domain's migrations
#[macro_export]
-macro_rules! define_connection {
- (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => {
- pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection);
-
+macro_rules! static_connection {
+ ($id:ident, $t:ident, [ $($d:ty),* ] $(, $global:ident)?) => {
impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
@@ -123,16 +124,6 @@ macro_rules! define_connection {
}
}
- impl $crate::sqlez::domain::Domain for $t {
- fn name() -> &'static str {
- stringify!($t)
- }
-
- fn migrations() -> &'static [&'static str] {
- $migrations
- }
- }
-
impl $t {
#[cfg(any(test, feature = "test-support"))]
pub async fn open_test_db(name: &'static str) -> Self {
@@ -142,7 +133,8 @@ macro_rules! define_connection {
#[cfg(any(test, feature = "test-support"))]
pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- $t($crate::smol::block_on($crate::open_test_db::<$t>(stringify!($id))))
+ #[allow(unused_parens)]
+ $t($crate::smol::block_on($crate::open_test_db::<($($d,)* $t)>(stringify!($id))))
});
#[cfg(not(any(test, feature = "test-support")))]
@@ -153,46 +145,10 @@ macro_rules! define_connection {
} else {
$crate::RELEASE_CHANNEL.dev_name()
};
- $t($crate::smol::block_on($crate::open_db::<$t>(db_dir, scope)))
+ #[allow(unused_parens)]
+ $t($crate::smol::block_on($crate::open_db::<($($d,)* $t)>(db_dir, scope)))
});
- };
- (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) => {
- pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection);
-
- impl ::std::ops::Deref for $t {
- type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
- }
-
- impl $crate::sqlez::domain::Domain for $t {
- fn name() -> &'static str {
- stringify!($t)
- }
-
- fn migrations() -> &'static [&'static str] {
- $migrations
- }
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- $t($crate::smol::block_on($crate::open_test_db::<($($d),+, $t)>(stringify!($id))))
- });
-
- #[cfg(not(any(test, feature = "test-support")))]
- pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| {
- let db_dir = $crate::database_dir();
- let scope = if false $(|| stringify!($global) == "global")? {
- "global"
- } else {
- $crate::RELEASE_CHANNEL.dev_name()
- };
- $t($crate::smol::block_on($crate::open_db::<($($d),+, $t)>(db_dir, scope)))
- });
- };
+ }
}
pub fn write_and_log<F>(cx: &App, db_write: impl FnOnce() -> F + Send + 'static)
@@ -219,17 +175,12 @@ mod tests {
enum BadDB {}
impl Domain for BadDB {
- fn name() -> &'static str {
- "db_tests"
- }
-
- fn migrations() -> &'static [&'static str] {
- &[
- sql!(CREATE TABLE test(value);),
- // failure because test already exists
- sql!(CREATE TABLE test(value);),
- ]
- }
+ const NAME: &str = "db_tests";
+ const MIGRATIONS: &[&str] = &[
+ sql!(CREATE TABLE test(value);),
+ // failure because test already exists
+ sql!(CREATE TABLE test(value);),
+ ];
}
let tempdir = tempfile::Builder::new()
@@ -251,25 +202,15 @@ mod tests {
enum CorruptedDB {}
impl Domain for CorruptedDB {
- fn name() -> &'static str {
- "db_tests"
- }
-
- fn migrations() -> &'static [&'static str] {
- &[sql!(CREATE TABLE test(value);)]
- }
+ const NAME: &str = "db_tests";
+ const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test(value);)];
}
enum GoodDB {}
impl Domain for GoodDB {
- fn name() -> &'static str {
- "db_tests" //Notice same name
- }
-
- fn migrations() -> &'static [&'static str] {
- &[sql!(CREATE TABLE test2(value);)] //But different migration
- }
+ const NAME: &str = "db_tests"; //Notice same name
+ const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test2(value);)];
}
let tempdir = tempfile::Builder::new()
@@ -305,25 +246,16 @@ mod tests {
enum CorruptedDB {}
impl Domain for CorruptedDB {
- fn name() -> &'static str {
- "db_tests"
- }
+ const NAME: &str = "db_tests";
- fn migrations() -> &'static [&'static str] {
- &[sql!(CREATE TABLE test(value);)]
- }
+ const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test(value);)];
}
enum GoodDB {}
impl Domain for GoodDB {
- fn name() -> &'static str {
- "db_tests" //Notice same name
- }
-
- fn migrations() -> &'static [&'static str] {
- &[sql!(CREATE TABLE test2(value);)] //But different migration
- }
+ const NAME: &str = "db_tests"; //Notice same name
+ const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test2(value);)]; // But different migration
}
let tempdir = tempfile::Builder::new()
@@ -2,16 +2,26 @@ use gpui::App;
use sqlez_macros::sql;
use util::ResultExt as _;
-use crate::{define_connection, query, write_and_log};
+use crate::{
+ query,
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ write_and_log,
+};
-define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
- &[sql!(
+pub struct KeyValueStore(crate::sqlez::thread_safe_connection::ThreadSafeConnection);
+
+impl Domain for KeyValueStore {
+ const NAME: &str = stringify!(KeyValueStore);
+
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE IF NOT EXISTS kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
)];
-);
+}
+
+crate::static_connection!(KEY_VALUE_STORE, KeyValueStore, []);
pub trait Dismissable {
const KEY: &'static str;
@@ -91,15 +101,19 @@ mod tests {
}
}
-define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> =
- &[sql!(
+pub struct GlobalKeyValueStore(ThreadSafeConnection);
+
+impl Domain for GlobalKeyValueStore {
+ const NAME: &str = stringify!(GlobalKeyValueStore);
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE IF NOT EXISTS kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
)];
- global
-);
+}
+
+crate::static_connection!(GLOBAL_KEY_VALUE_STORE, GlobalKeyValueStore, [], global);
impl GlobalKeyValueStore {
query! {
@@ -664,7 +664,7 @@ impl ToolbarItemView for DapLogToolbarItemView {
if let Some(item) = active_pane_item
&& let Some(log_view) = item.downcast::<DapLogView>()
{
- self.log_view = Some(log_view.clone());
+ self.log_view = Some(log_view);
return workspace::ToolbarItemLocation::PrimaryLeft;
}
self.log_view = None;
@@ -1,8 +1,10 @@
use dap::{DapRegistry, DebugRequest};
use fuzzy::{StringMatch, StringMatchCandidate};
-use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render};
+use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render, Task};
use gpui::{Subscription, WeakEntity};
use picker::{Picker, PickerDelegate};
+use project::Project;
+use rpc::proto;
use task::ZedDebugConfig;
use util::debug_panic;
@@ -56,29 +58,28 @@ impl AttachModal {
pub fn new(
definition: ZedDebugConfig,
workspace: WeakEntity<Workspace>,
+ project: Entity<Project>,
modal: bool,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
- let mut processes: Box<[_]> = System::new_all()
- .processes()
- .values()
- .map(|process| {
- let name = process.name().to_string_lossy().into_owned();
- Candidate {
- name: name.into(),
- pid: process.pid().as_u32(),
- command: process
- .cmd()
- .iter()
- .map(|s| s.to_string_lossy().to_string())
- .collect::<Vec<_>>(),
- }
- })
- .collect();
- processes.sort_by_key(|k| k.name.clone());
- let processes = processes.into_iter().collect();
- Self::with_processes(workspace, definition, processes, modal, window, cx)
+ let processes_task = get_processes_for_project(&project, cx);
+
+ let modal = Self::with_processes(workspace, definition, Arc::new([]), modal, window, cx);
+
+ cx.spawn_in(window, async move |this, cx| {
+ let processes = processes_task.await;
+ this.update_in(cx, |modal, window, cx| {
+ modal.picker.update(cx, |picker, cx| {
+ picker.delegate.candidates = processes;
+ picker.refresh(window, cx);
+ });
+ })?;
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+
+ modal
}
pub(super) fn with_processes(
@@ -332,6 +333,57 @@ impl PickerDelegate for AttachModalDelegate {
}
}
+fn get_processes_for_project(project: &Entity<Project>, cx: &mut App) -> Task<Arc<[Candidate]>> {
+ let project = project.read(cx);
+
+ if let Some(remote_client) = project.remote_client() {
+ let proto_client = remote_client.read(cx).proto_client();
+ cx.spawn(async move |_cx| {
+ let response = proto_client
+ .request(proto::GetProcesses {
+ project_id: proto::REMOTE_SERVER_PROJECT_ID,
+ })
+ .await
+ .unwrap_or_else(|_| proto::GetProcessesResponse {
+ processes: Vec::new(),
+ });
+
+ let mut processes: Vec<Candidate> = response
+ .processes
+ .into_iter()
+ .map(|p| Candidate {
+ pid: p.pid,
+ name: p.name.into(),
+ command: p.command,
+ })
+ .collect();
+
+ processes.sort_by_key(|k| k.name.clone());
+ Arc::from(processes.into_boxed_slice())
+ })
+ } else {
+ let mut processes: Box<[_]> = System::new_all()
+ .processes()
+ .values()
+ .map(|process| {
+ let name = process.name().to_string_lossy().into_owned();
+ Candidate {
+ name: name.into(),
+ pid: process.pid().as_u32(),
+ command: process
+ .cmd()
+ .iter()
+ .map(|s| s.to_string_lossy().to_string())
+ .collect::<Vec<_>>(),
+ }
+ })
+ .collect();
+ processes.sort_by_key(|k| k.name.clone());
+ let processes = processes.into_iter().collect();
+ Task::ready(processes)
+ }
+}
+
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn _process_names(modal: &AttachModal, cx: &mut Context<AttachModal>) -> Vec<String> {
modal.picker.read_with(cx, |picker, _| {
@@ -13,11 +13,8 @@ use anyhow::{Context as _, Result, anyhow};
use collections::IndexMap;
use dap::adapters::DebugAdapterName;
use dap::debugger_settings::DebugPanelDockPosition;
-use dap::{
- ContinuedEvent, LoadedSourceEvent, ModuleEvent, OutputEvent, StoppedEvent, ThreadEvent,
- client::SessionId, debugger_settings::DebuggerSettings,
-};
use dap::{DapRegistry, StartDebuggingRequestArguments};
+use dap::{client::SessionId, debugger_settings::DebuggerSettings};
use editor::Editor;
use gpui::{
Action, App, AsyncWindowContext, ClipboardItem, Context, DismissEvent, Entity, EntityId,
@@ -46,23 +43,6 @@ use workspace::{
};
use zed_actions::ToggleFocus;
-pub enum DebugPanelEvent {
- Exited(SessionId),
- Terminated(SessionId),
- Stopped {
- client_id: SessionId,
- event: StoppedEvent,
- go_to_stack_frame: bool,
- },
- Thread((SessionId, ThreadEvent)),
- Continued((SessionId, ContinuedEvent)),
- Output((SessionId, OutputEvent)),
- Module((SessionId, ModuleEvent)),
- LoadedSource((SessionId, LoadedSourceEvent)),
- ClientShutdown(SessionId),
- CapabilitiesChanged(SessionId),
-}
-
pub struct DebugPanel {
size: Pixels,
active_session: Option<Entity<DebugSession>>,
@@ -257,7 +237,7 @@ impl DebugPanel {
.as_ref()
.map(|entity| entity.downgrade()),
task_context: task_context.clone(),
- worktree_id: worktree_id,
+ worktree_id,
});
};
running.resolve_scenario(
@@ -386,10 +366,10 @@ impl DebugPanel {
return;
};
- let dap_store_handle = self.project.read(cx).dap_store().clone();
+ let dap_store_handle = self.project.read(cx).dap_store();
let label = curr_session.read(cx).label();
let quirks = curr_session.read(cx).quirks();
- let adapter = curr_session.read(cx).adapter().clone();
+ let adapter = curr_session.read(cx).adapter();
let binary = curr_session.read(cx).binary().cloned().unwrap();
let task_context = curr_session.read(cx).task_context().clone();
@@ -447,9 +427,9 @@ impl DebugPanel {
return;
};
- let dap_store_handle = self.project.read(cx).dap_store().clone();
+ let dap_store_handle = self.project.read(cx).dap_store();
let label = self.label_for_child_session(&parent_session, request, cx);
- let adapter = parent_session.read(cx).adapter().clone();
+ let adapter = parent_session.read(cx).adapter();
let quirks = parent_session.read(cx).quirks();
let Some(mut binary) = parent_session.read(cx).binary().cloned() else {
log::error!("Attempted to start a child-session without a binary");
@@ -932,7 +912,6 @@ impl DebugPanel {
.cloned(),
|this, running_state| {
this.children({
- let running_state = running_state.clone();
let threads =
running_state.update(cx, |running_state, cx| {
let session = running_state.session();
@@ -1408,7 +1387,6 @@ async fn register_session_inner(
}
impl EventEmitter<PanelEvent> for DebugPanel {}
-impl EventEmitter<DebugPanelEvent> for DebugPanel {}
impl Focusable for DebugPanel {
fn focus_handle(&self, _: &App) -> FocusHandle {
@@ -1645,7 +1623,6 @@ impl Render for DebugPanel {
}
})
.on_action({
- let this = this.clone();
move |_: &ToggleSessionPicker, window, cx| {
this.update(cx, |this, cx| {
this.toggle_session_picker(window, cx);
@@ -85,6 +85,10 @@ actions!(
Rerun,
/// Toggles expansion of the selected item in the debugger UI.
ToggleExpandItem,
+ /// Toggle the user frame filter in the stack frame list
+ /// When toggled on, only frames from the user's code are shown
+ /// When toggled off, all frames are shown
+ ToggleUserFrames,
]
);
@@ -279,6 +283,18 @@ pub fn init(cx: &mut App) {
.ok();
}
})
+ .on_action(move |_: &ToggleUserFrames, _, cx| {
+ if let Some((thread_status, stack_frame_list)) = active_item
+ .read_with(cx, |item, cx| {
+ (item.thread_status(cx), item.stack_frame_list().clone())
+ })
+ .ok()
+ {
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ stack_frame_list.toggle_frame_filter(thread_status, cx);
+ })
+ }
+ })
});
})
.detach();
@@ -293,9 +309,8 @@ pub fn init(cx: &mut App) {
let Some(debug_panel) = workspace.read(cx).panel::<DebugPanel>(cx) else {
return;
};
- let Some(active_session) = debug_panel
- .clone()
- .update(cx, |panel, _| panel.active_session())
+ let Some(active_session) =
+ debug_panel.update(cx, |panel, _| panel.active_session())
else {
return;
};
@@ -1,9 +1,9 @@
-use std::{rc::Rc, time::Duration};
+use std::rc::Rc;
use collections::HashMap;
-use gpui::{Animation, AnimationExt as _, Entity, Transformation, WeakEntity, percentage};
+use gpui::{Entity, WeakEntity};
use project::debugger::session::{ThreadId, ThreadStatus};
-use ui::{ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*};
+use ui::{CommonAnimationExt, ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*};
use util::{maybe, truncate_and_trailoff};
use crate::{
@@ -113,23 +113,6 @@ impl DebugPanel {
}
};
session_entries.push(root_entry);
-
- session_entries.extend(
- sessions_with_children
- .by_ref()
- .take_while(|(session, _)| {
- session
- .read(cx)
- .session(cx)
- .read(cx)
- .parent_id(cx)
- .is_some()
- })
- .map(|(session, _)| SessionListEntry {
- leaf: session.clone(),
- ancestors: vec![],
- }),
- );
}
let weak = cx.weak_entity();
@@ -152,11 +135,7 @@ impl DebugPanel {
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
.color(Color::Muted)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
+ .with_rotate_animation(2)
.into_any_element()
} else {
match running_state.thread_status(cx).unwrap_or_default() {
@@ -272,10 +251,9 @@ impl DebugPanel {
.child(session_entry.label_element(self_depth, cx))
.child(
IconButton::new("close-debug-session", IconName::Close)
- .visible_on_hover(id.clone())
+ .visible_on_hover(id)
.icon_size(IconSize::Small)
.on_click({
- let weak = weak.clone();
move |_, window, cx| {
weak.update(cx, |panel, cx| {
panel.close_session(session_entity_id, window, cx);
@@ -20,7 +20,7 @@ use gpui::{
};
use itertools::Itertools as _;
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
-use project::{DebugScenarioContext, TaskContexts, TaskSourceKind, task_store::TaskStore};
+use project::{DebugScenarioContext, Project, TaskContexts, TaskSourceKind, task_store::TaskStore};
use settings::Settings;
use task::{DebugScenario, RevealTarget, ZedDebugConfig};
use theme::ThemeSettings;
@@ -88,8 +88,10 @@ impl NewProcessModal {
})?;
workspace.update_in(cx, |workspace, window, cx| {
let workspace_handle = workspace.weak_handle();
+ let project = workspace.project().clone();
workspace.toggle_modal(window, cx, |window, cx| {
- let attach_mode = AttachMode::new(None, workspace_handle.clone(), window, cx);
+ let attach_mode =
+ AttachMode::new(None, workspace_handle.clone(), project, window, cx);
let debug_picker = cx.new(|cx| {
let delegate =
@@ -785,7 +787,7 @@ impl RenderOnce for AttachMode {
v_flex()
.w_full()
.track_focus(&self.attach_picker.focus_handle(cx))
- .child(self.attach_picker.clone())
+ .child(self.attach_picker)
}
}
@@ -940,6 +942,7 @@ impl AttachMode {
pub(super) fn new(
debugger: Option<DebugAdapterName>,
workspace: WeakEntity<Workspace>,
+ project: Entity<Project>,
window: &mut Window,
cx: &mut Context<NewProcessModal>,
) -> Entity<Self> {
@@ -950,7 +953,7 @@ impl AttachMode {
stop_on_entry: Some(false),
};
let attach_picker = cx.new(|cx| {
- let modal = AttachModal::new(definition.clone(), workspace, false, window, cx);
+ let modal = AttachModal::new(definition.clone(), workspace, project, false, window, cx);
window.focus(&modal.focus_handle(cx));
modal
@@ -1383,14 +1386,28 @@ impl PickerDelegate for DebugDelegate {
.border_color(cx.theme().colors().border_variant)
.children({
let action = menu::SecondaryConfirm.boxed_clone();
- KeyBinding::for_action(&*action, window, cx).map(|keybind| {
- Button::new("edit-debug-task", "Edit in debug.json")
- .label_size(LabelSize::Small)
- .key_binding(keybind)
- .on_click(move |_, window, cx| {
- window.dispatch_action(action.boxed_clone(), cx)
- })
- })
+ if self.matches.is_empty() {
+ Some(
+ Button::new("edit-debug-json", "Edit debug.json")
+ .label_size(LabelSize::Small)
+ .on_click(cx.listener(|_picker, _, window, cx| {
+ window.dispatch_action(
+ zed_actions::OpenProjectDebugTasks.boxed_clone(),
+ cx,
+ );
+ cx.emit(DismissEvent);
+ })),
+ )
+ } else {
+ KeyBinding::for_action(&*action, window, cx).map(|keybind| {
+ Button::new("edit-debug-task", "Edit in debug.json")
+ .label_size(LabelSize::Small)
+ .key_binding(keybind)
+ .on_click(move |_, window, cx| {
+ window.dispatch_action(action.boxed_clone(), cx)
+ })
+ })
+ }
})
.map(|this| {
if (current_modifiers.alt || self.matches.is_empty()) && !self.prompt.is_empty() {
@@ -256,7 +256,7 @@ pub(crate) fn deserialize_pane_layout(
Some(Member::Axis(PaneAxis::load(
if should_invert { axis.invert() } else { axis },
members,
- flexes.clone(),
+ flexes,
)))
}
SerializedPaneLayout::Pane(serialized_pane) => {
@@ -270,12 +270,9 @@ pub(crate) fn deserialize_pane_layout(
.children
.iter()
.map(|child| match child {
- DebuggerPaneItem::Frames => Box::new(SubView::new(
- stack_frame_list.focus_handle(cx),
- stack_frame_list.clone().into(),
- DebuggerPaneItem::Frames,
- cx,
- )),
+ DebuggerPaneItem::Frames => {
+ Box::new(SubView::stack_frame_list(stack_frame_list.clone(), cx))
+ }
DebuggerPaneItem::Variables => Box::new(SubView::new(
variable_list.focus_handle(cx),
variable_list.clone().into(),
@@ -2,9 +2,7 @@ pub mod running;
use crate::{StackTraceView, persistence::SerializedLayout, session::running::DebugTerminal};
use dap::client::SessionId;
-use gpui::{
- App, Axis, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, WeakEntity,
-};
+use gpui::{App, Axis, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
use project::debugger::session::Session;
use project::worktree_store::WorktreeStore;
use project::{Project, debugger::session::SessionQuirks};
@@ -24,13 +22,6 @@ pub struct DebugSession {
stack_trace_view: OnceCell<Entity<StackTraceView>>,
_worktree_store: WeakEntity<WorktreeStore>,
workspace: WeakEntity<Workspace>,
- _subscriptions: [Subscription; 1],
-}
-
-#[derive(Debug)]
-pub enum DebugPanelItemEvent {
- Close,
- Stopped { go_to_stack_frame: bool },
}
impl DebugSession {
@@ -59,9 +50,6 @@ impl DebugSession {
let quirks = session.read(cx).quirks();
cx.new(|cx| Self {
- _subscriptions: [cx.subscribe(&running_state, |_, _, _, cx| {
- cx.notify();
- })],
remote_id: None,
running_state,
quirks,
@@ -87,7 +75,7 @@ impl DebugSession {
self.stack_trace_view.get_or_init(|| {
let stackframe_list = running_state.read(cx).stack_frame_list().clone();
- let stack_frame_view = cx.new(|cx| {
+ cx.new(|cx| {
StackTraceView::new(
workspace.clone(),
project.clone(),
@@ -95,9 +83,7 @@ impl DebugSession {
window,
cx,
)
- });
-
- stack_frame_view
+ })
})
}
@@ -135,7 +121,7 @@ impl DebugSession {
}
}
-impl EventEmitter<DebugPanelItemEvent> for DebugSession {}
+impl EventEmitter<()> for DebugSession {}
impl Focusable for DebugSession {
fn focus_handle(&self, cx: &App) -> FocusHandle {
@@ -144,7 +130,7 @@ impl Focusable for DebugSession {
}
impl Item for DebugSession {
- type Event = DebugPanelItemEvent;
+ type Event = ();
fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
"Debugger".into()
}
@@ -14,7 +14,6 @@ use crate::{
session::running::memory_view::MemoryView,
};
-use super::DebugPanelItemEvent;
use anyhow::{Context as _, Result, anyhow};
use breakpoint_list::BreakpointList;
use collections::{HashMap, IndexMap};
@@ -36,7 +35,6 @@ use module_list::ModuleList;
use project::{
DebugScenarioContext, Project, WorktreeId,
debugger::session::{self, Session, SessionEvent, SessionStateEvent, ThreadId, ThreadStatus},
- terminals::TerminalKind,
};
use rpc::proto::ViewId;
use serde_json::Value;
@@ -158,6 +156,29 @@ impl SubView {
})
}
+ pub(crate) fn stack_frame_list(
+ stack_frame_list: Entity<StackFrameList>,
+ cx: &mut App,
+ ) -> Entity<Self> {
+ let weak_list = stack_frame_list.downgrade();
+ let this = Self::new(
+ stack_frame_list.focus_handle(cx),
+ stack_frame_list.into(),
+ DebuggerPaneItem::Frames,
+ cx,
+ );
+
+ this.update(cx, |this, _| {
+ this.with_actions(Box::new(move |_, cx| {
+ weak_list
+ .update(cx, |this, _| this.render_control_strip())
+ .unwrap_or_else(|_| div().into_any_element())
+ }));
+ });
+
+ this
+ }
+
pub(crate) fn console(console: Entity<Console>, cx: &mut App) -> Entity<Self> {
let weak_console = console.downgrade();
let this = Self::new(
@@ -180,7 +201,7 @@ impl SubView {
let weak_list = list.downgrade();
let focus_handle = list.focus_handle(cx);
let this = Self::new(
- focus_handle.clone(),
+ focus_handle,
list.into(),
DebuggerPaneItem::BreakpointList,
cx,
@@ -358,7 +379,7 @@ pub(crate) fn new_debugger_pane(
}
};
- let ret = cx.new(move |cx| {
+ cx.new(move |cx| {
let mut pane = Pane::new(
workspace.clone(),
project.clone(),
@@ -562,9 +583,7 @@ pub(crate) fn new_debugger_pane(
}
});
pane
- });
-
- ret
+ })
}
pub struct DebugTerminal {
@@ -918,7 +937,11 @@ impl RunningState {
let task_store = project.read(cx).task_store().downgrade();
let weak_project = project.downgrade();
let weak_workspace = workspace.downgrade();
- let is_local = project.read(cx).is_local();
+ let remote_shell = project
+ .read(cx)
+ .remote_client()
+ .as_ref()
+ .and_then(|remote| remote.read(cx).shell());
cx.spawn_in(window, async move |this, cx| {
let DebugScenario {
@@ -1002,7 +1025,7 @@ impl RunningState {
None
};
- let builder = ShellBuilder::new(is_local, &task.resolved.shell);
+ let builder = ShellBuilder::new(remote_shell.as_deref(), &task.resolved.shell);
let command_label = builder.command_label(&task.resolved.command_label);
let (command, args) =
builder.build(task.resolved.command.clone(), &task.resolved.args);
@@ -1015,12 +1038,11 @@ impl RunningState {
};
let terminal = project
.update(cx, |project, cx| {
- project.create_terminal(
- TerminalKind::Task(task_with_shell.clone()),
+ project.create_terminal_task(
+ task_with_shell.clone(),
cx,
)
- })?
- .await?;
+ })?.await?;
let terminal_view = cx.new_window_entity(|window, cx| {
TerminalView::new(
@@ -1115,9 +1137,8 @@ impl RunningState {
};
let session = self.session.read(cx);
- let cwd = Some(&request.cwd)
- .filter(|cwd| cwd.len() > 0)
- .map(PathBuf::from)
+ let cwd = (!request.cwd.is_empty())
+ .then(|| PathBuf::from(&request.cwd))
.or_else(|| session.binary().unwrap().cwd.clone());
let mut envs: HashMap<String, String> =
@@ -1152,7 +1173,7 @@ impl RunningState {
} else {
None
}
- } else if args.len() > 0 {
+ } else if !args.is_empty() {
Some(args.remove(0))
} else {
None
@@ -1165,13 +1186,13 @@ impl RunningState {
.filter(|title| !title.is_empty())
.or_else(|| command.clone())
.unwrap_or_else(|| "Debug terminal".to_string());
- let kind = TerminalKind::Task(task::SpawnInTerminal {
+ let kind = task::SpawnInTerminal {
id: task::TaskId("debug".to_string()),
full_label: title.clone(),
label: title.clone(),
- command: command.clone(),
+ command,
args,
- command_label: title.clone(),
+ command_label: title,
cwd,
env: envs,
use_new_terminal: true,
@@ -1183,12 +1204,13 @@ impl RunningState {
show_summary: false,
show_command: false,
show_rerun: false,
- });
+ };
let workspace = self.workspace.clone();
let weak_project = project.downgrade();
- let terminal_task = project.update(cx, |project, cx| project.create_terminal(kind, cx));
+ let terminal_task =
+ project.update(cx, |project, cx| project.create_terminal_task(kind, cx));
let terminal_task = cx.spawn_in(window, async move |_, cx| {
let terminal = terminal_task.await?;
@@ -1758,7 +1780,7 @@ impl RunningState {
this.activate_item(0, false, false, window, cx);
});
- let rightmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx);
+ let rightmost_pane = new_debugger_pane(workspace.clone(), project, window, cx);
rightmost_pane.update(cx, |this, cx| {
this.add_item(
Box::new(SubView::new(
@@ -1803,8 +1825,6 @@ impl RunningState {
}
}
-impl EventEmitter<DebugPanelItemEvent> for RunningState {}
-
impl Focusable for RunningState {
fn focus_handle(&self, _: &App) -> FocusHandle {
self.focus_handle.clone()
@@ -244,7 +244,7 @@ impl BreakpointList {
return;
}
let ix = match self.selected_ix {
- _ if self.breakpoints.len() == 0 => None,
+ _ if self.breakpoints.is_empty() => None,
None => Some(0),
Some(ix) => {
if ix == self.breakpoints.len() - 1 {
@@ -268,7 +268,7 @@ impl BreakpointList {
return;
}
let ix = match self.selected_ix {
- _ if self.breakpoints.len() == 0 => None,
+ _ if self.breakpoints.is_empty() => None,
None => Some(self.breakpoints.len() - 1),
Some(ix) => {
if ix == 0 {
@@ -286,7 +286,7 @@ impl BreakpointList {
cx.propagate();
return;
}
- let ix = if self.breakpoints.len() > 0 {
+ let ix = if !self.breakpoints.is_empty() {
Some(0)
} else {
None
@@ -299,7 +299,7 @@ impl BreakpointList {
cx.propagate();
return;
}
- let ix = if self.breakpoints.len() > 0 {
+ let ix = if !self.breakpoints.is_empty() {
Some(self.breakpoints.len() - 1)
} else {
None
@@ -329,8 +329,8 @@ impl BreakpointList {
let text = self.input.read(cx).text(cx);
match mode {
- ActiveBreakpointStripMode::Log => match &entry.kind {
- BreakpointEntryKind::LineBreakpoint(line_breakpoint) => {
+ ActiveBreakpointStripMode::Log => {
+ if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind {
Self::edit_line_breakpoint_inner(
&self.breakpoint_store,
line_breakpoint.breakpoint.path.clone(),
@@ -339,10 +339,9 @@ impl BreakpointList {
cx,
);
}
- _ => {}
- },
- ActiveBreakpointStripMode::Condition => match &entry.kind {
- BreakpointEntryKind::LineBreakpoint(line_breakpoint) => {
+ }
+ ActiveBreakpointStripMode::Condition => {
+ if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind {
Self::edit_line_breakpoint_inner(
&self.breakpoint_store,
line_breakpoint.breakpoint.path.clone(),
@@ -351,10 +350,9 @@ impl BreakpointList {
cx,
);
}
- _ => {}
- },
- ActiveBreakpointStripMode::HitCondition => match &entry.kind {
- BreakpointEntryKind::LineBreakpoint(line_breakpoint) => {
+ }
+ ActiveBreakpointStripMode::HitCondition => {
+ if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &entry.kind {
Self::edit_line_breakpoint_inner(
&self.breakpoint_store,
line_breakpoint.breakpoint.path.clone(),
@@ -363,8 +361,7 @@ impl BreakpointList {
cx,
);
}
- _ => {}
- },
+ }
}
self.focus_handle.focus(window);
} else {
@@ -426,13 +423,10 @@ impl BreakpointList {
return;
};
- match &mut entry.kind {
- BreakpointEntryKind::LineBreakpoint(line_breakpoint) => {
- let path = line_breakpoint.breakpoint.path.clone();
- let row = line_breakpoint.breakpoint.row;
- self.edit_line_breakpoint(path, row, BreakpointEditAction::Toggle, cx);
- }
- _ => {}
+ if let BreakpointEntryKind::LineBreakpoint(line_breakpoint) = &mut entry.kind {
+ let path = line_breakpoint.breakpoint.path.clone();
+ let row = line_breakpoint.breakpoint.row;
+ self.edit_line_breakpoint(path, row, BreakpointEditAction::Toggle, cx);
}
cx.notify();
}
@@ -691,7 +685,6 @@ impl BreakpointList {
selection_kind.map(|kind| kind.0) != Some(SelectedBreakpointKind::Source),
)
.on_click({
- let focus_handle = focus_handle.clone();
move |_, window, cx| {
focus_handle.focus(window);
window.dispatch_action(UnsetBreakpoint.boxed_clone(), cx)
@@ -967,7 +960,7 @@ impl LineBreakpoint {
props,
breakpoint: BreakpointEntry {
kind: BreakpointEntryKind::LineBreakpoint(self.clone()),
- weak: weak,
+ weak,
},
is_selected,
focus_handle,
@@ -1145,7 +1138,6 @@ impl ExceptionBreakpoint {
}
})
.on_click({
- let list = list.clone();
move |_, _, cx| {
list.update(cx, |this, cx| {
this.toggle_exception_breakpoint(&id, cx);
@@ -1179,7 +1171,7 @@ impl ExceptionBreakpoint {
props,
breakpoint: BreakpointEntry {
kind: BreakpointEntryKind::ExceptionBreakpoint(self.clone()),
- weak: weak,
+ weak,
},
is_selected,
focus_handle,
@@ -15,7 +15,7 @@ use gpui::{
use language::{Anchor, Buffer, CodeLabel, TextBufferSnapshot, ToOffset};
use menu::{Confirm, SelectNext, SelectPrevious};
use project::{
- Completion, CompletionResponse,
+ Completion, CompletionDisplayOptions, CompletionResponse,
debugger::session::{CompletionsQuery, OutputToken, Session},
lsp_store::CompletionDocumentation,
search_history::{SearchHistory, SearchHistoryCursor},
@@ -365,7 +365,7 @@ impl Console {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.when_some(keybinding_target.clone(), |el, keybinding_target| {
- el.context(keybinding_target.clone())
+ el.context(keybinding_target)
})
.action("Watch Expression", WatchExpression.boxed_clone())
}))
@@ -685,6 +685,7 @@ impl ConsoleQueryBarCompletionProvider {
Ok(vec![project::CompletionResponse {
is_incomplete: completions.len() >= LIMIT,
+ display_options: CompletionDisplayOptions::default(),
completions,
}])
})
@@ -797,6 +798,7 @@ impl ConsoleQueryBarCompletionProvider {
Ok(vec![project::CompletionResponse {
completions,
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}])
})
@@ -57,7 +57,7 @@ impl LoadedSourceList {
h_flex()
.text_ui_xs(cx)
.text_color(cx.theme().colors().text_muted)
- .when_some(source.path.clone(), |this, path| this.child(path)),
+ .when_some(source.path, |this, path| this.child(path)),
)
.into_any()
}
@@ -461,7 +461,7 @@ impl MemoryView {
let data_breakpoint_info = this.data_breakpoint_info(context.clone(), None, cx);
cx.spawn(async move |this, cx| {
if let Some(info) = data_breakpoint_info.await {
- let Some(data_id) = info.data_id.clone() else {
+ let Some(data_id) = info.data_id else {
return;
};
_ = this.update(cx, |this, cx| {
@@ -157,7 +157,7 @@ impl ModuleList {
h_flex()
.text_ui_xs(cx)
.text_color(cx.theme().colors().text_muted)
- .when_some(module.path.clone(), |this, path| this.child(path)),
+ .when_some(module.path, |this, path| this.child(path)),
)
.into_any()
}
@@ -223,7 +223,7 @@ impl ModuleList {
fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context<Self>) {
let ix = match self.selected_ix {
- _ if self.entries.len() == 0 => None,
+ _ if self.entries.is_empty() => None,
None => Some(0),
Some(ix) => {
if ix == self.entries.len() - 1 {
@@ -243,7 +243,7 @@ impl ModuleList {
cx: &mut Context<Self>,
) {
let ix = match self.selected_ix {
- _ if self.entries.len() == 0 => None,
+ _ if self.entries.is_empty() => None,
None => Some(self.entries.len() - 1),
Some(ix) => {
if ix == 0 {
@@ -262,7 +262,7 @@ impl ModuleList {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let ix = if self.entries.len() > 0 {
+ let ix = if !self.entries.is_empty() {
Some(0)
} else {
None
@@ -271,7 +271,7 @@ impl ModuleList {
}
fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
- let ix = if self.entries.len() > 0 {
+ let ix = if !self.entries.is_empty() {
Some(self.entries.len() - 1)
} else {
None
@@ -4,16 +4,17 @@ use std::time::Duration;
use anyhow::{Context as _, Result, anyhow};
use dap::StackFrameId;
+use db::kvp::KEY_VALUE_STORE;
use gpui::{
- AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState, MouseButton,
- Stateful, Subscription, Task, WeakEntity, list,
+ Action, AnyElement, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, ListState,
+ MouseButton, Stateful, Subscription, Task, WeakEntity, list,
};
use util::debug_panic;
-use crate::StackTraceView;
+use crate::{StackTraceView, ToggleUserFrames};
use language::PointUtf16;
use project::debugger::breakpoint_store::ActiveStackFrame;
-use project::debugger::session::{Session, SessionEvent, StackFrame};
+use project::debugger::session::{Session, SessionEvent, StackFrame, ThreadStatus};
use project::{ProjectItem, ProjectPath};
use ui::{Scrollbar, ScrollbarState, Tooltip, prelude::*};
use workspace::{ItemHandle, Workspace};
@@ -26,6 +27,34 @@ pub enum StackFrameListEvent {
BuiltEntries,
}
+/// Represents the filter applied to the stack frame list
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+pub(crate) enum StackFrameFilter {
+ /// Show all frames
+ All,
+ /// Show only frames from the user's code
+ OnlyUserFrames,
+}
+
+impl StackFrameFilter {
+ fn from_str_or_default(s: impl AsRef<str>) -> Self {
+ match s.as_ref() {
+ "user" => StackFrameFilter::OnlyUserFrames,
+ "all" => StackFrameFilter::All,
+ _ => StackFrameFilter::All,
+ }
+ }
+}
+
+impl From<StackFrameFilter> for String {
+ fn from(filter: StackFrameFilter) -> Self {
+ match filter {
+ StackFrameFilter::All => "all".to_string(),
+ StackFrameFilter::OnlyUserFrames => "user".to_string(),
+ }
+ }
+}
+
pub struct StackFrameList {
focus_handle: FocusHandle,
_subscription: Subscription,
@@ -37,6 +66,8 @@ pub struct StackFrameList {
opened_stack_frame_id: Option<StackFrameId>,
scrollbar_state: ScrollbarState,
list_state: ListState,
+ list_filter: StackFrameFilter,
+ filter_entries_indices: Vec<usize>,
error: Option<SharedString>,
_refresh_task: Task<()>,
}
@@ -73,6 +104,16 @@ impl StackFrameList {
let list_state = ListState::new(0, gpui::ListAlignment::Top, px(1000.));
let scrollbar_state = ScrollbarState::new(list_state.clone());
+ let list_filter = KEY_VALUE_STORE
+ .read_kvp(&format!(
+ "stack-frame-list-filter-{}",
+ session.read(cx).adapter().0
+ ))
+ .ok()
+ .flatten()
+ .map(StackFrameFilter::from_str_or_default)
+ .unwrap_or(StackFrameFilter::All);
+
let mut this = Self {
session,
workspace,
@@ -80,9 +121,11 @@ impl StackFrameList {
state,
_subscription,
entries: Default::default(),
+ filter_entries_indices: Vec::default(),
error: None,
selected_ix: None,
opened_stack_frame_id: None,
+ list_filter,
list_state,
scrollbar_state,
_refresh_task: Task::ready(()),
@@ -103,7 +146,15 @@ impl StackFrameList {
) -> Vec<dap::StackFrame> {
self.entries
.iter()
- .flat_map(|frame| match frame {
+ .enumerate()
+ .filter(|(ix, _)| {
+ self.list_filter == StackFrameFilter::All
+ || self
+ .filter_entries_indices
+ .binary_search_by_key(&ix, |ix| ix)
+ .is_ok()
+ })
+ .flat_map(|(_, frame)| match frame {
StackFrameEntry::Normal(frame) => vec![frame.clone()],
StackFrameEntry::Label(frame) if show_labels => vec![frame.clone()],
StackFrameEntry::Collapsed(frames) if show_collapsed => frames.clone(),
@@ -123,11 +174,29 @@ impl StackFrameList {
#[cfg(test)]
pub(crate) fn dap_stack_frames(&self, cx: &mut App) -> Vec<dap::StackFrame> {
- self.stack_frames(cx)
- .unwrap_or_default()
- .into_iter()
- .map(|stack_frame| stack_frame.dap.clone())
- .collect()
+ match self.list_filter {
+ StackFrameFilter::All => self
+ .stack_frames(cx)
+ .unwrap_or_default()
+ .into_iter()
+ .map(|stack_frame| stack_frame.dap)
+ .collect(),
+ StackFrameFilter::OnlyUserFrames => self
+ .filter_entries_indices
+ .iter()
+ .map(|ix| match &self.entries[*ix] {
+ StackFrameEntry::Label(label) => label,
+ StackFrameEntry::Collapsed(_) => panic!("Collapsed tabs should not be visible"),
+ StackFrameEntry::Normal(frame) => frame,
+ })
+ .cloned()
+ .collect(),
+ }
+ }
+
+ #[cfg(test)]
+ pub(crate) fn list_filter(&self) -> StackFrameFilter {
+ self.list_filter
}
pub fn opened_stack_frame_id(&self) -> Option<StackFrameId> {
@@ -187,12 +256,34 @@ impl StackFrameList {
self.entries.clear();
self.selected_ix = None;
self.list_state.reset(0);
+ self.filter_entries_indices.clear();
cx.emit(StackFrameListEvent::BuiltEntries);
cx.notify();
return;
}
};
- for stack_frame in &stack_frames {
+
+ let worktree_prefixes: Vec<_> = self
+ .workspace
+ .read_with(cx, |workspace, cx| {
+ workspace
+ .visible_worktrees(cx)
+ .map(|tree| tree.read(cx).abs_path())
+ .collect()
+ })
+ .unwrap_or_default();
+
+ let mut filter_entries_indices = Vec::default();
+ for stack_frame in stack_frames.iter() {
+ let frame_in_visible_worktree = stack_frame.dap.source.as_ref().is_some_and(|source| {
+ source.path.as_ref().is_some_and(|path| {
+ worktree_prefixes
+ .iter()
+ .filter_map(|tree| tree.to_str())
+ .any(|tree| path.starts_with(tree))
+ })
+ });
+
match stack_frame.dap.presentation_hint {
Some(dap::StackFramePresentationHint::Deemphasize)
| Some(dap::StackFramePresentationHint::Subtle) => {
@@ -218,15 +309,19 @@ impl StackFrameList {
first_stack_frame_with_path.get_or_insert(entries.len());
}
entries.push(StackFrameEntry::Normal(stack_frame.dap.clone()));
+ if frame_in_visible_worktree {
+ filter_entries_indices.push(entries.len() - 1);
+ }
}
}
}
let collapsed_entries = std::mem::take(&mut collapsed_entries);
if !collapsed_entries.is_empty() {
- entries.push(StackFrameEntry::Collapsed(collapsed_entries.clone()));
+ entries.push(StackFrameEntry::Collapsed(collapsed_entries));
}
self.entries = entries;
+ self.filter_entries_indices = filter_entries_indices;
if let Some(ix) = first_stack_frame_with_path
.or(first_stack_frame)
@@ -242,7 +337,14 @@ impl StackFrameList {
self.selected_ix = ix;
}
- self.list_state.reset(self.entries.len());
+ match self.list_filter {
+ StackFrameFilter::All => {
+ self.list_state.reset(self.entries.len());
+ }
+ StackFrameFilter::OnlyUserFrames => {
+ self.list_state.reset(self.filter_entries_indices.len());
+ }
+ }
cx.emit(StackFrameListEvent::BuiltEntries);
cx.notify();
}
@@ -418,7 +520,7 @@ impl StackFrameList {
let source = stack_frame.source.clone();
let is_selected_frame = Some(ix) == self.selected_ix;
- let path = source.clone().and_then(|s| s.path.or(s.name));
+ let path = source.and_then(|s| s.path.or(s.name));
let formatted_path = path.map(|path| format!("{}:{}", path, stack_frame.line,));
let formatted_path = formatted_path.map(|path| {
Label::new(path)
@@ -519,7 +621,16 @@ impl StackFrameList {
let entries = std::mem::take(stack_frames)
.into_iter()
.map(StackFrameEntry::Normal);
+            // The splice below replaces 1 collapsed entry with `entries_len` expanded ones,
+ let entries_len = entries.len();
self.entries.splice(ix..ix + 1, entries);
+ let (Ok(filtered_indices_start) | Err(filtered_indices_start)) =
+ self.filter_entries_indices.binary_search(&ix);
+
+ for idx in &mut self.filter_entries_indices[filtered_indices_start..] {
+ *idx += entries_len - 1;
+ }
+
self.selected_ix = Some(ix);
self.list_state.reset(self.entries.len());
cx.emit(StackFrameListEvent::BuiltEntries);
@@ -572,6 +683,11 @@ impl StackFrameList {
}
fn render_entry(&self, ix: usize, cx: &mut Context<Self>) -> AnyElement {
+ let ix = match self.list_filter {
+ StackFrameFilter::All => ix,
+ StackFrameFilter::OnlyUserFrames => self.filter_entries_indices[ix],
+ };
+
match &self.entries[ix] {
StackFrameEntry::Label(stack_frame) => self.render_label_entry(stack_frame, cx),
StackFrameEntry::Normal(stack_frame) => self.render_normal_entry(ix, stack_frame, cx),
@@ -621,7 +737,7 @@ impl StackFrameList {
fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context<Self>) {
let ix = match self.selected_ix {
- _ if self.entries.len() == 0 => None,
+ _ if self.entries.is_empty() => None,
None => Some(0),
Some(ix) => {
if ix == self.entries.len() - 1 {
@@ -641,7 +757,7 @@ impl StackFrameList {
cx: &mut Context<Self>,
) {
let ix = match self.selected_ix {
- _ if self.entries.len() == 0 => None,
+ _ if self.entries.is_empty() => None,
None => Some(self.entries.len() - 1),
Some(ix) => {
if ix == 0 {
@@ -660,7 +776,7 @@ impl StackFrameList {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let ix = if self.entries.len() > 0 {
+ let ix = if !self.entries.is_empty() {
Some(0)
} else {
None
@@ -669,7 +785,7 @@ impl StackFrameList {
}
fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
- let ix = if self.entries.len() > 0 {
+ let ix = if !self.entries.is_empty() {
Some(self.entries.len() - 1)
} else {
None
@@ -702,6 +818,67 @@ impl StackFrameList {
self.activate_selected_entry(window, cx);
}
+ pub(crate) fn toggle_frame_filter(
+ &mut self,
+ thread_status: Option<ThreadStatus>,
+ cx: &mut Context<Self>,
+ ) {
+ self.list_filter = match self.list_filter {
+ StackFrameFilter::All => StackFrameFilter::OnlyUserFrames,
+ StackFrameFilter::OnlyUserFrames => StackFrameFilter::All,
+ };
+
+ if let Some(database_id) = self
+ .workspace
+ .read_with(cx, |workspace, _| workspace.database_id())
+ .ok()
+ .flatten()
+ {
+ let database_id: i64 = database_id.into();
+ let save_task = KEY_VALUE_STORE.write_kvp(
+ format!(
+ "stack-frame-list-filter-{}-{}",
+ self.session.read(cx).adapter().0,
+ database_id,
+ ),
+ self.list_filter.into(),
+ );
+ cx.background_spawn(save_task).detach();
+ }
+
+ if let Some(ThreadStatus::Stopped) = thread_status {
+ match self.list_filter {
+ StackFrameFilter::All => {
+ self.list_state.reset(self.entries.len());
+ }
+ StackFrameFilter::OnlyUserFrames => {
+ self.list_state.reset(self.filter_entries_indices.len());
+ if !self
+ .selected_ix
+ .map(|ix| self.filter_entries_indices.contains(&ix))
+ .unwrap_or_default()
+ {
+ self.selected_ix = None;
+ }
+ }
+ }
+
+ if let Some(ix) = self.selected_ix {
+ let scroll_to = match self.list_filter {
+ StackFrameFilter::All => ix,
+ StackFrameFilter::OnlyUserFrames => self
+ .filter_entries_indices
+ .binary_search_by_key(&ix, |ix| *ix)
+ .expect("This index will always exist"),
+ };
+ self.list_state.scroll_to_reveal_item(scroll_to);
+ }
+
+ cx.emit(StackFrameListEvent::BuiltEntries);
+ cx.notify();
+ }
+ }
+
fn render_list(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
div().p_1().size_full().child(
list(
@@ -711,6 +888,30 @@ impl StackFrameList {
.size_full(),
)
}
+
+ pub(crate) fn render_control_strip(&self) -> AnyElement {
+ let tooltip_title = match self.list_filter {
+ StackFrameFilter::All => "Show stack frames from your project",
+ StackFrameFilter::OnlyUserFrames => "Show all stack frames",
+ };
+
+ h_flex()
+ .child(
+ IconButton::new(
+ "filter-by-visible-worktree-stack-frame-list",
+ IconName::ListFilter,
+ )
+ .tooltip(move |window, cx| {
+ Tooltip::for_action(tooltip_title, &ToggleUserFrames, window, cx)
+ })
+ .toggle_state(self.list_filter == StackFrameFilter::OnlyUserFrames)
+ .icon_size(IconSize::Small)
+ .on_click(|_, window, cx| {
+ window.dispatch_action(ToggleUserFrames.boxed_clone(), cx)
+ }),
+ )
+ .into_any_element()
+ }
}
impl Render for StackFrameList {
@@ -291,7 +291,7 @@ impl VariableList {
}
self.session.update(cx, |session, cx| {
- session.variables(scope.variables_reference, cx).len() > 0
+ !session.variables(scope.variables_reference, cx).is_empty()
})
})
.map(|scope| {
@@ -313,7 +313,7 @@ impl VariableList {
watcher.variables_reference,
watcher.variables_reference,
EntryPath::for_watcher(watcher.expression.clone()),
- DapEntry::Watcher(watcher.clone()),
+ DapEntry::Watcher(watcher),
)
})
.collect::<Vec<_>>(),
@@ -947,7 +947,7 @@ impl VariableList {
#[track_caller]
#[cfg(test)]
pub(crate) fn assert_visual_entries(&self, expected: Vec<&str>) {
- const INDENT: &'static str = " ";
+ const INDENT: &str = " ";
let entries = &self.entries;
let mut visual_entries = Vec::with_capacity(entries.len());
@@ -997,7 +997,7 @@ impl VariableList {
DapEntry::Watcher { .. } => continue,
DapEntry::Variable(dap) => scopes[idx].1.push(dap.clone()),
DapEntry::Scope(scope) => {
- if scopes.len() > 0 {
+ if !scopes.is_empty() {
idx += 1;
}
@@ -1301,8 +1301,6 @@ impl VariableList {
IconName::Close,
)
.on_click({
- let weak = weak.clone();
- let path = path.clone();
move |_, window, cx| {
weak.update(cx, |variable_list, cx| {
variable_list.selection = Some(path.clone());
@@ -1470,7 +1468,6 @@ impl VariableList {
}))
})
.on_secondary_mouse_down(cx.listener({
- let path = path.clone();
let entry = variable.clone();
move |this, event: &MouseDownEvent, window, cx| {
this.selection = Some(path.clone());
@@ -1330,7 +1330,6 @@ async fn test_unsetting_breakpoints_on_clear_breakpoint_action(
let called_set_breakpoints = Arc::new(AtomicBool::new(false));
client.on_request::<SetBreakpoints, _>({
- let called_set_breakpoints = called_set_breakpoints.clone();
move |_, args| {
assert!(
args.breakpoints.is_none_or(|bps| bps.is_empty()),
@@ -1445,7 +1444,6 @@ async fn test_we_send_arguments_from_user_config(
let launch_handler_called = Arc::new(AtomicBool::new(false));
start_debug_session_with(&workspace, cx, debug_definition.clone(), {
- let debug_definition = debug_definition.clone();
let launch_handler_called = launch_handler_called.clone();
move |client| {
@@ -1783,9 +1781,8 @@ async fn test_debug_adapters_shutdown_on_app_quit(
let disconnect_request_received = Arc::new(AtomicBool::new(false));
let disconnect_clone = disconnect_request_received.clone();
- let disconnect_clone_for_handler = disconnect_clone.clone();
client.on_request::<Disconnect, _>(move |_, _| {
- disconnect_clone_for_handler.store(true, Ordering::SeqCst);
+ disconnect_clone.store(true, Ordering::SeqCst);
Ok(())
});
@@ -106,9 +106,7 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths(
);
let expected_other_field = if input_path.contains("$ZED_WORKTREE_ROOT") {
- input_path
- .replace("$ZED_WORKTREE_ROOT", path!("/test/worktree/path"))
- .to_owned()
+ input_path.replace("$ZED_WORKTREE_ROOT", path!("/test/worktree/path"))
} else {
input_path.to_string()
};
@@ -1,6 +1,6 @@
use crate::{
debugger_panel::DebugPanel,
- session::running::stack_frame_list::StackFrameEntry,
+ session::running::stack_frame_list::{StackFrameEntry, StackFrameFilter},
tests::{active_debug_session_panel, init_test, init_test_workspace, start_debug_session},
};
use dap::{
@@ -752,3 +752,346 @@ async fn test_collapsed_entries(executor: BackgroundExecutor, cx: &mut TestAppCo
});
});
}
+
+#[gpui::test]
+async fn test_stack_frame_filter(executor: BackgroundExecutor, cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(executor.clone());
+
+ let test_file_content = r#"
+ function main() {
+ doSomething();
+ }
+
+ function doSomething() {
+ console.log('doing something');
+ }
+ "#
+ .unindent();
+
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "src": {
+ "test.js": test_file_content,
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
+ let workspace = init_test_workspace(&project, cx).await;
+ let cx = &mut VisualTestContext::from_window(*workspace, cx);
+
+ let session = start_debug_session(&workspace, cx, |_| {}).unwrap();
+ let client = session.update(cx, |session, _| session.adapter_client().unwrap());
+
+ client.on_request::<Threads, _>(move |_, _| {
+ Ok(dap::ThreadsResponse {
+ threads: vec![dap::Thread {
+ id: 1,
+ name: "Thread 1".into(),
+ }],
+ })
+ });
+
+ client.on_request::<Scopes, _>(move |_, _| Ok(dap::ScopesResponse { scopes: vec![] }));
+
+ let stack_frames = vec![
+ StackFrame {
+ id: 1,
+ name: "main".into(),
+ source: Some(dap::Source {
+ name: Some("test.js".into()),
+ path: Some(path!("/project/src/test.js").into()),
+ source_reference: None,
+ presentation_hint: None,
+ origin: None,
+ sources: None,
+ adapter_data: None,
+ checksums: None,
+ }),
+ line: 2,
+ column: 1,
+ end_line: None,
+ end_column: None,
+ can_restart: None,
+ instruction_pointer_reference: None,
+ module_id: None,
+ presentation_hint: None,
+ },
+ StackFrame {
+ id: 2,
+ name: "node:internal/modules/cjs/loader".into(),
+ source: Some(dap::Source {
+ name: Some("loader.js".into()),
+ path: Some(path!("/usr/lib/node/internal/modules/cjs/loader.js").into()),
+ source_reference: None,
+ presentation_hint: None,
+ origin: None,
+ sources: None,
+ adapter_data: None,
+ checksums: None,
+ }),
+ line: 100,
+ column: 1,
+ end_line: None,
+ end_column: None,
+ can_restart: None,
+ instruction_pointer_reference: None,
+ module_id: None,
+ presentation_hint: Some(dap::StackFramePresentationHint::Deemphasize),
+ },
+ StackFrame {
+ id: 3,
+ name: "node:internal/modules/run_main".into(),
+ source: Some(dap::Source {
+ name: Some("run_main.js".into()),
+ path: Some(path!("/usr/lib/node/internal/modules/run_main.js").into()),
+ source_reference: None,
+ presentation_hint: None,
+ origin: None,
+ sources: None,
+ adapter_data: None,
+ checksums: None,
+ }),
+ line: 50,
+ column: 1,
+ end_line: None,
+ end_column: None,
+ can_restart: None,
+ instruction_pointer_reference: None,
+ module_id: None,
+ presentation_hint: Some(dap::StackFramePresentationHint::Deemphasize),
+ },
+ StackFrame {
+ id: 4,
+ name: "node:internal/modules/run_main2".into(),
+ source: Some(dap::Source {
+ name: Some("run_main.js".into()),
+ path: Some(path!("/usr/lib/node/internal/modules/run_main2.js").into()),
+ source_reference: None,
+ presentation_hint: None,
+ origin: None,
+ sources: None,
+ adapter_data: None,
+ checksums: None,
+ }),
+ line: 50,
+ column: 1,
+ end_line: None,
+ end_column: None,
+ can_restart: None,
+ instruction_pointer_reference: None,
+ module_id: None,
+ presentation_hint: Some(dap::StackFramePresentationHint::Deemphasize),
+ },
+ StackFrame {
+ id: 5,
+ name: "doSomething".into(),
+ source: Some(dap::Source {
+ name: Some("test.js".into()),
+ path: Some(path!("/project/src/test.js").into()),
+ source_reference: None,
+ presentation_hint: None,
+ origin: None,
+ sources: None,
+ adapter_data: None,
+ checksums: None,
+ }),
+ line: 3,
+ column: 1,
+ end_line: None,
+ end_column: None,
+ can_restart: None,
+ instruction_pointer_reference: None,
+ module_id: None,
+ presentation_hint: None,
+ },
+ ];
+
+ // Store a copy for assertions
+ let stack_frames_for_assertions = stack_frames.clone();
+
+ client.on_request::<StackTrace, _>({
+ let stack_frames = Arc::new(stack_frames.clone());
+ move |_, args| {
+ assert_eq!(1, args.thread_id);
+
+ Ok(dap::StackTraceResponse {
+ stack_frames: (*stack_frames).clone(),
+ total_frames: None,
+ })
+ }
+ });
+
+ client
+ .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent {
+ reason: dap::StoppedEventReason::Pause,
+ description: None,
+ thread_id: Some(1),
+ preserve_focus_hint: None,
+ text: None,
+ all_threads_stopped: None,
+ hit_breakpoint_ids: None,
+ }))
+ .await;
+
+ cx.run_until_parked();
+
+ // trigger threads to load
+ active_debug_session_panel(workspace, cx).update(cx, |session, cx| {
+ session.running_state().update(cx, |running_state, cx| {
+ running_state
+ .session()
+ .update(cx, |session, cx| session.threads(cx));
+ });
+ });
+
+ cx.run_until_parked();
+
+ // select first thread
+ active_debug_session_panel(workspace, cx).update_in(cx, |session, window, cx| {
+ session.running_state().update(cx, |running_state, cx| {
+ running_state.select_current_thread(
+ &running_state
+ .session()
+ .update(cx, |session, cx| session.threads(cx)),
+ window,
+ cx,
+ );
+ });
+ });
+
+ cx.run_until_parked();
+
+ // trigger stack frames to load
+ active_debug_session_panel(workspace, cx).update(cx, |debug_panel_item, cx| {
+ let stack_frame_list = debug_panel_item
+ .running_state()
+ .update(cx, |state, _| state.stack_frame_list().clone());
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ stack_frame_list.dap_stack_frames(cx);
+ });
+ });
+
+ cx.run_until_parked();
+
+ let stack_frame_list =
+ active_debug_session_panel(workspace, cx).update_in(cx, |debug_panel_item, window, cx| {
+ let stack_frame_list = debug_panel_item
+ .running_state()
+ .update(cx, |state, _| state.stack_frame_list().clone());
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ stack_frame_list.build_entries(true, window, cx);
+
+ // Verify we have the expected collapsed structure
+ assert_eq!(
+ stack_frame_list.entries(),
+ &vec![
+ StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()),
+ StackFrameEntry::Collapsed(vec![
+ stack_frames_for_assertions[1].clone(),
+ stack_frames_for_assertions[2].clone(),
+ stack_frames_for_assertions[3].clone()
+ ]),
+ StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()),
+ ]
+ );
+ });
+
+ stack_frame_list
+ });
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ let all_frames = stack_frame_list.flatten_entries(true, false);
+ assert_eq!(all_frames.len(), 5, "Should see all 5 frames initially");
+
+ stack_frame_list
+ .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx);
+ assert_eq!(
+ stack_frame_list.list_filter(),
+ StackFrameFilter::OnlyUserFrames
+ );
+ });
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ let user_frames = stack_frame_list.dap_stack_frames(cx);
+ assert_eq!(user_frames.len(), 2, "Should only see 2 user frames");
+ assert_eq!(user_frames[0].name, "main");
+ assert_eq!(user_frames[1].name, "doSomething");
+
+ // Toggle back to all frames
+ stack_frame_list
+ .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx);
+ assert_eq!(stack_frame_list.list_filter(), StackFrameFilter::All);
+ });
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ let all_frames_again = stack_frame_list.flatten_entries(true, false);
+ assert_eq!(
+ all_frames_again.len(),
+ 5,
+ "Should see all 5 frames after toggling back"
+ );
+
+ // Test 3: Verify collapsed entries stay expanded
+ stack_frame_list.expand_collapsed_entry(1, cx);
+ assert_eq!(
+ stack_frame_list.entries(),
+ &vec![
+ StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()),
+ ]
+ );
+
+ stack_frame_list
+ .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx);
+ assert_eq!(
+ stack_frame_list.list_filter(),
+ StackFrameFilter::OnlyUserFrames
+ );
+ });
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ stack_frame_list
+ .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx);
+ assert_eq!(stack_frame_list.list_filter(), StackFrameFilter::All);
+ });
+
+ stack_frame_list.update(cx, |stack_frame_list, cx| {
+ stack_frame_list
+ .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx);
+ assert_eq!(
+ stack_frame_list.list_filter(),
+ StackFrameFilter::OnlyUserFrames
+ );
+
+ assert_eq!(
+ stack_frame_list.dap_stack_frames(cx).as_slice(),
+ &[
+ stack_frames_for_assertions[0].clone(),
+ stack_frames_for_assertions[4].clone()
+ ]
+ );
+
+ // Verify entries remain expanded
+ assert_eq!(
+ stack_frame_list.entries(),
+ &vec![
+ StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()),
+ StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()),
+ ],
+ "Expanded entries should remain expanded after toggling filter"
+ );
+ });
+}
@@ -1445,11 +1445,8 @@ async fn test_variable_list_only_sends_requests_when_rendering(
cx.run_until_parked();
- let running_state = active_debug_session_panel(workspace, cx).update_in(cx, |item, _, _| {
- let state = item.running_state().clone();
-
- state
- });
+ let running_state = active_debug_session_panel(workspace, cx)
+ .update_in(cx, |item, _, _| item.running_state().clone());
client
.fake_event(dap::messages::Events::Stopped(dap::StoppedEvent {
@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::convert::TryFrom;
-pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com";
+pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com/v1";
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
@@ -96,7 +96,7 @@ impl Model {
pub fn max_token_count(&self) -> u64 {
match self {
- Self::Chat | Self::Reasoner => 64_000,
+ Self::Chat | Self::Reasoner => 128_000,
Self::Custom { max_tokens, .. } => *max_tokens,
}
}
@@ -104,7 +104,7 @@ impl Model {
pub fn max_output_tokens(&self) -> Option<u64> {
match self {
Self::Chat => Some(8_192),
- Self::Reasoner => Some(8_192),
+ Self::Reasoner => Some(64_000),
Self::Custom {
max_output_tokens, ..
} => *max_output_tokens,
@@ -263,12 +263,12 @@ pub async fn stream_completion(
api_key: &str,
request: Request,
) -> Result<BoxStream<'static, Result<StreamResponse>>> {
- let uri = format!("{api_url}/v1/chat/completions");
+ let uri = format!("{api_url}/chat/completions");
let request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Content-Type", "application/json")
- .header("Authorization", format!("Bearer {}", api_key));
+ .header("Authorization", format!("Bearer {}", api_key.trim()));
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
@@ -18,7 +18,6 @@ collections.workspace = true
component.workspace = true
ctor.workspace = true
editor.workspace = true
-futures.workspace = true
gpui.workspace = true
indoc.workspace = true
language.workspace = true
@@ -0,0 +1,982 @@
+use crate::{
+ DIAGNOSTICS_UPDATE_DELAY, IncludeWarnings, ToggleWarnings, context_range_for_entry,
+ diagnostic_renderer::{DiagnosticBlock, DiagnosticRenderer},
+ toolbar_controls::DiagnosticsToolbarEditor,
+};
+use anyhow::Result;
+use collections::HashMap;
+use editor::{
+ Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
+ display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
+ multibuffer_context_lines,
+};
+use gpui::{
+ AnyElement, App, AppContext, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
+ InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
+ Task, WeakEntity, Window, actions, div,
+};
+use language::{Buffer, DiagnosticEntry, Point};
+use project::{
+ DiagnosticSummary, Event, Project, ProjectItem, ProjectPath,
+ project_settings::{DiagnosticSeverity, ProjectSettings},
+};
+use settings::Settings;
+use std::{
+ any::{Any, TypeId},
+ cmp::Ordering,
+ sync::Arc,
+};
+use text::{Anchor, BufferSnapshot, OffsetRangeExt};
+use ui::{Button, ButtonStyle, Icon, IconName, Label, Tooltip, h_flex, prelude::*};
+use util::paths::PathExt;
+use workspace::{
+ ItemHandle, ItemNavHistory, ToolbarItemLocation, Workspace,
+ item::{BreadcrumbText, Item, ItemEvent, TabContentParams},
+};
+
+actions!(
+ diagnostics,
+ [
+ /// Opens the project diagnostics view for the currently focused file.
+ DeployCurrentFile,
+ ]
+);
+
+/// The `BufferDiagnosticsEditor` is meant to be used when dealing specifically
+/// with diagnostics for a single buffer, as only the excerpts of the buffer
+/// where diagnostics are available are displayed.
+pub(crate) struct BufferDiagnosticsEditor {
+ pub project: Entity<Project>,
+ focus_handle: FocusHandle,
+ editor: Entity<Editor>,
+ /// The current diagnostic entries in the `BufferDiagnosticsEditor`. Used to
+ /// allow quick comparison of updated diagnostics, to confirm if anything
+ /// has changed.
+ pub(crate) diagnostics: Vec<DiagnosticEntry<Anchor>>,
+ /// The blocks used to display the diagnostics' content in the editor, next
+ /// to the excerpts where the diagnostic originated.
+ blocks: Vec<CustomBlockId>,
+ /// Multibuffer to contain all excerpts that contain diagnostics, which are
+ /// to be rendered in the editor.
+ multibuffer: Entity<MultiBuffer>,
+    /// The buffer for which the editor is displaying diagnostics and
+    /// excerpts.
+ buffer: Option<Entity<Buffer>>,
+    /// The path for which the editor is displaying diagnostics.
+ project_path: ProjectPath,
+ /// Summary of the number of warnings and errors for the path. Used to
+ /// display the number of warnings and errors in the tab's content.
+ summary: DiagnosticSummary,
+ /// Whether to include warnings in the list of diagnostics shown in the
+ /// editor.
+ pub(crate) include_warnings: bool,
+ /// Keeps track of whether there's a background task already running to
+ /// update the excerpts, in order to avoid firing multiple tasks for this purpose.
+ pub(crate) update_excerpts_task: Option<Task<Result<()>>>,
+ /// The project's subscription, responsible for processing events related to
+ /// diagnostics.
+ _subscription: Subscription,
+}
+
+impl BufferDiagnosticsEditor {
+ /// Creates new instance of the `BufferDiagnosticsEditor` which can then be
+ /// displayed by adding it to a pane.
+ pub fn new(
+ project_path: ProjectPath,
+ project_handle: Entity<Project>,
+ buffer: Option<Entity<Buffer>>,
+ include_warnings: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ // Subscribe to project events related to diagnostics so the
+ // `BufferDiagnosticsEditor` can update its state accordingly.
+ let project_event_subscription = cx.subscribe_in(
+ &project_handle,
+ window,
+ |buffer_diagnostics_editor, _project, event, window, cx| match event {
+ Event::DiskBasedDiagnosticsStarted { .. } => {
+ cx.notify();
+ }
+ Event::DiskBasedDiagnosticsFinished { .. } => {
+ buffer_diagnostics_editor.update_all_excerpts(window, cx);
+ }
+ Event::DiagnosticsUpdated {
+ paths,
+ language_server_id,
+ } => {
+ // When diagnostics have been updated, the
+ // `BufferDiagnosticsEditor` should update its state only if
+ // one of the paths matches its `project_path`, otherwise
+ // the event should be ignored.
+ if paths.contains(&buffer_diagnostics_editor.project_path) {
+ buffer_diagnostics_editor.update_diagnostic_summary(cx);
+
+ if buffer_diagnostics_editor.editor.focus_handle(cx).contains_focused(window, cx) || buffer_diagnostics_editor.focus_handle.contains_focused(window, cx) {
+ log::debug!("diagnostics updated for server {language_server_id}. recording change");
+ } else {
+ log::debug!("diagnostics updated for server {language_server_id}. updating excerpts");
+ buffer_diagnostics_editor.update_all_excerpts(window, cx);
+ }
+ }
+ }
+ _ => {}
+ },
+ );
+
+ let focus_handle = cx.focus_handle();
+
+ cx.on_focus_in(
+ &focus_handle,
+ window,
+ |buffer_diagnostics_editor, window, cx| buffer_diagnostics_editor.focus_in(window, cx),
+ )
+ .detach();
+
+ cx.on_focus_out(
+ &focus_handle,
+ window,
+ |buffer_diagnostics_editor, _event, window, cx| {
+ buffer_diagnostics_editor.focus_out(window, cx)
+ },
+ )
+ .detach();
+
+ let summary = project_handle
+ .read(cx)
+ .diagnostic_summary_for_path(&project_path, cx);
+
+ let multibuffer = cx.new(|cx| MultiBuffer::new(project_handle.read(cx).capability()));
+ let max_severity = Self::max_diagnostics_severity(include_warnings);
+ let editor = cx.new(|cx| {
+ let mut editor = Editor::for_multibuffer(
+ multibuffer.clone(),
+ Some(project_handle.clone()),
+ window,
+ cx,
+ );
+ editor.set_vertical_scroll_margin(5, cx);
+ editor.disable_inline_diagnostics();
+ editor.set_max_diagnostics_severity(max_severity, cx);
+ editor.set_all_diagnostics_active(cx);
+ editor
+ });
+
+ // Subscribe to events triggered by the editor in order to correctly
+ // update the buffer's excerpts.
+ cx.subscribe_in(
+ &editor,
+ window,
+ |buffer_diagnostics_editor, _editor, event: &EditorEvent, window, cx| {
+ cx.emit(event.clone());
+
+ match event {
+ // If the user tries to focus on the editor but there's actually
+ // no excerpts for the buffer, focus back on the
+ // `BufferDiagnosticsEditor` instance.
+ EditorEvent::Focused => {
+ if buffer_diagnostics_editor.multibuffer.read(cx).is_empty() {
+ window.focus(&buffer_diagnostics_editor.focus_handle);
+ }
+ }
+ EditorEvent::Blurred => {
+ buffer_diagnostics_editor.update_all_excerpts(window, cx)
+ }
+ _ => {}
+ }
+ },
+ )
+ .detach();
+
+ let diagnostics = vec![];
+ let update_excerpts_task = None;
+ let mut buffer_diagnostics_editor = Self {
+ project: project_handle,
+ focus_handle,
+ editor,
+ diagnostics,
+ blocks: Default::default(),
+ multibuffer,
+ buffer,
+ project_path,
+ summary,
+ include_warnings,
+ update_excerpts_task,
+ _subscription: project_event_subscription,
+ };
+
+ buffer_diagnostics_editor.update_all_diagnostics(window, cx);
+ buffer_diagnostics_editor
+ }
+
+ fn deploy(
+ workspace: &mut Workspace,
+ _: &DeployCurrentFile,
+ window: &mut Window,
+ cx: &mut Context<Workspace>,
+ ) {
+ // Determine the currently opened path by finding the active editor and
+ // finding the project path for the buffer.
+        // If there's no active editor with a project path, avoid deploying
+ // the buffer diagnostics view.
+ if let Some(editor) = workspace.active_item_as::<Editor>(cx)
+ && let Some(project_path) = editor.project_path(cx)
+ {
+ // Check if there's already a `BufferDiagnosticsEditor` tab for this
+ // same path, and if so, focus on that one instead of creating a new
+ // one.
+ let existing_editor = workspace
+ .items_of_type::<BufferDiagnosticsEditor>(cx)
+ .find(|editor| editor.read(cx).project_path == project_path);
+
+ if let Some(editor) = existing_editor {
+ workspace.activate_item(&editor, true, true, window, cx);
+ } else {
+ let include_warnings = match cx.try_global::<IncludeWarnings>() {
+ Some(include_warnings) => include_warnings.0,
+ None => ProjectSettings::get_global(cx).diagnostics.include_warnings,
+ };
+
+ let item = cx.new(|cx| {
+ Self::new(
+ project_path,
+ workspace.project().clone(),
+ editor.read(cx).buffer().read(cx).as_singleton(),
+ include_warnings,
+ window,
+ cx,
+ )
+ });
+
+ workspace.add_item_to_active_pane(Box::new(item), None, true, window, cx);
+ }
+ }
+ }
+
+ pub fn register(
+ workspace: &mut Workspace,
+ _window: Option<&mut Window>,
+ _: &mut Context<Workspace>,
+ ) {
+ workspace.register_action(Self::deploy);
+ }
+
+ fn update_all_diagnostics(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.update_all_excerpts(window, cx);
+ }
+
+ fn update_diagnostic_summary(&mut self, cx: &mut Context<Self>) {
+ let project = self.project.read(cx);
+
+ self.summary = project.diagnostic_summary_for_path(&self.project_path, cx);
+ }
+
+ /// Enqueue an update to the excerpts and diagnostic blocks being shown in
+ /// the editor.
+ pub(crate) fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ // If there's already a task updating the excerpts, early return and let
+ // the other task finish.
+ if self.update_excerpts_task.is_some() {
+ return;
+ }
+
+ let buffer = self.buffer.clone();
+
+ self.update_excerpts_task = Some(cx.spawn_in(window, async move |editor, cx| {
+ cx.background_executor()
+ .timer(DIAGNOSTICS_UPDATE_DELAY)
+ .await;
+
+ if let Some(buffer) = buffer {
+ editor
+ .update_in(cx, |editor, window, cx| {
+ editor.update_excerpts(buffer, window, cx)
+ })?
+ .await?;
+ };
+
+ let _ = editor.update(cx, |editor, cx| {
+ editor.update_excerpts_task = None;
+ cx.notify();
+ });
+
+ Ok(())
+ }));
+ }
+
+ /// Updates the excerpts in the `BufferDiagnosticsEditor` for a single
+ /// buffer.
+ fn update_excerpts(
+ &mut self,
+ buffer: Entity<Buffer>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
+ let was_empty = self.multibuffer.read(cx).is_empty();
+ let multibuffer_context = multibuffer_context_lines(cx);
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let buffer_snapshot_max = buffer_snapshot.max_point();
+ let max_severity = Self::max_diagnostics_severity(self.include_warnings)
+ .into_lsp()
+ .unwrap_or(lsp::DiagnosticSeverity::WARNING);
+
+ cx.spawn_in(window, async move |buffer_diagnostics_editor, mut cx| {
+ // Fetch the diagnostics for the whole of the buffer
+ // (`Point::zero()..buffer_snapshot.max_point()`) so we can confirm
+ // if the diagnostics changed, if it didn't, early return as there's
+ // nothing to update.
+ let diagnostics = buffer_snapshot
+ .diagnostics_in_range::<_, Anchor>(Point::zero()..buffer_snapshot_max, false)
+ .collect::<Vec<_>>();
+
+ let unchanged =
+ buffer_diagnostics_editor.update(cx, |buffer_diagnostics_editor, _cx| {
+ if buffer_diagnostics_editor
+ .diagnostics_are_unchanged(&diagnostics, &buffer_snapshot)
+ {
+ return true;
+ }
+
+ buffer_diagnostics_editor.set_diagnostics(&diagnostics);
+ return false;
+ })?;
+
+ if unchanged {
+ return Ok(());
+ }
+
+ // Mapping between the Group ID and a vector of DiagnosticEntry.
+ let mut grouped: HashMap<usize, Vec<_>> = HashMap::default();
+ for entry in diagnostics {
+ grouped
+ .entry(entry.diagnostic.group_id)
+ .or_default()
+ .push(DiagnosticEntry {
+ range: entry.range.to_point(&buffer_snapshot),
+ diagnostic: entry.diagnostic,
+ })
+ }
+
+ let mut blocks: Vec<DiagnosticBlock> = Vec::new();
+ for (_, group) in grouped {
+ // If the minimum severity of the group is higher than the
+ // maximum severity, or it doesn't even have severity, skip this
+ // group.
+ if group
+ .iter()
+ .map(|d| d.diagnostic.severity)
+ .min()
+ .is_none_or(|severity| severity > max_severity)
+ {
+ continue;
+ }
+
+ let diagnostic_blocks = cx.update(|_window, cx| {
+ DiagnosticRenderer::diagnostic_blocks_for_group(
+ group,
+ buffer_snapshot.remote_id(),
+ Some(Arc::new(buffer_diagnostics_editor.clone())),
+ cx,
+ )
+ })?;
+
+ // For each of the diagnostic blocks to be displayed in the
+ // editor, figure out its index in the list of blocks.
+ //
+ // The following rules are used to determine the order:
+ // 1. Blocks with a lower start position should come first.
+ // 2. If two blocks have the same start position, the one with
+ // the higher end position should come first.
+ for diagnostic_block in diagnostic_blocks {
+ let index = blocks.partition_point(|probe| {
+ match probe
+ .initial_range
+ .start
+ .cmp(&diagnostic_block.initial_range.start)
+ {
+ Ordering::Less => true,
+ Ordering::Greater => false,
+ Ordering::Equal => {
+ probe.initial_range.end > diagnostic_block.initial_range.end
+ }
+ }
+ });
+
+ blocks.insert(index, diagnostic_block);
+ }
+ }
+
+ // Build the excerpt ranges for this specific buffer's diagnostics,
+ // so those excerpts can later be used to update the excerpts shown
+ // in the editor.
+ // This is done by iterating over the list of diagnostic blocks and
+            // determining what range the diagnostic block spans.
+ let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();
+
+ for diagnostic_block in blocks.iter() {
+ let excerpt_range = context_range_for_entry(
+ diagnostic_block.initial_range.clone(),
+ multibuffer_context,
+ buffer_snapshot.clone(),
+ &mut cx,
+ )
+ .await;
+
+ let index = excerpt_ranges
+ .binary_search_by(|probe| {
+ probe
+ .context
+ .start
+ .cmp(&excerpt_range.start)
+ .then(probe.context.end.cmp(&excerpt_range.end))
+ .then(
+ probe
+ .primary
+ .start
+ .cmp(&diagnostic_block.initial_range.start),
+ )
+ .then(probe.primary.end.cmp(&diagnostic_block.initial_range.end))
+ .then(Ordering::Greater)
+ })
+ .unwrap_or_else(|index| index);
+
+ excerpt_ranges.insert(
+ index,
+ ExcerptRange {
+ context: excerpt_range,
+ primary: diagnostic_block.initial_range.clone(),
+ },
+ )
+ }
+
+ // Finally, update the editor's content with the new excerpt ranges
+ // for this editor, as well as the diagnostic blocks.
+ buffer_diagnostics_editor.update_in(cx, |buffer_diagnostics_editor, window, cx| {
+ // Remove the list of `CustomBlockId` from the editor's display
+ // map, ensuring that if any diagnostics have been solved, the
+ // associated block stops being shown.
+ let block_ids = buffer_diagnostics_editor.blocks.clone();
+
+ buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |display_map, cx| {
+ display_map.remove_blocks(block_ids.into_iter().collect(), cx);
+ })
+ });
+
+ let (anchor_ranges, _) =
+ buffer_diagnostics_editor
+ .multibuffer
+ .update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpt_ranges_for_path(
+ PathKey::for_buffer(&buffer, cx),
+ buffer.clone(),
+ &buffer_snapshot,
+ excerpt_ranges,
+ cx,
+ )
+ });
+
+ if was_empty {
+ if let Some(anchor_range) = anchor_ranges.first() {
+ let range_to_select = anchor_range.start..anchor_range.start;
+
+ buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
+ editor.change_selections(Default::default(), window, cx, |selection| {
+ selection.select_anchor_ranges([range_to_select])
+ })
+ });
+
+ // If the `BufferDiagnosticsEditor` is currently
+ // focused, move focus to its editor.
+ if buffer_diagnostics_editor.focus_handle.is_focused(window) {
+ buffer_diagnostics_editor
+ .editor
+ .read(cx)
+ .focus_handle(cx)
+ .focus(window);
+ }
+ }
+ }
+
+ // Cloning the blocks before moving ownership so these can later
+ // be used to set the block contents for testing purposes.
+ #[cfg(test)]
+ let cloned_blocks = blocks.clone();
+
+ // Build new diagnostic blocks to be added to the editor's
+ // display map for the new diagnostics. Update the `blocks`
+ // property before finishing, to ensure the blocks are removed
+ // on the next execution.
+ let editor_blocks =
+ anchor_ranges
+ .into_iter()
+ .zip(blocks.into_iter())
+ .map(|(anchor, block)| {
+ let editor = buffer_diagnostics_editor.editor.downgrade();
+
+ BlockProperties {
+ placement: BlockPlacement::Near(anchor.start),
+ height: Some(1),
+ style: BlockStyle::Flex,
+ render: Arc::new(move |block_context| {
+ block.render_block(editor.clone(), block_context)
+ }),
+ priority: 1,
+ }
+ });
+
+ let block_ids = buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |display_map, cx| {
+ display_map.insert_blocks(editor_blocks, cx)
+ })
+ });
+
+ // In order to be able to verify which diagnostic blocks are
+ // rendered in the editor, the `set_block_content_for_tests`
+ // function must be used, so that the
+ // `editor::test::editor_content_with_blocks` function can then
+ // be called to fetch these blocks.
+ #[cfg(test)]
+ {
+ for (block_id, block) in block_ids.iter().zip(cloned_blocks.iter()) {
+ let markdown = block.markdown.clone();
+ editor::test::set_block_content_for_tests(
+ &buffer_diagnostics_editor.editor,
+ *block_id,
+ cx,
+ move |cx| {
+ markdown::MarkdownElement::rendered_text(
+ markdown.clone(),
+ cx,
+ editor::hover_popover::diagnostics_markdown_style,
+ )
+ },
+ );
+ }
+ }
+
+ buffer_diagnostics_editor.blocks = block_ids;
+ cx.notify()
+ })
+ })
+ }
+
+ fn set_diagnostics(&mut self, diagnostics: &Vec<DiagnosticEntry<Anchor>>) {
+ self.diagnostics = diagnostics.clone();
+ }
+
+ fn diagnostics_are_unchanged(
+ &self,
+ diagnostics: &Vec<DiagnosticEntry<Anchor>>,
+ snapshot: &BufferSnapshot,
+ ) -> bool {
+ if self.diagnostics.len() != diagnostics.len() {
+ return false;
+ }
+
+ self.diagnostics
+ .iter()
+ .zip(diagnostics.iter())
+ .all(|(existing, new)| {
+ existing.diagnostic.message == new.diagnostic.message
+ && existing.diagnostic.severity == new.diagnostic.severity
+ && existing.diagnostic.is_primary == new.diagnostic.is_primary
+ && existing.range.to_offset(snapshot) == new.range.to_offset(snapshot)
+ })
+ }
+
+ fn focus_in(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ // If the `BufferDiagnosticsEditor` is focused and the multibuffer is
+ // not empty, focus on the editor instead, which will allow the user to
+ // start interacting and editing the buffer's contents.
+ if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() {
+ self.editor.focus_handle(cx).focus(window)
+ }
+ }
+
+ fn focus_out(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if !self.focus_handle.is_focused(window) && !self.editor.focus_handle(cx).is_focused(window)
+ {
+ self.update_all_excerpts(window, cx);
+ }
+ }
+
+ pub fn toggle_warnings(
+ &mut self,
+ _: &ToggleWarnings,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let include_warnings = !self.include_warnings;
+ let max_severity = Self::max_diagnostics_severity(include_warnings);
+
+ self.editor.update(cx, |editor, cx| {
+ editor.set_max_diagnostics_severity(max_severity, cx);
+ });
+
+ self.include_warnings = include_warnings;
+ self.diagnostics.clear();
+ self.update_all_diagnostics(window, cx);
+ }
+
+ fn max_diagnostics_severity(include_warnings: bool) -> DiagnosticSeverity {
+ match include_warnings {
+ true => DiagnosticSeverity::Warning,
+ false => DiagnosticSeverity::Error,
+ }
+ }
+
+ #[cfg(test)]
+ pub fn editor(&self) -> &Entity<Editor> {
+ &self.editor
+ }
+
+ #[cfg(test)]
+ pub fn summary(&self) -> &DiagnosticSummary {
+ &self.summary
+ }
+}
+
+impl Focusable for BufferDiagnosticsEditor {
+ fn focus_handle(&self, _: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl EventEmitter<EditorEvent> for BufferDiagnosticsEditor {}
+
+impl Item for BufferDiagnosticsEditor {
+ type Event = EditorEvent;
+
+ fn act_as_type<'a>(
+ &'a self,
+ type_id: std::any::TypeId,
+ self_handle: &'a Entity<Self>,
+ _: &'a App,
+ ) -> Option<gpui::AnyView> {
+ if type_id == TypeId::of::<Self>() {
+ Some(self_handle.to_any())
+ } else if type_id == TypeId::of::<Editor>() {
+ Some(self.editor.to_any())
+ } else {
+ None
+ }
+ }
+
+ fn added_to_workspace(
+ &mut self,
+ workspace: &mut Workspace,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.editor.update(cx, |editor, cx| {
+ editor.added_to_workspace(workspace, window, cx)
+ });
+ }
+
+ fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation {
+ ToolbarItemLocation::PrimaryLeft
+ }
+
+ fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option<Vec<BreadcrumbText>> {
+ self.editor.breadcrumbs(theme, cx)
+ }
+
+ fn can_save(&self, _cx: &App) -> bool {
+ true
+ }
+
+ fn clone_on_split(
+ &self,
+ _workspace_id: Option<workspace::WorkspaceId>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Option<Entity<Self>>
+ where
+ Self: Sized,
+ {
+ Some(cx.new(|cx| {
+ BufferDiagnosticsEditor::new(
+ self.project_path.clone(),
+ self.project.clone(),
+ self.buffer.clone(),
+ self.include_warnings,
+ window,
+ cx,
+ )
+ }))
+ }
+
+ fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.editor
+ .update(cx, |editor, cx| editor.deactivated(window, cx));
+ }
+
+ fn for_each_project_item(&self, cx: &App, f: &mut dyn FnMut(EntityId, &dyn ProjectItem)) {
+ self.editor.for_each_project_item(cx, f);
+ }
+
+ fn has_conflict(&self, cx: &App) -> bool {
+ self.multibuffer.read(cx).has_conflict(cx)
+ }
+
+ fn has_deleted_file(&self, cx: &App) -> bool {
+ self.multibuffer.read(cx).has_deleted_file(cx)
+ }
+
+ fn is_dirty(&self, cx: &App) -> bool {
+ self.multibuffer.read(cx).is_dirty(cx)
+ }
+
+ fn is_singleton(&self, _cx: &App) -> bool {
+ false
+ }
+
+ fn navigate(
+ &mut self,
+ data: Box<dyn Any>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> bool {
+ self.editor
+ .update(cx, |editor, cx| editor.navigate(data, window, cx))
+ }
+
+ fn reload(
+ &mut self,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
+ self.editor.reload(project, window, cx)
+ }
+
+ fn save(
+ &mut self,
+ options: workspace::item::SaveOptions,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
+ self.editor.save(options, project, window, cx)
+ }
+
+ fn save_as(
+ &mut self,
+ _project: Entity<Project>,
+ _path: ProjectPath,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
+ unreachable!()
+ }
+
+ fn set_nav_history(
+ &mut self,
+ nav_history: ItemNavHistory,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.editor.update(cx, |editor, _| {
+ editor.set_nav_history(Some(nav_history));
+ })
+ }
+
+ // Builds the content to be displayed in the tab.
+ fn tab_content(&self, params: TabContentParams, _window: &Window, _cx: &App) -> AnyElement {
+ let error_count = self.summary.error_count;
+ let warning_count = self.summary.warning_count;
+ let label = Label::new(
+ self.project_path
+ .path
+ .file_name()
+ .map(|f| f.to_sanitized_string())
+ .unwrap_or_else(|| self.project_path.path.to_sanitized_string()),
+ );
+
+ h_flex()
+ .gap_1()
+ .child(label)
+ .when(error_count == 0 && warning_count == 0, |parent| {
+ parent.child(
+ h_flex()
+ .gap_1()
+ .child(Icon::new(IconName::Check).color(Color::Success)),
+ )
+ })
+ .when(error_count > 0, |parent| {
+ parent.child(
+ h_flex()
+ .gap_1()
+ .child(Icon::new(IconName::XCircle).color(Color::Error))
+ .child(Label::new(error_count.to_string()).color(params.text_color())),
+ )
+ })
+ .when(warning_count > 0, |parent| {
+ parent.child(
+ h_flex()
+ .gap_1()
+ .child(Icon::new(IconName::Warning).color(Color::Warning))
+ .child(Label::new(warning_count.to_string()).color(params.text_color())),
+ )
+ })
+ .into_any_element()
+ }
+
+ fn tab_content_text(&self, _detail: usize, _app: &App) -> SharedString {
+ "Buffer Diagnostics".into()
+ }
+
+ fn tab_tooltip_text(&self, _: &App) -> Option<SharedString> {
+ Some(
+ format!(
+ "Buffer Diagnostics - {}",
+ self.project_path.path.to_sanitized_string()
+ )
+ .into(),
+ )
+ }
+
+ fn telemetry_event_text(&self) -> Option<&'static str> {
+ Some("Buffer Diagnostics Opened")
+ }
+
+ fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
+ Editor::to_item_events(event, f)
+ }
+}
+
+impl Render for BufferDiagnosticsEditor {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let filename = self.project_path.path.to_sanitized_string();
+ let error_count = self.summary.error_count;
+ let warning_count = match self.include_warnings {
+ true => self.summary.warning_count,
+ false => 0,
+ };
+
+ let child = if error_count + warning_count == 0 {
+ let label = match warning_count {
+ 0 => "No problems in",
+ _ => "No errors in",
+ };
+
+ v_flex()
+ .key_context("EmptyPane")
+ .size_full()
+ .gap_1()
+ .justify_center()
+ .items_center()
+ .text_center()
+ .bg(cx.theme().colors().editor_background)
+ .child(
+ div()
+ .h_flex()
+ .child(Label::new(label).color(Color::Muted))
+ .child(
+ Button::new("open-file", filename)
+ .style(ButtonStyle::Transparent)
+ .tooltip(Tooltip::text("Open File"))
+ .on_click(cx.listener(|buffer_diagnostics, _, window, cx| {
+ if let Some(workspace) = window.root::<Workspace>().flatten() {
+ workspace.update(cx, |workspace, cx| {
+ workspace
+ .open_path(
+ buffer_diagnostics.project_path.clone(),
+ None,
+ true,
+ window,
+ cx,
+ )
+ .detach_and_log_err(cx);
+ })
+ }
+ })),
+ ),
+ )
+ .when(self.summary.warning_count > 0, |div| {
+ let label = match self.summary.warning_count {
+ 1 => "Show 1 warning".into(),
+ warning_count => format!("Show {} warnings", warning_count),
+ };
+
+ div.child(
+ Button::new("diagnostics-show-warning-label", label).on_click(cx.listener(
+ |buffer_diagnostics_editor, _, window, cx| {
+ buffer_diagnostics_editor.toggle_warnings(
+ &Default::default(),
+ window,
+ cx,
+ );
+ cx.notify();
+ },
+ )),
+ )
+ })
+ } else {
+ div().size_full().child(self.editor.clone())
+ };
+
+ div()
+ .key_context("Diagnostics")
+ .track_focus(&self.focus_handle(cx))
+ .size_full()
+ .child(child)
+ }
+}
+
+impl DiagnosticsToolbarEditor for WeakEntity<BufferDiagnosticsEditor> {
+ fn include_warnings(&self, cx: &App) -> bool {
+ self.read_with(cx, |buffer_diagnostics_editor, _cx| {
+ buffer_diagnostics_editor.include_warnings
+ })
+ .unwrap_or(false)
+ }
+
+ fn has_stale_excerpts(&self, _cx: &App) -> bool {
+ false
+ }
+
+ fn is_updating(&self, cx: &App) -> bool {
+ self.read_with(cx, |buffer_diagnostics_editor, cx| {
+ buffer_diagnostics_editor.update_excerpts_task.is_some()
+ || buffer_diagnostics_editor
+ .project
+ .read(cx)
+ .language_servers_running_disk_based_diagnostics(cx)
+ .next()
+ .is_some()
+ })
+ .unwrap_or(false)
+ }
+
+ fn stop_updating(&self, cx: &mut App) {
+ let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
+ buffer_diagnostics_editor.update_excerpts_task = None;
+ cx.notify();
+ });
+ }
+
+ fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App) {
+ let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
+ buffer_diagnostics_editor.update_all_excerpts(window, cx);
+ });
+ }
+
+ fn toggle_warnings(&self, window: &mut Window, cx: &mut App) {
+ let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
+ buffer_diagnostics_editor.toggle_warnings(&Default::default(), window, cx);
+ });
+ }
+
+ fn get_diagnostics_for_buffer(
+ &self,
+ _buffer_id: text::BufferId,
+ cx: &App,
+ ) -> Vec<language::DiagnosticEntry<text::Anchor>> {
+ self.read_with(cx, |buffer_diagnostics_editor, _cx| {
+ buffer_diagnostics_editor.diagnostics.clone()
+ })
+ .unwrap_or_default()
+ }
+}
@@ -18,7 +18,7 @@ use ui::{
};
use util::maybe;
-use crate::ProjectDiagnosticsEditor;
+use crate::toolbar_controls::DiagnosticsToolbarEditor;
pub struct DiagnosticRenderer;
@@ -26,7 +26,7 @@ impl DiagnosticRenderer {
pub fn diagnostic_blocks_for_group(
diagnostic_group: Vec<DiagnosticEntry<Point>>,
buffer_id: BufferId,
- diagnostics_editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
+ diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
cx: &mut App,
) -> Vec<DiagnosticBlock> {
let Some(primary_ix) = diagnostic_group
@@ -130,6 +130,7 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
cx: &mut App,
) -> Vec<BlockProperties<Anchor>> {
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
+
blocks
.into_iter()
.map(|block| {
@@ -182,7 +183,7 @@ pub(crate) struct DiagnosticBlock {
pub(crate) initial_range: Range<Point>,
pub(crate) severity: DiagnosticSeverity,
pub(crate) markdown: Entity<Markdown>,
- pub(crate) diagnostics_editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
+ pub(crate) diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
}
impl DiagnosticBlock {
@@ -233,7 +234,7 @@ impl DiagnosticBlock {
pub fn open_link(
editor: &mut Editor,
- diagnostics_editor: &Option<WeakEntity<ProjectDiagnosticsEditor>>,
+ diagnostics_editor: &Option<Arc<dyn DiagnosticsToolbarEditor>>,
link: SharedString,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -254,18 +255,10 @@ impl DiagnosticBlock {
if let Some(diagnostics_editor) = diagnostics_editor {
if let Some(diagnostic) = diagnostics_editor
- .read_with(cx, |diagnostics, _| {
- diagnostics
- .diagnostics
- .get(&buffer_id)
- .cloned()
- .unwrap_or_default()
- .into_iter()
- .filter(|d| d.diagnostic.group_id == group_id)
- .nth(ix)
- })
- .ok()
- .flatten()
+ .get_diagnostics_for_buffer(buffer_id, cx)
+ .into_iter()
+ .filter(|d| d.diagnostic.group_id == group_id)
+ .nth(ix)
{
let multibuffer = editor.buffer().read(cx);
let Some(snapshot) = multibuffer
@@ -297,9 +290,9 @@ impl DiagnosticBlock {
};
}
- fn jump_to<T: ToOffset>(
+ fn jump_to<I: ToOffset>(
editor: &mut Editor,
- range: Range<T>,
+ range: Range<I>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -1,19 +1,21 @@
pub mod items;
mod toolbar_controls;
+mod buffer_diagnostics;
mod diagnostic_renderer;
#[cfg(test)]
mod diagnostics_tests;
use anyhow::Result;
+use buffer_diagnostics::BufferDiagnosticsEditor;
use collections::{BTreeSet, HashMap};
use diagnostic_renderer::DiagnosticBlock;
use editor::{
- DEFAULT_MULTIBUFFER_CONTEXT, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
+ Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
+ multibuffer_context_lines,
};
-use futures::future::join_all;
use gpui::{
AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable,
Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled,
@@ -24,7 +26,6 @@ use language::{
};
use project::{
DiagnosticSummary, Project, ProjectPath,
- lsp_store::rust_analyzer_ext::{cancel_flycheck, run_flycheck},
project_settings::{DiagnosticSeverity, ProjectSettings},
};
use settings::Settings;
@@ -37,6 +38,7 @@ use std::{
};
use text::{BufferId, OffsetRangeExt};
use theme::ActiveTheme;
+use toolbar_controls::DiagnosticsToolbarEditor;
pub use toolbar_controls::ToolbarControls;
use ui::{Icon, IconName, Label, h_flex, prelude::*};
use util::ResultExt;
@@ -65,6 +67,7 @@ impl Global for IncludeWarnings {}
pub fn init(cx: &mut App) {
editor::set_diagnostic_renderer(diagnostic_renderer::DiagnosticRenderer {}, cx);
cx.observe_new(ProjectDiagnosticsEditor::register).detach();
+ cx.observe_new(BufferDiagnosticsEditor::register).detach();
}
pub(crate) struct ProjectDiagnosticsEditor {
@@ -79,20 +82,14 @@ pub(crate) struct ProjectDiagnosticsEditor {
paths_to_update: BTreeSet<ProjectPath>,
include_warnings: bool,
update_excerpts_task: Option<Task<Result<()>>>,
- cargo_diagnostics_fetch: CargoDiagnosticsFetchState,
diagnostic_summary_update: Task<()>,
_subscription: Subscription,
}
-struct CargoDiagnosticsFetchState {
- fetch_task: Option<Task<()>>,
- cancel_task: Option<Task<()>>,
- diagnostic_sources: Arc<Vec<ProjectPath>>,
-}
-
impl EventEmitter<EditorEvent> for ProjectDiagnosticsEditor {}
const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50);
+const DIAGNOSTICS_SUMMARY_UPDATE_DELAY: Duration = Duration::from_millis(30);
impl Render for ProjectDiagnosticsEditor {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
@@ -102,43 +99,44 @@ impl Render for ProjectDiagnosticsEditor {
0
};
- let child = if warning_count + self.summary.error_count == 0 {
- let label = if self.summary.warning_count == 0 {
- SharedString::new_static("No problems in workspace")
+ let child =
+ if warning_count + self.summary.error_count == 0 && self.editor.read(cx).is_empty(cx) {
+ let label = if self.summary.warning_count == 0 {
+ SharedString::new_static("No problems in workspace")
+ } else {
+ SharedString::new_static("No errors in workspace")
+ };
+ v_flex()
+ .key_context("EmptyPane")
+ .size_full()
+ .gap_1()
+ .justify_center()
+ .items_center()
+ .text_center()
+ .bg(cx.theme().colors().editor_background)
+ .child(Label::new(label).color(Color::Muted))
+ .when(self.summary.warning_count > 0, |this| {
+ let plural_suffix = if self.summary.warning_count > 1 {
+ "s"
+ } else {
+ ""
+ };
+ let label = format!(
+ "Show {} warning{}",
+ self.summary.warning_count, plural_suffix
+ );
+ this.child(
+ Button::new("diagnostics-show-warning-label", label).on_click(
+ cx.listener(|this, _, window, cx| {
+ this.toggle_warnings(&Default::default(), window, cx);
+ cx.notify();
+ }),
+ ),
+ )
+ })
} else {
- SharedString::new_static("No errors in workspace")
+ div().size_full().child(self.editor.clone())
};
- v_flex()
- .key_context("EmptyPane")
- .size_full()
- .gap_1()
- .justify_center()
- .items_center()
- .text_center()
- .bg(cx.theme().colors().editor_background)
- .child(Label::new(label).color(Color::Muted))
- .when(self.summary.warning_count > 0, |this| {
- let plural_suffix = if self.summary.warning_count > 1 {
- "s"
- } else {
- ""
- };
- let label = format!(
- "Show {} warning{}",
- self.summary.warning_count, plural_suffix
- );
- this.child(
- Button::new("diagnostics-show-warning-label", label).on_click(cx.listener(
- |this, _, window, cx| {
- this.toggle_warnings(&Default::default(), window, cx);
- cx.notify();
- },
- )),
- )
- })
- } else {
- div().size_full().child(self.editor.clone())
- };
div()
.key_context("Diagnostics")
@@ -151,7 +149,7 @@ impl Render for ProjectDiagnosticsEditor {
}
impl ProjectDiagnosticsEditor {
- fn register(
+ pub fn register(
workspace: &mut Workspace,
_window: Option<&mut Window>,
_: &mut Context<Workspace>,
@@ -167,7 +165,7 @@ impl ProjectDiagnosticsEditor {
cx: &mut Context<Self>,
) -> Self {
let project_event_subscription =
- cx.subscribe_in(&project_handle, window, |this, project, event, window, cx| match event {
+ cx.subscribe_in(&project_handle, window, |this, _project, event, window, cx| match event {
project::Event::DiskBasedDiagnosticsStarted { .. } => {
cx.notify();
}
@@ -180,13 +178,12 @@ impl ProjectDiagnosticsEditor {
paths,
} => {
this.paths_to_update.extend(paths.clone());
- let project = project.clone();
this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor()
- .timer(Duration::from_millis(30))
+ .timer(DIAGNOSTICS_SUMMARY_UPDATE_DELAY)
.await;
this.update(cx, |this, cx| {
- this.summary = project.read(cx).diagnostic_summary(false, cx);
+ this.update_diagnostic_summary(cx);
})
.log_err();
});
@@ -241,6 +238,7 @@ impl ProjectDiagnosticsEditor {
}
}
EditorEvent::Blurred => this.update_stale_excerpts(window, cx),
+ EditorEvent::Saved => this.update_stale_excerpts(window, cx),
_ => {}
}
},
@@ -260,11 +258,7 @@ impl ProjectDiagnosticsEditor {
)
});
this.diagnostics.clear();
- this.update_all_diagnostics(false, window, cx);
- })
- .detach();
- cx.observe_release(&cx.entity(), |editor, _, cx| {
- editor.stop_cargo_diagnostics_fetch(cx);
+ this.update_all_excerpts(window, cx);
})
.detach();
@@ -281,20 +275,15 @@ impl ProjectDiagnosticsEditor {
editor,
paths_to_update: Default::default(),
update_excerpts_task: None,
- cargo_diagnostics_fetch: CargoDiagnosticsFetchState {
- fetch_task: None,
- cancel_task: None,
- diagnostic_sources: Arc::new(Vec::new()),
- },
diagnostic_summary_update: Task::ready(()),
_subscription: project_event_subscription,
};
- this.update_all_diagnostics(true, window, cx);
+ this.update_all_excerpts(window, cx);
this
}
fn update_stale_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- if self.update_excerpts_task.is_some() {
+ if self.update_excerpts_task.is_some() || self.multibuffer.read(cx).is_dirty(cx) {
return;
}
@@ -341,6 +330,7 @@ impl ProjectDiagnosticsEditor {
let is_active = workspace
.active_item(cx)
.is_some_and(|item| item.item_id() == existing.item_id());
+
workspace.activate_item(&existing, true, !is_active, window, cx);
} else {
let workspace_handle = cx.entity().downgrade();
@@ -373,20 +363,10 @@ impl ProjectDiagnosticsEditor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let fetch_cargo_diagnostics = ProjectSettings::get_global(cx)
- .diagnostics
- .fetch_cargo_diagnostics();
-
- if fetch_cargo_diagnostics {
- if self.cargo_diagnostics_fetch.fetch_task.is_some() {
- self.stop_cargo_diagnostics_fetch(cx);
- } else {
- self.update_all_diagnostics(false, window, cx);
- }
- } else if self.update_excerpts_task.is_some() {
+ if self.update_excerpts_task.is_some() {
self.update_excerpts_task = None;
} else {
- self.update_all_diagnostics(false, window, cx);
+ self.update_all_excerpts(window, cx);
}
cx.notify();
}
@@ -404,93 +384,29 @@ impl ProjectDiagnosticsEditor {
}
}
- fn update_all_diagnostics(
- &mut self,
- first_launch: bool,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let cargo_diagnostics_sources = self.cargo_diagnostics_sources(cx);
- if cargo_diagnostics_sources.is_empty() {
- self.update_all_excerpts(window, cx);
- } else if first_launch && !self.summary.is_empty() {
- self.update_all_excerpts(window, cx);
- } else {
- self.fetch_cargo_diagnostics(Arc::new(cargo_diagnostics_sources), cx);
- }
- }
-
- fn fetch_cargo_diagnostics(
- &mut self,
- diagnostics_sources: Arc<Vec<ProjectPath>>,
- cx: &mut Context<Self>,
- ) {
- let project = self.project.clone();
- self.cargo_diagnostics_fetch.cancel_task = None;
- self.cargo_diagnostics_fetch.fetch_task = None;
- self.cargo_diagnostics_fetch.diagnostic_sources = diagnostics_sources.clone();
- if self.cargo_diagnostics_fetch.diagnostic_sources.is_empty() {
- return;
- }
-
- self.cargo_diagnostics_fetch.fetch_task = Some(cx.spawn(async move |editor, cx| {
- let mut fetch_tasks = Vec::new();
- for buffer_path in diagnostics_sources.iter().cloned() {
- if cx
- .update(|cx| {
- fetch_tasks.push(run_flycheck(project.clone(), buffer_path, cx));
- })
- .is_err()
- {
- break;
- }
- }
-
- let _ = join_all(fetch_tasks).await;
- editor
- .update(cx, |editor, _| {
- editor.cargo_diagnostics_fetch.fetch_task = None;
- })
- .ok();
- }));
- }
-
- fn stop_cargo_diagnostics_fetch(&mut self, cx: &mut App) {
- self.cargo_diagnostics_fetch.fetch_task = None;
- let mut cancel_gasks = Vec::new();
- for buffer_path in std::mem::take(&mut self.cargo_diagnostics_fetch.diagnostic_sources)
- .iter()
- .cloned()
- {
- cancel_gasks.push(cancel_flycheck(self.project.clone(), buffer_path, cx));
- }
-
- self.cargo_diagnostics_fetch.cancel_task = Some(cx.background_spawn(async move {
- let _ = join_all(cancel_gasks).await;
- log::info!("Finished fetching cargo diagnostics");
- }));
- }
-
/// Enqueue an update of all excerpts. Updates all paths that either
/// currently have diagnostics or are currently present in this view.
fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.project.update(cx, |project, cx| {
- let mut paths = project
+ let mut project_paths = project
.diagnostic_summaries(false, cx)
- .map(|(path, _, _)| path)
+ .map(|(project_path, _, _)| project_path)
.collect::<BTreeSet<_>>();
+
self.multibuffer.update(cx, |multibuffer, cx| {
for buffer in multibuffer.all_buffers() {
if let Some(file) = buffer.read(cx).file() {
- paths.insert(ProjectPath {
+ project_paths.insert(ProjectPath {
path: file.path().clone(),
worktree_id: file.worktree_id(cx),
});
}
}
});
- self.paths_to_update = paths;
+
+ self.paths_to_update = project_paths;
});
+
self.update_stale_excerpts(window, cx);
}
@@ -520,6 +436,7 @@ impl ProjectDiagnosticsEditor {
let was_empty = self.multibuffer.read(cx).is_empty();
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_id = buffer_snapshot.remote_id();
+
let max_severity = if self.include_warnings {
lsp::DiagnosticSeverity::WARNING
} else {
@@ -533,6 +450,7 @@ impl ProjectDiagnosticsEditor {
false,
)
.collect::<Vec<_>>();
+
let unchanged = this.update(cx, |this, _| {
if this.diagnostics.get(&buffer_id).is_some_and(|existing| {
this.diagnostics_are_unchanged(existing, &diagnostics, &buffer_snapshot)
@@ -567,7 +485,7 @@ impl ProjectDiagnosticsEditor {
crate::diagnostic_renderer::DiagnosticRenderer::diagnostic_blocks_for_group(
group,
buffer_snapshot.remote_id(),
- Some(this.clone()),
+ Some(Arc::new(this.clone())),
cx,
)
})?;
@@ -588,14 +506,16 @@ impl ProjectDiagnosticsEditor {
}
let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();
+ let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
for b in blocks.iter() {
let excerpt_range = context_range_for_entry(
b.initial_range.clone(),
- DEFAULT_MULTIBUFFER_CONTEXT,
+ context_lines,
buffer_snapshot.clone(),
cx,
)
.await;
+
let i = excerpt_ranges
.binary_search_by(|probe| {
probe
@@ -665,6 +585,7 @@ impl ProjectDiagnosticsEditor {
priority: 1,
}
});
+
let block_ids = this.editor.update(cx, |editor, cx| {
editor.display_map.update(cx, |display_map, cx| {
display_map.insert_blocks(editor_blocks, cx)
@@ -696,28 +617,8 @@ impl ProjectDiagnosticsEditor {
})
}
- pub fn cargo_diagnostics_sources(&self, cx: &App) -> Vec<ProjectPath> {
- let fetch_cargo_diagnostics = ProjectSettings::get_global(cx)
- .diagnostics
- .fetch_cargo_diagnostics();
- if !fetch_cargo_diagnostics {
- return Vec::new();
- }
- self.project
- .read(cx)
- .worktrees(cx)
- .filter_map(|worktree| {
- let _cargo_toml_entry = worktree.read(cx).entry_for_path("Cargo.toml")?;
- let rust_file_entry = worktree.read(cx).entries(false, 0).find(|entry| {
- entry
- .path
- .extension()
- .and_then(|extension| extension.to_str())
- == Some("rs")
- })?;
- self.project.read(cx).path_for_entry(rust_file_entry.id, cx)
- })
- .collect()
+ fn update_diagnostic_summary(&mut self, cx: &mut Context<Self>) {
+ self.summary = self.project.read(cx).diagnostic_summary(false, cx);
}
}
@@ -927,6 +828,68 @@ impl Item for ProjectDiagnosticsEditor {
}
}
+impl DiagnosticsToolbarEditor for WeakEntity<ProjectDiagnosticsEditor> {
+ fn include_warnings(&self, cx: &App) -> bool {
+ self.read_with(cx, |project_diagnostics_editor, _cx| {
+ project_diagnostics_editor.include_warnings
+ })
+ .unwrap_or(false)
+ }
+
+ fn has_stale_excerpts(&self, cx: &App) -> bool {
+ self.read_with(cx, |project_diagnostics_editor, _cx| {
+ !project_diagnostics_editor.paths_to_update.is_empty()
+ })
+ .unwrap_or(false)
+ }
+
+ fn is_updating(&self, cx: &App) -> bool {
+ self.read_with(cx, |project_diagnostics_editor, cx| {
+ project_diagnostics_editor.update_excerpts_task.is_some()
+ || project_diagnostics_editor
+ .project
+ .read(cx)
+ .language_servers_running_disk_based_diagnostics(cx)
+ .next()
+ .is_some()
+ })
+ .unwrap_or(false)
+ }
+
+ fn stop_updating(&self, cx: &mut App) {
+ let _ = self.update(cx, |project_diagnostics_editor, cx| {
+ project_diagnostics_editor.update_excerpts_task = None;
+ cx.notify();
+ });
+ }
+
+ fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App) {
+ let _ = self.update(cx, |project_diagnostics_editor, cx| {
+ project_diagnostics_editor.update_all_excerpts(window, cx);
+ });
+ }
+
+ fn toggle_warnings(&self, window: &mut Window, cx: &mut App) {
+ let _ = self.update(cx, |project_diagnostics_editor, cx| {
+ project_diagnostics_editor.toggle_warnings(&Default::default(), window, cx);
+ });
+ }
+
+ fn get_diagnostics_for_buffer(
+ &self,
+ buffer_id: text::BufferId,
+ cx: &App,
+ ) -> Vec<language::DiagnosticEntry<text::Anchor>> {
+ self.read_with(cx, |project_diagnostics_editor, _cx| {
+ project_diagnostics_editor
+ .diagnostics
+ .get(&buffer_id)
+ .cloned()
+ .unwrap_or_default()
+ })
+ .unwrap_or_default()
+ }
+}
const DIAGNOSTIC_EXPANSION_ROW_LIMIT: u32 = 32;
async fn context_range_for_entry(
@@ -24,6 +24,7 @@ use settings::SettingsStore;
use std::{
env,
path::{Path, PathBuf},
+ str::FromStr,
};
use unindent::Unindent as _;
use util::{RandomCharIter, path, post_inc};
@@ -70,7 +71,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
- let uri = lsp::Url::from_file_path(path!("/test/main.rs")).unwrap();
+ let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
// Create some diagnostics
lsp_store.update(cx, |lsp_store, cx| {
@@ -167,7 +168,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
.update_diagnostics(
language_server_id,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 15),
@@ -243,7 +244,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
.update_diagnostics(
language_server_id,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(),
diagnostics: vec![
lsp::Diagnostic {
range: lsp::Range::new(
@@ -356,14 +357,14 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_1,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(4, 0), lsp::Position::new(4, 4)),
severity: Some(lsp::DiagnosticSeverity::WARNING),
message: "no method `tset`".to_string(),
related_information: Some(vec![lsp::DiagnosticRelatedInformation {
location: lsp::Location::new(
- lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
lsp::Range::new(
lsp::Position::new(0, 9),
lsp::Position::new(0, 13),
@@ -465,7 +466,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_1,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -509,7 +510,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_2,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 1)),
severity: Some(lsp::DiagnosticSeverity::ERROR),
@@ -552,7 +553,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_1,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(2, 0), lsp::Position::new(2, 1)),
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -571,7 +572,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_2,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap(),
diagnostics: vec![],
version: None,
},
@@ -608,7 +609,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
.update_diagnostics(
server_id_2,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(3, 0), lsp::Position::new(3, 1)),
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -681,7 +682,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
Default::default();
for _ in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
// language server completes its diagnostic check
0..=20 if !updated_language_servers.is_empty() => {
let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap();
@@ -690,7 +691,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
lsp_store.disk_based_diagnostics_finished(server_id, cx)
});
- if rng.gen_bool(0.5) {
+ if rng.random_bool(0.5) {
cx.run_until_parked();
}
}
@@ -700,7 +701,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
let (path, server_id, diagnostics) =
match current_diagnostics.iter_mut().choose(&mut rng) {
// update existing set of diagnostics
- Some(((path, server_id), diagnostics)) if rng.gen_bool(0.5) => {
+ Some(((path, server_id), diagnostics)) if rng.random_bool(0.5) => {
(path.clone(), *server_id, diagnostics)
}
@@ -708,13 +709,13 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
_ => {
let path: PathBuf =
format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into();
- let len = rng.gen_range(128..256);
+ let len = rng.random_range(128..256);
let content =
RandomCharIter::new(&mut rng).take(len).collect::<String>();
fs.insert_file(&path, content.into_bytes()).await;
let server_id = match language_server_ids.iter().choose(&mut rng) {
- Some(server_id) if rng.gen_bool(0.5) => *server_id,
+ Some(server_id) if rng.random_bool(0.5) => *server_id,
_ => {
let id = LanguageServerId(language_server_ids.len());
language_server_ids.push(id);
@@ -745,8 +746,8 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| {
- lsp::Url::parse("file:///test/fallback.rs").unwrap()
+ uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| {
+ lsp::Uri::from_str("file:///test/fallback.rs").unwrap()
}),
diagnostics: diagnostics.clone(),
version: None,
@@ -845,7 +846,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
let mut next_inlay_id = 0;
for _ in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
// language server completes its diagnostic check
0..=20 if !updated_language_servers.is_empty() => {
let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap();
@@ -854,7 +855,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
lsp_store.disk_based_diagnostics_finished(server_id, cx)
});
- if rng.gen_bool(0.5) {
+ if rng.random_bool(0.5) {
cx.run_until_parked();
}
}
@@ -862,8 +863,8 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
21..=50 => mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
diagnostics.editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(window, cx);
- if snapshot.buffer_snapshot.len() > 0 {
- let position = rng.gen_range(0..snapshot.buffer_snapshot.len());
+ if !snapshot.buffer_snapshot.is_empty() {
+ let position = rng.random_range(0..snapshot.buffer_snapshot.len());
let position = snapshot.buffer_snapshot.clip_offset(position, Bias::Left);
log::info!(
"adding inlay at {position}/{}: {:?}",
@@ -889,7 +890,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
let (path, server_id, diagnostics) =
match current_diagnostics.iter_mut().choose(&mut rng) {
// update existing set of diagnostics
- Some(((path, server_id), diagnostics)) if rng.gen_bool(0.5) => {
+ Some(((path, server_id), diagnostics)) if rng.random_bool(0.5) => {
(path.clone(), *server_id, diagnostics)
}
@@ -897,13 +898,13 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
_ => {
let path: PathBuf =
format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into();
- let len = rng.gen_range(128..256);
+ let len = rng.random_range(128..256);
let content =
RandomCharIter::new(&mut rng).take(len).collect::<String>();
fs.insert_file(&path, content.into_bytes()).await;
let server_id = match language_server_ids.iter().choose(&mut rng) {
- Some(server_id) if rng.gen_bool(0.5) => *server_id,
+ Some(server_id) if rng.random_bool(0.5) => *server_id,
_ => {
let id = LanguageServerId(language_server_ids.len());
language_server_ids.push(id);
@@ -934,8 +935,8 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| {
- lsp::Url::parse("file:///test/fallback.rs").unwrap()
+ uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| {
+ lsp::Uri::from_str("file:///test/fallback.rs").unwrap()
}),
diagnostics: diagnostics.clone(),
version: None,
@@ -985,7 +986,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(
@@ -1028,7 +1029,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: Vec::new(),
},
@@ -1078,7 +1079,7 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -1246,7 +1247,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
lsp_store.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)),
@@ -1299,7 +1300,7 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext)
lsp_store.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/dir/file.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range,
@@ -1376,7 +1377,7 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) {
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
- let uri = lsp::Url::from_file_path(path!("/root/main.js")).unwrap();
+ let uri = lsp::Uri::from_file_path(path!("/root/main.js")).unwrap();
// Create diagnostics with code fields
lsp_store.update(cx, |lsp_store, cx| {
@@ -1460,7 +1461,7 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -1566,6 +1567,440 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) {
cx.assert_editor_state(indoc! {"error ˇwarning info hint"});
}
+#[gpui::test]
+async fn test_buffer_diagnostics(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ // We'll be creating two different files, both with diagnostics, so we can
+ // later verify that, since the `BufferDiagnosticsEditor` only shows
+ // diagnostics for the provided path, the diagnostics for the other file
+ // will not be shown, contrary to what happens with
+ // `ProjectDiagnosticsEditor`.
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/test"),
+ json!({
+ "main.rs": "
+ fn main() {
+ let x = vec![];
+ let y = vec![];
+ a(x);
+ b(y);
+ c(y);
+ d(x);
+ }
+ "
+ .unindent(),
+ "other.rs": "
+ fn other() {
+ let unused = 42;
+ undefined_function();
+ }
+ "
+ .unindent(),
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let cx = &mut VisualTestContext::from_window(*window, cx);
+ let project_path = project::ProjectPath {
+ worktree_id: project.read_with(cx, |project, cx| {
+ project.worktrees(cx).next().unwrap().read(cx).id()
+ }),
+ path: Arc::from(Path::new("main.rs")),
+ };
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer(project_path.clone(), cx)
+ })
+ .await
+ .ok();
+
+ // Create the diagnostics for `main.rs`.
+ let language_server_id = LanguageServerId(0);
+ let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
+ let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
+
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics: vec![
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
+ severity: Some(lsp::DiagnosticSeverity::WARNING),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: Some(vec![
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9))),
+ message: "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
+ },
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 7))),
+ message: "value moved here".to_string()
+ },
+ ]),
+ ..Default::default()
+ },
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: Some(vec![
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9))),
+ message: "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
+ },
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(3, 6), lsp::Position::new(3, 7))),
+ message: "value moved here".to_string()
+ },
+ ]),
+ ..Default::default()
+ }
+ ],
+ version: None
+ }, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
+
+ // Create diagnostics for other.rs to ensure that the file and
+ // diagnostics are not included in `BufferDiagnosticsEditor` when it is
+ // deployed for main.rs.
+ lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
+ uri: lsp::Uri::from_file_path(path!("/test/other.rs")).unwrap(),
+ diagnostics: vec![
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 14)),
+ severity: Some(lsp::DiagnosticSeverity::WARNING),
+ message: "unused variable: `unused`".to_string(),
+ ..Default::default()
+ },
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(2, 4), lsp::Position::new(2, 22)),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ message: "cannot find function `undefined_function` in this scope".to_string(),
+ ..Default::default()
+ }
+ ],
+ version: None
+ }, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
+ });
+
+ let buffer_diagnostics = window.build_entity(cx, |window, cx| {
+ BufferDiagnosticsEditor::new(
+ project_path.clone(),
+ project.clone(),
+ buffer,
+ true,
+ window,
+ cx,
+ )
+ });
+ let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _| {
+ buffer_diagnostics.editor().clone()
+ });
+
+ // Since the excerpt updates is handled by a background task, we need to
+ // wait a little bit to ensure that the buffer diagnostic's editor content
+ // is rendered.
+ cx.executor()
+ .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+
+ pretty_assertions::assert_eq!(
+ editor_content_with_blocks(&editor, cx),
+ indoc::indoc! {
+ "§ main.rs
+ § -----
+ fn main() {
+ let x = vec![];
+ § move occurs because `x` has type `Vec<char>`, which does not implement
+ § the `Copy` trait (back)
+ let y = vec![];
+ § move occurs because `y` has type `Vec<char>`, which does not implement
+ § the `Copy` trait
+ a(x); § value moved here
+ b(y); § value moved here
+ c(y);
+ § use of moved value
+ § value used here after move
+ d(x);
+ § use of moved value
+ § value used here after move
+ § hint: move occurs because `x` has type `Vec<char>`, which does not
+ § implement the `Copy` trait
+ }"
+ }
+ );
+}
+
+#[gpui::test]
+async fn test_buffer_diagnostics_without_warnings(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/test"),
+ json!({
+ "main.rs": "
+ fn main() {
+ let x = vec![];
+ let y = vec![];
+ a(x);
+ b(y);
+ c(y);
+ d(x);
+ }
+ "
+ .unindent(),
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let cx = &mut VisualTestContext::from_window(*window, cx);
+ let project_path = project::ProjectPath {
+ worktree_id: project.read_with(cx, |project, cx| {
+ project.worktrees(cx).next().unwrap().read(cx).id()
+ }),
+ path: Arc::from(Path::new("main.rs")),
+ };
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer(project_path.clone(), cx)
+ })
+ .await
+ .ok();
+
+ let language_server_id = LanguageServerId(0);
+ let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
+ let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
+
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics: vec![
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
+ severity: Some(lsp::DiagnosticSeverity::WARNING),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: Some(vec![
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9))),
+ message: "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
+ },
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 7))),
+ message: "value moved here".to_string()
+ },
+ ]),
+ ..Default::default()
+ },
+ lsp::Diagnostic{
+ range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: Some(vec![
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9))),
+ message: "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
+ },
+ lsp::DiagnosticRelatedInformation {
+ location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(3, 6), lsp::Position::new(3, 7))),
+ message: "value moved here".to_string()
+ },
+ ]),
+ ..Default::default()
+ }
+ ],
+ version: None
+ }, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
+ });
+
+ let include_warnings = false;
+ let buffer_diagnostics = window.build_entity(cx, |window, cx| {
+ BufferDiagnosticsEditor::new(
+ project_path.clone(),
+ project.clone(),
+ buffer,
+ include_warnings,
+ window,
+ cx,
+ )
+ });
+
+ let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _cx| {
+ buffer_diagnostics.editor().clone()
+ });
+
+ // Since the excerpt updates are handled by a background task, we need to
+ // wait a little bit to ensure that the buffer diagnostic's editor content
+ // is rendered.
+ cx.executor()
+ .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+
+ pretty_assertions::assert_eq!(
+ editor_content_with_blocks(&editor, cx),
+ indoc::indoc! {
+ "§ main.rs
+ § -----
+ fn main() {
+ let x = vec![];
+ § move occurs because `x` has type `Vec<char>`, which does not implement
+ § the `Copy` trait (back)
+ let y = vec![];
+ a(x); § value moved here
+ b(y);
+ c(y);
+ d(x);
+ § use of moved value
+ § value used here after move
+ § hint: move occurs because `x` has type `Vec<char>`, which does not
+ § implement the `Copy` trait
+ }"
+ }
+ );
+}
+
+#[gpui::test]
+async fn test_buffer_diagnostics_multiple_servers(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/test"),
+ json!({
+ "main.rs": "
+ fn main() {
+ let x = vec![];
+ let y = vec![];
+ a(x);
+ b(y);
+ c(y);
+ d(x);
+ }
+ "
+ .unindent(),
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let cx = &mut VisualTestContext::from_window(*window, cx);
+ let project_path = project::ProjectPath {
+ worktree_id: project.read_with(cx, |project, cx| {
+ project.worktrees(cx).next().unwrap().read(cx).id()
+ }),
+ path: Arc::from(Path::new("main.rs")),
+ };
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer(project_path.clone(), cx)
+ })
+ .await
+ .ok();
+
+ // Create the diagnostics for `main.rs`.
+ // Two warnings are being created, one for each language server, in order to
+ // assert that both warnings are rendered in the editor.
+ let language_server_id_a = LanguageServerId(0);
+ let language_server_id_b = LanguageServerId(1);
+ let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
+ let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
+
+ lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store
+ .update_diagnostics(
+ language_server_id_a,
+ lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics: vec![lsp::Diagnostic {
+ range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
+ severity: Some(lsp::DiagnosticSeverity::WARNING),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: None,
+ ..Default::default()
+ }],
+ version: None,
+ },
+ None,
+ DiagnosticSourceKind::Pushed,
+ &[],
+ cx,
+ )
+ .unwrap();
+
+ lsp_store
+ .update_diagnostics(
+ language_server_id_b,
+ lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics: vec![lsp::Diagnostic {
+ range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
+ severity: Some(lsp::DiagnosticSeverity::WARNING),
+ message: "use of moved value\nvalue used here after move".to_string(),
+ related_information: None,
+ ..Default::default()
+ }],
+ version: None,
+ },
+ None,
+ DiagnosticSourceKind::Pushed,
+ &[],
+ cx,
+ )
+ .unwrap();
+ });
+
+ let buffer_diagnostics = window.build_entity(cx, |window, cx| {
+ BufferDiagnosticsEditor::new(
+ project_path.clone(),
+ project.clone(),
+ buffer,
+ true,
+ window,
+ cx,
+ )
+ });
+ let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _| {
+ buffer_diagnostics.editor().clone()
+ });
+
+ // Since the excerpt updates are handled by a background task, we need to
+ // wait a little bit to ensure that the buffer diagnostic's editor content
+ // is rendered.
+ cx.executor()
+ .advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
+
+ pretty_assertions::assert_eq!(
+ editor_content_with_blocks(&editor, cx),
+ indoc::indoc! {
+ "§ main.rs
+ § -----
+ a(x);
+ b(y);
+ c(y);
+ § use of moved value
+ § value used here after move
+ d(x);
+ § use of moved value
+ § value used here after move
+ }"
+ }
+ );
+
+ buffer_diagnostics.update(cx, |buffer_diagnostics, _cx| {
+ assert_eq!(
+ *buffer_diagnostics.summary(),
+ DiagnosticSummary {
+ warning_count: 2,
+ error_count: 0
+ }
+ );
+ })
+}
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
zlog::init_test();
@@ -1588,10 +2023,10 @@ fn randomly_update_diagnostics_for_path(
next_id: &mut usize,
rng: &mut impl Rng,
) {
- let mutation_count = rng.gen_range(1..=3);
+ let mutation_count = rng.random_range(1..=3);
for _ in 0..mutation_count {
- if rng.gen_bool(0.3) && !diagnostics.is_empty() {
- let idx = rng.gen_range(0..diagnostics.len());
+ if rng.random_bool(0.3) && !diagnostics.is_empty() {
+ let idx = rng.random_range(0..diagnostics.len());
log::info!(" removing diagnostic at index {idx}");
diagnostics.remove(idx);
} else {
@@ -1600,7 +2035,7 @@ fn randomly_update_diagnostics_for_path(
let new_diagnostic = random_lsp_diagnostic(rng, fs, path, unique_id);
- let ix = rng.gen_range(0..=diagnostics.len());
+ let ix = rng.random_range(0..=diagnostics.len());
log::info!(
" inserting {} at index {ix}. {},{}..{},{}",
new_diagnostic.message,
@@ -1637,8 +2072,8 @@ fn random_lsp_diagnostic(
let file_content = fs.read_file_sync(path).unwrap();
let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref());
- let start = rng.gen_range(0..file_text.len().saturating_add(ERROR_MARGIN));
- let end = rng.gen_range(start..file_text.len().saturating_add(ERROR_MARGIN));
+ let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN));
+ let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN));
let start_point = file_text.offset_to_point_utf16(start);
let end_point = file_text.offset_to_point_utf16(end);
@@ -1648,7 +2083,7 @@ fn random_lsp_diagnostic(
lsp::Position::new(end_point.row, end_point.column),
);
- let severity = if rng.gen_bool(0.5) {
+ let severity = if rng.random_bool(0.5) {
Some(lsp::DiagnosticSeverity::ERROR)
} else {
Some(lsp::DiagnosticSeverity::WARNING)
@@ -1656,13 +2091,14 @@ fn random_lsp_diagnostic(
let message = format!("diagnostic {unique_id}");
- let related_information = if rng.gen_bool(0.3) {
- let info_count = rng.gen_range(1..=3);
+ let related_information = if rng.random_bool(0.3) {
+ let info_count = rng.random_range(1..=3);
let mut related_info = Vec::with_capacity(info_count);
for i in 0..info_count {
- let info_start = rng.gen_range(0..file_text.len().saturating_add(ERROR_MARGIN));
- let info_end = rng.gen_range(info_start..file_text.len().saturating_add(ERROR_MARGIN));
+ let info_start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN));
+ let info_end =
+ rng.random_range(info_start..file_text.len().saturating_add(ERROR_MARGIN));
let info_start_point = file_text.offset_to_point_utf16(info_start);
let info_end_point = file_text.offset_to_point_utf16(info_end);
@@ -1673,7 +2109,7 @@ fn random_lsp_diagnostic(
);
related_info.push(lsp::DiagnosticRelatedInformation {
- location: lsp::Location::new(lsp::Url::from_file_path(path).unwrap(), info_range),
+ location: lsp::Location::new(lsp::Uri::from_file_path(path).unwrap(), info_range),
message: format!("related info {i} for diagnostic {unique_id}"),
});
}
@@ -32,49 +32,38 @@ impl Render for DiagnosticIndicator {
}
let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) {
- (0, 0) => h_flex().map(|this| {
- this.child(
- Icon::new(IconName::Check)
- .size(IconSize::Small)
- .color(Color::Default),
- )
- }),
- (0, warning_count) => h_flex()
- .gap_1()
- .child(
- Icon::new(IconName::Warning)
- .size(IconSize::Small)
- .color(Color::Warning),
- )
- .child(Label::new(warning_count.to_string()).size(LabelSize::Small)),
- (error_count, 0) => h_flex()
- .gap_1()
- .child(
- Icon::new(IconName::XCircle)
- .size(IconSize::Small)
- .color(Color::Error),
- )
- .child(Label::new(error_count.to_string()).size(LabelSize::Small)),
+ (0, 0) => h_flex().child(
+ Icon::new(IconName::Check)
+ .size(IconSize::Small)
+ .color(Color::Default),
+ ),
(error_count, warning_count) => h_flex()
.gap_1()
- .child(
- Icon::new(IconName::XCircle)
- .size(IconSize::Small)
- .color(Color::Error),
- )
- .child(Label::new(error_count.to_string()).size(LabelSize::Small))
- .child(
- Icon::new(IconName::Warning)
- .size(IconSize::Small)
- .color(Color::Warning),
- )
- .child(Label::new(warning_count.to_string()).size(LabelSize::Small)),
+ .when(error_count > 0, |this| {
+ this.child(
+ Icon::new(IconName::XCircle)
+ .size(IconSize::Small)
+ .color(Color::Error),
+ )
+ .child(Label::new(error_count.to_string()).size(LabelSize::Small))
+ })
+ .when(warning_count > 0, |this| {
+ this.child(
+ Icon::new(IconName::Warning)
+ .size(IconSize::Small)
+ .color(Color::Warning),
+ )
+ .child(Label::new(warning_count.to_string()).size(LabelSize::Small))
+ }),
};
let status = if let Some(diagnostic) = &self.current_diagnostic {
- let message = diagnostic.message.split('\n').next().unwrap().to_string();
+ let message = diagnostic
+ .message
+ .split_once('\n')
+ .map_or(&*diagnostic.message, |(first, _)| first);
Some(
- Button::new("diagnostic_message", message)
+ Button::new("diagnostic_message", SharedString::new(message))
.label_size(LabelSize::Small)
.tooltip(|window, cx| {
Tooltip::for_action(
@@ -1,43 +1,56 @@
-use std::sync::Arc;
-
-use crate::{ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh};
-use gpui::{Context, Entity, EventEmitter, ParentElement, Render, WeakEntity, Window};
+use crate::{BufferDiagnosticsEditor, ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh};
+use gpui::{Context, EventEmitter, ParentElement, Render, Window};
+use language::DiagnosticEntry;
+use text::{Anchor, BufferId};
use ui::prelude::*;
use ui::{IconButton, IconButtonShape, IconName, Tooltip};
use workspace::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, item::ItemHandle};
pub struct ToolbarControls {
- editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
+ editor: Option<Box<dyn DiagnosticsToolbarEditor>>,
+}
+
+pub(crate) trait DiagnosticsToolbarEditor: Send + Sync {
+ /// Informs the toolbar whether warnings are included in the diagnostics.
+ fn include_warnings(&self, cx: &App) -> bool;
+ /// Toggles whether warning diagnostics should be displayed by the
+ /// diagnostics editor.
+ fn toggle_warnings(&self, window: &mut Window, cx: &mut App);
+ /// Indicates whether any of the excerpts displayed by the diagnostics
+ /// editor are stale.
+ fn has_stale_excerpts(&self, cx: &App) -> bool;
+ /// Indicates whether the diagnostics editor is currently updating the
+ /// diagnostics.
+ fn is_updating(&self, cx: &App) -> bool;
+ /// Requests that the diagnostics editor stop updating the diagnostics.
+ fn stop_updating(&self, cx: &mut App);
+ /// Requests that the diagnostics editor updates the displayed diagnostics
+ /// with the latest information.
+ fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App);
+ /// Returns a list of diagnostics for the provided buffer id.
+ fn get_diagnostics_for_buffer(
+ &self,
+ buffer_id: BufferId,
+ cx: &App,
+ ) -> Vec<DiagnosticEntry<Anchor>>;
}
impl Render for ToolbarControls {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let mut include_warnings = false;
let mut has_stale_excerpts = false;
+ let mut include_warnings = false;
let mut is_updating = false;
- let cargo_diagnostics_sources = Arc::new(self.diagnostics().map_or(Vec::new(), |editor| {
- editor.read(cx).cargo_diagnostics_sources(cx)
- }));
- let fetch_cargo_diagnostics = !cargo_diagnostics_sources.is_empty();
- if let Some(editor) = self.diagnostics() {
- let diagnostics = editor.read(cx);
- include_warnings = diagnostics.include_warnings;
- has_stale_excerpts = !diagnostics.paths_to_update.is_empty();
- is_updating = if fetch_cargo_diagnostics {
- diagnostics.cargo_diagnostics_fetch.fetch_task.is_some()
- } else {
- diagnostics.update_excerpts_task.is_some()
- || diagnostics
- .project
- .read(cx)
- .language_servers_running_disk_based_diagnostics(cx)
- .next()
- .is_some()
- };
+ match &self.editor {
+ Some(editor) => {
+ include_warnings = editor.include_warnings(cx);
+ has_stale_excerpts = editor.has_stale_excerpts(cx);
+ is_updating = editor.is_updating(cx);
+ }
+ None => {}
}
- let tooltip = if include_warnings {
+ let warning_tooltip = if include_warnings {
"Exclude Warnings"
} else {
"Include Warnings"
@@ -62,12 +75,12 @@ impl Render for ToolbarControls {
&ToggleDiagnosticsRefresh,
))
.on_click(cx.listener(move |toolbar_controls, _, _, cx| {
- if let Some(diagnostics) = toolbar_controls.diagnostics() {
- diagnostics.update(cx, |diagnostics, cx| {
- diagnostics.stop_cargo_diagnostics_fetch(cx);
- diagnostics.update_excerpts_task = None;
+ match toolbar_controls.editor() {
+ Some(editor) => {
+ editor.stop_updating(cx);
cx.notify();
- });
+ }
+ None => {}
}
})),
)
@@ -76,27 +89,17 @@ impl Render for ToolbarControls {
IconButton::new("refresh-diagnostics", IconName::ArrowCircle)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
- .disabled(!has_stale_excerpts && !fetch_cargo_diagnostics)
+ .disabled(!has_stale_excerpts)
.tooltip(Tooltip::for_action_title(
"Refresh diagnostics",
&ToggleDiagnosticsRefresh,
))
.on_click(cx.listener({
- move |toolbar_controls, _, window, cx| {
- if let Some(diagnostics) = toolbar_controls.diagnostics() {
- let cargo_diagnostics_sources =
- Arc::clone(&cargo_diagnostics_sources);
- diagnostics.update(cx, move |diagnostics, cx| {
- if fetch_cargo_diagnostics {
- diagnostics.fetch_cargo_diagnostics(
- cargo_diagnostics_sources,
- cx,
- );
- } else {
- diagnostics.update_all_excerpts(window, cx);
- }
- });
- }
+ move |toolbar_controls, _, window, cx| match toolbar_controls
+ .editor()
+ {
+ Some(editor) => editor.refresh_diagnostics(window, cx),
+ None => {}
}
})),
)
@@ -106,13 +109,10 @@ impl Render for ToolbarControls {
IconButton::new("toggle-warnings", IconName::Warning)
.icon_color(warning_color)
.shape(IconButtonShape::Square)
- .tooltip(Tooltip::text(tooltip))
- .on_click(cx.listener(|this, _, window, cx| {
- if let Some(editor) = this.diagnostics() {
- editor.update(cx, |editor, cx| {
- editor.toggle_warnings(&Default::default(), window, cx);
- });
- }
+ .tooltip(Tooltip::text(warning_tooltip))
+ .on_click(cx.listener(|this, _, window, cx| match &this.editor {
+ Some(editor) => editor.toggle_warnings(window, cx),
+ None => {}
})),
)
}
@@ -129,7 +129,10 @@ impl ToolbarItemView for ToolbarControls {
) -> ToolbarItemLocation {
if let Some(pane_item) = active_pane_item.as_ref() {
if let Some(editor) = pane_item.downcast::<ProjectDiagnosticsEditor>() {
- self.editor = Some(editor.downgrade());
+ self.editor = Some(Box::new(editor.downgrade()));
+ ToolbarItemLocation::PrimaryRight
+ } else if let Some(editor) = pane_item.downcast::<BufferDiagnosticsEditor>() {
+ self.editor = Some(Box::new(editor.downgrade()));
ToolbarItemLocation::PrimaryRight
} else {
ToolbarItemLocation::Hidden
@@ -151,7 +154,7 @@ impl ToolbarControls {
ToolbarControls { editor: None }
}
- fn diagnostics(&self) -> Option<Entity<ProjectDiagnosticsEditor>> {
- self.editor.as_ref()?.upgrade()
+ fn editor(&self) -> Option<&dyn DiagnosticsToolbarEditor> {
+ self.editor.as_deref()
}
}
@@ -19,9 +19,13 @@ static KEYMAP_LINUX: LazyLock<KeymapFile> = LazyLock::new(|| {
load_keymap("keymaps/default-linux.json").expect("Failed to load Linux keymap")
});
+static KEYMAP_WINDOWS: LazyLock<KeymapFile> = LazyLock::new(|| {
+ load_keymap("keymaps/default-windows.json").expect("Failed to load Windows keymap")
+});
+
static ALL_ACTIONS: LazyLock<Vec<ActionDef>> = LazyLock::new(dump_all_gpui_actions);
-const FRONT_MATTER_COMMENT: &'static str = "<!-- ZED_META {} -->";
+const FRONT_MATTER_COMMENT: &str = "<!-- ZED_META {} -->";
fn main() -> Result<()> {
zlog::init();
@@ -61,15 +65,13 @@ impl PreprocessorError {
for alias in action.deprecated_aliases {
if alias == &action_name {
return PreprocessorError::DeprecatedActionUsed {
- used: action_name.clone(),
+ used: action_name,
should_be: action.name.to_string(),
};
}
}
}
- PreprocessorError::ActionNotFound {
- action_name: action_name.to_string(),
- }
+ PreprocessorError::ActionNotFound { action_name }
}
}
@@ -101,12 +103,13 @@ fn handle_preprocessing() -> Result<()> {
let mut errors = HashSet::<PreprocessorError>::new();
handle_frontmatter(&mut book, &mut errors);
+ template_big_table_of_actions(&mut book);
template_and_validate_keybindings(&mut book, &mut errors);
template_and_validate_actions(&mut book, &mut errors);
if !errors.is_empty() {
- const ANSI_RED: &'static str = "\x1b[31m";
- const ANSI_RESET: &'static str = "\x1b[0m";
+ const ANSI_RED: &str = "\x1b[31m";
+ const ANSI_RESET: &str = "\x1b[0m";
for error in &errors {
eprintln!("{ANSI_RED}ERROR{ANSI_RESET}: {}", error);
}
@@ -143,11 +146,20 @@ fn handle_frontmatter(book: &mut Book, errors: &mut HashSet<PreprocessorError>)
&serde_json::to_string(&metadata).expect("Failed to serialize metadata"),
)
});
- match new_content {
- Cow::Owned(content) => {
- chapter.content = content;
- }
- Cow::Borrowed(_) => {}
+ if let Cow::Owned(content) = new_content {
+ chapter.content = content;
+ }
+ });
+}
+
+fn template_big_table_of_actions(book: &mut Book) {
+ for_each_chapter_mut(book, |chapter| {
+ let needle = "{#ACTIONS_TABLE#}";
+ if let Some(start) = chapter.content.rfind(needle) {
+ chapter.content.replace_range(
+ start..start + needle.len(),
+ &generate_big_table_of_actions(),
+ );
}
});
}
@@ -208,6 +220,7 @@ fn find_binding(os: &str, action: &str) -> Option<String> {
let keymap = match os {
"macos" => &KEYMAP_MACOS,
"linux" | "freebsd" => &KEYMAP_LINUX,
+ "windows" => &KEYMAP_WINDOWS,
_ => unreachable!("Not a valid OS: {}", os),
};
@@ -282,6 +295,7 @@ struct ActionDef {
name: &'static str,
human_name: String,
deprecated_aliases: &'static [&'static str],
+ docs: Option<&'static str>,
}
fn dump_all_gpui_actions() -> Vec<ActionDef> {
@@ -290,6 +304,7 @@ fn dump_all_gpui_actions() -> Vec<ActionDef> {
name: action.name,
human_name: command_palette::humanize_action_name(action.name),
deprecated_aliases: action.deprecated_aliases,
+ docs: action.documentation,
})
.collect::<Vec<ActionDef>>();
@@ -409,13 +424,13 @@ fn handle_postprocessing() -> Result<()> {
.captures(contents)
.with_context(|| format!("Failed to find title in {:?}", pretty_path))
.expect("Page has <title> element")[1];
- let title = title_tag_contents
+
+ title_tag_contents
.trim()
.strip_suffix("- Zed")
.unwrap_or(title_tag_contents)
.trim()
- .to_string();
- title
+ .to_string()
}
}
@@ -423,3 +438,54 @@ fn title_regex() -> &'static Regex {
static TITLE_REGEX: OnceLock<Regex> = OnceLock::new();
TITLE_REGEX.get_or_init(|| Regex::new(r"<title>\s*(.*?)\s*</title>").unwrap())
}
+
+fn generate_big_table_of_actions() -> String {
+ let actions = &*ALL_ACTIONS;
+ let mut output = String::new();
+
+ let mut actions_sorted = actions.iter().collect::<Vec<_>>();
+ actions_sorted.sort_by_key(|a| a.name);
+
+ // Start the definition list with custom styling for better spacing
+ output.push_str("<dl style=\"line-height: 1.8;\">\n");
+
+ for action in actions_sorted.into_iter() {
+ // Add the humanized action name as the term with margin
+ output.push_str(
+ "<dt style=\"margin-top: 1.5em; margin-bottom: 0.5em; font-weight: bold;\"><code>",
+ );
+ output.push_str(&action.human_name);
+ output.push_str("</code></dt>\n");
+
+ // Add the definition with keymap name and description
+ output.push_str("<dd style=\"margin-left: 2em; margin-bottom: 1em;\">\n");
+
+ // Add the description, escaping HTML if needed
+ if let Some(description) = action.docs {
+ output.push_str(
+ &description
+ .replace("&", "&")
+ .replace("<", "<")
+ .replace(">", ">"),
+ );
+ output.push_str("<br>\n");
+ }
+ output.push_str("Keymap Name: <code>");
+ output.push_str(action.name);
+ output.push_str("</code><br>\n");
+ if !action.deprecated_aliases.is_empty() {
+ output.push_str("Deprecated Aliases:");
+ for alias in action.deprecated_aliases.iter() {
+ output.push_str("<code>");
+ output.push_str(alias);
+ output.push_str("</code>, ");
+ }
+ }
+ output.push_str("\n</dd>\n");
+ }
+
+ // Close the definition list
+ output.push_str("</dl>\n");
+
+ output
+}
@@ -34,7 +34,7 @@ pub enum DataCollectionState {
impl DataCollectionState {
pub fn is_supported(&self) -> bool {
- !matches!(self, DataCollectionState::Unsupported { .. })
+ !matches!(self, DataCollectionState::Unsupported)
}
pub fn is_enabled(&self) -> bool {
@@ -89,9 +89,6 @@ pub trait EditPredictionProvider: 'static + Sized {
debounce: bool,
cx: &mut Context<Self>,
);
- fn needs_terms_acceptance(&self, _cx: &App) -> bool {
- false
- }
fn cycle(
&mut self,
buffer: Entity<Buffer>,
@@ -124,7 +121,6 @@ pub trait EditPredictionProviderHandle {
fn data_collection_state(&self, cx: &App) -> DataCollectionState;
fn usage(&self, cx: &App) -> Option<EditPredictionUsage>;
fn toggle_data_collection(&self, cx: &mut App);
- fn needs_terms_acceptance(&self, cx: &App) -> bool;
fn is_refreshing(&self, cx: &App) -> bool;
fn refresh(
&self,
@@ -196,10 +192,6 @@ where
self.read(cx).is_enabled(buffer, cursor_position, cx)
}
- fn needs_terms_acceptance(&self, cx: &App) -> bool {
- self.read(cx).needs_terms_acceptance(cx)
- }
-
fn is_refreshing(&self, cx: &App) -> bool {
self.read(cx).is_refreshing()
}
@@ -168,7 +168,7 @@ impl Render for EditPredictionButton {
let account_status = agent.account_status.clone();
match account_status {
AccountStatus::NeedsActivation { activate_url } => {
- SupermavenButtonStatus::NeedsActivation(activate_url.clone())
+ SupermavenButtonStatus::NeedsActivation(activate_url)
}
AccountStatus::Unknown => SupermavenButtonStatus::Initializing,
AccountStatus::Ready => SupermavenButtonStatus::Ready,
@@ -242,13 +242,9 @@ impl Render for EditPredictionButton {
IconName::ZedPredictDisabled
};
- if zeta::should_show_upsell_modal(&self.user_store, cx) {
+ if zeta::should_show_upsell_modal() {
let tooltip_meta = if self.user_store.read(cx).current_user().is_some() {
- if self.user_store.read(cx).has_accepted_terms_of_service() {
- "Choose a Plan"
- } else {
- "Accept the Terms of Service"
- }
+ "Choose a Plan"
} else {
"Sign In"
};
@@ -228,21 +228,29 @@ pub struct ShowCompletions {
pub struct HandleInput(pub String);
/// Deletes from the cursor to the end of the next word.
+/// Stops before the end of the next word, if whitespace sequences of length >= 2 are encountered.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
pub struct DeleteToNextWordEnd {
#[serde(default)]
pub ignore_newlines: bool,
+ // Whether to stop before the end of the next word, if a language-defined bracket is encountered.
+ #[serde(default)]
+ pub ignore_brackets: bool,
}
/// Deletes from the cursor to the start of the previous word.
+/// Stops before the start of the previous word, if whitespace sequences of length >= 2 are encountered.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
pub struct DeleteToPreviousWordStart {
#[serde(default)]
pub ignore_newlines: bool,
+ // Whether to stop before the start of the previous word, if a language-defined bracket is encountered.
+ #[serde(default)]
+ pub ignore_brackets: bool,
}
/// Folds all code blocks at the specified indentation level.
@@ -753,6 +761,8 @@ actions!(
UniqueLinesCaseInsensitive,
/// Removes duplicate lines (case-sensitive).
UniqueLinesCaseSensitive,
- UnwrapSyntaxNode
+ UnwrapSyntaxNode,
+ /// Wraps selections in tag specified by language.
+ WrapSelectionsInTag
]
);
@@ -1,7 +1,9 @@
+use crate::scroll::ScrollAmount;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
- AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString,
- Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, uniform_list,
+ AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollHandle, ScrollStrategy,
+ SharedString, Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px,
+ uniform_list,
};
use itertools::Itertools;
use language::CodeLabel;
@@ -9,9 +11,9 @@ use language::{Buffer, LanguageName, LanguageRegistry};
use markdown::{Markdown, MarkdownElement};
use multi_buffer::{Anchor, ExcerptId};
use ordered_float::OrderedFloat;
-use project::CompletionSource;
use project::lsp_store::CompletionDocumentation;
use project::{CodeAction, Completion, TaskSourceKind};
+use project::{CompletionDisplayOptions, CompletionSource};
use task::DebugScenario;
use task::TaskContext;
@@ -184,6 +186,20 @@ impl CodeContextMenu {
CodeContextMenu::CodeActions(_) => false,
}
}
+
+ pub fn scroll_aside(
+ &mut self,
+ scroll_amount: ScrollAmount,
+ window: &mut Window,
+ cx: &mut Context<Editor>,
+ ) {
+ match self {
+ CodeContextMenu::Completions(completions_menu) => {
+ completions_menu.scroll_aside(scroll_amount, window, cx)
+ }
+ CodeContextMenu::CodeActions(_) => (),
+ }
+ }
}
pub enum ContextMenuOrigin {
@@ -207,12 +223,16 @@ pub struct CompletionsMenu {
filter_task: Task<()>,
cancel_filter: Arc<AtomicBool>,
scroll_handle: UniformListScrollHandle,
+ // The `ScrollHandle` used on the Markdown documentation rendered on the
+ // side of the completions menu.
+ pub scroll_handle_aside: ScrollHandle,
resolve_completions: bool,
show_completion_documentation: bool,
last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
markdown_cache: Rc<RefCell<VecDeque<(MarkdownCacheKey, Entity<Markdown>)>>>,
language_registry: Option<Arc<LanguageRegistry>>,
language: Option<LanguageName>,
+ display_options: CompletionDisplayOptions,
snippet_sort_order: SnippetSortOrder,
}
@@ -231,7 +251,7 @@ enum MarkdownCacheKey {
pub enum CompletionsMenuSource {
Normal,
SnippetChoices,
- Words,
+ Words { ignore_threshold: bool },
}
// TODO: There should really be a wrapper around fuzzy match tasks that does this.
@@ -252,6 +272,7 @@ impl CompletionsMenu {
is_incomplete: bool,
buffer: Entity<Buffer>,
completions: Box<[Completion]>,
+ display_options: CompletionDisplayOptions,
snippet_sort_order: SnippetSortOrder,
language_registry: Option<Arc<LanguageRegistry>>,
language: Option<LanguageName>,
@@ -279,11 +300,13 @@ impl CompletionsMenu {
filter_task: Task::ready(()),
cancel_filter: Arc::new(AtomicBool::new(false)),
scroll_handle: UniformListScrollHandle::new(),
+ scroll_handle_aside: ScrollHandle::new(),
resolve_completions: true,
last_rendered_range: RefCell::new(None).into(),
markdown_cache: RefCell::new(VecDeque::new()).into(),
language_registry,
language,
+ display_options,
snippet_sort_order,
};
@@ -348,12 +371,14 @@ impl CompletionsMenu {
filter_task: Task::ready(()),
cancel_filter: Arc::new(AtomicBool::new(false)),
scroll_handle: UniformListScrollHandle::new(),
+ scroll_handle_aside: ScrollHandle::new(),
resolve_completions: false,
show_completion_documentation: false,
last_rendered_range: RefCell::new(None).into(),
markdown_cache: RefCell::new(VecDeque::new()).into(),
language_registry: None,
language: None,
+ display_options: CompletionDisplayOptions::default(),
snippet_sort_order,
}
}
@@ -514,7 +539,7 @@ impl CompletionsMenu {
// Expand the range to resolve more completions than are predicted to be visible, to reduce
// jank on navigation.
let entry_indices = util::expanded_and_wrapped_usize_range(
- entry_range.clone(),
+ entry_range,
RESOLVE_BEFORE_ITEMS,
RESOLVE_AFTER_ITEMS,
entries.len(),
@@ -716,6 +741,33 @@ impl CompletionsMenu {
cx: &mut Context<Editor>,
) -> AnyElement {
let show_completion_documentation = self.show_completion_documentation;
+ let widest_completion_ix = if self.display_options.dynamic_width {
+ let completions = self.completions.borrow();
+ let widest_completion_ix = self
+ .entries
+ .borrow()
+ .iter()
+ .enumerate()
+ .max_by_key(|(_, mat)| {
+ let completion = &completions[mat.candidate_id];
+ let documentation = &completion.documentation;
+
+ let mut len = completion.label.text.chars().count();
+ if let Some(CompletionDocumentation::SingleLine(text)) = documentation {
+ if show_completion_documentation {
+ len += text.chars().count();
+ }
+ }
+
+ len
+ })
+ .map(|(ix, _)| ix);
+ drop(completions);
+ widest_completion_ix
+ } else {
+ None
+ };
+
let selected_item = self.selected_item;
let completions = self.completions.clone();
let entries = self.entries.clone();
@@ -842,7 +894,13 @@ impl CompletionsMenu {
.max_h(max_height_in_lines as f32 * window.line_height())
.track_scroll(self.scroll_handle.clone())
.with_sizing_behavior(ListSizingBehavior::Infer)
- .w(rems(34.));
+ .map(|this| {
+ if self.display_options.dynamic_width {
+ this.with_width_from_item(widest_completion_ix)
+ } else {
+ this.w(rems(34.))
+ }
+ });
Popover::new().child(list).into_any_element()
}
@@ -911,6 +969,7 @@ impl CompletionsMenu {
.max_w(max_size.width)
.max_h(max_size.height)
.overflow_y_scroll()
+ .track_scroll(&self.scroll_handle_aside)
.occlude(),
)
.into_any_element(),
@@ -1175,6 +1234,23 @@ impl CompletionsMenu {
}
});
}
+
+ pub fn scroll_aside(
+ &mut self,
+ amount: ScrollAmount,
+ window: &mut Window,
+ cx: &mut Context<Editor>,
+ ) {
+ let mut offset = self.scroll_handle_aside.offset();
+
+ offset.y -= amount.pixels(
+ window.line_height(),
+ self.scroll_handle_aside.bounds().size.height - px(16.),
+ ) / 2.0;
+
+ cx.notify();
+ self.scroll_handle_aside.set_offset(offset);
+ }
}
#[derive(Clone)]
@@ -1552,15 +1552,15 @@ pub mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let mut tab_size = rng.gen_range(1..=4);
- let buffer_start_excerpt_header_height = rng.gen_range(1..=5);
- let excerpt_header_height = rng.gen_range(1..=5);
+ let mut tab_size = rng.random_range(1..=4);
+ let buffer_start_excerpt_header_height = rng.random_range(1..=5);
+ let excerpt_header_height = rng.random_range(1..=5);
let font_size = px(14.0);
let max_wrap_width = 300.0;
- let mut wrap_width = if rng.gen_bool(0.1) {
+ let mut wrap_width = if rng.random_bool(0.1) {
None
} else {
- Some(px(rng.gen_range(0.0..=max_wrap_width)))
+ Some(px(rng.random_range(0.0..=max_wrap_width)))
};
log::info!("tab size: {}", tab_size);
@@ -1571,8 +1571,8 @@ pub mod tests {
});
let buffer = cx.update(|cx| {
- if rng.r#gen() {
- let len = rng.gen_range(0..10);
+ if rng.random() {
+ let len = rng.random_range(0..10);
let text = util::RandomCharIter::new(&mut rng)
.take(len)
.collect::<String>();
@@ -1609,12 +1609,12 @@ pub mod tests {
log::info!("display text: {:?}", snapshot.text());
for _i in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..=19 => {
- wrap_width = if rng.gen_bool(0.2) {
+ wrap_width = if rng.random_bool(0.2) {
None
} else {
- Some(px(rng.gen_range(0.0..=max_wrap_width)))
+ Some(px(rng.random_range(0.0..=max_wrap_width)))
};
log::info!("setting wrap width to {:?}", wrap_width);
map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
@@ -1634,28 +1634,27 @@ pub mod tests {
}
30..=44 => {
map.update(cx, |map, cx| {
- if rng.r#gen() || blocks.is_empty() {
+ if rng.random() || blocks.is_empty() {
let buffer = map.snapshot(cx).buffer_snapshot;
- let block_properties = (0..rng.gen_range(1..=1))
+ let block_properties = (0..rng.random_range(1..=1))
.map(|_| {
- let position =
- buffer.anchor_after(buffer.clip_offset(
- rng.gen_range(0..=buffer.len()),
- Bias::Left,
- ));
+ let position = buffer.anchor_after(buffer.clip_offset(
+ rng.random_range(0..=buffer.len()),
+ Bias::Left,
+ ));
- let placement = if rng.r#gen() {
+ let placement = if rng.random() {
BlockPlacement::Above(position)
} else {
BlockPlacement::Below(position)
};
- let height = rng.gen_range(1..5);
+ let height = rng.random_range(1..5);
log::info!(
"inserting block {:?} with height {}",
placement.as_ref().map(|p| p.to_point(&buffer)),
height
);
- let priority = rng.gen_range(1..100);
+ let priority = rng.random_range(1..100);
BlockProperties {
placement,
style: BlockStyle::Fixed,
@@ -1668,9 +1667,9 @@ pub mod tests {
blocks.extend(map.insert_blocks(block_properties, cx));
} else {
blocks.shuffle(&mut rng);
- let remove_count = rng.gen_range(1..=4.min(blocks.len()));
+ let remove_count = rng.random_range(1..=4.min(blocks.len()));
let block_ids_to_remove = (0..remove_count)
- .map(|_| blocks.remove(rng.gen_range(0..blocks.len())))
+ .map(|_| blocks.remove(rng.random_range(0..blocks.len())))
.collect();
log::info!("removing block ids {:?}", block_ids_to_remove);
map.remove_blocks(block_ids_to_remove, cx);
@@ -1679,16 +1678,16 @@ pub mod tests {
}
45..=79 => {
let mut ranges = Vec::new();
- for _ in 0..rng.gen_range(1..=3) {
+ for _ in 0..rng.random_range(1..=3) {
buffer.read_with(cx, |buffer, cx| {
let buffer = buffer.read(cx);
- let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
- let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
+ let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right);
+ let start = buffer.clip_offset(rng.random_range(0..=end), Left);
ranges.push(start..end);
});
}
- if rng.r#gen() && fold_count > 0 {
+ if rng.random() && fold_count > 0 {
log::info!("unfolding ranges: {:?}", ranges);
map.update(cx, |map, cx| {
map.unfold_intersecting(ranges, true, cx);
@@ -1727,8 +1726,8 @@ pub mod tests {
// Line boundaries
let buffer = &snapshot.buffer_snapshot;
for _ in 0..5 {
- let row = rng.gen_range(0..=buffer.max_point().row);
- let column = rng.gen_range(0..=buffer.line_len(MultiBufferRow(row)));
+ let row = rng.random_range(0..=buffer.max_point().row);
+ let column = rng.random_range(0..=buffer.line_len(MultiBufferRow(row)));
let point = buffer.clip_point(Point::new(row, column), Left);
let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point);
@@ -1776,8 +1775,8 @@ pub mod tests {
let min_point = snapshot.clip_point(DisplayPoint::new(DisplayRow(0), 0), Left);
let max_point = snapshot.clip_point(snapshot.max_point(), Right);
for _ in 0..5 {
- let row = rng.gen_range(0..=snapshot.max_point().row().0);
- let column = rng.gen_range(0..=snapshot.line_len(DisplayRow(row)));
+ let row = rng.random_range(0..=snapshot.max_point().row().0);
+ let column = rng.random_range(0..=snapshot.line_len(DisplayRow(row)));
let point = snapshot.clip_point(DisplayPoint::new(DisplayRow(row), column), Left);
log::info!("Moving from point {:?}", point);
@@ -128,10 +128,10 @@ impl<T> BlockPlacement<T> {
}
}
- fn sort_order(&self) -> u8 {
+ fn tie_break(&self) -> u8 {
match self {
- BlockPlacement::Above(_) => 0,
- BlockPlacement::Replace(_) => 1,
+ BlockPlacement::Replace(_) => 0,
+ BlockPlacement::Above(_) => 1,
BlockPlacement::Near(_) => 2,
BlockPlacement::Below(_) => 3,
}
@@ -143,7 +143,7 @@ impl BlockPlacement<Anchor> {
self.start()
.cmp(other.start(), buffer)
.then_with(|| other.end().cmp(self.end(), buffer))
- .then_with(|| self.sort_order().cmp(&other.sort_order()))
+ .then_with(|| self.tie_break().cmp(&other.tie_break()))
}
fn to_wrap_row(&self, wrap_snapshot: &WrapSnapshot) -> Option<BlockPlacement<WrapRow>> {
@@ -290,7 +290,10 @@ pub enum Block {
ExcerptBoundary {
excerpt: ExcerptInfo,
height: u32,
- starts_new_buffer: bool,
+ },
+ BufferHeader {
+ excerpt: ExcerptInfo,
+ height: u32,
},
}
@@ -303,27 +306,37 @@ impl Block {
..
} => BlockId::ExcerptBoundary(next_excerpt.id),
Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id),
+ Block::BufferHeader {
+ excerpt: next_excerpt,
+ ..
+ } => BlockId::ExcerptBoundary(next_excerpt.id),
}
}
pub fn has_height(&self) -> bool {
match self {
Block::Custom(block) => block.height.is_some(),
- Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => true,
+ Block::ExcerptBoundary { .. }
+ | Block::FoldedBuffer { .. }
+ | Block::BufferHeader { .. } => true,
}
}
pub fn height(&self) -> u32 {
match self {
Block::Custom(block) => block.height.unwrap_or(0),
- Block::ExcerptBoundary { height, .. } | Block::FoldedBuffer { height, .. } => *height,
+ Block::ExcerptBoundary { height, .. }
+ | Block::FoldedBuffer { height, .. }
+ | Block::BufferHeader { height, .. } => *height,
}
}
pub fn style(&self) -> BlockStyle {
match self {
Block::Custom(block) => block.style,
- Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => BlockStyle::Sticky,
+ Block::ExcerptBoundary { .. }
+ | Block::FoldedBuffer { .. }
+ | Block::BufferHeader { .. } => BlockStyle::Sticky,
}
}
@@ -332,6 +345,7 @@ impl Block {
Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)),
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => true,
+ Block::BufferHeader { .. } => true,
}
}
@@ -340,6 +354,7 @@ impl Block {
Block::Custom(block) => matches!(block.placement, BlockPlacement::Near(_)),
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => false,
+ Block::BufferHeader { .. } => false,
}
}
@@ -351,6 +366,7 @@ impl Block {
),
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => false,
+ Block::BufferHeader { .. } => false,
}
}
@@ -359,6 +375,7 @@ impl Block {
Block::Custom(block) => matches!(block.placement, BlockPlacement::Replace(_)),
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary { .. } => false,
+ Block::BufferHeader { .. } => false,
}
}
@@ -367,6 +384,7 @@ impl Block {
Block::Custom(_) => false,
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary { .. } => true,
+ Block::BufferHeader { .. } => true,
}
}
@@ -374,9 +392,8 @@ impl Block {
match self {
Block::Custom(_) => false,
Block::FoldedBuffer { .. } => true,
- Block::ExcerptBoundary {
- starts_new_buffer, ..
- } => *starts_new_buffer,
+ Block::ExcerptBoundary { .. } => false,
+ Block::BufferHeader { .. } => true,
}
}
}
@@ -393,14 +410,14 @@ impl Debug for Block {
.field("first_excerpt", &first_excerpt)
.field("height", height)
.finish(),
- Self::ExcerptBoundary {
- starts_new_buffer,
- excerpt,
- height,
- } => f
+ Self::ExcerptBoundary { excerpt, height } => f
.debug_struct("ExcerptBoundary")
.field("excerpt", excerpt)
- .field("starts_new_buffer", starts_new_buffer)
+ .field("height", height)
+ .finish(),
+ Self::BufferHeader { excerpt, height } => f
+ .debug_struct("BufferHeader")
+ .field("excerpt", excerpt)
.field("height", height)
.finish(),
}
@@ -662,13 +679,11 @@ impl BlockMap {
}),
);
- if buffer.show_headers() {
- blocks_in_edit.extend(self.header_and_footer_blocks(
- buffer,
- (start_bound, end_bound),
- wrap_snapshot,
- ));
- }
+ blocks_in_edit.extend(self.header_and_footer_blocks(
+ buffer,
+ (start_bound, end_bound),
+ wrap_snapshot,
+ ));
BlockMap::sort_blocks(&mut blocks_in_edit);
@@ -771,7 +786,7 @@ impl BlockMap {
if self.buffers_with_disabled_headers.contains(&new_buffer_id) {
continue;
}
- if self.folded_buffers.contains(&new_buffer_id) {
+ if self.folded_buffers.contains(&new_buffer_id) && buffer.show_headers() {
let mut last_excerpt_end_row = first_excerpt.end_row;
while let Some(next_boundary) = boundaries.peek() {
@@ -804,20 +819,24 @@ impl BlockMap {
}
}
- if new_buffer_id.is_some() {
+ let starts_new_buffer = new_buffer_id.is_some();
+ let block = if starts_new_buffer && buffer.show_headers() {
height += self.buffer_header_height;
- } else {
+ Block::BufferHeader {
+ excerpt: excerpt_boundary.next,
+ height,
+ }
+ } else if excerpt_boundary.prev.is_some() {
height += self.excerpt_header_height;
- }
-
- return Some((
- BlockPlacement::Above(WrapRow(wrap_row)),
Block::ExcerptBoundary {
excerpt: excerpt_boundary.next,
height,
- starts_new_buffer: new_buffer_id.is_some(),
- },
- ));
+ }
+ } else {
+ continue;
+ };
+
+ return Some((BlockPlacement::Above(WrapRow(wrap_row)), block));
}
})
}
@@ -828,6 +847,7 @@ impl BlockMap {
.start()
.cmp(placement_b.start())
.then_with(|| placement_b.end().cmp(placement_a.end()))
+ .then_with(|| placement_a.tie_break().cmp(&placement_b.tie_break()))
.then_with(|| {
if block_a.is_header() {
Ordering::Less
@@ -837,18 +857,29 @@ impl BlockMap {
Ordering::Equal
}
})
- .then_with(|| placement_a.sort_order().cmp(&placement_b.sort_order()))
.then_with(|| match (block_a, block_b) {
(
Block::ExcerptBoundary {
excerpt: excerpt_a, ..
+ }
+ | Block::BufferHeader {
+ excerpt: excerpt_a, ..
},
Block::ExcerptBoundary {
excerpt: excerpt_b, ..
+ }
+ | Block::BufferHeader {
+ excerpt: excerpt_b, ..
},
) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)),
- (Block::ExcerptBoundary { .. }, Block::Custom(_)) => Ordering::Less,
- (Block::Custom(_), Block::ExcerptBoundary { .. }) => Ordering::Greater,
+ (
+ Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
+ Block::Custom(_),
+ ) => Ordering::Less,
+ (
+ Block::Custom(_),
+ Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
+ ) => Ordering::Greater,
(Block::Custom(block_a), Block::Custom(block_b)) => block_a
.priority
.cmp(&block_b.priority)
@@ -1377,7 +1408,9 @@ impl BlockSnapshot {
while let Some(transform) = cursor.item() {
match &transform.block {
- Some(Block::ExcerptBoundary { excerpt, .. }) => {
+ Some(
+ Block::ExcerptBoundary { excerpt, .. } | Block::BufferHeader { excerpt, .. },
+ ) => {
return Some(StickyHeaderExcerpt { excerpt });
}
Some(block) if block.is_buffer_header() => return None,
@@ -2156,7 +2189,7 @@ mod tests {
}
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
- let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx);
@@ -2275,7 +2308,7 @@ mod tests {
new_heights.insert(block_ids[0], 3);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot, Default::default());
// Same height as before, should remain the same
assert_eq!(snapshot.text(), "aaa\n\n\n\n\n\nbbb\nccc\nddd\n\n\n");
}
@@ -2360,16 +2393,14 @@ mod tests {
buffer.edit([(Point::new(2, 0)..Point::new(3, 0), "")], None, cx);
buffer.snapshot(cx)
});
- let (inlay_snapshot, inlay_edits) = inlay_map.sync(
- buffer_snapshot.clone(),
- buffer_subscription.consume().into_inner(),
- );
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot, buffer_subscription.consume().into_inner());
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
- let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
+ let blocks_snapshot = block_map.read(wraps_snapshot, wrap_edits);
assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\nline5");
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
@@ -2454,7 +2485,7 @@ mod tests {
// Removing the replace block shows all the hidden blocks again.
let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
writer.remove(HashSet::from_iter([replace_block_id]));
- let blocks_snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let blocks_snapshot = block_map.read(wraps_snapshot, Default::default());
assert_eq!(
blocks_snapshot.text(),
"\nline1\n\nline2\n\n\nline 2.1\nline2.2\nline 2.3\nline 2.4\n\nline4\n\nline5"
@@ -2793,7 +2824,7 @@ mod tests {
buffer.read_with(cx, |buffer, cx| {
writer.fold_buffers([buffer_id_3], buffer, cx);
});
- let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
+ let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
let blocks = blocks_snapshot
.blocks_in_range(0..u32::MAX)
.collect::<Vec<_>>();
@@ -2846,7 +2877,7 @@ mod tests {
assert_eq!(buffer_ids.len(), 1);
let buffer_id = buffer_ids[0];
- let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
let (_, wrap_snapshot) =
@@ -2860,7 +2891,7 @@ mod tests {
buffer.read_with(cx, |buffer, cx| {
writer.fold_buffers([buffer_id], buffer, cx);
});
- let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
+ let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
let blocks = blocks_snapshot
.blocks_in_range(0..u32::MAX)
.collect::<Vec<_>>();
@@ -2868,12 +2899,7 @@ mod tests {
1,
blocks
.iter()
- .filter(|(_, block)| {
- match block {
- Block::FoldedBuffer { .. } => true,
- _ => false,
- }
- })
+ .filter(|(_, block)| { matches!(block, Block::FoldedBuffer { .. }) })
.count(),
"Should have one folded block, producing a header of the second buffer"
);
@@ -2896,21 +2922,21 @@ mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let wrap_width = if rng.gen_bool(0.2) {
+ let wrap_width = if rng.random_bool(0.2) {
None
} else {
- Some(px(rng.gen_range(0.0..=100.0)))
+ Some(px(rng.random_range(0.0..=100.0)))
};
let tab_size = 1.try_into().unwrap();
let font_size = px(14.0);
- let buffer_start_header_height = rng.gen_range(1..=5);
- let excerpt_header_height = rng.gen_range(1..=5);
+ let buffer_start_header_height = rng.random_range(1..=5);
+ let excerpt_header_height = rng.random_range(1..=5);
log::info!("Wrap width: {:?}", wrap_width);
log::info!("Excerpt Header Height: {:?}", excerpt_header_height);
- let is_singleton = rng.r#gen();
+ let is_singleton = rng.random();
let buffer = if is_singleton {
- let len = rng.gen_range(0..10);
+ let len = rng.random_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
log::info!("initial singleton buffer text: {:?}", text);
cx.update(|cx| MultiBuffer::build_simple(&text, cx))
@@ -2940,30 +2966,30 @@ mod tests {
for _ in 0..operations {
let mut buffer_edits = Vec::new();
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=19 => {
- let wrap_width = if rng.gen_bool(0.2) {
+ let wrap_width = if rng.random_bool(0.2) {
None
} else {
- Some(px(rng.gen_range(0.0..=100.0)))
+ Some(px(rng.random_range(0.0..=100.0)))
};
log::info!("Setting wrap width to {:?}", wrap_width);
wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
}
20..=39 => {
- let block_count = rng.gen_range(1..=5);
+ let block_count = rng.random_range(1..=5);
let block_properties = (0..block_count)
.map(|_| {
let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone());
let offset =
- buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left);
+ buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Left);
let mut min_height = 0;
- let placement = match rng.gen_range(0..3) {
+ let placement = match rng.random_range(0..3) {
0 => {
min_height = 1;
let start = buffer.anchor_after(offset);
let end = buffer.anchor_after(buffer.clip_offset(
- rng.gen_range(offset..=buffer.len()),
+ rng.random_range(offset..=buffer.len()),
Bias::Left,
));
BlockPlacement::Replace(start..=end)
@@ -2972,7 +2998,7 @@ mod tests {
_ => BlockPlacement::Below(buffer.anchor_after(offset)),
};
- let height = rng.gen_range(min_height..5);
+ let height = rng.random_range(min_height..5);
BlockProperties {
style: BlockStyle::Fixed,
placement,
@@ -3014,7 +3040,7 @@ mod tests {
}
}
40..=59 if !block_map.custom_blocks.is_empty() => {
- let block_count = rng.gen_range(1..=4.min(block_map.custom_blocks.len()));
+ let block_count = rng.random_range(1..=4.min(block_map.custom_blocks.len()));
let block_ids_to_remove = block_map
.custom_blocks
.choose_multiple(&mut rng, block_count)
@@ -3069,8 +3095,8 @@ mod tests {
let mut folded_count = folded_buffers.len();
let mut unfolded_count = unfolded_buffers.len();
- let fold = !unfolded_buffers.is_empty() && rng.gen_bool(0.5);
- let unfold = !folded_buffers.is_empty() && rng.gen_bool(0.5);
+ let fold = !unfolded_buffers.is_empty() && rng.random_bool(0.5);
+ let unfold = !folded_buffers.is_empty() && rng.random_bool(0.5);
if !fold && !unfold {
log::info!(
"Noop fold/unfold operation. Unfolded buffers: {unfolded_count}, folded buffers: {folded_count}"
@@ -3081,7 +3107,7 @@ mod tests {
buffer.update(cx, |buffer, cx| {
if fold {
let buffer_to_fold =
- unfolded_buffers[rng.gen_range(0..unfolded_buffers.len())];
+ unfolded_buffers[rng.random_range(0..unfolded_buffers.len())];
log::info!("Folding {buffer_to_fold:?}");
let related_excerpts = buffer_snapshot
.excerpts()
@@ -3107,7 +3133,7 @@ mod tests {
}
if unfold {
let buffer_to_unfold =
- folded_buffers[rng.gen_range(0..folded_buffers.len())];
+ folded_buffers[rng.random_range(0..folded_buffers.len())];
log::info!("Unfolding {buffer_to_unfold:?}");
unfolded_count += 1;
folded_count -= 1;
@@ -3120,7 +3146,7 @@ mod tests {
}
_ => {
buffer.update(cx, |buffer, cx| {
- let mutation_count = rng.gen_range(1..=5);
+ let mutation_count = rng.random_range(1..=5);
let subscription = buffer.subscribe();
buffer.randomly_mutate(&mut rng, mutation_count, cx);
buffer_snapshot = buffer.snapshot(cx);
@@ -3190,9 +3216,9 @@ mod tests {
// so we special case row 0 to assume a leading '\n'.
//
// Linehood is the birthright of strings.
- let mut input_text_lines = input_text.split('\n').enumerate().peekable();
+ let input_text_lines = input_text.split('\n').enumerate().peekable();
let mut block_row = 0;
- while let Some((wrap_row, input_line)) = input_text_lines.next() {
+ for (wrap_row, input_line) in input_text_lines {
let wrap_row = wrap_row as u32;
let multibuffer_row = wraps_snapshot
.to_point(WrapPoint::new(wrap_row, 0), Bias::Left)
@@ -3305,7 +3331,7 @@ mod tests {
);
for start_row in 0..expected_row_count {
- let end_row = rng.gen_range(start_row + 1..=expected_row_count);
+ let end_row = rng.random_range(start_row + 1..=expected_row_count);
let mut expected_text = expected_lines[start_row..end_row].join("\n");
if end_row < expected_row_count {
expected_text.push('\n');
@@ -3400,8 +3426,8 @@ mod tests {
);
for _ in 0..10 {
- let end_row = rng.gen_range(1..=expected_lines.len());
- let start_row = rng.gen_range(0..end_row);
+ let end_row = rng.random_range(1..=expected_lines.len());
+ let start_row = rng.random_range(0..end_row);
let mut expected_longest_rows_in_range = vec![];
let mut longest_line_len_in_range = 0;
@@ -3532,7 +3558,7 @@ mod tests {
..buffer_snapshot.anchor_after(Point::new(1, 0))],
false,
);
- let blocks_snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let blocks_snapshot = block_map.read(wraps_snapshot, Default::default());
assert_eq!(blocks_snapshot.text(), "abc\n\ndef\nghi\njkl\nmno");
}
@@ -1557,7 +1557,7 @@ mod tests {
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let mut map = FoldMap::new(inlay_snapshot.clone()).0;
let (mut writer, _, _) = map.write(inlay_snapshot, vec![]);
@@ -1636,7 +1636,7 @@ mod tests {
let buffer = MultiBuffer::build_simple("abcdefghijkl", cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot);
{
let mut map = FoldMap::new(inlay_snapshot.clone()).0;
@@ -1712,7 +1712,7 @@ mod tests {
let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let mut map = FoldMap::new(inlay_snapshot.clone()).0;
let (mut writer, _, _) = map.write(inlay_snapshot.clone(), vec![]);
@@ -1720,7 +1720,7 @@ mod tests {
(Point::new(0, 2)..Point::new(2, 2), FoldPlaceholder::test()),
(Point::new(3, 1)..Point::new(4, 1), FoldPlaceholder::test()),
]);
- let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]);
+ let (snapshot, _) = map.read(inlay_snapshot, vec![]);
assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee");
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
@@ -1747,7 +1747,7 @@ mod tests {
(Point::new(1, 2)..Point::new(3, 2), FoldPlaceholder::test()),
(Point::new(3, 1)..Point::new(4, 1), FoldPlaceholder::test()),
]);
- let (snapshot, _) = map.read(inlay_snapshot.clone(), vec![]);
+ let (snapshot, _) = map.read(inlay_snapshot, vec![]);
let fold_ranges = snapshot
.folds_in_range(Point::new(1, 0)..Point::new(1, 3))
.map(|fold| {
@@ -1771,9 +1771,9 @@ mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let len = rng.gen_range(0..10);
+ let len = rng.random_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
- let buffer = if rng.r#gen() {
+ let buffer = if rng.random() {
MultiBuffer::build_simple(&text, cx)
} else {
MultiBuffer::build_random(&mut rng, cx)
@@ -1782,7 +1782,7 @@ mod tests {
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let mut map = FoldMap::new(inlay_snapshot.clone()).0;
- let (mut initial_snapshot, _) = map.read(inlay_snapshot.clone(), vec![]);
+ let (mut initial_snapshot, _) = map.read(inlay_snapshot, vec![]);
let mut snapshot_edits = Vec::new();
let mut next_inlay_id = 0;
@@ -1790,7 +1790,7 @@ mod tests {
log::info!("text: {:?}", buffer_snapshot.text());
let mut buffer_edits = Vec::new();
let mut inlay_edits = Vec::new();
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=39 => {
snapshot_edits.extend(map.randomly_mutate(&mut rng));
}
@@ -1800,7 +1800,7 @@ mod tests {
}
_ => buffer.update(cx, |buffer, cx| {
let subscription = buffer.subscribe();
- let edit_count = rng.gen_range(1..=5);
+ let edit_count = rng.random_range(1..=5);
buffer.randomly_mutate(&mut rng, edit_count, cx);
buffer_snapshot = buffer.snapshot(cx);
let edits = subscription.consume().into_inner();
@@ -1917,10 +1917,14 @@ mod tests {
}
for _ in 0..5 {
- let mut start = snapshot
- .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Left);
- let mut end = snapshot
- .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right);
+ let mut start = snapshot.clip_offset(
+ FoldOffset(rng.random_range(0..=snapshot.len().0)),
+ Bias::Left,
+ );
+ let mut end = snapshot.clip_offset(
+ FoldOffset(rng.random_range(0..=snapshot.len().0)),
+ Bias::Right,
+ );
if start > end {
mem::swap(&mut start, &mut end);
}
@@ -1975,8 +1979,8 @@ mod tests {
for _ in 0..5 {
let end =
- buffer_snapshot.clip_offset(rng.gen_range(0..=buffer_snapshot.len()), Right);
- let start = buffer_snapshot.clip_offset(rng.gen_range(0..=end), Left);
+ buffer_snapshot.clip_offset(rng.random_range(0..=buffer_snapshot.len()), Right);
+ let start = buffer_snapshot.clip_offset(rng.random_range(0..=end), Left);
let expected_folds = map
.snapshot
.folds
@@ -2001,10 +2005,10 @@ mod tests {
let text = snapshot.text();
for _ in 0..5 {
- let start_row = rng.gen_range(0..=snapshot.max_point().row());
- let start_column = rng.gen_range(0..=snapshot.line_len(start_row));
- let end_row = rng.gen_range(0..=snapshot.max_point().row());
- let end_column = rng.gen_range(0..=snapshot.line_len(end_row));
+ let start_row = rng.random_range(0..=snapshot.max_point().row());
+ let start_column = rng.random_range(0..=snapshot.line_len(start_row));
+ let end_row = rng.random_range(0..=snapshot.max_point().row());
+ let end_column = rng.random_range(0..=snapshot.line_len(end_row));
let mut start =
snapshot.clip_point(FoldPoint::new(start_row, start_column), Bias::Left);
let mut end = snapshot.clip_point(FoldPoint::new(end_row, end_column), Bias::Right);
@@ -2109,17 +2113,17 @@ mod tests {
rng: &mut impl Rng,
) -> Vec<(FoldSnapshot, Vec<FoldEdit>)> {
let mut snapshot_edits = Vec::new();
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=39 if !self.snapshot.folds.is_empty() => {
let inlay_snapshot = self.snapshot.inlay_snapshot.clone();
let buffer = &inlay_snapshot.buffer;
let mut to_unfold = Vec::new();
- for _ in 0..rng.gen_range(1..=3) {
- let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
- let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
+ for _ in 0..rng.random_range(1..=3) {
+ let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right);
+ let start = buffer.clip_offset(rng.random_range(0..=end), Left);
to_unfold.push(start..end);
}
- let inclusive = rng.r#gen();
+ let inclusive = rng.random();
log::info!("unfolding {:?} (inclusive: {})", to_unfold, inclusive);
let (mut writer, snapshot, edits) = self.write(inlay_snapshot, vec![]);
snapshot_edits.push((snapshot, edits));
@@ -2130,9 +2134,9 @@ mod tests {
let inlay_snapshot = self.snapshot.inlay_snapshot.clone();
let buffer = &inlay_snapshot.buffer;
let mut to_fold = Vec::new();
- for _ in 0..rng.gen_range(1..=2) {
- let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
- let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
+ for _ in 0..rng.random_range(1..=2) {
+ let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right);
+ let start = buffer.clip_offset(rng.random_range(0..=end), Left);
to_fold.push((start..end, FoldPlaceholder::test()));
}
log::info!("folding {:?}", to_fold);
@@ -719,14 +719,18 @@ impl InlayMap {
let mut to_remove = Vec::new();
let mut to_insert = Vec::new();
let snapshot = &mut self.snapshot;
- for i in 0..rng.gen_range(1..=5) {
- if self.inlays.is_empty() || rng.r#gen() {
+ for i in 0..rng.random_range(1..=5) {
+ if self.inlays.is_empty() || rng.random() {
let position = snapshot.buffer.random_byte_range(0, rng).start;
- let bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
- let len = if rng.gen_bool(0.01) {
+ let bias = if rng.random() {
+ Bias::Left
+ } else {
+ Bias::Right
+ };
+ let len = if rng.random_bool(0.01) {
0
} else {
- rng.gen_range(1..=5)
+ rng.random_range(1..=5)
};
let text = util::RandomCharIter::new(&mut *rng)
.filter(|ch| *ch != '\r')
@@ -1665,8 +1669,8 @@ mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let len = rng.gen_range(0..30);
- let buffer = if rng.r#gen() {
+ let len = rng.random_range(0..30);
+ let buffer = if rng.random() {
let text = util::RandomCharIter::new(&mut rng)
.take(len)
.collect::<String>();
@@ -1683,7 +1687,7 @@ mod tests {
let mut prev_inlay_text = inlay_snapshot.text();
let mut buffer_edits = Vec::new();
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=50 => {
let (snapshot, edits) = inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
log::info!("mutated text: {:?}", snapshot.text());
@@ -1691,7 +1695,7 @@ mod tests {
}
_ => buffer.update(cx, |buffer, cx| {
let subscription = buffer.subscribe();
- let edit_count = rng.gen_range(1..=5);
+ let edit_count = rng.random_range(1..=5);
buffer.randomly_mutate(&mut rng, edit_count, cx);
buffer_snapshot = buffer.snapshot(cx);
let edits = subscription.consume().into_inner();
@@ -1740,7 +1744,7 @@ mod tests {
}
let mut text_highlights = TextHighlights::default();
- let text_highlight_count = rng.gen_range(0_usize..10);
+ let text_highlight_count = rng.random_range(0_usize..10);
let mut text_highlight_ranges = (0..text_highlight_count)
.map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
.collect::<Vec<_>>();
@@ -1762,10 +1766,10 @@ mod tests {
let mut inlay_highlights = InlayHighlights::default();
if !inlays.is_empty() {
- let inlay_highlight_count = rng.gen_range(0..inlays.len());
+ let inlay_highlight_count = rng.random_range(0..inlays.len());
let mut inlay_indices = BTreeSet::default();
while inlay_indices.len() < inlay_highlight_count {
- inlay_indices.insert(rng.gen_range(0..inlays.len()));
+ inlay_indices.insert(rng.random_range(0..inlays.len()));
}
let new_highlights = TreeMap::from_ordered_entries(
inlay_indices
@@ -1782,8 +1786,8 @@ mod tests {
}),
n => {
let inlay_text = inlay.text.to_string();
- let mut highlight_end = rng.gen_range(1..n);
- let mut highlight_start = rng.gen_range(0..highlight_end);
+ let mut highlight_end = rng.random_range(1..n);
+ let mut highlight_start = rng.random_range(0..highlight_end);
while !inlay_text.is_char_boundary(highlight_end) {
highlight_end += 1;
}
@@ -1805,9 +1809,9 @@ mod tests {
}
for _ in 0..5 {
- let mut end = rng.gen_range(0..=inlay_snapshot.len().0);
+ let mut end = rng.random_range(0..=inlay_snapshot.len().0);
end = expected_text.clip_offset(end, Bias::Right);
- let mut start = rng.gen_range(0..=end);
+ let mut start = rng.random_range(0..=end);
start = expected_text.clip_offset(start, Bias::Right);
let range = InlayOffset(start)..InlayOffset(end);
@@ -61,14 +61,14 @@ pub fn replacement(c: char) -> Option<&'static str> {
// but could if we tracked state in the classifier.
const IDEOGRAPHIC_SPACE: char = '\u{3000}';
-const C0_SYMBOLS: &'static [&'static str] = &[
+const C0_SYMBOLS: &[&str] = &[
"␀", "␁", "␂", "␃", "␄", "␅", "␆", "␇", "␈", "␉", "␊", "␋", "␌", "␍", "␎", "␏", "␐", "␑", "␒",
"␓", "␔", "␕", "␖", "␗", "␘", "␙", "␚", "␛", "␜", "␝", "␞", "␟",
];
-const DEL: &'static str = "␡";
+const DEL: &str = "␡";
// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0
-pub const FORMAT: &'static [(char, char)] = &[
+pub const FORMAT: &[(char, char)] = &[
('\u{ad}', '\u{ad}'),
('\u{600}', '\u{605}'),
('\u{61c}', '\u{61c}'),
@@ -93,7 +93,7 @@ pub const FORMAT: &'static [(char, char)] = &[
];
// hand-made base on https://invisible-characters.com (Excluding Cf)
-pub const OTHER: &'static [(char, char)] = &[
+pub const OTHER: &[(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{115F}', '\u{1160}'),
('\u{17b4}', '\u{17b5}'),
@@ -107,7 +107,7 @@ pub const OTHER: &'static [(char, char)] = &[
];
// a subset of FORMAT/OTHER that may appear within glyphs
-const PRESERVE: &'static [(char, char)] = &[
+const PRESERVE: &[(char, char)] = &[
('\u{034f}', '\u{034f}'),
('\u{200d}', '\u{200d}'),
('\u{17b4}', '\u{17b5}'),
@@ -116,7 +116,7 @@ impl TabMap {
state.new.end = edit.new.end;
Some(None) // Skip this edit, it's merged
} else {
- let new_state = edit.clone();
+ let new_state = edit;
let result = Some(Some(state.clone())); // Yield the previous edit
**state = new_state;
result
@@ -611,7 +611,7 @@ mod tests {
fn test_expand_tabs(cx: &mut gpui::App) {
let buffer = MultiBuffer::build_simple("", cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
@@ -628,7 +628,7 @@ mod tests {
let buffer = MultiBuffer::build_simple(input, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
@@ -675,7 +675,7 @@ mod tests {
let buffer = MultiBuffer::build_simple(input, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
@@ -689,7 +689,7 @@ mod tests {
let buffer = MultiBuffer::build_simple(input, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
- let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot);
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
@@ -736,9 +736,9 @@ mod tests {
#[gpui::test(iterations = 100)]
fn test_random_tabs(cx: &mut gpui::App, mut rng: StdRng) {
- let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
- let len = rng.gen_range(0..30);
- let buffer = if rng.r#gen() {
+ let tab_size = NonZeroU32::new(rng.random_range(1..=4)).unwrap();
+ let len = rng.random_range(0..30);
+ let buffer = if rng.random() {
let text = util::RandomCharIter::new(&mut rng)
.take(len)
.collect::<String>();
@@ -749,7 +749,7 @@ mod tests {
let buffer_snapshot = buffer.read(cx).snapshot(cx);
log::info!("Buffer text: {:?}", buffer_snapshot.text());
- let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot);
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
let (mut fold_map, _) = FoldMap::new(inlay_snapshot.clone());
fold_map.randomly_mutate(&mut rng);
@@ -758,7 +758,7 @@ mod tests {
let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng);
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
- let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
+ let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
let text = text::Rope::from(tabs_snapshot.text().as_str());
@@ -769,11 +769,11 @@ mod tests {
);
for _ in 0..5 {
- let end_row = rng.gen_range(0..=text.max_point().row);
- let end_column = rng.gen_range(0..=text.line_len(end_row));
+ let end_row = rng.random_range(0..=text.max_point().row);
+ let end_column = rng.random_range(0..=text.line_len(end_row));
let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right));
- let start_row = rng.gen_range(0..=text.max_point().row);
- let start_column = rng.gen_range(0..=text.line_len(start_row));
+ let start_row = rng.random_range(0..=text.max_point().row);
+ let start_column = rng.random_range(0..=text.line_len(start_row));
let mut start =
TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left));
if start > end {
@@ -1215,12 +1215,12 @@ mod tests {
.unwrap_or(10);
let text_system = cx.read(|cx| cx.text_system().clone());
- let mut wrap_width = if rng.gen_bool(0.1) {
+ let mut wrap_width = if rng.random_bool(0.1) {
None
} else {
- Some(px(rng.gen_range(0.0..=1000.0)))
+ Some(px(rng.random_range(0.0..=1000.0)))
};
- let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
+ let tab_size = NonZeroU32::new(rng.random_range(1..=4)).unwrap();
let font = test_font();
let _font_id = text_system.resolve_font(&font);
@@ -1230,10 +1230,10 @@ mod tests {
log::info!("Wrap width: {:?}", wrap_width);
let buffer = cx.update(|cx| {
- if rng.r#gen() {
+ if rng.random() {
MultiBuffer::build_random(&mut rng, cx)
} else {
- let len = rng.gen_range(0..10);
+ let len = rng.random_range(0..10);
let text = util::RandomCharIter::new(&mut rng)
.take(len)
.collect::<String>();
@@ -1281,12 +1281,12 @@ mod tests {
log::info!("{} ==============================================", _i);
let mut buffer_edits = Vec::new();
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=19 => {
- wrap_width = if rng.gen_bool(0.2) {
+ wrap_width = if rng.random_bool(0.2) {
None
} else {
- Some(px(rng.gen_range(0.0..=1000.0)))
+ Some(px(rng.random_range(0.0..=1000.0)))
};
log::info!("Setting wrap width to {:?}", wrap_width);
wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
@@ -1317,7 +1317,7 @@ mod tests {
_ => {
buffer.update(cx, |buffer, cx| {
let subscription = buffer.subscribe();
- let edit_count = rng.gen_range(1..=5);
+ let edit_count = rng.random_range(1..=5);
buffer.randomly_mutate(&mut rng, edit_count, cx);
buffer_snapshot = buffer.snapshot(cx);
buffer_edits.extend(subscription.consume());
@@ -1341,7 +1341,7 @@ mod tests {
snapshot.verify_chunks(&mut rng);
edits.push((snapshot, wrap_edits));
- if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.random_bool(0.4) {
log::info!("Waiting for wrapping to finish");
while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
notifications.next().await.unwrap();
@@ -1479,8 +1479,8 @@ mod tests {
impl WrapSnapshot {
fn verify_chunks(&mut self, rng: &mut impl Rng) {
for _ in 0..5 {
- let mut end_row = rng.gen_range(0..=self.max_point().row());
- let start_row = rng.gen_range(0..=end_row);
+ let mut end_row = rng.random_range(0..=self.max_point().row());
+ let start_row = rng.random_range(0..=end_row);
end_row += 1;
let mut expected_text = self.text_chunks(start_row).collect::<String>();
@@ -147,23 +147,24 @@ use multi_buffer::{
use parking_lot::Mutex;
use persistence::DB;
use project::{
- BreakpointWithPosition, CodeAction, Completion, CompletionIntent, CompletionResponse,
- CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, Location, LocationLink,
- PrepareRenameResponse, Project, ProjectItem, ProjectPath, ProjectTransaction, TaskSourceKind,
- debugger::breakpoint_store::Breakpoint,
+ BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent,
+ CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint,
+ Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, ProjectPath,
+ ProjectTransaction, TaskSourceKind,
debugger::{
breakpoint_store::{
- BreakpointEditAction, BreakpointSessionState, BreakpointState, BreakpointStore,
- BreakpointStoreEvent,
+ Breakpoint, BreakpointEditAction, BreakpointSessionState, BreakpointState,
+ BreakpointStore, BreakpointStoreEvent,
},
session::{Session, SessionEvent},
},
git_store::{GitStoreEvent, RepositoryEvent},
lsp_store::{CompletionDocumentation, FormatTrigger, LspFormatTarget, OpenLspBufferHandle},
- project_settings::{DiagnosticSeverity, GoToDiagnosticSeverityFilter},
- project_settings::{GitGutterSetting, ProjectSettings},
+ project_settings::{
+ DiagnosticSeverity, GitGutterSetting, GoToDiagnosticSeverityFilter, ProjectSettings,
+ },
};
-use rand::{seq::SliceRandom, thread_rng};
+use rand::seq::SliceRandom;
use rpc::{ErrorCode, ErrorExt, proto::PeerId};
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide};
use selections_collection::{
@@ -187,7 +188,6 @@ use std::{
sync::{Arc, LazyLock},
time::{Duration, Instant},
};
-use sum_tree::TreeMap;
use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
use text::{BufferId, FromAnchor, OffsetUtf16, Rope};
use theme::{
@@ -217,7 +217,6 @@ use crate::{
pub const FILE_HEADER_HEIGHT: u32 = 2;
pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1;
-pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2;
const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
const MAX_LINE_LEN: usize = 1024;
const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10;
@@ -225,7 +224,7 @@ const MAX_SELECTION_HISTORY_LEN: usize = 1024;
pub(crate) const CURSORS_VISIBLE_FOR: Duration = Duration::from_millis(2000);
#[doc(hidden)]
pub const CODE_ACTIONS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(250);
-const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
+pub const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5);
pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5);
@@ -266,7 +265,6 @@ pub type RenderDiffHunkControlsFn = Arc<
enum ReportEditorEvent {
Saved { auto_saved: bool },
EditorOpened,
- ZetaTosClicked,
Closed,
}
@@ -275,7 +273,6 @@ impl ReportEditorEvent {
match self {
Self::Saved { .. } => "Editor Saved",
Self::EditorOpened => "Editor Opened",
- Self::ZetaTosClicked => "Edit Prediction Provider ToS Clicked",
Self::Closed => "Editor Closed",
}
}
@@ -795,10 +792,7 @@ impl MinimapVisibility {
}
fn disabled(&self) -> bool {
- match *self {
- Self::Disabled => true,
- _ => false,
- }
+ matches!(*self, Self::Disabled)
}
fn settings_visibility(&self) -> bool {
@@ -1049,6 +1043,7 @@ pub struct Editor {
inline_diagnostics_update: Task<()>,
inline_diagnostics_enabled: bool,
diagnostics_enabled: bool,
+ word_completions_enabled: bool,
inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>,
soft_wrap_mode_override: Option<language_settings::SoftWrap>,
hard_wrap: Option<usize>,
@@ -1078,8 +1073,8 @@ pub struct Editor {
placeholder_text: Option<Arc<str>>,
highlight_order: usize,
highlighted_rows: HashMap<TypeId, Vec<RowHighlight>>,
- background_highlights: TreeMap<HighlightKey, BackgroundHighlight>,
- gutter_highlights: TreeMap<TypeId, GutterHighlight>,
+ background_highlights: HashMap<HighlightKey, BackgroundHighlight>,
+ gutter_highlights: HashMap<TypeId, GutterHighlight>,
scrollbar_marker_state: ScrollbarMarkerState,
active_indent_guides_state: ActiveIndentGuidesState,
nav_history: Option<ItemNavHistory>,
@@ -1813,7 +1808,7 @@ impl Editor {
let font_size = style.font_size.to_pixels(window.rem_size());
let editor = cx.entity().downgrade();
let fold_placeholder = FoldPlaceholder {
- constrain_width: true,
+ constrain_width: false,
render: Arc::new(move |fold_id, fold_range, cx| {
let editor = editor.clone();
div()
@@ -1870,8 +1865,8 @@ impl Editor {
blink_manager
});
- let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. })
- .then(|| language_settings::SoftWrap::None);
+ let soft_wrap_mode_override =
+ matches!(mode, EditorMode::SingleLine).then(|| language_settings::SoftWrap::None);
let mut project_subscriptions = Vec::new();
if full_mode && let Some(project) = project.as_ref() {
@@ -1916,6 +1911,60 @@ impl Editor {
editor.update_lsp_data(false, Some(*buffer_id), window, cx);
}
}
+
+ project::Event::EntryRenamed(transaction) => {
+ let Some(workspace) = editor.workspace() else {
+ return;
+ };
+ let Some(active_editor) = workspace.read(cx).active_item_as::<Self>(cx)
+ else {
+ return;
+ };
+ if active_editor.entity_id() == cx.entity_id() {
+ let edited_buffers_already_open = {
+ let other_editors: Vec<Entity<Editor>> = workspace
+ .read(cx)
+ .panes()
+ .iter()
+ .flat_map(|pane| pane.read(cx).items_of_type::<Editor>())
+ .filter(|editor| editor.entity_id() != cx.entity_id())
+ .collect();
+
+ transaction.0.keys().all(|buffer| {
+ other_editors.iter().any(|editor| {
+ let multi_buffer = editor.read(cx).buffer();
+ multi_buffer.read(cx).is_singleton()
+ && multi_buffer.read(cx).as_singleton().map_or(
+ false,
+ |singleton| {
+ singleton.entity_id() == buffer.entity_id()
+ },
+ )
+ })
+ })
+ };
+
+ if !edited_buffers_already_open {
+ let workspace = workspace.downgrade();
+ let transaction = transaction.clone();
+ cx.defer_in(window, move |_, window, cx| {
+ cx.spawn_in(window, async move |editor, cx| {
+ Self::open_project_transaction(
+ &editor,
+ workspace,
+ transaction,
+ "Rename".to_string(),
+ cx,
+ )
+ .await
+ .ok()
+ })
+ .detach();
+ });
+ }
+ }
+ }
+
_ => {}
},
));
@@ -1956,26 +2005,24 @@ impl Editor {
let git_store = project.read(cx).git_store().clone();
let project = project.clone();
project_subscriptions.push(cx.subscribe(&git_store, move |this, _, event, cx| {
- match event {
- GitStoreEvent::RepositoryUpdated(
- _,
- RepositoryEvent::Updated {
- new_instance: true, ..
- },
- _,
- ) => {
- this.load_diff_task = Some(
- update_uncommitted_diff_for_buffer(
- cx.entity(),
- &project,
- this.buffer.read(cx).all_buffers(),
- this.buffer.clone(),
- cx,
- )
- .shared(),
- );
- }
- _ => {}
+ if let GitStoreEvent::RepositoryUpdated(
+ _,
+ RepositoryEvent::Updated {
+ new_instance: true, ..
+ },
+ _,
+ ) = event
+ {
+ this.load_diff_task = Some(
+ update_uncommitted_diff_for_buffer(
+ cx.entity(),
+ &project,
+ this.buffer.read(cx).all_buffers(),
+ this.buffer.clone(),
+ cx,
+ )
+ .shared(),
+ );
}
}));
}
@@ -1998,14 +2045,12 @@ impl Editor {
.detach();
}
- let show_indent_guides = if matches!(
- mode,
- EditorMode::SingleLine { .. } | EditorMode::Minimap { .. }
- ) {
- Some(false)
- } else {
- None
- };
+ let show_indent_guides =
+ if matches!(mode, EditorMode::SingleLine | EditorMode::Minimap { .. }) {
+ Some(false)
+ } else {
+ None
+ };
let breakpoint_store = match (&mode, project.as_ref()) {
(EditorMode::Full { .. }, Some(project)) => Some(project.read(cx).breakpoint_store()),
@@ -2065,7 +2110,7 @@ impl Editor {
vertical: full_mode,
},
minimap_visibility: MinimapVisibility::for_mode(&mode, cx),
- offset_content: !matches!(mode, EditorMode::SingleLine { .. }),
+ offset_content: !matches!(mode, EditorMode::SingleLine),
show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs,
show_gutter: full_mode,
show_line_numbers: (!full_mode).then_some(false),
@@ -2080,8 +2125,8 @@ impl Editor {
placeholder_text: None,
highlight_order: 0,
highlighted_rows: HashMap::default(),
- background_highlights: TreeMap::default(),
- gutter_highlights: TreeMap::default(),
+ background_highlights: HashMap::default(),
+ gutter_highlights: HashMap::default(),
scrollbar_marker_state: ScrollbarMarkerState::default(),
active_indent_guides_state: ActiveIndentGuidesState::default(),
nav_history: None,
@@ -2132,6 +2177,7 @@ impl Editor {
},
inline_diagnostics_enabled: full_mode,
diagnostics_enabled: full_mode,
+ word_completions_enabled: full_mode,
inline_value_cache: InlineValueCache::new(inlay_hint_settings.show_value_hints),
inlay_hint_cache: InlayHintCache::new(inlay_hint_settings),
gutter_hovered: false,
@@ -2419,7 +2465,7 @@ impl Editor {
let mut key_context = KeyContext::new_with_defaults();
key_context.add("Editor");
let mode = match self.mode {
- EditorMode::SingleLine { .. } => "single_line",
+ EditorMode::SingleLine => "single_line",
EditorMode::AutoHeight { .. } => "auto_height",
EditorMode::Minimap { .. } => "minimap",
EditorMode::Full { .. } => "full",
@@ -2556,7 +2602,7 @@ impl Editor {
|| binding
.keystrokes()
.first()
- .is_some_and(|keystroke| keystroke.modifiers.modified())
+ .is_some_and(|keystroke| keystroke.modifiers().modified())
}))
}
@@ -2586,7 +2632,7 @@ impl Editor {
cx: &mut Context<Workspace>,
) -> Task<Result<Entity<Editor>>> {
let project = workspace.project().clone();
- let create = project.update(cx, |project, cx| project.create_buffer(cx));
+ let create = project.update(cx, |project, cx| project.create_buffer(true, cx));
cx.spawn_in(window, async move |workspace, cx| {
let buffer = create.await?;
@@ -2624,7 +2670,7 @@ impl Editor {
cx: &mut Context<Workspace>,
) {
let project = workspace.project().clone();
- let create = project.update(cx, |project, cx| project.create_buffer(cx));
+ let create = project.update(cx, |project, cx| project.create_buffer(true, cx));
cx.spawn_in(window, async move |workspace, cx| {
let buffer = create.await?;
@@ -3252,35 +3298,31 @@ impl Editor {
selections.select_anchors(other_selections);
});
- let other_subscription =
- cx.subscribe(&other, |this, other, other_evt, cx| match other_evt {
- EditorEvent::SelectionsChanged { local: true } => {
- let other_selections = other.read(cx).selections.disjoint.to_vec();
- if other_selections.is_empty() {
- return;
- }
- this.selections.change_with(cx, |selections| {
- selections.select_anchors(other_selections);
- });
+ let other_subscription = cx.subscribe(&other, |this, other, other_evt, cx| {
+ if let EditorEvent::SelectionsChanged { local: true } = other_evt {
+ let other_selections = other.read(cx).selections.disjoint.to_vec();
+ if other_selections.is_empty() {
+ return;
}
- _ => {}
- });
+ this.selections.change_with(cx, |selections| {
+ selections.select_anchors(other_selections);
+ });
+ }
+ });
- let this_subscription =
- cx.subscribe_self::<EditorEvent>(move |this, this_evt, cx| match this_evt {
- EditorEvent::SelectionsChanged { local: true } => {
- let these_selections = this.selections.disjoint.to_vec();
- if these_selections.is_empty() {
- return;
- }
- other.update(cx, |other_editor, cx| {
- other_editor.selections.change_with(cx, |selections| {
- selections.select_anchors(these_selections);
- })
- });
+ let this_subscription = cx.subscribe_self::<EditorEvent>(move |this, this_evt, cx| {
+ if let EditorEvent::SelectionsChanged { local: true } = this_evt {
+ let these_selections = this.selections.disjoint.to_vec();
+ if these_selections.is_empty() {
+ return;
}
- _ => {}
- });
+ other.update(cx, |other_editor, cx| {
+ other_editor.selections.change_with(cx, |selections| {
+ selections.select_anchors(these_selections);
+ })
+ });
+ }
+ });
Subscription::join(other_subscription, this_subscription)
}
@@ -4568,7 +4610,7 @@ impl Editor {
let mut char_position = 0u32;
let mut end_tag_offset = None;
- 'outer: for chunk in snapshot.text_for_range(range.clone()) {
+ 'outer: for chunk in snapshot.text_for_range(range) {
if let Some(byte_pos) = chunk.find(&**end_tag) {
let chars_before_match =
chunk[..byte_pos].chars().count() as u32;
@@ -4883,8 +4925,15 @@ impl Editor {
});
match completions_source {
- Some(CompletionsMenuSource::Words) => {
- self.show_word_completions(&ShowWordCompletions, window, cx)
+ Some(CompletionsMenuSource::Words { .. }) => {
+ self.open_or_update_completions_menu(
+ Some(CompletionsMenuSource::Words {
+ ignore_threshold: false,
+ }),
+ None,
+ window,
+ cx,
+ );
}
Some(CompletionsMenuSource::Normal)
| Some(CompletionsMenuSource::SnippetChoices)
@@ -4918,11 +4967,7 @@ impl Editor {
cx: &mut Context<Self>,
) -> bool {
let position = self.selections.newest_anchor().head();
- let multibuffer = self.buffer.read(cx);
- let Some(buffer) = position
- .buffer_id
- .and_then(|buffer_id| multibuffer.buffer(buffer_id).clone())
- else {
+ let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else {
return false;
};
@@ -5396,7 +5441,14 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.open_or_update_completions_menu(Some(CompletionsMenuSource::Words), None, window, cx);
+ self.open_or_update_completions_menu(
+ Some(CompletionsMenuSource::Words {
+ ignore_threshold: true,
+ }),
+ None,
+ window,
+ cx,
+ );
}
pub fn show_completions(
@@ -5445,9 +5497,13 @@ impl Editor {
drop(multibuffer_snapshot);
+ let mut ignore_word_threshold = false;
let provider = match requested_source {
Some(CompletionsMenuSource::Normal) | None => self.completion_provider.clone(),
- Some(CompletionsMenuSource::Words) => None,
+ Some(CompletionsMenuSource::Words { ignore_threshold }) => {
+ ignore_word_threshold = ignore_threshold;
+ None
+ }
Some(CompletionsMenuSource::SnippetChoices) => {
log::error!("bug: SnippetChoices requested_source is not handled");
None
@@ -5568,6 +5624,13 @@ impl Editor {
.as_ref()
.is_none_or(|query| !query.chars().any(|c| c.is_digit(10)));
+ let omit_word_completions = !self.word_completions_enabled
+ || (!ignore_word_threshold
+ && match &query {
+ Some(query) => query.chars().count() < completion_settings.words_min_length,
+ None => completion_settings.words_min_length != 0,
+ });
+
let (mut words, provider_responses) = match &provider {
Some(provider) => {
let provider_responses = provider.completions(
@@ -5579,9 +5642,11 @@ impl Editor {
cx,
);
- let words = match completion_settings.words {
- WordsCompletionMode::Disabled => Task::ready(BTreeMap::default()),
- WordsCompletionMode::Enabled | WordsCompletionMode::Fallback => cx
+ let words = match (omit_word_completions, completion_settings.words) {
+ (true, _) | (_, WordsCompletionMode::Disabled) => {
+ Task::ready(BTreeMap::default())
+ }
+ (false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx
.background_spawn(async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
@@ -5593,16 +5658,20 @@ impl Editor {
(words, provider_responses)
}
- None => (
- cx.background_spawn(async move {
- buffer_snapshot.words_in_range(WordsQuery {
- fuzzy_contents: None,
- range: word_search_range,
- skip_digits,
+ None => {
+ let words = if omit_word_completions {
+ Task::ready(BTreeMap::default())
+ } else {
+ cx.background_spawn(async move {
+ buffer_snapshot.words_in_range(WordsQuery {
+ fuzzy_contents: None,
+ range: word_search_range,
+ skip_digits,
+ })
})
- }),
- Task::ready(Ok(Vec::new())),
- ),
+ };
+ (words, Task::ready(Ok(Vec::new())))
+ }
};
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
@@ -5619,17 +5688,25 @@ impl Editor {
// that having one source with `is_incomplete: true` doesn't cause all to be re-queried.
let mut completions = Vec::new();
let mut is_incomplete = false;
+ let mut display_options: Option<CompletionDisplayOptions> = None;
if let Some(provider_responses) = provider_responses.await.log_err()
&& !provider_responses.is_empty()
{
for response in provider_responses {
completions.extend(response.completions);
is_incomplete = is_incomplete || response.is_incomplete;
+ match display_options.as_mut() {
+ None => {
+ display_options = Some(response.display_options);
+ }
+ Some(options) => options.merge(&response.display_options),
+ }
}
if completion_settings.words == WordsCompletionMode::Fallback {
words = Task::ready(BTreeMap::default());
}
}
+ let display_options = display_options.unwrap_or_default();
let mut words = words.await;
if let Some(word_to_exclude) = &word_to_exclude {
@@ -5671,6 +5748,7 @@ impl Editor {
is_incomplete,
buffer.clone(),
completions.into(),
+ display_options,
snippet_sort_order,
languages,
language,
@@ -5692,34 +5770,31 @@ impl Editor {
let Ok(()) = editor.update_in(cx, |editor, window, cx| {
// Newer menu already set, so exit.
- match editor.context_menu.borrow().as_ref() {
- Some(CodeContextMenu::Completions(prev_menu)) => {
- if prev_menu.id > id {
- return;
- }
- }
- _ => {}
+ if let Some(CodeContextMenu::Completions(prev_menu)) =
+ editor.context_menu.borrow().as_ref()
+ && prev_menu.id > id
+ {
+ return;
};
// Only valid to take prev_menu because it the new menu is immediately set
// below, or the menu is hidden.
- match editor.context_menu.borrow_mut().take() {
- Some(CodeContextMenu::Completions(prev_menu)) => {
- let position_matches =
- if prev_menu.initial_position == menu.initial_position {
- true
- } else {
- let snapshot = editor.buffer.read(cx).read(cx);
- prev_menu.initial_position.to_offset(&snapshot)
- == menu.initial_position.to_offset(&snapshot)
- };
- if position_matches {
- // Preserve markdown cache before `set_filter_results` because it will
- // try to populate the documentation cache.
- menu.preserve_markdown_cache(prev_menu);
- }
+ if let Some(CodeContextMenu::Completions(prev_menu)) =
+ editor.context_menu.borrow_mut().take()
+ {
+ let position_matches =
+ if prev_menu.initial_position == menu.initial_position {
+ true
+ } else {
+ let snapshot = editor.buffer.read(cx).read(cx);
+ prev_menu.initial_position.to_offset(&snapshot)
+ == menu.initial_position.to_offset(&snapshot)
+ };
+ if position_matches {
+ // Preserve markdown cache before `set_filter_results` because it will
+ // try to populate the documentation cache.
+ menu.preserve_markdown_cache(prev_menu);
}
- _ => {}
};
menu.set_filter_results(matches, provider, window, cx);
@@ -5889,7 +5964,7 @@ impl Editor {
multibuffer_anchor.start.to_offset(&snapshot)
..multibuffer_anchor.end.to_offset(&snapshot)
};
- if newest_anchor.head().buffer_id != Some(buffer.remote_id()) {
+ if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) {
return None;
}
@@ -6210,12 +6285,11 @@ impl Editor {
}
});
Some(cx.background_spawn(async move {
- let scenarios = futures::future::join_all(scenarios)
+ futures::future::join_all(scenarios)
.await
.into_iter()
.flatten()
- .collect::<Vec<_>>();
- scenarios
+ .collect::<Vec<_>>()
}))
})
.unwrap_or_else(|| Task::ready(vec![]))
@@ -6313,7 +6387,7 @@ impl Editor {
}))
}
CodeActionsItem::DebugScenario(scenario) => {
- let context = actions_menu.actions.context.clone();
+ let context = actions_menu.actions.context;
workspace.update(cx, |workspace, cx| {
dap::send_telemetry(&scenario, TelemetrySpawnLocation::Gutter, cx);
@@ -6332,7 +6406,7 @@ impl Editor {
}
pub async fn open_project_transaction(
- this: &WeakEntity<Editor>,
+ editor: &WeakEntity<Editor>,
workspace: WeakEntity<Workspace>,
transaction: ProjectTransaction,
title: String,
@@ -6350,7 +6424,7 @@ impl Editor {
if let Some((buffer, transaction)) = entries.first() {
if entries.len() == 1 {
- let excerpt = this.update(cx, |editor, cx| {
+ let excerpt = editor.update(cx, |editor, cx| {
editor
.buffer()
.read(cx)
@@ -6389,7 +6463,7 @@ impl Editor {
PathKey::for_buffer(buffer_handle, cx),
buffer_handle.clone(),
edited_ranges,
- DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
@@ -6513,7 +6587,7 @@ impl Editor {
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Option<()> {
let newest_selection = self.selections.newest_anchor().clone();
- let newest_selection_adjusted = self.selections.newest_adjusted(cx).clone();
+ let newest_selection_adjusted = self.selections.newest_adjusted(cx);
let buffer = self.buffer.read(cx);
if newest_selection.head().diff_base_anchor.is_some() {
return None;
@@ -6608,7 +6682,7 @@ impl Editor {
buffer_row: Some(point.row),
..Default::default()
};
- let Some(blame_entry) = blame
+ let Some((buffer, blame_entry)) = blame
.update(cx, |blame, cx| blame.blame_for_rows(&[row_info], cx).next())
.flatten()
else {
@@ -6618,12 +6692,19 @@ impl Editor {
let anchor = self.selections.newest_anchor().head();
let position = self.to_pixel_point(anchor, &snapshot, window);
if let (Some(position), Some(last_bounds)) = (position, self.last_bounds) {
- self.show_blame_popover(&blame_entry, position + last_bounds.origin, true, cx);
+ self.show_blame_popover(
+ buffer,
+ &blame_entry,
+ position + last_bounds.origin,
+ true,
+ cx,
+ );
};
}
fn show_blame_popover(
&mut self,
+ buffer: BufferId,
blame_entry: &BlameEntry,
position: gpui::Point<Pixels>,
ignore_timeout: bool,
@@ -6647,7 +6728,7 @@ impl Editor {
return;
};
let blame = blame.read(cx);
- let details = blame.details_for_entry(&blame_entry);
+ let details = blame.details_for_entry(buffer, &blame_entry);
let markdown = cx.new(|cx| {
Markdown::new(
details
@@ -6747,7 +6828,6 @@ impl Editor {
return;
}
- let buffer_id = cursor_position.buffer_id;
let buffer = this.buffer.read(cx);
if buffer
.text_anchor_for_position(cursor_position, cx)
@@ -6760,8 +6840,8 @@ impl Editor {
let mut write_ranges = Vec::new();
let mut read_ranges = Vec::new();
for highlight in highlights {
- for (excerpt_id, excerpt_range) in
- buffer.excerpts_for_buffer(cursor_buffer.read(cx).remote_id(), cx)
+ let buffer_id = cursor_buffer.read(cx).remote_id();
+ for (excerpt_id, excerpt_range) in buffer.excerpts_for_buffer(buffer_id, cx)
{
let start = highlight
.range
@@ -6776,12 +6856,12 @@ impl Editor {
}
let range = Anchor {
- buffer_id,
+ buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: start,
diff_base_anchor: None,
}..Anchor {
- buffer_id,
+ buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: end,
diff_base_anchor: None,
@@ -6816,7 +6896,7 @@ impl Editor {
&mut self,
cx: &mut Context<Editor>,
) -> Option<(String, Range<Anchor>)> {
- if matches!(self.mode, EditorMode::SingleLine { .. }) {
+ if matches!(self.mode, EditorMode::SingleLine) {
return None;
}
if !EditorSettings::get_global(cx).selection_highlight {
@@ -7674,16 +7754,16 @@ impl Editor {
.keystroke()
{
modifiers_held = modifiers_held
- || (&accept_keystroke.modifiers == modifiers
- && accept_keystroke.modifiers.modified());
+ || (accept_keystroke.modifiers() == modifiers
+ && accept_keystroke.modifiers().modified());
};
if let Some(accept_partial_keystroke) = self
.accept_edit_prediction_keybind(true, window, cx)
.keystroke()
{
modifiers_held = modifiers_held
- || (&accept_partial_keystroke.modifiers == modifiers
- && accept_partial_keystroke.modifiers.modified());
+ || (accept_partial_keystroke.modifiers() == modifiers
+ && accept_partial_keystroke.modifiers().modified());
}
if modifiers_held {
@@ -7733,6 +7813,11 @@ impl Editor {
return None;
}
+ if self.ime_transaction.is_some() {
+ self.discard_edit_prediction(false, cx);
+ return None;
+ }
+
let selection = self.selections.newest_anchor();
let cursor = selection.head();
let multibuffer = self.buffer.read(cx).snapshot(cx);
@@ -7771,12 +7856,9 @@ impl Editor {
self.edit_prediction_settings =
self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx);
- match self.edit_prediction_settings {
- EditPredictionSettings::Disabled => {
- self.discard_edit_prediction(false, cx);
- return None;
- }
- _ => {}
+ if let EditPredictionSettings::Disabled = self.edit_prediction_settings {
+ self.discard_edit_prediction(false, cx);
+ return None;
};
self.edit_prediction_indent_conflict = multibuffer.is_line_whitespace_upto(cursor);
@@ -8235,8 +8317,6 @@ impl Editor {
.icon_color(color)
.style(ButtonStyle::Transparent)
.on_click(cx.listener({
- let breakpoint = breakpoint.clone();
-
move |editor, event: &ClickEvent, window, cx| {
let edit_action = if event.modifiers().platform || breakpoint.is_disabled() {
BreakpointEditAction::InvertState
@@ -9037,7 +9117,7 @@ impl Editor {
let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
- let modifiers_color = if accept_keystroke.modifiers == window.modifiers() {
+ let modifiers_color = if *accept_keystroke.modifiers() == window.modifiers() {
Color::Accent
} else {
Color::Muted
@@ -9049,19 +9129,19 @@ impl Editor {
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
.text_size(TextSize::XSmall.rems(cx))
.child(h_flex().children(ui::render_modifiers(
- &accept_keystroke.modifiers,
+ accept_keystroke.modifiers(),
PlatformStyle::platform(),
Some(modifiers_color),
Some(IconSize::XSmall.rems().into()),
true,
)))
.when(is_platform_style_mac, |parent| {
- parent.child(accept_keystroke.key.clone())
+ parent.child(accept_keystroke.key().to_string())
})
.when(!is_platform_style_mac, |parent| {
parent.child(
Key::new(
- util::capitalize(&accept_keystroke.key),
+ util::capitalize(accept_keystroke.key()),
Some(Color::Default),
)
.size(Some(IconSize::XSmall.rems().into())),
@@ -9164,52 +9244,13 @@ impl Editor {
max_width: Pixels,
cursor_point: Point,
style: &EditorStyle,
- accept_keystroke: Option<&gpui::Keystroke>,
+ accept_keystroke: Option<&gpui::KeybindingKeystroke>,
_window: &Window,
cx: &mut Context<Editor>,
) -> Option<AnyElement> {
let provider = self.edit_prediction_provider.as_ref()?;
let provider_icon = Self::get_prediction_provider_icon_name(&self.edit_prediction_provider);
- if provider.provider.needs_terms_acceptance(cx) {
- return Some(
- h_flex()
- .min_w(min_width)
- .flex_1()
- .px_2()
- .py_1()
- .gap_3()
- .elevation_2(cx)
- .hover(|style| style.bg(cx.theme().colors().element_hover))
- .id("accept-terms")
- .cursor_pointer()
- .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default())
- .on_click(cx.listener(|this, _event, window, cx| {
- cx.stop_propagation();
- this.report_editor_event(ReportEditorEvent::ZetaTosClicked, None, cx);
- window.dispatch_action(
- zed_actions::OpenZedPredictOnboarding.boxed_clone(),
- cx,
- );
- }))
- .child(
- h_flex()
- .flex_1()
- .gap_2()
- .child(Icon::new(provider_icon))
- .child(Label::new("Accept Terms of Service"))
- .child(div().w_full())
- .child(
- Icon::new(IconName::ArrowUpRight)
- .color(Color::Muted)
- .size(IconSize::Small),
- )
- .into_any_element(),
- )
- .into_any(),
- );
- }
-
let is_refreshing = provider.provider.is_refreshing(cx);
fn pending_completion_container(icon: IconName) -> Div {
@@ -9281,7 +9322,7 @@ impl Editor {
accept_keystroke.as_ref(),
|el, accept_keystroke| {
el.child(h_flex().children(ui::render_modifiers(
- &accept_keystroke.modifiers,
+ accept_keystroke.modifiers(),
PlatformStyle::platform(),
Some(Color::Default),
Some(IconSize::XSmall.rems().into()),
@@ -9351,7 +9392,7 @@ impl Editor {
.child(completion),
)
.when_some(accept_keystroke, |el, accept_keystroke| {
- if !accept_keystroke.modifiers.modified() {
+ if !accept_keystroke.modifiers().modified() {
return el;
}
@@ -9370,7 +9411,7 @@ impl Editor {
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
.when(is_platform_style_mac, |parent| parent.gap_1())
.child(h_flex().children(ui::render_modifiers(
- &accept_keystroke.modifiers,
+ accept_keystroke.modifiers(),
PlatformStyle::platform(),
Some(if !has_completion {
Color::Muted
@@ -9551,17 +9592,21 @@ impl Editor {
selection: Range<Anchor>,
cx: &mut Context<Self>,
) {
- let buffer_id = match (&selection.start.buffer_id, &selection.end.buffer_id) {
- (Some(a), Some(b)) if a == b => a,
- _ => {
- log::error!("expected anchor range to have matching buffer IDs");
- return;
- }
+ let Some((_, buffer, _)) = self
+ .buffer()
+ .read(cx)
+ .excerpt_containing(selection.start, cx)
+ else {
+ return;
};
- let multi_buffer = self.buffer().read(cx);
- let Some(buffer) = multi_buffer.buffer(*buffer_id) else {
+ let Some((_, end_buffer, _)) = self.buffer().read(cx).excerpt_containing(selection.end, cx)
+ else {
return;
};
+ if buffer != end_buffer {
+ log::error!("expected anchor range to have matching buffer IDs");
+ return;
+ }
let id = post_inc(&mut self.next_completion_id);
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
@@ -9807,6 +9852,9 @@ impl Editor {
}
pub fn backspace(&mut self, _: &Backspace, window: &mut Window, cx: &mut Context<Self>) {
+ if self.read_only(cx) {
+ return;
+ }
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
self.transact(window, cx, |this, window, cx| {
this.select_autoclose_pair(window, cx);
@@ -9900,6 +9948,9 @@ impl Editor {
}
pub fn delete(&mut self, _: &Delete, window: &mut Window, cx: &mut Context<Self>) {
+ if self.read_only(cx) {
+ return;
+ }
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
self.transact(window, cx, |this, window, cx| {
this.change_selections(Default::default(), window, cx, |s| {
@@ -10469,6 +10520,86 @@ impl Editor {
})
}
+ fn enable_wrap_selections_in_tag(&self, cx: &App) -> bool {
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ for selection in self.selections.disjoint_anchors().iter() {
+ if snapshot
+ .language_at(selection.start)
+ .and_then(|lang| lang.config().wrap_characters.as_ref())
+ .is_some()
+ {
+ return true;
+ }
+ }
+ false
+ }
+
+ fn wrap_selections_in_tag(
+ &mut self,
+ _: &WrapSelectionsInTag,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+
+ let mut edits = Vec::new();
+ let mut boundaries = Vec::new();
+
+ for selection in self.selections.all::<Point>(cx).iter() {
+ let Some(wrap_config) = snapshot
+ .language_at(selection.start)
+ .and_then(|lang| lang.config().wrap_characters.clone())
+ else {
+ continue;
+ };
+
+ let open_tag = format!("{}{}", wrap_config.start_prefix, wrap_config.start_suffix);
+ let close_tag = format!("{}{}", wrap_config.end_prefix, wrap_config.end_suffix);
+
+ let start_before = snapshot.anchor_before(selection.start);
+ let end_after = snapshot.anchor_after(selection.end);
+
+ edits.push((start_before..start_before, open_tag));
+ edits.push((end_after..end_after, close_tag));
+
+ boundaries.push((
+ start_before,
+ end_after,
+ wrap_config.start_prefix.len(),
+ wrap_config.end_suffix.len(),
+ ));
+ }
+
+ if edits.is_empty() {
+ return;
+ }
+
+ self.transact(window, cx, |this, window, cx| {
+ let buffer = this.buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, None, cx);
+ buffer.snapshot(cx)
+ });
+
+ let mut new_selections = Vec::with_capacity(boundaries.len() * 2);
+ for (start_before, end_after, start_prefix_len, end_suffix_len) in
+ boundaries.into_iter()
+ {
+ let open_offset = start_before.to_offset(&buffer) + start_prefix_len;
+ let close_offset = end_after.to_offset(&buffer).saturating_sub(end_suffix_len);
+ new_selections.push(open_offset..open_offset);
+ new_selections.push(close_offset..close_offset);
+ }
+
+ this.change_selections(Default::default(), window, cx, |s| {
+ s.select_ranges(new_selections);
+ });
+
+ this.request_autoscroll(Autoscroll::fit(), cx);
+ });
+ }
+
pub fn reload_file(&mut self, _: &ReloadFile, window: &mut Window, cx: &mut Context<Self>) {
let Some(project) = self.project.clone() else {
return;
@@ -6,7 +6,7 @@ use language::CursorShape;
use project::project_settings::DiagnosticSeverity;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources, VsCodeSettings};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi, VsCodeSettings};
use util::serde::default_true;
/// Imports from the VSCode settings at
@@ -17,6 +17,7 @@ pub struct EditorSettings {
pub cursor_shape: Option<CursorShape>,
pub current_line_highlight: CurrentLineHighlight,
pub selection_highlight: bool,
+ pub rounded_selection: bool,
pub lsp_highlight_debounce: u64,
pub hover_popover_enabled: bool,
pub hover_popover_delay: u64,
@@ -37,6 +38,7 @@ pub struct EditorSettings {
pub multi_cursor_modifier: MultiCursorModifier,
pub redact_private_values: bool,
pub expand_excerpt_lines: u32,
+ pub excerpt_context_lines: u32,
pub middle_click_paste: bool,
#[serde(default)]
pub double_click_in_multibuffer: DoubleClickInMultibuffer,
@@ -55,10 +57,13 @@ pub struct EditorSettings {
pub inline_code_actions: bool,
pub drag_and_drop_selection: DragAndDropSelection,
pub lsp_document_colors: DocumentColorsRenderMode,
+ pub minimum_contrast_for_highlights: f32,
}
/// How to render LSP `textDocument/documentColor` colors in the editor.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum DocumentColorsRenderMode {
/// Do not query and render document colors.
@@ -72,7 +77,7 @@ pub enum DocumentColorsRenderMode {
Background,
}
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum CurrentLineHighlight {
// Don't highlight the current line.
@@ -86,7 +91,7 @@ pub enum CurrentLineHighlight {
}
/// When to populate a new search's query based on the text under the cursor.
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum SeedQuerySetting {
/// Always populate the search query with the word under the cursor.
@@ -98,7 +103,9 @@ pub enum SeedQuerySetting {
}
/// What to do when multibuffer is double clicked in some of its excerpts (parts of singleton buffers).
-#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum DoubleClickInMultibuffer {
/// Behave as a regular buffer and select the whole word.
@@ -117,7 +124,9 @@ pub struct Jupyter {
pub enabled: bool,
}
-#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub struct JupyterContent {
/// Whether the Jupyter feature is enabled.
@@ -289,7 +298,9 @@ pub struct ScrollbarAxes {
}
/// Whether to allow drag and drop text selection in buffer.
-#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(
+ Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi,
+)]
pub struct DragAndDropSelection {
/// When true, enables drag and drop text selection in buffer.
///
@@ -329,7 +340,7 @@ pub enum ScrollbarDiagnostics {
/// The key to use for adding multiple cursors
///
/// Default: alt
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum MultiCursorModifier {
Alt,
@@ -340,7 +351,7 @@ pub enum MultiCursorModifier {
/// Whether the editor will scroll beyond the last line.
///
/// Default: one_page
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum ScrollBeyondLastLine {
/// The editor will not scroll beyond the last line.
@@ -354,7 +365,9 @@ pub enum ScrollBeyondLastLine {
}
/// Default options for buffer and project search items.
-#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(
+ Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi,
+)]
pub struct SearchSettings {
/// Whether to show the project search button in the status bar.
#[serde(default = "default_true")]
@@ -370,7 +383,9 @@ pub struct SearchSettings {
}
/// What to do when go to definition yields no results.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum GoToDefinitionFallback {
/// Disables the fallback.
@@ -383,7 +398,9 @@ pub enum GoToDefinitionFallback {
/// Determines when the mouse cursor should be hidden in an editor or input box.
///
/// Default: on_typing_and_movement
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum HideMouseMode {
/// Never hide the mouse cursor
@@ -398,7 +415,9 @@ pub enum HideMouseMode {
/// Determines how snippets are sorted relative to other completion items.
///
/// Default: inline
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum SnippetSortOrder {
/// Place snippets at the top of the completion list
@@ -412,7 +431,9 @@ pub enum SnippetSortOrder {
None,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_ui(group = "Editor")]
+#[settings_key(None)]
pub struct EditorSettingsContent {
/// Whether the cursor blinks in the editor.
///
@@ -421,7 +442,7 @@ pub struct EditorSettingsContent {
/// Cursor shape for the default editor.
/// Can be "bar", "block", "underline", or "hollow".
///
- /// Default: None
+ /// Default: bar
pub cursor_shape: Option<CursorShape>,
/// Determines when the mouse cursor should be hidden in an editor or input box.
///
@@ -439,6 +460,10 @@ pub struct EditorSettingsContent {
///
/// Default: true
pub selection_highlight: Option<bool>,
+ /// Whether the text selection should have rounded corners.
+ ///
+ /// Default: true
+ pub rounded_selection: Option<bool>,
/// The debounce delay before querying highlights from the language
/// server based on the current cursor location.
///
@@ -515,6 +540,11 @@ pub struct EditorSettingsContent {
/// Default: 3
pub expand_excerpt_lines: Option<u32>,
+ /// How many lines of context to provide in multibuffer excerpts by default
+ ///
+ /// Default: 2
+ pub excerpt_context_lines: Option<u32>,
+
/// Whether to enable middle-click paste on Linux
///
/// Default: true
@@ -544,6 +574,12 @@ pub struct EditorSettingsContent {
///
/// Default: false
pub show_signature_help_after_edits: Option<bool>,
+ /// The minimum APCA perceptual contrast to maintain when
+ /// rendering text over highlight backgrounds in the editor.
+ ///
+ /// Values range from 0 to 106. Set to 0 to disable adjustments.
+ /// Default: 45
+ pub minimum_contrast_for_highlights: Option<f32>,
/// Whether to follow-up empty go to definition responses from the language server or not.
/// `FindAllReferences` allows to look up references of the same symbol instead.
@@ -583,7 +619,7 @@ pub struct EditorSettingsContent {
}
// Status bar related settings
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
pub struct StatusBarContent {
/// Whether to display the active language button in the status bar.
///
@@ -596,7 +632,7 @@ pub struct StatusBarContent {
}
// Toolbar related settings
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
pub struct ToolbarContent {
/// Whether to display breadcrumbs in the editor toolbar.
///
@@ -622,7 +658,9 @@ pub struct ToolbarContent {
}
/// Scrollbar related settings
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
+#[derive(
+ Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default, SettingsUi,
+)]
pub struct ScrollbarContent {
/// When to show the scrollbar in the editor.
///
@@ -657,7 +695,9 @@ pub struct ScrollbarContent {
}
/// Minimap related settings
-#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(
+ Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, SettingsUi,
+)]
pub struct MinimapContent {
/// When to show the minimap in the editor.
///
@@ -705,7 +745,10 @@ pub struct ScrollbarAxesContent {
}
/// Gutter related settings
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi,
+)]
+#[settings_ui(group = "Gutter")]
pub struct GutterContent {
/// Whether to show line numbers in the gutter.
///
@@ -736,8 +779,6 @@ impl EditorSettings {
}
impl Settings for EditorSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = EditorSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -781,6 +822,7 @@ impl Settings for EditorSettings {
"editor.selectionHighlight",
&mut current.selection_highlight,
);
+ vscode.bool_setting("editor.roundedSelection", &mut current.rounded_selection);
vscode.bool_setting("editor.hover.enabled", &mut current.hover_popover_enabled);
vscode.u64_setting("editor.hover.delay", &mut current.hover_popover_delay);
@@ -88,7 +88,7 @@ impl RenderOnce for BufferFontFamilyControl {
.child(Icon::new(IconName::Font))
.child(DropdownMenu::new(
"buffer-font-family",
- value.clone(),
+ value,
ContextMenu::build(window, cx, |mut menu, _, cx| {
let font_family_cache = FontFamilyCache::global(cx);
@@ -57,7 +57,9 @@ use util::{
use workspace::{
CloseActiveItem, CloseAllItems, CloseOtherItems, MoveItemToPaneInDirection, NavigationEntry,
OpenOptions, ViewId,
+ invalid_buffer_view::InvalidBufferView,
item::{FollowEvent, FollowableItem, Item, ItemHandle, SaveOptions},
+ register_project_item,
};
#[gpui::test]
@@ -708,7 +710,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
_ = workspace.update(cx, |_v, window, cx| {
cx.new(|cx| {
let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
- let mut editor = build_editor(buffer.clone(), window, cx);
+ let mut editor = build_editor(buffer, window, cx);
let handle = cx.entity();
editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
@@ -898,7 +900,7 @@ fn test_fold_action(cx: &mut TestAppContext) {
.unindent(),
cx,
);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
_ = editor.update(cx, |editor, window, cx| {
@@ -989,7 +991,7 @@ fn test_fold_action_whitespace_sensitive_language(cx: &mut TestAppContext) {
.unindent(),
cx,
);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
_ = editor.update(cx, |editor, window, cx| {
@@ -1074,7 +1076,7 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) {
.unindent(),
cx,
);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
_ = editor.update(cx, |editor, window, cx| {
@@ -1173,7 +1175,7 @@ fn test_fold_at_level(cx: &mut TestAppContext) {
.unindent(),
cx,
);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
_ = editor.update(cx, |editor, window, cx| {
@@ -1335,7 +1337,7 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) {
let editor = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple("🟥🟧🟨🟩🟦🟪\nabcde\nαβγδε", cx);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
assert_eq!('🟥'.len_utf8(), 4);
@@ -1452,7 +1454,7 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
let editor = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx);
- build_editor(buffer.clone(), window, cx)
+ build_editor(buffer, window, cx)
});
_ = editor.update(cx, |editor, window, cx| {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
@@ -2474,154 +2476,488 @@ async fn test_delete_to_beginning_of_line(cx: &mut TestAppContext) {
}
#[gpui::test]
-fn test_delete_to_word_boundary(cx: &mut TestAppContext) {
+async fn test_delete_to_word_boundary(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let editor = cx.add_window(|window, cx| {
- let buffer = MultiBuffer::build_simple("one two three four", cx);
- build_editor(buffer.clone(), window, cx)
- });
+ let mut cx = EditorTestContext::new(cx).await;
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_display_ranges([
- // an empty selection - the preceding word fragment is deleted
- DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
- // characters selected - they are deleted
- DisplayPoint::new(DisplayRow(0), 9)..DisplayPoint::new(DisplayRow(0), 12),
- ])
- });
+ // For an empty selection, the preceding word fragment is deleted.
+ // For non-empty selections, only selected characters are deleted.
+ cx.set_state("onˇe two t«hreˇ»e four");
+ cx.update_editor(|editor, window, cx| {
editor.delete_to_previous_word_start(
&DeleteToPreviousWordStart {
ignore_newlines: false,
+ ignore_brackets: false,
},
window,
cx,
);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "e two te four");
});
+ cx.assert_editor_state("ˇe two tˇe four");
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_display_ranges([
- // an empty selection - the following word fragment is deleted
- DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3),
- // characters selected - they are deleted
- DisplayPoint::new(DisplayRow(0), 9)..DisplayPoint::new(DisplayRow(0), 10),
- ])
- });
+ cx.set_state("e tˇwo te «fˇ»our");
+ cx.update_editor(|editor, window, cx| {
editor.delete_to_next_word_end(
&DeleteToNextWordEnd {
ignore_newlines: false,
+ ignore_brackets: false,
},
window,
cx,
);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "e t te our");
});
+ cx.assert_editor_state("e tˇ te ˇour");
}
#[gpui::test]
-fn test_delete_to_previous_word_start_or_newline(cx: &mut TestAppContext) {
+async fn test_delete_whitespaces(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let editor = cx.add_window(|window, cx| {
- let buffer = MultiBuffer::build_simple("one\n2\nthree\n4", cx);
- build_editor(buffer.clone(), window, cx)
+ let mut cx = EditorTestContext::new(cx).await;
+
+ cx.set_state("here is some text ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: false,
+ ignore_brackets: true,
+ },
+ window,
+ cx,
+ );
});
- let del_to_prev_word_start = DeleteToPreviousWordStart {
- ignore_newlines: false,
- };
- let del_to_prev_word_start_ignore_newlines = DeleteToPreviousWordStart {
- ignore_newlines: true,
- };
+ // Continuous whitespace sequences are removed entirely, words behind them are not affected by the deletion action.
+ cx.assert_editor_state("here is some textˇwith a space");
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_display_ranges([
- DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1)
- ])
- });
- editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\nthree\n");
- editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\nthree");
- editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\n");
- editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2");
- editor.delete_to_previous_word_start(&del_to_prev_word_start_ignore_newlines, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n");
- editor.delete_to_previous_word_start(&del_to_prev_word_start_ignore_newlines, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "");
+ cx.set_state("here is some text ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: false,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
});
-}
+ cx.assert_editor_state("here is some textˇwith a space");
-#[gpui::test]
-fn test_delete_to_next_word_end_or_newline(cx: &mut TestAppContext) {
- init_test(cx, |_| {});
+ cx.set_state("here is some textˇ with a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: false,
+ ignore_brackets: true,
+ },
+ window,
+ cx,
+ );
+ });
+ // Same happens in the other direction.
+ cx.assert_editor_state("here is some textˇwith a space");
- let editor = cx.add_window(|window, cx| {
- let buffer = MultiBuffer::build_simple("\none\n two\nthree\n four", cx);
- build_editor(buffer.clone(), window, cx)
+ cx.set_state("here is some textˇ with a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: false,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
});
- let del_to_next_word_end = DeleteToNextWordEnd {
- ignore_newlines: false,
- };
- let del_to_next_word_end_ignore_newlines = DeleteToNextWordEnd {
- ignore_newlines: true,
- };
+ cx.assert_editor_state("here is some textˇwith a space");
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_display_ranges([
- DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)
- ])
- });
- editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
- assert_eq!(
- editor.buffer.read(cx).read(cx).text(),
- "one\n two\nthree\n four"
+ cx.set_state("here is some textˇ with a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
);
- editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
- assert_eq!(
- editor.buffer.read(cx).read(cx).text(),
- "\n two\nthree\n four"
+ });
+ cx.assert_editor_state("here is some textˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
);
- editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
- assert_eq!(
- editor.buffer.read(cx).read(cx).text(),
- "two\nthree\n four"
+ });
+ cx.assert_editor_state("here is some ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Single whitespaces are removed with the word behind them.
+ cx.assert_editor_state("here is ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("here ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("ˇwith a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Same happens in the other direction.
+ cx.assert_editor_state("ˇ a space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("ˇ space");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("ˇ");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state("ˇ");
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
);
- editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "\nthree\n four");
- editor.delete_to_next_word_end(&del_to_next_word_end_ignore_newlines, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "\n four");
- editor.delete_to_next_word_end(&del_to_next_word_end_ignore_newlines, window, cx);
- assert_eq!(editor.buffer.read(cx).read(cx).text(), "");
});
+ cx.assert_editor_state("ˇ");
}
#[gpui::test]
-fn test_newline(cx: &mut TestAppContext) {
+async fn test_delete_to_bracket(cx: &mut TestAppContext) {
init_test(cx, |_| {});
- let editor = cx.add_window(|window, cx| {
- let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx);
- build_editor(buffer.clone(), window, cx)
- });
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ brackets: BracketPairConfig {
+ pairs: vec![
+ BracketPair {
+ start: "\"".to_string(),
+ end: "\"".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ },
+ BracketPair {
+ start: "(".to_string(),
+ end: ")".to_string(),
+ close: true,
+ surround: true,
+ newline: true,
+ },
+ ],
+ ..BracketPairConfig::default()
+ },
+ ..LanguageConfig::default()
+ },
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_brackets_query(
+ r#"
+ ("(" @open ")" @close)
+ ("\"" @open "\"" @close)
+ "#,
+ )
+ .unwrap(),
+ );
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_display_ranges([
- DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
- DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
- DisplayPoint::new(DisplayRow(1), 6)..DisplayPoint::new(DisplayRow(1), 6),
- ])
- });
+ let mut cx = EditorTestContext::new(cx).await;
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
- editor.newline(&Newline, window, cx);
- assert_eq!(editor.text(cx), "aa\naa\n \n bb\n bb\n");
+ cx.set_state(r#"macro!("// ˇCOMMENT");"#);
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Deletion stops before brackets if asked to not ignore them.
+ cx.assert_editor_state(r#"macro!("ˇCOMMENT");"#);
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Deletion has to remove a single bracket and then stop again.
+ cx.assert_editor_state(r#"macro!(ˇCOMMENT");"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"macro!ˇCOMMENT");"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"ˇCOMMENT");"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"ˇCOMMENT");"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ // Brackets on the right are not paired anymore, hence deletion does not stop at them
+ cx.assert_editor_state(r#"ˇ");"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"ˇ"#);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_next_word_end(
+ &DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"ˇ"#);
+
+ cx.set_state(r#"macro!("// ˇCOMMENT");"#);
+ cx.update_editor(|editor, window, cx| {
+ editor.delete_to_previous_word_start(
+ &DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: true,
+ },
+ window,
+ cx,
+ );
+ });
+ cx.assert_editor_state(r#"macroˇCOMMENT");"#);
+}
+
+#[gpui::test]
+fn test_delete_to_previous_word_start_or_newline(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let editor = cx.add_window(|window, cx| {
+ let buffer = MultiBuffer::build_simple("one\n2\nthree\n4", cx);
+ build_editor(buffer, window, cx)
+ });
+ let del_to_prev_word_start = DeleteToPreviousWordStart {
+ ignore_newlines: false,
+ ignore_brackets: false,
+ };
+ let del_to_prev_word_start_ignore_newlines = DeleteToPreviousWordStart {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ };
+
+ _ = editor.update(cx, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1)
+ ])
+ });
+ editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\nthree\n");
+ editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\nthree");
+ editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2\n");
+ editor.delete_to_previous_word_start(&del_to_prev_word_start, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n2");
+ editor.delete_to_previous_word_start(&del_to_prev_word_start_ignore_newlines, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "one\n");
+ editor.delete_to_previous_word_start(&del_to_prev_word_start_ignore_newlines, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "");
+ });
+}
+
+#[gpui::test]
+fn test_delete_to_next_word_end_or_newline(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let editor = cx.add_window(|window, cx| {
+ let buffer = MultiBuffer::build_simple("\none\n two\nthree\n four", cx);
+ build_editor(buffer, window, cx)
+ });
+ let del_to_next_word_end = DeleteToNextWordEnd {
+ ignore_newlines: false,
+ ignore_brackets: false,
+ };
+ let del_to_next_word_end_ignore_newlines = DeleteToNextWordEnd {
+ ignore_newlines: true,
+ ignore_brackets: false,
+ };
+
+ _ = editor.update(cx, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)
+ ])
+ });
+ editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
+ assert_eq!(
+ editor.buffer.read(cx).read(cx).text(),
+ "one\n two\nthree\n four"
+ );
+ editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
+ assert_eq!(
+ editor.buffer.read(cx).read(cx).text(),
+ "\n two\nthree\n four"
+ );
+ editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
+ assert_eq!(
+ editor.buffer.read(cx).read(cx).text(),
+ "two\nthree\n four"
+ );
+ editor.delete_to_next_word_end(&del_to_next_word_end, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "\nthree\n four");
+ editor.delete_to_next_word_end(&del_to_next_word_end_ignore_newlines, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "\n four");
+ editor.delete_to_next_word_end(&del_to_next_word_end_ignore_newlines, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "four");
+ editor.delete_to_next_word_end(&del_to_next_word_end_ignore_newlines, window, cx);
+ assert_eq!(editor.buffer.read(cx).read(cx).text(), "");
+ });
+}
+
+#[gpui::test]
+fn test_newline(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let editor = cx.add_window(|window, cx| {
+ let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx);
+ build_editor(buffer, window, cx)
+ });
+
+ _ = editor.update(cx, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
+ DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
+ DisplayPoint::new(DisplayRow(1), 6)..DisplayPoint::new(DisplayRow(1), 6),
+ ])
+ });
+
+ editor.newline(&Newline, window, cx);
+ assert_eq!(editor.text(cx), "aa\naa\n \n bb\n bb\n");
});
}
@@ -2644,7 +2980,7 @@ fn test_newline_with_old_selections(cx: &mut TestAppContext) {
.as_str(),
cx,
);
- let mut editor = build_editor(buffer.clone(), window, cx);
+ let mut editor = build_editor(buffer, window, cx);
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
s.select_ranges([
Point::new(2, 4)..Point::new(2, 5),
@@ -3175,7 +3511,7 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) {
let editor = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx);
- let mut editor = build_editor(buffer.clone(), window, cx);
+ let mut editor = build_editor(buffer, window, cx);
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
s.select_ranges([3..4, 11..12, 19..20])
});
@@ -4401,6 +4737,129 @@ async fn test_unique_lines_single_selection(cx: &mut TestAppContext) {
"});
}
+#[gpui::test]
+async fn test_wrap_in_tag_single_selection(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let js_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ wrap_characters: Some(language::WrapCharactersConfig {
+ start_prefix: "<".into(),
+ start_suffix: ">".into(),
+ end_prefix: "</".into(),
+ end_suffix: ">".into(),
+ }),
+ ..LanguageConfig::default()
+ },
+ None,
+ ));
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(js_language), cx));
+
+ cx.set_state(indoc! {"
+ «testˇ»
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ <«ˇ»>test</«ˇ»>
+ "});
+
+ cx.set_state(indoc! {"
+ «test
+ testˇ»
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ <«ˇ»>test
+ test</«ˇ»>
+ "});
+
+ cx.set_state(indoc! {"
+ teˇst
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ te<«ˇ»></«ˇ»>st
+ "});
+}
+
+#[gpui::test]
+async fn test_wrap_in_tag_multi_selection(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let js_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ wrap_characters: Some(language::WrapCharactersConfig {
+ start_prefix: "<".into(),
+ start_suffix: ">".into(),
+ end_prefix: "</".into(),
+ end_suffix: ">".into(),
+ }),
+ ..LanguageConfig::default()
+ },
+ None,
+ ));
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(js_language), cx));
+
+ cx.set_state(indoc! {"
+ «testˇ»
+ «testˇ» «testˇ»
+ «testˇ»
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ <«ˇ»>test</«ˇ»>
+ <«ˇ»>test</«ˇ»> <«ˇ»>test</«ˇ»>
+ <«ˇ»>test</«ˇ»>
+ "});
+
+ cx.set_state(indoc! {"
+ «test
+ testˇ»
+ «test
+ testˇ»
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ <«ˇ»>test
+ test</«ˇ»>
+ <«ˇ»>test
+ test</«ˇ»>
+ "});
+}
+
+#[gpui::test]
+async fn test_wrap_in_tag_does_nothing_in_unsupported_languages(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let plaintext_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Plain Text".into(),
+ ..LanguageConfig::default()
+ },
+ None,
+ ));
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(plaintext_language), cx));
+
+ cx.set_state(indoc! {"
+ «testˇ»
+ "});
+ cx.update_editor(|e, window, cx| e.wrap_selections_in_tag(&WrapSelectionsInTag, window, cx));
+ cx.assert_editor_state(indoc! {"
+ «testˇ»
+ "});
+}
+
#[gpui::test]
async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -4904,10 +5363,24 @@ async fn test_manipulate_text(cx: &mut TestAppContext) {
cx.assert_editor_state(indoc! {"
«HeLlO, wOrLD!ˇ»
"});
-}
-#[gpui::test]
-fn test_duplicate_line(cx: &mut TestAppContext) {
+ // Test selections with `line_mode = true`.
+ cx.update_editor(|editor, _window, _cx| editor.selections.line_mode = true);
+ cx.set_state(indoc! {"
+ «The quick brown
+ fox jumps over
+ tˇ»he lazy dog
+ "});
+ cx.update_editor(|e, window, cx| e.convert_to_upper_case(&ConvertToUpperCase, window, cx));
+ cx.assert_editor_state(indoc! {"
+ «THE QUICK BROWN
+ FOX JUMPS OVER
+ THE LAZY DOGˇ»
+ "});
+}
+
+#[gpui::test]
+fn test_duplicate_line(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let editor = cx.add_window(|window, cx| {
@@ -5436,14 +5909,18 @@ async fn test_rewrap(cx: &mut TestAppContext) {
},
None,
));
- let rust_language = Arc::new(Language::new(
- LanguageConfig {
- name: "Rust".into(),
- line_comments: vec!["// ".into(), "/// ".into()],
- ..LanguageConfig::default()
- },
- Some(tree_sitter_rust::LANGUAGE.into()),
- ));
+ let rust_language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ line_comments: vec!["// ".into(), "/// ".into()],
+ ..LanguageConfig::default()
+ },
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_override_query("[(line_comment)(block_comment)] @comment.inclusive")
+ .unwrap(),
+ );
let plaintext_language = Arc::new(Language::new(
LanguageConfig {
@@ -5562,7 +6039,7 @@ async fn test_rewrap(cx: &mut TestAppContext) {
# ˇThis is a long comment using a pound
# sign.
"},
- python_language.clone(),
+ python_language,
&mut cx,
);
@@ -5598,150 +6075,555 @@ async fn test_rewrap(cx: &mut TestAppContext) {
&mut cx,
);
- // Test that rewrapping boundary works and preserves relative indent for Markdown documents
+ // Test that rewrapping boundary works and preserves relative indent for Markdown documents
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list item that is very long and needs to be wrapped properly.
+ 2. This is a numbered list item that is very long and needs to be wrapped properly.
+ - This is an unordered list item that is also very long and should not merge with the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+    // Test that rewrapping adds indentation for the rewrapping boundary if it does not exist already.
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge with
+ the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+    // Test that rewrapping maintains indentation even when it already exists.
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list
+ item that is very long and needs to be wrapped properly.
+ 2. This is a numbered list
+ item that is very long and needs to be wrapped properly.
+ - This is an unordered list item that is also very long and
+ should not merge with the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language,
+ &mut cx,
+ );
+
+ // Test that rewrapping works in plain text where `allow_rewrap` is `Anywhere`
+ assert_rewrap(
+ indoc! {"
+ ˇThis is a very long line of plain text that will be wrapped.
+ "},
+ indoc! {"
+ ˇThis is a very long line of plain text
+ that will be wrapped.
+ "},
+ plaintext_language.clone(),
+ &mut cx,
+ );
+
+ // Test that non-commented code acts as a paragraph boundary within a selection
+ assert_rewrap(
+ indoc! {"
+ «// This is the first long comment block to be wrapped.
+ fn my_func(a: u32);
+ // This is the second long comment block to be wrapped.ˇ»
+ "},
+ indoc! {"
+ «// This is the first long comment block
+ // to be wrapped.
+ fn my_func(a: u32);
+ // This is the second long comment block
+ // to be wrapped.ˇ»
+ "},
+ rust_language,
+ &mut cx,
+ );
+
+ // Test rewrapping multiple selections, including ones with blank lines or tabs
+ assert_rewrap(
+ indoc! {"
+ «ˇThis is a very long line that will be wrapped.
+
+ This is another paragraph in the same selection.»
+
+ «\tThis is a very long indented line that will be wrapped.ˇ»
+ "},
+ indoc! {"
+ «ˇThis is a very long line that will be
+ wrapped.
+
+ This is another paragraph in the same
+ selection.»
+
+ «\tThis is a very long indented line
+ \tthat will be wrapped.ˇ»
+ "},
+ plaintext_language,
+ &mut cx,
+ );
+
+ // Test that an empty comment line acts as a paragraph boundary
+ assert_rewrap(
+ indoc! {"
+ // ˇThis is a long comment that will be wrapped.
+ //
+ // And this is another long comment that will also be wrapped.ˇ
+ "},
+ indoc! {"
+ // ˇThis is a long comment that will be
+ // wrapped.
+ //
+ // And this is another long comment that
+ // will also be wrapped.ˇ
+ "},
+ cpp_language,
+ &mut cx,
+ );
+
+ #[track_caller]
+ fn assert_rewrap(
+ unwrapped_text: &str,
+ wrapped_text: &str,
+ language: Arc<Language>,
+ cx: &mut EditorTestContext,
+ ) {
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+ cx.set_state(unwrapped_text);
+ cx.update_editor(|e, window, cx| e.rewrap(&Rewrap, window, cx));
+ cx.assert_editor_state(wrapped_text);
+ }
+}
+
+#[gpui::test]
+async fn test_rewrap_block_comments(cx: &mut TestAppContext) {
+ init_test(cx, |settings| {
+ settings.languages.0.extend([(
+ "Rust".into(),
+ LanguageSettingsContent {
+ allow_rewrap: Some(language_settings::RewrapBehavior::InComments),
+ preferred_line_length: Some(40),
+ ..Default::default()
+ },
+ )])
+ });
+
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let rust_lang = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ line_comments: vec!["// ".into()],
+ block_comment: Some(BlockCommentConfig {
+ start: "/*".into(),
+ end: "*/".into(),
+ prefix: "* ".into(),
+ tab_size: 1,
+ }),
+ documentation_comment: Some(BlockCommentConfig {
+ start: "/**".into(),
+ end: "*/".into(),
+ prefix: "* ".into(),
+ tab_size: 1,
+ }),
+
+ ..LanguageConfig::default()
+ },
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ )
+ .with_override_query("[(line_comment) (block_comment)] @comment.inclusive")
+ .unwrap(),
+ );
+
+ // regular block comment
+ assert_rewrap(
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ /*ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ "},
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // indent is respected
+ assert_rewrap(
+ indoc! {"
+ {}
+ /*ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ "},
+ indoc! {"
+ {}
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // short block comments with inline delimiters
+ assert_rewrap(
+ indoc! {"
+ /*ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ /*ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ "},
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // multiline block comment with inline start/end delimiters
+ assert_rewrap(
+ indoc! {"
+ /*ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit. */
+ "},
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // block comment rewrap still respects paragraph bounds
+ assert_rewrap(
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ *
+ * Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ "},
+ indoc! {"
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ *
+ * Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // documentation comments
+ assert_rewrap(
+ indoc! {"
+ /**ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ /**
+ *ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ "},
+ indoc! {"
+ /**
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /**
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
+
+ // different, adjacent comments
assert_rewrap(
indoc! {"
- «1. This is a numbered list item that is very long and needs to be wrapped properly.
- 2. This is a numbered list item that is very long and needs to be wrapped properly.
- - This is an unordered list item that is also very long and should not merge with the numbered item.ˇ»
+ /**
+ *ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ /*ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ //ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
"},
indoc! {"
- «1. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- 2. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- - This is an unordered list item that is
- also very long and should not merge
- with the numbered item.ˇ»
+ /**
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ //ˇ Lorem ipsum dolor sit amet,
+ // consectetur adipiscing elit.
"},
- markdown_language.clone(),
+ rust_lang.clone(),
&mut cx,
);
- // Test that rewrapping add indents for rewrapping boundary if not exists already.
+ // selection w/ single short block comment
assert_rewrap(
indoc! {"
- «1. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- 2. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- - This is an unordered list item that is
- also very long and should not merge with
- the numbered item.ˇ»
+ «/* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */ˇ»
"},
indoc! {"
- «1. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- 2. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- - This is an unordered list item that is
- also very long and should not merge
- with the numbered item.ˇ»
+ «/*
+ * Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */ˇ»
"},
- markdown_language.clone(),
+ rust_lang.clone(),
&mut cx,
);
- // Test that rewrapping maintain indents even when they already exists.
+ // rewrapping a single comment w/ abutting comments
assert_rewrap(
indoc! {"
- «1. This is a numbered list
- item that is very long and needs to be wrapped properly.
- 2. This is a numbered list
- item that is very long and needs to be wrapped properly.
- - This is an unordered list item that is also very long and
- should not merge with the numbered item.ˇ»
+ /* ˇLorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
"},
indoc! {"
- «1. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- 2. This is a numbered list item that is
- very long and needs to be wrapped
- properly.
- - This is an unordered list item that is
- also very long and should not merge
- with the numbered item.ˇ»
+ /*
+ * ˇLorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
"},
- markdown_language.clone(),
+ rust_lang.clone(),
&mut cx,
);
- // Test that rewrapping works in plain text where `allow_rewrap` is `Anywhere`
+ // selection w/ non-abutting short block comments
assert_rewrap(
indoc! {"
- ˇThis is a very long line of plain text that will be wrapped.
+ «/* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */ˇ»
"},
indoc! {"
- ˇThis is a very long line of plain text
- that will be wrapped.
+ «/*
+ * Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+
+ /*
+ * Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */ˇ»
"},
- plaintext_language.clone(),
+ rust_lang.clone(),
&mut cx,
);
- // Test that non-commented code acts as a paragraph boundary within a selection
+ // selection of multiline block comments
assert_rewrap(
indoc! {"
- «// This is the first long comment block to be wrapped.
- fn my_func(a: u32);
- // This is the second long comment block to be wrapped.ˇ»
- "},
+ «/* Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit. */ˇ»
+ "},
indoc! {"
- «// This is the first long comment block
- // to be wrapped.
- fn my_func(a: u32);
- // This is the second long comment block
- // to be wrapped.ˇ»
- "},
- rust_language.clone(),
+ «/*
+ * Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */ˇ»
+ "},
+ rust_lang.clone(),
&mut cx,
);
- // Test rewrapping multiple selections, including ones with blank lines or tabs
+ // partial selection of multiline block comments
assert_rewrap(
indoc! {"
- «ˇThis is a very long line that will be wrapped.
-
- This is another paragraph in the same selection.»
-
- «\tThis is a very long indented line that will be wrapped.ˇ»
- "},
+ «/* Lorem ipsum dolor sit amet,ˇ»
+ * consectetur adipiscing elit. */
+ /* Lorem ipsum dolor sit amet,
+ «* consectetur adipiscing elit. */ˇ»
+ "},
indoc! {"
- «ˇThis is a very long line that will be
- wrapped.
-
- This is another paragraph in the same
- selection.»
+ «/*
+ * Lorem ipsum dolor sit amet,ˇ»
+ * consectetur adipiscing elit. */
+ /* Lorem ipsum dolor sit amet,
+ «* consectetur adipiscing elit.
+ */ˇ»
+ "},
+ rust_lang.clone(),
+ &mut cx,
+ );
- «\tThis is a very long indented line
- \tthat will be wrapped.ˇ»
- "},
- plaintext_language.clone(),
+ // selection w/ abutting short block comments
+ // TODO: should not be combined; should rewrap as 2 comments
+ assert_rewrap(
+ indoc! {"
+ «/* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ /* Lorem ipsum dolor sit amet, consectetur adipiscing elit. */ˇ»
+ "},
+ // desired behavior:
+ // indoc! {"
+ // «/*
+ // * Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // */
+ // /*
+ // * Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // */ˇ»
+ // "},
+        // actual behavior:
+ indoc! {"
+ «/*
+ * Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit. Lorem
+ * ipsum dolor sit amet, consectetur
+ * adipiscing elit.
+ */ˇ»
+ "},
+ rust_lang.clone(),
&mut cx,
);
- // Test that an empty comment line acts as a paragraph boundary
+ // TODO: same as above, but with delimiters on separate line
+ // assert_rewrap(
+ // indoc! {"
+ // «/* Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ // */
+ // /*
+ // * Lorem ipsum dolor sit amet, consectetur adipiscing elit. */ˇ»
+ // "},
+ // // desired:
+ // // indoc! {"
+ // // «/*
+ // // * Lorem ipsum dolor sit amet,
+ // // * consectetur adipiscing elit.
+ // // */
+ // // /*
+ // // * Lorem ipsum dolor sit amet,
+ // // * consectetur adipiscing elit.
+ // // */ˇ»
+ // // "},
+ // // actual: (but with trailing w/s on the empty lines)
+ // indoc! {"
+ // «/*
+ // * Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // *
+ // */
+ // /*
+ // *
+ // * Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // */ˇ»
+ // "},
+ // rust_lang.clone(),
+ // &mut cx,
+ // );
+
+    // TODO: these are unhandled edge cases; not correct, just documenting known issues
assert_rewrap(
indoc! {"
- // ˇThis is a long comment that will be wrapped.
- //
- // And this is another long comment that will also be wrapped.ˇ
- "},
+ /*
+ //ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ */
+ /*
+ //ˇ Lorem ipsum dolor sit amet, consectetur adipiscing elit. */
+ /*ˇ Lorem ipsum dolor sit amet */ /* consectetur adipiscing elit. */
+ "},
+ // desired:
+ // indoc! {"
+ // /*
+ // *ˇ Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // */
+ // /*
+ // *ˇ Lorem ipsum dolor sit amet,
+ // * consectetur adipiscing elit.
+ // */
+ // /*
+ // *ˇ Lorem ipsum dolor sit amet
+ // */ /* consectetur adipiscing elit. */
+ // "},
+ // actual:
indoc! {"
- // ˇThis is a long comment that will be
- // wrapped.
- //
- // And this is another long comment that
- // will also be wrapped.ˇ
- "},
- cpp_language,
+ /*
+ //ˇ Lorem ipsum dolor sit amet,
+ // consectetur adipiscing elit.
+ */
+ /*
+ * //ˇ Lorem ipsum dolor sit amet,
+ * consectetur adipiscing elit.
+ */
+ /*
+ *ˇ Lorem ipsum dolor sit amet */ /*
+ * consectetur adipiscing elit.
+ */
+ "},
+ rust_lang,
&mut cx,
);
@@ -43,10 +43,10 @@ use gpui::{
Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner, Corners, CursorStyle,
DispatchPhase, Edges, Element, ElementInputHandler, Entity, Focusable as _, FontId,
GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero,
- Keystroke, Length, ModifiersChangedEvent, MouseButton, MouseClickEvent, MouseDownEvent,
- MouseMoveEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta, ScrollHandle,
- ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement, Style, Styled,
- TextRun, TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill,
+ KeybindingKeystroke, Length, ModifiersChangedEvent, MouseButton, MouseClickEvent,
+ MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta,
+ ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement,
+ Style, Styled, TextRun, TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill,
linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, solid_background,
transparent_black,
};
@@ -74,6 +74,7 @@ use std::{
fmt::{self, Write},
iter, mem,
ops::{Deref, Range},
+ path::{self, Path},
rc::Rc,
sync::Arc,
time::{Duration, Instant},
@@ -81,6 +82,7 @@ use std::{
use sum_tree::Bias;
use text::{BufferId, SelectionGoal};
use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor};
+use ui::utils::ensure_minimum_contrast;
use ui::{
ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*,
right_click_menu,
@@ -89,8 +91,8 @@ use unicode_segmentation::UnicodeSegmentation;
use util::post_inc;
use util::{RangeExt, ResultExt, debug_panic};
use workspace::{
- CollaboratorId, OpenInTerminal, OpenTerminal, RevealInProjectPanel, Workspace, item::Item,
- notifications::NotifyTaskExt,
+ CollaboratorId, ItemSettings, OpenInTerminal, OpenTerminal, RevealInProjectPanel, Workspace,
+ item::Item, notifications::NotifyTaskExt,
};
/// Determines what kinds of highlights should be applied to a lines background.
@@ -115,6 +117,7 @@ struct SelectionLayout {
struct InlineBlameLayout {
element: AnyElement,
bounds: Bounds<Pixels>,
+ buffer_id: BufferId,
entry: BlameEntry,
}
@@ -584,6 +587,9 @@ impl EditorElement {
register_action(editor, window, Editor::edit_log_breakpoint);
register_action(editor, window, Editor::enable_breakpoint);
register_action(editor, window, Editor::disable_breakpoint);
+ if editor.read(cx).enable_wrap_selections_in_tag(cx) {
+ register_action(editor, window, Editor::wrap_selections_in_tag);
+ }
}
fn register_key_listeners(&self, window: &mut Window, _: &mut App, layout: &EditorLayout) {
@@ -724,7 +730,7 @@ impl EditorElement {
ColumnarMode::FromMouse => true,
ColumnarMode::FromSelection => false,
},
- mode: mode,
+ mode,
goal_column: point_for_position.exact_unclipped.column(),
},
window,
@@ -1152,7 +1158,7 @@ impl EditorElement {
cx.notify();
}
- if let Some((bounds, blame_entry)) = &position_map.inline_blame_bounds {
+ if let Some((bounds, buffer_id, blame_entry)) = &position_map.inline_blame_bounds {
let mouse_over_inline_blame = bounds.contains(&event.position);
let mouse_over_popover = editor
.inline_blame_popover
@@ -1165,7 +1171,7 @@ impl EditorElement {
.is_some_and(|state| state.keyboard_grace);
if mouse_over_inline_blame || mouse_over_popover {
- editor.show_blame_popover(blame_entry, event.position, false, cx);
+ editor.show_blame_popover(*buffer_id, blame_entry, event.position, false, cx);
} else if !keyboard_grace {
editor.hide_blame_popover(cx);
}
@@ -2437,20 +2443,19 @@ impl EditorElement {
.unwrap_or_default()
.padding as f32;
- if let Some(edit_prediction) = editor.active_edit_prediction.as_ref() {
- match &edit_prediction.completion {
- EditPrediction::Edit {
- display_mode: EditDisplayMode::TabAccept,
- ..
- } => padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS,
- _ => {}
- }
+ if let Some(edit_prediction) = editor.active_edit_prediction.as_ref()
+ && let EditPrediction::Edit {
+ display_mode: EditDisplayMode::TabAccept,
+ ..
+ } = &edit_prediction.completion
+ {
+ padding += INLINE_ACCEPT_SUGGESTION_EM_WIDTHS
}
padding * em_width
};
- let entry = blame
+ let (buffer_id, entry) = blame
.update(cx, |blame, cx| {
blame.blame_for_rows(&[*row_info], cx).next()
})
@@ -2485,13 +2490,22 @@ impl EditorElement {
let size = element.layout_as_root(AvailableSpace::min_size(), window, cx);
let bounds = Bounds::new(absolute_offset, size);
- self.layout_blame_entry_popover(entry.clone(), blame, line_height, text_hitbox, window, cx);
+ self.layout_blame_entry_popover(
+ entry.clone(),
+ blame,
+ line_height,
+ text_hitbox,
+ row_info.buffer_id?,
+ window,
+ cx,
+ );
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), window, cx);
Some(InlineBlameLayout {
element,
bounds,
+ buffer_id,
entry,
})
}
@@ -2502,6 +2516,7 @@ impl EditorElement {
blame: Entity<GitBlame>,
line_height: Pixels,
text_hitbox: &Hitbox,
+ buffer: BufferId,
window: &mut Window,
cx: &mut App,
) {
@@ -2526,6 +2541,7 @@ impl EditorElement {
popover_state.markdown,
workspace,
&blame,
+ buffer,
window,
cx,
)
@@ -2600,14 +2616,16 @@ impl EditorElement {
.into_iter()
.enumerate()
.flat_map(|(ix, blame_entry)| {
+ let (buffer_id, blame_entry) = blame_entry?;
let mut element = render_blame_entry(
ix,
&blame,
- blame_entry?,
+ blame_entry,
&self.style,
&mut last_used_color,
self.editor.clone(),
workspace.clone(),
+ buffer_id,
blame_renderer.clone(),
cx,
)?;
@@ -2750,7 +2768,10 @@ impl EditorElement {
let mut block_offset = 0;
let mut found_excerpt_header = false;
for (_, block) in snapshot.blocks_in_range(prev_line..row_range.start) {
- if matches!(block, Block::ExcerptBoundary { .. }) {
+ if matches!(
+ block,
+ Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }
+ ) {
found_excerpt_header = true;
break;
}
@@ -2767,7 +2788,10 @@ impl EditorElement {
let mut block_height = 0;
let mut found_excerpt_header = false;
for (_, block) in snapshot.blocks_in_range(row_range.end..cons_line) {
- if matches!(block, Block::ExcerptBoundary { .. }) {
+ if matches!(
+ block,
+ Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }
+ ) {
found_excerpt_header = true;
}
block_height += block.height();
@@ -2978,8 +3002,8 @@ impl EditorElement {
.ilog10()
+ 1;
- let elements = buffer_rows
- .into_iter()
+ buffer_rows
+ .iter()
.enumerate()
.map(|(ix, row_info)| {
let ExpandInfo {
@@ -3034,9 +3058,7 @@ impl EditorElement {
Some((toggle, origin))
})
- .collect();
-
- elements
+ .collect()
}
fn calculate_relative_line_numbers(
@@ -3136,7 +3158,7 @@ impl EditorElement {
let relative_rows = self.calculate_relative_line_numbers(snapshot, &rows, relative_to);
let mut line_number = String::new();
let line_numbers = buffer_rows
- .into_iter()
+ .iter()
.enumerate()
.flat_map(|(ix, row_info)| {
let display_row = DisplayRow(rows.start.0 + ix as u32);
@@ -3213,7 +3235,7 @@ impl EditorElement {
&& self.editor.read(cx).is_singleton(cx);
if include_fold_statuses {
row_infos
- .into_iter()
+ .iter()
.enumerate()
.map(|(ix, info)| {
if info.expand_info.is_some() {
@@ -3253,12 +3275,165 @@ impl EditorElement {
.collect()
}
+ fn bg_segments_per_row(
+ rows: Range<DisplayRow>,
+ selections: &[(PlayerColor, Vec<SelectionLayout>)],
+ highlight_ranges: &[(Range<DisplayPoint>, Hsla)],
+ base_background: Hsla,
+ ) -> Vec<Vec<(Range<DisplayPoint>, Hsla)>> {
+ if rows.start >= rows.end {
+ return Vec::new();
+ }
+ if !base_background.is_opaque() {
+ // We don't actually know what color is behind this editor.
+ return Vec::new();
+ }
+ let highlight_iter = highlight_ranges.iter().cloned();
+ let selection_iter = selections.iter().flat_map(|(player_color, layouts)| {
+ let color = player_color.selection;
+ layouts.iter().filter_map(move |selection_layout| {
+ if selection_layout.range.start != selection_layout.range.end {
+ Some((selection_layout.range.clone(), color))
+ } else {
+ None
+ }
+ })
+ });
+ let mut per_row_map = vec![Vec::new(); rows.len()];
+ for (range, color) in highlight_iter.chain(selection_iter) {
+ let covered_rows = if range.end.column() == 0 {
+ cmp::max(range.start.row(), rows.start)..cmp::min(range.end.row(), rows.end)
+ } else {
+ cmp::max(range.start.row(), rows.start)
+ ..cmp::min(range.end.row().next_row(), rows.end)
+ };
+ for row in covered_rows.iter_rows() {
+ let seg_start = if row == range.start.row() {
+ range.start
+ } else {
+ DisplayPoint::new(row, 0)
+ };
+ let seg_end = if row == range.end.row() && range.end.column() != 0 {
+ range.end
+ } else {
+ DisplayPoint::new(row, u32::MAX)
+ };
+ let ix = row.minus(rows.start) as usize;
+ debug_assert!(row >= rows.start && row < rows.end);
+ debug_assert!(ix < per_row_map.len());
+ per_row_map[ix].push((seg_start..seg_end, color));
+ }
+ }
+ for row_segments in per_row_map.iter_mut() {
+ if row_segments.is_empty() {
+ continue;
+ }
+ let segments = mem::take(row_segments);
+ let merged = Self::merge_overlapping_ranges(segments, base_background);
+ *row_segments = merged;
+ }
+ per_row_map
+ }
+
+ /// Merge overlapping ranges by splitting at all range boundaries and blending colors where
+ /// multiple ranges overlap. The result contains non-overlapping ranges ordered from left to right.
+ ///
+ /// Expects `start.row() == end.row()` for each range.
+ fn merge_overlapping_ranges(
+ ranges: Vec<(Range<DisplayPoint>, Hsla)>,
+ base_background: Hsla,
+ ) -> Vec<(Range<DisplayPoint>, Hsla)> {
+ struct Boundary {
+ pos: DisplayPoint,
+ is_start: bool,
+ index: usize,
+ color: Hsla,
+ }
+
+ let mut boundaries: SmallVec<[Boundary; 16]> = SmallVec::with_capacity(ranges.len() * 2);
+ for (index, (range, color)) in ranges.iter().enumerate() {
+ debug_assert!(
+ range.start.row() == range.end.row(),
+ "expects single-row ranges"
+ );
+ if range.start < range.end {
+ boundaries.push(Boundary {
+ pos: range.start,
+ is_start: true,
+ index,
+ color: *color,
+ });
+ boundaries.push(Boundary {
+ pos: range.end,
+ is_start: false,
+ index,
+ color: *color,
+ });
+ }
+ }
+
+ if boundaries.is_empty() {
+ return Vec::new();
+ }
+
+ boundaries
+ .sort_unstable_by(|a, b| a.pos.cmp(&b.pos).then_with(|| a.is_start.cmp(&b.is_start)));
+
+ let mut processed_ranges: Vec<(Range<DisplayPoint>, Hsla)> = Vec::new();
+ let mut active_ranges: SmallVec<[(usize, Hsla); 8]> = SmallVec::new();
+
+ let mut i = 0;
+ let mut start_pos = boundaries[0].pos;
+
+ let boundaries_len = boundaries.len();
+ while i < boundaries_len {
+ let current_boundary_pos = boundaries[i].pos;
+ if start_pos < current_boundary_pos {
+ if !active_ranges.is_empty() {
+ let mut color = base_background;
+ for &(_, c) in &active_ranges {
+ color = Hsla::blend(color, c);
+ }
+ if let Some((last_range, last_color)) = processed_ranges.last_mut() {
+ if *last_color == color && last_range.end == start_pos {
+ last_range.end = current_boundary_pos;
+ } else {
+ processed_ranges.push((start_pos..current_boundary_pos, color));
+ }
+ } else {
+ processed_ranges.push((start_pos..current_boundary_pos, color));
+ }
+ }
+ }
+ while i < boundaries_len && boundaries[i].pos == current_boundary_pos {
+ let active_range = &boundaries[i];
+ if active_range.is_start {
+ let idx = active_range.index;
+ let pos = active_ranges
+ .binary_search_by_key(&idx, |(i, _)| *i)
+ .unwrap_or_else(|p| p);
+ active_ranges.insert(pos, (idx, active_range.color));
+ } else {
+ let idx = active_range.index;
+ if let Ok(pos) = active_ranges.binary_search_by_key(&idx, |(i, _)| *i) {
+ active_ranges.remove(pos);
+ }
+ }
+ i += 1;
+ }
+ start_pos = current_boundary_pos;
+ }
+
+ processed_ranges
+ }
+
fn layout_lines(
rows: Range<DisplayRow>,
snapshot: &EditorSnapshot,
style: &EditorStyle,
editor_width: Pixels,
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+ bg_segments_per_row: &[Vec<(Range<DisplayPoint>, Hsla)>],
window: &mut Window,
cx: &mut App,
) -> Vec<LineWithInvisibles> {
@@ -3314,6 +3489,7 @@ impl EditorElement {
&snapshot.mode,
editor_width,
is_row_soft_wrapped,
+ bg_segments_per_row,
window,
cx,
)
@@ -3455,42 +3631,41 @@ impl EditorElement {
.into_any_element()
}
- Block::ExcerptBoundary {
- excerpt,
- height,
- starts_new_buffer,
- ..
- } => {
+ Block::ExcerptBoundary { .. } => {
let color = cx.theme().colors().clone();
let mut result = v_flex().id(block_id).w_full();
+ result = result.child(
+ h_flex().relative().child(
+ div()
+ .top(line_height / 2.)
+ .absolute()
+ .w_full()
+ .h_px()
+ .bg(color.border_variant),
+ ),
+ );
+
+ result.into_any()
+ }
+
+ Block::BufferHeader { excerpt, height } => {
+ let mut result = v_flex().id(block_id).w_full();
+
let jump_data = header_jump_data(snapshot, block_row_start, *height, excerpt);
- if *starts_new_buffer {
- if sticky_header_excerpt_id != Some(excerpt.id) {
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ if sticky_header_excerpt_id != Some(excerpt.id) {
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
- result = result.child(div().pr(editor_margins.right).child(
- self.render_buffer_header(
- excerpt, false, selected, false, jump_data, window, cx,
- ),
- ));
- } else {
- result =
- result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
- }
- } else {
- result = result.child(
- h_flex().relative().child(
- div()
- .top(line_height / 2.)
- .absolute()
- .w_full()
- .h_px()
- .bg(color.border_variant),
+ result = result.child(div().pr(editor_margins.right).child(
+ self.render_buffer_header(
+ excerpt, false, selected, false, jump_data, window, cx,
),
- );
- };
+ ));
+ } else {
+ result =
+ result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
+ }
result.into_any()
}
@@ -3600,171 +3775,187 @@ impl EditorElement {
let focus_handle = editor.focus_handle(cx);
let colors = cx.theme().colors();
- let header =
- div()
- .p_1()
- .w_full()
- .h(FILE_HEADER_HEIGHT as f32 * window.line_height())
- .child(
- h_flex()
- .size_full()
- .gap_2()
- .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667)))
- .pl_0p5()
- .pr_5()
- .rounded_sm()
- .when(is_sticky, |el| el.shadow_md())
- .border_1()
- .map(|div| {
- let border_color = if is_selected
- && is_folded
- && focus_handle.contains_focused(window, cx)
- {
- colors.border_focused
- } else {
- colors.border
- };
- div.border_color(border_color)
- })
- .bg(colors.editor_subheader_background)
- .hover(|style| style.bg(colors.element_hover))
- .map(|header| {
- let editor = self.editor.clone();
- let buffer_id = for_excerpt.buffer_id;
- let toggle_chevron_icon =
- FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path);
- header.child(
- div()
- .hover(|style| style.bg(colors.element_selected))
- .rounded_xs()
- .child(
- ButtonLike::new("toggle-buffer-fold")
- .style(ui::ButtonStyle::Transparent)
- .height(px(28.).into())
- .width(px(28.))
- .children(toggle_chevron_icon)
- .tooltip({
- let focus_handle = focus_handle.clone();
- move |window, cx| {
- Tooltip::with_meta_in(
- "Toggle Excerpt Fold",
- Some(&ToggleFold),
- "Alt+click to toggle all",
- &focus_handle,
+ let header = div()
+ .p_1()
+ .w_full()
+ .h(FILE_HEADER_HEIGHT as f32 * window.line_height())
+ .child(
+ h_flex()
+ .size_full()
+ .gap_2()
+ .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667)))
+ .pl_0p5()
+ .pr_5()
+ .rounded_sm()
+ .when(is_sticky, |el| el.shadow_md())
+ .border_1()
+ .map(|div| {
+ let border_color = if is_selected
+ && is_folded
+ && focus_handle.contains_focused(window, cx)
+ {
+ colors.border_focused
+ } else {
+ colors.border
+ };
+ div.border_color(border_color)
+ })
+ .bg(colors.editor_subheader_background)
+ .hover(|style| style.bg(colors.element_hover))
+ .map(|header| {
+ let editor = self.editor.clone();
+ let buffer_id = for_excerpt.buffer_id;
+ let toggle_chevron_icon =
+ FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path);
+ header.child(
+ div()
+ .hover(|style| style.bg(colors.element_selected))
+ .rounded_xs()
+ .child(
+ ButtonLike::new("toggle-buffer-fold")
+ .style(ui::ButtonStyle::Transparent)
+ .height(px(28.).into())
+ .width(px(28.))
+ .children(toggle_chevron_icon)
+ .tooltip({
+ let focus_handle = focus_handle.clone();
+ move |window, cx| {
+ Tooltip::with_meta_in(
+ "Toggle Excerpt Fold",
+ Some(&ToggleFold),
+ "Alt+click to toggle all",
+ &focus_handle,
+ window,
+ cx,
+ )
+ }
+ })
+ .on_click(move |event, window, cx| {
+ if event.modifiers().alt {
+ // Alt+click toggles all buffers
+ editor.update(cx, |editor, cx| {
+ editor.toggle_fold_all(
+ &ToggleFoldAll,
window,
cx,
- )
- }
- })
- .on_click(move |event, window, cx| {
- if event.modifiers().alt {
- // Alt+click toggles all buffers
+ );
+ });
+ } else {
+ // Regular click toggles single buffer
+ if is_folded {
editor.update(cx, |editor, cx| {
- editor.toggle_fold_all(
- &ToggleFoldAll,
- window,
- cx,
- );
+ editor.unfold_buffer(buffer_id, cx);
});
} else {
- // Regular click toggles single buffer
- if is_folded {
- editor.update(cx, |editor, cx| {
- editor.unfold_buffer(buffer_id, cx);
- });
- } else {
- editor.update(cx, |editor, cx| {
- editor.fold_buffer(buffer_id, cx);
- });
- }
+ editor.update(cx, |editor, cx| {
+ editor.fold_buffer(buffer_id, cx);
+ });
}
- }),
- ),
- )
- })
- .children(
- editor
- .addons
- .values()
- .filter_map(|addon| {
- addon.render_buffer_header_controls(for_excerpt, window, cx)
- })
- .take(1),
+ }
+ }),
+ ),
)
- .children(indicator)
- .child(
- h_flex()
- .cursor_pointer()
- .id("path header block")
- .size_full()
- .justify_between()
- .overflow_hidden()
- .child(
- h_flex()
- .gap_2()
- .child(
- Label::new(
- filename
- .map(SharedString::from)
- .unwrap_or_else(|| "untitled".into()),
- )
- .single_line()
- .when_some(file_status, |el, status| {
- el.color(if status.is_conflicted() {
- Color::Conflict
- } else if status.is_modified() {
- Color::Modified
- } else if status.is_deleted() {
- Color::Disabled
- } else {
- Color::Created
- })
- .when(status.is_deleted(), |el| el.strikethrough())
- }),
- )
- .when_some(parent_path, |then, path| {
- then.child(div().child(path).text_color(
- if file_status.is_some_and(FileStatus::is_deleted) {
- colors.text_disabled
- } else {
- colors.text_muted
+ })
+ .children(
+ editor
+ .addons
+ .values()
+ .filter_map(|addon| {
+ addon.render_buffer_header_controls(for_excerpt, window, cx)
+ })
+ .take(1),
+ )
+ .child(
+ h_flex()
+ .size(Pixels(12.0))
+ .justify_center()
+ .children(indicator),
+ )
+ .child(
+ h_flex()
+ .cursor_pointer()
+ .id("path header block")
+ .size_full()
+ .justify_between()
+ .overflow_hidden()
+ .child(
+ h_flex()
+ .gap_2()
+ .map(|path_header| {
+ let filename = filename
+ .map(SharedString::from)
+ .unwrap_or_else(|| "untitled".into());
+
+ path_header
+ .when(ItemSettings::get_global(cx).file_icons, |el| {
+ let path = path::Path::new(filename.as_str());
+ let icon = FileIcons::get_icon(path, cx)
+ .unwrap_or_default();
+ let icon =
+ Icon::from_path(icon).color(Color::Muted);
+ el.child(icon)
+ })
+ .child(Label::new(filename).single_line().when_some(
+ file_status,
+ |el, status| {
+ el.color(if status.is_conflicted() {
+ Color::Conflict
+ } else if status.is_modified() {
+ Color::Modified
+ } else if status.is_deleted() {
+ Color::Disabled
+ } else {
+ Color::Created
+ })
+ .when(status.is_deleted(), |el| {
+ el.strikethrough()
+ })
},
))
- }),
- )
- .when(
- can_open_excerpts && is_selected && relative_path.is_some(),
- |el| {
- el.child(
- h_flex()
- .id("jump-to-file-button")
- .gap_2p5()
- .child(Label::new("Jump To File"))
- .children(
- KeyBinding::for_action_in(
- &OpenExcerpts,
- &focus_handle,
- window,
- cx,
- )
- .map(|binding| binding.into_any_element()),
- ),
- )
- },
- )
- .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
- .on_click(window.listener_for(&self.editor, {
- move |editor, e: &ClickEvent, window, cx| {
- editor.open_excerpts_common(
- Some(jump_data.clone()),
- e.modifiers().secondary(),
- window,
- cx,
- );
- }
- })),
- ),
- );
+ })
+ .when_some(parent_path, |then, path| {
+ then.child(div().child(path).text_color(
+ if file_status.is_some_and(FileStatus::is_deleted) {
+ colors.text_disabled
+ } else {
+ colors.text_muted
+ },
+ ))
+ }),
+ )
+ .when(
+ can_open_excerpts && is_selected && relative_path.is_some(),
+ |el| {
+ el.child(
+ h_flex()
+ .id("jump-to-file-button")
+ .gap_2p5()
+ .child(Label::new("Jump To File"))
+ .children(
+ KeyBinding::for_action_in(
+ &OpenExcerpts,
+ &focus_handle,
+ window,
+ cx,
+ )
+ .map(|binding| binding.into_any_element()),
+ ),
+ )
+ },
+ )
+ .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
+ .on_click(window.listener_for(&self.editor, {
+ move |editor, e: &ClickEvent, window, cx| {
+ editor.open_excerpts_common(
+ Some(jump_data.clone()),
+ e.modifiers().secondary(),
+ window,
+ cx,
+ );
+ }
+ })),
+ ),
+ );
let file = for_excerpt.buffer.file().cloned();
let editor = self.editor.clone();
@@ -3780,25 +3971,31 @@ impl EditorElement {
&& let Some(worktree) =
project.read(cx).worktree_for_id(file.worktree_id(cx), cx)
{
+ let worktree = worktree.read(cx);
let relative_path = file.path();
- let entry_for_path = worktree.read(cx).entry_for_path(relative_path);
- let abs_path = entry_for_path.and_then(|e| e.canonical_path.as_deref());
- let has_relative_path =
- worktree.read(cx).root_entry().is_some_and(Entry::is_dir);
+ let entry_for_path = worktree.entry_for_path(relative_path);
+ let abs_path = entry_for_path.map(|e| {
+ e.canonical_path.as_deref().map_or_else(
+ || worktree.abs_path().join(relative_path),
+ Path::to_path_buf,
+ )
+ });
+ let has_relative_path = worktree.root_entry().is_some_and(Entry::is_dir);
- let parent_abs_path =
- abs_path.and_then(|abs_path| Some(abs_path.parent()?.to_path_buf()));
+ let parent_abs_path = abs_path
+ .as_ref()
+ .and_then(|abs_path| Some(abs_path.parent()?.to_path_buf()));
let relative_path = has_relative_path
.then_some(relative_path)
.map(ToOwned::to_owned);
let visible_in_project_panel =
- relative_path.is_some() && worktree.read(cx).is_visible();
+ relative_path.is_some() && worktree.is_visible();
let reveal_in_project_panel = entry_for_path
.filter(|_| visible_in_project_panel)
.map(|entry| entry.id);
menu = menu
- .when_some(abs_path.map(ToOwned::to_owned), |menu, abs_path| {
+ .when_some(abs_path, |menu, abs_path| {
menu.entry(
"Copy Path",
Some(Box::new(zed_actions::workspace::CopyPath)),
@@ -5711,7 +5908,10 @@ impl EditorElement {
let end_row_in_current_excerpt = snapshot
.blocks_in_range(start_row..end_row)
.find_map(|(start_row, block)| {
- if matches!(block, Block::ExcerptBoundary { .. }) {
+ if matches!(
+ block,
+ Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }
+ ) {
Some(start_row)
} else {
None
@@ -5881,7 +6081,7 @@ impl EditorElement {
};
self.paint_lines_background(layout, window, cx);
- let invisible_display_ranges = self.paint_highlights(layout, window);
+ let invisible_display_ranges = self.paint_highlights(layout, window, cx);
self.paint_document_colors(layout, window);
self.paint_lines(&invisible_display_ranges, layout, window, cx);
self.paint_redactions(layout, window);
@@ -5903,6 +6103,7 @@ impl EditorElement {
&mut self,
layout: &mut EditorLayout,
window: &mut Window,
+ cx: &mut App,
) -> SmallVec<[Range<DisplayPoint>; 32]> {
window.paint_layer(layout.position_map.text_hitbox.bounds, |window| {
let mut invisible_display_ranges = SmallVec::<[Range<DisplayPoint>; 32]>::new();
@@ -5919,7 +6120,11 @@ impl EditorElement {
);
}
- let corner_radius = 0.15 * layout.position_map.line_height;
+ let corner_radius = if EditorSettings::get_global(cx).rounded_selection {
+ 0.15 * layout.position_map.line_height
+ } else {
+ Pixels::ZERO
+ };
for (player_color, selections) in &layout.selections {
for selection in selections.iter() {
@@ -7122,7 +7327,7 @@ fn header_jump_data(
pub struct AcceptEditPredictionBinding(pub(crate) Option<gpui::KeyBinding>);
impl AcceptEditPredictionBinding {
- pub fn keystroke(&self) -> Option<&Keystroke> {
+ pub fn keystroke(&self) -> Option<&KeybindingKeystroke> {
if let Some(binding) = self.0.as_ref() {
match &binding.keystrokes() {
[keystroke, ..] => Some(keystroke),
@@ -7207,12 +7412,13 @@ fn render_blame_entry_popover(
markdown: Entity<Markdown>,
workspace: WeakEntity<Workspace>,
blame: &Entity<GitBlame>,
+ buffer: BufferId,
window: &mut Window,
cx: &mut App,
) -> Option<AnyElement> {
let renderer = cx.global::<GlobalBlameRenderer>().0.clone();
let blame = blame.read(cx);
- let repository = blame.repository(cx)?.clone();
+ let repository = blame.repository(cx, buffer)?;
renderer.render_blame_entry_popover(
blame_entry,
scroll_handle,
@@ -7233,6 +7439,7 @@ fn render_blame_entry(
last_used_color: &mut Option<(PlayerColor, Oid)>,
editor: Entity<Editor>,
workspace: Entity<Workspace>,
+ buffer: BufferId,
renderer: Arc<dyn BlameRenderer>,
cx: &mut App,
) -> Option<AnyElement> {
@@ -7253,8 +7460,8 @@ fn render_blame_entry(
last_used_color.replace((sha_color, blame_entry.sha));
let blame = blame.read(cx);
- let details = blame.details_for_entry(&blame_entry);
- let repository = blame.repository(cx)?;
+ let details = blame.details_for_entry(buffer, &blame_entry);
+ let repository = blame.repository(cx, buffer)?;
renderer.render_blame_entry(
&style.text,
blame_entry,
@@ -7309,6 +7516,7 @@ impl LineWithInvisibles {
editor_mode: &EditorMode,
text_width: Pixels,
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+ bg_segments_per_row: &[Vec<(Range<DisplayPoint>, Hsla)>],
window: &mut Window,
cx: &mut App,
) -> Vec<Self> {
@@ -7324,6 +7532,7 @@ impl LineWithInvisibles {
let mut row = 0;
let mut line_exceeded_max_len = false;
let font_size = text_style.font_size.to_pixels(window.rem_size());
+ let min_contrast = EditorSettings::get_global(cx).minimum_contrast_for_highlights;
let ellipsis = SharedString::from("⋯");
@@ -7336,10 +7545,16 @@ impl LineWithInvisibles {
}]) {
if let Some(replacement) = highlighted_chunk.replacement {
if !line.is_empty() {
+ let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]);
+ let text_runs: &[TextRun] = if segments.is_empty() {
+ &styles
+ } else {
+ &Self::split_runs_by_bg_segments(&styles, segments, min_contrast)
+ };
let shaped_line = window.text_system().shape_line(
line.clone().into(),
font_size,
- &styles,
+ text_runs,
None,
);
width += shaped_line.width;
@@ -7417,10 +7632,16 @@ impl LineWithInvisibles {
} else {
for (ix, mut line_chunk) in highlighted_chunk.text.split('\n').enumerate() {
if ix > 0 {
+ let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]);
+ let text_runs = if segments.is_empty() {
+ &styles
+ } else {
+ &Self::split_runs_by_bg_segments(&styles, segments, min_contrast)
+ };
let shaped_line = window.text_system().shape_line(
line.clone().into(),
font_size,
- &styles,
+ text_runs,
None,
);
width += shaped_line.width;
@@ -10,16 +10,18 @@ use gpui::{
AnyElement, App, AppContext as _, Context, Entity, Hsla, ScrollHandle, Subscription, Task,
TextStyle, WeakEntity, Window,
};
-use language::{Bias, Buffer, BufferSnapshot, Edit};
+use itertools::Itertools;
+use language::{Bias, BufferSnapshot, Edit};
use markdown::Markdown;
-use multi_buffer::RowInfo;
+use multi_buffer::{MultiBuffer, RowInfo};
use project::{
- Project, ProjectItem,
+ Project, ProjectItem as _,
git_store::{GitStoreEvent, Repository, RepositoryEvent},
};
use smallvec::SmallVec;
use std::{sync::Arc, time::Duration};
use sum_tree::SumTree;
+use text::BufferId;
use workspace::Workspace;
#[derive(Clone, Debug, Default)]
@@ -63,16 +65,19 @@ impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 {
}
}
-pub struct GitBlame {
- project: Entity<Project>,
- buffer: Entity<Buffer>,
+struct GitBlameBuffer {
entries: SumTree<GitBlameEntry>,
- commit_details: HashMap<Oid, ParsedCommitMessage>,
buffer_snapshot: BufferSnapshot,
buffer_edits: text::Subscription,
+ commit_details: HashMap<Oid, ParsedCommitMessage>,
+}
+
+pub struct GitBlame {
+ project: Entity<Project>,
+ multi_buffer: WeakEntity<MultiBuffer>,
+ buffers: HashMap<BufferId, GitBlameBuffer>,
task: Task<Result<()>>,
focused: bool,
- generated: bool,
changed_while_blurred: bool,
user_triggered: bool,
regenerate_on_edit_task: Task<Result<()>>,
@@ -184,47 +189,46 @@ impl gpui::Global for GlobalBlameRenderer {}
impl GitBlame {
pub fn new(
- buffer: Entity<Buffer>,
+ multi_buffer: Entity<MultiBuffer>,
project: Entity<Project>,
user_triggered: bool,
focused: bool,
cx: &mut Context<Self>,
) -> Self {
- let entries = SumTree::from_item(
- GitBlameEntry {
- rows: buffer.read(cx).max_point().row + 1,
- blame: None,
+ let multi_buffer_subscription = cx.subscribe(
+ &multi_buffer,
+ |git_blame, multi_buffer, event, cx| match event {
+ multi_buffer::Event::DirtyChanged => {
+ if !multi_buffer.read(cx).is_dirty(cx) {
+ git_blame.generate(cx);
+ }
+ }
+ multi_buffer::Event::ExcerptsAdded { .. }
+ | multi_buffer::Event::ExcerptsEdited { .. } => git_blame.regenerate_on_edit(cx),
+ _ => {}
},
- &(),
);
- let buffer_subscriptions = cx.subscribe(&buffer, |this, buffer, event, cx| match event {
- language::BufferEvent::DirtyChanged => {
- if !buffer.read(cx).is_dirty() {
- this.generate(cx);
- }
- }
- language::BufferEvent::Edited => {
- this.regenerate_on_edit(cx);
- }
- _ => {}
- });
-
let project_subscription = cx.subscribe(&project, {
- let buffer = buffer.clone();
-
- move |this, _, event, cx| match event {
- project::Event::WorktreeUpdatedEntries(_, updated) => {
- let project_entry_id = buffer.read(cx).entry_id(cx);
+ let multi_buffer = multi_buffer.downgrade();
+
+ move |git_blame, _, event, cx| {
+ if let project::Event::WorktreeUpdatedEntries(_, updated) = event {
+ let Some(multi_buffer) = multi_buffer.upgrade() else {
+ return;
+ };
+ let project_entry_id = multi_buffer
+ .read(cx)
+ .as_singleton()
+ .and_then(|it| it.read(cx).entry_id(cx));
if updated
.iter()
.any(|(_, entry_id, _)| project_entry_id == Some(*entry_id))
{
log::debug!("Updated buffers. Regenerating blame data...",);
- this.generate(cx);
+ git_blame.generate(cx);
}
}
- _ => {}
}
});
@@ -240,24 +244,17 @@ impl GitBlame {
_ => {}
});
- let buffer_snapshot = buffer.read(cx).snapshot();
- let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
-
let mut this = Self {
project,
- buffer,
- buffer_snapshot,
- entries,
- buffer_edits,
+ multi_buffer: multi_buffer.downgrade(),
+ buffers: HashMap::default(),
user_triggered,
focused,
changed_while_blurred: false,
- commit_details: HashMap::default(),
task: Task::ready(Ok(())),
- generated: false,
regenerate_on_edit_task: Task::ready(Ok(())),
_regenerate_subscriptions: vec![
- buffer_subscriptions,
+ multi_buffer_subscription,
project_subscription,
git_store_subscription,
],
@@ -266,56 +263,63 @@ impl GitBlame {
this
}
- pub fn repository(&self, cx: &App) -> Option<Entity<Repository>> {
+ pub fn repository(&self, cx: &App, id: BufferId) -> Option<Entity<Repository>> {
self.project
.read(cx)
.git_store()
.read(cx)
- .repository_and_path_for_buffer_id(self.buffer.read(cx).remote_id(), cx)
+ .repository_and_path_for_buffer_id(id, cx)
.map(|(repo, _)| repo)
}
pub fn has_generated_entries(&self) -> bool {
- self.generated
+ !self.buffers.is_empty()
}
- pub fn details_for_entry(&self, entry: &BlameEntry) -> Option<ParsedCommitMessage> {
- self.commit_details.get(&entry.sha).cloned()
+ pub fn details_for_entry(
+ &self,
+ buffer: BufferId,
+ entry: &BlameEntry,
+ ) -> Option<ParsedCommitMessage> {
+ self.buffers
+ .get(&buffer)?
+ .commit_details
+ .get(&entry.sha)
+ .cloned()
}
pub fn blame_for_rows<'a>(
&'a mut self,
rows: &'a [RowInfo],
- cx: &App,
- ) -> impl 'a + Iterator<Item = Option<BlameEntry>> {
- self.sync(cx);
-
- let buffer_id = self.buffer_snapshot.remote_id();
- let mut cursor = self.entries.cursor::<u32>(&());
- rows.into_iter().map(move |info| {
- let row = info
- .buffer_row
- .filter(|_| info.buffer_id == Some(buffer_id))?;
- cursor.seek_forward(&row, Bias::Right);
- cursor.item()?.blame.clone()
+ cx: &'a mut App,
+ ) -> impl Iterator<Item = Option<(BufferId, BlameEntry)>> + use<'a> {
+ rows.iter().map(move |info| {
+ let buffer_id = info.buffer_id?;
+ self.sync(cx, buffer_id);
+
+ let buffer_row = info.buffer_row?;
+ let mut cursor = self.buffers.get(&buffer_id)?.entries.cursor::<u32>(&());
+ cursor.seek_forward(&buffer_row, Bias::Right);
+ Some((buffer_id, cursor.item()?.blame.clone()?))
})
}
- pub fn max_author_length(&mut self, cx: &App) -> usize {
- self.sync(cx);
-
+ pub fn max_author_length(&mut self, cx: &mut App) -> usize {
let mut max_author_length = 0;
-
- for entry in self.entries.iter() {
- let author_len = entry
- .blame
- .as_ref()
- .and_then(|entry| entry.author.as_ref())
- .map(|author| author.len());
- if let Some(author_len) = author_len
- && author_len > max_author_length
- {
- max_author_length = author_len;
+ self.sync_all(cx);
+
+ for buffer in self.buffers.values() {
+ for entry in buffer.entries.iter() {
+ let author_len = entry
+ .blame
+ .as_ref()
+ .and_then(|entry| entry.author.as_ref())
+ .map(|author| author.len());
+ if let Some(author_len) = author_len
+ && author_len > max_author_length
+ {
+ max_author_length = author_len;
+ }
}
}
@@ -337,22 +341,48 @@ impl GitBlame {
}
}
- fn sync(&mut self, cx: &App) {
- let edits = self.buffer_edits.consume();
- let new_snapshot = self.buffer.read(cx).snapshot();
+ fn sync_all(&mut self, cx: &mut App) {
+ let Some(multi_buffer) = self.multi_buffer.upgrade() else {
+ return;
+ };
+ multi_buffer
+ .read(cx)
+ .excerpt_buffer_ids()
+ .into_iter()
+ .for_each(|id| self.sync(cx, id));
+ }
+
+ fn sync(&mut self, cx: &mut App, buffer_id: BufferId) {
+ let Some(blame_buffer) = self.buffers.get_mut(&buffer_id) else {
+ return;
+ };
+ let Some(buffer) = self
+ .multi_buffer
+ .upgrade()
+ .and_then(|multi_buffer| multi_buffer.read(cx).buffer(buffer_id))
+ else {
+ return;
+ };
+ let edits = blame_buffer.buffer_edits.consume();
+ let new_snapshot = buffer.read(cx).snapshot();
let mut row_edits = edits
.into_iter()
.map(|edit| {
- let old_point_range = self.buffer_snapshot.offset_to_point(edit.old.start)
- ..self.buffer_snapshot.offset_to_point(edit.old.end);
+ let old_point_range = blame_buffer.buffer_snapshot.offset_to_point(edit.old.start)
+ ..blame_buffer.buffer_snapshot.offset_to_point(edit.old.end);
let new_point_range = new_snapshot.offset_to_point(edit.new.start)
..new_snapshot.offset_to_point(edit.new.end);
if old_point_range.start.column
- == self.buffer_snapshot.line_len(old_point_range.start.row)
+ == blame_buffer
+ .buffer_snapshot
+ .line_len(old_point_range.start.row)
&& (new_snapshot.chars_at(edit.new.start).next() == Some('\n')
- || self.buffer_snapshot.line_len(old_point_range.end.row) == 0)
+ || blame_buffer
+ .buffer_snapshot
+ .line_len(old_point_range.end.row)
+ == 0)
{
Edit {
old: old_point_range.start.row + 1..old_point_range.end.row + 1,
@@ -376,7 +406,7 @@ impl GitBlame {
.peekable();
let mut new_entries = SumTree::default();
- let mut cursor = self.entries.cursor::<u32>(&());
+ let mut cursor = blame_buffer.entries.cursor::<u32>(&());
while let Some(mut edit) = row_edits.next() {
while let Some(next_edit) = row_edits.peek() {
@@ -434,17 +464,28 @@ impl GitBlame {
new_entries.append(cursor.suffix(), &());
drop(cursor);
- self.buffer_snapshot = new_snapshot;
- self.entries = new_entries;
+ blame_buffer.buffer_snapshot = new_snapshot;
+ blame_buffer.entries = new_entries;
}
#[cfg(test)]
fn check_invariants(&mut self, cx: &mut Context<Self>) {
- self.sync(cx);
- assert_eq!(
- self.entries.summary().rows,
- self.buffer.read(cx).max_point().row + 1
- );
+ self.sync_all(cx);
+ for (&id, buffer) in &self.buffers {
+ assert_eq!(
+ buffer.entries.summary().rows,
+ self.multi_buffer
+ .upgrade()
+ .unwrap()
+ .read(cx)
+ .buffer(id)
+ .unwrap()
+ .read(cx)
+ .max_point()
+ .row
+ + 1
+ );
+ }
}
fn generate(&mut self, cx: &mut Context<Self>) {
@@ -452,62 +493,105 @@ impl GitBlame {
self.changed_while_blurred = true;
return;
}
- let buffer_edits = self.buffer.update(cx, |buffer, _| buffer.subscribe());
- let snapshot = self.buffer.read(cx).snapshot();
let blame = self.project.update(cx, |project, cx| {
- project.blame_buffer(&self.buffer, None, cx)
+ let Some(multi_buffer) = self.multi_buffer.upgrade() else {
+ return Vec::new();
+ };
+ multi_buffer
+ .read(cx)
+ .all_buffer_ids()
+ .into_iter()
+ .filter_map(|id| {
+ let buffer = multi_buffer.read(cx).buffer(id)?;
+ let snapshot = buffer.read(cx).snapshot();
+ let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
+
+ let blame_buffer = project.blame_buffer(&buffer, None, cx);
+ Some((id, snapshot, buffer_edits, blame_buffer))
+ })
+ .collect::<Vec<_>>()
});
let provider_registry = GitHostingProviderRegistry::default_global(cx);
self.task = cx.spawn(async move |this, cx| {
- let result = cx
+ let (result, errors) = cx
.background_spawn({
- let snapshot = snapshot.clone();
async move {
- let Some(Blame {
- entries,
- messages,
- remote_url,
- }) = blame.await?
- else {
- return Ok(None);
- };
-
- let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row);
- let commit_details =
- parse_commit_messages(messages, remote_url, provider_registry).await;
-
- anyhow::Ok(Some((entries, commit_details)))
+ let mut res = vec![];
+ let mut errors = vec![];
+ for (id, snapshot, buffer_edits, blame) in blame {
+ match blame.await {
+ Ok(Some(Blame {
+ entries,
+ messages,
+ remote_url,
+ })) => {
+ let entries = build_blame_entry_sum_tree(
+ entries,
+ snapshot.max_point().row,
+ );
+ let commit_details = parse_commit_messages(
+ messages,
+ remote_url,
+ provider_registry.clone(),
+ )
+ .await;
+
+ res.push((
+ id,
+ snapshot,
+ buffer_edits,
+ Some(entries),
+ commit_details,
+ ));
+ }
+ Ok(None) => {
+ res.push((id, snapshot, buffer_edits, None, Default::default()))
+ }
+ Err(e) => errors.push(e),
+ }
+ }
+ (res, errors)
}
})
.await;
- this.update(cx, |this, cx| match result {
- Ok(None) => {
- // Nothing to do, e.g. no repository found
+ this.update(cx, |this, cx| {
+ this.buffers.clear();
+ for (id, snapshot, buffer_edits, entries, commit_details) in result {
+ let Some(entries) = entries else {
+ continue;
+ };
+ this.buffers.insert(
+ id,
+ GitBlameBuffer {
+ buffer_edits,
+ buffer_snapshot: snapshot,
+ entries,
+ commit_details,
+ },
+ );
}
- Ok(Some((entries, commit_details))) => {
- this.buffer_edits = buffer_edits;
- this.buffer_snapshot = snapshot;
- this.entries = entries;
- this.commit_details = commit_details;
- this.generated = true;
- cx.notify();
+ cx.notify();
+ if !errors.is_empty() {
+ this.project.update(cx, |_, cx| {
+ if this.user_triggered {
+ log::error!("failed to get git blame data: {errors:?}");
+ let notification = errors
+ .into_iter()
+ .format_with(",", |e, f| f(&format_args!("{:#}", e)))
+ .to_string();
+ cx.emit(project::Event::Toast {
+ notification_id: "git-blame".into(),
+ message: notification,
+ });
+ } else {
+ // If we weren't triggered by a user, we just log errors in the background, instead of sending
+ // notifications.
+ log::debug!("failed to get git blame data: {errors:?}");
+ }
+ })
}
- Err(error) => this.project.update(cx, |_, cx| {
- if this.user_triggered {
- log::error!("failed to get git blame data: {error:?}");
- let notification = format!("{:#}", error).trim().to_string();
- cx.emit(project::Event::Toast {
- notification_id: "git-blame".into(),
- message: notification,
- });
- } else {
- // If we weren't triggered by a user, we just log errors in the background, instead of sending
- // notifications.
- log::debug!("failed to get git blame data: {error:?}");
- }
- }),
})
});
}
@@ -521,7 +605,7 @@ impl GitBlame {
this.update(cx, |this, cx| {
this.generate(cx);
})
- })
+ });
}
}
@@ -660,6 +744,9 @@ mod tests {
)
.collect::<Vec<_>>(),
expected
+ .into_iter()
+ .map(|it| Some((buffer_id, it?)))
+ .collect::<Vec<_>>()
);
}
@@ -706,6 +793,7 @@ mod tests {
})
.await
.unwrap();
+ let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let blame = cx.new(|cx| GitBlame::new(buffer.clone(), project.clone(), true, true, cx));
@@ -786,6 +874,7 @@ mod tests {
.await
.unwrap();
let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
+ let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let git_blame = cx.new(|cx| GitBlame::new(buffer.clone(), project, false, true, cx));
@@ -807,14 +896,14 @@ mod tests {
)
.collect::<Vec<_>>(),
vec![
- Some(blame_entry("1b1b1b", 0..1)),
- Some(blame_entry("0d0d0d", 1..2)),
- Some(blame_entry("3a3a3a", 2..3)),
+ Some((buffer_id, blame_entry("1b1b1b", 0..1))),
+ Some((buffer_id, blame_entry("0d0d0d", 1..2))),
+ Some((buffer_id, blame_entry("3a3a3a", 2..3))),
None,
None,
- Some(blame_entry("3a3a3a", 5..6)),
- Some(blame_entry("0d0d0d", 6..7)),
- Some(blame_entry("3a3a3a", 7..8)),
+ Some((buffer_id, blame_entry("3a3a3a", 5..6))),
+ Some((buffer_id, blame_entry("0d0d0d", 6..7))),
+ Some((buffer_id, blame_entry("3a3a3a", 7..8))),
]
);
// Subset of lines
@@ -832,8 +921,8 @@ mod tests {
)
.collect::<Vec<_>>(),
vec![
- Some(blame_entry("0d0d0d", 1..2)),
- Some(blame_entry("3a3a3a", 2..3)),
+ Some((buffer_id, blame_entry("0d0d0d", 1..2))),
+ Some((buffer_id, blame_entry("3a3a3a", 2..3))),
None
]
);
@@ -853,7 +942,7 @@ mod tests {
cx
)
.collect::<Vec<_>>(),
- vec![Some(blame_entry("0d0d0d", 1..2)), None, None]
+ vec![Some((buffer_id, blame_entry("0d0d0d", 1..2))), None, None]
);
});
}
@@ -896,6 +985,7 @@ mod tests {
.await
.unwrap();
let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
+ let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let git_blame = cx.new(|cx| GitBlame::new(buffer.clone(), project, false, true, cx));
@@ -1017,7 +1107,7 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.executor());
- let buffer_initial_text_len = rng.gen_range(5..15);
+ let buffer_initial_text_len = rng.random_range(5..15);
let mut buffer_initial_text = Rope::from(
RandomCharIter::new(&mut rng)
.take(buffer_initial_text_len)
@@ -1062,13 +1152,14 @@ mod tests {
})
.await
.unwrap();
+ let mbuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
- let git_blame = cx.new(|cx| GitBlame::new(buffer.clone(), project, false, true, cx));
+ let git_blame = cx.new(|cx| GitBlame::new(mbuffer.clone(), project, false, true, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
for _ in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..=19 => {
log::info!("quiescing");
cx.executor().run_until_parked();
@@ -1111,8 +1202,8 @@ mod tests {
let mut blame_entries = Vec::new();
for ix in 0..5 {
if last_row < max_row {
- let row_start = rng.gen_range(last_row..max_row);
- let row_end = rng.gen_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1);
+ let row_start = rng.random_range(last_row..max_row);
+ let row_end = rng.random_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1);
blame_entries.push(blame_entry(&ix.to_string(), row_start..row_end));
last_row = row_end;
} else {
@@ -1,6 +1,7 @@
use crate::{Editor, RangeToAnchorExt};
-use gpui::{Context, Window};
+use gpui::{Context, HighlightStyle, Window};
use language::CursorShape;
+use theme::ActiveTheme;
enum MatchingBracketHighlight {}
@@ -9,7 +10,7 @@ pub fn refresh_matching_bracket_highlights(
window: &mut Window,
cx: &mut Context<Editor>,
) {
- editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
+ editor.clear_highlights::<MatchingBracketHighlight>(cx);
let newest_selection = editor.selections.newest::<usize>(cx);
// Don't highlight brackets if the selection isn't empty
@@ -35,12 +36,19 @@ pub fn refresh_matching_bracket_highlights(
.buffer_snapshot
.innermost_enclosing_bracket_ranges(head..tail, None)
{
- editor.highlight_background::<MatchingBracketHighlight>(
- &[
+ editor.highlight_text::<MatchingBracketHighlight>(
+ vec![
opening_range.to_anchors(&snapshot.buffer_snapshot),
closing_range.to_anchors(&snapshot.buffer_snapshot),
],
- |theme| theme.colors().editor_document_highlight_bracket_background,
+ HighlightStyle {
+ background_color: Some(
+ cx.theme()
+ .colors()
+ .editor_document_highlight_bracket_background,
+ ),
+ ..Default::default()
+ },
cx,
)
}
@@ -104,7 +112,7 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
pub fn test«(»"Test argument"«)» {
another_test(1, 2, 3);
}
@@ -115,7 +123,7 @@ mod tests {
another_test(1, ˇ2, 3);
}
"#});
- cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
pub fn test("Test argument") {
another_test«(»1, 2, 3«)»;
}
@@ -126,7 +134,7 @@ mod tests {
anotherˇ_test(1, 2, 3);
}
"#});
- cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
pub fn test("Test argument") «{»
another_test(1, 2, 3);
«}»
@@ -138,7 +146,7 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
pub fn test("Test argument") {
another_test(1, 2, 3);
}
@@ -150,8 +158,8 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
- pub fn test("Test argument") {
+ cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test«("Test argument") {
another_test(1, 2, 3);
}
"#});
@@ -188,22 +188,26 @@ impl Editor {
pub fn scroll_hover(
&mut self,
- amount: &ScrollAmount,
+ amount: ScrollAmount,
window: &mut Window,
cx: &mut Context<Self>,
) -> bool {
let selection = self.selections.newest_anchor().head();
let snapshot = self.snapshot(window, cx);
- let Some(popover) = self.hover_state.info_popovers.iter().find(|popover| {
+ if let Some(popover) = self.hover_state.info_popovers.iter().find(|popover| {
popover
.symbol_range
.point_within_range(&TriggerPoint::Text(selection), &snapshot)
- }) else {
- return false;
- };
- popover.scroll(amount, window, cx);
- true
+ }) {
+ popover.scroll(amount, window, cx);
+ true
+ } else if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() {
+ context_menu.scroll_aside(amount, window, cx);
+ true
+ } else {
+ false
+ }
}
fn cmd_click_reveal_task(
@@ -321,7 +325,10 @@ pub fn update_inlay_link_and_hover_points(
if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) {
match cached_hint.resolve_state {
ResolveState::CanResolve(_, _) => {
- if let Some(buffer_id) = previous_valid_anchor.buffer_id {
+ if let Some(buffer_id) = snapshot
+ .buffer_snapshot
+ .buffer_id_for_anchor(previous_valid_anchor)
+ {
inlay_hint_cache.spawn_hint_resolve(
buffer_id,
excerpt_id,
@@ -559,7 +566,7 @@ pub fn show_link_definition(
provider.definitions(&buffer, buffer_position, preferred_kind, cx)
})?;
if let Some(task) = task {
- task.await.ok().map(|definition_result| {
+ task.await.ok().flatten().map(|definition_result| {
(
definition_result.iter().find_map(|link| {
link.origin.as_ref().and_then(|origin| {
@@ -174,11 +174,9 @@ pub fn hover_at_inlay(
let subscription = this
.update(cx, |_, cx| {
- if let Some(parsed_content) = &parsed_content {
- Some(cx.observe(parsed_content, |_, _, cx| cx.notify()))
- } else {
- None
- }
+ parsed_content.as_ref().map(|parsed_content| {
+ cx.observe(parsed_content, |_, _, cx| cx.notify())
+ })
})
.ok()
.flatten();
@@ -430,7 +428,7 @@ fn show_hover(
};
let hovers_response = if let Some(hover_request) = hover_request {
- hover_request.await
+ hover_request.await.unwrap_or_default()
} else {
Vec::new()
};
@@ -450,11 +448,9 @@ fn show_hover(
let scroll_handle = ScrollHandle::new();
let subscription = this
.update(cx, |_, cx| {
- if let Some(parsed_content) = &parsed_content {
- Some(cx.observe(parsed_content, |_, _, cx| cx.notify()))
- } else {
- None
- }
+ parsed_content.as_ref().map(|parsed_content| {
+ cx.observe(parsed_content, |_, _, cx| cx.notify())
+ })
})
.ok()
.flatten();
@@ -502,11 +498,9 @@ fn show_hover(
hover_highlights.push(range.clone());
let subscription = this
.update(cx, |_, cx| {
- if let Some(parsed_content) = &parsed_content {
- Some(cx.observe(parsed_content, |_, _, cx| cx.notify()))
- } else {
- None
- }
+ parsed_content.as_ref().map(|parsed_content| {
+ cx.observe(parsed_content, |_, _, cx| cx.notify())
+ })
})
.ok()
.flatten();
@@ -603,18 +597,15 @@ async fn parse_blocks(
})
.join("\n\n");
- let rendered_block = cx
- .new_window_entity(|_window, cx| {
- Markdown::new(
- combined_text.into(),
- language_registry.cloned(),
- language.map(|language| language.name()),
- cx,
- )
- })
- .ok();
-
- rendered_block
+ cx.new_window_entity(|_window, cx| {
+ Markdown::new(
+ combined_text.into(),
+ language_registry.cloned(),
+ language.map(|language| language.name()),
+ cx,
+ )
+ })
+ .ok()
}
pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
@@ -626,7 +617,7 @@ pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
let mut base_text_style = window.text_style();
base_text_style.refine(&TextStyleRefinement {
- font_family: Some(ui_font_family.clone()),
+ font_family: Some(ui_font_family),
font_fallbacks: ui_font_fallbacks,
color: Some(cx.theme().colors().editor_foreground),
..Default::default()
@@ -675,7 +666,7 @@ pub fn diagnostics_markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
let mut base_text_style = window.text_style();
base_text_style.refine(&TextStyleRefinement {
- font_family: Some(ui_font_family.clone()),
+ font_family: Some(ui_font_family),
font_fallbacks: ui_font_fallbacks,
color: Some(cx.theme().colors().editor_foreground),
..Default::default()
@@ -905,7 +896,7 @@ impl InfoPopover {
.into_any_element()
}
- pub fn scroll(&self, amount: &ScrollAmount, window: &mut Window, cx: &mut Context<Editor>) {
+ pub fn scroll(&self, amount: ScrollAmount, window: &mut Window, cx: &mut Context<Editor>) {
let mut current = self.scroll_handle.offset();
current.y -= amount.pixels(
window.line_height(),
@@ -164,8 +164,8 @@ pub fn indent_guides_in_range(
let end_anchor = snapshot.buffer_snapshot.anchor_after(end_offset);
let mut fold_ranges = Vec::<Range<Point>>::new();
- let mut folds = snapshot.folds_in_range(start_offset..end_offset).peekable();
- while let Some(fold) = folds.next() {
+ let folds = snapshot.folds_in_range(start_offset..end_offset).peekable();
+ for fold in folds {
let start = fold.range.start.to_point(&snapshot.buffer_snapshot);
let end = fold.range.end.to_point(&snapshot.buffer_snapshot);
if let Some(last_range) = fold_ranges.last_mut()
@@ -475,10 +475,7 @@ impl InlayHintCache {
let excerpt_cached_hints = excerpt_cached_hints.read();
let mut excerpt_cache = excerpt_cached_hints.ordered_hints.iter().fuse().peekable();
shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| {
- let Some(buffer) = shown_anchor
- .buffer_id
- .and_then(|buffer_id| multi_buffer.buffer(buffer_id))
- else {
+ let Some(buffer) = multi_buffer.buffer_for_anchor(*shown_anchor, cx) else {
return false;
};
let buffer_snapshot = buffer.read(cx).snapshot();
@@ -1342,7 +1339,7 @@ pub mod tests {
let i = task_lsp_request_count.fetch_add(1, Ordering::Release) + 1;
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(file_with_hints).unwrap(),
+ lsp::Uri::from_file_path(file_with_hints).unwrap(),
);
Ok(Some(vec![lsp::InlayHint {
position: lsp::Position::new(0, i),
@@ -1452,7 +1449,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(file_with_hints).unwrap(),
+ lsp::Uri::from_file_path(file_with_hints).unwrap(),
);
let current_call_id =
Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst);
@@ -1597,7 +1594,7 @@ pub mod tests {
"Rust" => {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs"))
+ lsp::Uri::from_file_path(path!("/a/main.rs"))
.unwrap(),
);
rs_lsp_request_count.fetch_add(1, Ordering::Release)
@@ -1606,7 +1603,7 @@ pub mod tests {
"Markdown" => {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/other.md"))
+ lsp::Uri::from_file_path(path!("/a/other.md"))
.unwrap(),
);
md_lsp_request_count.fetch_add(1, Ordering::Release)
@@ -1792,7 +1789,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(file_with_hints).unwrap(),
+ lsp::Uri::from_file_path(file_with_hints).unwrap(),
);
Ok(Some(vec![
lsp::InlayHint {
@@ -2130,7 +2127,7 @@ pub mod tests {
let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1;
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(file_with_hints).unwrap(),
+ lsp::Uri::from_file_path(file_with_hints).unwrap(),
);
Ok(Some(vec![lsp::InlayHint {
position: lsp::Position::new(0, i),
@@ -2293,7 +2290,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
task_lsp_request_ranges.lock().push(params.range);
@@ -2636,11 +2633,11 @@ pub mod tests {
let task_editor_edited = Arc::clone(&closure_editor_edited);
async move {
let hint_text = if params.text_document.uri
- == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+ == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
{
"main hint"
} else if params.text_document.uri
- == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
+ == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap()
{
"other hint"
} else {
@@ -2947,11 +2944,11 @@ pub mod tests {
let task_editor_edited = Arc::clone(&closure_editor_edited);
async move {
let hint_text = if params.text_document.uri
- == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+ == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
{
"main hint"
} else if params.text_document.uri
- == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
+ == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap()
{
"other hint"
} else {
@@ -3119,7 +3116,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
let query_start = params.range.start;
Ok(Some(vec![lsp::InlayHint {
@@ -3191,7 +3188,7 @@ pub mod tests {
async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(file_with_hints).unwrap(),
+ lsp::Uri::from_file_path(file_with_hints).unwrap(),
);
let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1;
@@ -3354,7 +3351,7 @@ pub mod tests {
move |params, _| async move {
assert_eq!(
params.text_document.uri,
- lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
);
Ok(Some(
serde_json::from_value(json!([
@@ -42,6 +42,7 @@ use ui::{IconDecorationKind, prelude::*};
use util::{ResultExt, TryFutureExt, paths::PathExt};
use workspace::{
CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId,
+ invalid_buffer_view::InvalidBufferView,
item::{FollowableItem, Item, ItemEvent, ProjectItem, SaveOptions},
searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
};
@@ -103,9 +104,9 @@ impl FollowableItem for Editor {
multibuffer = MultiBuffer::new(project.read(cx).capability());
let mut sorted_excerpts = state.excerpts.clone();
sorted_excerpts.sort_by_key(|e| e.id);
- let mut sorted_excerpts = sorted_excerpts.into_iter().peekable();
+ let sorted_excerpts = sorted_excerpts.into_iter().peekable();
- while let Some(excerpt) = sorted_excerpts.next() {
+ for excerpt in sorted_excerpts {
let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
continue;
};
@@ -293,7 +294,7 @@ impl FollowableItem for Editor {
EditorEvent::ExcerptsRemoved { ids, .. } => {
update
.deleted_excerpts
- .extend(ids.iter().map(ExcerptId::to_proto));
+ .extend(ids.iter().copied().map(ExcerptId::to_proto));
true
}
EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => {
@@ -650,7 +651,8 @@ impl Item for Editor {
if let Some(path) = path_for_buffer(&self.buffer, detail, true, cx) {
path.to_string_lossy().to_string().into()
} else {
- "untitled".into()
+ // Use the same logic as the displayed title for consistency
+ self.buffer.read(cx).title(cx).to_string().into()
}
}
@@ -774,12 +776,6 @@ impl Item for Editor {
self.nav_history = Some(history);
}
- fn discarded(&self, _project: Entity<Project>, _: &mut Window, cx: &mut Context<Self>) {
- for buffer in self.buffer().clone().read(cx).all_buffers() {
- buffer.update(cx, |buffer, cx| buffer.discarded(cx))
- }
- }
-
fn on_removed(&self, cx: &App) {
self.report_editor_event(ReportEditorEvent::Closed, None, cx);
}
@@ -1009,24 +1005,18 @@ impl Item for Editor {
) {
self.workspace = Some((workspace.weak_handle(), workspace.database_id()));
if let Some(workspace) = &workspace.weak_handle().upgrade() {
- cx.subscribe(
- workspace,
- |editor, _, event: &workspace::Event, _cx| match event {
- workspace::Event::ModalOpened => {
- editor.mouse_context_menu.take();
- editor.inline_blame_popover.take();
- }
- _ => {}
- },
- )
+ cx.subscribe(workspace, |editor, _, event: &workspace::Event, _cx| {
+ if let workspace::Event::ModalOpened = event {
+ editor.mouse_context_menu.take();
+ editor.inline_blame_popover.take();
+ }
+ })
.detach();
}
}
fn to_item_events(event: &EditorEvent, mut f: impl FnMut(ItemEvent)) {
match event {
- EditorEvent::Closed => f(ItemEvent::CloseItem),
-
EditorEvent::Saved | EditorEvent::TitleChanged => {
f(ItemEvent::UpdateTab);
f(ItemEvent::UpdateBreadcrumbs);
@@ -1140,7 +1130,7 @@ impl SerializableItem for Editor {
// First create the empty buffer
let buffer = project
- .update(cx, |project, cx| project.create_buffer(cx))?
+ .update(cx, |project, cx| project.create_buffer(true, cx))?
.await?;
// Then set the text so that the dirty bit is set correctly
@@ -1248,7 +1238,7 @@ impl SerializableItem for Editor {
..
} => window.spawn(cx, async move |cx| {
let buffer = project
- .update(cx, |project, cx| project.create_buffer(cx))?
+ .update(cx, |project, cx| project.create_buffer(true, cx))?
.await?;
cx.update(|window, cx| {
@@ -1405,6 +1395,16 @@ impl ProjectItem for Editor {
editor
}
+
+ fn for_broken_project_item(
+ abs_path: &Path,
+ is_local: bool,
+ e: &anyhow::Error,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<InvalidBufferView> {
+ Some(InvalidBufferView::new(abs_path, is_local, e, window, cx))
+ }
}
fn clip_ranges<'a>(
@@ -181,7 +181,7 @@ pub(crate) fn generate_auto_close_edits(
*/
{
let tag_node_name_equals = |node: &Node, name: &str| {
- let is_empty = name.len() == 0;
+ let is_empty = name.is_empty();
if let Some(node_name) = node.named_child(TS_NODE_TAG_NAME_CHILD_INDEX) {
let range = node_name.byte_range();
return buffer.text_for_range(range).equals_str(name);
@@ -207,7 +207,7 @@ pub(crate) fn generate_auto_close_edits(
cur = descendant;
}
- assert!(ancestors.len() > 0);
+ assert!(!ancestors.is_empty());
let mut tree_root_node = open_tag;
@@ -507,7 +507,7 @@ pub(crate) fn handle_from(
{
let selections = this
- .read_with(cx, |this, _| this.selections.disjoint_anchors().clone())
+ .read_with(cx, |this, _| this.selections.disjoint_anchors())
.ok()?;
for selection in selections.iter() {
let Some(selection_buffer_offset_head) =
@@ -808,10 +808,7 @@ mod jsx_tag_autoclose_tests {
);
buf
});
- let buffer_c = cx.new(|cx| {
- let buf = language::Buffer::local("<span", cx);
- buf
- });
+ let buffer_c = cx.new(|cx| language::Buffer::local("<span", cx));
let buffer = cx.new(|cx| {
let mut buf = MultiBuffer::new(language::Capability::ReadWrite);
buf.push_excerpts(
@@ -72,7 +72,7 @@ pub(super) fn refresh_linked_ranges(
// Throw away selections spanning multiple buffers.
continue;
}
- if let Some(buffer) = end_position.buffer_id.and_then(|id| buffer.buffer(id)) {
+ if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) {
applicable_selections.push((
buffer,
start_position.text_anchor,
@@ -190,14 +190,16 @@ pub fn deploy_context_menu(
.all::<PointUtf16>(cx)
.into_iter()
.any(|s| !s.is_empty());
- let has_git_repo = anchor.buffer_id.is_some_and(|buffer_id| {
- project
- .read(cx)
- .git_store()
- .read(cx)
- .repository_and_path_for_buffer_id(buffer_id, cx)
- .is_some()
- });
+ let has_git_repo = buffer
+ .buffer_id_for_anchor(anchor)
+ .is_some_and(|buffer_id| {
+ project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .repository_and_path_for_buffer_id(buffer_id, cx)
+ .is_some()
+ });
let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx);
let run_to_cursor = window.is_action_available(&RunToCursor, cx);
@@ -4,7 +4,7 @@
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::{DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor};
use gpui::{Pixels, WindowTextSystem};
-use language::Point;
+use language::{CharClassifier, Point};
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
use serde::Deserialize;
use workspace::searchable::Direction;
@@ -289,12 +289,114 @@ pub fn previous_word_start_or_newline(map: &DisplaySnapshot, point: DisplayPoint
let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
- (classifier.kind(left) != classifier.kind(right) && !right.is_whitespace())
+ (classifier.kind(left) != classifier.kind(right) && !classifier.is_whitespace(right))
|| left == '\n'
|| right == '\n'
})
}
+/// Text movements are too greedy, making deletions too greedy too.
+/// Makes deletions more ergonomic by potentially reducing the deletion range based on its text contents:
+/// * whitespace sequences with length >= 2 stop the deletion after removal (despite movement jumping over the word behind the whitespaces)
+/// * brackets stop the deletion after removal (despite movement currently not accounting for these and jumping over)
+pub fn adjust_greedy_deletion(
+ map: &DisplaySnapshot,
+ delete_from: DisplayPoint,
+ delete_until: DisplayPoint,
+ ignore_brackets: bool,
+) -> DisplayPoint {
+ if delete_from == delete_until {
+ return delete_until;
+ }
+ let is_backward = delete_from > delete_until;
+ let delete_range = if is_backward {
+ map.display_point_to_point(delete_until, Bias::Left)
+ .to_offset(&map.buffer_snapshot)
+ ..map
+ .display_point_to_point(delete_from, Bias::Right)
+ .to_offset(&map.buffer_snapshot)
+ } else {
+ map.display_point_to_point(delete_from, Bias::Left)
+ .to_offset(&map.buffer_snapshot)
+ ..map
+ .display_point_to_point(delete_until, Bias::Right)
+ .to_offset(&map.buffer_snapshot)
+ };
+
+ let trimmed_delete_range = if ignore_brackets {
+ delete_range
+ } else {
+ let brackets_in_delete_range = map
+ .buffer_snapshot
+ .bracket_ranges(delete_range.clone())
+ .into_iter()
+ .flatten()
+ .flat_map(|(left_bracket, right_bracket)| {
+ [
+ left_bracket.start,
+ left_bracket.end,
+ right_bracket.start,
+ right_bracket.end,
+ ]
+ })
+ .filter(|&bracket| delete_range.start < bracket && bracket < delete_range.end);
+ let closest_bracket = if is_backward {
+ brackets_in_delete_range.max()
+ } else {
+ brackets_in_delete_range.min()
+ };
+
+ if is_backward {
+ closest_bracket.unwrap_or(delete_range.start)..delete_range.end
+ } else {
+ delete_range.start..closest_bracket.unwrap_or(delete_range.end)
+ }
+ };
+
+ let mut whitespace_sequences = Vec::new();
+ let mut current_offset = trimmed_delete_range.start;
+ let mut whitespace_sequence_length = 0;
+ let mut whitespace_sequence_start = 0;
+ for ch in map
+ .buffer_snapshot
+ .text_for_range(trimmed_delete_range.clone())
+ .flat_map(str::chars)
+ {
+ if ch.is_whitespace() {
+ if whitespace_sequence_length == 0 {
+ whitespace_sequence_start = current_offset;
+ }
+ whitespace_sequence_length += 1;
+ } else {
+ if whitespace_sequence_length >= 2 {
+ whitespace_sequences.push((whitespace_sequence_start, current_offset));
+ }
+ whitespace_sequence_start = 0;
+ whitespace_sequence_length = 0;
+ }
+ current_offset += ch.len_utf8();
+ }
+ if whitespace_sequence_length >= 2 {
+ whitespace_sequences.push((whitespace_sequence_start, current_offset));
+ }
+
+ let closest_whitespace_end = if is_backward {
+ whitespace_sequences.last().map(|&(start, _)| start)
+ } else {
+ whitespace_sequences.first().map(|&(_, end)| end)
+ };
+
+ closest_whitespace_end
+ .unwrap_or_else(|| {
+ if is_backward {
+ trimmed_delete_range.start
+ } else {
+ trimmed_delete_range.end
+ }
+ })
+ .to_display_point(map)
+}
+
/// Returns a position of the previous subword boundary, where a subword is defined as a run of
/// word characters of the same "subkind" - where subcharacter kinds are '_' character,
/// lowerspace characters and uppercase characters.
@@ -303,15 +405,18 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> Dis
let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
- let is_word_start =
- classifier.kind(left) != classifier.kind(right) && !right.is_whitespace();
- let is_subword_start = classifier.is_word('-') && left == '-' && right != '-'
- || left == '_' && right != '_'
- || left.is_lowercase() && right.is_uppercase();
- is_word_start || is_subword_start || left == '\n'
+ is_subword_start(left, right, &classifier) || left == '\n'
})
}
+pub fn is_subword_start(left: char, right: char, classifier: &CharClassifier) -> bool {
+ let is_word_start = classifier.kind(left) != classifier.kind(right) && !right.is_whitespace();
+ let is_subword_start = classifier.is_word('-') && left == '-' && right != '-'
+ || left == '_' && right != '_'
+ || left.is_lowercase() && right.is_uppercase();
+ is_word_start || is_subword_start
+}
+
/// Returns a position of the next word boundary, where a word character is defined as either
/// uppercase letter, lowercase letter, '_' character or language-specific word character (like '-' in CSS).
pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
@@ -361,15 +466,19 @@ pub fn next_subword_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPo
let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
find_boundary(map, point, FindRange::MultiLine, |left, right| {
- let is_word_end =
- (classifier.kind(left) != classifier.kind(right)) && !classifier.is_whitespace(left);
- let is_subword_end = classifier.is_word('-') && left != '-' && right == '-'
- || left != '_' && right == '_'
- || left.is_lowercase() && right.is_uppercase();
- is_word_end || is_subword_end || right == '\n'
+ is_subword_end(left, right, &classifier) || right == '\n'
})
}
+pub fn is_subword_end(left: char, right: char, classifier: &CharClassifier) -> bool {
+ let is_word_end =
+ (classifier.kind(left) != classifier.kind(right)) && !classifier.is_whitespace(left);
+ let is_subword_end = classifier.is_word('-') && left != '-' && right == '-'
+ || left != '_' && right == '_'
+ || left.is_lowercase() && right.is_uppercase();
+ is_word_end || is_subword_end
+}
+
/// Returns a position of the start of the current paragraph, where a paragraph
/// is defined as a run of non-blank lines.
pub fn start_of_paragraph(
@@ -1,13 +1,17 @@
use anyhow::Result;
-use db::sqlez::bindable::{Bind, Column, StaticColumnCount};
-use db::sqlez::statement::Statement;
+use db::{
+ query,
+ sqlez::{
+ bindable::{Bind, Column, StaticColumnCount},
+ domain::Domain,
+ statement::Statement,
+ },
+ sqlez_macros::sql,
+};
use fs::MTime;
use itertools::Itertools as _;
use std::path::PathBuf;
-use db::sqlez_macros::sql;
-use db::{define_connection, query};
-
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
#[derive(Clone, Debug, PartialEq, Default)]
@@ -83,7 +87,11 @@ impl Column for SerializedEditor {
}
}
-define_connection!(
+pub struct EditorDb(db::sqlez::thread_safe_connection::ThreadSafeConnection);
+
+impl Domain for EditorDb {
+ const NAME: &str = stringify!(EditorDb);
+
// Current schema shape using pseudo-rust syntax:
// editors(
// item_id: usize,
@@ -113,7 +121,8 @@ define_connection!(
// start: usize,
// end: usize,
// )
- pub static ref DB: EditorDb<WorkspaceDb> = &[
+
+ const MIGRATIONS: &[&str] = &[
sql! (
CREATE TABLE editors(
item_id INTEGER NOT NULL,
@@ -189,7 +198,9 @@ define_connection!(
) STRICT;
),
];
-);
+}
+
+db::static_connection!(DB, EditorDb, [WorkspaceDb]);
// https://www.sqlite.org/limits.html
// > <..> the maximum value of a host parameter number is SQLITE_MAX_VARIABLE_NUMBER,
@@ -241,24 +241,13 @@ impl ProposedChangesEditor {
event: &BufferEvent,
_cx: &mut Context<Self>,
) {
- match event {
- BufferEvent::Operation { .. } => {
- self.recalculate_diffs_tx
- .unbounded_send(RecalculateDiff {
- buffer,
- debounce: true,
- })
- .ok();
- }
- // BufferEvent::DiffBaseChanged => {
- // self.recalculate_diffs_tx
- // .unbounded_send(RecalculateDiff {
- // buffer,
- // debounce: false,
- // })
- // .ok();
- // }
- _ => (),
+ if let BufferEvent::Operation { .. } = event {
+ self.recalculate_diffs_tx
+ .unbounded_send(RecalculateDiff {
+ buffer,
+ debounce: true,
+ })
+ .ok();
}
}
}
@@ -442,7 +431,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
buffer: &Entity<Buffer>,
position: text::Anchor,
cx: &mut App,
- ) -> Option<Task<Vec<project::Hover>>> {
+ ) -> Option<Task<Option<Vec<project::Hover>>>> {
let buffer = self.to_base(buffer, &[position], cx)?;
self.0.hover(&buffer, position, cx)
}
@@ -501,7 +490,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
position: text::Anchor,
kind: crate::GotoDefinitionKind,
cx: &mut App,
- ) -> Option<Task<anyhow::Result<Vec<project::LocationLink>>>> {
+ ) -> Option<Task<anyhow::Result<Option<Vec<project::LocationLink>>>>> {
let buffer = self.to_base(buffer, &[position], cx)?;
self.0.definitions(&buffer, position, kind, cx)
}
@@ -26,6 +26,17 @@ fn is_rust_language(language: &Language) -> bool {
}
pub fn apply_related_actions(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) {
+ if editor.read(cx).project().is_some_and(|project| {
+ project
+ .read(cx)
+ .language_server_statuses(cx)
+ .any(|(_, status)| status.name == RUST_ANALYZER_NAME)
+ }) {
+ register_action(editor, window, cancel_flycheck_action);
+ register_action(editor, window, run_flycheck_action);
+ register_action(editor, window, clear_flycheck_action);
+ }
+
if editor
.read(cx)
.buffer()
@@ -38,9 +49,6 @@ pub fn apply_related_actions(editor: &Entity<Editor>, window: &mut Window, cx: &
register_action(editor, window, go_to_parent_module);
register_action(editor, window, expand_macro_recursively);
register_action(editor, window, open_docs);
- register_action(editor, window, cancel_flycheck_action);
- register_action(editor, window, run_flycheck_action);
- register_action(editor, window, clear_flycheck_action);
}
}
@@ -192,7 +200,7 @@ pub fn expand_macro_recursively(
}
let buffer = project
- .update(cx, |project, cx| project.create_buffer(cx))?
+ .update(cx, |project, cx| project.create_buffer(false, cx))?
.await?;
workspace.update_in(cx, |workspace, window, cx| {
buffer.update(cx, |buffer, cx| {
@@ -309,7 +317,7 @@ fn cancel_flycheck_action(
let Some(project) = &editor.project else {
return;
};
- let Some(buffer_id) = editor
+ let buffer_id = editor
.selections
.disjoint_anchors()
.iter()
@@ -321,10 +329,7 @@ fn cancel_flycheck_action(
.read(cx)
.entry_id(cx)?;
project.path_for_entry(entry_id, cx)
- })
- else {
- return;
- };
+ });
cancel_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx);
}
@@ -337,7 +342,7 @@ fn run_flycheck_action(
let Some(project) = &editor.project else {
return;
};
- let Some(buffer_id) = editor
+ let buffer_id = editor
.selections
.disjoint_anchors()
.iter()
@@ -349,10 +354,7 @@ fn run_flycheck_action(
.read(cx)
.entry_id(cx)?;
project.path_for_entry(entry_id, cx)
- })
- else {
- return;
- };
+ });
run_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx);
}
@@ -365,7 +367,7 @@ fn clear_flycheck_action(
let Some(project) = &editor.project else {
return;
};
- let Some(buffer_id) = editor
+ let buffer_id = editor
.selections
.disjoint_anchors()
.iter()
@@ -377,9 +379,6 @@ fn clear_flycheck_action(
.read(cx)
.entry_id(cx)?;
project.path_for_entry(entry_id, cx)
- })
- else {
- return;
- };
+ });
clear_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx);
}
@@ -675,7 +675,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- if matches!(self.mode, EditorMode::SingleLine { .. }) {
+ if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate();
return;
}
@@ -16,7 +16,7 @@ impl Editor {
return;
}
- if matches!(self.mode, EditorMode::SingleLine { .. }) {
+ if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate();
return;
}
@@ -15,7 +15,7 @@ impl ScrollDirection {
}
}
-#[derive(Debug, Clone, PartialEq, Deserialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Deserialize)]
pub enum ScrollAmount {
// Scroll N lines (positive is towards the end of the document)
Line(f32),
@@ -67,10 +67,7 @@ impl ScrollAmount {
}
pub fn is_full_page(&self) -> bool {
- match self {
- ScrollAmount::Page(count) if count.abs() == 1.0 => true,
- _ => false,
- }
+ matches!(self, ScrollAmount::Page(count) if count.abs() == 1.0)
}
pub fn direction(&self) -> ScrollDirection {
@@ -119,8 +119,8 @@ impl SelectionsCollection {
cx: &mut App,
) -> Option<Selection<D>> {
let map = self.display_map(cx);
- let selection = resolve_selections(self.pending_anchor().as_ref(), &map).next();
- selection
+
+ resolve_selections(self.pending_anchor().as_ref(), &map).next()
}
pub(crate) fn pending_mode(&self) -> Option<SelectMode> {
@@ -276,18 +276,18 @@ impl SelectionsCollection {
cx: &mut App,
) -> Selection<D> {
let map = self.display_map(cx);
- let selection = resolve_selections([self.newest_anchor()], &map)
+
+ resolve_selections([self.newest_anchor()], &map)
.next()
- .unwrap();
- selection
+ .unwrap()
}
pub fn newest_display(&self, cx: &mut App) -> Selection<DisplayPoint> {
let map = self.display_map(cx);
- let selection = resolve_selections_display([self.newest_anchor()], &map)
+
+ resolve_selections_display([self.newest_anchor()], &map)
.next()
- .unwrap();
- selection
+ .unwrap()
}
pub fn oldest_anchor(&self) -> &Selection<Anchor> {
@@ -303,10 +303,10 @@ impl SelectionsCollection {
cx: &mut App,
) -> Selection<D> {
let map = self.display_map(cx);
- let selection = resolve_selections([self.oldest_anchor()], &map)
+
+ resolve_selections([self.oldest_anchor()], &map)
.next()
- .unwrap();
- selection
+ .unwrap()
}
pub fn first_anchor(&self) -> Selection<Anchor> {
@@ -182,7 +182,9 @@ impl Editor {
let signature_help = task.await;
editor
.update(cx, |editor, cx| {
- let Some(mut signature_help) = signature_help.into_iter().next() else {
+ let Some(mut signature_help) =
+ signature_help.unwrap_or_default().into_iter().next()
+ else {
editor
.signature_help_state
.hide(SignatureHelpHiddenBy::AutoClose);
@@ -230,26 +230,23 @@ pub fn editor_content_with_blocks(editor: &Entity<Editor>, cx: &mut VisualTestCo
lines[row as usize].push_str("§ -----");
}
}
- Block::ExcerptBoundary {
- excerpt,
- height,
- starts_new_buffer,
- } => {
- if starts_new_buffer {
- lines[row.0 as usize].push_str(&cx.update(|_, cx| {
- format!(
- "§ {}",
- excerpt
- .buffer
- .file()
- .unwrap()
- .file_name(cx)
- .to_string_lossy()
- )
- }));
- } else {
- lines[row.0 as usize].push_str("§ -----")
+ Block::ExcerptBoundary { height, .. } => {
+ for row in row.0..row.0 + height {
+ lines[row as usize].push_str("§ -----");
}
+ }
+ Block::BufferHeader { excerpt, height } => {
+ lines[row.0 as usize].push_str(&cx.update(|_, cx| {
+ format!(
+ "§ {}",
+ excerpt
+ .buffer
+ .file()
+ .unwrap()
+ .file_name(cx)
+ .to_string_lossy()
+ )
+ }));
for row in row.0 + 1..row.0 + height {
lines[row as usize].push_str("§ -----");
}
@@ -29,7 +29,7 @@ pub struct EditorLspTestContext {
pub cx: EditorTestContext,
pub lsp: lsp::FakeLanguageServer,
pub workspace: Entity<Workspace>,
- pub buffer_lsp_url: lsp::Url,
+ pub buffer_lsp_url: lsp::Uri,
}
pub(crate) fn rust_lang() -> Arc<Language> {
@@ -189,7 +189,7 @@ impl EditorLspTestContext {
},
lsp,
workspace,
- buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
+ buffer_lsp_url: lsp::Uri::from_file_path(root.join("dir").join(file_name)).unwrap(),
}
}
@@ -300,6 +300,7 @@ impl EditorLspTestContext {
self.to_lsp_range(ranges[0].clone())
}
+ #[expect(clippy::wrong_self_convention, reason = "This is test code")]
pub fn to_lsp_range(&mut self, range: Range<usize>) -> lsp::Range {
let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx));
let start_point = range.start.to_point(&snapshot.buffer_snapshot);
@@ -326,6 +327,7 @@ impl EditorLspTestContext {
})
}
+ #[expect(clippy::wrong_self_convention, reason = "This is test code")]
pub fn to_lsp(&mut self, offset: usize) -> lsp::Position {
let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx));
let point = offset.to_point(&snapshot.buffer_snapshot);
@@ -356,7 +358,7 @@ impl EditorLspTestContext {
where
T: 'static + request::Request,
T::Params: 'static + Send,
- F: 'static + Send + FnMut(lsp::Url, T::Params, gpui::AsyncApp) -> Fut,
+ F: 'static + Send + FnMut(lsp::Uri, T::Params, gpui::AsyncApp) -> Fut,
Fut: 'static + Future<Output = Result<T::Result>>,
{
let url = self.buffer_lsp_url.clone();
@@ -119,13 +119,7 @@ impl EditorTestContext {
for excerpt in excerpts.into_iter() {
let (text, ranges) = marked_text_ranges(excerpt, false);
let buffer = cx.new(|cx| Buffer::local(text, cx));
- multibuffer.push_excerpts(
- buffer,
- ranges
- .into_iter()
- .map(|range| ExcerptRange::new(range.clone())),
- cx,
- );
+ multibuffer.push_excerpts(buffer, ranges.into_iter().map(ExcerptRange::new), cx);
}
multibuffer
});
@@ -426,7 +420,7 @@ impl EditorTestContext {
if expected_text == "[FOLDED]\n" {
assert!(is_folded, "excerpt {} should be folded", ix);
let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id);
- if expected_selections.len() > 0 {
+ if !expected_selections.is_empty() {
assert!(
is_selected,
"excerpt {ix} should be selected. got {:?}",
@@ -103,7 +103,7 @@ fn main() {
let languages: HashSet<String> = args.languages.into_iter().collect();
let http_client = Arc::new(ReqwestClient::new());
- let app = Application::headless().with_http_client(http_client.clone());
+ let app = Application::headless().with_http_client(http_client);
let all_threads = examples::all(&examples_dir);
app.run(move |cx| {
@@ -416,11 +416,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
language::init(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
- language_extension::init(
- LspAccess::Noop,
- extension_host_proxy.clone(),
- languages.clone(),
- );
+ language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), node_runtime.clone(), cx);
@@ -530,7 +526,7 @@ async fn judge_example(
example_name = example.name.clone(),
example_repetition = example.repetition,
diff_evaluation = judge_output.diff.clone(),
- thread_evaluation = judge_output.thread.clone(),
+ thread_evaluation = judge_output.thread,
tool_metrics = run_output.tool_metrics,
response_count = run_output.response_count,
token_usage = run_output.token_usage,
@@ -710,7 +706,7 @@ fn print_report(
println!("Average thread score: {average_thread_score}%");
}
- println!("");
+ println!();
print_h2("CUMULATIVE TOOL METRICS");
println!("{}", cumulative_tool_metrics);
@@ -335,7 +335,7 @@ impl ExampleContext {
for message in thread.messages().skip(message_count_before) {
messages.push(Message {
_role: message.role,
- text: message.to_string(),
+ text: message.to_message_content(),
tool_use: thread
.tool_uses_for_message(message.id, cx)
.into_iter()
@@ -90,11 +90,8 @@ impl ExampleInstance {
worktrees_dir: &Path,
repetition: usize,
) -> Self {
- let name = thread.meta().name.to_string();
- let run_directory = run_dir
- .join(&name)
- .join(repetition.to_string())
- .to_path_buf();
+ let name = thread.meta().name;
+ let run_directory = run_dir.join(&name).join(repetition.to_string());
let repo_path = repo_path_for_url(repos_dir, &thread.meta().url);
@@ -678,8 +675,8 @@ pub fn wait_for_lang_server(
[
cx.subscribe(&lsp_store, {
let log_prefix = log_prefix.clone();
- move |_, event, _| match event {
- project::LspStoreEvent::LanguageServerUpdate {
+ move |_, event, _| {
+ if let project::LspStoreEvent::LanguageServerUpdate {
message:
client::proto::update_language_server::Variant::WorkProgress(
LspWorkProgress {
@@ -688,8 +685,10 @@ pub fn wait_for_lang_server(
},
),
..
- } => println!("{}⟲ {message}", log_prefix),
- _ => {}
+ } = event
+ {
+ println!("{}⟲ {message}", log_prefix)
+ }
}
}),
cx.subscribe(project, {
@@ -770,7 +769,7 @@ pub async fn query_lsp_diagnostics(
}
fn parse_assertion_result(response: &str) -> Result<RanAssertionResult> {
- let analysis = get_tag("analysis", response)?.to_string();
+ let analysis = get_tag("analysis", response)?;
let passed = match get_tag("passed", response)?.to_lowercase().as_str() {
"true" => true,
"false" => false,
@@ -914,9 +913,9 @@ impl RequestMarkdown {
for tool in &request.tools {
write!(&mut tools, "# {}\n\n", tool.name).unwrap();
write!(&mut tools, "{}\n\n", tool.description).unwrap();
- write!(
+ writeln!(
&mut tools,
- "{}\n",
+ "{}",
MarkdownCodeBlock {
tag: "json",
text: &format!("{:#}", tool.input_schema)
@@ -1190,7 +1189,7 @@ mod test {
output.analysis,
Some("The model did a good job but there were still compilations errors.".into())
);
- assert_eq!(output.passed, true);
+ assert!(output.passed);
let response = r#"
Text around ignored
@@ -1210,6 +1209,6 @@ mod test {
output.analysis,
Some("Failed to compile:\n- Error 1\n- Error 2".into())
);
- assert_eq!(output.passed, false);
+ assert!(!output.passed);
}
}
@@ -484,14 +484,10 @@ impl ExtensionBuilder {
_ => {}
}
- match &payload {
- CustomSection(c) => {
- if strip_custom_section(c.name()) {
- continue;
- }
- }
-
- _ => {}
+ if let CustomSection(c) = &payload
+ && strip_custom_section(c.name())
+ {
+ continue;
}
if let Some((id, range)) = payload.as_section() {
RawSection {
@@ -232,10 +232,10 @@ pub trait Extension: Send + Sync {
///
/// To work through a real-world example, take a `cargo run` task and a hypothetical `cargo` locator:
/// 1. We may need to modify the task; in this case, it is problematic that `cargo run` spawns a binary. We should turn `cargo run` into a debug scenario with
- /// `cargo build` task. This is the decision we make at `dap_locator_create_scenario` scope.
+ /// `cargo build` task. This is the decision we make at `dap_locator_create_scenario` scope.
/// 2. Then, after the build task finishes, we will run `run_dap_locator` of the locator that produced the build task to find the program to be debugged. This function
- /// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user
- /// found the artifact path by themselves.
+ /// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user
+ /// found the artifact path by themselves.
///
/// Note that you're not obliged to use build tasks with locators. Specifically, it is sufficient to provide a debug configuration directly in the return value of
/// `dap_locator_create_scenario` if you're able to do that. Make sure to not fill out `build` field in that case, as that will prevent Zed from running second phase of resolution in such case.
@@ -145,7 +145,7 @@ mod tests {
command: "*".to_string(),
args: vec!["**".to_string()],
})],
- manifest.clone(),
+ manifest,
);
assert!(granter.grant_exec("ls", &["-la"]).is_ok());
}
@@ -43,7 +43,7 @@ use language::{
use node_runtime::NodeRuntime;
use project::ContextProviderWithTasks;
use release_channel::ReleaseChannel;
-use remote::SshRemoteClient;
+use remote::{RemoteClient, RemoteConnectionOptions};
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use settings::Settings;
@@ -117,7 +117,7 @@ pub struct ExtensionStore {
pub wasm_host: Arc<WasmHost>,
pub wasm_extensions: Vec<(Arc<ExtensionManifest>, WasmExtension)>,
pub tasks: Vec<Task<()>>,
- pub ssh_clients: HashMap<String, WeakEntity<SshRemoteClient>>,
+ pub remote_clients: HashMap<RemoteConnectionOptions, WeakEntity<RemoteClient>>,
pub ssh_registered_tx: UnboundedSender<()>,
}
@@ -270,7 +270,7 @@ impl ExtensionStore {
reload_tx,
tasks: Vec::new(),
- ssh_clients: HashMap::default(),
+ remote_clients: HashMap::default(),
ssh_registered_tx: connection_registered_tx,
};
@@ -1175,16 +1175,16 @@ impl ExtensionStore {
}
}
- for (server_id, _) in &extension.manifest.context_servers {
+ for server_id in extension.manifest.context_servers.keys() {
self.proxy.unregister_context_server(server_id.clone(), cx);
}
- for (adapter, _) in &extension.manifest.debug_adapters {
+ for adapter in extension.manifest.debug_adapters.keys() {
self.proxy.unregister_debug_adapter(adapter.clone());
}
- for (locator, _) in &extension.manifest.debug_locators {
+ for locator in extension.manifest.debug_locators.keys() {
self.proxy.unregister_debug_locator(locator.clone());
}
- for (command_name, _) in &extension.manifest.slash_commands {
+ for command_name in extension.manifest.slash_commands.keys() {
self.proxy.unregister_slash_command(command_name.clone());
}
}
@@ -1386,7 +1386,7 @@ impl ExtensionStore {
);
}
- for (id, _context_server_entry) in &manifest.context_servers {
+ for id in manifest.context_servers.keys() {
this.proxy
.register_context_server(extension.clone(), id.clone(), cx);
}
@@ -1675,9 +1675,8 @@ impl ExtensionStore {
let schema_path = &extension::build_debug_adapter_schema_path(adapter_name, meta);
if fs.is_file(&src_dir.join(schema_path)).await {
- match schema_path.parent() {
- Some(parent) => fs.create_dir(&tmp_dir.join(parent)).await?,
- None => {}
+ if let Some(parent) = schema_path.parent() {
+ fs.create_dir(&tmp_dir.join(parent)).await?
}
fs.copy_file(
&src_dir.join(schema_path),
@@ -1694,7 +1693,7 @@ impl ExtensionStore {
async fn sync_extensions_over_ssh(
this: &WeakEntity<Self>,
- client: WeakEntity<SshRemoteClient>,
+ client: WeakEntity<RemoteClient>,
cx: &mut AsyncApp,
) -> Result<()> {
let extensions = this.update(cx, |this, _cx| {
@@ -1766,8 +1765,8 @@ impl ExtensionStore {
pub async fn update_ssh_clients(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
let clients = this.update(cx, |this, _cx| {
- this.ssh_clients.retain(|_k, v| v.upgrade().is_some());
- this.ssh_clients.values().cloned().collect::<Vec<_>>()
+ this.remote_clients.retain(|_k, v| v.upgrade().is_some());
+ this.remote_clients.values().cloned().collect::<Vec<_>>()
})?;
for client in clients {
@@ -1779,17 +1778,16 @@ impl ExtensionStore {
anyhow::Ok(())
}
- pub fn register_ssh_client(&mut self, client: Entity<SshRemoteClient>, cx: &mut Context<Self>) {
- let connection_options = client.read(cx).connection_options();
- let ssh_url = connection_options.ssh_url();
+ pub fn register_remote_client(&mut self, client: Entity<RemoteClient>, cx: &mut Context<Self>) {
+ let options = client.read(cx).connection_options();
- if let Some(existing_client) = self.ssh_clients.get(&ssh_url)
+ if let Some(existing_client) = self.remote_clients.get(&options)
&& existing_client.upgrade().is_some()
{
return;
}
- self.ssh_clients.insert(ssh_url, client.downgrade());
+ self.remote_clients.insert(options, client.downgrade());
self.ssh_registered_tx.unbounded_send(()).ok();
}
}
@@ -3,10 +3,11 @@ use collections::HashMap;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use std::sync::Arc;
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
+#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct ExtensionSettings {
/// The extensions that should be automatically installed by Zed.
///
@@ -38,8 +39,6 @@ impl ExtensionSettings {
}
impl Settings for ExtensionSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut App) -> Result<Self> {
@@ -532,7 +532,7 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine {
// `Future::poll`.
const EPOCH_INTERVAL: Duration = Duration::from_millis(100);
let mut timer = Timer::interval(EPOCH_INTERVAL);
- while let Some(_) = timer.next().await {
+ while (timer.next().await).is_some() {
// Exit the loop and thread once the engine is dropped.
let Some(engine) = engine_ref.upgrade() else {
break;
@@ -61,7 +61,6 @@ impl RenderOnce for FeatureUpsell {
.icon_size(IconSize::Small)
.icon_position(IconPosition::End)
.on_click({
- let docs_url = docs_url.clone();
move |_event, _window, cx| {
telemetry::event!(
"Documentation Viewed",
@@ -694,7 +694,7 @@ impl ExtensionsPage {
cx.open_url(&repository_url);
}
}))
- .tooltip(Tooltip::text(repository_url.clone()))
+ .tooltip(Tooltip::text(repository_url))
})),
)
}
@@ -827,7 +827,7 @@ impl ExtensionsPage {
cx.open_url(&repository_url);
}
}))
- .tooltip(Tooltip::text(repository_url.clone())),
+ .tooltip(Tooltip::text(repository_url)),
)
.child(
PopoverMenu::new(SharedString::from(format!(
@@ -863,7 +863,7 @@ impl ExtensionsPage {
window: &mut Window,
cx: &mut App,
) -> Entity<ContextMenu> {
- let context_menu = ContextMenu::build(window, cx, |context_menu, window, _| {
+ ContextMenu::build(window, cx, |context_menu, window, _| {
context_menu
.entry(
"Install Another Version...",
@@ -887,9 +887,7 @@ impl ExtensionsPage {
cx.write_to_clipboard(ClipboardItem::new_string(authors.join(", ")));
}
})
- });
-
- context_menu
+ })
}
fn show_extension_version_list(
@@ -1347,7 +1345,7 @@ impl ExtensionsPage {
this.update_settings::<VimModeSetting>(
selection,
cx,
- |setting, value| *setting = Some(value),
+ |setting, value| setting.vim_mode = Some(value),
);
}),
)),
@@ -89,16 +89,29 @@ impl FeatureFlag for JjUiFeatureFlag {
const NAME: &'static str = "jj-ui";
}
-pub struct AcpFeatureFlag;
+pub struct GeminiAndNativeFeatureFlag;
-impl FeatureFlag for AcpFeatureFlag {
- const NAME: &'static str = "acp";
+impl FeatureFlag for GeminiAndNativeFeatureFlag {
+ // This was previously called "acp".
+ //
+ // We renamed it because existing builds used it to enable the Claude Code
+ // integration too, and we'd like to turn Gemini/Native on in new builds
+ // without enabling Claude Code in old builds.
+ const NAME: &'static str = "gemini-and-native";
+
+ fn enabled_for_all() -> bool {
+ true
+ }
}
pub struct ClaudeCodeFeatureFlag;
impl FeatureFlag for ClaudeCodeFeatureFlag {
const NAME: &'static str = "claude-code";
+
+ fn enabled_for_all() -> bool {
+ true
+ }
}
pub trait FeatureFlagViewExt<V: 'static> {
@@ -196,7 +209,7 @@ impl FeatureFlagAppExt for App {
fn has_flag<T: FeatureFlag>(&self) -> bool {
self.try_global::<FeatureFlags>()
.map(|flags| flags.has_flag::<T>())
- .unwrap_or(false)
+ .unwrap_or(T::enabled_for_all())
}
fn is_staff(&self) -> bool {
@@ -15,13 +15,9 @@ path = "src/feedback.rs"
test-support = []
[dependencies]
-client.workspace = true
gpui.workspace = true
-human_bytes = "0.4.1"
menu.workspace = true
-release_channel.workspace = true
-serde.workspace = true
-sysinfo.workspace = true
+system_specs.workspace = true
ui.workspace = true
urlencoding.workspace = true
util.workspace = true
@@ -1,18 +1,14 @@
use gpui::{App, ClipboardItem, PromptLevel, actions};
-use system_specs::SystemSpecs;
+use system_specs::{CopySystemSpecsIntoClipboard, SystemSpecs};
use util::ResultExt;
use workspace::Workspace;
use zed_actions::feedback::FileBugReport;
pub mod feedback_modal;
-pub mod system_specs;
-
actions!(
zed,
[
- /// Copies system specifications to the clipboard for bug reports.
- CopySystemSpecsIntoClipboard,
/// Opens email client to send feedback to Zed support.
EmailZed,
/// Opens the Zed repository on GitHub.
@@ -267,10 +267,9 @@ impl FileFinder {
) {
self.picker.update(cx, |picker, cx| {
picker.delegate.include_ignored = match picker.delegate.include_ignored {
- Some(true) => match FileFinderSettings::get_global(cx).include_ignored {
- Some(_) => Some(false),
- None => None,
- },
+ Some(true) => FileFinderSettings::get_global(cx)
+ .include_ignored
+ .map(|_| false),
Some(false) => Some(true),
None => Some(true),
};
@@ -1382,7 +1381,7 @@ impl PickerDelegate for FileFinderDelegate {
project
.worktree_for_id(history_item.project.worktree_id, cx)
.is_some()
- || ((project.is_local() || project.is_via_ssh())
+ || ((project.is_local() || project.is_via_remote_server())
&& history_item.absolute.is_some())
}),
self.currently_opened_path.as_ref(),
@@ -1402,13 +1401,16 @@ impl PickerDelegate for FileFinderDelegate {
#[cfg(windows)]
let raw_query = raw_query.trim().to_owned().replace("/", "\\");
#[cfg(not(windows))]
- let raw_query = raw_query.trim().to_owned();
+ let raw_query = raw_query.trim();
- let file_query_end = if path_position.path.to_str().unwrap_or(&raw_query) == raw_query {
+ let raw_query = raw_query.trim_end_matches(':').to_owned();
+ let path = path_position.path.to_str();
+ let path_trimmed = path.unwrap_or(&raw_query).trim_end_matches(':');
+ let file_query_end = if path_trimmed == raw_query {
None
} else {
// Safe to unwrap as we won't get here when the unwrap in if fails
- Some(path_position.path.to_str().unwrap().len())
+ Some(path.unwrap().len())
};
let query = FileSearchQuery {
@@ -1750,7 +1752,7 @@ impl PickerDelegate for FileFinderDelegate {
Some(ContextMenu::build(window, cx, {
let focus_handle = focus_handle.clone();
move |menu, _, _| {
- menu.context(focus_handle.clone())
+ menu.context(focus_handle)
.action(
"Split Left",
pane::SplitLeft.boxed_clone(),
@@ -1,7 +1,7 @@
use anyhow::Result;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
pub struct FileFinderSettings {
@@ -11,7 +11,8 @@ pub struct FileFinderSettings {
pub include_ignored: Option<bool>,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "file_finder")]
pub struct FileFinderSettingsContent {
/// Whether to show file icons in the file finder.
///
@@ -42,8 +43,6 @@ pub struct FileFinderSettingsContent {
}
impl Settings for FileFinderSettings {
- const KEY: Option<&'static str> = Some("file_finder");
-
type FileContent = FileFinderSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut gpui::App) -> Result<Self> {
@@ -218,6 +218,7 @@ async fn test_matching_paths(cx: &mut TestAppContext) {
" ndan ",
" band ",
"a bandana",
+ "bandana:",
] {
picker
.update_in(cx, |picker, window, cx| {
@@ -252,6 +253,53 @@ async fn test_matching_paths(cx: &mut TestAppContext) {
}
}
+#[gpui::test]
+async fn test_matching_paths_with_colon(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/root"),
+ json!({
+ "a": {
+ "foo:bar.rs": "",
+ "foo.rs": "",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
+
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ // 'foo:' matches both files
+ cx.simulate_input("foo:");
+ picker.update(cx, |picker, _| {
+ assert_eq!(picker.delegate.matches.len(), 3);
+ assert_match_at_position(picker, 0, "foo.rs");
+ assert_match_at_position(picker, 1, "foo:bar.rs");
+ });
+
+ // 'foo:b' matches one of the files
+ cx.simulate_input("b");
+ picker.update(cx, |picker, _| {
+ assert_eq!(picker.delegate.matches.len(), 2);
+ assert_match_at_position(picker, 0, "foo:bar.rs");
+ });
+
+ cx.dispatch_action(editor::actions::Backspace);
+
+ // 'foo:1' matches both files, specifying which row to jump to
+ cx.simulate_input("1");
+ picker.update(cx, |picker, _| {
+ assert_eq!(picker.delegate.matches.len(), 3);
+ assert_match_at_position(picker, 0, "foo.rs");
+ assert_match_at_position(picker, 1, "foo:bar.rs");
+ });
+}
+
#[gpui::test]
async fn test_unicode_paths(cx: &mut TestAppContext) {
let app_state = init_test(cx);
@@ -1,7 +1,7 @@
use crate::file_finder_settings::FileFinderSettings;
use file_icons::FileIcons;
use futures::channel::oneshot;
-use fuzzy::{StringMatch, StringMatchCandidate};
+use fuzzy::{CharBag, StringMatch, StringMatchCandidate};
use gpui::{HighlightStyle, StyledText, Task};
use picker::{Picker, PickerDelegate};
use project::{DirectoryItem, DirectoryLister};
@@ -23,7 +23,6 @@ use workspace::Workspace;
pub(crate) struct OpenPathPrompt;
-#[derive(Debug)]
pub struct OpenPathDelegate {
tx: Option<oneshot::Sender<Option<Vec<PathBuf>>>>,
lister: DirectoryLister,
@@ -35,6 +34,9 @@ pub struct OpenPathDelegate {
prompt_root: String,
path_style: PathStyle,
replace_prompt: Task<()>,
+ render_footer:
+ Arc<dyn Fn(&mut Window, &mut Context<Picker<Self>>) -> Option<AnyElement> + 'static>,
+ hidden_entries: bool,
}
impl OpenPathDelegate {
@@ -60,9 +62,25 @@ impl OpenPathDelegate {
},
path_style,
replace_prompt: Task::ready(()),
+ render_footer: Arc::new(|_, _| None),
+ hidden_entries: false,
}
}
+ pub fn with_footer(
+ mut self,
+ footer: Arc<
+ dyn Fn(&mut Window, &mut Context<Picker<Self>>) -> Option<AnyElement> + 'static,
+ >,
+ ) -> Self {
+ self.render_footer = footer;
+ self
+ }
+
+ pub fn show_hidden(mut self) -> Self {
+ self.hidden_entries = true;
+ self
+ }
fn get_entry(&self, selected_match_index: usize) -> Option<CandidateInfo> {
match &self.directory_state {
DirectoryState::List { entries, .. } => {
@@ -112,7 +130,7 @@ impl OpenPathDelegate {
entries,
..
} => user_input
- .into_iter()
+ .iter()
.filter(|user_input| !user_input.exists || !user_input.is_dir)
.map(|user_input| user_input.file.string.clone())
.chain(self.string_matches.iter().filter_map(|string_match| {
@@ -125,6 +143,13 @@ impl OpenPathDelegate {
DirectoryState::None { .. } => Vec::new(),
}
}
+
+ fn current_dir(&self) -> &'static str {
+ match self.path_style {
+ PathStyle::Posix => "./",
+ PathStyle::Windows => ".\\",
+ }
+ }
}
#[derive(Debug)]
@@ -233,6 +258,7 @@ impl PickerDelegate for OpenPathDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let lister = &self.lister;
+ let input_is_empty = query.is_empty();
let (dir, suffix) = get_dir_and_suffix(query, self.path_style);
let query = match &self.directory_state {
@@ -261,8 +287,9 @@ impl PickerDelegate for OpenPathDelegate {
self.cancel_flag.store(true, atomic::Ordering::Release);
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
-
+ let hidden_entries = self.hidden_entries;
let parent_path_is_root = self.prompt_root == dir;
+ let current_dir = self.current_dir();
cx.spawn_in(window, async move |this, cx| {
if let Some(query) = query {
let paths = query.await;
@@ -353,10 +380,38 @@ impl PickerDelegate for OpenPathDelegate {
return;
};
- if !suffix.starts_with('.') {
- new_entries.retain(|entry| !entry.path.string.starts_with('.'));
+ let mut max_id = 0;
+ if !suffix.starts_with('.') && !hidden_entries {
+ new_entries.retain(|entry| {
+ max_id = max_id.max(entry.path.id);
+ !entry.path.string.starts_with('.')
+ });
}
+
if suffix.is_empty() {
+ let should_prepend_with_current_dir = this
+ .read_with(cx, |picker, _| {
+ !input_is_empty
+ && !matches!(
+ picker.delegate.directory_state,
+ DirectoryState::Create { .. }
+ )
+ })
+ .unwrap_or(false);
+ if should_prepend_with_current_dir {
+ new_entries.insert(
+ 0,
+ CandidateInfo {
+ path: StringMatchCandidate {
+ id: max_id + 1,
+ string: current_dir.to_string(),
+ char_bag: CharBag::from(current_dir),
+ },
+ is_dir: true,
+ },
+ );
+ }
+
this.update(cx, |this, cx| {
this.delegate.selected_index = 0;
this.delegate.string_matches = new_entries
@@ -485,6 +540,10 @@ impl PickerDelegate for OpenPathDelegate {
_: &mut Context<Picker<Self>>,
) -> Option<String> {
let candidate = self.get_entry(self.selected_index)?;
+ if candidate.path.string.is_empty() || candidate.path.string == self.current_dir() {
+ return None;
+ }
+
let path_style = self.path_style;
Some(
maybe!({
@@ -629,12 +688,18 @@ impl PickerDelegate for OpenPathDelegate {
DirectoryState::None { .. } => Vec::new(),
};
+ let is_current_dir_candidate = candidate.path.string == self.current_dir();
+
let file_icon = maybe!({
if !settings.file_icons {
return None;
}
let icon = if candidate.is_dir {
- FileIcons::get_folder_icon(false, cx)?
+ if is_current_dir_candidate {
+ return Some(Icon::new(IconName::ReplyArrowRight).color(Color::Muted));
+ } else {
+ FileIcons::get_folder_icon(false, cx)?
+ }
} else {
let path = path::Path::new(&candidate.path.string);
FileIcons::get_icon(path, cx)?
@@ -652,8 +717,10 @@ impl PickerDelegate for OpenPathDelegate {
.child(HighlightedLabel::new(
if parent_path == &self.prompt_root {
format!("{}{}", self.prompt_root, candidate.path.string)
+ } else if is_current_dir_candidate {
+ "open this directory".to_string()
} else {
- candidate.path.string.clone()
+ candidate.path.string
},
match_positions,
)),
@@ -684,7 +751,7 @@ impl PickerDelegate for OpenPathDelegate {
};
StyledText::new(label)
.with_default_highlights(
- &window.text_style().clone(),
+ &window.text_style(),
vec![(
delta..delta + label_len,
HighlightStyle::color(Color::Conflict.color(cx)),
@@ -694,7 +761,7 @@ impl PickerDelegate for OpenPathDelegate {
} else {
StyledText::new(format!("{label} (create)"))
.with_default_highlights(
- &window.text_style().clone(),
+ &window.text_style(),
vec![(
delta..delta + label_len,
HighlightStyle::color(Color::Created.color(cx)),
@@ -732,6 +799,14 @@ impl PickerDelegate for OpenPathDelegate {
}
}
+ fn render_footer(
+ &self,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Option<AnyElement> {
+ (self.render_footer)(window, cx)
+ }
+
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
Some(match &self.directory_state {
DirectoryState::Create { .. } => SharedString::from("Type a path…"),
@@ -747,6 +822,17 @@ impl PickerDelegate for OpenPathDelegate {
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
Arc::from(format!("[directory{MAIN_SEPARATOR_STR}]filename.ext"))
}
+
+ fn separators_after_indices(&self) -> Vec<usize> {
+ let Some(m) = self.string_matches.first() else {
+ return Vec::new();
+ };
+ if m.string == self.current_dir() {
+ vec![0]
+ } else {
+ Vec::new()
+ }
+ }
}
fn path_candidates(
@@ -43,12 +43,17 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) {
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["root"]);
+ #[cfg(not(windows))]
+ let expected_separator = "./";
+ #[cfg(windows)]
+ let expected_separator = ".\\";
+
// If the query ends with a slash, the picker should show the contents of the directory.
let query = path!("/root/");
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["a1", "a2", "a3", "dir1", "dir2"]
+ vec![expected_separator, "a1", "a2", "a3", "dir1", "dir2"]
);
// Show candidates for the query "a".
@@ -72,7 +77,7 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) {
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["c", "d1", "d2", "d3", "dir3", "dir4"]
+ vec![expected_separator, "c", "d1", "d2", "d3", "dir3", "dir4"]
);
// Show candidates for the query "d".
@@ -116,71 +121,86 @@ async fn test_open_path_prompt_completion(cx: &mut TestAppContext) {
// Confirm completion for the query "/root", since it's a directory, it should add a trailing slash.
let query = path!("/root");
insert_query(query, &picker, cx).await;
- assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/"));
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx).unwrap(),
+ path!("/root/")
+ );
// Confirm completion for the query "/root/", selecting the first candidate "a", since it's a file, it should not add a trailing slash.
let query = path!("/root/");
insert_query(query, &picker, cx).await;
- assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/a"));
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx),
+ None,
+ "First entry is `./` and when we confirm completion, it is tabbed below"
+ );
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ path!("/root/a"),
+ "Second entry is the first entry of a directory that we want to be completed"
+ );
// Confirm completion for the query "/root/", selecting the second candidate "dir1", since it's a directory, it should add a trailing slash.
let query = path!("/root/");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 1, &picker, cx),
+ confirm_completion(query, 2, &picker, cx).unwrap(),
path!("/root/dir1/")
);
let query = path!("/root/a");
insert_query(query, &picker, cx).await;
- assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/a"));
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx).unwrap(),
+ path!("/root/a")
+ );
let query = path!("/root/d");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 1, &picker, cx),
+ confirm_completion(query, 1, &picker, cx).unwrap(),
path!("/root/dir2/")
);
let query = path!("/root/dir2");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 0, &picker, cx),
+ confirm_completion(query, 0, &picker, cx).unwrap(),
path!("/root/dir2/")
);
let query = path!("/root/dir2/");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 0, &picker, cx),
+ confirm_completion(query, 1, &picker, cx).unwrap(),
path!("/root/dir2/c")
);
let query = path!("/root/dir2/");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 2, &picker, cx),
+ confirm_completion(query, 3, &picker, cx).unwrap(),
path!("/root/dir2/dir3/")
);
let query = path!("/root/dir2/d");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 0, &picker, cx),
+ confirm_completion(query, 0, &picker, cx).unwrap(),
path!("/root/dir2/d")
);
let query = path!("/root/dir2/d");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 1, &picker, cx),
+ confirm_completion(query, 1, &picker, cx).unwrap(),
path!("/root/dir2/dir3/")
);
let query = path!("/root/dir2/di");
insert_query(query, &picker, cx).await;
assert_eq!(
- confirm_completion(query, 1, &picker, cx),
+ confirm_completion(query, 1, &picker, cx).unwrap(),
path!("/root/dir2/dir4/")
);
}
@@ -211,42 +231,63 @@ async fn test_open_path_prompt_on_windows(cx: &mut TestAppContext) {
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["a", "dir1", "dir2"]
+ vec![".\\", "a", "dir1", "dir2"]
+ );
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx),
+ None,
+ "First entry is `.\\` and when we confirm completion, it is tabbed below"
+ );
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "C:/root/a",
+ "Second entry is the first entry of a directory that we want to be completed"
);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "C:/root/a");
let query = "C:\\root/";
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["a", "dir1", "dir2"]
+ vec![".\\", "a", "dir1", "dir2"]
+ );
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "C:\\root/a"
);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root/a");
let query = "C:\\root\\";
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["a", "dir1", "dir2"]
+ vec![".\\", "a", "dir1", "dir2"]
+ );
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "C:\\root\\a"
);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root\\a");
// Confirm completion for the query "C:/root/d", selecting the second candidate "dir2", since it's a directory, it should add a trailing slash.
let query = "C:/root/d";
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]);
- assert_eq!(confirm_completion(query, 1, &picker, cx), "C:/root/dir2\\");
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "C:/root/dir2\\"
+ );
let query = "C:\\root/d";
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root/dir1\\");
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx).unwrap(),
+ "C:\\root/dir1\\"
+ );
let query = "C:\\root\\d";
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]);
assert_eq!(
- confirm_completion(query, 0, &picker, cx),
+ confirm_completion(query, 0, &picker, cx).unwrap(),
"C:\\root\\dir1\\"
);
}
@@ -276,20 +317,29 @@ async fn test_open_path_prompt_on_windows_with_remote(cx: &mut TestAppContext) {
insert_query(query, &picker, cx).await;
assert_eq!(
collect_match_candidates(&picker, cx),
- vec!["a", "dir1", "dir2"]
+ vec!["./", "a", "dir1", "dir2"]
+ );
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "/root/a"
);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "/root/a");
// Confirm completion for the query "/root/d", selecting the second candidate "dir2", since it's a directory, it should add a trailing slash.
let query = "/root/d";
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]);
- assert_eq!(confirm_completion(query, 1, &picker, cx), "/root/dir2/");
+ assert_eq!(
+ confirm_completion(query, 1, &picker, cx).unwrap(),
+ "/root/dir2/"
+ );
let query = "/root/d";
insert_query(query, &picker, cx).await;
assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]);
- assert_eq!(confirm_completion(query, 0, &picker, cx), "/root/dir1/");
+ assert_eq!(
+ confirm_completion(query, 0, &picker, cx).unwrap(),
+ "/root/dir1/"
+ );
}
#[gpui::test]
@@ -396,15 +446,13 @@ fn confirm_completion(
select: usize,
picker: &Entity<Picker<OpenPathDelegate>>,
cx: &mut VisualTestContext,
-) -> String {
- picker
- .update_in(cx, |f, window, cx| {
- if f.delegate.selected_index() != select {
- f.delegate.set_selected_index(select, window, cx);
- }
- f.delegate.confirm_completion(query.to_string(), window, cx)
- })
- .unwrap()
+) -> Option<String> {
+ picker.update_in(cx, |f, window, cx| {
+ if f.delegate.selected_index() != select {
+ f.delegate.set_selected_index(select, window, cx);
+ }
+ f.delegate.confirm_completion(query.to_string(), window, cx)
+ })
}
fn collect_match_candidates(
@@ -345,7 +345,7 @@ impl GitRepository for FakeGitRepository {
fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, move |state| {
- state.branches.insert(name.to_owned());
+ state.branches.insert(name);
Ok(())
})
}
@@ -20,6 +20,9 @@ use std::os::fd::{AsFd, AsRawFd};
#[cfg(unix)]
use std::os::unix::fs::{FileTypeExt, MetadataExt};
+#[cfg(any(target_os = "macos", target_os = "freebsd"))]
+use std::mem::MaybeUninit;
+
use async_tar::Archive;
use futures::{AsyncRead, Stream, StreamExt, future::BoxFuture};
use git::repository::{GitRepository, RealGitRepository};
@@ -261,14 +264,15 @@ impl FileHandle for std::fs::File {
};
let fd = self.as_fd();
- let mut path_buf: [libc::c_char; libc::PATH_MAX as usize] = [0; libc::PATH_MAX as usize];
+ let mut path_buf = MaybeUninit::<[u8; libc::PATH_MAX as usize]>::uninit();
let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_GETPATH, path_buf.as_mut_ptr()) };
if result == -1 {
anyhow::bail!("fcntl returned -1".to_string());
}
- let c_str = unsafe { CStr::from_ptr(path_buf.as_ptr()) };
+ // SAFETY: `fcntl` will initialize the path buffer.
+ let c_str = unsafe { CStr::from_ptr(path_buf.as_ptr().cast()) };
let path = PathBuf::from(OsStr::from_bytes(c_str.to_bytes()));
Ok(path)
}
@@ -296,15 +300,17 @@ impl FileHandle for std::fs::File {
};
let fd = self.as_fd();
- let mut kif: libc::kinfo_file = unsafe { std::mem::zeroed() };
+ let mut kif = MaybeUninit::<libc::kinfo_file>::uninit();
- kif.kf_structsize = libc::KINFO_FILE_SIZE;
+ // SAFETY: only `kf_structsize` is written before `fcntl(F_KINFO)` fills the struct.
+ unsafe { (*kif.as_mut_ptr()).kf_structsize = libc::KINFO_FILE_SIZE };
- let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_KINFO, &mut kif) };
+ let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_KINFO, kif.as_mut_ptr()) };
if result == -1 {
anyhow::bail!("fcntl returned -1".to_string());
}
- let c_str = unsafe { CStr::from_ptr(kif.kf_path.as_ptr()) };
+ // SAFETY: `fcntl` will initialize the kif.
+ let c_str = unsafe { CStr::from_ptr(kif.assume_init().kf_path.as_ptr()) };
let path = PathBuf::from(OsStr::from_bytes(c_str.to_bytes()));
Ok(path)
}
@@ -495,7 +500,8 @@ impl Fs for RealFs {
};
// todo(windows)
// When new version of `windows-rs` release, make this operation `async`
- let path = SanitizedPath::from(path.canonicalize()?);
+ let path = path.canonicalize()?;
+ let path = SanitizedPath::new(&path);
let path_string = path.to_string();
let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_string))?.get()?;
file.DeleteAsync(StorageDeleteOption::Default)?.get()?;
@@ -522,7 +528,8 @@ impl Fs for RealFs {
// todo(windows)
// When new version of `windows-rs` release, make this operation `async`
- let path = SanitizedPath::from(path.canonicalize()?);
+ let path = path.canonicalize()?;
+ let path = SanitizedPath::new(&path);
let path_string = path.to_string();
let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_string))?.get()?;
folder.DeleteAsync(StorageDeleteOption::Default)?.get()?;
@@ -783,7 +790,7 @@ impl Fs for RealFs {
{
target = parent.join(target);
if let Ok(canonical) = self.canonicalize(&target).await {
- target = SanitizedPath::from(canonical).as_path().to_path_buf();
+ target = SanitizedPath::new(&canonical).as_path().to_path_buf();
}
}
watcher.add(&target).ok();
@@ -1960,7 +1967,7 @@ impl FileHandle for FakeHandle {
};
if state.try_entry(&target, false).is_some() {
- return Ok(target.clone());
+ return Ok(target);
}
anyhow::bail!("fake fd target not found")
}
@@ -2256,7 +2263,7 @@ impl Fs for FakeFs {
async fn load(&self, path: &Path) -> Result<String> {
let content = self.load_internal(path).await?;
- Ok(String::from_utf8(content.clone())?)
+ Ok(String::from_utf8(content)?)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>> {
@@ -2412,19 +2419,18 @@ impl Fs for FakeFs {
tx,
original_path: path.to_owned(),
fs_state: self.state.clone(),
- prefixes: Mutex::new(vec![path.to_owned()]),
+ prefixes: Mutex::new(vec![path]),
});
(
Box::pin(futures::StreamExt::filter(rx, {
let watcher = watcher.clone();
move |events| {
let result = events.iter().any(|evt_path| {
- let result = watcher
+ watcher
.prefixes
.lock()
.iter()
- .any(|prefix| evt_path.path.starts_with(prefix));
- result
+ .any(|prefix| evt_path.path.starts_with(prefix))
});
let executor = executor.clone();
async move {
@@ -42,7 +42,7 @@ impl Drop for FsWatcher {
impl Watcher for FsWatcher {
fn add(&self, path: &std::path::Path) -> anyhow::Result<()> {
- let root_path = SanitizedPath::from(path);
+ let root_path = SanitizedPath::new_arc(path);
let tx = self.tx.clone();
let pending_paths = self.pending_path_events.clone();
@@ -70,7 +70,7 @@ impl Watcher for FsWatcher {
.paths
.iter()
.filter_map(|event_path| {
- let event_path = SanitizedPath::from(event_path);
+ let event_path = SanitizedPath::new(event_path);
event_path.starts_with(&root_path).then(|| PathEvent {
path: event_path.as_path().to_path_buf(),
kind,
@@ -159,7 +159,7 @@ impl GlobalWatcher {
path: path.clone(),
};
state.watchers.insert(id, registration_state);
- *state.path_registrations.entry(path.clone()).or_insert(0) += 1;
+ *state.path_registrations.entry(path).or_insert(0) += 1;
Ok(id)
}
@@ -916,7 +916,7 @@ impl GitRepository for RealGitRepository {
.context("no stdin for git cat-file subprocess")?;
let mut stdin = BufWriter::new(stdin);
for rev in &revs {
- write!(&mut stdin, "{rev}\n")?;
+ writeln!(&mut stdin, "{rev}")?;
}
stdin.flush()?;
drop(stdin);
@@ -1205,9 +1205,10 @@ impl GitRepository for RealGitRepository {
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
let working_directory = self.working_directory();
+ let git_binary_path = self.git_binary_path.clone();
self.executor
.spawn(async move {
- let mut cmd = new_smol_command("git");
+ let mut cmd = new_smol_command(&git_binary_path);
cmd.current_dir(&working_directory?)
.envs(env.iter())
.args(["stash", "push", "--quiet"])
@@ -1229,9 +1230,10 @@ impl GitRepository for RealGitRepository {
fn stash_pop(&self, env: Arc<HashMap<String, String>>) -> BoxFuture<'_, Result<()>> {
let working_directory = self.working_directory();
+ let git_binary_path = self.git_binary_path.clone();
self.executor
.spawn(async move {
- let mut cmd = new_smol_command("git");
+ let mut cmd = new_smol_command(&git_binary_path);
cmd.current_dir(&working_directory?)
.envs(env.iter())
.args(["stash", "pop"]);
@@ -1256,9 +1258,10 @@ impl GitRepository for RealGitRepository {
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
let working_directory = self.working_directory();
+ let git_binary_path = self.git_binary_path.clone();
self.executor
.spawn(async move {
- let mut cmd = new_smol_command("git");
+ let mut cmd = new_smol_command(&git_binary_path);
cmd.current_dir(&working_directory?)
.envs(env.iter())
.args(["commit", "--quiet", "-m"])
@@ -1302,7 +1305,7 @@ impl GitRepository for RealGitRepository {
let executor = cx.background_executor().clone();
async move {
let working_directory = working_directory?;
- let mut command = new_smol_command("git");
+ let mut command = new_smol_command(&self.git_binary_path);
command
.envs(env.iter())
.current_dir(&working_directory)
@@ -1333,7 +1336,7 @@ impl GitRepository for RealGitRepository {
let working_directory = self.working_directory();
let executor = cx.background_executor().clone();
async move {
- let mut command = new_smol_command("git");
+ let mut command = new_smol_command(&self.git_binary_path);
command
.envs(env.iter())
.current_dir(&working_directory?)
@@ -1359,7 +1362,7 @@ impl GitRepository for RealGitRepository {
let remote_name = format!("{}", fetch_options);
let executor = cx.background_executor().clone();
async move {
- let mut command = new_smol_command("git");
+ let mut command = new_smol_command(&self.git_binary_path);
command
.envs(env.iter())
.current_dir(&working_directory?)
@@ -2028,7 +2031,7 @@ fn parse_branch_input(input: &str) -> Result<Vec<Branch>> {
branches.push(Branch {
is_head: is_current_branch,
- ref_name: ref_name,
+ ref_name,
most_recent_commit: Some(CommitSummary {
sha: head_sha,
subject,
@@ -153,17 +153,11 @@ impl FileStatus {
}
pub fn is_conflicted(self) -> bool {
- match self {
- FileStatus::Unmerged { .. } => true,
- _ => false,
- }
+ matches!(self, FileStatus::Unmerged { .. })
}
pub fn is_ignored(self) -> bool {
- match self {
- FileStatus::Ignored => true,
- _ => false,
- }
+ matches!(self, FileStatus::Ignored)
}
pub fn has_changes(&self) -> bool {
@@ -176,40 +170,31 @@ impl FileStatus {
pub fn is_modified(self) -> bool {
match self {
- FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) {
- (StatusCode::Modified, _) | (_, StatusCode::Modified) => true,
- _ => false,
- },
+ FileStatus::Tracked(tracked) => matches!(
+ (tracked.index_status, tracked.worktree_status),
+ (StatusCode::Modified, _) | (_, StatusCode::Modified)
+ ),
_ => false,
}
}
pub fn is_created(self) -> bool {
match self {
- FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) {
- (StatusCode::Added, _) | (_, StatusCode::Added) => true,
- _ => false,
- },
+ FileStatus::Tracked(tracked) => matches!(
+ (tracked.index_status, tracked.worktree_status),
+ (StatusCode::Added, _) | (_, StatusCode::Added)
+ ),
FileStatus::Untracked => true,
_ => false,
}
}
pub fn is_deleted(self) -> bool {
- match self {
- FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) {
- (StatusCode::Deleted, _) | (_, StatusCode::Deleted) => true,
- _ => false,
- },
- _ => false,
- }
+ matches!(self, FileStatus::Tracked(tracked) if matches!((tracked.index_status, tracked.worktree_status), (StatusCode::Deleted, _) | (_, StatusCode::Deleted)))
}
pub fn is_untracked(self) -> bool {
- match self {
- FileStatus::Untracked => true,
- _ => false,
- }
+ matches!(self, FileStatus::Untracked)
}
pub fn summary(self) -> GitSummary {
@@ -5,7 +5,7 @@ use git::GitHostingProviderRegistry;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore};
+use settings::{Settings, SettingsKey, SettingsStore, SettingsUi};
use url::Url;
use util::ResultExt as _;
@@ -78,7 +78,8 @@ pub struct GitHostingProviderConfig {
pub name: String,
}
-#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema)]
+#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct GitHostingProviderSettings {
/// The list of custom Git hosting providers.
#[serde(default)]
@@ -86,8 +87,6 @@ pub struct GitHostingProviderSettings {
}
impl Settings for GitHostingProviderSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = Self;
fn load(sources: settings::SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -172,7 +172,7 @@ impl BlameRenderer for GitBlameRenderer {
.clone()
.unwrap_or("<no name>".to_string())
.into(),
- author_email: blame.author_mail.clone().unwrap_or("".to_string()).into(),
+ author_email: blame.author_mail.unwrap_or("".to_string()).into(),
message: details,
};
@@ -186,7 +186,7 @@ impl BlameRenderer for GitBlameRenderer {
.get(0..8)
.map(|sha| sha.to_string().into())
.unwrap_or_else(|| commit_details.sha.clone());
- let full_sha = commit_details.sha.to_string().clone();
+ let full_sha = commit_details.sha.to_string();
let absolute_timestamp = format_local_timestamp(
commit_details.commit_time,
OffsetDateTime::now_utc(),
@@ -377,7 +377,7 @@ impl BlameRenderer for GitBlameRenderer {
has_parent: true,
},
repository.downgrade(),
- workspace.clone(),
+ workspace,
window,
cx,
)
@@ -48,7 +48,7 @@ pub fn open(
window: &mut Window,
cx: &mut Context<Workspace>,
) {
- let repository = workspace.project().read(cx).active_repository(cx).clone();
+ let repository = workspace.project().read(cx).active_repository(cx);
let style = BranchListStyle::Modal;
workspace.toggle_modal(window, cx, |window, cx| {
BranchList::new(repository, style, rems(34.), window, cx)
@@ -144,7 +144,7 @@ impl BranchList {
})
.detach_and_log_err(cx);
- let delegate = BranchListDelegate::new(repository.clone(), style);
+ let delegate = BranchListDelegate::new(repository, style);
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
let _subscription = cx.subscribe(&picker, |_, _, _, cx| {
@@ -341,7 +341,6 @@ impl PickerDelegate for BranchListDelegate {
};
picker
.update(cx, |picker, _| {
- #[allow(clippy::nonminimal_bool)]
if !query.is_empty()
&& !matches
.first()
@@ -35,7 +35,7 @@ impl ModalContainerProperties {
// Calculate width based on character width
let mut modal_width = 460.0;
- let style = window.text_style().clone();
+ let style = window.text_style();
let font_id = window.text_system().resolve_font(&style.font());
let font_size = style.font_size.to_pixels(window.rem_size());
@@ -179,7 +179,7 @@ impl CommitModal {
let commit_editor = git_panel.update(cx, |git_panel, cx| {
git_panel.set_modal_open(true, cx);
- let buffer = git_panel.commit_message_buffer(cx).clone();
+ let buffer = git_panel.commit_message_buffer(cx);
let panel_editor = git_panel.commit_editor.clone();
let project = git_panel.project.clone();
@@ -285,7 +285,7 @@ impl CommitModal {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.when_some(keybinding_target.clone(), |el, keybinding_target| {
- el.context(keybinding_target.clone())
+ el.context(keybinding_target)
})
.when(has_previous_commit, |this| {
this.toggleable_entry(
@@ -391,15 +391,9 @@ impl CommitModal {
});
let focus_handle = self.focus_handle(cx);
- let close_kb_hint =
- if let Some(close_kb) = ui::KeyBinding::for_action(&menu::Cancel, window, cx) {
- Some(
- KeybindingHint::new(close_kb, cx.theme().colors().editor_background)
- .suffix("Cancel"),
- )
- } else {
- None
- };
+ let close_kb_hint = ui::KeyBinding::for_action(&menu::Cancel, window, cx).map(|close_kb| {
+ KeybindingHint::new(close_kb, cx.theme().colors().editor_background).suffix("Cancel")
+ });
h_flex()
.group("commit_editor_footer")
@@ -482,7 +476,7 @@ impl CommitModal {
}),
self.render_git_commit_menu(
ElementId::Name(format!("split-button-right-{}", commit_label).into()),
- Some(focus_handle.clone()),
+ Some(focus_handle),
)
.into_any_element(),
)),
@@ -181,7 +181,7 @@ impl Render for CommitTooltip {
.get(0..8)
.map(|sha| sha.to_string().into())
.unwrap_or_else(|| self.commit.sha.clone());
- let full_sha = self.commit.sha.to_string().clone();
+ let full_sha = self.commit.sha.to_string();
let absolute_timestamp = format_local_timestamp(
self.commit.commit_time,
OffsetDateTime::now_utc(),
@@ -1,6 +1,6 @@
use anyhow::{Context as _, Result};
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects};
+use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_context_lines};
use git::repository::{CommitDetails, CommitDiff, CommitSummary, RepoPath};
use gpui::{
AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter,
@@ -195,7 +195,7 @@ impl CommitView {
PathKey::namespaced(FILE_NAMESPACE, path),
buffer,
diff_hunk_ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
multibuffer.add_diff(buffer_diff, cx);
@@ -55,7 +55,7 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity<MultiBuffer>, cx: &mu
buffers: Default::default(),
});
- let buffers = buffer.read(cx).all_buffers().clone();
+ let buffers = buffer.read(cx).all_buffers();
for buffer in buffers {
buffer_added(editor, buffer, cx);
}
@@ -129,7 +129,7 @@ fn buffer_added(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Ed
let subscription = cx.subscribe(&conflict_set, conflicts_updated);
BufferConflicts {
block_ids: Vec::new(),
- conflict_set: conflict_set.clone(),
+ conflict_set,
_subscription: subscription,
}
});
@@ -437,7 +437,6 @@ fn render_conflict_buttons(
Button::new("both", "Use Both")
.label_size(LabelSize::Small)
.on_click({
- let editor = editor.clone();
let conflict = conflict.clone();
let ours = conflict.ours.clone();
let theirs = conflict.theirs.clone();
@@ -123,7 +123,7 @@ impl FileDiffView {
old_buffer,
new_buffer,
_recalculate_diff_task: cx.spawn(async move |this, cx| {
- while let Ok(_) = buffer_changes_rx.recv().await {
+ while buffer_changes_rx.recv().await.is_ok() {
loop {
let mut timer = cx
.background_executor()
@@ -31,11 +31,11 @@ use git::{
UnstageAll,
};
use gpui::{
- Action, Animation, AnimationExt as _, AsyncApp, AsyncWindowContext, Axis, ClickEvent, Corner,
- DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext,
- ListHorizontalSizingBehavior, ListSizingBehavior, MouseButton, MouseDownEvent, Point,
- PromptLevel, ScrollStrategy, Subscription, Task, Transformation, UniformListScrollHandle,
- WeakEntity, actions, anchored, deferred, percentage, uniform_list,
+ Action, AsyncApp, AsyncWindowContext, Axis, ClickEvent, Corner, DismissEvent, Entity,
+ EventEmitter, FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior,
+ ListSizingBehavior, MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy,
+ Subscription, Task, UniformListScrollHandle, WeakEntity, actions, anchored, deferred,
+ uniform_list,
};
use itertools::Itertools;
use language::{Buffer, File};
@@ -63,8 +63,8 @@ use std::{collections::HashSet, sync::Arc, time::Duration, usize};
use strum::{IntoEnumIterator, VariantNames};
use time::OffsetDateTime;
use ui::{
- Checkbox, ContextMenu, ElevationIndex, IconPosition, Label, LabelSize, PopoverMenu, Scrollbar,
- ScrollbarState, SplitButton, Tooltip, prelude::*,
+ Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IconPosition, Label, LabelSize,
+ PopoverMenu, Scrollbar, ScrollbarState, SplitButton, Tooltip, prelude::*,
};
use util::{ResultExt, TryFutureExt, maybe};
use workspace::SERIALIZATION_THROTTLE_TIME;
@@ -388,9 +388,6 @@ pub(crate) fn commit_message_editor(
window: &mut Window,
cx: &mut Context<Editor>,
) -> Editor {
- project.update(cx, |this, cx| {
- this.mark_buffer_as_non_searchable(commit_message_buffer.read(cx).remote_id(), cx);
- });
let buffer = cx.new(|cx| MultiBuffer::singleton(commit_message_buffer, cx));
let max_lines = if in_panel { MAX_PANEL_EDITOR_LINES } else { 18 };
let mut commit_editor = Editor::new(
@@ -426,7 +423,7 @@ impl GitPanel {
let git_store = project.read(cx).git_store().clone();
let active_repository = project.read(cx).active_repository(cx);
- let git_panel = cx.new(|cx| {
+ cx.new(|cx| {
let focus_handle = cx.focus_handle();
cx.on_focus(&focus_handle, window, Self::focus_in).detach();
cx.on_focus_out(&focus_handle, window, |this, _, window, cx| {
@@ -563,9 +560,7 @@ impl GitPanel {
this.schedule_update(false, window, cx);
this
- });
-
- git_panel
+ })
}
fn hide_scrollbars(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -1198,14 +1193,13 @@ impl GitPanel {
window,
cx,
);
- cx.spawn(async move |this, cx| match prompt.await {
- Ok(RestoreCancel::RestoreTrackedFiles) => {
+ cx.spawn(async move |this, cx| {
+ if let Ok(RestoreCancel::RestoreTrackedFiles) = prompt.await {
this.update(cx, |this, cx| {
this.perform_checkout(entries, cx);
})
.ok();
}
- _ => {}
})
.detach();
}
@@ -1338,7 +1332,7 @@ impl GitPanel {
section.contains(status_entry, repository)
&& status_entry.staging.as_bool() != Some(goal_staged_state)
})
- .map(|status_entry| status_entry.clone())
+ .cloned()
.collect::<Vec<_>>();
(goal_staged_state, entries)
@@ -1470,7 +1464,6 @@ impl GitPanel {
.read(cx)
.as_singleton()
.unwrap()
- .clone()
}
fn toggle_staged_for_selected(
@@ -2179,7 +2172,7 @@ impl GitPanel {
let worktree = if worktrees.len() == 1 {
Task::ready(Some(worktrees.first().unwrap().clone()))
- } else if worktrees.len() == 0 {
+ } else if worktrees.is_empty() {
let result = window.prompt(
PromptLevel::Warning,
"Unable to initialize a git repository",
@@ -2762,22 +2755,22 @@ impl GitPanel {
}
}
- if conflict_entries.len() == 0 && staged_count == 1 && pending_staged_count == 0 {
+ if conflict_entries.is_empty() && staged_count == 1 && pending_staged_count == 0 {
match pending_status_for_single_staged {
Some(TargetStatus::Staged) | None => {
self.single_staged_entry = single_staged_entry;
}
_ => {}
}
- } else if conflict_entries.len() == 0 && pending_staged_count == 1 {
+ } else if conflict_entries.is_empty() && pending_staged_count == 1 {
self.single_staged_entry = last_pending_staged;
}
- if conflict_entries.len() == 0 && changed_entries.len() == 1 {
+ if conflict_entries.is_empty() && changed_entries.len() == 1 {
self.single_tracked_entry = changed_entries.first().cloned();
}
- if conflict_entries.len() > 0 {
+ if !conflict_entries.is_empty() {
self.entries.push(GitListEntry::Header(GitHeaderEntry {
header: Section::Conflict,
}));
@@ -2785,7 +2778,7 @@ impl GitPanel {
.extend(conflict_entries.into_iter().map(GitListEntry::Status));
}
- if changed_entries.len() > 0 {
+ if !changed_entries.is_empty() {
if !sort_by_path {
self.entries.push(GitListEntry::Header(GitHeaderEntry {
header: Section::Tracked,
@@ -2794,7 +2787,7 @@ impl GitPanel {
self.entries
.extend(changed_entries.into_iter().map(GitListEntry::Status));
}
- if new_entries.len() > 0 {
+ if !new_entries.is_empty() {
self.entries.push(GitListEntry::Header(GitHeaderEntry {
header: Section::New,
}));
@@ -2987,9 +2980,7 @@ impl GitPanel {
let status_toast = StatusToast::new(message, cx, move |this, _cx| {
use remote_output::SuccessStyle::*;
match style {
- Toast { .. } => {
- this.icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted))
- }
+ Toast => this.icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted)),
ToastWithLog { output } => this
.icon(ToastIcon::new(IconName::GitBranchAlt).color(Color::Muted))
.action("View Log", move |window, cx| {
@@ -3094,13 +3085,7 @@ impl GitPanel {
Icon::new(IconName::ArrowCircle)
.size(IconSize::XSmall)
.color(Color::Info)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(percentage(delta)))
- },
- ),
+ .with_rotate_animation(2),
)
.child(
Label::new("Generating Commit...")
@@ -3210,7 +3195,7 @@ impl GitPanel {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.when_some(keybinding_target.clone(), |el, keybinding_target| {
- el.context(keybinding_target.clone())
+ el.context(keybinding_target)
})
.when(has_previous_commit, |this| {
this.toggleable_entry(
@@ -3390,7 +3375,7 @@ impl GitPanel {
let enable_coauthors = self.render_co_authors(cx);
let editor_focus_handle = self.commit_editor.focus_handle(cx);
- let expand_tooltip_focus_handle = editor_focus_handle.clone();
+ let expand_tooltip_focus_handle = editor_focus_handle;
let branch = active_repository.read(cx).branch.clone();
let head_commit = active_repository.read(cx).head_commit.clone();
@@ -3419,7 +3404,7 @@ impl GitPanel {
display_name,
branch,
head_commit,
- Some(git_panel.clone()),
+ Some(git_panel),
))
.child(
panel_editor_container(window, cx)
@@ -3570,7 +3555,7 @@ impl GitPanel {
}),
self.render_git_commit_menu(
ElementId::Name(format!("split-button-right-{}", title).into()),
- Some(commit_tooltip_focus_handle.clone()),
+ Some(commit_tooltip_focus_handle),
cx,
)
.into_any_element(),
@@ -3636,7 +3621,7 @@ impl GitPanel {
CommitView::open(
commit.clone(),
repo.clone(),
- workspace.clone().clone(),
+ workspace.clone(),
window,
cx,
);
@@ -4344,7 +4329,7 @@ impl GitPanel {
}
})
.child(
- self.entry_label(display_name.clone(), label_color)
+ self.entry_label(display_name, label_color)
.when(status.is_deleted(), |this| this.strikethrough()),
),
)
@@ -4482,7 +4467,7 @@ fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn Language
impl Render for GitPanel {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let project = self.project.read(cx);
- let has_entries = self.entries.len() > 0;
+ let has_entries = !self.entries.is_empty();
let room = self
.workspace
.upgrade()
@@ -4693,7 +4678,7 @@ impl GitPanelMessageTooltip {
author_email: details.author_email.clone(),
commit_time: OffsetDateTime::from_unix_timestamp(details.commit_timestamp)?,
message: Some(ParsedCommitMessage {
- message: details.message.clone(),
+ message: details.message,
..Default::default()
}),
};
@@ -4826,7 +4811,7 @@ impl RenderOnce for PanelRepoFooter {
};
let truncated_branch_name = if branch_actual_len <= branch_display_len {
- branch_name.to_string()
+ branch_name
} else {
util::truncate_and_trailoff(branch_name.trim_ascii(), branch_display_len)
};
@@ -4839,7 +4824,7 @@ impl RenderOnce for PanelRepoFooter {
let repo_selector = PopoverMenu::new("repository-switcher")
.menu({
- let project = project.clone();
+ let project = project;
move |window, cx| {
let project = project.clone()?;
Some(cx.new(|cx| RepositorySelector::new(project, rems(16.), window, cx)))
@@ -5010,10 +4995,7 @@ impl Component for PanelRepoFooter {
div()
.w(example_width)
.overflow_hidden()
- .child(PanelRepoFooter::new_preview(
- active_repository(1).clone(),
- None,
- ))
+ .child(PanelRepoFooter::new_preview(active_repository(1), None))
.into_any_element(),
),
single_example(
@@ -5022,7 +5004,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(2).clone(),
+ active_repository(2),
Some(branch(unknown_upstream)),
))
.into_any_element(),
@@ -5033,7 +5015,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(3).clone(),
+ active_repository(3),
Some(branch(no_remote_upstream)),
))
.into_any_element(),
@@ -5044,7 +5026,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(4).clone(),
+ active_repository(4),
Some(branch(not_ahead_or_behind_upstream)),
))
.into_any_element(),
@@ -5055,7 +5037,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(5).clone(),
+ active_repository(5),
Some(branch(behind_upstream)),
))
.into_any_element(),
@@ -5066,7 +5048,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(6).clone(),
+ active_repository(6),
Some(branch(ahead_of_upstream)),
))
.into_any_element(),
@@ -5077,7 +5059,7 @@ impl Component for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
- active_repository(7).clone(),
+ active_repository(7),
Some(branch(ahead_and_behind_upstream)),
))
.into_any_element(),
@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
use gpui::Pixels;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use workspace::dock::DockPosition;
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -36,7 +36,8 @@ pub enum StatusStyle {
LabelColor,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "git_panel")]
pub struct GitPanelSettingsContent {
/// Whether to show the panel button in the status bar.
///
@@ -90,8 +91,6 @@ pub struct GitPanelSettings {
}
impl Settings for GitPanelSettings {
- const KEY: Option<&'static str> = Some("git_panel");
-
type FileContent = GitPanelSettingsContent;
fn load(
@@ -245,12 +245,12 @@ fn render_remote_button(
}
(0, 0) => None,
(ahead, 0) => Some(remote_button::render_push_button(
- keybinding_target.clone(),
+ keybinding_target,
id,
ahead,
)),
(ahead, behind) => Some(remote_button::render_pull_button(
- keybinding_target.clone(),
+ keybinding_target,
id,
ahead,
behind,
@@ -425,16 +425,9 @@ mod remote_button {
let command = command.into();
if let Some(handle) = focus_handle {
- Tooltip::with_meta_in(
- label.clone(),
- Some(action),
- command.clone(),
- &handle,
- window,
- cx,
- )
+ Tooltip::with_meta_in(label, Some(action), command, &handle, window, cx)
} else {
- Tooltip::with_meta(label.clone(), Some(action), command.clone(), window, cx)
+ Tooltip::with_meta(label, Some(action), command, window, cx)
}
}
@@ -457,7 +450,7 @@ mod remote_button {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.when_some(keybinding_target.clone(), |el, keybinding_target| {
- el.context(keybinding_target.clone())
+ el.context(keybinding_target)
})
.action("Fetch", git::Fetch.boxed_clone())
.action("Fetch From", git::FetchFrom.boxed_clone())
@@ -10,6 +10,7 @@ use collections::HashSet;
use editor::{
Editor, EditorEvent, SelectionEffects,
actions::{GoToHunk, GoToPreviousHunk},
+ multibuffer_context_lines,
scroll::Autoscroll,
};
use futures::StreamExt;
@@ -242,7 +243,7 @@ impl ProjectDiff {
TRACKED_NAMESPACE
};
- let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone());
+ let path_key = PathKey::namespaced(namespace, entry.repo_path.0);
self.move_to_path(path_key, window, cx)
}
@@ -346,22 +347,19 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
- match event {
- EditorEvent::SelectionsChanged { local: true } => {
- let Some(project_path) = self.active_path(cx) else {
- return;
- };
- self.workspace
- .update(cx, |workspace, cx| {
- if let Some(git_panel) = workspace.panel::<GitPanel>(cx) {
- git_panel.update(cx, |git_panel, cx| {
- git_panel.select_entry_by_path(project_path, window, cx)
- })
- }
- })
- .ok();
- }
- _ => {}
+ if let EditorEvent::SelectionsChanged { local: true } = event {
+ let Some(project_path) = self.active_path(cx) else {
+ return;
+ };
+ self.workspace
+ .update(cx, |workspace, cx| {
+ if let Some(git_panel) = workspace.panel::<GitPanel>(cx) {
+ git_panel.update(cx, |git_panel, cx| {
+ git_panel.select_entry_by_path(project_path, window, cx)
+ })
+ }
+ })
+ .ok();
}
if editor.focus_handle(cx).contains_focused(window, cx)
&& self.multibuffer.read(cx).is_empty()
@@ -451,10 +449,10 @@ impl ProjectDiff {
let diff = diff.read(cx);
let diff_hunk_ranges = diff
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx)
- .map(|diff_hunk| diff_hunk.buffer_range.clone());
+ .map(|diff_hunk| diff_hunk.buffer_range);
let conflicts = conflict_addon
.conflict_set(snapshot.remote_id())
- .map(|conflict_set| conflict_set.read(cx).snapshot().conflicts.clone())
+ .map(|conflict_set| conflict_set.read(cx).snapshot().conflicts)
.unwrap_or_default();
let conflicts = conflicts.iter().map(|conflict| conflict.range.clone());
@@ -468,7 +466,7 @@ impl ProjectDiff {
path_key.clone(),
buffer,
excerpt_ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
);
(was_empty, is_newly_added)
@@ -513,7 +511,7 @@ impl ProjectDiff {
mut recv: postage::watch::Receiver<()>,
cx: &mut AsyncWindowContext,
) -> Result<()> {
- while let Some(_) = recv.next().await {
+ while (recv.next().await).is_some() {
let buffers_to_load = this.update(cx, |this, cx| this.load_buffers(cx))?;
for buffer_to_load in buffers_to_load {
if let Some(buffer) = buffer_to_load.await.log_err() {
@@ -740,7 +738,7 @@ impl Render for ProjectDiff {
} else {
None
};
- let keybinding_focus_handle = self.focus_handle(cx).clone();
+ let keybinding_focus_handle = self.focus_handle(cx);
el.child(
v_flex()
.gap_1()
@@ -1073,8 +1071,7 @@ pub struct ProjectDiffEmptyState {
impl RenderOnce for ProjectDiffEmptyState {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let status_against_remote = |ahead_by: usize, behind_by: usize| -> bool {
- match self.current_branch {
- Some(Branch {
+ matches!(self.current_branch, Some(Branch {
upstream:
Some(Upstream {
tracking:
@@ -1084,9 +1081,7 @@ impl RenderOnce for ProjectDiffEmptyState {
..
}),
..
- }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0) => true,
- _ => false,
- }
+ }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0))
};
let change_count = |current_branch: &Branch| -> (usize, usize) {
@@ -48,7 +48,7 @@ impl TextDiffView {
let selection_data = source_editor.update(cx, |editor, cx| {
let multibuffer = editor.buffer().read(cx);
- let source_buffer = multibuffer.as_singleton()?.clone();
+ let source_buffer = multibuffer.as_singleton()?;
let selections = editor.selections.all::<Point>(cx);
let buffer_snapshot = source_buffer.read(cx);
let first_selection = selections.first()?;
@@ -207,7 +207,7 @@ impl TextDiffView {
path: Some(format!("Clipboard ↔ {selection_location_path}").into()),
buffer_changes_tx,
_recalculate_diff_task: cx.spawn(async move |_, cx| {
- while let Ok(_) = buffer_changes_rx.recv().await {
+ while buffer_changes_rx.recv().await.is_ok() {
loop {
let mut timer = cx
.background_executor()
@@ -259,7 +259,7 @@ async fn update_diff_buffer(
let source_buffer_snapshot = source_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let base_buffer_snapshot = clipboard_buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
- let base_text = base_buffer_snapshot.text().to_string();
+ let base_text = base_buffer_snapshot.text();
let diff_snapshot = cx
.update(|cx| {
@@ -2,7 +2,7 @@ use editor::{Editor, EditorSettings, MultiBufferSnapshot};
use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use std::{fmt::Write, num::NonZeroU32, time::Duration};
use text::{Point, Selection};
use ui::{
@@ -106,7 +106,7 @@ impl CursorPosition {
cursor_position.selected_count.selections = editor.selections.count();
match editor.mode() {
editor::EditorMode::AutoHeight { .. }
- | editor::EditorMode::SingleLine { .. }
+ | editor::EditorMode::SingleLine
| editor::EditorMode::Minimap { .. } => {
cursor_position.position = None;
cursor_position.context = None;
@@ -301,14 +301,13 @@ pub(crate) enum LineIndicatorFormat {
Long,
}
-#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
+#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi, SettingsKey)]
#[serde(transparent)]
+#[settings_key(key = "line_indicator_format")]
pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat);
impl Settings for LineIndicatorFormat {
- const KEY: Option<&'static str> = Some("line_indicator_format");
-
- type FileContent = Option<LineIndicatorFormatContent>;
+ type FileContent = LineIndicatorFormatContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
let format = [
@@ -317,8 +316,8 @@ impl Settings for LineIndicatorFormat {
sources.user,
]
.into_iter()
- .find_map(|value| value.copied().flatten())
- .unwrap_or(sources.default.ok_or_else(Self::missing_default)?);
+ .find_map(|value| value.copied())
+ .unwrap_or(*sources.default);
Ok(format.0)
}
@@ -157,7 +157,7 @@ impl GoToLine {
self.prev_scroll_position.take();
cx.emit(DismissEvent)
}
- editor::EditorEvent::BufferEdited { .. } => self.highlight_current_line(cx),
+ editor::EditorEvent::BufferEdited => self.highlight_current_line(cx),
_ => {}
}
}
@@ -712,7 +712,7 @@ mod tests {
) -> Entity<GoToLine> {
cx.dispatch_action(editor::actions::ToggleGoToLine);
workspace.update(cx, |workspace, cx| {
- workspace.active_modal::<GoToLine>(cx).unwrap().clone()
+ workspace.active_modal::<GoToLine>(cx).unwrap()
})
}
@@ -13,6 +13,7 @@ pub async fn stream_generate_content(
api_key: &str,
mut request: GenerateContentRequest,
) -> Result<BoxStream<'static, Result<GenerateContentResponse>>> {
+ let api_key = api_key.trim();
validate_generate_content_request(&request)?;
// The `model` field is emptied as it is provided as a path parameter.
@@ -12,13 +12,13 @@ license = "Apache-2.0"
workspace = true
[features]
-default = ["http_client", "font-kit", "wayland", "x11", "windows-manifest"]
+default = ["font-kit", "wayland", "x11", "windows-manifest"]
test-support = [
"leak-detection",
"collections/test-support",
"rand",
"util/test-support",
- "http_client?/test-support",
+ "http_client/test-support",
"wayland",
"x11",
]
@@ -91,7 +91,7 @@ derive_more.workspace = true
etagere = "0.2"
futures.workspace = true
gpui_macros.workspace = true
-http_client = { optional = true, workspace = true }
+http_client.workspace = true
image.workspace = true
inventory.workspace = true
itertools.workspace = true
@@ -23,7 +23,7 @@ On macOS, GPUI uses Metal for rendering. In order to use Metal, you need to do t
- Install [Xcode](https://apps.apple.com/us/app/xcode/id497799835?mt=12) from the macOS App Store, or from the [Apple Developer](https://developer.apple.com/download/all/) website. Note this requires a developer account.
-> Ensure you launch XCode after installing, and install the macOS components, which is the default option.
+> Ensure you launch Xcode after installing, and install the macOS components, which is the default option. If you are on macOS 26 (Tahoe) you will need to use `--features gpui/runtime_shaders` or add the feature in the root `Cargo.toml`
- Install [Xcode command line tools](https://developer.apple.com/xcode/resources/)
@@ -38,58 +38,58 @@ pub struct Quote {
impl Quote {
pub fn random() -> Self {
use rand::Rng;
- let mut rng = rand::thread_rng();
+ let mut rng = rand::rng();
// simulate a base price in a realistic range
- let prev_close = rng.gen_range(100.0..200.0);
- let change = rng.gen_range(-5.0..5.0);
+ let prev_close = rng.random_range(100.0..200.0);
+ let change = rng.random_range(-5.0..5.0);
let last_done = prev_close + change;
- let open = prev_close + rng.gen_range(-3.0..3.0);
- let high = (prev_close + rng.gen_range::<f64, _>(0.0..10.0)).max(open);
- let low = (prev_close - rng.gen_range::<f64, _>(0.0..10.0)).min(open);
- let timestamp = Duration::from_secs(rng.gen_range(0..86400));
- let volume = rng.gen_range(1_000_000..100_000_000);
+ let open = prev_close + rng.random_range(-3.0..3.0);
+ let high = (prev_close + rng.random_range::<f64, _>(0.0..10.0)).max(open);
+ let low = (prev_close - rng.random_range::<f64, _>(0.0..10.0)).min(open);
+ let timestamp = Duration::from_secs(rng.random_range(0..86400));
+ let volume = rng.random_range(1_000_000..100_000_000);
let turnover = last_done * volume as f64;
let symbol = {
let mut ticker = String::new();
- if rng.gen_bool(0.5) {
+ if rng.random_bool(0.5) {
ticker.push_str(&format!(
"{:03}.{}",
- rng.gen_range(100..1000),
- rng.gen_range(0..10)
+ rng.random_range(100..1000),
+ rng.random_range(0..10)
));
} else {
ticker.push_str(&format!(
"{}{}",
- rng.gen_range('A'..='Z'),
- rng.gen_range('A'..='Z')
+ rng.random_range('A'..='Z'),
+ rng.random_range('A'..='Z')
));
}
- ticker.push_str(&format!(".{}", rng.gen_range('A'..='Z')));
+ ticker.push_str(&format!(".{}", rng.random_range('A'..='Z')));
ticker
};
let name = format!(
"{} {} - #{}",
symbol,
- rng.gen_range(1..100),
- rng.gen_range(10000..100000)
+ rng.random_range(1..100),
+ rng.random_range(10000..100000)
);
- let ttm = rng.gen_range(0.0..10.0);
- let market_cap = rng.gen_range(1_000_000.0..10_000_000.0);
- let float_cap = market_cap + rng.gen_range(1_000.0..10_000.0);
- let shares = rng.gen_range(100.0..1000.0);
+ let ttm = rng.random_range(0.0..10.0);
+ let market_cap = rng.random_range(1_000_000.0..10_000_000.0);
+ let float_cap = market_cap + rng.random_range(1_000.0..10_000.0);
+ let shares = rng.random_range(100.0..1000.0);
let pb = market_cap / shares;
let pe = market_cap / shares;
let eps = market_cap / shares;
- let dividend = rng.gen_range(0.0..10.0);
- let dividend_yield = rng.gen_range(0.0..10.0);
- let dividend_per_share = rng.gen_range(0.0..10.0);
+ let dividend = rng.random_range(0.0..10.0);
+ let dividend_yield = rng.random_range(0.0..10.0);
+ let dividend_per_share = rng.random_range(0.0..10.0);
let dividend_date = SharedString::new(format!(
"{}-{}-{}",
- rng.gen_range(2000..2023),
- rng.gen_range(1..12),
- rng.gen_range(1..28)
+ rng.random_range(2000..2023),
+ rng.random_range(1..12),
+ rng.random_range(1..28)
));
- let dividend_payment = rng.gen_range(0.0..10.0);
+ let dividend_payment = rng.random_range(0.0..10.0);
Self {
name: name.into(),
@@ -75,65 +75,71 @@ impl Render for ImageShowcase {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
div()
.id("main")
+ .bg(gpui::white())
.overflow_y_scroll()
.p_5()
.size_full()
- .flex()
- .flex_col()
- .justify_center()
- .items_center()
- .gap_8()
- .bg(rgb(0xffffff))
.child(
div()
.flex()
- .flex_row()
+ .flex_col()
.justify_center()
.items_center()
.gap_8()
- .child(ImageContainer::new(
- "Image loaded from a local file",
- self.local_resource.clone(),
- ))
- .child(ImageContainer::new(
- "Image loaded from a remote resource",
- self.remote_resource.clone(),
+ .child(img(
+ "https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg",
))
- .child(ImageContainer::new(
- "Image loaded from an asset",
- self.asset_resource.clone(),
- )),
- )
- .child(
- div()
- .flex()
- .flex_row()
- .gap_8()
.child(
div()
- .flex_col()
- .child("Auto Width")
- .child(img("https://picsum.photos/800/400").h(px(180.))),
+ .flex()
+ .flex_row()
+ .justify_center()
+ .items_center()
+ .gap_8()
+ .child(ImageContainer::new(
+ "Image loaded from a local file",
+ self.local_resource.clone(),
+ ))
+ .child(ImageContainer::new(
+ "Image loaded from a remote resource",
+ self.remote_resource.clone(),
+ ))
+ .child(ImageContainer::new(
+ "Image loaded from an asset",
+ self.asset_resource.clone(),
+ )),
+ )
+ .child(
+ div()
+ .flex()
+ .flex_row()
+ .gap_8()
+ .child(
+ div()
+ .flex_col()
+ .child("Auto Width")
+ .child(img("https://picsum.photos/800/400").h(px(180.))),
+ )
+ .child(
+ div()
+ .flex_col()
+ .child("Auto Height")
+ .child(img("https://picsum.photos/800/400").w(px(180.))),
+ ),
)
.child(
div()
+ .flex()
.flex_col()
- .child("Auto Height")
- .child(img("https://picsum.photos/800/400").w(px(180.))),
+ .justify_center()
+ .items_center()
+ .w_full()
+ .border_1()
+ .border_color(rgb(0xC0C0C0))
+ .child("image with max width 100%")
+ .child(img("https://picsum.photos/800/400").max_w_full()),
),
)
- .child(
- div()
- .flex()
- .flex_col()
- .justify_center()
- .items_center()
- .w_full()
- .border_1()
- .border_color(rgb(0xC0C0C0))
- .child("image with max width 100%")
- .child(img("https://picsum.photos/800/400").max_w_full()),
- )
}
}
@@ -446,7 +446,7 @@ impl Element for TextElement {
let (display_text, text_color) = if content.is_empty() {
(input.placeholder.clone(), hsla(0., 0., 0., 0.2))
} else {
- (content.clone(), style.color)
+ (content, style.color)
};
let run = TextRun {
@@ -474,7 +474,7 @@ impl Element for TextElement {
},
TextRun {
len: display_text.len() - marked_range.end,
- ..run.clone()
+ ..run
},
]
.into_iter()
@@ -155,7 +155,7 @@ impl RenderOnce for Specimen {
.text_size(px(font_size * scale))
.line_height(relative(line_height))
.p(px(10.0))
- .child(self.string.clone())
+ .child(self.string)
}
}
@@ -152,6 +152,36 @@ impl Render for WindowDemo {
)
.unwrap();
}))
+ .child(button("Unresizable", move |_, cx| {
+ cx.open_window(
+ WindowOptions {
+ is_resizable: false,
+ window_bounds: Some(window_bounds),
+ ..Default::default()
+ },
+ |_, cx| {
+ cx.new(|_| SubWindow {
+ custom_titlebar: false,
+ })
+ },
+ )
+ .unwrap();
+ }))
+ .child(button("Unminimizable", move |_, cx| {
+ cx.open_window(
+ WindowOptions {
+ is_minimizable: false,
+ window_bounds: Some(window_bounds),
+ ..Default::default()
+ },
+ |_, cx| {
+ cx.new(|_| SubWindow {
+ custom_titlebar: false,
+ })
+ },
+ )
+ .unwrap();
+ }))
.child(button("Hide Application", |window, cx| {
cx.hide();
@@ -62,6 +62,8 @@ fn build_window_options(display_id: DisplayId, bounds: Bounds<Pixels>) -> Window
app_id: None,
window_min_size: None,
window_decorations: None,
+ tabbing_identifier: None,
+ ..Default::default()
}
}
@@ -73,18 +73,18 @@ macro_rules! actions {
/// - `name = "ActionName"` overrides the action's name. This must not contain `::`.
///
/// - `no_json` causes the `build` method to always error and `action_json_schema` to return `None`,
-/// and allows actions not implement `serde::Serialize` and `schemars::JsonSchema`.
+///   and allows actions to not implement `serde::Serialize` and `schemars::JsonSchema`.
///
/// - `no_register` skips registering the action. This is useful for implementing the `Action` trait
-/// while not supporting invocation by name or JSON deserialization.
+/// while not supporting invocation by name or JSON deserialization.
///
/// - `deprecated_aliases = ["editor::SomeAction"]` specifies deprecated old names for the action.
-/// These action names should *not* correspond to any actions that are registered. These old names
-/// can then still be used to refer to invoke this action. In Zed, the keymap JSON schema will
-/// accept these old names and provide warnings.
+/// These action names should *not* correspond to any actions that are registered. These old names
+///   can then still be used to invoke this action. In Zed, the keymap JSON schema will
+/// accept these old names and provide warnings.
///
/// - `deprecated = "Message about why this action is deprecation"` specifies a deprecation message.
-/// In Zed, the keymap JSON schema will cause this to be displayed as a warning.
+/// In Zed, the keymap JSON schema will cause this to be displayed as a warning.
///
/// # Manual Implementation
///
@@ -7,7 +7,7 @@ use std::{
path::{Path, PathBuf},
rc::{Rc, Weak},
sync::{Arc, atomic::Ordering::SeqCst},
- time::Duration,
+ time::{Duration, Instant},
};
use anyhow::{Context as _, Result, anyhow};
@@ -17,6 +17,7 @@ use futures::{
channel::oneshot,
future::{LocalBoxFuture, Shared},
};
+use itertools::Itertools;
use parking_lot::RwLock;
use slotmap::SlotMap;
@@ -37,10 +38,10 @@ use crate::{
AssetSource, BackgroundExecutor, Bounds, ClipboardItem, CursorStyle, DispatchPhase, DisplayId,
EventEmitter, FocusHandle, FocusMap, ForegroundExecutor, Global, KeyBinding, KeyContext,
Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform,
- PlatformDisplay, PlatformKeyboardLayout, Point, PromptBuilder, PromptButton, PromptHandle,
- PromptLevel, Render, RenderImage, RenderablePromptHandle, Reservation, ScreenCaptureSource,
- SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance,
- WindowHandle, WindowId, WindowInvalidator,
+ PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, Point, PromptBuilder,
+ PromptButton, PromptHandle, PromptLevel, Render, RenderImage, RenderablePromptHandle,
+ Reservation, ScreenCaptureSource, SharedString, SubscriberSet, Subscription, SvgRenderer, Task,
+ TextSystem, Window, WindowAppearance, WindowHandle, WindowId, WindowInvalidator,
colors::{Colors, GlobalColors},
current_platform, hash, init_app_menus,
};
@@ -237,6 +238,303 @@ type WindowClosedHandler = Box<dyn FnMut(&mut App)>;
type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut App) + 'static>;
type NewEntityListener = Box<dyn FnMut(AnyEntity, &mut Option<&mut Window>, &mut App) + 'static>;
+#[doc(hidden)]
+#[derive(Clone, PartialEq, Eq)]
+pub struct SystemWindowTab {
+ pub id: WindowId,
+ pub title: SharedString,
+ pub handle: AnyWindowHandle,
+ pub last_active_at: Instant,
+}
+
+impl SystemWindowTab {
+ /// Create a new instance of the window tab.
+ pub fn new(title: SharedString, handle: AnyWindowHandle) -> Self {
+ Self {
+ id: handle.id,
+ title,
+ handle,
+ last_active_at: Instant::now(),
+ }
+ }
+}
+
+/// A controller for managing window tabs.
+#[derive(Default)]
+pub struct SystemWindowTabController {
+ visible: Option<bool>,
+ tab_groups: FxHashMap<usize, Vec<SystemWindowTab>>,
+}
+
+impl Global for SystemWindowTabController {}
+
+impl SystemWindowTabController {
+ /// Create a new instance of the window tab controller.
+ pub fn new() -> Self {
+ Self {
+ visible: None,
+ tab_groups: FxHashMap::default(),
+ }
+ }
+
+ /// Initialize the global window tab controller.
+ pub fn init(cx: &mut App) {
+ cx.set_global(SystemWindowTabController::new());
+ }
+
+ /// Get all tab groups.
+ pub fn tab_groups(&self) -> &FxHashMap<usize, Vec<SystemWindowTab>> {
+ &self.tab_groups
+ }
+
+ /// Get the next tab group window handle.
+ pub fn get_next_tab_group_window(cx: &mut App, id: WindowId) -> Option<&AnyWindowHandle> {
+ let controller = cx.global::<SystemWindowTabController>();
+ let current_group = controller
+ .tab_groups
+ .iter()
+ .find_map(|(group, tabs)| tabs.iter().find(|tab| tab.id == id).map(|_| group));
+
+ let current_group = current_group?;
+ let mut group_ids: Vec<_> = controller.tab_groups.keys().collect();
+ let idx = group_ids.iter().position(|g| *g == current_group)?;
+ let next_idx = (idx + 1) % group_ids.len();
+
+ controller
+ .tab_groups
+ .get(group_ids[next_idx])
+ .and_then(|tabs| {
+ tabs.iter()
+ .max_by_key(|tab| tab.last_active_at)
+ .or_else(|| tabs.first())
+ .map(|tab| &tab.handle)
+ })
+ }
+
+ /// Get the previous tab group window handle.
+ pub fn get_prev_tab_group_window(cx: &mut App, id: WindowId) -> Option<&AnyWindowHandle> {
+ let controller = cx.global::<SystemWindowTabController>();
+ let current_group = controller
+ .tab_groups
+ .iter()
+ .find_map(|(group, tabs)| tabs.iter().find(|tab| tab.id == id).map(|_| group));
+
+ let current_group = current_group?;
+ let mut group_ids: Vec<_> = controller.tab_groups.keys().collect();
+ let idx = group_ids.iter().position(|g| *g == current_group)?;
+ let prev_idx = if idx == 0 {
+ group_ids.len() - 1
+ } else {
+ idx - 1
+ };
+
+ controller
+ .tab_groups
+ .get(group_ids[prev_idx])
+ .and_then(|tabs| {
+ tabs.iter()
+ .max_by_key(|tab| tab.last_active_at)
+ .or_else(|| tabs.first())
+ .map(|tab| &tab.handle)
+ })
+ }
+
+    /// Get all tabs in the tab group containing the given window.
+ pub fn tabs(&self, id: WindowId) -> Option<&Vec<SystemWindowTab>> {
+ let tab_group = self
+ .tab_groups
+ .iter()
+ .find_map(|(group, tabs)| tabs.iter().find(|tab| tab.id == id).map(|_| *group));
+
+ if let Some(tab_group) = tab_group {
+ self.tab_groups.get(&tab_group)
+ } else {
+ None
+ }
+ }
+
+ /// Initialize the visibility of the system window tab controller.
+ pub fn init_visible(cx: &mut App, visible: bool) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ if controller.visible.is_none() {
+ controller.visible = Some(visible);
+ }
+ }
+
+ /// Get the visibility of the system window tab controller.
+ pub fn is_visible(&self) -> bool {
+ self.visible.unwrap_or(false)
+ }
+
+ /// Set the visibility of the system window tab controller.
+ pub fn set_visible(cx: &mut App, visible: bool) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ controller.visible = Some(visible);
+ }
+
+    /// Update the last-active timestamp of a window.
+ pub fn update_last_active(cx: &mut App, id: WindowId) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ for windows in controller.tab_groups.values_mut() {
+ for tab in windows.iter_mut() {
+ if tab.id == id {
+ tab.last_active_at = Instant::now();
+ }
+ }
+ }
+ }
+
+ /// Update the position of a tab within its group.
+ pub fn update_tab_position(cx: &mut App, id: WindowId, ix: usize) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ for (_, windows) in controller.tab_groups.iter_mut() {
+ if let Some(current_pos) = windows.iter().position(|tab| tab.id == id) {
+ if ix < windows.len() && current_pos != ix {
+ let window_tab = windows.remove(current_pos);
+ windows.insert(ix, window_tab);
+ }
+ break;
+ }
+ }
+ }
+
+ /// Update the title of a tab.
+ pub fn update_tab_title(cx: &mut App, id: WindowId, title: SharedString) {
+ let controller = cx.global::<SystemWindowTabController>();
+ let tab = controller
+ .tab_groups
+ .values()
+ .flat_map(|windows| windows.iter())
+ .find(|tab| tab.id == id);
+
+ if tab.map_or(true, |t| t.title == title) {
+ return;
+ }
+
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ for windows in controller.tab_groups.values_mut() {
+ for tab in windows.iter_mut() {
+ if tab.id == id {
+ tab.title = title.clone();
+ }
+ }
+ }
+ }
+
+ /// Insert a tab into a tab group.
+ pub fn add_tab(cx: &mut App, id: WindowId, tabs: Vec<SystemWindowTab>) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ let Some(tab) = tabs.clone().into_iter().find(|tab| tab.id == id) else {
+ return;
+ };
+
+ let mut expected_tab_ids: Vec<_> = tabs
+ .iter()
+ .filter(|tab| tab.id != id)
+ .map(|tab| tab.id)
+ .sorted()
+ .collect();
+
+ let mut tab_group_id = None;
+ for (group_id, group_tabs) in &controller.tab_groups {
+ let tab_ids: Vec<_> = group_tabs.iter().map(|tab| tab.id).sorted().collect();
+ if tab_ids == expected_tab_ids {
+ tab_group_id = Some(*group_id);
+ break;
+ }
+ }
+
+ if let Some(tab_group_id) = tab_group_id {
+ if let Some(tabs) = controller.tab_groups.get_mut(&tab_group_id) {
+ tabs.push(tab);
+ }
+ } else {
+ let new_group_id = controller.tab_groups.len();
+ controller.tab_groups.insert(new_group_id, tabs);
+ }
+ }
+
+ /// Remove a tab from a tab group.
+ pub fn remove_tab(cx: &mut App, id: WindowId) -> Option<SystemWindowTab> {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ let mut removed_tab = None;
+
+ controller.tab_groups.retain(|_, tabs| {
+ if let Some(pos) = tabs.iter().position(|tab| tab.id == id) {
+ removed_tab = Some(tabs.remove(pos));
+ }
+ !tabs.is_empty()
+ });
+
+ removed_tab
+ }
+
+ /// Move a tab to a new tab group.
+ pub fn move_tab_to_new_window(cx: &mut App, id: WindowId) {
+ let mut removed_tab = Self::remove_tab(cx, id);
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+
+ if let Some(tab) = removed_tab {
+ let new_group_id = controller.tab_groups.keys().max().map_or(0, |k| k + 1);
+ controller.tab_groups.insert(new_group_id, vec![tab]);
+ }
+ }
+
+ /// Merge all tab groups into a single group.
+ pub fn merge_all_windows(cx: &mut App, id: WindowId) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ let Some(initial_tabs) = controller.tabs(id) else {
+ return;
+ };
+
+ let mut all_tabs = initial_tabs.clone();
+ for tabs in controller.tab_groups.values() {
+ all_tabs.extend(
+ tabs.iter()
+ .filter(|tab| !initial_tabs.contains(tab))
+ .cloned(),
+ );
+ }
+
+ controller.tab_groups.clear();
+ controller.tab_groups.insert(0, all_tabs);
+ }
+
+ /// Selects the next tab in the tab group in the trailing direction.
+ pub fn select_next_tab(cx: &mut App, id: WindowId) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ let Some(tabs) = controller.tabs(id) else {
+ return;
+ };
+
+ let current_index = tabs.iter().position(|tab| tab.id == id).unwrap();
+ let next_index = (current_index + 1) % tabs.len();
+
+ let _ = &tabs[next_index].handle.update(cx, |_, window, _| {
+ window.activate_window();
+ });
+ }
+
+ /// Selects the previous tab in the tab group in the leading direction.
+ pub fn select_previous_tab(cx: &mut App, id: WindowId) {
+ let mut controller = cx.global_mut::<SystemWindowTabController>();
+ let Some(tabs) = controller.tabs(id) else {
+ return;
+ };
+
+ let current_index = tabs.iter().position(|tab| tab.id == id).unwrap();
+ let previous_index = if current_index == 0 {
+ tabs.len() - 1
+ } else {
+ current_index - 1
+ };
+
+ let _ = &tabs[previous_index].handle.update(cx, |_, window, _| {
+ window.activate_window();
+ });
+ }
+}
+
/// Contains the state of the full application, and passed as a reference to a variety of callbacks.
/// Other [Context] derefs to this type.
/// You need a reference to an `App` to access the state of a [Entity].
@@ -263,6 +561,7 @@ pub struct App {
pub(crate) focus_handles: Arc<FocusMap>,
pub(crate) keymap: Rc<RefCell<Keymap>>,
pub(crate) keyboard_layout: Box<dyn PlatformKeyboardLayout>,
+ pub(crate) keyboard_mapper: Rc<dyn PlatformKeyboardMapper>,
pub(crate) global_action_listeners:
FxHashMap<TypeId, Vec<Rc<dyn Fn(&dyn Any, DispatchPhase, &mut Self)>>>,
pending_effects: VecDeque<Effect>,
@@ -312,6 +611,7 @@ impl App {
let text_system = Arc::new(TextSystem::new(platform.text_system()));
let entities = EntityMap::new();
let keyboard_layout = platform.keyboard_layout();
+ let keyboard_mapper = platform.keyboard_mapper();
let app = Rc::new_cyclic(|this| AppCell {
app: RefCell::new(App {
@@ -337,6 +637,7 @@ impl App {
focus_handles: Arc::new(RwLock::new(SlotMap::with_key())),
keymap: Rc::new(RefCell::new(Keymap::default())),
keyboard_layout,
+ keyboard_mapper,
global_action_listeners: FxHashMap::default(),
pending_effects: VecDeque::new(),
pending_notifications: FxHashSet::default(),
@@ -369,6 +670,7 @@ impl App {
});
init_app_menus(platform.as_ref(), &app.borrow());
+ SystemWindowTabController::init(&mut app.borrow_mut());
platform.on_keyboard_layout_change(Box::new({
let app = Rc::downgrade(&app);
@@ -376,6 +678,7 @@ impl App {
if let Some(app) = app.upgrade() {
let cx = &mut app.borrow_mut();
cx.keyboard_layout = cx.platform.keyboard_layout();
+ cx.keyboard_mapper = cx.platform.keyboard_mapper();
cx.keyboard_layout_observers
.clone()
.retain(&(), move |callback| (callback)(cx));
@@ -424,6 +727,11 @@ impl App {
self.keyboard_layout.as_ref()
}
+ /// Get the current keyboard mapper.
+ pub fn keyboard_mapper(&self) -> &Rc<dyn PlatformKeyboardMapper> {
+ &self.keyboard_mapper
+ }
+
/// Invokes a handler when the current keyboard layout changes
pub fn on_keyboard_layout_change<F>(&self, mut callback: F) -> Subscription
where
@@ -1050,12 +1358,7 @@ impl App {
F: FnOnce(AnyView, &mut Window, &mut App) -> T,
{
self.update(|cx| {
- let mut window = cx
- .windows
- .get_mut(id)
- .context("window not found")?
- .take()
- .context("window not found")?;
+ let mut window = cx.windows.get_mut(id)?.take()?;
let root_view = window.root.clone().unwrap();
@@ -1072,15 +1375,14 @@ impl App {
true
});
} else {
- cx.windows
- .get_mut(id)
- .context("window not found")?
- .replace(window);
+ cx.windows.get_mut(id)?.replace(window);
}
- Ok(result)
+ Some(result)
})
+ .context("window not found")
}
+
/// Creates an `AsyncApp`, which can be cloned and has a static lifetime
/// so it can be held across `await` points.
pub fn to_async(&self) -> AsyncApp {
@@ -1707,8 +2009,8 @@ impl App {
.unwrap_or_else(|| {
is_first = true;
let future = A::load(source.clone(), self);
- let task = self.background_executor().spawn(future).shared();
- task
+
+ self.background_executor().spawn(future).shared()
});
self.loading_assets.insert(asset_id, Box::new(task.clone()));
@@ -2005,7 +2307,7 @@ pub struct AnyDrag {
}
/// Contains state associated with a tooltip. You'll only need this struct if you're implementing
-/// tooltip behavior on a custom element. Otherwise, use [Div::tooltip].
+/// tooltip behavior on a custom element. Otherwise, use [Div::tooltip](crate::Interactivity::tooltip).
#[derive(Clone)]
pub struct AnyTooltip {
/// The view used to display the tooltip
@@ -218,7 +218,7 @@ impl AsyncApp {
Some(read(app.try_global()?, &app))
}
- /// A convenience method for [App::update_global]
+ /// A convenience method for [`App::update_global`](BorrowAppContext::update_global)
/// for updating the global state of the specified type.
pub fn update_global<G: Global, R>(
&self,
@@ -293,7 +293,7 @@ impl AsyncWindowContext {
.update(self, |_, window, cx| read(cx.global(), window, cx))
}
- /// A convenience method for [`App::update_global`].
+ /// A convenience method for [`App::update_global`](BorrowAppContext::update_global).
/// for updating the global state of the specified type.
pub fn update_global<G, R>(
&mut self,
@@ -465,7 +465,7 @@ impl VisualContext for AsyncWindowContext {
V: Focusable,
{
self.window.update(self, |_, window, cx| {
- view.read(cx).focus_handle(cx).clone().focus(window);
+ view.read(cx).focus_handle(cx).focus(window);
})
}
}
@@ -231,14 +231,15 @@ impl AnyEntity {
Self {
entity_id: id,
entity_type,
- entity_map: entity_map.clone(),
#[cfg(any(test, feature = "leak-detection"))]
handle_id: entity_map
+ .clone()
.upgrade()
.unwrap()
.write()
.leak_detector
.handle_created(id),
+ entity_map,
}
}
@@ -134,7 +134,7 @@ impl TestAppContext {
app: App::new_app(platform.clone(), asset_source, http_client),
background_executor,
foreground_executor,
- dispatcher: dispatcher.clone(),
+ dispatcher,
test_platform: platform,
text_system,
fn_name,
@@ -144,7 +144,7 @@ impl TestAppContext {
/// Create a single TestAppContext, for non-multi-client tests
pub fn single() -> Self {
- let dispatcher = TestDispatcher::new(StdRng::from_entropy());
+ let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0));
Self::build(dispatcher, None)
}
@@ -192,6 +192,7 @@ impl TestAppContext {
&self.foreground_executor
}
+ #[expect(clippy::wrong_self_convention)]
fn new<T: 'static>(&mut self, build_entity: impl FnOnce(&mut Context<T>) -> T) -> Entity<T> {
let mut cx = self.app.borrow_mut();
cx.new(build_entity)
@@ -244,7 +245,7 @@ impl TestAppContext {
)
.unwrap();
drop(cx);
- let cx = VisualTestContext::from_window(*window.deref(), self).as_mut();
+ let cx = VisualTestContext::from_window(*window.deref(), self).into_mut();
cx.run_until_parked();
cx
}
@@ -273,7 +274,7 @@ impl TestAppContext {
.unwrap();
drop(cx);
let view = window.root(self).unwrap();
- let cx = VisualTestContext::from_window(*window.deref(), self).as_mut();
+ let cx = VisualTestContext::from_window(*window.deref(), self).into_mut();
cx.run_until_parked();
// it might be nice to try and cleanup these at the end of each test.
@@ -338,7 +339,7 @@ impl TestAppContext {
/// Returns all windows open in the test.
pub fn windows(&self) -> Vec<AnyWindowHandle> {
- self.app.borrow().windows().clone()
+ self.app.borrow().windows()
}
/// Run the given task on the main thread.
@@ -618,7 +619,7 @@ impl<V> Entity<V> {
}
}),
cx.subscribe(self, {
- let mut tx = tx.clone();
+ let mut tx = tx;
move |_, _: &Evt, _| {
tx.blocking_send(()).ok();
}
@@ -882,7 +883,7 @@ impl VisualTestContext {
/// Get an &mut VisualTestContext (which is mostly what you need to pass to other methods).
/// This method internally retains the VisualTestContext until the end of the test.
- pub fn as_mut(self) -> &'static mut Self {
+ pub fn into_mut(self) -> &'static mut Self {
let ptr = Box::into_raw(Box::new(self));
// safety: on_quit will be called after the test has finished.
// the executor will ensure that all tasks related to the test have stopped.
@@ -1025,7 +1026,7 @@ impl VisualContext for VisualTestContext {
fn focus<V: crate::Focusable>(&mut self, view: &Entity<V>) -> Self::Result<()> {
self.window
.update(&mut self.cx, |_, window, cx| {
- view.read(cx).focus_handle(cx).clone().focus(window)
+ view.read(cx).focus_handle(cx).focus(window)
})
.unwrap()
}
@@ -1,8 +1,9 @@
use std::{
alloc::{self, handle_alloc_error},
cell::Cell,
+ num::NonZeroUsize,
ops::{Deref, DerefMut},
- ptr,
+ ptr::{self, NonNull},
rc::Rc,
};
@@ -30,23 +31,23 @@ impl Drop for Chunk {
fn drop(&mut self) {
unsafe {
let chunk_size = self.end.offset_from_unsigned(self.start);
- // this never fails as it succeeded during allocation
- let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
+ // SAFETY: This succeeded during allocation.
+ let layout = alloc::Layout::from_size_align_unchecked(chunk_size, 1);
alloc::dealloc(self.start, layout);
}
}
}
impl Chunk {
- fn new(chunk_size: usize) -> Self {
+ fn new(chunk_size: NonZeroUsize) -> Self {
unsafe {
// this only fails if chunk_size is unreasonably huge
- let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
+ let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap();
let start = alloc::alloc(layout);
if start.is_null() {
handle_alloc_error(layout);
}
- let end = start.add(chunk_size);
+ let end = start.add(chunk_size.get());
Self {
start,
end,
@@ -55,14 +56,14 @@ impl Chunk {
}
}
- fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> {
+ fn allocate(&mut self, layout: alloc::Layout) -> Option<NonNull<u8>> {
unsafe {
let aligned = self.offset.add(self.offset.align_offset(layout.align()));
let next = aligned.add(layout.size());
if next <= self.end {
self.offset = next;
- Some(aligned)
+ NonNull::new(aligned)
} else {
None
}
@@ -79,7 +80,7 @@ pub struct Arena {
elements: Vec<ArenaElement>,
valid: Rc<Cell<bool>>,
current_chunk_index: usize,
- chunk_size: usize,
+ chunk_size: NonZeroUsize,
}
impl Drop for Arena {
@@ -90,7 +91,7 @@ impl Drop for Arena {
impl Arena {
pub fn new(chunk_size: usize) -> Self {
- assert!(chunk_size > 0);
+ let chunk_size = NonZeroUsize::try_from(chunk_size).unwrap();
Self {
chunks: vec![Chunk::new(chunk_size)],
elements: Vec::new(),
@@ -101,7 +102,7 @@ impl Arena {
}
pub fn capacity(&self) -> usize {
- self.chunks.len() * self.chunk_size
+ self.chunks.len() * self.chunk_size.get()
}
pub fn clear(&mut self) {
@@ -136,20 +137,20 @@ impl Arena {
let layout = alloc::Layout::new::<T>();
let mut current_chunk = &mut self.chunks[self.current_chunk_index];
let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
- ptr
+ ptr.as_ptr()
} else {
self.current_chunk_index += 1;
if self.current_chunk_index >= self.chunks.len() {
self.chunks.push(Chunk::new(self.chunk_size));
assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
- log::info!(
+ log::trace!(
"increased element arena capacity to {}kb",
self.capacity() / 1024,
);
}
current_chunk = &mut self.chunks[self.current_chunk_index];
if let Some(ptr) = current_chunk.allocate(layout) {
- ptr
+ ptr.as_ptr()
} else {
panic!(
"Arena chunk_size of {} is too small to allocate {} bytes",
@@ -1,4 +1,4 @@
-use crate::{DevicePixels, Result, SharedString, Size, size};
+use crate::{DevicePixels, Pixels, Result, SharedString, Size, size};
use smallvec::SmallVec;
use image::{Delay, Frame};
@@ -42,6 +42,8 @@ pub(crate) struct RenderImageParams {
pub struct RenderImage {
/// The ID associated with this image
pub id: ImageId,
+    /// The scale factor applied to this image when it is rendered.
+ pub(crate) scale_factor: f32,
data: SmallVec<[Frame; 1]>,
}
@@ -60,6 +62,7 @@ impl RenderImage {
Self {
id: ImageId(NEXT_ID.fetch_add(1, SeqCst)),
+ scale_factor: 1.0,
data: data.into(),
}
}
@@ -77,6 +80,12 @@ impl RenderImage {
size(width.into(), height.into())
}
+ /// Get the size of this image, in pixels for display, adjusted for the scale factor.
+ pub(crate) fn render_size(&self, frame_index: usize) -> Size<Pixels> {
+ self.size(frame_index)
+ .map(|v| (v.0 as f32 / self.scale_factor).into())
+ }
+
/// Get the delay of this frame from the previous
pub fn delay(&self, frame_index: usize) -> Delay {
self.data[frame_index].delay()
@@ -309,12 +309,12 @@ mod tests {
let mut expected_quads: Vec<(Bounds<f32>, u32)> = Vec::new();
// Insert a random number of random AABBs into the tree.
- let num_bounds = rng.gen_range(1..=max_bounds);
+ let num_bounds = rng.random_range(1..=max_bounds);
for _ in 0..num_bounds {
- let min_x: f32 = rng.gen_range(-100.0..100.0);
- let min_y: f32 = rng.gen_range(-100.0..100.0);
- let width: f32 = rng.gen_range(0.0..50.0);
- let height: f32 = rng.gen_range(0.0..50.0);
+ let min_x: f32 = rng.random_range(-100.0..100.0);
+ let min_y: f32 = rng.random_range(-100.0..100.0);
+ let width: f32 = rng.random_range(0.0..50.0);
+ let height: f32 = rng.random_range(0.0..50.0);
let bounds = Bounds {
origin: Point { x: min_x, y: min_y },
size: Size { width, height },
@@ -473,6 +473,11 @@ impl Hsla {
self.a == 0.0
}
+ /// Returns true if the HSLA color is fully opaque, false otherwise.
+ pub fn is_opaque(&self) -> bool {
+ self.a == 1.0
+ }
+
/// Blends `other` on top of `self` based on `other`'s alpha value. The resulting color is a combination of `self`'s and `other`'s colors.
///
/// If `other`'s alpha value is 1.0 or greater, `other` color is fully opaque, thus `other` is returned as the output color.
@@ -905,9 +910,9 @@ mod tests {
assert_eq!(background.solid, color);
assert_eq!(background.opacity(0.5).solid, color.opacity(0.5));
- assert_eq!(background.is_transparent(), false);
+ assert!(!background.is_transparent());
background.solid = hsla(0.0, 0.0, 0.0, 0.0);
- assert_eq!(background.is_transparent(), true);
+ assert!(background.is_transparent());
}
#[test]
@@ -921,7 +926,7 @@ mod tests {
assert_eq!(background.opacity(0.5).colors[0], from.opacity(0.5));
assert_eq!(background.opacity(0.5).colors[1], to.opacity(0.5));
- assert_eq!(background.is_transparent(), false);
- assert_eq!(background.opacity(0.0).is_transparent(), true);
+ assert!(!background.is_transparent());
+ assert!(background.opacity(0.0).is_transparent());
}
}
@@ -88,9 +88,9 @@ impl Deref for GlobalColors {
impl Global for GlobalColors {}
-/// Implement this trait to allow global [Color] access via `cx.default_colors()`.
+/// Implement this trait to allow global [Colors] access via `cx.default_colors()`.
pub trait DefaultColors {
- /// Returns the default [`gpui::Colors`]
+ /// Returns the default [`Colors`]
fn default_colors(&self) -> &Arc<Colors>;
}
@@ -14,13 +14,13 @@
//! tree and any callbacks they have registered with GPUI are dropped and the process repeats.
//!
//! But some state is too simple and voluminous to store in every view that needs it, e.g.
-//! whether a hover has been started or not. For this, GPUI provides the [`Element::State`], associated type.
+//! whether a hover has been started or not. For this, GPUI provides the [`Element::PrepaintState`], associated type.
//!
//! # Implementing your own elements
//!
//! Elements are intended to be the low level, imperative API to GPUI. They are responsible for upholding,
//! or breaking, GPUI's features as they deem necessary. As an example, most GPUI elements are expected
-//! to stay in the bounds that their parent element gives them. But with [`WindowContext::break_content_mask`],
+//! to stay in the bounds that their parent element gives them. But with [`Window::with_content_mask`],
//! you can ignore this restriction and paint anywhere inside of the window's bounds. This is useful for overlays
//! and popups and anything else that shows up 'on top' of other elements.
//! With great power, comes great responsibility.
@@ -87,7 +87,7 @@ pub trait AnimationExt {
}
}
-impl<E> AnimationExt for E {}
+impl<E: IntoElement + 'static> AnimationExt for E {}
/// A GPUI element that applies an animation to another element
pub struct AnimationElement<E> {
@@ -533,7 +533,7 @@ impl Interactivity {
}
/// Use the given callback to construct a new tooltip view when the mouse hovers over this element.
- /// The imperative API equivalent to [`InteractiveElement::tooltip`]
+ /// The imperative API equivalent to [`StatefulInteractiveElement::tooltip`]
pub fn tooltip(&mut self, build_tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static)
where
Self: Sized,
@@ -550,7 +550,7 @@ impl Interactivity {
/// Use the given callback to construct a new tooltip view when the mouse hovers over this element.
/// The tooltip itself is also hoverable and won't disappear when the user moves the mouse into
- /// the tooltip. The imperative API equivalent to [`InteractiveElement::hoverable_tooltip`]
+ /// the tooltip. The imperative API equivalent to [`StatefulInteractiveElement::hoverable_tooltip`]
pub fn hoverable_tooltip(
&mut self,
build_tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static,
@@ -676,7 +676,7 @@ pub trait InteractiveElement: Sized {
#[cfg(any(test, feature = "test-support"))]
/// Set a key that can be used to look up this element's bounds
- /// in the [`VisualTestContext::debug_bounds`] map
+ /// in the [`crate::VisualTestContext::debug_bounds`] map
/// This is a noop in release builds
fn debug_selector(mut self, f: impl FnOnce() -> String) -> Self {
self.interactivity().debug_selector = Some(f());
@@ -685,7 +685,7 @@ pub trait InteractiveElement: Sized {
#[cfg(not(any(test, feature = "test-support")))]
/// Set a key that can be used to look up this element's bounds
- /// in the [`VisualTestContext::debug_bounds`] map
+ /// in the [`crate::VisualTestContext::debug_bounds`] map
/// This is a noop in release builds
#[inline]
fn debug_selector(self, _: impl FnOnce() -> String) -> Self {
@@ -1087,7 +1087,7 @@ pub trait StatefulInteractiveElement: InteractiveElement {
/// On drag initiation, this callback will be used to create a new view to render the dragged value for a
/// drag and drop operation. This API should also be used as the equivalent of 'on drag start' with
- /// the [`Self::on_drag_move`] API.
+ /// the [`InteractiveElement::on_drag_move`] API.
/// The callback also has access to the offset of triggering click from the origin of parent element.
/// The fluent API equivalent to [`Interactivity::on_drag`]
///
@@ -332,20 +332,18 @@ impl Element for Img {
state.started_loading = None;
}
- let image_size = data.size(frame_index);
- style.aspect_ratio =
- Some(image_size.width.0 as f32 / image_size.height.0 as f32);
+ let image_size = data.render_size(frame_index);
+ style.aspect_ratio = Some(image_size.width / image_size.height);
if let Length::Auto = style.size.width {
style.size.width = match style.size.height {
Length::Definite(DefiniteLength::Absolute(
AbsoluteLength::Pixels(height),
)) => Length::Definite(
- px(image_size.width.0 as f32 * height.0
- / image_size.height.0 as f32)
- .into(),
+ px(image_size.width.0 * height.0 / image_size.height.0)
+ .into(),
),
- _ => Length::Definite(px(image_size.width.0 as f32).into()),
+ _ => Length::Definite(image_size.width.into()),
};
}
@@ -354,11 +352,10 @@ impl Element for Img {
Length::Definite(DefiniteLength::Absolute(
AbsoluteLength::Pixels(width),
)) => Length::Definite(
- px(image_size.height.0 as f32 * width.0
- / image_size.width.0 as f32)
- .into(),
+ px(image_size.height.0 * width.0 / image_size.width.0)
+ .into(),
),
- _ => Length::Definite(px(image_size.height.0 as f32).into()),
+ _ => Length::Definite(image_size.height.into()),
};
}
@@ -475,7 +472,7 @@ impl Element for Img {
.paint_image(
new_bounds,
corner_radii,
- data.clone(),
+ data,
layout_state.frame_index,
self.style.grayscale,
)
@@ -701,7 +698,9 @@ impl Asset for ImageAssetLoader {
swap_rgba_pa_to_bgra(pixel);
}
- RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1))
+ let mut image = RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1));
+ image.scale_factor = SMOOTH_SVG_SCALE_FACTOR;
+ image
};
Ok(Arc::new(data))
@@ -5,7 +5,7 @@
//! In order to minimize re-renders, this element's state is stored intrusively
//! on your own views, so that your code can coordinate directly with the list element's cached state.
//!
-//! If all of your elements are the same height, see [`UniformList`] for a simpler API
+//! If all of your elements are the same height, see [`crate::UniformList`] for a simpler API
use crate::{
AnyElement, App, AvailableSpace, Bounds, ContentMask, DispatchPhase, Edges, Element, EntityId,
@@ -235,7 +235,7 @@ impl ListState {
}
/// Register with the list state that the items in `old_range` have been replaced
- /// by new items. As opposed to [`splice`], this method allows an iterator of optional focus handles
+ /// by new items. As opposed to [`Self::splice`], this method allows an iterator of optional focus handles
/// to be supplied to properly integrate with items in the list that can be focused. If a focused item
/// is scrolled out of view, the list will continue to render it to allow keyboard interaction.
pub fn splice_focusable(
@@ -326,7 +326,7 @@ impl TextLayout {
vec![text_style.to_run(text.len())]
};
- let layout_id = window.request_measured_layout(Default::default(), {
+ window.request_measured_layout(Default::default(), {
let element_state = self.clone();
move |known_dimensions, available_space, window, cx| {
@@ -416,9 +416,7 @@ impl TextLayout {
size
}
- });
-
- layout_id
+ })
}
fn prepaint(&self, bounds: Bounds<Pixels>, text: &str) {
@@ -391,7 +391,7 @@ impl BackgroundExecutor {
}
/// in tests, run all tasks that are ready to run. If after doing so
- /// the test still has outstanding tasks, this will panic. (See also `allow_parking`)
+ /// the test still has outstanding tasks, this will panic. (See also [`Self::allow_parking`])
#[cfg(any(test, feature = "test-support"))]
pub fn run_until_parked(&self) {
self.dispatcher.as_test().unwrap().run_until_parked()
@@ -405,7 +405,7 @@ impl BackgroundExecutor {
self.dispatcher.as_test().unwrap().allow_parking();
}
- /// undoes the effect of [`allow_parking`].
+ /// undoes the effect of [`Self::allow_parking`].
#[cfg(any(test, feature = "test-support"))]
pub fn forbid_parking(&self) {
self.dispatcher.as_test().unwrap().forbid_parking();
@@ -480,7 +480,7 @@ impl ForegroundExecutor {
/// Variant of `async_task::spawn_local` that includes the source location of the spawn in panics.
///
/// Copy-modified from:
-/// https://github.com/smol-rs/async-task/blob/ca9dbe1db9c422fd765847fa91306e30a6bb58a9/src/runnable.rs#L405
+/// <https://github.com/smol-rs/async-task/blob/ca9dbe1db9c422fd765847fa91306e30a6bb58a9/src/runnable.rs#L405>
#[track_caller]
fn spawn_local_with_source_location<Fut, S>(
future: Fut,
@@ -1046,7 +1046,7 @@ where
size: self.size.clone()
+ size(
amount.left.clone() + amount.right.clone(),
- amount.top.clone() + amount.bottom.clone(),
+ amount.top.clone() + amount.bottom,
),
}
}
@@ -1159,10 +1159,10 @@ where
/// Computes the space available within outer bounds.
pub fn space_within(&self, outer: &Self) -> Edges<T> {
Edges {
- top: self.top().clone() - outer.top().clone(),
- right: outer.right().clone() - self.right().clone(),
- bottom: outer.bottom().clone() - self.bottom().clone(),
- left: self.left().clone() - outer.left().clone(),
+ top: self.top() - outer.top(),
+ right: outer.right() - self.right(),
+ bottom: outer.bottom() - self.bottom(),
+ left: self.left() - outer.left(),
}
}
}
@@ -1641,7 +1641,7 @@ impl Bounds<Pixels> {
}
/// Convert the bounds from logical pixels to physical pixels
- pub fn to_device_pixels(&self, factor: f32) -> Bounds<DevicePixels> {
+ pub fn to_device_pixels(self, factor: f32) -> Bounds<DevicePixels> {
Bounds {
origin: point(
DevicePixels((self.origin.x.0 * factor).round() as i32),
@@ -1712,7 +1712,7 @@ where
top: self.top.clone() * rhs.top,
right: self.right.clone() * rhs.right,
bottom: self.bottom.clone() * rhs.bottom,
- left: self.left.clone() * rhs.left,
+ left: self.left * rhs.left,
}
}
}
@@ -1957,7 +1957,7 @@ impl Edges<DefiniteLength> {
/// assert_eq!(edges_in_pixels.bottom, px(32.0)); // 2 rems
/// assert_eq!(edges_in_pixels.left, px(50.0)); // 25% of parent width
/// ```
- pub fn to_pixels(&self, parent_size: Size<AbsoluteLength>, rem_size: Pixels) -> Edges<Pixels> {
+ pub fn to_pixels(self, parent_size: Size<AbsoluteLength>, rem_size: Pixels) -> Edges<Pixels> {
Edges {
top: self.top.to_pixels(parent_size.height, rem_size),
right: self.right.to_pixels(parent_size.width, rem_size),
@@ -2027,7 +2027,7 @@ impl Edges<AbsoluteLength> {
/// assert_eq!(edges_in_pixels.bottom, px(20.0)); // Already in pixels
/// assert_eq!(edges_in_pixels.left, px(32.0)); // 2 rems converted to pixels
/// ```
- pub fn to_pixels(&self, rem_size: Pixels) -> Edges<Pixels> {
+ pub fn to_pixels(self, rem_size: Pixels) -> Edges<Pixels> {
Edges {
top: self.top.to_pixels(rem_size),
right: self.right.to_pixels(rem_size),
@@ -2272,7 +2272,7 @@ impl Corners<AbsoluteLength> {
/// assert_eq!(corners_in_pixels.bottom_right, Pixels(30.0));
/// assert_eq!(corners_in_pixels.bottom_left, Pixels(32.0)); // 2 rems converted to pixels
/// ```
- pub fn to_pixels(&self, rem_size: Pixels) -> Corners<Pixels> {
+ pub fn to_pixels(self, rem_size: Pixels) -> Corners<Pixels> {
Corners {
top_left: self.top_left.to_pixels(rem_size),
top_right: self.top_right.to_pixels(rem_size),
@@ -2411,7 +2411,7 @@ where
top_left: self.top_left.clone() * rhs.top_left,
top_right: self.top_right.clone() * rhs.top_right,
bottom_right: self.bottom_right.clone() * rhs.bottom_right,
- bottom_left: self.bottom_left.clone() * rhs.bottom_left,
+ bottom_left: self.bottom_left * rhs.bottom_left,
}
}
}
@@ -2858,7 +2858,7 @@ impl DevicePixels {
/// let total_bytes = pixels.to_bytes(bytes_per_pixel);
/// assert_eq!(total_bytes, 40); // 10 pixels * 4 bytes/pixel = 40 bytes
/// ```
- pub fn to_bytes(&self, bytes_per_pixel: u8) -> u32 {
+ pub fn to_bytes(self, bytes_per_pixel: u8) -> u32 {
self.0 as u32 * bytes_per_pixel as u32
}
}
@@ -3073,8 +3073,8 @@ pub struct Rems(pub f32);
impl Rems {
/// Convert this Rem value to pixels.
- pub fn to_pixels(&self, rem_size: Pixels) -> Pixels {
- *self * rem_size
+ pub fn to_pixels(self, rem_size: Pixels) -> Pixels {
+ self * rem_size
}
}
@@ -3168,9 +3168,9 @@ impl AbsoluteLength {
/// assert_eq!(length_in_pixels.to_pixels(rem_size), Pixels(42.0));
/// assert_eq!(length_in_rems.to_pixels(rem_size), Pixels(32.0));
/// ```
- pub fn to_pixels(&self, rem_size: Pixels) -> Pixels {
+ pub fn to_pixels(self, rem_size: Pixels) -> Pixels {
match self {
- AbsoluteLength::Pixels(pixels) => *pixels,
+ AbsoluteLength::Pixels(pixels) => pixels,
AbsoluteLength::Rems(rems) => rems.to_pixels(rem_size),
}
}
@@ -3184,10 +3184,10 @@ impl AbsoluteLength {
/// # Returns
///
/// Returns the `AbsoluteLength` as `Pixels`.
- pub fn to_rems(&self, rem_size: Pixels) -> Rems {
+ pub fn to_rems(self, rem_size: Pixels) -> Rems {
match self {
AbsoluteLength::Pixels(pixels) => Rems(pixels.0 / rem_size.0),
- AbsoluteLength::Rems(rems) => *rems,
+ AbsoluteLength::Rems(rems) => rems,
}
}
}
@@ -3315,12 +3315,12 @@ impl DefiniteLength {
/// assert_eq!(length_in_rems.to_pixels(base_size, rem_size), Pixels(32.0));
/// assert_eq!(length_as_fraction.to_pixels(base_size, rem_size), Pixels(50.0));
/// ```
- pub fn to_pixels(&self, base_size: AbsoluteLength, rem_size: Pixels) -> Pixels {
+ pub fn to_pixels(self, base_size: AbsoluteLength, rem_size: Pixels) -> Pixels {
match self {
DefiniteLength::Absolute(size) => size.to_pixels(rem_size),
DefiniteLength::Fraction(fraction) => match base_size {
- AbsoluteLength::Pixels(px) => px * *fraction,
- AbsoluteLength::Rems(rems) => rems * rem_size * *fraction,
+ AbsoluteLength::Pixels(px) => px * fraction,
+ AbsoluteLength::Rems(rems) => rems * rem_size * fraction,
},
}
}
@@ -24,7 +24,7 @@
//! - State management and communication with [`Entity`]'s. Whenever you need to store application state
//! that communicates between different parts of your application, you'll want to use GPUI's
//! entities. Entities are owned by GPUI and are only accessible through an owned smart pointer
-//! similar to an [`std::rc::Rc`]. See the [`app::context`] module for more information.
+//! similar to an [`std::rc::Rc`]. See [`app::Context`] for more information.
//!
//! - High level, declarative UI with views. All UI in GPUI starts with a view. A view is simply
//! a [`Entity`] that can be rendered, by implementing the [`Render`] trait. At the start of each frame, GPUI
@@ -37,7 +37,7 @@
//! provide a nice wrapper around an imperative API that provides as much flexibility and control as
//! you need. Elements have total control over how they and their child elements are rendered and
//! can be used for making efficient views into large lists, implement custom layouting for a code editor,
-//! and anything else you can think of. See the [`element`] module for more information.
+//! and anything else you can think of. See the [`elements`] module for more information.
//!
//! Each of these registers has one or more corresponding contexts that can be accessed from all GPUI services.
//! This context is your main interface to GPUI, and is used extensively throughout the framework.
@@ -51,9 +51,9 @@
//! Use this for implementing keyboard shortcuts, such as cmd-q (See `action` module for more information).
//! - Platform services, such as `quit the app` or `open a URL` are available as methods on the [`app::App`].
//! - An async executor that is integrated with the platform's event loop. See the [`executor`] module for more information.,
-//! - The [`gpui::test`](test) macro provides a convenient way to write tests for your GPUI applications. Tests also have their
-//! own kind of context, a [`TestAppContext`] which provides ways of simulating common platform input. See [`app::test_context`]
-//! and [`test`] modules for more details.
+//! - The [`gpui::test`](macro@test) macro provides a convenient way to write tests for your GPUI applications. Tests also have their
+//! own kind of context, a [`TestAppContext`] which provides ways of simulating common platform input. See [`TestAppContext`]
+//! and [`mod@test`] modules for more details.
//!
//! Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop
//! a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples,
@@ -117,7 +117,7 @@ pub mod private {
mod seal {
/// A mechanism for restricting implementations of a trait to only those in GPUI.
- /// See: https://predr.ag/blog/definitive-guide-to-sealed-traits-in-rust/
+ /// See: <https://predr.ag/blog/definitive-guide-to-sealed-traits-in-rust/>
pub trait Sealed {}
}
@@ -172,6 +172,10 @@ pub trait AppContext {
type Result<T>;
/// Create a new entity in the app context.
+ #[expect(
+ clippy::wrong_self_convention,
+        reason = "`App::new` is a ubiquitous function for creating entities"
+ )]
fn new<T: 'static>(
&mut self,
build_entity: impl FnOnce(&mut Context<T>) -> T,
@@ -348,7 +352,7 @@ impl<T> Flatten<T> for Result<T> {
}
/// Information about the GPU GPUI is running on.
-#[derive(Default, Debug)]
+#[derive(Default, Debug, serde::Serialize, serde::Deserialize, Clone)]
pub struct GpuSpecs {
/// Whether the GPU is really a fake (like `llvmpipe`) running on the CPU.
pub is_software_emulated: bool,
@@ -72,7 +72,7 @@ pub trait EntityInputHandler: 'static + Sized {
) -> Option<usize>;
}
-/// The canonical implementation of [`PlatformInputHandler`]. Call [`Window::handle_input`]
+/// The canonical implementation of [`crate::PlatformInputHandler`]. Call [`Window::handle_input`]
/// with an instance during your element's paint.
pub struct ElementInputHandler<V> {
view: Entity<V>,
@@ -552,7 +552,7 @@ impl DispatchTree {
let mut current_node_id = Some(target);
while let Some(node_id) = current_node_id {
dispatch_path.push(node_id);
- current_node_id = self.nodes[node_id.0].parent;
+ current_node_id = self.nodes.get(node_id.0).and_then(|node| node.parent);
}
dispatch_path.reverse(); // Reverse the path so it goes from the root to the focused node.
dispatch_path
@@ -4,7 +4,7 @@ mod context;
pub use binding::*;
pub use context::*;
-use crate::{Action, Keystroke, is_no_action};
+use crate::{Action, AsKeystroke, Keystroke, is_no_action};
use collections::{HashMap, HashSet};
use smallvec::SmallVec;
use std::any::TypeId;
@@ -141,7 +141,7 @@ impl Keymap {
/// only.
pub fn bindings_for_input(
&self,
- input: &[Keystroke],
+ input: &[impl AsKeystroke],
context_stack: &[KeyContext],
) -> (SmallVec<[KeyBinding; 1]>, bool) {
let mut matched_bindings = SmallVec::<[(usize, BindingIndex, &KeyBinding); 1]>::new();
@@ -192,7 +192,6 @@ impl Keymap {
(bindings, !pending.is_empty())
}
-
/// Check if the given binding is enabled, given a certain key context.
/// Returns the deepest depth at which the binding matches, or None if it doesn't match.
fn binding_enabled(&self, binding: &KeyBinding, contexts: &[KeyContext]) -> Option<usize> {
@@ -264,7 +263,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let (result, pending) = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-a").unwrap()],
@@ -290,7 +289,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// binding is only enabled in a specific context
assert!(
@@ -344,7 +343,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let space = || Keystroke::parse("space").unwrap();
let w = || Keystroke::parse("w").unwrap();
@@ -364,29 +363,29 @@ mod tests {
// Ensure `space` results in pending input on the workspace, but not editor
let space_workspace = keymap.bindings_for_input(&[space()], &workspace_context());
assert!(space_workspace.0.is_empty());
- assert_eq!(space_workspace.1, true);
+ assert!(space_workspace.1);
let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
- assert_eq!(space_editor.1, false);
+ assert!(!space_editor.1);
// Ensure `space w` results in pending input on the workspace, but not editor
let space_w_workspace = keymap.bindings_for_input(&space_w, &workspace_context());
assert!(space_w_workspace.0.is_empty());
- assert_eq!(space_w_workspace.1, true);
+ assert!(space_w_workspace.1);
let space_w_editor = keymap.bindings_for_input(&space_w, &editor_workspace_context());
assert!(space_w_editor.0.is_empty());
- assert_eq!(space_w_editor.1, false);
+ assert!(!space_w_editor.1);
// Ensure `space w w` results in the binding in the workspace, but not in the editor
let space_w_w_workspace = keymap.bindings_for_input(&space_w_w, &workspace_context());
assert!(!space_w_w_workspace.0.is_empty());
- assert_eq!(space_w_w_workspace.1, false);
+ assert!(!space_w_w_workspace.1);
let space_w_w_editor = keymap.bindings_for_input(&space_w_w, &editor_workspace_context());
assert!(space_w_w_editor.0.is_empty());
- assert_eq!(space_w_w_editor.1, false);
+ assert!(!space_w_w_editor.1);
// Now test what happens if we have another binding defined AFTER the NoAction
// that should result in pending
@@ -396,11 +395,11 @@ mod tests {
KeyBinding::new("space w x", ActionAlpha {}, Some("editor")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
- assert_eq!(space_editor.1, true);
+ assert!(space_editor.1);
// Now test what happens if we have another binding defined BEFORE the NoAction
// that should result in pending
@@ -410,11 +409,11 @@ mod tests {
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
- assert_eq!(space_editor.1, true);
+ assert!(space_editor.1);
// Now test what happens if we have another binding defined at a higher context
// that should result in pending
@@ -424,11 +423,11 @@ mod tests {
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
- assert_eq!(space_editor.1, true);
+ assert!(space_editor.1);
}
#[test]
@@ -439,7 +438,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
@@ -447,7 +446,7 @@ mod tests {
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
- assert_eq!(pending, true);
+ assert!(pending);
let bindings = [
KeyBinding::new("ctrl-w left", ActionAlpha {}, Some("editor")),
@@ -455,7 +454,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
@@ -463,7 +462,7 @@ mod tests {
&[KeyContext::parse("editor").unwrap()],
);
assert_eq!(result.len(), 1);
- assert_eq!(pending, false);
+ assert!(!pending);
}
#[test]
@@ -474,7 +473,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
@@ -482,7 +481,7 @@ mod tests {
&[KeyContext::parse("editor").unwrap()],
);
assert!(result.is_empty());
- assert_eq!(pending, false);
+ assert!(!pending);
}
#[test]
@@ -494,7 +493,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
@@ -505,7 +504,7 @@ mod tests {
],
);
assert_eq!(result.len(), 1);
- assert_eq!(pending, false);
+ assert!(!pending);
}
#[test]
@@ -516,7 +515,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
// Ensure `space` results in pending input on the workspace, but not editor
let (result, pending) = keymap.bindings_for_input(
@@ -527,7 +526,7 @@ mod tests {
],
);
assert_eq!(result.len(), 0);
- assert_eq!(pending, false);
+ assert!(!pending);
}
#[test]
@@ -537,7 +536,7 @@ mod tests {
KeyBinding::new("ctrl-x 0", ActionAlpha, Some("Workspace")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
@@ -560,7 +559,7 @@ mod tests {
KeyBinding::new("ctrl-x 0", NoAction, Some("Workspace")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
@@ -579,7 +578,7 @@ mod tests {
KeyBinding::new("ctrl-x 0", NoAction, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
@@ -602,7 +601,7 @@ mod tests {
KeyBinding::new("ctrl-x", ActionBeta, Some("vim_mode == normal")),
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
let matched = keymap.bindings_for_input(
&[Keystroke::parse("ctrl-x")].map(Result::unwrap),
@@ -629,7 +628,7 @@ mod tests {
];
let mut keymap = Keymap::default();
- keymap.add_bindings(bindings.clone());
+ keymap.add_bindings(bindings);
assert_bindings(&keymap, &ActionAlpha {}, &["ctrl-a"]);
assert_bindings(&keymap, &ActionBeta {}, &[]);
@@ -639,7 +638,7 @@ mod tests {
fn assert_bindings(keymap: &Keymap, action: &dyn Action, expected: &[&str]) {
let actual = keymap
.bindings_for_action(action)
- .map(|binding| binding.keystrokes[0].unparse())
+ .map(|binding| binding.keystrokes[0].inner().unparse())
.collect::<Vec<_>>();
assert_eq!(actual, expected, "{:?}", action);
}
@@ -1,14 +1,15 @@
use std::rc::Rc;
-use collections::HashMap;
-
-use crate::{Action, InvalidKeystrokeError, KeyBindingContextPredicate, Keystroke, SharedString};
+use crate::{
+ Action, AsKeystroke, DummyKeyboardMapper, InvalidKeystrokeError, KeyBindingContextPredicate,
+ KeybindingKeystroke, Keystroke, PlatformKeyboardMapper, SharedString,
+};
use smallvec::SmallVec;
/// A keybinding and its associated metadata, from the keymap.
pub struct KeyBinding {
pub(crate) action: Box<dyn Action>,
- pub(crate) keystrokes: SmallVec<[Keystroke; 2]>,
+ pub(crate) keystrokes: SmallVec<[KeybindingKeystroke; 2]>,
pub(crate) context_predicate: Option<Rc<KeyBindingContextPredicate>>,
pub(crate) meta: Option<KeyBindingMetaIndex>,
/// The json input string used when building the keybinding, if any
@@ -30,12 +31,17 @@ impl Clone for KeyBinding {
impl KeyBinding {
/// Construct a new keybinding from the given data. Panics on parse error.
pub fn new<A: Action>(keystrokes: &str, action: A, context: Option<&str>) -> Self {
- let context_predicate = if let Some(context) = context {
- Some(KeyBindingContextPredicate::parse(context).unwrap().into())
- } else {
- None
- };
- Self::load(keystrokes, Box::new(action), context_predicate, None, None).unwrap()
+ let context_predicate =
+ context.map(|context| KeyBindingContextPredicate::parse(context).unwrap().into());
+ Self::load(
+ keystrokes,
+ Box::new(action),
+ context_predicate,
+ false,
+ None,
+ &DummyKeyboardMapper,
+ )
+ .unwrap()
}
/// Load a keybinding from the given raw data.
@@ -43,24 +49,22 @@ impl KeyBinding {
keystrokes: &str,
action: Box<dyn Action>,
context_predicate: Option<Rc<KeyBindingContextPredicate>>,
- key_equivalents: Option<&HashMap<char, char>>,
+ use_key_equivalents: bool,
action_input: Option<SharedString>,
+ keyboard_mapper: &dyn PlatformKeyboardMapper,
) -> std::result::Result<Self, InvalidKeystrokeError> {
- let mut keystrokes: SmallVec<[Keystroke; 2]> = keystrokes
+ let keystrokes: SmallVec<[KeybindingKeystroke; 2]> = keystrokes
.split_whitespace()
- .map(Keystroke::parse)
+ .map(|source| {
+ let keystroke = Keystroke::parse(source)?;
+ Ok(KeybindingKeystroke::new_with_mapper(
+ keystroke,
+ use_key_equivalents,
+ keyboard_mapper,
+ ))
+ })
.collect::<std::result::Result<_, _>>()?;
- if let Some(equivalents) = key_equivalents {
- for keystroke in keystrokes.iter_mut() {
- if keystroke.key.chars().count() == 1
- && let Some(key) = equivalents.get(&keystroke.key.chars().next().unwrap())
- {
- keystroke.key = key.to_string();
- }
- }
- }
-
Ok(Self {
keystrokes,
action,
@@ -82,13 +86,13 @@ impl KeyBinding {
}
/// Check if the given keystrokes match this binding.
- pub fn match_keystrokes(&self, typed: &[Keystroke]) -> Option<bool> {
+ pub fn match_keystrokes(&self, typed: &[impl AsKeystroke]) -> Option<bool> {
if self.keystrokes.len() < typed.len() {
return None;
}
for (target, typed) in self.keystrokes.iter().zip(typed.iter()) {
- if !typed.should_match(target) {
+ if !typed.as_keystroke().should_match(target) {
return None;
}
}
@@ -97,7 +101,7 @@ impl KeyBinding {
}
/// Get the keystrokes associated with this binding
- pub fn keystrokes(&self) -> &[Keystroke] {
+ pub fn keystrokes(&self) -> &[KeybindingKeystroke] {
self.keystrokes.as_slice()
}
@@ -668,11 +668,7 @@ mod tests {
let contexts = vec![other_context.clone(), child_context.clone()];
assert!(!predicate.eval(&contexts));
- let contexts = vec![
- parent_context.clone(),
- other_context.clone(),
- child_context.clone(),
- ];
+ let contexts = vec![parent_context.clone(), other_context, child_context.clone()];
assert!(predicate.eval(&contexts));
assert!(!predicate.eval(&[]));
@@ -681,7 +677,7 @@ mod tests {
let zany_predicate = KeyBindingContextPredicate::parse("child > child").unwrap();
assert!(!zany_predicate.eval(slice::from_ref(&child_context)));
- assert!(zany_predicate.eval(&[child_context.clone(), child_context.clone()]));
+ assert!(zany_predicate.eval(&[child_context.clone(), child_context]));
}
#[test]
@@ -718,7 +714,7 @@ mod tests {
let not_descendant = KeyBindingContextPredicate::parse("parent > !child").unwrap();
assert!(!not_descendant.eval(slice::from_ref(&parent_context)));
assert!(!not_descendant.eval(slice::from_ref(&child_context)));
- assert!(!not_descendant.eval(&[parent_context.clone(), child_context.clone()]));
+ assert!(!not_descendant.eval(&[parent_context, child_context]));
let double_not = KeyBindingContextPredicate::parse("!!editor").unwrap();
assert!(double_not.eval(slice::from_ref(&editor_context)));
@@ -318,7 +318,7 @@ impl PathBuilder {
Ok(Self::build_path(buf))
}
- /// Builds a [`Path`] from a [`lyon::VertexBuffers`].
+ /// Builds a [`Path`] from a [`lyon::tessellation::VertexBuffers`].
pub fn build_path(buf: VertexBuffers<lyon::math::Point, u16>) -> Path<Pixels> {
if buf.vertices.is_empty() {
return Path::new(Point::default());
@@ -39,8 +39,8 @@ use crate::{
Action, AnyWindowHandle, App, AsyncWindowContext, BackgroundExecutor, Bounds,
DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun,
ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput,
- Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene,
- ShapedGlyph, ShapedRun, SharedString, Size, SvgRenderer, SvgSize, Task, TaskLabel, Window,
+ Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph,
+ ShapedRun, SharedString, Size, SvgRenderer, SvgSize, SystemWindowTab, Task, TaskLabel, Window,
WindowControlArea, hash, point, px, size,
};
use anyhow::Result;
@@ -231,7 +231,6 @@ pub(crate) trait Platform: 'static {
fn on_quit(&self, callback: Box<dyn FnMut()>);
fn on_reopen(&self, callback: Box<dyn FnMut()>);
- fn on_keyboard_layout_change(&self, callback: Box<dyn FnMut()>);
fn set_menus(&self, menus: Vec<Menu>, keymap: &Keymap);
fn get_menus(&self) -> Option<Vec<OwnedMenu>> {
@@ -251,7 +250,6 @@ pub(crate) trait Platform: 'static {
fn on_app_menu_action(&self, callback: Box<dyn FnMut(&dyn Action)>);
fn on_will_open_app_menu(&self, callback: Box<dyn FnMut()>);
fn on_validate_app_menu_command(&self, callback: Box<dyn FnMut(&dyn Action) -> bool>);
- fn keyboard_layout(&self) -> Box<dyn PlatformKeyboardLayout>;
fn compositor_name(&self) -> &'static str {
""
@@ -272,6 +270,10 @@ pub(crate) trait Platform: 'static {
fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task<Result<()>>;
fn read_credentials(&self, url: &str) -> Task<Result<Option<(String, Vec<u8>)>>>;
fn delete_credentials(&self, url: &str) -> Task<Result<()>>;
+
+ fn keyboard_layout(&self) -> Box<dyn PlatformKeyboardLayout>;
+ fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper>;
+ fn on_keyboard_layout_change(&self, callback: Box<dyn FnMut()>);
}
/// A handle to a platform's display, e.g. a monitor or laptop screen.
@@ -500,9 +502,27 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas>;
// macOS specific methods
+ fn get_title(&self) -> String {
+ String::new()
+ }
+ fn tabbed_windows(&self) -> Option<Vec<SystemWindowTab>> {
+ None
+ }
+ fn tab_bar_visible(&self) -> bool {
+ false
+ }
fn set_edited(&mut self, _edited: bool) {}
fn show_character_palette(&self) {}
fn titlebar_double_click(&self) {}
+ fn on_move_tab_to_new_window(&self, _callback: Box<dyn FnMut()>) {}
+ fn on_merge_all_windows(&self, _callback: Box<dyn FnMut()>) {}
+ fn on_select_previous_tab(&self, _callback: Box<dyn FnMut()>) {}
+ fn on_select_next_tab(&self, _callback: Box<dyn FnMut()>) {}
+ fn on_toggle_tab_bar(&self, _callback: Box<dyn FnMut()>) {}
+ fn merge_all_windows(&self) {}
+ fn move_tab_to_new_window(&self) {}
+ fn toggle_window_tab_overview(&self) {}
+ fn set_tabbing_identifier(&self, _identifier: Option<String>) {}
#[cfg(target_os = "windows")]
fn get_raw_handle(&self) -> windows::HWND;
@@ -528,7 +548,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
fn set_client_inset(&self, _inset: Pixels) {}
fn gpu_specs(&self) -> Option<GpuSpecs>;
- fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>);
+ fn update_ime_position(&self, _bounds: Bounds<Pixels>);
#[cfg(any(test, feature = "test-support"))]
fn as_test(&mut self) -> Option<&mut TestWindow> {
@@ -673,7 +693,7 @@ impl PlatformTextSystem for NoopTextSystem {
}
}
let mut runs = Vec::default();
- if glyphs.len() > 0 {
+ if !glyphs.is_empty() {
runs.push(ShapedRun {
font_id: FontId(0),
glyphs,
@@ -1089,6 +1109,12 @@ pub struct WindowOptions {
/// Whether the window should be movable by the user
pub is_movable: bool,
+ /// Whether the window should be resizable by the user
+ pub is_resizable: bool,
+
+ /// Whether the window should be minimized by the user
+ pub is_minimizable: bool,
+
/// The display to create the window on, if this is None,
/// the window will be created on the main display
pub display_id: Option<DisplayId>,
@@ -1105,6 +1131,9 @@ pub struct WindowOptions {
/// Whether to use client or server side decorations. Wayland only
/// Note that this may be ignored.
pub window_decorations: Option<WindowDecorations>,
+
+ /// Tab group name, allows opening the window as a native tab on macOS 10.12+. Windows with the same tabbing identifier will be grouped together.
+ pub tabbing_identifier: Option<String>,
}
/// The variables that can be configured when creating a new window
@@ -1131,6 +1160,14 @@ pub(crate) struct WindowParams {
#[cfg_attr(any(target_os = "linux", target_os = "freebsd"), allow(dead_code))]
pub is_movable: bool,
+ /// Whether the window should be resizable by the user
+ #[cfg_attr(any(target_os = "linux", target_os = "freebsd"), allow(dead_code))]
+ pub is_resizable: bool,
+
+ /// Whether the window should be minimized by the user
+ #[cfg_attr(any(target_os = "linux", target_os = "freebsd"), allow(dead_code))]
+ pub is_minimizable: bool,
+
#[cfg_attr(
any(target_os = "linux", target_os = "freebsd", target_os = "windows"),
allow(dead_code)
@@ -1144,6 +1181,8 @@ pub(crate) struct WindowParams {
pub display_id: Option<DisplayId>,
pub window_min_size: Option<Size<Pixels>>,
+ #[cfg(target_os = "macos")]
+ pub tabbing_identifier: Option<String>,
}
/// Represents the status of how a window should be opened.
@@ -1189,11 +1228,14 @@ impl Default for WindowOptions {
show: true,
kind: WindowKind::Normal,
is_movable: true,
+ is_resizable: true,
+ is_minimizable: true,
display_id: None,
window_background: WindowBackgroundAppearance::default(),
app_id: None,
window_min_size: None,
window_decorations: None,
+ tabbing_identifier: None,
}
}
}
@@ -371,7 +371,7 @@ impl BladeRenderer {
.or_else(|| {
[4, 2, 1]
.into_iter()
- .find(|count| context.gpu.supports_texture_sample_count(*count))
+ .find(|&n| (context.gpu.capabilities().sample_count_mask & n) != 0)
})
.unwrap_or(1);
let pipelines = BladePipelines::new(&context.gpu, surface.info(), path_sample_count);
@@ -1,3 +1,7 @@
+use collections::HashMap;
+
+use crate::{KeybindingKeystroke, Keystroke};
+
/// A trait for platform-specific keyboard layouts
pub trait PlatformKeyboardLayout {
/// Get the keyboard layout ID, which should be unique to the layout
@@ -5,3 +9,33 @@ pub trait PlatformKeyboardLayout {
/// Get the keyboard layout display name
fn name(&self) -> &str;
}
+
+/// A trait for platform-specific keyboard mappings
+pub trait PlatformKeyboardMapper {
+ /// Map a key equivalent to its platform-specific representation
+ fn map_key_equivalent(
+ &self,
+ keystroke: Keystroke,
+ use_key_equivalents: bool,
+ ) -> KeybindingKeystroke;
+ /// Get the key equivalents for the current keyboard layout,
+ /// only used on macOS
+ fn get_key_equivalents(&self) -> Option<&HashMap<char, char>>;
+}
+
+/// A dummy implementation of the platform keyboard mapper
+pub struct DummyKeyboardMapper;
+
+impl PlatformKeyboardMapper for DummyKeyboardMapper {
+ fn map_key_equivalent(
+ &self,
+ keystroke: Keystroke,
+ _use_key_equivalents: bool,
+ ) -> KeybindingKeystroke {
+ KeybindingKeystroke::from_keystroke(keystroke)
+ }
+
+ fn get_key_equivalents(&self) -> Option<&HashMap<char, char>> {
+ None
+ }
+}
@@ -5,6 +5,14 @@ use std::{
fmt::{Display, Write},
};
+use crate::PlatformKeyboardMapper;
+
+/// This is a helper trait so that we can simplify the implementation of some functions
+pub trait AsKeystroke {
+ /// Returns the GPUI representation of the keystroke.
+ fn as_keystroke(&self) -> &Keystroke;
+}
+
/// A keystroke and associated metadata generated by the platform
#[derive(Clone, Debug, Eq, PartialEq, Default, Deserialize, Hash)]
pub struct Keystroke {
@@ -24,6 +32,19 @@ pub struct Keystroke {
pub key_char: Option<String>,
}
+/// Represents a keystroke that can be used in keybindings and displayed to the user.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct KeybindingKeystroke {
+ /// The GPUI representation of the keystroke.
+ inner: Keystroke,
+ /// The modifiers to display.
+ #[cfg(target_os = "windows")]
+ display_modifiers: Modifiers,
+ /// The key to display.
+ #[cfg(target_os = "windows")]
+ display_key: String,
+}
+
/// Error type for `Keystroke::parse`. This is used instead of `anyhow::Error` so that Zed can use
/// markdown to display it.
#[derive(Debug)]
@@ -58,7 +79,7 @@ impl Keystroke {
///
/// This method assumes that `self` was typed and `target' is in the keymap, and checks
/// both possibilities for self against the target.
- pub fn should_match(&self, target: &Keystroke) -> bool {
+ pub fn should_match(&self, target: &KeybindingKeystroke) -> bool {
#[cfg(not(target_os = "windows"))]
if let Some(key_char) = self
.key_char
@@ -71,7 +92,7 @@ impl Keystroke {
..Default::default()
};
- if &target.key == key_char && target.modifiers == ime_modifiers {
+ if &target.inner.key == key_char && target.inner.modifiers == ime_modifiers {
return true;
}
}
@@ -83,12 +104,12 @@ impl Keystroke {
.filter(|key_char| key_char != &&self.key)
{
// On Windows, if key_char is set, then the typed keystroke produced the key_char
- if &target.key == key_char && target.modifiers == Modifiers::none() {
+ if &target.inner.key == key_char && target.inner.modifiers == Modifiers::none() {
return true;
}
}
- target.modifiers == self.modifiers && target.key == self.key
+ target.inner.modifiers == self.modifiers && target.inner.key == self.key
}
/// key syntax is:
@@ -200,31 +221,7 @@ impl Keystroke {
/// Produces a representation of this key that Parse can understand.
pub fn unparse(&self) -> String {
- let mut str = String::new();
- if self.modifiers.function {
- str.push_str("fn-");
- }
- if self.modifiers.control {
- str.push_str("ctrl-");
- }
- if self.modifiers.alt {
- str.push_str("alt-");
- }
- if self.modifiers.platform {
- #[cfg(target_os = "macos")]
- str.push_str("cmd-");
-
- #[cfg(any(target_os = "linux", target_os = "freebsd"))]
- str.push_str("super-");
-
- #[cfg(target_os = "windows")]
- str.push_str("win-");
- }
- if self.modifiers.shift {
- str.push_str("shift-");
- }
- str.push_str(&self.key);
- str
+ unparse(&self.modifiers, &self.key)
}
/// Returns true if this keystroke left
@@ -266,6 +263,117 @@ impl Keystroke {
}
}
+impl KeybindingKeystroke {
+ #[cfg(target_os = "windows")]
+ pub(crate) fn new(inner: Keystroke, display_modifiers: Modifiers, display_key: String) -> Self {
+ KeybindingKeystroke {
+ inner,
+ display_modifiers,
+ display_key,
+ }
+ }
+
+ /// Create a new keybinding keystroke from the given keystroke using the given keyboard mapper.
+ pub fn new_with_mapper(
+ inner: Keystroke,
+ use_key_equivalents: bool,
+ keyboard_mapper: &dyn PlatformKeyboardMapper,
+ ) -> Self {
+ keyboard_mapper.map_key_equivalent(inner, use_key_equivalents)
+ }
+
+ /// Create a new keybinding keystroke from the given keystroke, without any platform-specific mapping.
+ pub fn from_keystroke(keystroke: Keystroke) -> Self {
+ #[cfg(target_os = "windows")]
+ {
+ let key = keystroke.key.clone();
+ let modifiers = keystroke.modifiers;
+ KeybindingKeystroke {
+ inner: keystroke,
+ display_modifiers: modifiers,
+ display_key: key,
+ }
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ KeybindingKeystroke { inner: keystroke }
+ }
+ }
+
+ /// Returns the GPUI representation of the keystroke.
+ pub fn inner(&self) -> &Keystroke {
+ &self.inner
+ }
+
+ /// Returns the modifiers.
+ ///
+ /// Platform-specific behavior:
+ /// - On macOS and Linux, this modifiers is the same as `inner.modifiers`, which is the GPUI representation of the keystroke.
+ /// - On Windows, this modifiers is the display modifiers, for example, a `ctrl-@` keystroke will have `inner.modifiers` as
+ /// `Modifiers::control()` and `display_modifiers` as `Modifiers::control_shift()`.
+ pub fn modifiers(&self) -> &Modifiers {
+ #[cfg(target_os = "windows")]
+ {
+ &self.display_modifiers
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ &self.inner.modifiers
+ }
+ }
+
+ /// Returns the key.
+ ///
+ /// Platform-specific behavior:
+ /// - On macOS and Linux, this key is the same as `inner.key`, which is the GPUI representation of the keystroke.
+ /// - On Windows, this key is the display key, for example, a `ctrl-@` keystroke will have `inner.key` as `@` and `display_key` as `2`.
+ pub fn key(&self) -> &str {
+ #[cfg(target_os = "windows")]
+ {
+ &self.display_key
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ &self.inner.key
+ }
+ }
+
+ /// Sets the modifiers. On Windows this modifies both `inner.modifiers` and `display_modifiers`.
+ pub fn set_modifiers(&mut self, modifiers: Modifiers) {
+ self.inner.modifiers = modifiers;
+ #[cfg(target_os = "windows")]
+ {
+ self.display_modifiers = modifiers;
+ }
+ }
+
+ /// Sets the key. On Windows this modifies both `inner.key` and `display_key`.
+ pub fn set_key(&mut self, key: String) {
+ #[cfg(target_os = "windows")]
+ {
+ self.display_key = key.clone();
+ }
+ self.inner.key = key;
+ }
+
+ /// Produces a representation of this key that Parse can understand.
+ pub fn unparse(&self) -> String {
+ #[cfg(target_os = "windows")]
+ {
+ unparse(&self.display_modifiers, &self.display_key)
+ }
+ #[cfg(not(target_os = "windows"))]
+ {
+ unparse(&self.inner.modifiers, &self.inner.key)
+ }
+ }
+
+ /// Removes the key_char
+ pub fn remove_key_char(&mut self) {
+ self.inner.key_char = None;
+ }
+}
+
fn is_printable_key(key: &str) -> bool {
!matches!(
key,
@@ -322,65 +430,15 @@ fn is_printable_key(key: &str) -> bool {
impl std::fmt::Display for Keystroke {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- if self.modifiers.control {
- #[cfg(target_os = "macos")]
- f.write_char('^')?;
-
- #[cfg(not(target_os = "macos"))]
- write!(f, "ctrl-")?;
- }
- if self.modifiers.alt {
- #[cfg(target_os = "macos")]
- f.write_char('⌥')?;
-
- #[cfg(not(target_os = "macos"))]
- write!(f, "alt-")?;
- }
- if self.modifiers.platform {
- #[cfg(target_os = "macos")]
- f.write_char('⌘')?;
-
- #[cfg(any(target_os = "linux", target_os = "freebsd"))]
- f.write_char('❖')?;
-
- #[cfg(target_os = "windows")]
- f.write_char('⊞')?;
- }
- if self.modifiers.shift {
- #[cfg(target_os = "macos")]
- f.write_char('⇧')?;
+ display_modifiers(&self.modifiers, f)?;
+ display_key(&self.key, f)
+ }
+}
- #[cfg(not(target_os = "macos"))]
- write!(f, "shift-")?;
- }
- let key = match self.key.as_str() {
- #[cfg(target_os = "macos")]
- "backspace" => '⌫',
- #[cfg(target_os = "macos")]
- "up" => '↑',
- #[cfg(target_os = "macos")]
- "down" => '↓',
- #[cfg(target_os = "macos")]
- "left" => '←',
- #[cfg(target_os = "macos")]
- "right" => '→',
- #[cfg(target_os = "macos")]
- "tab" => '⇥',
- #[cfg(target_os = "macos")]
- "escape" => '⎋',
- #[cfg(target_os = "macos")]
- "shift" => '⇧',
- #[cfg(target_os = "macos")]
- "control" => '⌃',
- #[cfg(target_os = "macos")]
- "alt" => '⌥',
- #[cfg(target_os = "macos")]
- "platform" => '⌘',
-
- key if key.len() == 1 => key.chars().next().unwrap().to_ascii_uppercase(),
- key => return f.write_str(key),
- };
- f.write_char(key)
+impl std::fmt::Display for KeybindingKeystroke {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ display_modifiers(self.modifiers(), f)?;
+ display_key(self.key(), f)
}
}
@@ -600,3 +658,110 @@ pub struct Capslock {
#[serde(default)]
pub on: bool,
}
+
+impl AsKeystroke for Keystroke {
+ fn as_keystroke(&self) -> &Keystroke {
+ self
+ }
+}
+
+impl AsKeystroke for KeybindingKeystroke {
+ fn as_keystroke(&self) -> &Keystroke {
+ &self.inner
+ }
+}
+
+fn display_modifiers(modifiers: &Modifiers, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ if modifiers.control {
+ #[cfg(target_os = "macos")]
+ f.write_char('^')?;
+
+ #[cfg(not(target_os = "macos"))]
+ write!(f, "ctrl-")?;
+ }
+ if modifiers.alt {
+ #[cfg(target_os = "macos")]
+ f.write_char('⌥')?;
+
+ #[cfg(not(target_os = "macos"))]
+ write!(f, "alt-")?;
+ }
+ if modifiers.platform {
+ #[cfg(target_os = "macos")]
+ f.write_char('⌘')?;
+
+ #[cfg(any(target_os = "linux", target_os = "freebsd"))]
+ f.write_char('❖')?;
+
+ #[cfg(target_os = "windows")]
+ f.write_char('⊞')?;
+ }
+ if modifiers.shift {
+ #[cfg(target_os = "macos")]
+ f.write_char('⇧')?;
+
+ #[cfg(not(target_os = "macos"))]
+ write!(f, "shift-")?;
+ }
+ Ok(())
+}
+
+fn display_key(key: &str, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let key = match key {
+ #[cfg(target_os = "macos")]
+ "backspace" => '⌫',
+ #[cfg(target_os = "macos")]
+ "up" => '↑',
+ #[cfg(target_os = "macos")]
+ "down" => '↓',
+ #[cfg(target_os = "macos")]
+ "left" => '←',
+ #[cfg(target_os = "macos")]
+ "right" => '→',
+ #[cfg(target_os = "macos")]
+ "tab" => '⇥',
+ #[cfg(target_os = "macos")]
+ "escape" => '⎋',
+ #[cfg(target_os = "macos")]
+ "shift" => '⇧',
+ #[cfg(target_os = "macos")]
+ "control" => '⌃',
+ #[cfg(target_os = "macos")]
+ "alt" => '⌥',
+ #[cfg(target_os = "macos")]
+ "platform" => '⌘',
+
+ key if key.len() == 1 => key.chars().next().unwrap().to_ascii_uppercase(),
+ key => return f.write_str(key),
+ };
+ f.write_char(key)
+}
+
+#[inline]
+fn unparse(modifiers: &Modifiers, key: &str) -> String {
+ let mut result = String::new();
+ if modifiers.function {
+ result.push_str("fn-");
+ }
+ if modifiers.control {
+ result.push_str("ctrl-");
+ }
+ if modifiers.alt {
+ result.push_str("alt-");
+ }
+ if modifiers.platform {
+ #[cfg(target_os = "macos")]
+ result.push_str("cmd-");
+
+ #[cfg(any(target_os = "linux", target_os = "freebsd"))]
+ result.push_str("super-");
+
+ #[cfg(target_os = "windows")]
+ result.push_str("win-");
+ }
+ if modifiers.shift {
+ result.push_str("shift-");
+ }
+ result.push_str(&key);
+ result
+}
@@ -25,8 +25,8 @@ use xkbcommon::xkb::{self, Keycode, Keysym, State};
use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId,
ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions,
- Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow,
- Point, Result, Task, WindowAppearance, WindowParams, px,
+ Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper,
+ PlatformTextSystem, PlatformWindow, Point, Result, Task, WindowAppearance, WindowParams, px,
};
#[cfg(any(feature = "wayland", feature = "x11"))]
@@ -108,13 +108,13 @@ impl LinuxCommon {
let callbacks = PlatformHandlers::default();
- let dispatcher = Arc::new(LinuxDispatcher::new(main_sender.clone()));
+ let dispatcher = Arc::new(LinuxDispatcher::new(main_sender));
let background_executor = BackgroundExecutor::new(dispatcher.clone());
let common = LinuxCommon {
background_executor,
- foreground_executor: ForegroundExecutor::new(dispatcher.clone()),
+ foreground_executor: ForegroundExecutor::new(dispatcher),
text_system,
appearance: WindowAppearance::Light,
auto_hide_scrollbars: false,
@@ -144,6 +144,10 @@ impl<P: LinuxClient + 'static> Platform for P {
self.keyboard_layout()
}
+ fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper> {
+ Rc::new(crate::DummyKeyboardMapper)
+ }
+
fn on_keyboard_layout_change(&self, callback: Box<dyn FnMut()>) {
self.with_common(|common| common.callbacks.keyboard_layout_change = Some(callback));
}
@@ -667,7 +671,7 @@ pub(super) const DEFAULT_CURSOR_ICON_NAME: &str = "left_ptr";
impl CursorStyle {
#[cfg(any(feature = "wayland", feature = "x11"))]
- pub(super) fn to_icon_names(&self) -> &'static [&'static str] {
+ pub(super) fn to_icon_names(self) -> &'static [&'static str] {
// Based on cursor names from chromium:
// https://github.com/chromium/chromium/blob/d3069cf9c973dc3627fa75f64085c6a86c8f41bf/ui/base/cursor/cursor_factory.cc#L113
match self {
@@ -844,6 +848,7 @@ impl crate::Keystroke {
Keysym::Down => "down".to_owned(),
Keysym::Home => "home".to_owned(),
Keysym::End => "end".to_owned(),
+ Keysym::Insert => "insert".to_owned(),
_ => {
let name = xkb::keysym_get_name(key_sym).to_lowercase();
@@ -990,21 +995,18 @@ mod tests {
#[test]
fn test_is_within_click_distance() {
let zero = Point::new(px(0.0), px(0.0));
- assert_eq!(
- is_within_click_distance(zero, Point::new(px(5.0), px(5.0))),
- true
- );
- assert_eq!(
- is_within_click_distance(zero, Point::new(px(-4.9), px(5.0))),
- true
- );
- assert_eq!(
- is_within_click_distance(Point::new(px(3.0), px(2.0)), Point::new(px(-2.0), px(-2.0))),
- true
- );
- assert_eq!(
- is_within_click_distance(zero, Point::new(px(5.0), px(5.1))),
- false
- );
+ assert!(is_within_click_distance(zero, Point::new(px(5.0), px(5.0))));
+ assert!(is_within_click_distance(
+ zero,
+ Point::new(px(-4.9), px(5.0))
+ ));
+ assert!(is_within_click_distance(
+ Point::new(px(3.0), px(2.0)),
+ Point::new(px(-2.0), px(-2.0))
+ ));
+ assert!(!is_within_click_distance(
+ zero,
+ Point::new(px(5.0), px(5.1))
+ ),);
}
}
@@ -12,7 +12,7 @@ use wayland_protocols::wp::cursor_shape::v1::client::wp_cursor_shape_device_v1::
use crate::CursorStyle;
impl CursorStyle {
- pub(super) fn to_shape(&self) -> Shape {
+ pub(super) fn to_shape(self) -> Shape {
match self {
CursorStyle::Arrow => Shape::Default,
CursorStyle::IBeam => Shape::Text,
@@ -75,8 +75,8 @@ use crate::{
FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon,
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent,
MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay,
- PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels, ScrollDelta,
- ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
+ PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScrollDelta, ScrollWheelEvent,
+ Size, TouchPhase, WindowParams, point, px, size,
};
use crate::{
SharedString,
@@ -323,7 +323,7 @@ impl WaylandClientStatePtr {
}
}
- pub fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
+ pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let client = self.get_client();
let mut state = client.borrow_mut();
if state.composing || state.text_input.is_none() || state.pre_edit_text.is_some() {
@@ -528,7 +528,7 @@ impl WaylandClient {
client.common.appearance = appearance;
- for (_, window) in &mut client.windows {
+ for window in client.windows.values_mut() {
window.set_appearance(appearance);
}
}
@@ -949,11 +949,8 @@ impl Dispatch<WlCallback, ObjectId> for WaylandClientStatePtr {
};
drop(state);
- match event {
- wl_callback::Event::Done { .. } => {
- window.frame();
- }
- _ => {}
+ if let wl_callback::Event::Done { .. } = event {
+ window.frame();
}
}
}
@@ -1283,7 +1280,6 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
let Some(focused_window) = focused_window else {
return;
};
- let focused_window = focused_window.clone();
let keymap_state = state.keymap_state.as_ref().unwrap();
let keycode = Keycode::from(key + MIN_KEYCODE);
@@ -2014,25 +2010,22 @@ impl Dispatch<wl_data_offer::WlDataOffer, ()> for WaylandClientStatePtr {
let client = this.get_client();
let mut state = client.borrow_mut();
- match event {
- wl_data_offer::Event::Offer { mime_type } => {
- // Drag and drop
- if mime_type == FILE_LIST_MIME_TYPE {
- let serial = state.serial_tracker.get(SerialKind::DataDevice);
- let mime_type = mime_type.clone();
- data_offer.accept(serial, Some(mime_type));
- }
+ if let wl_data_offer::Event::Offer { mime_type } = event {
+ // Drag and drop
+ if mime_type == FILE_LIST_MIME_TYPE {
+ let serial = state.serial_tracker.get(SerialKind::DataDevice);
+ let mime_type = mime_type.clone();
+ data_offer.accept(serial, Some(mime_type));
+ }
- // Clipboard
- if let Some(offer) = state
- .data_offers
- .iter_mut()
- .find(|wrapper| wrapper.inner.id() == data_offer.id())
- {
- offer.add_mime_type(mime_type);
- }
+ // Clipboard
+ if let Some(offer) = state
+ .data_offers
+ .iter_mut()
+ .find(|wrapper| wrapper.inner.id() == data_offer.id())
+ {
+ offer.add_mime_type(mime_type);
}
- _ => {}
}
}
}
@@ -2113,13 +2106,10 @@ impl Dispatch<zwp_primary_selection_offer_v1::ZwpPrimarySelectionOfferV1, ()>
let client = this.get_client();
let mut state = client.borrow_mut();
- match event {
- zwp_primary_selection_offer_v1::Event::Offer { mime_type } => {
- if let Some(offer) = state.primary_data_offer.as_mut() {
- offer.add_mime_type(mime_type);
- }
- }
- _ => {}
+ if let zwp_primary_selection_offer_v1::Event::Offer { mime_type } = event
+ && let Some(offer) = state.primary_data_offer.as_mut()
+ {
+ offer.add_mime_type(mime_type);
}
}
}
@@ -67,7 +67,7 @@ impl Cursor {
{
self.loaded_theme = Some(LoadedTheme {
theme,
- name: theme_name.map(|name| name.to_string()),
+ name: theme_name,
scaled_size: self.scaled_size,
});
}
@@ -25,9 +25,8 @@ use crate::scene::Scene;
use crate::{
AnyWindowHandle, Bounds, Decorations, Globals, GpuSpecs, Modifiers, Output, Pixels,
PlatformDisplay, PlatformInput, Point, PromptButton, PromptLevel, RequestFrameOptions,
- ResizeEdge, ScaledPixels, Size, Tiling, WaylandClientStatePtr, WindowAppearance,
- WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowControls, WindowDecorations,
- WindowParams, px, size,
+ ResizeEdge, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance,
+ WindowBounds, WindowControlArea, WindowControls, WindowDecorations, WindowParams, px, size,
};
use crate::{
Capslock,
@@ -355,85 +354,82 @@ impl WaylandWindowStatePtr {
}
pub fn handle_xdg_surface_event(&self, event: xdg_surface::Event) {
- match event {
- xdg_surface::Event::Configure { serial } => {
- {
- let mut state = self.state.borrow_mut();
- if let Some(window_controls) = state.in_progress_window_controls.take() {
- state.window_controls = window_controls;
-
- drop(state);
- let mut callbacks = self.callbacks.borrow_mut();
- if let Some(appearance_changed) = callbacks.appearance_changed.as_mut() {
- appearance_changed();
- }
+ if let xdg_surface::Event::Configure { serial } = event {
+ {
+ let mut state = self.state.borrow_mut();
+ if let Some(window_controls) = state.in_progress_window_controls.take() {
+ state.window_controls = window_controls;
+
+ drop(state);
+ let mut callbacks = self.callbacks.borrow_mut();
+ if let Some(appearance_changed) = callbacks.appearance_changed.as_mut() {
+ appearance_changed();
}
}
- {
- let mut state = self.state.borrow_mut();
-
- if let Some(mut configure) = state.in_progress_configure.take() {
- let got_unmaximized = state.maximized && !configure.maximized;
- state.fullscreen = configure.fullscreen;
- state.maximized = configure.maximized;
- state.tiling = configure.tiling;
- // Limit interactive resizes to once per vblank
- if configure.resizing && state.resize_throttle {
- return;
- } else if configure.resizing {
- state.resize_throttle = true;
- }
- if !configure.fullscreen && !configure.maximized {
- configure.size = if got_unmaximized {
- Some(state.window_bounds.size)
- } else {
- compute_outer_size(state.inset(), configure.size, state.tiling)
- };
- if let Some(size) = configure.size {
- state.window_bounds = Bounds {
- origin: Point::default(),
- size,
- };
- }
- }
- drop(state);
+ }
+ {
+ let mut state = self.state.borrow_mut();
+
+ if let Some(mut configure) = state.in_progress_configure.take() {
+ let got_unmaximized = state.maximized && !configure.maximized;
+ state.fullscreen = configure.fullscreen;
+ state.maximized = configure.maximized;
+ state.tiling = configure.tiling;
+ // Limit interactive resizes to once per vblank
+ if configure.resizing && state.resize_throttle {
+ return;
+ } else if configure.resizing {
+ state.resize_throttle = true;
+ }
+ if !configure.fullscreen && !configure.maximized {
+ configure.size = if got_unmaximized {
+ Some(state.window_bounds.size)
+ } else {
+ compute_outer_size(state.inset(), configure.size, state.tiling)
+ };
if let Some(size) = configure.size {
- self.resize(size);
+ state.window_bounds = Bounds {
+ origin: Point::default(),
+ size,
+ };
}
}
- }
- let mut state = self.state.borrow_mut();
- state.xdg_surface.ack_configure(serial);
-
- let window_geometry = inset_by_tiling(
- state.bounds.map_origin(|_| px(0.0)),
- state.inset(),
- state.tiling,
- )
- .map(|v| v.0 as i32)
- .map_size(|v| if v <= 0 { 1 } else { v });
-
- state.xdg_surface.set_window_geometry(
- window_geometry.origin.x,
- window_geometry.origin.y,
- window_geometry.size.width,
- window_geometry.size.height,
- );
-
- let request_frame_callback = !state.acknowledged_first_configure;
- if request_frame_callback {
- state.acknowledged_first_configure = true;
drop(state);
- self.frame();
+ if let Some(size) = configure.size {
+ self.resize(size);
+ }
}
}
- _ => {}
+ let mut state = self.state.borrow_mut();
+ state.xdg_surface.ack_configure(serial);
+
+ let window_geometry = inset_by_tiling(
+ state.bounds.map_origin(|_| px(0.0)),
+ state.inset(),
+ state.tiling,
+ )
+ .map(|v| v.0 as i32)
+ .map_size(|v| if v <= 0 { 1 } else { v });
+
+ state.xdg_surface.set_window_geometry(
+ window_geometry.origin.x,
+ window_geometry.origin.y,
+ window_geometry.size.width,
+ window_geometry.size.height,
+ );
+
+ let request_frame_callback = !state.acknowledged_first_configure;
+ if request_frame_callback {
+ state.acknowledged_first_configure = true;
+ drop(state);
+ self.frame();
+ }
}
}
pub fn handle_toplevel_decoration_event(&self, event: zxdg_toplevel_decoration_v1::Event) {
- match event {
- zxdg_toplevel_decoration_v1::Event::Configure { mode } => match mode {
+ if let zxdg_toplevel_decoration_v1::Event::Configure { mode } = event {
+ match mode {
WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ServerSide) => {
self.state.borrow_mut().decorations = WindowDecorations::Server;
if let Some(mut appearance_changed) =
@@ -457,17 +453,13 @@ impl WaylandWindowStatePtr {
WEnum::Unknown(v) => {
log::warn!("Unknown decoration mode: {}", v);
}
- },
- _ => {}
+ }
}
}
pub fn handle_fractional_scale_event(&self, event: wp_fractional_scale_v1::Event) {
- match event {
- wp_fractional_scale_v1::Event::PreferredScale { scale } => {
- self.rescale(scale as f32 / 120.0);
- }
- _ => {}
+ if let wp_fractional_scale_v1::Event::PreferredScale { scale } = event {
+ self.rescale(scale as f32 / 120.0);
}
}
@@ -1085,7 +1077,7 @@ impl PlatformWindow for WaylandWindow {
}
}
- fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
+ fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let state = self.borrow();
state.client.update_ime_position(bounds);
}
@@ -1146,7 +1138,7 @@ fn update_window(mut state: RefMut<WaylandWindowState>) {
}
impl WindowDecorations {
- fn to_xdg(&self) -> zxdg_toplevel_decoration_v1::Mode {
+ fn to_xdg(self) -> zxdg_toplevel_decoration_v1::Mode {
match self {
WindowDecorations::Client => zxdg_toplevel_decoration_v1::Mode::ClientSide,
WindowDecorations::Server => zxdg_toplevel_decoration_v1::Mode::ServerSide,
@@ -1155,7 +1147,7 @@ impl WindowDecorations {
}
impl ResizeEdge {
- fn to_xdg(&self) -> xdg_toplevel::ResizeEdge {
+ fn to_xdg(self) -> xdg_toplevel::ResizeEdge {
match self {
ResizeEdge::Top => xdg_toplevel::ResizeEdge::Top,
ResizeEdge::TopRight => xdg_toplevel::ResizeEdge::TopRight,
@@ -62,8 +62,7 @@ use crate::{
AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke,
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, Platform,
PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point, RequestFrameOptions,
- ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
- modifiers_from_xinput_info, point, px,
+ ScrollDelta, Size, TouchPhase, WindowParams, X11Window, modifiers_from_xinput_info, point, px,
};
/// Value for DeviceId parameters which selects all devices.
@@ -232,15 +231,12 @@ impl X11ClientStatePtr {
};
let mut state = client.0.borrow_mut();
- if let Some(window_ref) = state.windows.remove(&x_window) {
- match window_ref.refresh_state {
- Some(RefreshState::PeriodicRefresh {
- event_loop_token, ..
- }) => {
- state.loop_handle.remove(event_loop_token);
- }
- _ => {}
- }
+ if let Some(window_ref) = state.windows.remove(&x_window)
+ && let Some(RefreshState::PeriodicRefresh {
+ event_loop_token, ..
+ }) = window_ref.refresh_state
+ {
+ state.loop_handle.remove(event_loop_token);
}
if state.mouse_focused_window == Some(x_window) {
state.mouse_focused_window = None;
@@ -255,7 +251,7 @@ impl X11ClientStatePtr {
}
}
- pub fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
+ pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let Some(client) = self.get_client() else {
return;
};
@@ -273,6 +269,7 @@ impl X11ClientStatePtr {
state.ximc = Some(ximc);
return;
};
+ let scaled_bounds = bounds.scale(state.scale_factor);
let ic_attributes = ximc
.build_ic_attributes()
.push(
@@ -285,8 +282,8 @@ impl X11ClientStatePtr {
b.push(
xim::AttributeName::SpotLocation,
xim::Point {
- x: u32::from(bounds.origin.x + bounds.size.width) as i16,
- y: u32::from(bounds.origin.y + bounds.size.height) as i16,
+ x: u32::from(scaled_bounds.origin.x + scaled_bounds.size.width) as i16,
+ y: u32::from(scaled_bounds.origin.y + scaled_bounds.size.height) as i16,
},
);
})
@@ -459,7 +456,7 @@ impl X11Client {
move |event, _, client| match event {
XDPEvent::WindowAppearance(appearance) => {
client.with_common(|common| common.appearance = appearance);
- for (_, window) in &mut client.0.borrow_mut().windows {
+ for window in client.0.borrow_mut().windows.values_mut() {
window.window.set_appearance(appearance);
}
}
@@ -706,14 +703,14 @@ impl X11Client {
state.xim_handler = Some(xim_handler);
return;
};
- if let Some(area) = window.get_ime_area() {
+ if let Some(scaled_area) = window.get_ime_area() {
ic_attributes =
ic_attributes.nested_list(xim::AttributeName::PreeditAttributes, |b| {
b.push(
xim::AttributeName::SpotLocation,
xim::Point {
- x: u32::from(area.origin.x + area.size.width) as i16,
- y: u32::from(area.origin.y + area.size.height) as i16,
+ x: u32::from(scaled_area.origin.x + scaled_area.size.width) as i16,
+ y: u32::from(scaled_area.origin.y + scaled_area.size.height) as i16,
},
);
});
@@ -876,22 +873,19 @@ impl X11Client {
let Some(reply) = reply else {
return Some(());
};
- match str::from_utf8(&reply.value) {
- Ok(file_list) => {
- let paths: SmallVec<[_; 2]> = file_list
- .lines()
- .filter_map(|path| Url::parse(path).log_err())
- .filter_map(|url| url.to_file_path().log_err())
- .collect();
- let input = PlatformInput::FileDrop(FileDropEvent::Entered {
- position: state.xdnd_state.position,
- paths: crate::ExternalPaths(paths),
- });
- drop(state);
- window.handle_input(input);
- self.0.borrow_mut().xdnd_state.retrieved = true;
- }
- Err(_) => {}
+ if let Ok(file_list) = str::from_utf8(&reply.value) {
+ let paths: SmallVec<[_; 2]> = file_list
+ .lines()
+ .filter_map(|path| Url::parse(path).log_err())
+ .filter_map(|url| url.to_file_path().log_err())
+ .collect();
+ let input = PlatformInput::FileDrop(FileDropEvent::Entered {
+ position: state.xdnd_state.position,
+ paths: crate::ExternalPaths(paths),
+ });
+ drop(state);
+ window.handle_input(input);
+ self.0.borrow_mut().xdnd_state.retrieved = true;
}
}
Event::ConfigureNotify(event) => {
@@ -1335,7 +1329,7 @@ impl X11Client {
state.composing = false;
drop(state);
if let Some(mut keystroke) = keystroke {
- keystroke.key_char = Some(text.clone());
+ keystroke.key_char = Some(text);
window.handle_input(PlatformInput::KeyDown(crate::KeyDownEvent {
keystroke,
is_held: false,
@@ -1357,7 +1351,7 @@ impl X11Client {
drop(state);
window.handle_ime_preedit(text);
- if let Some(area) = window.get_ime_area() {
+ if let Some(scaled_area) = window.get_ime_area() {
let ic_attributes = ximc
.build_ic_attributes()
.push(
@@ -1370,8 +1364,8 @@ impl X11Client {
b.push(
xim::AttributeName::SpotLocation,
xim::Point {
- x: u32::from(area.origin.x + area.size.width) as i16,
- y: u32::from(area.origin.y + area.size.height) as i16,
+ x: u32::from(scaled_area.origin.x + scaled_area.size.width) as i16,
+ y: u32::from(scaled_area.origin.y + scaled_area.size.height) as i16,
},
);
})
@@ -2114,7 +2108,7 @@ fn current_pointer_device_states(
.classes
.iter()
.filter_map(|class| class.data.as_scroll())
- .map(|class| *class)
+ .copied()
.rev()
.collect::<Vec<_>>();
let old_state = scroll_values_to_preserve.get(&info.deviceid);
@@ -95,7 +95,7 @@ fn query_render_extent(
}
impl ResizeEdge {
- fn to_moveresize(&self) -> u32 {
+ fn to_moveresize(self) -> u32 {
match self {
ResizeEdge::TopLeft => 0,
ResizeEdge::Top => 1,
@@ -1019,8 +1019,9 @@ impl X11WindowStatePtr {
}
}
- pub fn get_ime_area(&self) -> Option<Bounds<Pixels>> {
+ pub fn get_ime_area(&self) -> Option<Bounds<ScaledPixels>> {
let mut state = self.state.borrow_mut();
+ let scale_factor = state.scale_factor;
let mut bounds: Option<Bounds<Pixels>> = None;
if let Some(mut input_handler) = state.input_handler.take() {
drop(state);
@@ -1030,7 +1031,7 @@ impl X11WindowStatePtr {
let mut state = self.state.borrow_mut();
state.input_handler = Some(input_handler);
};
- bounds
+ bounds.map(|b| b.scale(scale_factor))
}
pub fn set_bounds(&self, bounds: Bounds<i32>) -> anyhow::Result<()> {
@@ -1618,7 +1619,7 @@ impl PlatformWindow for X11Window {
}
}
- fn update_ime_position(&self, bounds: Bounds<ScaledPixels>) {
+ fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let mut state = self.0.state.borrow_mut();
let client = state.client.clone();
drop(state);
@@ -426,7 +426,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
key_char = Some(chars_for_modified_key(native_event.keyCode(), mods));
}
- let mut key = if shift
+ if shift
&& chars_ignoring_modifiers
.chars()
.all(|c| c.is_ascii_lowercase())
@@ -437,9 +437,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
chars_with_shift
} else {
chars_ignoring_modifiers
- };
-
- key
+ }
}
};
@@ -1,8 +1,9 @@
+use collections::HashMap;
use std::ffi::{CStr, c_void};
use objc::{msg_send, runtime::Object, sel, sel_impl};
-use crate::PlatformKeyboardLayout;
+use crate::{KeybindingKeystroke, Keystroke, PlatformKeyboardLayout, PlatformKeyboardMapper};
use super::{
TISCopyCurrentKeyboardLayoutInputSource, TISGetInputSourceProperty, kTISPropertyInputSourceID,
@@ -14,6 +15,10 @@ pub(crate) struct MacKeyboardLayout {
name: String,
}
+pub(crate) struct MacKeyboardMapper {
+ key_equivalents: Option<HashMap<char, char>>,
+}
+
impl PlatformKeyboardLayout for MacKeyboardLayout {
fn id(&self) -> &str {
&self.id
@@ -24,6 +29,27 @@ impl PlatformKeyboardLayout for MacKeyboardLayout {
}
}
+impl PlatformKeyboardMapper for MacKeyboardMapper {
+ fn map_key_equivalent(
+ &self,
+ mut keystroke: Keystroke,
+ use_key_equivalents: bool,
+ ) -> KeybindingKeystroke {
+ if use_key_equivalents && let Some(key_equivalents) = &self.key_equivalents {
+ if keystroke.key.chars().count() == 1
+ && let Some(key) = key_equivalents.get(&keystroke.key.chars().next().unwrap())
+ {
+ keystroke.key = key.to_string();
+ }
+ }
+ KeybindingKeystroke::from_keystroke(keystroke)
+ }
+
+ fn get_key_equivalents(&self) -> Option<&HashMap<char, char>> {
+ self.key_equivalents.as_ref()
+ }
+}
+
impl MacKeyboardLayout {
pub(crate) fn new() -> Self {
unsafe {
@@ -47,3 +73,1428 @@ impl MacKeyboardLayout {
}
}
}
+
+impl MacKeyboardMapper {
+ pub(crate) fn new(layout_id: &str) -> Self {
+ let key_equivalents = get_key_equivalents(layout_id);
+
+ Self { key_equivalents }
+ }
+}
+
+// On some keyboards (e.g. German QWERTZ) it is not possible to type the full ASCII range
+// without using option. This means that some of our built in keyboard shortcuts do not work
+// for those users.
+//
+// The way macOS solves this problem is to move shortcuts around so that they are all reachable,
+// even if the mnemonic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct
+//
+// For example, cmd-> is the "switch window" shortcut because the > key is right above tab.
+// To ensure this doesn't cause problems for shortcuts defined for a QWERTY layout, Apple moves
+// any shortcuts defined as cmd-> to cmd-:. Coincidentally this is also the same keyboard position
+// as cmd-> on a QWERTY layout.
+//
+// Another example is cmd-[ and cmd-], as they cannot be typed without option, those keys are remapped to cmd-ö
+// and cmd-ä. These shortcuts are not in the same position as a QWERTY keyboard, because on a QWERTZ keyboard
+// the + key is in the way; and shortcuts bound to cmd-+ are still typed as cmd-+ on either keyboard (though the
+// specific key moves)
+//
+// As far as I can tell, there's no way to query the mappings Apple uses except by rendering a menu with every
+// possible key combination, and inspecting the UI to see what it rendered. So that's what we did...
+//
+// These mappings were generated by running https://github.com/ConradIrwin/keyboard-inspector, tidying up the
+// output to remove languages with no mappings and other oddities, and converting it to a less verbose representation with:
+// jq -s 'map(to_entries | map({key: .key, value: [(.value | to_entries | map(.key) | join("")), (.value | to_entries | map(.value) | join(""))]}) | from_entries) | add'
+// From there I used multi-cursor to produce this match statement.
+fn get_key_equivalents(layout_id: &str) -> Option<HashMap<char, char>> {
+ let mappings: &[(char, char)] = match layout_id {
+ "com.apple.keylayout.ABC-AZERTY" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('.', ';'),
+ ('/', ':'),
+ ('0', 'à'),
+ ('1', '&'),
+ ('2', 'é'),
+ ('3', '"'),
+ ('4', '\''),
+ ('5', '('),
+ ('6', '§'),
+ ('7', 'è'),
+ ('8', '!'),
+ ('9', 'ç'),
+ (':', '°'),
+ (';', ')'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', '^'),
+ ('\'', 'ù'),
+ ('\\', '`'),
+ (']', '$'),
+ ('^', '6'),
+ ('`', '<'),
+ ('{', '¨'),
+ ('|', '£'),
+ ('}', '*'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.ABC-QWERTZ" => &[
+ ('"', '`'),
+ ('#', '§'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', 'ß'),
+ (':', 'Ü'),
+ (';', 'ü'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '´'),
+ ('\\', '#'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '\''),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Albanian" => &[
+ ('"', '\''),
+ (':', 'Ç'),
+ (';', 'ç'),
+ ('<', ';'),
+ ('>', ':'),
+ ('@', '"'),
+ ('\'', '@'),
+ ('\\', 'ë'),
+ ('`', '<'),
+ ('|', 'Ë'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Austrian" => &[
+ ('"', '`'),
+ ('#', '§'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', 'ß'),
+ (':', 'Ü'),
+ (';', 'ü'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '´'),
+ ('\\', '#'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '\''),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Azeri" => &[
+ ('"', 'Ə'),
+ (',', 'ç'),
+ ('.', 'ş'),
+ ('/', '.'),
+ (':', 'I'),
+ (';', 'ı'),
+ ('<', 'Ç'),
+ ('>', 'Ş'),
+ ('?', ','),
+ ('W', 'Ü'),
+ ('[', 'ö'),
+ ('\'', 'ə'),
+ (']', 'ğ'),
+ ('w', 'ü'),
+ ('{', 'Ö'),
+ ('|', '/'),
+ ('}', 'Ğ'),
+ ],
+ "com.apple.keylayout.Belgian" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('.', ';'),
+ ('/', ':'),
+ ('0', 'à'),
+ ('1', '&'),
+ ('2', 'é'),
+ ('3', '"'),
+ ('4', '\''),
+ ('5', '('),
+ ('6', '§'),
+ ('7', 'è'),
+ ('8', '!'),
+ ('9', 'ç'),
+ (':', '°'),
+ (';', ')'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', '^'),
+ ('\'', 'ù'),
+ ('\\', '`'),
+ (']', '$'),
+ ('^', '6'),
+ ('`', '<'),
+ ('{', '¨'),
+ ('|', '£'),
+ ('}', '*'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Brazilian-ABNT2" => &[
+ ('"', '`'),
+ ('/', 'ç'),
+ ('?', 'Ç'),
+ ('\'', '´'),
+ ('\\', '~'),
+ ('^', '¨'),
+ ('`', '\''),
+ ('|', '^'),
+ ('~', '"'),
+ ],
+ "com.apple.keylayout.Brazilian-Pro" => &[('^', 'ˆ'), ('~', '˜')],
+ "com.apple.keylayout.British" => &[('#', '£')],
+ "com.apple.keylayout.Canadian-CSA" => &[
+ ('"', 'È'),
+ ('/', 'é'),
+ ('<', '\''),
+ ('>', '"'),
+ ('?', 'É'),
+ ('[', '^'),
+ ('\'', 'è'),
+ ('\\', 'à'),
+ (']', 'ç'),
+ ('`', 'ù'),
+ ('{', '¨'),
+ ('|', 'À'),
+ ('}', 'Ç'),
+ ('~', 'Ù'),
+ ],
+ "com.apple.keylayout.Croatian" => &[
+ ('"', 'Ć'),
+ ('&', '\''),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ (':', 'Č'),
+ (';', 'č'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'š'),
+ ('\'', 'ć'),
+ ('\\', 'ž'),
+ (']', 'đ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Š'),
+ ('|', 'Ž'),
+ ('}', 'Đ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Croatian-PC" => &[
+ ('"', 'Ć'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '\''),
+ (':', 'Č'),
+ (';', 'č'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'š'),
+ ('\'', 'ć'),
+ ('\\', 'ž'),
+ (']', 'đ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Š'),
+ ('|', 'Ž'),
+ ('}', 'Đ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Czech" => &[
+ ('!', '1'),
+ ('"', '!'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('+', '%'),
+ ('/', '\''),
+ ('0', 'é'),
+ ('1', '+'),
+ ('2', 'ě'),
+ ('3', 'š'),
+ ('4', 'č'),
+ ('5', 'ř'),
+ ('6', 'ž'),
+ ('7', 'ý'),
+ ('8', 'á'),
+ ('9', 'í'),
+ (':', '"'),
+ (';', 'ů'),
+ ('<', '?'),
+ ('>', ':'),
+ ('?', 'ˇ'),
+ ('@', '2'),
+ ('[', 'ú'),
+ ('\'', '§'),
+ (']', ')'),
+ ('^', '6'),
+ ('`', '¨'),
+ ('{', 'Ú'),
+ ('}', '('),
+ ('~', '`'),
+ ],
+ "com.apple.keylayout.Czech-QWERTY" => &[
+ ('!', '1'),
+ ('"', '!'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('+', '%'),
+ ('/', '\''),
+ ('0', 'é'),
+ ('1', '+'),
+ ('2', 'ě'),
+ ('3', 'š'),
+ ('4', 'č'),
+ ('5', 'ř'),
+ ('6', 'ž'),
+ ('7', 'ý'),
+ ('8', 'á'),
+ ('9', 'í'),
+ (':', '"'),
+ (';', 'ů'),
+ ('<', '?'),
+ ('>', ':'),
+ ('?', 'ˇ'),
+ ('@', '2'),
+ ('[', 'ú'),
+ ('\'', '§'),
+ (']', ')'),
+ ('^', '6'),
+ ('`', '¨'),
+ ('{', 'Ú'),
+ ('}', '('),
+ ('~', '`'),
+ ],
+ "com.apple.keylayout.Danish" => &[
+ ('"', '^'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'æ'),
+ ('\'', '¨'),
+ ('\\', '\''),
+ (']', 'ø'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Æ'),
+ ('|', '*'),
+ ('}', 'Ø'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Faroese" => &[
+ ('"', 'Ø'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Æ'),
+ (';', 'æ'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'å'),
+ ('\'', 'ø'),
+ ('\\', '\''),
+ (']', 'ð'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Å'),
+ ('|', '*'),
+ ('}', 'Ð'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Finnish" => &[
+ ('"', '^'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '\''),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.FinnishExtended" => &[
+ ('"', 'ˆ'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '\''),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.FinnishSami-PC" => &[
+ ('"', 'ˆ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '@'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.French" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('.', ';'),
+ ('/', ':'),
+ ('0', 'à'),
+ ('1', '&'),
+ ('2', 'é'),
+ ('3', '"'),
+ ('4', '\''),
+ ('5', '('),
+ ('6', '§'),
+ ('7', 'è'),
+ ('8', '!'),
+ ('9', 'ç'),
+ (':', '°'),
+ (';', ')'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', '^'),
+ ('\'', 'ù'),
+ ('\\', '`'),
+ (']', '$'),
+ ('^', '6'),
+ ('`', '<'),
+ ('{', '¨'),
+ ('|', '£'),
+ ('}', '*'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.French-PC" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('-', ')'),
+ ('.', ';'),
+ ('/', ':'),
+ ('0', 'à'),
+ ('1', '&'),
+ ('2', 'é'),
+ ('3', '"'),
+ ('4', '\''),
+ ('5', '('),
+ ('6', '-'),
+ ('7', 'è'),
+ ('8', '_'),
+ ('9', 'ç'),
+ (':', '§'),
+ (';', '!'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', '^'),
+ ('\'', 'ù'),
+ ('\\', '*'),
+ (']', '$'),
+ ('^', '6'),
+ ('_', '°'),
+ ('`', '<'),
+ ('{', '¨'),
+ ('|', 'μ'),
+ ('}', '£'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.French-numerical" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('.', ';'),
+ ('/', ':'),
+ ('0', 'à'),
+ ('1', '&'),
+ ('2', 'é'),
+ ('3', '"'),
+ ('4', '\''),
+ ('5', '('),
+ ('6', '§'),
+ ('7', 'è'),
+ ('8', '!'),
+ ('9', 'ç'),
+ (':', '°'),
+ (';', ')'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', '^'),
+ ('\'', 'ù'),
+ ('\\', '`'),
+ (']', '$'),
+ ('^', '6'),
+ ('`', '<'),
+ ('{', '¨'),
+ ('|', '£'),
+ ('}', '*'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.German" => &[
+ ('"', '`'),
+ ('#', '§'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', 'ß'),
+ (':', 'Ü'),
+ (';', 'ü'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '´'),
+ ('\\', '#'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '\''),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.German-DIN-2137" => &[
+ ('"', '`'),
+ ('#', '§'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', 'ß'),
+ (':', 'Ü'),
+ (';', 'ü'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '´'),
+ ('\\', '#'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '\''),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Hawaiian" => &[('\'', 'ʻ')],
+ "com.apple.keylayout.Hungarian" => &[
+ ('!', '\''),
+ ('"', 'Á'),
+ ('#', '+'),
+ ('$', '!'),
+ ('&', '='),
+ ('(', ')'),
+ (')', 'Ö'),
+ ('*', '('),
+ ('+', 'Ó'),
+ ('/', 'ü'),
+ ('0', 'ö'),
+ (':', 'É'),
+ (';', 'é'),
+ ('<', 'Ü'),
+ ('=', 'ó'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ő'),
+ ('\'', 'á'),
+ ('\\', 'ű'),
+ (']', 'ú'),
+ ('^', '/'),
+ ('`', 'í'),
+ ('{', 'Ő'),
+ ('|', 'Ű'),
+ ('}', 'Ú'),
+ ('~', 'Í'),
+ ],
+ "com.apple.keylayout.Hungarian-QWERTY" => &[
+ ('!', '\''),
+ ('"', 'Á'),
+ ('#', '+'),
+ ('$', '!'),
+ ('&', '='),
+ ('(', ')'),
+ (')', 'Ö'),
+ ('*', '('),
+ ('+', 'Ó'),
+ ('/', 'ü'),
+ ('0', 'ö'),
+ (':', 'É'),
+ (';', 'é'),
+ ('<', 'Ü'),
+ ('=', 'ó'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ő'),
+ ('\'', 'á'),
+ ('\\', 'ű'),
+ (']', 'ú'),
+ ('^', '/'),
+ ('`', 'í'),
+ ('{', 'Ő'),
+ ('|', 'Ű'),
+ ('}', 'Ú'),
+ ('~', 'Í'),
+ ],
+ "com.apple.keylayout.Icelandic" => &[
+ ('"', 'Ö'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '\''),
+ (':', 'Ð'),
+ (';', 'ð'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'æ'),
+ ('\'', 'ö'),
+ ('\\', 'þ'),
+ (']', '´'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Æ'),
+ ('|', 'Þ'),
+ ('}', '´'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Irish" => &[('#', '£')],
+ "com.apple.keylayout.IrishExtended" => &[('#', '£')],
+ "com.apple.keylayout.Italian" => &[
+ ('!', '1'),
+ ('"', '%'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ (',', ';'),
+ ('.', ':'),
+ ('/', ','),
+ ('0', 'é'),
+ ('1', '&'),
+ ('2', '"'),
+ ('3', '\''),
+ ('4', '('),
+ ('5', 'ç'),
+ ('6', 'è'),
+ ('7', ')'),
+ ('8', '£'),
+ ('9', 'à'),
+ (':', '!'),
+ (';', 'ò'),
+ ('<', '.'),
+ ('>', '/'),
+ ('@', '2'),
+ ('[', 'ì'),
+ ('\'', 'ù'),
+ ('\\', '§'),
+ (']', '$'),
+ ('^', '6'),
+ ('`', '<'),
+ ('{', '^'),
+ ('|', '°'),
+ ('}', '*'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Italian-Pro" => &[
+ ('"', '^'),
+ ('#', '£'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '\''),
+ (':', 'é'),
+ (';', 'è'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ò'),
+ ('\'', 'ì'),
+ ('\\', 'ù'),
+ (']', 'à'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'ç'),
+ ('|', '§'),
+ ('}', '°'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.LatinAmerican" => &[
+ ('"', '¨'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '\''),
+ (':', 'Ñ'),
+ (';', 'ñ'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', '{'),
+ ('\'', '´'),
+ ('\\', '¿'),
+ (']', '}'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', '['),
+ ('|', '¡'),
+ ('}', ']'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Lithuanian" => &[
+ ('!', 'Ą'),
+ ('#', 'Ę'),
+ ('$', 'Ė'),
+ ('%', 'Į'),
+ ('&', 'Ų'),
+ ('*', 'Ū'),
+ ('+', 'Ž'),
+ ('1', 'ą'),
+ ('2', 'č'),
+ ('3', 'ę'),
+ ('4', 'ė'),
+ ('5', 'į'),
+ ('6', 'š'),
+ ('7', 'ų'),
+ ('8', 'ū'),
+ ('=', 'ž'),
+ ('@', 'Č'),
+ ('^', 'Š'),
+ ],
+ "com.apple.keylayout.Maltese" => &[
+ ('#', '£'),
+ ('[', 'ġ'),
+ (']', 'ħ'),
+ ('`', 'ż'),
+ ('{', 'Ġ'),
+ ('}', 'Ħ'),
+ ('~', 'Ż'),
+ ],
+ "com.apple.keylayout.NorthernSami" => &[
+ ('"', 'Ŋ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('Q', 'Á'),
+ ('W', 'Š'),
+ ('X', 'Č'),
+ ('[', 'ø'),
+ ('\'', 'ŋ'),
+ ('\\', 'đ'),
+ (']', 'æ'),
+ ('^', '&'),
+ ('`', 'ž'),
+ ('q', 'á'),
+ ('w', 'š'),
+ ('x', 'č'),
+ ('{', 'Ø'),
+ ('|', 'Đ'),
+ ('}', 'Æ'),
+ ('~', 'Ž'),
+ ],
+ "com.apple.keylayout.Norwegian" => &[
+ ('"', '^'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ø'),
+ ('\'', '¨'),
+ ('\\', '@'),
+ (']', 'æ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ø'),
+ ('|', '*'),
+ ('}', 'Æ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.NorwegianExtended" => &[
+ ('"', 'ˆ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ø'),
+ ('\\', '@'),
+ (']', 'æ'),
+ ('`', '<'),
+ ('}', 'Æ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.NorwegianSami-PC" => &[
+ ('"', 'ˆ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ø'),
+ ('\'', '¨'),
+ ('\\', '@'),
+ (']', 'æ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ø'),
+ ('|', '*'),
+ ('}', 'Æ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Polish" => &[
+ ('!', '§'),
+ ('"', 'ę'),
+ ('#', '!'),
+ ('$', '?'),
+ ('%', '+'),
+ ('&', ':'),
+ ('(', '/'),
+ (')', '"'),
+ ('*', '_'),
+ ('+', ']'),
+ (',', '.'),
+ ('.', ','),
+ ('/', 'ż'),
+ (':', 'Ł'),
+ (';', 'ł'),
+ ('<', 'ś'),
+ ('=', '['),
+ ('>', 'ń'),
+ ('?', 'Ż'),
+ ('@', '%'),
+ ('[', 'ó'),
+ ('\'', 'ą'),
+ ('\\', ';'),
+ (']', '('),
+ ('^', '='),
+ ('_', 'ć'),
+ ('`', '<'),
+ ('{', 'ź'),
+ ('|', '$'),
+ ('}', ')'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Portuguese" => &[
+ ('"', '`'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '\''),
+ (':', 'ª'),
+ (';', 'º'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ç'),
+ ('\'', '´'),
+ (']', '~'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ç'),
+ ('}', '^'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Sami-PC" => &[
+ ('"', 'Ŋ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('Q', 'Á'),
+ ('W', 'Š'),
+ ('X', 'Č'),
+ ('[', 'ø'),
+ ('\'', 'ŋ'),
+ ('\\', 'đ'),
+ (']', 'æ'),
+ ('^', '&'),
+ ('`', 'ž'),
+ ('q', 'á'),
+ ('w', 'š'),
+ ('x', 'č'),
+ ('{', 'Ø'),
+ ('|', 'Đ'),
+ ('}', 'Æ'),
+ ('~', 'Ž'),
+ ],
+ "com.apple.keylayout.Serbian-Latin" => &[
+ ('"', 'Ć'),
+ ('&', '\''),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ (':', 'Č'),
+ (';', 'č'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'š'),
+ ('\'', 'ć'),
+ ('\\', 'ž'),
+ (']', 'đ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Š'),
+ ('|', 'Ž'),
+ ('}', 'Đ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Slovak" => &[
+ ('!', '1'),
+ ('"', '!'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('+', '%'),
+ ('/', '\''),
+ ('0', 'é'),
+ ('1', '+'),
+ ('2', 'ľ'),
+ ('3', 'š'),
+ ('4', 'č'),
+ ('5', 'ť'),
+ ('6', 'ž'),
+ ('7', 'ý'),
+ ('8', 'á'),
+ ('9', 'í'),
+ (':', '"'),
+ (';', 'ô'),
+ ('<', '?'),
+ ('>', ':'),
+ ('?', 'ˇ'),
+ ('@', '2'),
+ ('[', 'ú'),
+ ('\'', '§'),
+ (']', 'ä'),
+ ('^', '6'),
+ ('`', 'ň'),
+ ('{', 'Ú'),
+ ('}', 'Ä'),
+ ('~', 'Ň'),
+ ],
+ "com.apple.keylayout.Slovak-QWERTY" => &[
+ ('!', '1'),
+ ('"', '!'),
+ ('#', '3'),
+ ('$', '4'),
+ ('%', '5'),
+ ('&', '7'),
+ ('(', '9'),
+ (')', '0'),
+ ('*', '8'),
+ ('+', '%'),
+ ('/', '\''),
+ ('0', 'é'),
+ ('1', '+'),
+ ('2', 'ľ'),
+ ('3', 'š'),
+ ('4', 'č'),
+ ('5', 'ť'),
+ ('6', 'ž'),
+ ('7', 'ý'),
+ ('8', 'á'),
+ ('9', 'í'),
+ (':', '"'),
+ (';', 'ô'),
+ ('<', '?'),
+ ('>', ':'),
+ ('?', 'ˇ'),
+ ('@', '2'),
+ ('[', 'ú'),
+ ('\'', '§'),
+ (']', 'ä'),
+ ('^', '6'),
+ ('`', 'ň'),
+ ('{', 'Ú'),
+ ('}', 'Ä'),
+ ('~', 'Ň'),
+ ],
+ "com.apple.keylayout.Slovenian" => &[
+ ('"', 'Ć'),
+ ('&', '\''),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ (':', 'Č'),
+ (';', 'č'),
+ ('<', ';'),
+ ('=', '*'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'š'),
+ ('\'', 'ć'),
+ ('\\', 'ž'),
+ (']', 'đ'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Š'),
+ ('|', 'Ž'),
+ ('}', 'Đ'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Spanish" => &[
+ ('!', '¡'),
+ ('"', '¨'),
+ ('.', 'ç'),
+ ('/', '.'),
+ (':', 'º'),
+ (';', '´'),
+ ('<', '¿'),
+ ('>', 'Ç'),
+ ('@', '!'),
+ ('[', 'ñ'),
+ ('\'', '`'),
+ ('\\', '\''),
+ (']', ';'),
+ ('^', '/'),
+ ('`', '<'),
+ ('{', 'Ñ'),
+ ('|', '"'),
+ ('}', ':'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Spanish-ISO" => &[
+ ('"', '¨'),
+ ('#', '·'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('.', 'ç'),
+ ('/', '.'),
+ (':', 'º'),
+ (';', '´'),
+ ('<', '¿'),
+ ('>', 'Ç'),
+ ('@', '"'),
+ ('[', 'ñ'),
+ ('\'', '`'),
+ ('\\', '\''),
+ (']', ';'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ñ'),
+ ('|', '"'),
+ ('}', '`'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Swedish" => &[
+ ('"', '^'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '\''),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Swedish-Pro" => &[
+ ('"', '^'),
+ ('$', '€'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '\''),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.SwedishSami-PC" => &[
+ ('"', 'ˆ'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('/', '´'),
+ (':', 'Å'),
+ (';', 'å'),
+ ('<', ';'),
+ ('=', '`'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '¨'),
+ ('\\', '@'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ö'),
+ ('|', '*'),
+ ('}', 'Ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.SwissFrench" => &[
+ ('!', '+'),
+ ('"', '`'),
+ ('#', '*'),
+ ('$', 'ç'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('+', '!'),
+ ('/', '\''),
+ (':', 'ü'),
+ (';', 'è'),
+ ('<', ';'),
+ ('=', '¨'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'é'),
+ ('\'', '^'),
+ ('\\', '$'),
+ (']', 'à'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'ö'),
+ ('|', '£'),
+ ('}', 'ä'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.SwissGerman" => &[
+ ('!', '+'),
+ ('"', '`'),
+ ('#', '*'),
+ ('$', 'ç'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('+', '!'),
+ ('/', '\''),
+ (':', 'è'),
+ (';', 'ü'),
+ ('<', ';'),
+ ('=', '¨'),
+ ('>', ':'),
+ ('@', '"'),
+ ('[', 'ö'),
+ ('\'', '^'),
+ ('\\', '$'),
+ (']', 'ä'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'é'),
+ ('|', '£'),
+ ('}', 'à'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Turkish" => &[
+ ('"', '-'),
+ ('#', '"'),
+ ('$', '\''),
+ ('%', '('),
+ ('&', ')'),
+ ('(', '%'),
+ (')', ':'),
+ ('*', '_'),
+ (',', 'ö'),
+ ('-', 'ş'),
+ ('.', 'ç'),
+ ('/', '.'),
+ (':', '$'),
+ ('<', 'Ö'),
+ ('>', 'Ç'),
+ ('@', '*'),
+ ('[', 'ğ'),
+ ('\'', ','),
+ ('\\', 'ü'),
+ (']', 'ı'),
+ ('^', '/'),
+ ('_', 'Ş'),
+ ('`', '<'),
+ ('{', 'Ğ'),
+ ('|', 'Ü'),
+ ('}', 'I'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Turkish-QWERTY-PC" => &[
+ ('"', 'I'),
+ ('#', '^'),
+ ('$', '+'),
+ ('&', '/'),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ ('+', ':'),
+ (',', 'ö'),
+ ('.', 'ç'),
+ ('/', '*'),
+ (':', 'Ş'),
+ (';', 'ş'),
+ ('<', 'Ö'),
+ ('=', '.'),
+ ('>', 'Ç'),
+ ('@', '\''),
+ ('[', 'ğ'),
+ ('\'', 'ı'),
+ ('\\', ','),
+ (']', 'ü'),
+ ('^', '&'),
+ ('`', '<'),
+ ('{', 'Ğ'),
+ ('|', ';'),
+ ('}', 'Ü'),
+ ('~', '>'),
+ ],
+ "com.apple.keylayout.Turkish-Standard" => &[
+ ('"', 'Ş'),
+ ('#', '^'),
+ ('&', '\''),
+ ('(', ')'),
+ (')', '='),
+ ('*', '('),
+ (',', '.'),
+ ('.', ','),
+ (':', 'Ç'),
+ (';', 'ç'),
+ ('<', ':'),
+ ('=', '*'),
+ ('>', ';'),
+ ('@', '"'),
+ ('[', 'ğ'),
+ ('\'', 'ş'),
+ ('\\', 'ü'),
+ (']', 'ı'),
+ ('^', '&'),
+ ('`', 'ö'),
+ ('{', 'Ğ'),
+ ('|', 'Ü'),
+ ('}', 'I'),
+ ('~', 'Ö'),
+ ],
+ "com.apple.keylayout.Turkmen" => &[
+ ('C', 'Ç'),
+ ('Q', 'Ä'),
+ ('V', 'Ý'),
+ ('X', 'Ü'),
+ ('[', 'ň'),
+ ('\\', 'ş'),
+ (']', 'ö'),
+ ('^', '№'),
+ ('`', 'ž'),
+ ('c', 'ç'),
+ ('q', 'ä'),
+ ('v', 'ý'),
+ ('x', 'ü'),
+ ('{', 'Ň'),
+ ('|', 'Ş'),
+ ('}', 'Ö'),
+ ('~', 'Ž'),
+ ],
+ "com.apple.keylayout.USInternational-PC" => &[('^', 'ˆ'), ('~', '˜')],
+ "com.apple.keylayout.Welsh" => &[('#', '£')],
+
+ _ => return None,
+ };
+
+ Some(HashMap::from_iter(mappings.iter().cloned()))
+}
@@ -332,7 +332,7 @@ impl MetalRenderer {
self.path_intermediate_texture = Some(self.device.new_texture(&texture_descriptor));
if self.path_sample_count > 1 {
- let mut msaa_descriptor = texture_descriptor.clone();
+ let mut msaa_descriptor = texture_descriptor;
msaa_descriptor.set_texture_type(metal::MTLTextureType::D2Multisample);
msaa_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
msaa_descriptor.set_sample_count(self.path_sample_count as _);
@@ -1,5 +1,5 @@
use super::{
- BoolExt, MacKeyboardLayout,
+ BoolExt, MacKeyboardLayout, MacKeyboardMapper,
attributed_string::{NSAttributedString, NSMutableAttributedString},
events::key_to_native,
renderer,
@@ -8,8 +8,9 @@ use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString,
CursorStyle, ForegroundExecutor, Image, ImageFormat, KeyContext, Keymap, MacDispatcher,
MacDisplay, MacWindow, Menu, MenuItem, OsMenu, OwnedMenu, PathPromptOptions, Platform,
- PlatformDisplay, PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow, Result,
- SemanticVersion, SystemMenuType, Task, WindowAppearance, WindowParams, hash,
+ PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem,
+ PlatformWindow, Result, SemanticVersion, SystemMenuType, Task, WindowAppearance, WindowParams,
+ hash,
};
use anyhow::{Context as _, anyhow};
use block::ConcreteBlock;
@@ -171,6 +172,7 @@ pub(crate) struct MacPlatformState {
finish_launching: Option<Box<dyn FnOnce()>>,
dock_menu: Option<id>,
menus: Option<Vec<OwnedMenu>>,
+ keyboard_mapper: Rc<MacKeyboardMapper>,
}
impl Default for MacPlatform {
@@ -189,6 +191,9 @@ impl MacPlatform {
#[cfg(not(feature = "font-kit"))]
let text_system = Arc::new(crate::NoopTextSystem::new());
+ let keyboard_layout = MacKeyboardLayout::new();
+ let keyboard_mapper = Rc::new(MacKeyboardMapper::new(keyboard_layout.id()));
+
Self(Mutex::new(MacPlatformState {
headless,
text_system,
@@ -209,6 +214,7 @@ impl MacPlatform {
dock_menu: None,
on_keyboard_layout_change: None,
menus: None,
+ keyboard_mapper,
}))
}
@@ -348,19 +354,19 @@ impl MacPlatform {
let mut mask = NSEventModifierFlags::empty();
for (modifier, flag) in &[
(
- keystroke.modifiers.platform,
+ keystroke.modifiers().platform,
NSEventModifierFlags::NSCommandKeyMask,
),
(
- keystroke.modifiers.control,
+ keystroke.modifiers().control,
NSEventModifierFlags::NSControlKeyMask,
),
(
- keystroke.modifiers.alt,
+ keystroke.modifiers().alt,
NSEventModifierFlags::NSAlternateKeyMask,
),
(
- keystroke.modifiers.shift,
+ keystroke.modifiers().shift,
NSEventModifierFlags::NSShiftKeyMask,
),
] {
@@ -373,7 +379,7 @@ impl MacPlatform {
.initWithTitle_action_keyEquivalent_(
ns_string(name),
selector,
- ns_string(key_to_native(&keystroke.key).as_ref()),
+ ns_string(key_to_native(keystroke.key()).as_ref()),
)
.autorelease();
if Self::os_version() >= SemanticVersion::new(12, 0, 0) {
@@ -882,6 +888,10 @@ impl Platform for MacPlatform {
Box::new(MacKeyboardLayout::new())
}
+ fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper> {
+ self.0.lock().keyboard_mapper.clone()
+ }
+
fn app_path(&self) -> Result<PathBuf> {
unsafe {
let bundle: id = NSBundle::mainBundle();
@@ -1393,6 +1403,8 @@ extern "C" fn will_terminate(this: &mut Object, _: Sel, _: id) {
extern "C" fn on_keyboard_layout_change(this: &mut Object, _: Sel, _: id) {
let platform = unsafe { get_mac_platform(this) };
let mut lock = platform.0.lock();
+ let keyboard_layout = MacKeyboardLayout::new();
+ lock.keyboard_mapper = Rc::new(MacKeyboardMapper::new(keyboard_layout.id()));
if let Some(mut callback) = lock.on_keyboard_layout_change.take() {
drop(lock);
callback();
@@ -4,8 +4,9 @@ use crate::{
ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton,
MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, PlatformAtlas, PlatformDisplay,
PlatformInput, PlatformWindow, Point, PromptButton, PromptLevel, RequestFrameOptions,
- ScaledPixels, Size, Timer, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
- WindowControlArea, WindowKind, WindowParams, platform::PlatformInputHandler, point, px, size,
+ SharedString, Size, SystemWindowTab, Timer, WindowAppearance, WindowBackgroundAppearance,
+ WindowBounds, WindowControlArea, WindowKind, WindowParams, dispatch_get_main_queue,
+ dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, px, size,
};
use block::ConcreteBlock;
use cocoa::{
@@ -24,6 +25,7 @@ use cocoa::{
NSUserDefaults,
},
};
+
use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
use ctor::ctor;
use futures::channel::oneshot;
@@ -82,6 +84,12 @@ type NSDragOperation = NSUInteger;
const NSDragOperationNone: NSDragOperation = 0;
#[allow(non_upper_case_globals)]
const NSDragOperationCopy: NSDragOperation = 1;
+#[derive(PartialEq)]
+pub enum UserTabbingPreference {
+ Never,
+ Always,
+ InFullScreen,
+}
#[link(name = "CoreGraphics", kind = "framework")]
unsafe extern "C" {
@@ -343,6 +351,36 @@ unsafe fn build_window_class(name: &'static str, superclass: &Class) -> *const C
conclude_drag_operation as extern "C" fn(&Object, Sel, id),
);
+ decl.add_method(
+ sel!(addTitlebarAccessoryViewController:),
+ add_titlebar_accessory_view_controller as extern "C" fn(&Object, Sel, id),
+ );
+
+ decl.add_method(
+ sel!(moveTabToNewWindow:),
+ move_tab_to_new_window as extern "C" fn(&Object, Sel, id),
+ );
+
+ decl.add_method(
+ sel!(mergeAllWindows:),
+ merge_all_windows as extern "C" fn(&Object, Sel, id),
+ );
+
+ decl.add_method(
+ sel!(selectNextTab:),
+ select_next_tab as extern "C" fn(&Object, Sel, id),
+ );
+
+ decl.add_method(
+ sel!(selectPreviousTab:),
+ select_previous_tab as extern "C" fn(&Object, Sel, id),
+ );
+
+ decl.add_method(
+ sel!(toggleTabBar:),
+ toggle_tab_bar as extern "C" fn(&Object, Sel, id),
+ );
+
decl.register()
}
}
@@ -375,6 +413,11 @@ struct MacWindowState {
// Whether the next left-mouse click is also the focusing click.
first_mouse: bool,
fullscreen_restore_bounds: Bounds<Pixels>,
+ move_tab_to_new_window_callback: Option<Box<dyn FnMut()>>,
+ merge_all_windows_callback: Option<Box<dyn FnMut()>>,
+ select_next_tab_callback: Option<Box<dyn FnMut()>>,
+ select_previous_tab_callback: Option<Box<dyn FnMut()>>,
+ toggle_tab_bar_callback: Option<Box<dyn FnMut()>>,
}
impl MacWindowState {
@@ -530,10 +573,13 @@ impl MacWindow {
titlebar,
kind,
is_movable,
+ is_resizable,
+ is_minimizable,
focus,
show,
display_id,
window_min_size,
+ tabbing_identifier,
}: WindowParams,
executor: ForegroundExecutor,
renderer_context: renderer::Context,
@@ -541,14 +587,25 @@ impl MacWindow {
unsafe {
let pool = NSAutoreleasePool::new(nil);
- let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: NO];
+ let allows_automatic_window_tabbing = tabbing_identifier.is_some();
+ if allows_automatic_window_tabbing {
+ let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: YES];
+ } else {
+ let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: NO];
+ }
let mut style_mask;
if let Some(titlebar) = titlebar.as_ref() {
- style_mask = NSWindowStyleMask::NSClosableWindowMask
- | NSWindowStyleMask::NSMiniaturizableWindowMask
- | NSWindowStyleMask::NSResizableWindowMask
- | NSWindowStyleMask::NSTitledWindowMask;
+ style_mask =
+ NSWindowStyleMask::NSClosableWindowMask | NSWindowStyleMask::NSTitledWindowMask;
+
+ if is_resizable {
+ style_mask |= NSWindowStyleMask::NSResizableWindowMask;
+ }
+
+ if is_minimizable {
+ style_mask |= NSWindowStyleMask::NSMiniaturizableWindowMask;
+ }
if titlebar.appears_transparent {
style_mask |= NSWindowStyleMask::NSFullSizeContentViewWindowMask;
@@ -660,6 +717,11 @@ impl MacWindow {
external_files_dragged: false,
first_mouse: false,
fullscreen_restore_bounds: Bounds::default(),
+ move_tab_to_new_window_callback: None,
+ merge_all_windows_callback: None,
+ select_next_tab_callback: None,
+ select_previous_tab_callback: None,
+ toggle_tab_bar_callback: None,
})));
(*native_window).set_ivar(
@@ -714,6 +776,13 @@ impl MacWindow {
WindowKind::Normal => {
native_window.setLevel_(NSNormalWindowLevel);
native_window.setAcceptsMouseMovedEvents_(YES);
+
+ if let Some(tabbing_identifier) = tabbing_identifier {
+ let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str());
+ let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id];
+ } else {
+ let _: () = msg_send![native_window, setTabbingIdentifier:nil];
+ }
}
WindowKind::PopUp => {
// Use a tracking area to allow receiving MouseMoved events even when
@@ -742,6 +811,38 @@ impl MacWindow {
}
}
+ let app = NSApplication::sharedApplication(nil);
+ let main_window: id = msg_send![app, mainWindow];
+ if allows_automatic_window_tabbing
+ && !main_window.is_null()
+ && main_window != native_window
+ {
+ let main_window_is_fullscreen = main_window
+ .styleMask()
+ .contains(NSWindowStyleMask::NSFullScreenWindowMask);
+ let user_tabbing_preference = Self::get_user_tabbing_preference()
+ .unwrap_or(UserTabbingPreference::InFullScreen);
+ let should_add_as_tab = user_tabbing_preference == UserTabbingPreference::Always
+ || user_tabbing_preference == UserTabbingPreference::InFullScreen
+ && main_window_is_fullscreen;
+
+ if should_add_as_tab {
+ let main_window_can_tab: BOOL =
+ msg_send![main_window, respondsToSelector: sel!(addTabbedWindow:ordered:)];
+ let main_window_visible: BOOL = msg_send![main_window, isVisible];
+
+ if main_window_can_tab == YES && main_window_visible == YES {
+ let _: () = msg_send![main_window, addTabbedWindow: native_window ordered: NSWindowOrderingMode::NSWindowAbove];
+
+ // Ensure the window is visible immediately after adding the tab, since the tab bar is updated with a new entry at this point.
+ // Note: Calling orderFront here can break fullscreen mode (makes fullscreen windows exit fullscreen), so only do this if the main window is not fullscreen.
+ if !main_window_is_fullscreen {
+ let _: () = msg_send![native_window, orderFront: nil];
+ }
+ }
+ }
+ }
+
if focus && show {
native_window.makeKeyAndOrderFront_(nil);
} else if show {
@@ -796,6 +897,33 @@ impl MacWindow {
window_handles
}
}
+
+ pub fn get_user_tabbing_preference() -> Option<UserTabbingPreference> {
+ unsafe {
+ let defaults: id = NSUserDefaults::standardUserDefaults();
+ let domain = NSString::alloc(nil).init_str("NSGlobalDomain");
+ let key = NSString::alloc(nil).init_str("AppleWindowTabbingMode");
+
+ let dict: id = msg_send![defaults, persistentDomainForName: domain];
+ let value: id = if !dict.is_null() {
+ msg_send![dict, objectForKey: key]
+ } else {
+ nil
+ };
+
+ let value_str = if !value.is_null() {
+ CStr::from_ptr(NSString::UTF8String(value)).to_string_lossy()
+ } else {
+ "".into()
+ };
+
+ match value_str.as_ref() {
+ "manual" => Some(UserTabbingPreference::Never),
+ "always" => Some(UserTabbingPreference::Always),
+ _ => Some(UserTabbingPreference::InFullScreen),
+ }
+ }
+ }
}
impl Drop for MacWindow {
@@ -851,6 +979,65 @@ impl PlatformWindow for MacWindow {
.detach();
}
+ fn merge_all_windows(&self) {
+ let native_window = self.0.lock().native_window;
+ unsafe extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) {
+ let native_window = context as id;
+ let _: () = msg_send![native_window, mergeAllWindows:nil];
+ }
+
+ unsafe {
+ dispatch_async_f(
+ dispatch_get_main_queue(),
+ native_window as *mut std::ffi::c_void,
+ Some(merge_windows_async),
+ );
+ }
+ }
+
+ fn move_tab_to_new_window(&self) {
+ let native_window = self.0.lock().native_window;
+ unsafe extern "C" fn move_tab_async(context: *mut std::ffi::c_void) {
+ let native_window = context as id;
+ let _: () = msg_send![native_window, moveTabToNewWindow:nil];
+ let _: () = msg_send![native_window, makeKeyAndOrderFront: nil];
+ }
+
+ unsafe {
+ dispatch_async_f(
+ dispatch_get_main_queue(),
+ native_window as *mut std::ffi::c_void,
+ Some(move_tab_async),
+ );
+ }
+ }
+
+ fn toggle_window_tab_overview(&self) {
+ let native_window = self.0.lock().native_window;
+ unsafe {
+ let _: () = msg_send![native_window, toggleTabOverview:nil];
+ }
+ }
+
+ fn set_tabbing_identifier(&self, tabbing_identifier: Option<String>) {
+ let native_window = self.0.lock().native_window;
+ unsafe {
+ let allows_automatic_window_tabbing = tabbing_identifier.is_some();
+ if allows_automatic_window_tabbing {
+ let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: YES];
+ } else {
+ let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: NO];
+ }
+
+ if let Some(tabbing_identifier) = tabbing_identifier {
+ let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str());
+ let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id];
+ } else {
+ let _: () = msg_send![native_window, setTabbingIdentifier:nil];
+ }
+ }
+ }
+
fn scale_factor(&self) -> f32 {
self.0.as_ref().lock().scale_factor()
}
@@ -1051,6 +1238,17 @@ impl PlatformWindow for MacWindow {
}
}
+ fn get_title(&self) -> String {
+ unsafe {
+ let title: id = msg_send![self.0.lock().native_window, title];
+ if title.is_null() {
+ "".to_string()
+ } else {
+ title.to_str().to_string()
+ }
+ }
+ }
+
fn set_app_id(&mut self, _app_id: &str) {}
fn set_background_appearance(&self, background_appearance: WindowBackgroundAppearance) {
@@ -1090,7 +1288,7 @@ impl PlatformWindow for MacWindow {
NSView::removeFromSuperview(blur_view);
this.blurred_view = None;
}
- } else if this.blurred_view == None {
+ } else if this.blurred_view.is_none() {
let content_view = this.native_window.contentView();
let frame = NSView::bounds(content_view);
let mut blur_view: id = msg_send![BLURRED_VIEW_CLASS, alloc];
@@ -1212,6 +1410,62 @@ impl PlatformWindow for MacWindow {
self.0.lock().appearance_changed_callback = Some(callback);
}
+ fn tabbed_windows(&self) -> Option<Vec<SystemWindowTab>> {
+ unsafe {
+ let windows: id = msg_send![self.0.lock().native_window, tabbedWindows];
+ if windows.is_null() {
+ return None;
+ }
+
+ let count: NSUInteger = msg_send![windows, count];
+ let mut result = Vec::new();
+ for i in 0..count {
+ let window: id = msg_send![windows, objectAtIndex:i];
+ if msg_send![window, isKindOfClass: WINDOW_CLASS] {
+ let handle = get_window_state(&*window).lock().handle;
+ let title: id = msg_send![window, title];
+ let title = SharedString::from(title.to_str().to_string());
+
+ result.push(SystemWindowTab::new(title, handle));
+ }
+ }
+
+ Some(result)
+ }
+ }
+
+ fn tab_bar_visible(&self) -> bool {
+ unsafe {
+ let tab_group: id = msg_send![self.0.lock().native_window, tabGroup];
+ if tab_group.is_null() {
+ false
+ } else {
+ let tab_bar_visible: BOOL = msg_send![tab_group, isTabBarVisible];
+ tab_bar_visible == YES
+ }
+ }
+ }
+
+ fn on_move_tab_to_new_window(&self, callback: Box<dyn FnMut()>) {
+ self.0.as_ref().lock().move_tab_to_new_window_callback = Some(callback);
+ }
+
+ fn on_merge_all_windows(&self, callback: Box<dyn FnMut()>) {
+ self.0.as_ref().lock().merge_all_windows_callback = Some(callback);
+ }
+
+ fn on_select_next_tab(&self, callback: Box<dyn FnMut()>) {
+ self.0.as_ref().lock().select_next_tab_callback = Some(callback);
+ }
+
+ fn on_select_previous_tab(&self, callback: Box<dyn FnMut()>) {
+ self.0.as_ref().lock().select_previous_tab_callback = Some(callback);
+ }
+
+ fn on_toggle_tab_bar(&self, callback: Box<dyn FnMut()>) {
+ self.0.as_ref().lock().toggle_tab_bar_callback = Some(callback);
+ }
+
fn draw(&self, scene: &crate::Scene) {
let mut this = self.0.lock();
this.renderer.draw(scene);
@@ -1225,7 +1479,7 @@ impl PlatformWindow for MacWindow {
None
}
- fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {
+ fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
let executor = self.0.lock().executor.clone();
executor
.spawn(async move {
@@ -1653,6 +1907,7 @@ extern "C" fn window_did_change_occlusion_state(this: &Object, _: Sel, _: id) {
.occlusionState()
.contains(NSWindowOcclusionState::NSWindowOcclusionStateVisible)
{
+ lock.move_traffic_light();
lock.start_display_link();
} else {
lock.stop_display_link();
@@ -1714,7 +1969,7 @@ extern "C" fn window_did_change_screen(this: &Object, _: Sel, _: id) {
extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id) {
let window_state = unsafe { get_window_state(this) };
- let lock = window_state.lock();
+ let mut lock = window_state.lock();
let is_active = unsafe { lock.native_window.isKeyWindow() == YES };
// When opening a pop-up while the application isn't active, Cocoa sends a spurious
@@ -1735,9 +1990,34 @@ extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id)
let executor = lock.executor.clone();
drop(lock);
+
+ // If window is becoming active, trigger immediate synchronous frame request.
+ if selector == sel!(windowDidBecomeKey:) && is_active {
+ let window_state = unsafe { get_window_state(this) };
+ let mut lock = window_state.lock();
+
+ if let Some(mut callback) = lock.request_frame_callback.take() {
+ #[cfg(not(feature = "macos-blade"))]
+ lock.renderer.set_presents_with_transaction(true);
+ lock.stop_display_link();
+ drop(lock);
+ callback(Default::default());
+
+ let mut lock = window_state.lock();
+ lock.request_frame_callback = Some(callback);
+ #[cfg(not(feature = "macos-blade"))]
+ lock.renderer.set_presents_with_transaction(false);
+ lock.start_display_link();
+ }
+ }
+
executor
.spawn(async move {
let mut lock = window_state.as_ref().lock();
+ if is_active {
+ lock.move_traffic_light();
+ }
+
if let Some(mut callback) = lock.activate_callback.take() {
drop(lock);
callback(is_active);
@@ -2063,8 +2343,8 @@ fn screen_point_to_gpui_point(this: &Object, position: NSPoint) -> Point<Pixels>
let frame = get_frame(this);
let window_x = position.x - frame.origin.x;
let window_y = frame.size.height - (position.y - frame.origin.y);
- let position = point(px(window_x as f32), px(window_y as f32));
- position
+
+ point(px(window_x as f32), px(window_y as f32))
}
extern "C" fn dragging_entered(this: &Object, _: Sel, dragging_info: id) -> NSDragOperation {
@@ -2273,3 +2553,80 @@ unsafe fn remove_layer_background(layer: id) {
}
}
}
+
+extern "C" fn add_titlebar_accessory_view_controller(this: &Object, _: Sel, view_controller: id) {
+ unsafe {
+ let _: () = msg_send![super(this, class!(NSWindow)), addTitlebarAccessoryViewController: view_controller];
+
+ // Hide the native tab bar and set its height to 0, since we render our own.
+ let accessory_view: id = msg_send![view_controller, view];
+ let _: () = msg_send![accessory_view, setHidden: YES];
+ let mut frame: NSRect = msg_send![accessory_view, frame];
+ frame.size.height = 0.0;
+ let _: () = msg_send![accessory_view, setFrame: frame];
+ }
+}
+
+extern "C" fn move_tab_to_new_window(this: &Object, _: Sel, _: id) {
+ unsafe {
+ let _: () = msg_send![super(this, class!(NSWindow)), moveTabToNewWindow:nil];
+
+ let window_state = get_window_state(this);
+ let mut lock = window_state.as_ref().lock();
+ if let Some(mut callback) = lock.move_tab_to_new_window_callback.take() {
+ drop(lock);
+ callback();
+ window_state.lock().move_tab_to_new_window_callback = Some(callback);
+ }
+ }
+}
+
+extern "C" fn merge_all_windows(this: &Object, _: Sel, _: id) {
+ unsafe {
+ let _: () = msg_send![super(this, class!(NSWindow)), mergeAllWindows:nil];
+
+ let window_state = get_window_state(this);
+ let mut lock = window_state.as_ref().lock();
+ if let Some(mut callback) = lock.merge_all_windows_callback.take() {
+ drop(lock);
+ callback();
+ window_state.lock().merge_all_windows_callback = Some(callback);
+ }
+ }
+}
+
+extern "C" fn select_next_tab(this: &Object, _sel: Sel, _id: id) {
+ let window_state = unsafe { get_window_state(this) };
+ let mut lock = window_state.as_ref().lock();
+ if let Some(mut callback) = lock.select_next_tab_callback.take() {
+ drop(lock);
+ callback();
+ window_state.lock().select_next_tab_callback = Some(callback);
+ }
+}
+
+extern "C" fn select_previous_tab(this: &Object, _sel: Sel, _id: id) {
+ let window_state = unsafe { get_window_state(this) };
+ let mut lock = window_state.as_ref().lock();
+ if let Some(mut callback) = lock.select_previous_tab_callback.take() {
+ drop(lock);
+ callback();
+ window_state.lock().select_previous_tab_callback = Some(callback);
+ }
+}
+
+extern "C" fn toggle_tab_bar(this: &Object, _sel: Sel, _id: id) {
+ unsafe {
+ let _: () = msg_send![super(this, class!(NSWindow)), toggleTabBar:nil];
+
+ let window_state = get_window_state(this);
+ let mut lock = window_state.as_ref().lock();
+ lock.move_traffic_light();
+
+ if let Some(mut callback) = lock.toggle_tab_bar_callback.take() {
+ drop(lock);
+ callback();
+ window_state.lock().toggle_tab_bar_callback = Some(callback);
+ }
+ }
+}
@@ -228,7 +228,7 @@ fn run_capture(
display,
size,
}));
- if let Err(_) = stream_send_result {
+ if stream_send_result.is_err() {
return;
}
while !cancel_stream.load(std::sync::atomic::Ordering::SeqCst) {
@@ -118,7 +118,7 @@ impl TestDispatcher {
}
YieldNow {
- count: self.state.lock().random.gen_range(0..10),
+ count: self.state.lock().random.random_range(0..10),
}
}
@@ -151,11 +151,11 @@ impl TestDispatcher {
if deprioritized_background_len == 0 {
return false;
}
- let ix = state.random.gen_range(0..deprioritized_background_len);
+ let ix = state.random.random_range(0..deprioritized_background_len);
main_thread = false;
runnable = state.deprioritized_background.swap_remove(ix);
} else {
- main_thread = state.random.gen_ratio(
+ main_thread = state.random.random_ratio(
foreground_len as u32,
(foreground_len + background_len) as u32,
);
@@ -170,7 +170,7 @@ impl TestDispatcher {
.pop_front()
.unwrap();
} else {
- let ix = state.random.gen_range(0..background_len);
+ let ix = state.random.random_range(0..background_len);
runnable = state.background.swap_remove(ix);
};
};
@@ -241,7 +241,7 @@ impl TestDispatcher {
pub fn gen_block_on_ticks(&self) -> usize {
let mut lock = self.state.lock();
let block_on_ticks = lock.block_on_ticks.clone();
- lock.random.gen_range(block_on_ticks)
+ lock.random.random_range(block_on_ticks)
}
}
@@ -1,8 +1,9 @@
use crate::{
AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DevicePixels,
- ForegroundExecutor, Keymap, NoopTextSystem, Platform, PlatformDisplay, PlatformKeyboardLayout,
- PlatformTextSystem, PromptButton, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream,
- SourceMetadata, Task, TestDisplay, TestWindow, WindowAppearance, WindowParams, size,
+ DummyKeyboardMapper, ForegroundExecutor, Keymap, NoopTextSystem, Platform, PlatformDisplay,
+ PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, PromptButton,
+ ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, Task,
+ TestDisplay, TestWindow, WindowAppearance, WindowParams, size,
};
use anyhow::Result;
use collections::VecDeque;
@@ -187,14 +188,14 @@ impl TestPlatform {
.push_back(TestPrompt {
msg: msg.to_string(),
detail: detail.map(|s| s.to_string()),
- answers: answers.clone(),
+ answers,
tx,
});
rx
}
pub(crate) fn set_active_window(&self, window: Option<TestWindow>) {
- let executor = self.foreground_executor().clone();
+ let executor = self.foreground_executor();
let previous_window = self.active_window.borrow_mut().take();
self.active_window.borrow_mut().clone_from(&window);
@@ -237,6 +238,10 @@ impl Platform for TestPlatform {
Box::new(TestKeyboardLayout)
}
+ fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper> {
+ Rc::new(DummyKeyboardMapper)
+ }
+
fn on_keyboard_layout_change(&self, _: Box<dyn FnMut()>) {}
fn run(&self, _on_finish_launching: Box<dyn FnOnce()>) {
@@ -1,8 +1,8 @@
use crate::{
AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GpuSpecs,
Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow,
- Point, PromptButton, RequestFrameOptions, ScaledPixels, Size, TestPlatform, TileId,
- WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams,
+ Point, PromptButton, RequestFrameOptions, Size, TestPlatform, TileId, WindowAppearance,
+ WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams,
};
use collections::HashMap;
use parking_lot::Mutex;
@@ -289,7 +289,7 @@ impl PlatformWindow for TestWindow {
unimplemented!()
}
- fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {}
+ fn update_ime_position(&self, _bounds: Bounds<Pixels>) {}
fn gpu_specs(&self) -> Option<GpuSpecs> {
None
@@ -2,6 +2,7 @@ mod clipboard;
mod destination_list;
mod direct_write;
mod directx_atlas;
+mod directx_devices;
mod directx_renderer;
mod dispatcher;
mod display;
@@ -18,6 +19,7 @@ pub(crate) use clipboard::*;
pub(crate) use destination_list::*;
pub(crate) use direct_write::*;
pub(crate) use directx_atlas::*;
+pub(crate) use directx_devices::*;
pub(crate) use directx_renderer::*;
pub(crate) use dispatcher::*;
pub(crate) use display::*;
@@ -0,0 +1,28 @@
+float color_brightness(float3 color) {
+ // REC. 601 luminance coefficients for perceived brightness
+ return dot(color, float3(0.30f, 0.59f, 0.11f));
+}
+
+float light_on_dark_contrast(float enhancedContrast, float3 color) {
+ float brightness = color_brightness(color);
+ float multiplier = saturate(4.0f * (0.75f - brightness));
+ return enhancedContrast * multiplier;
+}
+
+float enhance_contrast(float alpha, float k) {
+ return alpha * (k + 1.0f) / (alpha * k + 1.0f);
+}
+
+float apply_alpha_correction(float a, float b, float4 g) {
+ float brightness_adjustment = g.x * b + g.y;
+ float correction = brightness_adjustment * a + (g.z * b + g.w);
+ return a + a * (1.0f - a) * correction;
+}
+
+float apply_contrast_and_gamma_correction(float sample, float3 color, float enhanced_contrast_factor, float4 gamma_ratios) {
+ float enhanced_contrast = light_on_dark_contrast(enhanced_contrast_factor, color);
+ float brightness = color_brightness(color);
+
+ float contrasted = enhance_contrast(sample, enhanced_contrast);
+ return apply_alpha_correction(contrasted, brightness, gamma_ratios);
+}
@@ -1,3 +1,5 @@
+#include "alpha_correction.hlsl"
+
struct RasterVertexOutput {
float4 position : SV_Position;
float2 texcoord : TEXCOORD0;
@@ -23,17 +25,20 @@ struct Bounds {
int2 size;
};
-Texture2D<float4> t_layer : register(t0);
+Texture2D<float> t_layer : register(t0);
SamplerState s_layer : register(s0);
cbuffer GlyphLayerTextureParams : register(b0) {
Bounds bounds;
float4 run_color;
+ float4 gamma_ratios;
+ float grayscale_enhanced_contrast;
+ float3 _pad;
};
float4 emoji_rasterization_fragment(PixelInput input): SV_Target {
- float3 sampled = t_layer.Sample(s_layer, input.texcoord.xy).rgb;
- float alpha = (sampled.r + sampled.g + sampled.b) / 3;
-
- return float4(run_color.rgb, alpha);
+ float sample = t_layer.Sample(s_layer, input.texcoord.xy).r;
+ float alpha_corrected = apply_contrast_and_gamma_correction(sample, run_color.rgb, grayscale_enhanced_contrast, gamma_ratios);
+ float alpha = alpha_corrected * run_color.a;
+ return float4(run_color.rgb * alpha, alpha);
}
@@ -1,7 +1,7 @@
use std::{borrow::Cow, sync::Arc};
use ::util::ResultExt;
-use anyhow::Result;
+use anyhow::{Context, Result};
use collections::HashMap;
use itertools::Itertools;
use parking_lot::{RwLock, RwLockUpgradableReadGuard};
@@ -10,12 +10,8 @@ use windows::{
Foundation::*,
Globalization::GetUserDefaultLocaleName,
Graphics::{
- Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
- Direct3D11::*,
- DirectWrite::*,
- Dxgi::Common::*,
- Gdi::{IsRectEmpty, LOGFONTW},
- Imaging::*,
+ Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP, Direct3D11::*, DirectWrite::*,
+ Dxgi::Common::*, Gdi::LOGFONTW,
},
System::SystemServices::LOCALE_NAME_MAX_LENGTH,
UI::WindowsAndMessaging::*,
@@ -40,12 +36,10 @@ pub(crate) struct DirectWriteTextSystem(RwLock<DirectWriteState>);
struct DirectWriteComponent {
locale: String,
factory: IDWriteFactory5,
- bitmap_factory: AgileReference<IWICImagingFactory>,
in_memory_loader: IDWriteInMemoryFontFileLoader,
builder: IDWriteFontSetBuilder1,
text_renderer: Arc<TextRendererWrapper>,
- render_params: IDWriteRenderingParams3,
gpu_state: GPUState,
}
@@ -76,11 +70,10 @@ struct FontIdentifier {
}
impl DirectWriteComponent {
- pub fn new(bitmap_factory: &IWICImagingFactory, gpu_context: &DirectXDevices) -> Result<Self> {
+ pub fn new(directx_devices: &DirectXDevices) -> Result<Self> {
// todo: ideally this would not be a large unsafe block but smaller isolated ones for easier auditing
unsafe {
let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED)?;
- let bitmap_factory = AgileReference::new(bitmap_factory)?;
// The `IDWriteInMemoryFontFileLoader` here is supported starting from
// Windows 10 Creators Update, which consequently requires the entire
// `DirectWriteTextSystem` to run on `win10 1703`+.
@@ -92,36 +85,14 @@ impl DirectWriteComponent {
let locale = String::from_utf16_lossy(&locale_vec);
let text_renderer = Arc::new(TextRendererWrapper::new(&locale));
- let render_params = {
- let default_params: IDWriteRenderingParams3 =
- factory.CreateRenderingParams()?.cast()?;
- let gamma = default_params.GetGamma();
- let enhanced_contrast = default_params.GetEnhancedContrast();
- let gray_contrast = default_params.GetGrayscaleEnhancedContrast();
- let cleartype_level = default_params.GetClearTypeLevel();
- let grid_fit_mode = default_params.GetGridFitMode();
-
- factory.CreateCustomRenderingParams(
- gamma,
- enhanced_contrast,
- gray_contrast,
- cleartype_level,
- DWRITE_PIXEL_GEOMETRY_RGB,
- DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC,
- grid_fit_mode,
- )?
- };
-
- let gpu_state = GPUState::new(gpu_context)?;
+ let gpu_state = GPUState::new(directx_devices)?;
Ok(DirectWriteComponent {
locale,
factory,
- bitmap_factory,
in_memory_loader,
builder,
text_renderer,
- render_params,
gpu_state,
})
}
@@ -129,9 +100,9 @@ impl DirectWriteComponent {
}
impl GPUState {
- fn new(gpu_context: &DirectXDevices) -> Result<Self> {
- let device = gpu_context.device.clone();
- let device_context = gpu_context.device_context.clone();
+ fn new(directx_devices: &DirectXDevices) -> Result<Self> {
+ let device = directx_devices.device.clone();
+ let device_context = directx_devices.device_context.clone();
let blend_state = {
let mut blend_state = None;
@@ -141,10 +112,10 @@ impl GPUState {
RenderTarget: [
D3D11_RENDER_TARGET_BLEND_DESC {
BlendEnable: true.into(),
- SrcBlend: D3D11_BLEND_SRC_ALPHA,
+ SrcBlend: D3D11_BLEND_ONE,
DestBlend: D3D11_BLEND_INV_SRC_ALPHA,
BlendOp: D3D11_BLEND_OP_ADD,
- SrcBlendAlpha: D3D11_BLEND_SRC_ALPHA,
+ SrcBlendAlpha: D3D11_BLEND_ONE,
DestBlendAlpha: D3D11_BLEND_INV_SRC_ALPHA,
BlendOpAlpha: D3D11_BLEND_OP_ADD,
RenderTargetWriteMask: D3D11_COLOR_WRITE_ENABLE_ALL.0 as u8,
@@ -212,11 +183,8 @@ impl GPUState {
}
impl DirectWriteTextSystem {
- pub(crate) fn new(
- gpu_context: &DirectXDevices,
- bitmap_factory: &IWICImagingFactory,
- ) -> Result<Self> {
- let components = DirectWriteComponent::new(bitmap_factory, gpu_context)?;
+ pub(crate) fn new(directx_devices: &DirectXDevices) -> Result<Self> {
+ let components = DirectWriteComponent::new(directx_devices)?;
let system_font_collection = unsafe {
let mut result = std::mem::zeroed();
components
@@ -242,6 +210,10 @@ impl DirectWriteTextSystem {
font_id_by_identifier: HashMap::default(),
})))
}
+
+ pub(crate) fn handle_gpu_lost(&self, directx_devices: &DirectXDevices) {
+ self.0.write().handle_gpu_lost(directx_devices);
+ }
}
impl PlatformTextSystem for DirectWriteTextSystem {
@@ -762,18 +734,22 @@ impl DirectWriteState {
unsafe {
font.font_face.GetRecommendedRenderingMode(
params.font_size.0,
- // The dpi here seems that it has the same effect with `Some(&transform)`
- 1.0,
- 1.0,
+ // Using 96 as scale is applied by the transform
+ 96.0,
+ 96.0,
Some(&transform),
false,
DWRITE_OUTLINE_THRESHOLD_ANTIALIASED,
DWRITE_MEASURING_MODE_NATURAL,
- &self.components.render_params,
+ None,
&mut rendering_mode,
&mut grid_fit_mode,
)?;
}
+ let rendering_mode = match rendering_mode {
+ DWRITE_RENDERING_MODE1_OUTLINE => DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC,
+ m => m,
+ };
let glyph_analysis = unsafe {
self.components.factory.CreateGlyphRunAnalysis(
@@ -782,8 +758,7 @@ impl DirectWriteState {
rendering_mode,
DWRITE_MEASURING_MODE_NATURAL,
grid_fit_mode,
- // We're using cleartype not grayscale for monochrome is because it provides better quality
- DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE,
+ DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE,
baseline_origin_x,
baseline_origin_y,
)
@@ -794,10 +769,14 @@ impl DirectWriteState {
fn raster_bounds(&self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
let glyph_analysis = self.create_glyph_run_analysis(params)?;
- let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1)? };
- // Some glyphs cannot be drawn with ClearType, such as bitmap fonts. In that case
- // GetAlphaTextureBounds() supposedly returns an empty RECT, but I haven't tested that yet.
- if !unsafe { IsRectEmpty(&bounds) }.as_bool() {
+ let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? };
+
+ if bounds.right < bounds.left {
+ Ok(Bounds {
+ origin: point(0.into(), 0.into()),
+ size: size(0.into(), 0.into()),
+ })
+ } else {
Ok(Bounds {
origin: point(bounds.left.into(), bounds.top.into()),
size: size(
@@ -805,25 +784,6 @@ impl DirectWriteState {
(bounds.bottom - bounds.top).into(),
),
})
- } else {
- // If it's empty, retry with grayscale AA.
- let bounds =
- unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? };
-
- if bounds.right < bounds.left {
- Ok(Bounds {
- origin: point(0.into(), 0.into()),
- size: size(0.into(), 0.into()),
- })
- } else {
- Ok(Bounds {
- origin: point(bounds.left.into(), bounds.top.into()),
- size: size(
- (bounds.right - bounds.left).into(),
- (bounds.bottom - bounds.top).into(),
- ),
- })
- }
}
}
@@ -872,13 +832,12 @@ impl DirectWriteState {
glyph_bounds: Bounds<DevicePixels>,
) -> Result<Vec<u8>> {
let mut bitmap_data =
- vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize * 3];
+ vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize];
let glyph_analysis = self.create_glyph_run_analysis(params)?;
unsafe {
glyph_analysis.CreateAlphaTexture(
- // We're using cleartype not grayscale for monochrome is because it provides better quality
- DWRITE_TEXTURE_CLEARTYPE_3x1,
+ DWRITE_TEXTURE_ALIASED_1x1,
&RECT {
left: glyph_bounds.origin.x.0,
top: glyph_bounds.origin.y.0,
@@ -889,30 +848,6 @@ impl DirectWriteState {
)?;
}
- let bitmap_factory = self.components.bitmap_factory.resolve()?;
- let bitmap = unsafe {
- bitmap_factory.CreateBitmapFromMemory(
- glyph_bounds.size.width.0 as u32,
- glyph_bounds.size.height.0 as u32,
- &GUID_WICPixelFormat24bppRGB,
- glyph_bounds.size.width.0 as u32 * 3,
- &bitmap_data,
- )
- }?;
-
- let grayscale_bitmap =
- unsafe { WICConvertBitmapSource(&GUID_WICPixelFormat8bppGray, &bitmap) }?;
-
- let mut bitmap_data =
- vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize];
- unsafe {
- grayscale_bitmap.CopyPixels(
- std::ptr::null() as _,
- glyph_bounds.size.width.0 as u32,
- &mut bitmap_data,
- )
- }?;
-
Ok(bitmap_data)
}
@@ -981,25 +916,24 @@ impl DirectWriteState {
DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC,
DWRITE_MEASURING_MODE_NATURAL,
DWRITE_GRID_FIT_MODE_DEFAULT,
- DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE,
+ DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE,
baseline_origin_x,
baseline_origin_y,
)
}?;
let color_bounds =
- unsafe { color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1) }?;
+ unsafe { color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1) }?;
let color_size = size(
color_bounds.right - color_bounds.left,
color_bounds.bottom - color_bounds.top,
);
if color_size.width > 0 && color_size.height > 0 {
- let mut alpha_data =
- vec![0u8; (color_size.width * color_size.height * 3) as usize];
+ let mut alpha_data = vec![0u8; (color_size.width * color_size.height) as usize];
unsafe {
color_analysis.CreateAlphaTexture(
- DWRITE_TEXTURE_CLEARTYPE_3x1,
+ DWRITE_TEXTURE_ALIASED_1x1,
&color_bounds,
&mut alpha_data,
)
@@ -1015,10 +949,6 @@ impl DirectWriteState {
}
};
let bounds = bounds(point(color_bounds.left, color_bounds.top), color_size);
- let alpha_data = alpha_data
- .chunks_exact(3)
- .flat_map(|chunk| [chunk[0], chunk[1], chunk[2], 255])
- .collect::<Vec<_>>();
glyph_layers.push(GlyphLayerTexture::new(
&self.components.gpu_state,
run_color,
@@ -1135,10 +1065,18 @@ impl DirectWriteState {
unsafe { device_context.PSSetSamplers(0, Some(&gpu_state.sampler)) };
unsafe { device_context.OMSetBlendState(&gpu_state.blend_state, None, 0xffffffff) };
+ let crate::FontInfo {
+ gamma_ratios,
+ grayscale_enhanced_contrast,
+ } = DirectXRenderer::get_font_info();
+
for layer in glyph_layers {
let params = GlyphLayerTextureParams {
run_color: layer.run_color,
bounds: layer.bounds,
+ gamma_ratios: *gamma_ratios,
+ grayscale_enhanced_contrast: *grayscale_enhanced_contrast,
+ _pad: [0f32; 3],
};
unsafe {
let mut dest = std::mem::zeroed();
@@ -1202,6 +1140,20 @@ impl DirectWriteState {
};
}
+ // Convert from premultiplied to straight alpha
+ for chunk in rasterized.chunks_exact_mut(4) {
+ let b = chunk[0] as f32;
+ let g = chunk[1] as f32;
+ let r = chunk[2] as f32;
+ let a = chunk[3] as f32;
+ if a > 0.0 {
+ let inv_a = 255.0 / a;
+ chunk[0] = (b * inv_a).clamp(0.0, 255.0) as u8;
+ chunk[1] = (g * inv_a).clamp(0.0, 255.0) as u8;
+ chunk[2] = (r * inv_a).clamp(0.0, 255.0) as u8;
+ }
+ }
+
Ok(rasterized)
}
@@ -1263,6 +1215,20 @@ impl DirectWriteState {
));
result
}
+
+ fn handle_gpu_lost(&mut self, directx_devices: &DirectXDevices) {
+ try_to_recover_from_device_lost(
+ || GPUState::new(directx_devices).context("Recreating GPU state for DirectWrite"),
+ |gpu_state| self.components.gpu_state = gpu_state,
+ || {
+ log::error!(
+ "Failed to recreate GPU state for DirectWrite after multiple attempts."
+ );
+ // Do something here?
+ // At this point, the device loss is considered unrecoverable.
+ },
+ );
+ }
}
impl Drop for DirectWriteState {
@@ -1298,14 +1264,14 @@ impl GlyphLayerTexture {
Height: texture_size.height as u32,
MipLevels: 1,
ArraySize: 1,
- Format: DXGI_FORMAT_R8G8B8A8_UNORM,
+ Format: DXGI_FORMAT_R8_UNORM,
SampleDesc: DXGI_SAMPLE_DESC {
Count: 1,
Quality: 0,
},
Usage: D3D11_USAGE_DEFAULT,
BindFlags: D3D11_BIND_SHADER_RESOURCE.0 as u32,
- CPUAccessFlags: D3D11_CPU_ACCESS_WRITE.0 as u32,
+ CPUAccessFlags: 0,
MiscFlags: 0,
};
@@ -1334,7 +1300,7 @@ impl GlyphLayerTexture {
0,
None,
alpha_data.as_ptr() as _,
- (texture_size.width * 4) as u32,
+ texture_size.width as u32,
0,
)
};
@@ -1352,6 +1318,9 @@ impl GlyphLayerTexture {
struct GlyphLayerTextureParams {
bounds: Bounds<i32>,
run_color: Rgba,
+ gamma_ratios: [f32; 4],
+ grayscale_enhanced_contrast: f32,
+ _pad: [f32; 3],
}
struct TextRendererWrapper(pub IDWriteTextRenderer);
@@ -3,9 +3,8 @@ use etagere::BucketedAtlasAllocator;
use parking_lot::Mutex;
use windows::Win32::Graphics::{
Direct3D11::{
- D3D11_BIND_SHADER_RESOURCE, D3D11_BOX, D3D11_CPU_ACCESS_WRITE, D3D11_TEXTURE2D_DESC,
- D3D11_USAGE_DEFAULT, ID3D11Device, ID3D11DeviceContext, ID3D11ShaderResourceView,
- ID3D11Texture2D,
+ D3D11_BIND_SHADER_RESOURCE, D3D11_BOX, D3D11_TEXTURE2D_DESC, D3D11_USAGE_DEFAULT,
+ ID3D11Device, ID3D11DeviceContext, ID3D11ShaderResourceView, ID3D11Texture2D,
},
Dxgi::Common::*,
};
@@ -189,7 +188,7 @@ impl DirectXAtlasState {
},
Usage: D3D11_USAGE_DEFAULT,
BindFlags: bind_flag.0 as u32,
- CPUAccessFlags: D3D11_CPU_ACCESS_WRITE.0 as u32,
+ CPUAccessFlags: 0,
MiscFlags: 0,
};
let mut texture: Option<ID3D11Texture2D> = None;
@@ -0,0 +1,199 @@
+use anyhow::{Context, Result};
+use util::ResultExt;
+use windows::Win32::{
+ Foundation::HMODULE,
+ Graphics::{
+ Direct3D::{
+ D3D_DRIVER_TYPE_UNKNOWN, D3D_FEATURE_LEVEL, D3D_FEATURE_LEVEL_10_1,
+ D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_11_1,
+ },
+ Direct3D11::{
+ D3D11_CREATE_DEVICE_BGRA_SUPPORT, D3D11_CREATE_DEVICE_DEBUG,
+ D3D11_FEATURE_D3D10_X_HARDWARE_OPTIONS, D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS,
+ D3D11_SDK_VERSION, D3D11CreateDevice, ID3D11Device, ID3D11DeviceContext,
+ },
+ Dxgi::{
+ CreateDXGIFactory2, DXGI_CREATE_FACTORY_DEBUG, DXGI_CREATE_FACTORY_FLAGS,
+ DXGI_GPU_PREFERENCE_MINIMUM_POWER, IDXGIAdapter1, IDXGIFactory6,
+ },
+ },
+};
+
+pub(crate) fn try_to_recover_from_device_lost<T>(
+ mut f: impl FnMut() -> Result<T>,
+ on_success: impl FnOnce(T),
+ on_error: impl FnOnce(),
+) {
+ let result = (0..5).find_map(|i| {
+ if i > 0 {
+ // Add a small delay before retrying
+ std::thread::sleep(std::time::Duration::from_millis(100));
+ }
+ f().log_err()
+ });
+
+ if let Some(result) = result {
+ on_success(result);
+ } else {
+ on_error();
+ }
+}
+
+#[derive(Clone)]
+pub(crate) struct DirectXDevices {
+ pub(crate) adapter: IDXGIAdapter1,
+ pub(crate) dxgi_factory: IDXGIFactory6,
+ pub(crate) device: ID3D11Device,
+ pub(crate) device_context: ID3D11DeviceContext,
+}
+
+impl DirectXDevices {
+ pub(crate) fn new() -> Result<Self> {
+ let debug_layer_available = check_debug_layer_available();
+ let dxgi_factory =
+ get_dxgi_factory(debug_layer_available).context("Creating DXGI factory")?;
+ let adapter =
+ get_adapter(&dxgi_factory, debug_layer_available).context("Getting DXGI adapter")?;
+ let (device, device_context) = {
+ let mut context: Option<ID3D11DeviceContext> = None;
+ let mut feature_level = D3D_FEATURE_LEVEL::default();
+ let device = get_device(
+ &adapter,
+ Some(&mut context),
+ Some(&mut feature_level),
+ debug_layer_available,
+ )
+ .context("Creating Direct3D device")?;
+ match feature_level {
+ D3D_FEATURE_LEVEL_11_1 => {
+ log::info!("Created device with Direct3D 11.1 feature level.")
+ }
+ D3D_FEATURE_LEVEL_11_0 => {
+ log::info!("Created device with Direct3D 11.0 feature level.")
+ }
+ D3D_FEATURE_LEVEL_10_1 => {
+ log::info!("Created device with Direct3D 10.1 feature level.")
+ }
+ _ => unreachable!(),
+ }
+ (device, context.unwrap())
+ };
+
+ Ok(Self {
+ adapter,
+ dxgi_factory,
+ device,
+ device_context,
+ })
+ }
+}
+
+#[inline]
+fn check_debug_layer_available() -> bool {
+ #[cfg(debug_assertions)]
+ {
+ use windows::Win32::Graphics::Dxgi::{DXGIGetDebugInterface1, IDXGIInfoQueue};
+
+ unsafe { DXGIGetDebugInterface1::<IDXGIInfoQueue>(0) }
+ .log_err()
+ .is_some()
+ }
+ #[cfg(not(debug_assertions))]
+ {
+ false
+ }
+}
+
+#[inline]
+fn get_dxgi_factory(debug_layer_available: bool) -> Result<IDXGIFactory6> {
+ let factory_flag = if debug_layer_available {
+ DXGI_CREATE_FACTORY_DEBUG
+ } else {
+ #[cfg(debug_assertions)]
+ log::warn!(
+ "Failed to get DXGI debug interface. DirectX debugging features will be disabled."
+ );
+ DXGI_CREATE_FACTORY_FLAGS::default()
+ };
+ unsafe { Ok(CreateDXGIFactory2(factory_flag)?) }
+}
+
+#[inline]
+fn get_adapter(dxgi_factory: &IDXGIFactory6, debug_layer_available: bool) -> Result<IDXGIAdapter1> {
+ for adapter_index in 0.. {
+ let adapter: IDXGIAdapter1 = unsafe {
+ dxgi_factory
+ .EnumAdapterByGpuPreference(adapter_index, DXGI_GPU_PREFERENCE_MINIMUM_POWER)
+ }?;
+ if let Ok(desc) = unsafe { adapter.GetDesc1() } {
+ let gpu_name = String::from_utf16_lossy(&desc.Description)
+ .trim_matches(char::from(0))
+ .to_string();
+ log::info!("Using GPU: {}", gpu_name);
+ }
+ // Check to see whether the adapter supports Direct3D 11, but don't
+ // create the actual device yet.
+ if get_device(&adapter, None, None, debug_layer_available)
+ .log_err()
+ .is_some()
+ {
+ return Ok(adapter);
+ }
+ }
+
+ unreachable!()
+}
+
+#[inline]
+fn get_device(
+ adapter: &IDXGIAdapter1,
+ context: Option<*mut Option<ID3D11DeviceContext>>,
+ feature_level: Option<*mut D3D_FEATURE_LEVEL>,
+ debug_layer_available: bool,
+) -> Result<ID3D11Device> {
+ let mut device: Option<ID3D11Device> = None;
+ let device_flags = if debug_layer_available {
+ D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG
+ } else {
+ D3D11_CREATE_DEVICE_BGRA_SUPPORT
+ };
+ unsafe {
+ D3D11CreateDevice(
+ adapter,
+ D3D_DRIVER_TYPE_UNKNOWN,
+ HMODULE::default(),
+ device_flags,
+ // 4x MSAA is required for Direct3D Feature Level 10.1 or better
+ Some(&[
+ D3D_FEATURE_LEVEL_11_1,
+ D3D_FEATURE_LEVEL_11_0,
+ D3D_FEATURE_LEVEL_10_1,
+ ]),
+ D3D11_SDK_VERSION,
+ Some(&mut device),
+ feature_level,
+ context,
+ )?;
+ }
+ let device = device.unwrap();
+ let mut data = D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS::default();
+ unsafe {
+ device
+ .CheckFeatureSupport(
+ D3D11_FEATURE_D3D10_X_HARDWARE_OPTIONS,
+ &mut data as *mut _ as _,
+ std::mem::size_of::<D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS>() as u32,
+ )
+ .context("Checking GPU device feature support")?;
+ }
+ if data
+ .ComputeShaders_Plus_RawAndStructuredBuffers_Via_Shader_4_x
+ .as_bool()
+ {
+ Ok(device)
+ } else {
+ Err(anyhow::anyhow!(
+ "Required feature StructuredBuffer is not supported by GPU/driver"
+ ))
+ }
+}
@@ -1,14 +1,18 @@
-use std::{mem::ManuallyDrop, sync::Arc};
+use std::{
+ mem::ManuallyDrop,
+ sync::{Arc, OnceLock},
+};
use ::util::ResultExt;
use anyhow::{Context, Result};
use windows::{
Win32::{
- Foundation::{HMODULE, HWND},
+ Foundation::HWND,
Graphics::{
Direct3D::*,
Direct3D11::*,
DirectComposition::*,
+ DirectWrite::*,
Dxgi::{Common::*, *},
},
},
@@ -27,21 +31,27 @@ const RENDER_TARGET_FORMAT: DXGI_FORMAT = DXGI_FORMAT_B8G8R8A8_UNORM;
// This configuration is used for MSAA rendering on paths only, and it's guaranteed to be supported by DirectX 11.
const PATH_MULTISAMPLE_COUNT: u32 = 4;
+pub(crate) struct FontInfo {
+ pub gamma_ratios: [f32; 4],
+ pub grayscale_enhanced_contrast: f32,
+}
+
pub(crate) struct DirectXRenderer {
hwnd: HWND,
atlas: Arc<DirectXAtlas>,
- devices: ManuallyDrop<DirectXDevices>,
+ devices: ManuallyDrop<DirectXRendererDevices>,
resources: ManuallyDrop<DirectXResources>,
globals: DirectXGlobalElements,
pipelines: DirectXRenderPipelines,
direct_composition: Option<DirectComposition>,
+ font_info: &'static FontInfo,
}
/// Direct3D objects
#[derive(Clone)]
-pub(crate) struct DirectXDevices {
- adapter: IDXGIAdapter1,
- dxgi_factory: IDXGIFactory6,
+pub(crate) struct DirectXRendererDevices {
+ pub(crate) adapter: IDXGIAdapter1,
+ pub(crate) dxgi_factory: IDXGIFactory6,
pub(crate) device: ID3D11Device,
pub(crate) device_context: ID3D11DeviceContext,
dxgi_device: Option<IDXGIDevice>,
@@ -86,39 +96,17 @@ struct DirectComposition {
comp_visual: IDCompositionVisual,
}
-impl DirectXDevices {
- pub(crate) fn new(disable_direct_composition: bool) -> Result<ManuallyDrop<Self>> {
- let debug_layer_available = check_debug_layer_available();
- let dxgi_factory =
- get_dxgi_factory(debug_layer_available).context("Creating DXGI factory")?;
- let adapter =
- get_adapter(&dxgi_factory, debug_layer_available).context("Getting DXGI adapter")?;
- let (device, device_context) = {
- let mut device: Option<ID3D11Device> = None;
- let mut context: Option<ID3D11DeviceContext> = None;
- let mut feature_level = D3D_FEATURE_LEVEL::default();
- get_device(
- &adapter,
- Some(&mut device),
- Some(&mut context),
- Some(&mut feature_level),
- debug_layer_available,
- )
- .context("Creating Direct3D device")?;
- match feature_level {
- D3D_FEATURE_LEVEL_11_1 => {
- log::info!("Created device with Direct3D 11.1 feature level.")
- }
- D3D_FEATURE_LEVEL_11_0 => {
- log::info!("Created device with Direct3D 11.0 feature level.")
- }
- D3D_FEATURE_LEVEL_10_1 => {
- log::info!("Created device with Direct3D 10.1 feature level.")
- }
- _ => unreachable!(),
- }
- (device.unwrap(), context.unwrap())
- };
+impl DirectXRendererDevices {
+ pub(crate) fn new(
+ directx_devices: &DirectXDevices,
+ disable_direct_composition: bool,
+ ) -> Result<ManuallyDrop<Self>> {
+ let DirectXDevices {
+ adapter,
+ dxgi_factory,
+ device,
+ device_context,
+ } = directx_devices;
let dxgi_device = if disable_direct_composition {
None
} else {
@@ -126,23 +114,27 @@ impl DirectXDevices {
};
Ok(ManuallyDrop::new(Self {
- adapter,
- dxgi_factory,
+ adapter: adapter.clone(),
+ dxgi_factory: dxgi_factory.clone(),
+ device: device.clone(),
+ device_context: device_context.clone(),
dxgi_device,
- device,
- device_context,
}))
}
}
impl DirectXRenderer {
- pub(crate) fn new(hwnd: HWND, disable_direct_composition: bool) -> Result<Self> {
+ pub(crate) fn new(
+ hwnd: HWND,
+ directx_devices: &DirectXDevices,
+ disable_direct_composition: bool,
+ ) -> Result<Self> {
if disable_direct_composition {
log::info!("Direct Composition is disabled.");
}
- let devices =
- DirectXDevices::new(disable_direct_composition).context("Creating DirectX devices")?;
+ let devices = DirectXRendererDevices::new(directx_devices, disable_direct_composition)
+ .context("Creating DirectX devices")?;
let atlas = Arc::new(DirectXAtlas::new(&devices.device, &devices.device_context));
let resources = DirectXResources::new(&devices, 1, 1, hwnd, disable_direct_composition)
@@ -171,6 +163,7 @@ impl DirectXRenderer {
globals,
pipelines,
direct_composition,
+ font_info: Self::get_font_info(),
})
}
@@ -183,10 +176,12 @@ impl DirectXRenderer {
&self.devices.device_context,
self.globals.global_params_buffer[0].as_ref().unwrap(),
&[GlobalParams {
+ gamma_ratios: self.font_info.gamma_ratios,
viewport_size: [
self.resources.viewport[0].Width,
self.resources.viewport[0].Height,
],
+ grayscale_enhanced_contrast: self.font_info.grayscale_enhanced_contrast,
_pad: 0,
}],
)?;
@@ -205,28 +200,30 @@ impl DirectXRenderer {
Ok(())
}
+ #[inline]
fn present(&mut self) -> Result<()> {
- unsafe {
- let result = self.resources.swap_chain.Present(0, DXGI_PRESENT(0));
- // Presenting the swap chain can fail if the DirectX device was removed or reset.
- if result == DXGI_ERROR_DEVICE_REMOVED || result == DXGI_ERROR_DEVICE_RESET {
- let reason = self.devices.device.GetDeviceRemovedReason();
+ let result = unsafe { self.resources.swap_chain.Present(0, DXGI_PRESENT(0)) };
+ result.ok().context("Presenting swap chain failed")
+ }
+
+ pub(crate) fn handle_device_lost(&mut self, directx_devices: &DirectXDevices) {
+ try_to_recover_from_device_lost(
+ || {
+ self.handle_device_lost_impl(directx_devices)
+ .context("DirectXRenderer handling device lost")
+ },
+ |_| {},
+ || {
log::error!(
- "DirectX device removed or reset when drawing. Reason: {:?}",
- reason
+ "DirectXRenderer failed to recover from device lost after multiple attempts"
);
- self.handle_device_lost()?;
- } else {
- result.ok()?;
- }
- }
- Ok(())
+ // Do something here?
+ // At this point, the device loss is considered unrecoverable.
+ },
+ );
}
- fn handle_device_lost(&mut self) -> Result<()> {
- // Here we wait a bit to ensure the the system has time to recover from the device lost state.
- // If we don't wait, the final drawing result will be blank.
- std::thread::sleep(std::time::Duration::from_millis(300));
+ fn handle_device_lost_impl(&mut self, directx_devices: &DirectXDevices) -> Result<()> {
let disable_direct_composition = self.direct_composition.is_none();
unsafe {
@@ -249,7 +246,7 @@ impl DirectXRenderer {
ManuallyDrop::drop(&mut self.devices);
}
- let devices = DirectXDevices::new(disable_direct_composition)
+ let devices = DirectXRendererDevices::new(directx_devices, disable_direct_composition)
.context("Recreating DirectX devices")?;
let resources = DirectXResources::new(
&devices,
@@ -324,49 +321,39 @@ impl DirectXRenderer {
if self.resources.width == width && self.resources.height == height {
return Ok(());
}
+ self.resources.width = width;
+ self.resources.height = height;
+
+ // Clear the render target before resizing
+ unsafe { self.devices.device_context.OMSetRenderTargets(None, None) };
+ unsafe { ManuallyDrop::drop(&mut self.resources.render_target) };
+ drop(self.resources.render_target_view[0].take().unwrap());
+
+ // Resizing the swap chain requires a call to the underlying DXGI adapter, which can return the device removed error.
+ // The app might have moved to a monitor that's attached to a different graphics device.
+ // When a graphics device is removed or reset, the desktop resolution often changes, resulting in a window size change.
+ // But here we just return the error, because we are handling device lost scenarios elsewhere.
unsafe {
- // Clear the render target before resizing
- self.devices.device_context.OMSetRenderTargets(None, None);
- ManuallyDrop::drop(&mut self.resources.render_target);
- drop(self.resources.render_target_view[0].take().unwrap());
-
- let result = self.resources.swap_chain.ResizeBuffers(
- BUFFER_COUNT as u32,
- width,
- height,
- RENDER_TARGET_FORMAT,
- DXGI_SWAP_CHAIN_FLAG(0),
- );
- // Resizing the swap chain requires a call to the underlying DXGI adapter, which can return the device removed error.
- // The app might have moved to a monitor that's attached to a different graphics device.
- // When a graphics device is removed or reset, the desktop resolution often changes, resulting in a window size change.
- match result {
- Ok(_) => {}
- Err(e) => {
- if e.code() == DXGI_ERROR_DEVICE_REMOVED || e.code() == DXGI_ERROR_DEVICE_RESET
- {
- let reason = self.devices.device.GetDeviceRemovedReason();
- log::error!(
- "DirectX device removed or reset when resizing. Reason: {:?}",
- reason
- );
- self.resources.width = width;
- self.resources.height = height;
- self.handle_device_lost()?;
- return Ok(());
- } else {
- log::error!("Failed to resize swap chain: {:?}", e);
- return Err(e.into());
- }
- }
- }
-
self.resources
- .recreate_resources(&self.devices, width, height)?;
+ .swap_chain
+ .ResizeBuffers(
+ BUFFER_COUNT as u32,
+ width,
+ height,
+ RENDER_TARGET_FORMAT,
+ DXGI_SWAP_CHAIN_FLAG(0),
+ )
+ .context("Failed to resize swap chain")?;
+ }
+
+ self.resources
+ .recreate_resources(&self.devices, width, height)?;
+ unsafe {
self.devices
.device_context
.OMSetRenderTargets(Some(&self.resources.render_target_view), None);
}
+
Ok(())
}
@@ -617,11 +604,57 @@ impl DirectXRenderer {
driver_info: driver_version,
})
}
+
+ pub(crate) fn get_font_info() -> &'static FontInfo {
+ static CACHED_FONT_INFO: OnceLock<FontInfo> = OnceLock::new();
+ CACHED_FONT_INFO.get_or_init(|| unsafe {
+ let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED).unwrap();
+ let render_params: IDWriteRenderingParams1 =
+ factory.CreateRenderingParams().unwrap().cast().unwrap();
+ FontInfo {
+ gamma_ratios: Self::get_gamma_ratios(render_params.GetGamma()),
+ grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(),
+ }
+ })
+ }
+
+ // Gamma ratios for brightening/darkening edges for better contrast
+ // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50
+ fn get_gamma_ratios(gamma: f32) -> [f32; 4] {
+ const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [
+ [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0
+ [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1
+ [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2
+ [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3
+ [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4
+ [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5
+ [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6
+ [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7
+ [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8
+ [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9
+ [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0
+ [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1
+ [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2
+ ];
+
+ const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32;
+ const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32;
+
+ let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10;
+ let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index];
+
+ [
+ ratios[0] * NORM13,
+ ratios[1] * NORM24,
+ ratios[2] * NORM13,
+ ratios[3] * NORM24,
+ ]
+ }
}
impl DirectXResources {
pub fn new(
- devices: &DirectXDevices,
+ devices: &DirectXRendererDevices,
width: u32,
height: u32,
hwnd: HWND,
@@ -666,7 +699,7 @@ impl DirectXResources {
#[inline]
fn recreate_resources(
&mut self,
- devices: &DirectXDevices,
+ devices: &DirectXRendererDevices,
width: u32,
height: u32,
) -> Result<()> {
@@ -686,8 +719,6 @@ impl DirectXResources {
self.path_intermediate_msaa_view = path_intermediate_msaa_view;
self.path_intermediate_srv = path_intermediate_srv;
self.viewport = viewport;
- self.width = width;
- self.height = height;
Ok(())
}
}
@@ -822,8 +853,10 @@ impl DirectXGlobalElements {
#[derive(Debug, Default)]
#[repr(C)]
struct GlobalParams {
+ gamma_ratios: [f32; 4],
viewport_size: [f32; 2],
- _pad: u64,
+ grayscale_enhanced_contrast: f32,
+ _pad: u32,
}
struct PipelineState<T> {
@@ -980,92 +1013,6 @@ impl Drop for DirectXResources {
}
}
-#[inline]
-fn check_debug_layer_available() -> bool {
- #[cfg(debug_assertions)]
- {
- unsafe { DXGIGetDebugInterface1::<IDXGIInfoQueue>(0) }
- .log_err()
- .is_some()
- }
- #[cfg(not(debug_assertions))]
- {
- false
- }
-}
-
-#[inline]
-fn get_dxgi_factory(debug_layer_available: bool) -> Result<IDXGIFactory6> {
- let factory_flag = if debug_layer_available {
- DXGI_CREATE_FACTORY_DEBUG
- } else {
- #[cfg(debug_assertions)]
- log::warn!(
- "Failed to get DXGI debug interface. DirectX debugging features will be disabled."
- );
- DXGI_CREATE_FACTORY_FLAGS::default()
- };
- unsafe { Ok(CreateDXGIFactory2(factory_flag)?) }
-}
-
-fn get_adapter(dxgi_factory: &IDXGIFactory6, debug_layer_available: bool) -> Result<IDXGIAdapter1> {
- for adapter_index in 0.. {
- let adapter: IDXGIAdapter1 = unsafe {
- dxgi_factory
- .EnumAdapterByGpuPreference(adapter_index, DXGI_GPU_PREFERENCE_MINIMUM_POWER)
- }?;
- if let Ok(desc) = unsafe { adapter.GetDesc1() } {
- let gpu_name = String::from_utf16_lossy(&desc.Description)
- .trim_matches(char::from(0))
- .to_string();
- log::info!("Using GPU: {}", gpu_name);
- }
- // Check to see whether the adapter supports Direct3D 11, but don't
- // create the actual device yet.
- if get_device(&adapter, None, None, None, debug_layer_available)
- .log_err()
- .is_some()
- {
- return Ok(adapter);
- }
- }
-
- unreachable!()
-}
-
-fn get_device(
- adapter: &IDXGIAdapter1,
- device: Option<*mut Option<ID3D11Device>>,
- context: Option<*mut Option<ID3D11DeviceContext>>,
- feature_level: Option<*mut D3D_FEATURE_LEVEL>,
- debug_layer_available: bool,
-) -> Result<()> {
- let device_flags = if debug_layer_available {
- D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG
- } else {
- D3D11_CREATE_DEVICE_BGRA_SUPPORT
- };
- unsafe {
- D3D11CreateDevice(
- adapter,
- D3D_DRIVER_TYPE_UNKNOWN,
- HMODULE::default(),
- device_flags,
- // 4x MSAA is required for Direct3D Feature Level 10.1 or better
- Some(&[
- D3D_FEATURE_LEVEL_11_1,
- D3D_FEATURE_LEVEL_11_0,
- D3D_FEATURE_LEVEL_10_1,
- ]),
- D3D11_SDK_VERSION,
- device,
- feature_level,
- context,
- )?;
- }
- Ok(())
-}
-
#[inline]
fn get_comp_device(dxgi_device: &IDXGIDevice) -> Result<IDCompositionDevice> {
Ok(unsafe { DCompositionCreateDevice(dxgi_device)? })
@@ -1130,7 +1077,7 @@ fn create_swap_chain(
#[inline]
fn create_resources(
- devices: &DirectXDevices,
+ devices: &DirectXRendererDevices,
swap_chain: &IDXGISwapChain1,
width: u32,
height: u32,
@@ -1544,6 +1491,10 @@ pub(crate) mod shader_resources {
#[cfg(debug_assertions)]
pub(super) fn build_shader_blob(entry: ShaderModule, target: ShaderTarget) -> Result<ID3DBlob> {
unsafe {
+ use windows::Win32::Graphics::{
+ Direct3D::ID3DInclude, Hlsl::D3D_COMPILE_STANDARD_FILE_INCLUDE,
+ };
+
let shader_name = if matches!(entry, ShaderModule::EmojiRasterization) {
"color_text_raster.hlsl"
} else {
@@ -1572,10 +1523,15 @@ pub(crate) mod shader_resources {
let entry_point = PCSTR::from_raw(entry.as_ptr());
let target_cstr = PCSTR::from_raw(target.as_ptr());
+ // Workaround: D3D_COMPILE_STANDARD_FILE_INCLUDE is a sentinel pointer, so reinterpret it as an ID3DInclude because the windows-rs bindings won't accept it directly
+ let include_handler = &std::mem::transmute::<usize, ID3DInclude>(
+ D3D_COMPILE_STANDARD_FILE_INCLUDE as usize,
+ );
+
let ret = D3DCompileFromFile(
&HSTRING::from(shader_path.to_str().unwrap()),
None,
- None,
+ include_handler,
entry_point,
target_cstr,
D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION,
@@ -1760,7 +1716,7 @@ mod amd {
anyhow::bail!("Failed to initialize AMD AGS, error code: {}", result);
}
- // Vulkan acctually returns this as the driver version
+ // Vulkan actually returns this as the driver version
let software_version = if !gpu_info.radeon_software_version.is_null() {
std::ffi::CStr::from_ptr(gpu_info.radeon_software_version)
.to_string_lossy()
@@ -9,41 +9,42 @@ use parking::Parker;
use parking_lot::Mutex;
use util::ResultExt;
use windows::{
- Foundation::TimeSpan,
System::Threading::{
- ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions,
- WorkItemPriority,
+ ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority,
},
Win32::{
Foundation::{LPARAM, WPARAM},
- UI::WindowsAndMessaging::PostThreadMessageW,
+ UI::WindowsAndMessaging::PostMessageW,
},
};
-use crate::{PlatformDispatcher, TaskLabel, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD};
+use crate::{
+ HWND, PlatformDispatcher, SafeHwnd, TaskLabel, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
+};
pub(crate) struct WindowsDispatcher {
main_sender: Sender<Runnable>,
parker: Mutex<Parker>,
main_thread_id: ThreadId,
- main_thread_id_win32: u32,
+ platform_window_handle: SafeHwnd,
validation_number: usize,
}
impl WindowsDispatcher {
pub(crate) fn new(
main_sender: Sender<Runnable>,
- main_thread_id_win32: u32,
+ platform_window_handle: HWND,
validation_number: usize,
) -> Self {
let parker = Mutex::new(Parker::new());
let main_thread_id = current().id();
+ let platform_window_handle = platform_window_handle.into();
WindowsDispatcher {
main_sender,
parker,
main_thread_id,
- main_thread_id_win32,
+ platform_window_handle,
validation_number,
}
}
@@ -56,12 +57,7 @@ impl WindowsDispatcher {
Ok(())
})
};
- ThreadPool::RunWithPriorityAndOptionsAsync(
- &handler,
- WorkItemPriority::High,
- WorkItemOptions::TimeSliced,
- )
- .log_err();
+ ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err();
}
fn dispatch_on_threadpool_after(&self, runnable: Runnable, duration: Duration) {
@@ -72,12 +68,7 @@ impl WindowsDispatcher {
Ok(())
})
};
- let delay = TimeSpan {
- // A time period expressed in 100-nanosecond units.
- // 10,000,000 ticks per second
- Duration: (duration.as_nanos() / 100) as i64,
- };
- ThreadPoolTimer::CreateTimer(&handler, delay).log_err();
+ ThreadPoolTimer::CreateTimer(&handler, duration.into()).log_err();
}
}
@@ -96,8 +87,8 @@ impl PlatformDispatcher for WindowsDispatcher {
fn dispatch_on_main_thread(&self, runnable: Runnable) {
match self.main_sender.send(runnable) {
Ok(_) => unsafe {
- PostThreadMessageW(
- self.main_thread_id_win32,
+ PostMessageW(
+ Some(self.platform_window_handle.as_raw()),
WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
WPARAM(self.validation_number),
LPARAM(0),
@@ -24,6 +24,8 @@ pub(crate) const WM_GPUI_CLOSE_ONE_WINDOW: u32 = WM_USER + 2;
pub(crate) const WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD: u32 = WM_USER + 3;
pub(crate) const WM_GPUI_DOCK_MENU_ACTION: u32 = WM_USER + 4;
pub(crate) const WM_GPUI_FORCE_UPDATE_WINDOW: u32 = WM_USER + 5;
+pub(crate) const WM_GPUI_KEYBOARD_LAYOUT_CHANGED: u32 = WM_USER + 6;
+pub(crate) const WM_GPUI_GPU_DEVICE_LOST: u32 = WM_USER + 7;
const SIZE_MOVE_LOOP_TIMER_ID: usize = 1;
const AUTO_HIDE_TASKBAR_THICKNESS_PX: i32 = 1;
@@ -39,7 +41,6 @@ impl WindowsWindowInner {
let handled = match msg {
WM_ACTIVATE => self.handle_activate_msg(wparam),
WM_CREATE => self.handle_create_msg(handle),
- WM_DEVICECHANGE => self.handle_device_change_msg(handle, wparam),
WM_MOVE => self.handle_move_msg(handle, lparam),
WM_SIZE => self.handle_size_msg(wparam, lparam),
WM_GETMINMAXINFO => self.handle_get_min_max_info_msg(lparam),
@@ -99,10 +100,11 @@ impl WindowsWindowInner {
WM_IME_COMPOSITION => self.handle_ime_composition(handle, lparam),
WM_SETCURSOR => self.handle_set_cursor(handle, lparam),
WM_SETTINGCHANGE => self.handle_system_settings_changed(handle, wparam, lparam),
- WM_INPUTLANGCHANGE => self.handle_input_language_changed(lparam),
+ WM_INPUTLANGCHANGE => self.handle_input_language_changed(),
WM_SHOWWINDOW => self.handle_window_visibility_changed(handle, wparam),
WM_GPUI_CURSOR_STYLE_CHANGED => self.handle_cursor_changed(lparam),
WM_GPUI_FORCE_UPDATE_WINDOW => self.draw_window(handle, true),
+ WM_GPUI_GPU_DEVICE_LOST => self.handle_device_lost(lparam),
_ => None,
};
if let Some(n) = handled {
@@ -264,8 +266,8 @@ impl WindowsWindowInner {
callback();
}
unsafe {
- PostThreadMessageW(
- self.main_thread_id_win32,
+ PostMessageW(
+ Some(self.platform_window_handle),
WM_GPUI_CLOSE_ONE_WINDOW,
WPARAM(self.validation_number),
LPARAM(handle.0 as isize),
@@ -708,7 +710,7 @@ impl WindowsWindowInner {
.system_settings
.auto_hide_taskbar_position
{
- // Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
+ // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
// so the window isn't treated as a "fullscreen app", which would cause
// the taskbar to disappear.
match taskbar_position {
@@ -956,7 +958,7 @@ impl WindowsWindowInner {
click_count,
first_mouse: false,
});
- let result = func(input.clone());
+ let result = func(input);
let handled = !result.propagate || result.default_prevented;
self.state.borrow_mut().callbacks.input = Some(func);
@@ -1128,32 +1130,33 @@ impl WindowsWindowInner {
&& let Some(parameter_string) = unsafe { parameter.to_string() }.log_err()
{
log::info!("System settings changed: {}", parameter_string);
- match parameter_string.as_str() {
- "ImmersiveColorSet" => {
- let new_appearance = system_appearance()
- .context("unable to get system appearance when handling ImmersiveColorSet")
- .log_err()?;
- let mut lock = self.state.borrow_mut();
- if new_appearance != lock.appearance {
- lock.appearance = new_appearance;
- let mut callback = lock.callbacks.appearance_changed.take()?;
- drop(lock);
- callback();
- self.state.borrow_mut().callbacks.appearance_changed = Some(callback);
- configure_dwm_dark_mode(handle, new_appearance);
- }
+ if parameter_string.as_str() == "ImmersiveColorSet" {
+ let new_appearance = system_appearance()
+ .context("unable to get system appearance when handling ImmersiveColorSet")
+ .log_err()?;
+ let mut lock = self.state.borrow_mut();
+ if new_appearance != lock.appearance {
+ lock.appearance = new_appearance;
+ let mut callback = lock.callbacks.appearance_changed.take()?;
+ drop(lock);
+ callback();
+ self.state.borrow_mut().callbacks.appearance_changed = Some(callback);
+ configure_dwm_dark_mode(handle, new_appearance);
}
- _ => {}
}
}
Some(0)
}
- fn handle_input_language_changed(&self, lparam: LPARAM) -> Option<isize> {
- let thread = self.main_thread_id_win32;
- let validation = self.validation_number;
+ fn handle_input_language_changed(&self) -> Option<isize> {
unsafe {
- PostThreadMessageW(thread, WM_INPUTLANGCHANGE, WPARAM(validation), lparam).log_err();
+ PostMessageW(
+ Some(self.platform_window_handle),
+ WM_GPUI_KEYBOARD_LAYOUT_CHANGED,
+ WPARAM(self.validation_number),
+ LPARAM(0),
+ )
+ .log_err();
}
Some(0)
}
@@ -1165,26 +1168,12 @@ impl WindowsWindowInner {
None
}
- fn handle_device_change_msg(&self, handle: HWND, wparam: WPARAM) -> Option<isize> {
- if wparam.0 == DBT_DEVNODES_CHANGED as usize {
- // The reason for sending this message is to actually trigger a redraw of the window.
- unsafe {
- PostMessageW(
- Some(handle),
- WM_GPUI_FORCE_UPDATE_WINDOW,
- WPARAM(0),
- LPARAM(0),
- )
- .log_err();
- }
- // If the GPU device is lost, this redraw will take care of recreating the device context.
- // The WM_GPUI_FORCE_UPDATE_WINDOW message will take care of redrawing the window, after
- // the device context has been recreated.
- self.draw_window(handle, true)
- } else {
- // Other device change messages are not handled.
- None
- }
+ fn handle_device_lost(&self, lparam: LPARAM) -> Option<isize> {
+ let mut lock = self.state.borrow_mut();
+ let devices = lparam.0 as *const DirectXDevices;
+ let devices = unsafe { &*devices };
+ lock.renderer.handle_device_lost(&devices);
+ Some(0)
}
#[inline]
@@ -1469,7 +1458,7 @@ pub(crate) fn current_modifiers() -> Modifiers {
#[inline]
pub(crate) fn current_capslock() -> Capslock {
let on = unsafe { GetKeyState(VK_CAPITAL.0 as i32) & 1 } > 0;
- Capslock { on: on }
+ Capslock { on }
}
fn get_client_area_insets(
@@ -1,22 +1,31 @@
use anyhow::Result;
+use collections::HashMap;
use windows::Win32::UI::{
Input::KeyboardAndMouse::{
- GetKeyboardLayoutNameW, MAPVK_VK_TO_CHAR, MapVirtualKeyW, ToUnicode, VIRTUAL_KEY, VK_0,
- VK_1, VK_2, VK_3, VK_4, VK_5, VK_6, VK_7, VK_8, VK_9, VK_ABNT_C1, VK_CONTROL, VK_MENU,
- VK_OEM_1, VK_OEM_2, VK_OEM_3, VK_OEM_4, VK_OEM_5, VK_OEM_6, VK_OEM_7, VK_OEM_8, VK_OEM_102,
- VK_OEM_COMMA, VK_OEM_MINUS, VK_OEM_PERIOD, VK_OEM_PLUS, VK_SHIFT,
+ GetKeyboardLayoutNameW, MAPVK_VK_TO_CHAR, MAPVK_VK_TO_VSC, MapVirtualKeyW, ToUnicode,
+ VIRTUAL_KEY, VK_0, VK_1, VK_2, VK_3, VK_4, VK_5, VK_6, VK_7, VK_8, VK_9, VK_ABNT_C1,
+ VK_CONTROL, VK_MENU, VK_OEM_1, VK_OEM_2, VK_OEM_3, VK_OEM_4, VK_OEM_5, VK_OEM_6, VK_OEM_7,
+ VK_OEM_8, VK_OEM_102, VK_OEM_COMMA, VK_OEM_MINUS, VK_OEM_PERIOD, VK_OEM_PLUS, VK_SHIFT,
},
WindowsAndMessaging::KL_NAMELENGTH,
};
use windows_core::HSTRING;
-use crate::{Modifiers, PlatformKeyboardLayout};
+use crate::{
+ KeybindingKeystroke, Keystroke, Modifiers, PlatformKeyboardLayout, PlatformKeyboardMapper,
+};
pub(crate) struct WindowsKeyboardLayout {
id: String,
name: String,
}
+pub(crate) struct WindowsKeyboardMapper {
+ key_to_vkey: HashMap<String, (u16, bool)>,
+ vkey_to_key: HashMap<u16, String>,
+ vkey_to_shifted: HashMap<u16, String>,
+}
+
impl PlatformKeyboardLayout for WindowsKeyboardLayout {
fn id(&self) -> &str {
&self.id
@@ -27,6 +36,61 @@ impl PlatformKeyboardLayout for WindowsKeyboardLayout {
}
}
+impl PlatformKeyboardMapper for WindowsKeyboardMapper {
+ fn map_key_equivalent(
+ &self,
+ mut keystroke: Keystroke,
+ use_key_equivalents: bool,
+ ) -> KeybindingKeystroke {
+ let Some((vkey, shifted_key)) = self.get_vkey_from_key(&keystroke.key, use_key_equivalents)
+ else {
+ return KeybindingKeystroke::from_keystroke(keystroke);
+ };
+ if shifted_key && keystroke.modifiers.shift {
+ log::warn!(
+ "Keystroke '{}' has both shift and a shifted key, this is likely a bug",
+ keystroke.key
+ );
+ }
+
+ let shift = shifted_key || keystroke.modifiers.shift;
+ keystroke.modifiers.shift = false;
+
+ let Some(key) = self.vkey_to_key.get(&vkey).cloned() else {
+ log::error!(
+ "Failed to map key equivalent '{:?}' to a valid key",
+ keystroke
+ );
+ return KeybindingKeystroke::from_keystroke(keystroke);
+ };
+
+ keystroke.key = if shift {
+ let Some(shifted_key) = self.vkey_to_shifted.get(&vkey).cloned() else {
+ log::error!(
+ "Failed to map keystroke {:?} with virtual key '{:?}' to a shifted key",
+ keystroke,
+ vkey
+ );
+ return KeybindingKeystroke::from_keystroke(keystroke);
+ };
+ shifted_key
+ } else {
+ key.clone()
+ };
+
+ let modifiers = Modifiers {
+ shift,
+ ..keystroke.modifiers
+ };
+
+ KeybindingKeystroke::new(keystroke, modifiers, key)
+ }
+
+ fn get_key_equivalents(&self) -> Option<&HashMap<char, char>> {
+ None
+ }
+}
+
impl WindowsKeyboardLayout {
pub(crate) fn new() -> Result<Self> {
let mut buffer = [0u16; KL_NAMELENGTH as usize];
@@ -48,6 +112,41 @@ impl WindowsKeyboardLayout {
}
}
+impl WindowsKeyboardMapper {
+ pub(crate) fn new() -> Self {
+ let mut key_to_vkey = HashMap::default();
+ let mut vkey_to_key = HashMap::default();
+ let mut vkey_to_shifted = HashMap::default();
+ for vkey in CANDIDATE_VKEYS {
+ if let Some(key) = get_key_from_vkey(*vkey) {
+ key_to_vkey.insert(key.clone(), (vkey.0, false));
+ vkey_to_key.insert(vkey.0, key);
+ }
+ let scan_code = unsafe { MapVirtualKeyW(vkey.0 as u32, MAPVK_VK_TO_VSC) };
+ if scan_code == 0 {
+ continue;
+ }
+ if let Some(shifted_key) = get_shifted_key(*vkey, scan_code) {
+ key_to_vkey.insert(shifted_key.clone(), (vkey.0, true));
+ vkey_to_shifted.insert(vkey.0, shifted_key);
+ }
+ }
+ Self {
+ key_to_vkey,
+ vkey_to_key,
+ vkey_to_shifted,
+ }
+ }
+
+ fn get_vkey_from_key(&self, key: &str, use_key_equivalents: bool) -> Option<(u16, bool)> {
+ if use_key_equivalents {
+ get_vkey_from_key_with_us_layout(key)
+ } else {
+ self.key_to_vkey.get(key).cloned()
+ }
+ }
+}
+
pub(crate) fn get_keystroke_key(
vkey: VIRTUAL_KEY,
scan_code: u32,
@@ -140,3 +239,134 @@ pub(crate) fn generate_key_char(
_ => None,
}
}
+
+fn get_vkey_from_key_with_us_layout(key: &str) -> Option<(u16, bool)> {
+ match key {
+ // ` => VK_OEM_3
+ "`" => Some((VK_OEM_3.0, false)),
+ "~" => Some((VK_OEM_3.0, true)),
+ "1" => Some((VK_1.0, false)),
+ "!" => Some((VK_1.0, true)),
+ "2" => Some((VK_2.0, false)),
+ "@" => Some((VK_2.0, true)),
+ "3" => Some((VK_3.0, false)),
+ "#" => Some((VK_3.0, true)),
+ "4" => Some((VK_4.0, false)),
+ "$" => Some((VK_4.0, true)),
+ "5" => Some((VK_5.0, false)),
+ "%" => Some((VK_5.0, true)),
+ "6" => Some((VK_6.0, false)),
+ "^" => Some((VK_6.0, true)),
+ "7" => Some((VK_7.0, false)),
+ "&" => Some((VK_7.0, true)),
+ "8" => Some((VK_8.0, false)),
+ "*" => Some((VK_8.0, true)),
+ "9" => Some((VK_9.0, false)),
+ "(" => Some((VK_9.0, true)),
+ "0" => Some((VK_0.0, false)),
+ ")" => Some((VK_0.0, true)),
+ "-" => Some((VK_OEM_MINUS.0, false)),
+ "_" => Some((VK_OEM_MINUS.0, true)),
+ "=" => Some((VK_OEM_PLUS.0, false)),
+ "+" => Some((VK_OEM_PLUS.0, true)),
+ "[" => Some((VK_OEM_4.0, false)),
+ "{" => Some((VK_OEM_4.0, true)),
+ "]" => Some((VK_OEM_6.0, false)),
+ "}" => Some((VK_OEM_6.0, true)),
+ "\\" => Some((VK_OEM_5.0, false)),
+ "|" => Some((VK_OEM_5.0, true)),
+ ";" => Some((VK_OEM_1.0, false)),
+ ":" => Some((VK_OEM_1.0, true)),
+ "'" => Some((VK_OEM_7.0, false)),
+ "\"" => Some((VK_OEM_7.0, true)),
+ "," => Some((VK_OEM_COMMA.0, false)),
+ "<" => Some((VK_OEM_COMMA.0, true)),
+ "." => Some((VK_OEM_PERIOD.0, false)),
+ ">" => Some((VK_OEM_PERIOD.0, true)),
+ "/" => Some((VK_OEM_2.0, false)),
+ "?" => Some((VK_OEM_2.0, true)),
+ _ => None,
+ }
+}
+
+const CANDIDATE_VKEYS: &[VIRTUAL_KEY] = &[
+ VK_OEM_3,
+ VK_OEM_MINUS,
+ VK_OEM_PLUS,
+ VK_OEM_4,
+ VK_OEM_5,
+ VK_OEM_6,
+ VK_OEM_1,
+ VK_OEM_7,
+ VK_OEM_COMMA,
+ VK_OEM_PERIOD,
+ VK_OEM_2,
+ VK_OEM_102,
+ VK_OEM_8,
+ VK_ABNT_C1,
+ VK_0,
+ VK_1,
+ VK_2,
+ VK_3,
+ VK_4,
+ VK_5,
+ VK_6,
+ VK_7,
+ VK_8,
+ VK_9,
+];
+
+#[cfg(test)]
+mod tests {
+ use crate::{Keystroke, Modifiers, PlatformKeyboardMapper, WindowsKeyboardMapper};
+
+ #[test]
+ fn test_keyboard_mapper() {
+ let mapper = WindowsKeyboardMapper::new();
+
+ // Normal case
+ let keystroke = Keystroke {
+ modifiers: Modifiers::control(),
+ key: "a".to_string(),
+ key_char: None,
+ };
+ let mapped = mapper.map_key_equivalent(keystroke.clone(), true);
+ assert_eq!(*mapped.inner(), keystroke);
+ assert_eq!(mapped.key(), "a");
+ assert_eq!(*mapped.modifiers(), Modifiers::control());
+
+ // Shifted case, ctrl-$
+ let keystroke = Keystroke {
+ modifiers: Modifiers::control(),
+ key: "$".to_string(),
+ key_char: None,
+ };
+ let mapped = mapper.map_key_equivalent(keystroke.clone(), true);
+ assert_eq!(*mapped.inner(), keystroke);
+ assert_eq!(mapped.key(), "4");
+ assert_eq!(*mapped.modifiers(), Modifiers::control_shift());
+
+ // Shifted case, but shift is true
+ let keystroke = Keystroke {
+ modifiers: Modifiers::control_shift(),
+ key: "$".to_string(),
+ key_char: None,
+ };
+ let mapped = mapper.map_key_equivalent(keystroke, true);
+ assert_eq!(mapped.inner().modifiers, Modifiers::control());
+ assert_eq!(mapped.key(), "4");
+ assert_eq!(*mapped.modifiers(), Modifiers::control_shift());
+
+ // Windows style
+ let keystroke = Keystroke {
+ modifiers: Modifiers::control_shift(),
+ key: "4".to_string(),
+ key_char: None,
+ };
+ let mapped = mapper.map_key_equivalent(keystroke, true);
+ assert_eq!(mapped.inner().modifiers, Modifiers::control());
+ assert_eq!(mapped.inner().key, "$");
+ assert_eq!(mapped.key(), "4");
+ assert_eq!(*mapped.modifiers(), Modifiers::control_shift());
+ }
+}
@@ -1,8 +1,9 @@
use std::{
cell::RefCell,
+ ffi::OsStr,
mem::ManuallyDrop,
path::{Path, PathBuf},
- rc::Rc,
+ rc::{Rc, Weak},
sync::Arc,
};
@@ -17,12 +18,9 @@ use windows::{
UI::ViewManagement::UISettings,
Win32::{
Foundation::*,
- Graphics::{
- Gdi::*,
- Imaging::{CLSID_WICImagingFactory, IWICImagingFactory},
- },
+ Graphics::{Direct3D11::ID3D11Device, Gdi::*},
Security::Credentials::*,
- System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*, Threading::*},
+ System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*},
UI::{Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*},
},
core::*,
@@ -31,28 +29,34 @@ use windows::{
use crate::*;
pub(crate) struct WindowsPlatform {
- state: RefCell<WindowsPlatformState>,
+ inner: Rc<WindowsPlatformInner>,
raw_window_handles: Arc<RwLock<SmallVec<[SafeHwnd; 4]>>>,
// The below members will never change throughout the entire lifecycle of the app.
icon: HICON,
- main_receiver: flume::Receiver<Runnable>,
background_executor: BackgroundExecutor,
foreground_executor: ForegroundExecutor,
text_system: Arc<DirectWriteTextSystem>,
windows_version: WindowsVersion,
- bitmap_factory: ManuallyDrop<IWICImagingFactory>,
drop_target_helper: IDropTargetHelper,
- validation_number: usize,
- main_thread_id_win32: u32,
+ handle: HWND,
disable_direct_composition: bool,
}
+struct WindowsPlatformInner {
+ state: RefCell<WindowsPlatformState>,
+ raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
+ // The below members will never change throughout the entire lifecycle of the app.
+ validation_number: usize,
+ main_receiver: flume::Receiver<Runnable>,
+}
+
pub(crate) struct WindowsPlatformState {
callbacks: PlatformCallbacks,
menus: Vec<OwnedMenu>,
jump_list: JumpList,
// NOTE: standard cursor handles don't need to close.
pub(crate) current_cursor: Option<HCURSOR>,
+ directx_devices: ManuallyDrop<DirectXDevices>,
}
#[derive(Default)]
@@ -67,15 +71,17 @@ struct PlatformCallbacks {
}
impl WindowsPlatformState {
- fn new() -> Self {
+ fn new(directx_devices: DirectXDevices) -> Self {
let callbacks = PlatformCallbacks::default();
let jump_list = JumpList::new();
let current_cursor = load_cursor(CursorStyle::Arrow);
+ let directx_devices = ManuallyDrop::new(directx_devices);
Self {
callbacks,
jump_list,
current_cursor,
+ directx_devices,
menus: Vec::new(),
}
}
@@ -86,51 +92,72 @@ impl WindowsPlatform {
unsafe {
OleInitialize(None).context("unable to initialize Windows OLE")?;
}
+ let directx_devices = DirectXDevices::new().context("Creating DirectX devices")?;
let (main_sender, main_receiver) = flume::unbounded::<Runnable>();
- let main_thread_id_win32 = unsafe { GetCurrentThreadId() };
- let validation_number = rand::random::<usize>();
+ let validation_number = if usize::BITS == 64 {
+ rand::random::<u64>() as usize
+ } else {
+ rand::random::<u32>() as usize
+ };
+ let raw_window_handles = Arc::new(RwLock::new(SmallVec::new()));
+ let text_system = Arc::new(
+ DirectWriteTextSystem::new(&directx_devices)
+ .context("Error creating DirectWriteTextSystem")?,
+ );
+ register_platform_window_class();
+ let mut context = PlatformWindowCreateContext {
+ inner: None,
+ raw_window_handles: Arc::downgrade(&raw_window_handles),
+ validation_number,
+ main_receiver: Some(main_receiver),
+ directx_devices: Some(directx_devices),
+ };
+ let result = unsafe {
+ CreateWindowExW(
+ WINDOW_EX_STYLE(0),
+ PLATFORM_WINDOW_CLASS_NAME,
+ None,
+ WINDOW_STYLE(0),
+ 0,
+ 0,
+ 0,
+ 0,
+ Some(HWND_MESSAGE),
+ None,
+ None,
+ Some(&context as *const _ as *const _),
+ )
+ };
+ let inner = context.inner.take().unwrap()?;
+ let handle = result?;
let dispatcher = Arc::new(WindowsDispatcher::new(
main_sender,
- main_thread_id_win32,
+ handle,
validation_number,
));
let disable_direct_composition = std::env::var(DISABLE_DIRECT_COMPOSITION)
.is_ok_and(|value| value == "true" || value == "1");
let background_executor = BackgroundExecutor::new(dispatcher.clone());
let foreground_executor = ForegroundExecutor::new(dispatcher);
- let directx_devices = DirectXDevices::new(disable_direct_composition)
- .context("Unable to init directx devices.")?;
- let bitmap_factory = ManuallyDrop::new(unsafe {
- CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_INPROC_SERVER)
- .context("Error creating bitmap factory.")?
- });
- let text_system = Arc::new(
- DirectWriteTextSystem::new(&directx_devices, &bitmap_factory)
- .context("Error creating DirectWriteTextSystem")?,
- );
+
let drop_target_helper: IDropTargetHelper = unsafe {
CoCreateInstance(&CLSID_DragDropHelper, None, CLSCTX_INPROC_SERVER)
.context("Error creating drop target helper.")?
};
let icon = load_icon().unwrap_or_default();
- let state = RefCell::new(WindowsPlatformState::new());
- let raw_window_handles = Arc::new(RwLock::new(SmallVec::new()));
let windows_version = WindowsVersion::new().context("Error retrieve windows version")?;
Ok(Self {
- state,
+ inner,
+ handle,
raw_window_handles,
icon,
- main_receiver,
background_executor,
foreground_executor,
text_system,
disable_direct_composition,
windows_version,
- bitmap_factory,
drop_target_helper,
- validation_number,
- main_thread_id_win32,
})
}
@@ -152,119 +179,21 @@ impl WindowsPlatform {
});
}
- fn close_one_window(&self, target_window: HWND) -> bool {
- let mut lock = self.raw_window_handles.write();
- let index = lock
- .iter()
- .position(|handle| handle.as_raw() == target_window)
- .unwrap();
- lock.remove(index);
-
- lock.is_empty()
- }
-
- #[inline]
- fn run_foreground_task(&self) {
- for runnable in self.main_receiver.drain() {
- runnable.run();
- }
- }
-
fn generate_creation_info(&self) -> WindowCreationInfo {
WindowCreationInfo {
icon: self.icon,
executor: self.foreground_executor.clone(),
- current_cursor: self.state.borrow().current_cursor,
+ current_cursor: self.inner.state.borrow().current_cursor,
windows_version: self.windows_version,
drop_target_helper: self.drop_target_helper.clone(),
- validation_number: self.validation_number,
- main_receiver: self.main_receiver.clone(),
- main_thread_id_win32: self.main_thread_id_win32,
+ validation_number: self.inner.validation_number,
+ main_receiver: self.inner.main_receiver.clone(),
+ platform_window_handle: self.handle,
disable_direct_composition: self.disable_direct_composition,
+ directx_devices: (*self.inner.state.borrow().directx_devices).clone(),
}
}
- fn handle_dock_action_event(&self, action_idx: usize) {
- let mut lock = self.state.borrow_mut();
- if let Some(mut callback) = lock.callbacks.app_menu_action.take() {
- let Some(action) = lock
- .jump_list
- .dock_menus
- .get(action_idx)
- .map(|dock_menu| dock_menu.action.boxed_clone())
- else {
- lock.callbacks.app_menu_action = Some(callback);
- log::error!("Dock menu for index {action_idx} not found");
- return;
- };
- drop(lock);
- callback(&*action);
- self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
- }
- }
-
- fn handle_input_lang_change(&self) {
- let mut lock = self.state.borrow_mut();
- if let Some(mut callback) = lock.callbacks.keyboard_layout_change.take() {
- drop(lock);
- callback();
- self.state
- .borrow_mut()
- .callbacks
- .keyboard_layout_change
- .get_or_insert(callback);
- }
- }
-
- // Returns if the app should quit.
- fn handle_events(&self) {
- let mut msg = MSG::default();
- unsafe {
- while GetMessageW(&mut msg, None, 0, 0).as_bool() {
- match msg.message {
- WM_QUIT => return,
- WM_INPUTLANGCHANGE
- | WM_GPUI_CLOSE_ONE_WINDOW
- | WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD
- | WM_GPUI_DOCK_MENU_ACTION => {
- if self.handle_gpui_events(msg.message, msg.wParam, msg.lParam, &msg) {
- return;
- }
- }
- _ => {
- DispatchMessageW(&msg);
- }
- }
- }
- }
- }
-
- // Returns true if the app should quit.
- fn handle_gpui_events(
- &self,
- message: u32,
- wparam: WPARAM,
- lparam: LPARAM,
- msg: *const MSG,
- ) -> bool {
- if wparam.0 != self.validation_number {
- unsafe { DispatchMessageW(msg) };
- return false;
- }
- match message {
- WM_GPUI_CLOSE_ONE_WINDOW => {
- if self.close_one_window(HWND(lparam.0 as _)) {
- return true;
- }
- }
- WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD => self.run_foreground_task(),
- WM_GPUI_DOCK_MENU_ACTION => self.handle_dock_action_event(lparam.0 as _),
- WM_INPUTLANGCHANGE => self.handle_input_lang_change(),
- _ => unreachable!(),
- }
- false
- }
-
fn set_dock_menus(&self, menus: Vec<MenuItem>) {
let mut actions = Vec::new();
menus.into_iter().for_each(|menu| {
@@ -272,7 +201,7 @@ impl WindowsPlatform {
actions.push(dock_menu);
}
});
- let mut lock = self.state.borrow_mut();
+ let mut lock = self.inner.state.borrow_mut();
lock.jump_list.dock_menus = actions;
update_jump_list(&lock.jump_list).log_err();
}
@@ -288,7 +217,7 @@ impl WindowsPlatform {
actions.push(dock_menu);
}
});
- let mut lock = self.state.borrow_mut();
+ let mut lock = self.inner.state.borrow_mut();
lock.jump_list.dock_menus = actions;
lock.jump_list.recent_workspaces = entries;
update_jump_list(&lock.jump_list)
@@ -309,19 +238,30 @@ impl WindowsPlatform {
}
fn begin_vsync_thread(&self) {
+ let mut directx_device = (*self.inner.state.borrow().directx_devices).clone();
+ let platform_window: SafeHwnd = self.handle.into();
+ let validation_number = self.inner.validation_number;
let all_windows = Arc::downgrade(&self.raw_window_handles);
+ let text_system = Arc::downgrade(&self.text_system);
std::thread::spawn(move || {
let vsync_provider = VSyncProvider::new();
loop {
vsync_provider.wait_for_vsync();
+ if check_device_lost(&directx_device.device) {
+ handle_gpu_device_lost(
+ &mut directx_device,
+ platform_window.as_raw(),
+ validation_number,
+ &all_windows,
+ &text_system,
+ );
+ }
let Some(all_windows) = all_windows.upgrade() else {
break;
};
for hwnd in all_windows.read().iter() {
unsafe {
- RedrawWindow(Some(hwnd.as_raw()), None, None, RDW_INVALIDATE)
- .ok()
- .log_err();
+ let _ = RedrawWindow(Some(hwnd.as_raw()), None, None, RDW_INVALIDATE);
}
}
}
@@ -350,16 +290,30 @@ impl Platform for WindowsPlatform {
)
}
+ fn keyboard_mapper(&self) -> Rc<dyn PlatformKeyboardMapper> {
+ Rc::new(WindowsKeyboardMapper::new())
+ }
+
fn on_keyboard_layout_change(&self, callback: Box<dyn FnMut()>) {
- self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback);
+ self.inner
+ .state
+ .borrow_mut()
+ .callbacks
+ .keyboard_layout_change = Some(callback);
}
fn run(&self, on_finish_launching: Box<dyn 'static + FnOnce()>) {
on_finish_launching();
self.begin_vsync_thread();
- self.handle_events();
- if let Some(ref mut callback) = self.state.borrow_mut().callbacks.quit {
+ let mut msg = MSG::default();
+ unsafe {
+ while GetMessageW(&mut msg, None, 0, 0).as_bool() {
+ DispatchMessageW(&msg);
+ }
+ }
+
+ if let Some(ref mut callback) = self.inner.state.borrow_mut().callbacks.quit {
callback();
}
}
@@ -460,19 +414,21 @@ impl Platform for WindowsPlatform {
}
fn open_url(&self, url: &str) {
+ if url.is_empty() {
+ return;
+ }
let url_string = url.to_string();
self.background_executor()
.spawn(async move {
- if url_string.is_empty() {
- return;
- }
- open_target(url_string.as_str());
+ open_target(&url_string)
+ .with_context(|| format!("Opening url: {}", url_string))
+ .log_err();
})
.detach();
}
fn on_open_urls(&self, callback: Box<dyn FnMut(Vec<String>)>) {
- self.state.borrow_mut().callbacks.open_urls = Some(callback);
+ self.inner.state.borrow_mut().callbacks.open_urls = Some(callback);
}
fn prompt_for_paths(
@@ -514,55 +470,47 @@ impl Platform for WindowsPlatform {
}
fn reveal_path(&self, path: &Path) {
- let Ok(file_full_path) = path.canonicalize() else {
- log::error!("unable to parse file path");
+ if path.as_os_str().is_empty() {
return;
- };
+ }
+ let path = path.to_path_buf();
self.background_executor()
.spawn(async move {
- let Some(path) = file_full_path.to_str() else {
- return;
- };
- if path.is_empty() {
- return;
- }
- open_target_in_explorer(path);
+ open_target_in_explorer(&path)
+ .with_context(|| format!("Revealing path {} in explorer", path.display()))
+ .log_err();
})
.detach();
}
fn open_with_system(&self, path: &Path) {
- let Ok(full_path) = path.canonicalize() else {
- log::error!("unable to parse file full path: {}", path.display());
+ if path.as_os_str().is_empty() {
return;
- };
+ }
+ let path = path.to_path_buf();
self.background_executor()
.spawn(async move {
- let Some(full_path_str) = full_path.to_str() else {
- return;
- };
- if full_path_str.is_empty() {
- return;
- };
- open_target(full_path_str);
+ open_target(&path)
+ .with_context(|| format!("Opening {} with system", path.display()))
+ .log_err();
})
.detach();
}
fn on_quit(&self, callback: Box<dyn FnMut()>) {
- self.state.borrow_mut().callbacks.quit = Some(callback);
+ self.inner.state.borrow_mut().callbacks.quit = Some(callback);
}
fn on_reopen(&self, callback: Box<dyn FnMut()>) {
- self.state.borrow_mut().callbacks.reopen = Some(callback);
+ self.inner.state.borrow_mut().callbacks.reopen = Some(callback);
}
fn set_menus(&self, menus: Vec<Menu>, _keymap: &Keymap) {
- self.state.borrow_mut().menus = menus.into_iter().map(|menu| menu.owned()).collect();
+ self.inner.state.borrow_mut().menus = menus.into_iter().map(|menu| menu.owned()).collect();
}
fn get_menus(&self) -> Option<Vec<OwnedMenu>> {
- Some(self.state.borrow().menus.clone())
+ Some(self.inner.state.borrow().menus.clone())
}
fn set_dock_menu(&self, menus: Vec<MenuItem>, _keymap: &Keymap) {
@@ -570,15 +518,19 @@ impl Platform for WindowsPlatform {
}
fn on_app_menu_action(&self, callback: Box<dyn FnMut(&dyn Action)>) {
- self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
+ self.inner.state.borrow_mut().callbacks.app_menu_action = Some(callback);
}
fn on_will_open_app_menu(&self, callback: Box<dyn FnMut()>) {
- self.state.borrow_mut().callbacks.will_open_app_menu = Some(callback);
+ self.inner.state.borrow_mut().callbacks.will_open_app_menu = Some(callback);
}
fn on_validate_app_menu_command(&self, callback: Box<dyn FnMut(&dyn Action) -> bool>) {
- self.state.borrow_mut().callbacks.validate_app_menu_command = Some(callback);
+ self.inner
+ .state
+ .borrow_mut()
+ .callbacks
+ .validate_app_menu_command = Some(callback);
}
fn app_path(&self) -> Result<PathBuf> {
@@ -592,7 +544,7 @@ impl Platform for WindowsPlatform {
fn set_cursor_style(&self, style: CursorStyle) {
let hcursor = load_cursor(style);
- let mut lock = self.state.borrow_mut();
+ let mut lock = self.inner.state.borrow_mut();
if lock.current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) {
self.post_message(
WM_GPUI_CURSOR_STYLE_CHANGED,
@@ -695,10 +647,10 @@ impl Platform for WindowsPlatform {
fn perform_dock_menu_action(&self, action: usize) {
unsafe {
- PostThreadMessageW(
- self.main_thread_id_win32,
+ PostMessageW(
+ Some(self.handle),
WM_GPUI_DOCK_MENU_ACTION,
- WPARAM(self.validation_number),
+ WPARAM(self.inner.validation_number),
LPARAM(action as isize),
)
.log_err();
@@ -714,15 +666,147 @@ impl Platform for WindowsPlatform {
}
}
+impl WindowsPlatformInner {
+ fn new(context: &mut PlatformWindowCreateContext) -> Result<Rc<Self>> {
+ let state = RefCell::new(WindowsPlatformState::new(
+ context.directx_devices.take().unwrap(),
+ ));
+ Ok(Rc::new(Self {
+ state,
+ raw_window_handles: context.raw_window_handles.clone(),
+ validation_number: context.validation_number,
+ main_receiver: context.main_receiver.take().unwrap(),
+ }))
+ }
+
+ fn handle_msg(
+ self: &Rc<Self>,
+ handle: HWND,
+ msg: u32,
+ wparam: WPARAM,
+ lparam: LPARAM,
+ ) -> LRESULT {
+ let handled = match msg {
+ WM_GPUI_CLOSE_ONE_WINDOW
+ | WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD
+ | WM_GPUI_DOCK_MENU_ACTION
+ | WM_GPUI_KEYBOARD_LAYOUT_CHANGED
+ | WM_GPUI_GPU_DEVICE_LOST => self.handle_gpui_events(msg, wparam, lparam),
+ _ => None,
+ };
+ if let Some(result) = handled {
+ LRESULT(result)
+ } else {
+ unsafe { DefWindowProcW(handle, msg, wparam, lparam) }
+ }
+ }
+
+ fn handle_gpui_events(&self, message: u32, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
+ if wparam.0 != self.validation_number {
+ log::error!("Wrong validation number while processing message: {message}");
+ return None;
+ }
+ match message {
+ WM_GPUI_CLOSE_ONE_WINDOW => {
+ if self.close_one_window(HWND(lparam.0 as _)) {
+ unsafe { PostQuitMessage(0) };
+ }
+ Some(0)
+ }
+ WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD => self.run_foreground_task(),
+ WM_GPUI_DOCK_MENU_ACTION => self.handle_dock_action_event(lparam.0 as _),
+ WM_GPUI_KEYBOARD_LAYOUT_CHANGED => self.handle_keyboard_layout_change(),
+ WM_GPUI_GPU_DEVICE_LOST => self.handle_device_lost(lparam),
+ _ => unreachable!(),
+ }
+ }
+
+ fn close_one_window(&self, target_window: HWND) -> bool {
+ let Some(all_windows) = self.raw_window_handles.upgrade() else {
+ log::error!("Failed to upgrade raw window handles");
+ return false;
+ };
+ let mut lock = all_windows.write();
+ let index = lock
+ .iter()
+ .position(|handle| handle.as_raw() == target_window)
+ .unwrap();
+ lock.remove(index);
+
+ lock.is_empty()
+ }
+
+ #[inline]
+ fn run_foreground_task(&self) -> Option<isize> {
+ for runnable in self.main_receiver.drain() {
+ runnable.run();
+ }
+ Some(0)
+ }
+
+ fn handle_dock_action_event(&self, action_idx: usize) -> Option<isize> {
+ let mut lock = self.state.borrow_mut();
+ let mut callback = lock.callbacks.app_menu_action.take()?;
+ let Some(action) = lock
+ .jump_list
+ .dock_menus
+ .get(action_idx)
+ .map(|dock_menu| dock_menu.action.boxed_clone())
+ else {
+ lock.callbacks.app_menu_action = Some(callback);
+ log::error!("Dock menu for index {action_idx} not found");
+ return Some(1);
+ };
+ drop(lock);
+ callback(&*action);
+ self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
+ Some(0)
+ }
+
+ fn handle_keyboard_layout_change(&self) -> Option<isize> {
+ let mut callback = self
+ .state
+ .borrow_mut()
+ .callbacks
+ .keyboard_layout_change
+ .take()?;
+ callback();
+ self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback);
+ Some(0)
+ }
+
+ fn handle_device_lost(&self, lparam: LPARAM) -> Option<isize> {
+ let mut lock = self.state.borrow_mut();
+ let directx_devices = lparam.0 as *const DirectXDevices;
+ let directx_devices = unsafe { &*directx_devices };
+ unsafe {
+ ManuallyDrop::drop(&mut lock.directx_devices);
+ }
+ lock.directx_devices = ManuallyDrop::new(directx_devices.clone());
+
+ Some(0)
+ }
+}
+
impl Drop for WindowsPlatform {
fn drop(&mut self) {
unsafe {
- ManuallyDrop::drop(&mut self.bitmap_factory);
+ DestroyWindow(self.handle)
+ .context("Destroying platform window")
+ .log_err();
OleUninitialize();
}
}
}
+impl Drop for WindowsPlatformState {
+ fn drop(&mut self) {
+ unsafe {
+ ManuallyDrop::drop(&mut self.directx_devices);
+ }
+ }
+}
+
pub(crate) struct WindowCreationInfo {
pub(crate) icon: HICON,
pub(crate) executor: ForegroundExecutor,
@@ -731,43 +815,80 @@ pub(crate) struct WindowCreationInfo {
pub(crate) drop_target_helper: IDropTargetHelper,
pub(crate) validation_number: usize,
pub(crate) main_receiver: flume::Receiver<Runnable>,
- pub(crate) main_thread_id_win32: u32,
+ pub(crate) platform_window_handle: HWND,
pub(crate) disable_direct_composition: bool,
+ pub(crate) directx_devices: DirectXDevices,
}
-fn open_target(target: &str) {
- unsafe {
- let ret = ShellExecuteW(
+struct PlatformWindowCreateContext {
+ inner: Option<Result<Rc<WindowsPlatformInner>>>,
+ raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
+ validation_number: usize,
+ main_receiver: Option<flume::Receiver<Runnable>>,
+ directx_devices: Option<DirectXDevices>,
+}
+
+fn open_target(target: impl AsRef<OsStr>) -> Result<()> {
+ let target = target.as_ref();
+ let ret = unsafe {
+ ShellExecuteW(
None,
windows::core::w!("open"),
&HSTRING::from(target),
None,
None,
SW_SHOWDEFAULT,
- );
- if ret.0 as isize <= 32 {
- log::error!("Unable to open target: {}", std::io::Error::last_os_error());
- }
+ )
+ };
+ if ret.0 as isize <= 32 {
+ Err(anyhow::anyhow!(
+ "Unable to open target: {}",
+ std::io::Error::last_os_error()
+ ))
+ } else {
+ Ok(())
}
}
-fn open_target_in_explorer(target: &str) {
+fn open_target_in_explorer(target: &Path) -> Result<()> {
+ let dir = target.parent().context("No parent folder found")?;
+ let desktop = unsafe { SHGetDesktopFolder()? };
+
+ let mut dir_item = std::ptr::null_mut();
unsafe {
- let ret = ShellExecuteW(
+ desktop.ParseDisplayName(
+ HWND::default(),
None,
- windows::core::w!("open"),
- windows::core::w!("explorer.exe"),
- &HSTRING::from(format!("/select,{}", target).as_str()),
+ &HSTRING::from(dir),
None,
- SW_SHOWDEFAULT,
- );
- if ret.0 as isize <= 32 {
- log::error!(
- "Unable to open target in explorer: {}",
- std::io::Error::last_os_error()
- );
- }
+ &mut dir_item,
+ std::ptr::null_mut(),
+ )?;
}
+
+ let mut file_item = std::ptr::null_mut();
+ unsafe {
+ desktop.ParseDisplayName(
+ HWND::default(),
+ None,
+ &HSTRING::from(target),
+ None,
+ &mut file_item,
+ std::ptr::null_mut(),
+ )?;
+ }
+
+ let highlight = [file_item as *const _];
+ unsafe { SHOpenFolderAndSelectItems(dir_item as _, Some(&highlight), 0) }.or_else(|err| {
+ if err.code().0 == ERROR_FILE_NOT_FOUND.0 as i32 {
+ // On some systems, the above call mysteriously fails with "file not
+ // found" even though the file is there. In these cases, ShellExecute()
+ // seems to work as a fallback (although it won't select the file).
+ open_target(dir).context("Opening target parent folder")
+ } else {
+ Err(anyhow::anyhow!("Can not open target path: {}", err))
+ }
+ })
}
fn file_open_dialog(
@@ -824,7 +945,7 @@ fn file_save_dialog(
if !directory.to_string_lossy().is_empty()
&& let Some(full_path) = directory.canonicalize().log_err()
{
- let full_path = SanitizedPath::from(full_path);
+ let full_path = SanitizedPath::new(&full_path);
let full_path_string = full_path.to_string();
let path_item: IShellItem =
unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_string), None)? };
@@ -877,6 +998,135 @@ fn should_auto_hide_scrollbars() -> Result<bool> {
Ok(ui_settings.AutoHideScrollBars()?)
}
+fn check_device_lost(device: &ID3D11Device) -> bool {
+ let device_state = unsafe { device.GetDeviceRemovedReason() };
+ match device_state {
+ Ok(_) => false,
+ Err(err) => {
+ log::error!("DirectX device lost detected: {:?}", err);
+ true
+ }
+ }
+}
+
+fn handle_gpu_device_lost(
+ directx_devices: &mut DirectXDevices,
+ platform_window: HWND,
+ validation_number: usize,
+ all_windows: &std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
+ text_system: &std::sync::Weak<DirectWriteTextSystem>,
+) {
+ // Here we wait a bit to ensure the the system has time to recover from the device lost state.
+ // If we don't wait, the final drawing result will be blank.
+ std::thread::sleep(std::time::Duration::from_millis(350));
+
+ try_to_recover_from_device_lost(
+ || {
+ DirectXDevices::new()
+ .context("Failed to recreate new DirectX devices after device lost")
+ },
+ |new_devices| *directx_devices = new_devices,
+ || {
+ log::error!("Failed to recover DirectX devices after multiple attempts.");
+ // Do something here?
+ // At this point, the device loss is considered unrecoverable.
+ // std::process::exit(1);
+ },
+ );
+ log::info!("DirectX devices successfully recreated.");
+
+ unsafe {
+ SendMessageW(
+ platform_window,
+ WM_GPUI_GPU_DEVICE_LOST,
+ Some(WPARAM(validation_number)),
+ Some(LPARAM(directx_devices as *const _ as _)),
+ );
+ }
+
+ if let Some(text_system) = text_system.upgrade() {
+ text_system.handle_gpu_lost(&directx_devices);
+ }
+ if let Some(all_windows) = all_windows.upgrade() {
+ for window in all_windows.read().iter() {
+ unsafe {
+ SendMessageW(
+ window.as_raw(),
+ WM_GPUI_GPU_DEVICE_LOST,
+ Some(WPARAM(validation_number)),
+ Some(LPARAM(directx_devices as *const _ as _)),
+ );
+ }
+ }
+ std::thread::sleep(std::time::Duration::from_millis(200));
+ for window in all_windows.read().iter() {
+ unsafe {
+ SendMessageW(
+ window.as_raw(),
+ WM_GPUI_FORCE_UPDATE_WINDOW,
+ Some(WPARAM(validation_number)),
+ None,
+ );
+ }
+ }
+ }
+}
+
+const PLATFORM_WINDOW_CLASS_NAME: PCWSTR = w!("Zed::PlatformWindow");
+
+fn register_platform_window_class() {
+ let wc = WNDCLASSW {
+ lpfnWndProc: Some(window_procedure),
+ lpszClassName: PCWSTR(PLATFORM_WINDOW_CLASS_NAME.as_ptr()),
+ ..Default::default()
+ };
+ unsafe { RegisterClassW(&wc) };
+}
+
+unsafe extern "system" fn window_procedure(
+ hwnd: HWND,
+ msg: u32,
+ wparam: WPARAM,
+ lparam: LPARAM,
+) -> LRESULT {
+ if msg == WM_NCCREATE {
+ let params = lparam.0 as *const CREATESTRUCTW;
+ let params = unsafe { &*params };
+ let creation_context = params.lpCreateParams as *mut PlatformWindowCreateContext;
+ let creation_context = unsafe { &mut *creation_context };
+ return match WindowsPlatformInner::new(creation_context) {
+ Ok(inner) => {
+ let weak = Box::new(Rc::downgrade(&inner));
+ unsafe { set_window_long(hwnd, GWLP_USERDATA, Box::into_raw(weak) as isize) };
+ creation_context.inner = Some(Ok(inner));
+ unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) }
+ }
+ Err(error) => {
+ creation_context.inner = Some(Err(error));
+ LRESULT(0)
+ }
+ };
+ }
+
+ let ptr = unsafe { get_window_long(hwnd, GWLP_USERDATA) } as *mut Weak<WindowsPlatformInner>;
+ if ptr.is_null() {
+ return unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) };
+ }
+ let inner = unsafe { &*ptr };
+ let result = if let Some(inner) = inner.upgrade() {
+ inner.handle_msg(hwnd, msg, wparam, lparam)
+ } else {
+ unsafe { DefWindowProcW(hwnd, msg, wparam, lparam) }
+ };
+
+ if msg == WM_NCDESTROY {
+ unsafe { set_window_long(hwnd, GWLP_USERDATA, 0) };
+ unsafe { drop(Box::from_raw(ptr)) };
+ }
+
+ result
+}
+
#[cfg(test)]
mod tests {
use crate::{ClipboardItem, read_from_clipboard, write_to_clipboard};
@@ -1,6 +1,10 @@
+#include "alpha_correction.hlsl"
+
cbuffer GlobalParams: register(b0) {
+ float4 gamma_ratios;
float2 global_viewport_size;
- uint2 _pad;
+ float grayscale_enhanced_contrast;
+ uint _pad;
};
Texture2D<float4> t_sprite: register(t0);
@@ -1098,7 +1102,8 @@ MonochromeSpriteVertexOutput monochrome_sprite_vertex(uint vertex_id: SV_VertexI
float4 monochrome_sprite_fragment(MonochromeSpriteFragmentInput input): SV_Target {
float sample = t_sprite.Sample(s_sprite, input.tile_position).r;
- return float4(input.color.rgb, input.color.a * sample);
+ float alpha_corrected = apply_contrast_and_gamma_correction(sample, input.color.rgb, grayscale_enhanced_contrast, gamma_ratios);
+ return float4(input.color.rgb, input.color.a * alpha_corrected);
}
/*
@@ -94,7 +94,7 @@ impl VSyncProvider {
// DwmFlush and DCompositionWaitForCompositorClock returns very early
// instead of waiting until vblank when the monitor goes to sleep or is
// unplugged (nothing to present due to desktop occlusion). We use 1ms as
- // a threshhold for the duration of the wait functions and fallback to
+ // a threshold for the duration of the wait functions and fallback to
// Sleep() if it returns before that. This could happen during normal
// operation for the first call after the vsync thread becomes non-idle,
// but it shouldn't happen often.
@@ -73,12 +73,13 @@ pub(crate) struct WindowsWindowInner {
pub(crate) windows_version: WindowsVersion,
pub(crate) validation_number: usize,
pub(crate) main_receiver: flume::Receiver<Runnable>,
- pub(crate) main_thread_id_win32: u32,
+ pub(crate) platform_window_handle: HWND,
}
impl WindowsWindowState {
fn new(
hwnd: HWND,
+ directx_devices: &DirectXDevices,
window_params: &CREATESTRUCTW,
current_cursor: Option<HCURSOR>,
display: WindowsDisplay,
@@ -104,7 +105,7 @@ impl WindowsWindowState {
};
let border_offset = WindowBorderOffset::default();
let restore_from_minimized = None;
- let renderer = DirectXRenderer::new(hwnd, disable_direct_composition)
+ let renderer = DirectXRenderer::new(hwnd, directx_devices, disable_direct_composition)
.context("Creating DirectX renderer")?;
let callbacks = Callbacks::default();
let input_handler = None;
@@ -205,9 +206,10 @@ impl WindowsWindowState {
}
impl WindowsWindowInner {
- fn new(context: &WindowCreateContext, hwnd: HWND, cs: &CREATESTRUCTW) -> Result<Rc<Self>> {
+ fn new(context: &mut WindowCreateContext, hwnd: HWND, cs: &CREATESTRUCTW) -> Result<Rc<Self>> {
let state = RefCell::new(WindowsWindowState::new(
hwnd,
+ &context.directx_devices,
cs,
context.current_cursor,
context.display,
@@ -228,7 +230,7 @@ impl WindowsWindowInner {
windows_version: context.windows_version,
validation_number: context.validation_number,
main_receiver: context.main_receiver.clone(),
- main_thread_id_win32: context.main_thread_id_win32,
+ platform_window_handle: context.platform_window_handle,
}))
}
@@ -342,9 +344,10 @@ struct WindowCreateContext {
drop_target_helper: IDropTargetHelper,
validation_number: usize,
main_receiver: flume::Receiver<Runnable>,
- main_thread_id_win32: u32,
+ platform_window_handle: HWND,
appearance: WindowAppearance,
disable_direct_composition: bool,
+ directx_devices: DirectXDevices,
}
impl WindowsWindow {
@@ -361,8 +364,9 @@ impl WindowsWindow {
drop_target_helper,
validation_number,
main_receiver,
- main_thread_id_win32,
+ platform_window_handle,
disable_direct_composition,
+ directx_devices,
} = creation_info;
register_window_class(icon);
let hide_title_bar = params
@@ -382,10 +386,17 @@ impl WindowsWindow {
let (mut dwexstyle, dwstyle) = if params.kind == WindowKind::PopUp {
(WS_EX_TOOLWINDOW, WINDOW_STYLE(0x0))
} else {
- (
- WS_EX_APPWINDOW,
- WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX,
- )
+ let mut dwstyle = WS_SYSMENU;
+
+ if params.is_resizable {
+ dwstyle |= WS_THICKFRAME | WS_MAXIMIZEBOX;
+ }
+
+ if params.is_minimizable {
+ dwstyle |= WS_MINIMIZEBOX;
+ }
+
+ (WS_EX_APPWINDOW, dwstyle)
};
if !disable_direct_composition {
dwexstyle |= WS_EX_NOREDIRECTIONBITMAP;
@@ -412,9 +423,10 @@ impl WindowsWindow {
drop_target_helper,
validation_number,
main_receiver,
- main_thread_id_win32,
+ platform_window_handle,
appearance,
disable_direct_composition,
+ directx_devices,
};
let creation_result = unsafe {
CreateWindowExW(
@@ -592,10 +604,7 @@ impl PlatformWindow for WindowsWindow {
) -> Option<Receiver<usize>> {
let (done_tx, done_rx) = oneshot::channel();
let msg = msg.to_string();
- let detail_string = match detail {
- Some(info) => Some(info.to_string()),
- None => None,
- };
+ let detail_string = detail.map(|detail| detail.to_string());
let handle = self.0.hwnd;
let answers = answers.to_vec();
self.0
@@ -830,7 +839,7 @@ impl PlatformWindow for WindowsWindow {
self.0.state.borrow().renderer.gpu_specs().log_err()
}
- fn update_ime_position(&self, _bounds: Bounds<ScaledPixels>) {
+ fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
// There is no such thing on Windows.
}
}
@@ -108,7 +108,7 @@ impl From<SharedString> for Arc<str> {
fn from(val: SharedString) -> Self {
match val.0 {
ArcCow::Borrowed(borrowed) => Arc::from(borrowed),
- ArcCow::Owned(owned) => owned.clone(),
+ ArcCow::Owned(owned) => owned,
}
}
}
@@ -45,27 +45,18 @@ impl TabHandles {
})
.unwrap_or_default();
- if let Some(next_handle) = self.handles.get(next_ix) {
- Some(next_handle.clone())
- } else {
- None
- }
+ self.handles.get(next_ix).cloned()
}
pub(crate) fn prev(&self, focused_id: Option<&FocusId>) -> Option<FocusHandle> {
let ix = self.current_index(focused_id).unwrap_or_default();
- let prev_ix;
- if ix == 0 {
- prev_ix = self.handles.len().saturating_sub(1);
+ let prev_ix = if ix == 0 {
+ self.handles.len().saturating_sub(1)
} else {
- prev_ix = ix.saturating_sub(1);
- }
+ ix.saturating_sub(1)
+ };
- if let Some(prev_handle) = self.handles.get(prev_ix) {
- Some(prev_handle.clone())
- } else {
- None
- }
+ self.handles.get(prev_ix).cloned()
}
}
@@ -58,23 +58,21 @@ impl TaffyLayoutEngine {
children: &[LayoutId],
) -> LayoutId {
let taffy_style = style.to_taffy(rem_size);
- let layout_id = if children.is_empty() {
+
+ if children.is_empty() {
self.taffy
.new_leaf(taffy_style)
.expect(EXPECT_MESSAGE)
.into()
} else {
- let parent_id = self
- .taffy
+ self.taffy
// This is safe because LayoutId is repr(transparent) to taffy::tree::NodeId.
.new_with_children(taffy_style, unsafe {
std::mem::transmute::<&[LayoutId], &[taffy::NodeId]>(children)
})
.expect(EXPECT_MESSAGE)
- .into();
- parent_id
- };
- layout_id
+ .into()
+ }
}
pub fn request_measured_layout(
@@ -91,8 +89,7 @@ impl TaffyLayoutEngine {
) -> LayoutId {
let taffy_style = style.to_taffy(rem_size);
- let layout_id = self
- .taffy
+ self.taffy
.new_leaf_with_context(
taffy_style,
NodeContext {
@@ -100,8 +97,7 @@ impl TaffyLayoutEngine {
},
)
.expect(EXPECT_MESSAGE)
- .into();
- layout_id
+ .into()
}
// Used to understand performance
@@ -168,7 +164,6 @@ impl TaffyLayoutEngine {
// for (a, b) in self.get_edges(id)? {
// println!("N{} --> N{}", u64::from(a), u64::from(b));
// }
- // println!("");
//
if !self.computed_layouts.insert(id) {
@@ -64,6 +64,9 @@ pub fn run_test(
if attempt < max_retries {
println!("attempt {} failed, retrying", attempt);
attempt += 1;
+ // The panic payload might itself trigger an unwind on drop:
+ // https://doc.rust-lang.org/std/panic/fn.catch_unwind.html#notes
+ std::mem::forget(error);
} else {
if is_multiple_runs {
eprintln!("failing seed: {}", seed);
@@ -351,7 +351,7 @@ impl WindowTextSystem {
///
/// Note that this method can only shape a single line of text. It will panic
/// if the text contains newlines. If you need to shape multiple lines of text,
- /// use `TextLayout::shape_text` instead.
+ /// use [`Self::shape_text`] instead.
pub fn shape_line(
&self,
text: SharedString,
@@ -517,7 +517,7 @@ impl WindowTextSystem {
/// Layout the given line of text, at the given font_size.
/// Subsets of the line can be styled independently with the `runs` parameter.
- /// Generally, you should prefer to use `TextLayout::shape_line` instead, which
+ /// Generally, you should prefer to use [`Self::shape_line`] instead, which
/// can be painted directly.
pub fn layout_line<Text>(
&self,
@@ -668,7 +668,7 @@ impl Display for FontStyle {
}
}
-/// A styled run of text, for use in [`TextLayout`].
+/// A styled run of text, for use in [`crate::TextLayout`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TextRun {
/// A number of utf8 bytes
@@ -694,7 +694,7 @@ impl TextRun {
}
}
-/// An identifier for a specific glyph, as returned by [`TextSystem::layout_line`].
+/// An identifier for a specific glyph, as returned by [`WindowTextSystem::layout_line`].
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
#[repr(C)]
pub struct GlyphId(pub(crate) u32);
@@ -44,7 +44,7 @@ impl LineWrapper {
let mut prev_c = '\0';
let mut index = 0;
let mut candidates = fragments
- .into_iter()
+ .iter()
.flat_map(move |fragment| fragment.wrap_boundary_candidates())
.peekable();
iter::from_fn(move || {
@@ -181,7 +181,7 @@ impl LineWrapper {
matches!(c, '\u{0400}'..='\u{04FF}') ||
// Some other known special characters that should be treated as word characters,
// e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, `2^3`, `a~b`, etc.
- matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',') ||
+ matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '!' | ';' | '*') ||
// Characters that used in URL, e.g. `https://github.com/zed-industries/zed?a=1&b=2` for better wrapping a long URL.
matches!(c, '/' | ':' | '?' | '&' | '=') ||
// `⋯` character is special used in Zed, to keep this at the end of the line.
@@ -58,13 +58,7 @@ pub trait FluentBuilder {
where
Self: Sized,
{
- self.map(|this| {
- if let Some(_) = option {
- this
- } else {
- then(this)
- }
- })
+ self.map(|this| if option.is_some() { this } else { then(this) })
}
}
@@ -105,9 +99,9 @@ impl<T: Future> Future for WithTimeout<T> {
fn poll(self: Pin<&mut Self>, cx: &mut task::Context) -> task::Poll<Self::Output> {
// SAFETY: the fields of Timeout are private and we never move the future ourselves
// And its already pinned since we are being polled (all futures need to be pinned to be polled)
- let this = unsafe { self.get_unchecked_mut() };
- let future = unsafe { Pin::new_unchecked(&mut this.future) };
- let timer = unsafe { Pin::new_unchecked(&mut this.timer) };
+ let this = unsafe { &raw mut *self.get_unchecked_mut() };
+ let future = unsafe { Pin::new_unchecked(&mut (*this).future) };
+ let timer = unsafe { Pin::new_unchecked(&mut (*this).timer) };
if let task::Poll::Ready(output) = future.poll(cx) {
task::Poll::Ready(Ok(output))
@@ -12,11 +12,11 @@ use crate::{
PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, PromptButton, PromptLevel, Quad,
Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Replay, ResizeEdge,
SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS, ScaledPixels, Scene, Shadow, SharedString, Size,
- StrikethroughStyle, Style, SubscriberSet, Subscription, TabHandles, TaffyLayoutEngine, Task,
- TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle,
- WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations,
- WindowOptions, WindowParams, WindowTextSystem, point, prelude::*, px, rems, size,
- transparent_black,
+ StrikethroughStyle, Style, SubscriberSet, Subscription, SystemWindowTab,
+ SystemWindowTabController, TabHandles, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement,
+ TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance,
+ WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
+ point, prelude::*, px, rems, size, transparent_black,
};
use anyhow::{Context as _, Result, anyhow};
use collections::{FxHashMap, FxHashSet};
@@ -585,7 +585,7 @@ pub enum HitboxBehavior {
/// if phase == DispatchPhase::Capture && hitbox.is_hovered(window) {
/// cx.stop_propagation();
/// }
- /// }
+ /// })
/// ```
///
/// This has effects beyond event handling - any use of hitbox checking, such as hover
@@ -605,11 +605,11 @@ pub enum HitboxBehavior {
/// bubble-phase handler for every mouse event type **except** `ScrollWheelEvent`:
///
/// ```
- /// window.on_mouse_event(move |_: &EveryMouseEventTypeExceptScroll, phase, window, _cx| {
+ /// window.on_mouse_event(move |_: &EveryMouseEventTypeExceptScroll, phase, window, cx| {
/// if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) {
/// cx.stop_propagation();
/// }
- /// }
+ /// })
/// ```
///
/// See the documentation of [`Hitbox::is_hovered`] for details of why `ScrollWheelEvent` is
@@ -939,11 +939,15 @@ impl Window {
show,
kind,
is_movable,
+ is_resizable,
+ is_minimizable,
display_id,
window_background,
app_id,
window_min_size,
window_decorations,
+ #[cfg_attr(not(target_os = "macos"), allow(unused_variables))]
+ tabbing_identifier,
} = options;
let bounds = window_bounds
@@ -956,12 +960,23 @@ impl Window {
titlebar,
kind,
is_movable,
+ is_resizable,
+ is_minimizable,
focus,
show,
display_id,
window_min_size,
+ #[cfg(target_os = "macos")]
+ tabbing_identifier,
},
)?;
+
+ let tab_bar_visible = platform_window.tab_bar_visible();
+ SystemWindowTabController::init_visible(cx, tab_bar_visible);
+ if let Some(tabs) = platform_window.tabbed_windows() {
+ SystemWindowTabController::add_tab(cx, handle.window_id(), tabs);
+ }
+
let display_id = platform_window.display().map(|display| display.id());
let sprite_atlas = platform_window.sprite_atlas();
let mouse_position = platform_window.mouse_position();
@@ -991,9 +1006,13 @@ impl Window {
}
platform_window.on_close(Box::new({
+ let window_id = handle.window_id();
let mut cx = cx.to_async();
move || {
let _ = handle.update(&mut cx, |_, window, _| window.remove_window());
+ let _ = cx.update(|cx| {
+ SystemWindowTabController::remove_tab(cx, window_id);
+ });
}
}));
platform_window.on_request_frame(Box::new({
@@ -1082,7 +1101,11 @@ impl Window {
.activation_observers
.clone()
.retain(&(), |callback| callback(window, cx));
+
+ window.bounds_changed(cx);
window.refresh();
+
+ SystemWindowTabController::update_last_active(cx, window.handle.id);
})
.log_err();
}
@@ -1123,6 +1146,57 @@ impl Window {
.unwrap_or(None)
})
});
+ platform_window.on_move_tab_to_new_window({
+ let mut cx = cx.to_async();
+ Box::new(move || {
+ handle
+ .update(&mut cx, |_, _window, cx| {
+ SystemWindowTabController::move_tab_to_new_window(cx, handle.window_id());
+ })
+ .log_err();
+ })
+ });
+ platform_window.on_merge_all_windows({
+ let mut cx = cx.to_async();
+ Box::new(move || {
+ handle
+ .update(&mut cx, |_, _window, cx| {
+ SystemWindowTabController::merge_all_windows(cx, handle.window_id());
+ })
+ .log_err();
+ })
+ });
+ platform_window.on_select_next_tab({
+ let mut cx = cx.to_async();
+ Box::new(move || {
+ handle
+ .update(&mut cx, |_, _window, cx| {
+ SystemWindowTabController::select_next_tab(cx, handle.window_id());
+ })
+ .log_err();
+ })
+ });
+ platform_window.on_select_previous_tab({
+ let mut cx = cx.to_async();
+ Box::new(move || {
+ handle
+ .update(&mut cx, |_, _window, cx| {
+ SystemWindowTabController::select_previous_tab(cx, handle.window_id())
+ })
+ .log_err();
+ })
+ });
+ platform_window.on_toggle_tab_bar({
+ let mut cx = cx.to_async();
+ Box::new(move || {
+ handle
+ .update(&mut cx, |_, window, cx| {
+ let tab_bar_visible = window.platform_window.tab_bar_visible();
+ SystemWindowTabController::set_visible(cx, tab_bar_visible);
+ })
+ .log_err();
+ })
+ });
if let Some(app_id) = app_id {
platform_window.set_app_id(&app_id);
@@ -1835,7 +1909,7 @@ impl Window {
}
/// Produces a new frame and assigns it to `rendered_frame`. To actually show
- /// the contents of the new [Scene], use [present].
+ /// the contents of the new [`Scene`], use [`Self::present`].
#[profiling::function]
pub fn draw(&mut self, cx: &mut App) -> ArenaClearNeeded {
self.invalidate_entities();
@@ -2377,7 +2451,7 @@ impl Window {
/// Perform prepaint on child elements in a "retryable" manner, so that any side effects
/// of prepaints can be discarded before prepainting again. This is used to support autoscroll
/// where we need to prepaint children to detect the autoscroll bounds, then adjust the
- /// element offset and prepaint again. See [`List`] for an example. This method should only be
+ /// element offset and prepaint again. See [`crate::List`] for an example. This method should only be
/// called during the prepaint phase of element drawing.
pub fn transact<T, U>(&mut self, f: impl FnOnce(&mut Self) -> Result<T, U>) -> Result<T, U> {
self.invalidator.debug_assert_prepaint();
@@ -2402,9 +2476,9 @@ impl Window {
result
}
- /// When you call this method during [`prepaint`], containing elements will attempt to
+ /// When you call this method during [`Element::prepaint`], containing elements will attempt to
/// scroll to cause the specified bounds to become visible. When they decide to autoscroll, they will call
- /// [`prepaint`] again with a new set of bounds. See [`List`] for an example of an element
+ /// [`Element::prepaint`] again with a new set of bounds. See [`crate::List`] for an example of an element
/// that supports this method being called on the elements it contains. This method should only be
/// called during the prepaint phase of element drawing.
pub fn request_autoscroll(&mut self, bounds: Bounds<Pixels>) {
@@ -2412,8 +2486,8 @@ impl Window {
self.requested_autoscroll = Some(bounds);
}
- /// This method can be called from a containing element such as [`List`] to support the autoscroll behavior
- /// described in [`request_autoscroll`].
+ /// This method can be called from a containing element such as [`crate::List`] to support the autoscroll behavior
+ /// described in [`Self::request_autoscroll`].
pub fn take_autoscroll(&mut self) -> Option<Bounds<Pixels>> {
self.invalidator.debug_assert_prepaint();
self.requested_autoscroll.take()
@@ -2453,7 +2527,7 @@ impl Window {
/// time.
pub fn get_asset<A: Asset>(&mut self, source: &A::Source, cx: &mut App) -> Option<A::Output> {
let (task, _) = cx.fetch_asset::<A>(source);
- task.clone().now_or_never()
+ task.now_or_never()
}
/// Obtain the current element offset. This method should only be called during the
/// prepaint phase of element drawing.
@@ -2741,7 +2815,7 @@ impl Window {
/// Paint one or more quads into the scene for the next frame at the current stacking context.
/// Quads are colored rectangular regions with an optional background, border, and corner radius.
- /// see [`fill`](crate::fill), [`outline`](crate::outline), and [`quad`](crate::quad) to construct this type.
+ /// see [`fill`], [`outline`], and [`quad`] to construct this type.
///
/// This method should only be called as part of the paint phase of element drawing.
///
@@ -3044,7 +3118,7 @@ impl Window {
let tile = self
.sprite_atlas
- .get_or_insert_with(¶ms.clone().into(), &mut || {
+ .get_or_insert_with(¶ms.into(), &mut || {
Ok(Some((
data.size(frame_index),
Cow::Borrowed(
@@ -3731,7 +3805,7 @@ impl Window {
self.dispatch_keystroke_observers(
event,
Some(binding.action),
- match_result.context_stack.clone(),
+ match_result.context_stack,
cx,
);
self.pending_input_changed(cx);
@@ -4022,9 +4096,7 @@ impl Window {
self.on_next_frame(|window, cx| {
if let Some(mut input_handler) = window.platform_window.take_input_handler() {
if let Some(bounds) = input_handler.selected_bounds(window, cx) {
- window
- .platform_window
- .update_ime_position(bounds.scale(window.scale_factor()));
+ window.platform_window.update_ime_position(bounds);
}
window.platform_window.set_input_handler(input_handler);
}
@@ -4275,11 +4347,54 @@ impl Window {
}
/// Perform titlebar double-click action.
- /// This is MacOS specific.
+ /// This is macOS specific.
pub fn titlebar_double_click(&self) {
self.platform_window.titlebar_double_click();
}
+ /// Gets the window's title at the platform level.
+ /// This is macOS specific.
+ pub fn window_title(&self) -> String {
+ self.platform_window.get_title()
+ }
+
+ /// Returns a list of all tabbed windows and their titles.
+ /// This is macOS specific.
+ pub fn tabbed_windows(&self) -> Option<Vec<SystemWindowTab>> {
+ self.platform_window.tabbed_windows()
+ }
+
+ /// Returns the tab bar visibility.
+ /// This is macOS specific.
+ pub fn tab_bar_visible(&self) -> bool {
+ self.platform_window.tab_bar_visible()
+ }
+
+ /// Merges all open windows into a single tabbed window.
+ /// This is macOS specific.
+ pub fn merge_all_windows(&self) {
+ self.platform_window.merge_all_windows()
+ }
+
+ /// Moves the tab to a new containing window.
+ /// This is macOS specific.
+ pub fn move_tab_to_new_window(&self) {
+ self.platform_window.move_tab_to_new_window()
+ }
+
+ /// Shows or hides the window tab overview.
+ /// This is macOS specific.
+ pub fn toggle_window_tab_overview(&self) {
+ self.platform_window.toggle_window_tab_overview()
+ }
+
+ /// Sets the tabbing identifier for the window.
+ /// This is macOS specific.
+ pub fn set_tabbing_identifier(&self, tabbing_identifier: Option<String>) {
+ self.platform_window
+ .set_tabbing_identifier(tabbing_identifier)
+ }
+
/// Toggles the inspector mode on this window.
#[cfg(any(feature = "inspector", debug_assertions))]
pub fn toggle_inspector(&mut self, cx: &mut App) {
@@ -4442,7 +4557,7 @@ impl Window {
if let Some((_, inspector_id)) =
self.hovered_inspector_hitbox(inspector, &self.rendered_frame)
{
- inspector.set_active_element_id(inspector_id.clone(), self);
+ inspector.set_active_element_id(inspector_id, self);
}
}
});
@@ -4468,6 +4583,13 @@ impl Window {
}
None
}
+
+ /// For testing: set the current modifier keys state.
+ /// This does not generate any events.
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn set_modifiers(&mut self, modifiers: Modifiers) {
+ self.modifiers = modifiers;
+ }
}
// #[derive(Clone, Copy, Eq, PartialEq, Hash)]
@@ -4583,7 +4705,7 @@ impl<V: 'static + Render> WindowHandle<V> {
where
C: AppContext,
{
- cx.read_window(self, |root_view, _cx| root_view.clone())
+ cx.read_window(self, |root_view, _cx| root_view)
}
/// Check if this window is 'active'.
@@ -4697,7 +4819,7 @@ impl HasDisplayHandle for Window {
}
}
-/// An identifier for an [`Element`](crate::Element).
+/// An identifier for an [`Element`].
///
/// Can be constructed with a string, a number, or both, as well
/// as other internal representations.
@@ -16,6 +16,13 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
let mut deprecated = None;
let mut doc_str: Option<String> = None;
+ /*
+ *
+ * #[action()]
+ * struct Foo {
+ *     bar: bool // is bar considered an attribute?
+ * }
+ */
for attr in &input.attrs {
if attr.path().is_ident("action") {
attr.parse_nested_meta(|meta| {
@@ -172,7 +172,7 @@ pub fn box_shadow_style_methods(input: TokenStream) -> TokenStream {
/// - `#[gpui::test(iterations = 5)]` runs five times, providing as seed the values in the range `0..5`.
/// - `#[gpui::test(retries = 3)]` runs up to four times if it fails to try and make it pass.
/// - `#[gpui::test(on_failure = "crate::test::report_failure")]` will call the specified function after the
-/// tests fail so that you can write out more detail about the failure.
+/// tests fail so that you can write out more detail about the failure.
///
/// You can combine `iterations = ...` with `seeds(...)`:
/// - `#[gpui::test(iterations = 5, seed = 10)]` is equivalent to `#[gpui::test(seeds(0, 1, 2, 3, 4, 10))]`.
@@ -86,7 +86,7 @@ impl Parse for Args {
Ok(Args {
seeds,
max_retries,
- max_iterations: max_iterations,
+ max_iterations,
on_failure_fn_name,
})
}
@@ -34,13 +34,6 @@ trait Transform: Clone {
/// Adds one to the value
fn add_one(self) -> Self;
-
- /// cfg attributes are respected
- #[cfg(all())]
- fn cfg_included(self) -> Self;
-
- #[cfg(any())]
- fn cfg_omitted(self) -> Self;
}
#[derive(Debug, Clone, PartialEq)]
@@ -70,10 +63,6 @@ impl Transform for Number {
fn add_one(self) -> Self {
Number(self.0 + 1)
}
-
- fn cfg_included(self) -> Self {
- Number(self.0)
- }
}
#[test]
@@ -83,14 +72,13 @@ fn test_derive_inspector_reflection() {
// Get all methods that match the pattern fn(self) -> Self or fn(mut self) -> Self
let methods = methods::<Number>();
- assert_eq!(methods.len(), 6);
+ assert_eq!(methods.len(), 5);
let method_names: Vec<_> = methods.iter().map(|m| m.name).collect();
assert!(method_names.contains(&"double"));
assert!(method_names.contains(&"triple"));
assert!(method_names.contains(&"increment"));
assert!(method_names.contains(&"quadruple"));
assert!(method_names.contains(&"add_one"));
- assert!(method_names.contains(&"cfg_included"));
// Invoke methods by name
let num = Number(5);
@@ -106,9 +94,7 @@ fn test_derive_inspector_reflection() {
.invoke(num.clone());
assert_eq!(incremented, Number(6));
- let quadrupled = find_method::<Number>("quadruple")
- .unwrap()
- .invoke(num.clone());
+ let quadrupled = find_method::<Number>("quadruple").unwrap().invoke(num);
assert_eq!(quadrupled, Number(20));
// Try to invoke a non-existent method
@@ -13,6 +13,7 @@ path = "src/gpui_tokio.rs"
doctest = false
[dependencies]
+anyhow.workspace = true
util.workspace = true
gpui.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
@@ -52,6 +52,28 @@ impl Tokio {
})
}
+ /// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task
+ /// Note that the Tokio task will be cancelled if the GPUI task is dropped
+ pub fn spawn_result<C, Fut, R>(cx: &C, f: Fut) -> C::Result<Task<anyhow::Result<R>>>
+ where
+ C: AppContext,
+ Fut: Future<Output = anyhow::Result<R>> + Send + 'static,
+ R: Send + 'static,
+ {
+ cx.read_global(|tokio: &GlobalTokio, cx| {
+ let join_handle = tokio.runtime.spawn(f);
+ let abort_handle = join_handle.abort_handle();
+ let cancel = defer(move || {
+ abort_handle.abort();
+ });
+ cx.background_spawn(async move {
+ let result = join_handle.await?;
+ drop(cancel);
+ result
+ })
+ })
+ }
+
pub fn handle(cx: &App) -> tokio::runtime::Handle {
GlobalTokio::global(cx).runtime.handle().clone()
}
@@ -40,7 +40,7 @@ impl AsyncBody {
}
pub fn from_bytes(bytes: Bytes) -> Self {
- Self(Inner::Bytes(Cursor::new(bytes.clone())))
+ Self(Inner::Bytes(Cursor::new(bytes)))
}
}
@@ -435,8 +435,7 @@ impl HttpClient for FakeHttpClient {
&self,
req: Request<AsyncBody>,
) -> BoxFuture<'static, anyhow::Result<Response<AsyncBody>>> {
- let future = (self.handler.lock().as_ref().unwrap())(req);
- future
+ ((self.handler.lock().as_ref().unwrap())(req)) as _
}
fn user_agent(&self) -> Option<&HeaderValue> {
@@ -34,6 +34,7 @@ pub enum IconName {
ArrowRightLeft,
ArrowUp,
ArrowUpRight,
+ Attach,
AudioOff,
AudioOn,
Backspace,
@@ -145,6 +146,7 @@ pub enum IconName {
Library,
LineHeight,
ListCollapse,
+ ListFilter,
ListTodo,
ListTree,
ListX,
@@ -155,6 +157,7 @@ pub enum IconName {
Maximize,
Menu,
MenuAlt,
+ MenuAltTemp,
Mic,
MicMute,
Minimize,
@@ -163,6 +166,7 @@ pub enum IconName {
PageDown,
PageUp,
Pencil,
+ PencilUnavailable,
Person,
Pin,
PlayOutlined,
@@ -212,6 +216,7 @@ pub enum IconName {
Tab,
Terminal,
TerminalAlt,
+ TerminalGhost,
TextSnippet,
TextThread,
Thread,
@@ -245,6 +250,8 @@ pub enum IconName {
Warning,
WholeWord,
XCircle,
+ XCircleFilled,
+ ZedAgent,
ZedAssistant,
ZedBurnMode,
ZedBurnModeOn,
@@ -401,12 +401,19 @@ pub fn init(cx: &mut App) {
mod persistence {
use std::path::PathBuf;
- use db::{define_connection, query, sqlez_macros::sql};
+ use db::{
+ query,
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+ };
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
- define_connection! {
- pub static ref IMAGE_VIEWER: ImageViewerDb<WorkspaceDb> =
- &[sql!(
+ pub struct ImageViewerDb(ThreadSafeConnection);
+
+ impl Domain for ImageViewerDb {
+ const NAME: &str = stringify!(ImageViewerDb);
+
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE image_viewers (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -417,9 +424,11 @@ mod persistence {
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
- )];
+ )];
}
+ db::static_connection!(IMAGE_VIEWER, ImageViewerDb, [WorkspaceDb]);
+
impl ImageViewerDb {
query! {
pub async fn save_image_path(
@@ -1,10 +1,11 @@
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
/// The settings for the image viewer.
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default)]
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default, SettingsUi, SettingsKey)]
+#[settings_key(key = "image_viewer")]
pub struct ImageViewerSettings {
/// The unit to use for displaying image file sizes.
///
@@ -24,8 +25,6 @@ pub enum ImageFileSizeUnit {
}
impl Settings for ImageViewerSettings {
- const KEY: Option<&'static str> = Some("image_viewer");
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -24,6 +24,7 @@ serde_json_lenient.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
+util_macros.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
@@ -14,7 +14,10 @@ use language::{
DiagnosticSeverity, LanguageServerId, Point, ToOffset as _, ToPoint as _,
};
use project::lsp_store::CompletionDocumentation;
-use project::{Completion, CompletionResponse, CompletionSource, Project, ProjectPath};
+use project::{
+ Completion, CompletionDisplayOptions, CompletionResponse, CompletionSource, Project,
+ ProjectPath,
+};
use std::fmt::Write as _;
use std::ops::Range;
use std::path::Path;
@@ -25,7 +28,7 @@ use util::split_str_with_ranges;
/// Path used for unsaved buffer that contains style json. To support the json language server, this
/// matches the name used in the generated schemas.
-const ZED_INSPECTOR_STYLE_JSON: &str = "/zed-inspector-style.json";
+const ZED_INSPECTOR_STYLE_JSON: &str = util_macros::path!("/zed-inspector-style.json");
pub(crate) struct DivInspector {
state: State,
@@ -93,8 +96,8 @@ impl DivInspector {
Ok((json_style_buffer, rust_style_buffer)) => {
this.update_in(cx, |this, window, cx| {
this.state = State::BuffersLoaded {
- json_style_buffer: json_style_buffer,
- rust_style_buffer: rust_style_buffer,
+ json_style_buffer,
+ rust_style_buffer,
};
// Initialize editors immediately instead of waiting for
@@ -200,8 +203,8 @@ impl DivInspector {
cx.subscribe_in(&json_style_editor, window, {
let id = id.clone();
let rust_style_buffer = rust_style_buffer.clone();
- move |this, editor, event: &EditorEvent, window, cx| match event {
- EditorEvent::BufferEdited => {
+ move |this, editor, event: &EditorEvent, window, cx| {
+ if event == &EditorEvent::BufferEdited {
let style_json = editor.read(cx).text(cx);
match serde_json_lenient::from_str_lenient::<StyleRefinement>(&style_json) {
Ok(new_style) => {
@@ -243,7 +246,6 @@ impl DivInspector {
Err(err) => this.json_style_error = Some(err.to_string().into()),
}
}
- _ => {}
}
})
.detach();
@@ -251,11 +253,10 @@ impl DivInspector {
cx.subscribe(&rust_style_editor, {
let json_style_buffer = json_style_buffer.clone();
let rust_style_buffer = rust_style_buffer.clone();
- move |this, _editor, event: &EditorEvent, cx| match event {
- EditorEvent::BufferEdited => {
+ move |this, _editor, event: &EditorEvent, cx| {
+ if let EditorEvent::BufferEdited = event {
this.update_json_style_from_rust(&json_style_buffer, &rust_style_buffer, cx);
}
- _ => {}
}
})
.detach();
@@ -271,23 +272,19 @@ impl DivInspector {
}
fn reset_style(&mut self, cx: &mut App) {
- match &self.state {
- State::Ready {
- rust_style_buffer,
- json_style_buffer,
- ..
- } => {
- if let Err(err) = self.reset_style_editors(
- &rust_style_buffer.clone(),
- &json_style_buffer.clone(),
- cx,
- ) {
- self.json_style_error = Some(format!("{err}").into());
- } else {
- self.json_style_error = None;
- }
+ if let State::Ready {
+ rust_style_buffer,
+ json_style_buffer,
+ ..
+ } = &self.state
+ {
+ if let Err(err) =
+ self.reset_style_editors(&rust_style_buffer.clone(), &json_style_buffer.clone(), cx)
+ {
+ self.json_style_error = Some(format!("{err}").into());
+ } else {
+ self.json_style_error = None;
}
- _ => {}
}
}
@@ -670,6 +667,7 @@ impl CompletionProvider for RustStyleCompletionProvider {
confirm: None,
})
.collect(),
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}]))
}
@@ -50,16 +50,13 @@ impl RealJujutsuRepository {
impl JujutsuRepository for RealJujutsuRepository {
fn list_bookmarks(&self) -> Vec<Bookmark> {
- let bookmarks = self
- .repository
+ self.repository
.view()
.bookmarks()
.map(|(ref_name, _target)| Bookmark {
ref_name: ref_name.as_str().to_string().into(),
})
- .collect();
-
- bookmarks
+ .collect()
}
}
@@ -5,7 +5,7 @@ use editor::{Editor, SelectionEffects};
use gpui::{App, AppContext as _, Context, Window, actions};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use std::{
fs::OpenOptions,
path::{Path, PathBuf},
@@ -22,7 +22,8 @@ actions!(
);
/// Settings specific to journaling
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "journal")]
pub struct JournalSettings {
/// The path of the directory where journal entries are stored.
///
@@ -52,8 +53,6 @@ pub enum HourFormat {
}
impl settings::Settings for JournalSettings {
- const KEY: Option<&'static str> = Some("journal");
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -123,7 +122,7 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap
}
let app_state = workspace.app_state().clone();
- let view_snapshot = workspace.weak_handle().clone();
+ let view_snapshot = workspace.weak_handle();
window
.spawn(cx, async move |cx| {
@@ -195,11 +194,9 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap
}
fn journal_dir(path: &str) -> Option<PathBuf> {
- let expanded_journal_dir = shellexpand::full(path) //TODO handle this better
+ shellexpand::full(path) //TODO handle this better
.ok()
- .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal"));
-
- expanded_journal_dir
+ .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal"))
}
fn heading_entry(now: NaiveTime, hour_format: &Option<HourFormat>) -> String {
@@ -0,0 +1,53 @@
+[package]
+name = "keymap_editor"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/keymap_editor.rs"
+
+[dependencies]
+anyhow.workspace = true
+collections.workspace = true
+command_palette.workspace = true
+component.workspace = true
+db.workspace = true
+editor.workspace = true
+fs.workspace = true
+fuzzy.workspace = true
+gpui.workspace = true
+itertools.workspace = true
+language.workspace = true
+log.workspace = true
+menu.workspace = true
+notifications.workspace = true
+paths.workspace = true
+project.workspace = true
+search.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+settings.workspace = true
+telemetry.workspace = true
+tempfile.workspace = true
+theme.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-rust.workspace = true
+ui.workspace = true
+ui_input.workspace = true
+util.workspace = true
+vim.workspace = true
+workspace-hack.workspace = true
+workspace.workspace = true
+zed_actions.workspace = true
+
+[dev-dependencies]
+db = { workspace = true, features = ["test-support"] }
+fs = { workspace = true, features = ["test-support"] }
+gpui = { workspace = true, features = ["test-support"] }
+project = { workspace = true, features = ["test-support"] }
+workspace = { workspace = true, features = ["test-support"] }
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -5,6 +5,8 @@ use std::{
time::Duration,
};
+mod ui_components;
+
use anyhow::{Context as _, anyhow};
use collections::{HashMap, HashSet};
use editor::{CompletionProvider, Editor, EditorEvent};
@@ -12,13 +14,15 @@ use fs::Fs;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
Action, AppContext as _, AsyncApp, Axis, ClickEvent, Context, DismissEvent, Entity,
- EventEmitter, FocusHandle, Focusable, Global, IsZero, KeyContext, Keystroke, MouseButton,
- Point, ScrollStrategy, ScrollWheelEvent, Stateful, StyledText, Subscription, Task,
- TextStyleRefinement, WeakEntity, actions, anchored, deferred, div,
+ EventEmitter, FocusHandle, Focusable, Global, IsZero,
+ KeyBindingContextPredicate::{And, Descendant, Equal, Identifier, Not, NotEqual, Or},
+ KeyContext, KeybindingKeystroke, MouseButton, PlatformKeyboardMapper, Point, ScrollStrategy,
+ ScrollWheelEvent, Stateful, StyledText, Subscription, Task, TextStyleRefinement, WeakEntity,
+ actions, anchored, deferred, div,
};
use language::{Language, LanguageConfig, ToOffset as _};
use notifications::status_toast::{StatusToast, ToastIcon};
-use project::Project;
+use project::{CompletionDisplayOptions, Project};
use settings::{BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets};
use ui::{
ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, IconButtonShape, Indicator,
@@ -32,8 +36,10 @@ use workspace::{
register_serializable_item,
};
+pub use ui_components::*;
+
use crate::{
- keybindings::persistence::KEYBINDING_EDITORS,
+ persistence::KEYBINDING_EDITORS,
ui_components::{
keystroke_input::{ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording},
table::{ColumnWidths, ResizeBehavior, Table, TableInteractionState},
@@ -172,7 +178,7 @@ impl FilterState {
#[derive(Debug, Default, PartialEq, Eq, Clone, Hash)]
struct ActionMapping {
- keystrokes: Vec<Keystroke>,
+ keystrokes: Vec<KeybindingKeystroke>,
context: Option<SharedString>,
}
@@ -182,15 +188,6 @@ struct KeybindConflict {
remaining_conflict_amount: usize,
}
-impl KeybindConflict {
- fn from_iter<'a>(mut indices: impl Iterator<Item = &'a ConflictOrigin>) -> Option<Self> {
- indices.next().map(|origin| Self {
- first_conflict_index: origin.index,
- remaining_conflict_amount: indices.count(),
- })
- }
-}
-
#[derive(Clone, Copy, PartialEq)]
struct ConflictOrigin {
override_source: KeybindSource,
@@ -238,13 +235,21 @@ impl ConflictOrigin {
#[derive(Default)]
struct ConflictState {
conflicts: Vec<Option<ConflictOrigin>>,
- keybind_mapping: HashMap<ActionMapping, Vec<ConflictOrigin>>,
+ keybind_mapping: ConflictKeybindMapping,
has_user_conflicts: bool,
}
+type ConflictKeybindMapping = HashMap<
+ Vec<KeybindingKeystroke>,
+ Vec<(
+ Option<gpui::KeyBindingContextPredicate>,
+ Vec<ConflictOrigin>,
+ )>,
+>;
+
impl ConflictState {
fn new(key_bindings: &[ProcessedBinding]) -> Self {
- let mut action_keybind_mapping: HashMap<_, Vec<ConflictOrigin>> = HashMap::default();
+ let mut action_keybind_mapping = ConflictKeybindMapping::default();
let mut largest_index = 0;
for (index, binding) in key_bindings
@@ -252,29 +257,48 @@ impl ConflictState {
.enumerate()
.flat_map(|(index, binding)| Some(index).zip(binding.keybind_information()))
{
- action_keybind_mapping
- .entry(binding.get_action_mapping())
- .or_default()
- .push(ConflictOrigin::new(binding.source, index));
+ let mapping = binding.get_action_mapping();
+ let predicate = mapping
+ .context
+ .and_then(|ctx| gpui::KeyBindingContextPredicate::parse(&ctx).ok());
+ let entry = action_keybind_mapping
+ .entry(mapping.keystrokes)
+ .or_default();
+ let origin = ConflictOrigin::new(binding.source, index);
+ if let Some((_, origins)) =
+ entry
+ .iter_mut()
+ .find(|(other_predicate, _)| match (&predicate, other_predicate) {
+ (None, None) => true,
+ (Some(a), Some(b)) => normalized_ctx_eq(a, b),
+ _ => false,
+ })
+ {
+ origins.push(origin);
+ } else {
+ entry.push((predicate, vec![origin]));
+ }
largest_index = index;
}
let mut conflicts = vec![None; largest_index + 1];
let mut has_user_conflicts = false;
- for indices in action_keybind_mapping.values_mut() {
- indices.sort_unstable_by_key(|origin| origin.override_source);
- let Some((fst, snd)) = indices.get(0).zip(indices.get(1)) else {
- continue;
- };
+ for entries in action_keybind_mapping.values_mut() {
+ for (_, indices) in entries.iter_mut() {
+ indices.sort_unstable_by_key(|origin| origin.override_source);
+ let Some((fst, snd)) = indices.get(0).zip(indices.get(1)) else {
+ continue;
+ };
- for origin in indices.iter() {
- conflicts[origin.index] =
- origin.get_conflict_with(if origin == fst { snd } else { fst })
- }
+ for origin in indices.iter() {
+ conflicts[origin.index] =
+ origin.get_conflict_with(if origin == fst { snd } else { fst })
+ }
- has_user_conflicts |= fst.override_source == KeybindSource::User
- && snd.override_source == KeybindSource::User;
+ has_user_conflicts |= fst.override_source == KeybindSource::User
+ && snd.override_source == KeybindSource::User;
+ }
}
Self {
@@ -289,15 +313,34 @@ impl ConflictState {
action_mapping: &ActionMapping,
keybind_idx: Option<usize>,
) -> Option<KeybindConflict> {
- self.keybind_mapping
- .get(action_mapping)
- .and_then(|indices| {
- KeybindConflict::from_iter(
- indices
+ let ActionMapping {
+ keystrokes,
+ context,
+ } = action_mapping;
+ let predicate = context
+ .as_deref()
+ .and_then(|ctx| gpui::KeyBindingContextPredicate::parse(&ctx).ok());
+ self.keybind_mapping.get(keystrokes).and_then(|entries| {
+ entries
+ .iter()
+ .find_map(|(other_predicate, indices)| {
+ match (&predicate, other_predicate) {
+ (None, None) => true,
+ (Some(pred), Some(other)) => normalized_ctx_eq(pred, other),
+ _ => false,
+ }
+ .then_some(indices)
+ })
+ .and_then(|indices| {
+ let mut indices = indices
.iter()
- .filter(|&conflict| Some(conflict.index) != keybind_idx),
- )
- })
+ .filter(|&conflict| Some(conflict.index) != keybind_idx);
+ indices.next().map(|origin| KeybindConflict {
+ first_conflict_index: origin.index,
+ remaining_conflict_amount: indices.count(),
+ })
+ })
+ })
}
fn conflict_for_idx(&self, idx: usize) -> Option<ConflictOrigin> {
@@ -375,12 +418,14 @@ impl Focusable for KeymapEditor {
}
}
/// Helper function to check if two keystroke sequences match exactly
-fn keystrokes_match_exactly(keystrokes1: &[Keystroke], keystrokes2: &[Keystroke]) -> bool {
+fn keystrokes_match_exactly(
+ keystrokes1: &[KeybindingKeystroke],
+ keystrokes2: &[KeybindingKeystroke],
+) -> bool {
keystrokes1.len() == keystrokes2.len()
- && keystrokes1
- .iter()
- .zip(keystrokes2)
- .all(|(k1, k2)| k1.key == k2.key && k1.modifiers == k2.modifiers)
+ && keystrokes1.iter().zip(keystrokes2).all(|(k1, k2)| {
+ k1.inner().key == k2.inner().key && k1.inner().modifiers == k2.inner().modifiers
+ })
}
impl KeymapEditor {
@@ -470,7 +515,7 @@ impl KeymapEditor {
self.filter_editor.read(cx).text(cx)
}
- fn current_keystroke_query(&self, cx: &App) -> Vec<Keystroke> {
+ fn current_keystroke_query(&self, cx: &App) -> Vec<KeybindingKeystroke> {
match self.search_mode {
SearchMode::KeyStroke { .. } => self.keystroke_editor.read(cx).keystrokes().to_vec(),
SearchMode::Normal => Default::default(),
@@ -491,7 +536,7 @@ impl KeymapEditor {
let keystroke_query = keystroke_query
.into_iter()
- .map(|keystroke| keystroke.unparse())
+ .map(|keystroke| keystroke.inner().unparse())
.collect::<Vec<String>>()
.join(" ");
@@ -515,7 +560,7 @@ impl KeymapEditor {
async fn update_matches(
this: WeakEntity<Self>,
action_query: String,
- keystroke_query: Vec<Keystroke>,
+ keystroke_query: Vec<KeybindingKeystroke>,
cx: &mut AsyncApp,
) -> anyhow::Result<()> {
let action_query = command_palette::normalize_action_query(&action_query);
@@ -564,13 +609,15 @@ impl KeymapEditor {
{
let query = &keystroke_query[query_cursor];
let keystroke = &keystrokes[keystroke_cursor];
- let matches =
- query.modifiers.is_subset_of(&keystroke.modifiers)
- && ((query.key.is_empty()
- || query.key == keystroke.key)
- && query.key_char.as_ref().is_none_or(
- |q_kc| q_kc == &keystroke.key,
- ));
+ let matches = query
+ .inner()
+ .modifiers
+ .is_subset_of(&keystroke.inner().modifiers)
+ && ((query.inner().key.is_empty()
+ || query.inner().key == keystroke.inner().key)
+ && query.inner().key_char.as_ref().is_none_or(
+ |q_kc| q_kc == &keystroke.inner().key,
+ ));
if matches {
found_count += 1;
query_cursor += 1;
@@ -621,8 +668,7 @@ impl KeymapEditor {
let key_bindings_ptr = cx.key_bindings();
let lock = key_bindings_ptr.borrow();
let key_bindings = lock.bindings();
- let mut unmapped_action_names =
- HashSet::from_iter(cx.all_action_names().into_iter().copied());
+ let mut unmapped_action_names = HashSet::from_iter(cx.all_action_names().iter().copied());
let action_documentation = cx.action_documentation();
let mut generator = KeymapFile::action_schema_generator();
let actions_with_schemas = HashSet::from_iter(
@@ -640,7 +686,7 @@ impl KeymapEditor {
.map(KeybindSource::from_meta)
.unwrap_or(KeybindSource::Unknown);
- let keystroke_text = ui::text_for_keystrokes(key_binding.keystrokes(), cx);
+ let keystroke_text = ui::text_for_keybinding_keystrokes(key_binding.keystrokes(), cx);
let ui_key_binding = ui::KeyBinding::new_from_gpui(key_binding.clone(), cx)
.vim_mode(source == KeybindSource::Vim);
@@ -1164,8 +1210,11 @@ impl KeymapEditor {
.read(cx)
.get_scrollbar_offset(Axis::Vertical),
));
- cx.spawn(async move |_, _| remove_keybinding(to_remove, &fs, tab_size).await)
- .detach_and_notify_err(window, cx);
+ let keyboard_mapper = cx.keyboard_mapper().clone();
+ cx.spawn(async move |_, _| {
+ remove_keybinding(to_remove, &fs, tab_size, keyboard_mapper.as_ref()).await
+ })
+ .detach_and_notify_err(window, cx);
}
fn copy_context_to_clipboard(
@@ -1183,8 +1232,8 @@ impl KeymapEditor {
return;
};
- telemetry::event!("Keybinding Context Copied", context = context.clone());
- cx.write_to_clipboard(gpui::ClipboardItem::new_string(context.clone()));
+ telemetry::event!("Keybinding Context Copied", context = context);
+ cx.write_to_clipboard(gpui::ClipboardItem::new_string(context));
}
fn copy_action_to_clipboard(
@@ -1200,8 +1249,8 @@ impl KeymapEditor {
return;
};
- telemetry::event!("Keybinding Action Copied", action = action.clone());
- cx.write_to_clipboard(gpui::ClipboardItem::new_string(action.clone()));
+ telemetry::event!("Keybinding Action Copied", action = action);
+ cx.write_to_clipboard(gpui::ClipboardItem::new_string(action));
}
fn toggle_conflict_filter(
@@ -1289,7 +1338,7 @@ struct HumanizedActionNameCache {
impl HumanizedActionNameCache {
fn new(cx: &App) -> Self {
- let cache = HashMap::from_iter(cx.all_action_names().into_iter().map(|&action_name| {
+ let cache = HashMap::from_iter(cx.all_action_names().iter().map(|&action_name| {
(
action_name,
command_palette::humanize_action_name(action_name).into(),
@@ -1384,7 +1433,7 @@ impl ProcessedBinding {
.map(|keybind| keybind.get_action_mapping())
}
- fn keystrokes(&self) -> Option<&[Keystroke]> {
+ fn keystrokes(&self) -> Option<&[KeybindingKeystroke]> {
self.ui_key_binding()
.map(|binding| binding.keystrokes.as_slice())
}
@@ -1465,7 +1514,7 @@ impl RenderOnce for KeybindContextString {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
match self {
KeybindContextString::Global => {
- muted_styled_text(KeybindContextString::GLOBAL.clone(), cx).into_any_element()
+ muted_styled_text(KeybindContextString::GLOBAL, cx).into_any_element()
}
KeybindContextString::Local(name, language) => {
SyntaxHighlightedText::new(name, language).into_any_element()
@@ -1749,7 +1798,7 @@ impl Render for KeymapEditor {
} else {
const NULL: SharedString =
SharedString::new_static("<null>");
- muted_styled_text(NULL.clone(), cx)
+ muted_styled_text(NULL, cx)
.into_any_element()
}
})
@@ -1857,18 +1906,15 @@ impl Render for KeymapEditor {
mouse_down_event: &gpui::MouseDownEvent,
window,
cx| {
- match mouse_down_event.button {
- MouseButton::Right => {
- this.select_index(
- row_index, None, window, cx,
- );
- this.create_context_menu(
- mouse_down_event.position,
- window,
- cx,
- );
- }
- _ => {}
+ if mouse_down_event.button == MouseButton::Right {
+ this.select_index(
+ row_index, None, window, cx,
+ );
+ this.create_context_menu(
+ mouse_down_event.position,
+ window,
+ cx,
+ );
}
},
))
@@ -2185,7 +2231,7 @@ impl KeybindingEditorModal {
Ok(action_arguments)
}
- fn validate_keystrokes(&self, cx: &App) -> anyhow::Result<Vec<Keystroke>> {
+ fn validate_keystrokes(&self, cx: &App) -> anyhow::Result<Vec<KeybindingKeystroke>> {
let new_keystrokes = self
.keybind_editor
.read_with(cx, |editor, _| editor.keystrokes().to_vec());
@@ -2214,12 +2260,10 @@ impl KeybindingEditorModal {
let fs = self.fs.clone();
let tab_size = cx.global::<settings::SettingsStore>().json_tab_size();
- let new_keystrokes = self
- .validate_keystrokes(cx)
- .map_err(InputError::error)?
- .into_iter()
- .map(remove_key_char)
- .collect::<Vec<_>>();
+ let mut new_keystrokes = self.validate_keystrokes(cx).map_err(InputError::error)?;
+ new_keystrokes
+ .iter_mut()
+ .for_each(|ks| ks.remove_key_char());
let new_context = self.validate_context(cx).map_err(InputError::error)?;
let new_action_args = self
@@ -2281,6 +2325,7 @@ impl KeybindingEditorModal {
}).unwrap_or(Ok(()))?;
let create = self.creating;
+ let keyboard_mapper = cx.keyboard_mapper().clone();
cx.spawn(async move |this, cx| {
let action_name = existing_keybind.action().name;
@@ -2293,6 +2338,7 @@ impl KeybindingEditorModal {
new_action_args.as_deref(),
&fs,
tab_size,
+ keyboard_mapper.as_ref(),
)
.await
{
@@ -2410,14 +2456,6 @@ impl KeybindingEditorModal {
}
}
-fn remove_key_char(Keystroke { modifiers, key, .. }: Keystroke) -> Keystroke {
- Keystroke {
- modifiers,
- key,
- ..Default::default()
- }
-}
-
impl Render for KeybindingEditorModal {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let theme = cx.theme().colors();
@@ -2873,6 +2911,7 @@ impl CompletionProvider for KeyContextCompletionProvider {
confirm: None,
})
.collect(),
+ display_options: CompletionDisplayOptions::default(),
is_incomplete: false,
}]))
}
@@ -2957,6 +2996,7 @@ async fn save_keybinding_update(
new_args: Option<&str>,
fs: &Arc<dyn Fs>,
tab_size: usize,
+ keyboard_mapper: &dyn PlatformKeyboardMapper,
) -> anyhow::Result<()> {
let keymap_contents = settings::KeymapFile::load_keymap_file(fs)
.await
@@ -2999,9 +3039,13 @@ async fn save_keybinding_update(
let (new_keybinding, removed_keybinding, source) = operation.generate_telemetry();
- let updated_keymap_contents =
- settings::KeymapFile::update_keybinding(operation, keymap_contents, tab_size)
- .map_err(|err| anyhow::anyhow!("Could not save updated keybinding: {}", err))?;
+ let updated_keymap_contents = settings::KeymapFile::update_keybinding(
+ operation,
+ keymap_contents,
+ tab_size,
+ keyboard_mapper,
+ )
+ .map_err(|err| anyhow::anyhow!("Could not save updated keybinding: {}", err))?;
fs.write(
paths::keymap_file().as_path(),
updated_keymap_contents.as_bytes(),
@@ -3022,6 +3066,7 @@ async fn remove_keybinding(
existing: ProcessedBinding,
fs: &Arc<dyn Fs>,
tab_size: usize,
+ keyboard_mapper: &dyn PlatformKeyboardMapper,
) -> anyhow::Result<()> {
let Some(keystrokes) = existing.keystrokes() else {
anyhow::bail!("Cannot remove a keybinding that does not exist");
@@ -3045,9 +3090,13 @@ async fn remove_keybinding(
};
let (new_keybinding, removed_keybinding, source) = operation.generate_telemetry();
- let updated_keymap_contents =
- settings::KeymapFile::update_keybinding(operation, keymap_contents, tab_size)
- .context("Failed to update keybinding")?;
+ let updated_keymap_contents = settings::KeymapFile::update_keybinding(
+ operation,
+ keymap_contents,
+ tab_size,
+ keyboard_mapper,
+ )
+ .context("Failed to update keybinding")?;
fs.write(
paths::keymap_file().as_path(),
updated_keymap_contents.as_bytes(),
@@ -3093,29 +3142,29 @@ fn collect_contexts_from_assets() -> Vec<SharedString> {
queue.push(root_context);
while let Some(context) = queue.pop() {
match context {
- gpui::KeyBindingContextPredicate::Identifier(ident) => {
+ Identifier(ident) => {
contexts.insert(ident);
}
- gpui::KeyBindingContextPredicate::Equal(ident_a, ident_b) => {
+ Equal(ident_a, ident_b) => {
contexts.insert(ident_a);
contexts.insert(ident_b);
}
- gpui::KeyBindingContextPredicate::NotEqual(ident_a, ident_b) => {
+ NotEqual(ident_a, ident_b) => {
contexts.insert(ident_a);
contexts.insert(ident_b);
}
- gpui::KeyBindingContextPredicate::Descendant(ctx_a, ctx_b) => {
+ Descendant(ctx_a, ctx_b) => {
queue.push(*ctx_a);
queue.push(*ctx_b);
}
- gpui::KeyBindingContextPredicate::Not(ctx) => {
+ Not(ctx) => {
queue.push(*ctx);
}
- gpui::KeyBindingContextPredicate::And(ctx_a, ctx_b) => {
+ And(ctx_a, ctx_b) => {
queue.push(*ctx_a);
queue.push(*ctx_b);
}
- gpui::KeyBindingContextPredicate::Or(ctx_a, ctx_b) => {
+ Or(ctx_a, ctx_b) => {
queue.push(*ctx_a);
queue.push(*ctx_b);
}
@@ -3130,6 +3179,127 @@ fn collect_contexts_from_assets() -> Vec<SharedString> {
contexts
}
+fn normalized_ctx_eq(
+ a: &gpui::KeyBindingContextPredicate,
+ b: &gpui::KeyBindingContextPredicate,
+) -> bool {
+ use gpui::KeyBindingContextPredicate::*;
+ return match (a, b) {
+ (Identifier(_), Identifier(_)) => a == b,
+ (Equal(a_left, a_right), Equal(b_left, b_right)) => {
+ (a_left == b_left && a_right == b_right) || (a_left == b_right && a_right == b_left)
+ }
+ (NotEqual(a_left, a_right), NotEqual(b_left, b_right)) => {
+ (a_left == b_left && a_right == b_right) || (a_left == b_right && a_right == b_left)
+ }
+ (Descendant(a_parent, a_child), Descendant(b_parent, b_child)) => {
+ normalized_ctx_eq(a_parent, b_parent) && normalized_ctx_eq(a_child, b_child)
+ }
+ (Not(a_expr), Not(b_expr)) => normalized_ctx_eq(a_expr, b_expr),
+ // Handle double negation: !(!a) == a
+ (Not(a_expr), b) if matches!(a_expr.as_ref(), Not(_)) => {
+ let Not(a_inner) = a_expr.as_ref() else {
+ unreachable!();
+ };
+ normalized_ctx_eq(b, a_inner)
+ }
+ (a, Not(b_expr)) if matches!(b_expr.as_ref(), Not(_)) => {
+ let Not(b_inner) = b_expr.as_ref() else {
+ unreachable!();
+ };
+ normalized_ctx_eq(a, b_inner)
+ }
+ (And(a_left, a_right), And(b_left, b_right))
+ if matches!(a_left.as_ref(), And(_, _))
+ || matches!(a_right.as_ref(), And(_, _))
+ || matches!(b_left.as_ref(), And(_, _))
+ || matches!(b_right.as_ref(), And(_, _)) =>
+ {
+ let mut a_operands = Vec::new();
+ flatten_and(a, &mut a_operands);
+ let mut b_operands = Vec::new();
+ flatten_and(b, &mut b_operands);
+ compare_operand_sets(&a_operands, &b_operands)
+ }
+ (And(a_left, a_right), And(b_left, b_right)) => {
+ (normalized_ctx_eq(a_left, b_left) && normalized_ctx_eq(a_right, b_right))
+ || (normalized_ctx_eq(a_left, b_right) && normalized_ctx_eq(a_right, b_left))
+ }
+ (Or(a_left, a_right), Or(b_left, b_right))
+ if matches!(a_left.as_ref(), Or(_, _))
+ || matches!(a_right.as_ref(), Or(_, _))
+ || matches!(b_left.as_ref(), Or(_, _))
+ || matches!(b_right.as_ref(), Or(_, _)) =>
+ {
+ let mut a_operands = Vec::new();
+ flatten_or(a, &mut a_operands);
+ let mut b_operands = Vec::new();
+ flatten_or(b, &mut b_operands);
+ compare_operand_sets(&a_operands, &b_operands)
+ }
+ (Or(a_left, a_right), Or(b_left, b_right)) => {
+ (normalized_ctx_eq(a_left, b_left) && normalized_ctx_eq(a_right, b_right))
+ || (normalized_ctx_eq(a_left, b_right) && normalized_ctx_eq(a_right, b_left))
+ }
+ _ => false,
+ };
+
+ fn flatten_and<'a>(
+ pred: &'a gpui::KeyBindingContextPredicate,
+ operands: &mut Vec<&'a gpui::KeyBindingContextPredicate>,
+ ) {
+ use gpui::KeyBindingContextPredicate::*;
+ match pred {
+ And(left, right) => {
+ flatten_and(left, operands);
+ flatten_and(right, operands);
+ }
+ _ => operands.push(pred),
+ }
+ }
+
+ fn flatten_or<'a>(
+ pred: &'a gpui::KeyBindingContextPredicate,
+ operands: &mut Vec<&'a gpui::KeyBindingContextPredicate>,
+ ) {
+ use gpui::KeyBindingContextPredicate::*;
+ match pred {
+ Or(left, right) => {
+ flatten_or(left, operands);
+ flatten_or(right, operands);
+ }
+ _ => operands.push(pred),
+ }
+ }
+
+ fn compare_operand_sets(
+ a: &[&gpui::KeyBindingContextPredicate],
+ b: &[&gpui::KeyBindingContextPredicate],
+ ) -> bool {
+ if a.len() != b.len() {
+ return false;
+ }
+
+ // For each operand in a, find a matching operand in b
+ let mut b_matched = vec![false; b.len()];
+ for a_operand in a {
+ let mut found = false;
+ for (b_idx, b_operand) in b.iter().enumerate() {
+ if !b_matched[b_idx] && normalized_ctx_eq(a_operand, b_operand) {
+ b_matched[b_idx] = true;
+ found = true;
+ break;
+ }
+ }
+ if !found {
+ return false;
+ }
+ }
+
+ true
+ }
+}
+
impl SerializableItem for KeymapEditor {
fn serialized_item_kind() -> &'static str {
"KeymapEditor"
@@ -3192,12 +3362,15 @@ impl SerializableItem for KeymapEditor {
}
mod persistence {
- use db::{define_connection, query, sqlez_macros::sql};
+ use db::{query, sqlez::domain::Domain, sqlez_macros::sql};
use workspace::WorkspaceDb;
- define_connection! {
- pub static ref KEYBINDING_EDITORS: KeybindingEditorDb<WorkspaceDb> =
- &[sql!(
+ pub struct KeybindingEditorDb(db::sqlez::thread_safe_connection::ThreadSafeConnection);
+
+ impl Domain for KeybindingEditorDb {
+ const NAME: &str = stringify!(KeybindingEditorDb);
+
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE keybinding_editors (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -3206,9 +3379,11 @@ mod persistence {
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
- )];
+ )];
}
+ db::static_connection!(KEYBINDING_EDITORS, KeybindingEditorDb, [WorkspaceDb]);
+
impl KeybindingEditorDb {
query! {
pub async fn save_keybinding_editor(
@@ -3232,3 +3407,152 @@ mod persistence {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn normalized_ctx_cmp() {
+ #[track_caller]
+ fn cmp(a: &str, b: &str) -> bool {
+ let a = gpui::KeyBindingContextPredicate::parse(a)
+ .expect("Failed to parse keybinding context a");
+ let b = gpui::KeyBindingContextPredicate::parse(b)
+ .expect("Failed to parse keybinding context b");
+ normalized_ctx_eq(&a, &b)
+ }
+
+ // Basic equality - identical expressions
+ assert!(cmp("a && b", "a && b"));
+ assert!(cmp("a || b", "a || b"));
+ assert!(cmp("a == b", "a == b"));
+ assert!(cmp("a != b", "a != b"));
+ assert!(cmp("a > b", "a > b"));
+ assert!(cmp("!a", "!a"));
+
+ // AND operator - associative/commutative
+ assert!(cmp("a && b", "b && a"));
+ assert!(cmp("a && b && c", "c && b && a"));
+ assert!(cmp("a && b && c", "b && a && c"));
+ assert!(cmp("a && b && c && d", "d && c && b && a"));
+
+ // OR operator - associative/commutative
+ assert!(cmp("a || b", "b || a"));
+ assert!(cmp("a || b || c", "c || b || a"));
+ assert!(cmp("a || b || c", "b || a || c"));
+ assert!(cmp("a || b || c || d", "d || c || b || a"));
+
+ // Equality operator - associative/commutative
+ assert!(cmp("a == b", "b == a"));
+ assert!(cmp("x == y", "y == x"));
+
+ // Inequality operator - associative/commutative
+ assert!(cmp("a != b", "b != a"));
+ assert!(cmp("x != y", "y != x"));
+
+ // Complex nested expressions with associative operators
+ assert!(cmp("(a && b) || c", "c || (a && b)"));
+ assert!(cmp("(a && b) || c", "c || (b && a)"));
+ assert!(cmp("(a || b) && c", "c && (a || b)"));
+ assert!(cmp("(a || b) && c", "c && (b || a)"));
+ assert!(cmp("(a && b) || (c && d)", "(c && d) || (a && b)"));
+ assert!(cmp("(a && b) || (c && d)", "(d && c) || (b && a)"));
+
+ // Multiple levels of nesting
+ assert!(cmp("((a && b) || c) && d", "d && ((a && b) || c)"));
+ assert!(cmp("((a && b) || c) && d", "d && (c || (b && a))"));
+ assert!(cmp("a && (b || (c && d))", "(b || (c && d)) && a"));
+ assert!(cmp("a && (b || (c && d))", "(b || (d && c)) && a"));
+
+ // Negation with associative operators
+ assert!(cmp("!a && b", "b && !a"));
+ assert!(cmp("!a || b", "b || !a"));
+ assert!(cmp("!(a && b) || c", "c || !(a && b)"));
+ assert!(cmp("!(a && b) || c", "c || !(b && a)"));
+
+ // Descendant operator (>) - NOT associative/commutative
+ assert!(cmp("a > b", "a > b"));
+ assert!(!cmp("a > b", "b > a"));
+ assert!(!cmp("a > b > c", "c > b > a"));
+ assert!(!cmp("a > b > c", "a > c > b"));
+
+ // Mixed operators with descendant
+ assert!(cmp("(a > b) && c", "c && (a > b)"));
+ assert!(!cmp("(a > b) && c", "c && (b > a)"));
+ assert!(cmp("(a > b) || (c > d)", "(c > d) || (a > b)"));
+ assert!(!cmp("(a > b) || (c > d)", "(b > a) || (d > c)"));
+
+ // Negative cases - different operators
+ assert!(!cmp("a && b", "a || b"));
+ assert!(!cmp("a == b", "a != b"));
+ assert!(!cmp("a && b", "a > b"));
+ assert!(!cmp("a || b", "a > b"));
+ assert!(!cmp("a == b", "a && b"));
+ assert!(!cmp("a != b", "a || b"));
+
+ // Negative cases - different operands
+ assert!(!cmp("a && b", "a && c"));
+ assert!(!cmp("a && b", "c && d"));
+ assert!(!cmp("a || b", "a || c"));
+ assert!(!cmp("a || b", "c || d"));
+ assert!(!cmp("a == b", "a == c"));
+ assert!(!cmp("a != b", "a != c"));
+ assert!(!cmp("a > b", "a > c"));
+ assert!(!cmp("a > b", "c > b"));
+
+ // Negative cases - with negation
+ assert!(!cmp("!a", "a"));
+ assert!(!cmp("!a && b", "a && b"));
+ assert!(!cmp("!(a && b)", "a && b"));
+ assert!(!cmp("!a || b", "a || b"));
+ assert!(!cmp("!(a || b)", "a || b"));
+
+ // Negative cases - complex expressions
+ assert!(!cmp("(a && b) || c", "(a || b) && c"));
+ assert!(!cmp("a && (b || c)", "a || (b && c)"));
+ assert!(!cmp("(a && b) || (c && d)", "(a || b) && (c || d)"));
+ assert!(!cmp("a > b && c", "a && b > c"));
+
+ // Edge cases - multiple same operands
+ assert!(cmp("a && a", "a && a"));
+ assert!(cmp("a || a", "a || a"));
+ assert!(cmp("a && a && b", "b && a && a"));
+ assert!(cmp("a || a || b", "b || a || a"));
+
+ // Edge cases - deeply nested
+ assert!(cmp(
+ "((a && b) || (c && d)) && ((e || f) && g)",
+ "((e || f) && g) && ((c && d) || (a && b))"
+ ));
+ assert!(cmp(
+ "((a && b) || (c && d)) && ((e || f) && g)",
+ "(g && (f || e)) && ((d && c) || (b && a))"
+ ));
+
+ // Edge cases - repeated patterns
+ assert!(cmp("(a && b) || (a && b)", "(b && a) || (b && a)"));
+ assert!(cmp("(a || b) && (a || b)", "(b || a) && (b || a)"));
+
+ // Negative cases - subtle differences
+ assert!(!cmp("a && b && c", "a && b"));
+ assert!(!cmp("a || b || c", "a || b"));
+ assert!(!cmp("(a && b) || c", "a && (b || c)"));
+
+ // a > b > c is not the same as a > c, should not be equal
+ assert!(!cmp("a > b > c", "a > c"));
+
+ // Double negation with complex expressions
+ assert!(cmp("!(!(a && b))", "a && b"));
+ assert!(cmp("!(!(a || b))", "a || b"));
+ assert!(cmp("!(!(a > b))", "a > b"));
+ assert!(cmp("!(!a) && b", "a && b"));
+ assert!(cmp("!(!a) || b", "a || b"));
+ assert!(cmp("!(!(a && b)) || c", "(a && b) || c"));
+ assert!(cmp("!(!(a && b)) || c", "(b && a) || c"));
+ assert!(cmp("!(!a)", "a"));
+ assert!(cmp("a", "!(!a)"));
+ assert!(cmp("!(!(!a))", "!a"));
+ assert!(cmp("!(!(!(!a)))", "a"));
+ }
+}
@@ -1,6 +1,6 @@
use gpui::{
Animation, AnimationExt, Context, EventEmitter, FocusHandle, Focusable, FontWeight, KeyContext,
- Keystroke, Modifiers, ModifiersChangedEvent, Subscription, Task, actions,
+ KeybindingKeystroke, Keystroke, Modifiers, ModifiersChangedEvent, Subscription, Task, actions,
};
use ui::{
ActiveTheme as _, Color, IconButton, IconButtonShape, IconName, IconSize, Label, LabelSize,
@@ -19,7 +19,7 @@ actions!(
]
);
-const KEY_CONTEXT_VALUE: &'static str = "KeystrokeInput";
+const KEY_CONTEXT_VALUE: &str = "KeystrokeInput";
const CLOSE_KEYSTROKE_CAPTURE_END_TIMEOUT: std::time::Duration =
std::time::Duration::from_millis(300);
@@ -42,8 +42,8 @@ impl PartialEq for CloseKeystrokeResult {
}
pub struct KeystrokeInput {
- keystrokes: Vec<Keystroke>,
- placeholder_keystrokes: Option<Vec<Keystroke>>,
+ keystrokes: Vec<KeybindingKeystroke>,
+ placeholder_keystrokes: Option<Vec<KeybindingKeystroke>>,
outer_focus_handle: FocusHandle,
inner_focus_handle: FocusHandle,
intercept_subscription: Option<Subscription>,
@@ -70,7 +70,7 @@ impl KeystrokeInput {
const KEYSTROKE_COUNT_MAX: usize = 3;
pub fn new(
- placeholder_keystrokes: Option<Vec<Keystroke>>,
+ placeholder_keystrokes: Option<Vec<KeybindingKeystroke>>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -97,7 +97,7 @@ impl KeystrokeInput {
}
}
- pub fn set_keystrokes(&mut self, keystrokes: Vec<Keystroke>, cx: &mut Context<Self>) {
+ pub fn set_keystrokes(&mut self, keystrokes: Vec<KeybindingKeystroke>, cx: &mut Context<Self>) {
self.keystrokes = keystrokes;
self.keystrokes_changed(cx);
}
@@ -106,7 +106,7 @@ impl KeystrokeInput {
self.search = search;
}
- pub fn keystrokes(&self) -> &[Keystroke] {
+ pub fn keystrokes(&self) -> &[KeybindingKeystroke] {
if let Some(placeholders) = self.placeholder_keystrokes.as_ref()
&& self.keystrokes.is_empty()
{
@@ -116,19 +116,19 @@ impl KeystrokeInput {
&& self
.keystrokes
.last()
- .is_some_and(|last| last.key.is_empty())
+ .is_some_and(|last| last.key().is_empty())
{
return &self.keystrokes[..self.keystrokes.len() - 1];
}
&self.keystrokes
}
- fn dummy(modifiers: Modifiers) -> Keystroke {
- Keystroke {
+ fn dummy(modifiers: Modifiers) -> KeybindingKeystroke {
+ KeybindingKeystroke::from_keystroke(Keystroke {
modifiers,
key: "".to_string(),
key_char: None,
- }
+ })
}
fn keystrokes_changed(&self, cx: &mut Context<Self>) {
@@ -254,7 +254,7 @@ impl KeystrokeInput {
self.keystrokes_changed(cx);
if let Some(last) = self.keystrokes.last_mut()
- && last.key.is_empty()
+ && last.key().is_empty()
&& keystrokes_len <= Self::KEYSTROKE_COUNT_MAX
{
if !self.search && !event.modifiers.modified() {
@@ -263,13 +263,14 @@ impl KeystrokeInput {
}
if self.search {
if self.previous_modifiers.modified() {
- last.modifiers |= event.modifiers;
+ let modifiers = *last.modifiers() | event.modifiers;
+ last.set_modifiers(modifiers);
} else {
self.keystrokes.push(Self::dummy(event.modifiers));
}
self.previous_modifiers |= event.modifiers;
} else {
- last.modifiers = event.modifiers;
+ last.set_modifiers(event.modifiers);
return;
}
} else if keystrokes_len < Self::KEYSTROKE_COUNT_MAX {
@@ -297,14 +298,15 @@ impl KeystrokeInput {
return;
}
- let mut keystroke = keystroke.clone();
+ let keystroke = KeybindingKeystroke::new_with_mapper(
+ keystroke.clone(),
+ false,
+ cx.keyboard_mapper().as_ref(),
+ );
if let Some(last) = self.keystrokes.last()
- && last.key.is_empty()
+ && last.key().is_empty()
&& (!self.search || self.previous_modifiers.modified())
{
- let key = keystroke.key.clone();
- keystroke = last.clone();
- keystroke.key = key;
self.keystrokes.pop();
}
@@ -320,15 +322,19 @@ impl KeystrokeInput {
return;
}
- self.keystrokes.push(keystroke.clone());
+ self.keystrokes.push(keystroke);
self.keystrokes_changed(cx);
+ // The reason we use the real modifiers from the window instead of the keystroke's modifiers
+ // is that for keystrokes like `ctrl-$` the modifiers reported by keystroke is `ctrl` which
+ // is wrong, it should be `ctrl-shift`. The window's modifiers are always correct.
+ let real_modifiers = window.modifiers();
if self.search {
- self.previous_modifiers = keystroke.modifiers;
+ self.previous_modifiers = real_modifiers;
return;
}
- if self.keystrokes.len() < Self::KEYSTROKE_COUNT_MAX && keystroke.modifiers.modified() {
- self.keystrokes.push(Self::dummy(keystroke.modifiers));
+ if self.keystrokes.len() < Self::KEYSTROKE_COUNT_MAX && real_modifiers.modified() {
+ self.keystrokes.push(Self::dummy(real_modifiers));
}
}
@@ -364,7 +370,7 @@ impl KeystrokeInput {
&self.keystrokes
};
keystrokes.iter().map(move |keystroke| {
- h_flex().children(ui::render_keystroke(
+ h_flex().children(ui::render_keybinding_keystroke(
keystroke,
Some(Color::Default),
Some(rems(0.875).into()),
@@ -706,8 +712,11 @@ mod tests {
// Combine current modifiers with keystroke modifiers
keystroke.modifiers |= self.current_modifiers;
+ let real_modifiers = keystroke.modifiers;
+ keystroke = to_gpui_keystroke(keystroke);
self.update_input(|input, window, cx| {
+ window.set_modifiers(real_modifiers);
input.handle_keystroke(&keystroke, window, cx);
});
@@ -735,6 +744,7 @@ mod tests {
};
self.update_input(|input, window, cx| {
+ window.set_modifiers(new_modifiers);
input.on_modifiers_changed(&event, window, cx);
});
@@ -809,9 +819,13 @@ mod tests {
/// Verifies that the keystrokes match the expected strings
#[track_caller]
pub fn expect_keystrokes(&mut self, expected: &[&str]) -> &mut Self {
- let actual = self
- .input
- .read_with(&self.cx, |input, _| input.keystrokes.clone());
+ let actual: Vec<Keystroke> = self.input.read_with(&self.cx, |input, _| {
+ input
+ .keystrokes
+ .iter()
+ .map(|keystroke| keystroke.inner().clone())
+ .collect()
+ });
Self::expect_keystrokes_equal(&actual, expected);
self
}
@@ -938,8 +952,102 @@ mod tests {
}
}
+ /// For GPUI, when you press `ctrl-shift-2`, it produces `ctrl-@` without the shift modifier.
+ fn to_gpui_keystroke(mut keystroke: Keystroke) -> Keystroke {
+ if keystroke.modifiers.shift {
+ match keystroke.key.as_str() {
+ "`" => {
+ keystroke.key = "~".into();
+ keystroke.modifiers.shift = false;
+ }
+ "1" => {
+ keystroke.key = "!".into();
+ keystroke.modifiers.shift = false;
+ }
+ "2" => {
+ keystroke.key = "@".into();
+ keystroke.modifiers.shift = false;
+ }
+ "3" => {
+ keystroke.key = "#".into();
+ keystroke.modifiers.shift = false;
+ }
+ "4" => {
+ keystroke.key = "$".into();
+ keystroke.modifiers.shift = false;
+ }
+ "5" => {
+ keystroke.key = "%".into();
+ keystroke.modifiers.shift = false;
+ }
+ "6" => {
+ keystroke.key = "^".into();
+ keystroke.modifiers.shift = false;
+ }
+ "7" => {
+ keystroke.key = "&".into();
+ keystroke.modifiers.shift = false;
+ }
+ "8" => {
+ keystroke.key = "*".into();
+ keystroke.modifiers.shift = false;
+ }
+ "9" => {
+ keystroke.key = "(".into();
+ keystroke.modifiers.shift = false;
+ }
+ "0" => {
+ keystroke.key = ")".into();
+ keystroke.modifiers.shift = false;
+ }
+ "-" => {
+ keystroke.key = "_".into();
+ keystroke.modifiers.shift = false;
+ }
+ "=" => {
+ keystroke.key = "+".into();
+ keystroke.modifiers.shift = false;
+ }
+ "[" => {
+ keystroke.key = "{".into();
+ keystroke.modifiers.shift = false;
+ }
+ "]" => {
+ keystroke.key = "}".into();
+ keystroke.modifiers.shift = false;
+ }
+ "\\" => {
+ keystroke.key = "|".into();
+ keystroke.modifiers.shift = false;
+ }
+ ";" => {
+ keystroke.key = ":".into();
+ keystroke.modifiers.shift = false;
+ }
+ "'" => {
+ keystroke.key = "\"".into();
+ keystroke.modifiers.shift = false;
+ }
+ "," => {
+ keystroke.key = "<".into();
+ keystroke.modifiers.shift = false;
+ }
+ "." => {
+ keystroke.key = ">".into();
+ keystroke.modifiers.shift = false;
+ }
+ "/" => {
+ keystroke.key = "?".into();
+ keystroke.modifiers.shift = false;
+ }
+ _ => {}
+ }
+ }
+ keystroke
+ }
+
struct KeystrokeUpdateTracker {
- initial_keystrokes: Vec<Keystroke>,
+ initial_keystrokes: Vec<KeybindingKeystroke>,
_subscription: Subscription,
input: Entity<KeystrokeInput>,
received_keystrokes_updated: bool,
@@ -983,8 +1091,8 @@ mod tests {
);
}
- fn keystrokes_str(ks: &[Keystroke]) -> String {
- ks.iter().map(|ks| ks.unparse()).join(" ")
+ fn keystrokes_str(ks: &[KeybindingKeystroke]) -> String {
+ ks.iter().map(|ks| ks.inner().unparse()).join(" ")
}
}
}
@@ -1041,7 +1149,15 @@ mod tests {
.send_events(&["+cmd", "shift-f", "-cmd"])
// In search mode, when completing a modifier-only keystroke with a key,
// only the original modifiers are preserved, not the keystroke's modifiers
- .expect_keystrokes(&["cmd-f"]);
+ //
+ // Update:
+ // This behavior was changed to preserve all modifiers in search mode, this is now reflected in the expected keystrokes.
+ // Specifically, considering the sequence: `+cmd +shift -shift 2`, we expect it to produce the same result as `+cmd +shift 2`
+ // which is `cmd-@`. But in the case of `+cmd +shift -shift 2`, the keystroke we receive is `cmd-2`, which means that
+ // we need to dynamically map the key from `2` to `@` when the shift modifier is not present, which is not possible.
+ // Therefore, we now preserve all modifiers in search mode to ensure consistent behavior.
+ // And also, VSCode seems to preserve all modifiers in search mode as well.
+ .expect_keystrokes(&["cmd-shift-f"]);
}
#[gpui::test]
@@ -1218,7 +1334,7 @@ mod tests {
.await
.with_search_mode(true)
.send_events(&["+ctrl", "+shift", "-shift", "a", "-ctrl"])
- .expect_keystrokes(&["ctrl-shift-a"]);
+ .expect_keystrokes(&["ctrl-a"]);
}
#[gpui::test]
@@ -1326,7 +1442,7 @@ mod tests {
.await
.with_search_mode(true)
.send_events(&["+ctrl+alt", "-ctrl", "j"])
- .expect_keystrokes(&["ctrl-alt-j"]);
+ .expect_keystrokes(&["alt-j"]);
}
#[gpui::test]
@@ -1348,11 +1464,11 @@ mod tests {
.send_events(&["+ctrl+alt", "-ctrl", "+shift"])
.expect_keystrokes(&["ctrl-shift-alt-"])
.send_keystroke("j")
- .expect_keystrokes(&["ctrl-shift-alt-j"])
+ .expect_keystrokes(&["shift-alt-j"])
.send_keystroke("i")
- .expect_keystrokes(&["ctrl-shift-alt-j", "shift-alt-i"])
+ .expect_keystrokes(&["shift-alt-j", "shift-alt-i"])
.send_events(&["-shift-alt", "+cmd"])
- .expect_keystrokes(&["ctrl-shift-alt-j", "shift-alt-i", "cmd-"]);
+ .expect_keystrokes(&["shift-alt-j", "shift-alt-i", "cmd-"]);
}
#[gpui::test]
@@ -1385,4 +1501,13 @@ mod tests {
.send_events(&["+ctrl", "-ctrl", "+alt", "-alt", "+shift", "-shift"])
.expect_empty();
}
+
+ #[gpui::test]
+ async fn test_not_search_shifted_keys(cx: &mut TestAppContext) {
+ init_test(cx)
+ .await
+ .with_search_mode(false)
+ .send_events(&["+ctrl", "+shift", "4", "-all"])
+ .expect_keystrokes(&["ctrl-$"]);
+ }
}
@@ -213,7 +213,7 @@ impl TableInteractionState {
let mut column_ix = 0;
let resizable_columns_slice = *resizable_columns;
- let mut resizable_columns = resizable_columns.into_iter();
+ let mut resizable_columns = resizable_columns.iter();
let dividers = intersperse_with(spacers, || {
window.with_id(column_ix, |window| {
@@ -801,7 +801,7 @@ impl<const COLS: usize> Table<COLS> {
) -> Self {
self.rows = TableContents::UniformList(UniformListData {
element_id: id.into(),
- row_count: row_count,
+ row_count,
render_item_fn: Box::new(render_item_fn),
});
self
@@ -32,7 +32,7 @@ use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::Value;
-use settings::WorktreeId;
+use settings::{SettingsUi, WorktreeId};
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
@@ -173,7 +173,9 @@ pub enum IndentKind {
}
/// The shape of a selection cursor.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum CursorShape {
/// A vertical bar
@@ -202,7 +204,7 @@ pub struct Diagnostic {
pub source: Option<String>,
/// A machine-readable code that identifies this diagnostic.
pub code: Option<NumberOrString>,
- pub code_description: Option<lsp::Url>,
+ pub code_description: Option<lsp::Uri>,
/// Whether this diagnostic is a hint, warning, or error.
pub severity: DiagnosticSeverity,
/// The human-readable message associated with this diagnostic.
@@ -282,6 +284,14 @@ pub enum Operation {
/// The language server ID.
server_id: LanguageServerId,
},
+
+ /// An update to the line ending type of this buffer.
+ UpdateLineEnding {
+ /// The line ending type.
+ line_ending: LineEnding,
+ /// The buffer's lamport timestamp.
+ lamport_timestamp: clock::Lamport,
+ },
}
/// An event that occurs in a buffer.
@@ -313,10 +323,6 @@ pub enum BufferEvent {
DiagnosticsUpdated,
/// The buffer gained or lost editing capabilities.
CapabilityChanged,
- /// The buffer was explicitly requested to close.
- Closed,
- /// The buffer was discarded when closing.
- Discarded,
}
/// The file associated with a buffer.
@@ -974,8 +980,6 @@ impl Buffer {
TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
let mut syntax = SyntaxMap::new(&text).snapshot();
if let Some(language) = language.clone() {
- let text = text.clone();
- let language = language.clone();
let language_registry = language_registry.clone();
syntax.reparse(&text, language_registry, language);
}
@@ -1020,9 +1024,6 @@ impl Buffer {
let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
let mut syntax = SyntaxMap::new(&text).snapshot();
if let Some(language) = language.clone() {
- let text = text.clone();
- let language = language.clone();
- let language_registry = language_registry.clone();
syntax.reparse(&text, language_registry, language);
}
BufferSnapshot {
@@ -1128,7 +1129,7 @@ impl Buffer {
} else {
ranges.as_slice()
}
- .into_iter()
+ .iter()
.peekable();
let mut edits = Vec::new();
@@ -1247,10 +1248,27 @@ impl Buffer {
self.syntax_map.lock().language_registry()
}
+ /// Assign the line ending type to the buffer.
+ pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
+ self.text.set_line_ending(line_ending);
+
+ let lamport_timestamp = self.text.lamport_clock.tick();
+ self.send_operation(
+ Operation::UpdateLineEnding {
+ line_ending,
+ lamport_timestamp,
+ },
+ true,
+ cx,
+ );
+ }
+
/// Assign the buffer a new [`Capability`].
pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
- self.capability = capability;
- cx.emit(BufferEvent::CapabilityChanged)
+ if self.capability != capability {
+ self.capability = capability;
+ cx.emit(BufferEvent::CapabilityChanged)
+ }
}
/// This method is called to signal that the buffer has been saved.
@@ -1270,12 +1288,6 @@ impl Buffer {
cx.notify();
}
- /// This method is called to signal that the buffer has been discarded.
- pub fn discarded(&self, cx: &mut Context<Self>) {
- cx.emit(BufferEvent::Discarded);
- cx.notify();
- }
-
/// Reloads the contents of the buffer from disk.
pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
let (tx, rx) = futures::channel::oneshot::channel();
@@ -1395,7 +1407,8 @@ impl Buffer {
is_first = false;
return true;
}
- let any_sub_ranges_contain_range = layer
+
+ layer
.included_sub_ranges
.map(|sub_ranges| {
sub_ranges.iter().any(|sub_range| {
@@ -1404,9 +1417,7 @@ impl Buffer {
!is_before_start && !is_after_end
})
})
- .unwrap_or(true);
- let result = any_sub_ranges_contain_range;
- result
+ .unwrap_or(true)
})
.last()
.map(|info| info.language.clone())
@@ -1575,11 +1586,21 @@ impl Buffer {
self.send_operation(op, true, cx);
}
- pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
- let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
- return None;
- };
- Some(&self.diagnostics[idx].1)
+ pub fn buffer_diagnostics(
+ &self,
+ for_server: Option<LanguageServerId>,
+ ) -> Vec<&DiagnosticEntry<Anchor>> {
+ match for_server {
+ Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
+ Ok(idx) => self.diagnostics[idx].1.iter().collect(),
+ Err(_) => Vec::new(),
+ },
+ None => self
+ .diagnostics
+ .iter()
+ .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
+ .collect(),
+ }
}
fn request_autoindent(&mut self, cx: &mut Context<Self>) {
@@ -2207,7 +2228,7 @@ impl Buffer {
self.remote_selections.insert(
AGENT_REPLICA_ID,
SelectionSet {
- selections: selections.clone(),
+ selections,
lamport_timestamp,
line_mode,
cursor_shape,
@@ -2559,7 +2580,7 @@ impl Buffer {
Operation::UpdateSelections { selections, .. } => selections
.iter()
.all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
- Operation::UpdateCompletionTriggers { .. } => true,
+ Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
}
}
@@ -2616,7 +2637,7 @@ impl Buffer {
self.completion_triggers = self
.completion_triggers_per_language_server
.values()
- .flat_map(|triggers| triggers.into_iter().cloned())
+ .flat_map(|triggers| triggers.iter().cloned())
.collect();
} else {
self.completion_triggers_per_language_server
@@ -2625,6 +2646,13 @@ impl Buffer {
}
self.text.lamport_clock.observe(lamport_timestamp);
}
+ Operation::UpdateLineEnding {
+ line_ending,
+ lamport_timestamp,
+ } => {
+ self.text.set_line_ending(line_ending);
+ self.text.lamport_clock.observe(lamport_timestamp);
+ }
}
}
@@ -2776,7 +2804,7 @@ impl Buffer {
self.completion_triggers = self
.completion_triggers_per_language_server
.values()
- .flat_map(|triggers| triggers.into_iter().cloned())
+ .flat_map(|triggers| triggers.iter().cloned())
.collect();
} else {
self.completion_triggers_per_language_server
@@ -2844,12 +2872,12 @@ impl Buffer {
let new_start = last_end.map_or(0, |last_end| last_end + 1);
let mut range = self.random_byte_range(new_start, rng);
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
mem::swap(&mut range.start, &mut range.end);
}
last_end = Some(range.end);
- let new_text_len = rng.gen_range(0..10);
+ let new_text_len = rng.random_range(0..10);
let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
new_text = new_text.to_uppercase();
@@ -3007,9 +3035,9 @@ impl BufferSnapshot {
}
let mut error_ranges = Vec::<Range<Point>>::new();
- let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
- grammar.error_query.as_ref()
- });
+ let mut matches = self
+ .syntax
+ .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
while let Some(mat) = matches.peek() {
let node = mat.captures[0].node;
let start = Point::from_ts_point(node.start_position());
@@ -4076,7 +4104,7 @@ impl BufferSnapshot {
// Get the ranges of the innermost pair of brackets.
let mut result: Option<(Range<usize>, Range<usize>)> = None;
- for pair in self.enclosing_bracket_ranges(range.clone()) {
+ for pair in self.enclosing_bracket_ranges(range) {
if let Some(range_filter) = range_filter
&& !range_filter(pair.open_range.clone(), pair.close_range.clone())
{
@@ -4249,7 +4277,7 @@ impl BufferSnapshot {
.map(|(range, name)| {
(
name.to_string(),
- self.text_for_range(range.clone()).collect::<String>(),
+ self.text_for_range(range).collect::<String>(),
)
})
.collect();
@@ -4816,6 +4844,9 @@ impl operation_queue::Operation for Operation {
}
| Operation::UpdateCompletionTriggers {
lamport_timestamp, ..
+ }
+ | Operation::UpdateLineEnding {
+ lamport_timestamp, ..
} => *lamport_timestamp,
}
}
@@ -67,6 +67,78 @@ fn test_line_endings(cx: &mut gpui::App) {
});
}
+#[gpui::test]
+fn test_set_line_ending(cx: &mut TestAppContext) {
+ let base = cx.new(|cx| Buffer::local("one\ntwo\nthree\n", cx));
+ let base_replica = cx.new(|cx| {
+ Buffer::from_proto(1, Capability::ReadWrite, base.read(cx).to_proto(cx), None).unwrap()
+ });
+ base.update(cx, |_buffer, cx| {
+ cx.subscribe(&base_replica, |this, _, event, cx| {
+ if let BufferEvent::Operation {
+ operation,
+ is_local: true,
+ } = event
+ {
+ this.apply_ops([operation.clone()], cx);
+ }
+ })
+ .detach();
+ });
+ base_replica.update(cx, |_buffer, cx| {
+ cx.subscribe(&base, |this, _, event, cx| {
+ if let BufferEvent::Operation {
+ operation,
+ is_local: true,
+ } = event
+ {
+ this.apply_ops([operation.clone()], cx);
+ }
+ })
+ .detach();
+ });
+
+ // Base
+ base_replica.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+ base.update(cx, |buffer, cx| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ buffer.set_line_ending(LineEnding::Windows, cx);
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ });
+ base_replica.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ });
+ base.update(cx, |buffer, cx| {
+ buffer.set_line_ending(LineEnding::Unix, cx);
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+ base_replica.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+
+ // Replica
+ base.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+ base_replica.update(cx, |buffer, cx| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ buffer.set_line_ending(LineEnding::Windows, cx);
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ });
+ base.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ });
+ base_replica.update(cx, |buffer, cx| {
+ buffer.set_line_ending(LineEnding::Unix, cx);
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+ base.read_with(cx, |buffer, _| {
+ assert_eq!(buffer.line_ending(), LineEnding::Unix);
+ });
+}
+
#[gpui::test]
fn test_select_language(cx: &mut App) {
init_settings(cx, |_| {});
@@ -1744,7 +1816,7 @@ fn test_autoindent_block_mode(cx: &mut App) {
buffer.edit(
[(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
+ original_indent_columns,
}),
cx,
);
@@ -1790,9 +1862,9 @@ fn test_autoindent_block_mode_with_newline(cx: &mut App) {
"#
.unindent();
buffer.edit(
- [(Point::new(2, 0)..Point::new(2, 0), inserted_text.clone())],
+ [(Point::new(2, 0)..Point::new(2, 0), inserted_text)],
Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
+ original_indent_columns,
}),
cx,
);
@@ -1843,7 +1915,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut App) {
buffer.edit(
[(Point::new(2, 0)..Point::new(2, 0), inserted_text)],
Some(AutoindentMode::Block {
- original_indent_columns: original_indent_columns.clone(),
+ original_indent_columns,
}),
cx,
);
@@ -2030,7 +2102,7 @@ fn test_autoindent_with_injected_languages(cx: &mut App) {
let language_registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
language_registry.add(html_language.clone());
- language_registry.add(javascript_language.clone());
+ language_registry.add(javascript_language);
cx.new(|cx| {
let (text, ranges) = marked_text_ranges(
@@ -3013,7 +3085,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let base_text_len = rng.gen_range(0..10);
+ let base_text_len = rng.random_range(0..10);
let base_text = RandomCharIter::new(&mut rng)
.take(base_text_len)
.collect::<String>();
@@ -3022,7 +3094,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
let network = Arc::new(Mutex::new(Network::new(rng.clone())));
let base_buffer = cx.new(|cx| Buffer::local(base_text.as_str(), cx));
- for i in 0..rng.gen_range(min_peers..=max_peers) {
+ for i in 0..rng.random_range(min_peers..=max_peers) {
let buffer = cx.new(|cx| {
let state = base_buffer.read(cx).to_proto(cx);
let ops = cx
@@ -3035,7 +3107,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
.map(|op| proto::deserialize_operation(op).unwrap()),
cx,
);
- buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200)));
let network = network.clone();
cx.subscribe(&cx.entity(), move |buffer, _, event, _| {
if let BufferEvent::Operation {
@@ -3066,11 +3138,11 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
let mut next_diagnostic_id = 0;
let mut active_selections = BTreeMap::default();
loop {
- let replica_index = rng.gen_range(0..replica_ids.len());
+ let replica_index = rng.random_range(0..replica_ids.len());
let replica_id = replica_ids[replica_index];
let buffer = &mut buffers[replica_index];
let mut new_buffer = None;
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..=29 if mutation_count != 0 => {
buffer.update(cx, |buffer, cx| {
buffer.start_transaction_at(now);
@@ -3082,13 +3154,13 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
}
30..=39 if mutation_count != 0 => {
buffer.update(cx, |buffer, cx| {
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
log::info!("peer {} clearing active selections", replica_id);
active_selections.remove(&replica_id);
buffer.remove_active_selections(cx);
} else {
let mut selections = Vec::new();
- for id in 0..rng.gen_range(1..=5) {
+ for id in 0..rng.random_range(1..=5) {
let range = buffer.random_byte_range(0, &mut rng);
selections.push(Selection {
id,
@@ -3111,7 +3183,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
mutation_count -= 1;
}
40..=49 if mutation_count != 0 && replica_id == 0 => {
- let entry_count = rng.gen_range(1..=5);
+ let entry_count = rng.random_range(1..=5);
buffer.update(cx, |buffer, cx| {
let diagnostics = DiagnosticSet::new(
(0..entry_count).map(|_| {
@@ -3166,7 +3238,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
new_buffer.replica_id(),
new_buffer.text()
);
- new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ new_buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200)));
let network = network.clone();
cx.subscribe(&cx.entity(), move |buffer, _, event, _| {
if let BufferEvent::Operation {
@@ -3238,7 +3310,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) {
_ => {}
}
- now += Duration::from_millis(rng.gen_range(0..=200));
+ now += Duration::from_millis(rng.random_range(0..=200));
buffers.extend(new_buffer);
for buffer in &buffers {
@@ -3320,23 +3392,23 @@ fn test_trailing_whitespace_ranges(mut rng: StdRng) {
// Generate a random multi-line string containing
// some lines with trailing whitespace.
let mut text = String::new();
- for _ in 0..rng.gen_range(0..16) {
- for _ in 0..rng.gen_range(0..36) {
- text.push(match rng.gen_range(0..10) {
+ for _ in 0..rng.random_range(0..16) {
+ for _ in 0..rng.random_range(0..36) {
+ text.push(match rng.random_range(0..10) {
0..=1 => ' ',
3 => '\t',
- _ => rng.gen_range('a'..='z'),
+ _ => rng.random_range('a'..='z'),
});
}
text.push('\n');
}
- match rng.gen_range(0..10) {
+ match rng.random_range(0..10) {
// sometimes remove the last newline
0..=1 => drop(text.pop()), //
// sometimes add extra newlines
- 2..=3 => text.push_str(&"\n".repeat(rng.gen_range(1..5))),
+ 2..=3 => text.push_str(&"\n".repeat(rng.random_range(1..5))),
_ => {}
}
@@ -69,6 +69,7 @@ pub use text_diff::{
use theme::SyntaxTheme;
pub use toolchain::{
LanguageToolchainStore, LocalLanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister,
+ ToolchainMetadata, ToolchainScope,
};
use tree_sitter::{self, Query, QueryCursor, WasmStore, wasmtime};
use util::serde::default_true;
@@ -206,7 +207,7 @@ impl CachedLspAdapter {
}
pub fn name(&self) -> LanguageServerName {
- self.adapter.name().clone()
+ self.adapter.name()
}
pub async fn get_language_server_command(
@@ -331,7 +332,7 @@ pub trait LspAdapter: 'static + Send + Sync {
// for each worktree we might have open.
if binary_options.allow_path_lookup
&& let Some(binary) = self.check_if_user_installed(delegate.as_ref(), toolchains, cx).await {
- log::info!(
+ log::debug!(
"found user-installed language server for {}. path: {:?}, arguments: {:?}",
self.name().0,
binary.path,
@@ -395,6 +396,7 @@ pub trait LspAdapter: 'static + Send + Sync {
async fn fetch_latest_server_version(
&self,
delegate: &dyn LspAdapterDelegate,
+ cx: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>>;
fn will_fetch_server(
@@ -588,6 +590,11 @@ pub trait LspAdapter: 'static + Send + Sync {
"Not implemented for this adapter. This method should only be called on the default JSON language server adapter"
);
}
+
+ /// True for the extension adapter and false otherwise.
+ fn is_extension(&self) -> bool {
+ false
+ }
}
async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>(
@@ -601,18 +608,18 @@ async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>
}
let name = adapter.name();
- log::info!("fetching latest version of language server {:?}", name.0);
+ log::debug!("fetching latest version of language server {:?}", name.0);
delegate.update_status(name.clone(), BinaryStatus::CheckingForUpdate);
let latest_version = adapter
- .fetch_latest_server_version(delegate.as_ref())
+ .fetch_latest_server_version(delegate.as_ref(), cx)
.await?;
if let Some(binary) = adapter
.check_if_version_installed(latest_version.as_ref(), &container_dir, delegate.as_ref())
.await
{
- log::info!("language server {:?} is already installed", name.0);
+ log::debug!("language server {:?} is already installed", name.0);
delegate.update_status(name.clone(), BinaryStatus::None);
Ok(binary)
} else {
@@ -720,6 +727,9 @@ pub struct LanguageConfig {
/// How to soft-wrap long lines of text.
#[serde(default)]
pub soft_wrap: Option<SoftWrap>,
+ /// When set, selections can be wrapped using prefix/suffix pairs on both sides.
+ #[serde(default)]
+ pub wrap_characters: Option<WrapCharactersConfig>,
/// The name of a Prettier parser that will be used for this language when no file path is available.
/// If there's a parser name in the language settings, that will be used instead.
#[serde(default)]
@@ -923,6 +933,7 @@ impl Default for LanguageConfig {
hard_tabs: None,
tab_size: None,
soft_wrap: None,
+ wrap_characters: None,
prettier_parser_name: None,
hidden: false,
jsx_tag_auto_close: None,
@@ -932,6 +943,18 @@ impl Default for LanguageConfig {
}
}
+#[derive(Clone, Debug, Deserialize, JsonSchema)]
+pub struct WrapCharactersConfig {
+ /// Opening token split into a prefix and suffix. The first caret goes
+ /// after the prefix (i.e., between prefix and suffix).
+ pub start_prefix: String,
+ pub start_suffix: String,
+ /// Closing token split into a prefix and suffix. The second caret goes
+ /// after the prefix (i.e., between prefix and suffix).
+ pub end_prefix: String,
+ pub end_suffix: String,
+}
+
fn auto_indent_using_last_non_empty_line_default() -> bool {
true
}
@@ -1234,6 +1257,7 @@ struct InjectionPatternConfig {
combined: bool,
}
+#[derive(Debug)]
struct BracketsConfig {
query: Query,
open_capture_ix: u32,
@@ -1513,9 +1537,8 @@ impl Language {
.map(|ix| {
let mut config = BracketsPatternConfig::default();
for setting in query.property_settings(ix) {
- match setting.key.as_ref() {
- "newline.only" => config.newline_only = true,
- _ => {}
+ if setting.key.as_ref() == "newline.only" {
+ config.newline_only = true
}
}
config
@@ -2206,6 +2229,7 @@ impl LspAdapter for FakeLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
unreachable!();
}
@@ -2251,6 +2275,10 @@ impl LspAdapter for FakeLspAdapter {
let label_for_completion = self.label_for_completion.as_ref()?;
label_for_completion(item, language)
}
+
+ fn is_extension(&self) -> bool {
+ false
+ }
}
fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
@@ -49,7 +49,7 @@ impl LanguageName {
pub fn from_proto(s: String) -> Self {
Self(SharedString::from(s))
}
- pub fn to_proto(self) -> String {
+ pub fn to_proto(&self) -> String {
self.0.to_string()
}
pub fn lsp_id(&self) -> String {
@@ -374,14 +374,23 @@ impl LanguageRegistry {
pub fn register_available_lsp_adapter(
&self,
name: LanguageServerName,
- load: impl Fn() -> Arc<dyn LspAdapter> + 'static + Send + Sync,
+ adapter: Arc<dyn LspAdapter>,
) {
- self.state.write().available_lsp_adapters.insert(
+ let mut state = self.state.write();
+
+ if adapter.is_extension()
+ && let Some(existing_adapter) = state.all_lsp_adapters.get(&name)
+ && !existing_adapter.adapter.is_extension()
+ {
+ log::warn!(
+ "not registering extension-provided language server {name:?}, since a builtin language server exists with that name",
+ );
+ return;
+ }
+
+ state.available_lsp_adapters.insert(
name,
- Arc::new(move || {
- let lsp_adapter = load();
- CachedLspAdapter::new(lsp_adapter)
- }),
+ Arc::new(move || CachedLspAdapter::new(adapter.clone())),
);
}
@@ -396,13 +405,21 @@ impl LanguageRegistry {
Some(load_lsp_adapter())
}
- pub fn register_lsp_adapter(
- &self,
- language_name: LanguageName,
- adapter: Arc<dyn LspAdapter>,
- ) -> Arc<CachedLspAdapter> {
- let cached = CachedLspAdapter::new(adapter);
+ pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc<dyn LspAdapter>) {
let mut state = self.state.write();
+
+ if adapter.is_extension()
+ && let Some(existing_adapter) = state.all_lsp_adapters.get(&adapter.name())
+ && !existing_adapter.adapter.is_extension()
+ {
+ log::warn!(
+ "not registering extension-provided language server {:?} for language {language_name:?}, since a builtin language server exists with that name",
+ adapter.name(),
+ );
+ return;
+ }
+
+ let cached = CachedLspAdapter::new(adapter);
state
.lsp_adapters
.entry(language_name)
@@ -411,8 +428,6 @@ impl LanguageRegistry {
state
.all_lsp_adapters
.insert(cached.name.clone(), cached.clone());
-
- cached
}
/// Register a fake language server and adapter
@@ -432,7 +447,7 @@ impl LanguageRegistry {
let mut state = self.state.write();
state
.lsp_adapters
- .entry(language_name.clone())
+ .entry(language_name)
.or_default()
.push(adapter.clone());
state.all_lsp_adapters.insert(adapter.name(), adapter);
@@ -454,7 +469,7 @@ impl LanguageRegistry {
let cached_adapter = CachedLspAdapter::new(Arc::new(adapter));
state
.lsp_adapters
- .entry(language_name.clone())
+ .entry(language_name)
.or_default()
.push(cached_adapter.clone());
state
@@ -1167,8 +1182,7 @@ impl LanguageRegistryState {
soft_wrap: language.config.soft_wrap,
auto_indent_on_paste: language.config.auto_indent_on_paste,
..Default::default()
- }
- .clone(),
+ },
);
self.languages.push(language);
self.version += 1;
@@ -5,7 +5,7 @@ use anyhow::Result;
use collections::{FxHashMap, HashMap, HashSet};
use ec4rs::{
Properties as EditorconfigProperties,
- property::{FinalNewline, IndentSize, IndentStyle, TabWidth, TrimTrailingWs},
+ property::{FinalNewline, IndentSize, IndentStyle, MaxLineLen, TabWidth, TrimTrailingWs},
};
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
use gpui::{App, Modifiers};
@@ -17,7 +17,8 @@ use serde::{
};
use settings::{
- ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore,
+ ParameterizedJsonSchema, Settings, SettingsKey, SettingsLocation, SettingsSources,
+ SettingsStore, SettingsUi,
};
use shellexpand;
use std::{borrow::Cow, num::NonZeroU32, path::Path, slice, sync::Arc};
@@ -199,7 +200,7 @@ impl LanguageSettings {
if language_server.0.as_ref() == Self::REST_OF_LANGUAGE_SERVERS {
rest.clone()
} else {
- vec![language_server.clone()]
+ vec![language_server]
}
})
.collect::<Vec<_>>()
@@ -207,7 +208,9 @@ impl LanguageSettings {
}
/// The provider that supplies edit predictions.
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum EditPredictionProvider {
None,
@@ -230,13 +233,14 @@ impl EditPredictionProvider {
/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot)
/// or [Supermaven](https://supermaven.com).
-#[derive(Clone, Debug, Default)]
+#[derive(Clone, Debug, Default, SettingsUi)]
pub struct EditPredictionSettings {
/// The provider that supplies edit predictions.
pub provider: EditPredictionProvider,
/// A list of globs representing files that edit predictions should be disabled for.
/// This list adds to a pre-existing, sensible default set of globs.
/// Any additional ones you add are combined with them.
+ #[settings_ui(skip)]
pub disabled_globs: Vec<DisabledGlob>,
/// Configures how edit predictions are displayed in the buffer.
pub mode: EditPredictionsMode,
@@ -268,7 +272,9 @@ pub struct DisabledGlob {
}
/// The mode in which edit predictions should be displayed.
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(
+ Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi,
+)]
#[serde(rename_all = "snake_case")]
pub enum EditPredictionsMode {
/// If provider supports it, display inline when holding modifier key (e.g., alt).
@@ -281,18 +287,24 @@ pub enum EditPredictionsMode {
Eager,
}
-#[derive(Clone, Debug, Default)]
+#[derive(Clone, Debug, Default, SettingsUi)]
pub struct CopilotSettings {
/// HTTP/HTTPS proxy to use for Copilot.
+ #[settings_ui(skip)]
pub proxy: Option<String>,
/// Disable certificate verification for proxy (not recommended).
pub proxy_no_verify: Option<bool>,
/// Enterprise URI for Copilot.
+ #[settings_ui(skip)]
pub enterprise_uri: Option<String>,
}
/// The settings for all languages.
-#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(
+ Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey,
+)]
+#[settings_key(None)]
+#[settings_ui(group = "Default Language Settings")]
pub struct AllLanguageSettingsContent {
/// The settings for enabling/disabling features.
#[serde(default)]
@@ -305,10 +317,12 @@ pub struct AllLanguageSettingsContent {
pub defaults: LanguageSettingsContent,
/// The settings for individual languages.
#[serde(default)]
+ #[settings_ui(skip)]
pub languages: LanguageToSettingsMap,
/// Settings for associating file extensions and filenames
/// with languages.
#[serde(default)]
+ #[settings_ui(skip)]
pub file_types: HashMap<Arc<str>, Vec<String>>,
}
@@ -341,7 +355,7 @@ inventory::submit! {
}
/// Controls how completions are processed for this language.
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub struct CompletionSettings {
/// Controls how words are completed.
@@ -350,6 +364,12 @@ pub struct CompletionSettings {
/// Default: `fallback`
#[serde(default = "default_words_completion_mode")]
pub words: WordsCompletionMode,
+ /// How many characters has to be in the completions query to automatically show the words-based completions.
+ /// Before that value, it's still possible to trigger the words-based completion manually with the corresponding editor command.
+ ///
+ /// Default: 3
+ #[serde(default = "default_3")]
+ pub words_min_length: usize,
/// Whether to fetch LSP completions or not.
///
/// Default: true
@@ -359,7 +379,7 @@ pub struct CompletionSettings {
/// When set to 0, waits indefinitely.
///
/// Default: 0
- #[serde(default = "default_lsp_fetch_timeout_ms")]
+ #[serde(default)]
pub lsp_fetch_timeout_ms: u64,
/// Controls how LSP completions are inserted.
///
@@ -405,12 +425,12 @@ fn default_lsp_insert_mode() -> LspInsertMode {
LspInsertMode::ReplaceSuffix
}
-fn default_lsp_fetch_timeout_ms() -> u64 {
- 0
+fn default_3() -> usize {
+ 3
}
/// The settings for a particular language.
-#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi)]
pub struct LanguageSettingsContent {
/// How many columns a tab should occupy.
///
@@ -607,12 +627,13 @@ pub enum RewrapBehavior {
}
/// The contents of the edit prediction settings.
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, SettingsUi)]
pub struct EditPredictionSettingsContent {
/// A list of globs representing files that edit predictions should be disabled for.
/// This list adds to a pre-existing, sensible default set of globs.
/// Any additional ones you add are combined with them.
#[serde(default)]
+ #[settings_ui(skip)]
pub disabled_globs: Option<Vec<String>>,
/// The mode used to display edit predictions in the buffer.
/// Provider support required.
@@ -627,12 +648,13 @@ pub struct EditPredictionSettingsContent {
pub enabled_in_text_threads: bool,
}
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, SettingsUi)]
pub struct CopilotSettingsContent {
/// HTTP/HTTPS proxy to use for Copilot.
///
/// Default: none
#[serde(default)]
+ #[settings_ui(skip)]
pub proxy: Option<String>,
/// Disable certificate verification for the proxy (not recommended).
///
@@ -643,19 +665,21 @@ pub struct CopilotSettingsContent {
///
/// Default: none
#[serde(default)]
+ #[settings_ui(skip)]
pub enterprise_uri: Option<String>,
}
/// The settings for enabling/disabling features.
-#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
+#[settings_ui(group = "Features")]
pub struct FeaturesContent {
/// Determines which edit prediction provider to use.
pub edit_prediction_provider: Option<EditPredictionProvider>,
}
/// Controls the soft-wrapping behavior in the editor.
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub enum SoftWrap {
/// Prefer a single line generally, unless an overly long line is encountered.
@@ -924,7 +948,9 @@ pub enum Formatter {
}
/// The settings for indent guides.
-#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[derive(
+ Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, SettingsUi,
+)]
pub struct IndentGuideSettings {
/// Whether to display indent guides in the editor.
///
@@ -1131,6 +1157,10 @@ impl AllLanguageSettings {
}
fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) {
+ let preferred_line_length = cfg.get::<MaxLineLen>().ok().and_then(|v| match v {
+ MaxLineLen::Value(u) => Some(u as u32),
+ MaxLineLen::Off => None,
+ });
let tab_size = cfg.get::<IndentSize>().ok().and_then(|v| match v {
IndentSize::Value(u) => NonZeroU32::new(u as u32),
IndentSize::UseTabWidth => cfg.get::<TabWidth>().ok().and_then(|w| match w {
@@ -1158,6 +1188,7 @@ fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigPr
*target = value;
}
}
+ merge(&mut settings.preferred_line_length, preferred_line_length);
merge(&mut settings.tab_size, tab_size);
merge(&mut settings.hard_tabs, hard_tabs);
merge(
@@ -1202,8 +1233,6 @@ impl InlayHintKind {
}
impl settings::Settings for AllLanguageSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = AllLanguageSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -1463,6 +1492,7 @@ impl settings::Settings for AllLanguageSettings {
} else {
d.completions = Some(CompletionSettings {
words: mode,
+ words_min_length: 3,
lsp: true,
lsp_fetch_timeout_ms: 0,
lsp_insert_mode: LspInsertMode::ReplaceSuffix,
@@ -1793,7 +1823,7 @@ mod tests {
assert!(!settings.enabled_for_file(&dot_env_file, &cx));
// Test tilde expansion
- let home = shellexpand::tilde("~").into_owned().to_string();
+ let home = shellexpand::tilde("~").into_owned();
let home_file = make_test_file(&[&home, "test.rs"]);
let settings = build_settings(&["~/test.rs"]);
assert!(!settings.enabled_for_file(&home_file, &cx));
@@ -90,6 +90,15 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
language_server_id: server_id.to_proto(),
},
),
+
+ crate::Operation::UpdateLineEnding {
+ line_ending,
+ lamport_timestamp,
+ } => proto::operation::Variant::UpdateLineEnding(proto::operation::UpdateLineEnding {
+ replica_id: lamport_timestamp.replica_id as u32,
+ lamport_timestamp: lamport_timestamp.value,
+ line_ending: serialize_line_ending(*line_ending) as i32,
+ }),
}),
}
}
@@ -341,6 +350,18 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operati
server_id: LanguageServerId::from_proto(message.language_server_id),
}
}
+ proto::operation::Variant::UpdateLineEnding(message) => {
+ crate::Operation::UpdateLineEnding {
+ lamport_timestamp: clock::Lamport {
+ replica_id: message.replica_id as ReplicaId,
+ value: message.lamport_timestamp,
+ },
+ line_ending: deserialize_line_ending(
+ proto::LineEnding::from_i32(message.line_ending)
+ .context("missing line_ending")?,
+ ),
+ }
+ }
},
)
}
@@ -431,7 +452,7 @@ pub fn deserialize_diagnostics(
code: diagnostic.code.map(lsp::NumberOrString::from_string),
code_description: diagnostic
.code_description
- .and_then(|s| lsp::Url::parse(&s).ok()),
+ .and_then(|s| lsp::Uri::from_str(&s).ok()),
is_primary: diagnostic.is_primary,
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
@@ -496,6 +517,10 @@ pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<c
replica_id = op.replica_id;
value = op.lamport_timestamp;
}
+ proto::operation::Variant::UpdateLineEnding(op) => {
+ replica_id = op.replica_id;
+ value = op.lamport_timestamp;
+ }
}
Some(clock::Lamport {
@@ -832,7 +832,7 @@ impl SyntaxSnapshot {
query: fn(&Grammar) -> Option<&Query>,
) -> SyntaxMapCaptures<'a> {
SyntaxMapCaptures::new(
- range.clone(),
+ range,
text,
[SyntaxLayer {
language,
@@ -58,8 +58,7 @@ fn test_splice_included_ranges() {
assert_eq!(change, 0..1);
// does not create overlapping ranges
- let (new_ranges, change) =
- splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
+ let (new_ranges, change) = splice_included_ranges(ranges, &[0..18], &[ts_range(20..32)]);
assert_eq!(
new_ranges,
&[ts_range(20..32), ts_range(50..60), ts_range(80..90)]
@@ -104,7 +103,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
);
let mut syntax_map = SyntaxMap::new(&buffer);
- syntax_map.set_language_registry(registry.clone());
+ syntax_map.set_language_registry(registry);
syntax_map.reparse(language.clone(), &buffer);
assert_layers_for_range(
@@ -165,7 +164,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) {
// Put the vec! macro back, adding back the syntactic layer.
buffer.undo();
syntax_map.interpolate(&buffer);
- syntax_map.reparse(language.clone(), &buffer);
+ syntax_map.reparse(language, &buffer);
assert_layers_for_range(
&syntax_map,
@@ -252,8 +251,8 @@ fn test_dynamic_language_injection(cx: &mut App) {
assert!(syntax_map.contains_unknown_injections());
registry.add(Arc::new(html_lang()));
- syntax_map.reparse(markdown.clone(), &buffer);
- syntax_map.reparse(markdown_inline.clone(), &buffer);
+ syntax_map.reparse(markdown, &buffer);
+ syntax_map.reparse(markdown_inline, &buffer);
assert_layers_for_range(
&syntax_map,
&buffer,
@@ -862,7 +861,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) {
log::info!("editing");
buffer.edit_via_marked_text(&text);
syntax_map.interpolate(&buffer);
- syntax_map.reparse(language.clone(), &buffer);
+ syntax_map.reparse(language, &buffer);
assert_capture_ranges(
&syntax_map,
@@ -986,7 +985,7 @@ fn test_random_edits(
syntax_map.reparse(language.clone(), &buffer);
let mut reference_syntax_map = SyntaxMap::new(&buffer);
- reference_syntax_map.set_language_registry(registry.clone());
+ reference_syntax_map.set_language_registry(registry);
log::info!("initial text:\n{}", buffer.text());
@@ -88,11 +88,11 @@ pub fn text_diff_with_options(
let new_offset = new_byte_range.start;
hunk_input.clear();
hunk_input.update_before(tokenize(
- &old_text[old_byte_range.clone()],
+ &old_text[old_byte_range],
options.language_scope.clone(),
));
hunk_input.update_after(tokenize(
- &new_text[new_byte_range.clone()],
+ &new_text[new_byte_range],
options.language_scope.clone(),
));
diff_internal(&hunk_input, |old_byte_range, new_byte_range, _, _| {
@@ -103,7 +103,7 @@ pub fn text_diff_with_options(
let replacement_text = if new_byte_range.is_empty() {
empty.clone()
} else {
- new_text[new_byte_range.clone()].into()
+ new_text[new_byte_range].into()
};
edits.push((old_byte_range, replacement_text));
});
@@ -111,9 +111,9 @@ pub fn text_diff_with_options(
let replacement_text = if new_byte_range.is_empty() {
empty.clone()
} else {
- new_text[new_byte_range.clone()].into()
+ new_text[new_byte_range].into()
};
- edits.push((old_byte_range.clone(), replacement_text));
+ edits.push((old_byte_range, replacement_text));
}
},
);
@@ -186,7 +186,7 @@ fn tokenize(text: &str, language_scope: Option<LanguageScope>) -> impl Iterator<
let mut prev = None;
let mut start_ix = 0;
iter::from_fn(move || {
- while let Some((ix, c)) = chars.next() {
+ for (ix, c) in chars.by_ref() {
let mut token = None;
let kind = classifier.kind(c);
if let Some((prev_char, prev_kind)) = prev
@@ -11,13 +11,15 @@ use std::{
use async_trait::async_trait;
use collections::HashMap;
+use fs::Fs;
use gpui::{AsyncApp, SharedString};
use settings::WorktreeId;
+use task::ShellKind;
use crate::{LanguageName, ManifestName};
/// Represents a single toolchain.
-#[derive(Clone, Debug, Eq)]
+#[derive(Clone, Eq, Debug)]
pub struct Toolchain {
/// User-facing label
pub name: SharedString,
@@ -27,38 +29,104 @@ pub struct Toolchain {
pub as_json: serde_json::Value,
}
+/// Declares a scope of a toolchain added by user.
+///
+/// When the user adds a toolchain, we give them an option to see that toolchain in:
+/// - All of their projects
+/// - A project they're currently in.
+/// - Only in the subproject they're currently in.
+#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub enum ToolchainScope {
+ Subproject(WorktreeId, Arc<Path>),
+ Project,
+ /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines.
+ Global,
+}
+
+impl ToolchainScope {
+ pub fn label(&self) -> &'static str {
+ match self {
+ ToolchainScope::Subproject(_, _) => "Subproject",
+ ToolchainScope::Project => "Project",
+ ToolchainScope::Global => "Global",
+ }
+ }
+
+ pub fn description(&self) -> &'static str {
+ match self {
+ ToolchainScope::Subproject(_, _) => {
+ "Available only in the subproject you're currently in."
+ }
+ ToolchainScope::Project => "Available in all locations in your current project.",
+ ToolchainScope::Global => "Available in all of your projects on this machine.",
+ }
+ }
+}
+
impl std::hash::Hash for Toolchain {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- self.name.hash(state);
- self.path.hash(state);
- self.language_name.hash(state);
+ let Self {
+ name,
+ path,
+ language_name,
+ as_json: _,
+ } = self;
+ name.hash(state);
+ path.hash(state);
+ language_name.hash(state);
}
}
impl PartialEq for Toolchain {
fn eq(&self, other: &Self) -> bool {
+ let Self {
+ name,
+ path,
+ language_name,
+ as_json: _,
+ } = self;
// Do not use as_json for comparisons; it shouldn't impact equality, as it's not user-surfaced.
// Thus, there could be multiple entries that look the same in the UI.
- (&self.name, &self.path, &self.language_name).eq(&(
- &other.name,
- &other.path,
- &other.language_name,
- ))
+ (name, path, language_name).eq(&(&other.name, &other.path, &other.language_name))
}
}
#[async_trait]
-pub trait ToolchainLister: Send + Sync {
+pub trait ToolchainLister: Send + Sync + 'static {
+ /// List all available toolchains for a given path.
async fn list(
&self,
worktree_root: PathBuf,
- subroot_relative_path: Option<Arc<Path>>,
+ subroot_relative_path: Arc<Path>,
project_env: Option<HashMap<String, String>>,
) -> ToolchainList;
- // Returns a term which we should use in UI to refer to a toolchain.
- fn term(&self) -> SharedString;
- /// Returns the name of the manifest file for this toolchain.
- fn manifest_name(&self) -> ManifestName;
+
+ /// Given a user-created toolchain, resolve lister-specific details.
+ /// Put another way: fill in the details of the toolchain so the user does not have to.
+ async fn resolve(
+ &self,
+ path: PathBuf,
+ project_env: Option<HashMap<String, String>>,
+ ) -> anyhow::Result<Toolchain>;
+
+ async fn activation_script(
+ &self,
+ toolchain: &Toolchain,
+ shell: ShellKind,
+ fs: &dyn Fs,
+ ) -> Vec<String>;
+ /// Returns various "static" bits of information about this toolchain lister. This function should be pure.
+ fn meta(&self) -> ToolchainMetadata;
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct ToolchainMetadata {
+ /// Returns a term which we should use in UI to refer to toolchains produced by a given `[ToolchainLister]`.
+ pub term: SharedString,
+ /// A user-facing placeholder describing the semantic meaning of a path to a new toolchain.
+ pub new_toolchain_placeholder: SharedString,
+ /// The name of the manifest file for this toolchain.
+ pub manifest_name: ManifestName,
}
#[async_trait(?Send)]
@@ -82,7 +150,7 @@ pub trait LocalLanguageToolchainStore: Send + Sync + 'static {
) -> Option<Toolchain>;
}
-#[async_trait(?Send )]
+#[async_trait(?Send)]
impl<T: LocalLanguageToolchainStore> LanguageToolchainStore for T {
async fn active_toolchain(
self: Arc<Self>,
@@ -96,7 +164,7 @@ impl<T: LocalLanguageToolchainStore> LanguageToolchainStore for T {
}
type DefaultIndex = usize;
-#[derive(Default, Clone)]
+#[derive(Default, Clone, Debug)]
pub struct ToolchainList {
pub toolchains: Vec<Toolchain>,
pub default: Option<DefaultIndex>,
@@ -204,6 +204,7 @@ impl LspAdapter for ExtensionLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
unreachable!("get_language_server_command is overridden")
}
@@ -397,6 +398,10 @@ impl LspAdapter for ExtensionLspAdapter {
Ok(labels_from_extension(labels, language))
}
+
+ fn is_extension(&self) -> bool {
+ true
+ }
}
fn labels_from_extension(
@@ -17,6 +17,7 @@ test-support = []
[dependencies]
anthropic = { workspace = true, features = ["schemars"] }
+open_router.workspace = true
anyhow.workspace = true
base64.workspace = true
client.workspace = true
@@ -4,12 +4,16 @@ use crate::{
LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
LanguageModelRequest, LanguageModelToolChoice,
};
+use anyhow::anyhow;
use futures::{FutureExt, channel::mpsc, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Entity, Task, Window};
use http_client::Result;
use parking_lot::Mutex;
use smol::stream::StreamExt;
-use std::sync::Arc;
+use std::sync::{
+ Arc,
+ atomic::{AtomicBool, Ordering::SeqCst},
+};
#[derive(Clone)]
pub struct FakeLanguageModelProvider {
@@ -106,6 +110,7 @@ pub struct FakeLanguageModel {
>,
)>,
>,
+ forbid_requests: AtomicBool,
}
impl Default for FakeLanguageModel {
@@ -114,11 +119,20 @@ impl Default for FakeLanguageModel {
provider_id: LanguageModelProviderId::from("fake".to_string()),
provider_name: LanguageModelProviderName::from("Fake".to_string()),
current_completion_txs: Mutex::new(Vec::new()),
+ forbid_requests: AtomicBool::new(false),
}
}
}
impl FakeLanguageModel {
+ pub fn allow_requests(&self) {
+ self.forbid_requests.store(false, SeqCst);
+ }
+
+ pub fn forbid_requests(&self) {
+ self.forbid_requests.store(true, SeqCst);
+ }
+
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
.lock()
@@ -251,9 +265,18 @@ impl LanguageModel for FakeLanguageModel {
LanguageModelCompletionError,
>,
> {
- let (tx, rx) = mpsc::unbounded();
- self.current_completion_txs.lock().push((request, tx));
- async move { Ok(rx.boxed()) }.boxed()
+ if self.forbid_requests.load(SeqCst) {
+ async move {
+ Err(LanguageModelCompletionError::Other(anyhow!(
+ "requests are forbidden"
+ )))
+ }
+ .boxed()
+ } else {
+ let (tx, rx) = mpsc::unbounded();
+ self.current_completion_txs.lock().push((request, tx));
+ async move { Ok(rx.boxed()) }.boxed()
+ }
}
fn as_fake(&self) -> &Self {
@@ -14,9 +14,10 @@ use client::Client;
use cloud_llm_client::{CompletionMode, CompletionRequestStatus};
use futures::FutureExt;
use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
-use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window};
+use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window};
use http_client::{StatusCode, http};
use icons::IconName;
+use open_router::OpenRouterError;
use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
@@ -54,7 +55,7 @@ pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
pub fn init(client: Arc<Client>, cx: &mut App) {
init_settings(cx);
- RefreshLlmTokenListener::register(client.clone(), cx);
+ RefreshLlmTokenListener::register(client, cx);
}
pub fn init_settings(cx: &mut App) {
@@ -300,7 +301,7 @@ impl From<AnthropicError> for LanguageModelCompletionError {
},
AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
provider,
- retry_after: retry_after,
+ retry_after,
},
AnthropicError::ApiError(api_error) => api_error.into(),
}
@@ -347,6 +348,72 @@ impl From<anthropic::ApiError> for LanguageModelCompletionError {
}
}
+impl From<OpenRouterError> for LanguageModelCompletionError {
+ fn from(error: OpenRouterError) -> Self {
+ let provider = LanguageModelProviderName::new("OpenRouter");
+ match error {
+ OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+ OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+ OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error },
+ OpenRouterError::DeserializeResponse(error) => {
+ Self::DeserializeResponse { provider, error }
+ }
+ OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+ OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded {
+ provider,
+ retry_after: Some(retry_after),
+ },
+ OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+ provider,
+ retry_after,
+ },
+ OpenRouterError::ApiError(api_error) => api_error.into(),
+ }
+ }
+}
+
+impl From<open_router::ApiError> for LanguageModelCompletionError {
+ fn from(error: open_router::ApiError) -> Self {
+ use open_router::ApiErrorCode::*;
+ let provider = LanguageModelProviderName::new("OpenRouter");
+ match error.code {
+ InvalidRequestError => Self::BadRequestFormat {
+ provider,
+ message: error.message,
+ },
+ AuthenticationError => Self::AuthenticationError {
+ provider,
+ message: error.message,
+ },
+ PaymentRequiredError => Self::AuthenticationError {
+ provider,
+ message: format!("Payment required: {}", error.message),
+ },
+ PermissionError => Self::PermissionError {
+ provider,
+ message: error.message,
+ },
+ RequestTimedOut => Self::HttpResponseError {
+ provider,
+ status_code: StatusCode::REQUEST_TIMEOUT,
+ message: error.message,
+ },
+ RateLimitError => Self::RateLimitExceeded {
+ provider,
+ retry_after: None,
+ },
+ ApiError => Self::ApiInternalServerError {
+ provider,
+ message: error.message,
+ },
+ OverloadedError => Self::ServerOverloaded {
+ provider,
+ retry_after: None,
+ },
+ }
+ }
+}
+
/// Indicates the format used to define the input schema for a language model tool.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum LanguageModelToolSchemaFormat {
@@ -538,7 +605,7 @@ pub trait LanguageModel: Send + Sync {
if let Some(first_event) = events.next().await {
match first_event {
Ok(LanguageModelCompletionEvent::StartMessage { message_id: id }) => {
- message_id = Some(id.clone());
+ message_id = Some(id);
}
Ok(LanguageModelCompletionEvent::Text(text)) => {
first_item_text = Some(text);
@@ -640,24 +707,14 @@ pub trait LanguageModelProvider: 'static {
window: &mut Window,
cx: &mut App,
) -> AnyView;
- fn must_accept_terms(&self, _cx: &App) -> bool {
- false
- }
- fn render_accept_terms(
- &self,
- _view: LanguageModelProviderTosView,
- _cx: &mut App,
- ) -> Option<AnyElement> {
- None
- }
fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>>;
}
-#[derive(Default, Clone, Copy)]
+#[derive(Default, Clone)]
pub enum ConfigurationViewTargetAgent {
#[default]
ZedAgent,
- Other(&'static str),
+ Other(SharedString),
}
#[derive(PartialEq, Eq)]
@@ -82,7 +82,7 @@ impl LlmApiToken {
let response = client.cloud_client().create_llm_token(system_id).await?;
*lock = Some(response.token.0.clone());
- Ok(response.token.0.clone())
+ Ok(response.token.0)
}
}
@@ -25,9 +25,6 @@ pub enum ConfigurationError {
ModelNotFound,
#[error("{} LLM provider is not configured.", .0.name().0)]
ProviderNotAuthenticated(Arc<dyn LanguageModelProvider>),
- #[error("Using the {} LLM provider requires accepting the Terms of Service.",
- .0.name().0)]
- ProviderPendingTermsAcceptance(Arc<dyn LanguageModelProvider>),
}
impl std::fmt::Debug for ConfigurationError {
@@ -38,9 +35,6 @@ impl std::fmt::Debug for ConfigurationError {
Self::ProviderNotAuthenticated(provider) => {
write!(f, "ProviderNotAuthenticated({})", provider.id())
}
- Self::ProviderPendingTermsAcceptance(provider) => {
- write!(f, "ProviderPendingTermsAcceptance({})", provider.id())
- }
}
}
}
@@ -200,12 +194,6 @@ impl LanguageModelRegistry {
return Some(ConfigurationError::ProviderNotAuthenticated(model.provider));
}
- if model.provider.must_accept_terms(cx) {
- return Some(ConfigurationError::ProviderPendingTermsAcceptance(
- model.provider,
- ));
- }
-
None
}
@@ -220,6 +208,7 @@ impl LanguageModelRegistry {
) -> impl Iterator<Item = Arc<dyn LanguageModel>> + 'a {
self.providers
.values()
+ .filter(|provider| provider.is_authenticated(cx))
.flat_map(|provider| provider.provided_models(cx))
}
@@ -19,7 +19,7 @@ impl Role {
}
}
- pub fn to_proto(&self) -> proto::LanguageModelRole {
+ pub fn to_proto(self) -> proto::LanguageModelRole {
match self {
Role::User => proto::LanguageModelRole::LanguageModelUser,
Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant,
@@ -104,7 +104,7 @@ fn register_language_model_providers(
cx: &mut Context<LanguageModelRegistry>,
) {
registry.register_provider(
- CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx),
+ CloudLanguageModelProvider::new(user_store, client.clone(), cx),
cx,
);
@@ -197,7 +197,7 @@ impl AnthropicLanguageModelProvider {
})
}
- pub fn api_key(cx: &mut App) -> Task<Result<ApiKey>> {
+ pub fn api_key(cx: &mut App) -> Task<Result<ApiKey, AuthenticateError>> {
let credentials_provider = <dyn CredentialsProvider>::global(cx);
let api_url = AllLanguageModelSettings::get_global(cx)
.anthropic
@@ -424,14 +424,21 @@ impl AnthropicModel {
return futures::future::ready(Err(anyhow!("App state dropped").into())).boxed();
};
+ let beta_headers = self.model.beta_headers();
+
async move {
let Some(api_key) = api_key else {
return Err(LanguageModelCompletionError::NoApiKey {
provider: PROVIDER_NAME,
});
};
- let request =
- anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
+ let request = anthropic::stream_completion(
+ http_client.as_ref(),
+ &api_url,
+ &api_key,
+ request,
+ beta_headers,
+ );
request.await.map_err(Into::into)
}
.boxed()
@@ -1041,9 +1048,9 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
- .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match self.target_agent {
- ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic",
- ConfigurationViewTargetAgent::Other(agent) => agent,
+ .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match &self.target_agent {
+ ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic".into(),
+ ConfigurationViewTargetAgent::Other(agent) => agent.clone(),
})))
.child(
List::new()
@@ -917,7 +917,7 @@ pub fn map_to_language_model_completion_events(
Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking {
ReasoningContentBlockDelta::Text(thoughts) => {
Some(Ok(LanguageModelCompletionEvent::Thinking {
- text: thoughts.clone(),
+ text: thoughts,
signature: None,
}))
}
@@ -968,7 +968,7 @@ pub fn map_to_language_model_completion_events(
id: tool_use.id.into(),
name: tool_use.name.into(),
is_input_complete: true,
- raw_input: tool_use.input_json.clone(),
+ raw_input: tool_use.input_json,
input,
},
))
@@ -1086,21 +1086,18 @@ impl ConfigurationView {
.access_key_id_editor
.read(cx)
.text(cx)
- .to_string()
.trim()
.to_string();
let secret_access_key = self
.secret_access_key_editor
.read(cx)
.text(cx)
- .to_string()
.trim()
.to_string();
let session_token = self
.session_token_editor
.read(cx)
.text(cx)
- .to_string()
.trim()
.to_string();
let session_token = if session_token.is_empty() {
@@ -1108,13 +1105,7 @@ impl ConfigurationView {
} else {
Some(session_token)
};
- let region = self
- .region_editor
- .read(cx)
- .text(cx)
- .to_string()
- .trim()
- .to_string();
+ let region = self.region_editor.read(cx).text(cx).trim().to_string();
let region = if region.is_empty() {
"us-east-1".to_string()
} else {
@@ -23,9 +23,9 @@ use language_model::{
AuthenticateError, LanguageModel, LanguageModelCacheConfiguration,
LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
- LanguageModelProviderState, LanguageModelProviderTosView, LanguageModelRequest,
- LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken,
- ModelRequestLimitReachedError, PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
+ LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice,
+ LanguageModelToolSchemaFormat, LlmApiToken, ModelRequestLimitReachedError,
+ PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
};
use release_channel::AppVersion;
use schemars::JsonSchema;
@@ -118,7 +118,6 @@ pub struct State {
llm_api_token: LlmApiToken,
user_store: Entity<UserStore>,
status: client::Status,
- accept_terms_of_service_task: Option<Task<Result<()>>>,
models: Vec<Arc<cloud_llm_client::LanguageModel>>,
default_model: Option<Arc<cloud_llm_client::LanguageModel>>,
default_fast_model: Option<Arc<cloud_llm_client::LanguageModel>>,
@@ -140,9 +139,8 @@ impl State {
Self {
client: client.clone(),
llm_api_token: LlmApiToken::default(),
- user_store: user_store.clone(),
+ user_store,
status,
- accept_terms_of_service_task: None,
models: Vec::new(),
default_model: None,
default_fast_model: None,
@@ -197,24 +195,6 @@ impl State {
state.update(cx, |_, cx| cx.notify())
})
}
-
- fn has_accepted_terms_of_service(&self, cx: &App) -> bool {
- self.user_store.read(cx).has_accepted_terms_of_service()
- }
-
- fn accept_terms_of_service(&mut self, cx: &mut Context<Self>) {
- let user_store = self.user_store.clone();
- self.accept_terms_of_service_task = Some(cx.spawn(async move |this, cx| {
- let _ = user_store
- .update(cx, |store, cx| store.accept_terms_of_service(cx))?
- .await;
- this.update(cx, |this, cx| {
- this.accept_terms_of_service_task = None;
- cx.notify()
- })
- }));
- }
-
fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context<Self>) {
let mut models = Vec::new();
@@ -307,7 +287,7 @@ impl CloudLanguageModelProvider {
Self {
client,
- state: state.clone(),
+ state,
_maintain_client_status: maintain_client_status,
}
}
@@ -320,7 +300,7 @@ impl CloudLanguageModelProvider {
Arc::new(CloudLanguageModel {
id: LanguageModelId(SharedString::from(model.id.0.clone())),
model,
- llm_api_token: llm_api_token.clone(),
+ llm_api_token,
client: self.client.clone(),
request_limiter: RateLimiter::new(4),
})
@@ -384,7 +364,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
fn is_authenticated(&self, cx: &App) -> bool {
let state = self.state.read(cx);
- !state.is_signed_out(cx) && state.has_accepted_terms_of_service(cx)
+ !state.is_signed_out(cx)
}
fn authenticate(&self, _cx: &mut App) -> Task<Result<(), AuthenticateError>> {
@@ -401,112 +381,11 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
.into()
}
- fn must_accept_terms(&self, cx: &App) -> bool {
- !self.state.read(cx).has_accepted_terms_of_service(cx)
- }
-
- fn render_accept_terms(
- &self,
- view: LanguageModelProviderTosView,
- cx: &mut App,
- ) -> Option<AnyElement> {
- let state = self.state.read(cx);
- if state.has_accepted_terms_of_service(cx) {
- return None;
- }
- Some(
- render_accept_terms(view, state.accept_terms_of_service_task.is_some(), {
- let state = self.state.clone();
- move |_window, cx| {
- state.update(cx, |state, cx| state.accept_terms_of_service(cx));
- }
- })
- .into_any_element(),
- )
- }
-
fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
Task::ready(Ok(()))
}
}
-fn render_accept_terms(
- view_kind: LanguageModelProviderTosView,
- accept_terms_of_service_in_progress: bool,
- accept_terms_callback: impl Fn(&mut Window, &mut App) + 'static,
-) -> impl IntoElement {
- let thread_fresh_start = matches!(view_kind, LanguageModelProviderTosView::ThreadFreshStart);
- let thread_empty_state = matches!(view_kind, LanguageModelProviderTosView::ThreadEmptyState);
-
- let terms_button = Button::new("terms_of_service", "Terms of Service")
- .style(ButtonStyle::Subtle)
- .icon(IconName::ArrowUpRight)
- .icon_color(Color::Muted)
- .icon_size(IconSize::Small)
- .when(thread_empty_state, |this| this.label_size(LabelSize::Small))
- .on_click(move |_, _window, cx| cx.open_url("https://zed.dev/terms-of-service"));
-
- let button_container = h_flex().child(
- Button::new("accept_terms", "I accept the Terms of Service")
- .when(!thread_empty_state, |this| {
- this.full_width()
- .style(ButtonStyle::Tinted(TintColor::Accent))
- .icon(IconName::Check)
- .icon_position(IconPosition::Start)
- .icon_size(IconSize::Small)
- })
- .when(thread_empty_state, |this| {
- this.style(ButtonStyle::Tinted(TintColor::Warning))
- .label_size(LabelSize::Small)
- })
- .disabled(accept_terms_of_service_in_progress)
- .on_click(move |_, window, cx| (accept_terms_callback)(window, cx)),
- );
-
- if thread_empty_state {
- h_flex()
- .w_full()
- .flex_wrap()
- .justify_between()
- .child(
- h_flex()
- .child(
- Label::new("To start using Zed AI, please read and accept the")
- .size(LabelSize::Small),
- )
- .child(terms_button),
- )
- .child(button_container)
- } else {
- v_flex()
- .w_full()
- .gap_2()
- .child(
- h_flex()
- .flex_wrap()
- .when(thread_fresh_start, |this| this.justify_center())
- .child(Label::new(
- "To start using Zed AI, please read and accept the",
- ))
- .child(terms_button),
- )
- .child({
- match view_kind {
- LanguageModelProviderTosView::TextThreadPopup => {
- button_container.w_full().justify_end()
- }
- LanguageModelProviderTosView::Configuration => {
- button_container.w_full().justify_start()
- }
- LanguageModelProviderTosView::ThreadFreshStart => {
- button_container.w_full().justify_center()
- }
- LanguageModelProviderTosView::ThreadEmptyState => div().w_0(),
- }
- })
- }
-}
-
pub struct CloudLanguageModel {
id: LanguageModelId,
model: Arc<cloud_llm_client::LanguageModel>,
@@ -1107,10 +986,7 @@ struct ZedAiConfiguration {
plan: Option<Plan>,
subscription_period: Option<(DateTime<Utc>, DateTime<Utc>)>,
eligible_for_trial: bool,
- has_accepted_terms_of_service: bool,
account_too_young: bool,
- accept_terms_of_service_in_progress: bool,
- accept_terms_of_service_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
sign_in_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
}
@@ -1176,58 +1052,30 @@ impl RenderOnce for ZedAiConfiguration {
);
}
- v_flex()
- .gap_2()
- .w_full()
- .when(!self.has_accepted_terms_of_service, |this| {
- this.child(render_accept_terms(
- LanguageModelProviderTosView::Configuration,
- self.accept_terms_of_service_in_progress,
- {
- let callback = self.accept_terms_of_service_callback.clone();
- move |window, cx| (callback)(window, cx)
- },
- ))
- })
- .map(|this| {
- if self.has_accepted_terms_of_service && self.account_too_young {
- this.child(young_account_banner).child(
- Button::new("upgrade", "Upgrade to Pro")
- .style(ui::ButtonStyle::Tinted(ui::TintColor::Accent))
- .full_width()
- .on_click(|_, _, cx| {
- cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx))
- }),
- )
- } else if self.has_accepted_terms_of_service {
- this.text_sm()
- .child(subscription_text)
- .child(manage_subscription_buttons)
- } else {
- this
- }
- })
- .when(self.has_accepted_terms_of_service, |this| this)
+ v_flex().gap_2().w_full().map(|this| {
+ if self.account_too_young {
+ this.child(young_account_banner).child(
+ Button::new("upgrade", "Upgrade to Pro")
+ .style(ui::ButtonStyle::Tinted(ui::TintColor::Accent))
+ .full_width()
+ .on_click(|_, _, cx| cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx))),
+ )
+ } else {
+ this.text_sm()
+ .child(subscription_text)
+ .child(manage_subscription_buttons)
+ }
+ })
}
}
struct ConfigurationView {
state: Entity<State>,
- accept_terms_of_service_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
sign_in_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
}
impl ConfigurationView {
fn new(state: Entity<State>) -> Self {
- let accept_terms_of_service_callback = Arc::new({
- let state = state.clone();
- move |_window: &mut Window, cx: &mut App| {
- state.update(cx, |state, cx| {
- state.accept_terms_of_service(cx);
- });
- }
- });
-
let sign_in_callback = Arc::new({
let state = state.clone();
move |_window: &mut Window, cx: &mut App| {
@@ -1239,7 +1087,6 @@ impl ConfigurationView {
Self {
state,
- accept_terms_of_service_callback,
sign_in_callback,
}
}
@@ -1255,10 +1102,7 @@ impl Render for ConfigurationView {
plan: user_store.plan(),
subscription_period: user_store.subscription_period(),
eligible_for_trial: user_store.trial_started_at().is_none(),
- has_accepted_terms_of_service: state.has_accepted_terms_of_service(cx),
account_too_young: user_store.account_too_young(),
- accept_terms_of_service_in_progress: state.accept_terms_of_service_task.is_some(),
- accept_terms_of_service_callback: self.accept_terms_of_service_callback.clone(),
sign_in_callback: self.sign_in_callback.clone(),
}
}
@@ -1283,7 +1127,6 @@ impl Component for ZedAiConfiguration {
plan: Option<Plan>,
eligible_for_trial: bool,
account_too_young: bool,
- has_accepted_terms_of_service: bool,
) -> AnyElement {
ZedAiConfiguration {
is_connected,
@@ -1292,10 +1135,7 @@ impl Component for ZedAiConfiguration {
.is_some()
.then(|| (Utc::now(), Utc::now() + chrono::Duration::days(7))),
eligible_for_trial,
- has_accepted_terms_of_service,
account_too_young,
- accept_terms_of_service_in_progress: false,
- accept_terms_of_service_callback: Arc::new(|_, _| {}),
sign_in_callback: Arc::new(|_, _| {}),
}
.into_any_element()
@@ -1306,33 +1146,30 @@ impl Component for ZedAiConfiguration {
.p_4()
.gap_4()
.children(vec![
- single_example(
- "Not connected",
- configuration(false, None, false, false, true),
- ),
+ single_example("Not connected", configuration(false, None, false, false)),
single_example(
"Accept Terms of Service",
- configuration(true, None, true, false, false),
+ configuration(true, None, true, false),
),
single_example(
"No Plan - Not eligible for trial",
- configuration(true, None, false, false, true),
+ configuration(true, None, false, false),
),
single_example(
"No Plan - Eligible for trial",
- configuration(true, None, true, false, true),
+ configuration(true, None, true, false),
),
single_example(
"Free Plan",
- configuration(true, Some(Plan::ZedFree), true, false, true),
+ configuration(true, Some(Plan::ZedFree), true, false),
),
single_example(
"Zed Pro Trial Plan",
- configuration(true, Some(Plan::ZedProTrial), true, false, true),
+ configuration(true, Some(Plan::ZedProTrial), true, false),
),
single_example(
"Zed Pro Plan",
- configuration(true, Some(Plan::ZedPro), true, false, true),
+ configuration(true, Some(Plan::ZedPro), true, false),
),
])
.into_any_element(),
@@ -14,10 +14,7 @@ use copilot::{Copilot, Status};
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use futures::{FutureExt, Stream, StreamExt};
-use gpui::{
- Action, Animation, AnimationExt, AnyView, App, AsyncApp, Entity, Render, Subscription, Task,
- Transformation, percentage, svg,
-};
+use gpui::{Action, AnyView, App, AsyncApp, Entity, Render, Subscription, Task, svg};
use language::language_settings::all_language_settings;
use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
@@ -28,10 +25,11 @@ use language_model::{
StopReason, TokenUsage,
};
use settings::SettingsStore;
-use std::time::Duration;
-use ui::prelude::*;
+use ui::{CommonAnimationExt, prelude::*};
use util::debug_panic;
+use crate::provider::x_ai::count_xai_tokens;
+
use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
use super::open_ai::count_open_ai_tokens;
@@ -228,7 +226,9 @@ impl LanguageModel for CopilotChatLanguageModel {
ModelVendor::OpenAI | ModelVendor::Anthropic => {
LanguageModelToolSchemaFormat::JsonSchema
}
- ModelVendor::Google => LanguageModelToolSchemaFormat::JsonSchemaSubset,
+ ModelVendor::Google | ModelVendor::XAI => {
+ LanguageModelToolSchemaFormat::JsonSchemaSubset
+ }
}
}
@@ -256,6 +256,10 @@ impl LanguageModel for CopilotChatLanguageModel {
match self.model.vendor() {
ModelVendor::Anthropic => count_anthropic_tokens(request, cx),
ModelVendor::Google => count_google_tokens(request, cx),
+ ModelVendor::XAI => {
+ let model = x_ai::Model::from_id(self.model.id()).unwrap_or_default();
+ count_xai_tokens(request, model, cx)
+ }
ModelVendor::OpenAI => {
let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default();
count_open_ai_tokens(request, model, cx)
@@ -475,7 +479,6 @@ fn into_copilot_chat(
}
}
- let mut tool_called = false;
let mut messages: Vec<ChatMessage> = Vec::new();
for message in request_messages {
match message.role {
@@ -545,7 +548,6 @@ fn into_copilot_chat(
let mut tool_calls = Vec::new();
for content in &message.content {
if let MessageContent::ToolUse(tool_use) = content {
- tool_called = true;
tool_calls.push(ToolCall {
id: tool_use.id.to_string(),
content: copilot::copilot_chat::ToolCallContent::Function {
@@ -590,7 +592,7 @@ fn into_copilot_chat(
}
}
- let mut tools = request
+ let tools = request
.tools
.iter()
.map(|tool| Tool::Function {
@@ -602,22 +604,6 @@ fn into_copilot_chat(
})
.collect::<Vec<_>>();
- // The API will return a Bad Request (with no error message) when tools
- // were used previously in the conversation but no tools are provided as
- // part of this request. Inserting a dummy tool seems to circumvent this
- // error.
- if tool_called && tools.is_empty() {
- tools.push(Tool::Function {
- function: copilot::copilot_chat::Function {
- name: "noop".to_string(),
- description: "No operation".to_string(),
- parameters: serde_json::json!({
- "type": "object"
- }),
- },
- });
- }
-
Ok(CopilotChatRequest {
intent: true,
n: 1,
@@ -682,11 +668,7 @@ impl Render for ConfigurationView {
}),
)
} else {
- let loading_icon = Icon::new(IconName::ArrowCircle).with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(4)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- );
+ let loading_icon = Icon::new(IconName::ArrowCircle).with_rotate_animation(4);
const ERROR_LABEL: &str = "Copilot Chat requires an active GitHub Copilot subscription. Please ensure Copilot is configured and try again, or use a different Assistant provider.";
@@ -12,9 +12,9 @@ use gpui::{
};
use http_client::HttpClient;
use language_model::{
- AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent,
- LanguageModelToolChoice, LanguageModelToolSchemaFormat, LanguageModelToolUse,
- LanguageModelToolUseId, MessageContent, StopReason,
+ AuthenticateError, ConfigurationViewTargetAgent, LanguageModelCompletionError,
+ LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolSchemaFormat,
+ LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason,
};
use language_model::{
LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider,
@@ -37,6 +37,8 @@ use util::ResultExt;
use crate::AllLanguageModelSettings;
use crate::ui::InstructionListItem;
+use super::anthropic::ApiKey;
+
const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID;
const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME;
@@ -198,6 +200,33 @@ impl GoogleLanguageModelProvider {
request_limiter: RateLimiter::new(4),
})
}
+
+ pub fn api_key(cx: &mut App) -> Task<Result<ApiKey>> {
+ let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let api_url = AllLanguageModelSettings::get_global(cx)
+ .google
+ .api_url
+ .clone();
+
+ if let Ok(key) = std::env::var(GEMINI_API_KEY_VAR) {
+ Task::ready(Ok(ApiKey {
+ key,
+ from_env: true,
+ }))
+ } else {
+ cx.spawn(async move |cx| {
+ let (_, api_key) = credentials_provider
+ .read_credentials(&api_url, cx)
+ .await?
+ .ok_or(AuthenticateError::CredentialsNotFound)?;
+
+ Ok(ApiKey {
+ key: String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
+ from_env: false,
+ })
+ })
+ }
+ }
}
impl LanguageModelProviderState for GoogleLanguageModelProvider {
@@ -279,11 +308,11 @@ impl LanguageModelProvider for GoogleLanguageModelProvider {
fn configuration_view(
&self,
- _target_agent: language_model::ConfigurationViewTargetAgent,
+ target_agent: language_model::ConfigurationViewTargetAgent,
window: &mut Window,
cx: &mut App,
) -> AnyView {
- cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
+ cx.new(|cx| ConfigurationView::new(self.state.clone(), target_agent, window, cx))
.into()
}
@@ -387,7 +416,7 @@ impl LanguageModel for GoogleLanguageModel {
cx: &App,
) -> BoxFuture<'static, Result<u64>> {
let model_id = self.model.request_id().to_string();
- let request = into_google(request, model_id.clone(), self.model.mode());
+ let request = into_google(request, model_id, self.model.mode());
let http_client = self.http_client.clone();
let api_key = self.state.read(cx).api_key.clone();
@@ -577,7 +606,7 @@ pub fn into_google(
top_k: None,
}),
safety_settings: None,
- tools: (request.tools.len() > 0).then(|| {
+ tools: (!request.tools.is_empty()).then(|| {
vec![google_ai::Tool {
function_declarations: request
.tools
@@ -776,11 +805,17 @@ fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage {
struct ConfigurationView {
api_key_editor: Entity<Editor>,
state: gpui::Entity<State>,
+ target_agent: language_model::ConfigurationViewTargetAgent,
load_credentials_task: Option<Task<()>>,
}
impl ConfigurationView {
- fn new(state: gpui::Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
+ fn new(
+ state: gpui::Entity<State>,
+ target_agent: language_model::ConfigurationViewTargetAgent,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
cx.observe(&state, |_, _, cx| {
cx.notify();
})
@@ -810,6 +845,7 @@ impl ConfigurationView {
editor.set_placeholder_text("AIzaSy...", cx);
editor
}),
+ target_agent,
state,
load_credentials_task,
}
@@ -885,7 +921,10 @@ impl Render for ConfigurationView {
v_flex()
.size_full()
.on_action(cx.listener(Self::save_api_key))
- .child(Label::new("To use Zed's agent with Google AI, you need to add an API key. Follow these steps:"))
+ .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match &self.target_agent {
+ ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Google AI".into(),
+ ConfigurationViewTargetAgent::Other(agent) => agent.clone(),
+ })))
.child(
List::new()
.child(InstructionListItem::new(
@@ -210,7 +210,7 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider {
.map(|model| {
Arc::new(LmStudioLanguageModel {
id: LanguageModelId::from(model.name.clone()),
- model: model.clone(),
+ model,
http_client: self.http_client.clone(),
request_limiter: RateLimiter::new(4),
}) as Arc<dyn LanguageModel>
@@ -11,8 +11,8 @@ use language_model::{
LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
};
use ollama::{
- ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionTool,
- OllamaToolCall, get_models, show_model, stream_chat_completion,
+ ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionCall,
+ OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion,
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -237,7 +237,7 @@ impl LanguageModelProvider for OllamaLanguageModelProvider {
.map(|model| {
Arc::new(OllamaLanguageModel {
id: LanguageModelId::from(model.name.clone()),
- model: model.clone(),
+ model,
http_client: self.http_client.clone(),
request_limiter: RateLimiter::new(4),
}) as Arc<dyn LanguageModel>
@@ -282,59 +282,85 @@ impl OllamaLanguageModel {
fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
let supports_vision = self.model.supports_vision.unwrap_or(false);
- ChatRequest {
- model: self.model.name.clone(),
- messages: request
- .messages
- .into_iter()
- .map(|msg| {
- let images = if supports_vision {
- msg.content
- .iter()
- .filter_map(|content| match content {
- MessageContent::Image(image) => Some(image.source.to_string()),
- _ => None,
- })
- .collect::<Vec<String>>()
- } else {
- vec![]
- };
-
- match msg.role {
- Role::User => ChatMessage::User {
+ let mut messages = Vec::with_capacity(request.messages.len());
+
+ for mut msg in request.messages.into_iter() {
+ let images = if supports_vision {
+ msg.content
+ .iter()
+ .filter_map(|content| match content {
+ MessageContent::Image(image) => Some(image.source.to_string()),
+ _ => None,
+ })
+ .collect::<Vec<String>>()
+ } else {
+ vec![]
+ };
+
+ match msg.role {
+ Role::User => {
+ for tool_result in msg
+ .content
+ .extract_if(.., |x| matches!(x, MessageContent::ToolResult(..)))
+ {
+ match tool_result {
+ MessageContent::ToolResult(tool_result) => {
+ messages.push(ChatMessage::Tool {
+ tool_name: tool_result.tool_name.to_string(),
+ content: tool_result.content.to_str().unwrap_or("").to_string(),
+ })
+ }
+ _ => unreachable!("Only tool result should be extracted"),
+ }
+ }
+ if !msg.content.is_empty() {
+ messages.push(ChatMessage::User {
content: msg.string_contents(),
images: if images.is_empty() {
None
} else {
Some(images)
},
- },
- Role::Assistant => {
- let content = msg.string_contents();
- let thinking =
- msg.content.into_iter().find_map(|content| match content {
- MessageContent::Thinking { text, .. } if !text.is_empty() => {
- Some(text)
- }
- _ => None,
- });
- ChatMessage::Assistant {
- content,
- tool_calls: None,
- images: if images.is_empty() {
- None
- } else {
- Some(images)
- },
- thinking,
+ })
+ }
+ }
+ Role::Assistant => {
+ let content = msg.string_contents();
+ let mut thinking = None;
+ let mut tool_calls = Vec::new();
+ for content in msg.content.into_iter() {
+ match content {
+ MessageContent::Thinking { text, .. } if !text.is_empty() => {
+ thinking = Some(text)
+ }
+ MessageContent::ToolUse(tool_use) => {
+ tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
+ name: tool_use.name.to_string(),
+ arguments: tool_use.input,
+ }));
}
+ _ => (),
}
- Role::System => ChatMessage::System {
- content: msg.string_contents(),
- },
}
- })
- .collect(),
+ messages.push(ChatMessage::Assistant {
+ content,
+ tool_calls: Some(tool_calls),
+ images: if images.is_empty() {
+ None
+ } else {
+ Some(images)
+ },
+ thinking,
+ })
+ }
+ Role::System => messages.push(ChatMessage::System {
+ content: msg.string_contents(),
+ }),
+ }
+ }
+ ChatRequest {
+ model: self.model.name.clone(),
+ messages,
keep_alive: self.model.keep_alive.clone().unwrap_or_default(),
stream: true,
options: Some(ChatOptions {
@@ -347,7 +373,11 @@ impl OllamaLanguageModel {
.model
.supports_thinking
.map(|supports_thinking| supports_thinking && request.thinking_allowed),
- tools: request.tools.into_iter().map(tool_into_ollama).collect(),
+ tools: if self.model.supports_tools.unwrap_or(false) {
+ request.tools.into_iter().map(tool_into_ollama).collect()
+ } else {
+ vec![]
+ },
}
}
}
@@ -479,6 +509,9 @@ fn map_to_language_model_completion_events(
ChatMessage::System { content } => {
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
}
+ ChatMessage::Tool { content, .. } => {
+ events.push(Ok(LanguageModelCompletionEvent::Text(content)));
+ }
ChatMessage::Assistant {
content,
tool_calls,
@@ -56,13 +56,13 @@ pub struct OpenAiLanguageModelProvider {
pub struct State {
api_key: Option<String>,
api_key_from_env: bool,
+ last_api_url: String,
_subscription: Subscription,
}
const OPENAI_API_KEY_VAR: &str = "OPENAI_API_KEY";
impl State {
- //
fn is_authenticated(&self) -> bool {
self.api_key.is_some()
}
@@ -104,11 +104,7 @@ impl State {
})
}
- fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
- if self.is_authenticated() {
- return Task::ready(Ok(()));
- }
-
+ fn get_api_key(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
let credentials_provider = <dyn CredentialsProvider>::global(cx);
let api_url = AllLanguageModelSettings::get_global(cx)
.openai
@@ -136,14 +132,52 @@ impl State {
Ok(())
})
}
+
+ fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ if self.is_authenticated() {
+ return Task::ready(Ok(()));
+ }
+
+ self.get_api_key(cx)
+ }
}
impl OpenAiLanguageModelProvider {
pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ let initial_api_url = AllLanguageModelSettings::get_global(cx)
+ .openai
+ .api_url
+ .clone();
+
let state = cx.new(|cx| State {
api_key: None,
api_key_from_env: false,
- _subscription: cx.observe_global::<SettingsStore>(|_this: &mut State, cx| {
+ last_api_url: initial_api_url.clone(),
+ _subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let current_api_url = AllLanguageModelSettings::get_global(cx)
+ .openai
+ .api_url
+ .clone();
+
+ if this.last_api_url != current_api_url {
+ this.last_api_url = current_api_url;
+ if !this.api_key_from_env {
+ this.api_key = None;
+ let spawn_task = cx.spawn(async move |handle, cx| {
+ if let Ok(task) = handle.update(cx, |this, cx| this.get_api_key(cx)) {
+ if let Err(_) = task.await {
+ handle
+ .update(cx, |this, _| {
+ this.api_key = None;
+ this.api_key_from_env = false;
+ })
+ .ok();
+ }
+ }
+ });
+ spawn_task.detach();
+ }
+ }
cx.notify();
}),
});
@@ -404,7 +438,7 @@ pub fn into_open_ai(
match content {
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
add_message_content_part(
- open_ai::MessagePart::Text { text: text },
+ open_ai::MessagePart::Text { text },
message.role,
&mut messages,
)
@@ -586,7 +620,9 @@ impl OpenAiEventMapper {
};
if let Some(content) = choice.delta.content.clone() {
- events.push(Ok(LanguageModelCompletionEvent::Text(content)));
+ if !content.is_empty() {
+ events.push(Ok(LanguageModelCompletionEvent::Text(content)));
+ }
}
if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
@@ -9,7 +9,7 @@ use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
- LanguageModelToolChoice, RateLimiter,
+ LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter,
};
use menu;
use open_ai::{ResponseStreamEvent, stream_completion};
@@ -113,11 +113,7 @@ impl State {
})
}
- fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
- if self.is_authenticated() {
- return Task::ready(Ok(()));
- }
-
+ fn get_api_key(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
let credentials_provider = <dyn CredentialsProvider>::global(cx);
let env_var_name = self.env_var_name.clone();
let api_url = self.settings.api_url.clone();
@@ -143,6 +139,14 @@ impl State {
Ok(())
})
}
+
+ fn authenticate(&self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ if self.is_authenticated() {
+ return Task::ready(Ok(()));
+ }
+
+ self.get_api_key(cx)
+ }
}
impl OpenAiCompatibleLanguageModelProvider {
@@ -160,11 +164,27 @@ impl OpenAiCompatibleLanguageModelProvider {
api_key: None,
api_key_from_env: false,
_subscription: cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
- let Some(settings) = resolve_settings(&this.id, cx) else {
+ let Some(settings) = resolve_settings(&this.id, cx).cloned() else {
return;
};
- if &this.settings != settings {
- this.settings = settings.clone();
+ if &this.settings != &settings {
+ if settings.api_url != this.settings.api_url && !this.api_key_from_env {
+ let spawn_task = cx.spawn(async move |handle, cx| {
+ if let Ok(task) = handle.update(cx, |this, cx| this.get_api_key(cx)) {
+ if let Err(_) = task.await {
+ handle
+ .update(cx, |this, _| {
+ this.api_key = None;
+ this.api_key_from_env = false;
+ })
+ .ok();
+ }
+ }
+ });
+ spawn_task.detach();
+ }
+
+ this.settings = settings;
cx.notify();
}
}),
@@ -322,6 +342,10 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
self.model.capabilities.tools
}
+ fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
+ LanguageModelToolSchemaFormat::JsonSchemaSubset
+ }
+
fn supports_images(&self) -> bool {
self.model.capabilities.images
}
@@ -92,7 +92,7 @@ pub struct State {
api_key_from_env: bool,
http_client: Arc<dyn HttpClient>,
available_models: Vec<open_router::Model>,
- fetch_models_task: Option<Task<Result<()>>>,
+ fetch_models_task: Option<Task<Result<(), LanguageModelCompletionError>>>,
settings: OpenRouterSettings,
_subscription: Subscription,
}
@@ -152,6 +152,7 @@ impl State {
.open_router
.api_url
.clone();
+
cx.spawn(async move |this, cx| {
let (api_key, from_env) = if let Ok(api_key) = std::env::var(OPENROUTER_API_KEY_VAR) {
(api_key, true)
@@ -161,11 +162,11 @@ impl State {
.await?
.ok_or(AuthenticateError::CredentialsNotFound)?;
(
- String::from_utf8(api_key)
- .context(format!("invalid {} API key", PROVIDER_NAME))?,
+ String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?,
false,
)
};
+
this.update(cx, |this, cx| {
this.api_key = Some(api_key);
this.api_key_from_env = from_env;
@@ -177,18 +178,35 @@ impl State {
})
}
- fn fetch_models(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
+ fn fetch_models(
+ &mut self,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<(), LanguageModelCompletionError>> {
let settings = &AllLanguageModelSettings::get_global(cx).open_router;
let http_client = self.http_client.clone();
let api_url = settings.api_url.clone();
-
+ let Some(api_key) = self.api_key.clone() else {
+ return Task::ready(Err(LanguageModelCompletionError::NoApiKey {
+ provider: PROVIDER_NAME,
+ }));
+ };
cx.spawn(async move |this, cx| {
- let models = list_models(http_client.as_ref(), &api_url).await?;
+ let models = list_models(http_client.as_ref(), &api_url, &api_key)
+ .await
+ .map_err(|e| {
+ LanguageModelCompletionError::Other(anyhow::anyhow!(
+ "OpenRouter error: {:?}",
+ e
+ ))
+ })?;
this.update(cx, |this, cx| {
this.available_models = models;
cx.notify();
})
+ .map_err(|e| LanguageModelCompletionError::Other(e))?;
+
+ Ok(())
})
}
@@ -334,27 +352,37 @@ impl OpenRouterLanguageModel {
&self,
request: open_router::Request,
cx: &AsyncApp,
- ) -> BoxFuture<'static, Result<futures::stream::BoxStream<'static, Result<ResponseStreamEvent>>>>
- {
+ ) -> BoxFuture<
+ 'static,
+ Result<
+ futures::stream::BoxStream<
+ 'static,
+ Result<ResponseStreamEvent, open_router::OpenRouterError>,
+ >,
+ LanguageModelCompletionError,
+ >,
+ > {
let http_client = self.http_client.clone();
let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
let settings = &AllLanguageModelSettings::get_global(cx).open_router;
(state.api_key.clone(), settings.api_url.clone())
}) else {
- return futures::future::ready(Err(anyhow!(
- "App state dropped: Unable to read API key or API URL from the application state"
- )))
+ return futures::future::ready(Err(LanguageModelCompletionError::Other(anyhow!(
+ "App state dropped"
+ ))))
.boxed();
};
- let future = self.request_limiter.stream(async move {
- let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenRouter API Key"))?;
+ async move {
+ let Some(api_key) = api_key else {
+ return Err(LanguageModelCompletionError::NoApiKey {
+ provider: PROVIDER_NAME,
+ });
+ };
let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
- let response = request.await?;
- Ok(response)
- });
-
- async move { Ok(future.await?.boxed()) }.boxed()
+ request.await.map_err(Into::into)
+ }
+ .boxed()
}
}
@@ -381,7 +409,7 @@ impl LanguageModel for OpenRouterLanguageModel {
fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
let model_id = self.model.id().trim().to_lowercase();
- if model_id.contains("gemini") || model_id.contains("grok-4") {
+ if model_id.contains("gemini") || model_id.contains("grok") {
LanguageModelToolSchemaFormat::JsonSchemaSubset
} else {
LanguageModelToolSchemaFormat::JsonSchema
@@ -435,12 +463,12 @@ impl LanguageModel for OpenRouterLanguageModel {
>,
> {
let request = into_open_router(request, &self.model, self.max_output_tokens());
- let completions = self.stream_completion(request, cx);
- async move {
- let mapper = OpenRouterEventMapper::new();
- Ok(mapper.map_stream(completions.await?).boxed())
- }
- .boxed()
+ let request = self.stream_completion(request, cx);
+ let future = self.request_limiter.stream(async move {
+ let response = request.await?;
+ Ok(OpenRouterEventMapper::new().map_stream(response))
+ });
+ async move { Ok(future.await?.boxed()) }.boxed()
}
}
@@ -608,13 +636,17 @@ impl OpenRouterEventMapper {
pub fn map_stream(
mut self,
- events: Pin<Box<dyn Send + Stream<Item = Result<ResponseStreamEvent>>>>,
+ events: Pin<
+ Box<
+ dyn Send + Stream<Item = Result<ResponseStreamEvent, open_router::OpenRouterError>>,
+ >,
+ >,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
{
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
+ Err(error) => vec![Err(error.into())],
})
})
}
@@ -319,7 +319,7 @@ impl LanguageModel for XAiLanguageModel {
}
fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
let model_id = self.model.id().trim().to_lowercase();
- if model_id.eq(x_ai::Model::Grok4.id()) {
+ if model_id.eq(x_ai::Model::Grok4.id()) || model_id.eq(x_ai::Model::GrokCodeFast1.id()) {
LanguageModelToolSchemaFormat::JsonSchemaSubset
} else {
LanguageModelToolSchemaFormat::JsonSchema
@@ -5,7 +5,7 @@ use collections::HashMap;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use crate::provider::{
self,
@@ -46,7 +46,10 @@ pub struct AllLanguageModelSettings {
pub zed_dot_dev: ZedDotDevSettings,
}
-#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
+#[derive(
+ Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, SettingsUi, SettingsKey,
+)]
+#[settings_key(key = "language_models")]
pub struct AllLanguageModelSettingsContent {
pub anthropic: Option<AnthropicSettingsContent>,
pub bedrock: Option<AmazonBedrockSettingsContent>,
@@ -145,8 +148,6 @@ pub struct OpenRouterSettingsContent {
}
impl settings::Settings for AllLanguageModelSettings {
- const KEY: Option<&'static str> = Some("language_models");
-
const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
type FileContent = AllLanguageModelSettingsContent;
@@ -37,7 +37,7 @@ impl IntoElement for InstructionListItem {
let item_content = if let (Some(button_label), Some(button_link)) =
(self.button_label, self.button_link)
{
- let link = button_link.clone();
+ let link = button_link;
let unique_id = SharedString::from(format!("{}-button", self.label));
h_flex()
@@ -0,0 +1,30 @@
+[package]
+name = "language_onboarding"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/python.rs"
+
+[features]
+default = []
+
+[dependencies]
+db.workspace = true
+editor.workspace = true
+gpui.workspace = true
+project.workspace = true
+ui.workspace = true
+workspace.workspace = true
+workspace-hack.workspace = true
+
+# Uncomment other workspace dependencies as needed
+# assistant.workspace = true
+# client.workspace = true
+# project.workspace = true
+# settings.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,95 @@
+use db::kvp::Dismissable;
+use editor::Editor;
+use gpui::{Context, EventEmitter, Subscription};
+use ui::{
+ Banner, Button, Clickable, Color, FluentBuilder as _, IconButton, IconName,
+ InteractiveElement as _, IntoElement, Label, LabelCommon, LabelSize, ParentElement as _,
+ Render, Styled as _, Window, div, h_flex, v_flex,
+};
+use workspace::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace};
+
+pub struct BasedPyrightBanner {
+ dismissed: bool,
+ have_basedpyright: bool,
+ _subscriptions: [Subscription; 1],
+}
+
+impl Dismissable for BasedPyrightBanner {
+ const KEY: &str = "basedpyright-banner";
+}
+
+impl BasedPyrightBanner {
+ pub fn new(workspace: &Workspace, cx: &mut Context<Self>) -> Self {
+ let subscription = cx.subscribe(workspace.project(), |this, _, event, _| {
+ if let project::Event::LanguageServerAdded(_, name, _) = event
+ && name == "basedpyright"
+ {
+ this.have_basedpyright = true;
+ }
+ });
+ let dismissed = Self::dismissed();
+ Self {
+ dismissed,
+ have_basedpyright: false,
+ _subscriptions: [subscription],
+ }
+ }
+}
+
+impl EventEmitter<ToolbarItemEvent> for BasedPyrightBanner {}
+
+impl Render for BasedPyrightBanner {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ div()
+ .id("basedpyright-banner")
+ .when(!self.dismissed && self.have_basedpyright, |el| {
+ el.child(
+ Banner::new()
+ .severity(ui::Severity::Info)
+ .child(
+ h_flex()
+ .gap_2()
+ .child(v_flex()
+ .child("Basedpyright is now the only default language server for Python")
+ .child(Label::new("We have disabled PyRight and pylsp by default. They can be re-enabled in your settings.").size(LabelSize::XSmall).color(Color::Muted))
+ )
+ .child(
+ Button::new("learn-more", "Learn More")
+ .icon(IconName::ArrowUpRight)
+ .on_click(|_, _, cx| {
+ cx.open_url("https://zed.dev/docs/languages/python")
+ }),
+ ),
+ )
+ .action_slot(IconButton::new("dismiss", IconName::Close).on_click(
+ cx.listener(|this, _, _, cx| {
+ this.dismissed = true;
+ Self::set_dismissed(true, cx);
+ cx.notify();
+ }),
+ ))
+ .into_any_element(),
+ )
+ })
+ }
+}
+
+impl ToolbarItemView for BasedPyrightBanner {
+ fn set_active_pane_item(
+ &mut self,
+ active_pane_item: Option<&dyn workspace::ItemHandle>,
+ _window: &mut ui::Window,
+ cx: &mut Context<Self>,
+ ) -> ToolbarItemLocation {
+ if let Some(item) = active_pane_item
+ && let Some(editor) = item.act_as::<Editor>(cx)
+ && let Some(path) = editor.update(cx, |editor, cx| editor.target_file_abs_path(cx))
+ && let Some(file_name) = path.file_name()
+ && file_name.as_encoded_bytes().ends_with(".py".as_bytes())
+ {
+ return ToolbarItemLocation::Secondary;
+ }
+
+ ToolbarItemLocation::Hidden
+ }
+}
@@ -16,6 +16,7 @@ doctest = false
anyhow.workspace = true
client.workspace = true
collections.workspace = true
+command_palette_hooks.workspace = true
copilot.workspace = true
editor.workspace = true
futures.workspace = true
@@ -24,6 +25,7 @@ itertools.workspace = true
language.workspace = true
lsp.workspace = true
project.workspace = true
+proto.workspace = true
serde_json.workspace = true
settings.workspace = true
theme.workspace = true
@@ -4,7 +4,6 @@ use gpui::{
};
use itertools::Itertools;
use serde_json::json;
-use settings::get_key_equivalents;
use ui::{Button, ButtonStyle};
use ui::{
ButtonCommon, Clickable, Context, FluentBuilder, InteractiveElement, Label, LabelCommon,
@@ -169,7 +168,8 @@ impl Item for KeyContextView {
impl Render for KeyContextView {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
use itertools::Itertools;
- let key_equivalents = get_key_equivalents(cx.keyboard_layout().id());
+
+ let key_equivalents = cx.keyboard_mapper().get_key_equivalents();
v_flex()
.id("key-context-view")
.overflow_scroll()
@@ -1,20 +1,20 @@
mod key_context_view;
-mod lsp_log;
-pub mod lsp_tool;
+pub mod lsp_button;
+pub mod lsp_log_view;
mod syntax_tree_view;
#[cfg(test)]
-mod lsp_log_tests;
+mod lsp_log_view_tests;
use gpui::{App, AppContext, Entity};
-pub use lsp_log::{LogStore, LspLogToolbarItemView, LspLogView};
+pub use lsp_log_view::LspLogView;
pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView};
use ui::{Context, Window};
use workspace::{Item, ItemHandle, SplitDirection, Workspace};
pub fn init(cx: &mut App) {
- lsp_log::init(cx);
+ lsp_log_view::init(false, cx);
syntax_tree_view::init(cx);
key_context_view::init(cx);
}
@@ -11,7 +11,10 @@ use editor::{Editor, EditorEvent};
use gpui::{Corner, Entity, Subscription, Task, WeakEntity, actions};
use language::{BinaryStatus, BufferId, ServerHealth};
use lsp::{LanguageServerId, LanguageServerName, LanguageServerSelector};
-use project::{LspStore, LspStoreEvent, Worktree, project_settings::ProjectSettings};
+use project::{
+ LspStore, LspStoreEvent, Worktree, lsp_store::log_store::GlobalLogStore,
+ project_settings::ProjectSettings,
+};
use settings::{Settings as _, SettingsStore};
use ui::{
Context, ContextMenu, ContextMenuEntry, ContextMenuItem, DocumentationAside, DocumentationSide,
@@ -20,7 +23,7 @@ use ui::{
use workspace::{StatusItemView, Workspace};
-use crate::lsp_log::GlobalLogStore;
+use crate::lsp_log_view;
actions!(
lsp_tool,
@@ -30,7 +33,7 @@ actions!(
]
);
-pub struct LspTool {
+pub struct LspButton {
server_state: Entity<LanguageServerState>,
popover_menu_handle: PopoverMenuHandle<ContextMenu>,
lsp_menu: Option<Entity<ContextMenu>>,
@@ -121,9 +124,8 @@ impl LanguageServerState {
menu = menu.align_popover_bottom();
let lsp_logs = cx
.try_global::<GlobalLogStore>()
- .and_then(|lsp_logs| lsp_logs.0.upgrade());
- let lsp_store = self.lsp_store.upgrade();
- let Some((lsp_logs, lsp_store)) = lsp_logs.zip(lsp_store) else {
+ .map(|lsp_logs| lsp_logs.0.clone());
+ let Some(lsp_logs) = lsp_logs else {
return menu;
};
@@ -210,10 +212,11 @@ impl LanguageServerState {
};
let server_selector = server_info.server_selector();
- // TODO currently, Zed remote does not work well with the LSP logs
- // https://github.com/zed-industries/zed/issues/28557
- let has_logs = lsp_store.read(cx).as_local().is_some()
- && lsp_logs.read(cx).has_server_logs(&server_selector);
+ let is_remote = self
+ .lsp_store
+ .update(cx, |lsp_store, _| lsp_store.as_remote().is_some())
+ .unwrap_or(false);
+ let has_logs = is_remote || lsp_logs.read(cx).has_server_logs(&server_selector);
let status_color = server_info
.binary_status
@@ -241,10 +244,10 @@ impl LanguageServerState {
.as_ref()
.or_else(|| server_info.binary_status.as_ref()?.message.as_ref())
.cloned();
- let hover_label = if has_logs {
- Some("View Logs")
- } else if message.is_some() {
+ let hover_label = if message.is_some() {
Some("View Message")
+ } else if has_logs {
+ Some("View Logs")
} else {
None
};
@@ -288,21 +291,12 @@ impl LanguageServerState {
let server_name = server_info.name.clone();
let workspace = self.workspace.clone();
move |window, cx| {
- if has_logs {
- lsp_logs.update(cx, |lsp_logs, cx| {
- lsp_logs.open_server_trace(
- workspace.clone(),
- server_selector.clone(),
- window,
- cx,
- );
- });
- } else if let Some(message) = &message {
+ if let Some(message) = &message {
let Some(create_buffer) = workspace
.update(cx, |workspace, cx| {
workspace
.project()
- .update(cx, |project, cx| project.create_buffer(cx))
+ .update(cx, |project, cx| project.create_buffer(false, cx))
})
.ok()
else {
@@ -347,6 +341,14 @@ impl LanguageServerState {
anyhow::Ok(())
})
.detach();
+ } else if has_logs {
+ lsp_log_view::open_server_trace(
+ &lsp_logs,
+ workspace.clone(),
+ server_selector.clone(),
+ window,
+ cx,
+ );
} else {
cx.propagate();
}
@@ -510,7 +512,7 @@ impl ServerData<'_> {
}
}
-impl LspTool {
+impl LspButton {
pub fn new(
workspace: &Workspace,
popover_menu_handle: PopoverMenuHandle<ContextMenu>,
@@ -518,37 +520,59 @@ impl LspTool {
cx: &mut Context<Self>,
) -> Self {
let settings_subscription =
- cx.observe_global_in::<SettingsStore>(window, move |lsp_tool, window, cx| {
+ cx.observe_global_in::<SettingsStore>(window, move |lsp_button, window, cx| {
if ProjectSettings::get_global(cx).global_lsp_settings.button {
- if lsp_tool.lsp_menu.is_none() {
- lsp_tool.refresh_lsp_menu(true, window, cx);
+ if lsp_button.lsp_menu.is_none() {
+ lsp_button.refresh_lsp_menu(true, window, cx);
}
- } else if lsp_tool.lsp_menu.take().is_some() {
+ } else if lsp_button.lsp_menu.take().is_some() {
cx.notify();
}
});
let lsp_store = workspace.project().read(cx).lsp_store();
+ let mut language_servers = LanguageServers::default();
+ for (_, status) in lsp_store.read(cx).language_server_statuses() {
+ language_servers.binary_statuses.insert(
+ status.name.clone(),
+ LanguageServerBinaryStatus {
+ status: BinaryStatus::None,
+ message: None,
+ },
+ );
+ }
+
let lsp_store_subscription =
- cx.subscribe_in(&lsp_store, window, |lsp_tool, _, e, window, cx| {
- lsp_tool.on_lsp_store_event(e, window, cx)
+ cx.subscribe_in(&lsp_store, window, |lsp_button, _, e, window, cx| {
+ lsp_button.on_lsp_store_event(e, window, cx)
});
- let state = cx.new(|_| LanguageServerState {
+ let server_state = cx.new(|_| LanguageServerState {
workspace: workspace.weak_handle(),
items: Vec::new(),
lsp_store: lsp_store.downgrade(),
active_editor: None,
- language_servers: LanguageServers::default(),
+ language_servers,
});
- Self {
- server_state: state,
+ let mut lsp_button = Self {
+ server_state,
popover_menu_handle,
lsp_menu: None,
lsp_menu_refresh: Task::ready(()),
_subscriptions: vec![settings_subscription, lsp_store_subscription],
+ };
+ if !lsp_button
+ .server_state
+ .read(cx)
+ .language_servers
+ .binary_statuses
+ .is_empty()
+ {
+ lsp_button.refresh_lsp_menu(true, window, cx);
}
+
+ lsp_button
}
fn on_lsp_store_event(
@@ -708,6 +732,25 @@ impl LspTool {
}
}
}
+ state
+ .lsp_store
+ .update(cx, |lsp_store, cx| {
+ for (server_id, status) in lsp_store.language_server_statuses() {
+ if let Some(worktree) = status.worktree.and_then(|worktree_id| {
+ lsp_store
+ .worktree_store()
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ }) {
+ server_ids_to_worktrees.insert(server_id, worktree.clone());
+ server_names_to_worktrees
+ .entry(status.name.clone())
+ .or_default()
+ .insert((worktree, server_id));
+ }
+ }
+ })
+ .ok();
let mut servers_per_worktree = BTreeMap::<SharedString, Vec<ServerData>>::new();
let mut servers_without_worktree = Vec::<ServerData>::new();
@@ -852,18 +895,18 @@ impl LspTool {
) {
if create_if_empty || self.lsp_menu.is_some() {
let state = self.server_state.clone();
- self.lsp_menu_refresh = cx.spawn_in(window, async move |lsp_tool, cx| {
+ self.lsp_menu_refresh = cx.spawn_in(window, async move |lsp_button, cx| {
cx.background_executor()
.timer(Duration::from_millis(30))
.await;
- lsp_tool
- .update_in(cx, |lsp_tool, window, cx| {
- lsp_tool.regenerate_items(cx);
+ lsp_button
+ .update_in(cx, |lsp_button, window, cx| {
+ lsp_button.regenerate_items(cx);
let menu = ContextMenu::build(window, cx, |menu, _, cx| {
state.update(cx, |state, cx| state.fill_menu(menu, cx))
});
- lsp_tool.lsp_menu = Some(menu.clone());
- lsp_tool.popover_menu_handle.refresh_menu(
+ lsp_button.lsp_menu = Some(menu.clone());
+ lsp_button.popover_menu_handle.refresh_menu(
window,
cx,
Rc::new(move |_, _| Some(menu.clone())),
@@ -876,7 +919,7 @@ impl LspTool {
}
}
-impl StatusItemView for LspTool {
+impl StatusItemView for LspButton {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn workspace::ItemHandle>,
@@ -899,9 +942,9 @@ impl StatusItemView for LspTool {
let _editor_subscription = cx.subscribe_in(
&editor,
window,
- |lsp_tool, _, e: &EditorEvent, window, cx| match e {
+ |lsp_button, _, e: &EditorEvent, window, cx| match e {
EditorEvent::ExcerptsAdded { buffer, .. } => {
- let updated = lsp_tool.server_state.update(cx, |state, cx| {
+ let updated = lsp_button.server_state.update(cx, |state, cx| {
if let Some(active_editor) = state.active_editor.as_mut() {
let buffer_id = buffer.read(cx).remote_id();
active_editor.editor_buffers.insert(buffer_id)
@@ -910,13 +953,13 @@ impl StatusItemView for LspTool {
}
});
if updated {
- lsp_tool.refresh_lsp_menu(false, window, cx);
+ lsp_button.refresh_lsp_menu(false, window, cx);
}
}
EditorEvent::ExcerptsRemoved {
removed_buffer_ids, ..
} => {
- let removed = lsp_tool.server_state.update(cx, |state, _| {
+ let removed = lsp_button.server_state.update(cx, |state, _| {
let mut removed = false;
if let Some(active_editor) = state.active_editor.as_mut() {
for id in removed_buffer_ids {
@@ -930,7 +973,7 @@ impl StatusItemView for LspTool {
removed
});
if removed {
- lsp_tool.refresh_lsp_menu(false, window, cx);
+ lsp_button.refresh_lsp_menu(false, window, cx);
}
}
_ => {}
@@ -960,7 +1003,7 @@ impl StatusItemView for LspTool {
}
}
-impl Render for LspTool {
+impl Render for LspButton {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
if self.server_state.read(cx).language_servers.is_empty() || self.lsp_menu.is_none() {
return div();
@@ -1005,11 +1048,11 @@ impl Render for LspTool {
(None, "All Servers Operational")
};
- let lsp_tool = cx.entity();
+ let lsp_button = cx.entity();
div().child(
PopoverMenu::new("lsp-tool")
- .menu(move |_, cx| lsp_tool.read(cx).lsp_menu.clone())
+ .menu(move |_, cx| lsp_button.read(cx).lsp_menu.clone())
.anchor(Corner::BottomLeft)
.with_handle(self.popover_menu_handle.clone())
.trigger_with_tooltip(
@@ -1,20 +1,25 @@
-use collections::{HashMap, VecDeque};
+use collections::VecDeque;
use copilot::Copilot;
use editor::{Editor, EditorEvent, actions::MoveToEnd, scroll::Autoscroll};
-use futures::{StreamExt, channel::mpsc};
use gpui::{
- AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, Global,
- IntoElement, ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div,
+ AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
+ ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div,
};
use itertools::Itertools;
use language::{LanguageServerId, language_settings::SoftWrap};
use lsp::{
- IoKind, LanguageServer, LanguageServerName, LanguageServerSelector, MessageType,
+ LanguageServer, LanguageServerBinary, LanguageServerName, LanguageServerSelector, MessageType,
SetTraceParams, TraceValue, notification::SetTrace,
};
-use project::{Project, WorktreeId, search::SearchQuery};
+use project::{
+ Project,
+ lsp_store::log_store::{self, Event, LanguageServerKind, LogKind, LogStore, Message},
+ search::SearchQuery,
+};
+use proto::toggle_lsp_logs::LogType;
use std::{any::TypeId, borrow::Cow, sync::Arc};
use ui::{Button, Checkbox, ContextMenu, Label, PopoverMenu, ToggleState, prelude::*};
+use util::ResultExt as _;
use workspace::{
SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId,
item::{Item, ItemHandle},
@@ -23,132 +28,53 @@ use workspace::{
use crate::get_or_create_tool;
-const SEND_LINE: &str = "\n// Send:";
-const RECEIVE_LINE: &str = "\n// Receive:";
-const MAX_STORED_LOG_ENTRIES: usize = 2000;
-
-pub struct LogStore {
- projects: HashMap<WeakEntity<Project>, ProjectState>,
- language_servers: HashMap<LanguageServerId, LanguageServerState>,
- copilot_log_subscription: Option<lsp::Subscription>,
- _copilot_subscription: Option<gpui::Subscription>,
- io_tx: mpsc::UnboundedSender<(LanguageServerId, IoKind, String)>,
-}
-
-struct ProjectState {
- _subscriptions: [gpui::Subscription; 2],
-}
-
-trait Message: AsRef<str> {
- type Level: Copy + std::fmt::Debug;
- fn should_include(&self, _: Self::Level) -> bool {
- true
- }
-}
-
-pub(super) struct LogMessage {
- message: String,
- typ: MessageType,
-}
-
-impl AsRef<str> for LogMessage {
- fn as_ref(&self) -> &str {
- &self.message
- }
-}
-
-impl Message for LogMessage {
- type Level = MessageType;
-
- fn should_include(&self, level: Self::Level) -> bool {
- match (self.typ, level) {
- (MessageType::ERROR, _) => true,
- (_, MessageType::ERROR) => false,
- (MessageType::WARNING, _) => true,
- (_, MessageType::WARNING) => false,
- (MessageType::INFO, _) => true,
- (_, MessageType::INFO) => false,
- _ => true,
- }
- }
-}
-
-pub(super) struct TraceMessage {
- message: String,
-}
-
-impl AsRef<str> for TraceMessage {
- fn as_ref(&self) -> &str {
- &self.message
- }
-}
-
-impl Message for TraceMessage {
- type Level = ();
-}
-
-struct RpcMessage {
- message: String,
-}
-
-impl AsRef<str> for RpcMessage {
- fn as_ref(&self) -> &str {
- &self.message
- }
-}
-
-impl Message for RpcMessage {
- type Level = ();
-}
-
-pub(super) struct LanguageServerState {
- name: Option<LanguageServerName>,
- worktree_id: Option<WorktreeId>,
- kind: LanguageServerKind,
- log_messages: VecDeque<LogMessage>,
- trace_messages: VecDeque<TraceMessage>,
- rpc_state: Option<LanguageServerRpcState>,
- trace_level: TraceValue,
- log_level: MessageType,
- io_logs_subscription: Option<lsp::Subscription>,
-}
-
-#[derive(PartialEq, Clone)]
-pub enum LanguageServerKind {
- Local { project: WeakEntity<Project> },
- Remote { project: WeakEntity<Project> },
- Global,
-}
-
-impl LanguageServerKind {
- fn is_remote(&self) -> bool {
- matches!(self, LanguageServerKind::Remote { .. })
- }
-}
-
-impl std::fmt::Debug for LanguageServerKind {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- LanguageServerKind::Local { .. } => write!(f, "LanguageServerKind::Local"),
- LanguageServerKind::Remote { .. } => write!(f, "LanguageServerKind::Remote"),
- LanguageServerKind::Global => write!(f, "LanguageServerKind::Global"),
- }
- }
-}
-
-impl LanguageServerKind {
- fn project(&self) -> Option<&WeakEntity<Project>> {
- match self {
- Self::Local { project } => Some(project),
- Self::Remote { project } => Some(project),
- Self::Global { .. } => None,
- }
- }
-}
-
-struct LanguageServerRpcState {
- rpc_messages: VecDeque<RpcMessage>,
- last_message_kind: Option<MessageKind>,
+pub fn open_server_trace(
+ log_store: &Entity<LogStore>,
+ workspace: WeakEntity<Workspace>,
+ server: LanguageServerSelector,
+ window: &mut Window,
+ cx: &mut App,
+) {
+ log_store.update(cx, |_, cx| {
+ cx.spawn_in(window, async move |log_store, cx| {
+ let Some(log_store) = log_store.upgrade() else {
+ return;
+ };
+ workspace
+ .update_in(cx, |workspace, window, cx| {
+ let project = workspace.project().clone();
+ let tool_log_store = log_store.clone();
+ let log_view = get_or_create_tool(
+ workspace,
+ SplitDirection::Right,
+ window,
+ cx,
+ move |window, cx| LspLogView::new(project, tool_log_store, window, cx),
+ );
+ log_view.update(cx, |log_view, cx| {
+ let server_id = match server {
+ LanguageServerSelector::Id(id) => Some(id),
+ LanguageServerSelector::Name(name) => {
+ log_store.read(cx).language_servers.iter().find_map(
+ |(id, state)| {
+ if state.name.as_ref() == Some(&name) {
+ Some(*id)
+ } else {
+ None
+ }
+ },
+ )
+ }
+ };
+ if let Some(server_id) = server_id {
+ log_view.show_rpc_trace_for_server(server_id, window, cx);
+ }
+ });
+ })
+ .ok();
+ })
+ .detach();
+ })
}
pub struct LspLogView {
@@ -167,32 +93,6 @@ pub struct LspLogToolbarItemView {
_log_view_subscription: Option<Subscription>,
}
-#[derive(Copy, Clone, PartialEq, Eq)]
-enum MessageKind {
- Send,
- Receive,
-}
-
-#[derive(Clone, Copy, Debug, Default, PartialEq)]
-pub enum LogKind {
- Rpc,
- Trace,
- #[default]
- Logs,
- ServerInfo,
-}
-
-impl LogKind {
- fn label(&self) -> &'static str {
- match self {
- LogKind::Rpc => RPC_MESSAGES,
- LogKind::Trace => SERVER_TRACE,
- LogKind::Logs => SERVER_LOGS,
- LogKind::ServerInfo => SERVER_INFO,
- }
- }
-}
-
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct LogMenuItem {
pub server_id: LanguageServerId,
@@ -212,59 +112,24 @@ actions!(
]
);
-pub(super) struct GlobalLogStore(pub WeakEntity<LogStore>);
-
-impl Global for GlobalLogStore {}
-
-pub fn init(cx: &mut App) {
- let log_store = cx.new(LogStore::new);
- cx.set_global(GlobalLogStore(log_store.downgrade()));
+pub fn init(on_headless_host: bool, cx: &mut App) {
+ let log_store = log_store::init(on_headless_host, cx);
- cx.observe_new(move |workspace: &mut Workspace, _, cx| {
- let project = workspace.project();
- if project.read(cx).is_local() || project.read(cx).is_via_ssh() {
- log_store.update(cx, |store, cx| {
- store.add_project(project, cx);
- });
- }
-
- let log_store = log_store.clone();
- workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, window, cx| {
- let project = workspace.project().read(cx);
- if project.is_local() || project.is_via_ssh() {
- let project = workspace.project().clone();
- let log_store = log_store.clone();
- get_or_create_tool(
- workspace,
- SplitDirection::Right,
- window,
- cx,
- move |window, cx| LspLogView::new(project, log_store, window, cx),
- );
- }
- });
- })
- .detach();
-}
-
-impl LogStore {
- pub fn new(cx: &mut Context<Self>) -> Self {
- let (io_tx, mut io_rx) = mpsc::unbounded();
-
- let copilot_subscription = Copilot::global(cx).map(|copilot| {
+ log_store.update(cx, |_, cx| {
+ Copilot::global(cx).map(|copilot| {
let copilot = &copilot;
- cx.subscribe(copilot, |this, copilot, edit_prediction_event, cx| {
+ cx.subscribe(copilot, |log_store, copilot, edit_prediction_event, cx| {
if let copilot::Event::CopilotLanguageServerStarted = edit_prediction_event
&& let Some(server) = copilot.read(cx).language_server()
{
let server_id = server.server_id();
- let weak_this = cx.weak_entity();
- this.copilot_log_subscription =
+ let weak_lsp_store = cx.weak_entity();
+ log_store.copilot_log_subscription =
Some(server.on_notification::<copilot::request::LogMessage, _>(
move |params, cx| {
- weak_this
- .update(cx, |this, cx| {
- this.add_language_server_log(
+ weak_lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store.add_language_server_log(
server_id,
MessageType::LOG,
¶ms.message,
@@ -274,8 +139,9 @@ impl LogStore {
.ok();
},
));
+
let name = LanguageServerName::new_static("copilot");
- this.add_language_server(
+ log_store.add_language_server(
LanguageServerKind::Global,
server.server_id(),
Some(name),
@@ -285,432 +151,29 @@ impl LogStore {
);
}
})
- });
-
- let this = Self {
- copilot_log_subscription: None,
- _copilot_subscription: copilot_subscription,
- projects: HashMap::default(),
- language_servers: HashMap::default(),
- io_tx,
- };
-
- cx.spawn(async move |this, cx| {
- while let Some((server_id, io_kind, message)) = io_rx.next().await {
- if let Some(this) = this.upgrade() {
- this.update(cx, |this, cx| {
- this.on_io(server_id, io_kind, &message, cx);
- })?;
- }
- }
- anyhow::Ok(())
+ .detach();
})
- .detach_and_log_err(cx);
- this
- }
+ });
- pub fn add_project(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
- let weak_project = project.downgrade();
- self.projects.insert(
- project.downgrade(),
- ProjectState {
- _subscriptions: [
- cx.observe_release(project, move |this, _, _| {
- this.projects.remove(&weak_project);
- this.language_servers
- .retain(|_, state| state.kind.project() != Some(&weak_project));
- }),
- cx.subscribe(project, |this, project, event, cx| {
- let server_kind = if project.read(cx).is_via_ssh() {
- LanguageServerKind::Remote {
- project: project.downgrade(),
- }
- } else {
- LanguageServerKind::Local {
- project: project.downgrade(),
- }
- };
-
- match event {
- project::Event::LanguageServerAdded(id, name, worktree_id) => {
- this.add_language_server(
- server_kind,
- *id,
- Some(name.clone()),
- *worktree_id,
- project
- .read(cx)
- .lsp_store()
- .read(cx)
- .language_server_for_id(*id),
- cx,
- );
- }
- project::Event::LanguageServerRemoved(id) => {
- this.remove_language_server(*id, cx);
- }
- project::Event::LanguageServerLog(id, typ, message) => {
- this.add_language_server(server_kind, *id, None, None, None, cx);
- match typ {
- project::LanguageServerLogType::Log(typ) => {
- this.add_language_server_log(*id, *typ, message, cx);
- }
- project::LanguageServerLogType::Trace(_) => {
- this.add_language_server_trace(*id, message, cx);
- }
- }
- }
- _ => {}
- }
- }),
- ],
- },
- );
- }
-
- pub(super) fn get_language_server_state(
- &mut self,
- id: LanguageServerId,
- ) -> Option<&mut LanguageServerState> {
- self.language_servers.get_mut(&id)
- }
-
- fn add_language_server(
- &mut self,
- kind: LanguageServerKind,
- server_id: LanguageServerId,
- name: Option<LanguageServerName>,
- worktree_id: Option<WorktreeId>,
- server: Option<Arc<LanguageServer>>,
- cx: &mut Context<Self>,
- ) -> Option<&mut LanguageServerState> {
- let server_state = self.language_servers.entry(server_id).or_insert_with(|| {
- cx.notify();
- LanguageServerState {
- name: None,
- worktree_id: None,
- kind,
- rpc_state: None,
- log_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
- trace_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
- trace_level: TraceValue::Off,
- log_level: MessageType::LOG,
- io_logs_subscription: None,
- }
+ cx.observe_new(move |workspace: &mut Workspace, _, cx| {
+ log_store.update(cx, |store, cx| {
+ store.add_project(workspace.project(), cx);
});
- if let Some(name) = name {
- server_state.name = Some(name);
- }
- if let Some(worktree_id) = worktree_id {
- server_state.worktree_id = Some(worktree_id);
- }
-
- if let Some(server) = server
- .clone()
- .filter(|_| server_state.io_logs_subscription.is_none())
- {
- let io_tx = self.io_tx.clone();
- let server_id = server.server_id();
- server_state.io_logs_subscription = Some(server.on_io(move |io_kind, message| {
- io_tx
- .unbounded_send((server_id, io_kind, message.to_string()))
- .ok();
- }));
- }
-
- Some(server_state)
- }
-
- fn add_language_server_log(
- &mut self,
- id: LanguageServerId,
- typ: MessageType,
- message: &str,
- cx: &mut Context<Self>,
- ) -> Option<()> {
- let language_server_state = self.get_language_server_state(id)?;
-
- let log_lines = &mut language_server_state.log_messages;
- Self::add_language_server_message(
- log_lines,
- id,
- LogMessage {
- message: message.trim_end().to_string(),
- typ,
- },
- language_server_state.log_level,
- LogKind::Logs,
- cx,
- );
- Some(())
- }
-
- fn add_language_server_trace(
- &mut self,
- id: LanguageServerId,
- message: &str,
- cx: &mut Context<Self>,
- ) -> Option<()> {
- let language_server_state = self.get_language_server_state(id)?;
-
- let log_lines = &mut language_server_state.trace_messages;
- Self::add_language_server_message(
- log_lines,
- id,
- TraceMessage {
- message: message.trim().to_string(),
- },
- (),
- LogKind::Trace,
- cx,
- );
- Some(())
- }
-
- fn add_language_server_message<T: Message>(
- log_lines: &mut VecDeque<T>,
- id: LanguageServerId,
- message: T,
- current_severity: <T as Message>::Level,
- kind: LogKind,
- cx: &mut Context<Self>,
- ) {
- while log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
- log_lines.pop_front();
- }
- let text = message.as_ref().to_string();
- let visible = message.should_include(current_severity);
- log_lines.push_back(message);
-
- if visible {
- cx.emit(Event::NewServerLogEntry { id, kind, text });
- cx.notify();
- }
- }
-
- fn remove_language_server(&mut self, id: LanguageServerId, cx: &mut Context<Self>) {
- self.language_servers.remove(&id);
- cx.notify();
- }
-
- pub(super) fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque<LogMessage>> {
- Some(&self.language_servers.get(&server_id)?.log_messages)
- }
-
- pub(super) fn server_trace(
- &self,
- server_id: LanguageServerId,
- ) -> Option<&VecDeque<TraceMessage>> {
- Some(&self.language_servers.get(&server_id)?.trace_messages)
- }
-
- fn server_ids_for_project<'a>(
- &'a self,
- lookup_project: &'a WeakEntity<Project>,
- ) -> impl Iterator<Item = LanguageServerId> + 'a {
- self.language_servers
- .iter()
- .filter_map(move |(id, state)| match &state.kind {
- LanguageServerKind::Local { project } | LanguageServerKind::Remote { project } => {
- if project == lookup_project {
- Some(*id)
- } else {
- None
- }
- }
- LanguageServerKind::Global => Some(*id),
- })
- }
-
- fn enable_rpc_trace_for_language_server(
- &mut self,
- server_id: LanguageServerId,
- ) -> Option<&mut LanguageServerRpcState> {
- let rpc_state = self
- .language_servers
- .get_mut(&server_id)?
- .rpc_state
- .get_or_insert_with(|| LanguageServerRpcState {
- rpc_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
- last_message_kind: None,
- });
- Some(rpc_state)
- }
-
- pub fn disable_rpc_trace_for_language_server(
- &mut self,
- server_id: LanguageServerId,
- ) -> Option<()> {
- self.language_servers.get_mut(&server_id)?.rpc_state.take();
- Some(())
- }
-
- pub fn has_server_logs(&self, server: &LanguageServerSelector) -> bool {
- match server {
- LanguageServerSelector::Id(id) => self.language_servers.contains_key(id),
- LanguageServerSelector::Name(name) => self
- .language_servers
- .iter()
- .any(|(_, state)| state.name.as_ref() == Some(name)),
- }
- }
-
- pub fn open_server_log(
- &mut self,
- workspace: WeakEntity<Workspace>,
- server: LanguageServerSelector,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- cx.spawn_in(window, async move |log_store, cx| {
- let Some(log_store) = log_store.upgrade() else {
- return;
- };
- workspace
- .update_in(cx, |workspace, window, cx| {
- let project = workspace.project().clone();
- let tool_log_store = log_store.clone();
- let log_view = get_or_create_tool(
- workspace,
- SplitDirection::Right,
- window,
- cx,
- move |window, cx| LspLogView::new(project, tool_log_store, window, cx),
- );
- log_view.update(cx, |log_view, cx| {
- let server_id = match server {
- LanguageServerSelector::Id(id) => Some(id),
- LanguageServerSelector::Name(name) => {
- log_store.read(cx).language_servers.iter().find_map(
- |(id, state)| {
- if state.name.as_ref() == Some(&name) {
- Some(*id)
- } else {
- None
- }
- },
- )
- }
- };
- if let Some(server_id) = server_id {
- log_view.show_logs_for_server(server_id, window, cx);
- }
- });
- })
- .ok();
- })
- .detach();
- }
-
- pub fn open_server_trace(
- &mut self,
- workspace: WeakEntity<Workspace>,
- server: LanguageServerSelector,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- cx.spawn_in(window, async move |log_store, cx| {
- let Some(log_store) = log_store.upgrade() else {
- return;
- };
- workspace
- .update_in(cx, |workspace, window, cx| {
- let project = workspace.project().clone();
- let tool_log_store = log_store.clone();
- let log_view = get_or_create_tool(
- workspace,
- SplitDirection::Right,
- window,
- cx,
- move |window, cx| LspLogView::new(project, tool_log_store, window, cx),
- );
- log_view.update(cx, |log_view, cx| {
- let server_id = match server {
- LanguageServerSelector::Id(id) => Some(id),
- LanguageServerSelector::Name(name) => {
- log_store.read(cx).language_servers.iter().find_map(
- |(id, state)| {
- if state.name.as_ref() == Some(&name) {
- Some(*id)
- } else {
- None
- }
- },
- )
- }
- };
- if let Some(server_id) = server_id {
- log_view.show_rpc_trace_for_server(server_id, window, cx);
- }
- });
- })
- .ok();
- })
- .detach();
- }
-
- fn on_io(
- &mut self,
- language_server_id: LanguageServerId,
- io_kind: IoKind,
- message: &str,
- cx: &mut Context<Self>,
- ) -> Option<()> {
- let is_received = match io_kind {
- IoKind::StdOut => true,
- IoKind::StdIn => false,
- IoKind::StdErr => {
- self.add_language_server_log(language_server_id, MessageType::LOG, message, cx);
- return Some(());
- }
- };
-
- let state = self
- .get_language_server_state(language_server_id)?
- .rpc_state
- .as_mut()?;
- let kind = if is_received {
- MessageKind::Receive
- } else {
- MessageKind::Send
- };
-
- let rpc_log_lines = &mut state.rpc_messages;
- if state.last_message_kind != Some(kind) {
- while rpc_log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
- rpc_log_lines.pop_front();
- }
- let line_before_message = match kind {
- MessageKind::Send => SEND_LINE,
- MessageKind::Receive => RECEIVE_LINE,
- };
- rpc_log_lines.push_back(RpcMessage {
- message: line_before_message.to_string(),
- });
- cx.emit(Event::NewServerLogEntry {
- id: language_server_id,
- kind: LogKind::Rpc,
- text: line_before_message.to_string(),
- });
- }
-
- while rpc_log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
- rpc_log_lines.pop_front();
- }
-
- let message = message.trim();
- rpc_log_lines.push_back(RpcMessage {
- message: message.to_string(),
- });
- cx.emit(Event::NewServerLogEntry {
- id: language_server_id,
- kind: LogKind::Rpc,
- text: message.to_string(),
+ let log_store = log_store.clone();
+ workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, window, cx| {
+ let log_store = log_store.clone();
+ let project = workspace.project().clone();
+ get_or_create_tool(
+ workspace,
+ SplitDirection::Right,
+ window,
+ cx,
+ move |window, cx| LspLogView::new(project, log_store, window, cx),
+ );
});
- cx.notify();
- Some(())
- }
+ })
+ .detach();
}
impl LspLogView {
@@ -754,13 +217,14 @@ impl LspLogView {
cx.notify();
});
+
let events_subscriptions = cx.subscribe_in(
&log_store,
window,
move |log_view, _, e, window, cx| match e {
Event::NewServerLogEntry { id, kind, text } => {
if log_view.current_server_id == Some(*id)
- && *kind == log_view.active_entry_kind
+ && LogKind::from_server_log_type(kind) == log_view.active_entry_kind
{
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
@@ -803,7 +267,20 @@ impl LspLogView {
window.focus(&log_view.editor.focus_handle(cx));
});
- let mut this = Self {
+ cx.on_release(|log_view, cx| {
+ log_view.log_store.update(cx, |log_store, cx| {
+ for (server_id, state) in &log_store.language_servers {
+ if let Some(log_kind) = state.toggled_log_kind {
+ if let Some(log_type) = log_type(log_kind) {
+ send_toggle_log_message(state, *server_id, false, log_type, cx);
+ }
+ }
+ }
+ });
+ })
+ .detach();
+
+ let mut lsp_log_view = Self {
focus_handle,
editor,
editor_subscriptions,
@@ -818,9 +295,9 @@ impl LspLogView {
],
};
if let Some(server_id) = server_id {
- this.show_logs_for_server(server_id, window, cx);
+ lsp_log_view.show_logs_for_server(server_id, window, cx);
}
- this
+ lsp_log_view
}
fn editor_for_logs(
@@ -841,14 +318,14 @@ impl LspLogView {
}
fn editor_for_server_info(
- server: &LanguageServer,
+ info: ServerInfo,
window: &mut Window,
cx: &mut Context<Self>,
) -> (Entity<Editor>, Vec<Subscription>) {
let server_info = format!(
"* Server: {NAME} (id {ID})
-* Binary: {BINARY:#?}
+* Binary: {BINARY}
* Registered workspace folders:
{WORKSPACE_FOLDERS}
@@ -856,22 +333,21 @@ impl LspLogView {
* Capabilities: {CAPABILITIES}
* Configuration: {CONFIGURATION}",
- NAME = server.name(),
- ID = server.server_id(),
- BINARY = server.binary(),
- WORKSPACE_FOLDERS = server
- .workspace_folders()
- .into_iter()
- .filter_map(|path| path
- .to_file_path()
- .ok()
- .map(|path| path.to_string_lossy().into_owned()))
- .collect::<Vec<_>>()
- .join(", "),
- CAPABILITIES = serde_json::to_string_pretty(&server.capabilities())
+ NAME = info.name,
+ ID = info.id,
+ BINARY = info
+ .binary
+ .as_ref()
+ .map_or_else(|| "Unknown".to_string(), |binary| format!("{binary:#?}")),
+ WORKSPACE_FOLDERS = info.workspace_folders.join(", "),
+ CAPABILITIES = serde_json::to_string_pretty(&info.capabilities)
.unwrap_or_else(|e| format!("Failed to serialize capabilities: {e}")),
- CONFIGURATION = serde_json::to_string_pretty(server.configuration())
- .unwrap_or_else(|e| format!("Failed to serialize configuration: {e}")),
+ CONFIGURATION = info
+ .configuration
+ .map(|configuration| serde_json::to_string_pretty(&configuration))
+ .transpose()
+ .unwrap_or_else(|e| Some(format!("Failed to serialize configuration: {e}")))
+ .unwrap_or_else(|| "Unknown".to_string()),
);
let editor = initialize_new_editor(server_info, false, window, cx);
let editor_subscription = cx.subscribe(
@@ -894,7 +370,9 @@ impl LspLogView {
.language_servers
.iter()
.map(|(server_id, state)| match &state.kind {
- LanguageServerKind::Local { .. } | LanguageServerKind::Remote { .. } => {
+ LanguageServerKind::Local { .. }
+ | LanguageServerKind::Remote { .. }
+ | LanguageServerKind::LocalSsh { .. } => {
let worktree_root_name = state
.worktree_id
.and_then(|id| self.project.read(cx).worktree_for_id(id, cx))
@@ -930,7 +408,7 @@ impl LspLogView {
let state = log_store.language_servers.get(&server_id)?;
Some(LogMenuItem {
server_id,
- server_name: name.clone(),
+ server_name: name,
server_kind: state.kind.clone(),
worktree_root_name: "supplementary".to_string(),
rpc_trace_enabled: state.rpc_state.is_some(),
@@ -972,6 +450,12 @@ impl LspLogView {
cx.notify();
}
self.editor.read(cx).focus_handle(cx).focus(window);
+ self.log_store.update(cx, |log_store, cx| {
+ let state = log_store.get_language_server_state(server_id)?;
+ state.toggled_log_kind = Some(LogKind::Logs);
+ send_toggle_log_message(state, server_id, true, LogType::Log, cx);
+ Some(())
+ });
}
fn update_log_level(
@@ -1006,17 +490,29 @@ impl LspLogView {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ let trace_level = self
+ .log_store
+ .update(cx, |log_store, _| {
+ Some(log_store.get_language_server_state(server_id)?.trace_level)
+ })
+ .unwrap_or(TraceValue::Messages);
let log_contents = self
.log_store
.read(cx)
.server_trace(server_id)
- .map(|v| log_contents(v, ()));
+ .map(|v| log_contents(v, trace_level));
if let Some(log_contents) = log_contents {
self.current_server_id = Some(server_id);
self.active_entry_kind = LogKind::Trace;
let (editor, editor_subscriptions) = Self::editor_for_logs(log_contents, window, cx);
self.editor = editor;
self.editor_subscriptions = editor_subscriptions;
+ self.log_store.update(cx, |log_store, cx| {
+ let state = log_store.get_language_server_state(server_id)?;
+ state.toggled_log_kind = Some(LogKind::Trace);
+ send_toggle_log_message(state, server_id, true, LogType::Trace, cx);
+ Some(())
+ });
cx.notify();
}
self.editor.read(cx).focus_handle(cx).focus(window);
@@ -1028,6 +524,7 @@ impl LspLogView {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ self.toggle_rpc_trace_for_server(server_id, true, window, cx);
let rpc_log = self.log_store.update(cx, |log_store, _| {
log_store
.enable_rpc_trace_for_language_server(server_id)
@@ -1072,12 +569,16 @@ impl LspLogView {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.log_store.update(cx, |log_store, _| {
+ self.log_store.update(cx, |log_store, cx| {
if enabled {
log_store.enable_rpc_trace_for_language_server(server_id);
} else {
log_store.disable_rpc_trace_for_language_server(server_id);
}
+
+ if let Some(server_state) = log_store.language_servers.get(&server_id) {
+ send_toggle_log_message(server_state, server_id, enabled, LogType::Rpc, cx);
+ };
});
if !enabled && Some(server_id) == self.current_server_id {
self.show_logs_for_server(server_id, window, cx);
@@ -1116,17 +617,85 @@ impl LspLogView {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let lsp_store = self.project.read(cx).lsp_store();
- let Some(server) = lsp_store.read(cx).language_server_for_id(server_id) else {
+ let Some(server_info) = self
+ .project
+ .read(cx)
+ .lsp_store()
+ .update(cx, |lsp_store, _| {
+ lsp_store
+ .language_server_for_id(server_id)
+ .as_ref()
+ .map(|language_server| ServerInfo::new(language_server))
+ .or_else(move || {
+ let capabilities =
+ lsp_store.lsp_server_capabilities.get(&server_id)?.clone();
+ let name = lsp_store
+ .language_server_statuses
+ .get(&server_id)
+ .map(|status| status.name.clone())?;
+ Some(ServerInfo {
+ id: server_id,
+ capabilities,
+ binary: None,
+ name,
+ workspace_folders: Vec::new(),
+ configuration: None,
+ })
+ })
+ })
+ else {
return;
};
self.current_server_id = Some(server_id);
self.active_entry_kind = LogKind::ServerInfo;
- let (editor, editor_subscriptions) = Self::editor_for_server_info(&server, window, cx);
+ let (editor, editor_subscriptions) = Self::editor_for_server_info(server_info, window, cx);
self.editor = editor;
self.editor_subscriptions = editor_subscriptions;
cx.notify();
self.editor.read(cx).focus_handle(cx).focus(window);
+ self.log_store.update(cx, |log_store, cx| {
+ let state = log_store.get_language_server_state(server_id)?;
+ if let Some(log_kind) = state.toggled_log_kind.take() {
+ if let Some(log_type) = log_type(log_kind) {
+ send_toggle_log_message(state, server_id, false, log_type, cx);
+ }
+ };
+ Some(())
+ });
+ }
+}
+
+fn log_type(log_kind: LogKind) -> Option<LogType> {
+ match log_kind {
+ LogKind::Rpc => Some(LogType::Rpc),
+ LogKind::Trace => Some(LogType::Trace),
+ LogKind::Logs => Some(LogType::Log),
+ LogKind::ServerInfo => None,
+ }
+}
+
+fn send_toggle_log_message(
+ server_state: &log_store::LanguageServerState,
+ server_id: LanguageServerId,
+ enabled: bool,
+ log_type: LogType,
+ cx: &mut App,
+) {
+ if let LanguageServerKind::Remote { project } = &server_state.kind {
+ project
+ .update(cx, |project, cx| {
+ if let Some((client, project_id)) = project.lsp_store().read(cx).upstream_client() {
+ client
+ .send(proto::ToggleLspLogs {
+ project_id,
+ log_type: log_type as i32,
+ server_id: server_id.to_proto(),
+ enabled,
+ })
+ .log_err();
+ }
+ })
+ .ok();
}
}
@@ -1,20 +1,22 @@
use std::sync::Arc;
-use crate::lsp_log::LogMenuItem;
+use crate::lsp_log_view::LogMenuItem;
use super::*;
use futures::StreamExt;
use gpui::{AppContext as _, SemanticVersion, TestAppContext, VisualTestContext};
use language::{FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
use lsp::LanguageServerName;
-use lsp_log::LogKind;
-use project::{FakeFs, Project};
+use project::{
+ FakeFs, Project,
+ lsp_store::log_store::{LanguageServerKind, LogKind, LogStore},
+};
use serde_json::json;
use settings::SettingsStore;
use util::path;
#[gpui::test]
-async fn test_lsp_logs(cx: &mut TestAppContext) {
+async fn test_lsp_log_view(cx: &mut TestAppContext) {
zlog::init_test();
init_test(cx);
@@ -51,7 +53,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
},
);
- let log_store = cx.new(LogStore::new);
+ let log_store = cx.new(|cx| LogStore::new(false, cx));
log_store.update(cx, |store, cx| store.add_project(&project, cx));
let _rust_buffer = project
@@ -94,7 +96,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) {
rpc_trace_enabled: false,
selected_entry: LogKind::Logs,
trace_level: lsp::TraceValue::Off,
- server_kind: lsp_log::LanguageServerKind::Local {
+ server_kind: LanguageServerKind::Local {
project: project.downgrade()
}
}]
@@ -1,17 +1,22 @@
+use command_palette_hooks::CommandPaletteFilter;
use editor::{Anchor, Editor, ExcerptId, SelectionEffects, scroll::Autoscroll};
use gpui::{
- App, AppContext as _, Context, Div, Entity, EventEmitter, FocusHandle, Focusable, Hsla,
- InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent, ParentElement,
- Render, ScrollStrategy, SharedString, Styled, UniformListScrollHandle, WeakEntity, Window,
- actions, div, rems, uniform_list,
+ App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
+ Hsla, InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent,
+ ParentElement, Render, ScrollStrategy, SharedString, Styled, UniformListScrollHandle,
+ WeakEntity, Window, actions, div, rems, uniform_list,
};
use language::{Buffer, OwnedSyntaxLayer};
-use std::{mem, ops::Range};
+use std::{any::TypeId, mem, ops::Range};
use theme::ActiveTheme;
use tree_sitter::{Node, TreeCursor};
-use ui::{ButtonLike, Color, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex};
+use ui::{
+ ButtonCommon, ButtonLike, Clickable, Color, ContextMenu, FluentBuilder as _, IconButton,
+ IconName, Label, LabelCommon, LabelSize, PopoverMenu, StyledExt, Tooltip, h_flex, v_flex,
+};
use workspace::{
- SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
+ Event as WorkspaceEvent, SplitDirection, ToolbarItemEvent, ToolbarItemLocation,
+ ToolbarItemView, Workspace,
item::{Item, ItemHandle},
};
@@ -19,17 +24,51 @@ actions!(
dev,
[
/// Opens the syntax tree view for the current file.
- OpenSyntaxTreeView
+ OpenSyntaxTreeView,
+ ]
+);
+
+actions!(
+ syntax_tree_view,
+ [
+ /// Update the syntax tree view to show the last focused file.
+ UseActiveEditor
]
);
pub fn init(cx: &mut App) {
- cx.observe_new(|workspace: &mut Workspace, _, _| {
- workspace.register_action(|workspace, _: &OpenSyntaxTreeView, window, cx| {
+ let syntax_tree_actions = [TypeId::of::<UseActiveEditor>()];
+
+ CommandPaletteFilter::update_global(cx, |this, _| {
+ this.hide_action_types(&syntax_tree_actions);
+ });
+
+ cx.observe_new(move |workspace: &mut Workspace, _, _| {
+ workspace.register_action(move |workspace, _: &OpenSyntaxTreeView, window, cx| {
+ CommandPaletteFilter::update_global(cx, |this, _| {
+ this.show_action_types(&syntax_tree_actions);
+ });
+
let active_item = workspace.active_item(cx);
let workspace_handle = workspace.weak_handle();
- let syntax_tree_view =
- cx.new(|cx| SyntaxTreeView::new(workspace_handle, active_item, window, cx));
+ let syntax_tree_view = cx.new(|cx| {
+ cx.on_release(move |view: &mut SyntaxTreeView, cx| {
+ if view
+ .workspace_handle
+ .read_with(cx, |workspace, cx| {
+ workspace.item_of_type::<SyntaxTreeView>(cx).is_none()
+ })
+ .unwrap_or_default()
+ {
+ CommandPaletteFilter::update_global(cx, |this, _| {
+ this.hide_action_types(&syntax_tree_actions);
+ });
+ }
+ })
+ .detach();
+
+ SyntaxTreeView::new(workspace_handle, active_item, window, cx)
+ });
workspace.split_item(
SplitDirection::Right,
Box::new(syntax_tree_view),
@@ -37,6 +76,13 @@ pub fn init(cx: &mut App) {
cx,
)
});
+ workspace.register_action(|workspace, _: &UseActiveEditor, window, cx| {
+ if let Some(tree_view) = workspace.item_of_type::<SyntaxTreeView>(cx) {
+ tree_view.update(cx, |view, cx| {
+ view.update_active_editor(&Default::default(), window, cx)
+ })
+ }
+ });
})
.detach();
}
@@ -45,6 +91,9 @@ pub struct SyntaxTreeView {
workspace_handle: WeakEntity<Workspace>,
editor: Option<EditorState>,
list_scroll_handle: UniformListScrollHandle,
+ /// The last active editor in the workspace. Note that this is specifically not the
+ /// currently shown editor.
+ last_active_editor: Option<Entity<Editor>>,
selected_descendant_ix: Option<usize>,
hovered_descendant_ix: Option<usize>,
focus_handle: FocusHandle,
@@ -61,6 +110,14 @@ struct EditorState {
_subscription: gpui::Subscription,
}
+impl EditorState {
+ fn has_language(&self) -> bool {
+ self.active_buffer
+ .as_ref()
+ .is_some_and(|buffer| buffer.active_layer.is_some())
+ }
+}
+
#[derive(Clone)]
struct BufferState {
buffer: Entity<Buffer>,
@@ -79,17 +136,25 @@ impl SyntaxTreeView {
workspace_handle: workspace_handle.clone(),
list_scroll_handle: UniformListScrollHandle::new(),
editor: None,
+ last_active_editor: None,
hovered_descendant_ix: None,
selected_descendant_ix: None,
focus_handle: cx.focus_handle(),
};
- this.workspace_updated(active_item, window, cx);
- cx.observe_in(
+ this.handle_item_updated(active_item, window, cx);
+
+ cx.subscribe_in(
&workspace_handle.upgrade().unwrap(),
window,
- |this, workspace, window, cx| {
- this.workspace_updated(workspace.read(cx).active_item(cx), window, cx);
+ move |this, workspace, event, window, cx| match event {
+ WorkspaceEvent::ItemAdded { .. } | WorkspaceEvent::ActiveItemChanged => {
+ this.handle_item_updated(workspace.read(cx).active_item(cx), window, cx)
+ }
+ WorkspaceEvent::ItemRemoved { item_id } => {
+ this.handle_item_removed(item_id, window, cx);
+ }
+ _ => {}
},
)
.detach();
@@ -97,20 +162,56 @@ impl SyntaxTreeView {
this
}
- fn workspace_updated(
+ fn handle_item_updated(
&mut self,
active_item: Option<Box<dyn ItemHandle>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
- if let Some(item) = active_item
- && item.item_id() != cx.entity_id()
- && let Some(editor) = item.act_as::<Editor>(cx)
- {
+ let Some(editor) = active_item
+ .filter(|item| item.item_id() != cx.entity_id())
+ .and_then(|item| item.act_as::<Editor>(cx))
+ else {
+ return;
+ };
+
+ if let Some(editor_state) = self.editor.as_ref().filter(|state| state.has_language()) {
+ self.last_active_editor = (editor_state.editor != editor).then_some(editor);
+ } else {
self.set_editor(editor, window, cx);
}
}
+ fn handle_item_removed(
+ &mut self,
+ item_id: &EntityId,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self
+ .editor
+ .as_ref()
+ .is_some_and(|state| state.editor.entity_id() == *item_id)
+ {
+ self.editor = None;
+ // Try activating the last active editor if there is one
+ self.update_active_editor(&Default::default(), window, cx);
+ cx.notify();
+ }
+ }
+
+ fn update_active_editor(
+ &mut self,
+ _: &UseActiveEditor,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(editor) = self.last_active_editor.take() else {
+ return;
+ };
+ self.set_editor(editor, window, cx);
+ }
+
fn set_editor(&mut self, editor: Entity<Editor>, window: &mut Window, cx: &mut Context<Self>) {
if let Some(state) = &self.editor {
if state.editor == editor {
@@ -156,7 +257,7 @@ impl SyntaxTreeView {
.buffer_snapshot
.range_to_buffer_ranges(selection_range)
.pop()?;
- let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap().clone();
+ let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap();
Some((buffer, range, excerpt_id))
})?;
@@ -294,101 +395,153 @@ impl SyntaxTreeView {
.pl(rems(depth as f32))
.hover(|style| style.bg(colors.element_hover))
}
-}
-
-impl Render for SyntaxTreeView {
- fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let mut rendered = div().flex_1().bg(cx.theme().colors().editor_background);
- if let Some(layer) = self
- .editor
- .as_ref()
- .and_then(|editor| editor.active_buffer.as_ref())
- .and_then(|buffer| buffer.active_layer.as_ref())
- {
- let layer = layer.clone();
- rendered = rendered.child(uniform_list(
- "SyntaxTreeView",
- layer.node().descendant_count(),
- cx.processor(move |this, range: Range<usize>, _, cx| {
- let mut items = Vec::new();
- let mut cursor = layer.node().walk();
- let mut descendant_ix = range.start;
- cursor.goto_descendant(descendant_ix);
- let mut depth = cursor.depth();
- let mut visited_children = false;
- while descendant_ix < range.end {
- if visited_children {
- if cursor.goto_next_sibling() {
- visited_children = false;
- } else if cursor.goto_parent() {
- depth -= 1;
- } else {
- break;
- }
- } else {
- items.push(
- Self::render_node(
- &cursor,
- depth,
- Some(descendant_ix) == this.selected_descendant_ix,
+ fn compute_items(
+ &mut self,
+ layer: &OwnedSyntaxLayer,
+ range: Range<usize>,
+ cx: &Context<Self>,
+ ) -> Vec<Div> {
+ let mut items = Vec::new();
+ let mut cursor = layer.node().walk();
+ let mut descendant_ix = range.start;
+ cursor.goto_descendant(descendant_ix);
+ let mut depth = cursor.depth();
+ let mut visited_children = false;
+ while descendant_ix < range.end {
+ if visited_children {
+ if cursor.goto_next_sibling() {
+ visited_children = false;
+ } else if cursor.goto_parent() {
+ depth -= 1;
+ } else {
+ break;
+ }
+ } else {
+ items.push(
+ Self::render_node(
+ &cursor,
+ depth,
+ Some(descendant_ix) == self.selected_descendant_ix,
+ cx,
+ )
+ .on_mouse_down(
+ MouseButton::Left,
+ cx.listener(move |tree_view, _: &MouseDownEvent, window, cx| {
+ tree_view.update_editor_with_range_for_descendant_ix(
+ descendant_ix,
+ window,
+ cx,
+ |editor, mut range, window, cx| {
+ // Put the cursor at the beginning of the node.
+ mem::swap(&mut range.start, &mut range.end);
+
+ editor.change_selections(
+ SelectionEffects::scroll(Autoscroll::newest()),
+ window,
+ cx,
+ |selections| {
+ selections.select_ranges(vec![range]);
+ },
+ );
+ },
+ );
+ }),
+ )
+ .on_mouse_move(cx.listener(
+ move |tree_view, _: &MouseMoveEvent, window, cx| {
+ if tree_view.hovered_descendant_ix != Some(descendant_ix) {
+ tree_view.hovered_descendant_ix = Some(descendant_ix);
+ tree_view.update_editor_with_range_for_descendant_ix(
+ descendant_ix,
+ window,
cx,
- )
- .on_mouse_down(
- MouseButton::Left,
- cx.listener(move |tree_view, _: &MouseDownEvent, window, cx| {
- tree_view.update_editor_with_range_for_descendant_ix(
- descendant_ix,
- window, cx,
- |editor, mut range, window, cx| {
- // Put the cursor at the beginning of the node.
- mem::swap(&mut range.start, &mut range.end);
-
- editor.change_selections(
- SelectionEffects::scroll(Autoscroll::newest()),
- window, cx,
- |selections| {
- selections.select_ranges(vec![range]);
- },
- );
+ |editor, range, _, cx| {
+ editor.clear_background_highlights::<Self>(cx);
+ editor.highlight_background::<Self>(
+ &[range],
+ |theme| {
+ theme
+ .colors()
+ .editor_document_highlight_write_background
},
+ cx,
);
- }),
- )
- .on_mouse_move(cx.listener(
- move |tree_view, _: &MouseMoveEvent, window, cx| {
- if tree_view.hovered_descendant_ix != Some(descendant_ix) {
- tree_view.hovered_descendant_ix = Some(descendant_ix);
- tree_view.update_editor_with_range_for_descendant_ix(descendant_ix, window, cx, |editor, range, _, cx| {
- editor.clear_background_highlights::<Self>( cx);
- editor.highlight_background::<Self>(
- &[range],
- |theme| theme.colors().editor_document_highlight_write_background,
- cx,
- );
- });
- cx.notify();
- }
},
- )),
- );
- descendant_ix += 1;
- if cursor.goto_first_child() {
- depth += 1;
- } else {
- visited_children = true;
+ );
+ cx.notify();
}
- }
- }
- items
- }),
- )
- .size_full()
- .track_scroll(self.list_scroll_handle.clone())
- .text_bg(cx.theme().colors().background).into_any_element());
+ },
+ )),
+ );
+ descendant_ix += 1;
+ if cursor.goto_first_child() {
+ depth += 1;
+ } else {
+ visited_children = true;
+ }
+ }
}
+ items
+ }
+}
- rendered
+impl Render for SyntaxTreeView {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ div()
+ .flex_1()
+ .bg(cx.theme().colors().editor_background)
+ .map(|this| {
+ let editor_state = self.editor.as_ref();
+
+ if let Some(layer) = editor_state
+ .and_then(|editor| editor.active_buffer.as_ref())
+ .and_then(|buffer| buffer.active_layer.as_ref())
+ {
+ let layer = layer.clone();
+ this.child(
+ uniform_list(
+ "SyntaxTreeView",
+ layer.node().descendant_count(),
+ cx.processor(move |this, range: Range<usize>, _, cx| {
+ this.compute_items(&layer, range, cx)
+ }),
+ )
+ .size_full()
+ .track_scroll(self.list_scroll_handle.clone())
+ .text_bg(cx.theme().colors().background)
+ .into_any_element(),
+ )
+ } else {
+ let inner_content = v_flex()
+ .items_center()
+ .text_center()
+ .gap_2()
+ .max_w_3_5()
+ .map(|this| {
+ if editor_state.is_some_and(|state| !state.has_language()) {
+ this.child(Label::new("Current editor has no associated language"))
+ .child(
+ Label::new(concat!(
+ "Try assigning a language or",
+ "switching to a different buffer"
+ ))
+ .size(LabelSize::Small),
+ )
+ } else {
+ this.child(Label::new("Not attached to an editor")).child(
+ Label::new("Focus an editor to show a new tree view")
+ .size(LabelSize::Small),
+ )
+ }
+ });
+
+ this.h_flex()
+ .size_full()
+ .justify_center()
+ .child(inner_content)
+ }
+ })
}
}
@@ -506,6 +659,26 @@ impl SyntaxTreeToolbarItemView {
.child(Label::new(active_layer.language.name()))
.child(Label::new(format_node_range(active_layer.node())))
}
+
+ fn render_update_button(&mut self, cx: &mut Context<Self>) -> Option<IconButton> {
+ self.tree_view.as_ref().and_then(|view| {
+ view.update(cx, |view, cx| {
+ view.last_active_editor.as_ref().map(|editor| {
+ IconButton::new("syntax-view-update", IconName::RotateCw)
+ .tooltip({
+ let active_tab_name = editor.read_with(cx, |editor, cx| {
+ editor.tab_content_text(Default::default(), cx)
+ });
+
+ Tooltip::text(format!("Update view to '{active_tab_name}'"))
+ })
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.update_active_editor(&Default::default(), window, cx);
+ }))
+ })
+ })
+ })
+ }
}
fn format_node_range(node: Node) -> String {
@@ -522,8 +695,10 @@ fn format_node_range(node: Node) -> String {
impl Render for SyntaxTreeToolbarItemView {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- self.render_menu(cx)
- .unwrap_or_else(|| PopoverMenu::new("Empty Syntax Tree"))
+ h_flex()
+ .gap_1()
+ .children(self.render_menu(cx))
+ .children(self.render_update_button(cx))
}
}
@@ -42,7 +42,6 @@ async-trait.workspace = true
chrono.workspace = true
collections.workspace = true
dap.workspace = true
-feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
@@ -5,8 +5,9 @@ use gpui::{App, AsyncApp};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary, LanguageServerName};
-use project::lsp_store::clangd_ext;
+use project::{lsp_store::clangd_ext, project_settings::ProjectSettings};
use serde_json::json;
+use settings::Settings as _;
use smol::fs;
use std::{any::Any, env::consts, path::PathBuf, sync::Arc};
use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into};
@@ -22,7 +23,7 @@ impl CLspAdapter {
#[async_trait(?Send)]
impl super::LspAdapter for CLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn check_if_user_installed(
@@ -42,9 +43,19 @@ impl super::LspAdapter for CLspAdapter {
async fn fetch_latest_server_version(
&self,
delegate: &dyn LspAdapterDelegate,
+ cx: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release =
- latest_github_release("clangd/clangd", true, false, delegate.http_client()).await?;
+ let release = latest_github_release(
+ "clangd/clangd",
+ true,
+ ProjectSettings::try_read_global(cx, |s| {
+ s.lsp.get(&Self::SERVER_NAME)?.fetch.as_ref()?.pre_release
+ })
+ .flatten()
+ .unwrap_or(false),
+ delegate.http_client(),
+ )
+ .await?;
let os_suffix = match consts::OS {
"macos" => "mac",
"linux" => "linux",
@@ -253,8 +264,7 @@ impl super::LspAdapter for CLspAdapter {
.grammar()
.and_then(|g| g.highlight_id_for_name(highlight_name?))
{
- let mut label =
- CodeLabel::plain(label.to_string(), completion.filter_text.as_deref());
+ let mut label = CodeLabel::plain(label, completion.filter_text.as_deref());
label.runs.push((
0..label.text.rfind('(').unwrap_or(label.text.len()),
highlight_id,
@@ -264,10 +274,7 @@ impl super::LspAdapter for CLspAdapter {
}
_ => {}
}
- Some(CodeLabel::plain(
- label.to_string(),
- completion.filter_text.as_deref(),
- ))
+ Some(CodeLabel::plain(label, completion.filter_text.as_deref()))
}
async fn label_for_symbol(
@@ -3,8 +3,27 @@
(namespace_identifier) @namespace
(concept_definition
- (identifier) @concept)
+ name: (identifier) @concept)
+(requires_clause
+ constraint: (template_type
+ name: (type_identifier) @concept))
+
+(module_name
+ (identifier) @module)
+
+(module_declaration
+ name: (module_name
+ (identifier) @module))
+
+(import_declaration
+ name: (module_name
+ (identifier) @module))
+
+(import_declaration
+ partition: (module_partition
+ (module_name
+ (identifier) @module)))
(call_expression
function: (qualified_identifier
@@ -61,6 +80,9 @@
(operator_name
(identifier)? @operator) @function
+(operator_name
+ "<=>" @operator.spaceship)
+
(destructor_name (identifier) @function)
((namespace_identifier) @type
@@ -68,21 +90,17 @@
(auto) @type
(type_identifier) @type
-type :(primitive_type) @type.primitive
-(sized_type_specifier) @type.primitive
-
-(requires_clause
- constraint: (template_type
- name: (type_identifier) @concept))
+type: (primitive_type) @type.builtin
+(sized_type_specifier) @type.builtin
(attribute
- name: (identifier) @keyword)
+ name: (identifier) @attribute)
-((identifier) @constant
- (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
+((identifier) @constant.builtin
+ (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$"))
(statement_identifier) @label
-(this) @variable.special
+(this) @variable.builtin
("static_assert") @function.builtin
[
@@ -96,7 +114,9 @@ type :(primitive_type) @type.primitive
"co_return"
"co_yield"
"concept"
+ "consteval"
"constexpr"
+ "constinit"
"continue"
"decltype"
"default"
@@ -105,15 +125,20 @@ type :(primitive_type) @type.primitive
"else"
"enum"
"explicit"
+ "export"
"extern"
"final"
"for"
"friend"
+ "goto"
"if"
+ "import"
"inline"
+ "module"
"namespace"
"new"
"noexcept"
+ "operator"
"override"
"private"
"protected"
@@ -124,6 +149,7 @@ type :(primitive_type) @type.primitive
"struct"
"switch"
"template"
+ "thread_local"
"throw"
"try"
"typedef"
@@ -146,7 +172,7 @@ type :(primitive_type) @type.primitive
"#ifndef"
"#include"
(preproc_directive)
-] @keyword
+] @keyword.directive
(comment) @comment
@@ -224,10 +250,24 @@ type :(primitive_type) @type.primitive
">"
"<="
">="
- "<=>"
- "||"
"?"
+ "and"
+ "and_eq"
+ "bitand"
+ "bitor"
+ "compl"
+ "not"
+ "not_eq"
+ "or"
+ "or_eq"
+ "xor"
+ "xor_eq"
] @operator
+"<=>" @operator.spaceship
+
+(binary_expression
+ operator: "<=>" @operator.spaceship)
+
(conditional_expression ":" @operator)
(user_defined_literal (literal_suffix) @operator)
@@ -61,6 +61,7 @@ impl LspAdapter for CssLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -53,12 +53,13 @@ const BINARY: &str = if cfg!(target_os = "windows") {
#[async_trait(?Send)]
impl super::LspAdapter for GoLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
delegate: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
let release =
latest_github_release("golang/tools", false, false, delegate.http_client()).await?;
@@ -525,7 +526,7 @@ impl ContextProvider for GoContextProvider {
})
.unwrap_or_else(|| format!("{}", buffer_dir.to_string_lossy()));
- (GO_PACKAGE_TASK_VARIABLE.clone(), package_name.to_string())
+ (GO_PACKAGE_TASK_VARIABLE.clone(), package_name)
});
let go_module_root_variable = local_abs_path
@@ -702,7 +703,7 @@ impl ContextProvider for GoContextProvider {
label: format!("go generate {}", GO_PACKAGE_TASK_VARIABLE.template_value()),
command: "go".into(),
args: vec!["generate".into()],
- cwd: package_cwd.clone(),
+ cwd: package_cwd,
tags: vec!["go-generate".to_owned()],
..TaskTemplate::default()
},
@@ -710,7 +711,7 @@ impl ContextProvider for GoContextProvider {
label: "go generate ./...".into(),
command: "go".into(),
args: vec!["generate".into(), "./...".into()],
- cwd: module_cwd.clone(),
+ cwd: module_cwd,
..TaskTemplate::default()
},
])))
@@ -764,6 +765,7 @@ mod tests {
let highlight_type = grammar.highlight_id_for_name("type").unwrap();
let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
let highlight_number = grammar.highlight_id_for_name("number").unwrap();
+ let highlight_field = grammar.highlight_id_for_name("property").unwrap();
assert_eq!(
adapter
@@ -828,7 +830,7 @@ mod tests {
Some(CodeLabel {
text: "two.Three a.Bcd".to_string(),
filter_range: 0..9,
- runs: vec![(12..15, highlight_type)],
+ runs: vec![(4..9, highlight_field), (12..15, highlight_type)],
})
);
}
@@ -1,13 +1,15 @@
(identifier) @variable
(type_identifier) @type
-(field_identifier) @variable.member
+(field_identifier) @property
(package_identifier) @namespace
+(label_name) @label
+
(keyed_element
.
(literal_element
- (identifier) @variable.member))
+ (identifier) @property))
(call_expression
function: (identifier) @function)
@@ -6,6 +6,7 @@ first_line_pattern = '^#!.*\b(?:[/ ]node|deno run.*--ext[= ]js)\b'
line_comments = ["// "]
block_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 }
documentation_comment = { start = "/**", prefix = "* ", end = "*/", tab_size = 1 }
+wrap_characters = { start_prefix = "<", start_suffix = ">", end_prefix = "</", end_suffix = ">" }
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
@@ -231,6 +231,7 @@
"implements"
"interface"
"keyof"
+ "module"
"namespace"
"private"
"protected"
@@ -250,4 +251,4 @@
(jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)
(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx)
(jsx_attribute "=" @punctuation.delimiter.jsx)
-(jsx_text) @text.jsx
+(jsx_text) @text.jsx
@@ -11,6 +11,21 @@
(#set! injection.language "css"))
)
+(call_expression
+ function: (member_expression
+ object: (identifier) @_obj (#eq? @_obj "styled")
+ property: (property_identifier))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
+(call_expression
+ function: (call_expression
+ function: (identifier) @_name (#eq? @_name "styled"))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
(call_expression
function: (identifier) @_name (#eq? @_name "html")
arguments: (template_string) @injection.content
@@ -58,3 +73,9 @@
arguments: (arguments (template_string (string_fragment) @injection.content
(#set! injection.language "graphql")))
)
+
+(call_expression
+ function: (identifier) @_name(#match? @_name "^iso$")
+ arguments: (arguments (template_string (string_fragment) @injection.content
+ (#set! injection.language "isograph")))
+)
@@ -234,7 +234,7 @@ impl JsonLspAdapter {
schemas
.as_array_mut()
.unwrap()
- .extend(cx.all_action_names().into_iter().map(|&name| {
+ .extend(cx.all_action_names().iter().map(|&name| {
project::lsp_store::json_language_server_ext::url_schema_for_action(name)
}));
@@ -321,6 +321,7 @@ impl LspAdapter for JsonLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(
self.node
@@ -488,12 +489,13 @@ impl NodeVersionAdapter {
#[async_trait(?Send)]
impl LspAdapter for NodeVersionAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
delegate: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
let release = latest_github_release(
"zed-industries/package-version-server",
@@ -1 +1,2 @@
+(comment) @comment.inclusive
(string) @string
@@ -1,5 +1,4 @@
use anyhow::Context as _;
-use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
use gpui::{App, SharedString, UpdateGlobal};
use node_runtime::NodeRuntime;
use python::PyprojectTomlManifestProvider;
@@ -54,12 +53,6 @@ pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock<Arc<Language>> =
))
});
-struct BasedPyrightFeatureFlag;
-
-impl FeatureFlag for BasedPyrightFeatureFlag {
- const NAME: &'static str = "basedpyright";
-}
-
pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
#[cfg(feature = "load-grammars")]
languages.register_native_grammars([
@@ -97,14 +90,14 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
let python_context_provider = Arc::new(python::PythonContextProvider);
let python_lsp_adapter = Arc::new(python::PythonLspAdapter::new(node.clone()));
let basedpyright_lsp_adapter = Arc::new(BasedPyrightLspAdapter::new());
- let python_toolchain_provider = Arc::new(python::PythonToolchainProvider::default());
+ let python_toolchain_provider = Arc::new(python::PythonToolchainProvider);
let rust_context_provider = Arc::new(rust::RustContextProvider);
let rust_lsp_adapter = Arc::new(rust::RustLspAdapter);
let tailwind_adapter = Arc::new(tailwind::TailwindLspAdapter::new(node.clone()));
let typescript_context = Arc::new(typescript::TypeScriptContextProvider::new());
let typescript_lsp_adapter = Arc::new(typescript::TypeScriptLspAdapter::new(node.clone()));
let vtsls_adapter = Arc::new(vtsls::VtslsLspAdapter::new(node.clone()));
- let yaml_lsp_adapter = Arc::new(yaml::YamlLspAdapter::new(node.clone()));
+ let yaml_lsp_adapter = Arc::new(yaml::YamlLspAdapter::new(node));
let built_in_languages = [
LanguageInfo {
@@ -119,12 +112,12 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
},
LanguageInfo {
name: "cpp",
- adapters: vec![c_lsp_adapter.clone()],
+ adapters: vec![c_lsp_adapter],
..Default::default()
},
LanguageInfo {
name: "css",
- adapters: vec![css_lsp_adapter.clone()],
+ adapters: vec![css_lsp_adapter],
..Default::default()
},
LanguageInfo {
@@ -146,20 +139,20 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
},
LanguageInfo {
name: "gowork",
- adapters: vec![go_lsp_adapter.clone()],
- context: Some(go_context_provider.clone()),
+ adapters: vec![go_lsp_adapter],
+ context: Some(go_context_provider),
..Default::default()
},
LanguageInfo {
name: "json",
- adapters: vec![json_lsp_adapter.clone(), node_version_lsp_adapter.clone()],
+ adapters: vec![json_lsp_adapter.clone(), node_version_lsp_adapter],
context: Some(json_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "jsonc",
- adapters: vec![json_lsp_adapter.clone()],
- context: Some(json_context_provider.clone()),
+ adapters: vec![json_lsp_adapter],
+ context: Some(json_context_provider),
..Default::default()
},
LanguageInfo {
@@ -174,7 +167,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
},
LanguageInfo {
name: "python",
- adapters: vec![python_lsp_adapter.clone(), py_lsp_adapter.clone()],
+ adapters: vec![basedpyright_lsp_adapter],
context: Some(python_context_provider),
toolchain: Some(python_toolchain_provider),
manifest_name: Some(SharedString::new_static("pyproject.toml").into()),
@@ -201,7 +194,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
LanguageInfo {
name: "javascript",
adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()],
- context: Some(typescript_context.clone()),
+ context: Some(typescript_context),
..Default::default()
},
LanguageInfo {
@@ -240,17 +233,6 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
);
}
- let mut basedpyright_lsp_adapter = Some(basedpyright_lsp_adapter);
- cx.observe_flag::<BasedPyrightFeatureFlag, _>({
- let languages = languages.clone();
- move |enabled, _| {
- if enabled && let Some(adapter) = basedpyright_lsp_adapter.take() {
- languages.register_available_lsp_adapter(adapter.name(), move || adapter.clone());
- }
- }
- })
- .detach();
-
// Register globally available language servers.
//
// This will allow users to add support for a built-in language server (e.g., Tailwind)
@@ -267,27 +249,19 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
// ```
languages.register_available_lsp_adapter(
LanguageServerName("tailwindcss-language-server".into()),
- {
- let adapter = tailwind_adapter.clone();
- move || adapter.clone()
- },
+ tailwind_adapter.clone(),
);
- languages.register_available_lsp_adapter(LanguageServerName("eslint".into()), {
- let adapter = eslint_adapter.clone();
- move || adapter.clone()
- });
- languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), {
- let adapter = vtsls_adapter.clone();
- move || adapter.clone()
- });
+ languages.register_available_lsp_adapter(
+ LanguageServerName("eslint".into()),
+ eslint_adapter.clone(),
+ );
+ languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), vtsls_adapter);
languages.register_available_lsp_adapter(
LanguageServerName("typescript-language-server".into()),
- {
- let adapter = typescript_lsp_adapter.clone();
- move || adapter.clone()
- },
+ typescript_lsp_adapter,
);
-
+ languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter);
+ languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter);
// Register Tailwind for the existing languages that should have it by default.
//
// This can be driven by the `language_servers` setting once we have a way for
@@ -296,6 +270,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
"Astro",
"CSS",
"ERB",
+ "HTML+ERB",
"HTML/ERB",
"HEEX",
"HTML",
@@ -12,6 +12,7 @@ brackets = [
{ start = "\"", end = "\"", close = false, newline = false },
{ start = "'", end = "'", close = false, newline = false },
{ start = "`", end = "`", close = false, newline = false },
+ { start = "*", end = "*", close = false, newline = false, surround = true },
]
rewrap_prefixes = [
"[-*+]\\s+",
@@ -2,21 +2,22 @@ use anyhow::{Context as _, ensure};
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
+use futures::AsyncBufReadExt;
use gpui::{App, Task};
use gpui::{AsyncApp, SharedString};
-use language::Toolchain;
use language::ToolchainList;
use language::ToolchainLister;
use language::language_settings::language_settings;
use language::{ContextLocation, LanguageToolchainStore};
use language::{ContextProvider, LspAdapter, LspAdapterDelegate};
use language::{LanguageName, ManifestName, ManifestProvider, ManifestQuery};
+use language::{Toolchain, ToolchainMetadata};
use lsp::LanguageServerBinary;
use lsp::LanguageServerName;
use node_runtime::{NodeRuntime, VersionStrategy};
use pet_core::Configuration;
use pet_core::os_environment::Environment;
-use pet_core::python_environment::PythonEnvironmentKind;
+use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind};
use project::Fs;
use project::lsp_store::language_server_settings;
use serde_json::{Value, json};
@@ -30,13 +31,11 @@ use std::{
borrow::Cow,
ffi::OsString,
fmt::Write,
- fs,
- io::{self, BufRead},
path::{Path, PathBuf},
sync::Arc,
};
-use task::{TaskTemplate, TaskTemplates, VariableName};
-use util::ResultExt;
+use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName};
+use util::{ResultExt, maybe};
pub(crate) struct PyprojectTomlManifestProvider;
@@ -103,7 +102,7 @@ impl PythonLspAdapter {
#[async_trait(?Send)]
impl LspAdapter for PythonLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn initialization_options(
@@ -158,6 +157,7 @@ impl LspAdapter for PythonLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -329,41 +329,35 @@ impl LspAdapter for PythonLspAdapter {
.unwrap_or_default();
// If we have a detected toolchain, configure Pyright to use it
- if let Some(toolchain) = toolchain {
+ if let Some(toolchain) = toolchain
+ && let Ok(env) = serde_json::from_value::<
+ pet_core::python_environment::PythonEnvironment,
+ >(toolchain.as_json.clone())
+ {
if user_settings.is_null() {
user_settings = Value::Object(serde_json::Map::default());
}
let object = user_settings.as_object_mut().unwrap();
let interpreter_path = toolchain.path.to_string();
+ if let Some(venv_dir) = env.prefix {
+ // Set venvPath and venv at the root level
+ // This matches the format of a pyrightconfig.json file
+ if let Some(parent) = venv_dir.parent() {
+ // Use relative path if the venv is inside the workspace
+ let venv_path = if parent == adapter.worktree_root_path() {
+ ".".to_string()
+ } else {
+ parent.to_string_lossy().into_owned()
+ };
+ object.insert("venvPath".to_string(), Value::String(venv_path));
+ }
- // Detect if this is a virtual environment
- if let Some(interpreter_dir) = Path::new(&interpreter_path).parent()
- && let Some(venv_dir) = interpreter_dir.parent()
- {
- // Check if this looks like a virtual environment
- if venv_dir.join("pyvenv.cfg").exists()
- || venv_dir.join("bin/activate").exists()
- || venv_dir.join("Scripts/activate.bat").exists()
- {
- // Set venvPath and venv at the root level
- // This matches the format of a pyrightconfig.json file
- if let Some(parent) = venv_dir.parent() {
- // Use relative path if the venv is inside the workspace
- let venv_path = if parent == adapter.worktree_root_path() {
- ".".to_string()
- } else {
- parent.to_string_lossy().into_owned()
- };
- object.insert("venvPath".to_string(), Value::String(venv_path));
- }
-
- if let Some(venv_name) = venv_dir.file_name() {
- object.insert(
- "venv".to_owned(),
- Value::String(venv_name.to_string_lossy().into_owned()),
- );
- }
+ if let Some(venv_name) = venv_dir.file_name() {
+ object.insert(
+ "venv".to_owned(),
+ Value::String(venv_name.to_string_lossy().into_owned()),
+ );
}
}
@@ -416,9 +410,6 @@ const PYTHON_TEST_TARGET_TASK_VARIABLE: VariableName =
const PYTHON_ACTIVE_TOOLCHAIN_PATH: VariableName =
VariableName::Custom(Cow::Borrowed("PYTHON_ACTIVE_ZED_TOOLCHAIN"));
-const PYTHON_ACTIVE_TOOLCHAIN_PATH_RAW: VariableName =
- VariableName::Custom(Cow::Borrowed("PYTHON_ACTIVE_ZED_TOOLCHAIN_RAW"));
-
const PYTHON_MODULE_NAME_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("PYTHON_MODULE_NAME"));
@@ -442,7 +433,7 @@ impl ContextProvider for PythonContextProvider {
let worktree_id = location_file.as_ref().map(|f| f.worktree_id(cx));
cx.spawn(async move |cx| {
- let raw_toolchain = if let Some(worktree_id) = worktree_id {
+ let active_toolchain = if let Some(worktree_id) = worktree_id {
let file_path = location_file
.as_ref()
.and_then(|f| f.path().parent())
@@ -460,15 +451,13 @@ impl ContextProvider for PythonContextProvider {
String::from("python3")
};
- let active_toolchain = format!("\"{raw_toolchain}\"");
let toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH, active_toolchain);
- let raw_toolchain_var = (PYTHON_ACTIVE_TOOLCHAIN_PATH_RAW, raw_toolchain);
Ok(task::TaskVariables::from_iter(
test_target
.into_iter()
.chain(module_target.into_iter())
- .chain([toolchain, raw_toolchain_var]),
+ .chain([toolchain]),
))
})
}
@@ -485,31 +474,31 @@ impl ContextProvider for PythonContextProvider {
// Execute a selection
TaskTemplate {
label: "execute selection".to_owned(),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-c".to_owned(),
VariableName::SelectedText.template_value_with_whitespace(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
},
// Execute an entire file
TaskTemplate {
label: format!("run '{}'", VariableName::File.template_value()),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![VariableName::File.template_value_with_whitespace()],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
},
// Execute a file as module
TaskTemplate {
label: format!("run module '{}'", VariableName::File.template_value()),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-m".to_owned(),
- PYTHON_MODULE_NAME_TASK_VARIABLE.template_value(),
+ PYTHON_MODULE_NAME_TASK_VARIABLE.template_value_with_whitespace(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
tags: vec!["python-module-main-method".to_owned()],
..TaskTemplate::default()
},
@@ -521,19 +510,19 @@ impl ContextProvider for PythonContextProvider {
// Run tests for an entire file
TaskTemplate {
label: format!("unittest '{}'", VariableName::File.template_value()),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-m".to_owned(),
"unittest".to_owned(),
VariableName::File.template_value_with_whitespace(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
},
// Run test(s) for a specific target within a file
TaskTemplate {
label: "unittest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-m".to_owned(),
"unittest".to_owned(),
@@ -543,7 +532,7 @@ impl ContextProvider for PythonContextProvider {
"python-unittest-class".to_owned(),
"python-unittest-method".to_owned(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
},
]
@@ -553,25 +542,25 @@ impl ContextProvider for PythonContextProvider {
// Run tests for an entire file
TaskTemplate {
label: format!("pytest '{}'", VariableName::File.template_value()),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-m".to_owned(),
"pytest".to_owned(),
VariableName::File.template_value_with_whitespace(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
},
// Run test(s) for a specific target within a file
TaskTemplate {
label: "pytest $ZED_CUSTOM_PYTHON_TEST_TARGET".to_owned(),
- command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value(),
+ command: PYTHON_ACTIVE_TOOLCHAIN_PATH.template_value_with_whitespace(),
args: vec![
"-m".to_owned(),
"pytest".to_owned(),
PYTHON_TEST_TARGET_TASK_VARIABLE.template_value_with_whitespace(),
],
- cwd: Some("$ZED_WORKTREE_ROOT".into()),
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
tags: vec![
"python-pytest-class".to_owned(),
"python-pytest-method".to_owned(),
@@ -699,19 +688,9 @@ fn python_env_kind_display(k: &PythonEnvironmentKind) -> &'static str {
}
}
-pub(crate) struct PythonToolchainProvider {
- term: SharedString,
-}
+pub(crate) struct PythonToolchainProvider;
-impl Default for PythonToolchainProvider {
- fn default() -> Self {
- Self {
- term: SharedString::new_static("Virtual Environment"),
- }
- }
-}
-
-static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[
+static ENV_PRIORITY_LIST: &[PythonEnvironmentKind] = &[
// Prioritize non-Conda environments.
PythonEnvironmentKind::Poetry,
PythonEnvironmentKind::Pipenv,
@@ -741,25 +720,24 @@ fn env_priority(kind: Option<PythonEnvironmentKind>) -> usize {
/// Return the name of environment declared in <worktree-root/.venv.
///
/// https://virtualfish.readthedocs.io/en/latest/plugins.html#auto-activation-auto-activation
-fn get_worktree_venv_declaration(worktree_root: &Path) -> Option<String> {
- fs::File::open(worktree_root.join(".venv"))
- .and_then(|file| {
- let mut venv_name = String::new();
- io::BufReader::new(file).read_line(&mut venv_name)?;
- Ok(venv_name.trim().to_string())
- })
- .ok()
+async fn get_worktree_venv_declaration(worktree_root: &Path) -> Option<String> {
+ let file = async_fs::File::open(worktree_root.join(".venv"))
+ .await
+ .ok()?;
+ let mut venv_name = String::new();
+ smol::io::BufReader::new(file)
+ .read_line(&mut venv_name)
+ .await
+ .ok()?;
+ Some(venv_name.trim().to_string())
}
#[async_trait]
impl ToolchainLister for PythonToolchainProvider {
- fn manifest_name(&self) -> language::ManifestName {
- ManifestName::from(SharedString::new_static("pyproject.toml"))
- }
async fn list(
&self,
worktree_root: PathBuf,
- subroot_relative_path: Option<Arc<Path>>,
+ subroot_relative_path: Arc<Path>,
project_env: Option<HashMap<String, String>>,
) -> ToolchainList {
let env = project_env.unwrap_or_default();
@@ -771,13 +749,15 @@ impl ToolchainLister for PythonToolchainProvider {
);
let mut config = Configuration::default();
- let mut directories = vec![worktree_root.clone()];
- if let Some(subroot_relative_path) = subroot_relative_path {
- debug_assert!(subroot_relative_path.is_relative());
- directories.push(worktree_root.join(subroot_relative_path));
- }
-
- config.workspace_directories = Some(directories);
+ debug_assert!(subroot_relative_path.is_relative());
+ // `.ancestors()` will yield at least one path, so in case of empty `subroot_relative_path`, we'll just use
+ // worktree root as the workspace directory.
+ config.workspace_directories = Some(
+ subroot_relative_path
+ .ancestors()
+ .map(|ancestor| worktree_root.join(ancestor))
+ .collect(),
+ );
for locator in locators.iter() {
locator.configure(&config);
}
@@ -791,7 +771,7 @@ impl ToolchainLister for PythonToolchainProvider {
.map_or(Vec::new(), |mut guard| std::mem::take(&mut guard));
let wr = worktree_root;
- let wr_venv = get_worktree_venv_declaration(&wr);
+ let wr_venv = get_worktree_venv_declaration(&wr).await;
// Sort detected environments by:
// environment name matching activation file (<workdir>/.venv)
// environment project dir matching worktree_root
@@ -854,32 +834,7 @@ impl ToolchainLister for PythonToolchainProvider {
let mut toolchains: Vec<_> = toolchains
.into_iter()
- .filter_map(|toolchain| {
- let mut name = String::from("Python");
- if let Some(ref version) = toolchain.version {
- _ = write!(name, " {version}");
- }
-
- let name_and_kind = match (&toolchain.name, &toolchain.kind) {
- (Some(name), Some(kind)) => {
- Some(format!("({name}; {})", python_env_kind_display(kind)))
- }
- (Some(name), None) => Some(format!("({name})")),
- (None, Some(kind)) => Some(format!("({})", python_env_kind_display(kind))),
- (None, None) => None,
- };
-
- if let Some(nk) = name_and_kind {
- _ = write!(name, " {nk}");
- }
-
- Some(Toolchain {
- name: name.into(),
- path: toolchain.executable.as_ref()?.to_str()?.to_owned().into(),
- language_name: LanguageName::new("Python"),
- as_json: serde_json::to_value(toolchain).ok()?,
- })
- })
+ .filter_map(venv_to_toolchain)
.collect();
toolchains.dedup();
ToolchainList {
@@ -888,9 +843,129 @@ impl ToolchainLister for PythonToolchainProvider {
groups: Default::default(),
}
}
- fn term(&self) -> SharedString {
- self.term.clone()
+ fn meta(&self) -> ToolchainMetadata {
+ ToolchainMetadata {
+ term: SharedString::new_static("Virtual Environment"),
+ new_toolchain_placeholder: SharedString::new_static(
+ "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
+ ),
+ manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
+ }
+ }
+
+ async fn resolve(
+ &self,
+ path: PathBuf,
+ env: Option<HashMap<String, String>>,
+ ) -> anyhow::Result<Toolchain> {
+ let env = env.unwrap_or_default();
+ let environment = EnvironmentApi::from_env(&env);
+ let locators = pet::locators::create_locators(
+ Arc::new(pet_conda::Conda::from(&environment)),
+ Arc::new(pet_poetry::Poetry::from(&environment)),
+ &environment,
+ );
+ let toolchain = pet::resolve::resolve_environment(&path, &locators, &environment)
+ .context("Could not find a virtual environment in provided path")?;
+ let venv = toolchain.resolved.unwrap_or(toolchain.discovered);
+ venv_to_toolchain(venv).context("Could not convert a venv into a toolchain")
+ }
+
+ async fn activation_script(
+ &self,
+ toolchain: &Toolchain,
+ shell: ShellKind,
+ fs: &dyn Fs,
+ ) -> Vec<String> {
+ let Ok(toolchain) = serde_json::from_value::<pet_core::python_environment::PythonEnvironment>(
+ toolchain.as_json.clone(),
+ ) else {
+ return vec![];
+ };
+ let mut activation_script = vec![];
+
+ match toolchain.kind {
+ Some(PythonEnvironmentKind::Pixi) => {
+ let env = toolchain.name.as_deref().unwrap_or("default");
+ activation_script.push(format!("pixi shell -e {env}"))
+ }
+ Some(PythonEnvironmentKind::Conda) => {
+ if let Some(name) = &toolchain.name {
+ activation_script.push(format!("conda activate {name}"));
+ } else {
+ activation_script.push("conda activate".to_string());
+ }
+ }
+ Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => {
+ if let Some(prefix) = &toolchain.prefix {
+ let activate_keyword = match shell {
+ ShellKind::Cmd => ".",
+ ShellKind::Nushell => "overlay use",
+ ShellKind::Powershell => ".",
+ ShellKind::Fish => "source",
+ ShellKind::Csh => "source",
+ ShellKind::Posix => "source",
+ };
+ let activate_script_name = match shell {
+ ShellKind::Posix => "activate",
+ ShellKind::Csh => "activate.csh",
+ ShellKind::Fish => "activate.fish",
+ ShellKind::Nushell => "activate.nu",
+ ShellKind::Powershell => "activate.ps1",
+ ShellKind::Cmd => "activate.bat",
+ };
+ let path = prefix.join(BINARY_DIR).join(activate_script_name);
+ if fs.is_file(&path).await {
+ activation_script
+ .push(format!("{activate_keyword} \"{}\"", path.display()));
+ }
+ }
+ }
+ Some(PythonEnvironmentKind::Pyenv) => {
+ let Some(manager) = toolchain.manager else {
+ return vec![];
+ };
+ let version = toolchain.version.as_deref().unwrap_or("system");
+ let pyenv = manager.executable;
+ let pyenv = pyenv.display();
+ activation_script.extend(match shell {
+ ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")),
+ ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")),
+ ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")),
+ ShellKind::Powershell => None,
+ ShellKind::Csh => None,
+ ShellKind::Cmd => None,
+ })
+ }
+ _ => {}
+ }
+ activation_script
+ }
+}
+
+fn venv_to_toolchain(venv: PythonEnvironment) -> Option<Toolchain> {
+ let mut name = String::from("Python");
+ if let Some(ref version) = venv.version {
+ _ = write!(name, " {version}");
+ }
+
+ let name_and_kind = match (&venv.name, &venv.kind) {
+ (Some(name), Some(kind)) => Some(format!("({name}; {})", python_env_kind_display(kind))),
+ (Some(name), None) => Some(format!("({name})")),
+ (None, Some(kind)) => Some(format!("({})", python_env_kind_display(kind))),
+ (None, None) => None,
+ };
+
+ if let Some(nk) = name_and_kind {
+ _ = write!(name, " {nk}");
}
+
+ Some(Toolchain {
+ name: name.into(),
+ path: venv.executable.as_ref()?.to_str()?.to_owned().into(),
+ language_name: LanguageName::new("Python"),
+ as_json: serde_json::to_value(venv).ok()?,
+ })
}
pub struct EnvironmentApi<'a> {
@@ -1026,7 +1101,7 @@ const BINARY_DIR: &str = if cfg!(target_os = "windows") {
#[async_trait(?Send)]
impl LspAdapter for PyLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn check_if_user_installed(
@@ -1043,10 +1118,10 @@ impl LspAdapter for PyLspAdapter {
arguments: vec![],
})
} else {
- let venv = toolchain?;
- let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp");
+ let toolchain = toolchain?;
+ let pylsp_path = Path::new(toolchain.path.as_ref()).parent()?.join("pylsp");
pylsp_path.exists().then(|| LanguageServerBinary {
- path: venv.path.to_string().into(),
+ path: toolchain.path.to_string().into(),
arguments: vec![pylsp_path.into()],
env: None,
})
@@ -1056,6 +1131,7 @@ impl LspAdapter for PyLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(()) as Box<_>)
}
@@ -1318,7 +1394,7 @@ impl BasedPyrightLspAdapter {
#[async_trait(?Send)]
impl LspAdapter for BasedPyrightLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn initialization_options(
@@ -1367,6 +1443,7 @@ impl LspAdapter for BasedPyrightLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(()) as Box<_>)
}
@@ -1510,61 +1587,69 @@ impl LspAdapter for BasedPyrightLspAdapter {
.unwrap_or_default();
// If we have a detected toolchain, configure Pyright to use it
- if let Some(toolchain) = toolchain {
+ if let Some(toolchain) = toolchain
+ && let Ok(env) = serde_json::from_value::<
+ pet_core::python_environment::PythonEnvironment,
+ >(toolchain.as_json.clone())
+ {
if user_settings.is_null() {
user_settings = Value::Object(serde_json::Map::default());
}
let object = user_settings.as_object_mut().unwrap();
let interpreter_path = toolchain.path.to_string();
+ if let Some(venv_dir) = env.prefix {
+ // Set venvPath and venv at the root level
+ // This matches the format of a pyrightconfig.json file
+ if let Some(parent) = venv_dir.parent() {
+ // Use relative path if the venv is inside the workspace
+ let venv_path = if parent == adapter.worktree_root_path() {
+ ".".to_string()
+ } else {
+ parent.to_string_lossy().into_owned()
+ };
+ object.insert("venvPath".to_string(), Value::String(venv_path));
+ }
- // Detect if this is a virtual environment
- if let Some(interpreter_dir) = Path::new(&interpreter_path).parent()
- && let Some(venv_dir) = interpreter_dir.parent()
- {
- // Check if this looks like a virtual environment
- if venv_dir.join("pyvenv.cfg").exists()
- || venv_dir.join("bin/activate").exists()
- || venv_dir.join("Scripts/activate.bat").exists()
- {
- // Set venvPath and venv at the root level
- // This matches the format of a pyrightconfig.json file
- if let Some(parent) = venv_dir.parent() {
- // Use relative path if the venv is inside the workspace
- let venv_path = if parent == adapter.worktree_root_path() {
- ".".to_string()
- } else {
- parent.to_string_lossy().into_owned()
- };
- object.insert("venvPath".to_string(), Value::String(venv_path));
- }
-
- if let Some(venv_name) = venv_dir.file_name() {
- object.insert(
- "venv".to_owned(),
- Value::String(venv_name.to_string_lossy().into_owned()),
- );
- }
+ if let Some(venv_name) = venv_dir.file_name() {
+ object.insert(
+ "venv".to_owned(),
+ Value::String(venv_name.to_string_lossy().into_owned()),
+ );
}
}
- // Always set the python interpreter path
- // Get or create the python section
- let python = object
+ // Set both pythonPath and defaultInterpreterPath for compatibility
+ if let Some(python) = object
.entry("python")
.or_insert(Value::Object(serde_json::Map::default()))
.as_object_mut()
- .unwrap();
-
- // Set both pythonPath and defaultInterpreterPath for compatibility
- python.insert(
- "pythonPath".to_owned(),
- Value::String(interpreter_path.clone()),
- );
- python.insert(
- "defaultInterpreterPath".to_owned(),
- Value::String(interpreter_path),
- );
+ {
+ python.insert(
+ "pythonPath".to_owned(),
+ Value::String(interpreter_path.clone()),
+ );
+ python.insert(
+ "defaultInterpreterPath".to_owned(),
+ Value::String(interpreter_path),
+ );
+ }
+ // Basedpyright by default uses `strict` type checking, we tone it down as to not surpris users
+ maybe!({
+ let basedpyright = object
+ .entry("basedpyright")
+ .or_insert(Value::Object(serde_json::Map::default()));
+ let analysis = basedpyright
+ .as_object_mut()?
+ .entry("analysis")
+ .or_insert(Value::Object(serde_json::Map::default()));
+ if let serde_json::map::Entry::Vacant(v) =
+ analysis.as_object_mut()?.entry("typeCheckingMode")
+ {
+ v.insert(Value::String("standard".to_owned()));
+ }
+ Some(())
+ });
}
user_settings
@@ -106,7 +106,7 @@ impl ManifestProvider for CargoManifestProvider {
#[async_trait(?Send)]
impl LspAdapter for RustLspAdapter {
fn name(&self) -> LanguageServerName {
- SERVER_NAME.clone()
+ SERVER_NAME
}
async fn check_if_user_installed(
@@ -147,11 +147,16 @@ impl LspAdapter for RustLspAdapter {
async fn fetch_latest_server_version(
&self,
delegate: &dyn LspAdapterDelegate,
+ cx: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
let release = latest_github_release(
"rust-lang/rust-analyzer",
true,
- false,
+ ProjectSettings::try_read_global(cx, |s| {
+ s.lsp.get(&SERVER_NAME)?.fetch.as_ref()?.pre_release
+ })
+ .flatten()
+ .unwrap_or(false),
delegate.http_client(),
)
.await?;
@@ -510,20 +515,6 @@ impl LspAdapter for RustLspAdapter {
}
}
- let cargo_diagnostics_fetched_separately = ProjectSettings::get_global(cx)
- .diagnostics
- .fetch_cargo_diagnostics();
- if cargo_diagnostics_fetched_separately {
- let disable_check_on_save = json!({
- "checkOnSave": false,
- });
- if let Some(initialization_options) = &mut original.initialization_options {
- merge_json_value_into(disable_check_on_save, initialization_options);
- } else {
- original.initialization_options = Some(disable_check_on_save);
- }
- }
-
Ok(original)
}
}
@@ -659,7 +650,7 @@ impl ContextProvider for RustContextProvider {
.variables
.get(CUSTOM_TARGET_DIR)
.cloned();
- let run_task_args = if let Some(package_to_run) = package_to_run.clone() {
+ let run_task_args = if let Some(package_to_run) = package_to_run {
vec!["run".into(), "-p".into(), package_to_run]
} else {
vec!["run".into()]
@@ -1019,8 +1010,8 @@ async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServ
let path = last.context("no cached binary")?;
let path = match RustLspAdapter::GITHUB_ASSET_KIND {
- AssetKind::TarGz | AssetKind::Gz => path.clone(), // Tar and gzip extract in place.
- AssetKind::Zip => path.clone().join("rust-analyzer.exe"), // zip contains a .exe
+ AssetKind::TarGz | AssetKind::Gz => path, // Tar and gzip extract in place.
+ AssetKind::Zip => path.join("rust-analyzer.exe"), // zip contains a .exe
};
anyhow::Ok(LanguageServerBinary {
@@ -1072,7 +1063,7 @@ mod tests {
#[gpui::test]
async fn test_process_rust_diagnostics() {
let mut params = lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/a")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/a")).unwrap(),
version: None,
diagnostics: vec![
// no newlines
@@ -5,6 +5,7 @@
(primitive_type) @type.builtin
(self) @variable.special
(field_identifier) @property
+(shorthand_field_identifier) @property
(trait_item name: (type_identifier) @type.interface)
(impl_item trait: (type_identifier) @type.interface)
@@ -195,12 +196,13 @@ operator: "/" @operator
(attribute_item (attribute [
(identifier) @attribute
(scoped_identifier name: (identifier) @attribute)
+ (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$"))
+ (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$"))
]))
+
(inner_attribute_item (attribute [
(identifier) @attribute
(scoped_identifier name: (identifier) @attribute)
+ (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$"))
+ (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$"))
]))
-; Match nested snake case identifiers in attribute items.
-(token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$"))
-; Override the attribute match for paths in scoped type/enum identifiers.
-(token_tree (identifier) @variable "::" (identifier) @type (#match? @type "^[A-Z]"))
@@ -44,7 +44,7 @@ impl TailwindLspAdapter {
#[async_trait(?Send)]
impl LspAdapter for TailwindLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn check_if_user_installed(
@@ -66,6 +66,7 @@ impl LspAdapter for TailwindLspAdapter {
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -184,6 +185,7 @@ impl LspAdapter for TailwindLspAdapter {
(LanguageName::new("Elixir"), "phoenix-heex".to_string()),
(LanguageName::new("HEEX"), "phoenix-heex".to_string()),
(LanguageName::new("ERB"), "erb".to_string()),
+ (LanguageName::new("HTML+ERB"), "erb".to_string()),
(LanguageName::new("HTML/ERB"), "erb".to_string()),
(LanguageName::new("PHP"), "php".to_string()),
(LanguageName::new("Vue.js"), "vue".to_string()),
@@ -4,6 +4,7 @@ path_suffixes = ["tsx"]
line_comments = ["// "]
block_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 }
documentation_comment = { start = "/**", prefix = "* ", end = "*/", tab_size = 1 }
+wrap_characters = { start_prefix = "<", start_suffix = ">", end_prefix = "</", end_suffix = ">" }
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
@@ -237,6 +237,7 @@
"implements"
"interface"
"keyof"
+ "module"
"namespace"
"private"
"protected"
@@ -256,4 +257,4 @@
(jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)
(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx)
(jsx_attribute "=" @punctuation.delimiter.jsx)
-(jsx_text) @text.jsx
+(jsx_text) @text.jsx
@@ -11,6 +11,21 @@
(#set! injection.language "css"))
)
+(call_expression
+ function: (member_expression
+ object: (identifier) @_obj (#eq? @_obj "styled")
+ property: (property_identifier))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
+(call_expression
+ function: (call_expression
+ function: (identifier) @_name (#eq? @_name "styled"))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
(call_expression
function: (identifier) @_name (#eq? @_name "html")
arguments: (template_string (string_fragment) @injection.content
@@ -58,3 +73,9 @@
arguments: (arguments (template_string (string_fragment) @injection.content
(#set! injection.language "graphql")))
)
+
+(call_expression
+ function: (identifier) @_name(#match? @_name "^iso$")
+ arguments: (arguments (template_string (string_fragment) @injection.content
+ (#set! injection.language "isograph")))
+)
@@ -557,12 +557,13 @@ struct TypeScriptVersions {
#[async_trait(?Send)]
impl LspAdapter for TypeScriptLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(TypeScriptVersions {
typescript_version: self.node.npm_package_latest_version("typescript").await?,
@@ -879,12 +880,13 @@ impl LspAdapter for EsLintLspAdapter {
}
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
_delegate: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
let url = build_asset_url(
"zed-industries/vscode-eslint",
@@ -5,6 +5,7 @@ first_line_pattern = '^#!.*\b(?:deno run|ts-node|bun|tsx|[/ ]node)\b'
line_comments = ["// "]
block_comment = { start = "/*", prefix = "* ", end = "*/", tab_size = 1 }
documentation_comment = { start = "/**", prefix = "* ", end = "*/", tab_size = 1 }
+wrap_characters = { start_prefix = "<", start_suffix = ">", end_prefix = "</", end_suffix = ">" }
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
@@ -248,6 +248,7 @@
"is"
"keyof"
"let"
+ "module"
"namespace"
"new"
"of"
@@ -272,4 +273,4 @@
"while"
"with"
"yield"
-] @keyword
+] @keyword
@@ -15,6 +15,21 @@
(#set! injection.language "css"))
)
+(call_expression
+ function: (member_expression
+ object: (identifier) @_obj (#eq? @_obj "styled")
+ property: (property_identifier))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
+(call_expression
+ function: (call_expression
+ function: (identifier) @_name (#eq? @_name "styled"))
+ arguments: (template_string (string_fragment) @injection.content
+ (#set! injection.language "css"))
+)
+
(call_expression
function: (identifier) @_name (#eq? @_name "html")
arguments: (template_string) @injection.content
@@ -63,6 +78,12 @@
(#set! injection.language "graphql")))
)
+(call_expression
+ function: (identifier) @_name(#match? @_name "^iso$")
+ arguments: (arguments (template_string (string_fragment) @injection.content
+ (#set! injection.language "isograph")))
+)
+
;; Angular Component template injection
(call_expression
function: [
@@ -67,12 +67,13 @@ const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vtsls");
#[async_trait(?Send)]
impl LspAdapter for VtslsLspAdapter {
fn name(&self) -> LanguageServerName {
- SERVER_NAME.clone()
+ SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(TypeScriptVersions {
typescript_version: self.node.npm_package_latest_version("typescript").await?,
@@ -38,12 +38,13 @@ impl YamlLspAdapter {
#[async_trait(?Send)]
impl LspAdapter for YamlLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ Self::SERVER_NAME
}
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
+ _: &AsyncApp,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -0,0 +1,5 @@
+(comment) @comment.inclusive
+[
+ (single_quote_scalar)
+ (double_quote_scalar)
+] @string
@@ -0,0 +1,24 @@
+[package]
+name = "line_ending_selector"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/line_ending_selector.rs"
+doctest = false
+
+[dependencies]
+editor.workspace = true
+gpui.workspace = true
+language.workspace = true
+picker.workspace = true
+project.workspace = true
+ui.workspace = true
+util.workspace = true
+workspace.workspace = true
+workspace-hack.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,192 @@
+use editor::Editor;
+use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, actions};
+use language::{Buffer, LineEnding};
+use picker::{Picker, PickerDelegate};
+use project::Project;
+use std::sync::Arc;
+use ui::{ListItem, ListItemSpacing, prelude::*};
+use util::ResultExt;
+use workspace::ModalView;
+
+actions!(
+ line_ending,
+ [
+ /// Toggles the line ending selector modal.
+ Toggle
+ ]
+);
+
+pub fn init(cx: &mut App) {
+ cx.observe_new(LineEndingSelector::register).detach();
+}
+
+pub struct LineEndingSelector {
+ picker: Entity<Picker<LineEndingSelectorDelegate>>,
+}
+
+impl LineEndingSelector {
+ fn register(editor: &mut Editor, _window: Option<&mut Window>, cx: &mut Context<Editor>) {
+ let editor_handle = cx.weak_entity();
+ editor
+ .register_action(move |_: &Toggle, window, cx| {
+ Self::toggle(&editor_handle, window, cx);
+ })
+ .detach();
+ }
+
+ fn toggle(editor: &WeakEntity<Editor>, window: &mut Window, cx: &mut App) {
+ let Some((workspace, buffer)) = editor
+ .update(cx, |editor, cx| {
+ Some((editor.workspace()?, editor.active_excerpt(cx)?.1))
+ })
+ .ok()
+ .flatten()
+ else {
+ return;
+ };
+
+ workspace.update(cx, |workspace, cx| {
+ let project = workspace.project().clone();
+ workspace.toggle_modal(window, cx, move |window, cx| {
+ LineEndingSelector::new(buffer, project, window, cx)
+ });
+ })
+ }
+
+ fn new(
+ buffer: Entity<Buffer>,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let line_ending = buffer.read(cx).line_ending();
+ let delegate =
+ LineEndingSelectorDelegate::new(cx.entity().downgrade(), buffer, project, line_ending);
+ let picker = cx.new(|cx| Picker::nonsearchable_uniform_list(delegate, window, cx));
+ Self { picker }
+ }
+}
+
+impl Render for LineEndingSelector {
+ fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+ v_flex().w(rems(34.)).child(self.picker.clone())
+ }
+}
+
+impl Focusable for LineEndingSelector {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ self.picker.focus_handle(cx)
+ }
+}
+
+impl EventEmitter<DismissEvent> for LineEndingSelector {}
+impl ModalView for LineEndingSelector {}
+
+struct LineEndingSelectorDelegate {
+ line_ending_selector: WeakEntity<LineEndingSelector>,
+ buffer: Entity<Buffer>,
+ project: Entity<Project>,
+ line_ending: LineEnding,
+ matches: Vec<LineEnding>,
+ selected_index: usize,
+}
+
+impl LineEndingSelectorDelegate {
+ fn new(
+ line_ending_selector: WeakEntity<LineEndingSelector>,
+ buffer: Entity<Buffer>,
+ project: Entity<Project>,
+ line_ending: LineEnding,
+ ) -> Self {
+ Self {
+ line_ending_selector,
+ buffer,
+ project,
+ line_ending,
+ matches: vec![LineEnding::Unix, LineEnding::Windows],
+ selected_index: 0,
+ }
+ }
+}
+
+impl PickerDelegate for LineEndingSelectorDelegate {
+ type ListItem = ListItem;
+
+ fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+ "Select a line ending…".into()
+ }
+
+ fn match_count(&self) -> usize {
+ self.matches.len()
+ }
+
+ fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+ if let Some(line_ending) = self.matches.get(self.selected_index) {
+ self.buffer.update(cx, |this, cx| {
+ this.set_line_ending(*line_ending, cx);
+ });
+ let buffer = self.buffer.clone();
+ let project = self.project.clone();
+ cx.defer(move |cx| {
+ project.update(cx, |this, cx| {
+ this.save_buffer(buffer, cx).detach();
+ });
+ });
+ }
+ self.dismissed(window, cx);
+ }
+
+ fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
+ self.line_ending_selector
+ .update(cx, |_, cx| cx.emit(DismissEvent))
+ .log_err();
+ }
+
+ fn selected_index(&self) -> usize {
+ self.selected_index
+ }
+
+ fn set_selected_index(
+ &mut self,
+ ix: usize,
+ _window: &mut Window,
+ _: &mut Context<Picker<Self>>,
+ ) {
+ self.selected_index = ix;
+ }
+
+ fn update_matches(
+ &mut self,
+ _query: String,
+ _window: &mut Window,
+ _cx: &mut Context<Picker<Self>>,
+ ) -> gpui::Task<()> {
+ return Task::ready(());
+ }
+
+ fn render_match(
+ &self,
+ ix: usize,
+ selected: bool,
+ _: &mut Window,
+ _: &mut Context<Picker<Self>>,
+ ) -> Option<Self::ListItem> {
+ let line_ending = self.matches[ix];
+ let label = match line_ending {
+ LineEnding::Unix => "LF",
+ LineEnding::Windows => "CRLF",
+ };
+
+ let mut list_item = ListItem::new(ix)
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .child(Label::new(label));
+
+ if self.line_ending == line_ending {
+ list_item = list_item.end_slot(Icon::new(IconName::Check).color(Color::Muted));
+ }
+
+ Some(list_item)
+ }
+}
@@ -25,6 +25,7 @@ async-trait.workspace = true
collections.workspace = true
cpal.workspace = true
futures.workspace = true
+audio.workspace = true
gpui = { workspace = true, features = ["screen-capture", "x11", "wayland", "windows-manifest"] }
gpui_tokio.workspace = true
http_client_tls.workspace = true
@@ -35,6 +36,7 @@ nanoid.workspace = true
parking_lot.workspace = true
postage.workspace = true
smallvec.workspace = true
+settings.workspace = true
tokio-tungstenite.workspace = true
util.workspace = true
workspace-hack.workspace = true
@@ -183,7 +183,7 @@ impl LivekitWindow {
match track {
livekit_client::RemoteTrack::Audio(track) => {
output.audio_output_stream = Some((
- publication.clone(),
+ publication,
room.play_remote_audio_track(&track, cx).unwrap(),
));
}
@@ -24,8 +24,11 @@ mod livekit_client;
)))]
pub use livekit_client::*;
-// If you need proper LSP in livekit_client you've got to comment out
-// the mocks and test
+// If you need proper LSP in livekit_client you've got to comment out
+// - the cfg blocks above
+// - the mods: mock_client & test and their conditional blocks
+// - the pub use mock_client::* and their conditional blocks
+
#[cfg(any(
test,
feature = "test-support",
@@ -1,15 +1,16 @@
use std::sync::Arc;
use anyhow::{Context as _, Result};
+use audio::AudioSettings;
use collections::HashMap;
use futures::{SinkExt, channel::mpsc};
use gpui::{App, AsyncApp, ScreenCaptureSource, ScreenCaptureStream, Task};
use gpui_tokio::Tokio;
+use log::info;
use playback::capture_local_video_track;
+use settings::Settings;
mod playback;
-#[cfg(feature = "record-microphone")]
-mod record;
use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication};
pub use playback::AudioStream;
@@ -125,9 +126,14 @@ impl Room {
pub fn play_remote_audio_track(
&self,
track: &RemoteAudioTrack,
- _cx: &App,
+ cx: &mut App,
) -> Result<playback::AudioStream> {
- Ok(self.playback.play_remote_audio_track(&track.0))
+ if AudioSettings::get_global(cx).rodio_audio {
+ info!("Using experimental.rodio_audio audio pipeline");
+ playback::play_remote_audio_track(&track.0, cx)
+ } else {
+ Ok(self.playback.play_remote_audio_track(&track.0))
+ }
}
}
@@ -18,13 +18,16 @@ use livekit::webrtc::{
video_stream::native::NativeVideoStream,
};
use parking_lot::Mutex;
+use rodio::Source;
use std::cell::RefCell;
use std::sync::Weak;
-use std::sync::atomic::{self, AtomicI32};
+use std::sync::atomic::{AtomicBool, AtomicI32, Ordering};
use std::time::Duration;
use std::{borrow::Cow, collections::VecDeque, sync::Arc, thread};
use util::{ResultExt as _, maybe};
+mod source;
+
pub(crate) struct AudioStack {
executor: BackgroundExecutor,
apm: Arc<Mutex<apm::AudioProcessingModule>>,
@@ -40,6 +43,29 @@ pub(crate) struct AudioStack {
const SAMPLE_RATE: u32 = 48000;
const NUM_CHANNELS: u32 = 2;
+pub(crate) fn play_remote_audio_track(
+ track: &livekit::track::RemoteAudioTrack,
+ cx: &mut gpui::App,
+) -> Result<AudioStream> {
+ let stop_handle = Arc::new(AtomicBool::new(false));
+ let stop_handle_clone = stop_handle.clone();
+ let stream = source::LiveKitStream::new(cx.background_executor(), track)
+ .stoppable()
+ .periodic_access(Duration::from_millis(50), move |s| {
+ if stop_handle.load(Ordering::Relaxed) {
+ s.stop();
+ }
+ });
+ audio::Audio::play_source(stream, cx).context("Could not play audio")?;
+
+ let on_drop = util::defer(move || {
+ stop_handle_clone.store(true, Ordering::Relaxed);
+ });
+ Ok(AudioStream::Output {
+ _drop: Box::new(on_drop),
+ })
+}
+
impl AudioStack {
pub(crate) fn new(executor: BackgroundExecutor) -> Self {
let apm = Arc::new(Mutex::new(apm::AudioProcessingModule::new(
@@ -61,7 +87,7 @@ impl AudioStack {
) -> AudioStream {
let output_task = self.start_output();
- let next_ssrc = self.next_ssrc.fetch_add(1, atomic::Ordering::Relaxed);
+ let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed);
let source = AudioMixerSource {
ssrc: next_ssrc,
sample_rate: SAMPLE_RATE,
@@ -97,6 +123,23 @@ impl AudioStack {
}
}
+ fn start_output(&self) -> Arc<Task<()>> {
+ if let Some(task) = self._output_task.borrow().upgrade() {
+ return task;
+ }
+ let task = Arc::new(self.executor.spawn({
+ let apm = self.apm.clone();
+ let mixer = self.mixer.clone();
+ async move {
+ Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS)
+ .await
+ .log_err();
+ }
+ }));
+ *self._output_task.borrow_mut() = Arc::downgrade(&task);
+ task
+ }
+
pub(crate) fn capture_local_microphone_track(
&self,
) -> Result<(crate::LocalAudioTrack, AudioStream)> {
@@ -117,7 +160,6 @@ impl AudioStack {
let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded();
let transmit_task = self.executor.spawn({
- let source = source.clone();
async move {
while let Some(frame) = frame_rx.next().await {
source.capture_frame(&frame).await.log_err();
@@ -132,29 +174,12 @@ impl AudioStack {
drop(transmit_task);
drop(capture_task);
});
- return Ok((
+ Ok((
super::LocalAudioTrack(track),
AudioStream::Output {
_drop: Box::new(on_drop),
},
- ));
- }
-
- fn start_output(&self) -> Arc<Task<()>> {
- if let Some(task) = self._output_task.borrow().upgrade() {
- return task;
- }
- let task = Arc::new(self.executor.spawn({
- let apm = self.apm.clone();
- let mixer = self.mixer.clone();
- async move {
- Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS)
- .await
- .log_err();
- }
- }));
- *self._output_task.borrow_mut() = Arc::downgrade(&task);
- task
+ ))
}
async fn play_output(
@@ -0,0 +1,67 @@
+use futures::StreamExt;
+use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame};
+use livekit::track::RemoteAudioTrack;
+use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter};
+
+use crate::livekit_client::playback::{NUM_CHANNELS, SAMPLE_RATE};
+
+fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer {
+ let samples = frame.data.iter().copied();
+ let samples = SampleTypeConverter::<_, _>::new(samples);
+ let samples: Vec<f32> = samples.collect();
+ SamplesBuffer::new(frame.num_channels as u16, frame.sample_rate, samples)
+}
+
+pub struct LiveKitStream {
+ // shared_buffer: SharedBuffer,
+ inner: rodio::queue::SourcesQueueOutput,
+ _receiver_task: gpui::Task<()>,
+}
+
+impl LiveKitStream {
+ pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self {
+ let mut stream =
+ NativeAudioStream::new(track.rtc_track(), SAMPLE_RATE as i32, NUM_CHANNELS as i32);
+ let (queue_input, queue_output) = rodio::queue::queue(true);
+ // spawn rtc stream
+ let receiver_task = executor.spawn({
+ async move {
+ while let Some(frame) = stream.next().await {
+ let samples = frame_to_samplesbuffer(frame);
+ queue_input.append(samples);
+ }
+ }
+ });
+
+ LiveKitStream {
+ _receiver_task: receiver_task,
+ inner: queue_output,
+ }
+ }
+}
+
+impl Iterator for LiveKitStream {
+ type Item = rodio::Sample;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+}
+
+impl Source for LiveKitStream {
+ fn current_span_len(&self) -> Option<usize> {
+ self.inner.current_span_len()
+ }
+
+ fn channels(&self) -> rodio::ChannelCount {
+ self.inner.channels()
+ }
+
+ fn sample_rate(&self) -> rodio::SampleRate {
+ self.inner.sample_rate()
+ }
+
+ fn total_duration(&self) -> Option<std::time::Duration> {
+ self.inner.total_duration()
+ }
+}
@@ -86,11 +86,12 @@ impl Model {
}
#[derive(Debug, Serialize, Deserialize)]
-#[serde(untagged)]
+#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
Auto,
Required,
None,
+ #[serde(untagged)]
Other(ToolDefinition),
}
@@ -45,7 +45,7 @@ use util::{ConnectionResult, ResultExt, TryFutureExt, redact};
const JSON_RPC_VERSION: &str = "2.0";
const CONTENT_LEN_HEADER: &str = "Content-Length: ";
-const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2);
+pub const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2);
const SERVER_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
type NotificationHandler = Box<dyn Send + FnMut(Option<RequestId>, Value, &mut AsyncApp)>;
@@ -100,8 +100,8 @@ pub struct LanguageServer {
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
output_done_rx: Mutex<Option<barrier::Receiver>>,
server: Arc<Mutex<Option<Child>>>,
- workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
- root_uri: Url,
+ workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
+ root_uri: Uri,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -166,6 +166,12 @@ impl<'a> From<&'a str> for LanguageServerName {
}
}
+impl PartialEq<str> for LanguageServerName {
+ fn eq(&self, other: &str) -> bool {
+ self.0 == other
+ }
+}
+
/// Handle to a language server RPC activity subscription.
pub enum Subscription {
Notification {
@@ -310,7 +316,7 @@ impl LanguageServer {
binary: LanguageServerBinary,
root_path: &Path,
code_action_kinds: Option<Vec<CodeActionKind>>,
- workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
+ workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
cx: &mut AsyncApp,
) -> Result<Self> {
let working_dir = if root_path.is_dir() {
@@ -318,7 +324,7 @@ impl LanguageServer {
} else {
root_path.parent().unwrap_or_else(|| Path::new("/"))
};
- let root_uri = Url::from_file_path(&working_dir)
+ let root_uri = Uri::from_file_path(&working_dir)
.map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?;
log::info!(
@@ -384,8 +390,8 @@ impl LanguageServer {
server: Option<Child>,
code_action_kinds: Option<Vec<CodeActionKind>>,
binary: LanguageServerBinary,
- root_uri: Url,
- workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
+ root_uri: Uri,
+ workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
cx: &mut AsyncApp,
on_unhandled_notification: F,
) -> Self
@@ -1350,7 +1356,7 @@ impl LanguageServer {
}
/// Add new workspace folder to the list.
- pub fn add_workspace_folder(&self, uri: Url) {
+ pub fn add_workspace_folder(&self, uri: Uri) {
if self
.capabilities()
.workspace
@@ -1383,8 +1389,9 @@ impl LanguageServer {
self.notify::<DidChangeWorkspaceFolders>(¶ms).ok();
}
}
- /// Add new workspace folder to the list.
- pub fn remove_workspace_folder(&self, uri: Url) {
+
+ /// Remove existing workspace folder from the list.
+ pub fn remove_workspace_folder(&self, uri: Uri) {
if self
.capabilities()
.workspace
@@ -1416,7 +1423,7 @@ impl LanguageServer {
self.notify::<DidChangeWorkspaceFolders>(¶ms).ok();
}
}
- pub fn set_workspace_folders(&self, folders: BTreeSet<Url>) {
+ pub fn set_workspace_folders(&self, folders: BTreeSet<Uri>) {
let Some(workspace_folders) = self.workspace_folders.as_ref() else {
return;
};
@@ -1449,7 +1456,7 @@ impl LanguageServer {
}
}
- pub fn workspace_folders(&self) -> BTreeSet<Url> {
+ pub fn workspace_folders(&self) -> BTreeSet<Uri> {
self.workspace_folders.as_ref().map_or_else(
|| BTreeSet::from_iter([self.root_uri.clone()]),
|folders| folders.lock().clone(),
@@ -1458,7 +1465,7 @@ impl LanguageServer {
pub fn register_buffer(
&self,
- uri: Url,
+ uri: Uri,
language_id: String,
version: i32,
initial_text: String,
@@ -1469,7 +1476,7 @@ impl LanguageServer {
.ok();
}
- pub fn unregister_buffer(&self, uri: Url) {
+ pub fn unregister_buffer(&self, uri: Uri) {
self.notify::<notification::DidCloseTextDocument>(&DidCloseTextDocumentParams {
text_document: TextDocumentIdentifier::new(uri),
})
@@ -1586,7 +1593,7 @@ impl FakeLanguageServer {
let server_name = LanguageServerName(name.clone().into());
let process_name = Arc::from(name.as_str());
let root = Self::root_path();
- let workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default();
+ let workspace_folders: Arc<Mutex<BTreeSet<Uri>>> = Default::default();
let mut server = LanguageServer::new_internal(
server_id,
server_name.clone(),
@@ -1656,13 +1663,13 @@ impl FakeLanguageServer {
(server, fake)
}
#[cfg(target_os = "windows")]
- fn root_path() -> Url {
- Url::from_file_path("C:/").unwrap()
+ fn root_path() -> Uri {
+ Uri::from_file_path("C:/").unwrap()
}
#[cfg(not(target_os = "windows"))]
- fn root_path() -> Url {
- Url::from_file_path("/").unwrap()
+ fn root_path() -> Uri {
+ Uri::from_file_path("/").unwrap()
}
}
@@ -1864,7 +1871,7 @@ mod tests {
server
.notify::<notification::DidOpenTextDocument>(&DidOpenTextDocumentParams {
text_document: TextDocumentItem::new(
- Url::from_str("file://a/b").unwrap(),
+ Uri::from_str("file://a/b").unwrap(),
"rust".to_string(),
0,
"".to_string(),
@@ -1885,7 +1892,7 @@ mod tests {
message: "ok".to_string(),
});
fake.notify::<notification::PublishDiagnostics>(&PublishDiagnosticsParams {
- uri: Url::from_str("file://b/c").unwrap(),
+ uri: Uri::from_str("file://b/c").unwrap(),
version: Some(5),
diagnostics: vec![],
});
@@ -30,7 +30,7 @@ pub fn main() {
let node_runtime = NodeRuntime::unavailable();
let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
- languages::init(language_registry.clone(), node_runtime, cx);
+ languages::init(language_registry, node_runtime, cx);
theme::init(LoadThemes::JustBase, cx);
Assets.load_fonts(cx).unwrap();
@@ -69,6 +69,7 @@ pub struct MarkdownStyle {
pub heading_level_styles: Option<HeadingLevelStyles>,
pub table_overflow_x_scroll: bool,
pub height_is_multiple_of_line_height: bool,
+ pub prevent_mouse_interaction: bool,
}
impl Default for MarkdownStyle {
@@ -89,6 +90,7 @@ impl Default for MarkdownStyle {
heading_level_styles: None,
table_overflow_x_scroll: false,
height_is_multiple_of_line_height: false,
+ prevent_mouse_interaction: false,
}
}
}
@@ -575,16 +577,22 @@ impl MarkdownElement {
window: &mut Window,
cx: &mut App,
) {
+ if self.style.prevent_mouse_interaction {
+ return;
+ }
+
let is_hovering_link = hitbox.is_hovered(window)
&& !self.markdown.read(cx).selection.pending
&& rendered_text
.link_for_position(window.mouse_position())
.is_some();
- if is_hovering_link {
- window.set_cursor_style(CursorStyle::PointingHand, hitbox);
- } else {
- window.set_cursor_style(CursorStyle::IBeam, hitbox);
+ if !self.style.prevent_mouse_interaction {
+ if is_hovering_link {
+ window.set_cursor_style(CursorStyle::PointingHand, hitbox);
+ } else {
+ window.set_cursor_style(CursorStyle::IBeam, hitbox);
+ }
}
let on_open_url = self.on_url_click.take();
@@ -1084,7 +1092,13 @@ impl Element for MarkdownElement {
cx,
);
el.child(
- div().absolute().top_1().right_0p5().w_5().child(codeblock),
+ h_flex()
+ .w_4()
+ .absolute()
+ .top_1p5()
+ .right_1p5()
+ .justify_end()
+ .child(codeblock),
)
});
}
@@ -1109,11 +1123,12 @@ impl Element for MarkdownElement {
cx,
);
el.child(
- div()
+ h_flex()
+ .w_4()
.absolute()
.top_0()
.right_0()
- .w_5()
+ .justify_end()
.visible_on_hover("code_block")
.child(codeblock),
)
@@ -1314,11 +1329,11 @@ fn render_copy_code_block_button(
)
.icon_color(Color::Muted)
.icon_size(IconSize::Small)
+ .style(ButtonStyle::Filled)
.shape(ui::IconButtonShape::Square)
- .tooltip(Tooltip::text("Copy Code"))
+ .tooltip(Tooltip::text("Copy"))
.on_click({
- let id = id.clone();
- let markdown = markdown.clone();
+ let markdown = markdown;
move |_event, _window, cx| {
let id = id.clone();
markdown.update(cx, |this, cx| {
@@ -19,19 +19,21 @@ anyhow.workspace = true
async-recursion.workspace = true
collections.workspace = true
editor.workspace = true
+fs.workspace = true
gpui.workspace = true
+html5ever.workspace = true
language.workspace = true
linkify.workspace = true
log.workspace = true
+markup5ever_rcdom.workspace = true
pretty_assertions.workspace = true
pulldown-cmark.workspace = true
settings.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
-workspace.workspace = true
workspace-hack.workspace = true
-fs.workspace = true
+workspace.workspace = true
[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }
@@ -1,5 +1,6 @@
use gpui::{
- FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle, UnderlineStyle, px,
+ DefiniteLength, FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle,
+ UnderlineStyle, px,
};
use language::HighlightId;
use std::{fmt::Display, ops::Range, path::PathBuf};
@@ -15,6 +16,7 @@ pub enum ParsedMarkdownElement {
/// A paragraph of text and other inline elements.
Paragraph(MarkdownParagraph),
HorizontalRule(Range<usize>),
+ Image(Image),
}
impl ParsedMarkdownElement {
@@ -30,6 +32,7 @@ impl ParsedMarkdownElement {
MarkdownParagraphChunk::Image(image) => image.source_range.clone(),
},
Self::HorizontalRule(range) => range.clone(),
+ Self::Image(image) => image.source_range.clone(),
})
}
@@ -152,7 +155,7 @@ pub struct ParsedMarkdownText {
/// Where the text is located in the source Markdown document.
pub source_range: Range<usize>,
/// The text content stripped of any formatting symbols.
- pub contents: String,
+ pub contents: SharedString,
/// The list of highlights contained in the Markdown document.
pub highlights: Vec<(Range<usize>, MarkdownHighlight)>,
/// The regions of the various ranges in the Markdown document.
@@ -290,6 +293,8 @@ pub struct Image {
pub link: Link,
pub source_range: Range<usize>,
pub alt_text: Option<SharedString>,
+ pub width: Option<DefiniteLength>,
+ pub height: Option<DefiniteLength>,
}
impl Image {
@@ -303,10 +308,20 @@ impl Image {
source_range,
link,
alt_text: None,
+ width: None,
+ height: None,
})
}
pub fn set_alt_text(&mut self, alt_text: SharedString) {
self.alt_text = Some(alt_text);
}
+
+ pub fn set_width(&mut self, width: DefiniteLength) {
+ self.width = Some(width);
+ }
+
+ pub fn set_height(&mut self, height: DefiniteLength) {
+ self.height = Some(height);
+ }
}
@@ -1,10 +1,12 @@
use crate::markdown_elements::*;
use async_recursion::async_recursion;
use collections::FxHashMap;
-use gpui::FontWeight;
+use gpui::{DefiniteLength, FontWeight, px, relative};
+use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink};
use language::LanguageRegistry;
+use markup5ever_rcdom::RcDom;
use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd};
-use std::{ops::Range, path::PathBuf, sync::Arc, vec};
+use std::{cell::RefCell, collections::HashMap, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec};
pub async fn parse_markdown(
markdown_input: &str,
@@ -172,13 +174,17 @@ impl<'a> MarkdownParser<'a> {
self.cursor += 1;
- let code_block = self.parse_code_block(language).await;
+ let code_block = self.parse_code_block(language).await?;
Some(vec![ParsedMarkdownElement::CodeBlock(code_block)])
}
+ Tag::HtmlBlock => {
+ self.cursor += 1;
+
+ Some(self.parse_html_block().await)
+ }
_ => None,
},
Event::Rule => {
- let source_range = source_range.clone();
self.cursor += 1;
Some(vec![ParsedMarkdownElement::HorizontalRule(source_range)])
}
@@ -347,7 +353,7 @@ impl<'a> MarkdownParser<'a> {
if !text.is_empty() {
let parsed_regions = MarkdownParagraphChunk::Text(ParsedMarkdownText {
source_range: source_range.clone(),
- contents: text.clone(),
+ contents: text.into(),
highlights: highlights.clone(),
region_ranges: region_ranges.clone(),
regions: regions.clone(),
@@ -379,7 +385,7 @@ impl<'a> MarkdownParser<'a> {
TagEnd::Image => {
if let Some(mut image) = image.take() {
if !text.is_empty() {
- image.alt_text = Some(std::mem::take(&mut text).into());
+ image.set_alt_text(std::mem::take(&mut text).into());
}
markdown_text_like.push(MarkdownParagraphChunk::Image(image));
}
@@ -401,8 +407,8 @@ impl<'a> MarkdownParser<'a> {
}
if !text.is_empty() {
markdown_text_like.push(MarkdownParagraphChunk::Text(ParsedMarkdownText {
- source_range: source_range.clone(),
- contents: text,
+ source_range,
+ contents: text.into(),
highlights,
regions,
region_ranges,
@@ -420,7 +426,7 @@ impl<'a> MarkdownParser<'a> {
self.cursor += 1;
ParsedMarkdownHeading {
- source_range: source_range.clone(),
+ source_range,
level: match level {
pulldown_cmark::HeadingLevel::H1 => HeadingLevel::H1,
pulldown_cmark::HeadingLevel::H2 => HeadingLevel::H2,
@@ -696,13 +702,22 @@ impl<'a> MarkdownParser<'a> {
}
}
- async fn parse_code_block(&mut self, language: Option<String>) -> ParsedMarkdownCodeBlock {
- let (_event, source_range) = self.previous().unwrap();
+ async fn parse_code_block(
+ &mut self,
+ language: Option<String>,
+ ) -> Option<ParsedMarkdownCodeBlock> {
+ let Some((_event, source_range)) = self.previous() else {
+ return None;
+ };
+
let source_range = source_range.clone();
let mut code = String::new();
while !self.eof() {
- let (current, _source_range) = self.current().unwrap();
+ let Some((current, _source_range)) = self.current() else {
+ break;
+ };
+
match current {
Event::Text(text) => {
code.push_str(text);
@@ -735,23 +750,241 @@ impl<'a> MarkdownParser<'a> {
None
};
- ParsedMarkdownCodeBlock {
+ Some(ParsedMarkdownCodeBlock {
source_range,
contents: code.into(),
language,
highlights,
+ })
+ }
+
+ async fn parse_html_block(&mut self) -> Vec<ParsedMarkdownElement> {
+ let mut elements = Vec::new();
+ let Some((_event, _source_range)) = self.previous() else {
+ return elements;
+ };
+
+ while !self.eof() {
+ let Some((current, source_range)) = self.current() else {
+ break;
+ };
+ let source_range = source_range.clone();
+ match current {
+ Event::Html(html) => {
+ let mut cursor = std::io::Cursor::new(html.as_bytes());
+ let Some(dom) = parse_document(RcDom::default(), ParseOpts::default())
+ .from_utf8()
+ .read_from(&mut cursor)
+ .ok()
+ else {
+ self.cursor += 1;
+ continue;
+ };
+
+ self.cursor += 1;
+
+ self.parse_html_node(source_range, &dom.document, &mut elements);
+ }
+ Event::End(TagEnd::CodeBlock) => {
+ self.cursor += 1;
+ break;
+ }
+ _ => {
+ break;
+ }
+ }
+ }
+
+ elements
+ }
+
+ fn parse_html_node(
+ &self,
+ source_range: Range<usize>,
+ node: &Rc<markup5ever_rcdom::Node>,
+ elements: &mut Vec<ParsedMarkdownElement>,
+ ) {
+ match &node.data {
+ markup5ever_rcdom::NodeData::Document => {
+ self.consume_children(source_range, node, elements);
+ }
+ markup5ever_rcdom::NodeData::Text { contents } => {
+ elements.push(ParsedMarkdownElement::Paragraph(vec![
+ MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range,
+ regions: Vec::default(),
+ region_ranges: Vec::default(),
+ highlights: Vec::default(),
+ contents: contents.borrow().to_string().into(),
+ }),
+ ]));
+ }
+ markup5ever_rcdom::NodeData::Comment { .. } => {}
+ markup5ever_rcdom::NodeData::Element { name, attrs, .. } => {
+ if local_name!("img") == name.local {
+ if let Some(image) = self.extract_image(source_range, attrs) {
+ elements.push(ParsedMarkdownElement::Image(image));
+ }
+ } else if local_name!("p") == name.local {
+ self.parse_paragraph(
+ source_range,
+ node,
+ &mut MarkdownParagraph::new(),
+ elements,
+ );
+ } else {
+ self.consume_children(source_range, node, elements);
+ }
+ }
+ _ => {}
+ }
+ }
+
+ fn parse_paragraph(
+ &self,
+ source_range: Range<usize>,
+ node: &Rc<markup5ever_rcdom::Node>,
+ paragraph: &mut MarkdownParagraph,
+ elements: &mut Vec<ParsedMarkdownElement>,
+ ) {
+ match &node.data {
+ markup5ever_rcdom::NodeData::Text { contents } => {
+ paragraph.push(MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range,
+ regions: Vec::default(),
+ region_ranges: Vec::default(),
+ highlights: Vec::default(),
+ contents: contents.borrow().to_string().into(),
+ }));
+ }
+ markup5ever_rcdom::NodeData::Element { name, attrs, .. } => {
+ if local_name!("img") == name.local {
+ if let Some(image) = self.extract_image(source_range, attrs) {
+ paragraph.push(MarkdownParagraphChunk::Image(image));
+ }
+ } else {
+ self.consume_paragraph(source_range, node, paragraph, elements);
+
+ if !paragraph.is_empty() {
+ elements.push(ParsedMarkdownElement::Paragraph(std::mem::take(paragraph)));
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+
+ fn consume_paragraph(
+ &self,
+ source_range: Range<usize>,
+ node: &Rc<markup5ever_rcdom::Node>,
+ paragraph: &mut MarkdownParagraph,
+ elements: &mut Vec<ParsedMarkdownElement>,
+ ) {
+ for node in node.children.borrow().iter() {
+ self.parse_paragraph(source_range.clone(), node, paragraph, elements);
+ }
+ }
+
+ fn consume_children(
+ &self,
+ source_range: Range<usize>,
+ node: &Rc<markup5ever_rcdom::Node>,
+ elements: &mut Vec<ParsedMarkdownElement>,
+ ) {
+ for node in node.children.borrow().iter() {
+ self.parse_html_node(source_range.clone(), node, elements);
+ }
+ }
+
+ fn attr_value(
+ attrs: &RefCell<Vec<html5ever::Attribute>>,
+ name: html5ever::LocalName,
+ ) -> Option<String> {
+ attrs.borrow().iter().find_map(|attr| {
+ if attr.name.local == name {
+ Some(attr.value.to_string())
+ } else {
+ None
+ }
+ })
+ }
+
+ fn extract_styles_from_attributes(
+ attrs: &RefCell<Vec<html5ever::Attribute>>,
+ ) -> HashMap<String, String> {
+ let mut styles = HashMap::new();
+
+ if let Some(style) = Self::attr_value(attrs, local_name!("style")) {
+ for decl in style.split(';') {
+ let mut parts = decl.splitn(2, ':');
+ if let Some((key, value)) = parts.next().zip(parts.next()) {
+ styles.insert(
+ key.trim().to_lowercase().to_string(),
+ value.trim().to_string(),
+ );
+ }
+ }
+ }
+
+ styles
+ }
+
+ fn extract_image(
+ &self,
+ source_range: Range<usize>,
+ attrs: &RefCell<Vec<html5ever::Attribute>>,
+ ) -> Option<Image> {
+ let src = Self::attr_value(attrs, local_name!("src"))?;
+
+ let mut image = Image::identify(src, source_range, self.file_location_directory.clone())?;
+
+ if let Some(alt) = Self::attr_value(attrs, local_name!("alt")) {
+ image.set_alt_text(alt.into());
+ }
+
+ let styles = Self::extract_styles_from_attributes(attrs);
+
+ if let Some(width) = Self::attr_value(attrs, local_name!("width"))
+ .or_else(|| styles.get("width").cloned())
+ .and_then(|width| Self::parse_html_element_dimension(&width))
+ {
+ image.set_width(width);
+ }
+
+ if let Some(height) = Self::attr_value(attrs, local_name!("height"))
+ .or_else(|| styles.get("height").cloned())
+ .and_then(|height| Self::parse_html_element_dimension(&height))
+ {
+ image.set_height(height);
+ }
+
+ Some(image)
+ }
+
+ fn parse_html_element_dimension(value: &str) -> Option<DefiniteLength> {
+ if value.ends_with("%") {
+ value
+ .trim_end_matches("%")
+ .parse::<f32>()
+ .ok()
+ .map(|value| relative(value / 100.))
+ } else {
+ value
+ .trim_end_matches("px")
+ .parse()
+ .ok()
+ .map(|value| px(value).into())
}
}
}
#[cfg(test)]
mod tests {
- use core::panic;
-
use super::*;
-
use ParsedMarkdownListItemType::*;
- use gpui::BackgroundExecutor;
+ use core::panic;
+ use gpui::{AbsoluteLength, BackgroundExecutor, DefiniteLength};
use language::{
HighlightId, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, tree_sitter_rust,
};
@@ -828,7 +1061,7 @@ mod tests {
ParsedMarkdownElement::Paragraph(vec![MarkdownParagraphChunk::Text(
ParsedMarkdownText {
source_range: 0..35,
- contents: "Some bostrikethroughld text".to_string(),
+ contents: "Some bostrikethroughld text".into(),
highlights: Vec::new(),
region_ranges: Vec::new(),
regions: Vec::new(),
@@ -926,6 +1159,8 @@ mod tests {
url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(),
},
alt_text: Some("test".into()),
+ height: None,
+ width: None,
},)
);
}
@@ -947,6 +1182,8 @@ mod tests {
url: "http://example.com/foo.png".to_string(),
},
alt_text: None,
+ height: None,
+ width: None,
},)
);
}
@@ -966,6 +1203,8 @@ mod tests {
url: "http://example.com/foo.png".to_string(),
},
alt_text: Some("foo bar baz".into()),
+ height: None,
+ width: None,
}),],
);
}
@@ -991,10 +1230,12 @@ mod tests {
url: "http://example.com/foo.png".to_string(),
},
alt_text: Some("foo".into()),
+ height: None,
+ width: None,
}),
MarkdownParagraphChunk::Text(ParsedMarkdownText {
source_range: 0..81,
- contents: " Lorem Ipsum ".to_string(),
+ contents: " Lorem Ipsum ".into(),
highlights: Vec::new(),
region_ranges: Vec::new(),
regions: Vec::new(),
@@ -1005,11 +1246,205 @@ mod tests {
url: "http://example.com/bar.png".to_string(),
},
alt_text: Some("bar".into()),
+ height: None,
+ width: None,
})
]
);
}
+ #[test]
+ fn test_parse_html_element_dimension() {
+ // Test percentage values
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("50%"),
+ Some(DefiniteLength::Fraction(0.5))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("100%"),
+ Some(DefiniteLength::Fraction(1.0))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("25%"),
+ Some(DefiniteLength::Fraction(0.25))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("0%"),
+ Some(DefiniteLength::Fraction(0.0))
+ );
+
+ // Test pixel values
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("100px"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0))))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("50px"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(50.0))))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("0px"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(0.0))))
+ );
+
+ // Test values without units (should be treated as pixels)
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("100"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0))))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("42"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0))))
+ );
+
+ // Test invalid values
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("invalid"),
+ None
+ );
+ assert_eq!(MarkdownParser::parse_html_element_dimension("px"), None);
+ assert_eq!(MarkdownParser::parse_html_element_dimension("%"), None);
+ assert_eq!(MarkdownParser::parse_html_element_dimension(""), None);
+ assert_eq!(MarkdownParser::parse_html_element_dimension("abc%"), None);
+ assert_eq!(MarkdownParser::parse_html_element_dimension("abcpx"), None);
+
+ // Test decimal values
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("50.5%"),
+ Some(DefiniteLength::Fraction(0.505))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("100.25px"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.25))))
+ );
+ assert_eq!(
+ MarkdownParser::parse_html_element_dimension("42.0"),
+ Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0))))
+ );
+ }
+
+ #[gpui::test]
+ async fn test_inline_html_image_tag() {
+ let parsed =
+ parse("<p>Some text<img src=\"http://example.com/foo.png\" /> some more text</p>")
+ .await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Paragraph(vec![
+ MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range: 0..71,
+ contents: "Some text".into(),
+ highlights: Default::default(),
+ region_ranges: Default::default(),
+ regions: Default::default()
+ }),
+ MarkdownParagraphChunk::Image(Image {
+ source_range: 0..71,
+ link: Link::Web {
+ url: "http://example.com/foo.png".to_string(),
+ },
+ alt_text: None,
+ height: None,
+ width: None,
+ }),
+ MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range: 0..71,
+ contents: " some more text".into(),
+ highlights: Default::default(),
+ region_ranges: Default::default(),
+ regions: Default::default()
+ }),
+ ])]
+ },
+ parsed
+ );
+ }
+
+ #[gpui::test]
+ async fn test_html_image_tag() {
+ let parsed = parse("<img src=\"http://example.com/foo.png\" />").await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Image(Image {
+ source_range: 0..40,
+ link: Link::Web {
+ url: "http://example.com/foo.png".to_string(),
+ },
+ alt_text: None,
+ height: None,
+ width: None,
+ })]
+ },
+ parsed
+ );
+ }
+
+ #[gpui::test]
+ async fn test_html_image_tag_with_alt_text() {
+ let parsed = parse("<img src=\"http://example.com/foo.png\" alt=\"Foo\" />").await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Image(Image {
+ source_range: 0..50,
+ link: Link::Web {
+ url: "http://example.com/foo.png".to_string(),
+ },
+ alt_text: Some("Foo".into()),
+ height: None,
+ width: None,
+ })]
+ },
+ parsed
+ );
+ }
+
+ #[gpui::test]
+ async fn test_html_image_tag_with_height_and_width() {
+ let parsed =
+ parse("<img src=\"http://example.com/foo.png\" height=\"100\" width=\"200\" />").await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Image(Image {
+ source_range: 0..65,
+ link: Link::Web {
+ url: "http://example.com/foo.png".to_string(),
+ },
+ alt_text: None,
+ height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))),
+ width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))),
+ })]
+ },
+ parsed
+ );
+ }
+
+ #[gpui::test]
+ async fn test_html_image_style_tag_with_height_and_width() {
+ let parsed = parse(
+ "<img src=\"http://example.com/foo.png\" style=\"height:100px; width:200px;\" />",
+ )
+ .await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Image(Image {
+ source_range: 0..75,
+ link: Link::Web {
+ url: "http://example.com/foo.png".to_string(),
+ },
+ alt_text: None,
+ height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))),
+ width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))),
+ })]
+ },
+ parsed
+ );
+ }
+
#[gpui::test]
async fn test_header_only_table() {
let markdown = "\
@@ -1457,7 +1892,7 @@ fn main() {
region_ranges: Vec::new(),
regions: Vec::new(),
source_range,
- contents: contents.to_string(),
+ contents: contents.to_string().into(),
})]
}
@@ -115,8 +115,7 @@ impl MarkdownPreviewView {
pane.activate_item(existing_follow_view_idx, true, true, window, cx);
});
} else {
- let view =
- Self::create_following_markdown_view(workspace, editor.clone(), window, cx);
+ let view = Self::create_following_markdown_view(workspace, editor, window, cx);
workspace.active_pane().update(cx, |pane, cx| {
pane.add_item(Box::new(view.clone()), true, true, None, window, cx)
});
@@ -1,5 +1,5 @@
use crate::markdown_elements::{
- HeadingLevel, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown,
+ HeadingLevel, Image, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown,
ParsedMarkdownBlockQuote, ParsedMarkdownCodeBlock, ParsedMarkdownElement,
ParsedMarkdownHeading, ParsedMarkdownListItem, ParsedMarkdownListItemType, ParsedMarkdownTable,
ParsedMarkdownTableAlignment, ParsedMarkdownTableRow,
@@ -164,6 +164,7 @@ pub fn render_markdown_block(block: &ParsedMarkdownElement, cx: &mut RenderConte
BlockQuote(block_quote) => render_markdown_block_quote(block_quote, cx),
CodeBlock(code_block) => render_markdown_code_block(code_block, cx),
HorizontalRule(_) => render_markdown_rule(cx),
+ Image(image) => render_markdown_image(image, cx),
}
}
@@ -276,7 +277,11 @@ fn render_markdown_list_item(
.items_start()
.children(vec![
bullet,
- div().children(contents).pr(cx.scaled_rems(1.0)).w_full(),
+ v_flex()
+ .children(contents)
+ .gap(cx.scaled_rems(1.0))
+ .pr(cx.scaled_rems(1.0))
+ .w_full(),
]);
cx.with_common_p(item).into_any()
@@ -623,15 +628,13 @@ fn render_markdown_code_block(
}
fn render_markdown_paragraph(parsed: &MarkdownParagraph, cx: &mut RenderContext) -> AnyElement {
- cx.with_common_p(div())
+ cx.with_common_p(h_flex().flex_wrap())
.children(render_markdown_text(parsed, cx))
- .flex()
- .flex_col()
.into_any_element()
}
fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) -> Vec<AnyElement> {
- let mut any_element = vec![];
+ let mut any_element = Vec::with_capacity(parsed_new.len());
// these values are cloned in-order satisfy borrow checker
let syntax_theme = cx.syntax_theme.clone();
let workspace_clone = cx.workspace.clone();
@@ -722,65 +725,7 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext)
}
MarkdownParagraphChunk::Image(image) => {
- let image_resource = match image.link.clone() {
- Link::Web { url } => Resource::Uri(url.into()),
- Link::Path { path, .. } => Resource::Path(Arc::from(path)),
- };
-
- let element_id = cx.next_id(&image.source_range);
-
- let image_element = div()
- .id(element_id)
- .cursor_pointer()
- .child(
- img(ImageSource::Resource(image_resource))
- .max_w_full()
- .with_fallback({
- let alt_text = image.alt_text.clone();
- move || div().children(alt_text.clone()).into_any_element()
- }),
- )
- .tooltip({
- let link = image.link.clone();
- move |_, cx| {
- InteractiveMarkdownElementTooltip::new(
- Some(link.to_string()),
- "open image",
- cx,
- )
- .into()
- }
- })
- .on_click({
- let workspace = workspace_clone.clone();
- let link = image.link.clone();
- move |_, window, cx| {
- if window.modifiers().secondary() {
- match &link {
- Link::Web { url } => cx.open_url(url),
- Link::Path { path, .. } => {
- if let Some(workspace) = &workspace {
- _ = workspace.update(cx, |workspace, cx| {
- workspace
- .open_abs_path(
- path.clone(),
- OpenOptions {
- visible: Some(OpenVisible::None),
- ..Default::default()
- },
- window,
- cx,
- )
- .detach();
- });
- }
- }
- }
- }
- }
- })
- .into_any();
- any_element.push(image_element);
+ any_element.push(render_markdown_image(image, cx));
}
}
}
@@ -793,18 +738,86 @@ fn render_markdown_rule(cx: &mut RenderContext) -> AnyElement {
div().py(cx.scaled_rems(0.5)).child(rule).into_any()
}
+fn render_markdown_image(image: &Image, cx: &mut RenderContext) -> AnyElement {
+ let image_resource = match image.link.clone() {
+ Link::Web { url } => Resource::Uri(url.into()),
+ Link::Path { path, .. } => Resource::Path(Arc::from(path)),
+ };
+
+ let element_id = cx.next_id(&image.source_range);
+ let workspace = cx.workspace.clone();
+
+ div()
+ .id(element_id)
+ .cursor_pointer()
+ .child(
+ img(ImageSource::Resource(image_resource))
+ .max_w_full()
+ .with_fallback({
+ let alt_text = image.alt_text.clone();
+ move || div().children(alt_text.clone()).into_any_element()
+ })
+ .when_some(image.height, |this, height| this.h(height))
+ .when_some(image.width, |this, width| this.w(width)),
+ )
+ .tooltip({
+ let link = image.link.clone();
+ let alt_text = image.alt_text.clone();
+ move |_, cx| {
+ InteractiveMarkdownElementTooltip::new(
+ Some(alt_text.clone().unwrap_or(link.to_string().into())),
+ "open image",
+ cx,
+ )
+ .into()
+ }
+ })
+ .on_click({
+ let link = image.link.clone();
+ move |_, window, cx| {
+ if window.modifiers().secondary() {
+ match &link {
+ Link::Web { url } => cx.open_url(url),
+ Link::Path { path, .. } => {
+ if let Some(workspace) = &workspace {
+ _ = workspace.update(cx, |workspace, cx| {
+ workspace
+ .open_abs_path(
+ path.clone(),
+ OpenOptions {
+ visible: Some(OpenVisible::None),
+ ..Default::default()
+ },
+ window,
+ cx,
+ )
+ .detach();
+ });
+ }
+ }
+ }
+ }
+ }
+ })
+ .into_any()
+}
+
struct InteractiveMarkdownElementTooltip {
tooltip_text: Option<SharedString>,
- action_text: String,
+ action_text: SharedString,
}
impl InteractiveMarkdownElementTooltip {
- pub fn new(tooltip_text: Option<String>, action_text: &str, cx: &mut App) -> Entity<Self> {
+ pub fn new(
+ tooltip_text: Option<SharedString>,
+ action_text: impl Into<SharedString>,
+ cx: &mut App,
+ ) -> Entity<Self> {
let tooltip_text = tooltip_text.map(|t| util::truncate_and_trailoff(&t, 50).into());
cx.new(|_cx| Self {
tooltip_text,
- action_text: action_text.to_string(),
+ action_text: action_text.into(),
})
}
}
@@ -20,14 +20,14 @@ fn replace_deprecated_settings_values(
.nodes_for_capture_index(parent_object_capture_ix)
.next()?
.byte_range();
- let parent_object_name = contents.get(parent_object_range.clone())?;
+ let parent_object_name = contents.get(parent_object_range)?;
let setting_name_ix = query.capture_index_for_name("setting_name")?;
let setting_name_range = mat
.nodes_for_capture_index(setting_name_ix)
.next()?
.byte_range();
- let setting_name = contents.get(setting_name_range.clone())?;
+ let setting_name = contents.get(setting_name_range)?;
let setting_value_ix = query.capture_index_for_name("setting_value")?;
let setting_value_range = mat
@@ -279,7 +279,7 @@ fn rename_context_key(
new_predicate = new_predicate.replace(old_key, new_key);
}
if new_predicate != old_predicate {
- Some((context_predicate_range, new_predicate.to_string()))
+ Some((context_predicate_range, new_predicate))
} else {
None
}
@@ -57,7 +57,7 @@ pub fn replace_edit_prediction_provider_setting(
.nodes_for_capture_index(parent_object_capture_ix)
.next()?
.byte_range();
- let parent_object_name = contents.get(parent_object_range.clone())?;
+ let parent_object_name = contents.get(parent_object_range)?;
let setting_name_ix = query.capture_index_for_name("setting_name")?;
let setting_range = mat
@@ -25,7 +25,7 @@ fn replace_tab_close_button_setting_key(
.nodes_for_capture_index(parent_object_capture_ix)
.next()?
.byte_range();
- let parent_object_name = contents.get(parent_object_range.clone())?;
+ let parent_object_name = contents.get(parent_object_range)?;
let setting_name_ix = query.capture_index_for_name("setting_name")?;
let setting_range = mat
@@ -51,14 +51,14 @@ fn replace_tab_close_button_setting_value(
.nodes_for_capture_index(parent_object_capture_ix)
.next()?
.byte_range();
- let parent_object_name = contents.get(parent_object_range.clone())?;
+ let parent_object_name = contents.get(parent_object_range)?;
let setting_name_ix = query.capture_index_for_name("setting_name")?;
let setting_name_range = mat
.nodes_for_capture_index(setting_name_ix)
.next()?
.byte_range();
- let setting_name = contents.get(setting_name_range.clone())?;
+ let setting_name = contents.get(setting_name_range)?;
let setting_value_ix = query.capture_index_for_name("setting_value")?;
let setting_value_range = mat
@@ -19,7 +19,7 @@ fn replace_setting_value(
.nodes_for_capture_index(setting_capture_ix)
.next()?
.byte_range();
- let setting_name = contents.get(setting_name_range.clone())?;
+ let setting_name = contents.get(setting_name_range)?;
if setting_name != "hide_mouse_while_typing" {
return None;
@@ -19,7 +19,7 @@ fn replace_preferred_completion_mode_value(
.nodes_for_capture_index(parent_object_capture_ix)
.next()?
.byte_range();
- let parent_object_name = contents.get(parent_object_range.clone())?;
+ let parent_object_name = contents.get(parent_object_range)?;
if parent_object_name != "agent" {
return None;
@@ -30,7 +30,7 @@ fn replace_preferred_completion_mode_value(
.nodes_for_capture_index(setting_name_capture_ix)
.next()?
.byte_range();
- let setting_name = contents.get(setting_name_range.clone())?;
+ let setting_name = contents.get(setting_name_range)?;
if setting_name != "preferred_completion_mode" {
return None;
@@ -28,7 +28,7 @@ fn migrate(text: &str, patterns: MigrationPatterns, query: &Query) -> Result<Opt
let mut parser = tree_sitter::Parser::new();
parser.set_language(&tree_sitter_json::LANGUAGE.into())?;
let syntax_tree = parser
- .parse(&text, None)
+ .parse(text, None)
.context("failed to parse settings")?;
let mut cursor = tree_sitter::QueryCursor::new();
@@ -286,12 +286,13 @@ pub enum Prediction {
}
#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "snake_case")]
+#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
Auto,
Required,
None,
Any,
+ #[serde(untagged)]
Function(ToolDefinition),
}
@@ -482,7 +483,7 @@ pub async fn stream_completion(
.method(Method::POST)
.uri(uri)
.header("Content-Type", "application/json")
- .header("Authorization", format!("Bearer {}", api_key));
+ .header("Authorization", format!("Bearer {}", api_key.trim()));
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
@@ -113,15 +113,10 @@ pub enum Event {
transaction_id: TransactionId,
},
Reloaded,
- ReloadNeeded,
-
LanguageChanged(BufferId),
- CapabilityChanged,
Reparsed(BufferId),
Saved,
FileHandleChanged,
- Closed,
- Discarded,
DirtyChanged,
DiagnosticsUpdated,
BufferDiffChanged,
@@ -735,7 +730,7 @@ impl MultiBuffer {
pub fn as_singleton(&self) -> Option<Entity<Buffer>> {
if self.singleton {
- return Some(
+ Some(
self.buffers
.borrow()
.values()
@@ -743,7 +738,7 @@ impl MultiBuffer {
.unwrap()
.buffer
.clone(),
- );
+ )
} else {
None
}
@@ -835,7 +830,7 @@ impl MultiBuffer {
this.convert_edits_to_buffer_edits(edits, &snapshot, &original_indent_columns);
drop(snapshot);
- let mut buffer_ids = Vec::new();
+ let mut buffer_ids = Vec::with_capacity(buffer_edits.len());
for (buffer_id, mut edits) in buffer_edits {
buffer_ids.push(buffer_id);
edits.sort_by_key(|edit| edit.range.start);
@@ -2196,6 +2191,15 @@ impl MultiBuffer {
})
}
+ pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option<Entity<Buffer>> {
+ if let Some(buffer_id) = anchor.buffer_id {
+ self.buffer(buffer_id)
+ } else {
+ let (_, buffer, _) = self.excerpt_containing(anchor, cx)?;
+ Some(buffer)
+ }
+ }
+
// If point is at the end of the buffer, the last excerpt is returned
pub fn point_to_buffer_offset<T: ToOffset>(
&self,
@@ -2424,28 +2428,24 @@ impl MultiBuffer {
event: &language::BufferEvent,
cx: &mut Context<Self>,
) {
+ use language::BufferEvent;
cx.emit(match event {
- language::BufferEvent::Edited => Event::Edited {
+ BufferEvent::Edited => Event::Edited {
singleton_buffer_edited: true,
- edited_buffer: Some(buffer.clone()),
+ edited_buffer: Some(buffer),
},
- language::BufferEvent::DirtyChanged => Event::DirtyChanged,
- language::BufferEvent::Saved => Event::Saved,
- language::BufferEvent::FileHandleChanged => Event::FileHandleChanged,
- language::BufferEvent::Reloaded => Event::Reloaded,
- language::BufferEvent::ReloadNeeded => Event::ReloadNeeded,
- language::BufferEvent::LanguageChanged => {
- Event::LanguageChanged(buffer.read(cx).remote_id())
- }
- language::BufferEvent::Reparsed => Event::Reparsed(buffer.read(cx).remote_id()),
- language::BufferEvent::DiagnosticsUpdated => Event::DiagnosticsUpdated,
- language::BufferEvent::Closed => Event::Closed,
- language::BufferEvent::Discarded => Event::Discarded,
- language::BufferEvent::CapabilityChanged => {
+ BufferEvent::DirtyChanged => Event::DirtyChanged,
+ BufferEvent::Saved => Event::Saved,
+ BufferEvent::FileHandleChanged => Event::FileHandleChanged,
+ BufferEvent::Reloaded => Event::Reloaded,
+ BufferEvent::LanguageChanged => Event::LanguageChanged(buffer.read(cx).remote_id()),
+ BufferEvent::Reparsed => Event::Reparsed(buffer.read(cx).remote_id()),
+ BufferEvent::DiagnosticsUpdated => Event::DiagnosticsUpdated,
+ BufferEvent::CapabilityChanged => {
self.capability = buffer.read(cx).capability();
- Event::CapabilityChanged
+ return;
}
- language::BufferEvent::Operation { .. } => return,
+ BufferEvent::Operation { .. } | BufferEvent::ReloadNeeded => return,
});
}
@@ -2543,6 +2543,10 @@ impl MultiBuffer {
.collect()
}
+ pub fn all_buffer_ids(&self) -> Vec<BufferId> {
+ self.buffers.borrow().keys().copied().collect()
+ }
+
pub fn buffer(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
self.buffers
.borrow()
@@ -3560,9 +3564,7 @@ impl MultiBuffer {
let multi = cx.new(|_| Self::new(Capability::ReadWrite));
for (text, ranges) in excerpts {
let buffer = cx.new(|cx| Buffer::local(text, cx));
- let excerpt_ranges = ranges
- .into_iter()
- .map(|range| ExcerptRange::new(range.clone()));
+ let excerpt_ranges = ranges.into_iter().map(ExcerptRange::new);
multi.update(cx, |multi, cx| {
multi.push_excerpts(buffer, excerpt_ranges, cx)
});
@@ -3578,7 +3580,7 @@ impl MultiBuffer {
pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui::App) -> Entity<Self> {
cx.new(|cx| {
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
- let mutation_count = rng.gen_range(1..=5);
+ let mutation_count = rng.random_range(1..=5);
multibuffer.randomly_edit_excerpts(rng, mutation_count, cx);
multibuffer
})
@@ -3601,16 +3603,17 @@ impl MultiBuffer {
}
let new_start = last_end.map_or(0, |last_end| last_end + 1);
- let end = snapshot.clip_offset(rng.gen_range(new_start..=snapshot.len()), Bias::Right);
- let start = snapshot.clip_offset(rng.gen_range(new_start..=end), Bias::Right);
+ let end =
+ snapshot.clip_offset(rng.random_range(new_start..=snapshot.len()), Bias::Right);
+ let start = snapshot.clip_offset(rng.random_range(new_start..=end), Bias::Right);
last_end = Some(end);
let mut range = start..end;
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
mem::swap(&mut range.start, &mut range.end);
}
- let new_text_len = rng.gen_range(0..10);
+ let new_text_len = rng.random_range(0..10);
let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
edits.push((range, new_text.into()));
@@ -3637,18 +3640,18 @@ impl MultiBuffer {
let mut buffers = Vec::new();
for _ in 0..mutation_count {
- if rng.gen_bool(0.05) {
+ if rng.random_bool(0.05) {
log::info!("Clearing multi-buffer");
self.clear(cx);
continue;
- } else if rng.gen_bool(0.1) && !self.excerpt_ids().is_empty() {
+ } else if rng.random_bool(0.1) && !self.excerpt_ids().is_empty() {
let ids = self.excerpt_ids();
let mut excerpts = HashSet::default();
- for _ in 0..rng.gen_range(0..ids.len()) {
+ for _ in 0..rng.random_range(0..ids.len()) {
excerpts.extend(ids.choose(rng).copied());
}
- let line_count = rng.gen_range(0..5);
+ let line_count = rng.random_range(0..5);
log::info!("Expanding excerpts {excerpts:?} by {line_count} lines");
@@ -3662,8 +3665,8 @@ impl MultiBuffer {
}
let excerpt_ids = self.excerpt_ids();
- if excerpt_ids.is_empty() || (rng.r#gen() && excerpt_ids.len() < max_excerpts) {
- let buffer_handle = if rng.r#gen() || self.buffers.borrow().is_empty() {
+ if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) {
+ let buffer_handle = if rng.random() || self.buffers.borrow().is_empty() {
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
buffers.push(cx.new(|cx| Buffer::local(text, cx)));
let buffer = buffers.last().unwrap().read(cx);
@@ -3685,11 +3688,11 @@ impl MultiBuffer {
let buffer = buffer_handle.read(cx);
let buffer_text = buffer.text();
- let ranges = (0..rng.gen_range(0..5))
+ let ranges = (0..rng.random_range(0..5))
.map(|_| {
let end_ix =
- buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
- let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Right);
+ let start_ix = buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left);
ExcerptRange::new(start_ix..end_ix)
})
.collect::<Vec<_>>();
@@ -3706,7 +3709,7 @@ impl MultiBuffer {
let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx);
log::info!("Inserted with ids: {:?}", excerpt_id);
} else {
- let remove_count = rng.gen_range(1..=excerpt_ids.len());
+ let remove_count = rng.random_range(1..=excerpt_ids.len());
let mut excerpts_to_remove = excerpt_ids
.choose_multiple(rng, remove_count)
.cloned()
@@ -3728,7 +3731,7 @@ impl MultiBuffer {
) {
use rand::prelude::*;
- if rng.gen_bool(0.7) || self.singleton {
+ if rng.random_bool(0.7) || self.singleton {
let buffer = self
.buffers
.borrow()
@@ -3738,7 +3741,7 @@ impl MultiBuffer {
if let Some(buffer) = buffer {
buffer.update(cx, |buffer, cx| {
- if rng.r#gen() {
+ if rng.random() {
buffer.randomly_edit(rng, mutation_count, cx);
} else {
buffer.randomly_undo_redo(rng, cx);
@@ -4071,13 +4074,9 @@ impl MultiBufferSnapshot {
buffer_end = buffer_end.min(end_buffer_offset);
}
- if let Some(iterator) =
- get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end)
- {
- Some(&mut current_excerpt_metadata.insert((excerpt.id, iterator)).1)
- } else {
- None
- }
+ get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end).map(|iterator| {
+ &mut current_excerpt_metadata.insert((excerpt.id, iterator)).1
+ })
};
// Visit each metadata item.
@@ -5205,13 +5204,9 @@ impl MultiBufferSnapshot {
if offset == diff_transforms.start().0
&& bias == Bias::Left
&& let Some(prev_item) = diff_transforms.prev_item()
+ && let DiffTransform::DeletedHunk { .. } = prev_item
{
- match prev_item {
- DiffTransform::DeletedHunk { .. } => {
- diff_transforms.prev();
- }
- _ => {}
- }
+ diff_transforms.prev();
}
let offset_in_transform = offset - diff_transforms.start().0;
let mut excerpt_offset = diff_transforms.start().1;
@@ -5238,15 +5233,6 @@ impl MultiBufferSnapshot {
excerpt_offset += ExcerptOffset::new(offset_in_transform);
};
- if let Some((excerpt_id, buffer_id, buffer)) = self.as_singleton() {
- return Anchor {
- buffer_id: Some(buffer_id),
- excerpt_id: *excerpt_id,
- text_anchor: buffer.anchor_at(excerpt_offset.value, bias),
- diff_base_anchor,
- };
- }
-
let mut excerpts = self
.excerpts
.cursor::<Dimensions<ExcerptOffset, Option<ExcerptId>>>(&());
@@ -5270,10 +5256,17 @@ impl MultiBufferSnapshot {
text_anchor,
diff_base_anchor,
}
- } else if excerpt_offset.is_zero() && bias == Bias::Left {
- Anchor::min()
} else {
- Anchor::max()
+ let mut anchor = if excerpt_offset.is_zero() && bias == Bias::Left {
+ Anchor::min()
+ } else {
+ Anchor::max()
+ };
+ // TODO this is a hack, remove it
+ if let Some((excerpt_id, _, _)) = self.as_singleton() {
+ anchor.excerpt_id = *excerpt_id;
+ }
+ anchor
}
}
@@ -6315,6 +6308,14 @@ impl MultiBufferSnapshot {
})
}
+ pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option<BufferId> {
+ if let Some(id) = anchor.buffer_id {
+ return Some(id);
+ }
+ let excerpt = self.excerpt_containing(anchor..anchor)?;
+ Some(excerpt.buffer_id())
+ }
+
pub fn selections_in_range<'a>(
&'a self,
range: &'a Range<Anchor>,
@@ -6388,8 +6389,8 @@ impl MultiBufferSnapshot {
#[cfg(any(test, feature = "test-support"))]
impl MultiBufferSnapshot {
pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
- let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
- let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+ let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
+ let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
start..end
}
@@ -6993,7 +6994,7 @@ impl Excerpt {
}
fn contains(&self, anchor: &Anchor) -> bool {
- Some(self.buffer_id) == anchor.buffer_id
+ (anchor.buffer_id == None || anchor.buffer_id == Some(self.buffer_id))
&& self
.range
.context
@@ -7153,7 +7154,7 @@ impl ExcerptId {
Self(usize::MAX)
}
- pub fn to_proto(&self) -> u64 {
+ pub fn to_proto(self) -> u64 {
self.0 as _
}
@@ -2250,11 +2250,11 @@ impl ReferenceMultibuffer {
let base_buffer = diff.base_text();
let mut offset = buffer_range.start;
- let mut hunks = diff
+ let hunks = diff
.hunks_intersecting_range(excerpt.range.clone(), buffer, cx)
.peekable();
- while let Some(hunk) = hunks.next() {
+ for hunk in hunks {
// Ignore hunks that are outside the excerpt range.
let mut hunk_range = hunk.buffer_range.to_offset(buffer);
@@ -2491,12 +2491,12 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) {
for _ in 0..operations {
let snapshot = buf.update(cx, |buf, _| buf.snapshot());
- let num_ranges = rng.gen_range(0..=10);
+ let num_ranges = rng.random_range(0..=10);
let max_row = snapshot.max_point().row;
let mut ranges = (0..num_ranges)
.map(|_| {
- let start = rng.gen_range(0..max_row);
- let end = rng.gen_range(start + 1..max_row + 1);
+ let start = rng.random_range(0..max_row);
+ let end = rng.random_range(start + 1..max_row + 1);
Point::row_range(start..end)
})
.collect::<Vec<_>>();
@@ -2562,11 +2562,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let mut needs_diff_calculation = false;
for _ in 0..operations {
- match rng.gen_range(0..100) {
+ match rng.random_range(0..100) {
0..=14 if !buffers.is_empty() => {
let buffer = buffers.choose(&mut rng).unwrap();
buffer.update(cx, |buf, cx| {
- let edit_count = rng.gen_range(1..5);
+ let edit_count = rng.random_range(1..5);
buf.randomly_edit(&mut rng, edit_count, cx);
log::info!("buffer text:\n{}", buf.text());
needs_diff_calculation = true;
@@ -2577,11 +2577,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
multibuffer.update(cx, |multibuffer, cx| {
let ids = multibuffer.excerpt_ids();
let mut excerpts = HashSet::default();
- for _ in 0..rng.gen_range(0..ids.len()) {
+ for _ in 0..rng.random_range(0..ids.len()) {
excerpts.extend(ids.choose(&mut rng).copied());
}
- let line_count = rng.gen_range(0..5);
+ let line_count = rng.random_range(0..5);
let excerpt_ixs = excerpts
.iter()
@@ -2600,7 +2600,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
}
20..=29 if !reference.excerpts.is_empty() => {
let mut ids_to_remove = vec![];
- for _ in 0..rng.gen_range(1..=3) {
+ for _ in 0..rng.random_range(1..=3) {
let Some(excerpt) = reference.excerpts.choose(&mut rng) else {
break;
};
@@ -2620,8 +2620,12 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let multibuffer =
multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
let offset =
- multibuffer.clip_offset(rng.gen_range(0..=multibuffer.len()), Bias::Left);
- let bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
+ multibuffer.clip_offset(rng.random_range(0..=multibuffer.len()), Bias::Left);
+ let bias = if rng.random() {
+ Bias::Left
+ } else {
+ Bias::Right
+ };
log::info!("Creating anchor at {} with bias {:?}", offset, bias);
anchors.push(multibuffer.anchor_at(offset, bias));
anchors.sort_by(|a, b| a.cmp(b, &multibuffer));
@@ -2654,7 +2658,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
45..=55 if !reference.excerpts.is_empty() => {
multibuffer.update(cx, |multibuffer, cx| {
let snapshot = multibuffer.snapshot(cx);
- let excerpt_ix = rng.gen_range(0..reference.excerpts.len());
+ let excerpt_ix = rng.random_range(0..reference.excerpts.len());
let excerpt = &reference.excerpts[excerpt_ix];
let start = excerpt.range.start;
let end = excerpt.range.end;
@@ -2691,7 +2695,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
});
}
_ => {
- let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) {
+ let buffer_handle = if buffers.is_empty() || rng.random_bool(0.4) {
let mut base_text = util::RandomCharIter::new(&mut rng)
.take(256)
.collect::<String>();
@@ -2708,7 +2712,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
buffers.choose(&mut rng).unwrap()
};
- let prev_excerpt_ix = rng.gen_range(0..=reference.excerpts.len());
+ let prev_excerpt_ix = rng.random_range(0..=reference.excerpts.len());
let prev_excerpt_id = reference
.excerpts
.get(prev_excerpt_ix)
@@ -2716,8 +2720,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let excerpt_ix = (prev_excerpt_ix + 1).min(reference.excerpts.len());
let (range, anchor_range) = buffer_handle.read_with(cx, |buffer, _| {
- let end_row = rng.gen_range(0..=buffer.max_point().row);
- let start_row = rng.gen_range(0..=end_row);
+ let end_row = rng.random_range(0..=buffer.max_point().row);
+ let start_row = rng.random_range(0..=end_row);
let end_ix = buffer.point_to_offset(Point::new(end_row, 0));
let start_ix = buffer.point_to_offset(Point::new(start_row, 0));
let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix);
@@ -2766,7 +2770,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
}
}
- if rng.gen_bool(0.3) {
+ if rng.random_bool(0.3) {
multibuffer.update(cx, |multibuffer, cx| {
old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe()));
})
@@ -2815,7 +2819,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
for _ in 0..5 {
- let start_row = rng.gen_range(0..=expected_row_infos.len());
+ let start_row = rng.random_range(0..=expected_row_infos.len());
assert_eq!(
snapshot
.row_infos(MultiBufferRow(start_row as u32))
@@ -2872,8 +2876,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let text_rope = Rope::from(expected_text.as_str());
for _ in 0..10 {
- let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
- let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right);
+ let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left);
let text_for_range = snapshot
.text_for_range(start_ix..end_ix)
@@ -2908,7 +2912,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
}
for _ in 0..10 {
- let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
+ let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right);
assert_eq!(
snapshot.reversed_chars_at(end_ix).collect::<String>(),
expected_text[..end_ix].chars().rev().collect::<String>(),
@@ -2916,8 +2920,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
}
for _ in 0..10 {
- let end_ix = rng.gen_range(0..=text_rope.len());
- let start_ix = rng.gen_range(0..=end_ix);
+ let end_ix = rng.random_range(0..=text_rope.len());
+ let start_ix = rng.random_range(0..=end_ix);
assert_eq!(
snapshot
.bytes_in_range(start_ix..end_ix)
@@ -76,9 +76,8 @@ impl NodeRuntime {
let mut state = self.0.lock().await;
let options = loop {
- match state.options.borrow().as_ref() {
- Some(options) => break options.clone(),
- None => {}
+ if let Some(options) = state.options.borrow().as_ref() {
+ break options.clone();
}
match state.options.changed().await {
Ok(()) => {}
@@ -117,6 +117,10 @@ pub enum ChatMessage {
System {
content: String,
},
+ Tool {
+ tool_name: String,
+ content: String,
+ },
}
#[derive(Serialize, Deserialize, Debug)]
@@ -19,7 +19,7 @@ use util::ResultExt;
use workspace::{ModalView, Workspace};
use zed_actions::agent::OpenSettings;
-const FEATURED_PROVIDERS: [&'static str; 4] = ["anthropic", "google", "openai", "ollama"];
+const FEATURED_PROVIDERS: [&str; 4] = ["anthropic", "google", "openai", "ollama"];
fn render_llm_provider_section(
tab_index: &mut isize,
@@ -264,13 +264,9 @@ pub(crate) fn render_ai_setup_page(
);
let fs = <dyn Fs>::global(cx);
- update_settings_file::<DisableAiSettings>(
- fs,
- cx,
- move |ai_settings: &mut Option<bool>, _| {
- *ai_settings = Some(enabled);
- },
- );
+ update_settings_file::<DisableAiSettings>(fs, cx, move |ai_settings, _| {
+ ai_settings.disable_ai = Some(enabled);
+ });
},
)
.tab_index({
@@ -283,17 +279,13 @@ pub(crate) fn render_ai_setup_page(
v_flex()
.mt_2()
.gap_6()
- .child({
- let mut ai_upsell_card =
- AiUpsellCard::new(client, &user_store, user_store.read(cx).plan(), cx);
-
- ai_upsell_card.tab_index = Some({
- tab_index += 1;
- tab_index - 1
- });
-
- ai_upsell_card
- })
+ .child(
+ AiUpsellCard::new(client, &user_store, user_store.read(cx).plan(), cx)
+ .tab_index(Some({
+ tab_index += 1;
+ tab_index - 1
+ })),
+ )
.child(render_llm_provider_section(
&mut tab_index,
workspace,
@@ -410,7 +402,7 @@ impl AiPrivacyTooltip {
impl Render for AiPrivacyTooltip {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- const DESCRIPTION: &'static str = "We believe in opt-in data sharing as the default for building AI products, rather than opt-out. We'll only use or store your data if you affirmatively send it to us. ";
+ const DESCRIPTION: &str = "We believe in opt-in data sharing as the default for building AI products, rather than opt-out. We'll only use or store your data if you affirmatively send it to us. ";
tooltip_container(window, cx, move |this, _, _| {
this.child(
@@ -187,7 +187,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate {
);
update_settings_file::<BaseKeymap>(self.fs.clone(), cx, move |setting, _| {
- *setting = Some(base_keymap)
+ setting.base_keymap = Some(base_keymap)
});
}
@@ -16,8 +16,8 @@ use vim_mode_setting::VimModeSetting;
use crate::theme_preview::{ThemePreviewStyle, ThemePreviewTile};
-const LIGHT_THEMES: [&'static str; 3] = ["One Light", "Ayu Light", "Gruvbox Light"];
-const DARK_THEMES: [&'static str; 3] = ["One Dark", "Ayu Dark", "Gruvbox Dark"];
+const LIGHT_THEMES: [&str; 3] = ["One Light", "Ayu Light", "Gruvbox Light"];
+const DARK_THEMES: [&str; 3] = ["One Dark", "Ayu Dark", "Gruvbox Dark"];
const FAMILY_NAMES: [SharedString; 3] = [
SharedString::new_static("One"),
SharedString::new_static("Ayu"),
@@ -68,6 +68,12 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
MODE_NAMES[mode as usize].clone(),
move |_, _, cx| {
write_mode_change(mode, cx);
+
+ telemetry::event!(
+ "Welcome Theme mode Changed",
+ from = theme_mode,
+ to = mode
+ );
},
)
}),
@@ -105,7 +111,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
ThemeMode::Dark => Appearance::Dark,
ThemeMode::System => *system_appearance,
};
- let current_theme_name = theme_selection.theme(appearance);
+ let current_theme_name = SharedString::new(theme_selection.theme(appearance));
let theme_names = match appearance {
Appearance::Light => LIGHT_THEMES,
@@ -114,7 +120,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
let themes = theme_names.map(|theme| theme_registry.get(theme).unwrap());
- let theme_previews = [0, 1, 2].map(|index| {
+ [0, 1, 2].map(|index| {
let theme = &themes[index];
let is_selected = theme.name == current_theme_name;
let name = theme.name.clone();
@@ -126,7 +132,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
.gap_1()
.child(
h_flex()
- .id(name.clone())
+ .id(name)
.relative()
.w_full()
.border_2()
@@ -149,8 +155,15 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
})
.on_click({
let theme_name = theme.name.clone();
+ let current_theme_name = current_theme_name.clone();
+
move |_, _, cx| {
write_theme_change(theme_name.clone(), theme_mode, cx);
+ telemetry::event!(
+ "Welcome Theme Changed",
+ from = current_theme_name,
+ to = theme_name
+ );
}
})
.map(|this| {
@@ -176,9 +189,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
.color(Color::Muted)
.size(LabelSize::Small),
)
- });
-
- theme_previews
+ })
}
fn write_mode_change(mode: ThemeMode, cx: &mut App) {
@@ -203,7 +214,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
});
} else {
let appearance = *SystemAppearance::global(cx);
- settings.set_theme(theme.clone(), appearance);
+ settings.set_theme(theme, appearance);
}
});
}
@@ -241,6 +252,17 @@ fn render_telemetry_section(tab_index: &mut isize, cx: &App) -> impl IntoElement
cx,
move |setting, _| setting.metrics = Some(enabled),
);
+
+            // This telemetry event shouldn't fire when telemetry is off. If it does, we'll be
+            // alerted and can fix it in a timely manner to respect the user's choice.
+ telemetry::event!("Welcome Page Telemetry Metrics Toggled",
+ options = if enabled {
+ "on"
+ } else {
+ "off"
+ }
+ );
+
}},
).tab_index({
*tab_index += 1;
@@ -269,6 +291,16 @@ fn render_telemetry_section(tab_index: &mut isize, cx: &App) -> impl IntoElement
cx,
move |setting, _| setting.diagnostics = Some(enabled),
);
+
+            // This telemetry event shouldn't fire when telemetry is off. If it does, we'll be
+            // alerted and can fix it in a timely manner to respect the user's choice.
+ telemetry::event!("Welcome Page Telemetry Diagnostics Toggled",
+ options = if enabled {
+ "on"
+ } else {
+ "off"
+ }
+ );
}
}
).tab_index({
@@ -327,8 +359,10 @@ fn render_base_keymap_section(tab_index: &mut isize, cx: &mut App) -> impl IntoE
let fs = <dyn Fs>::global(cx);
update_settings_file::<BaseKeymap>(fs, cx, move |setting, _| {
- *setting = Some(keymap_base);
+ setting.base_keymap = Some(keymap_base);
});
+
+ telemetry::event!("Welcome Keymap Changed", keymap = keymap_base);
}
}
@@ -346,13 +380,21 @@ fn render_vim_mode_switch(tab_index: &mut isize, cx: &mut App) -> impl IntoEleme
{
let fs = <dyn Fs>::global(cx);
move |&selection, _, cx| {
- update_settings_file::<VimModeSetting>(fs.clone(), cx, move |setting, _| {
- *setting = match selection {
- ToggleState::Selected => Some(true),
- ToggleState::Unselected => Some(false),
- ToggleState::Indeterminate => None,
+ let vim_mode = match selection {
+ ToggleState::Selected => true,
+ ToggleState::Unselected => false,
+ ToggleState::Indeterminate => {
+ return;
}
+ };
+ update_settings_file::<VimModeSetting>(fs.clone(), cx, move |setting, _| {
+ setting.vim_mode = Some(vim_mode);
});
+
+ telemetry::event!(
+ "Welcome Vim Mode Toggled",
+ options = if vim_mode { "on" } else { "off" },
+ );
}
},
)
@@ -104,7 +104,7 @@ fn write_ui_font_family(font: SharedString, cx: &mut App) {
"Welcome Font Changed",
type = "ui font",
old = theme_settings.ui_font_family,
- new = font.clone()
+ new = font
);
theme_settings.ui_font_family = Some(FontFamilyName(font.into()));
});
@@ -134,7 +134,7 @@ fn write_buffer_font_family(font_family: SharedString, cx: &mut App) {
"Welcome Font Changed",
type = "editor font",
old = theme_settings.buffer_font_family,
- new = font_family.clone()
+ new = font_family
);
theme_settings.buffer_font_family = Some(FontFamilyName(font_family.into()));
@@ -314,7 +314,7 @@ fn render_font_customization_section(
.child(
PopoverMenu::new("ui-font-picker")
.menu({
- let ui_font_picker = ui_font_picker.clone();
+ let ui_font_picker = ui_font_picker;
move |_window, _cx| Some(ui_font_picker.clone())
})
.trigger(
@@ -378,7 +378,7 @@ fn render_font_customization_section(
.child(
PopoverMenu::new("buffer-font-picker")
.menu({
- let buffer_font_picker = buffer_font_picker.clone();
+ let buffer_font_picker = buffer_font_picker;
move |_window, _cx| Some(buffer_font_picker.clone())
})
.trigger(
@@ -449,28 +449,28 @@ impl FontPickerDelegate {
) -> Self {
let font_family_cache = FontFamilyCache::global(cx);
- let fonts: Vec<SharedString> = font_family_cache
- .list_font_families(cx)
- .into_iter()
- .collect();
-
+ let fonts = font_family_cache
+ .try_list_font_families()
+ .unwrap_or_else(|| vec![current_font.clone()]);
let selected_index = fonts
.iter()
.position(|font| *font == current_font)
.unwrap_or(0);
+ let filtered_fonts = fonts
+ .iter()
+ .enumerate()
+ .map(|(index, font)| StringMatch {
+ candidate_id: index,
+ string: font.to_string(),
+ positions: Vec::new(),
+ score: 0.0,
+ })
+ .collect();
+
Self {
- fonts: fonts.clone(),
- filtered_fonts: fonts
- .iter()
- .enumerate()
- .map(|(index, font)| StringMatch {
- candidate_id: index,
- string: font.to_string(),
- positions: Vec::new(),
- score: 0.0,
- })
- .collect(),
+ fonts,
+ filtered_fonts,
selected_index,
current_font,
on_font_changed: Arc::new(on_font_changed),
@@ -605,8 +605,8 @@ fn render_popular_settings_section(
window: &mut Window,
cx: &mut App,
) -> impl IntoElement {
- const LIGATURE_TOOLTIP: &'static str =
- "Font ligatures combine two characters into one. For example, turning =/= into ≠.";
+ const LIGATURE_TOOLTIP: &str =
+ "Font ligatures combine two characters into one. For example, turning != into ≠.";
v_flex()
.pt_6()
@@ -242,12 +242,25 @@ struct Onboarding {
impl Onboarding {
fn new(workspace: &Workspace, cx: &mut App) -> Entity<Self> {
- cx.new(|cx| Self {
- workspace: workspace.weak_handle(),
- focus_handle: cx.focus_handle(),
- selected_page: SelectedPage::Basics,
- user_store: workspace.user_store().clone(),
- _settings_subscription: cx.observe_global::<SettingsStore>(move |_, cx| cx.notify()),
+ let font_family_cache = theme::FontFamilyCache::global(cx);
+
+ cx.new(|cx| {
+ cx.spawn(async move |this, cx| {
+ font_family_cache.prefetch(cx).await;
+ this.update(cx, |_, cx| {
+ cx.notify();
+ })
+ })
+ .detach();
+
+ Self {
+ workspace: workspace.weak_handle(),
+ focus_handle: cx.focus_handle(),
+ selected_page: SelectedPage::Basics,
+ user_store: workspace.user_store().clone(),
+ _settings_subscription: cx
+ .observe_global::<SettingsStore>(move |_, cx| cx.notify()),
+ }
})
}
@@ -476,6 +489,7 @@ impl Onboarding {
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_, window, cx| {
+ telemetry::event!("Welcome Sign In Clicked");
window.dispatch_action(SignIn.boxed_clone(), cx);
})
.into_any_element()
@@ -850,13 +864,19 @@ impl workspace::SerializableItem for Onboarding {
}
mod persistence {
- use db::{define_connection, query, sqlez_macros::sql};
+ use db::{
+ query,
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+ };
use workspace::WorkspaceDb;
- define_connection! {
- pub static ref ONBOARDING_PAGES: OnboardingPagesDb<WorkspaceDb> =
- &[
- sql!(
+ pub struct OnboardingPagesDb(ThreadSafeConnection);
+
+ impl Domain for OnboardingPagesDb {
+ const NAME: &str = stringify!(OnboardingPagesDb);
+
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE onboarding_pages (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -866,10 +886,11 @@ mod persistence {
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
- ),
- ];
+ )];
}
+ db::static_connection!(ONBOARDING_PAGES, OnboardingPagesDb, [WorkspaceDb]);
+
impl OnboardingPagesDb {
query! {
pub async fn save_onboarding_page(
@@ -206,7 +206,7 @@ impl ThemePreviewTile {
sidebar_width,
skeleton_height.clone(),
))
- .child(Self::render_pane(seed, theme, skeleton_height.clone()))
+ .child(Self::render_pane(seed, theme, skeleton_height))
}
fn render_borderless(seed: f32, theme: Arc<Theme>) -> impl IntoElement {
@@ -260,7 +260,7 @@ impl ThemePreviewTile {
.overflow_hidden()
.child(div().size_full().child(Self::render_editor(
seed,
- theme.clone(),
+ theme,
sidebar_width,
Self::SKELETON_HEIGHT_DEFAULT,
)))
@@ -329,9 +329,9 @@ impl Component for ThemePreviewTile {
let themes_to_preview = vec![
one_dark.clone().ok(),
- one_light.clone().ok(),
- gruvbox_dark.clone().ok(),
- gruvbox_light.clone().ok(),
+ one_light.ok(),
+ gruvbox_dark.ok(),
+ gruvbox_light.ok(),
]
.into_iter()
.flatten()
@@ -348,7 +348,7 @@ impl Component for ThemePreviewTile {
div()
.w(px(240.))
.h(px(180.))
- .child(ThemePreviewTile::new(one_dark.clone(), 0.42))
+ .child(ThemePreviewTile::new(one_dark, 0.42))
.into_any_element(),
)])]
} else {
@@ -362,13 +362,12 @@ impl Component for ThemePreviewTile {
.gap_4()
.children(
themes_to_preview
- .iter()
- .enumerate()
- .map(|(_, theme)| {
+ .into_iter()
+ .map(|theme| {
div()
.w(px(200.))
.h(px(140.))
- .child(ThemePreviewTile::new(theme.clone(), 0.42))
+ .child(ThemePreviewTile::new(theme, 0.42))
})
.collect::<Vec<_>>(),
)
@@ -414,13 +414,19 @@ impl workspace::SerializableItem for WelcomePage {
}
mod persistence {
- use db::{define_connection, query, sqlez_macros::sql};
+ use db::{
+ query,
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+ };
use workspace::WorkspaceDb;
- define_connection! {
- pub static ref WELCOME_PAGES: WelcomePagesDb<WorkspaceDb> =
- &[
- sql!(
+ pub struct WelcomePagesDb(ThreadSafeConnection);
+
+ impl Domain for WelcomePagesDb {
+ const NAME: &str = stringify!(WelcomePagesDb);
+
+ const MIGRATIONS: &[&str] = (&[sql!(
CREATE TABLE welcome_pages (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -430,10 +436,11 @@ mod persistence {
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
- ),
- ];
+ )]);
}
+ db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]);
+
impl WelcomePagesDb {
query! {
pub async fn save_welcome_page(
@@ -269,11 +269,12 @@ pub struct Request {
}
#[derive(Debug, Serialize, Deserialize)]
-#[serde(untagged)]
+#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
Auto,
Required,
None,
+ #[serde(untagged)]
Other(ToolDefinition),
}
@@ -446,7 +447,6 @@ pub enum ResponseStreamResult {
#[derive(Serialize, Deserialize, Debug)]
pub struct ResponseStreamEvent {
- pub model: String,
pub choices: Vec<ChoiceDelta>,
pub usage: Option<Usage>,
}
@@ -462,7 +462,7 @@ pub async fn stream_completion(
.method(Method::POST)
.uri(uri)
.header("Content-Type", "application/json")
- .header("Authorization", format!("Bearer {}", api_key));
+ .header("Authorization", format!("Bearer {}", api_key.trim()));
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
@@ -566,7 +566,7 @@ pub fn embed<'a>(
.method(Method::POST)
.uri(uri)
.header("Content-Type", "application/json")
- .header("Authorization", format!("Bearer {}", api_key))
+ .header("Authorization", format!("Bearer {}", api_key.trim()))
.body(body)
.map(|request| client.send(request));
@@ -22,4 +22,6 @@ http_client.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
+thiserror.workspace = true
+strum.workspace = true
workspace-hack.workspace = true
@@ -1,12 +1,31 @@
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Result, anyhow};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
-use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
+use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http};
use serde::{Deserialize, Serialize};
use serde_json::Value;
-use std::convert::TryFrom;
+use std::{convert::TryFrom, io, time::Duration};
+use strum::EnumString;
+use thiserror::Error;
pub const OPEN_ROUTER_API_URL: &str = "https://openrouter.ai/api/v1";
+fn extract_retry_after(headers: &http::HeaderMap) -> Option<std::time::Duration> {
+ if let Some(reset) = headers.get("X-RateLimit-Reset") {
+ if let Ok(s) = reset.to_str() {
+ if let Ok(epoch_ms) = s.parse::<u64>() {
+ let now = std::time::SystemTime::now()
+ .duration_since(std::time::UNIX_EPOCH)
+ .unwrap_or_default()
+ .as_millis() as u64;
+ if epoch_ms > now {
+ return Some(std::time::Duration::from_millis(epoch_ms - now));
+ }
+ }
+ }
+ }
+ None
+}
+
fn is_none_or_empty<T: AsRef<[U]>, U>(opt: &Option<T>) -> bool {
opt.as_ref().is_none_or(|v| v.as_ref().is_empty())
}
@@ -413,76 +432,12 @@ pub struct ModelArchitecture {
pub input_modalities: Vec<String>,
}
-pub async fn complete(
- client: &dyn HttpClient,
- api_url: &str,
- api_key: &str,
- request: Request,
-) -> Result<Response> {
- let uri = format!("{api_url}/chat/completions");
- let request_builder = HttpRequest::builder()
- .method(Method::POST)
- .uri(uri)
- .header("Content-Type", "application/json")
- .header("Authorization", format!("Bearer {}", api_key))
- .header("HTTP-Referer", "https://zed.dev")
- .header("X-Title", "Zed Editor");
-
- let mut request_body = request;
- request_body.stream = false;
-
- let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request_body)?))?;
- let mut response = client.send(request).await?;
-
- if response.status().is_success() {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
- let response: Response = serde_json::from_str(&body)?;
- Ok(response)
- } else {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
-
- #[derive(Deserialize)]
- struct OpenRouterResponse {
- error: OpenRouterError,
- }
-
- #[derive(Deserialize)]
- struct OpenRouterError {
- message: String,
- #[serde(default)]
- code: String,
- }
-
- match serde_json::from_str::<OpenRouterResponse>(&body) {
- Ok(response) if !response.error.message.is_empty() => {
- let error_message = if !response.error.code.is_empty() {
- format!("{}: {}", response.error.code, response.error.message)
- } else {
- response.error.message
- };
-
- Err(anyhow!(
- "Failed to connect to OpenRouter API: {}",
- error_message
- ))
- }
- _ => Err(anyhow!(
- "Failed to connect to OpenRouter API: {} {}",
- response.status(),
- body,
- )),
- }
- }
-}
-
pub async fn stream_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
-) -> Result<BoxStream<'static, Result<ResponseStreamEvent>>> {
+) -> Result<BoxStream<'static, Result<ResponseStreamEvent, OpenRouterError>>, OpenRouterError> {
let uri = format!("{api_url}/chat/completions");
let request_builder = HttpRequest::builder()
.method(Method::POST)
@@ -492,8 +447,15 @@ pub async fn stream_completion(
.header("HTTP-Referer", "https://zed.dev")
.header("X-Title", "Zed Editor");
- let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
- let mut response = client.send(request).await?;
+ let request = request_builder
+ .body(AsyncBody::from(
+ serde_json::to_string(&request).map_err(OpenRouterError::SerializeRequest)?,
+ ))
+ .map_err(OpenRouterError::BuildRequestBody)?;
+ let mut response = client
+ .send(request)
+ .await
+ .map_err(OpenRouterError::HttpSend)?;
if response.status().is_success() {
let reader = BufReader::new(response.into_body());
@@ -513,86 +475,89 @@ pub async fn stream_completion(
match serde_json::from_str::<ResponseStreamEvent>(line) {
Ok(response) => Some(Ok(response)),
Err(error) => {
- #[derive(Deserialize)]
- struct ErrorResponse {
- error: String,
- }
-
- match serde_json::from_str::<ErrorResponse>(line) {
- Ok(err_response) => Some(Err(anyhow!(err_response.error))),
- Err(_) => {
- if line.trim().is_empty() {
- None
- } else {
- Some(Err(anyhow!(
- "Failed to parse response: {}. Original content: '{}'",
- error, line
- )))
- }
- }
+ if line.trim().is_empty() {
+ None
+ } else {
+ Some(Err(OpenRouterError::DeserializeResponse(error)))
}
}
}
}
}
- Err(error) => Some(Err(anyhow!(error))),
+ Err(error) => Some(Err(OpenRouterError::ReadResponse(error))),
}
})
.boxed())
} else {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
-
- #[derive(Deserialize)]
- struct OpenRouterResponse {
- error: OpenRouterError,
- }
-
- #[derive(Deserialize)]
- struct OpenRouterError {
- message: String,
- #[serde(default)]
- code: String,
- }
-
- match serde_json::from_str::<OpenRouterResponse>(&body) {
- Ok(response) if !response.error.message.is_empty() => {
- let error_message = if !response.error.code.is_empty() {
- format!("{}: {}", response.error.code, response.error.message)
- } else {
- response.error.message
- };
+ let code = ApiErrorCode::from_status(response.status().as_u16());
- Err(anyhow!(
- "Failed to connect to OpenRouter API: {}",
- error_message
- ))
+ let mut body = String::new();
+ response
+ .body_mut()
+ .read_to_string(&mut body)
+ .await
+ .map_err(OpenRouterError::ReadResponse)?;
+
+ let error_response = match serde_json::from_str::<OpenRouterErrorResponse>(&body) {
+ Ok(OpenRouterErrorResponse { error }) => error,
+ Err(_) => OpenRouterErrorBody {
+ code: response.status().as_u16(),
+ message: body,
+ metadata: None,
+ },
+ };
+
+ match code {
+ ApiErrorCode::RateLimitError => {
+ let retry_after = extract_retry_after(response.headers());
+ Err(OpenRouterError::RateLimit {
+ retry_after: retry_after.unwrap_or_else(|| std::time::Duration::from_secs(60)),
+ })
+ }
+ ApiErrorCode::OverloadedError => {
+ let retry_after = extract_retry_after(response.headers());
+ Err(OpenRouterError::ServerOverloaded { retry_after })
}
- _ => Err(anyhow!(
- "Failed to connect to OpenRouter API: {} {}",
- response.status(),
- body,
- )),
+ _ => Err(OpenRouterError::ApiError(ApiError {
+ code: code,
+ message: error_response.message,
+ })),
}
}
}
-pub async fn list_models(client: &dyn HttpClient, api_url: &str) -> Result<Vec<Model>> {
- let uri = format!("{api_url}/models");
+pub async fn list_models(
+ client: &dyn HttpClient,
+ api_url: &str,
+ api_key: &str,
+) -> Result<Vec<Model>, OpenRouterError> {
+ let uri = format!("{api_url}/models/user");
let request_builder = HttpRequest::builder()
.method(Method::GET)
.uri(uri)
- .header("Accept", "application/json");
+ .header("Accept", "application/json")
+ .header("Authorization", format!("Bearer {}", api_key))
+ .header("HTTP-Referer", "https://zed.dev")
+ .header("X-Title", "Zed Editor");
- let request = request_builder.body(AsyncBody::default())?;
- let mut response = client.send(request).await?;
+ let request = request_builder
+ .body(AsyncBody::default())
+ .map_err(OpenRouterError::BuildRequestBody)?;
+ let mut response = client
+ .send(request)
+ .await
+ .map_err(OpenRouterError::HttpSend)?;
let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
+ response
+ .body_mut()
+ .read_to_string(&mut body)
+ .await
+ .map_err(OpenRouterError::ReadResponse)?;
if response.status().is_success() {
let response: ListModelsResponse =
- serde_json::from_str(&body).context("Unable to parse OpenRouter models response")?;
+ serde_json::from_str(&body).map_err(OpenRouterError::DeserializeResponse)?;
let models = response
.data
@@ -637,10 +602,141 @@ pub async fn list_models(client: &dyn HttpClient, api_url: &str) -> Result<Vec<M
Ok(models)
} else {
- Err(anyhow!(
- "Failed to connect to OpenRouter API: {} {}",
- response.status(),
- body,
- ))
+ let code = ApiErrorCode::from_status(response.status().as_u16());
+
+ let mut body = String::new();
+ response
+ .body_mut()
+ .read_to_string(&mut body)
+ .await
+ .map_err(OpenRouterError::ReadResponse)?;
+
+ let error_response = match serde_json::from_str::<OpenRouterErrorResponse>(&body) {
+ Ok(OpenRouterErrorResponse { error }) => error,
+ Err(_) => OpenRouterErrorBody {
+ code: response.status().as_u16(),
+ message: body,
+ metadata: None,
+ },
+ };
+
+ match code {
+ ApiErrorCode::RateLimitError => {
+ let retry_after = extract_retry_after(response.headers());
+ Err(OpenRouterError::RateLimit {
+ retry_after: retry_after.unwrap_or_else(|| std::time::Duration::from_secs(60)),
+ })
+ }
+ ApiErrorCode::OverloadedError => {
+ let retry_after = extract_retry_after(response.headers());
+ Err(OpenRouterError::ServerOverloaded { retry_after })
+ }
+ _ => Err(OpenRouterError::ApiError(ApiError {
+ code: code,
+ message: error_response.message,
+ })),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum OpenRouterError {
+ /// Failed to serialize the HTTP request body to JSON
+ SerializeRequest(serde_json::Error),
+
+ /// Failed to construct the HTTP request body
+ BuildRequestBody(http::Error),
+
+ /// Failed to send the HTTP request
+ HttpSend(anyhow::Error),
+
+ /// Failed to deserialize the response from JSON
+ DeserializeResponse(serde_json::Error),
+
+ /// Failed to read from response stream
+ ReadResponse(io::Error),
+
+ /// Rate limit exceeded
+ RateLimit { retry_after: Duration },
+
+ /// Server overloaded
+ ServerOverloaded { retry_after: Option<Duration> },
+
+ /// API returned an error response
+ ApiError(ApiError),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct OpenRouterErrorBody {
+ pub code: u16,
+ pub message: String,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub metadata: Option<std::collections::HashMap<String, serde_json::Value>>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct OpenRouterErrorResponse {
+ pub error: OpenRouterErrorBody,
+}
+
+#[derive(Debug, Serialize, Deserialize, Error)]
+#[error("OpenRouter API Error: {code}: {message}")]
+pub struct ApiError {
+ pub code: ApiErrorCode,
+ pub message: String,
+}
+
+/// An OpenRouter API error code.
+/// <https://openrouter.ai/docs/api-reference/errors#error-codes>
+#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString, Serialize, Deserialize)]
+#[strum(serialize_all = "snake_case")]
+pub enum ApiErrorCode {
+ /// 400: Bad Request (invalid or missing params, CORS)
+ InvalidRequestError,
+ /// 401: Invalid credentials (OAuth session expired, disabled/invalid API key)
+ AuthenticationError,
+ /// 402: Your account or API key has insufficient credits. Add more credits and retry the request.
+ PaymentRequiredError,
+ /// 403: Your chosen model requires moderation and your input was flagged
+ PermissionError,
+ /// 408: Your request timed out
+ RequestTimedOut,
+ /// 429: You are being rate limited
+ RateLimitError,
+ /// 502: Your chosen model is down or we received an invalid response from it
+ ApiError,
+ /// 503: There is no available model provider that meets your routing requirements
+ OverloadedError,
+}
+
+impl std::fmt::Display for ApiErrorCode {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let s = match self {
+ ApiErrorCode::InvalidRequestError => "invalid_request_error",
+ ApiErrorCode::AuthenticationError => "authentication_error",
+ ApiErrorCode::PaymentRequiredError => "payment_required_error",
+ ApiErrorCode::PermissionError => "permission_error",
+ ApiErrorCode::RequestTimedOut => "request_timed_out",
+ ApiErrorCode::RateLimitError => "rate_limit_error",
+ ApiErrorCode::ApiError => "api_error",
+ ApiErrorCode::OverloadedError => "overloaded_error",
+ };
+ write!(f, "{s}")
+ }
+}
+
+impl ApiErrorCode {
+ pub fn from_status(status: u16) -> Self {
+ match status {
+ 400 => ApiErrorCode::InvalidRequestError,
+ 401 => ApiErrorCode::AuthenticationError,
+ 402 => ApiErrorCode::PaymentRequiredError,
+ 403 => ApiErrorCode::PermissionError,
+ 408 => ApiErrorCode::RequestTimedOut,
+ 429 => ApiErrorCode::RateLimitError,
+ 502 => ApiErrorCode::ApiError,
+ 503 => ApiErrorCode::OverloadedError,
+ _ => ApiErrorCode::ApiError,
+ }
}
}
@@ -733,7 +733,8 @@ impl OutlinePanel {
) -> Entity<Self> {
let project = workspace.project().clone();
let workspace_handle = cx.entity().downgrade();
- let outline_panel = cx.new(|cx| {
+
+ cx.new(|cx| {
let filter_editor = cx.new(|cx| {
let mut editor = Editor::single_line(window, cx);
editor.set_placeholder_text("Filter...", cx);
@@ -912,9 +913,7 @@ impl OutlinePanel {
outline_panel.replace_active_editor(item, editor, window, cx);
}
outline_panel
- });
-
- outline_panel
+ })
}
fn serialization_key(workspace: &Workspace) -> Option<String> {
@@ -2624,7 +2623,7 @@ impl OutlinePanel {
}
fn entry_name(&self, worktree_id: &WorktreeId, entry: &Entry, cx: &App) -> String {
- let name = match self.project.read(cx).worktree_for_id(*worktree_id, cx) {
+ match self.project.read(cx).worktree_for_id(*worktree_id, cx) {
Some(worktree) => {
let worktree = worktree.read(cx);
match worktree.snapshot().root_entry() {
@@ -2645,8 +2644,7 @@ impl OutlinePanel {
}
}
None => file_name(entry.path.as_ref()),
- };
- name
+ }
}
fn update_fs_entries(
@@ -2681,7 +2679,8 @@ impl OutlinePanel {
new_collapsed_entries = outline_panel.collapsed_entries.clone();
new_unfolded_dirs = outline_panel.unfolded_dirs.clone();
let multi_buffer_snapshot = active_multi_buffer.read(cx).snapshot(cx);
- let buffer_excerpts = multi_buffer_snapshot.excerpts().fold(
+
+ multi_buffer_snapshot.excerpts().fold(
HashMap::default(),
|mut buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| {
let buffer_id = buffer_snapshot.remote_id();
@@ -2728,8 +2727,7 @@ impl OutlinePanel {
);
buffer_excerpts
},
- );
- buffer_excerpts
+ )
}) else {
return;
};
@@ -4395,12 +4393,13 @@ impl OutlinePanel {
})
.filter(|(match_range, _)| {
let editor = active_editor.read(cx);
- if let Some(buffer_id) = match_range.start.buffer_id
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start)
&& editor.is_buffer_folded(buffer_id, cx)
{
return false;
}
- if let Some(buffer_id) = match_range.start.buffer_id
+ if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end)
&& editor.is_buffer_folded(buffer_id, cx)
{
return false;
@@ -4807,7 +4806,7 @@ impl OutlinePanel {
.with_compute_indents_fn(cx.entity(), |outline_panel, range, _, _| {
let entries = outline_panel.cached_entries.get(range);
if let Some(entries) = entries {
- entries.into_iter().map(|item| item.depth).collect()
+ entries.iter().map(|item| item.depth).collect()
} else {
smallvec::SmallVec::new()
}
@@ -5093,7 +5092,7 @@ impl Panel for OutlinePanel {
impl Focusable for OutlinePanel {
fn focus_handle(&self, cx: &App) -> FocusHandle {
- self.filter_editor.focus_handle(cx).clone()
+ self.filter_editor.focus_handle(cx)
}
}
@@ -5103,9 +5102,9 @@ impl EventEmitter<PanelEvent> for OutlinePanel {}
impl Render for OutlinePanel {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let (is_local, is_via_ssh) = self
- .project
- .read_with(cx, |project, _| (project.is_local(), project.is_via_ssh()));
+ let (is_local, is_via_ssh) = self.project.read_with(cx, |project, _| {
+ (project.is_local(), project.is_via_remote_server())
+ });
let query = self.query(cx);
let pinned = self.pinned;
let settings = OutlinePanelSettings::get_global(cx);
@@ -5340,7 +5339,7 @@ fn subscribe_for_editor_events(
}
EditorEvent::Reparsed(buffer_id) => {
if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) {
- for (_, excerpt) in excerpts {
+ for excerpt in excerpts.values_mut() {
excerpt.invalidate_outlines();
}
}
@@ -5403,8 +5402,9 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
- populate_with_test_ra_project(&fs, "/rust-analyzer").await;
- let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await;
+ let root = path!("/rust-analyzer");
+ populate_with_test_ra_project(&fs, root).await;
+ let project = Project::test(fs.clone(), [Path::new(root)], cx).await;
project.read_with(cx, |project, _| {
project.languages().add(Arc::new(rust_lang()))
});
@@ -5449,15 +5449,16 @@ mod tests {
});
});
- let all_matches = r#"/rust-analyzer/
+ let all_matches = format!(
+ r#"{root}/
crates/
ide/src/
inlay_hints/
fn_lifetime_fn.rs
- search: match config.param_names_for_lifetime_elision_hints {
- search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
- search: Some(it) if config.param_names_for_lifetime_elision_hints => {
- search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ search: match config.param_names_for_lifetime_elision_hints {{
+ search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{
+ search: Some(it) if config.param_names_for_lifetime_elision_hints => {{
+ search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }},
inlay_hints.rs
search: pub param_names_for_lifetime_elision_hints: bool,
search: param_names_for_lifetime_elision_hints: self
@@ -5468,7 +5469,9 @@ mod tests {
analysis_stats.rs
search: param_names_for_lifetime_elision_hints: true,
config.rs
- search: param_names_for_lifetime_elision_hints: self"#;
+ search: param_names_for_lifetime_elision_hints: self"#
+ );
+
let select_first_in_all_matches = |line_to_select: &str| {
assert!(all_matches.contains(line_to_select));
all_matches.replacen(
@@ -5525,7 +5528,7 @@ mod tests {
cx,
),
format!(
- r#"/rust-analyzer/
+ r#"{root}/
crates/
ide/src/
inlay_hints/
@@ -5595,7 +5598,7 @@ mod tests {
cx,
),
format!(
- r#"/rust-analyzer/
+ r#"{root}/
crates/
ide/src/{SELECTED_MARKER}
rust-analyzer/src/
@@ -5632,8 +5635,9 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
- populate_with_test_ra_project(&fs, "/rust-analyzer").await;
- let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await;
+ let root = path!("/rust-analyzer");
+ populate_with_test_ra_project(&fs, root).await;
+ let project = Project::test(fs.clone(), [Path::new(root)], cx).await;
project.read_with(cx, |project, _| {
project.languages().add(Arc::new(rust_lang()))
});
@@ -5677,15 +5681,16 @@ mod tests {
);
});
});
- let all_matches = r#"/rust-analyzer/
+ let all_matches = format!(
+ r#"{root}/
crates/
ide/src/
inlay_hints/
fn_lifetime_fn.rs
- search: match config.param_names_for_lifetime_elision_hints {
- search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
- search: Some(it) if config.param_names_for_lifetime_elision_hints => {
- search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ search: match config.param_names_for_lifetime_elision_hints {{
+ search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{
+ search: Some(it) if config.param_names_for_lifetime_elision_hints => {{
+ search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }},
inlay_hints.rs
search: pub param_names_for_lifetime_elision_hints: bool,
search: param_names_for_lifetime_elision_hints: self
@@ -5696,7 +5701,8 @@ mod tests {
analysis_stats.rs
search: param_names_for_lifetime_elision_hints: true,
config.rs
- search: param_names_for_lifetime_elision_hints: self"#;
+ search: param_names_for_lifetime_elision_hints: self"#
+ );
cx.executor()
.advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
@@ -5769,8 +5775,9 @@ mod tests {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
- populate_with_test_ra_project(&fs, path!("/rust-analyzer")).await;
- let project = Project::test(fs.clone(), [path!("/rust-analyzer").as_ref()], cx).await;
+ let root = path!("/rust-analyzer");
+ populate_with_test_ra_project(&fs, root).await;
+ let project = Project::test(fs.clone(), [Path::new(root)], cx).await;
project.read_with(cx, |project, _| {
project.languages().add(Arc::new(rust_lang()))
});
@@ -5814,9 +5821,8 @@ mod tests {
);
});
});
- let root_path = format!("{}/", path!("/rust-analyzer"));
let all_matches = format!(
- r#"{root_path}
+ r#"{root}/
crates/
ide/src/
inlay_hints/
@@ -5978,7 +5984,7 @@ mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/root",
+ path!("/root"),
json!({
"one": {
"a.txt": "aaa aaa"
@@ -5990,7 +5996,7 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), [Path::new("/root/one")], cx).await;
+ let project = Project::test(fs.clone(), [Path::new(path!("/root/one"))], cx).await;
let workspace = add_outline_panel(&project, cx).await;
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let outline_panel = outline_panel(&workspace, cx);
@@ -6001,7 +6007,7 @@ mod tests {
let items = workspace
.update(cx, |workspace, window, cx| {
workspace.open_paths(
- vec![PathBuf::from("/root/two")],
+ vec![PathBuf::from(path!("/root/two"))],
OpenOptions {
visible: Some(OpenVisible::OnlyDirectories),
..Default::default()
@@ -6065,13 +6071,17 @@ mod tests {
outline_panel.selected_entry(),
cx,
),
- r#"/root/one/
+ format!(
+ r#"{}/
a.txt
search: aaa aaa <==== selected
search: aaa aaa
-/root/two/
+{}/
b.txt
- search: a aaa"#
+ search: a aaa"#,
+ path!("/root/one"),
+ path!("/root/two"),
+ ),
);
});
@@ -6091,11 +6101,15 @@ mod tests {
outline_panel.selected_entry(),
cx,
),
- r#"/root/one/
+ format!(
+ r#"{}/
a.txt <==== selected
-/root/two/
+{}/
b.txt
- search: a aaa"#
+ search: a aaa"#,
+ path!("/root/one"),
+ path!("/root/two"),
+ ),
);
});
@@ -6115,9 +6129,13 @@ mod tests {
outline_panel.selected_entry(),
cx,
),
- r#"/root/one/
+ format!(
+ r#"{}/
a.txt
-/root/two/ <==== selected"#
+{}/ <==== selected"#,
+ path!("/root/one"),
+ path!("/root/two"),
+ ),
);
});
@@ -6136,11 +6154,15 @@ mod tests {
outline_panel.selected_entry(),
cx,
),
- r#"/root/one/
+ format!(
+ r#"{}/
a.txt
-/root/two/ <==== selected
+{}/ <==== selected
b.txt
- search: a aaa"#
+ search: a aaa"#,
+ path!("/root/one"),
+ path!("/root/two"),
+ )
);
});
}
@@ -6166,7 +6188,7 @@ struct OutlineEntryExcerpt {
}),
)
.await;
- let project = Project::test(fs.clone(), [root.as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [Path::new(root)], cx).await;
project.read_with(cx, |project, _| {
project.languages().add(Arc::new(
rust_lang()
@@ -6509,7 +6531,7 @@ outline: struct OutlineEntryExcerpt
async fn test_frontend_repo_structure(cx: &mut TestAppContext) {
init_test(cx);
- let root = "/frontend-project";
+ let root = path!("/frontend-project");
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
root,
@@ -6546,7 +6568,7 @@ outline: struct OutlineEntryExcerpt
}),
)
.await;
- let project = Project::test(fs.clone(), [root.as_ref()], cx).await;
+ let project = Project::test(fs.clone(), [Path::new(root)], cx).await;
let workspace = add_outline_panel(&project, cx).await;
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let outline_panel = outline_panel(&workspace, cx);
@@ -6600,10 +6622,11 @@ outline: struct OutlineEntryExcerpt
outline_panel.selected_entry(),
cx,
),
- r#"/frontend-project/
+ format!(
+ r#"{root}/
public/lottie/
syntax-tree.json
- search: { "something": "static" } <==== selected
+ search: {{ "something": "static" }} <==== selected
src/
app/(site)/
(about)/jobs/[slug]/
@@ -6615,6 +6638,7 @@ outline: struct OutlineEntryExcerpt
components/
ErrorBoundary.tsx
search: static"#
+ )
);
});
@@ -6637,15 +6661,17 @@ outline: struct OutlineEntryExcerpt
outline_panel.selected_entry(),
cx,
),
- r#"/frontend-project/
+ format!(
+ r#"{root}/
public/lottie/
syntax-tree.json
- search: { "something": "static" }
+ search: {{ "something": "static" }}
src/
app/(site)/ <==== selected
components/
ErrorBoundary.tsx
search: static"#
+ )
);
});
@@ -6665,15 +6691,17 @@ outline: struct OutlineEntryExcerpt
outline_panel.selected_entry(),
cx,
),
- r#"/frontend-project/
+ format!(
+ r#"{root}/
public/lottie/
syntax-tree.json
- search: { "something": "static" }
+ search: {{ "something": "static" }}
src/
app/(site)/
components/
ErrorBoundary.tsx
search: static <==== selected"#
+ )
);
});
@@ -6697,14 +6725,16 @@ outline: struct OutlineEntryExcerpt
outline_panel.selected_entry(),
cx,
),
- r#"/frontend-project/
+ format!(
+ r#"{root}/
public/lottie/
syntax-tree.json
- search: { "something": "static" }
+ search: {{ "something": "static" }}
src/
app/(site)/
components/
ErrorBoundary.tsx <==== selected"#
+ )
);
});
@@ -6728,15 +6758,17 @@ outline: struct OutlineEntryExcerpt
outline_panel.selected_entry(),
cx,
),
- r#"/frontend-project/
+ format!(
+ r#"{root}/
public/lottie/
syntax-tree.json
- search: { "something": "static" }
+ search: {{ "something": "static" }}
src/
app/(site)/
components/
ErrorBoundary.tsx <==== selected
search: static"#
+ )
);
});
}
@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
@@ -61,7 +61,8 @@ pub struct IndentGuidesSettingsContent {
pub show: Option<ShowIndentGuides>,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "outline_panel")]
pub struct OutlinePanelSettingsContent {
/// Whether to show the outline panel button in the status bar.
///
@@ -116,8 +117,6 @@ pub struct OutlinePanelSettingsContent {
}
impl Settings for OutlinePanelSettings {
- const KEY: Option<&'static str> = Some("outline_panel");
-
type FileContent = OutlinePanelSettingsContent;
fn load(
@@ -52,7 +52,7 @@ impl RenderOnce for PanelTab {
pub fn panel_button(label: impl Into<SharedString>) -> ui::Button {
let label = label.into();
- let id = ElementId::Name(label.clone().to_lowercase().replace(' ', "_").into());
+ let id = ElementId::Name(label.to_lowercase().replace(' ', "_").into());
ui::Button::new(id, label)
.label_size(ui::LabelSize::Small)
.icon_size(ui::IconSize::Small)
@@ -33,6 +33,11 @@ pub fn remote_server_dir_relative() -> &'static Path {
Path::new(".zed_server")
}
+/// Returns the relative path to the zed_wsl_server directory on the wsl host.
+pub fn remote_wsl_server_dir_relative() -> &'static Path {
+ Path::new(".zed_wsl_server")
+}
+
/// Sets a custom directory for all user data, overriding the default data directory.
/// This function must be called before any other path operations that depend on the data directory.
/// The directory's path will be canonicalized to an absolute path by a blocking FS operation.
@@ -41,7 +46,7 @@ pub fn remote_server_dir_relative() -> &'static Path {
/// # Arguments
///
/// * `dir` - The path to use as the custom data directory. This will be used as the base
-/// directory for all user data, including databases, extensions, and logs.
+/// directory for all user data, including databases, extensions, and logs.
///
/// # Returns
///
@@ -63,7 +68,7 @@ pub fn set_custom_data_dir(dir: &str) -> &'static PathBuf {
let abs_path = path
.canonicalize()
.expect("failed to canonicalize custom data directory's path to an absolute path");
- path = PathBuf::from(util::paths::SanitizedPath::from(abs_path))
+ path = util::paths::SanitizedPath::new(&abs_path).into()
}
std::fs::create_dir_all(&path).expect("failed to create custom data directory");
path
@@ -85,7 +85,7 @@ where
.menu(move |_window, _cx| Some(picker.clone()))
.trigger_with_tooltip(self.trigger, self.tooltip)
.anchor(self.anchor)
- .when_some(self.handle.clone(), |menu, handle| menu.with_handle(handle))
+ .when_some(self.handle, |menu, handle| menu.with_handle(handle))
.offset(gpui::Point {
x: px(0.0),
y: px(-2.0),
@@ -67,6 +67,7 @@ regex.workspace = true
remote.workspace = true
rpc.workspace = true
schemars.workspace = true
+semver.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
@@ -85,6 +86,7 @@ text.workspace = true
toml.workspace = true
url.workspace = true
util.workspace = true
+watch.workspace = true
which.workspace = true
worktree.workspace = true
zlog.workspace = true
@@ -0,0 +1,1091 @@
+use std::{
+ any::Any,
+ borrow::Borrow,
+ path::{Path, PathBuf},
+ str::FromStr as _,
+ sync::Arc,
+ time::Duration,
+};
+
+use anyhow::{Context as _, Result, bail};
+use collections::HashMap;
+use fs::{Fs, RemoveOptions, RenameOptions};
+use futures::StreamExt as _;
+use gpui::{
+ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
+};
+use node_runtime::NodeRuntime;
+use remote::RemoteClient;
+use rpc::{
+ AnyProtoClient, TypedEnvelope,
+ proto::{self, ToProto},
+};
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::{SettingsKey, SettingsSources, SettingsStore, SettingsUi};
+use util::{ResultExt as _, debug_panic};
+
+use crate::ProjectEnvironment;
+
+#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)]
+pub struct AgentServerCommand {
+ #[serde(rename = "command")]
+ pub path: PathBuf,
+ #[serde(default)]
+ pub args: Vec<String>,
+ pub env: Option<HashMap<String, String>>,
+}
+
+impl std::fmt::Debug for AgentServerCommand {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let filtered_env = self.env.as_ref().map(|env| {
+ env.iter()
+ .map(|(k, v)| {
+ (
+ k,
+ if util::redact::should_redact(k) {
+ "[REDACTED]"
+ } else {
+ v
+ },
+ )
+ })
+ .collect::<Vec<_>>()
+ });
+
+ f.debug_struct("AgentServerCommand")
+ .field("path", &self.path)
+ .field("args", &self.args)
+ .field("env", &filtered_env)
+ .finish()
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct ExternalAgentServerName(pub SharedString);
+
+impl std::fmt::Display for ExternalAgentServerName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
+
+impl From<&'static str> for ExternalAgentServerName {
+ fn from(value: &'static str) -> Self {
+ ExternalAgentServerName(value.into())
+ }
+}
+
+impl From<ExternalAgentServerName> for SharedString {
+ fn from(value: ExternalAgentServerName) -> Self {
+ value.0
+ }
+}
+
+impl Borrow<str> for ExternalAgentServerName {
+ fn borrow(&self) -> &str {
+ &self.0
+ }
+}
+
+pub trait ExternalAgentServer {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>>;
+
+ fn as_any_mut(&mut self) -> &mut dyn Any;
+}
+
+impl dyn ExternalAgentServer {
+ fn downcast_mut<T: ExternalAgentServer + 'static>(&mut self) -> Option<&mut T> {
+ self.as_any_mut().downcast_mut()
+ }
+}
+
+enum AgentServerStoreState {
+ Local {
+ node_runtime: NodeRuntime,
+ fs: Arc<dyn Fs>,
+ project_environment: Entity<ProjectEnvironment>,
+ downstream_client: Option<(u64, AnyProtoClient)>,
+ settings: Option<AllAgentServersSettings>,
+ _subscriptions: [Subscription; 1],
+ },
+ Remote {
+ project_id: u64,
+ upstream_client: Entity<RemoteClient>,
+ },
+ Collab,
+}
+
+pub struct AgentServerStore {
+ state: AgentServerStoreState,
+ external_agents: HashMap<ExternalAgentServerName, Box<dyn ExternalAgentServer>>,
+}
+
+pub struct AgentServersUpdated;
+
+impl EventEmitter<AgentServersUpdated> for AgentServerStore {}
+
+impl AgentServerStore {
+ pub fn init_remote(session: &AnyProtoClient) {
+ session.add_entity_message_handler(Self::handle_external_agents_updated);
+ session.add_entity_message_handler(Self::handle_loading_status_updated);
+ session.add_entity_message_handler(Self::handle_new_version_available);
+ }
+
+ pub fn init_headless(session: &AnyProtoClient) {
+ session.add_entity_request_handler(Self::handle_get_agent_server_command);
+ }
+
+ fn agent_servers_settings_changed(&mut self, cx: &mut Context<Self>) {
+ let AgentServerStoreState::Local {
+ node_runtime,
+ fs,
+ project_environment,
+ downstream_client,
+ settings: old_settings,
+ ..
+ } = &mut self.state
+ else {
+ debug_panic!(
+ "should not be subscribed to agent server settings changes in non-local project"
+ );
+ return;
+ };
+
+ let new_settings = cx
+ .global::<SettingsStore>()
+ .get::<AllAgentServersSettings>(None)
+ .clone();
+ if Some(&new_settings) == old_settings.as_ref() {
+ return;
+ }
+
+ self.external_agents.clear();
+ self.external_agents.insert(
+ GEMINI_NAME.into(),
+ Box::new(LocalGemini {
+ fs: fs.clone(),
+ node_runtime: node_runtime.clone(),
+ project_environment: project_environment.clone(),
+ custom_command: new_settings
+ .gemini
+ .clone()
+ .and_then(|settings| settings.custom_command()),
+ ignore_system_version: new_settings
+ .gemini
+ .as_ref()
+ .and_then(|settings| settings.ignore_system_version)
+ .unwrap_or(true),
+ }),
+ );
+ self.external_agents.insert(
+ CLAUDE_CODE_NAME.into(),
+ Box::new(LocalClaudeCode {
+ fs: fs.clone(),
+ node_runtime: node_runtime.clone(),
+ project_environment: project_environment.clone(),
+ custom_command: new_settings.claude.clone().map(|settings| settings.command),
+ }),
+ );
+ self.external_agents
+ .extend(new_settings.custom.iter().map(|(name, settings)| {
+ (
+ ExternalAgentServerName(name.clone()),
+ Box::new(LocalCustomAgent {
+ command: settings.command.clone(),
+ project_environment: project_environment.clone(),
+ }) as Box<dyn ExternalAgentServer>,
+ )
+ }));
+
+ *old_settings = Some(new_settings.clone());
+
+ if let Some((project_id, downstream_client)) = downstream_client {
+ downstream_client
+ .send(proto::ExternalAgentsUpdated {
+ project_id: *project_id,
+ names: self
+ .external_agents
+ .keys()
+ .map(|name| name.to_string())
+ .collect(),
+ })
+ .log_err();
+ }
+ cx.emit(AgentServersUpdated);
+ }
+
+ pub fn local(
+ node_runtime: NodeRuntime,
+ fs: Arc<dyn Fs>,
+ project_environment: Entity<ProjectEnvironment>,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let subscription = cx.observe_global::<SettingsStore>(|this, cx| {
+ this.agent_servers_settings_changed(cx);
+ });
+ let this = Self {
+ state: AgentServerStoreState::Local {
+ node_runtime,
+ fs,
+ project_environment,
+ downstream_client: None,
+ settings: None,
+ _subscriptions: [subscription],
+ },
+ external_agents: Default::default(),
+ };
+ cx.spawn(async move |this, cx| {
+ cx.background_executor().timer(Duration::from_secs(1)).await;
+ this.update(cx, |this, cx| {
+ this.agent_servers_settings_changed(cx);
+ })
+ .ok();
+ })
+ .detach();
+ this
+ }
+
+ pub(crate) fn remote(
+ project_id: u64,
+ upstream_client: Entity<RemoteClient>,
+ _cx: &mut Context<Self>,
+ ) -> Self {
+ // Set up the builtin agents here so they're immediately available in
+ // remote projects--we know that the HeadlessProject on the other end
+ // will have them.
+ let external_agents = [
+ (
+ GEMINI_NAME.into(),
+ Box::new(RemoteExternalAgentServer {
+ project_id,
+ upstream_client: upstream_client.clone(),
+ name: GEMINI_NAME.into(),
+ status_tx: None,
+ new_version_available_tx: None,
+ }) as Box<dyn ExternalAgentServer>,
+ ),
+ (
+ CLAUDE_CODE_NAME.into(),
+ Box::new(RemoteExternalAgentServer {
+ project_id,
+ upstream_client: upstream_client.clone(),
+ name: CLAUDE_CODE_NAME.into(),
+ status_tx: None,
+ new_version_available_tx: None,
+ }) as Box<dyn ExternalAgentServer>,
+ ),
+ ]
+ .into_iter()
+ .collect();
+
+ Self {
+ state: AgentServerStoreState::Remote {
+ project_id,
+ upstream_client,
+ },
+ external_agents,
+ }
+ }
+
+ pub(crate) fn collab(_cx: &mut Context<Self>) -> Self {
+ Self {
+ state: AgentServerStoreState::Collab,
+ external_agents: Default::default(),
+ }
+ }
+
+ pub fn shared(&mut self, project_id: u64, client: AnyProtoClient) {
+ match &mut self.state {
+ AgentServerStoreState::Local {
+ downstream_client, ..
+ } => {
+ client
+ .send(proto::ExternalAgentsUpdated {
+ project_id,
+ names: self
+ .external_agents
+ .keys()
+ .map(|name| name.to_string())
+ .collect(),
+ })
+ .log_err();
+ *downstream_client = Some((project_id, client));
+ }
+ AgentServerStoreState::Remote { .. } => {
+ debug_panic!(
+ "external agents over collab not implemented, remote project should not be shared"
+ );
+ }
+ AgentServerStoreState::Collab => {
+ debug_panic!("external agents over collab not implemented, should not be shared");
+ }
+ }
+ }
+
+ pub fn get_external_agent(
+ &mut self,
+ name: &ExternalAgentServerName,
+ ) -> Option<&mut (dyn ExternalAgentServer + 'static)> {
+ self.external_agents
+ .get_mut(name)
+ .map(|agent| agent.as_mut())
+ }
+
+ pub fn external_agents(&self) -> impl Iterator<Item = &ExternalAgentServerName> {
+ self.external_agents.keys()
+ }
+
+ async fn handle_get_agent_server_command(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GetAgentServerCommand>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::AgentServerCommand> {
+ let (command, root_dir, login) = this
+ .update(&mut cx, |this, cx| {
+ let AgentServerStoreState::Local {
+ downstream_client, ..
+ } = &this.state
+ else {
+ debug_panic!("should not receive GetAgentServerCommand in a non-local project");
+ bail!("unexpected GetAgentServerCommand request in a non-local project");
+ };
+ let agent = this
+ .external_agents
+ .get_mut(&*envelope.payload.name)
+ .with_context(|| format!("agent `{}` not found", envelope.payload.name))?;
+ let (status_tx, new_version_available_tx) = downstream_client
+ .clone()
+ .map(|(project_id, downstream_client)| {
+ let (status_tx, mut status_rx) = watch::channel(SharedString::from(""));
+ let (new_version_available_tx, mut new_version_available_rx) =
+ watch::channel(None);
+ cx.spawn({
+ let downstream_client = downstream_client.clone();
+ let name = envelope.payload.name.clone();
+ async move |_, _| {
+ while let Some(status) = status_rx.recv().await.ok() {
+ downstream_client.send(
+ proto::ExternalAgentLoadingStatusUpdated {
+ project_id,
+ name: name.clone(),
+ status: status.to_string(),
+ },
+ )?;
+ }
+ anyhow::Ok(())
+ }
+ })
+ .detach_and_log_err(cx);
+ cx.spawn({
+ let name = envelope.payload.name.clone();
+ async move |_, _| {
+ if let Some(version) =
+ new_version_available_rx.recv().await.ok().flatten()
+ {
+ downstream_client.send(
+ proto::NewExternalAgentVersionAvailable {
+ project_id,
+ name: name.clone(),
+ version,
+ },
+ )?;
+ }
+ anyhow::Ok(())
+ }
+ })
+ .detach_and_log_err(cx);
+ (status_tx, new_version_available_tx)
+ })
+ .unzip();
+ anyhow::Ok(agent.get_command(
+ envelope.payload.root_dir.as_deref(),
+ HashMap::default(),
+ status_tx,
+ new_version_available_tx,
+ &mut cx.to_async(),
+ ))
+ })??
+ .await?;
+ Ok(proto::AgentServerCommand {
+ path: command.path.to_string_lossy().to_string(),
+ args: command.args,
+ env: command
+ .env
+ .map(|env| env.into_iter().collect())
+ .unwrap_or_default(),
+ root_dir: root_dir,
+ login: login.map(|login| login.to_proto()),
+ })
+ }
+
+ async fn handle_external_agents_updated(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::ExternalAgentsUpdated>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let AgentServerStoreState::Remote {
+ project_id,
+ upstream_client,
+ } = &this.state
+ else {
+ debug_panic!(
+ "handle_external_agents_updated should not be called for a non-remote project"
+ );
+ bail!("unexpected ExternalAgentsUpdated message")
+ };
+
+ let mut status_txs = this
+ .external_agents
+ .iter_mut()
+ .filter_map(|(name, agent)| {
+ Some((
+ name.clone(),
+ agent
+ .downcast_mut::<RemoteExternalAgentServer>()?
+ .status_tx
+ .take(),
+ ))
+ })
+ .collect::<HashMap<_, _>>();
+ let mut new_version_available_txs = this
+ .external_agents
+ .iter_mut()
+ .filter_map(|(name, agent)| {
+ Some((
+ name.clone(),
+ agent
+ .downcast_mut::<RemoteExternalAgentServer>()?
+ .new_version_available_tx
+ .take(),
+ ))
+ })
+ .collect::<HashMap<_, _>>();
+
+ this.external_agents = envelope
+ .payload
+ .names
+ .into_iter()
+ .map(|name| {
+ let agent = RemoteExternalAgentServer {
+ project_id: *project_id,
+ upstream_client: upstream_client.clone(),
+ name: ExternalAgentServerName(name.clone().into()),
+ status_tx: status_txs.remove(&*name).flatten(),
+ new_version_available_tx: new_version_available_txs
+ .remove(&*name)
+ .flatten(),
+ };
+ (
+ ExternalAgentServerName(name.into()),
+ Box::new(agent) as Box<dyn ExternalAgentServer>,
+ )
+ })
+ .collect();
+ cx.emit(AgentServersUpdated);
+ Ok(())
+ })?
+ }
+
+ async fn handle_loading_status_updated(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::ExternalAgentLoadingStatusUpdated>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, _| {
+ if let Some(agent) = this.external_agents.get_mut(&*envelope.payload.name)
+ && let Some(agent) = agent.downcast_mut::<RemoteExternalAgentServer>()
+ && let Some(status_tx) = &mut agent.status_tx
+ {
+ status_tx.send(envelope.payload.status.into()).ok();
+ }
+ })
+ }
+
+ async fn handle_new_version_available(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::NewExternalAgentVersionAvailable>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, _| {
+ if let Some(agent) = this.external_agents.get_mut(&*envelope.payload.name)
+ && let Some(agent) = agent.downcast_mut::<RemoteExternalAgentServer>()
+ && let Some(new_version_available_tx) = &mut agent.new_version_available_tx
+ {
+ new_version_available_tx
+ .send(Some(envelope.payload.version))
+ .ok();
+ }
+ })
+ }
+}
+
+fn get_or_npm_install_builtin_agent(
+ binary_name: SharedString,
+ package_name: SharedString,
+ entrypoint_path: PathBuf,
+ minimum_version: Option<semver::Version>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available: Option<watch::Sender<Option<String>>>,
+ fs: Arc<dyn Fs>,
+ node_runtime: NodeRuntime,
+ cx: &mut AsyncApp,
+) -> Task<std::result::Result<AgentServerCommand, anyhow::Error>> {
+ cx.spawn(async move |cx| {
+ let node_path = node_runtime.binary_path().await?;
+ let dir = paths::data_dir()
+ .join("external_agents")
+ .join(binary_name.as_str());
+ fs.create_dir(&dir).await?;
+
+ let mut stream = fs.read_dir(&dir).await?;
+ let mut versions = Vec::new();
+ let mut to_delete = Vec::new();
+ while let Some(entry) = stream.next().await {
+ let Ok(entry) = entry else { continue };
+ let Some(file_name) = entry.file_name() else {
+ continue;
+ };
+
+ if let Some(name) = file_name.to_str()
+ && let Some(version) = semver::Version::from_str(name).ok()
+ && fs
+ .is_file(&dir.join(file_name).join(&entrypoint_path))
+ .await
+ {
+ versions.push((version, file_name.to_owned()));
+ } else {
+ to_delete.push(file_name.to_owned())
+ }
+ }
+
+ versions.sort();
+ let newest_version = if let Some((version, file_name)) = versions.last().cloned()
+ && minimum_version.is_none_or(|minimum_version| version >= minimum_version)
+ {
+ versions.pop();
+ Some(file_name)
+ } else {
+ None
+ };
+ log::debug!("existing version of {package_name}: {newest_version:?}");
+ to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name));
+
+ cx.background_spawn({
+ let fs = fs.clone();
+ let dir = dir.clone();
+ async move {
+ for file_name in to_delete {
+ fs.remove_dir(
+ &dir.join(file_name),
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await
+ .ok();
+ }
+ }
+ })
+ .detach();
+
+ let version = if let Some(file_name) = newest_version {
+ cx.background_spawn({
+ let file_name = file_name.clone();
+ let dir = dir.clone();
+ let fs = fs.clone();
+ async move {
+ let latest_version =
+ node_runtime.npm_package_latest_version(&package_name).await;
+ if let Ok(latest_version) = latest_version
+ && &latest_version != &file_name.to_string_lossy()
+ {
+ download_latest_version(
+ fs,
+ dir.clone(),
+ node_runtime,
+ package_name.clone(),
+ )
+ .await
+ .log_err();
+ if let Some(mut new_version_available) = new_version_available {
+ new_version_available.send(Some(latest_version)).ok();
+ }
+ }
+ }
+ })
+ .detach();
+ file_name
+ } else {
+ if let Some(mut status_tx) = status_tx {
+ status_tx.send("Installing…".into()).ok();
+ }
+ let dir = dir.clone();
+ cx.background_spawn(download_latest_version(
+ fs.clone(),
+ dir.clone(),
+ node_runtime,
+ package_name.clone(),
+ ))
+ .await?
+ .into()
+ };
+
+ let agent_server_path = dir.join(version).join(entrypoint_path);
+ let agent_server_path_exists = fs.is_file(&agent_server_path).await;
+ anyhow::ensure!(
+ agent_server_path_exists,
+ "Missing entrypoint path {} after installation",
+ agent_server_path.to_string_lossy()
+ );
+
+ anyhow::Ok(AgentServerCommand {
+ path: node_path,
+ args: vec![agent_server_path.to_string_lossy().to_string()],
+ env: None,
+ })
+ })
+}
+
+fn find_bin_in_path(
+ bin_name: SharedString,
+ root_dir: PathBuf,
+ env: HashMap<String, String>,
+ cx: &mut AsyncApp,
+) -> Task<Option<PathBuf>> {
+ cx.background_executor().spawn(async move {
+ let which_result = if cfg!(windows) {
+ which::which(bin_name.as_str())
+ } else {
+ let shell_path = env.get("PATH").cloned();
+ which::which_in(bin_name.as_str(), shell_path.as_ref(), &root_dir)
+ };
+
+ if let Err(which::Error::CannotFindBinaryPath) = which_result {
+ return None;
+ }
+
+ which_result.log_err()
+ })
+}
+
+async fn download_latest_version(
+ fs: Arc<dyn Fs>,
+ dir: PathBuf,
+ node_runtime: NodeRuntime,
+ package_name: SharedString,
+) -> Result<String> {
+ log::debug!("downloading latest version of {package_name}");
+
+ let tmp_dir = tempfile::tempdir_in(&dir)?;
+
+ node_runtime
+ .npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")])
+ .await?;
+
+ let version = node_runtime
+ .npm_package_installed_version(tmp_dir.path(), &package_name)
+ .await?
+ .context("expected package to be installed")?;
+
+ fs.rename(
+ &tmp_dir.keep(),
+ &dir.join(&version),
+ RenameOptions {
+ ignore_if_exists: true,
+ overwrite: false,
+ },
+ )
+ .await?;
+
+ anyhow::Ok(version)
+}
+
+struct RemoteExternalAgentServer {
+ project_id: u64,
+ upstream_client: Entity<RemoteClient>,
+ name: ExternalAgentServerName,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available_tx: Option<watch::Sender<Option<String>>>,
+}
+
+// TODO: consider adding a `status_updated` method to `ExternalAgentServer`:
+// it would be a no-op for the all-local implementations, while
+// `RemoteExternalAgentServer` would forward the update over its stored
+// `status_tx` channel. (Design note left during the initial implementation.)
+
+impl ExternalAgentServer for RemoteExternalAgentServer {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ let project_id = self.project_id;
+ let name = self.name.to_string();
+ let upstream_client = self.upstream_client.downgrade();
+ let root_dir = root_dir.map(|root_dir| root_dir.to_owned());
+ self.status_tx = status_tx;
+ self.new_version_available_tx = new_version_available_tx;
+ cx.spawn(async move |cx| {
+ let mut response = upstream_client
+ .update(cx, |upstream_client, _| {
+ upstream_client
+ .proto_client()
+ .request(proto::GetAgentServerCommand {
+ project_id,
+ name,
+ root_dir: root_dir.clone(),
+ })
+ })?
+ .await?;
+ let root_dir = response.root_dir;
+ response.env.extend(extra_env);
+ let command = upstream_client.update(cx, |client, _| {
+ client.build_command(
+ Some(response.path),
+ &response.args,
+ &response.env.into_iter().collect(),
+ Some(root_dir.clone()),
+ None,
+ )
+ })??;
+ Ok((
+ AgentServerCommand {
+ path: command.program.into(),
+ args: command.args,
+ env: Some(command.env),
+ },
+ root_dir,
+ response
+ .login
+ .map(|login| task::SpawnInTerminal::from_proto(login)),
+ ))
+ })
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+}
+
+struct LocalGemini {
+ fs: Arc<dyn Fs>,
+ node_runtime: NodeRuntime,
+ project_environment: Entity<ProjectEnvironment>,
+ custom_command: Option<AgentServerCommand>,
+ ignore_system_version: bool,
+}
+
+impl ExternalAgentServer for LocalGemini {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ let fs = self.fs.clone();
+ let node_runtime = self.node_runtime.clone();
+ let project_environment = self.project_environment.downgrade();
+ let custom_command = self.custom_command.clone();
+ let ignore_system_version = self.ignore_system_version;
+ let root_dir: Arc<Path> = root_dir
+ .map(|root_dir| Path::new(root_dir))
+ .unwrap_or(paths::home_dir())
+ .into();
+
+ cx.spawn(async move |cx| {
+ let mut env = project_environment
+ .update(cx, |project_environment, cx| {
+ project_environment.get_directory_environment(root_dir.clone(), cx)
+ })?
+ .await
+ .unwrap_or_default();
+
+ let mut command = if let Some(mut custom_command) = custom_command {
+ env.extend(custom_command.env.unwrap_or_default());
+ custom_command.env = Some(env);
+ custom_command
+ } else if !ignore_system_version
+ && let Some(bin) =
+ find_bin_in_path("gemini".into(), root_dir.to_path_buf(), env.clone(), cx).await
+ {
+ AgentServerCommand {
+ path: bin,
+ args: Vec::new(),
+ env: Some(env),
+ }
+ } else {
+ let mut command = get_or_npm_install_builtin_agent(
+ GEMINI_NAME.into(),
+ "@google/gemini-cli".into(),
+ "node_modules/@google/gemini-cli/dist/index.js".into(),
+ Some("0.2.1".parse().unwrap()),
+ status_tx,
+ new_version_available_tx,
+ fs,
+ node_runtime,
+ cx,
+ )
+ .await?;
+ command.env = Some(env);
+ command
+ };
+
+            // Gemini CLI doesn't seem to have a dedicated invocation for logging in, so we just run it normally without any extra arguments.
+ let login = task::SpawnInTerminal {
+ command: Some(command.path.clone().to_proto()),
+ args: command.args.clone(),
+ env: command.env.clone().unwrap_or_default(),
+ label: "gemini /auth".into(),
+ ..Default::default()
+ };
+
+ command.env.get_or_insert_default().extend(extra_env);
+ command.args.push("--experimental-acp".into());
+ Ok((command, root_dir.to_proto(), Some(login)))
+ })
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+}
+
+struct LocalClaudeCode {
+ fs: Arc<dyn Fs>,
+ node_runtime: NodeRuntime,
+ project_environment: Entity<ProjectEnvironment>,
+ custom_command: Option<AgentServerCommand>,
+}
+
+impl ExternalAgentServer for LocalClaudeCode {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ status_tx: Option<watch::Sender<SharedString>>,
+ new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ let fs = self.fs.clone();
+ let node_runtime = self.node_runtime.clone();
+ let project_environment = self.project_environment.downgrade();
+ let custom_command = self.custom_command.clone();
+ let root_dir: Arc<Path> = root_dir
+ .map(|root_dir| Path::new(root_dir))
+ .unwrap_or(paths::home_dir())
+ .into();
+
+ cx.spawn(async move |cx| {
+ let mut env = project_environment
+ .update(cx, |project_environment, cx| {
+ project_environment.get_directory_environment(root_dir.clone(), cx)
+ })?
+ .await
+ .unwrap_or_default();
+ env.insert("ANTHROPIC_API_KEY".into(), "".into());
+
+ let (mut command, login) = if let Some(mut custom_command) = custom_command {
+ env.extend(custom_command.env.unwrap_or_default());
+ custom_command.env = Some(env);
+ (custom_command, None)
+ } else {
+ let mut command = get_or_npm_install_builtin_agent(
+ "claude-code-acp".into(),
+ "@zed-industries/claude-code-acp".into(),
+ "node_modules/@zed-industries/claude-code-acp/dist/index.js".into(),
+ Some("0.2.5".parse().unwrap()),
+ status_tx,
+ new_version_available_tx,
+ fs,
+ node_runtime,
+ cx,
+ )
+ .await?;
+ command.env = Some(env);
+ let login = command
+ .args
+ .first()
+ .and_then(|path| {
+ path.strip_suffix("/@zed-industries/claude-code-acp/dist/index.js")
+ })
+ .map(|path_prefix| task::SpawnInTerminal {
+ command: Some(command.path.clone().to_proto()),
+ args: vec![
+ Path::new(path_prefix)
+ .join("@anthropic-ai/claude-code/cli.js")
+ .to_string_lossy()
+ .to_string(),
+ "/login".into(),
+ ],
+ env: command.env.clone().unwrap_or_default(),
+ label: "claude /login".into(),
+ ..Default::default()
+ });
+ (command, login)
+ };
+
+ command.env.get_or_insert_default().extend(extra_env);
+ Ok((command, root_dir.to_proto(), login))
+ })
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+}
+
+struct LocalCustomAgent {
+ project_environment: Entity<ProjectEnvironment>,
+ command: AgentServerCommand,
+}
+
+impl ExternalAgentServer for LocalCustomAgent {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ _status_tx: Option<watch::Sender<SharedString>>,
+ _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ let mut command = self.command.clone();
+ let root_dir: Arc<Path> = root_dir
+ .map(|root_dir| Path::new(root_dir))
+ .unwrap_or(paths::home_dir())
+ .into();
+ let project_environment = self.project_environment.downgrade();
+ cx.spawn(async move |cx| {
+ let mut env = project_environment
+ .update(cx, |project_environment, cx| {
+ project_environment.get_directory_environment(root_dir.clone(), cx)
+ })?
+ .await
+ .unwrap_or_default();
+ env.extend(command.env.unwrap_or_default());
+ env.extend(extra_env);
+ command.env = Some(env);
+ Ok((command, root_dir.to_proto(), None))
+ })
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+}
+
+pub const GEMINI_NAME: &'static str = "gemini";
+pub const CLAUDE_CODE_NAME: &'static str = "claude";
+
+#[derive(
+ Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi, SettingsKey, PartialEq,
+)]
+#[settings_key(key = "agent_servers")]
+pub struct AllAgentServersSettings {
+ pub gemini: Option<BuiltinAgentServerSettings>,
+ pub claude: Option<CustomAgentServerSettings>,
+
+ /// Custom agent servers configured by the user
+ #[serde(flatten)]
+ pub custom: HashMap<SharedString, CustomAgentServerSettings>,
+}
+
+#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, PartialEq)]
+pub struct BuiltinAgentServerSettings {
+ /// Absolute path to a binary to be used when launching this agent.
+ ///
+ /// This can be used to run a specific binary without automatic downloads or searching `$PATH`.
+ #[serde(rename = "command")]
+ pub path: Option<PathBuf>,
+ /// If a binary is specified in `command`, it will be passed these arguments.
+ pub args: Option<Vec<String>>,
+ /// If a binary is specified in `command`, it will be passed these environment variables.
+ pub env: Option<HashMap<String, String>>,
+ /// Whether to skip searching `$PATH` for an agent server binary when
+ /// launching this agent.
+ ///
+ /// This has no effect if a `command` is specified. Otherwise, when this is
+ /// `false`, Zed will search `$PATH` for an agent server binary and, if one
+ /// is found, use it for threads with this agent. If no agent binary is
+ /// found on `$PATH`, Zed will automatically install and use its own binary.
+ /// When this is `true`, Zed will not search `$PATH`, and will always use
+ /// its own binary.
+ ///
+ /// Default: true
+ pub ignore_system_version: Option<bool>,
+}
+
+impl BuiltinAgentServerSettings {
+ pub(crate) fn custom_command(self) -> Option<AgentServerCommand> {
+ self.path.map(|path| AgentServerCommand {
+ path,
+ args: self.args.unwrap_or_default(),
+ env: self.env,
+ })
+ }
+}
+
+impl From<AgentServerCommand> for BuiltinAgentServerSettings {
+ fn from(value: AgentServerCommand) -> Self {
+ BuiltinAgentServerSettings {
+ path: Some(value.path),
+ args: Some(value.args),
+ env: value.env,
+ ..Default::default()
+ }
+ }
+}
+
+#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug, PartialEq)]
+pub struct CustomAgentServerSettings {
+ #[serde(flatten)]
+ pub command: AgentServerCommand,
+}
+
+impl settings::Settings for AllAgentServersSettings {
+ type FileContent = Self;
+
+ fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
+ let mut settings = AllAgentServersSettings::default();
+
+ for AllAgentServersSettings {
+ gemini,
+ claude,
+ custom,
+ } in sources.defaults_and_customizations()
+ {
+ if gemini.is_some() {
+ settings.gemini = gemini.clone();
+ }
+ if claude.is_some() {
+ settings.claude = claude.clone();
+ }
+
+ // Merge custom agents
+ for (name, config) in custom {
+ // Skip built-in agent names to avoid conflicts
+ if name != GEMINI_NAME && name != CLAUDE_CODE_NAME {
+ settings.custom.insert(name.clone(), config.clone());
+ }
+ }
+ }
+
+ Ok(settings)
+ }
+
+ fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
+}
@@ -20,7 +20,7 @@ use language::{
},
};
use rpc::{
- AnyProtoClient, ErrorExt as _, TypedEnvelope,
+ AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
proto::{self, ToProto},
};
use smol::channel::Receiver;
@@ -88,9 +88,18 @@ pub enum BufferStoreEvent {
},
}
-#[derive(Default, Debug)]
+#[derive(Default, Debug, Clone)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
+impl PartialEq for ProjectTransaction {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.len() == other.0.len()
+ && self.0.iter().all(|(buffer, transaction)| {
+ other.0.get(buffer).is_some_and(|t| t.id == transaction.id)
+ })
+ }
+}
+
impl EventEmitter<BufferStoreEvent> for BufferStore {}
impl RemoteBufferStore {
@@ -168,7 +177,7 @@ impl RemoteBufferStore {
.with_context(|| {
format!("no worktree found for id {}", file.worktree_id)
})?;
- buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
+ buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
as Arc<dyn language::File>);
}
Buffer::from_proto(replica_id, capability, state, buffer_file)
@@ -310,7 +319,11 @@ impl RemoteBufferStore {
})
}
- fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
+ fn create_buffer(
+ &self,
+ project_searchable: bool,
+ cx: &mut Context<BufferStore>,
+ ) -> Task<Result<Entity<Buffer>>> {
let create = self.upstream_client.request(proto::OpenNewBuffer {
project_id: self.project_id,
});
@@ -318,8 +331,13 @@ impl RemoteBufferStore {
let response = create.await?;
let buffer_id = BufferId::new(response.buffer_id)?;
- this.update(cx, |this, cx| this.wait_for_remote_buffer(buffer_id, cx))?
- .await
+ this.update(cx, |this, cx| {
+ if !project_searchable {
+ this.non_searchable_buffers.insert(buffer_id);
+ }
+ this.wait_for_remote_buffer(buffer_id, cx)
+ })?
+ .await
})
}
@@ -413,13 +431,10 @@ impl LocalBufferStore {
cx: &mut Context<BufferStore>,
) {
cx.subscribe(worktree, |this, worktree, event, cx| {
- if worktree.read(cx).is_local() {
- match event {
- worktree::Event::UpdatedEntries(changes) => {
- Self::local_worktree_entries_changed(this, &worktree, changes, cx);
- }
- _ => {}
- }
+ if worktree.read(cx).is_local()
+ && let worktree::Event::UpdatedEntries(changes) = event
+ {
+ Self::local_worktree_entries_changed(this, &worktree, changes, cx);
}
})
.detach();
@@ -594,7 +609,7 @@ impl LocalBufferStore {
else {
return Task::ready(Err(anyhow!("no such worktree")));
};
- self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
+ self.save_local_buffer(buffer, worktree, path.path, true, cx)
}
fn open_buffer(
@@ -664,12 +679,21 @@ impl LocalBufferStore {
})
}
- fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
+ fn create_buffer(
+ &self,
+ project_searchable: bool,
+ cx: &mut Context<BufferStore>,
+ ) -> Task<Result<Entity<Buffer>>> {
cx.spawn(async move |buffer_store, cx| {
let buffer =
cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
buffer_store.update(cx, |buffer_store, cx| {
buffer_store.add_buffer(buffer.clone(), cx).log_err();
+ if !project_searchable {
+ buffer_store
+ .non_searchable_buffers
+ .insert(buffer.read(cx).remote_id());
+ }
})?;
Ok(buffer)
})
@@ -831,13 +855,25 @@ impl BufferStore {
}
};
- cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
+ cx.background_spawn(async move {
+ task.await.map_err(|e| {
+ if e.error_code() != ErrorCode::Internal {
+ anyhow!(e.error_code())
+ } else {
+ anyhow!("{e}")
+ }
+ })
+ })
}
- pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
+ pub fn create_buffer(
+ &mut self,
+ project_searchable: bool,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Buffer>>> {
match &self.state {
- BufferStoreState::Local(this) => this.create_buffer(cx),
- BufferStoreState::Remote(this) => this.create_buffer(cx),
+ BufferStoreState::Local(this) => this.create_buffer(project_searchable, cx),
+ BufferStoreState::Remote(this) => this.create_buffer(project_searchable, cx),
}
}
@@ -848,7 +884,7 @@ impl BufferStore {
) -> Task<Result<()>> {
match &mut self.state {
BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
- BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
+ BufferStoreState::Remote(this) => this.save_remote_buffer(buffer, None, cx),
}
}
@@ -938,7 +974,15 @@ impl BufferStore {
) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
self.loading_buffers.iter().map(|(path, task)| {
let task = task.clone();
- (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
+ (path, async move {
+ task.await.map_err(|e| {
+ if e.error_code() != ErrorCode::Internal {
+ anyhow!(e.error_code())
+ } else {
+ anyhow!("{e}")
+ }
+ })
+ })
})
}
@@ -947,10 +991,9 @@ impl BufferStore {
}
pub fn get_by_path(&self, path: &ProjectPath) -> Option<Entity<Buffer>> {
- self.path_to_buffer_id.get(path).and_then(|buffer_id| {
- let buffer = self.get(*buffer_id);
- buffer
- })
+ self.path_to_buffer_id
+ .get(path)
+ .and_then(|buffer_id| self.get(*buffer_id))
}
pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
@@ -1142,7 +1185,7 @@ impl BufferStore {
envelope: TypedEnvelope<proto::UpdateBuffer>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- let payload = envelope.payload.clone();
+ let payload = envelope.payload;
let buffer_id = BufferId::new(payload.buffer_id)?;
let ops = payload
.operations
@@ -1589,6 +1632,7 @@ impl BufferStore {
&mut self,
text: &str,
language: Option<Arc<Language>>,
+ project_searchable: bool,
cx: &mut Context<Self>,
) -> Entity<Buffer> {
let buffer = cx.new(|cx| {
@@ -1598,6 +1642,9 @@ impl BufferStore {
self.add_buffer(buffer.clone(), cx).log_err();
let buffer_id = buffer.read(cx).remote_id();
+ if !project_searchable {
+ self.non_searchable_buffers.insert(buffer_id);
+ }
if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
self.path_to_buffer_id.insert(
@@ -1667,10 +1714,6 @@ impl BufferStore {
}
serialized_transaction
}
-
- pub(crate) fn mark_buffer_as_non_searchable(&mut self, buffer_id: BufferId) {
- self.non_searchable_buffers.insert(buffer_id);
- }
}
impl OpenBuffer {
@@ -4,8 +4,8 @@ use gpui::{Hsla, Rgba};
use lsp::{CompletionItem, Documentation};
use regex::{Regex, RegexBuilder};
-const HEX: &'static str = r#"(#(?:[\da-fA-F]{3}){1,2})"#;
-const RGB_OR_HSL: &'static str = r#"(rgba?|hsla?)\(\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*(?:,\s*(1|0?\.\d+))?\s*\)"#;
+const HEX: &str = r#"(#(?:[\da-fA-F]{3}){1,2})"#;
+const RGB_OR_HSL: &str = r#"(rgba?|hsla?)\(\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*,\s*(\d{1,3}%?)\s*(?:,\s*(1|0?\.\d+))?\s*\)"#;
static RELAXED_HEX_REGEX: LazyLock<Regex> = LazyLock::new(|| {
RegexBuilder::new(HEX)
@@ -141,7 +141,7 @@ mod tests {
use gpui::rgba;
use lsp::{CompletionItem, CompletionItemKind};
- pub const COLOR_TABLE: &[(&'static str, Option<u32>)] = &[
+ pub const COLOR_TABLE: &[(&str, Option<u32>)] = &[
// -- Invalid --
// Invalid hex
("f0f", None),
@@ -399,7 +399,6 @@ impl ContextServerStore {
async move |this, cx| {
match server.clone().start(cx).await {
Ok(_) => {
- log::info!("Started {} context server", id);
debug_assert!(server.client().is_some());
this.update(cx, |this, cx| {
@@ -642,8 +641,8 @@ mod tests {
#[gpui::test]
async fn test_context_server_status(cx: &mut TestAppContext) {
- const SERVER_1_ID: &'static str = "mcp-1";
- const SERVER_2_ID: &'static str = "mcp-2";
+ const SERVER_1_ID: &str = "mcp-1";
+ const SERVER_2_ID: &str = "mcp-2";
let (_fs, project) = setup_context_server_test(
cx,
@@ -722,8 +721,8 @@ mod tests {
#[gpui::test]
async fn test_context_server_status_events(cx: &mut TestAppContext) {
- const SERVER_1_ID: &'static str = "mcp-1";
- const SERVER_2_ID: &'static str = "mcp-2";
+ const SERVER_1_ID: &str = "mcp-1";
+ const SERVER_2_ID: &str = "mcp-2";
let (_fs, project) = setup_context_server_test(
cx,
@@ -761,7 +760,7 @@ mod tests {
&store,
vec![
(server_1_id.clone(), ContextServerStatus::Starting),
- (server_1_id.clone(), ContextServerStatus::Running),
+ (server_1_id, ContextServerStatus::Running),
(server_2_id.clone(), ContextServerStatus::Starting),
(server_2_id.clone(), ContextServerStatus::Running),
(server_2_id.clone(), ContextServerStatus::Stopped),
@@ -784,7 +783,7 @@ mod tests {
#[gpui::test(iterations = 25)]
async fn test_context_server_concurrent_starts(cx: &mut TestAppContext) {
- const SERVER_1_ID: &'static str = "mcp-1";
+ const SERVER_1_ID: &str = "mcp-1";
let (_fs, project) = setup_context_server_test(
cx,
@@ -845,8 +844,8 @@ mod tests {
#[gpui::test]
async fn test_context_server_maintain_servers_loop(cx: &mut TestAppContext) {
- const SERVER_1_ID: &'static str = "mcp-1";
- const SERVER_2_ID: &'static str = "mcp-2";
+ const SERVER_1_ID: &str = "mcp-1";
+ const SERVER_2_ID: &str = "mcp-2";
let server_1_id = ContextServerId(SERVER_1_ID.into());
let server_2_id = ContextServerId(SERVER_2_ID.into());
@@ -977,6 +976,7 @@ mod tests {
path: "somebinary".into(),
args: vec!["arg".to_string()],
env: None,
+ timeout: None,
},
},
),
@@ -1017,6 +1017,7 @@ mod tests {
path: "somebinary".into(),
args: vec!["anotherArg".to_string()],
env: None,
+ timeout: None,
},
},
),
@@ -1084,7 +1085,7 @@ mod tests {
#[gpui::test]
async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) {
- const SERVER_1_ID: &'static str = "mcp-1";
+ const SERVER_1_ID: &str = "mcp-1";
let server_1_id = ContextServerId(SERVER_1_ID.into());
@@ -1099,6 +1100,7 @@ mod tests {
path: "somebinary".into(),
args: vec!["arg".to_string()],
env: None,
+ timeout: None,
},
},
)],
@@ -1151,6 +1153,7 @@ mod tests {
path: "somebinary".into(),
args: vec!["arg".to_string()],
env: None,
+ timeout: None,
},
},
)],
@@ -1178,6 +1181,7 @@ mod tests {
command: ContextServerCommand {
path: "somebinary".into(),
args: vec!["arg".to_string()],
+ timeout: None,
env: None,
},
},
@@ -1231,6 +1235,7 @@ mod tests {
path: "somebinary".into(),
args: vec!["arg".to_string()],
env: None,
+ timeout: None,
},
}
}
@@ -1319,6 +1324,7 @@ mod tests {
path: self.path.clone(),
args: vec!["arg1".to_string(), "arg2".to_string()],
env: None,
+ timeout: None,
}))
}
@@ -63,12 +63,13 @@ impl registry::ContextServerDescriptor for ContextServerDescriptor {
.await?;
command.command = extension.path_from_extension(&command.command);
- log::info!("loaded command for context server {id}: {command:?}");
+ log::debug!("loaded command for context server {id}: {command:?}");
Ok(ContextServerCommand {
path: command.command,
args: command.args,
env: Some(command.env.into_iter().collect()),
+ timeout: None,
})
})
}
@@ -6,9 +6,9 @@
//!
//! There are few reasons for this divide:
//! - Breakpoints persist across debug sessions and they're not really specific to any particular session. Sure, we have to send protocol messages for them
-//! (so they're a "thing" in the protocol), but we also want to set them before any session starts up.
+//! (so they're a "thing" in the protocol), but we also want to set them before any session starts up.
//! - Debug clients are doing the heavy lifting, and this is where UI grabs all of it's data from. They also rely on breakpoint store during initialization to obtain
-//! current set of breakpoints.
+//! current set of breakpoints.
//! - Since DAP store knows about all of the available debug sessions, it is responsible for routing RPC requests to sessions. It also knows how to find adapters for particular kind of session.
pub mod breakpoint_store;
@@ -192,7 +192,7 @@ impl BreakpointStore {
}
pub(crate) fn shared(&mut self, project_id: u64, downstream_client: AnyProtoClient) {
- self.downstream_client = Some((downstream_client.clone(), project_id));
+ self.downstream_client = Some((downstream_client, project_id));
}
pub(crate) fn unshared(&mut self, cx: &mut Context<Self>) {
@@ -450,9 +450,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
- found_bp.message = Some(log_message.clone());
+ found_bp.message = Some(log_message);
} else {
- breakpoint.bp.message = Some(log_message.clone());
+ breakpoint.bp.message = Some(log_message);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@@ -482,9 +482,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
- found_bp.hit_condition = Some(hit_condition.clone());
+ found_bp.hit_condition = Some(hit_condition);
} else {
- breakpoint.bp.hit_condition = Some(hit_condition.clone());
+ breakpoint.bp.hit_condition = Some(hit_condition);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@@ -514,9 +514,9 @@ impl BreakpointStore {
});
if let Some(found_bp) = found_bp {
- found_bp.condition = Some(condition.clone());
+ found_bp.condition = Some(condition);
} else {
- breakpoint.bp.condition = Some(condition.clone());
+ breakpoint.bp.condition = Some(condition);
// We did not remove any breakpoint, hence let's toggle one.
breakpoint_set
.breakpoints
@@ -591,7 +591,7 @@ impl BreakpointStore {
cx: &mut Context<Self>,
) {
if let Some(breakpoints) = self.breakpoints.remove(&old_path) {
- self.breakpoints.insert(new_path.clone(), breakpoints);
+ self.breakpoints.insert(new_path, breakpoints);
cx.notify();
}
@@ -831,7 +831,6 @@ impl BreakpointStore {
new_breakpoints.insert(path, breakpoints_for_file);
}
this.update(cx, |this, cx| {
- log::info!("Finish deserializing breakpoints & initializing breakpoint store");
for (path, count) in new_breakpoints.iter().map(|(path, bp_in_file)| {
(path.to_string_lossy(), bp_in_file.breakpoints.len())
}) {
@@ -905,7 +904,7 @@ impl BreakpointState {
}
#[inline]
- pub fn to_int(&self) -> i32 {
+ pub fn to_int(self) -> i32 {
match self {
BreakpointState::Enabled => 0,
BreakpointState::Disabled => 1,
@@ -1454,7 +1454,7 @@ impl DapCommand for EvaluateCommand {
variables_reference: message.variable_reference,
named_variables: message.named_variables,
indexed_variables: message.indexed_variables,
- memory_reference: message.memory_reference.clone(),
+ memory_reference: message.memory_reference,
value_location_reference: None, //TODO
})
}
@@ -5,11 +5,8 @@ use super::{
session::{self, Session, SessionStateEvent},
};
use crate::{
- InlayHint, InlayHintLabel, ProjectEnvironment, ResolveState,
- debugger::session::SessionQuirks,
- project_settings::ProjectSettings,
- terminals::{SshCommand, wrap_for_ssh},
- worktree_store::WorktreeStore,
+ InlayHint, InlayHintLabel, ProjectEnvironment, ResolveState, debugger::session::SessionQuirks,
+ project_settings::ProjectSettings, worktree_store::WorktreeStore,
};
use anyhow::{Context as _, Result, anyhow};
use async_trait::async_trait;
@@ -34,7 +31,7 @@ use http_client::HttpClient;
use language::{Buffer, LanguageToolchainStore, language_settings::InlayHintKind};
use node_runtime::NodeRuntime;
-use remote::{SshRemoteClient, ssh_session::SshArgs};
+use remote::RemoteClient;
use rpc::{
AnyProtoClient, TypedEnvelope,
proto::{self},
@@ -68,7 +65,7 @@ pub enum DapStoreEvent {
enum DapStoreMode {
Local(LocalDapStore),
- Ssh(SshDapStore),
+ Remote(RemoteDapStore),
Collab,
}
@@ -80,8 +77,8 @@ pub struct LocalDapStore {
toolchain_store: Arc<dyn LanguageToolchainStore>,
}
-pub struct SshDapStore {
- ssh_client: Entity<SshRemoteClient>,
+pub struct RemoteDapStore {
+ remote_client: Entity<RemoteClient>,
upstream_client: AnyProtoClient,
upstream_project_id: u64,
}
@@ -147,16 +144,16 @@ impl DapStore {
Self::new(mode, breakpoint_store, worktree_store, cx)
}
- pub fn new_ssh(
+ pub fn new_remote(
project_id: u64,
- ssh_client: Entity<SshRemoteClient>,
+ remote_client: Entity<RemoteClient>,
breakpoint_store: Entity<BreakpointStore>,
worktree_store: Entity<WorktreeStore>,
cx: &mut Context<Self>,
) -> Self {
- let mode = DapStoreMode::Ssh(SshDapStore {
- upstream_client: ssh_client.read(cx).proto_client(),
- ssh_client,
+ let mode = DapStoreMode::Remote(RemoteDapStore {
+ upstream_client: remote_client.read(cx).proto_client(),
+ remote_client,
upstream_project_id: project_id,
});
@@ -242,59 +239,57 @@ impl DapStore {
Ok(binary)
})
}
- DapStoreMode::Ssh(ssh) => {
- let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
- session_id: session_id.to_proto(),
- project_id: ssh.upstream_project_id,
- worktree_id: worktree.read(cx).id().to_proto(),
- definition: Some(definition.to_proto()),
- });
- let ssh_client = ssh.ssh_client.clone();
+ DapStoreMode::Remote(remote) => {
+ let request = remote
+ .upstream_client
+ .request(proto::GetDebugAdapterBinary {
+ session_id: session_id.to_proto(),
+ project_id: remote.upstream_project_id,
+ worktree_id: worktree.read(cx).id().to_proto(),
+ definition: Some(definition.to_proto()),
+ });
+ let remote = remote.remote_client.clone();
cx.spawn(async move |_, cx| {
let response = request.await?;
let binary = DebugAdapterBinary::from_proto(response)?;
- let (mut ssh_command, envs, path_style) =
- ssh_client.read_with(cx, |ssh, _| {
- let (SshArgs { arguments, envs }, path_style) =
- ssh.ssh_info().context("SSH arguments not found")?;
- anyhow::Ok((
- SshCommand { arguments },
- envs.unwrap_or_default(),
- path_style,
- ))
- })??;
-
- let mut connection = None;
- if let Some(c) = binary.connection {
- let local_bind_addr = Ipv4Addr::LOCALHOST;
- let port =
- dap::transport::TcpTransport::unused_port(local_bind_addr).await?;
- ssh_command.add_port_forwarding(port, c.host.to_string(), c.port);
+ let port_forwarding;
+ let connection;
+ if let Some(c) = binary.connection {
+ let host = Ipv4Addr::LOCALHOST;
+ let port;
+ if remote.read_with(cx, |remote, _cx| remote.shares_network_interface())? {
+ port = c.port;
+ port_forwarding = None;
+ } else {
+ port = dap::transport::TcpTransport::unused_port(host).await?;
+ port_forwarding = Some((port, c.host.to_string(), c.port));
+ }
connection = Some(TcpArguments {
port,
- host: local_bind_addr,
+ host,
timeout: c.timeout,
})
+ } else {
+ port_forwarding = None;
+ connection = None;
}
- let (program, args) = wrap_for_ssh(
- &ssh_command,
- binary
- .command
- .as_ref()
- .map(|command| (command, &binary.arguments)),
- binary.cwd.as_deref(),
- binary.envs,
- None,
- path_style,
- );
+ let command = remote.read_with(cx, |remote, _cx| {
+ remote.build_command(
+ binary.command,
+ &binary.arguments,
+ &binary.envs,
+ binary.cwd.map(|path| path.display().to_string()),
+ port_forwarding,
+ )
+ })??;
Ok(DebugAdapterBinary {
- command: Some(program),
- arguments: args,
- envs,
+ command: Some(command.program),
+ arguments: command.args,
+ envs: command.env,
cwd: None,
connection,
request_args: binary.request_args,
@@ -360,9 +355,9 @@ impl DapStore {
)))
}
}
- DapStoreMode::Ssh(ssh) => {
- let request = ssh.upstream_client.request(proto::RunDebugLocators {
- project_id: ssh.upstream_project_id,
+ DapStoreMode::Remote(remote) => {
+ let request = remote.upstream_client.request(proto::RunDebugLocators {
+ project_id: remote.upstream_project_id,
build_command: Some(build_command.to_proto()),
locator: locator_name.to_owned(),
});
@@ -470,9 +465,8 @@ impl DapStore {
session_id: impl Borrow<SessionId>,
) -> Option<Entity<session::Session>> {
let session_id = session_id.borrow();
- let client = self.sessions.get(session_id).cloned();
- client
+ self.sessions.get(session_id).cloned()
}
pub fn sessions(&self) -> impl Iterator<Item = &Entity<Session>> {
self.sessions.values()
@@ -685,7 +679,7 @@ impl DapStore {
let shutdown_id = parent_session.update(cx, |parent_session, _| {
parent_session.remove_child_session_id(session_id);
- if parent_session.child_session_ids().len() == 0 {
+ if parent_session.child_session_ids().is_empty() {
Some(parent_session.session_id())
} else {
None
@@ -702,7 +696,7 @@ impl DapStore {
cx.emit(DapStoreEvent::DebugClientShutdown(session_id));
cx.background_spawn(async move {
- if shutdown_children.len() > 0 {
+ if !shutdown_children.is_empty() {
let _ = join_all(shutdown_children).await;
}
@@ -722,7 +716,7 @@ impl DapStore {
downstream_client: AnyProtoClient,
_: &mut Context<Self>,
) {
- self.downstream_client = Some((downstream_client.clone(), project_id));
+ self.downstream_client = Some((downstream_client, project_id));
}
pub fn unshared(&mut self, cx: &mut Context<Self>) {
@@ -117,7 +117,7 @@ impl DapLocator for CargoLocator {
.cwd
.clone()
.context("Couldn't get cwd from debug config which is needed for locators")?;
- let builder = ShellBuilder::new(true, &build_config.shell).non_interactive();
+ let builder = ShellBuilder::new(None, &build_config.shell).non_interactive();
let (program, args) = builder.build(
Some("cargo".into()),
&build_config
@@ -126,7 +126,7 @@ impl DapLocator for CargoLocator {
.cloned()
.take_while(|arg| arg != "--")
.chain(Some("--message-format=json".to_owned()))
- .collect(),
+ .collect::<Vec<_>>(),
);
let mut child = util::command::new_smol_command(program)
.args(args)
@@ -174,7 +174,7 @@ impl DapLocator for GoLocator {
request: "launch".to_string(),
mode: "test".to_string(),
program,
- args: args,
+ args,
build_flags,
cwd: build_config.cwd.clone(),
env: build_config.env.clone(),
@@ -185,7 +185,7 @@ impl DapLocator for GoLocator {
label: resolved_label.to_string().into(),
adapter: adapter.0.clone(),
build: None,
- config: config,
+ config,
tcp_connection: None,
})
}
@@ -220,7 +220,7 @@ impl DapLocator for GoLocator {
request: "launch".to_string(),
mode: "debug".to_string(),
program,
- args: args,
+ args,
build_flags,
})
.unwrap();
@@ -25,7 +25,7 @@ impl DapLocator for PythonLocator {
if adapter.0.as_ref() != "Debugpy" {
return None;
}
- let valid_program = build_config.command.starts_with("$ZED_")
+ let valid_program = build_config.command.starts_with("\"$ZED_")
|| Path::new(&build_config.command)
.file_name()
.is_some_and(|name| name.to_str().is_some_and(|path| path.starts_with("python")));
@@ -33,13 +33,7 @@ impl DapLocator for PythonLocator {
// We cannot debug selections.
return None;
}
- let command = if build_config.command
- == VariableName::Custom("PYTHON_ACTIVE_ZED_TOOLCHAIN".into()).template_value()
- {
- VariableName::Custom("PYTHON_ACTIVE_ZED_TOOLCHAIN_RAW".into()).template_value()
- } else {
- build_config.command.clone()
- };
+ let command = build_config.command.clone();
let module_specifier_position = build_config
.args
.iter()
@@ -57,10 +51,8 @@ impl DapLocator for PythonLocator {
let program_position = mod_name
.is_none()
.then(|| {
- build_config
- .args
- .iter()
- .position(|arg| *arg == "\"$ZED_FILE\"")
+ let zed_file = VariableName::File.template_value_with_whitespace();
+ build_config.args.iter().position(|arg| *arg == zed_file)
})
.flatten();
let args = if let Some(position) = program_position {
@@ -3,6 +3,7 @@
//! Each byte in memory can either be mapped or unmapped. We try to mimic that twofold:
//! - We assume that the memory is divided into pages of a fixed size.
//! - We assume that each page can be either mapped or unmapped.
+//!
//! These two assumptions drive the shape of the memory representation.
//! In particular, we want the unmapped pages to be represented without allocating any memory, as *most*
//! of the memory in a program space is usually unmapped.
@@ -165,8 +166,8 @@ impl Memory {
/// - If it succeeds/fails wholesale, cool; we have no unknown memory regions in this page.
/// - If it succeeds partially, we know # of mapped bytes.
/// We might also know the # of unmapped bytes.
-/// However, we're still unsure about what's *after* the unreadable region.
///
+/// However, we're still unsure about what's *after* the unreadable region.
/// This is where this builder comes in. It lets us track the state of figuring out contents of a single page.
pub(super) struct MemoryPageBuilder {
chunks: MappedPageContents,
@@ -226,7 +226,7 @@ impl RunningMode {
fn unset_breakpoints_from_paths(&self, paths: &Vec<Arc<Path>>, cx: &mut App) -> Task<()> {
let tasks: Vec<_> = paths
- .into_iter()
+ .iter()
.map(|path| {
self.request(dap_command::SetBreakpoints {
source: client_source(path),
@@ -508,13 +508,12 @@ impl RunningMode {
.ok();
}
- let ret = if configuration_done_supported {
+ if configuration_done_supported {
this.request(ConfigurationDone {})
} else {
Task::ready(Ok(()))
}
- .await;
- ret
+ .await
}
});
@@ -839,7 +838,7 @@ impl Session {
})
.detach();
- let this = Self {
+ Self {
mode: SessionState::Booting(None),
id: session_id,
child_session_ids: HashSet::default(),
@@ -868,9 +867,7 @@ impl Session {
task_context,
memory: memory::Memory::new(),
quirks,
- };
-
- this
+ }
})
}
@@ -1397,7 +1394,7 @@ impl Session {
let breakpoint_store = self.breakpoint_store.clone();
if let Some((local, path)) = self.as_running_mut().and_then(|local| {
let breakpoint = local.tmp_breakpoint.take()?;
- let path = breakpoint.path.clone();
+ let path = breakpoint.path;
Some((local, path))
}) {
local
@@ -1713,7 +1710,7 @@ impl Session {
this.threads = result
.into_iter()
- .map(|thread| (ThreadId(thread.id), Thread::from(thread.clone())))
+ .map(|thread| (ThreadId(thread.id), Thread::from(thread)))
.collect();
this.invalidate_command_type::<StackTraceCommand>();
@@ -2556,10 +2553,7 @@ impl Session {
mode: Option<String>,
cx: &mut Context<Self>,
) -> Task<Option<dap::DataBreakpointInfoResponse>> {
- let command = DataBreakpointInfoCommand {
- context: context.clone(),
- mode,
- };
+ let command = DataBreakpointInfoCommand { context, mode };
self.request(command, |_, response, _| response.ok(), cx)
}
@@ -44,7 +44,7 @@ use parking_lot::Mutex;
use postage::stream::Stream as _;
use rpc::{
AnyProtoClient, TypedEnvelope,
- proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
+ proto::{self, FromProto, ToProto, git_reset, split_repository_update},
};
use serde::Deserialize;
use std::{
@@ -62,7 +62,7 @@ use std::{
};
use sum_tree::{Edit, SumTree, TreeSet};
use text::{Bias, BufferId};
-use util::{ResultExt, debug_panic, post_inc};
+use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
use worktree::{
File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
@@ -141,14 +141,10 @@ enum GitStoreState {
project_environment: Entity<ProjectEnvironment>,
fs: Arc<dyn Fs>,
},
- Ssh {
- upstream_client: AnyProtoClient,
- upstream_project_id: ProjectId,
- downstream: Option<(AnyProtoClient, ProjectId)>,
- },
Remote {
upstream_client: AnyProtoClient,
- upstream_project_id: ProjectId,
+ upstream_project_id: u64,
+ downstream: Option<(AnyProtoClient, ProjectId)>,
},
}
@@ -355,7 +351,7 @@ impl GitStore {
worktree_store: &Entity<WorktreeStore>,
buffer_store: Entity<BufferStore>,
upstream_client: AnyProtoClient,
- project_id: ProjectId,
+ project_id: u64,
cx: &mut Context<Self>,
) -> Self {
Self::new(
@@ -364,23 +360,6 @@ impl GitStore {
GitStoreState::Remote {
upstream_client,
upstream_project_id: project_id,
- },
- cx,
- )
- }
-
- pub fn ssh(
- worktree_store: &Entity<WorktreeStore>,
- buffer_store: Entity<BufferStore>,
- upstream_client: AnyProtoClient,
- cx: &mut Context<Self>,
- ) -> Self {
- Self::new(
- worktree_store.clone(),
- buffer_store,
- GitStoreState::Ssh {
- upstream_client,
- upstream_project_id: ProjectId(SSH_PROJECT_ID),
downstream: None,
},
cx,
@@ -451,7 +430,7 @@ impl GitStore {
pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
match &mut self.state {
- GitStoreState::Ssh {
+ GitStoreState::Remote {
downstream: downstream_client,
..
} => {
@@ -527,9 +506,6 @@ impl GitStore {
}),
});
}
- GitStoreState::Remote { .. } => {
- debug_panic!("shared called on remote store");
- }
}
}
@@ -541,15 +517,12 @@ impl GitStore {
} => {
downstream_client.take();
}
- GitStoreState::Ssh {
+ GitStoreState::Remote {
downstream: downstream_client,
..
} => {
downstream_client.take();
}
- GitStoreState::Remote { .. } => {
- debug_panic!("unshared called on remote store");
- }
}
self.shared_diffs.clear();
}
@@ -769,7 +742,7 @@ impl GitStore {
.as_ref()
.and_then(|weak| weak.upgrade())
{
- let conflict_set = conflict_set.clone();
+ let conflict_set = conflict_set;
let buffer_snapshot = buffer.read(cx).text_snapshot();
git_state.update(cx, |state, cx| {
@@ -912,7 +885,7 @@ impl GitStore {
return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
};
let content = match &version {
- Some(version) => buffer.rope_for_version(version).clone(),
+ Some(version) => buffer.rope_for_version(version),
None => buffer.as_rope().clone(),
};
let version = version.unwrap_or(buffer.version());
@@ -1047,21 +1020,17 @@ impl GitStore {
} => downstream_client
.as_ref()
.map(|state| (state.client.clone(), state.project_id)),
- GitStoreState::Ssh {
+ GitStoreState::Remote {
downstream: downstream_client,
..
} => downstream_client.clone(),
- GitStoreState::Remote { .. } => None,
}
}
fn upstream_client(&self) -> Option<AnyProtoClient> {
match &self.state {
GitStoreState::Local { .. } => None,
- GitStoreState::Ssh {
- upstream_client, ..
- }
- | GitStoreState::Remote {
+ GitStoreState::Remote {
upstream_client, ..
} => Some(upstream_client.clone()),
}
@@ -1432,12 +1401,7 @@ impl GitStore {
cx.background_executor()
.spawn(async move { fs.git_init(&path, fallback_branch_name) })
}
- GitStoreState::Ssh {
- upstream_client,
- upstream_project_id: project_id,
- ..
- }
- | GitStoreState::Remote {
+ GitStoreState::Remote {
upstream_client,
upstream_project_id: project_id,
..
@@ -1447,7 +1411,7 @@ impl GitStore {
cx.background_executor().spawn(async move {
client
.request(proto::GitInit {
- project_id: project_id.0,
+ project_id: project_id,
abs_path: path.to_string_lossy().to_string(),
fallback_branch_name,
})
@@ -1471,13 +1435,18 @@ impl GitStore {
cx.background_executor()
.spawn(async move { fs.git_clone(&repo, &path).await })
}
- GitStoreState::Ssh {
+ GitStoreState::Remote {
upstream_client,
upstream_project_id,
..
} => {
+ if upstream_client.is_via_collab() {
+ return Task::ready(Err(anyhow!(
+ "Git Clone isn't supported for project guests"
+ )));
+ }
let request = upstream_client.request(proto::GitClone {
- project_id: upstream_project_id.0,
+ project_id: *upstream_project_id,
abs_path: path.to_string_lossy().to_string(),
remote_repo: repo,
});
@@ -1491,9 +1460,6 @@ impl GitStore {
}
})
}
- GitStoreState::Remote { .. } => {
- Task::ready(Err(anyhow!("Git Clone isn't supported for remote users")))
- }
}
}
@@ -1506,10 +1472,7 @@ impl GitStore {
let mut update = envelope.payload;
let id = RepositoryId::from_proto(update.id);
- let client = this
- .upstream_client()
- .context("no upstream client")?
- .clone();
+ let client = this.upstream_client().context("no upstream client")?;
let mut is_new = false;
let repo = this.repositories.entry(id).or_insert_with(|| {
@@ -3271,6 +3234,7 @@ impl Repository {
let git_store = self.git_store.upgrade()?;
let worktree_store = git_store.read(cx).worktree_store.read(cx);
let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
+ let abs_path = SanitizedPath::new(&abs_path);
let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
Some(ProjectPath {
worktree_id: worktree.read(cx).id(),
@@ -3353,7 +3317,7 @@ impl Repository {
) -> Task<Result<Entity<Buffer>>> {
cx.spawn(async move |repository, cx| {
let buffer = buffer_store
- .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
+ .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
.await?;
if let Some(language_registry) = language_registry {
@@ -3418,7 +3382,6 @@ impl Repository {
reset_mode: ResetMode,
_cx: &mut App,
) -> oneshot::Receiver<Result<()>> {
- let commit = commit.to_string();
let id = self.id;
self.send_job(None, move |git_repo, _| async move {
@@ -3644,7 +3607,7 @@ impl Repository {
let to_stage = self
.cached_status()
.filter(|entry| !entry.status.staging().is_fully_staged())
- .map(|entry| entry.repo_path.clone())
+ .map(|entry| entry.repo_path)
.collect();
self.stage_entries(to_stage, cx)
}
@@ -3653,16 +3616,13 @@ impl Repository {
let to_unstage = self
.cached_status()
.filter(|entry| entry.status.staging().has_staged())
- .map(|entry| entry.repo_path.clone())
+ .map(|entry| entry.repo_path)
.collect();
self.unstage_entries(to_unstage, cx)
}
pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
- let to_stash = self
- .cached_status()
- .map(|entry| entry.repo_path.clone())
- .collect();
+ let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
self.stash_entries(to_stash, cx)
}
@@ -369,7 +369,7 @@ mod tests {
.unindent();
let buffer_id = BufferId::new(1).unwrap();
- let buffer = Buffer::new(0, buffer_id, test_content.to_string());
+ let buffer = Buffer::new(0, buffer_id, test_content);
let snapshot = buffer.snapshot();
let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -400,7 +400,7 @@ mod tests {
>>>>>>> "#
.unindent();
let buffer_id = BufferId::new(1).unwrap();
- let buffer = Buffer::new(0, buffer_id, test_content.to_string());
+ let buffer = Buffer::new(0, buffer_id, test_content);
let snapshot = buffer.snapshot();
let conflict_snapshot = ConflictSet::parse(&snapshot);
@@ -653,7 +653,7 @@ mod tests {
cx.run_until_parked();
conflict_set.update(cx, |conflict_set, _| {
- assert_eq!(conflict_set.has_conflict, false);
+ assert!(!conflict_set.has_conflict);
assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
});
@@ -42,8 +42,8 @@ impl<'a> GitTraversal<'a> {
// other_repo/
// .git/
// our_query.txt
- let mut query = path.ancestors();
- while let Some(query) = query.next() {
+ let query = path.ancestors();
+ for query in query {
let (_, snapshot) = self
.repo_root_to_snapshot
.range(Path::new("")..=query)
@@ -199,7 +199,7 @@ pub struct GitEntryRef<'a> {
}
impl GitEntryRef<'_> {
- pub fn to_owned(&self) -> GitEntry {
+ pub fn to_owned(self) -> GitEntry {
GitEntry {
entry: self.entry.clone(),
git_summary: self.git_summary,
@@ -244,7 +244,7 @@ impl ProjectItem for ImageItem {
}
fn project_path(&self, cx: &App) -> Option<ProjectPath> {
- Some(self.project_path(cx).clone())
+ Some(self.project_path(cx))
}
fn is_dirty(&self) -> bool {
@@ -375,7 +375,6 @@ impl ImageStore {
let (mut tx, rx) = postage::watch::channel();
entry.insert(rx.clone());
- let project_path = project_path.clone();
let load_image = self
.state
.open_image(project_path.path.clone(), worktree, cx);
@@ -446,15 +445,12 @@ impl ImageStore {
event: &ImageItemEvent,
cx: &mut Context<Self>,
) {
- match event {
- ImageItemEvent::FileHandleChanged => {
- if let Some(local) = self.state.as_local() {
- local.update(cx, |local, cx| {
- local.image_changed_file(image, cx);
- })
- }
- }
- _ => {}
+ if let ImageItemEvent::FileHandleChanged = event
+ && let Some(local) = self.state.as_local()
+ {
+ local.update(cx, |local, cx| {
+ local.image_changed_file(image, cx);
+ })
}
}
}
@@ -531,13 +527,10 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
impl LocalImageStore {
fn subscribe_to_worktree(&mut self, worktree: &Entity<Worktree>, cx: &mut Context<Self>) {
cx.subscribe(worktree, |this, worktree, event, cx| {
- if worktree.read(cx).is_local() {
- match event {
- worktree::Event::UpdatedEntries(changes) => {
- this.local_worktree_entries_changed(&worktree, changes, cx);
- }
- _ => {}
- }
+ if worktree.read(cx).is_local()
+ && let worktree::Event::UpdatedEntries(changes) = event
+ {
+ this.local_worktree_entries_changed(&worktree, changes, cx);
}
})
.detach();
@@ -50,8 +50,8 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt
}
}
-pub fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Url> {
- match lsp::Url::from_file_path(path) {
+pub fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Uri> {
+ match lsp::Uri::from_file_path(path) {
Ok(url) => Ok(url),
Err(()) => anyhow::bail!("Invalid file path provided to LSP request: {path:?}"),
}
@@ -2501,8 +2501,8 @@ pub(crate) fn parse_completion_text_edit(
};
Some(ParsedCompletionEdit {
- insert_range: insert_range,
- replace_range: replace_range,
+ insert_range,
+ replace_range,
new_text: new_text.clone(),
})
}
@@ -2595,11 +2595,9 @@ impl LspCommand for GetCodeActions {
server_id: LanguageServerId,
cx: AsyncApp,
) -> Result<Vec<CodeAction>> {
- let requested_kinds_set = if let Some(kinds) = self.kinds {
- Some(kinds.into_iter().collect::<HashSet<_>>())
- } else {
- None
- };
+ let requested_kinds_set = self
+ .kinds
+ .map(|kinds| kinds.into_iter().collect::<HashSet<_>>());
let language_server = cx.update(|cx| {
lsp_store
@@ -2739,7 +2737,7 @@ impl GetCodeActions {
Some(lsp::CodeActionProviderCapability::Options(CodeActionOptions {
code_action_kinds: Some(supported_action_kinds),
..
- })) => Some(supported_action_kinds.clone()),
+ })) => Some(supported_action_kinds),
_ => capabilities.code_action_kinds,
}
}
@@ -3137,7 +3135,7 @@ impl InlayHints {
Some(((uri, range), server_id)) => Some((
LanguageServerId(server_id as usize),
lsp::Location {
- uri: lsp::Url::parse(&uri)
+ uri: lsp::Uri::from_str(&uri)
.context("invalid uri in hint part {part:?}")?,
range: lsp::Range::new(
point_to_lsp(PointUtf16::new(
@@ -3446,8 +3444,7 @@ impl LspCommand for GetCodeLens {
capabilities
.server_capabilities
.code_lens_provider
- .as_ref()
- .is_some_and(|code_lens_options| code_lens_options.resolve_provider.unwrap_or(false))
+ .is_some()
}
fn to_lsp(
@@ -3736,7 +3733,7 @@ impl GetDocumentDiagnostics {
.filter_map(|diagnostics| {
Some(LspPullDiagnostics::Response {
server_id: LanguageServerId::from_proto(diagnostics.server_id),
- uri: lsp::Url::from_str(diagnostics.uri.as_str()).log_err()?,
+ uri: lsp::Uri::from_str(diagnostics.uri.as_str()).log_err()?,
diagnostics: if diagnostics.changed {
PulledDiagnostics::Unchanged {
result_id: diagnostics.result_id?,
@@ -3791,9 +3788,9 @@ impl GetDocumentDiagnostics {
start: point_to_lsp(PointUtf16::new(start.row, start.column)),
end: point_to_lsp(PointUtf16::new(end.row, end.column)),
},
- uri: lsp::Url::parse(&info.location_url.unwrap()).unwrap(),
+ uri: lsp::Uri::from_str(&info.location_url.unwrap()).unwrap(),
},
- message: info.message.clone(),
+ message: info.message,
}
})
.collect::<Vec<_>>();
@@ -3821,12 +3818,11 @@ impl GetDocumentDiagnostics {
_ => None,
},
code,
- code_description: match diagnostic.code_description {
- Some(code_description) => Some(CodeDescription {
- href: Some(lsp::Url::parse(&code_description).unwrap()),
+ code_description: diagnostic
+ .code_description
+ .map(|code_description| CodeDescription {
+ href: Some(lsp::Uri::from_str(&code_description).unwrap()),
}),
- None => None,
- },
related_information: Some(related_information),
tags: Some(tags),
source: diagnostic.source.clone(),
@@ -3965,7 +3961,7 @@ pub struct WorkspaceLspPullDiagnostics {
}
fn process_full_workspace_diagnostics_report(
- diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+ diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
server_id: LanguageServerId,
report: lsp::WorkspaceFullDocumentDiagnosticReport,
) {
@@ -3988,7 +3984,7 @@ fn process_full_workspace_diagnostics_report(
}
fn process_unchanged_workspace_diagnostics_report(
- diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+ diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
server_id: LanguageServerId,
report: lsp::WorkspaceUnchangedDocumentDiagnosticReport,
) {
@@ -4347,9 +4343,9 @@ impl LspCommand for GetDocumentColor {
}
fn process_related_documents(
- diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+ diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
server_id: LanguageServerId,
- documents: impl IntoIterator<Item = (lsp::Url, lsp::DocumentDiagnosticReportKind)>,
+ documents: impl IntoIterator<Item = (lsp::Uri, lsp::DocumentDiagnosticReportKind)>,
) {
for (url, report_kind) in documents {
match report_kind {
@@ -4364,9 +4360,9 @@ fn process_related_documents(
}
fn process_unchanged_diagnostics_report(
- diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+ diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
server_id: LanguageServerId,
- uri: lsp::Url,
+ uri: lsp::Uri,
report: lsp::UnchangedDocumentDiagnosticReport,
) {
let result_id = report.result_id;
@@ -4408,9 +4404,9 @@ fn process_unchanged_diagnostics_report(
}
fn process_full_diagnostics_report(
- diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+ diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
server_id: LanguageServerId,
- uri: lsp::Url,
+ uri: lsp::Uri,
report: lsp::FullDocumentDiagnosticReport,
) {
let result_id = report.result_id;
@@ -4491,9 +4487,8 @@ mod tests {
data: Some(json!({"detail": "test detail"})),
};
- let proto_diagnostic =
- GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic.clone())
- .expect("Failed to serialize diagnostic");
+ let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
+ .expect("Failed to serialize diagnostic");
let start = proto_diagnostic.start.unwrap();
let end = proto_diagnostic.end.unwrap();
@@ -4545,7 +4540,7 @@ mod tests {
fn test_related_information() {
let related_info = lsp::DiagnosticRelatedInformation {
location: lsp::Location {
- uri: lsp::Url::parse("file:///test.rs").unwrap(),
+ uri: lsp::Uri::from_str("file:///test.rs").unwrap(),
range: lsp::Range {
start: lsp::Position::new(1, 1),
end: lsp::Position::new(1, 5),
@@ -5,24 +5,28 @@
//! This module is split up into three distinct parts:
//! - [`LocalLspStore`], which is ran on the host machine (either project host or SSH host), that manages the lifecycle of language servers.
//! - [`RemoteLspStore`], which is ran on the remote machine (project guests) which is mostly about passing through the requests via RPC.
-//! The remote stores don't really care about which language server they're running against - they don't usually get to decide which language server is going to responsible for handling their request.
+//! The remote stores don't really care about which language server they're running against - they don't usually get to decide which language server is going to responsible for handling their request.
//! - [`LspStore`], which unifies the two under one consistent interface for interacting with language servers.
//!
//! Most of the interesting work happens at the local layer, as bulk of the complexity is with managing the lifecycle of language servers. The actual implementation of the LSP protocol is handled by [`lsp`] crate.
pub mod clangd_ext;
pub mod json_language_server_ext;
+pub mod log_store;
pub mod lsp_ext_command;
pub mod rust_analyzer_ext;
use crate::{
- CodeAction, ColorPresentation, Completion, CompletionResponse, CompletionSource,
- CoreCompletion, DocumentColor, Hover, InlayHint, LocationLink, LspAction, LspPullDiagnostics,
- ManifestProvidersStore, ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics,
- ResolveState, Symbol,
+ CodeAction, ColorPresentation, Completion, CompletionDisplayOptions, CompletionResponse,
+ CompletionSource, CoreCompletion, DocumentColor, Hover, InlayHint, LocationLink, LspAction,
+ LspPullDiagnostics, ManifestProvidersStore, Project, ProjectItem, ProjectPath,
+ ProjectTransaction, PulledDiagnostics, ResolveState, Symbol,
buffer_store::{BufferStore, BufferStoreEvent},
environment::ProjectEnvironment,
lsp_command::{self, *},
- lsp_store,
+ lsp_store::{
+ self,
+ log_store::{GlobalLogStore, LanguageServerKind},
+ },
manifest_tree::{
LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, ManifestQueryDelegate,
ManifestTree,
@@ -72,19 +76,19 @@ use lsp::{
AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticSeverity,
DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter,
FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher,
- LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId,
- LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType,
- OneOf, RenameFilesParams, SymbolKind, TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles,
- WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles,
+ LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions,
+ LanguageServerId, LanguageServerName, LanguageServerSelector, LspRequestFuture,
+ MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind,
+ TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams,
+ WorkspaceFolder, notification::DidRenameFiles,
};
use node_runtime::read_package_installed_version;
use parking_lot::Mutex;
use postage::{mpsc, sink::Sink, stream::Stream, watch};
use rand::prelude::*;
-
use rpc::{
AnyProtoClient,
- proto::{FromProto, ToProto},
+ proto::{FromProto, LspRequestId, LspRequestMessage as _, ToProto},
};
use serde::Serialize;
use settings::{Settings, SettingsLocation, SettingsStore};
@@ -92,7 +96,7 @@ use sha2::{Digest, Sha256};
use smol::channel::Sender;
use snippet::Snippet;
use std::{
- any::Any,
+ any::{Any, TypeId},
borrow::Cow,
cell::RefCell,
cmp::{Ordering, Reverse},
@@ -109,7 +113,7 @@ use std::{
};
use sum_tree::Dimensions;
use text::{Anchor, BufferId, LineEnding, OffsetRangeExt};
-use url::Url;
+
use util::{
ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into,
paths::{PathExt, SanitizedPath},
@@ -296,7 +300,7 @@ impl LocalLspStore {
let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
let server_id = self.languages.next_language_server_id();
- log::info!(
+ log::trace!(
"attempting to start language server {:?}, path: {root_path:?}, id: {server_id}",
adapter.name.0
);
@@ -309,7 +313,7 @@ impl LocalLspStore {
true,
cx,
);
- let pending_workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default();
+ let pending_workspace_folders: Arc<Mutex<BTreeSet<Uri>>> = Default::default();
let pending_server = cx.spawn({
let adapter = adapter.clone();
@@ -550,7 +554,7 @@ impl LocalLspStore {
if let Some(settings) = settings.binary.as_ref() {
if let Some(arguments) = &settings.arguments {
- binary.arguments = arguments.into_iter().map(Into::into).collect();
+ binary.arguments = arguments.iter().map(Into::into).collect();
}
if let Some(env) = &settings.env {
shell_env.extend(env.iter().map(|(k, v)| (k.clone(), v.clone())));
@@ -917,7 +921,7 @@ impl LocalLspStore {
message: params.message,
actions: vec![],
response_channel: tx,
- lsp_name: name.clone(),
+ lsp_name: name,
};
let _ = this.update(&mut cx, |_, cx| {
@@ -976,7 +980,9 @@ impl LocalLspStore {
this.update(&mut cx, |_, cx| {
cx.emit(LspStoreEvent::LanguageServerLog(
server_id,
- LanguageServerLogType::Trace(params.verbose),
+ LanguageServerLogType::Trace {
+ verbose_info: params.verbose,
+ },
params.message,
));
})
@@ -1060,8 +1066,8 @@ impl LocalLspStore {
};
let delegate: Arc<dyn ManifestDelegate> =
Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot()));
- let root = self
- .lsp_tree
+
+ self.lsp_tree
.get(
project_path,
language.name(),
@@ -1069,9 +1075,7 @@ impl LocalLspStore {
&delegate,
cx,
)
- .collect::<Vec<_>>();
-
- root
+ .collect::<Vec<_>>()
}
fn language_server_ids_for_buffer(
@@ -2397,9 +2401,10 @@ impl LocalLspStore {
let server_id = server_node.server_id_or_init(|disposition| {
let path = &disposition.path;
- let server_id = {
+
+ {
let uri =
- Url::from_file_path(worktree.read(cx).abs_path().join(&path.path));
+ Uri::from_file_path(worktree.read(cx).abs_path().join(&path.path));
let server_id = self.get_or_insert_language_server(
&worktree,
@@ -2415,9 +2420,7 @@ impl LocalLspStore {
state.add_workspace_folder(uri);
};
server_id
- };
-
- server_id
+ }
})?;
let server_state = self.language_servers.get(&server_id)?;
if let LanguageServerState::Running {
@@ -2561,11 +2564,8 @@ impl LocalLspStore {
None => return,
};
- let Ok(file_url) = lsp::Url::from_file_path(old_path.as_path()) else {
- debug_panic!(
- "`{}` is not parseable as an URI",
- old_path.to_string_lossy()
- );
+ let Ok(file_url) = lsp::Uri::from_file_path(old_path.as_path()) else {
+ debug_panic!("{old_path:?} is not parseable as an URI");
return;
};
self.unregister_buffer_from_language_servers(buffer, &file_url, cx);
@@ -2574,7 +2574,7 @@ impl LocalLspStore {
pub(crate) fn unregister_buffer_from_language_servers(
&mut self,
buffer: &Entity<Buffer>,
- file_url: &lsp::Url,
+ file_url: &lsp::Uri,
cx: &mut App,
) {
buffer.update(cx, |buffer, cx| {
@@ -2957,7 +2957,7 @@ impl LocalLspStore {
.update(cx, |this, cx| {
let path = buffer_to_edit.read(cx).project_path(cx);
let active_entry = this.active_entry;
- let is_active_entry = path.clone().is_some_and(|project_path| {
+ let is_active_entry = path.is_some_and(|project_path| {
this.worktree_store
.read(cx)
.entry_for_path(&project_path, cx)
@@ -3047,16 +3047,14 @@ impl LocalLspStore {
buffer.edit([(range, text)], None, cx);
}
- let transaction = buffer.end_transaction(cx).and_then(|transaction_id| {
+ buffer.end_transaction(cx).and_then(|transaction_id| {
if push_to_history {
buffer.finalize_last_transaction();
buffer.get_transaction(transaction_id).cloned()
} else {
buffer.forget_transaction(transaction_id)
}
- });
-
- transaction
+ })
})?;
if let Some(transaction) = transaction {
project_transaction.0.insert(buffer_to_edit, transaction);
@@ -3184,7 +3182,7 @@ impl LocalLspStore {
} else {
let (path, pattern) = match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => {
- let watcher_path = SanitizedPath::from(s);
+ let watcher_path = SanitizedPath::new(s);
let path = glob_literal_prefix(watcher_path.as_path());
let pattern = watcher_path
.as_path()
@@ -3276,7 +3274,7 @@ impl LocalLspStore {
let worktree_root_path = tree.abs_path();
match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => {
- let watcher_path = SanitizedPath::from(s);
+ let watcher_path = SanitizedPath::new(s);
let relative = watcher_path
.as_path()
.strip_prefix(&worktree_root_path)
@@ -3486,15 +3484,16 @@ pub struct LspStore {
buffer_store: Entity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
pub languages: Arc<LanguageRegistry>,
- language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
+ pub language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
active_entry: Option<ProjectEntryId>,
_maintain_workspace_config: (Task<Result<()>>, watch::Sender<()>),
_maintain_buffer_languages: Task<()>,
diagnostic_summaries:
HashMap<WorktreeId, HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>>,
- pub(super) lsp_server_capabilities: HashMap<LanguageServerId, lsp::ServerCapabilities>,
+ pub lsp_server_capabilities: HashMap<LanguageServerId, lsp::ServerCapabilities>,
lsp_document_colors: HashMap<BufferId, DocumentColorData>,
lsp_code_lens: HashMap<BufferId, CodeLensData>,
+ running_lsp_requests: HashMap<TypeId, (Global, HashMap<LspRequestId, Task<()>>)>,
}
#[derive(Debug, Default, Clone)]
@@ -3504,7 +3503,7 @@ pub struct DocumentColors {
}
type DocumentColorTask = Shared<Task<std::result::Result<DocumentColors, Arc<anyhow::Error>>>>;
-type CodeLensTask = Shared<Task<std::result::Result<Vec<CodeAction>, Arc<anyhow::Error>>>>;
+type CodeLensTask = Shared<Task<std::result::Result<Option<Vec<CodeAction>>, Arc<anyhow::Error>>>>;
#[derive(Debug, Default)]
struct DocumentColorData {
@@ -3568,6 +3567,7 @@ pub struct LanguageServerStatus {
pub pending_work: BTreeMap<String, LanguageServerProgress>,
pub has_pending_diagnostic_updates: bool,
progress_tokens: HashSet<String>,
+ pub worktree: Option<WorktreeId>,
}
#[derive(Clone, Debug)]
@@ -3584,6 +3584,8 @@ struct CoreSymbol {
impl LspStore {
pub fn init(client: &AnyProtoClient) {
+ client.add_entity_request_handler(Self::handle_lsp_query);
+ client.add_entity_message_handler(Self::handle_lsp_query_response);
client.add_entity_request_handler(Self::handle_multi_lsp_query);
client.add_entity_request_handler(Self::handle_restart_language_servers);
client.add_entity_request_handler(Self::handle_stop_language_servers);
@@ -3758,11 +3760,12 @@ impl LspStore {
worktree_store,
languages: languages.clone(),
language_server_statuses: Default::default(),
- nonce: StdRng::from_entropy().r#gen(),
+ nonce: StdRng::from_os_rng().random(),
diagnostic_summaries: HashMap::default(),
lsp_server_capabilities: HashMap::default(),
lsp_document_colors: HashMap::default(),
lsp_code_lens: HashMap::default(),
+ running_lsp_requests: HashMap::default(),
active_entry: None,
_maintain_workspace_config,
_maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx),
@@ -3819,11 +3822,12 @@ impl LspStore {
worktree_store,
languages: languages.clone(),
language_server_statuses: Default::default(),
- nonce: StdRng::from_entropy().r#gen(),
+ nonce: StdRng::from_os_rng().random(),
diagnostic_summaries: HashMap::default(),
lsp_server_capabilities: HashMap::default(),
lsp_document_colors: HashMap::default(),
lsp_code_lens: HashMap::default(),
+ running_lsp_requests: HashMap::default(),
active_entry: None,
_maintain_workspace_config,
@@ -3928,10 +3932,8 @@ impl LspStore {
event: &ToolchainStoreEvent,
_: &mut Context<Self>,
) {
- match event {
- ToolchainStoreEvent::ToolchainActivated { .. } => {
- self.request_workspace_config_refresh()
- }
+ if let ToolchainStoreEvent::ToolchainActivated = event {
+ self.request_workspace_config_refresh()
}
}
@@ -4370,13 +4372,11 @@ impl LspStore {
if let Some((client, downstream_project_id)) = self.downstream_client.clone()
&& let Some(diangostic_summaries) = self.diagnostic_summaries.get(&worktree.id())
{
- let mut summaries = diangostic_summaries
- .into_iter()
- .flat_map(|(path, summaries)| {
- summaries
- .into_iter()
- .map(|(server_id, summary)| summary.to_proto(*server_id, path))
- });
+ let mut summaries = diangostic_summaries.iter().flat_map(|(path, summaries)| {
+ summaries
+ .iter()
+ .map(|(server_id, summary)| summary.to_proto(*server_id, path))
+ });
if let Some(summary) = summaries.next() {
client
.send(proto::UpdateDiagnosticSummary {
@@ -4390,8 +4390,6 @@ impl LspStore {
}
}
- // TODO: remove MultiLspQuery: instead, the proto handler should pick appropriate server(s)
- // Then, use `send_lsp_proto_request` or analogue for most of the LSP proto requests and inline this check inside
fn is_capable_for_proto_request<R>(
&self,
buffer: &Entity<Buffer>,
@@ -4564,7 +4562,7 @@ impl LspStore {
anyhow::anyhow!(message)
})?;
- let response = request
+ request
.response_from_lsp(
response,
this.upgrade().context("no app context")?,
@@ -4572,8 +4570,7 @@ impl LspStore {
language_server.server_id(),
cx.clone(),
)
- .await;
- response
+ .await
})
}
@@ -4649,7 +4646,6 @@ impl LspStore {
Some((file, language, raw_buffer.remote_id()))
})
.sorted_by_key(|(file, _, _)| Reverse(file.worktree.read(cx).is_visible()));
-
for (file, language, buffer_id) in buffers {
let worktree_id = file.worktree_id(cx);
let Some(worktree) = local
@@ -4691,11 +4687,10 @@ impl LspStore {
cx,
)
.collect::<Vec<_>>();
-
for node in nodes {
let server_id = node.server_id_or_init(|disposition| {
let path = &disposition.path;
- let uri = Url::from_file_path(worktree_root.join(&path.path));
+ let uri = Uri::from_file_path(worktree_root.join(&path.path));
let key = LanguageServerSeed {
worktree_id,
name: disposition.server_name.clone(),
@@ -4853,7 +4848,7 @@ impl LspStore {
push_to_history: bool,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<ProjectTransaction>> {
- if let Some(_) = self.as_local() {
+ if self.as_local().is_some() {
cx.spawn(async move |lsp_store, cx| {
let buffers = buffers.into_iter().collect::<Vec<_>>();
let result = LocalLspStore::execute_code_action_kind_locally(
@@ -5243,154 +5238,130 @@ impl LspStore {
pub fn definitions(
&mut self,
- buffer_handle: &Entity<Buffer>,
+ buffer: &Entity<Buffer>,
position: PointUtf16,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
+ ) -> Task<Result<Option<Vec<LocationLink>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetDefinitions { position };
- if !self.is_capable_for_proto_request(buffer_handle, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ if !self.is_capable_for_proto_request(buffer, &request, cx) {
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer_handle.read(cx).remote_id().into(),
- version: serialize_version(&buffer_handle.read(cx).version()),
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetDefinition(
- request.to_proto(project_id, buffer_handle.read(cx)),
- )),
- });
- let buffer = buffer_handle.clone();
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
+ let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetDefinitionResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|definitions_response| {
- GetDefinitions { position }.response_from_proto(
- definitions_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
+ };
+ let actions = join_all(responses.payload.into_iter().map(|response| {
+ GetDefinitions { position }.response_from_proto(
+ response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
.await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .dedup()
- .collect())
+ Ok(Some(
+ actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect(),
+ ))
})
} else {
let definitions_task = self.request_multiple_lsp_locally(
- buffer_handle,
+ buffer,
Some(position),
GetDefinitions { position },
cx,
);
cx.background_spawn(async move {
- Ok(definitions_task
- .await
- .into_iter()
- .flat_map(|(_, definitions)| definitions)
- .dedup()
- .collect())
+ Ok(Some(
+ definitions_task
+ .await
+ .into_iter()
+ .flat_map(|(_, definitions)| definitions)
+ .dedup()
+ .collect(),
+ ))
})
}
}
pub fn declarations(
&mut self,
- buffer_handle: &Entity<Buffer>,
+ buffer: &Entity<Buffer>,
position: PointUtf16,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
+ ) -> Task<Result<Option<Vec<LocationLink>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetDeclarations { position };
- if !self.is_capable_for_proto_request(buffer_handle, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ if !self.is_capable_for_proto_request(buffer, &request, cx) {
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer_handle.read(cx).remote_id().into(),
- version: serialize_version(&buffer_handle.read(cx).version()),
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetDeclaration(
- request.to_proto(project_id, buffer_handle.read(cx)),
- )),
- });
- let buffer = buffer_handle.clone();
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
+ let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetDeclarationResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|declarations_response| {
- GetDeclarations { position }.response_from_proto(
- declarations_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
+ };
+ let actions = join_all(responses.payload.into_iter().map(|response| {
+ GetDeclarations { position }.response_from_proto(
+ response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
.await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .dedup()
- .collect())
+ Ok(Some(
+ actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect(),
+ ))
})
} else {
let declarations_task = self.request_multiple_lsp_locally(
- buffer_handle,
+ buffer,
Some(position),
GetDeclarations { position },
cx,
);
cx.background_spawn(async move {
- Ok(declarations_task
- .await
- .into_iter()
- .flat_map(|(_, declarations)| declarations)
- .dedup()
- .collect())
+ Ok(Some(
+ declarations_task
+ .await
+ .into_iter()
+ .flat_map(|(_, declarations)| declarations)
+ .dedup()
+ .collect(),
+ ))
})
}
}
@@ -5400,59 +5371,45 @@ impl LspStore {
buffer: &Entity<Buffer>,
position: PointUtf16,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
+ ) -> Task<Result<Option<Vec<LocationLink>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetTypeDefinitions { position };
if !self.is_capable_for_proto_request(buffer, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer.read(cx).remote_id().into(),
- version: serialize_version(&buffer.read(cx).version()),
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetTypeDefinition(
- request.to_proto(project_id, buffer.read(cx)),
- )),
- });
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetTypeDefinitionResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|type_definitions_response| {
- GetTypeDefinitions { position }.response_from_proto(
- type_definitions_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
+ };
+ let actions = join_all(responses.payload.into_iter().map(|response| {
+ GetTypeDefinitions { position }.response_from_proto(
+ response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
.await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .dedup()
- .collect())
+ Ok(Some(
+ actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect(),
+ ))
})
} else {
let type_definitions_task = self.request_multiple_lsp_locally(
@@ -5462,12 +5419,14 @@ impl LspStore {
cx,
);
cx.background_spawn(async move {
- Ok(type_definitions_task
- .await
- .into_iter()
- .flat_map(|(_, type_definitions)| type_definitions)
- .dedup()
- .collect())
+ Ok(Some(
+ type_definitions_task
+ .await
+ .into_iter()
+ .flat_map(|(_, type_definitions)| type_definitions)
+ .dedup()
+ .collect(),
+ ))
})
}
}
@@ -5477,59 +5436,45 @@ impl LspStore {
buffer: &Entity<Buffer>,
position: PointUtf16,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
+ ) -> Task<Result<Option<Vec<LocationLink>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetImplementations { position };
if !self.is_capable_for_proto_request(buffer, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer.read(cx).remote_id().into(),
- version: serialize_version(&buffer.read(cx).version()),
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetImplementation(
- request.to_proto(project_id, buffer.read(cx)),
- )),
- });
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetImplementationResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|implementations_response| {
- GetImplementations { position }.response_from_proto(
- implementations_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
+ };
+ let actions = join_all(responses.payload.into_iter().map(|response| {
+ GetImplementations { position }.response_from_proto(
+ response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
.await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .dedup()
- .collect())
+ Ok(Some(
+ actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect(),
+ ))
})
} else {
let implementations_task = self.request_multiple_lsp_locally(
@@ -5539,12 +5484,14 @@ impl LspStore {
cx,
);
cx.background_spawn(async move {
- Ok(implementations_task
- .await
- .into_iter()
- .flat_map(|(_, implementations)| implementations)
- .dedup()
- .collect())
+ Ok(Some(
+ implementations_task
+ .await
+ .into_iter()
+ .flat_map(|(_, implementations)| implementations)
+ .dedup()
+ .collect(),
+ ))
})
}
}
@@ -5554,59 +5501,44 @@ impl LspStore {
buffer: &Entity<Buffer>,
position: PointUtf16,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<Location>>> {
+ ) -> Task<Result<Option<Vec<Location>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetReferences { position };
if !self.is_capable_for_proto_request(buffer, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer.read(cx).remote_id().into(),
- version: serialize_version(&buffer.read(cx).version()),
+
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetReferences(
- request.to_proto(project_id, buffer.read(cx)),
- )),
- });
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
+ };
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetReferencesResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|references_response| {
- GetReferences { position }.response_from_proto(
- references_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
- .await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .dedup()
- .collect())
+ let locations = join_all(responses.payload.into_iter().map(|lsp_response| {
+ GetReferences { position }.response_from_proto(
+ lsp_response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
+ .await
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect();
+ Ok(Some(locations))
})
} else {
let references_task = self.request_multiple_lsp_locally(
@@ -5616,12 +5548,14 @@ impl LspStore {
cx,
);
cx.background_spawn(async move {
- Ok(references_task
- .await
- .into_iter()
- .flat_map(|(_, references)| references)
- .dedup()
- .collect())
+ Ok(Some(
+ references_task
+ .await
+ .into_iter()
+ .flat_map(|(_, references)| references)
+ .dedup()
+ .collect(),
+ ))
})
}
}
@@ -5632,82 +5566,67 @@ impl LspStore {
range: Range<Anchor>,
kinds: Option<Vec<CodeActionKind>>,
cx: &mut Context<Self>,
- ) -> Task<Result<Vec<CodeAction>>> {
+ ) -> Task<Result<Option<Vec<CodeAction>>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request = GetCodeActions {
range: range.clone(),
kinds: kinds.clone(),
};
if !self.is_capable_for_proto_request(buffer, &request, cx) {
- return Task::ready(Ok(Vec::new()));
+ return Task::ready(Ok(None));
}
- let request_task = upstream_client.request(proto::MultiLspQuery {
- buffer_id: buffer.read(cx).remote_id().into(),
- version: serialize_version(&buffer.read(cx).version()),
+ let request_task = upstream_client.request_lsp(
project_id,
- strategy: Some(proto::multi_lsp_query::Strategy::All(
- proto::AllLanguageServers {},
- )),
- request: Some(proto::multi_lsp_query::Request::GetCodeActions(
- request.to_proto(project_id, buffer.read(cx)),
- )),
- });
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(project_id, buffer.read(cx)),
+ );
let buffer = buffer.clone();
cx.spawn(async move |weak_project, cx| {
let Some(project) = weak_project.upgrade() else {
- return Ok(Vec::new());
+ return Ok(None);
};
- let responses = request_task.await?.responses;
- let actions = join_all(
- responses
- .into_iter()
- .filter_map(|lsp_response| match lsp_response.response? {
- proto::lsp_response::Response::GetCodeActionsResponse(response) => {
- Some(response)
- }
- unexpected => {
- debug_panic!("Unexpected response: {unexpected:?}");
- None
- }
- })
- .map(|code_actions_response| {
- GetCodeActions {
- range: range.clone(),
- kinds: kinds.clone(),
- }
- .response_from_proto(
- code_actions_response,
- project.clone(),
- buffer.clone(),
- cx.clone(),
- )
- }),
- )
+ let Some(responses) = request_task.await? else {
+ return Ok(None);
+ };
+ let actions = join_all(responses.payload.into_iter().map(|response| {
+ GetCodeActions {
+ range: range.clone(),
+ kinds: kinds.clone(),
+ }
+ .response_from_proto(
+ response.response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }))
.await;
- Ok(actions
- .into_iter()
- .collect::<Result<Vec<Vec<_>>>>()?
- .into_iter()
- .flatten()
- .collect())
- })
- } else {
- let all_actions_task = self.request_multiple_lsp_locally(
+ Ok(Some(
+ actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .collect(),
+ ))
+ })
+ } else {
+ let all_actions_task = self.request_multiple_lsp_locally(
buffer,
Some(range.start),
- GetCodeActions {
- range: range.clone(),
- kinds: kinds.clone(),
- },
+ GetCodeActions { range, kinds },
cx,
);
cx.background_spawn(async move {
- Ok(all_actions_task
- .await
- .into_iter()
- .flat_map(|(_, actions)| actions)
- .collect())
+ Ok(Some(
+ all_actions_task
+ .await
+ .into_iter()
+ .flat_map(|(_, actions)| actions)
+ .collect(),
+ ))
})
}
}
@@ -58,7 +58,7 @@ pub fn register_notifications(
language_server
.on_notification::<InactiveRegions, _>({
- let adapter = adapter.clone();
+ let adapter = adapter;
let this = lsp_store;
move |params: InactiveRegionsParams, cx| {
@@ -0,0 +1,712 @@
+use std::{collections::VecDeque, sync::Arc};
+
+use collections::HashMap;
+use futures::{StreamExt, channel::mpsc};
+use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, Subscription, WeakEntity};
+use lsp::{
+ IoKind, LanguageServer, LanguageServerId, LanguageServerName, LanguageServerSelector,
+ MessageType, TraceValue,
+};
+use rpc::proto;
+use settings::WorktreeId;
+
+use crate::{LanguageServerLogType, LspStore, Project, ProjectItem as _};
+
+const SEND_LINE: &str = "\n// Send:";
+const RECEIVE_LINE: &str = "\n// Receive:";
+const MAX_STORED_LOG_ENTRIES: usize = 2000;
+
+pub fn init(on_headless_host: bool, cx: &mut App) -> Entity<LogStore> {
+ let log_store = cx.new(|cx| LogStore::new(on_headless_host, cx));
+ cx.set_global(GlobalLogStore(log_store.clone()));
+ log_store
+}
+
+pub struct GlobalLogStore(pub Entity<LogStore>);
+
+impl Global for GlobalLogStore {}
+
+#[derive(Debug)]
+pub enum Event {
+ NewServerLogEntry {
+ id: LanguageServerId,
+ kind: LanguageServerLogType,
+ text: String,
+ },
+}
+
+impl EventEmitter<Event> for LogStore {}
+
+pub struct LogStore {
+ on_headless_host: bool,
+ projects: HashMap<WeakEntity<Project>, ProjectState>,
+ pub copilot_log_subscription: Option<lsp::Subscription>,
+ pub language_servers: HashMap<LanguageServerId, LanguageServerState>,
+ io_tx: mpsc::UnboundedSender<(LanguageServerId, IoKind, String)>,
+}
+
+struct ProjectState {
+ _subscriptions: [Subscription; 2],
+}
+
+pub trait Message: AsRef<str> {
+ type Level: Copy + std::fmt::Debug;
+ fn should_include(&self, _: Self::Level) -> bool {
+ true
+ }
+}
+
+#[derive(Debug)]
+pub struct LogMessage {
+ message: String,
+ typ: MessageType,
+}
+
+impl AsRef<str> for LogMessage {
+ fn as_ref(&self) -> &str {
+ &self.message
+ }
+}
+
+impl Message for LogMessage {
+ type Level = MessageType;
+
+ fn should_include(&self, level: Self::Level) -> bool {
+ match (self.typ, level) {
+ (MessageType::ERROR, _) => true,
+ (_, MessageType::ERROR) => false,
+ (MessageType::WARNING, _) => true,
+ (_, MessageType::WARNING) => false,
+ (MessageType::INFO, _) => true,
+ (_, MessageType::INFO) => false,
+ _ => true,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct TraceMessage {
+ message: String,
+ is_verbose: bool,
+}
+
+impl AsRef<str> for TraceMessage {
+ fn as_ref(&self) -> &str {
+ &self.message
+ }
+}
+
+impl Message for TraceMessage {
+ type Level = TraceValue;
+
+ fn should_include(&self, level: Self::Level) -> bool {
+ match level {
+ TraceValue::Off => false,
+ TraceValue::Messages => !self.is_verbose,
+ TraceValue::Verbose => true,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct RpcMessage {
+ message: String,
+}
+
+impl AsRef<str> for RpcMessage {
+ fn as_ref(&self) -> &str {
+ &self.message
+ }
+}
+
+impl Message for RpcMessage {
+ type Level = ();
+}
+
+pub struct LanguageServerState {
+ pub name: Option<LanguageServerName>,
+ pub worktree_id: Option<WorktreeId>,
+ pub kind: LanguageServerKind,
+ log_messages: VecDeque<LogMessage>,
+ trace_messages: VecDeque<TraceMessage>,
+ pub rpc_state: Option<LanguageServerRpcState>,
+ pub trace_level: TraceValue,
+ pub log_level: MessageType,
+ io_logs_subscription: Option<lsp::Subscription>,
+ pub toggled_log_kind: Option<LogKind>,
+}
+
+impl std::fmt::Debug for LanguageServerState {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("LanguageServerState")
+ .field("name", &self.name)
+ .field("worktree_id", &self.worktree_id)
+ .field("kind", &self.kind)
+ .field("log_messages", &self.log_messages)
+ .field("trace_messages", &self.trace_messages)
+ .field("rpc_state", &self.rpc_state)
+ .field("trace_level", &self.trace_level)
+ .field("log_level", &self.log_level)
+ .field("toggled_log_kind", &self.toggled_log_kind)
+ .finish_non_exhaustive()
+ }
+}
+
+#[derive(PartialEq, Clone)]
+pub enum LanguageServerKind {
+ Local { project: WeakEntity<Project> },
+ Remote { project: WeakEntity<Project> },
+ LocalSsh { lsp_store: WeakEntity<LspStore> },
+ Global,
+}
+
+impl std::fmt::Debug for LanguageServerKind {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ LanguageServerKind::Local { .. } => write!(f, "LanguageServerKind::Local"),
+ LanguageServerKind::Remote { .. } => write!(f, "LanguageServerKind::Remote"),
+ LanguageServerKind::LocalSsh { .. } => write!(f, "LanguageServerKind::LocalSsh"),
+ LanguageServerKind::Global => write!(f, "LanguageServerKind::Global"),
+ }
+ }
+}
+
+impl LanguageServerKind {
+ pub fn project(&self) -> Option<&WeakEntity<Project>> {
+ match self {
+ Self::Local { project } => Some(project),
+ Self::Remote { project } => Some(project),
+ Self::LocalSsh { .. } => None,
+ Self::Global { .. } => None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct LanguageServerRpcState {
+ pub rpc_messages: VecDeque<RpcMessage>,
+ last_message_kind: Option<MessageKind>,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum MessageKind {
+ Send,
+ Receive,
+}
+
+#[derive(Clone, Copy, Debug, Default, PartialEq)]
+pub enum LogKind {
+ Rpc,
+ Trace,
+ #[default]
+ Logs,
+ ServerInfo,
+}
+
+impl LogKind {
+ pub fn from_server_log_type(log_type: &LanguageServerLogType) -> Self {
+ match log_type {
+ LanguageServerLogType::Log(_) => Self::Logs,
+ LanguageServerLogType::Trace { .. } => Self::Trace,
+ LanguageServerLogType::Rpc { .. } => Self::Rpc,
+ }
+ }
+}
+
+impl LogStore {
+ pub fn new(on_headless_host: bool, cx: &mut Context<Self>) -> Self {
+ let (io_tx, mut io_rx) = mpsc::unbounded();
+
+ let log_store = Self {
+ projects: HashMap::default(),
+ language_servers: HashMap::default(),
+ copilot_log_subscription: None,
+ on_headless_host,
+ io_tx,
+ };
+ cx.spawn(async move |log_store, cx| {
+ while let Some((server_id, io_kind, message)) = io_rx.next().await {
+ if let Some(log_store) = log_store.upgrade() {
+ log_store.update(cx, |log_store, cx| {
+ log_store.on_io(server_id, io_kind, &message, cx);
+ })?;
+ }
+ }
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+
+ log_store
+ }
+
+ pub fn add_project(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
+ let weak_project = project.downgrade();
+ self.projects.insert(
+ project.downgrade(),
+ ProjectState {
+ _subscriptions: [
+ cx.observe_release(project, move |this, _, _| {
+ this.projects.remove(&weak_project);
+ this.language_servers
+ .retain(|_, state| state.kind.project() != Some(&weak_project));
+ }),
+ cx.subscribe(project, move |log_store, project, event, cx| {
+ let server_kind = if project.read(cx).is_local() {
+ LanguageServerKind::Local {
+ project: project.downgrade(),
+ }
+ } else {
+ LanguageServerKind::Remote {
+ project: project.downgrade(),
+ }
+ };
+ match event {
+ crate::Event::LanguageServerAdded(id, name, worktree_id) => {
+ log_store.add_language_server(
+ server_kind,
+ *id,
+ Some(name.clone()),
+ *worktree_id,
+ project
+ .read(cx)
+ .lsp_store()
+ .read(cx)
+ .language_server_for_id(*id),
+ cx,
+ );
+ }
+ crate::Event::LanguageServerBufferRegistered {
+ server_id,
+ buffer_id,
+ name,
+ ..
+ } => {
+ let worktree_id = project
+ .read(cx)
+ .buffer_for_id(*buffer_id, cx)
+ .and_then(|buffer| {
+ Some(buffer.read(cx).project_path(cx)?.worktree_id)
+ });
+ let name = name.clone().or_else(|| {
+ project
+ .read(cx)
+ .lsp_store()
+ .read(cx)
+ .language_server_statuses
+ .get(server_id)
+ .map(|status| status.name.clone())
+ });
+ log_store.add_language_server(
+ server_kind,
+ *server_id,
+ name,
+ worktree_id,
+ None,
+ cx,
+ );
+ }
+ crate::Event::LanguageServerRemoved(id) => {
+ log_store.remove_language_server(*id, cx);
+ }
+ crate::Event::LanguageServerLog(id, typ, message) => {
+ log_store.add_language_server(
+ server_kind,
+ *id,
+ None,
+ None,
+ None,
+ cx,
+ );
+ match typ {
+ crate::LanguageServerLogType::Log(typ) => {
+ log_store.add_language_server_log(*id, *typ, message, cx);
+ }
+ crate::LanguageServerLogType::Trace { verbose_info } => {
+ log_store.add_language_server_trace(
+ *id,
+ message,
+ verbose_info.clone(),
+ cx,
+ );
+ }
+ crate::LanguageServerLogType::Rpc { received } => {
+ let kind = if *received {
+ MessageKind::Receive
+ } else {
+ MessageKind::Send
+ };
+ log_store.add_language_server_rpc(*id, kind, message, cx);
+ }
+ }
+ }
+ crate::Event::ToggleLspLogs {
+ server_id,
+ enabled,
+ toggled_log_kind,
+ } => {
+ if let Some(server_state) =
+ log_store.get_language_server_state(*server_id)
+ {
+ if *enabled {
+ server_state.toggled_log_kind = Some(*toggled_log_kind);
+ } else {
+ server_state.toggled_log_kind = None;
+ }
+ }
+ if LogKind::Rpc == *toggled_log_kind {
+ if *enabled {
+ log_store.enable_rpc_trace_for_language_server(*server_id);
+ } else {
+ log_store.disable_rpc_trace_for_language_server(*server_id);
+ }
+ }
+ }
+ _ => {}
+ }
+ }),
+ ],
+ },
+ );
+ }
+
+ pub fn get_language_server_state(
+ &mut self,
+ id: LanguageServerId,
+ ) -> Option<&mut LanguageServerState> {
+ self.language_servers.get_mut(&id)
+ }
+
+ pub fn add_language_server(
+ &mut self,
+ kind: LanguageServerKind,
+ server_id: LanguageServerId,
+ name: Option<LanguageServerName>,
+ worktree_id: Option<WorktreeId>,
+ server: Option<Arc<LanguageServer>>,
+ cx: &mut Context<Self>,
+ ) -> Option<&mut LanguageServerState> {
+ let server_state = self.language_servers.entry(server_id).or_insert_with(|| {
+ cx.notify();
+ LanguageServerState {
+ name: None,
+ worktree_id: None,
+ kind,
+ rpc_state: None,
+ log_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
+ trace_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
+ trace_level: TraceValue::Off,
+ log_level: MessageType::LOG,
+ io_logs_subscription: None,
+ toggled_log_kind: None,
+ }
+ });
+
+ if let Some(name) = name {
+ server_state.name = Some(name);
+ }
+ if let Some(worktree_id) = worktree_id {
+ server_state.worktree_id = Some(worktree_id);
+ }
+
+ if let Some(server) = server.filter(|_| server_state.io_logs_subscription.is_none()) {
+ let io_tx = self.io_tx.clone();
+ let server_id = server.server_id();
+ server_state.io_logs_subscription = Some(server.on_io(move |io_kind, message| {
+ io_tx
+ .unbounded_send((server_id, io_kind, message.to_string()))
+ .ok();
+ }));
+ }
+
+ Some(server_state)
+ }
+
+ pub fn add_language_server_log(
+ &mut self,
+ id: LanguageServerId,
+ typ: MessageType,
+ message: &str,
+ cx: &mut Context<Self>,
+ ) -> Option<()> {
+ let store_logs = !self.on_headless_host;
+ let language_server_state = self.get_language_server_state(id)?;
+
+ let log_lines = &mut language_server_state.log_messages;
+ let message = message.trim_end().to_string();
+ if !store_logs {
+ // Send all messages regardless of the visibility in case of not storing, to notify the receiver anyway
+ self.emit_event(
+ Event::NewServerLogEntry {
+ id,
+ kind: LanguageServerLogType::Log(typ),
+ text: message,
+ },
+ cx,
+ );
+ } else if let Some(new_message) = Self::push_new_message(
+ log_lines,
+ LogMessage { message, typ },
+ language_server_state.log_level,
+ ) {
+ self.emit_event(
+ Event::NewServerLogEntry {
+ id,
+ kind: LanguageServerLogType::Log(typ),
+ text: new_message,
+ },
+ cx,
+ );
+ }
+ Some(())
+ }
+
+ fn add_language_server_trace(
+ &mut self,
+ id: LanguageServerId,
+ message: &str,
+ verbose_info: Option<String>,
+ cx: &mut Context<Self>,
+ ) -> Option<()> {
+ let store_logs = !self.on_headless_host;
+ let language_server_state = self.get_language_server_state(id)?;
+
+ let log_lines = &mut language_server_state.trace_messages;
+ if !store_logs {
+ // Send all messages regardless of the visibility in case of not storing, to notify the receiver anyway
+ self.emit_event(
+ Event::NewServerLogEntry {
+ id,
+ kind: LanguageServerLogType::Trace { verbose_info },
+ text: message.trim().to_string(),
+ },
+ cx,
+ );
+ } else if let Some(new_message) = Self::push_new_message(
+ log_lines,
+ TraceMessage {
+ message: message.trim().to_string(),
+ is_verbose: false,
+ },
+ TraceValue::Messages,
+ ) {
+ if let Some(verbose_message) = verbose_info.as_ref() {
+ Self::push_new_message(
+ log_lines,
+ TraceMessage {
+ message: verbose_message.clone(),
+ is_verbose: true,
+ },
+ TraceValue::Verbose,
+ );
+ }
+ self.emit_event(
+ Event::NewServerLogEntry {
+ id,
+ kind: LanguageServerLogType::Trace { verbose_info },
+ text: new_message,
+ },
+ cx,
+ );
+ }
+ Some(())
+ }
+
+ fn push_new_message<T: Message>(
+ log_lines: &mut VecDeque<T>,
+ message: T,
+ current_severity: <T as Message>::Level,
+ ) -> Option<String> {
+ while log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
+ log_lines.pop_front();
+ }
+ let visible = message.should_include(current_severity);
+
+ let visible_message = visible.then(|| message.as_ref().to_string());
+ log_lines.push_back(message);
+ visible_message
+ }
+
+ fn add_language_server_rpc(
+ &mut self,
+ language_server_id: LanguageServerId,
+ kind: MessageKind,
+ message: &str,
+ cx: &mut Context<'_, Self>,
+ ) {
+ let store_logs = !self.on_headless_host;
+ let Some(state) = self
+ .get_language_server_state(language_server_id)
+ .and_then(|state| state.rpc_state.as_mut())
+ else {
+ return;
+ };
+
+ let received = kind == MessageKind::Receive;
+ let rpc_log_lines = &mut state.rpc_messages;
+ if state.last_message_kind != Some(kind) {
+ while rpc_log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
+ rpc_log_lines.pop_front();
+ }
+ let line_before_message = match kind {
+ MessageKind::Send => SEND_LINE,
+ MessageKind::Receive => RECEIVE_LINE,
+ };
+ if store_logs {
+ rpc_log_lines.push_back(RpcMessage {
+ message: line_before_message.to_string(),
+ });
+ }
+ // Do not send a synthetic message over the wire, it will be derived from the actual RPC message
+ cx.emit(Event::NewServerLogEntry {
+ id: language_server_id,
+ kind: LanguageServerLogType::Rpc { received },
+ text: line_before_message.to_string(),
+ });
+ }
+
+ while rpc_log_lines.len() + 1 >= MAX_STORED_LOG_ENTRIES {
+ rpc_log_lines.pop_front();
+ }
+
+ if store_logs {
+ rpc_log_lines.push_back(RpcMessage {
+ message: message.trim().to_owned(),
+ });
+ }
+
+ self.emit_event(
+ Event::NewServerLogEntry {
+ id: language_server_id,
+ kind: LanguageServerLogType::Rpc { received },
+ text: message.to_owned(),
+ },
+ cx,
+ );
+ }
+
+ pub fn remove_language_server(&mut self, id: LanguageServerId, cx: &mut Context<Self>) {
+ self.language_servers.remove(&id);
+ cx.notify();
+ }
+
+ pub fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque<LogMessage>> {
+ Some(&self.language_servers.get(&server_id)?.log_messages)
+ }
+
+ pub fn server_trace(&self, server_id: LanguageServerId) -> Option<&VecDeque<TraceMessage>> {
+ Some(&self.language_servers.get(&server_id)?.trace_messages)
+ }
+
+ pub fn server_ids_for_project<'a>(
+ &'a self,
+ lookup_project: &'a WeakEntity<Project>,
+ ) -> impl Iterator<Item = LanguageServerId> + 'a {
+ self.language_servers
+ .iter()
+ .filter_map(move |(id, state)| match &state.kind {
+ LanguageServerKind::Local { project } | LanguageServerKind::Remote { project } => {
+ if project == lookup_project {
+ Some(*id)
+ } else {
+ None
+ }
+ }
+ LanguageServerKind::Global | LanguageServerKind::LocalSsh { .. } => Some(*id),
+ })
+ }
+
+ pub fn enable_rpc_trace_for_language_server(
+ &mut self,
+ server_id: LanguageServerId,
+ ) -> Option<&mut LanguageServerRpcState> {
+ let rpc_state = self
+ .language_servers
+ .get_mut(&server_id)?
+ .rpc_state
+ .get_or_insert_with(|| LanguageServerRpcState {
+ rpc_messages: VecDeque::with_capacity(MAX_STORED_LOG_ENTRIES),
+ last_message_kind: None,
+ });
+ Some(rpc_state)
+ }
+
+ pub fn disable_rpc_trace_for_language_server(
+ &mut self,
+ server_id: LanguageServerId,
+ ) -> Option<()> {
+ self.language_servers.get_mut(&server_id)?.rpc_state.take();
+ Some(())
+ }
+
+ pub fn has_server_logs(&self, server: &LanguageServerSelector) -> bool {
+ match server {
+ LanguageServerSelector::Id(id) => self.language_servers.contains_key(id),
+ LanguageServerSelector::Name(name) => self
+ .language_servers
+ .iter()
+ .any(|(_, state)| state.name.as_ref() == Some(name)),
+ }
+ }
+
+ fn on_io(
+ &mut self,
+ language_server_id: LanguageServerId,
+ io_kind: IoKind,
+ message: &str,
+ cx: &mut Context<Self>,
+ ) -> Option<()> {
+ let is_received = match io_kind {
+ IoKind::StdOut => true,
+ IoKind::StdIn => false,
+ IoKind::StdErr => {
+ self.add_language_server_log(language_server_id, MessageType::LOG, message, cx);
+ return Some(());
+ }
+ };
+
+ let kind = if is_received {
+ MessageKind::Receive
+ } else {
+ MessageKind::Send
+ };
+
+ self.add_language_server_rpc(language_server_id, kind, message, cx);
+ cx.notify();
+ Some(())
+ }
+
+ fn emit_event(&mut self, e: Event, cx: &mut Context<Self>) {
+ let on_headless_host = self.on_headless_host;
+ match &e {
+ Event::NewServerLogEntry { id, kind, text } => {
+ if let Some(state) = self.get_language_server_state(*id) {
+ let downstream_client = match &state.kind {
+ LanguageServerKind::Remote { project }
+ | LanguageServerKind::Local { project } => project
+ .upgrade()
+ .map(|project| project.read(cx).lsp_store()),
+ LanguageServerKind::LocalSsh { lsp_store } => lsp_store.upgrade(),
+ LanguageServerKind::Global => None,
+ }
+ .and_then(|lsp_store| lsp_store.read(cx).downstream_client());
+ if let Some((client, project_id)) = downstream_client {
+ if on_headless_host
+ || Some(LogKind::from_server_log_type(kind)) == state.toggled_log_kind
+ {
+ client
+ .send(proto::LanguageServerLog {
+ project_id,
+ language_server_id: id.to_proto(),
+ message: text.clone(),
+ log_type: Some(kind.to_proto()),
+ })
+ .ok();
+ }
+ }
+ }
+ }
+ }
+
+ cx.emit(e);
+ }
+}
@@ -213,7 +213,7 @@ impl LspCommand for OpenDocs {
) -> Result<OpenDocsParams> {
Ok(OpenDocsParams {
text_document: lsp::TextDocumentIdentifier {
- uri: lsp::Url::from_file_path(path).unwrap(),
+ uri: lsp::Uri::from_file_path(path).unwrap(),
},
position: point_to_lsp(self.position),
})
@@ -1,8 +1,8 @@
use ::serde::{Deserialize, Serialize};
use anyhow::Context as _;
-use gpui::{App, Entity, Task, WeakEntity};
-use language::ServerHealth;
-use lsp::{LanguageServer, LanguageServerName};
+use gpui::{App, AsyncApp, Entity, Task, WeakEntity};
+use language::{Buffer, ServerHealth};
+use lsp::{LanguageServer, LanguageServerId, LanguageServerName};
use rpc::proto;
use crate::{LspStore, LspStoreEvent, Project, ProjectPath, lsp_store};
@@ -34,7 +34,6 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
language_server
.on_notification::<ServerStatus, _>({
- let name = name.clone();
move |params, cx| {
let message = params.message;
let log_message = message.as_ref().map(|message| {
@@ -84,31 +83,32 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
pub fn cancel_flycheck(
project: Entity<Project>,
- buffer_path: ProjectPath,
+ buffer_path: Option<ProjectPath>,
cx: &mut App,
) -> Task<anyhow::Result<()>> {
let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client();
let lsp_store = project.read(cx).lsp_store();
- let buffer = project.update(cx, |project, cx| {
- project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, cx)
+ let buffer = buffer_path.map(|buffer_path| {
+ project.update(cx, |project, cx| {
+ project.buffer_store().update(cx, |buffer_store, cx| {
+ buffer_store.open_buffer(buffer_path, cx)
+ })
})
});
cx.spawn(async move |cx| {
- let buffer = buffer.await?;
- let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| {
- project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
- })?
+ let buffer = match buffer {
+ Some(buffer) => Some(buffer.await?),
+ None => None,
+ };
+ let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx)
else {
return Ok(());
};
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?;
if let Some((client, project_id)) = upstream_client {
let request = proto::LspExtCancelFlycheck {
project_id,
- buffer_id,
language_server_id: rust_analyzer_server.to_proto(),
};
client
@@ -131,28 +131,33 @@ pub fn cancel_flycheck(
pub fn run_flycheck(
project: Entity<Project>,
- buffer_path: ProjectPath,
+ buffer_path: Option<ProjectPath>,
cx: &mut App,
) -> Task<anyhow::Result<()>> {
let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client();
let lsp_store = project.read(cx).lsp_store();
- let buffer = project.update(cx, |project, cx| {
- project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, cx)
+ let buffer = buffer_path.map(|buffer_path| {
+ project.update(cx, |project, cx| {
+ project.buffer_store().update(cx, |buffer_store, cx| {
+ buffer_store.open_buffer(buffer_path, cx)
+ })
})
});
cx.spawn(async move |cx| {
- let buffer = buffer.await?;
- let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| {
- project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
- })?
+ let buffer = match buffer {
+ Some(buffer) => Some(buffer.await?),
+ None => None,
+ };
+ let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx)
else {
return Ok(());
};
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?;
if let Some((client, project_id)) = upstream_client {
+ let buffer_id = buffer
+ .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto()))
+ .transpose()?;
let request = proto::LspExtRunFlycheck {
project_id,
buffer_id,
@@ -183,31 +188,32 @@ pub fn run_flycheck(
pub fn clear_flycheck(
project: Entity<Project>,
- buffer_path: ProjectPath,
+ buffer_path: Option<ProjectPath>,
cx: &mut App,
) -> Task<anyhow::Result<()>> {
let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client();
let lsp_store = project.read(cx).lsp_store();
- let buffer = project.update(cx, |project, cx| {
- project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, cx)
+ let buffer = buffer_path.map(|buffer_path| {
+ project.update(cx, |project, cx| {
+ project.buffer_store().update(cx, |buffer_store, cx| {
+ buffer_store.open_buffer(buffer_path, cx)
+ })
})
});
cx.spawn(async move |cx| {
- let buffer = buffer.await?;
- let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| {
- project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
- })?
+ let buffer = match buffer {
+ Some(buffer) => Some(buffer.await?),
+ None => None,
+ };
+ let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx)
else {
return Ok(());
};
- let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?;
if let Some((client, project_id)) = upstream_client {
let request = proto::LspExtClearFlycheck {
project_id,
- buffer_id,
language_server_id: rust_analyzer_server.to_proto(),
};
client
@@ -227,3 +233,40 @@ pub fn clear_flycheck(
anyhow::Ok(())
})
}
+
+fn find_rust_analyzer_server(
+ project: &Entity<Project>,
+ buffer: Option<&Entity<Buffer>>,
+ cx: &mut AsyncApp,
+) -> Option<LanguageServerId> {
+ project
+ .read_with(cx, |project, cx| {
+ buffer
+ .and_then(|buffer| {
+ project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx)
+ })
+ // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup
+ // and use project's rust-analyzer if it's the only one.
+ .or_else(|| {
+ let rust_analyzer_servers = project
+ .lsp_store()
+ .read(cx)
+ .language_server_statuses
+ .iter()
+ .filter_map(|(server_id, server_status)| {
+ if server_status.name == RUST_ANALYZER_NAME {
+ Some(*server_id)
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+ if rust_analyzer_servers.len() == 1 {
+ rust_analyzer_servers.first().copied()
+ } else {
+ None
+ }
+ })
+ })
+ .ok()?
+}
@@ -43,12 +43,9 @@ impl WorktreeRoots {
match event {
WorktreeEvent::UpdatedEntries(changes) => {
for (path, _, kind) in changes.iter() {
- match kind {
- worktree::PathChange::Removed => {
- let path = TriePath::from(path.as_ref());
- this.roots.remove(&path);
- }
- _ => {}
+ if kind == &worktree::PathChange::Removed {
+ let path = TriePath::from(path.as_ref());
+ this.roots.remove(&path);
}
}
}
@@ -80,7 +77,7 @@ impl ManifestTree {
_subscriptions: [
cx.subscribe(&worktree_store, Self::on_worktree_store_event),
cx.observe_global::<SettingsStore>(|this, cx| {
- for (_, roots) in &mut this.root_points {
+ for roots in this.root_points.values_mut() {
roots.update(cx, |worktree_roots, _| {
worktree_roots.roots = RootPathTrie::new();
})
@@ -197,11 +194,8 @@ impl ManifestTree {
evt: &WorktreeStoreEvent,
_: &mut Context<Self>,
) {
- match evt {
- WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) => {
- self.root_points.remove(worktree_id);
- }
- _ => {}
+ if let WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) = evt {
+ self.root_points.remove(worktree_id);
}
}
}
@@ -22,9 +22,9 @@ pub(super) struct RootPathTrie<Label> {
/// Label presence is a marker that allows to optimize searches within [RootPathTrie]; node label can be:
/// - Present; we know there's definitely a project root at this node.
/// - Known Absent - we know there's definitely no project root at this node and none of it's ancestors are Present (descendants can be present though!).
-/// The distinction is there to optimize searching; when we encounter a node with unknown status, we don't need to look at it's full path
-/// to the root of the worktree; it's sufficient to explore only the path between last node with a KnownAbsent state and the directory of a path, since we run searches
-/// from the leaf up to the root of the worktree.
+/// The distinction is there to optimize searching; when we encounter a node with unknown status, we don't need to look at it's full path
+/// to the root of the worktree; it's sufficient to explore only the path between last node with a KnownAbsent state and the directory of a path, since we run searches
+/// from the leaf up to the root of the worktree.
///
/// In practical terms, it means that by storing label presence we don't need to do a project discovery on a given folder more than once
/// (unless the node is invalidated, which can happen when FS entries are renamed/removed).
@@ -181,6 +181,7 @@ impl LanguageServerTree {
&root_path.path,
language_name.clone(),
);
+
(
Arc::new(InnerTreeNode::new(
adapter.name(),
@@ -312,8 +313,8 @@ impl LanguageServerTree {
/// Remove nodes with a given ID from the tree.
pub(crate) fn remove_nodes(&mut self, ids: &BTreeSet<LanguageServerId>) {
- for (_, servers) in &mut self.instances {
- for (_, nodes) in &mut servers.roots {
+ for servers in self.instances.values_mut() {
+ for nodes in &mut servers.roots.values_mut() {
nodes.retain(|_, (node, _)| node.id.get().is_none_or(|id| !ids.contains(id)));
}
}
@@ -408,6 +409,7 @@ impl ServerTreeRebase {
if live_node.id.get().is_some() {
return Some(node);
}
+
let disposition = &live_node.disposition;
let Some((existing_node, _)) = self
.old_contents
@@ -1,3 +1,4 @@
+pub mod agent_server_store;
pub mod buffer_store;
mod color_extractor;
pub mod connection_manager;
@@ -28,12 +29,17 @@ use context_server_store::ContextServerStore;
pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent};
use git::repository::get_git_committer;
use git_store::{Repository, RepositoryId};
+use schemars::JsonSchema;
pub mod search_history;
mod yarn;
use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
-use crate::git_store::GitStore;
+use crate::{
+ agent_server_store::{AgentServerStore, AllAgentServersSettings},
+ git_store::GitStore,
+ lsp_store::log_store::LogKind,
+};
pub use git_store::{
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
@@ -42,14 +48,12 @@ pub use manifest_tree::ManifestTree;
use anyhow::{Context as _, Result, anyhow};
use buffer_store::{BufferStore, BufferStoreEvent};
-use client::{
- Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, proto,
-};
+use client::{Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore, proto};
use clock::ReplicaId;
use dap::client::DebugAdapterClient;
-use collections::{BTreeSet, HashMap, HashSet};
+use collections::{BTreeSet, HashMap, HashSet, IndexSet};
use debounced_delay::DebouncedDelay;
pub use debugger::breakpoint_store::BreakpointWithPosition;
use debugger::{
@@ -75,8 +79,9 @@ use gpui::{
};
use language::{
Buffer, BufferEvent, Capability, CodeLabel, CursorShape, Language, LanguageName,
- LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, Transaction,
- Unclipped, language_settings::InlayHintKind, proto::split_operations,
+ LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainMetadata,
+ ToolchainScope, Transaction, Unclipped, language_settings::InlayHintKind,
+ proto::split_operations,
};
use lsp::{
CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode,
@@ -89,19 +94,23 @@ use node_runtime::NodeRuntime;
use parking_lot::Mutex;
pub use prettier_store::PrettierStore;
use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent};
-use remote::{SshConnectionOptions, SshRemoteClient};
+use remote::{RemoteClient, RemoteConnectionOptions};
use rpc::{
AnyProtoClient, ErrorCode,
- proto::{FromProto, LanguageServerPromptResponse, SSH_PROJECT_ID, ToProto},
+ proto::{FromProto, LanguageServerPromptResponse, REMOTE_SERVER_PROJECT_ID, ToProto},
};
use search::{SearchInputKind, SearchQuery, SearchResult};
use search_history::SearchHistory;
-use settings::{InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore};
+use settings::{
+ InvalidSettingsError, Settings, SettingsKey, SettingsLocation, SettingsSources, SettingsStore,
+ SettingsUi,
+};
use smol::channel::Receiver;
use snippet::Snippet;
use snippet_provider::SnippetProvider;
use std::{
borrow::Cow,
+ collections::BTreeMap,
ops::Range,
path::{Component, Path, PathBuf},
pin::pin,
@@ -115,7 +124,7 @@ use terminals::Terminals;
use text::{Anchor, BufferId, OffsetRangeExt, Point, Rope};
use toolchain_store::EmptyToolchainStore;
use util::{
- ResultExt as _,
+ ResultExt as _, maybe,
paths::{PathStyle, RemotePathBuf, SanitizedPath, compare_paths},
};
use worktree::{CreatedEntry, Snapshot, Traversal};
@@ -140,7 +149,7 @@ pub use lsp_store::{
LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent,
SERVER_PROGRESS_THROTTLE_TIMEOUT,
};
-pub use toolchain_store::ToolchainStore;
+pub use toolchain_store::{ToolchainStore, Toolchains};
const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500;
const MAX_SEARCH_RESULT_FILES: usize = 5_000;
const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
@@ -175,14 +184,15 @@ pub struct Project {
buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
languages: Arc<LanguageRegistry>,
dap_store: Entity<DapStore>,
+ agent_server_store: Entity<AgentServerStore>,
breakpoint_store: Entity<BreakpointStore>,
- client: Arc<client::Client>,
+ collab_client: Arc<client::Client>,
join_project_response_message_id: u32,
task_store: Entity<TaskStore>,
user_store: Entity<UserStore>,
fs: Arc<dyn Fs>,
- ssh_client: Option<Entity<SshRemoteClient>>,
+ remote_client: Option<Entity<RemoteClient>>,
client_state: ProjectClientState,
git_store: Entity<GitStore>,
collaborators: HashMap<proto::PeerId, Collaborator>,
@@ -282,6 +292,12 @@ pub enum Event {
server_id: LanguageServerId,
buffer_id: BufferId,
buffer_abs_path: PathBuf,
+ name: Option<LanguageServerName>,
+ },
+ ToggleLspLogs {
+ server_id: LanguageServerId,
+ enabled: bool,
+ toggled_log_kind: LogKind,
},
Toast {
notification_id: SharedString,
@@ -327,6 +343,7 @@ pub enum Event {
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
ExpandedAllForEntry(WorktreeId, ProjectEntryId),
+ EntryRenamed(ProjectTransaction),
AgentLocationChanged,
}
@@ -569,11 +586,23 @@ impl std::fmt::Debug for Completion {
/// Response from a source of completions.
pub struct CompletionResponse {
pub completions: Vec<Completion>,
+ pub display_options: CompletionDisplayOptions,
/// When false, indicates that the list is complete and so does not need to be re-queried if it
/// can be filtered instead.
pub is_incomplete: bool,
}
+#[derive(Default)]
+pub struct CompletionDisplayOptions {
+ pub dynamic_width: bool,
+}
+
+impl CompletionDisplayOptions {
+ pub fn merge(&mut self, other: &CompletionDisplayOptions) {
+ self.dynamic_width = self.dynamic_width && other.dynamic_width;
+ }
+}
+
/// Response from language server completion request.
#[derive(Clone, Debug, Default)]
pub(crate) struct CoreCompletionResponse {
@@ -661,7 +690,6 @@ pub enum ResolveState {
CanResolve(LanguageServerId, Option<lsp::LSPAny>),
Resolving,
}
-
impl InlayHint {
pub fn text(&self) -> Rope {
match &self.label {
@@ -925,7 +953,7 @@ pub enum LspPullDiagnostics {
/// The id of the language server that produced diagnostics.
server_id: LanguageServerId,
/// URI of the resource,
- uri: lsp::Url,
+ uri: lsp::Uri,
/// The diagnostics produced by this language server.
diagnostics: PulledDiagnostics,
},
@@ -947,15 +975,31 @@ pub enum PulledDiagnostics {
/// Whether to disable all AI features in Zed.
///
/// Default: false
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, settings::SettingsUi)]
pub struct DisableAiSettings {
pub disable_ai: bool,
}
-impl settings::Settings for DisableAiSettings {
- const KEY: Option<&'static str> = Some("disable_ai");
+#[derive(
+ Copy,
+ Clone,
+ PartialEq,
+ Eq,
+ Debug,
+ Default,
+ serde::Serialize,
+ serde::Deserialize,
+ SettingsUi,
+ SettingsKey,
+ JsonSchema,
+)]
+#[settings_key(None)]
+pub struct DisableAiSettingContent {
+ pub disable_ai: Option<bool>,
+}
- type FileContent = Option<bool>;
+impl settings::Settings for DisableAiSettings {
+ type FileContent = DisableAiSettingContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
// For security reasons, settings can only make AI restrictions MORE strict, not less.
@@ -968,7 +1012,7 @@ impl settings::Settings for DisableAiSettings {
.iter()
.chain(sources.user.iter())
.chain(sources.server.iter())
- .any(|disabled| **disabled == Some(true));
+ .any(|disabled| disabled.disable_ai == Some(true));
Ok(Self { disable_ai })
}
@@ -981,6 +1025,7 @@ impl Project {
WorktreeSettings::register(cx);
ProjectSettings::register(cx);
DisableAiSettings::register(cx);
+ AllAgentServersSettings::register(cx);
}
pub fn init(client: &Arc<Client>, cx: &mut App) {
@@ -1002,6 +1047,7 @@ impl Project {
client.add_entity_request_handler(Self::handle_open_buffer_by_path);
client.add_entity_request_handler(Self::handle_open_new_buffer);
client.add_entity_message_handler(Self::handle_create_buffer_for_peer);
+ client.add_entity_message_handler(Self::handle_toggle_lsp_logs);
WorktreeStore::init(&client);
BufferStore::init(&client);
@@ -1135,6 +1181,10 @@ impl Project {
)
});
+ let agent_server_store = cx.new(|cx| {
+ AgentServerStore::local(node.clone(), fs.clone(), environment.clone(), cx)
+ });
+
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
Self {
@@ -1153,14 +1203,15 @@ impl Project {
active_entry: None,
snippets,
languages,
- client,
+ collab_client: client,
task_store,
user_store,
settings_observer,
fs,
- ssh_client: None,
+ remote_client: None,
breakpoint_store,
dap_store,
+ agent_server_store,
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
@@ -1182,8 +1233,8 @@ impl Project {
})
}
- pub fn ssh(
- ssh: Entity<SshRemoteClient>,
+ pub fn remote(
+ remote: Entity<RemoteClient>,
client: Arc<Client>,
node: NodeRuntime,
user_store: Entity<UserStore>,
@@ -1199,10 +1250,15 @@ impl Project {
let snippets =
SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx);
- let (ssh_proto, path_style) =
- ssh.read_with(cx, |ssh, _| (ssh.proto_client(), ssh.path_style()));
+ let (remote_proto, path_style) =
+ remote.read_with(cx, |remote, _| (remote.proto_client(), remote.path_style()));
let worktree_store = cx.new(|_| {
- WorktreeStore::remote(false, ssh_proto.clone(), SSH_PROJECT_ID, path_style)
+ WorktreeStore::remote(
+ false,
+ remote_proto.clone(),
+ REMOTE_SERVER_PROJECT_ID,
+ path_style,
+ )
});
cx.subscribe(&worktree_store, Self::on_worktree_store_event)
.detach();
@@ -1214,31 +1270,32 @@ impl Project {
let buffer_store = cx.new(|cx| {
BufferStore::remote(
worktree_store.clone(),
- ssh.read(cx).proto_client(),
- SSH_PROJECT_ID,
+ remote.read(cx).proto_client(),
+ REMOTE_SERVER_PROJECT_ID,
cx,
)
});
let image_store = cx.new(|cx| {
ImageStore::remote(
worktree_store.clone(),
- ssh.read(cx).proto_client(),
- SSH_PROJECT_ID,
+ remote.read(cx).proto_client(),
+ REMOTE_SERVER_PROJECT_ID,
cx,
)
});
cx.subscribe(&buffer_store, Self::on_buffer_store_event)
.detach();
- let toolchain_store = cx
- .new(|cx| ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx));
+ let toolchain_store = cx.new(|cx| {
+ ToolchainStore::remote(REMOTE_SERVER_PROJECT_ID, remote.read(cx).proto_client(), cx)
+ });
let task_store = cx.new(|cx| {
TaskStore::remote(
fs.clone(),
buffer_store.downgrade(),
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
- ssh.read(cx).proto_client(),
- SSH_PROJECT_ID,
+ remote.read(cx).proto_client(),
+ REMOTE_SERVER_PROJECT_ID,
cx,
)
});
@@ -1248,6 +1305,7 @@ impl Project {
fs.clone(),
worktree_store.clone(),
task_store.clone(),
+ Some(remote_proto.clone()),
cx,
)
});
@@ -1261,8 +1319,8 @@ impl Project {
buffer_store.clone(),
worktree_store.clone(),
languages.clone(),
- ssh_proto.clone(),
- SSH_PROJECT_ID,
+ remote_proto.clone(),
+ REMOTE_SERVER_PROJECT_ID,
fs.clone(),
cx,
)
@@ -1270,12 +1328,12 @@ impl Project {
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
let breakpoint_store =
- cx.new(|_| BreakpointStore::remote(SSH_PROJECT_ID, ssh_proto.clone()));
+ cx.new(|_| BreakpointStore::remote(REMOTE_SERVER_PROJECT_ID, remote_proto.clone()));
let dap_store = cx.new(|cx| {
- DapStore::new_ssh(
- SSH_PROJECT_ID,
- ssh.clone(),
+ DapStore::new_remote(
+ REMOTE_SERVER_PROJECT_ID,
+ remote.clone(),
breakpoint_store.clone(),
worktree_store.clone(),
cx,
@@ -1283,10 +1341,19 @@ impl Project {
});
let git_store = cx.new(|cx| {
- GitStore::ssh(&worktree_store, buffer_store.clone(), ssh_proto.clone(), cx)
+ GitStore::remote(
+ &worktree_store,
+ buffer_store.clone(),
+ remote_proto.clone(),
+ REMOTE_SERVER_PROJECT_ID,
+ cx,
+ )
});
- cx.subscribe(&ssh, Self::on_ssh_event).detach();
+ let agent_server_store =
+ cx.new(|cx| AgentServerStore::remote(REMOTE_SERVER_PROJECT_ID, remote.clone(), cx));
+
+ cx.subscribe(&remote, Self::on_remote_client_event).detach();
let this = Self {
buffer_ordered_messages_tx: tx,
@@ -1301,15 +1368,18 @@ impl Project {
join_project_response_message_id: 0,
client_state: ProjectClientState::Local,
git_store,
+ agent_server_store,
client_subscriptions: Vec::new(),
_subscriptions: vec![
cx.on_release(Self::release),
cx.on_app_quit(|this, cx| {
- let shutdown = this.ssh_client.take().and_then(|client| {
- client.read(cx).shutdown_processes(
- Some(proto::ShutdownRemoteServer {}),
- cx.background_executor().clone(),
- )
+ let shutdown = this.remote_client.take().and_then(|client| {
+ client.update(cx, |client, cx| {
+ client.shutdown_processes(
+ Some(proto::ShutdownRemoteServer {}),
+ cx.background_executor().clone(),
+ )
+ })
});
cx.background_executor().spawn(async move {
@@ -1322,12 +1392,12 @@ impl Project {
active_entry: None,
snippets,
languages,
- client,
+ collab_client: client,
task_store,
user_store,
settings_observer,
fs,
- ssh_client: Some(ssh.clone()),
+ remote_client: Some(remote.clone()),
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
terminals: Terminals {
@@ -1345,53 +1415,36 @@ impl Project {
agent_location: None,
};
- // ssh -> local machine handlers
- let ssh = ssh.read(cx);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.lsp_store);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.dap_store);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer);
- ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.git_store);
-
- ssh_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
- ssh_proto.add_entity_message_handler(Self::handle_update_worktree);
- ssh_proto.add_entity_message_handler(Self::handle_update_project);
- ssh_proto.add_entity_message_handler(Self::handle_toast);
- ssh_proto.add_entity_request_handler(Self::handle_language_server_prompt_request);
- ssh_proto.add_entity_message_handler(Self::handle_hide_toast);
- ssh_proto.add_entity_request_handler(Self::handle_update_buffer_from_ssh);
- BufferStore::init(&ssh_proto);
- LspStore::init(&ssh_proto);
- SettingsObserver::init(&ssh_proto);
- TaskStore::init(Some(&ssh_proto));
- ToolchainStore::init(&ssh_proto);
- DapStore::init(&ssh_proto, cx);
- GitStore::init(&ssh_proto);
+ // remote server -> local machine handlers
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &cx.entity());
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.buffer_store);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.worktree_store);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.lsp_store);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.dap_store);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.settings_observer);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.git_store);
+ remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.agent_server_store);
+
+ remote_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
+ remote_proto.add_entity_message_handler(Self::handle_update_worktree);
+ remote_proto.add_entity_message_handler(Self::handle_update_project);
+ remote_proto.add_entity_message_handler(Self::handle_toast);
+ remote_proto.add_entity_request_handler(Self::handle_language_server_prompt_request);
+ remote_proto.add_entity_message_handler(Self::handle_hide_toast);
+ remote_proto.add_entity_request_handler(Self::handle_update_buffer_from_remote_server);
+ BufferStore::init(&remote_proto);
+ LspStore::init(&remote_proto);
+ SettingsObserver::init(&remote_proto);
+ TaskStore::init(Some(&remote_proto));
+ ToolchainStore::init(&remote_proto);
+ DapStore::init(&remote_proto, cx);
+ GitStore::init(&remote_proto);
+ AgentServerStore::init_remote(&remote_proto);
this
})
}
- pub async fn remote(
- remote_id: u64,
- client: Arc<Client>,
- user_store: Entity<UserStore>,
- languages: Arc<LanguageRegistry>,
- fs: Arc<dyn Fs>,
- cx: AsyncApp,
- ) -> Result<Entity<Self>> {
- let project =
- Self::in_room(remote_id, client, user_store, languages, fs, cx.clone()).await?;
- cx.update(|cx| {
- connection_manager::Manager::global(cx).update(cx, |manager, cx| {
- manager.maintain_project_connection(&project, cx)
- })
- })?;
- Ok(project)
- }
-
pub async fn in_room(
remote_id: u64,
client: Arc<Client>,
@@ -1481,7 +1534,7 @@ impl Project {
})?;
let lsp_store = cx.new(|cx| {
- let mut lsp_store = LspStore::new_remote(
+ LspStore::new_remote(
buffer_store.clone(),
worktree_store.clone(),
languages.clone(),
@@ -1489,12 +1542,7 @@ impl Project {
remote_id,
fs.clone(),
cx,
- );
- lsp_store.set_language_server_statuses_from_proto(
- response.payload.language_servers,
- response.payload.language_server_capabilities,
- );
- lsp_store
+ )
})?;
let task_store = cx.new(|cx| {
@@ -1514,7 +1562,13 @@ impl Project {
})?;
let settings_observer = cx.new(|cx| {
- SettingsObserver::new_remote(fs.clone(), worktree_store.clone(), task_store.clone(), cx)
+ SettingsObserver::new_remote(
+ fs.clone(),
+ worktree_store.clone(),
+ task_store.clone(),
+ None,
+ cx,
+ )
})?;
let git_store = cx.new(|cx| {
@@ -1523,12 +1577,14 @@ impl Project {
&worktree_store,
buffer_store.clone(),
client.clone().into(),
- ProjectId(remote_id),
+ remote_id,
cx,
)
})?;
- let this = cx.new(|cx| {
+ let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx))?;
+
+ let project = cx.new(|cx| {
let replica_id = response.payload.replica_id as ReplicaId;
let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx);
@@ -1559,7 +1615,7 @@ impl Project {
cx.subscribe(&dap_store, Self::on_dap_store_event).detach();
- let mut this = Self {
+ let mut project = Self {
buffer_ordered_messages_tx: tx,
buffer_store: buffer_store.clone(),
image_store,
@@ -1574,11 +1630,11 @@ impl Project {
task_store,
snippets,
fs,
- ssh_client: None,
+ remote_client: None,
settings_observer: settings_observer.clone(),
client_subscriptions: Default::default(),
_subscriptions: vec![cx.on_release(Self::release)],
- client: client.clone(),
+ collab_client: client.clone(),
client_state: ProjectClientState::Remote {
sharing_has_stopped: false,
capability: Capability::ReadWrite,
@@ -1588,6 +1644,7 @@ impl Project {
breakpoint_store,
dap_store: dap_store.clone(),
git_store: git_store.clone(),
+ agent_server_store,
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
terminals: Terminals {
@@ -1602,13 +1659,25 @@ impl Project {
toolchain_store: None,
agent_location: None,
};
- this.set_role(role, cx);
+ project.set_role(role, cx);
for worktree in worktrees {
- this.add_worktree(&worktree, cx);
+ project.add_worktree(&worktree, cx);
}
- this
+ project
})?;
+ let weak_project = project.downgrade();
+ lsp_store
+ .update(&mut cx, |lsp_store, cx| {
+ lsp_store.set_language_server_statuses_from_proto(
+ weak_project,
+ response.payload.language_servers,
+ response.payload.language_server_capabilities,
+ cx,
+ );
+ })
+ .ok();
+
let subscriptions = subscriptions
.into_iter()
.map(|s| match s {
@@ -1624,7 +1693,7 @@ impl Project {
EntitySubscription::SettingsObserver(subscription) => {
subscription.set_entity(&settings_observer, &cx)
}
- EntitySubscription::Project(subscription) => subscription.set_entity(&this, &cx),
+ EntitySubscription::Project(subscription) => subscription.set_entity(&project, &cx),
EntitySubscription::LspStore(subscription) => {
subscription.set_entity(&lsp_store, &cx)
}
@@ -1644,13 +1713,13 @@ impl Project {
.update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
.await?;
- this.update(&mut cx, |this, cx| {
+ project.update(&mut cx, |this, cx| {
this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
this.client_subscriptions.extend(subscriptions);
anyhow::Ok(())
})??;
- Ok(this)
+ Ok(project)
}
fn new_search_history() -> SearchHistory {
@@ -1661,11 +1730,13 @@ impl Project {
}
fn release(&mut self, cx: &mut App) {
- if let Some(client) = self.ssh_client.take() {
- let shutdown = client.read(cx).shutdown_processes(
- Some(proto::ShutdownRemoteServer {}),
- cx.background_executor().clone(),
- );
+ if let Some(client) = self.remote_client.take() {
+ let shutdown = client.update(cx, |client, cx| {
+ client.shutdown_processes(
+ Some(proto::ShutdownRemoteServer {}),
+ cx.background_executor().clone(),
+ )
+ });
cx.background_spawn(async move {
if let Some(shutdown) = shutdown {
@@ -1681,7 +1752,7 @@ impl Project {
let _ = self.unshare_internal(cx);
}
ProjectClientState::Remote { remote_id, .. } => {
- let _ = self.client.send(proto::LeaveProject {
+ let _ = self.collab_client.send(proto::LeaveProject {
project_id: *remote_id,
});
self.disconnected_from_host_internal(cx);
@@ -1808,11 +1879,11 @@ impl Project {
}
pub fn client(&self) -> Arc<Client> {
- self.client.clone()
+ self.collab_client.clone()
}
- pub fn ssh_client(&self) -> Option<Entity<SshRemoteClient>> {
- self.ssh_client.clone()
+ pub fn remote_client(&self) -> Option<Entity<RemoteClient>> {
+ self.remote_client.clone()
}
pub fn user_store(&self) -> Entity<UserStore> {
@@ -1893,38 +1964,30 @@ impl Project {
if self.is_local() {
return true;
}
- if self.is_via_ssh() {
+ if self.is_via_remote_server() {
return true;
}
false
}
- pub fn ssh_connection_string(&self, cx: &App) -> Option<SharedString> {
- if let Some(ssh_state) = &self.ssh_client {
- return Some(ssh_state.read(cx).connection_string().into());
- }
-
- None
- }
-
- pub fn ssh_connection_state(&self, cx: &App) -> Option<remote::ConnectionState> {
- self.ssh_client
+ pub fn remote_connection_state(&self, cx: &App) -> Option<remote::ConnectionState> {
+ self.remote_client
.as_ref()
- .map(|ssh| ssh.read(cx).connection_state())
+ .map(|remote| remote.read(cx).connection_state())
}
- pub fn ssh_connection_options(&self, cx: &App) -> Option<SshConnectionOptions> {
- self.ssh_client
+ pub fn remote_connection_options(&self, cx: &App) -> Option<RemoteConnectionOptions> {
+ self.remote_client
.as_ref()
- .map(|ssh| ssh.read(cx).connection_options())
+ .map(|remote| remote.read(cx).connection_options())
}
pub fn replica_id(&self) -> ReplicaId {
match self.client_state {
ProjectClientState::Remote { replica_id, .. } => replica_id,
_ => {
- if self.ssh_client.is_some() {
+ if self.remote_client.is_some() {
1
} else {
0
@@ -2060,13 +2123,12 @@ impl Project {
exclude_sub_dirs: bool,
cx: &App,
) -> Option<bool> {
- let sanitized_path = SanitizedPath::from(path);
- let path = sanitized_path.as_path();
+ let path = SanitizedPath::new(path).as_path();
self.worktrees(cx)
.filter_map(|worktree| {
let worktree = worktree.read(cx);
let abs_path = worktree.as_local()?.abs_path();
- let contains = path == abs_path
+ let contains = path == abs_path.as_ref()
|| (path.starts_with(abs_path) && (!exclude_sub_dirs || !metadata.is_dir));
contains.then(|| worktree.is_visible())
})
@@ -2128,7 +2190,7 @@ impl Project {
let is_root_entry = self.entry_is_worktree_root(entry_id, cx);
let lsp_store = self.lsp_store().downgrade();
- cx.spawn(async move |_, cx| {
+ cx.spawn(async move |project, cx| {
let (old_abs_path, new_abs_path) = {
let root_path = worktree.read_with(cx, |this, _| this.abs_path())?;
let new_abs_path = if is_root_entry {
@@ -2138,7 +2200,7 @@ impl Project {
};
(root_path.join(&old_path), new_abs_path)
};
- LspStore::will_rename_entry(
+ let transaction = LspStore::will_rename_entry(
lsp_store.clone(),
worktree_id,
&old_abs_path,
@@ -2154,6 +2216,12 @@ impl Project {
})?
.await?;
+ project
+ .update(cx, |_, cx| {
+ cx.emit(Event::EntryRenamed(transaction));
+ })
+ .ok();
+
lsp_store
.read_with(cx, |this, _| {
this.did_rename_entry(worktree_id, &old_abs_path, &new_abs_path, is_dir);
@@ -2222,55 +2290,55 @@ impl Project {
);
self.client_subscriptions.extend([
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&cx.entity(), &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.worktree_store, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.buffer_store, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.lsp_store, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.settings_observer, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.dap_store, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.breakpoint_store, &cx.to_async()),
- self.client
+ self.collab_client
.subscribe_to_entity(project_id)?
.set_entity(&self.git_store, &cx.to_async()),
]);
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.shared(project_id, self.client.clone().into(), cx)
+ buffer_store.shared(project_id, self.collab_client.clone().into(), cx)
});
self.worktree_store.update(cx, |worktree_store, cx| {
- worktree_store.shared(project_id, self.client.clone().into(), cx);
+ worktree_store.shared(project_id, self.collab_client.clone().into(), cx);
});
self.lsp_store.update(cx, |lsp_store, cx| {
- lsp_store.shared(project_id, self.client.clone().into(), cx)
+ lsp_store.shared(project_id, self.collab_client.clone().into(), cx)
});
self.breakpoint_store.update(cx, |breakpoint_store, _| {
- breakpoint_store.shared(project_id, self.client.clone().into())
+ breakpoint_store.shared(project_id, self.collab_client.clone().into())
});
self.dap_store.update(cx, |dap_store, cx| {
- dap_store.shared(project_id, self.client.clone().into(), cx);
+ dap_store.shared(project_id, self.collab_client.clone().into(), cx);
});
self.task_store.update(cx, |task_store, cx| {
- task_store.shared(project_id, self.client.clone().into(), cx);
+ task_store.shared(project_id, self.collab_client.clone().into(), cx);
});
self.settings_observer.update(cx, |settings_observer, cx| {
- settings_observer.shared(project_id, self.client.clone().into(), cx)
+ settings_observer.shared(project_id, self.collab_client.clone().into(), cx)
});
self.git_store.update(cx, |git_store, cx| {
- git_store.shared(project_id, self.client.clone().into(), cx)
+ git_store.shared(project_id, self.collab_client.clone().into(), cx)
});
self.client_state = ProjectClientState::Shared {
@@ -2295,7 +2363,7 @@ impl Project {
});
if let Some(remote_id) = self.remote_id() {
self.git_store.update(cx, |git_store, cx| {
- git_store.shared(remote_id, self.client.clone().into(), cx)
+ git_store.shared(remote_id, self.collab_client.clone().into(), cx)
});
}
cx.emit(Event::Reshared);
@@ -2321,10 +2389,14 @@ impl Project {
self.join_project_response_message_id = message_id;
self.set_worktrees_from_proto(message.worktrees, cx)?;
self.set_collaborators_from_proto(message.collaborators, cx)?;
- self.lsp_store.update(cx, |lsp_store, _| {
+
+ let project = cx.weak_entity();
+ self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.set_language_server_statuses_from_proto(
+ project,
message.language_servers,
message.language_server_capabilities,
+ cx,
)
});
self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
@@ -2372,7 +2444,7 @@ impl Project {
git_store.unshared(cx);
});
- self.client
+ self.collab_client
.send(proto::UnshareProject {
project_id: remote_id,
})
@@ -2439,15 +2511,17 @@ impl Project {
sharing_has_stopped,
..
} => *sharing_has_stopped,
- ProjectClientState::Local if self.is_via_ssh() => self.ssh_is_disconnected(cx),
+ ProjectClientState::Local if self.is_via_remote_server() => {
+ self.remote_client_is_disconnected(cx)
+ }
_ => false,
}
}
- fn ssh_is_disconnected(&self, cx: &App) -> bool {
- self.ssh_client
+ fn remote_client_is_disconnected(&self, cx: &App) -> bool {
+ self.remote_client
.as_ref()
- .map(|ssh| ssh.read(cx).is_disconnected())
+ .map(|remote| remote.read(cx).is_disconnected())
.unwrap_or(false)
}
@@ -2465,16 +2539,16 @@ impl Project {
pub fn is_local(&self) -> bool {
match &self.client_state {
ProjectClientState::Local | ProjectClientState::Shared { .. } => {
- self.ssh_client.is_none()
+ self.remote_client.is_none()
}
ProjectClientState::Remote { .. } => false,
}
}
- pub fn is_via_ssh(&self) -> bool {
+ pub fn is_via_remote_server(&self) -> bool {
match &self.client_state {
ProjectClientState::Local | ProjectClientState::Shared { .. } => {
- self.ssh_client.is_some()
+ self.remote_client.is_some()
}
ProjectClientState::Remote { .. } => false,
}
@@ -2487,22 +2561,28 @@ impl Project {
}
}
- pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
- self.buffer_store
- .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))
+ pub fn create_buffer(
+ &mut self,
+ searchable: bool,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Buffer>>> {
+ self.buffer_store.update(cx, |buffer_store, cx| {
+ buffer_store.create_buffer(searchable, cx)
+ })
}
pub fn create_local_buffer(
&mut self,
text: &str,
language: Option<Arc<Language>>,
+ project_searchable: bool,
cx: &mut Context<Self>,
) -> Entity<Buffer> {
- if self.is_via_collab() || self.is_via_ssh() {
+ if self.is_via_collab() || self.is_via_remote_server() {
panic!("called create_local_buffer on a remote project")
}
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.create_local_buffer(text, language, cx)
+ buffer_store.create_local_buffer(text, language, project_searchable, cx)
})
}
@@ -2511,7 +2591,7 @@ impl Project {
path: ProjectPath,
cx: &mut Context<Self>,
) -> Task<Result<(Option<ProjectEntryId>, Entity<Buffer>)>> {
- let task = self.open_buffer(path.clone(), cx);
+ let task = self.open_buffer(path, cx);
cx.spawn(async move |_project, cx| {
let buffer = task.await?;
let project_entry_id = buffer.read_with(cx, |buffer, cx| {
@@ -2622,10 +2702,10 @@ impl Project {
) -> Task<Result<Entity<Buffer>>> {
if let Some(buffer) = self.buffer_for_id(id, cx) {
Task::ready(Ok(buffer))
- } else if self.is_local() || self.is_via_ssh() {
+ } else if self.is_local() || self.is_via_remote_server() {
Task::ready(Err(anyhow!("buffer {id} does not exist")))
} else if let Some(project_id) = self.remote_id() {
- let request = self.client.request(proto::OpenBufferById {
+ let request = self.collab_client.request(proto::OpenBufferById {
project_id,
id: id.into(),
});
@@ -2743,7 +2823,7 @@ impl Project {
for (buffer_id, operations) in operations_by_buffer_id.drain() {
let request = this.read_with(cx, |this, _| {
let project_id = this.remote_id()?;
- Some(this.client.request(proto::UpdateBuffer {
+ Some(this.collab_client.request(proto::UpdateBuffer {
buffer_id: buffer_id.into(),
project_id,
operations,
@@ -4,7 +4,7 @@ use context_server::ContextServerCommand;
use dap::adapters::DebugAdapterName;
use fs::Fs;
use futures::StreamExt as _;
-use gpui::{App, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Task};
+use gpui::{App, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Subscription, Task};
use lsp::LanguageServerName;
use paths::{
EDITORCONFIG_NAME, local_debug_file_relative_path, local_settings_file_relative_path,
@@ -13,13 +13,13 @@ use paths::{
};
use rpc::{
AnyProtoClient, TypedEnvelope,
- proto::{self, FromProto, ToProto},
+ proto::{self, FromProto, REMOTE_SERVER_PROJECT_ID, ToProto},
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
- InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources,
- SettingsStore, parse_json_with_comments, watch_config_file,
+ InvalidSettingsError, LocalSettingsKind, Settings, SettingsKey, SettingsLocation,
+ SettingsSources, SettingsStore, SettingsUi, parse_json_with_comments, watch_config_file,
};
use std::{
collections::BTreeMap,
@@ -36,7 +36,8 @@ use crate::{
worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
-#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct ProjectSettings {
/// Configuration for language servers.
///
@@ -181,17 +182,6 @@ pub struct DiagnosticsSettings {
/// Settings for showing inline diagnostics.
pub inline: InlineDiagnosticsSettings,
-
- /// Configuration, related to Rust language diagnostics.
- pub cargo: Option<CargoDiagnosticsSettings>,
-}
-
-impl DiagnosticsSettings {
- pub fn fetch_cargo_diagnostics(&self) -> bool {
- self.cargo
- .as_ref()
- .is_some_and(|cargo_diagnostics| cargo_diagnostics.fetch_cargo_diagnostics)
- }
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)]
@@ -258,7 +248,6 @@ impl Default for DiagnosticsSettings {
include_warnings: true,
lsp_pull_diagnostics: LspPullDiagnosticsSettings::default(),
inline: InlineDiagnosticsSettings::default(),
- cargo: None,
}
}
}
@@ -292,18 +281,18 @@ impl Default for GlobalLspSettings {
}
}
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
-pub struct CargoDiagnosticsSettings {
- /// When enabled, Zed disables rust-analyzer's check on save and starts to query
- /// Cargo diagnostics separately.
- ///
- /// Default: false
- #[serde(default)]
- pub fetch_cargo_diagnostics: bool,
-}
-
#[derive(
- Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, JsonSchema,
+ Clone,
+ Copy,
+ Debug,
+ Eq,
+ PartialEq,
+ Ord,
+ PartialOrd,
+ Serialize,
+ Deserialize,
+ JsonSchema,
+ SettingsUi,
)]
#[serde(rename_all = "snake_case")]
pub enum DiagnosticSeverity {
@@ -312,6 +301,7 @@ pub enum DiagnosticSeverity {
Error,
Warning,
Info,
+ #[serde(alias = "all")]
Hint,
}
@@ -527,6 +517,12 @@ pub struct BinarySettings {
pub ignore_system_version: Option<bool>,
}
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
+pub struct FetchSettings {
+ // Whether to consider pre-releases for fetching
+ pub pre_release: Option<bool>,
+}
+
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
@@ -538,6 +534,7 @@ pub struct LspSettings {
/// Default: true
#[serde(default = "default_true")]
pub enable_lsp_tasks: bool,
+ pub fetch: Option<FetchSettings>,
}
impl Default for LspSettings {
@@ -547,6 +544,7 @@ impl Default for LspSettings {
initialization_options: None,
settings: None,
enable_lsp_tasks: true,
+ fetch: None,
}
}
}
@@ -571,8 +569,6 @@ impl Default for SessionSettings {
}
impl Settings for ProjectSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -615,6 +611,7 @@ impl Settings for ProjectSettings {
path: cmd.command,
args: cmd.args.unwrap_or_default(),
env: cmd.env,
+ timeout: None,
}
}
}
@@ -660,6 +657,7 @@ pub struct SettingsObserver {
worktree_store: Entity<WorktreeStore>,
project_id: u64,
task_store: Entity<TaskStore>,
+ _user_settings_watcher: Option<Subscription>,
_global_task_config_watcher: Task<()>,
_global_debug_config_watcher: Task<()>,
}
@@ -672,6 +670,7 @@ pub struct SettingsObserver {
impl SettingsObserver {
pub fn init(client: &AnyProtoClient) {
client.add_entity_message_handler(Self::handle_update_worktree_settings);
+ client.add_entity_message_handler(Self::handle_update_user_settings);
}
pub fn new_local(
@@ -688,7 +687,8 @@ impl SettingsObserver {
task_store,
mode: SettingsObserverMode::Local(fs.clone()),
downstream_client: None,
- project_id: 0,
+ _user_settings_watcher: None,
+ project_id: REMOTE_SERVER_PROJECT_ID,
_global_task_config_watcher: Self::subscribe_to_global_task_file_changes(
fs.clone(),
paths::tasks_file().clone(),
@@ -706,14 +706,38 @@ impl SettingsObserver {
fs: Arc<dyn Fs>,
worktree_store: Entity<WorktreeStore>,
task_store: Entity<TaskStore>,
+ upstream_client: Option<AnyProtoClient>,
cx: &mut Context<Self>,
) -> Self {
+ let mut user_settings_watcher = None;
+ if cx.try_global::<SettingsStore>().is_some() {
+ if let Some(upstream_client) = upstream_client {
+ let mut user_settings = None;
+ user_settings_watcher = Some(cx.observe_global::<SettingsStore>(move |_, cx| {
+ let new_settings = cx.global::<SettingsStore>().raw_user_settings();
+ if Some(new_settings) != user_settings.as_ref() {
+ if let Some(new_settings_string) = serde_json::to_string(new_settings).ok()
+ {
+ user_settings = Some(new_settings.clone());
+ upstream_client
+ .send(proto::UpdateUserSettings {
+ project_id: REMOTE_SERVER_PROJECT_ID,
+ contents: new_settings_string,
+ })
+ .log_err();
+ }
+ }
+ }));
+ }
+ };
+
Self {
worktree_store,
task_store,
mode: SettingsObserverMode::Remote,
downstream_client: None,
- project_id: 0,
+ project_id: REMOTE_SERVER_PROJECT_ID,
+ _user_settings_watcher: user_settings_watcher,
_global_task_config_watcher: Self::subscribe_to_global_task_file_changes(
fs.clone(),
paths::tasks_file().clone(),
@@ -805,6 +829,24 @@ impl SettingsObserver {
Ok(())
}
+ async fn handle_update_user_settings(
+ _: Entity<Self>,
+ envelope: TypedEnvelope<proto::UpdateUserSettings>,
+ cx: AsyncApp,
+ ) -> anyhow::Result<()> {
+ let new_settings = serde_json::from_str::<serde_json::Value>(&envelope.payload.contents)
+ .with_context(|| {
+ format!("deserializing {} user settings", envelope.payload.contents)
+ })?;
+ cx.update_global(|settings_store: &mut SettingsStore, cx| {
+ settings_store
+ .set_raw_user_settings(new_settings, cx)
+ .context("setting new user settings")?;
+ anyhow::Ok(())
+ })??;
+ Ok(())
+ }
+
fn on_worktree_store_event(
&mut self,
_: Entity<WorktreeStore>,
@@ -1091,7 +1133,7 @@ impl SettingsObserver {
project_id: self.project_id,
worktree_id: remote_worktree_id.to_proto(),
path: directory.to_proto(),
- content: file_content,
+ content: file_content.clone(),
kind: Some(local_settings_kind_to_proto(kind).into()),
})
.log_err();
@@ -4,6 +4,7 @@ use crate::{
Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
*,
};
+use async_trait::async_trait;
use buffer_diff::{
BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
DiffHunkStatusKind, assert_hunks,
@@ -17,17 +18,17 @@ use git::{
};
use git2::RepositoryInitOptions;
use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
-use http_client::Url;
use itertools::Itertools;
use language::{
Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
- LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
+ LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
+ ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister,
language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
tree_sitter_rust, tree_sitter_typescript,
};
use lsp::{
DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
- WillRenameFiles, notification::DidRenameFiles,
+ Uri, WillRenameFiles, notification::DidRenameFiles,
};
use parking_lot::Mutex;
use paths::{config_dir, tasks_file};
@@ -38,7 +39,7 @@ use serde_json::json;
#[cfg(not(windows))]
use std::os;
use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
-use task::{ResolvedTask, TaskContext};
+use task::{ResolvedTask, ShellKind, TaskContext};
use unindent::Unindent as _;
use util::{
TryFutureExt as _, assert_set_eq, maybe, path,
@@ -140,8 +141,10 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
+ max_line_length = 120
[*.js]
tab_width = 10
+ max_line_length = off
"#,
".zed": {
"settings.json": r#"{
@@ -149,7 +152,8 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
"hard_tabs": false,
"ensure_final_newline_on_save": false,
"remove_trailing_whitespace_on_save": false,
- "soft_wrap": "editor_width"
+ "preferred_line_length": 64,
+ "soft_wrap": "editor_width",
}"#,
},
"a.rs": "fn a() {\n A\n}",
@@ -157,6 +161,7 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
".editorconfig": r#"
[*.rs]
indent_size = 2
+ max_line_length = off,
"#,
"b.rs": "fn b() {\n B\n}",
},
@@ -205,6 +210,7 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
assert_eq!(settings_a.hard_tabs, true);
assert_eq!(settings_a.ensure_final_newline_on_save, true);
assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
+ assert_eq!(settings_a.preferred_line_length, 120);
// .editorconfig in b/ overrides .editorconfig in root
assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
@@ -212,6 +218,10 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
// "indent_size" is not set, so "tab_width" is used
assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
+ // When max_line_length is "off", default to .zed/settings.json
+ assert_eq!(settings_b.preferred_line_length, 64);
+ assert_eq!(settings_c.preferred_line_length, 64);
+
// README.md should not be affected by .editorconfig's globe "*.rs"
assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
});
@@ -587,6 +597,208 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
);
}
+#[gpui::test]
+async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
+ cx: &mut gpui::TestAppContext,
+) {
+ pub(crate) struct PyprojectTomlManifestProvider;
+
+ impl ManifestProvider for PyprojectTomlManifestProvider {
+ fn name(&self) -> ManifestName {
+ SharedString::new_static("pyproject.toml").into()
+ }
+
+ fn search(
+ &self,
+ ManifestQuery {
+ path,
+ depth,
+ delegate,
+ }: ManifestQuery,
+ ) -> Option<Arc<Path>> {
+ for path in path.ancestors().take(depth) {
+ let p = path.join("pyproject.toml");
+ if delegate.exists(&p, Some(false)) {
+ return Some(path.into());
+ }
+ }
+
+ None
+ }
+ }
+
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+
+ fs.insert_tree(
+ path!("/the-root"),
+ json!({
+ ".zed": {
+ "settings.json": r#"
+ {
+ "languages": {
+ "Python": {
+ "language_servers": ["ty"]
+ }
+ }
+ }"#
+ },
+ "project-a": {
+ ".venv": {},
+ "file.py": "",
+ "pyproject.toml": ""
+ },
+ "project-b": {
+ ".venv": {},
+ "source_file.py":"",
+ "another_file.py": "",
+ "pyproject.toml": ""
+ }
+ }),
+ )
+ .await;
+ cx.update(|cx| {
+ ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
+ });
+
+ let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ let _fake_python_server = language_registry.register_fake_lsp(
+ "Python",
+ FakeLspAdapter {
+ name: "ty",
+ capabilities: lsp::ServerCapabilities {
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ );
+
+ language_registry.add(python_lang(fs.clone()));
+ let (first_buffer, _handle) = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ let servers = project.update(cx, |project, cx| {
+ project.lsp_store.update(cx, |this, cx| {
+ first_buffer.update(cx, |buffer, cx| {
+ this.language_servers_for_local_buffer(buffer, cx)
+ .map(|(adapter, server)| (adapter.clone(), server.clone()))
+ .collect::<Vec<_>>()
+ })
+ })
+ });
+ cx.executor().run_until_parked();
+ assert_eq!(servers.len(), 1);
+ let (adapter, server) = servers.into_iter().next().unwrap();
+ assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
+ assert_eq!(server.server_id(), LanguageServerId(0));
+ // `workspace_folders` are set to the rooting point.
+ assert_eq!(
+ server.workspace_folders(),
+ BTreeSet::from_iter(
+ [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
+ )
+ );
+
+ let (second_project_buffer, _other_handle) = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ let servers = project.update(cx, |project, cx| {
+ project.lsp_store.update(cx, |this, cx| {
+ second_project_buffer.update(cx, |buffer, cx| {
+ this.language_servers_for_local_buffer(buffer, cx)
+ .map(|(adapter, server)| (adapter.clone(), server.clone()))
+ .collect::<Vec<_>>()
+ })
+ })
+ });
+ cx.executor().run_until_parked();
+ assert_eq!(servers.len(), 1);
+ let (adapter, server) = servers.into_iter().next().unwrap();
+ assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
+ // We're not using venvs at all here, so both folders should fall under the same root.
+ assert_eq!(server.server_id(), LanguageServerId(0));
+ // Now, let's select a different toolchain for one of subprojects.
+
+ let Toolchains {
+ toolchains: available_toolchains_for_b,
+ root_path,
+ ..
+ } = project
+ .update(cx, |this, cx| {
+ let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
+ this.available_toolchains(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from("project-b/source_file.py".as_ref()),
+ },
+ LanguageName::new("Python"),
+ cx,
+ )
+ })
+ .await
+ .expect("A toolchain to be discovered");
+ assert_eq!(root_path.as_ref(), Path::new("project-b"));
+ assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
+ let currently_active_toolchain = project
+ .update(cx, |this, cx| {
+ let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
+ this.active_toolchain(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from("project-b/source_file.py".as_ref()),
+ },
+ LanguageName::new("Python"),
+ cx,
+ )
+ })
+ .await;
+
+ assert!(currently_active_toolchain.is_none());
+ let _ = project
+ .update(cx, |this, cx| {
+ let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
+ this.activate_toolchain(
+ ProjectPath {
+ worktree_id,
+ path: root_path,
+ },
+ available_toolchains_for_b
+ .toolchains
+ .into_iter()
+ .next()
+ .unwrap(),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+ cx.run_until_parked();
+ let servers = project.update(cx, |project, cx| {
+ project.lsp_store.update(cx, |this, cx| {
+ second_project_buffer.update(cx, |buffer, cx| {
+ this.language_servers_for_local_buffer(buffer, cx)
+ .map(|(adapter, server)| (adapter.clone(), server.clone()))
+ .collect::<Vec<_>>()
+ })
+ })
+ });
+ cx.executor().run_until_parked();
+ assert_eq!(servers.len(), 1);
+ let (adapter, server) = servers.into_iter().next().unwrap();
+ assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
+ // There's a new language server in town.
+ assert_eq!(server.server_id(), LanguageServerId(1));
+}
+
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -683,7 +895,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
version: 0,
text: "const A: i32 = 1;".to_string(),
language_id: "rust".to_string(),
@@ -695,7 +907,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
assert_eq!(
buffer
.completion_triggers()
- .into_iter()
+ .iter()
.cloned()
.collect::<Vec<_>>(),
&[".".to_string(), "::".to_string()]
@@ -713,7 +925,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1
)
);
@@ -734,7 +946,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
version: 0,
text: "{\"a\": 1}".to_string(),
language_id: "json".to_string(),
@@ -747,7 +959,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
assert_eq!(
buffer
.completion_triggers()
- .into_iter()
+ .iter()
.cloned()
.collect::<Vec<_>>(),
&[":".to_string()]
@@ -766,7 +978,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
assert_eq!(
buffer
.completion_triggers()
- .into_iter()
+ .iter()
.cloned()
.collect::<Vec<_>>(),
&[".".to_string(), "::".to_string()]
@@ -784,7 +996,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1
)
);
@@ -800,7 +1012,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+ lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
)
);
assert_eq!(
@@ -809,7 +1021,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+ lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
)
);
@@ -826,7 +1038,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
+ lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
);
assert_eq!(
fake_rust_server
@@ -834,7 +1046,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "rust".to_string(),
@@ -876,7 +1088,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
+ lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
);
assert_eq!(
fake_json_server
@@ -884,7 +1096,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
@@ -910,7 +1122,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::VersionedTextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1
)
);
@@ -940,7 +1152,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
version: 0,
text: rust_buffer.update(cx, |buffer, _| buffer.text()),
language_id: "rust".to_string(),
@@ -961,13 +1173,13 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
],
[
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
version: 0,
text: json_buffer.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
},
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
version: 0,
text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
language_id: "json".to_string(),
@@ -979,7 +1191,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
cx.update(|_| drop(_json_handle));
let close_message = lsp::DidCloseTextDocumentParams {
text_document: lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
),
};
assert_eq!(
@@ -1108,7 +1320,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
let _out_of_worktree_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer_via_lsp(
- lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
server_id,
cx,
)
@@ -1268,23 +1480,23 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
&*file_changes.lock(),
&[
lsp::FileEvent {
- uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
typ: lsp::FileChangeType::CHANGED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
typ: lsp::FileChangeType::DELETED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
typ: lsp::FileChangeType::CREATED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
typ: lsp::FileChangeType::CREATED,
},
lsp::FileEvent {
- uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
typ: lsp::FileChangeType::CHANGED,
},
]
@@ -1331,7 +1543,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1350,7 +1562,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1442,7 +1654,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1461,7 +1673,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
.update_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
@@ -1605,7 +1817,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
);
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
@@ -1658,7 +1870,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
// Ensure publishing empty diagnostics twice only results in one update event.
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: Default::default(),
});
@@ -1671,7 +1883,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
);
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: Default::default(),
});
@@ -1743,6 +1955,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
server_id: LanguageServerId(1),
buffer_id,
buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
+ name: Some(fake_server.server.name())
}
);
assert_eq!(
@@ -1802,7 +2015,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
// Publish diagnostics
let fake_server = fake_servers.next().await.unwrap();
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
@@ -1818,7 +2031,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
buffer
.snapshot()
.diagnostics_in_range::<_, usize>(0..1, false)
- .map(|entry| entry.diagnostic.message.clone())
+ .map(|entry| entry.diagnostic.message)
.collect::<Vec<_>>(),
["the message".to_string()]
);
@@ -1844,7 +2057,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
buffer
.snapshot()
.diagnostics_in_range::<_, usize>(0..1, false)
- .map(|entry| entry.diagnostic.message.clone())
+ .map(|entry| entry.diagnostic.message)
.collect::<Vec<_>>(),
Vec::<String>::new(),
);
@@ -1883,7 +2096,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
// Before restarting the server, report diagnostics with an unknown buffer version.
let fake_server = fake_servers.next().await.unwrap();
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(10000),
diagnostics: Vec::new(),
});
@@ -2134,7 +2347,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Report some diagnostics for the initial version of the buffer
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(open_notification.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -2222,7 +2435,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Ensure overlapping diagnostics are highlighted correctly.
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(open_notification.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -2316,7 +2529,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// Handle out-of-order diagnostics
fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
version: Some(change_notification_2.text_document.version),
diagnostics: vec![
lsp::Diagnostic {
@@ -2997,7 +3210,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
- lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
),
)))
@@ -3005,6 +3218,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
let mut definitions = project
.update(cx, |project, cx| project.definitions(&buffer, 22, cx))
.await
+ .unwrap()
.unwrap();
// Assert no new language server started
@@ -3519,7 +3733,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
.next()
.await;
- let action = actions.await.unwrap()[0].clone();
+ let action = actions.await.unwrap().unwrap()[0].clone();
let apply = project.update(cx, |project, cx| {
project.apply_code_action(buffer.clone(), action, true, cx)
});
@@ -3555,7 +3769,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
edit: lsp::WorkspaceEdit {
changes: Some(
[(
- lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
vec![lsp::TextEdit {
range: lsp::Range::new(
lsp::Position::new(0, 0),
@@ -3662,7 +3876,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
);
let buffer = project
- .update(cx, |this, cx| this.create_buffer(cx))
+ .update(cx, |this, cx| this.create_buffer(false, cx))
.unwrap()
.await;
project.update(cx, |this, cx| {
@@ -3694,7 +3908,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
.await
.text_document,
lsp::TextDocumentItem {
- uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
+ uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
version: 0,
text: "".to_string(),
language_id: "rust".to_string(),
@@ -3712,7 +3926,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
@@ -3767,7 +3981,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({
@@ -3874,7 +4088,9 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) {
let languages = project.update(cx, |project, _| project.languages().clone());
languages.add(rust_lang());
- let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
+ let buffer = project.update(cx, |project, cx| {
+ project.create_local_buffer("", None, false, cx)
+ });
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, "abc")], None, cx);
assert!(buffer.is_dirty());
@@ -4123,7 +4339,7 @@ async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
})
.unwrap()
.await
- .to_included()
+ .into_included()
.unwrap();
cx.executor().run_until_parked();
@@ -4532,7 +4748,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
.await
.unwrap();
- let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
+ let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
let message = lsp::PublishDiagnosticsParams {
uri: buffer_uri.clone(),
diagnostics: vec![
@@ -4854,7 +5070,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
new_text: "This is not a drill".to_owned(),
})],
text_document: lsp::OptionalVersionedTextDocumentIdentifier {
- uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
+ uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
version: Some(1337),
},
}]
@@ -4979,14 +5195,14 @@ async fn test_rename(cx: &mut gpui::TestAppContext) {
changes: Some(
[
(
- lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
"THREE".to_string(),
)],
),
(
- lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
vec![
lsp::TextEdit::new(
lsp::Range::new(
@@ -5371,9 +5587,7 @@ async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let _buffer = project.update(cx, |project, cx| {
- let buffer = project.create_local_buffer("file", None, cx);
- project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
- buffer
+ project.create_local_buffer("file", None, false, cx)
});
assert_eq!(
@@ -5897,7 +6111,7 @@ async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/one/two",
json!({
@@ -5918,7 +6132,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
// Can't create paths outside the project
@@ -6110,6 +6324,7 @@ async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
hover_task
.await
.into_iter()
+ .flatten()
.map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
.sorted()
.collect::<Vec<_>>(),
@@ -6183,6 +6398,7 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
hover_task
.await
.into_iter()
+ .flatten()
.map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
.sorted()
.collect::<Vec<_>>(),
@@ -6261,7 +6477,7 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
.await
.expect("The code action request should have been triggered");
- let code_actions = code_actions_task.await.unwrap();
+ let code_actions = code_actions_task.await.unwrap().unwrap();
assert_eq!(code_actions.len(), 1);
assert_eq!(
code_actions[0].lsp_action.action_kind(),
@@ -6420,6 +6636,7 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
code_actions_task
.await
.unwrap()
+ .unwrap()
.into_iter()
.map(|code_action| code_action.lsp_action.title().to_owned())
.sorted()
@@ -7449,7 +7666,7 @@ async fn test_staging_random_hunks(
.unwrap_or(20);
// Try to induce races between diff recalculation and index writes.
- if rng.gen_bool(0.5) {
+ if rng.random_bool(0.5) {
executor.deprioritize(*CALCULATE_DIFF_TASK);
}
@@ -7505,7 +7722,7 @@ async fn test_staging_random_hunks(
assert_eq!(hunks.len(), 6);
for _i in 0..operations {
- let hunk_ix = rng.gen_range(0..hunks.len());
+ let hunk_ix = rng.random_range(0..hunks.len());
let hunk = &mut hunks[hunk_ix];
let row = hunk.range.start.row;
@@ -7523,7 +7740,7 @@ async fn test_staging_random_hunks(
hunk.secondary_status = SecondaryHunkAdditionPending;
}
- for _ in 0..rng.gen_range(0..10) {
+ for _ in 0..rng.random_range(0..10) {
log::info!("yielding");
cx.executor().simulate_random_delay().await;
}
@@ -8969,6 +9186,77 @@ fn rust_lang() -> Arc<Language> {
))
}
+fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
+ struct PythonMootToolchainLister(Arc<FakeFs>);
+ #[async_trait]
+ impl ToolchainLister for PythonMootToolchainLister {
+ async fn list(
+ &self,
+ worktree_root: PathBuf,
+ subroot_relative_path: Arc<Path>,
+ _: Option<HashMap<String, String>>,
+ ) -> ToolchainList {
+ // This lister will always return a path .venv directories within ancestors
+ let ancestors = subroot_relative_path
+ .ancestors()
+ .map(ToOwned::to_owned)
+ .collect::<Vec<_>>();
+ let mut toolchains = vec![];
+ for ancestor in ancestors {
+ let venv_path = worktree_root.join(ancestor).join(".venv");
+ if self.0.is_dir(&venv_path).await {
+ toolchains.push(Toolchain {
+ name: SharedString::new("Python Venv"),
+ path: venv_path.to_string_lossy().into_owned().into(),
+ language_name: LanguageName(SharedString::new_static("Python")),
+ as_json: serde_json::Value::Null,
+ })
+ }
+ }
+ ToolchainList {
+ toolchains,
+ ..Default::default()
+ }
+ }
+ async fn resolve(
+ &self,
+ _: PathBuf,
+ _: Option<HashMap<String, String>>,
+ ) -> anyhow::Result<Toolchain> {
+ Err(anyhow::anyhow!("Not implemented"))
+ }
+ fn meta(&self) -> ToolchainMetadata {
+ ToolchainMetadata {
+ term: SharedString::new_static("Virtual Environment"),
+ new_toolchain_placeholder: SharedString::new_static(
+ "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
+ ),
+ manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
+ }
+ }
+ async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
+ vec![]
+ }
+ }
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Python".into(),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["py".to_string()],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ None, // We're not testing Python parsing with this language.
+ )
+ .with_manifest(Some(ManifestName::from(SharedString::new_static(
+ "pyproject.toml",
+ ))))
+ .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
+ )
+}
+
fn typescript_lang() -> Arc<Language> {
Arc::new(Language::new(
LanguageConfig {
@@ -143,7 +143,7 @@ impl SearchQuery {
pub fn regex(
query: impl ToString,
whole_word: bool,
- case_sensitive: bool,
+ mut case_sensitive: bool,
include_ignored: bool,
one_match_per_line: bool,
files_to_include: PathMatcher,
@@ -153,6 +153,14 @@ impl SearchQuery {
) -> Result<Self> {
let mut query = query.to_string();
let initial_query = Arc::from(query.as_str());
+
+ if let Some((case_sensitive_from_pattern, new_query)) =
+ Self::case_sensitive_from_pattern(&query)
+ {
+ case_sensitive = case_sensitive_from_pattern;
+ query = new_query
+ }
+
if whole_word {
let mut word_query = String::new();
if let Some(first) = query.get(0..1)
@@ -192,6 +200,45 @@ impl SearchQuery {
})
}
+ /// Extracts case sensitivity settings from pattern items in the provided
+ /// query and returns the same query, with the pattern items removed.
+ ///
+ /// The following pattern modifiers are supported:
+ ///
+ /// - `\c` (case_sensitive: false)
+ /// - `\C` (case_sensitive: true)
+ ///
+ /// If no pattern item were found, `None` will be returned.
+ fn case_sensitive_from_pattern(query: &str) -> Option<(bool, String)> {
+ if !(query.contains("\\c") || query.contains("\\C")) {
+ return None;
+ }
+
+ let mut was_escaped = false;
+ let mut new_query = String::new();
+ let mut is_case_sensitive = None;
+
+ for c in query.chars() {
+ if was_escaped {
+ if c == 'c' {
+ is_case_sensitive = Some(false);
+ } else if c == 'C' {
+ is_case_sensitive = Some(true);
+ } else {
+ new_query.push('\\');
+ new_query.push(c);
+ }
+ was_escaped = false
+ } else if c == '\\' {
+ was_escaped = true
+ } else {
+ new_query.push(c);
+ }
+ }
+
+ is_case_sensitive.map(|c| (c, new_query))
+ }
+
pub fn from_proto(message: proto::SearchQuery) -> Result<Self> {
let files_to_include = if message.files_to_include.is_empty() {
message
@@ -596,4 +643,87 @@ mod tests {
}
}
}
+
+ #[test]
+ fn test_case_sensitive_pattern_items() {
+ let case_sensitive = false;
+ let search_query = SearchQuery::regex(
+ "test\\C",
+ false,
+ case_sensitive,
+ false,
+ false,
+ Default::default(),
+ Default::default(),
+ false,
+ None,
+ )
+ .expect("Should be able to create a regex SearchQuery");
+
+ assert_eq!(
+ search_query.case_sensitive(),
+ true,
+ "Case sensitivity should be enabled when \\C pattern item is present in the query."
+ );
+
+ let case_sensitive = true;
+ let search_query = SearchQuery::regex(
+ "test\\c",
+ true,
+ case_sensitive,
+ false,
+ false,
+ Default::default(),
+ Default::default(),
+ false,
+ None,
+ )
+ .expect("Should be able to create a regex SearchQuery");
+
+ assert_eq!(
+ search_query.case_sensitive(),
+ false,
+ "Case sensitivity should be disabled when \\c pattern item is present, even if initially set to true."
+ );
+
+ let case_sensitive = false;
+ let search_query = SearchQuery::regex(
+ "test\\c\\C",
+ false,
+ case_sensitive,
+ false,
+ false,
+ Default::default(),
+ Default::default(),
+ false,
+ None,
+ )
+ .expect("Should be able to create a regex SearchQuery");
+
+ assert_eq!(
+ search_query.case_sensitive(),
+ true,
+ "Case sensitivity should be enabled when \\C is the last pattern item, even after a \\c."
+ );
+
+ let case_sensitive = false;
+ let search_query = SearchQuery::regex(
+ "tests\\\\C",
+ false,
+ case_sensitive,
+ false,
+ false,
+ Default::default(),
+ Default::default(),
+ false,
+ None,
+ )
+ .expect("Should be able to create a regex SearchQuery");
+
+ assert_eq!(
+ search_query.case_sensitive(),
+ false,
+ "Case sensitivity should not be enabled when \\C pattern item is preceded by a backslash."
+ );
+ }
}
@@ -110,7 +110,7 @@ impl<T: InventoryContents> InventoryFor<T> {
fn global_scenarios(&self) -> impl '_ + Iterator<Item = (TaskSourceKind, T)> {
self.global.iter().flat_map(|(file_path, templates)| {
- templates.into_iter().map(|template| {
+ templates.iter().map(|template| {
(
TaskSourceKind::AbsPath {
id_base: Cow::Owned(format!("global {}", T::GLOBAL_SOURCE_FILE)),
@@ -760,7 +760,7 @@ impl Inventory {
TaskSettingsLocation::Global(path) => {
previously_existing_scenarios = parsed_scenarios
.global_scenarios()
- .map(|(_, scenario)| scenario.label.clone())
+ .map(|(_, scenario)| scenario.label)
.collect::<HashSet<_>>();
parsed_scenarios
.global
@@ -770,7 +770,7 @@ impl Inventory {
TaskSettingsLocation::Worktree(location) => {
previously_existing_scenarios = parsed_scenarios
.worktree_scenarios(location.worktree_id)
- .map(|(_, scenario)| scenario.label.clone())
+ .map(|(_, scenario)| scenario.label)
.collect::<HashSet<_>>();
if new_templates.is_empty() {
@@ -1,79 +1,36 @@
-use crate::{Project, ProjectPath};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use collections::HashMap;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
-use itertools::Itertools;
+
+use itertools::Itertools as _;
use language::LanguageName;
-use remote::ssh_session::SshArgs;
+use remote::RemoteClient;
use settings::{Settings, SettingsLocation};
use smol::channel::bounded;
use std::{
borrow::Cow,
- env::{self},
path::{Path, PathBuf},
sync::Arc,
};
-use task::{DEFAULT_REMOTE_SHELL, Shell, ShellBuilder, SpawnInTerminal};
+use task::{Shell, ShellBuilder, ShellKind, SpawnInTerminal};
use terminal::{
- TaskState, TaskStatus, Terminal, TerminalBuilder,
- terminal_settings::{self, ActivateScript, TerminalSettings, VenvSettings},
-};
-use util::{
- ResultExt,
- paths::{PathStyle, RemotePathBuf},
+ TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::TerminalSettings,
};
+use util::{get_default_system_shell, get_system_shell, maybe};
-/// The directory inside a Python virtual environment that contains executables
-const PYTHON_VENV_BIN_DIR: &str = if cfg!(target_os = "windows") {
- "Scripts"
-} else {
- "bin"
-};
+use crate::{Project, ProjectPath};
pub struct Terminals {
pub(crate) local_handles: Vec<WeakEntity<terminal::Terminal>>,
}
-/// Terminals are opened either for the users shell, or to run a task.
-
-#[derive(Debug)]
-pub enum TerminalKind {
- /// Run a shell at the given path (or $HOME if None)
- Shell(Option<PathBuf>),
- /// Run a task.
- Task(SpawnInTerminal),
-}
-
-/// SshCommand describes how to connect to a remote server
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct SshCommand {
- pub arguments: Vec<String>,
-}
-
-impl SshCommand {
- pub fn add_port_forwarding(&mut self, local_port: u16, host: String, remote_port: u16) {
- self.arguments.push("-L".to_string());
- self.arguments
- .push(format!("{}:{}:{}", local_port, host, remote_port));
- }
-}
-
-pub struct SshDetails {
- pub host: String,
- pub ssh_command: SshCommand,
- pub envs: Option<HashMap<String, String>>,
- pub path_style: PathStyle,
-}
-
impl Project {
pub fn active_project_directory(&self, cx: &App) -> Option<Arc<Path>> {
- let worktree = self
- .active_entry()
+ self.active_entry()
.and_then(|entry_id| self.worktree_for_entry(entry_id, cx))
.into_iter()
.chain(self.worktrees(cx))
- .find_map(|tree| tree.read(cx).root_dir());
- worktree
+ .find_map(|tree| tree.read(cx).root_dir())
}
pub fn first_project_directory(&self, cx: &App) -> Option<PathBuf> {
@@ -86,70 +43,25 @@ impl Project {
}
}
- pub fn ssh_details(&self, cx: &App) -> Option<SshDetails> {
- if let Some(ssh_client) = &self.ssh_client {
- let ssh_client = ssh_client.read(cx);
- if let Some((SshArgs { arguments, envs }, path_style)) = ssh_client.ssh_info() {
- return Some(SshDetails {
- host: ssh_client.connection_options().host.clone(),
- ssh_command: SshCommand { arguments },
- envs,
- path_style,
- });
- }
- }
-
- None
- }
-
- pub fn create_terminal(
+ pub fn create_terminal_task(
&mut self,
- kind: TerminalKind,
+ spawn_task: SpawnInTerminal,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Terminal>>> {
- let path: Option<Arc<Path>> = match &kind {
- TerminalKind::Shell(path) => path.as_ref().map(|path| Arc::from(path.as_ref())),
- TerminalKind::Task(spawn_task) => {
- if let Some(cwd) = &spawn_task.cwd {
- Some(Arc::from(cwd.as_ref()))
- } else {
- self.active_project_directory(cx)
- }
- }
- };
-
- let mut settings_location = None;
- if let Some(path) = path.as_ref()
- && let Some((worktree, _)) = self.find_worktree(path, cx)
- {
- settings_location = Some(SettingsLocation {
- worktree_id: worktree.read(cx).id(),
- path,
- });
- }
- let venv = TerminalSettings::get(settings_location, cx)
- .detect_venv
- .clone();
+ let is_via_remote = self.remote_client.is_some();
- cx.spawn(async move |project, cx| {
- let python_venv_directory = if let Some(path) = path {
- project
- .update(cx, |this, cx| this.python_venv_directory(path, venv, cx))?
- .await
+ let path: Option<Arc<Path>> = if let Some(cwd) = &spawn_task.cwd {
+ if is_via_remote {
+ Some(Arc::from(cwd.as_ref()))
} else {
- None
- };
- project.update(cx, |project, cx| {
- project.create_terminal_with_venv(kind, python_venv_directory, cx)
- })?
- })
- }
+ let cwd = cwd.to_string_lossy();
+ let tilde_substituted = shellexpand::tilde(&cwd);
+ Some(Arc::from(Path::new(tilde_substituted.as_ref())))
+ }
+ } else {
+ self.active_project_directory(cx)
+ };
- pub fn terminal_settings<'a>(
- &'a self,
- path: &'a Option<PathBuf>,
- cx: &'a App,
- ) -> &'a TerminalSettings {
let mut settings_location = None;
if let Some(path) = path.as_ref()
&& let Some((worktree, _)) = self.find_worktree(path, cx)
@@ -159,88 +71,220 @@ impl Project {
path,
});
}
- TerminalSettings::get(settings_location, cx)
- }
-
- pub fn exec_in_shell(&self, command: String, cx: &App) -> std::process::Command {
- let path = self.first_project_directory(cx);
- let ssh_details = self.ssh_details(cx);
- let settings = self.terminal_settings(&path, cx).clone();
+ let settings = TerminalSettings::get(settings_location, cx).clone();
+ let detect_venv = settings.detect_venv.as_option().is_some();
- let builder = ShellBuilder::new(ssh_details.is_none(), &settings.shell).non_interactive();
- let (command, args) = builder.build(Some(command), &Vec::new());
+ let (completion_tx, completion_rx) = bounded(1);
+ // Start with the environment that we might have inherited from the Zed CLI.
let mut env = self
.environment
.read(cx)
.get_cli_environment()
.unwrap_or_default();
+ // Then extend it with the explicit env variables from the settings, so they take
+ // precedence.
env.extend(settings.env);
- match self.ssh_details(cx) {
- Some(SshDetails {
- ssh_command,
- envs,
- path_style,
- ..
- }) => {
- let (command, args) = wrap_for_ssh(
- &ssh_command,
- Some((&command, &args)),
- path.as_deref(),
- env,
- None,
- path_style,
- );
- let mut command = std::process::Command::new(command);
- command.args(args);
- if let Some(envs) = envs {
- command.envs(envs);
- }
- command
- }
- None => {
- let mut command = std::process::Command::new(command);
- command.args(args);
- command.envs(env);
- if let Some(path) = path {
- command.current_dir(path);
+ let local_path = if is_via_remote { None } else { path.clone() };
+ let task_state = Some(TaskState {
+ id: spawn_task.id,
+ full_label: spawn_task.full_label,
+ label: spawn_task.label,
+ command_label: spawn_task.command_label,
+ hide: spawn_task.hide,
+ status: TaskStatus::Running,
+ show_summary: spawn_task.show_summary,
+ show_command: spawn_task.show_command,
+ show_rerun: spawn_task.show_rerun,
+ completion_rx,
+ });
+ let remote_client = self.remote_client.clone();
+ let shell = match &remote_client {
+ Some(remote_client) => remote_client
+ .read(cx)
+ .shell()
+ .unwrap_or_else(get_default_system_shell),
+ None => match &settings.shell {
+ Shell::Program(program) => program.clone(),
+ Shell::WithArguments {
+ program,
+ args: _,
+ title_override: _,
+ } => program.clone(),
+ Shell::System => get_system_shell(),
+ },
+ };
+
+ let project_path_contexts = self
+ .active_entry()
+ .and_then(|entry_id| self.path_for_entry(entry_id, cx))
+ .into_iter()
+ .chain(
+ self.visible_worktrees(cx)
+ .map(|wt| wt.read(cx).id())
+ .map(|worktree_id| ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("")),
+ }),
+ );
+ let toolchains = project_path_contexts
+ .filter(|_| detect_venv)
+ .map(|p| self.active_toolchain(p, LanguageName::new("Python"), cx))
+ .collect::<Vec<_>>();
+ let lang_registry = self.languages.clone();
+ let fs = self.fs.clone();
+ cx.spawn(async move |project, cx| {
+ let activation_script = maybe!(async {
+ for toolchain in toolchains {
+ let Some(toolchain) = toolchain.await else {
+ continue;
+ };
+ let language = lang_registry
+ .language_for_name(&toolchain.language_name.0)
+ .await
+ .ok();
+ let lister = language?.toolchain_lister();
+ return Some(
+ lister?
+ .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref())
+ .await,
+ );
}
- command
- }
- }
+ None
+ })
+ .await
+ .unwrap_or_default();
+
+ project.update(cx, move |this, cx| {
+ let shell = {
+ env.extend(spawn_task.env);
+ match remote_client {
+ Some(remote_client) => match activation_script.clone() {
+ activation_script if !activation_script.is_empty() => {
+ let activation_script = activation_script.join("; ");
+ let to_run = if let Some(command) = spawn_task.command {
+ let command: Option<Cow<str>> = shlex::try_quote(&command).ok();
+ let args = spawn_task
+ .args
+ .iter()
+ .filter_map(|arg| shlex::try_quote(arg).ok());
+ command.into_iter().chain(args).join(" ")
+ } else {
+ format!("exec {shell} -l")
+ };
+ let args = vec![
+ "-c".to_owned(),
+ format!("{activation_script}; {to_run}",),
+ ];
+ create_remote_shell(
+ Some((&shell, &args)),
+ &mut env,
+ path,
+ remote_client,
+ cx,
+ )?
+ }
+ _ => create_remote_shell(
+ spawn_task
+ .command
+ .as_ref()
+ .map(|command| (command, &spawn_task.args)),
+ &mut env,
+ path,
+ remote_client,
+ cx,
+ )?,
+ },
+ None => match activation_script.clone() {
+ #[cfg(not(target_os = "windows"))]
+ activation_script if !activation_script.is_empty() => {
+ let activation_script = activation_script.join("; ");
+ let to_run = if let Some(command) = spawn_task.command {
+ let command: Option<Cow<str>> = shlex::try_quote(&command).ok();
+ let args = spawn_task
+ .args
+ .iter()
+ .filter_map(|arg| shlex::try_quote(arg).ok());
+ command.into_iter().chain(args).join(" ")
+ } else {
+ format!("exec {shell} -l")
+ };
+ Shell::WithArguments {
+ program: shell,
+ args: vec![
+ "-c".to_owned(),
+ format!("{activation_script}; {to_run}",),
+ ],
+ title_override: None,
+ }
+ }
+ _ => {
+ if let Some(program) = spawn_task.command {
+ Shell::WithArguments {
+ program,
+ args: spawn_task.args,
+ title_override: None,
+ }
+ } else {
+ Shell::System
+ }
+ }
+ },
+ }
+ };
+ TerminalBuilder::new(
+ local_path.map(|path| path.to_path_buf()),
+ task_state,
+ shell,
+ env,
+ settings.cursor_shape.unwrap_or_default(),
+ settings.alternate_scroll,
+ settings.max_scroll_history_lines,
+ is_via_remote,
+ cx.entity_id().as_u64(),
+ Some(completion_tx),
+ cx,
+ activation_script,
+ )
+ .map(|builder| {
+ let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+
+ this.terminals
+ .local_handles
+ .push(terminal_handle.downgrade());
+
+ let id = terminal_handle.entity_id();
+ cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+ let handles = &mut project.terminals.local_handles;
+
+ if let Some(index) = handles
+ .iter()
+ .position(|terminal| terminal.entity_id() == id)
+ {
+ handles.remove(index);
+ cx.notify();
+ }
+ })
+ .detach();
+
+ terminal_handle
+ })
+ })?
+ })
}
- pub fn create_terminal_with_venv(
+ pub fn create_terminal_shell(
&mut self,
- kind: TerminalKind,
- python_venv_directory: Option<PathBuf>,
+ cwd: Option<PathBuf>,
cx: &mut Context<Self>,
- ) -> Result<Entity<Terminal>> {
- let this = &mut *self;
- let ssh_details = this.ssh_details(cx);
- let path: Option<Arc<Path>> = match &kind {
- TerminalKind::Shell(path) => path.as_ref().map(|path| Arc::from(path.as_ref())),
- TerminalKind::Task(spawn_task) => {
- if let Some(cwd) = &spawn_task.cwd {
- if ssh_details.is_some() {
- Some(Arc::from(cwd.as_ref()))
- } else {
- let cwd = cwd.to_string_lossy();
- let tilde_substituted = shellexpand::tilde(&cwd);
- Some(Arc::from(Path::new(tilde_substituted.as_ref())))
- }
- } else {
- this.active_project_directory(cx)
- }
- }
- };
-
- let is_ssh_terminal = ssh_details.is_some();
+ ) -> Task<Result<Entity<Terminal>>> {
+ let path = cwd.map(|p| Arc::from(&*p));
+ let is_via_remote = self.remote_client.is_some();
let mut settings_location = None;
if let Some(path) = path.as_ref()
- && let Some((worktree, _)) = this.find_worktree(path, cx)
+ && let Some((worktree, _)) = self.find_worktree(path, cx)
{
settings_location = Some(SettingsLocation {
worktree_id: worktree.read(cx).id(),
@@ -248,11 +292,10 @@ impl Project {
});
}
let settings = TerminalSettings::get(settings_location, cx).clone();
-
- let (completion_tx, completion_rx) = bounded(1);
+ let detect_venv = settings.detect_venv.as_option().is_some();
// Start with the environment that we might have inherited from the Zed CLI.
- let mut env = this
+ let mut env = self
.environment
.read(cx)
.get_cli_environment()
@@ -261,156 +304,124 @@ impl Project {
// precedence.
env.extend(settings.env);
- let local_path = if is_ssh_terminal { None } else { path.clone() };
+ let local_path = if is_via_remote { None } else { path.clone() };
- let mut python_venv_activate_command = Task::ready(None);
-
- let (spawn_task, shell) = match kind {
- TerminalKind::Shell(_) => {
- if let Some(python_venv_directory) = &python_venv_directory {
- python_venv_activate_command = this.python_activate_command(
- python_venv_directory,
- &settings.detect_venv,
- &settings.shell,
- cx,
- );
- }
+ let project_path_contexts = self
+ .active_entry()
+ .and_then(|entry_id| self.path_for_entry(entry_id, cx))
+ .into_iter()
+ .chain(
+ self.visible_worktrees(cx)
+ .map(|wt| wt.read(cx).id())
+ .map(|worktree_id| ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("")),
+ }),
+ );
+ let toolchains = project_path_contexts
+ .filter(|_| detect_venv)
+ .map(|p| self.active_toolchain(p, LanguageName::new("Python"), cx))
+ .collect::<Vec<_>>();
+ let remote_client = self.remote_client.clone();
+ let shell = match &remote_client {
+ Some(remote_client) => remote_client
+ .read(cx)
+ .shell()
+ .unwrap_or_else(get_default_system_shell),
+ None => match &settings.shell {
+ Shell::Program(program) => program.clone(),
+ Shell::WithArguments {
+ program,
+ args: _,
+ title_override: _,
+ } => program.clone(),
+ Shell::System => get_system_shell(),
+ },
+ };
- match ssh_details {
- Some(SshDetails {
- host,
- ssh_command,
- envs,
- path_style,
- }) => {
- log::debug!("Connecting to a remote server: {ssh_command:?}");
-
- // Alacritty sets its terminfo to `alacritty`, this requiring hosts to have it installed
- // to properly display colors.
- // We do not have the luxury of assuming the host has it installed,
- // so we set it to a default that does not break the highlighting via ssh.
- env.entry("TERM".to_string())
- .or_insert_with(|| "xterm-256color".to_string());
-
- let (program, args) = wrap_for_ssh(
- &ssh_command,
- None,
- path.as_deref(),
- env,
- None,
- path_style,
- );
- env = HashMap::default();
- if let Some(envs) = envs {
- env.extend(envs);
- }
- (
- Option::<TaskState>::None,
- Shell::WithArguments {
- program,
- args,
- title_override: Some(format!("{} — Terminal", host).into()),
- },
- )
- }
- None => (None, settings.shell),
- }
- }
- TerminalKind::Task(spawn_task) => {
- let task_state = Some(TaskState {
- id: spawn_task.id,
- full_label: spawn_task.full_label,
- label: spawn_task.label,
- command_label: spawn_task.command_label,
- hide: spawn_task.hide,
- status: TaskStatus::Running,
- show_summary: spawn_task.show_summary,
- show_command: spawn_task.show_command,
- show_rerun: spawn_task.show_rerun,
- completion_rx,
- });
-
- env.extend(spawn_task.env);
-
- if let Some(venv_path) = &python_venv_directory {
- env.insert(
- "VIRTUAL_ENV".to_string(),
- venv_path.to_string_lossy().to_string(),
+ let lang_registry = self.languages.clone();
+ let fs = self.fs.clone();
+ cx.spawn(async move |project, cx| {
+ let activation_script = maybe!(async {
+ for toolchain in toolchains {
+ let Some(toolchain) = toolchain.await else {
+ continue;
+ };
+ let language = lang_registry
+ .language_for_name(&toolchain.language_name.0)
+ .await
+ .ok();
+ let lister = language?.toolchain_lister();
+ return Some(
+ lister?
+ .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref())
+ .await,
);
}
-
- match ssh_details {
- Some(SshDetails {
- host,
- ssh_command,
- envs,
- path_style,
- }) => {
- log::debug!("Connecting to a remote server: {ssh_command:?}");
- env.entry("TERM".to_string())
- .or_insert_with(|| "xterm-256color".to_string());
- let (program, args) = wrap_for_ssh(
- &ssh_command,
- spawn_task
- .command
- .as_ref()
- .map(|command| (command, &spawn_task.args)),
- path.as_deref(),
- env,
- python_venv_directory.as_deref(),
- path_style,
- );
- env = HashMap::default();
- if let Some(envs) = envs {
- env.extend(envs);
+ None
+ })
+ .await
+ .unwrap_or_default();
+ project.update(cx, move |this, cx| {
+ let shell = {
+ match remote_client {
+ Some(remote_client) => {
+ create_remote_shell(None, &mut env, path, remote_client, cx)?
}
- (
- task_state,
- Shell::WithArguments {
- program,
- args,
- title_override: Some(format!("{} — Terminal", host).into()),
- },
- )
+ None => settings.shell,
}
- None => {
- if let Some(venv_path) = &python_venv_directory {
- add_environment_path(&mut env, &venv_path.join(PYTHON_VENV_BIN_DIR))
- .log_err();
+ };
+ TerminalBuilder::new(
+ local_path.map(|path| path.to_path_buf()),
+ None,
+ shell,
+ env,
+ settings.cursor_shape.unwrap_or_default(),
+ settings.alternate_scroll,
+ settings.max_scroll_history_lines,
+ is_via_remote,
+ cx.entity_id().as_u64(),
+ None,
+ cx,
+ activation_script,
+ )
+ .map(|builder| {
+ let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+
+ this.terminals
+ .local_handles
+ .push(terminal_handle.downgrade());
+
+ let id = terminal_handle.entity_id();
+ cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+ let handles = &mut project.terminals.local_handles;
+
+ if let Some(index) = handles
+ .iter()
+ .position(|terminal| terminal.entity_id() == id)
+ {
+ handles.remove(index);
+ cx.notify();
}
+ })
+ .detach();
- let shell = if let Some(program) = spawn_task.command {
- Shell::WithArguments {
- program,
- args: spawn_task.args,
- title_override: None,
- }
- } else {
- Shell::System
- };
- (task_state, shell)
- }
- }
- }
- };
- TerminalBuilder::new(
- local_path.map(|path| path.to_path_buf()),
- python_venv_directory,
- spawn_task,
- shell,
- env,
- settings.cursor_shape.unwrap_or_default(),
- settings.alternate_scroll,
- settings.max_scroll_history_lines,
- is_ssh_terminal,
- cx.entity_id().as_u64(),
- completion_tx,
- cx,
- )
- .map(|builder| {
+ terminal_handle
+ })
+ })?
+ })
+ }
+
+ pub fn clone_terminal(
+ &mut self,
+ terminal: &Entity<Terminal>,
+ cx: &mut Context<'_, Project>,
+ cwd: impl FnOnce() -> Option<PathBuf>,
+ ) -> Result<Entity<Terminal>> {
+ terminal.read(cx).clone_builder(cx, cwd).map(|builder| {
let terminal_handle = cx.new(|cx| builder.subscribe(cx));
- this.terminals
+ self.terminals
.local_handles
.push(terminal_handle.downgrade());
@@ -428,328 +439,106 @@ impl Project {
})
.detach();
- this.activate_python_virtual_environment(
- python_venv_activate_command,
- &terminal_handle,
- cx,
- );
-
terminal_handle
})
}
- fn python_venv_directory(
- &self,
- abs_path: Arc<Path>,
- venv_settings: VenvSettings,
- cx: &Context<Project>,
- ) -> Task<Option<PathBuf>> {
- cx.spawn(async move |this, cx| {
- if let Some((worktree, relative_path)) = this
- .update(cx, |this, cx| this.find_worktree(&abs_path, cx))
- .ok()?
- {
- let toolchain = this
- .update(cx, |this, cx| {
- this.active_toolchain(
- ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: relative_path.into(),
- },
- LanguageName::new("Python"),
- cx,
- )
- })
- .ok()?
- .await;
-
- if let Some(toolchain) = toolchain {
- let toolchain_path = Path::new(toolchain.path.as_ref());
- return Some(toolchain_path.parent()?.parent()?.to_path_buf());
- }
- }
- let venv_settings = venv_settings.as_option()?;
- this.update(cx, move |this, cx| {
- if let Some(path) = this.find_venv_in_worktree(&abs_path, &venv_settings, cx) {
- return Some(path);
- }
- this.find_venv_on_filesystem(&abs_path, &venv_settings, cx)
- })
- .ok()
- .flatten()
- })
- }
-
- fn find_venv_in_worktree(
- &self,
- abs_path: &Path,
- venv_settings: &terminal_settings::VenvSettingsContent,
- cx: &App,
- ) -> Option<PathBuf> {
- venv_settings
- .directories
- .iter()
- .map(|name| abs_path.join(name))
- .find(|venv_path| {
- let bin_path = venv_path.join(PYTHON_VENV_BIN_DIR);
- self.find_worktree(&bin_path, cx)
- .and_then(|(worktree, relative_path)| {
- worktree.read(cx).entry_for_path(&relative_path)
- })
- .is_some_and(|entry| entry.is_dir())
- })
- }
-
- fn find_venv_on_filesystem(
- &self,
- abs_path: &Path,
- venv_settings: &terminal_settings::VenvSettingsContent,
- cx: &App,
- ) -> Option<PathBuf> {
- let (worktree, _) = self.find_worktree(abs_path, cx)?;
- let fs = worktree.read(cx).as_local()?.fs();
- venv_settings
- .directories
- .iter()
- .map(|name| abs_path.join(name))
- .find(|venv_path| {
- let bin_path = venv_path.join(PYTHON_VENV_BIN_DIR);
- // One-time synchronous check is acceptable for terminal/task initialization
- smol::block_on(fs.metadata(&bin_path))
- .ok()
- .flatten()
- .is_some_and(|meta| meta.is_dir)
- })
- }
-
- fn activate_script_kind(shell: Option<&str>) -> ActivateScript {
- let shell_env = std::env::var("SHELL").ok();
- let shell_path = shell.or_else(|| shell_env.as_deref());
- let shell = std::path::Path::new(shell_path.unwrap_or(""))
- .file_name()
- .and_then(|name| name.to_str())
- .unwrap_or("");
- match shell {
- "fish" => ActivateScript::Fish,
- "tcsh" => ActivateScript::Csh,
- "nu" => ActivateScript::Nushell,
- "powershell" | "pwsh" => ActivateScript::PowerShell,
- _ => ActivateScript::Default,
+ pub fn terminal_settings<'a>(
+ &'a self,
+ path: &'a Option<PathBuf>,
+ cx: &'a App,
+ ) -> &'a TerminalSettings {
+ let mut settings_location = None;
+ if let Some(path) = path.as_ref()
+ && let Some((worktree, _)) = self.find_worktree(path, cx)
+ {
+ settings_location = Some(SettingsLocation {
+ worktree_id: worktree.read(cx).id(),
+ path,
+ });
}
+ TerminalSettings::get(settings_location, cx)
}
- fn python_activate_command(
- &self,
- venv_base_directory: &Path,
- venv_settings: &VenvSettings,
- shell: &Shell,
- cx: &mut App,
- ) -> Task<Option<String>> {
- let Some(venv_settings) = venv_settings.as_option() else {
- return Task::ready(None);
- };
- let activate_keyword = match venv_settings.activate_script {
- terminal_settings::ActivateScript::Default => match std::env::consts::OS {
- "windows" => ".",
- _ => ".",
- },
- terminal_settings::ActivateScript::Nushell => "overlay use",
- terminal_settings::ActivateScript::PowerShell => ".",
- terminal_settings::ActivateScript::Pyenv => "pyenv",
- _ => "source",
- };
- let script_kind =
- if venv_settings.activate_script == terminal_settings::ActivateScript::Default {
- match shell {
- Shell::Program(program) => Self::activate_script_kind(Some(program)),
- Shell::WithArguments {
- program,
- args: _,
- title_override: _,
- } => Self::activate_script_kind(Some(program)),
- Shell::System => Self::activate_script_kind(None),
- }
- } else {
- venv_settings.activate_script
- };
-
- let activate_script_name = match script_kind {
- terminal_settings::ActivateScript::Default
- | terminal_settings::ActivateScript::Pyenv => "activate",
- terminal_settings::ActivateScript::Csh => "activate.csh",
- terminal_settings::ActivateScript::Fish => "activate.fish",
- terminal_settings::ActivateScript::Nushell => "activate.nu",
- terminal_settings::ActivateScript::PowerShell => "activate.ps1",
- };
+ pub fn exec_in_shell(&self, command: String, cx: &App) -> Result<std::process::Command> {
+ let path = self.first_project_directory(cx);
+ let remote_client = self.remote_client.as_ref();
+ let settings = self.terminal_settings(&path, cx).clone();
+ let remote_shell = remote_client
+ .as_ref()
+ .and_then(|remote_client| remote_client.read(cx).shell());
+ let builder = ShellBuilder::new(remote_shell.as_deref(), &settings.shell).non_interactive();
+ let (command, args) = builder.build(Some(command), &Vec::new());
- let line_ending = match std::env::consts::OS {
- "windows" => "\r",
- _ => "\n",
- };
+ let mut env = self
+ .environment
+ .read(cx)
+ .get_cli_environment()
+ .unwrap_or_default();
+ env.extend(settings.env);
- if venv_settings.venv_name.is_empty() {
- let path = venv_base_directory
- .join(PYTHON_VENV_BIN_DIR)
- .join(activate_script_name)
- .to_string_lossy()
- .to_string();
-
- let is_valid_path = self.resolve_abs_path(path.as_ref(), cx);
- cx.background_spawn(async move {
- let quoted = shlex::try_quote(&path).ok()?;
- if is_valid_path.await.is_some_and(|meta| meta.is_file()) {
- Some(format!(
- "{} {} ; clear{}",
- activate_keyword, quoted, line_ending
- ))
- } else {
- None
+ match remote_client {
+ Some(remote_client) => {
+ let command_template =
+ remote_client
+ .read(cx)
+ .build_command(Some(command), &args, &env, None, None)?;
+ let mut command = std::process::Command::new(command_template.program);
+ command.args(command_template.args);
+ command.envs(command_template.env);
+ Ok(command)
+ }
+ None => {
+ let mut command = std::process::Command::new(command);
+ command.args(args);
+ command.envs(env);
+ if let Some(path) = path {
+ command.current_dir(path);
}
- })
- } else {
- Task::ready(Some(format!(
- "{activate_keyword} {activate_script_name} {name}; clear{line_ending}",
- name = venv_settings.venv_name
- )))
+ Ok(command)
+ }
}
}
- fn activate_python_virtual_environment(
- &self,
- command: Task<Option<String>>,
- terminal_handle: &Entity<Terminal>,
- cx: &mut App,
- ) {
- terminal_handle.update(cx, |_, cx| {
- cx.spawn(async move |this, cx| {
- if let Some(command) = command.await {
- this.update(cx, |this, _| {
- this.input(command.into_bytes());
- })
- .ok();
- }
- })
- .detach()
- });
- }
-
pub fn local_terminal_handles(&self) -> &Vec<WeakEntity<terminal::Terminal>> {
&self.terminals.local_handles
}
}
-pub fn wrap_for_ssh(
- ssh_command: &SshCommand,
- command: Option<(&String, &Vec<String>)>,
- path: Option<&Path>,
- env: HashMap<String, String>,
- venv_directory: Option<&Path>,
- path_style: PathStyle,
-) -> (String, Vec<String>) {
- let to_run = if let Some((command, args)) = command {
- // DEFAULT_REMOTE_SHELL is '"${SHELL:-sh}"' so must not be escaped
- let command: Option<Cow<str>> = if command == DEFAULT_REMOTE_SHELL {
- Some(command.into())
- } else {
- shlex::try_quote(command).ok()
- };
- let args = args.iter().filter_map(|arg| shlex::try_quote(arg).ok());
- command.into_iter().chain(args).join(" ")
- } else {
- "exec ${SHELL:-sh} -l".to_string()
- };
-
- let mut env_changes = String::new();
- for (k, v) in env.iter() {
- if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
- env_changes.push_str(&format!("{}={} ", k, v));
- }
- }
- if let Some(venv_directory) = venv_directory
- && let Ok(str) = shlex::try_quote(venv_directory.to_string_lossy().as_ref())
- {
- let path = RemotePathBuf::new(PathBuf::from(str.to_string()), path_style).to_string();
- env_changes.push_str(&format!("PATH={}:$PATH ", path));
- }
-
- let commands = if let Some(path) = path {
- let path = RemotePathBuf::new(path.to_path_buf(), path_style).to_string();
- // shlex will wrap the command in single quotes (''), disabling ~ expansion,
- // replace ith with something that works
- let tilde_prefix = "~/";
- if path.starts_with(tilde_prefix) {
- let trimmed_path = path
- .trim_start_matches("/")
- .trim_start_matches("~")
- .trim_start_matches("/");
-
- format!("cd \"$HOME/{trimmed_path}\"; {env_changes} {to_run}")
- } else {
- format!("cd \"{path}\"; {env_changes} {to_run}")
- }
- } else {
- format!("cd; {env_changes} {to_run}")
+fn create_remote_shell(
+ spawn_command: Option<(&String, &Vec<String>)>,
+ env: &mut HashMap<String, String>,
+ working_directory: Option<Arc<Path>>,
+ remote_client: Entity<RemoteClient>,
+ cx: &mut App,
+) -> Result<Shell> {
+ // Alacritty sets its terminfo to `alacritty`, this requiring hosts to have it installed
+ // to properly display colors.
+ // We do not have the luxury of assuming the host has it installed,
+ // so we set it to a default that does not break the highlighting via ssh.
+ env.entry("TERM".to_string())
+ .or_insert_with(|| "xterm-256color".to_string());
+
+ let (program, args) = match spawn_command {
+ Some((program, args)) => (Some(program.clone()), args),
+ None => (None, &Vec::new()),
};
- let shell_invocation = format!("sh -c {}", shlex::try_quote(&commands).unwrap());
-
- let program = "ssh".to_string();
- let mut args = ssh_command.arguments.clone();
-
- args.push("-t".to_string());
- args.push(shell_invocation);
- (program, args)
-}
-
-fn add_environment_path(env: &mut HashMap<String, String>, new_path: &Path) -> Result<()> {
- let mut env_paths = vec![new_path.to_path_buf()];
- if let Some(path) = env.get("PATH").or(env::var("PATH").ok().as_ref()) {
- let mut paths = std::env::split_paths(&path).collect::<Vec<_>>();
- env_paths.append(&mut paths);
- }
- let paths = std::env::join_paths(env_paths).context("failed to create PATH env variable")?;
- env.insert("PATH".to_string(), paths.to_string_lossy().to_string());
-
- Ok(())
-}
-
-#[cfg(test)]
-mod tests {
- use collections::HashMap;
-
- #[test]
- fn test_add_environment_path_with_existing_path() {
- let tmp_path = std::path::PathBuf::from("/tmp/new");
- let mut env = HashMap::default();
- let old_path = if cfg!(windows) {
- "/usr/bin;/usr/local/bin"
- } else {
- "/usr/bin:/usr/local/bin"
- };
- env.insert("PATH".to_string(), old_path.to_string());
- env.insert("OTHER".to_string(), "aaa".to_string());
-
- super::add_environment_path(&mut env, &tmp_path).unwrap();
- if cfg!(windows) {
- assert_eq!(env.get("PATH").unwrap(), &format!("/tmp/new;{}", old_path));
- } else {
- assert_eq!(env.get("PATH").unwrap(), &format!("/tmp/new:{}", old_path));
- }
- assert_eq!(env.get("OTHER").unwrap(), "aaa");
- }
-
- #[test]
- fn test_add_environment_path_with_empty_path() {
- let tmp_path = std::path::PathBuf::from("/tmp/new");
- let mut env = HashMap::default();
- env.insert("OTHER".to_string(), "aaa".to_string());
- let os_path = std::env::var("PATH").unwrap();
- super::add_environment_path(&mut env, &tmp_path).unwrap();
- if cfg!(windows) {
- assert_eq!(env.get("PATH").unwrap(), &format!("/tmp/new;{}", os_path));
- } else {
- assert_eq!(env.get("PATH").unwrap(), &format!("/tmp/new:{}", os_path));
- }
- assert_eq!(env.get("OTHER").unwrap(), "aaa");
- }
+ let command = remote_client.read(cx).build_command(
+ program,
+ args.as_slice(),
+ env,
+ working_directory.map(|path| path.display().to_string()),
+ None,
+ )?;
+ *env = command.env;
+
+ log::debug!("Connecting to a remote server: {:?}", command.program);
+ let host = remote_client.read(cx).connection_options().display_name();
+
+ Ok(Shell::WithArguments {
+ program: command.program,
+ args: command.args,
+ title_override: Some(format!("{} — Terminal", host).into()),
+ })
}
@@ -4,20 +4,23 @@ use std::{
sync::Arc,
};
-use anyhow::{Result, bail};
+use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
-use collections::BTreeMap;
+use collections::{BTreeMap, IndexSet};
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
};
use language::{
LanguageName, LanguageRegistry, LanguageToolchainStore, ManifestDelegate, Toolchain,
- ToolchainList,
+ ToolchainList, ToolchainScope,
};
use rpc::{
AnyProtoClient, TypedEnvelope,
- proto::{self, FromProto, ToProto},
+ proto::{
+ self, FromProto, ResolveToolchainResponse, ToProto,
+ resolve_toolchain_response::Response as ResolveResponsePayload,
+ },
};
use settings::WorktreeId;
use util::ResultExt as _;
@@ -28,21 +31,31 @@ use crate::{
worktree_store::WorktreeStore,
};
-pub struct ToolchainStore(ToolchainStoreInner);
+pub struct ToolchainStore {
+ mode: ToolchainStoreInner,
+ user_toolchains: BTreeMap<ToolchainScope, IndexSet<Toolchain>>,
+ _sub: Subscription,
+}
+
enum ToolchainStoreInner {
- Local(
- Entity<LocalToolchainStore>,
- #[allow(dead_code)] Subscription,
- ),
+ Local(Entity<LocalToolchainStore>),
Remote(Entity<RemoteToolchainStore>),
}
+pub struct Toolchains {
+ /// Auto-detected toolchains.
+ pub toolchains: ToolchainList,
+ /// Path of the project root at which we ran the automatic toolchain detection.
+ pub root_path: Arc<Path>,
+ pub user_toolchains: BTreeMap<ToolchainScope, IndexSet<Toolchain>>,
+}
impl EventEmitter<ToolchainStoreEvent> for ToolchainStore {}
impl ToolchainStore {
pub fn init(client: &AnyProtoClient) {
client.add_entity_request_handler(Self::handle_activate_toolchain);
client.add_entity_request_handler(Self::handle_list_toolchains);
client.add_entity_request_handler(Self::handle_active_toolchain);
+ client.add_entity_request_handler(Self::handle_resolve_toolchain);
}
pub fn local(
@@ -59,16 +72,26 @@ impl ToolchainStore {
active_toolchains: Default::default(),
manifest_tree,
});
- let subscription = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| {
+ let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| {
cx.emit(e.clone())
});
- Self(ToolchainStoreInner::Local(entity, subscription))
+ Self {
+ mode: ToolchainStoreInner::Local(entity),
+ user_toolchains: Default::default(),
+ _sub,
+ }
}
- pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut App) -> Self {
- Self(ToolchainStoreInner::Remote(
- cx.new(|_| RemoteToolchainStore { client, project_id }),
- ))
+ pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) -> Self {
+ let entity = cx.new(|_| RemoteToolchainStore { client, project_id });
+ let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| {
+ cx.emit(e.clone())
+ });
+ Self {
+ mode: ToolchainStoreInner::Remote(entity),
+ user_toolchains: Default::default(),
+ _sub,
+ }
}
pub(crate) fn activate_toolchain(
&self,
@@ -76,38 +99,125 @@ impl ToolchainStore {
toolchain: Toolchain,
cx: &mut App,
) -> Task<Option<()>> {
- match &self.0 {
- ToolchainStoreInner::Local(local, _) => {
+ match &self.mode {
+ ToolchainStoreInner::Local(local) => {
local.update(cx, |this, cx| this.activate_toolchain(path, toolchain, cx))
}
ToolchainStoreInner::Remote(remote) => {
- remote.read(cx).activate_toolchain(path, toolchain, cx)
+ remote.update(cx, |this, cx| this.activate_toolchain(path, toolchain, cx))
}
}
}
+
+ pub(crate) fn user_toolchains(&self) -> BTreeMap<ToolchainScope, IndexSet<Toolchain>> {
+ self.user_toolchains.clone()
+ }
+ pub(crate) fn add_toolchain(
+ &mut self,
+ toolchain: Toolchain,
+ scope: ToolchainScope,
+ cx: &mut Context<Self>,
+ ) {
+ let did_insert = self
+ .user_toolchains
+ .entry(scope)
+ .or_default()
+ .insert(toolchain);
+ if did_insert {
+ cx.emit(ToolchainStoreEvent::CustomToolchainsModified);
+ }
+ }
+
+ pub(crate) fn remove_toolchain(
+ &mut self,
+ toolchain: Toolchain,
+ scope: ToolchainScope,
+ cx: &mut Context<Self>,
+ ) {
+ let mut did_remove = false;
+ self.user_toolchains
+ .entry(scope)
+ .and_modify(|toolchains| did_remove = toolchains.shift_remove(&toolchain));
+ if did_remove {
+ cx.emit(ToolchainStoreEvent::CustomToolchainsModified);
+ }
+ }
+
+ pub(crate) fn resolve_toolchain(
+ &self,
+ abs_path: PathBuf,
+ language_name: LanguageName,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Toolchain>> {
+ debug_assert!(abs_path.is_absolute());
+ match &self.mode {
+ ToolchainStoreInner::Local(local) => local.update(cx, |this, cx| {
+ this.resolve_toolchain(abs_path, language_name, cx)
+ }),
+ ToolchainStoreInner::Remote(remote) => remote.update(cx, |this, cx| {
+ this.resolve_toolchain(abs_path, language_name, cx)
+ }),
+ }
+ }
pub(crate) fn list_toolchains(
&self,
path: ProjectPath,
language_name: LanguageName,
cx: &mut Context<Self>,
- ) -> Task<Option<(ToolchainList, Arc<Path>)>> {
- match &self.0 {
- ToolchainStoreInner::Local(local, _) => {
+ ) -> Task<Option<Toolchains>> {
+ let user_toolchains = self
+ .user_toolchains
+ .iter()
+ .filter(|(scope, _)| {
+ if let ToolchainScope::Subproject(worktree_id, relative_path) = scope {
+ path.worktree_id == *worktree_id && relative_path.starts_with(&path.path)
+ } else {
+ true
+ }
+ })
+ .map(|(scope, toolchains)| {
+ (
+ scope.clone(),
+ toolchains
+ .iter()
+ .filter(|toolchain| toolchain.language_name == language_name)
+ .cloned()
+ .collect::<IndexSet<_>>(),
+ )
+ })
+ .collect::<BTreeMap<_, _>>();
+ let task = match &self.mode {
+ ToolchainStoreInner::Local(local) => {
local.update(cx, |this, cx| this.list_toolchains(path, language_name, cx))
}
ToolchainStoreInner::Remote(remote) => {
remote.read(cx).list_toolchains(path, language_name, cx)
}
- }
+ };
+ cx.spawn(async move |_, _| {
+ let (mut toolchains, root_path) = task.await?;
+ toolchains.toolchains.retain(|toolchain| {
+ !user_toolchains
+ .values()
+ .any(|toolchains| toolchains.contains(toolchain))
+ });
+
+ Some(Toolchains {
+ toolchains,
+ root_path,
+ user_toolchains,
+ })
+ })
}
+
pub(crate) fn active_toolchain(
&self,
path: ProjectPath,
language_name: LanguageName,
cx: &App,
) -> Task<Option<Toolchain>> {
- match &self.0 {
- ToolchainStoreInner::Local(local, _) => Task::ready(local.read(cx).active_toolchain(
+ match &self.mode {
+ ToolchainStoreInner::Local(local) => Task::ready(local.read(cx).active_toolchain(
path.worktree_id,
&path.path,
language_name,
@@ -192,7 +302,7 @@ impl ToolchainStore {
})?
.await;
let has_values = toolchains.is_some();
- let groups = if let Some((toolchains, _)) = &toolchains {
+ let groups = if let Some(Toolchains { toolchains, .. }) = &toolchains {
toolchains
.groups
.iter()
@@ -206,7 +316,12 @@ impl ToolchainStore {
} else {
vec![]
};
- let (toolchains, relative_path) = if let Some((toolchains, relative_path)) = toolchains {
+ let (toolchains, relative_path) = if let Some(Toolchains {
+ toolchains,
+ root_path: relative_path,
+ ..
+ }) = toolchains
+ {
let toolchains = toolchains
.toolchains
.into_iter()
@@ -231,15 +346,44 @@ impl ToolchainStore {
relative_worktree_path: Some(relative_path.to_string_lossy().into_owned()),
})
}
+
+ async fn handle_resolve_toolchain(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::ResolveToolchain>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::ResolveToolchainResponse> {
+ let toolchain = this
+ .update(&mut cx, |this, cx| {
+ let language_name = LanguageName::from_proto(envelope.payload.language_name);
+ let path = PathBuf::from(envelope.payload.abs_path);
+ this.resolve_toolchain(path, language_name, cx)
+ })?
+ .await;
+ let response = match toolchain {
+ Ok(toolchain) => {
+ let toolchain = proto::Toolchain {
+ name: toolchain.name.to_string(),
+ path: toolchain.path.to_string(),
+ raw_json: toolchain.as_json.to_string(),
+ };
+ ResolveResponsePayload::Toolchain(toolchain)
+ }
+ Err(e) => ResolveResponsePayload::Error(e.to_string()),
+ };
+ Ok(ResolveToolchainResponse {
+ response: Some(response),
+ })
+ }
+
pub fn as_language_toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
- match &self.0 {
- ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())),
+ match &self.mode {
+ ToolchainStoreInner::Local(local) => Arc::new(LocalStore(local.downgrade())),
ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())),
}
}
pub fn as_local_store(&self) -> Option<&Entity<LocalToolchainStore>> {
- match &self.0 {
- ToolchainStoreInner::Local(local, _) => Some(local),
+ match &self.mode {
+ ToolchainStoreInner::Local(local) => Some(local),
ToolchainStoreInner::Remote(_) => None,
}
}
@@ -306,6 +450,7 @@ struct RemoteStore(WeakEntity<RemoteToolchainStore>);
#[derive(Clone)]
pub enum ToolchainStoreEvent {
ToolchainActivated,
+ CustomToolchainsModified,
}
impl EventEmitter<ToolchainStoreEvent> for LocalToolchainStore {}
@@ -346,7 +491,7 @@ impl LocalToolchainStore {
.await
.ok()?;
let toolchains = language.toolchain_lister()?;
- let manifest_name = toolchains.manifest_name();
+ let manifest_name = toolchains.meta().manifest_name;
let (snapshot, worktree) = this
.update(cx, |this, cx| {
this.worktree_store
@@ -384,12 +529,7 @@ impl LocalToolchainStore {
cx.background_spawn(async move {
Some((
toolchains
- .list(
- worktree_root,
- Some(relative_path.path.clone())
- .filter(|_| *relative_path.path != *Path::new("")),
- project_env,
- )
+ .list(worktree_root, relative_path.path.clone(), project_env)
.await,
relative_path.path,
))
@@ -414,7 +554,36 @@ impl LocalToolchainStore {
})
.cloned()
}
+
+ fn resolve_toolchain(
+ &self,
+ path: PathBuf,
+ language_name: LanguageName,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Toolchain>> {
+ let registry = self.languages.clone();
+ let environment = self.project_environment.clone();
+ cx.spawn(async move |_, cx| {
+ let language = cx
+ .background_spawn(registry.language_for_name(&language_name.0))
+ .await
+ .with_context(|| format!("Language {} not found", language_name.0))?;
+ let toolchain_lister = language.toolchain_lister().with_context(|| {
+ format!("Language {} does not support toolchains", language_name.0)
+ })?;
+
+ let project_env = environment
+ .update(cx, |environment, cx| {
+ environment.get_directory_environment(path.as_path().into(), cx)
+ })?
+ .await;
+ cx.background_spawn(async move { toolchain_lister.resolve(path, project_env).await })
+ .await
+ })
+ }
}
+
+impl EventEmitter<ToolchainStoreEvent> for RemoteToolchainStore {}
struct RemoteToolchainStore {
client: AnyProtoClient,
project_id: u64,
@@ -425,27 +594,37 @@ impl RemoteToolchainStore {
&self,
project_path: ProjectPath,
toolchain: Toolchain,
- cx: &App,
+ cx: &mut Context<Self>,
) -> Task<Option<()>> {
let project_id = self.project_id;
let client = self.client.clone();
- cx.background_spawn(async move {
- let path = PathBuf::from(toolchain.path.to_string());
- let _ = client
- .request(proto::ActivateToolchain {
- project_id,
- worktree_id: project_path.worktree_id.to_proto(),
- language_name: toolchain.language_name.into(),
- toolchain: Some(proto::Toolchain {
- name: toolchain.name.into(),
- path: path.to_proto(),
- raw_json: toolchain.as_json.to_string(),
- }),
- path: Some(project_path.path.to_string_lossy().into_owned()),
+ cx.spawn(async move |this, cx| {
+ let did_activate = cx
+ .background_spawn(async move {
+ let path = PathBuf::from(toolchain.path.to_string());
+ let _ = client
+ .request(proto::ActivateToolchain {
+ project_id,
+ worktree_id: project_path.worktree_id.to_proto(),
+ language_name: toolchain.language_name.into(),
+ toolchain: Some(proto::Toolchain {
+ name: toolchain.name.into(),
+ path: path.to_proto(),
+ raw_json: toolchain.as_json.to_string(),
+ }),
+ path: Some(project_path.path.to_string_lossy().into_owned()),
+ })
+ .await
+ .log_err()?;
+ Some(())
})
- .await
- .log_err()?;
- Some(())
+ .await;
+ did_activate.and_then(|_| {
+ this.update(cx, |_, cx| {
+ cx.emit(ToolchainStoreEvent::ToolchainActivated);
+ })
+ .ok()
+ })
})
}
@@ -544,4 +723,47 @@ impl RemoteToolchainStore {
})
})
}
+
+ fn resolve_toolchain(
+ &self,
+ abs_path: PathBuf,
+ language_name: LanguageName,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Toolchain>> {
+ let project_id = self.project_id;
+ let client = self.client.clone();
+ cx.background_spawn(async move {
+ let response: proto::ResolveToolchainResponse = client
+ .request(proto::ResolveToolchain {
+ project_id,
+ language_name: language_name.clone().into(),
+ abs_path: abs_path.to_string_lossy().into_owned(),
+ })
+ .await?;
+
+ let response = response
+ .response
+ .context("Failed to resolve toolchain via RPC")?;
+ use proto::resolve_toolchain_response::Response;
+ match response {
+ Response::Toolchain(toolchain) => {
+ Ok(Toolchain {
+ language_name: language_name.clone(),
+ name: toolchain.name.into(),
+ // todo(windows)
+ // Do we need to convert path to native string?
+ path: PathBuf::from_proto(toolchain.path)
+ .to_string_lossy()
+ .to_string()
+ .into(),
+ as_json: serde_json::Value::from_str(&toolchain.raw_json)
+ .context("Deserializing ResolveToolchain LSP response")?,
+ })
+ }
+ Response::Error(error) => {
+ anyhow::bail!("{error}");
+ }
+ }
+ })
+ }
}
@@ -18,7 +18,7 @@ use gpui::{
use postage::oneshot;
use rpc::{
AnyProtoClient, ErrorExt, TypedEnvelope,
- proto::{self, FromProto, SSH_PROJECT_ID, ToProto},
+ proto::{self, FromProto, REMOTE_SERVER_PROJECT_ID, ToProto},
};
use smol::{
channel::{Receiver, Sender},
@@ -61,7 +61,7 @@ pub struct WorktreeStore {
worktrees_reordered: bool,
#[allow(clippy::type_complexity)]
loading_worktrees:
- HashMap<SanitizedPath, Shared<Task<Result<Entity<Worktree>, Arc<anyhow::Error>>>>>,
+ HashMap<Arc<SanitizedPath>, Shared<Task<Result<Entity<Worktree>, Arc<anyhow::Error>>>>>,
state: WorktreeStoreState,
}
@@ -153,10 +153,10 @@ impl WorktreeStore {
pub fn find_worktree(
&self,
- abs_path: impl Into<SanitizedPath>,
+ abs_path: impl AsRef<Path>,
cx: &App,
) -> Option<(Entity<Worktree>, PathBuf)> {
- let abs_path: SanitizedPath = abs_path.into();
+ let abs_path = SanitizedPath::new(&abs_path);
for tree in self.worktrees() {
if let Ok(relative_path) = abs_path.as_path().strip_prefix(tree.read(cx).abs_path()) {
return Some((tree.clone(), relative_path.into()));
@@ -203,20 +203,19 @@ impl WorktreeStore {
})
}
- pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option<Entry> {
+ pub fn entry_for_path<'a>(&'a self, path: &ProjectPath, cx: &'a App) -> Option<&'a Entry> {
self.worktree_for_id(path.worktree_id, cx)?
.read(cx)
.entry_for_path(&path.path)
- .cloned()
}
pub fn create_worktree(
&mut self,
- abs_path: impl Into<SanitizedPath>,
+ abs_path: impl AsRef<Path>,
visible: bool,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Worktree>>> {
- let abs_path: SanitizedPath = abs_path.into();
+ let abs_path: Arc<SanitizedPath> = SanitizedPath::new_arc(&abs_path);
if !self.loading_worktrees.contains_key(&abs_path) {
let task = match &self.state {
WorktreeStoreState::Remote {
@@ -227,9 +226,8 @@ impl WorktreeStore {
if upstream_client.is_via_collab() {
Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
} else {
- let abs_path =
- RemotePathBuf::new(abs_path.as_path().to_path_buf(), *path_style);
- self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx)
+ let abs_path = RemotePathBuf::new(abs_path.to_path_buf(), *path_style);
+ self.create_remote_worktree(upstream_client.clone(), abs_path, visible, cx)
}
}
WorktreeStoreState::Local { fs } => {
@@ -252,7 +250,7 @@ impl WorktreeStore {
})
}
- fn create_ssh_worktree(
+ fn create_remote_worktree(
&mut self,
client: AnyProtoClient,
abs_path: RemotePathBuf,
@@ -278,7 +276,7 @@ impl WorktreeStore {
let path = RemotePathBuf::new(abs_path.into(), path_style);
let response = client
.request(proto::AddWorktree {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
path: path.to_proto(),
visible,
})
@@ -298,7 +296,7 @@ impl WorktreeStore {
let worktree = cx.update(|cx| {
Worktree::remote(
- SSH_PROJECT_ID,
+ REMOTE_SERVER_PROJECT_ID,
0,
proto::WorktreeMetadata {
id: response.worktree_id,
@@ -321,15 +319,21 @@ impl WorktreeStore {
fn create_local_worktree(
&mut self,
fs: Arc<dyn Fs>,
- abs_path: impl Into<SanitizedPath>,
+ abs_path: Arc<SanitizedPath>,
visible: bool,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Worktree>, Arc<anyhow::Error>>> {
let next_entry_id = self.next_entry_id.clone();
- let path: SanitizedPath = abs_path.into();
cx.spawn(async move |this, cx| {
- let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, cx).await;
+ let worktree = Worktree::local(
+ SanitizedPath::cast_arc(abs_path.clone()),
+ visible,
+ fs,
+ next_entry_id,
+ cx,
+ )
+ .await;
let worktree = worktree?;
@@ -337,7 +341,7 @@ impl WorktreeStore {
if visible {
cx.update(|cx| {
- cx.add_recent_document(path.as_path());
+ cx.add_recent_document(abs_path.as_path());
})
.log_err();
}
@@ -457,7 +461,7 @@ impl WorktreeStore {
})
.collect::<HashMap<_, _>>();
- let (client, project_id) = self.upstream_client().clone().context("invalid project")?;
+ let (client, project_id) = self.upstream_client().context("invalid project")?;
for worktree in worktrees {
if let Some(old_worktree) =
@@ -69,6 +69,7 @@ use workspace::{
notifications::{DetachAndPromptErr, NotifyTaskExt},
};
use worktree::CreatedEntry;
+use zed_actions::workspace::OpenWithSystem;
const PROJECT_PANEL_KEY: &str = "ProjectPanel";
const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX;
@@ -255,8 +256,6 @@ actions!(
RevealInFileManager,
/// Removes the selected folder from the project.
RemoveFromProject,
- /// Opens the selected file with the system's default application.
- OpenWithSystem,
/// Cuts the selected file or directory.
Cut,
/// Pastes the previously cut or copied item.
@@ -447,7 +446,7 @@ impl ProjectPanel {
cx.subscribe(&project, |this, project, event, cx| match event {
project::Event::ActiveEntryChanged(Some(entry_id)) => {
if ProjectPanelSettings::get_global(cx).auto_reveal_entries {
- this.reveal_entry(project.clone(), *entry_id, true, cx).ok();
+ this.reveal_entry(project, *entry_id, true, cx).ok();
}
}
project::Event::ActiveEntryChanged(None) => {
@@ -462,10 +461,7 @@ impl ProjectPanel {
}
}
project::Event::RevealInProjectPanel(entry_id) => {
- if let Some(()) = this
- .reveal_entry(project.clone(), *entry_id, false, cx)
- .log_err()
- {
+ if let Some(()) = this.reveal_entry(project, *entry_id, false, cx).log_err() {
cx.emit(PanelEvent::Activate);
}
}
@@ -657,7 +653,7 @@ impl ProjectPanel {
let file_path = entry.path.clone();
let worktree_id = worktree.read(cx).id();
let entry_id = entry.id;
- let is_via_ssh = project.read(cx).is_via_ssh();
+ let is_via_ssh = project.read(cx).is_via_remote_server();
workspace
.open_path_preview(
@@ -813,7 +809,7 @@ impl ProjectPanel {
diagnostic_severity: DiagnosticSeverity,
) {
diagnostics
- .entry((project_path.worktree_id, path_buffer.clone()))
+ .entry((project_path.worktree_id, path_buffer))
.and_modify(|strongest_diagnostic_severity| {
*strongest_diagnostic_severity =
cmp::min(*strongest_diagnostic_severity, diagnostic_severity);
@@ -2515,7 +2511,7 @@ impl ProjectPanel {
if clip_is_cut {
// Convert the clipboard cut entry to a copy entry after the first paste.
- self.clipboard = self.clipboard.take().map(ClipboardEntry::to_copy_entry);
+ self.clipboard = self.clipboard.take().map(ClipboardEntry::into_copy_entry);
}
self.expand_entry(worktree_id, entry.id, cx);
@@ -2780,7 +2776,7 @@ impl ProjectPanel {
let destination_worktree = self.project.update(cx, |project, cx| {
let entry_path = project.path_for_entry(entry_to_move, cx)?;
- let destination_entry_path = project.path_for_entry(destination, cx)?.path.clone();
+ let destination_entry_path = project.path_for_entry(destination, cx)?.path;
let mut destination_path = destination_entry_path.as_ref();
if destination_is_file {
@@ -3589,7 +3585,7 @@ impl ProjectPanel {
previous_components.next();
}
- if let Some(_) = suffix_components {
+ if suffix_components.is_some() {
new_path.push(previous_components);
}
if let Some(str) = new_path.to_str() {
@@ -3898,14 +3894,12 @@ impl ProjectPanel {
// Always highlight directory or parent directory if it's file
if target_entry.is_dir() {
Some(target_entry.id)
- } else if let Some(parent_entry) = target_entry
- .path
- .parent()
- .and_then(|parent_path| target_worktree.entry_for_path(parent_path))
- {
- Some(parent_entry.id)
} else {
- None
+ target_entry
+ .path
+ .parent()
+ .and_then(|parent_path| target_worktree.entry_for_path(parent_path))
+ .map(|parent_entry| parent_entry.id)
}
}
@@ -3942,12 +3936,10 @@ impl ProjectPanel {
// Always highlight directory or parent directory if it's file
if target_entry.is_dir() {
Some(target_entry.id)
- } else if let Some(parent_entry) =
- target_parent_path.and_then(|parent_path| target_worktree.entry_for_path(parent_path))
- {
- Some(parent_entry.id)
} else {
- None
+ target_parent_path
+ .and_then(|parent_path| target_worktree.entry_for_path(parent_path))
+ .map(|parent_entry| parent_entry.id)
}
}
@@ -4023,8 +4015,8 @@ impl ProjectPanel {
.as_ref()
.map_or(ValidationState::None, |e| e.validation_state.clone())
{
- ValidationState::Error(msg) => Some((Color::Error.color(cx), msg.clone())),
- ValidationState::Warning(msg) => Some((Color::Warning.color(cx), msg.clone())),
+ ValidationState::Error(msg) => Some((Color::Error.color(cx), msg)),
+ ValidationState::Warning(msg) => Some((Color::Warning.color(cx), msg)),
ValidationState::None => None,
}
} else {
@@ -4097,6 +4089,7 @@ impl ProjectPanel {
.when(!is_sticky, |this| {
this
.when(is_highlighted && folded_directory_drag_target.is_none(), |this| this.border_color(transparent_white()).bg(item_colors.drag_over))
+ .when(settings.drag_and_drop, |this| this
.on_drag_move::<ExternalPaths>(cx.listener(
move |this, event: &DragMoveEvent<ExternalPaths>, _, cx| {
let is_current_target = this.drag_target_entry.as_ref()
@@ -4230,7 +4223,7 @@ impl ProjectPanel {
}
this.drag_onto(selections, entry_id, kind.is_file(), window, cx);
}),
- )
+ ))
})
.on_mouse_down(
MouseButton::Left,
@@ -4422,9 +4415,7 @@ impl ProjectPanel {
let components = Path::new(&file_name)
.components()
.map(|comp| {
- let comp_str =
- comp.as_os_str().to_string_lossy().into_owned();
- comp_str
+ comp.as_os_str().to_string_lossy().into_owned()
})
.collect::<Vec<_>>();
@@ -4443,6 +4434,7 @@ impl ProjectPanel {
div()
.when(!is_sticky, |div| {
div
+ .when(settings.drag_and_drop, |div| div
.on_drop(cx.listener(move |this, selections: &DraggedSelection, window, cx| {
this.hover_scroll_task.take();
this.drag_target_entry = None;
@@ -4474,7 +4466,7 @@ impl ProjectPanel {
}
},
- ))
+ )))
})
.child(
Label::new(DELIMITER.clone())
@@ -4494,6 +4486,7 @@ impl ProjectPanel {
.when(index != components_len - 1, |div|{
let target_entry_id = folded_ancestors.ancestors.get(components_len - 1 - index).cloned();
div
+ .when(settings.drag_and_drop, |div| div
.on_drag_move(cx.listener(
move |this, event: &DragMoveEvent<DraggedSelection>, _, _| {
if event.bounds.contains(&event.event.position) {
@@ -4531,7 +4524,7 @@ impl ProjectPanel {
target.index == index
), |this| {
this.bg(item_colors.drag_over)
- })
+ }))
})
})
.on_click(cx.listener(move |this, _, _, cx| {
@@ -5039,7 +5032,8 @@ impl ProjectPanel {
sticky_parents.reverse();
- let git_status_enabled = ProjectPanelSettings::get_global(cx).git_status;
+ let panel_settings = ProjectPanelSettings::get_global(cx);
+ let git_status_enabled = panel_settings.git_status;
let root_name = OsStr::new(worktree.root_name());
let git_summaries_by_id = if git_status_enabled {
@@ -5123,11 +5117,11 @@ impl Render for ProjectPanel {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let has_worktree = !self.visible_entries.is_empty();
let project = self.project.read(cx);
- let indent_size = ProjectPanelSettings::get_global(cx).indent_size;
- let show_indent_guides =
- ProjectPanelSettings::get_global(cx).indent_guides.show == ShowIndentGuides::Always;
+ let panel_settings = ProjectPanelSettings::get_global(cx);
+ let indent_size = panel_settings.indent_size;
+ let show_indent_guides = panel_settings.indent_guides.show == ShowIndentGuides::Always;
let show_sticky_entries = {
- if ProjectPanelSettings::get_global(cx).sticky_scroll {
+ if panel_settings.sticky_scroll {
let is_scrollable = self.scroll_handle.is_scrollable();
let is_scrolled = self.scroll_handle.offset().y < px(0.);
is_scrollable && is_scrolled
@@ -5215,8 +5209,10 @@ impl Render for ProjectPanel {
h_flex()
.id("project-panel")
.group("project-panel")
- .on_drag_move(cx.listener(handle_drag_move::<ExternalPaths>))
- .on_drag_move(cx.listener(handle_drag_move::<DraggedSelection>))
+ .when(panel_settings.drag_and_drop, |this| {
+ this.on_drag_move(cx.listener(handle_drag_move::<ExternalPaths>))
+ .on_drag_move(cx.listener(handle_drag_move::<DraggedSelection>))
+ })
.size_full()
.relative()
.on_modifiers_changed(cx.listener(
@@ -5305,7 +5301,7 @@ impl Render for ProjectPanel {
.on_action(cx.listener(Self::open_system))
.on_action(cx.listener(Self::open_in_terminal))
})
- .when(project.is_via_ssh(), |el| {
+ .when(project.is_via_remote_server(), |el| {
el.on_action(cx.listener(Self::open_in_terminal))
})
.on_mouse_down(
@@ -5507,7 +5503,7 @@ impl Render for ProjectPanel {
.with_priority(3)
}))
} else {
- let focus_handle = self.focus_handle(cx).clone();
+ let focus_handle = self.focus_handle(cx);
v_flex()
.id("empty-project_panel")
@@ -5554,30 +5550,32 @@ impl Render for ProjectPanel {
})),
)
.when(is_local, |div| {
- div.drag_over::<ExternalPaths>(|style, _, _, cx| {
- style.bg(cx.theme().colors().drop_target_background)
+ div.when(panel_settings.drag_and_drop, |div| {
+ div.drag_over::<ExternalPaths>(|style, _, _, cx| {
+ style.bg(cx.theme().colors().drop_target_background)
+ })
+ .on_drop(cx.listener(
+ move |this, external_paths: &ExternalPaths, window, cx| {
+ this.drag_target_entry = None;
+ this.hover_scroll_task.take();
+ if let Some(task) = this
+ .workspace
+ .update(cx, |workspace, cx| {
+ workspace.open_workspace_for_paths(
+ true,
+ external_paths.paths().to_owned(),
+ window,
+ cx,
+ )
+ })
+ .log_err()
+ {
+ task.detach_and_log_err(cx);
+ }
+ cx.stop_propagation();
+ },
+ ))
})
- .on_drop(cx.listener(
- move |this, external_paths: &ExternalPaths, window, cx| {
- this.drag_target_entry = None;
- this.hover_scroll_task.take();
- if let Some(task) = this
- .workspace
- .update(cx, |workspace, cx| {
- workspace.open_workspace_for_paths(
- true,
- external_paths.paths().to_owned(),
- window,
- cx,
- )
- })
- .log_err()
- {
- task.detach_and_log_err(cx);
- }
- cx.stop_propagation();
- },
- ))
})
}
}
@@ -5711,7 +5709,7 @@ impl ClipboardEntry {
}
}
- fn to_copy_entry(self) -> Self {
+ fn into_copy_entry(self) -> Self {
match self {
ClipboardEntry::Copied(_) => self,
ClipboardEntry::Cut(entries) => ClipboardEntry::Copied(entries),
@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
use gpui::Pixels;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)]
#[serde(rename_all = "snake_case")]
@@ -47,6 +47,7 @@ pub struct ProjectPanelSettings {
pub scrollbar: ScrollbarSettings,
pub show_diagnostics: ShowDiagnostics,
pub hide_root: bool,
+ pub drag_and_drop: bool,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -91,7 +92,8 @@ pub enum ShowDiagnostics {
All,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "project_panel")]
pub struct ProjectPanelSettingsContent {
/// Whether to show the project panel button in the status bar.
///
@@ -160,11 +162,13 @@ pub struct ProjectPanelSettingsContent {
///
/// Default: true
pub sticky_scroll: Option<bool>,
+ /// Whether to enable drag-and-drop operations in the project panel.
+ ///
+ /// Default: true
+ pub drag_and_drop: Option<bool>,
}
impl Settings for ProjectPanelSettings {
- const KEY: Option<&'static str> = Some("project_panel");
-
type FileContent = ProjectPanelSettingsContent;
fn load(
@@ -17,7 +17,7 @@ use workspace::{
async fn test_visible_list(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -106,7 +106,7 @@ async fn test_visible_list(cx: &mut gpui::TestAppContext) {
async fn test_opening_file(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/src"),
json!({
@@ -276,7 +276,7 @@ async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
async fn test_auto_collapse_dir_paths(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root1"),
json!({
@@ -459,7 +459,7 @@ async fn test_auto_collapse_dir_paths(cx: &mut gpui::TestAppContext) {
async fn test_editing_files(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -877,7 +877,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) {
async fn test_adding_directories_via_file(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -1010,7 +1010,7 @@ async fn test_adding_directories_via_file(cx: &mut gpui::TestAppContext) {
async fn test_adding_directory_via_file(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root1"),
json!({
@@ -1137,7 +1137,7 @@ async fn test_adding_directory_via_file(cx: &mut gpui::TestAppContext) {
async fn test_copy_paste(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -1235,7 +1235,7 @@ async fn test_copy_paste(cx: &mut gpui::TestAppContext) {
async fn test_cut_paste(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -1320,7 +1320,7 @@ async fn test_cut_paste(cx: &mut gpui::TestAppContext) {
async fn test_cut_paste_between_different_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -1416,7 +1416,7 @@ async fn test_cut_paste_between_different_worktrees(cx: &mut gpui::TestAppContex
async fn test_copy_paste_between_different_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -1551,7 +1551,7 @@ async fn test_copy_paste_between_different_worktrees(cx: &mut gpui::TestAppConte
async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -1692,7 +1692,7 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
async fn test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/test",
json!({
@@ -1797,7 +1797,7 @@ async fn test_copy_paste_directory_with_sibling_file(cx: &mut gpui::TestAppConte
async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/test",
json!({
@@ -1876,7 +1876,7 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext)
async fn test_remove_opened_file(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/src"),
json!({
@@ -1968,7 +1968,7 @@ async fn test_remove_opened_file(cx: &mut gpui::TestAppContext) {
async fn test_create_duplicate_items(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/src",
json!({
@@ -2161,7 +2161,7 @@ async fn test_create_duplicate_items(cx: &mut gpui::TestAppContext) {
async fn test_select_git_entry(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -2440,7 +2440,7 @@ async fn test_select_git_entry(cx: &mut gpui::TestAppContext) {
async fn test_select_directory(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/project_root",
json!({
@@ -2541,7 +2541,7 @@ async fn test_select_directory(cx: &mut gpui::TestAppContext) {
async fn test_select_first_last(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/project_root",
json!({
@@ -2651,7 +2651,7 @@ async fn test_select_first_last(cx: &mut gpui::TestAppContext) {
async fn test_dir_toggle_collapse(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/project_root",
json!({
@@ -2693,7 +2693,7 @@ async fn test_dir_toggle_collapse(cx: &mut gpui::TestAppContext) {
async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/project_root",
json!({
@@ -2751,7 +2751,7 @@ async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
async fn test_new_file_move(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.as_fake().insert_tree(path!("/root"), json!({})).await;
let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -2819,7 +2819,7 @@ async fn test_new_file_move(cx: &mut gpui::TestAppContext) {
async fn test_rename_root_of_worktree(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -2895,7 +2895,7 @@ async fn test_rename_root_of_worktree(cx: &mut gpui::TestAppContext) {
async fn test_rename_with_hide_root(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -2989,7 +2989,7 @@ async fn test_rename_with_hide_root(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_multiple_marked_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/project_root",
json!({
@@ -3731,7 +3731,7 @@ async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) {
register_project_item::<TestProjectItemView>(cx);
});
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -3914,7 +3914,7 @@ async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) {
async fn test_selection_restored_when_creation_cancelled(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/src",
json!({
@@ -3982,7 +3982,7 @@ async fn test_selection_restored_when_creation_cancelled(cx: &mut gpui::TestAppC
async fn test_basic_file_deletion_scenarios(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -4105,7 +4105,7 @@ async fn test_basic_file_deletion_scenarios(cx: &mut gpui::TestAppContext) {
async fn test_deletion_gitignored(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -4206,7 +4206,7 @@ async fn test_deletion_gitignored(cx: &mut gpui::TestAppContext) {
async fn test_nested_deletion_gitignore(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -4271,7 +4271,7 @@ async fn test_nested_deletion_gitignore(cx: &mut gpui::TestAppContext) {
async fn test_complex_selection_scenarios(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -4382,7 +4382,7 @@ async fn test_complex_selection_scenarios(cx: &mut gpui::TestAppContext) {
async fn test_delete_all_files_and_directories(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -4457,7 +4457,7 @@ async fn test_delete_all_files_and_directories(cx: &mut gpui::TestAppContext) {
async fn test_nested_selection_deletion(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -4523,7 +4523,7 @@ async fn test_nested_selection_deletion(cx: &mut gpui::TestAppContext) {
async fn test_multiple_worktrees_deletion(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
// First worktree
fs.insert_tree(
"/root1",
@@ -4666,7 +4666,7 @@ async fn test_multiple_worktrees_deletion(cx: &mut gpui::TestAppContext) {
async fn test_selection_vs_marked_entries_priority(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -4766,7 +4766,7 @@ async fn test_selection_vs_marked_entries_priority(cx: &mut gpui::TestAppContext
async fn test_selection_fallback_to_next_highest_worktree(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root_b",
json!({
@@ -4859,7 +4859,7 @@ fn toggle_expand_dir(
async fn test_expand_all_for_entry(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -5050,7 +5050,7 @@ async fn test_expand_all_for_entry(cx: &mut gpui::TestAppContext) {
async fn test_collapse_all_for_entry(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -5234,7 +5234,7 @@ async fn test_collapse_all_for_entry(cx: &mut gpui::TestAppContext) {
async fn test_create_entries_without_selection(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -5299,7 +5299,7 @@ async fn test_create_entries_without_selection(cx: &mut gpui::TestAppContext) {
async fn test_create_entries_without_selection_hide_root(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/root"),
json!({
@@ -5448,7 +5448,7 @@ async fn test_create_entries_without_selection_hide_root(cx: &mut gpui::TestAppC
async fn test_highlight_entry_for_external_drag(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -5516,7 +5516,7 @@ async fn test_highlight_entry_for_external_drag(cx: &mut gpui::TestAppContext) {
async fn test_highlight_entry_for_selection_drag(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -5647,7 +5647,7 @@ async fn test_highlight_entry_for_selection_drag(cx: &mut gpui::TestAppContext)
async fn test_hide_root(cx: &mut gpui::TestAppContext) {
init_test(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root1",
json!({
@@ -5825,7 +5825,7 @@ async fn test_hide_root(cx: &mut gpui::TestAppContext) {
async fn test_compare_selected_files(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -5923,7 +5923,7 @@ async fn test_compare_selected_files(cx: &mut gpui::TestAppContext) {
async fn test_compare_files_context_menu(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
- let fs = FakeFs::new(cx.executor().clone());
+ let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/root",
json!({
@@ -6152,7 +6152,7 @@ fn init_test_with_editor(cx: &mut TestAppContext) {
language::init(cx);
editor::init(cx);
crate::init(cx);
- workspace::init(app_state.clone(), cx);
+ workspace::init(app_state, cx);
Project::init_settings(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
@@ -1,18 +1,19 @@
use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
- App, Context, DismissEvent, Entity, FontWeight, ParentElement, StyledText, Task, WeakEntity,
- Window, rems,
+ App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle,
+ WeakEntity, Window, relative, rems,
};
use ordered_float::OrderedFloat;
use picker::{Picker, PickerDelegate};
use project::{Project, Symbol};
+use settings::Settings;
use std::{borrow::Cow, cmp::Reverse, sync::Arc};
-use theme::ActiveTheme;
+use theme::{ActiveTheme, ThemeSettings};
use util::ResultExt;
use workspace::{
Workspace,
- ui::{Color, Label, LabelCommon, LabelLike, ListItem, ListItemSpacing, Toggleable, v_flex},
+ ui::{LabelLike, ListItem, ListItemSpacing, prelude::*},
};
pub fn init(cx: &mut App) {
@@ -213,7 +214,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
&self,
ix: usize,
selected: bool,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let string_match = &self.matches[ix];
@@ -233,20 +234,31 @@ impl PickerDelegate for ProjectSymbolsDelegate {
}
}
let label = symbol.label.text.clone();
- let path = path.to_string().clone();
-
- let highlights = gpui::combine_highlights(
- string_match
- .positions
- .iter()
- .map(|pos| (*pos..pos + 1, FontWeight::BOLD.into())),
- syntax_runs.map(|(range, mut highlight)| {
- // Ignore font weight for syntax highlighting, as we'll use it
- // for fuzzy matches.
- highlight.font_weight = None;
- (range, highlight)
- }),
- );
+ let path = path.to_string();
+
+ let settings = ThemeSettings::get_global(cx);
+
+ let text_style = TextStyle {
+ color: cx.theme().colors().text,
+ font_family: settings.buffer_font.family.clone(),
+ font_features: settings.buffer_font.features.clone(),
+ font_fallbacks: settings.buffer_font.fallbacks.clone(),
+ font_size: settings.buffer_font_size(cx).into(),
+ font_weight: settings.buffer_font.weight,
+ line_height: relative(1.),
+ ..Default::default()
+ };
+
+ let highlight_style = HighlightStyle {
+ background_color: Some(cx.theme().colors().text_accent.alpha(0.3)),
+ ..Default::default()
+ };
+ let custom_highlights = string_match
+ .positions
+ .iter()
+ .map(|pos| (*pos..pos + 1, highlight_style));
+
+ let highlights = gpui::combine_highlights(custom_highlights, syntax_runs);
Some(
ListItem::new(ix)
@@ -255,15 +267,10 @@ impl PickerDelegate for ProjectSymbolsDelegate {
.toggle_state(selected)
.child(
v_flex()
- .child(
- LabelLike::new().child(
- StyledText::new(label).with_default_highlights(
- &window.text_style().clone(),
- highlights,
- ),
- ),
- )
- .child(Label::new(path).color(Color::Muted)),
+ .child(LabelLike::new().child(
+ StyledText::new(label).with_default_highlights(&text_style, highlights),
+ ))
+ .child(Label::new(path).size(LabelSize::Small).color(Color::Muted)),
),
)
}
@@ -439,7 +446,7 @@ mod tests {
deprecated: None,
container_name: None,
location: lsp::Location::new(
- lsp::Url::from_file_path(path.as_ref()).unwrap(),
+ lsp::Uri::from_file_path(path.as_ref()).unwrap(),
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
),
}
@@ -229,12 +229,12 @@ impl PromptBuilder {
log_message.push_str(" -> ");
log_message.push_str(&target.display().to_string());
}
- log::info!("{}.", log_message);
+ log::trace!("{}.", log_message);
} else {
if !found_dir_once {
- log::info!("No prompt template overrides directory found at {}. Using built-in prompts.", templates_dir.display());
+ log::trace!("No prompt template overrides directory found at {}. Using built-in prompts.", templates_dir.display());
if let Some(target) = symlink_status {
- log::info!("Symlink found pointing to {}, but target is invalid.", target.display());
+ log::trace!("Symlink found pointing to {}, but target is invalid.", target.display());
}
}
@@ -247,7 +247,7 @@ impl PromptBuilder {
log_message.push_str(" -> ");
log_message.push_str(&target.display().to_string());
}
- log::info!("{}.", log_message);
+ log::trace!("{}.", log_message);
break;
}
}
@@ -403,7 +403,7 @@ impl PromptBuilder {
ContentPromptDiagnosticContext {
line_number: (start.row + 1) as usize,
error_message: entry.diagnostic.message.clone(),
- code_content: buffer.text_for_range(entry.range.clone()).collect(),
+ code_content: buffer.text_for_range(entry.range).collect(),
}
})
.collect();
@@ -1,4 +1,5 @@
fn main() {
+ println!("cargo:rerun-if-changed=proto");
let mut build = prost_build::Config::new();
build
.type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]")
@@ -2,6 +2,7 @@ syntax = "proto3";
package zed.messages;
import "buffer.proto";
+import "task.proto";
message Context {
repeated ContextOperation operations = 1;
@@ -164,3 +165,35 @@ enum LanguageModelRole {
LanguageModelSystem = 2;
reserved 3;
}
+
+message GetAgentServerCommand {
+ uint64 project_id = 1;
+ string name = 2;
+ optional string root_dir = 3;
+}
+
+message AgentServerCommand {
+ string path = 1;
+ repeated string args = 2;
+ map<string, string> env = 3;
+ string root_dir = 4;
+
+ optional SpawnInTerminal login = 5;
+}
+
+message ExternalAgentsUpdated {
+ uint64 project_id = 1;
+ repeated string names = 2;
+}
+
+message ExternalAgentLoadingStatusUpdated {
+ uint64 project_id = 1;
+ string name = 2;
+ string status = 3;
+}
+
+message NewExternalAgentVersionAvailable {
+ uint64 project_id = 1;
+ string name = 2;
+ string version = 3;
+}
@@ -143,6 +143,7 @@ message Operation {
UpdateSelections update_selections = 3;
UpdateDiagnostics update_diagnostics = 4;
UpdateCompletionTriggers update_completion_triggers = 5;
+ UpdateLineEnding update_line_ending = 6;
}
message Edit {
@@ -174,6 +175,12 @@ message Operation {
repeated string triggers = 3;
uint64 language_server_id = 4;
}
+
+ message UpdateLineEnding {
+ uint32 replica_id = 1;
+ uint32 lamport_timestamp = 2;
+ LineEnding line_ending = 3;
+ }
}
message ProjectTransaction {
@@ -3,6 +3,7 @@ package zed.messages;
import "core.proto";
import "buffer.proto";
+import "task.proto";
enum BreakpointState {
Enabled = 0;
@@ -533,16 +534,22 @@ message DebugScenario {
optional string configuration = 7;
}
-message SpawnInTerminal {
- string label = 1;
- optional string command = 2;
- repeated string args = 3;
- map<string, string> env = 4;
- optional string cwd = 5;
-}
-
message LogToDebugConsole {
uint64 project_id = 1;
uint64 session_id = 2;
string message = 3;
}
+
+message GetProcesses {
+ uint64 project_id = 1;
+}
+
+message GetProcessesResponse {
+ repeated ProcessInfo processes = 1;
+}
+
+message ProcessInfo {
+ uint32 pid = 1;
+ string name = 2;
+ repeated string command = 3;
+}
@@ -610,11 +610,36 @@ message ServerMetadataUpdated {
message LanguageServerLog {
uint64 project_id = 1;
uint64 language_server_id = 2;
+ string message = 3;
oneof log_type {
- uint32 log_message_type = 3;
- LspLogTrace log_trace = 4;
+ LogMessage log = 4;
+ TraceMessage trace = 5;
+ RpcMessage rpc = 6;
+ }
+}
+
+message LogMessage {
+ LogLevel level = 1;
+
+ enum LogLevel {
+ LOG = 0;
+ INFO = 1;
+ WARNING = 2;
+ ERROR = 3;
+ }
+}
+
+message TraceMessage {
+ optional string verbose_info = 1;
+}
+
+message RpcMessage {
+ Kind kind = 1;
+
+ enum Kind {
+ RECEIVED = 0;
+ SENT = 1;
}
- string message = 5;
}
message LspLogTrace {
@@ -753,26 +778,45 @@ message TextEdit {
PointUtf16 lsp_range_end = 3;
}
-message MultiLspQuery {
+message LspQuery {
uint64 project_id = 1;
- uint64 buffer_id = 2;
- repeated VectorClockEntry version = 3;
- oneof strategy {
- AllLanguageServers all = 4;
- }
+ uint64 lsp_request_id = 2;
oneof request {
+ GetReferences get_references = 3;
+ GetDocumentColor get_document_color = 4;
GetHover get_hover = 5;
GetCodeActions get_code_actions = 6;
GetSignatureHelp get_signature_help = 7;
GetCodeLens get_code_lens = 8;
GetDocumentDiagnostics get_document_diagnostics = 9;
- GetDocumentColor get_document_color = 10;
- GetDefinition get_definition = 11;
- GetDeclaration get_declaration = 12;
- GetTypeDefinition get_type_definition = 13;
- GetImplementation get_implementation = 14;
- GetReferences get_references = 15;
+ GetDefinition get_definition = 10;
+ GetDeclaration get_declaration = 11;
+ GetTypeDefinition get_type_definition = 12;
+ GetImplementation get_implementation = 13;
+ }
+}
+
+message LspQueryResponse {
+ uint64 project_id = 1;
+ uint64 lsp_request_id = 2;
+ repeated LspResponse responses = 3;
+}
+
+message LspResponse {
+ oneof response {
+ GetHoverResponse get_hover_response = 1;
+ GetCodeActionsResponse get_code_actions_response = 2;
+ GetSignatureHelpResponse get_signature_help_response = 3;
+ GetCodeLensResponse get_code_lens_response = 4;
+ GetDocumentDiagnosticsResponse get_document_diagnostics_response = 5;
+ GetDocumentColorResponse get_document_color_response = 6;
+ GetDefinitionResponse get_definition_response = 8;
+ GetDeclarationResponse get_declaration_response = 9;
+ GetTypeDefinitionResponse get_type_definition_response = 10;
+ GetImplementationResponse get_implementation_response = 11;
+ GetReferencesResponse get_references_response = 12;
}
+ uint64 server_id = 7;
}
message AllLanguageServers {}
@@ -798,27 +842,6 @@ message StopLanguageServers {
bool all = 4;
}
-message MultiLspQueryResponse {
- repeated LspResponse responses = 1;
-}
-
-message LspResponse {
- oneof response {
- GetHoverResponse get_hover_response = 1;
- GetCodeActionsResponse get_code_actions_response = 2;
- GetSignatureHelpResponse get_signature_help_response = 3;
- GetCodeLensResponse get_code_lens_response = 4;
- GetDocumentDiagnosticsResponse get_document_diagnostics_response = 5;
- GetDocumentColorResponse get_document_color_response = 6;
- GetDefinitionResponse get_definition_response = 8;
- GetDeclarationResponse get_declaration_response = 9;
- GetTypeDefinitionResponse get_type_definition_response = 10;
- GetImplementationResponse get_implementation_response = 11;
- GetReferencesResponse get_references_response = 12;
- }
- uint64 server_id = 7;
-}
-
message LspExtRunnables {
uint64 project_id = 1;
uint64 buffer_id = 2;
@@ -836,21 +859,19 @@ message LspRunnable {
message LspExtCancelFlycheck {
uint64 project_id = 1;
- uint64 buffer_id = 2;
- uint64 language_server_id = 3;
+ uint64 language_server_id = 2;
}
message LspExtRunFlycheck {
uint64 project_id = 1;
- uint64 buffer_id = 2;
+ optional uint64 buffer_id = 2;
uint64 language_server_id = 3;
bool current_file_only = 4;
}
message LspExtClearFlycheck {
uint64 project_id = 1;
- uint64 buffer_id = 2;
- uint64 language_server_id = 3;
+ uint64 language_server_id = 2;
}
message LspDiagnosticRelatedInformation {
@@ -909,3 +930,43 @@ message PullWorkspaceDiagnostics {
uint64 project_id = 1;
uint64 server_id = 2;
}
+
+// todo(lsp) remove after Zed Stable hits v0.204.x
+message MultiLspQuery {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+ repeated VectorClockEntry version = 3;
+ oneof strategy {
+ AllLanguageServers all = 4;
+ }
+ oneof request {
+ GetHover get_hover = 5;
+ GetCodeActions get_code_actions = 6;
+ GetSignatureHelp get_signature_help = 7;
+ GetCodeLens get_code_lens = 8;
+ GetDocumentDiagnostics get_document_diagnostics = 9;
+ GetDocumentColor get_document_color = 10;
+ GetDefinition get_definition = 11;
+ GetDeclaration get_declaration = 12;
+ GetTypeDefinition get_type_definition = 13;
+ GetImplementation get_implementation = 14;
+ GetReferences get_references = 15;
+ }
+}
+
+message MultiLspQueryResponse {
+ repeated LspResponse responses = 1;
+}
+
+message ToggleLspLogs {
+ uint64 project_id = 1;
+ LogType log_type = 2;
+ uint64 server_id = 3;
+ bool enabled = 4;
+
+ enum LogType {
+ LOG = 0;
+ TRACE = 1;
+ RPC = 2;
+ }
+}
@@ -40,3 +40,11 @@ enum HideStrategy {
HideNever = 1;
HideOnSuccess = 2;
}
+
+message SpawnInTerminal {
+ string label = 1;
+ optional string command = 2;
+ repeated string args = 3;
+ map<string, string> env = 4;
+ optional string cwd = 5;
+}
@@ -44,3 +44,16 @@ message ActiveToolchain {
message ActiveToolchainResponse {
optional Toolchain toolchain = 1;
}
+
+message ResolveToolchain {
+ uint64 project_id = 1;
+ string abs_path = 2;
+ string language_name = 3;
+}
+
+message ResolveToolchainResponse {
+ oneof response {
+ Toolchain toolchain = 1;
+ string error = 2;
+ }
+}
@@ -150,3 +150,8 @@ enum LocalSettingsKind {
Editorconfig = 2;
Debug = 3;
}
+
+message UpdateUserSettings {
+ uint64 project_id = 1;
+ string contents = 2;
+}
@@ -393,7 +393,27 @@ message Envelope {
GetCrashFilesResponse get_crash_files_response = 362;
GitClone git_clone = 363;
- GitCloneResponse git_clone_response = 364; // current max
+ GitCloneResponse git_clone_response = 364;
+
+ LspQuery lsp_query = 365;
+ LspQueryResponse lsp_query_response = 366;
+ ToggleLspLogs toggle_lsp_logs = 367;
+
+ UpdateUserSettings update_user_settings = 368;
+
+ GetProcesses get_processes = 369;
+ GetProcessesResponse get_processes_response = 370;
+
+ ResolveToolchain resolve_toolchain = 371;
+ ResolveToolchainResponse resolve_toolchain_response = 372;
+
+ GetAgentServerCommand get_agent_server_command = 373;
+ AgentServerCommand agent_server_command = 374;
+
+ ExternalAgentsUpdated external_agents_updated = 375;
+
+ ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376;
+ NewExternalAgentVersionAvailable new_external_agent_version_available = 377; // current max
}
reserved 87 to 88;
@@ -69,3 +69,32 @@ macro_rules! entity_messages {
})*
};
}
+
+#[macro_export]
+macro_rules! lsp_messages {
+ ($(($request_name:ident, $response_name:ident, $stop_previous_requests:expr)),* $(,)?) => {
+ $(impl LspRequestMessage for $request_name {
+ type Response = $response_name;
+
+ fn to_proto_query(self) -> $crate::lsp_query::Request {
+ $crate::lsp_query::Request::$request_name(self)
+ }
+
+ fn response_to_proto_query(response: Self::Response) -> $crate::lsp_response::Response {
+ $crate::lsp_response::Response::$response_name(response)
+ }
+
+ fn buffer_id(&self) -> u64 {
+ self.buffer_id
+ }
+
+ fn buffer_version(&self) -> &[$crate::VectorClockEntry] {
+ &self.version
+ }
+
+ fn stop_previous_requests() -> bool {
+ $stop_previous_requests
+ }
+ })*
+ };
+}
@@ -16,8 +16,8 @@ pub use typed_envelope::*;
include!(concat!(env!("OUT_DIR"), "/zed.messages.rs"));
-pub const SSH_PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 };
-pub const SSH_PROJECT_ID: u64 = 0;
+pub const REMOTE_SERVER_PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 };
+pub const REMOTE_SERVER_PROJECT_ID: u64 = 0;
messages!(
(Ack, Foreground),
@@ -26,6 +26,8 @@ messages!(
(ActivateToolchain, Foreground),
(ActiveToolchain, Foreground),
(ActiveToolchainResponse, Foreground),
+ (ResolveToolchain, Background),
+ (ResolveToolchainResponse, Background),
(AddNotification, Foreground),
(AddProjectCollaborator, Foreground),
(AddWorktree, Foreground),
@@ -102,6 +104,8 @@ messages!(
(GetPathMetadata, Background),
(GetPathMetadataResponse, Background),
(GetPermalinkToLine, Foreground),
+ (GetProcesses, Background),
+ (GetProcessesResponse, Background),
(GetPermalinkToLineResponse, Foreground),
(GetProjectSymbols, Background),
(GetProjectSymbolsResponse, Background),
@@ -169,6 +173,9 @@ messages!(
(MarkNotificationRead, Foreground),
(MoveChannel, Foreground),
(ReorderChannel, Foreground),
+ (LspQuery, Background),
+ (LspQueryResponse, Background),
+ // todo(lsp) remove after Zed Stable hits v0.204.x
(MultiLspQuery, Background),
(MultiLspQueryResponse, Background),
(OnTypeFormatting, Background),
@@ -275,6 +282,7 @@ messages!(
(UpdateUserChannels, Foreground),
(UpdateWorktree, Foreground),
(UpdateWorktreeSettings, Foreground),
+ (UpdateUserSettings, Background),
(UpdateRepository, Foreground),
(RemoveRepository, Foreground),
(UsersResponse, Foreground),
@@ -309,7 +317,13 @@ messages!(
(GetDefaultBranch, Background),
(GetDefaultBranchResponse, Background),
(GitClone, Background),
- (GitCloneResponse, Background)
+ (GitCloneResponse, Background),
+ (ToggleLspLogs, Background),
+ (GetAgentServerCommand, Background),
+ (AgentServerCommand, Background),
+ (ExternalAgentsUpdated, Background),
+ (ExternalAgentLoadingStatusUpdated, Background),
+ (NewExternalAgentVersionAvailable, Background),
);
request_messages!(
@@ -426,7 +440,10 @@ request_messages!(
(SetRoomParticipantRole, Ack),
(BlameBuffer, BlameBufferResponse),
(RejoinRemoteProjects, RejoinRemoteProjectsResponse),
+ // todo(lsp) remove after Zed Stable hits v0.204.x
(MultiLspQuery, MultiLspQueryResponse),
+ (LspQuery, Ack),
+ (LspQueryResponse, Ack),
(RestartLanguageServers, Ack),
(StopLanguageServers, Ack),
(OpenContext, OpenContextResponse),
@@ -449,6 +466,7 @@ request_messages!(
(ListToolchains, ListToolchainsResponse),
(ActivateToolchain, Ack),
(ActiveToolchain, ActiveToolchainResponse),
+ (ResolveToolchain, ResolveToolchainResponse),
(GetPathMetadata, GetPathMetadataResponse),
(GetCrashFiles, GetCrashFilesResponse),
(CancelLanguageServerWork, Ack),
@@ -475,7 +493,24 @@ request_messages!(
(GetDocumentDiagnostics, GetDocumentDiagnosticsResponse),
(PullWorkspaceDiagnostics, Ack),
(GetDefaultBranch, GetDefaultBranchResponse),
- (GitClone, GitCloneResponse)
+ (GitClone, GitCloneResponse),
+ (ToggleLspLogs, Ack),
+ (GetProcesses, GetProcessesResponse),
+ (GetAgentServerCommand, AgentServerCommand)
+);
+
+lsp_messages!(
+ (GetReferences, GetReferencesResponse, true),
+ (GetDocumentColor, GetDocumentColorResponse, true),
+ (GetHover, GetHoverResponse, true),
+ (GetCodeActions, GetCodeActionsResponse, true),
+ (GetSignatureHelp, GetSignatureHelpResponse, true),
+ (GetCodeLens, GetCodeLensResponse, true),
+ (GetDocumentDiagnostics, GetDocumentDiagnosticsResponse, true),
+ (GetDefinition, GetDefinitionResponse, true),
+ (GetDeclaration, GetDeclarationResponse, true),
+ (GetTypeDefinition, GetTypeDefinitionResponse, true),
+ (GetImplementation, GetImplementationResponse, true),
);
entity_messages!(
@@ -520,6 +555,9 @@ entity_messages!(
LeaveProject,
LinkedEditingRange,
LoadCommitDiff,
+ LspQuery,
+ LspQueryResponse,
+ // todo(lsp) remove after Zed Stable hits v0.204.x
MultiLspQuery,
RestartLanguageServers,
StopLanguageServers,
@@ -558,6 +596,7 @@ entity_messages!(
UpdateRepository,
RemoveRepository,
UpdateWorktreeSettings,
+ UpdateUserSettings,
LspExtExpandMacro,
LspExtOpenDocs,
LspExtRunnables,
@@ -582,13 +621,16 @@ entity_messages!(
ListToolchains,
ActivateToolchain,
ActiveToolchain,
+ ResolveToolchain,
GetPathMetadata,
+ GetProcesses,
CancelLanguageServerWork,
RegisterBufferWithLanguageServers,
GitShow,
GitReset,
GitCheckoutFiles,
SetIndexText,
+ ToggleLspLogs,
Push,
Fetch,
@@ -608,7 +650,11 @@ entity_messages!(
GetDocumentDiagnostics,
PullWorkspaceDiagnostics,
GetDefaultBranch,
- GitClone
+ GitClone,
+ GetAgentServerCommand,
+ ExternalAgentsUpdated,
+ ExternalAgentLoadingStatusUpdated,
+ NewExternalAgentVersionAvailable,
);
entity_messages!(
@@ -777,6 +823,28 @@ pub fn split_repository_update(
}])
}
+impl LspQuery {
+ pub fn query_name_and_write_permissions(&self) -> (&str, bool) {
+ match self.request {
+ Some(lsp_query::Request::GetHover(_)) => ("GetHover", false),
+ Some(lsp_query::Request::GetCodeActions(_)) => ("GetCodeActions", true),
+ Some(lsp_query::Request::GetSignatureHelp(_)) => ("GetSignatureHelp", false),
+ Some(lsp_query::Request::GetCodeLens(_)) => ("GetCodeLens", true),
+ Some(lsp_query::Request::GetDocumentDiagnostics(_)) => {
+ ("GetDocumentDiagnostics", false)
+ }
+ Some(lsp_query::Request::GetDefinition(_)) => ("GetDefinition", false),
+ Some(lsp_query::Request::GetDeclaration(_)) => ("GetDeclaration", false),
+ Some(lsp_query::Request::GetTypeDefinition(_)) => ("GetTypeDefinition", false),
+ Some(lsp_query::Request::GetImplementation(_)) => ("GetImplementation", false),
+ Some(lsp_query::Request::GetReferences(_)) => ("GetReferences", false),
+ Some(lsp_query::Request::GetDocumentColor(_)) => ("GetDocumentColor", false),
+ None => ("<unknown>", true),
+ }
+ }
+}
+
+// todo(lsp) remove after Zed Stable hits v0.204.x
impl MultiLspQuery {
pub fn request_str(&self) -> &str {
match self.request {
@@ -31,6 +31,58 @@ pub trait RequestMessage: EnvelopedMessage {
type Response: EnvelopedMessage;
}
+/// A trait to bind LSP request and responses for the proto layer.
+/// Should be used for every LSP request that has to traverse through the proto layer.
+///
+/// `lsp_messages` macro in the same crate provides a convenient way to implement this.
+pub trait LspRequestMessage: EnvelopedMessage {
+ type Response: EnvelopedMessage;
+
+ fn to_proto_query(self) -> crate::lsp_query::Request;
+
+ fn response_to_proto_query(response: Self::Response) -> crate::lsp_response::Response;
+
+ fn buffer_id(&self) -> u64;
+
+ fn buffer_version(&self) -> &[crate::VectorClockEntry];
+
+ /// Whether to deduplicate the requests, or keep the previous ones running when another
+ /// request of the same kind is processed.
+ fn stop_previous_requests() -> bool;
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LspRequestId(pub u64);
+
+/// A response from a single language server.
+/// There could be multiple responses for a single LSP request,
+/// from different servers.
+pub struct ProtoLspResponse<R> {
+ pub server_id: u64,
+ pub response: R,
+}
+
+impl ProtoLspResponse<Box<dyn AnyTypedEnvelope>> {
+ pub fn into_response<T: LspRequestMessage>(self) -> Result<ProtoLspResponse<T::Response>> {
+ let envelope = self
+ .response
+ .into_any()
+ .downcast::<TypedEnvelope<T::Response>>()
+ .map_err(|_| {
+ anyhow::anyhow!(
+ "cannot downcast LspResponse to {} for message {}",
+ T::Response::NAME,
+ T::NAME,
+ )
+ })?;
+
+ Ok(ProtoLspResponse {
+ server_id: self.server_id,
+ response: envelope.payload,
+ })
+ }
+}
+
pub trait AnyTypedEnvelope: Any + Send + Sync {
fn payload_type_id(&self) -> TypeId;
fn payload_type_name(&self) -> &'static str;
@@ -1,8 +1,6 @@
-use std::path::PathBuf;
-
use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, WeakEntity};
use project::project_settings::ProjectSettings;
-use remote::SshConnectionOptions;
+use remote::RemoteConnectionOptions;
use settings::Settings;
use ui::{
Button, ButtonCommon, ButtonStyle, Clickable, Context, ElevationIndex, FluentBuilder, Headline,
@@ -11,11 +9,11 @@ use ui::{
};
use workspace::{ModalView, OpenOptions, Workspace, notifications::DetachAndPromptErr};
-use crate::open_ssh_project;
+use crate::open_remote_project;
enum Host {
- RemoteProject,
- SshRemoteProject(SshConnectionOptions),
+ CollabGuestProject,
+ RemoteServerProject(RemoteConnectionOptions),
}
pub struct DisconnectedOverlay {
@@ -66,11 +64,11 @@ impl DisconnectedOverlay {
}
let handle = cx.entity().downgrade();
- let ssh_connection_options = project.read(cx).ssh_connection_options(cx);
- let host = if let Some(ssh_connection_options) = ssh_connection_options {
- Host::SshRemoteProject(ssh_connection_options)
+ let remote_connection_options = project.read(cx).remote_connection_options(cx);
+ let host = if let Some(ssh_connection_options) = remote_connection_options {
+ Host::RemoteServerProject(ssh_connection_options)
} else {
- Host::RemoteProject
+ Host::CollabGuestProject
};
workspace.toggle_modal(window, cx, |_, cx| DisconnectedOverlay {
@@ -88,17 +86,14 @@ impl DisconnectedOverlay {
self.finished = true;
cx.emit(DismissEvent);
- match &self.host {
- Host::SshRemoteProject(ssh_connection_options) => {
- self.reconnect_to_ssh_remote(ssh_connection_options.clone(), window, cx);
- }
- _ => {}
+ if let Host::RemoteServerProject(ssh_connection_options) = &self.host {
+ self.reconnect_to_remote_project(ssh_connection_options.clone(), window, cx);
}
}
- fn reconnect_to_ssh_remote(
+ fn reconnect_to_remote_project(
&self,
- connection_options: SshConnectionOptions,
+ connection_options: RemoteConnectionOptions,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -106,20 +101,20 @@ impl DisconnectedOverlay {
return;
};
- let Some(ssh_project) = workspace.read(cx).serialized_ssh_project() else {
- return;
- };
-
let Some(window_handle) = window.window_handle().downcast::<Workspace>() else {
return;
};
let app_state = workspace.read(cx).app_state().clone();
-
- let paths = ssh_project.paths.iter().map(PathBuf::from).collect();
+ let paths = workspace
+ .read(cx)
+ .root_paths(cx)
+ .iter()
+ .map(|path| path.to_path_buf())
+ .collect();
cx.spawn_in(window, async move |_, cx| {
- open_ssh_project(
+ open_remote_project(
connection_options,
paths,
app_state,
@@ -143,13 +138,13 @@ impl DisconnectedOverlay {
impl Render for DisconnectedOverlay {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let can_reconnect = matches!(self.host, Host::SshRemoteProject(_));
+ let can_reconnect = matches!(self.host, Host::RemoteServerProject(_));
let message = match &self.host {
- Host::RemoteProject => {
+ Host::CollabGuestProject => {
"Your connection to the remote project has been lost.".to_string()
}
- Host::SshRemoteProject(options) => {
+ Host::RemoteServerProject(options) => {
let autosave = if ProjectSettings::get_global(cx)
.session
.restore_unsaved_buffers
@@ -160,7 +155,8 @@ impl Render for DisconnectedOverlay {
};
format!(
"Your connection to {} has been lost.{}",
- options.host, autosave
+ options.display_name(),
+ autosave
)
}
};
@@ -1,9 +1,10 @@
pub mod disconnected_overlay;
+mod remote_connections;
mod remote_servers;
mod ssh_config;
-mod ssh_connections;
-pub use ssh_connections::{is_connecting_over_ssh, open_ssh_project};
+use remote::RemoteConnectionOptions;
+pub use remote_connections::open_remote_project;
use disconnected_overlay::DisconnectedOverlay;
use fuzzy::{StringMatch, StringMatchCandidate};
@@ -16,18 +17,15 @@ use picker::{
Picker, PickerDelegate,
highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths},
};
+pub use remote_connections::SshSettings;
pub use remote_servers::RemoteServerProjects;
use settings::Settings;
-pub use ssh_connections::SshSettings;
-use std::{
- path::{Path, PathBuf},
- sync::Arc,
-};
+use std::{path::Path, sync::Arc};
use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, tooltip_container};
use util::{ResultExt, paths::PathExt};
use workspace::{
- CloseIntent, HistoryManager, ModalView, OpenOptions, SerializedWorkspaceLocation, WORKSPACE_DB,
- Workspace, WorkspaceId, with_active_or_new_workspace,
+ CloseIntent, HistoryManager, ModalView, OpenOptions, PathList, SerializedWorkspaceLocation,
+ WORKSPACE_DB, Workspace, WorkspaceId, with_active_or_new_workspace,
};
use zed_actions::{OpenRecent, OpenRemote};
@@ -154,7 +152,7 @@ impl Render for RecentProjects {
pub struct RecentProjectsDelegate {
workspace: WeakEntity<Workspace>,
- workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>,
+ workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation, PathList)>,
selected_match_index: usize,
matches: Vec<StringMatch>,
render_paths: bool,
@@ -178,12 +176,15 @@ impl RecentProjectsDelegate {
}
}
- pub fn set_workspaces(&mut self, workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>) {
+ pub fn set_workspaces(
+ &mut self,
+ workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation, PathList)>,
+ ) {
self.workspaces = workspaces;
self.has_any_non_local_projects = !self
.workspaces
.iter()
- .all(|(_, location)| matches!(location, SerializedWorkspaceLocation::Local(_, _)));
+ .all(|(_, location, _)| matches!(location, SerializedWorkspaceLocation::Local));
}
}
impl EventEmitter<DismissEvent> for RecentProjectsDelegate {}
@@ -236,15 +237,14 @@ impl PickerDelegate for RecentProjectsDelegate {
.workspaces
.iter()
.enumerate()
- .filter(|(_, (id, _))| !self.is_current_workspace(*id, cx))
- .map(|(id, (_, location))| {
- let combined_string = location
- .sorted_paths()
+ .filter(|(_, (id, _, _))| !self.is_current_workspace(*id, cx))
+ .map(|(id, (_, _, paths))| {
+ let combined_string = paths
+ .paths()
.iter()
.map(|path| path.compact().to_string_lossy().into_owned())
.collect::<Vec<_>>()
.join("");
-
StringMatchCandidate::new(id, &combined_string)
})
.collect::<Vec<_>>();
@@ -279,7 +279,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.get(self.selected_index())
.zip(self.workspace.upgrade())
{
- let (candidate_workspace_id, candidate_workspace_location) =
+ let (candidate_workspace_id, candidate_workspace_location, candidate_workspace_paths) =
&self.workspaces[selected_match.candidate_id];
let replace_current_window = if self.create_new_window {
secondary
@@ -291,9 +291,9 @@ impl PickerDelegate for RecentProjectsDelegate {
if workspace.database_id() == Some(*candidate_workspace_id) {
Task::ready(Ok(()))
} else {
- match candidate_workspace_location {
- SerializedWorkspaceLocation::Local(paths, _) => {
- let paths = paths.paths().to_vec();
+ match candidate_workspace_location.clone() {
+ SerializedWorkspaceLocation::Local => {
+ let paths = candidate_workspace_paths.paths().to_vec();
if replace_current_window {
cx.spawn_in(window, async move |workspace, cx| {
let continue_replacing = workspace
@@ -321,7 +321,7 @@ impl PickerDelegate for RecentProjectsDelegate {
workspace.open_workspace_for_paths(false, paths, window, cx)
}
}
- SerializedWorkspaceLocation::Ssh(ssh_project) => {
+ SerializedWorkspaceLocation::Remote(mut connection) => {
let app_state = workspace.app_state().clone();
let replace_window = if replace_current_window {
@@ -335,18 +335,16 @@ impl PickerDelegate for RecentProjectsDelegate {
..Default::default()
};
- let connection_options = SshSettings::get_global(cx)
- .connection_options_for(
- ssh_project.host.clone(),
- ssh_project.port,
- ssh_project.user.clone(),
- );
+ if let RemoteConnectionOptions::Ssh(connection) = &mut connection {
+ SshSettings::get_global(cx)
+ .fill_connection_options_from_settings(connection);
+ };
- let paths = ssh_project.paths.iter().map(PathBuf::from).collect();
+ let paths = candidate_workspace_paths.paths().to_vec();
cx.spawn_in(window, async move |_, cx| {
- open_ssh_project(
- connection_options,
+ open_remote_project(
+ connection.clone(),
paths,
app_state,
open_options,
@@ -383,12 +381,12 @@ impl PickerDelegate for RecentProjectsDelegate {
) -> Option<Self::ListItem> {
let hit = self.matches.get(ix)?;
- let (_, location) = self.workspaces.get(hit.candidate_id)?;
+ let (_, location, paths) = self.workspaces.get(hit.candidate_id)?;
let mut path_start_offset = 0;
- let (match_labels, paths): (Vec<_>, Vec<_>) = location
- .sorted_paths()
+ let (match_labels, paths): (Vec<_>, Vec<_>) = paths
+ .paths()
.iter()
.map(|p| p.compact())
.map(|path| {
@@ -416,14 +414,14 @@ impl PickerDelegate for RecentProjectsDelegate {
.gap_3()
.when(self.has_any_non_local_projects, |this| {
this.child(match location {
- SerializedWorkspaceLocation::Local(_, _) => {
- Icon::new(IconName::Screen)
+ SerializedWorkspaceLocation::Local => Icon::new(IconName::Screen)
+ .color(Color::Muted)
+ .into_any_element(),
+ SerializedWorkspaceLocation::Remote(_) => {
+ Icon::new(IconName::Server)
.color(Color::Muted)
.into_any_element()
}
- SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server)
- .color(Color::Muted)
- .into_any_element(),
})
})
.child({
@@ -568,7 +566,7 @@ impl RecentProjectsDelegate {
cx: &mut Context<Picker<Self>>,
) {
if let Some(selected_match) = self.matches.get(ix) {
- let (workspace_id, _) = self.workspaces[selected_match.candidate_id];
+ let (workspace_id, _, _) = self.workspaces[selected_match.candidate_id];
cx.spawn_in(window, async move |this, cx| {
let _ = WORKSPACE_DB.delete_workspace_by_id(workspace_id).await;
let workspaces = WORKSPACE_DB
@@ -707,7 +705,8 @@ mod tests {
}];
delegate.set_workspaces(vec![(
WorkspaceId::default(),
- SerializedWorkspaceLocation::from_local_paths(vec![path!("/test/path/")]),
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&[path!("/test/path")]),
)]);
});
})
@@ -1,5 +1,5 @@
use std::collections::BTreeSet;
-use std::{path::PathBuf, sync::Arc, time::Duration};
+use std::{path::PathBuf, sync::Arc};
use anyhow::{Context as _, Result};
use auto_update::AutoUpdater;
@@ -7,23 +7,25 @@ use editor::Editor;
use extension_host::ExtensionStore;
use futures::channel::oneshot;
use gpui::{
- Animation, AnimationExt, AnyWindowHandle, App, AsyncApp, DismissEvent, Entity, EventEmitter,
- Focusable, FontFeatures, ParentElement as _, PromptLevel, Render, SemanticVersion,
- SharedString, Task, TextStyleRefinement, Transformation, WeakEntity, percentage,
+ AnyWindowHandle, App, AsyncApp, DismissEvent, Entity, EventEmitter, Focusable, FontFeatures,
+ ParentElement as _, PromptLevel, Render, SemanticVersion, SharedString, Task,
+ TextStyleRefinement, WeakEntity,
};
use language::CursorShape;
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
use release_channel::ReleaseChannel;
-use remote::ssh_session::{ConnectionIdentifier, SshPortForwardOption};
-use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient};
+use remote::{
+ ConnectionIdentifier, RemoteClient, RemoteConnectionOptions, RemotePlatform,
+ SshConnectionOptions, SshPortForwardOption,
+};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use theme::ThemeSettings;
use ui::{
- ActiveTheme, Color, Context, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label,
- LabelCommon, Styled, Window, prelude::*,
+ ActiveTheme, Color, CommonAnimationExt, Context, Icon, IconName, IconSize, InteractiveElement,
+ IntoElement, Label, LabelCommon, Styled, Window, prelude::*,
};
use util::serde::default_true;
use workspace::{AppState, ModalView, Workspace};
@@ -41,32 +43,35 @@ impl SshSettings {
self.ssh_connections.clone().into_iter().flatten()
}
+ pub fn fill_connection_options_from_settings(&self, options: &mut SshConnectionOptions) {
+ for conn in self.ssh_connections() {
+ if conn.host == options.host
+ && conn.username == options.username
+ && conn.port == options.port
+ {
+ options.nickname = conn.nickname;
+ options.upload_binary_over_ssh = conn.upload_binary_over_ssh.unwrap_or_default();
+ options.args = Some(conn.args);
+ options.port_forwards = conn.port_forwards;
+ break;
+ }
+ }
+ }
+
pub fn connection_options_for(
&self,
host: String,
port: Option<u16>,
username: Option<String>,
) -> SshConnectionOptions {
- for conn in self.ssh_connections() {
- if conn.host == host && conn.username == username && conn.port == port {
- return SshConnectionOptions {
- nickname: conn.nickname,
- upload_binary_over_ssh: conn.upload_binary_over_ssh.unwrap_or_default(),
- args: Some(conn.args),
- host,
- port,
- username,
- port_forwards: conn.port_forwards,
- password: None,
- };
- }
- }
- SshConnectionOptions {
+ let mut options = SshConnectionOptions {
host,
port,
username,
..Default::default()
- }
+ };
+ self.fill_connection_options_from_settings(&mut options);
+ options
}
}
@@ -116,15 +121,14 @@ pub struct SshProject {
pub paths: Vec<String>,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct RemoteSettingsContent {
pub ssh_connections: Option<Vec<SshConnection>>,
pub read_ssh_config: Option<bool>,
}
impl Settings for SshSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = RemoteSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -134,7 +138,7 @@ impl Settings for SshSettings {
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}
-pub struct SshPrompt {
+pub struct RemoteConnectionPrompt {
connection_string: SharedString,
nickname: Option<SharedString>,
status_message: Option<SharedString>,
@@ -143,7 +147,7 @@ pub struct SshPrompt {
editor: Entity<Editor>,
}
-impl Drop for SshPrompt {
+impl Drop for RemoteConnectionPrompt {
fn drop(&mut self) {
if let Some(cancel) = self.cancellation.take() {
cancel.send(()).ok();
@@ -151,24 +155,22 @@ impl Drop for SshPrompt {
}
}
-pub struct SshConnectionModal {
- pub(crate) prompt: Entity<SshPrompt>,
+pub struct RemoteConnectionModal {
+ pub(crate) prompt: Entity<RemoteConnectionPrompt>,
paths: Vec<PathBuf>,
finished: bool,
}
-impl SshPrompt {
+impl RemoteConnectionPrompt {
pub(crate) fn new(
- connection_options: &SshConnectionOptions,
+ connection_string: String,
+ nickname: Option<String>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
- let connection_string = connection_options.connection_string().into();
- let nickname = connection_options.nickname.clone().map(|s| s.into());
-
Self {
- connection_string,
- nickname,
+ connection_string: connection_string.into(),
+ nickname: nickname.map(|nickname| nickname.into()),
editor: cx.new(|cx| Editor::single_line(window, cx)),
status_message: None,
cancellation: None,
@@ -231,7 +233,7 @@ impl SshPrompt {
}
}
-impl Render for SshPrompt {
+impl Render for RemoteConnectionPrompt {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let theme = ThemeSettings::get_global(cx);
@@ -265,13 +267,7 @@ impl Render for SshPrompt {
.child(
Icon::new(IconName::ArrowCircle)
.size(IconSize::Medium)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(2)).repeat(),
- |icon, delta| {
- icon.transform(Transformation::rotate(percentage(delta)))
- },
- ),
+ .with_rotate_animation(2),
)
.child(
div()
@@ -296,15 +292,22 @@ impl Render for SshPrompt {
}
}
-impl SshConnectionModal {
+impl RemoteConnectionModal {
pub(crate) fn new(
- connection_options: &SshConnectionOptions,
+ connection_options: &RemoteConnectionOptions,
paths: Vec<PathBuf>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
+ let (connection_string, nickname) = match connection_options {
+ RemoteConnectionOptions::Ssh(options) => {
+ (options.connection_string(), options.nickname.clone())
+ }
+ RemoteConnectionOptions::Wsl(options) => (options.distro_name.clone(), None),
+ };
Self {
- prompt: cx.new(|cx| SshPrompt::new(connection_options, window, cx)),
+ prompt: cx
+ .new(|cx| RemoteConnectionPrompt::new(connection_string, nickname, window, cx)),
finished: false,
paths,
}
@@ -385,7 +388,7 @@ impl RenderOnce for SshConnectionHeader {
}
}
-impl Render for SshConnectionModal {
+impl Render for RemoteConnectionModal {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
let nickname = self.prompt.read(cx).nickname.clone();
let connection_string = self.prompt.read(cx).connection_string.clone();
@@ -422,15 +425,15 @@ impl Render for SshConnectionModal {
}
}
-impl Focusable for SshConnectionModal {
+impl Focusable for RemoteConnectionModal {
fn focus_handle(&self, cx: &gpui::App) -> gpui::FocusHandle {
self.prompt.read(cx).editor.focus_handle(cx)
}
}
-impl EventEmitter<DismissEvent> for SshConnectionModal {}
+impl EventEmitter<DismissEvent> for RemoteConnectionModal {}
-impl ModalView for SshConnectionModal {
+impl ModalView for RemoteConnectionModal {
fn on_before_dismiss(
&mut self,
_window: &mut Window,
@@ -445,13 +448,13 @@ impl ModalView for SshConnectionModal {
}
#[derive(Clone)]
-pub struct SshClientDelegate {
+pub struct RemoteClientDelegate {
window: AnyWindowHandle,
- ui: WeakEntity<SshPrompt>,
+ ui: WeakEntity<RemoteConnectionPrompt>,
known_password: Option<String>,
}
-impl remote::SshClientDelegate for SshClientDelegate {
+impl remote::RemoteClientDelegate for RemoteClientDelegate {
fn ask_password(&self, prompt: String, tx: oneshot::Sender<String>, cx: &mut AsyncApp) {
let mut known_password = self.known_password.clone();
if let Some(password) = known_password.take() {
@@ -473,7 +476,7 @@ impl remote::SshClientDelegate for SshClientDelegate {
fn download_server_binary_locally(
&self,
- platform: SshPlatform,
+ platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
@@ -503,7 +506,7 @@ impl remote::SshClientDelegate for SshClientDelegate {
fn get_download_params(
&self,
- platform: SshPlatform,
+ platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
@@ -521,7 +524,7 @@ impl remote::SshClientDelegate for SshClientDelegate {
}
}
-impl SshClientDelegate {
+impl RemoteClientDelegate {
fn update_status(&self, status: Option<&str>, cx: &mut AsyncApp) {
self.window
.update(cx, |_, _, cx| {
@@ -533,27 +536,23 @@ impl SshClientDelegate {
}
}
-pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &App) -> bool {
- workspace.active_modal::<SshConnectionModal>(cx).is_some()
-}
-
pub fn connect_over_ssh(
unique_identifier: ConnectionIdentifier,
connection_options: SshConnectionOptions,
- ui: Entity<SshPrompt>,
+ ui: Entity<RemoteConnectionPrompt>,
window: &mut Window,
cx: &mut App,
-) -> Task<Result<Option<Entity<SshRemoteClient>>>> {
+) -> Task<Result<Option<Entity<RemoteClient>>>> {
let window = window.window_handle();
let known_password = connection_options.password.clone();
let (tx, rx) = oneshot::channel();
ui.update(cx, |ui, _cx| ui.set_cancellation_tx(tx));
- remote::SshRemoteClient::new(
+ remote::RemoteClient::ssh(
unique_identifier,
connection_options,
rx,
- Arc::new(SshClientDelegate {
+ Arc::new(RemoteClientDelegate {
window,
ui: ui.downgrade(),
known_password,
@@ -562,8 +561,8 @@ pub fn connect_over_ssh(
)
}
-pub async fn open_ssh_project(
- connection_options: SshConnectionOptions,
+pub async fn open_remote_project(
+ connection_options: RemoteConnectionOptions,
paths: Vec<PathBuf>,
app_state: Arc<AppState>,
open_options: workspace::OpenOptions,
@@ -574,13 +573,7 @@ pub async fn open_ssh_project(
} else {
let workspace_position = cx
.update(|cx| {
- workspace::ssh_workspace_position_from_db(
- connection_options.host.clone(),
- connection_options.port,
- connection_options.username.clone(),
- &paths,
- cx,
- )
+ workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx)
})?
.await
.context("fetching ssh workspace position from db")?;
@@ -610,16 +603,16 @@ pub async fn open_ssh_project(
loop {
let (cancel_tx, cancel_rx) = oneshot::channel();
let delegate = window.update(cx, {
- let connection_options = connection_options.clone();
let paths = paths.clone();
+ let connection_options = connection_options.clone();
move |workspace, window, cx| {
window.activate_window();
workspace.toggle_modal(window, cx, |window, cx| {
- SshConnectionModal::new(&connection_options, paths, window, cx)
+ RemoteConnectionModal::new(&connection_options, paths, window, cx)
});
let ui = workspace
- .active_modal::<SshConnectionModal>(cx)?
+ .active_modal::<RemoteConnectionModal>(cx)?
.read(cx)
.prompt
.clone();
@@ -628,19 +621,25 @@ pub async fn open_ssh_project(
ui.set_cancellation_tx(cancel_tx);
});
- Some(Arc::new(SshClientDelegate {
+ Some(Arc::new(RemoteClientDelegate {
window: window.window_handle(),
ui: ui.downgrade(),
- known_password: connection_options.password.clone(),
+ known_password: if let RemoteConnectionOptions::Ssh(options) =
+ &connection_options
+ {
+ options.password.clone()
+ } else {
+ None
+ },
}))
}
})?;
let Some(delegate) = delegate else { break };
- let did_open_ssh_project = cx
+ let did_open_project = cx
.update(|cx| {
- workspace::open_ssh_project_with_new_connection(
+ workspace::open_remote_project_with_new_connection(
window,
connection_options.clone(),
cancel_rx,
@@ -654,19 +653,22 @@ pub async fn open_ssh_project(
window
.update(cx, |workspace, _, cx| {
- if let Some(ui) = workspace.active_modal::<SshConnectionModal>(cx) {
+ if let Some(ui) = workspace.active_modal::<RemoteConnectionModal>(cx) {
ui.update(cx, |modal, cx| modal.finished(cx))
}
})
.ok();
- if let Err(e) = did_open_ssh_project {
+ if let Err(e) = did_open_project {
log::error!("Failed to open project: {e:?}");
let response = window
.update(cx, |_, window, cx| {
window.prompt(
PromptLevel::Critical,
- "Failed to connect over SSH",
+ match connection_options {
+ RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH",
+ RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
+ },
Some(&e.to_string()),
&["Retry", "Ok"],
cx,
@@ -681,9 +683,9 @@ pub async fn open_ssh_project(
window
.update(cx, |workspace, _, cx| {
- if let Some(client) = workspace.project().read(cx).ssh_client().clone() {
+ if let Some(client) = workspace.project().read(cx).remote_client() {
ExtensionStore::global(cx)
- .update(cx, |store, cx| store.register_ssh_client(client, cx));
+ .update(cx, |store, cx| store.register_remote_client(client, cx));
}
})
.ok();
@@ -1,75 +1,57 @@
-use std::any::Any;
-use std::borrow::Cow;
-use std::collections::BTreeSet;
-use std::path::PathBuf;
-use std::rc::Rc;
-use std::sync::Arc;
-use std::sync::atomic;
-use std::sync::atomic::AtomicUsize;
-
+use crate::{
+ remote_connections::{
+ RemoteConnectionModal, RemoteConnectionPrompt, RemoteSettingsContent, SshConnection,
+ SshConnectionHeader, SshProject, SshSettings, connect_over_ssh, open_remote_project,
+ },
+ ssh_config::parse_ssh_config_hosts,
+};
use editor::Editor;
use file_finder::OpenPathDelegate;
-use futures::FutureExt;
-use futures::channel::oneshot;
-use futures::future::Shared;
-use futures::select;
-use gpui::ClickEvent;
-use gpui::ClipboardItem;
-use gpui::Subscription;
-use gpui::Task;
-use gpui::WeakEntity;
-use gpui::canvas;
+use futures::{FutureExt, channel::oneshot, future::Shared, select};
use gpui::{
- AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
- PromptLevel, ScrollHandle, Window,
+ AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter,
+ FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, WeakEntity, Window,
+ canvas,
};
-use paths::global_ssh_config_file;
-use paths::user_ssh_config_file;
+use paths::{global_ssh_config_file, user_ssh_config_file};
use picker::Picker;
-use project::Fs;
-use project::Project;
-use remote::ssh_session::ConnectionIdentifier;
-use remote::{SshConnectionOptions, SshRemoteClient};
-use settings::Settings;
-use settings::SettingsStore;
-use settings::update_settings_file;
-use settings::watch_config_file;
+use project::{Fs, Project};
+use remote::{
+ RemoteClient, RemoteConnectionOptions, SshConnectionOptions,
+ remote_client::ConnectionIdentifier,
+};
+use settings::{Settings, SettingsStore, update_settings_file, watch_config_file};
use smol::stream::StreamExt as _;
-use ui::Navigable;
-use ui::NavigableEntry;
+use std::{
+ any::Any,
+ borrow::Cow,
+ collections::BTreeSet,
+ path::PathBuf,
+ rc::Rc,
+ sync::{
+ Arc,
+ atomic::{self, AtomicUsize},
+ },
+};
use ui::{
- IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Scrollbar, ScrollbarState,
- Section, Tooltip, prelude::*,
+ IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Navigable, NavigableEntry,
+ Scrollbar, ScrollbarState, Section, Tooltip, prelude::*,
};
use util::{
ResultExt,
paths::{PathStyle, RemotePathBuf},
};
-use workspace::OpenOptions;
-use workspace::Toast;
-use workspace::notifications::NotificationId;
use workspace::{
- ModalView, Workspace, notifications::DetachAndPromptErr,
- open_ssh_project_with_existing_connection,
+ ModalView, OpenOptions, Toast, Workspace,
+ notifications::{DetachAndPromptErr, NotificationId},
+ open_remote_project_with_existing_connection,
};
-use crate::ssh_config::parse_ssh_config_hosts;
-use crate::ssh_connections::RemoteSettingsContent;
-use crate::ssh_connections::SshConnection;
-use crate::ssh_connections::SshConnectionHeader;
-use crate::ssh_connections::SshConnectionModal;
-use crate::ssh_connections::SshProject;
-use crate::ssh_connections::SshPrompt;
-use crate::ssh_connections::SshSettings;
-use crate::ssh_connections::connect_over_ssh;
-use crate::ssh_connections::open_ssh_project;
-
-mod navigation_base {}
pub struct RemoteServerProjects {
mode: Mode,
focus_handle: FocusHandle,
workspace: WeakEntity<Workspace>,
- retained_connections: Vec<Entity<SshRemoteClient>>,
+ retained_connections: Vec<Entity<RemoteClient>>,
ssh_config_updates: Task<()>,
ssh_config_servers: BTreeSet<SharedString>,
create_new_window: bool,
@@ -79,7 +61,7 @@ pub struct RemoteServerProjects {
struct CreateRemoteServer {
address_editor: Entity<Editor>,
address_error: Option<SharedString>,
- ssh_prompt: Option<Entity<SshPrompt>>,
+ ssh_prompt: Option<Entity<RemoteConnectionPrompt>>,
_creating: Option<Task<Option<()>>>,
}
@@ -119,7 +101,7 @@ impl EditNicknameState {
let starting_text = SshSettings::get_global(cx)
.ssh_connections()
.nth(index)
- .and_then(|state| state.nickname.clone())
+ .and_then(|state| state.nickname)
.filter(|text| !text.is_empty());
this.editor.update(cx, |this, cx| {
this.set_placeholder_text("Add a nickname for this server", cx);
@@ -165,7 +147,7 @@ impl ProjectPicker {
let nickname = connection.nickname.clone().map(|nick| nick.into());
let _path_task = cx
.spawn_in(window, {
- let workspace = workspace.clone();
+ let workspace = workspace;
async move |this, cx| {
let Ok(Some(paths)) = rx.await else {
workspace
@@ -222,8 +204,13 @@ impl ProjectPicker {
})
.log_err()?;
- open_ssh_project_with_existing_connection(
- connection, project, paths, app_state, window, cx,
+ open_remote_project_with_existing_connection(
+ RemoteConnectionOptions::Ssh(connection),
+ project,
+ paths,
+ app_state,
+ window,
+ cx,
)
.await
.log_err();
@@ -472,7 +459,14 @@ impl RemoteServerProjects {
return;
}
};
- let ssh_prompt = cx.new(|cx| SshPrompt::new(&connection_options, window, cx));
+ let ssh_prompt = cx.new(|cx| {
+ RemoteConnectionPrompt::new(
+ connection_options.connection_string(),
+ connection_options.nickname.clone(),
+ window,
+ cx,
+ )
+ });
let connection = connect_over_ssh(
ConnectionIdentifier::setup(),
@@ -520,7 +514,7 @@ impl RemoteServerProjects {
self.mode = Mode::CreateRemoteServer(CreateRemoteServer {
address_editor: editor,
address_error: None,
- ssh_prompt: Some(ssh_prompt.clone()),
+ ssh_prompt: Some(ssh_prompt),
_creating: Some(creating),
});
}
@@ -552,15 +546,20 @@ impl RemoteServerProjects {
};
let create_new_window = self.create_new_window;
- let connection_options = ssh_connection.into();
+ let connection_options: SshConnectionOptions = ssh_connection.into();
workspace.update(cx, |_, cx| {
cx.defer_in(window, move |workspace, window, cx| {
let app_state = workspace.app_state().clone();
workspace.toggle_modal(window, cx, |window, cx| {
- SshConnectionModal::new(&connection_options, Vec::new(), window, cx)
+ RemoteConnectionModal::new(
+ &RemoteConnectionOptions::Ssh(connection_options.clone()),
+ Vec::new(),
+ window,
+ cx,
+ )
});
let prompt = workspace
- .active_modal::<SshConnectionModal>(cx)
+ .active_modal::<RemoteConnectionModal>(cx)
.unwrap()
.read(cx)
.prompt
@@ -579,7 +578,7 @@ impl RemoteServerProjects {
let session = connect.await;
workspace.update(cx, |workspace, cx| {
- if let Some(prompt) = workspace.active_modal::<SshConnectionModal>(cx) {
+ if let Some(prompt) = workspace.active_modal::<RemoteConnectionModal>(cx) {
prompt.update(cx, |prompt, cx| prompt.finished(cx))
}
})?;
@@ -597,7 +596,7 @@ impl RemoteServerProjects {
let (path_style, project) = cx.update(|_, cx| {
(
session.read(cx).path_style(),
- project::Project::ssh(
+ project::Project::remote(
session,
app_state.client.clone(),
app_state.node_runtime.clone(),
@@ -843,7 +842,7 @@ impl RemoteServerProjects {
.start_slot(Icon::new(IconName::Plus).color(Color::Muted))
.child(Label::new("Open Folder"))
.on_click(cx.listener({
- let ssh_connection = connection.clone();
+ let ssh_connection = connection;
let host = host.clone();
move |this, _, window, cx| {
let new_ix = this.create_host_from_ssh_config(&host, cx);
@@ -898,8 +897,8 @@ impl RemoteServerProjects {
};
cx.spawn_in(window, async move |_, cx| {
- let result = open_ssh_project(
- server.into(),
+ let result = open_remote_project(
+ RemoteConnectionOptions::Ssh(server.into()),
project.paths.into_iter().map(PathBuf::from).collect(),
app_state,
OpenOptions {
@@ -1376,7 +1375,7 @@ impl RemoteServerProjects {
};
let connection_string = connection.host.clone();
- let nickname = connection.nickname.clone().map(|s| s.into());
+ let nickname = connection.nickname.map(|s| s.into());
v_flex()
.id("ssh-edit-nickname")
@@ -31,8 +31,8 @@ pub async fn read_message<S: AsyncRead + Unpin>(
stream.read_exact(buffer).await?;
let len = message_len_from_buffer(buffer);
- let result = read_message_with_len(stream, buffer, len).await;
- result
+
+ read_message_with_len(stream, buffer, len).await
}
pub async fn write_message<S: AsyncWrite + Unpin>(
@@ -51,6 +51,16 @@ pub async fn write_message<S: AsyncWrite + Unpin>(
Ok(())
}
+pub async fn write_size_prefixed_buffer<S: AsyncWrite + Unpin>(
+ stream: &mut S,
+ buffer: &mut Vec<u8>,
+) -> Result<()> {
+ let len = buffer.len() as u32;
+ stream.write_all(len.to_le_bytes().as_slice()).await?;
+ stream.write_all(buffer).await?;
+ Ok(())
+}
+
pub async fn read_message_raw<S: AsyncRead + Unpin>(
stream: &mut S,
buffer: &mut Vec<u8>,
@@ -1,9 +1,12 @@
pub mod json_log;
pub mod protocol;
pub mod proxy;
-pub mod ssh_session;
+pub mod remote_client;
+mod transport;
-pub use ssh_session::{
- ConnectionState, SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient,
- SshRemoteEvent,
+pub use remote_client::{
+ ConnectionIdentifier, ConnectionState, RemoteClient, RemoteClientDelegate, RemoteClientEvent,
+ RemoteConnectionOptions, RemotePlatform,
};
+pub use transport::ssh::{SshConnectionOptions, SshPortForwardOption};
+pub use transport::wsl::WslConnectionOptions;
@@ -0,0 +1,1541 @@
+use crate::{
+ SshConnectionOptions,
+ protocol::MessageId,
+ proxy::ProxyLaunchError,
+ transport::{
+ ssh::SshRemoteConnection,
+ wsl::{WslConnectionOptions, WslRemoteConnection},
+ },
+};
+use anyhow::{Context as _, Result, anyhow};
+use async_trait::async_trait;
+use collections::HashMap;
+use futures::{
+ Future, FutureExt as _, StreamExt as _,
+ channel::{
+ mpsc::{self, Sender, UnboundedReceiver, UnboundedSender},
+ oneshot,
+ },
+ future::{BoxFuture, Shared},
+ select, select_biased,
+};
+use gpui::{
+ App, AppContext as _, AsyncApp, BackgroundExecutor, BorrowAppContext, Context, Entity,
+ EventEmitter, Global, SemanticVersion, Task, WeakEntity,
+};
+use parking_lot::Mutex;
+
+use release_channel::ReleaseChannel;
+use rpc::{
+ AnyProtoClient, ErrorExt, ProtoClient, ProtoMessageHandlerSet, RpcError,
+ proto::{self, Envelope, EnvelopedMessage, PeerId, RequestMessage, build_typed_envelope},
+};
+use std::{
+ collections::VecDeque,
+ fmt,
+ ops::ControlFlow,
+ path::PathBuf,
+ sync::{
+ Arc, Weak,
+ atomic::{AtomicU32, AtomicU64, Ordering::SeqCst},
+ },
+ time::{Duration, Instant},
+};
+use util::{
+ ResultExt,
+ paths::{PathStyle, RemotePathBuf},
+};
+
+#[derive(Copy, Clone, Debug)]
+pub struct RemotePlatform {
+ pub os: &'static str,
+ pub arch: &'static str,
+}
+
+#[derive(Clone, Debug)]
+pub struct CommandTemplate {
+ pub program: String,
+ pub args: Vec<String>,
+ pub env: HashMap<String, String>,
+}
+
+pub trait RemoteClientDelegate: Send + Sync {
+ fn ask_password(&self, prompt: String, tx: oneshot::Sender<String>, cx: &mut AsyncApp);
+ fn get_download_params(
+ &self,
+ platform: RemotePlatform,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<Option<(String, String)>>>;
+ fn download_server_binary_locally(
+ &self,
+ platform: RemotePlatform,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<PathBuf>>;
+ fn set_status(&self, status: Option<&str>, cx: &mut AsyncApp);
+}
+
+const MAX_MISSED_HEARTBEATS: usize = 5;
+const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5);
+const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(5);
+
+const MAX_RECONNECT_ATTEMPTS: usize = 3;
+
+enum State {
+ Connecting,
+ Connected {
+ ssh_connection: Arc<dyn RemoteConnection>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+
+ multiplex_task: Task<Result<()>>,
+ heartbeat_task: Task<Result<()>>,
+ },
+ HeartbeatMissed {
+ missed_heartbeats: usize,
+
+ ssh_connection: Arc<dyn RemoteConnection>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+
+ multiplex_task: Task<Result<()>>,
+ heartbeat_task: Task<Result<()>>,
+ },
+ Reconnecting,
+ ReconnectFailed {
+ ssh_connection: Arc<dyn RemoteConnection>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+
+ error: anyhow::Error,
+ attempts: usize,
+ },
+ ReconnectExhausted,
+ ServerNotRunning,
+}
+
+impl fmt::Display for State {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Connecting => write!(f, "connecting"),
+ Self::Connected { .. } => write!(f, "connected"),
+ Self::Reconnecting => write!(f, "reconnecting"),
+ Self::ReconnectFailed { .. } => write!(f, "reconnect failed"),
+ Self::ReconnectExhausted => write!(f, "reconnect exhausted"),
+ Self::HeartbeatMissed { .. } => write!(f, "heartbeat missed"),
+ Self::ServerNotRunning { .. } => write!(f, "server not running"),
+ }
+ }
+}
+
+impl State {
+ fn remote_connection(&self) -> Option<Arc<dyn RemoteConnection>> {
+ match self {
+ Self::Connected { ssh_connection, .. } => Some(ssh_connection.clone()),
+ Self::HeartbeatMissed { ssh_connection, .. } => Some(ssh_connection.clone()),
+ Self::ReconnectFailed { ssh_connection, .. } => Some(ssh_connection.clone()),
+ _ => None,
+ }
+ }
+
+ fn can_reconnect(&self) -> bool {
+ match self {
+ Self::Connected { .. }
+ | Self::HeartbeatMissed { .. }
+ | Self::ReconnectFailed { .. } => true,
+ State::Connecting
+ | State::Reconnecting
+ | State::ReconnectExhausted
+ | State::ServerNotRunning => false,
+ }
+ }
+
+ fn is_reconnect_failed(&self) -> bool {
+ matches!(self, Self::ReconnectFailed { .. })
+ }
+
+ fn is_reconnect_exhausted(&self) -> bool {
+ matches!(self, Self::ReconnectExhausted { .. })
+ }
+
+ fn is_server_not_running(&self) -> bool {
+ matches!(self, Self::ServerNotRunning)
+ }
+
+ fn is_reconnecting(&self) -> bool {
+ matches!(self, Self::Reconnecting { .. })
+ }
+
+ fn heartbeat_recovered(self) -> Self {
+ match self {
+ Self::HeartbeatMissed {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ ..
+ } => Self::Connected {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ },
+ _ => self,
+ }
+ }
+
+ fn heartbeat_missed(self) -> Self {
+ match self {
+ Self::Connected {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ } => Self::HeartbeatMissed {
+ missed_heartbeats: 1,
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ },
+ Self::HeartbeatMissed {
+ missed_heartbeats,
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ } => Self::HeartbeatMissed {
+ missed_heartbeats: missed_heartbeats + 1,
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ },
+ _ => self,
+ }
+ }
+}
+
+/// The state of the ssh connection.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum ConnectionState {
+ Connecting,
+ Connected,
+ HeartbeatMissed,
+ Reconnecting,
+ Disconnected,
+}
+
+impl From<&State> for ConnectionState {
+ fn from(value: &State) -> Self {
+ match value {
+ State::Connecting => Self::Connecting,
+ State::Connected { .. } => Self::Connected,
+ State::Reconnecting | State::ReconnectFailed { .. } => Self::Reconnecting,
+ State::HeartbeatMissed { .. } => Self::HeartbeatMissed,
+ State::ReconnectExhausted => Self::Disconnected,
+ State::ServerNotRunning => Self::Disconnected,
+ }
+ }
+}
+
+pub struct RemoteClient {
+ client: Arc<ChannelClient>,
+ unique_identifier: String,
+ connection_options: RemoteConnectionOptions,
+ path_style: PathStyle,
+ state: Option<State>,
+}
+
+#[derive(Debug)]
+pub enum RemoteClientEvent {
+ Disconnected,
+}
+
+impl EventEmitter<RemoteClientEvent> for RemoteClient {}
+
+// Identifies the socket on the remote server so that reconnects
+// can re-join the same project.
+pub enum ConnectionIdentifier {
+ Setup(u64),
+ Workspace(i64),
+}
+
+static NEXT_ID: AtomicU64 = AtomicU64::new(1);
+
+impl ConnectionIdentifier {
+ pub fn setup() -> Self {
+ Self::Setup(NEXT_ID.fetch_add(1, SeqCst))
+ }
+
+ // This string gets used in a socket name, and so must be relatively short.
+ // The total length of:
+ // /home/{username}/.local/share/zed/server_state/{name}/stdout.sock
+ // Must be less than about 100 characters
+ // https://unix.stackexchange.com/questions/367008/why-is-socket-path-length-limited-to-a-hundred-chars
+ // So our strings should be at most 20 characters or so.
+ fn to_string(&self, cx: &App) -> String {
+ let identifier_prefix = match ReleaseChannel::global(cx) {
+ ReleaseChannel::Stable => "".to_string(),
+ release_channel => format!("{}-", release_channel.dev_name()),
+ };
+ match self {
+ Self::Setup(setup_id) => format!("{identifier_prefix}setup-{setup_id}"),
+ Self::Workspace(workspace_id) => {
+ format!("{identifier_prefix}workspace-{workspace_id}",)
+ }
+ }
+ }
+}
+
+impl RemoteClient {
+ pub fn ssh(
+ unique_identifier: ConnectionIdentifier,
+ connection_options: SshConnectionOptions,
+ cancellation: oneshot::Receiver<()>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut App,
+ ) -> Task<Result<Option<Entity<Self>>>> {
+ Self::new(
+ unique_identifier,
+ RemoteConnectionOptions::Ssh(connection_options),
+ cancellation,
+ delegate,
+ cx,
+ )
+ }
+
+ pub fn new(
+ unique_identifier: ConnectionIdentifier,
+ connection_options: RemoteConnectionOptions,
+ cancellation: oneshot::Receiver<()>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut App,
+ ) -> Task<Result<Option<Entity<Self>>>> {
+ let unique_identifier = unique_identifier.to_string(cx);
+ cx.spawn(async move |cx| {
+ let success = Box::pin(async move {
+ let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
+ let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
+ let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
+
+ let client =
+ cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "client"))?;
+
+ let ssh_connection = cx
+ .update(|cx| {
+ cx.update_default_global(|pool: &mut ConnectionPool, cx| {
+ pool.connect(connection_options.clone(), &delegate, cx)
+ })
+ })?
+ .await
+ .map_err(|e| e.cloned())?;
+
+ let path_style = ssh_connection.path_style();
+ let this = cx.new(|_| Self {
+ client: client.clone(),
+ unique_identifier: unique_identifier.clone(),
+ connection_options,
+ path_style,
+ state: Some(State::Connecting),
+ })?;
+
+ let io_task = ssh_connection.start_proxy(
+ unique_identifier,
+ false,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ delegate.clone(),
+ cx,
+ );
+
+ let multiplex_task = Self::monitor(this.downgrade(), io_task, cx);
+
+ if let Err(error) = client.ping(HEARTBEAT_TIMEOUT).await {
+ log::error!("failed to establish connection: {}", error);
+ return Err(error);
+ }
+
+ let heartbeat_task = Self::heartbeat(this.downgrade(), connection_activity_rx, cx);
+
+ this.update(cx, |this, _| {
+ this.state = Some(State::Connected {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ });
+ })?;
+
+ Ok(Some(this))
+ });
+
+ select! {
+ _ = cancellation.fuse() => {
+ Ok(None)
+ }
+ result = success.fuse() => result
+ }
+ })
+ }
+
+ pub fn proto_client_from_channels(
+ incoming_rx: mpsc::UnboundedReceiver<Envelope>,
+ outgoing_tx: mpsc::UnboundedSender<Envelope>,
+ cx: &App,
+ name: &'static str,
+ ) -> AnyProtoClient {
+ ChannelClient::new(incoming_rx, outgoing_tx, cx, name).into()
+ }
+
+ pub fn shutdown_processes<T: RequestMessage>(
+ &mut self,
+ shutdown_request: Option<T>,
+ executor: BackgroundExecutor,
+ ) -> Option<impl Future<Output = ()> + use<T>> {
+ let state = self.state.take()?;
+ log::info!("shutting down ssh processes");
+
+ let State::Connected {
+ multiplex_task,
+ heartbeat_task,
+ ssh_connection,
+ delegate,
+ } = state
+ else {
+ return None;
+ };
+
+ let client = self.client.clone();
+
+ Some(async move {
+ if let Some(shutdown_request) = shutdown_request {
+ client.send(shutdown_request).log_err();
+ // We wait 50ms instead of waiting for a response, because
+ // waiting for a response would require us to wait on the main thread
+ // which we want to avoid in an `on_app_quit` callback.
+ executor.timer(Duration::from_millis(50)).await;
+ }
+
+ // Drop `multiplex_task` because it owns our ssh_proxy_process, which is a
+ // child of master_process.
+ drop(multiplex_task);
+ // Now drop the rest of state, which kills master process.
+ drop(heartbeat_task);
+ drop(ssh_connection);
+ drop(delegate);
+ })
+ }
+
+ fn reconnect(&mut self, cx: &mut Context<Self>) -> Result<()> {
+ let can_reconnect = self
+ .state
+ .as_ref()
+ .map(|state| state.can_reconnect())
+ .unwrap_or(false);
+ if !can_reconnect {
+ log::info!("aborting reconnect, because not in state that allows reconnecting");
+ let error = if let Some(state) = self.state.as_ref() {
+ format!("invalid state, cannot reconnect while in state {state}")
+ } else {
+ "no state set".to_string()
+ };
+ anyhow::bail!(error);
+ }
+
+ let state = self.state.take().unwrap();
+ let (attempts, remote_connection, delegate) = match state {
+ State::Connected {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ }
+ | State::HeartbeatMissed {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task,
+ ..
+ } => {
+ drop(multiplex_task);
+ drop(heartbeat_task);
+ (0, ssh_connection, delegate)
+ }
+ State::ReconnectFailed {
+ attempts,
+ ssh_connection,
+ delegate,
+ ..
+ } => (attempts, ssh_connection, delegate),
+ State::Connecting
+ | State::Reconnecting
+ | State::ReconnectExhausted
+ | State::ServerNotRunning => unreachable!(),
+ };
+
+ let attempts = attempts + 1;
+ if attempts > MAX_RECONNECT_ATTEMPTS {
+ log::error!(
+ "Failed to reconnect to after {} attempts, giving up",
+ MAX_RECONNECT_ATTEMPTS
+ );
+ self.set_state(State::ReconnectExhausted, cx);
+ return Ok(());
+ }
+
+ self.set_state(State::Reconnecting, cx);
+
+ log::info!("Trying to reconnect to ssh server... Attempt {}", attempts);
+
+ let unique_identifier = self.unique_identifier.clone();
+ let client = self.client.clone();
+ let reconnect_task = cx.spawn(async move |this, cx| {
+ macro_rules! failed {
+ ($error:expr, $attempts:expr, $ssh_connection:expr, $delegate:expr) => {
+ return State::ReconnectFailed {
+ error: anyhow!($error),
+ attempts: $attempts,
+ ssh_connection: $ssh_connection,
+ delegate: $delegate,
+ };
+ };
+ }
+
+ if let Err(error) = remote_connection
+ .kill()
+ .await
+ .context("Failed to kill ssh process")
+ {
+ failed!(error, attempts, remote_connection, delegate);
+ };
+
+ let connection_options = remote_connection.connection_options();
+
+ let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
+ let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
+ let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
+
+ let (ssh_connection, io_task) = match async {
+ let ssh_connection = cx
+ .update_global(|pool: &mut ConnectionPool, cx| {
+ pool.connect(connection_options, &delegate, cx)
+ })?
+ .await
+ .map_err(|error| error.cloned())?;
+
+ let io_task = ssh_connection.start_proxy(
+ unique_identifier,
+ true,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ delegate.clone(),
+ cx,
+ );
+ anyhow::Ok((ssh_connection, io_task))
+ }
+ .await
+ {
+ Ok((ssh_connection, io_task)) => (ssh_connection, io_task),
+ Err(error) => {
+ failed!(error, attempts, remote_connection, delegate);
+ }
+ };
+
+ let multiplex_task = Self::monitor(this.clone(), io_task, cx);
+ client.reconnect(incoming_rx, outgoing_tx, cx);
+
+ if let Err(error) = client.resync(HEARTBEAT_TIMEOUT).await {
+ failed!(error, attempts, ssh_connection, delegate);
+ };
+
+ State::Connected {
+ ssh_connection,
+ delegate,
+ multiplex_task,
+ heartbeat_task: Self::heartbeat(this.clone(), connection_activity_rx, cx),
+ }
+ });
+
+ cx.spawn(async move |this, cx| {
+ let new_state = reconnect_task.await;
+ this.update(cx, |this, cx| {
+ this.try_set_state(cx, |old_state| {
+ if old_state.is_reconnecting() {
+ match &new_state {
+ State::Connecting
+ | State::Reconnecting
+ | State::HeartbeatMissed { .. }
+ | State::ServerNotRunning => {}
+ State::Connected { .. } => {
+ log::info!("Successfully reconnected");
+ }
+ State::ReconnectFailed {
+ error, attempts, ..
+ } => {
+ log::error!(
+ "Reconnect attempt {} failed: {:?}. Starting new attempt...",
+ attempts,
+ error
+ );
+ }
+ State::ReconnectExhausted => {
+ log::error!("Reconnect attempt failed and all attempts exhausted");
+ }
+ }
+ Some(new_state)
+ } else {
+ None
+ }
+ });
+
+ if this.state_is(State::is_reconnect_failed) {
+ this.reconnect(cx)
+ } else if this.state_is(State::is_reconnect_exhausted) {
+ Ok(())
+ } else {
+ log::debug!("State has transition from Reconnecting into new state while attempting reconnect.");
+ Ok(())
+ }
+ })
+ })
+ .detach_and_log_err(cx);
+
+ Ok(())
+ }
+
+ /// Liveness monitor for the remote connection. Any message on
+ /// `connection_activity_rx` counts as proof of life; otherwise an explicit
+ /// ping is sent every `HEARTBEAT_INTERVAL`, and ping failures are tallied as
+ /// missed heartbeats which are reported to the entity via
+ /// `handle_heartbeat_result` (which triggers a reconnect once the limit is
+ /// reached). Returns an already-failed task if the client entity is gone.
+ fn heartbeat(
+ this: WeakEntity<Self>,
+ mut connection_activity_rx: mpsc::Receiver<()>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<()>> {
+ let Ok(client) = this.read_with(cx, |this, _| this.client.clone()) else {
+ return Task::ready(Err(anyhow!("SshRemoteClient lost")));
+ };
+
+ cx.spawn(async move |cx| {
+ let mut missed_heartbeats = 0;
+
+ let keepalive_timer = cx.background_executor().timer(HEARTBEAT_INTERVAL).fuse();
+ futures::pin_mut!(keepalive_timer);
+
+ loop {
+ select_biased! {
+ result = connection_activity_rx.next().fuse() => {
+ if result.is_none() {
+ log::warn!("ssh heartbeat: connection activity channel has been dropped. stopping.");
+ return Ok(());
+ }
+
+ // Traffic arrived: the connection is alive. If we had been
+ // counting misses, reset the count and notify the entity so
+ // it can transition back to a healthy state.
+ if missed_heartbeats != 0 {
+ missed_heartbeats = 0;
+ let _ =this.update(cx, |this, cx| {
+ this.handle_heartbeat_result(missed_heartbeats, cx)
+ })?;
+ }
+ }
+ _ = keepalive_timer => {
+ log::debug!("Sending heartbeat to server...");
+
+ // Race the ping against organic activity: any incoming
+ // traffic during the ping window also counts as success.
+ let result = select_biased! {
+ _ = connection_activity_rx.next().fuse() => {
+ Ok(())
+ }
+ ping_result = client.ping(HEARTBEAT_TIMEOUT).fuse() => {
+ ping_result
+ }
+ };
+
+ if result.is_err() {
+ missed_heartbeats += 1;
+ log::warn!(
+ "No heartbeat from server after {:?}. Missed heartbeat {} out of {}.",
+ HEARTBEAT_TIMEOUT,
+ missed_heartbeats,
+ MAX_MISSED_HEARTBEATS
+ );
+ } else if missed_heartbeats != 0 {
+ missed_heartbeats = 0;
+ } else {
+ // Healthy and already at zero misses: nothing to report.
+ continue;
+ }
+
+ // Report the new miss count; Break means the entity has
+ // started reconnecting and this monitor should stop.
+ let result = this.update(cx, |this, cx| {
+ this.handle_heartbeat_result(missed_heartbeats, cx)
+ })?;
+ if result.is_break() {
+ return Ok(());
+ }
+ }
+ }
+
+ // Re-arm the keepalive timer for the next interval.
+ keepalive_timer.set(cx.background_executor().timer(HEARTBEAT_INTERVAL).fuse());
+ }
+ })
+ }
+
+ /// Applies a heartbeat outcome to the connection state machine.
+ /// A nonzero `missed_heartbeats` moves the state towards `HeartbeatMissed`;
+ /// zero recovers it. Once `MAX_MISSED_HEARTBEATS` is reached a reconnect is
+ /// started and `Break` is returned so the heartbeat loop stops; otherwise
+ /// `Continue` keeps it running.
+ /// NOTE(review): `self.state.take().unwrap()` assumes `state` is always
+ /// `Some` when a heartbeat result arrives — panics otherwise.
+ fn handle_heartbeat_result(
+ &mut self,
+ missed_heartbeats: usize,
+ cx: &mut Context<Self>,
+ ) -> ControlFlow<()> {
+ let state = self.state.take().unwrap();
+ let next_state = if missed_heartbeats > 0 {
+ state.heartbeat_missed()
+ } else {
+ state.heartbeat_recovered()
+ };
+
+ self.set_state(next_state, cx);
+
+ if missed_heartbeats >= MAX_MISSED_HEARTBEATS {
+ log::error!(
+ "Missed last {} heartbeats. Reconnecting...",
+ missed_heartbeats
+ );
+
+ // Best-effort: a failure to start the reconnect is logged, not fatal.
+ self.reconnect(cx)
+ .context("failed to start reconnect process after missing heartbeats")
+ .log_err();
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
+
+ /// Watches the proxy-process I/O task until it finishes. A recognized
+ /// `ProxyLaunchError` exit code marks the server as not running; any other
+ /// nonzero exit, or an error from the task itself, triggers a reconnect.
+ fn monitor(
+ this: WeakEntity<Self>,
+ io_task: Task<Result<i32>>,
+ cx: &AsyncApp,
+ ) -> Task<Result<()>> {
+ cx.spawn(async move |cx| {
+ let result = io_task.await;
+
+ match result {
+ Ok(exit_code) => {
+ if let Some(error) = ProxyLaunchError::from_exit_code(exit_code) {
+ match error {
+ ProxyLaunchError::ServerNotRunning => {
+ // Terminal state: reconnecting would be pointless.
+ log::error!("failed to reconnect because server is not running");
+ this.update(cx, |this, cx| {
+ this.set_state(State::ServerNotRunning, cx);
+ })?;
+ }
+ }
+ } else if exit_code > 0 {
+ log::error!("proxy process terminated unexpectedly");
+ this.update(cx, |this, cx| {
+ this.reconnect(cx).ok();
+ })?;
+ }
+ }
+ Err(error) => {
+ log::warn!("ssh io task died with error: {:?}. reconnecting...", error);
+ this.update(cx, |this, cx| {
+ this.reconnect(cx).ok();
+ })?;
+ }
+ }
+
+ Ok(())
+ })
+ }
+
+ /// Returns true if the current state is present and satisfies `check`.
+ fn state_is(&self, check: impl FnOnce(&State) -> bool) -> bool {
+ self.state.as_ref().is_some_and(check)
+ }
+
+ /// Conditionally replaces the state: `map` inspects the current state and
+ /// returns `Some(new_state)` to transition (notifying observers) or `None`
+ /// to leave the state untouched.
+ fn try_set_state(&mut self, cx: &mut Context<Self>, map: impl FnOnce(&State) -> Option<State>) {
+ let new_state = self.state.as_ref().and_then(map);
+ if let Some(new_state) = new_state {
+ self.state.replace(new_state);
+ cx.notify();
+ }
+ }
+
+ /// Unconditionally sets the connection state, emitting `Disconnected` when
+ /// the new state is terminal (reconnect exhausted or server not running),
+ /// and notifying observers either way.
+ fn set_state(&mut self, state: State, cx: &mut Context<Self>) {
+ log::info!("setting state to '{}'", &state);
+
+ // Capture terminal-state flags before `state` is moved into the slot.
+ let is_reconnect_exhausted = state.is_reconnect_exhausted();
+ let is_server_not_running = state.is_server_not_running();
+ self.state.replace(state);
+
+ if is_reconnect_exhausted || is_server_not_running {
+ cx.emit(RemoteClientEvent::Disconnected);
+ }
+ cx.notify();
+ }
+
+ /// The remote shell reported by the underlying connection, or `None` when
+ /// there is no live connection in the current state.
+ pub fn shell(&self) -> Option<String> {
+ Some(self.state.as_ref()?.remote_connection()?.shell())
+ }
+
+ /// Whether the remote shares this machine's network interface (per the
+ /// underlying connection); `false` when disconnected.
+ pub fn shares_network_interface(&self) -> bool {
+ self.state
+ .as_ref()
+ .and_then(|state| state.remote_connection())
+ .map_or(false, |connection| connection.shares_network_interface())
+ }
+
+ /// Builds a command template for running `program` with `args`/`env` on the
+ /// remote, optionally in `working_dir` and with a port forward
+ /// (local port, remote host, remote port). Fails if there is no live
+ /// connection. Delegates entirely to the connection implementation.
+ pub fn build_command(
+ &self,
+ program: Option<String>,
+ args: &[String],
+ env: &HashMap<String, String>,
+ working_dir: Option<String>,
+ port_forward: Option<(u16, String, u16)>,
+ ) -> Result<CommandTemplate> {
+ let Some(connection) = self
+ .state
+ .as_ref()
+ .and_then(|state| state.remote_connection())
+ else {
+ return Err(anyhow!("no connection"));
+ };
+ connection.build_command(program, args, env, working_dir, port_forward)
+ }
+
+ /// Uploads a local directory to `dest_path` on the remote via the live
+ /// connection; returns a ready-failed task when disconnected.
+ pub fn upload_directory(
+ &self,
+ src_path: PathBuf,
+ dest_path: RemotePathBuf,
+ cx: &App,
+ ) -> Task<Result<()>> {
+ let Some(connection) = self
+ .state
+ .as_ref()
+ .and_then(|state| state.remote_connection())
+ else {
+ return Task::ready(Err(anyhow!("no ssh connection")));
+ };
+ connection.upload_directory(src_path, dest_path, cx)
+ }
+
+ /// The channel client wrapped as a type-erased proto client.
+ pub fn proto_client(&self) -> AnyProtoClient {
+ self.client.clone().into()
+ }
+
+ /// The options this client was created with.
+ pub fn connection_options(&self) -> RemoteConnectionOptions {
+ self.connection_options.clone()
+ }
+
+ /// The current connection state, mapping an absent internal state to
+ /// `Disconnected`.
+ pub fn connection_state(&self) -> ConnectionState {
+ self.state
+ .as_ref()
+ .map(ConnectionState::from)
+ .unwrap_or(ConnectionState::Disconnected)
+ }
+
+ /// Convenience check for `ConnectionState::Disconnected`.
+ pub fn is_disconnected(&self) -> bool {
+ self.connection_state() == ConnectionState::Disconnected
+ }
+
+ /// The path style (POSIX vs. Windows) of the remote filesystem.
+ pub fn path_style(&self) -> PathStyle {
+ self.path_style
+ }
+
+ /// Test helper: looks up this client's (fake) connection in the global pool
+ /// and tells it to simulate a disconnect. Panics if the pool does not hold a
+ /// `Connecting` entry for these options (i.e. not a test-installed fake).
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn simulate_disconnect(&self, client_cx: &mut App) -> Task<()> {
+ let opts = self.connection_options();
+ client_cx.spawn(async move |cx| {
+ let connection = cx
+ .update_global(|c: &mut ConnectionPool, _| {
+ if let Some(ConnectionPoolEntry::Connecting(c)) = c.connections.get(&opts) {
+ c.clone()
+ } else {
+ panic!("missing test connection")
+ }
+ })
+ .unwrap()
+ .await
+ .unwrap();
+
+ connection.simulate_disconnect(cx);
+ })
+ }
+
+ /// Test helper: constructs a fake server-side channel client plus a
+ /// `FakeRemoteConnection`, installs the connection into the global pool
+ /// (keyed by a synthetic SSH host with a unique port), and returns the
+ /// options/client pair for a test client to connect with.
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn fake_server(
+ client_cx: &mut gpui::TestAppContext,
+ server_cx: &mut gpui::TestAppContext,
+ ) -> (RemoteConnectionOptions, AnyProtoClient) {
+ // Unique fake port per fake server, derived from the pool size.
+ let port = client_cx
+ .update(|cx| cx.default_global::<ConnectionPool>().connections.len() as u16 + 1);
+ let opts = RemoteConnectionOptions::Ssh(SshConnectionOptions {
+ host: "<fake>".to_string(),
+ port: Some(port),
+ ..Default::default()
+ });
+ // Dangling channel halves: the real wiring happens in `start_proxy`.
+ let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
+ let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
+ let server_client =
+ server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server"));
+ let connection: Arc<dyn RemoteConnection> = Arc::new(fake::FakeRemoteConnection {
+ connection_options: opts.clone(),
+ server_cx: fake::SendableCx::new(server_cx),
+ server_channel: server_client.clone(),
+ });
+
+ // Install as a perpetually-"Connecting" entry that resolves immediately.
+ client_cx.update(|cx| {
+ cx.update_default_global(|c: &mut ConnectionPool, cx| {
+ c.connections.insert(
+ opts.clone(),
+ ConnectionPoolEntry::Connecting(
+ cx.background_spawn({
+ let connection = connection.clone();
+ async move { Ok(connection.clone()) }
+ })
+ .shared(),
+ ),
+ );
+ })
+ });
+
+ (opts, server_client.into())
+ }
+
+ /// Test helper: creates a client entity connected via the fake connection
+ /// previously installed by `fake_server` for the same `opts`.
+ #[cfg(any(test, feature = "test-support"))]
+ pub async fn fake_client(
+ opts: RemoteConnectionOptions,
+ client_cx: &mut gpui::TestAppContext,
+ ) -> Entity<Self> {
+ // The cancellation channel is held open but never fired in tests.
+ let (_tx, rx) = oneshot::channel();
+ client_cx
+ .update(|cx| {
+ Self::new(
+ ConnectionIdentifier::setup(),
+ opts,
+ rx,
+ Arc::new(fake::Delegate),
+ cx,
+ )
+ })
+ .await
+ .unwrap()
+ .unwrap()
+ }
+}
+
+/// A pooled connection: either an in-flight attempt that new callers can
+/// await (shared, clonable result), or an established connection held weakly
+/// so the pool does not keep it alive.
+enum ConnectionPoolEntry {
+ Connecting(Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>>),
+ Connected(Weak<dyn RemoteConnection>),
+}
+
+/// Global registry deduplicating remote connections by their options, so
+/// concurrent connect requests for the same target share one attempt.
+#[derive(Default)]
+struct ConnectionPool {
+ connections: HashMap<RemoteConnectionOptions, ConnectionPoolEntry>,
+}
+
+// Registers the pool as a GPUI global (one instance per app).
+impl Global for ConnectionPool {}
+
+impl ConnectionPool {
+ /// Returns a shared task resolving to a connection for `opts`, reusing an
+ /// in-flight attempt or a still-alive established connection when possible;
+ /// otherwise starts a new SSH or WSL connection and records it in the pool.
+ pub fn connect(
+ &mut self,
+ opts: RemoteConnectionOptions,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut App,
+ ) -> Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>> {
+ let connection = self.connections.get(&opts);
+ match connection {
+ Some(ConnectionPoolEntry::Connecting(task)) => {
+ // Another caller is already connecting; surface a status
+ // message and piggyback on its shared task.
+ let delegate = delegate.clone();
+ cx.spawn(async move |cx| {
+ delegate.set_status(Some("Waiting for existing connection attempt"), cx);
+ })
+ .detach();
+ return task.clone();
+ }
+ Some(ConnectionPoolEntry::Connected(ssh)) => {
+ // Reuse only if the weak ref is still alive and not killed;
+ // otherwise drop the stale entry and fall through to reconnect.
+ if let Some(ssh) = ssh.upgrade()
+ && !ssh.has_been_killed()
+ {
+ return Task::ready(Ok(ssh)).shared();
+ }
+ self.connections.remove(&opts);
+ }
+ None => {}
+ }
+
+ let task = cx
+ .spawn({
+ let opts = opts.clone();
+ let delegate = delegate.clone();
+ async move |cx| {
+ let connection = match opts.clone() {
+ RemoteConnectionOptions::Ssh(opts) => {
+ SshRemoteConnection::new(opts, delegate, cx)
+ .await
+ .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>)
+ }
+ RemoteConnectionOptions::Wsl(opts) => {
+ WslRemoteConnection::new(opts, delegate, cx)
+ .await
+ .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>)
+ }
+ };
+
+ // Swap the Connecting entry for the outcome: downgrade on
+ // success (pool must not keep the connection alive), remove
+ // on failure so the next attempt starts fresh.
+ cx.update_global(|pool: &mut Self, _| {
+ debug_assert!(matches!(
+ pool.connections.get(&opts),
+ Some(ConnectionPoolEntry::Connecting(_))
+ ));
+ match connection {
+ Ok(connection) => {
+ pool.connections.insert(
+ opts.clone(),
+ ConnectionPoolEntry::Connected(Arc::downgrade(&connection)),
+ );
+ Ok(connection)
+ }
+ Err(error) => {
+ pool.connections.remove(&opts);
+ Err(Arc::new(error))
+ }
+ }
+ })?
+ }
+ })
+ .shared();
+
+ self.connections
+ .insert(opts.clone(), ConnectionPoolEntry::Connecting(task.clone()));
+ task
+ }
+}
+
+/// Options identifying a remote target: an SSH host or a WSL distribution.
+/// Used as the connection-pool key, hence `Eq + Hash`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum RemoteConnectionOptions {
+ Ssh(SshConnectionOptions),
+ Wsl(WslConnectionOptions),
+}
+
+impl RemoteConnectionOptions {
+ /// Human-readable label for the target: the SSH host name or the WSL
+ /// distribution name.
+ pub fn display_name(&self) -> String {
+ match self {
+ RemoteConnectionOptions::Ssh(opts) => opts.host.clone(),
+ RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(),
+ }
+ }
+}
+
+// Allow SSH options to be used wherever generic remote options are expected.
+impl From<SshConnectionOptions> for RemoteConnectionOptions {
+ fn from(opts: SshConnectionOptions) -> Self {
+ RemoteConnectionOptions::Ssh(opts)
+ }
+}
+
+// Allow WSL options to be used wherever generic remote options are expected.
+impl From<WslConnectionOptions> for RemoteConnectionOptions {
+ fn from(opts: WslConnectionOptions) -> Self {
+ RemoteConnectionOptions::Wsl(opts)
+ }
+}
+
+/// Transport abstraction over a remote target (real SSH/WSL, or a test fake).
+/// Implementations carry the proxy process, file transfer, and command
+/// construction for one connection.
+#[async_trait(?Send)]
+pub(crate) trait RemoteConnection: Send + Sync {
+ /// Starts the remote proxy process, wiring its stdin/stdout to the given
+ /// envelope channels; resolves with the proxy's exit code. Traffic is also
+ /// reported on `connection_activity_tx` for heartbeat tracking.
+ fn start_proxy(
+ &self,
+ unique_identifier: String,
+ reconnect: bool,
+ incoming_tx: UnboundedSender<Envelope>,
+ outgoing_rx: UnboundedReceiver<Envelope>,
+ connection_activity_tx: Sender<()>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<i32>>;
+ /// Copies a local directory tree to the remote.
+ fn upload_directory(
+ &self,
+ src_path: PathBuf,
+ dest_path: RemotePathBuf,
+ cx: &App,
+ ) -> Task<Result<()>>;
+ /// Terminates the underlying connection/process.
+ async fn kill(&self) -> Result<()>;
+ /// Whether `kill` has already been invoked (stale pool entries check this).
+ fn has_been_killed(&self) -> bool;
+ /// Whether remote and local share a network interface (e.g. WSL);
+ /// defaults to false.
+ fn shares_network_interface(&self) -> bool {
+ false
+ }
+ /// Builds the local command line that executes `program` remotely.
+ fn build_command(
+ &self,
+ program: Option<String>,
+ args: &[String],
+ env: &HashMap<String, String>,
+ working_dir: Option<String>,
+ port_forward: Option<(u16, String, u16)>,
+ ) -> Result<CommandTemplate>;
+ /// The options this connection was established with.
+ fn connection_options(&self) -> RemoteConnectionOptions;
+ /// Path style of the remote filesystem.
+ fn path_style(&self) -> PathStyle;
+ /// The remote user's shell.
+ fn shell(&self) -> String;
+
+ /// Test hook; no-op by default.
+ #[cfg(any(test, feature = "test-support"))]
+ fn simulate_disconnect(&self, _: &AsyncApp) {}
+}
+
+// Pending request map: message id -> sender that delivers the response plus a
+// completion handshake channel (see `start_handling_messages`).
+type ResponseChannels = Mutex<HashMap<MessageId, oneshot::Sender<(Envelope, oneshot::Sender<()>)>>>;
+
+/// RPC client speaking `Envelope`s over a pair of in-memory channels, with
+/// acknowledgement-based buffering so unacked messages can be replayed after a
+/// reconnect.
+struct ChannelClient {
+ // Monotonic id assigned to each outgoing envelope.
+ next_message_id: AtomicU32,
+ // Current outgoing channel; swapped wholesale on `reconnect`.
+ outgoing_tx: Mutex<mpsc::UnboundedSender<Envelope>>,
+ // Sent-but-unacknowledged envelopes, pruned as acks arrive.
+ buffer: Mutex<VecDeque<Envelope>>,
+ // Pending request/response rendezvous points.
+ response_channels: ResponseChannels,
+ // Registered handlers for unsolicited incoming messages.
+ message_handlers: Mutex<ProtoMessageHandlerSet>,
+ // Highest incoming id seen; echoed back as `ack_id` on sends.
+ max_received: AtomicU32,
+ // Label used in log lines (e.g. "client", "fake-server").
+ name: &'static str,
+ // The message-pump task; replaced on `reconnect`.
+ task: Mutex<Task<Result<()>>>,
+}
+
+impl ChannelClient {
+ /// Creates the client and immediately starts its message pump.
+ /// `Arc::new_cyclic` lets the pump hold a weak self-reference so it stops
+ /// once the client is dropped.
+ fn new(
+ incoming_rx: mpsc::UnboundedReceiver<Envelope>,
+ outgoing_tx: mpsc::UnboundedSender<Envelope>,
+ cx: &App,
+ name: &'static str,
+ ) -> Arc<Self> {
+ Arc::new_cyclic(|this| Self {
+ outgoing_tx: Mutex::new(outgoing_tx),
+ next_message_id: AtomicU32::new(0),
+ max_received: AtomicU32::new(0),
+ response_channels: ResponseChannels::default(),
+ message_handlers: Default::default(),
+ buffer: Mutex::new(VecDeque::new()),
+ name,
+ task: Mutex::new(Self::start_handling_messages(
+ this.clone(),
+ incoming_rx,
+ &cx.to_async(),
+ )),
+ })
+ }
+
+ /// Message pump: drains `incoming_rx` until it closes or the client is
+ /// dropped. Handles, in order: ack-based pruning of the resend buffer,
+ /// `FlushBufferedMessages` replay requests, response routing to pending
+ /// requests, and dispatch of unsolicited messages to registered handlers.
+ fn start_handling_messages(
+ this: Weak<Self>,
+ mut incoming_rx: mpsc::UnboundedReceiver<Envelope>,
+ cx: &AsyncApp,
+ ) -> Task<Result<()>> {
+ cx.spawn(async move |cx| {
+ let peer_id = PeerId { owner_id: 0, id: 0 };
+ while let Some(incoming) = incoming_rx.next().await {
+ let Some(this) = this.upgrade() else {
+ return anyhow::Ok(());
+ };
+ // Drop buffered envelopes the peer has acknowledged.
+ if let Some(ack_id) = incoming.ack_id {
+ let mut buffer = this.buffer.lock();
+ while buffer.front().is_some_and(|msg| msg.id <= ack_id) {
+ buffer.pop_front();
+ }
+ }
+ // Resync request: replay every unacked envelope, then ack.
+ if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = &incoming.payload
+ {
+ log::debug!(
+ "{}:ssh message received. name:FlushBufferedMessages",
+ this.name
+ );
+ {
+ let buffer = this.buffer.lock();
+ for envelope in buffer.iter() {
+ this.outgoing_tx
+ .lock()
+ .unbounded_send(envelope.clone())
+ .ok();
+ }
+ }
+ let mut envelope = proto::Ack {}.into_envelope(0, Some(incoming.id), None);
+ envelope.id = this.next_message_id.fetch_add(1, SeqCst);
+ this.outgoing_tx.lock().unbounded_send(envelope).ok();
+ continue;
+ }
+
+ this.max_received.store(incoming.id, SeqCst);
+
+ if let Some(request_id) = incoming.responding_to {
+ // Route the response to whoever is awaiting it; the extra
+ // oneshot lets the requester signal when it has consumed
+ // the response before we pump the next message.
+ let request_id = MessageId(request_id);
+ let sender = this.response_channels.lock().remove(&request_id);
+ if let Some(sender) = sender {
+ let (tx, rx) = oneshot::channel();
+ if incoming.payload.is_some() {
+ sender.send((incoming, tx)).ok();
+ }
+ rx.await.ok();
+ }
+ } else if let Some(envelope) =
+ build_typed_envelope(peer_id, Instant::now(), incoming)
+ {
+ let type_name = envelope.payload_type_name();
+ let message_id = envelope.message_id();
+ if let Some(future) = ProtoMessageHandlerSet::handle_message(
+ &this.message_handlers,
+ envelope,
+ this.clone().into(),
+ cx.clone(),
+ ) {
+ log::debug!("{}:ssh message received. name:{type_name}", this.name);
+ cx.foreground_executor()
+ .spawn(async move {
+ match future.await {
+ Ok(_) => {
+ log::debug!(
+ "{}:ssh message handled. name:{type_name}",
+ this.name
+ );
+ }
+ Err(error) => {
+ // Collapse a multi-line error chain into a
+ // single log line.
+ log::error!(
+ "{}:error handling message. type:{}, error:{:#}",
+ this.name,
+ type_name,
+ format!("{error:#}").lines().fold(
+ String::new(),
+ |mut message, line| {
+ if !message.is_empty() {
+ message.push(' ');
+ }
+ message.push_str(line);
+ message
+ }
+ )
+ );
+ }
+ }
+ })
+ .detach()
+ } else {
+ // No handler registered: reply with an error so the
+ // requester does not hang.
+ log::error!("{}:unhandled ssh message name:{type_name}", this.name);
+ if let Err(e) = AnyProtoClient::from(this.clone()).send_response(
+ message_id,
+ anyhow::anyhow!("no handler registered for {type_name}").to_proto(),
+ ) {
+ log::error!(
+ "{}:error sending error response for {type_name}:{e:#}",
+ this.name
+ );
+ }
+ }
+ }
+ }
+ anyhow::Ok(())
+ })
+ }
+
+ /// Swaps in fresh channels after the transport is re-established and
+ /// restarts the message pump. The unacked buffer is preserved for replay.
+ fn reconnect(
+ self: &Arc<Self>,
+ incoming_rx: UnboundedReceiver<Envelope>,
+ outgoing_tx: UnboundedSender<Envelope>,
+ cx: &AsyncApp,
+ ) {
+ *self.outgoing_tx.lock() = outgoing_tx;
+ *self.task.lock() = Self::start_handling_messages(Arc::downgrade(self), incoming_rx, cx);
+ }
+
+ /// Sends a typed request (buffered) and awaits its typed response.
+ fn request<T: RequestMessage>(
+ &self,
+ payload: T,
+ ) -> impl 'static + Future<Output = Result<T::Response>> {
+ self.request_internal(payload, true)
+ }
+
+ /// Like `request`, but `use_buffer` controls whether the envelope joins the
+ /// replay buffer (resync must bypass it to avoid replaying itself).
+ fn request_internal<T: RequestMessage>(
+ &self,
+ payload: T,
+ use_buffer: bool,
+ ) -> impl 'static + Future<Output = Result<T::Response>> {
+ log::debug!("ssh request start. name:{}", T::NAME);
+ let response =
+ self.request_dynamic(payload.into_envelope(0, None, None), T::NAME, use_buffer);
+ async move {
+ let response = response.await?;
+ log::debug!("ssh request finish. name:{}", T::NAME);
+ T::Response::from_envelope(response).context("received a response of the wrong type")
+ }
+ }
+
+ /// Post-reconnect resynchronization: asks the peer to flush its buffered
+ /// messages (unbuffered, see `request_internal`), then replays our own
+ /// unacked buffer. Fails if not finished within `timeout`.
+ async fn resync(&self, timeout: Duration) -> Result<()> {
+ smol::future::or(
+ async {
+ self.request_internal(proto::FlushBufferedMessages {}, false)
+ .await?;
+
+ for envelope in self.buffer.lock().iter() {
+ self.outgoing_tx
+ .lock()
+ .unbounded_send(envelope.clone())
+ .ok();
+ }
+ Ok(())
+ },
+ async {
+ smol::Timer::after(timeout).await;
+ anyhow::bail!("Timed out resyncing remote client")
+ },
+ )
+ .await
+ }
+
+ /// Round-trip liveness probe with a deadline; used by the heartbeat loop.
+ async fn ping(&self, timeout: Duration) -> Result<()> {
+ smol::future::or(
+ async {
+ self.request(proto::Ping {}).await?;
+ Ok(())
+ },
+ async {
+ smol::Timer::after(timeout).await;
+ anyhow::bail!("Timed out pinging remote client")
+ },
+ )
+ .await
+ }
+
+ /// Fire-and-forget typed send (buffered for replay).
+ fn send<T: EnvelopedMessage>(&self, payload: T) -> Result<()> {
+ log::debug!("ssh send name:{}", T::NAME);
+ self.send_dynamic(payload.into_envelope(0, None, None))
+ }
+
+ /// Untyped request plumbing: assigns an id, registers a response channel,
+ /// sends (buffered or not), then awaits and unwraps the response envelope,
+ /// converting an error payload into an `RpcError`.
+ fn request_dynamic(
+ &self,
+ mut envelope: proto::Envelope,
+ type_name: &'static str,
+ use_buffer: bool,
+ ) -> impl 'static + Future<Output = Result<proto::Envelope>> {
+ envelope.id = self.next_message_id.fetch_add(1, SeqCst);
+ let (tx, rx) = oneshot::channel();
+ let mut response_channels_lock = self.response_channels.lock();
+ response_channels_lock.insert(MessageId(envelope.id), tx);
+ drop(response_channels_lock);
+
+ // Send synchronously so ordering is preserved relative to other sends.
+ let result = if use_buffer {
+ self.send_buffered(envelope)
+ } else {
+ self.send_unbuffered(envelope)
+ };
+ async move {
+ if let Err(error) = &result {
+ log::error!("failed to send message: {error}");
+ anyhow::bail!("failed to send message: {error}");
+ }
+
+ let response = rx.await.context("connection lost")?.0;
+ if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
+ return Err(RpcError::from_proto(error, type_name));
+ }
+ Ok(response)
+ }
+ }
+
+ /// Untyped fire-and-forget send: assigns an id and buffers for replay.
+ pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> {
+ envelope.id = self.next_message_id.fetch_add(1, SeqCst);
+ self.send_buffered(envelope)
+ }
+
+ /// Stamps the latest ack, records the envelope for post-reconnect replay,
+ /// then sends.
+ fn send_buffered(&self, mut envelope: proto::Envelope) -> Result<()> {
+ envelope.ack_id = Some(self.max_received.load(SeqCst));
+ self.buffer.lock().push_back(envelope.clone());
+ // ignore errors on send (happen while we're reconnecting)
+ // assume that the global "disconnected" overlay is sufficient.
+ self.outgoing_tx.lock().unbounded_send(envelope).ok();
+ Ok(())
+ }
+
+ /// Stamps the latest ack and sends without recording for replay.
+ fn send_unbuffered(&self, mut envelope: proto::Envelope) -> Result<()> {
+ envelope.ack_id = Some(self.max_received.load(SeqCst));
+ self.outgoing_tx.lock().unbounded_send(envelope).ok();
+ Ok(())
+ }
+}
+
+/// Adapts `ChannelClient` to the generic `ProtoClient` interface; all calls
+/// delegate to the buffered send/request paths above.
+impl ProtoClient for ChannelClient {
+ fn request(
+ &self,
+ envelope: proto::Envelope,
+ request_type: &'static str,
+ ) -> BoxFuture<'static, Result<proto::Envelope>> {
+ self.request_dynamic(envelope, request_type, true).boxed()
+ }
+
+ fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> {
+ self.send_dynamic(envelope)
+ }
+
+ fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> {
+ self.send_dynamic(envelope)
+ }
+
+ fn message_handler_set(&self) -> &Mutex<ProtoMessageHandlerSet> {
+ &self.message_handlers
+ }
+
+ // This client talks to a remote host directly, never through collab.
+ fn is_via_collab(&self) -> bool {
+ false
+ }
+}
+
+/// Test doubles for the remote-connection machinery: an in-process
+/// `FakeRemoteConnection` that shuttles envelopes between a client and a
+/// server `ChannelClient`, plus a panicking `Delegate` for paths tests must
+/// not reach.
+#[cfg(any(test, feature = "test-support"))]
+mod fake {
+ use super::{ChannelClient, RemoteClientDelegate, RemoteConnection, RemotePlatform};
+ use crate::remote_client::{CommandTemplate, RemoteConnectionOptions};
+ use anyhow::Result;
+ use async_trait::async_trait;
+ use collections::HashMap;
+ use futures::{
+ FutureExt, SinkExt, StreamExt,
+ channel::{
+ mpsc::{self, Sender},
+ oneshot,
+ },
+ select_biased,
+ };
+ use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task, TestAppContext};
+ use release_channel::ReleaseChannel;
+ use rpc::proto::Envelope;
+ use std::{path::PathBuf, sync::Arc};
+ use util::paths::{PathStyle, RemotePathBuf};
+
+ /// In-memory stand-in for an SSH/WSL connection, bridging directly to a
+ /// server-side `ChannelClient` owned by the test.
+ pub(super) struct FakeRemoteConnection {
+ pub(super) connection_options: RemoteConnectionOptions,
+ pub(super) server_channel: Arc<ChannelClient>,
+ pub(super) server_cx: SendableCx,
+ }
+
+ /// Wrapper smuggling the server's `AsyncApp` across the `Send + Sync`
+ /// bound of `RemoteConnection`.
+ pub(super) struct SendableCx(AsyncApp);
+ impl SendableCx {
+ // SAFETY: When run in test mode, GPUI is always single threaded.
+ pub(super) fn new(cx: &TestAppContext) -> Self {
+ Self(cx.to_async())
+ }
+
+ // SAFETY: Enforce that we're on the main thread by requiring a valid AsyncApp
+ fn get(&self, _: &AsyncApp) -> AsyncApp {
+ self.0.clone()
+ }
+ }
+
+ // SAFETY: There is no way to access a SendableCx from a different thread, see [`SendableCx::new`] and [`SendableCx::get`]
+ unsafe impl Send for SendableCx {}
+ unsafe impl Sync for SendableCx {}
+
+ #[async_trait(?Send)]
+ impl RemoteConnection for FakeRemoteConnection {
+ // Killing a fake connection is a no-op.
+ async fn kill(&self) -> Result<()> {
+ Ok(())
+ }
+
+ fn has_been_killed(&self) -> bool {
+ false
+ }
+
+ /// Mimics the real connection's shape: wraps the command in an "ssh"
+ /// invocation, defaulting the program to `sh`.
+ fn build_command(
+ &self,
+ program: Option<String>,
+ args: &[String],
+ env: &HashMap<String, String>,
+ _: Option<String>,
+ _: Option<(u16, String, u16)>,
+ ) -> Result<CommandTemplate> {
+ let ssh_program = program.unwrap_or_else(|| "sh".to_string());
+ let mut ssh_args = Vec::new();
+ ssh_args.push(ssh_program);
+ ssh_args.extend(args.iter().cloned());
+ Ok(CommandTemplate {
+ program: "ssh".into(),
+ args: ssh_args,
+ env: env.clone(),
+ })
+ }
+
+ // Not exercised by tests that use the fake.
+ fn upload_directory(
+ &self,
+ _src_path: PathBuf,
+ _dest_path: RemotePathBuf,
+ _cx: &App,
+ ) -> Task<Result<()>> {
+ unreachable!()
+ }
+
+ fn connection_options(&self) -> RemoteConnectionOptions {
+ self.connection_options.clone()
+ }
+
+ /// Severs the link by reconnecting the server channel onto dangling
+ /// channel halves, so in-flight traffic is dropped.
+ fn simulate_disconnect(&self, cx: &AsyncApp) {
+ let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
+ let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
+ self.server_channel
+ .reconnect(incoming_rx, outgoing_tx, &self.server_cx.get(cx));
+ }
+
+ /// Instead of spawning a process, pumps envelopes between the client
+ /// channels and the server `ChannelClient`, reporting activity for the
+ /// heartbeat. Resolves with exit code 1 when either side closes.
+ fn start_proxy(
+ &self,
+ _unique_identifier: String,
+ _reconnect: bool,
+ mut client_incoming_tx: mpsc::UnboundedSender<Envelope>,
+ mut client_outgoing_rx: mpsc::UnboundedReceiver<Envelope>,
+ mut connection_activity_tx: Sender<()>,
+ _delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<i32>> {
+ let (mut server_incoming_tx, server_incoming_rx) = mpsc::unbounded::<Envelope>();
+ let (server_outgoing_tx, mut server_outgoing_rx) = mpsc::unbounded::<Envelope>();
+
+ self.server_channel.reconnect(
+ server_incoming_rx,
+ server_outgoing_tx,
+ &self.server_cx.get(cx),
+ );
+
+ cx.background_spawn(async move {
+ loop {
+ select_biased! {
+ server_to_client = server_outgoing_rx.next().fuse() => {
+ let Some(server_to_client) = server_to_client else {
+ return Ok(1)
+ };
+ connection_activity_tx.try_send(()).ok();
+ client_incoming_tx.send(server_to_client).await.ok();
+ }
+ client_to_server = client_outgoing_rx.next().fuse() => {
+ let Some(client_to_server) = client_to_server else {
+ return Ok(1)
+ };
+ server_incoming_tx.send(client_to_server).await.ok();
+ }
+ }
+ }
+ })
+ }
+
+ fn path_style(&self) -> PathStyle {
+ PathStyle::current()
+ }
+
+ fn shell(&self) -> String {
+ "sh".to_owned()
+ }
+ }
+
+ /// Delegate for fake connections; panics on any interaction tests should
+ /// never trigger, except status updates which are ignored.
+ pub(super) struct Delegate;
+
+ impl RemoteClientDelegate for Delegate {
+ fn ask_password(&self, _: String, _: oneshot::Sender<String>, _: &mut AsyncApp) {
+ unreachable!()
+ }
+
+ fn download_server_binary_locally(
+ &self,
+ _: RemotePlatform,
+ _: ReleaseChannel,
+ _: Option<SemanticVersion>,
+ _: &mut AsyncApp,
+ ) -> Task<Result<PathBuf>> {
+ unreachable!()
+ }
+
+ fn get_download_params(
+ &self,
+ _platform: RemotePlatform,
+ _release_channel: ReleaseChannel,
+ _version: Option<SemanticVersion>,
+ _cx: &mut AsyncApp,
+ ) -> Task<Result<Option<(String, String)>>> {
+ unreachable!()
+ }
+
+ fn set_status(&self, _: Option<&str>, _: &mut AsyncApp) {}
+ }
+}
@@ -1,2733 +0,0 @@
-use crate::{
- json_log::LogRecord,
- protocol::{
- MESSAGE_LEN_SIZE, MessageId, message_len_from_buffer, read_message_with_len, write_message,
- },
- proxy::ProxyLaunchError,
-};
-use anyhow::{Context as _, Result, anyhow};
-use async_trait::async_trait;
-use collections::HashMap;
-use futures::{
- AsyncReadExt as _, Future, FutureExt as _, StreamExt as _,
- channel::{
- mpsc::{self, Sender, UnboundedReceiver, UnboundedSender},
- oneshot,
- },
- future::{BoxFuture, Shared},
- select, select_biased,
-};
-use gpui::{
- App, AppContext as _, AsyncApp, BackgroundExecutor, BorrowAppContext, Context, Entity,
- EventEmitter, Global, SemanticVersion, Task, WeakEntity,
-};
-use itertools::Itertools;
-use parking_lot::Mutex;
-
-use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
-use rpc::{
- AnyProtoClient, EntityMessageSubscriber, ErrorExt, ProtoClient, ProtoMessageHandlerSet,
- RpcError,
- proto::{self, Envelope, EnvelopedMessage, PeerId, RequestMessage, build_typed_envelope},
-};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use smol::{
- fs,
- process::{self, Child, Stdio},
-};
-use std::{
- any::TypeId,
- collections::VecDeque,
- fmt, iter,
- ops::ControlFlow,
- path::{Path, PathBuf},
- sync::{
- Arc, Weak,
- atomic::{AtomicU32, AtomicU64, Ordering::SeqCst},
- },
- time::{Duration, Instant},
-};
-use tempfile::TempDir;
-use util::{
- ResultExt,
- paths::{PathStyle, RemotePathBuf},
-};
-
-#[derive(
- Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
-)]
-pub struct SshProjectId(pub u64);
-
-#[derive(Clone)]
-pub struct SshSocket {
- connection_options: SshConnectionOptions,
- #[cfg(not(target_os = "windows"))]
- socket_path: PathBuf,
- #[cfg(target_os = "windows")]
- envs: HashMap<String, String>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize, JsonSchema)]
-pub struct SshPortForwardOption {
- #[serde(skip_serializing_if = "Option::is_none")]
- pub local_host: Option<String>,
- pub local_port: u16,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub remote_host: Option<String>,
- pub remote_port: u16,
-}
-
-#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
-pub struct SshConnectionOptions {
- pub host: String,
- pub username: Option<String>,
- pub port: Option<u16>,
- pub password: Option<String>,
- pub args: Option<Vec<String>>,
- pub port_forwards: Option<Vec<SshPortForwardOption>>,
-
- pub nickname: Option<String>,
- pub upload_binary_over_ssh: bool,
-}
-
-pub struct SshArgs {
- pub arguments: Vec<String>,
- pub envs: Option<HashMap<String, String>>,
-}
-
-#[macro_export]
-macro_rules! shell_script {
- ($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{
- format!(
- $fmt,
- $(
- $name = shlex::try_quote($arg).unwrap()
- ),+
- )
- }};
-}
-
-fn parse_port_number(port_str: &str) -> Result<u16> {
- port_str
- .parse()
- .with_context(|| format!("parsing port number: {port_str}"))
-}
-
-fn parse_port_forward_spec(spec: &str) -> Result<SshPortForwardOption> {
- let parts: Vec<&str> = spec.split(':').collect();
-
- match parts.len() {
- 4 => {
- let local_port = parse_port_number(parts[1])?;
- let remote_port = parse_port_number(parts[3])?;
-
- Ok(SshPortForwardOption {
- local_host: Some(parts[0].to_string()),
- local_port,
- remote_host: Some(parts[2].to_string()),
- remote_port,
- })
- }
- 3 => {
- let local_port = parse_port_number(parts[0])?;
- let remote_port = parse_port_number(parts[2])?;
-
- Ok(SshPortForwardOption {
- local_host: None,
- local_port,
- remote_host: Some(parts[1].to_string()),
- remote_port,
- })
- }
- _ => anyhow::bail!("Invalid port forward format"),
- }
-}
-
-impl SshConnectionOptions {
- pub fn parse_command_line(input: &str) -> Result<Self> {
- let input = input.trim_start_matches("ssh ");
- let mut hostname: Option<String> = None;
- let mut username: Option<String> = None;
- let mut port: Option<u16> = None;
- let mut args = Vec::new();
- let mut port_forwards: Vec<SshPortForwardOption> = Vec::new();
-
- // disallowed: -E, -e, -F, -f, -G, -g, -M, -N, -n, -O, -q, -S, -s, -T, -t, -V, -v, -W
- const ALLOWED_OPTS: &[&str] = &[
- "-4", "-6", "-A", "-a", "-C", "-K", "-k", "-X", "-x", "-Y", "-y",
- ];
- const ALLOWED_ARGS: &[&str] = &[
- "-B", "-b", "-c", "-D", "-F", "-I", "-i", "-J", "-l", "-m", "-o", "-P", "-p", "-R",
- "-w",
- ];
-
- let mut tokens = shlex::split(input).context("invalid input")?.into_iter();
-
- 'outer: while let Some(arg) = tokens.next() {
- if ALLOWED_OPTS.contains(&(&arg as &str)) {
- args.push(arg.to_string());
- continue;
- }
- if arg == "-p" {
- port = tokens.next().and_then(|arg| arg.parse().ok());
- continue;
- } else if let Some(p) = arg.strip_prefix("-p") {
- port = p.parse().ok();
- continue;
- }
- if arg == "-l" {
- username = tokens.next();
- continue;
- } else if let Some(l) = arg.strip_prefix("-l") {
- username = Some(l.to_string());
- continue;
- }
- if arg == "-L" || arg.starts_with("-L") {
- let forward_spec = if arg == "-L" {
- tokens.next()
- } else {
- Some(arg.strip_prefix("-L").unwrap().to_string())
- };
-
- if let Some(spec) = forward_spec {
- port_forwards.push(parse_port_forward_spec(&spec)?);
- } else {
- anyhow::bail!("Missing port forward format");
- }
- }
-
- for a in ALLOWED_ARGS {
- if arg == *a {
- args.push(arg);
- if let Some(next) = tokens.next() {
- args.push(next);
- }
- continue 'outer;
- } else if arg.starts_with(a) {
- args.push(arg);
- continue 'outer;
- }
- }
- if arg.starts_with("-") || hostname.is_some() {
- anyhow::bail!("unsupported argument: {:?}", arg);
- }
- let mut input = &arg as &str;
- // Destination might be: username1@username2@ip2@ip1
- if let Some((u, rest)) = input.rsplit_once('@') {
- input = rest;
- username = Some(u.to_string());
- }
- if let Some((rest, p)) = input.split_once(':') {
- input = rest;
- port = p.parse().ok()
- }
- hostname = Some(input.to_string())
- }
-
- let Some(hostname) = hostname else {
- anyhow::bail!("missing hostname");
- };
-
- let port_forwards = match port_forwards.len() {
- 0 => None,
- _ => Some(port_forwards),
- };
-
- Ok(Self {
- host: hostname.to_string(),
- username: username.clone(),
- port,
- port_forwards,
- args: Some(args),
- password: None,
- nickname: None,
- upload_binary_over_ssh: false,
- })
- }
-
- pub fn ssh_url(&self) -> String {
- let mut result = String::from("ssh://");
- if let Some(username) = &self.username {
- // Username might be: username1@username2@ip2
- let username = urlencoding::encode(username);
- result.push_str(&username);
- result.push('@');
- }
- result.push_str(&self.host);
- if let Some(port) = self.port {
- result.push(':');
- result.push_str(&port.to_string());
- }
- result
- }
-
- pub fn additional_args(&self) -> Vec<String> {
- let mut args = self.args.iter().flatten().cloned().collect::<Vec<String>>();
-
- if let Some(forwards) = &self.port_forwards {
- args.extend(forwards.iter().map(|pf| {
- let local_host = match &pf.local_host {
- Some(host) => host,
- None => "localhost",
- };
- let remote_host = match &pf.remote_host {
- Some(host) => host,
- None => "localhost",
- };
-
- format!(
- "-L{}:{}:{}:{}",
- local_host, pf.local_port, remote_host, pf.remote_port
- )
- }));
- }
-
- args
- }
-
- fn scp_url(&self) -> String {
- if let Some(username) = &self.username {
- format!("{}@{}", username, self.host)
- } else {
- self.host.clone()
- }
- }
-
- pub fn connection_string(&self) -> String {
- let host = if let Some(username) = &self.username {
- format!("{}@{}", username, self.host)
- } else {
- self.host.clone()
- };
- if let Some(port) = &self.port {
- format!("{}:{}", host, port)
- } else {
- host
- }
- }
-}
-
-#[derive(Copy, Clone, Debug)]
-pub struct SshPlatform {
- pub os: &'static str,
- pub arch: &'static str,
-}
-
-pub trait SshClientDelegate: Send + Sync {
- fn ask_password(&self, prompt: String, tx: oneshot::Sender<String>, cx: &mut AsyncApp);
- fn get_download_params(
- &self,
- platform: SshPlatform,
- release_channel: ReleaseChannel,
- version: Option<SemanticVersion>,
- cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>>;
-
- fn download_server_binary_locally(
- &self,
- platform: SshPlatform,
- release_channel: ReleaseChannel,
- version: Option<SemanticVersion>,
- cx: &mut AsyncApp,
- ) -> Task<Result<PathBuf>>;
- fn set_status(&self, status: Option<&str>, cx: &mut AsyncApp);
-}
-
-impl SshSocket {
- #[cfg(not(target_os = "windows"))]
- fn new(options: SshConnectionOptions, socket_path: PathBuf) -> Result<Self> {
- Ok(Self {
- connection_options: options,
- socket_path,
- })
- }
-
- #[cfg(target_os = "windows")]
- fn new(options: SshConnectionOptions, temp_dir: &TempDir, secret: String) -> Result<Self> {
- let askpass_script = temp_dir.path().join("askpass.bat");
- std::fs::write(&askpass_script, "@ECHO OFF\necho %ZED_SSH_ASKPASS%")?;
- let mut envs = HashMap::default();
- envs.insert("SSH_ASKPASS_REQUIRE".into(), "force".into());
- envs.insert("SSH_ASKPASS".into(), askpass_script.display().to_string());
- envs.insert("ZED_SSH_ASKPASS".into(), secret);
- Ok(Self {
- connection_options: options,
- envs,
- })
- }
-
- // :WARNING: ssh unquotes arguments when executing on the remote :WARNING:
- // e.g. $ ssh host sh -c 'ls -l' is equivalent to $ ssh host sh -c ls -l
- // and passes -l as an argument to sh, not to ls.
- // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing
- // into a machine. You must use `cd` to get back to $HOME.
- // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
- fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command {
- let mut command = util::command::new_smol_command("ssh");
- let to_run = iter::once(&program)
- .chain(args.iter())
- .map(|token| {
- // We're trying to work with: sh, bash, zsh, fish, tcsh, ...?
- debug_assert!(
- !token.contains('\n'),
- "multiline arguments do not work in all shells"
- );
- shlex::try_quote(token).unwrap()
- })
- .join(" ");
- let to_run = format!("cd; {to_run}");
- log::debug!("ssh {} {:?}", self.connection_options.ssh_url(), to_run);
- self.ssh_options(&mut command)
- .arg(self.connection_options.ssh_url())
- .arg(to_run);
- command
- }
-
- async fn run_command(&self, program: &str, args: &[&str]) -> Result<String> {
- let output = self.ssh_command(program, args).output().await?;
- anyhow::ensure!(
- output.status.success(),
- "failed to run command: {}",
- String::from_utf8_lossy(&output.stderr)
- );
- Ok(String::from_utf8_lossy(&output.stdout).to_string())
- }
-
- #[cfg(not(target_os = "windows"))]
- fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
- command
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .args(self.connection_options.additional_args())
- .args(["-o", "ControlMaster=no", "-o"])
- .arg(format!("ControlPath={}", self.socket_path.display()))
- }
-
- #[cfg(target_os = "windows")]
- fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
- command
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .args(self.connection_options.additional_args())
- .envs(self.envs.clone())
- }
-
- // On Windows, we need to use `SSH_ASKPASS` to provide the password to ssh.
- // On Linux, we use the `ControlPath` option to create a socket file that ssh can use to
- #[cfg(not(target_os = "windows"))]
- fn ssh_args(&self) -> SshArgs {
- let mut arguments = self.connection_options.additional_args();
- arguments.extend(vec![
- "-o".to_string(),
- "ControlMaster=no".to_string(),
- "-o".to_string(),
- format!("ControlPath={}", self.socket_path.display()),
- self.connection_options.ssh_url(),
- ]);
- SshArgs {
- arguments,
- envs: None,
- }
- }
-
- #[cfg(target_os = "windows")]
- fn ssh_args(&self) -> SshArgs {
- let mut arguments = self.connection_options.additional_args();
- arguments.push(self.connection_options.ssh_url());
- SshArgs {
- arguments,
- envs: Some(self.envs.clone()),
- }
- }
-
- async fn platform(&self) -> Result<SshPlatform> {
- let uname = self.run_command("sh", &["-c", "uname -sm"]).await?;
- let Some((os, arch)) = uname.split_once(" ") else {
- anyhow::bail!("unknown uname: {uname:?}")
- };
-
- let os = match os.trim() {
- "Darwin" => "macos",
- "Linux" => "linux",
- _ => anyhow::bail!(
- "Prebuilt remote servers are not yet available for {os:?}. See https://zed.dev/docs/remote-development"
- ),
- };
- // exclude armv5,6,7 as they are 32-bit.
- let arch = if arch.starts_with("armv8")
- || arch.starts_with("armv9")
- || arch.starts_with("arm64")
- || arch.starts_with("aarch64")
- {
- "aarch64"
- } else if arch.starts_with("x86") {
- "x86_64"
- } else {
- anyhow::bail!(
- "Prebuilt remote servers are not yet available for {arch:?}. See https://zed.dev/docs/remote-development"
- )
- };
-
- Ok(SshPlatform { os, arch })
- }
-}
-
-const MAX_MISSED_HEARTBEATS: usize = 5;
-const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5);
-const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(5);
-
-const MAX_RECONNECT_ATTEMPTS: usize = 3;
-
-enum State {
- Connecting,
- Connected {
- ssh_connection: Arc<dyn RemoteConnection>,
- delegate: Arc<dyn SshClientDelegate>,
-
- multiplex_task: Task<Result<()>>,
- heartbeat_task: Task<Result<()>>,
- },
- HeartbeatMissed {
- missed_heartbeats: usize,
-
- ssh_connection: Arc<dyn RemoteConnection>,
- delegate: Arc<dyn SshClientDelegate>,
-
- multiplex_task: Task<Result<()>>,
- heartbeat_task: Task<Result<()>>,
- },
- Reconnecting,
- ReconnectFailed {
- ssh_connection: Arc<dyn RemoteConnection>,
- delegate: Arc<dyn SshClientDelegate>,
-
- error: anyhow::Error,
- attempts: usize,
- },
- ReconnectExhausted,
- ServerNotRunning,
-}
-
-impl fmt::Display for State {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Self::Connecting => write!(f, "connecting"),
- Self::Connected { .. } => write!(f, "connected"),
- Self::Reconnecting => write!(f, "reconnecting"),
- Self::ReconnectFailed { .. } => write!(f, "reconnect failed"),
- Self::ReconnectExhausted => write!(f, "reconnect exhausted"),
- Self::HeartbeatMissed { .. } => write!(f, "heartbeat missed"),
- Self::ServerNotRunning { .. } => write!(f, "server not running"),
- }
- }
-}
-
-impl State {
- fn ssh_connection(&self) -> Option<&dyn RemoteConnection> {
- match self {
- Self::Connected { ssh_connection, .. } => Some(ssh_connection.as_ref()),
- Self::HeartbeatMissed { ssh_connection, .. } => Some(ssh_connection.as_ref()),
- Self::ReconnectFailed { ssh_connection, .. } => Some(ssh_connection.as_ref()),
- _ => None,
- }
- }
-
- fn can_reconnect(&self) -> bool {
- match self {
- Self::Connected { .. }
- | Self::HeartbeatMissed { .. }
- | Self::ReconnectFailed { .. } => true,
- State::Connecting
- | State::Reconnecting
- | State::ReconnectExhausted
- | State::ServerNotRunning => false,
- }
- }
-
- fn is_reconnect_failed(&self) -> bool {
- matches!(self, Self::ReconnectFailed { .. })
- }
-
- fn is_reconnect_exhausted(&self) -> bool {
- matches!(self, Self::ReconnectExhausted { .. })
- }
-
- fn is_server_not_running(&self) -> bool {
- matches!(self, Self::ServerNotRunning)
- }
-
- fn is_reconnecting(&self) -> bool {
- matches!(self, Self::Reconnecting { .. })
- }
-
- fn heartbeat_recovered(self) -> Self {
- match self {
- Self::HeartbeatMissed {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- ..
- } => Self::Connected {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- },
- _ => self,
- }
- }
-
- fn heartbeat_missed(self) -> Self {
- match self {
- Self::Connected {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- } => Self::HeartbeatMissed {
- missed_heartbeats: 1,
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- },
- Self::HeartbeatMissed {
- missed_heartbeats,
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- } => Self::HeartbeatMissed {
- missed_heartbeats: missed_heartbeats + 1,
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- },
- _ => self,
- }
- }
-}
-
-/// The state of the ssh connection.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub enum ConnectionState {
- Connecting,
- Connected,
- HeartbeatMissed,
- Reconnecting,
- Disconnected,
-}
-
-impl From<&State> for ConnectionState {
- fn from(value: &State) -> Self {
- match value {
- State::Connecting => Self::Connecting,
- State::Connected { .. } => Self::Connected,
- State::Reconnecting | State::ReconnectFailed { .. } => Self::Reconnecting,
- State::HeartbeatMissed { .. } => Self::HeartbeatMissed,
- State::ReconnectExhausted => Self::Disconnected,
- State::ServerNotRunning => Self::Disconnected,
- }
- }
-}
-
-pub struct SshRemoteClient {
- client: Arc<ChannelClient>,
- unique_identifier: String,
- connection_options: SshConnectionOptions,
- path_style: PathStyle,
- state: Arc<Mutex<Option<State>>>,
-}
-
-#[derive(Debug)]
-pub enum SshRemoteEvent {
- Disconnected,
-}
-
-impl EventEmitter<SshRemoteEvent> for SshRemoteClient {}
-
-// Identifies the socket on the remote server so that reconnects
-// can re-join the same project.
-pub enum ConnectionIdentifier {
- Setup(u64),
- Workspace(i64),
-}
-
-static NEXT_ID: AtomicU64 = AtomicU64::new(1);
-
-impl ConnectionIdentifier {
- pub fn setup() -> Self {
- Self::Setup(NEXT_ID.fetch_add(1, SeqCst))
- }
- // This string gets used in a socket name, and so must be relatively short.
- // The total length of:
- // /home/{username}/.local/share/zed/server_state/{name}/stdout.sock
- // Must be less than about 100 characters
- // https://unix.stackexchange.com/questions/367008/why-is-socket-path-length-limited-to-a-hundred-chars
- // So our strings should be at most 20 characters or so.
- fn to_string(&self, cx: &App) -> String {
- let identifier_prefix = match ReleaseChannel::global(cx) {
- ReleaseChannel::Stable => "".to_string(),
- release_channel => format!("{}-", release_channel.dev_name()),
- };
- match self {
- Self::Setup(setup_id) => format!("{identifier_prefix}setup-{setup_id}"),
- Self::Workspace(workspace_id) => {
- format!("{identifier_prefix}workspace-{workspace_id}",)
- }
- }
- }
-}
-
-impl SshRemoteClient {
- pub fn new(
- unique_identifier: ConnectionIdentifier,
- connection_options: SshConnectionOptions,
- cancellation: oneshot::Receiver<()>,
- delegate: Arc<dyn SshClientDelegate>,
- cx: &mut App,
- ) -> Task<Result<Option<Entity<Self>>>> {
- let unique_identifier = unique_identifier.to_string(cx);
- cx.spawn(async move |cx| {
- let success = Box::pin(async move {
- let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
- let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
- let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
-
- let client =
- cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "client"))?;
-
- let ssh_connection = cx
- .update(|cx| {
- cx.update_default_global(|pool: &mut ConnectionPool, cx| {
- pool.connect(connection_options.clone(), &delegate, cx)
- })
- })?
- .await
- .map_err(|e| e.cloned())?;
-
- let path_style = ssh_connection.path_style();
- let this = cx.new(|_| Self {
- client: client.clone(),
- unique_identifier: unique_identifier.clone(),
- connection_options,
- path_style,
- state: Arc::new(Mutex::new(Some(State::Connecting))),
- })?;
-
- let io_task = ssh_connection.start_proxy(
- unique_identifier,
- false,
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- delegate.clone(),
- cx,
- );
-
- let multiplex_task = Self::monitor(this.downgrade(), io_task, cx);
-
- if let Err(error) = client.ping(HEARTBEAT_TIMEOUT).await {
- log::error!("failed to establish connection: {}", error);
- return Err(error);
- }
-
- let heartbeat_task = Self::heartbeat(this.downgrade(), connection_activity_rx, cx);
-
- this.update(cx, |this, _| {
- *this.state.lock() = Some(State::Connected {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- });
- })?;
-
- Ok(Some(this))
- });
-
- select! {
- _ = cancellation.fuse() => {
- Ok(None)
- }
- result = success.fuse() => result
- }
- })
- }
-
- pub fn shutdown_processes<T: RequestMessage>(
- &self,
- shutdown_request: Option<T>,
- executor: BackgroundExecutor,
- ) -> Option<impl Future<Output = ()> + use<T>> {
- let state = self.state.lock().take()?;
- log::info!("shutting down ssh processes");
-
- let State::Connected {
- multiplex_task,
- heartbeat_task,
- ssh_connection,
- delegate,
- } = state
- else {
- return None;
- };
-
- let client = self.client.clone();
-
- Some(async move {
- if let Some(shutdown_request) = shutdown_request {
- client.send(shutdown_request).log_err();
- // We wait 50ms instead of waiting for a response, because
- // waiting for a response would require us to wait on the main thread
- // which we want to avoid in an `on_app_quit` callback.
- executor.timer(Duration::from_millis(50)).await;
- }
-
- // Drop `multiplex_task` because it owns our ssh_proxy_process, which is a
- // child of master_process.
- drop(multiplex_task);
- // Now drop the rest of state, which kills master process.
- drop(heartbeat_task);
- drop(ssh_connection);
- drop(delegate);
- })
- }
-
- fn reconnect(&mut self, cx: &mut Context<Self>) -> Result<()> {
- let mut lock = self.state.lock();
-
- let can_reconnect = lock
- .as_ref()
- .map(|state| state.can_reconnect())
- .unwrap_or(false);
- if !can_reconnect {
- log::info!("aborting reconnect, because not in state that allows reconnecting");
- let error = if let Some(state) = lock.as_ref() {
- format!("invalid state, cannot reconnect while in state {state}")
- } else {
- "no state set".to_string()
- };
- anyhow::bail!(error);
- }
-
- let state = lock.take().unwrap();
- let (attempts, ssh_connection, delegate) = match state {
- State::Connected {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- }
- | State::HeartbeatMissed {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task,
- ..
- } => {
- drop(multiplex_task);
- drop(heartbeat_task);
- (0, ssh_connection, delegate)
- }
- State::ReconnectFailed {
- attempts,
- ssh_connection,
- delegate,
- ..
- } => (attempts, ssh_connection, delegate),
- State::Connecting
- | State::Reconnecting
- | State::ReconnectExhausted
- | State::ServerNotRunning => unreachable!(),
- };
-
- let attempts = attempts + 1;
- if attempts > MAX_RECONNECT_ATTEMPTS {
- log::error!(
- "Failed to reconnect to after {} attempts, giving up",
- MAX_RECONNECT_ATTEMPTS
- );
- drop(lock);
- self.set_state(State::ReconnectExhausted, cx);
- return Ok(());
- }
- drop(lock);
-
- self.set_state(State::Reconnecting, cx);
-
- log::info!("Trying to reconnect to ssh server... Attempt {}", attempts);
-
- let unique_identifier = self.unique_identifier.clone();
- let client = self.client.clone();
- let reconnect_task = cx.spawn(async move |this, cx| {
- macro_rules! failed {
- ($error:expr, $attempts:expr, $ssh_connection:expr, $delegate:expr) => {
- return State::ReconnectFailed {
- error: anyhow!($error),
- attempts: $attempts,
- ssh_connection: $ssh_connection,
- delegate: $delegate,
- };
- };
- }
-
- if let Err(error) = ssh_connection
- .kill()
- .await
- .context("Failed to kill ssh process")
- {
- failed!(error, attempts, ssh_connection, delegate);
- };
-
- let connection_options = ssh_connection.connection_options();
-
- let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
- let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
- let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1);
-
- let (ssh_connection, io_task) = match async {
- let ssh_connection = cx
- .update_global(|pool: &mut ConnectionPool, cx| {
- pool.connect(connection_options, &delegate, cx)
- })?
- .await
- .map_err(|error| error.cloned())?;
-
- let io_task = ssh_connection.start_proxy(
- unique_identifier,
- true,
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- delegate.clone(),
- cx,
- );
- anyhow::Ok((ssh_connection, io_task))
- }
- .await
- {
- Ok((ssh_connection, io_task)) => (ssh_connection, io_task),
- Err(error) => {
- failed!(error, attempts, ssh_connection, delegate);
- }
- };
-
- let multiplex_task = Self::monitor(this.clone(), io_task, cx);
- client.reconnect(incoming_rx, outgoing_tx, cx);
-
- if let Err(error) = client.resync(HEARTBEAT_TIMEOUT).await {
- failed!(error, attempts, ssh_connection, delegate);
- };
-
- State::Connected {
- ssh_connection,
- delegate,
- multiplex_task,
- heartbeat_task: Self::heartbeat(this.clone(), connection_activity_rx, cx),
- }
- });
-
- cx.spawn(async move |this, cx| {
- let new_state = reconnect_task.await;
- this.update(cx, |this, cx| {
- this.try_set_state(cx, |old_state| {
- if old_state.is_reconnecting() {
- match &new_state {
- State::Connecting
- | State::Reconnecting { .. }
- | State::HeartbeatMissed { .. }
- | State::ServerNotRunning => {}
- State::Connected { .. } => {
- log::info!("Successfully reconnected");
- }
- State::ReconnectFailed {
- error, attempts, ..
- } => {
- log::error!(
- "Reconnect attempt {} failed: {:?}. Starting new attempt...",
- attempts,
- error
- );
- }
- State::ReconnectExhausted => {
- log::error!("Reconnect attempt failed and all attempts exhausted");
- }
- }
- Some(new_state)
- } else {
- None
- }
- });
-
- if this.state_is(State::is_reconnect_failed) {
- this.reconnect(cx)
- } else if this.state_is(State::is_reconnect_exhausted) {
- Ok(())
- } else {
- log::debug!("State has transition from Reconnecting into new state while attempting reconnect.");
- Ok(())
- }
- })
- })
- .detach_and_log_err(cx);
-
- Ok(())
- }
-
- fn heartbeat(
- this: WeakEntity<Self>,
- mut connection_activity_rx: mpsc::Receiver<()>,
- cx: &mut AsyncApp,
- ) -> Task<Result<()>> {
- let Ok(client) = this.read_with(cx, |this, _| this.client.clone()) else {
- return Task::ready(Err(anyhow!("SshRemoteClient lost")));
- };
-
- cx.spawn(async move |cx| {
- let mut missed_heartbeats = 0;
-
- let keepalive_timer = cx.background_executor().timer(HEARTBEAT_INTERVAL).fuse();
- futures::pin_mut!(keepalive_timer);
-
- loop {
- select_biased! {
- result = connection_activity_rx.next().fuse() => {
- if result.is_none() {
- log::warn!("ssh heartbeat: connection activity channel has been dropped. stopping.");
- return Ok(());
- }
-
- if missed_heartbeats != 0 {
- missed_heartbeats = 0;
- let _ =this.update(cx, |this, cx| {
- this.handle_heartbeat_result(missed_heartbeats, cx)
- })?;
- }
- }
- _ = keepalive_timer => {
- log::debug!("Sending heartbeat to server...");
-
- let result = select_biased! {
- _ = connection_activity_rx.next().fuse() => {
- Ok(())
- }
- ping_result = client.ping(HEARTBEAT_TIMEOUT).fuse() => {
- ping_result
- }
- };
-
- if result.is_err() {
- missed_heartbeats += 1;
- log::warn!(
- "No heartbeat from server after {:?}. Missed heartbeat {} out of {}.",
- HEARTBEAT_TIMEOUT,
- missed_heartbeats,
- MAX_MISSED_HEARTBEATS
- );
- } else if missed_heartbeats != 0 {
- missed_heartbeats = 0;
- } else {
- continue;
- }
-
- let result = this.update(cx, |this, cx| {
- this.handle_heartbeat_result(missed_heartbeats, cx)
- })?;
- if result.is_break() {
- return Ok(());
- }
- }
- }
-
- keepalive_timer.set(cx.background_executor().timer(HEARTBEAT_INTERVAL).fuse());
- }
-
- })
- }
-
- fn handle_heartbeat_result(
- &mut self,
- missed_heartbeats: usize,
- cx: &mut Context<Self>,
- ) -> ControlFlow<()> {
- let state = self.state.lock().take().unwrap();
- let next_state = if missed_heartbeats > 0 {
- state.heartbeat_missed()
- } else {
- state.heartbeat_recovered()
- };
-
- self.set_state(next_state, cx);
-
- if missed_heartbeats >= MAX_MISSED_HEARTBEATS {
- log::error!(
- "Missed last {} heartbeats. Reconnecting...",
- missed_heartbeats
- );
-
- self.reconnect(cx)
- .context("failed to start reconnect process after missing heartbeats")
- .log_err();
- ControlFlow::Break(())
- } else {
- ControlFlow::Continue(())
- }
- }
-
- fn monitor(
- this: WeakEntity<Self>,
- io_task: Task<Result<i32>>,
- cx: &AsyncApp,
- ) -> Task<Result<()>> {
- cx.spawn(async move |cx| {
- let result = io_task.await;
-
- match result {
- Ok(exit_code) => {
- if let Some(error) = ProxyLaunchError::from_exit_code(exit_code) {
- match error {
- ProxyLaunchError::ServerNotRunning => {
- log::error!("failed to reconnect because server is not running");
- this.update(cx, |this, cx| {
- this.set_state(State::ServerNotRunning, cx);
- })?;
- }
- }
- } else if exit_code > 0 {
- log::error!("proxy process terminated unexpectedly");
- this.update(cx, |this, cx| {
- this.reconnect(cx).ok();
- })?;
- }
- }
- Err(error) => {
- log::warn!("ssh io task died with error: {:?}. reconnecting...", error);
- this.update(cx, |this, cx| {
- this.reconnect(cx).ok();
- })?;
- }
- }
-
- Ok(())
- })
- }
-
- fn state_is(&self, check: impl FnOnce(&State) -> bool) -> bool {
- self.state.lock().as_ref().is_some_and(check)
- }
-
- fn try_set_state(&self, cx: &mut Context<Self>, map: impl FnOnce(&State) -> Option<State>) {
- let mut lock = self.state.lock();
- let new_state = lock.as_ref().and_then(map);
-
- if let Some(new_state) = new_state {
- lock.replace(new_state);
- cx.notify();
- }
- }
-
- fn set_state(&self, state: State, cx: &mut Context<Self>) {
- log::info!("setting state to '{}'", &state);
-
- let is_reconnect_exhausted = state.is_reconnect_exhausted();
- let is_server_not_running = state.is_server_not_running();
- self.state.lock().replace(state);
-
- if is_reconnect_exhausted || is_server_not_running {
- cx.emit(SshRemoteEvent::Disconnected);
- }
- cx.notify();
- }
-
- pub fn subscribe_to_entity<E: 'static>(&self, remote_id: u64, entity: &Entity<E>) {
- self.client.subscribe_to_entity(remote_id, entity);
- }
-
- pub fn ssh_info(&self) -> Option<(SshArgs, PathStyle)> {
- self.state
- .lock()
- .as_ref()
- .and_then(|state| state.ssh_connection())
- .map(|ssh_connection| (ssh_connection.ssh_args(), ssh_connection.path_style()))
- }
-
- pub fn upload_directory(
- &self,
- src_path: PathBuf,
- dest_path: RemotePathBuf,
- cx: &App,
- ) -> Task<Result<()>> {
- let state = self.state.lock();
- let Some(connection) = state.as_ref().and_then(|state| state.ssh_connection()) else {
- return Task::ready(Err(anyhow!("no ssh connection")));
- };
- connection.upload_directory(src_path, dest_path, cx)
- }
-
- pub fn proto_client(&self) -> AnyProtoClient {
- self.client.clone().into()
- }
-
- pub fn connection_string(&self) -> String {
- self.connection_options.connection_string()
- }
-
- pub fn connection_options(&self) -> SshConnectionOptions {
- self.connection_options.clone()
- }
-
- pub fn connection_state(&self) -> ConnectionState {
- self.state
- .lock()
- .as_ref()
- .map(ConnectionState::from)
- .unwrap_or(ConnectionState::Disconnected)
- }
-
- pub fn is_disconnected(&self) -> bool {
- self.connection_state() == ConnectionState::Disconnected
- }
-
- pub fn path_style(&self) -> PathStyle {
- self.path_style
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn simulate_disconnect(&self, client_cx: &mut App) -> Task<()> {
- let opts = self.connection_options();
- client_cx.spawn(async move |cx| {
- let connection = cx
- .update_global(|c: &mut ConnectionPool, _| {
- if let Some(ConnectionPoolEntry::Connecting(c)) = c.connections.get(&opts) {
- c.clone()
- } else {
- panic!("missing test connection")
- }
- })
- .unwrap()
- .await
- .unwrap();
-
- connection.simulate_disconnect(cx);
- })
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn fake_server(
- client_cx: &mut gpui::TestAppContext,
- server_cx: &mut gpui::TestAppContext,
- ) -> (SshConnectionOptions, Arc<ChannelClient>) {
- let port = client_cx
- .update(|cx| cx.default_global::<ConnectionPool>().connections.len() as u16 + 1);
- let opts = SshConnectionOptions {
- host: "<fake>".to_string(),
- port: Some(port),
- ..Default::default()
- };
- let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
- let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
- let server_client =
- server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server"));
- let connection: Arc<dyn RemoteConnection> = Arc::new(fake::FakeRemoteConnection {
- connection_options: opts.clone(),
- server_cx: fake::SendableCx::new(server_cx),
- server_channel: server_client.clone(),
- });
-
- client_cx.update(|cx| {
- cx.update_default_global(|c: &mut ConnectionPool, cx| {
- c.connections.insert(
- opts.clone(),
- ConnectionPoolEntry::Connecting(
- cx.background_spawn({
- let connection = connection.clone();
- async move { Ok(connection.clone()) }
- })
- .shared(),
- ),
- );
- })
- });
-
- (opts, server_client)
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub async fn fake_client(
- opts: SshConnectionOptions,
- client_cx: &mut gpui::TestAppContext,
- ) -> Entity<Self> {
- let (_tx, rx) = oneshot::channel();
- client_cx
- .update(|cx| {
- Self::new(
- ConnectionIdentifier::setup(),
- opts,
- rx,
- Arc::new(fake::Delegate),
- cx,
- )
- })
- .await
- .unwrap()
- .unwrap()
- }
-}
-
-enum ConnectionPoolEntry {
- Connecting(Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>>),
- Connected(Weak<dyn RemoteConnection>),
-}
-
-#[derive(Default)]
-struct ConnectionPool {
- connections: HashMap<SshConnectionOptions, ConnectionPoolEntry>,
-}
-
-impl Global for ConnectionPool {}
-
-impl ConnectionPool {
- pub fn connect(
- &mut self,
- opts: SshConnectionOptions,
- delegate: &Arc<dyn SshClientDelegate>,
- cx: &mut App,
- ) -> Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>> {
- let connection = self.connections.get(&opts);
- match connection {
- Some(ConnectionPoolEntry::Connecting(task)) => {
- let delegate = delegate.clone();
- cx.spawn(async move |cx| {
- delegate.set_status(Some("Waiting for existing connection attempt"), cx);
- })
- .detach();
- return task.clone();
- }
- Some(ConnectionPoolEntry::Connected(ssh)) => {
- if let Some(ssh) = ssh.upgrade()
- && !ssh.has_been_killed()
- {
- return Task::ready(Ok(ssh)).shared();
- }
- self.connections.remove(&opts);
- }
- None => {}
- }
-
- let task = cx
- .spawn({
- let opts = opts.clone();
- let delegate = delegate.clone();
- async move |cx| {
- let connection = SshRemoteConnection::new(opts.clone(), delegate, cx)
- .await
- .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>);
-
- cx.update_global(|pool: &mut Self, _| {
- debug_assert!(matches!(
- pool.connections.get(&opts),
- Some(ConnectionPoolEntry::Connecting(_))
- ));
- match connection {
- Ok(connection) => {
- pool.connections.insert(
- opts.clone(),
- ConnectionPoolEntry::Connected(Arc::downgrade(&connection)),
- );
- Ok(connection)
- }
- Err(error) => {
- pool.connections.remove(&opts);
- Err(Arc::new(error))
- }
- }
- })?
- }
- })
- .shared();
-
- self.connections
- .insert(opts.clone(), ConnectionPoolEntry::Connecting(task.clone()));
- task
- }
-}
-
-impl From<SshRemoteClient> for AnyProtoClient {
- fn from(client: SshRemoteClient) -> Self {
- AnyProtoClient::new(client.client.clone())
- }
-}
-
-#[async_trait(?Send)]
-trait RemoteConnection: Send + Sync {
- fn start_proxy(
- &self,
- unique_identifier: String,
- reconnect: bool,
- incoming_tx: UnboundedSender<Envelope>,
- outgoing_rx: UnboundedReceiver<Envelope>,
- connection_activity_tx: Sender<()>,
- delegate: Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Task<Result<i32>>;
- fn upload_directory(
- &self,
- src_path: PathBuf,
- dest_path: RemotePathBuf,
- cx: &App,
- ) -> Task<Result<()>>;
- async fn kill(&self) -> Result<()>;
- fn has_been_killed(&self) -> bool;
- /// On Windows, we need to use `SSH_ASKPASS` to provide the password to ssh.
- /// On Linux, we use the `ControlPath` option to create a socket file that ssh can use to
- fn ssh_args(&self) -> SshArgs;
- fn connection_options(&self) -> SshConnectionOptions;
- fn path_style(&self) -> PathStyle;
-
- #[cfg(any(test, feature = "test-support"))]
- fn simulate_disconnect(&self, _: &AsyncApp) {}
-}
-
-struct SshRemoteConnection {
- socket: SshSocket,
- master_process: Mutex<Option<Child>>,
- remote_binary_path: Option<RemotePathBuf>,
- ssh_platform: SshPlatform,
- ssh_path_style: PathStyle,
- _temp_dir: TempDir,
-}
-
-#[async_trait(?Send)]
-impl RemoteConnection for SshRemoteConnection {
- async fn kill(&self) -> Result<()> {
- let Some(mut process) = self.master_process.lock().take() else {
- return Ok(());
- };
- process.kill().ok();
- process.status().await?;
- Ok(())
- }
-
- fn has_been_killed(&self) -> bool {
- self.master_process.lock().is_none()
- }
-
- fn ssh_args(&self) -> SshArgs {
- self.socket.ssh_args()
- }
-
- fn connection_options(&self) -> SshConnectionOptions {
- self.socket.connection_options.clone()
- }
-
- fn upload_directory(
- &self,
- src_path: PathBuf,
- dest_path: RemotePathBuf,
- cx: &App,
- ) -> Task<Result<()>> {
- let mut command = util::command::new_smol_command("scp");
- let output = self
- .socket
- .ssh_options(&mut command)
- .args(
- self.socket
- .connection_options
- .port
- .map(|port| vec!["-P".to_string(), port.to_string()])
- .unwrap_or_default(),
- )
- .arg("-C")
- .arg("-r")
- .arg(&src_path)
- .arg(format!(
- "{}:{}",
- self.socket.connection_options.scp_url(),
- dest_path.to_string()
- ))
- .output();
-
- cx.background_spawn(async move {
- let output = output.await?;
-
- anyhow::ensure!(
- output.status.success(),
- "failed to upload directory {} -> {}: {}",
- src_path.display(),
- dest_path.to_string(),
- String::from_utf8_lossy(&output.stderr)
- );
-
- Ok(())
- })
- }
-
- fn start_proxy(
- &self,
- unique_identifier: String,
- reconnect: bool,
- incoming_tx: UnboundedSender<Envelope>,
- outgoing_rx: UnboundedReceiver<Envelope>,
- connection_activity_tx: Sender<()>,
- delegate: Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Task<Result<i32>> {
- delegate.set_status(Some("Starting proxy"), cx);
-
- let Some(remote_binary_path) = self.remote_binary_path.clone() else {
- return Task::ready(Err(anyhow!("Remote binary path not set")));
- };
-
- let mut start_proxy_command = shell_script!(
- "exec {binary_path} proxy --identifier {identifier}",
- binary_path = &remote_binary_path.to_string(),
- identifier = &unique_identifier,
- );
-
- for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
- if let Some(value) = std::env::var(env_var).ok() {
- start_proxy_command = format!(
- "{}={} {} ",
- env_var,
- shlex::try_quote(&value).unwrap(),
- start_proxy_command,
- );
- }
- }
-
- if reconnect {
- start_proxy_command.push_str(" --reconnect");
- }
-
- let ssh_proxy_process = match self
- .socket
- .ssh_command("sh", &["-c", &start_proxy_command])
- // IMPORTANT: we kill this process when we drop the task that uses it.
- .kill_on_drop(true)
- .spawn()
- {
- Ok(process) => process,
- Err(error) => {
- return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error)));
- }
- };
-
- Self::multiplex(
- ssh_proxy_process,
- incoming_tx,
- outgoing_rx,
- connection_activity_tx,
- cx,
- )
- }
-
- fn path_style(&self) -> PathStyle {
- self.ssh_path_style
- }
-}
-
-impl SshRemoteConnection {
- async fn new(
- connection_options: SshConnectionOptions,
- delegate: Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Result<Self> {
- use askpass::AskPassResult;
-
- delegate.set_status(Some("Connecting"), cx);
-
- let url = connection_options.ssh_url();
-
- let temp_dir = tempfile::Builder::new()
- .prefix("zed-ssh-session")
- .tempdir()?;
- let askpass_delegate = askpass::AskPassDelegate::new(cx, {
- let delegate = delegate.clone();
- move |prompt, tx, cx| delegate.ask_password(prompt, tx, cx)
- });
-
- let mut askpass =
- askpass::AskPassSession::new(cx.background_executor(), askpass_delegate).await?;
-
- // Start the master SSH process, which does not do anything except for establish
- // the connection and keep it open, allowing other ssh commands to reuse it
- // via a control socket.
- #[cfg(not(target_os = "windows"))]
- let socket_path = temp_dir.path().join("ssh.sock");
-
- let mut master_process = {
- #[cfg(not(target_os = "windows"))]
- let args = [
- "-N",
- "-o",
- "ControlPersist=no",
- "-o",
- "ControlMaster=yes",
- "-o",
- ];
- // On Windows, `ControlMaster` and `ControlPath` are not supported:
- // https://github.com/PowerShell/Win32-OpenSSH/issues/405
- // https://github.com/PowerShell/Win32-OpenSSH/wiki/Project-Scope
- #[cfg(target_os = "windows")]
- let args = ["-N"];
- let mut master_process = util::command::new_smol_command("ssh");
- master_process
- .kill_on_drop(true)
- .stdin(Stdio::null())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .env("SSH_ASKPASS_REQUIRE", "force")
- .env("SSH_ASKPASS", askpass.script_path())
- .args(connection_options.additional_args())
- .args(args);
- #[cfg(not(target_os = "windows"))]
- master_process.arg(format!("ControlPath={}", socket_path.display()));
- master_process.arg(&url).spawn()?
- };
- // Wait for this ssh process to close its stdout, indicating that authentication
- // has completed.
- let mut stdout = master_process.stdout.take().unwrap();
- let mut output = Vec::new();
-
- let result = select_biased! {
- result = askpass.run().fuse() => {
- match result {
- AskPassResult::CancelledByUser => {
- master_process.kill().ok();
- anyhow::bail!("SSH connection canceled")
- }
- AskPassResult::Timedout => {
- anyhow::bail!("connecting to host timed out")
- }
- }
- }
- _ = stdout.read_to_end(&mut output).fuse() => {
- anyhow::Ok(())
- }
- };
-
- if let Err(e) = result {
- return Err(e.context("Failed to connect to host"));
- }
-
- if master_process.try_status()?.is_some() {
- output.clear();
- let mut stderr = master_process.stderr.take().unwrap();
- stderr.read_to_end(&mut output).await?;
-
- let error_message = format!(
- "failed to connect: {}",
- String::from_utf8_lossy(&output).trim()
- );
- anyhow::bail!(error_message);
- }
-
- #[cfg(not(target_os = "windows"))]
- let socket = SshSocket::new(connection_options, socket_path)?;
- #[cfg(target_os = "windows")]
- let socket = SshSocket::new(connection_options, &temp_dir, askpass.get_password())?;
- drop(askpass);
-
- let ssh_platform = socket.platform().await?;
- let ssh_path_style = match ssh_platform.os {
- "windows" => PathStyle::Windows,
- _ => PathStyle::Posix,
- };
-
- let mut this = Self {
- socket,
- master_process: Mutex::new(Some(master_process)),
- _temp_dir: temp_dir,
- remote_binary_path: None,
- ssh_path_style,
- ssh_platform,
- };
-
- let (release_channel, version, commit) = cx.update(|cx| {
- (
- ReleaseChannel::global(cx),
- AppVersion::global(cx),
- AppCommitSha::try_global(cx),
- )
- })?;
- this.remote_binary_path = Some(
- this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
- .await?,
- );
-
- Ok(this)
- }
-
- fn multiplex(
- mut ssh_proxy_process: Child,
- incoming_tx: UnboundedSender<Envelope>,
- mut outgoing_rx: UnboundedReceiver<Envelope>,
- mut connection_activity_tx: Sender<()>,
- cx: &AsyncApp,
- ) -> Task<Result<i32>> {
- let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
- let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
- let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
-
- let mut stdin_buffer = Vec::new();
- let mut stdout_buffer = Vec::new();
- let mut stderr_buffer = Vec::new();
- let mut stderr_offset = 0;
-
- let stdin_task = cx.background_spawn(async move {
- while let Some(outgoing) = outgoing_rx.next().await {
- write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
- }
- anyhow::Ok(())
- });
-
- let stdout_task = cx.background_spawn({
- let mut connection_activity_tx = connection_activity_tx.clone();
- async move {
- loop {
- stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
- let len = child_stdout.read(&mut stdout_buffer).await?;
-
- if len == 0 {
- return anyhow::Ok(());
- }
-
- if len < MESSAGE_LEN_SIZE {
- child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
- }
-
- let message_len = message_len_from_buffer(&stdout_buffer);
- let envelope =
- read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
- .await?;
- connection_activity_tx.try_send(()).ok();
- incoming_tx.unbounded_send(envelope).ok();
- }
- }
- });
-
- let stderr_task: Task<anyhow::Result<()>> = cx.background_spawn(async move {
- loop {
- stderr_buffer.resize(stderr_offset + 1024, 0);
-
- let len = child_stderr
- .read(&mut stderr_buffer[stderr_offset..])
- .await?;
- if len == 0 {
- return anyhow::Ok(());
- }
-
- stderr_offset += len;
- let mut start_ix = 0;
- while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
- .iter()
- .position(|b| b == &b'\n')
- {
- let line_ix = start_ix + ix;
- let content = &stderr_buffer[start_ix..line_ix];
- start_ix = line_ix + 1;
- if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
- record.log(log::logger())
- } else {
- eprintln!("(remote) {}", String::from_utf8_lossy(content));
- }
- }
- stderr_buffer.drain(0..start_ix);
- stderr_offset -= start_ix;
-
- connection_activity_tx.try_send(()).ok();
- }
- });
-
- cx.background_spawn(async move {
- let result = futures::select! {
- result = stdin_task.fuse() => {
- result.context("stdin")
- }
- result = stdout_task.fuse() => {
- result.context("stdout")
- }
- result = stderr_task.fuse() => {
- result.context("stderr")
- }
- };
-
- let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
- match result {
- Ok(_) => Ok(status),
- Err(error) => Err(error),
- }
- })
- }
-
- #[allow(unused)]
- async fn ensure_server_binary(
- &self,
- delegate: &Arc<dyn SshClientDelegate>,
- release_channel: ReleaseChannel,
- version: SemanticVersion,
- commit: Option<AppCommitSha>,
- cx: &mut AsyncApp,
- ) -> Result<RemotePathBuf> {
- let version_str = match release_channel {
- ReleaseChannel::Nightly => {
- let commit = commit.map(|s| s.full()).unwrap_or_default();
- format!("{}-{}", version, commit)
- }
- ReleaseChannel::Dev => "build".to_string(),
- _ => version.to_string(),
- };
- let binary_name = format!(
- "zed-remote-server-{}-{}",
- release_channel.dev_name(),
- version_str
- );
- let dst_path = RemotePathBuf::new(
- paths::remote_server_dir_relative().join(binary_name),
- self.ssh_path_style,
- );
-
- let build_remote_server = std::env::var("ZED_BUILD_REMOTE_SERVER").ok();
- #[cfg(debug_assertions)]
- if let Some(build_remote_server) = build_remote_server {
- let src_path = self.build_local(build_remote_server, delegate, cx).await?;
- let tmp_path = RemotePathBuf::new(
- paths::remote_server_dir_relative().join(format!(
- "download-{}-{}",
- std::process::id(),
- src_path.file_name().unwrap().to_string_lossy()
- )),
- self.ssh_path_style,
- );
- self.upload_local_server_binary(&src_path, &tmp_path, delegate, cx)
- .await?;
- self.extract_server_binary(&dst_path, &tmp_path, delegate, cx)
- .await?;
- return Ok(dst_path);
- }
-
- if self
- .socket
- .run_command(&dst_path.to_string(), &["version"])
- .await
- .is_ok()
- {
- return Ok(dst_path);
- }
-
- let wanted_version = cx.update(|cx| match release_channel {
- ReleaseChannel::Nightly => Ok(None),
- ReleaseChannel::Dev => {
- anyhow::bail!(
- "ZED_BUILD_REMOTE_SERVER is not set and no remote server exists at ({:?})",
- dst_path
- )
- }
- _ => Ok(Some(AppVersion::global(cx))),
- })??;
-
- let tmp_path_gz = RemotePathBuf::new(
- PathBuf::from(format!(
- "{}-download-{}.gz",
- dst_path.to_string(),
- std::process::id()
- )),
- self.ssh_path_style,
- );
- if !self.socket.connection_options.upload_binary_over_ssh
- && let Some((url, body)) = delegate
- .get_download_params(self.ssh_platform, release_channel, wanted_version, cx)
- .await?
- {
- match self
- .download_binary_on_server(&url, &body, &tmp_path_gz, delegate, cx)
- .await
- {
- Ok(_) => {
- self.extract_server_binary(&dst_path, &tmp_path_gz, delegate, cx)
- .await?;
- return Ok(dst_path);
- }
- Err(e) => {
- log::error!(
- "Failed to download binary on server, attempting to upload server: {}",
- e
- )
- }
- }
- }
-
- let src_path = delegate
- .download_server_binary_locally(self.ssh_platform, release_channel, wanted_version, cx)
- .await?;
- self.upload_local_server_binary(&src_path, &tmp_path_gz, delegate, cx)
- .await?;
- self.extract_server_binary(&dst_path, &tmp_path_gz, delegate, cx)
- .await?;
- Ok(dst_path)
- }
-
- async fn download_binary_on_server(
- &self,
- url: &str,
- body: &str,
- tmp_path_gz: &RemotePathBuf,
- delegate: &Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Result<()> {
- if let Some(parent) = tmp_path_gz.parent() {
- self.socket
- .run_command(
- "sh",
- &[
- "-c",
- &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()),
- ],
- )
- .await?;
- }
-
- delegate.set_status(Some("Downloading remote development server on host"), cx);
-
- match self
- .socket
- .run_command(
- "curl",
- &[
- "-f",
- "-L",
- "-X",
- "GET",
- "-H",
- "Content-Type: application/json",
- "-d",
- body,
- url,
- "-o",
- &tmp_path_gz.to_string(),
- ],
- )
- .await
- {
- Ok(_) => {}
- Err(e) => {
- if self.socket.run_command("which", &["curl"]).await.is_ok() {
- return Err(e);
- }
-
- match self
- .socket
- .run_command(
- "wget",
- &[
- "--method=GET",
- "--header=Content-Type: application/json",
- "--body-data",
- body,
- url,
- "-O",
- &tmp_path_gz.to_string(),
- ],
- )
- .await
- {
- Ok(_) => {}
- Err(e) => {
- if self.socket.run_command("which", &["wget"]).await.is_ok() {
- return Err(e);
- } else {
- anyhow::bail!("Neither curl nor wget is available");
- }
- }
- }
- }
- }
-
- Ok(())
- }
-
- async fn upload_local_server_binary(
- &self,
- src_path: &Path,
- tmp_path_gz: &RemotePathBuf,
- delegate: &Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Result<()> {
- if let Some(parent) = tmp_path_gz.parent() {
- self.socket
- .run_command(
- "sh",
- &[
- "-c",
- &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()),
- ],
- )
- .await?;
- }
-
- let src_stat = fs::metadata(&src_path).await?;
- let size = src_stat.len();
-
- let t0 = Instant::now();
- delegate.set_status(Some("Uploading remote development server"), cx);
- log::info!(
- "uploading remote development server to {:?} ({}kb)",
- tmp_path_gz,
- size / 1024
- );
- self.upload_file(src_path, tmp_path_gz)
- .await
- .context("failed to upload server binary")?;
- log::info!("uploaded remote development server in {:?}", t0.elapsed());
- Ok(())
- }
-
- async fn extract_server_binary(
- &self,
- dst_path: &RemotePathBuf,
- tmp_path: &RemotePathBuf,
- delegate: &Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Result<()> {
- delegate.set_status(Some("Extracting remote development server"), cx);
- let server_mode = 0o755;
-
- let orig_tmp_path = tmp_path.to_string();
- let script = if let Some(tmp_path) = orig_tmp_path.strip_suffix(".gz") {
- shell_script!(
- "gunzip -f {orig_tmp_path} && chmod {server_mode} {tmp_path} && mv {tmp_path} {dst_path}",
- server_mode = &format!("{:o}", server_mode),
- dst_path = &dst_path.to_string(),
- )
- } else {
- shell_script!(
- "chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",
- server_mode = &format!("{:o}", server_mode),
- dst_path = &dst_path.to_string()
- )
- };
- self.socket.run_command("sh", &["-c", &script]).await?;
- Ok(())
- }
-
- async fn upload_file(&self, src_path: &Path, dest_path: &RemotePathBuf) -> Result<()> {
- log::debug!("uploading file {:?} to {:?}", src_path, dest_path);
- let mut command = util::command::new_smol_command("scp");
- let output = self
- .socket
- .ssh_options(&mut command)
- .args(
- self.socket
- .connection_options
- .port
- .map(|port| vec!["-P".to_string(), port.to_string()])
- .unwrap_or_default(),
- )
- .arg(src_path)
- .arg(format!(
- "{}:{}",
- self.socket.connection_options.scp_url(),
- dest_path.to_string()
- ))
- .output()
- .await?;
-
- anyhow::ensure!(
- output.status.success(),
- "failed to upload file {} -> {}: {}",
- src_path.display(),
- dest_path.to_string(),
- String::from_utf8_lossy(&output.stderr)
- );
- Ok(())
- }
-
- #[cfg(debug_assertions)]
- async fn build_local(
- &self,
- build_remote_server: String,
- delegate: &Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Result<PathBuf> {
- use smol::process::{Command, Stdio};
- use std::env::VarError;
-
- async fn run_cmd(command: &mut Command) -> Result<()> {
- let output = command
- .kill_on_drop(true)
- .stderr(Stdio::inherit())
- .output()
- .await?;
- anyhow::ensure!(
- output.status.success(),
- "Failed to run command: {command:?}"
- );
- Ok(())
- }
-
- let use_musl = !build_remote_server.contains("nomusl");
- let triple = format!(
- "{}-{}",
- self.ssh_platform.arch,
- match self.ssh_platform.os {
- "linux" =>
- if use_musl {
- "unknown-linux-musl"
- } else {
- "unknown-linux-gnu"
- },
- "macos" => "apple-darwin",
- _ => anyhow::bail!("can't cross compile for: {:?}", self.ssh_platform),
- }
- );
- let mut rust_flags = match std::env::var("RUSTFLAGS") {
- Ok(val) => val,
- Err(VarError::NotPresent) => String::new(),
- Err(e) => {
- log::error!("Failed to get env var `RUSTFLAGS` value: {e}");
- String::new()
- }
- };
- if self.ssh_platform.os == "linux" && use_musl {
- rust_flags.push_str(" -C target-feature=+crt-static");
- }
- if build_remote_server.contains("mold") {
- rust_flags.push_str(" -C link-arg=-fuse-ld=mold");
- }
-
- if self.ssh_platform.arch == std::env::consts::ARCH
- && self.ssh_platform.os == std::env::consts::OS
- {
- delegate.set_status(Some("Building remote server binary from source"), cx);
- log::info!("building remote server binary from source");
- run_cmd(
- Command::new("cargo")
- .args([
- "build",
- "--package",
- "remote_server",
- "--features",
- "debug-embed",
- "--target-dir",
- "target/remote_server",
- "--target",
- &triple,
- ])
- .env("RUSTFLAGS", &rust_flags),
- )
- .await?;
- } else {
- if build_remote_server.contains("cross") {
- #[cfg(target_os = "windows")]
- use util::paths::SanitizedPath;
-
- delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx);
- log::info!("installing cross");
- run_cmd(Command::new("cargo").args([
- "install",
- "cross",
- "--git",
- "https://github.com/cross-rs/cross",
- ]))
- .await?;
-
- delegate.set_status(
- Some(&format!(
- "Building remote server binary from source for {} with Docker",
- &triple
- )),
- cx,
- );
- log::info!("building remote server binary from source for {}", &triple);
-
- // On Windows, the binding needs to be set to the canonical path
- #[cfg(target_os = "windows")]
- let src =
- SanitizedPath::from(smol::fs::canonicalize("./target").await?).to_glob_string();
- #[cfg(not(target_os = "windows"))]
- let src = "./target";
- run_cmd(
- Command::new("cross")
- .args([
- "build",
- "--package",
- "remote_server",
- "--features",
- "debug-embed",
- "--target-dir",
- "target/remote_server",
- "--target",
- &triple,
- ])
- .env(
- "CROSS_CONTAINER_OPTS",
- format!("--mount type=bind,src={src},dst=/app/target"),
- )
- .env("RUSTFLAGS", &rust_flags),
- )
- .await?;
- } else {
- let which = cx
- .background_spawn(async move { which::which("zig") })
- .await;
-
- if which.is_err() {
- #[cfg(not(target_os = "windows"))]
- {
- anyhow::bail!(
- "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
- )
- }
- #[cfg(target_os = "windows")]
- {
- anyhow::bail!(
- "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
- )
- }
- }
-
- delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
- log::info!("adding rustup target");
- run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?;
-
- delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
- log::info!("installing cargo-zigbuild");
- run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"]))
- .await?;
-
- delegate.set_status(
- Some(&format!(
- "Building remote binary from source for {triple} with Zig"
- )),
- cx,
- );
- log::info!("building remote binary from source for {triple} with Zig");
- run_cmd(
- Command::new("cargo")
- .args([
- "zigbuild",
- "--package",
- "remote_server",
- "--features",
- "debug-embed",
- "--target-dir",
- "target/remote_server",
- "--target",
- &triple,
- ])
- .env("RUSTFLAGS", &rust_flags),
- )
- .await?;
- }
- };
- let bin_path = Path::new("target")
- .join("remote_server")
- .join(&triple)
- .join("debug")
- .join("remote_server");
-
- let path = if !build_remote_server.contains("nocompress") {
- delegate.set_status(Some("Compressing binary"), cx);
-
- #[cfg(not(target_os = "windows"))]
- {
- run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
- }
- #[cfg(target_os = "windows")]
- {
- // On Windows, we use 7z to compress the binary
- let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
- let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple);
- if smol::fs::metadata(&gz_path).await.is_ok() {
- smol::fs::remove_file(&gz_path).await?;
- }
- run_cmd(Command::new(seven_zip).args([
- "a",
- "-tgzip",
- &gz_path,
- &bin_path.to_string_lossy(),
- ]))
- .await?;
- }
-
- let mut archive_path = bin_path;
- archive_path.set_extension("gz");
- std::env::current_dir()?.join(archive_path)
- } else {
- bin_path
- };
-
- Ok(path)
- }
-}
-
-type ResponseChannels = Mutex<HashMap<MessageId, oneshot::Sender<(Envelope, oneshot::Sender<()>)>>>;
-
-pub struct ChannelClient {
- next_message_id: AtomicU32,
- outgoing_tx: Mutex<mpsc::UnboundedSender<Envelope>>,
- buffer: Mutex<VecDeque<Envelope>>,
- response_channels: ResponseChannels,
- message_handlers: Mutex<ProtoMessageHandlerSet>,
- max_received: AtomicU32,
- name: &'static str,
- task: Mutex<Task<Result<()>>>,
-}
-
-impl ChannelClient {
- pub fn new(
- incoming_rx: mpsc::UnboundedReceiver<Envelope>,
- outgoing_tx: mpsc::UnboundedSender<Envelope>,
- cx: &App,
- name: &'static str,
- ) -> Arc<Self> {
- Arc::new_cyclic(|this| Self {
- outgoing_tx: Mutex::new(outgoing_tx),
- next_message_id: AtomicU32::new(0),
- max_received: AtomicU32::new(0),
- response_channels: ResponseChannels::default(),
- message_handlers: Default::default(),
- buffer: Mutex::new(VecDeque::new()),
- name,
- task: Mutex::new(Self::start_handling_messages(
- this.clone(),
- incoming_rx,
- &cx.to_async(),
- )),
- })
- }
-
- fn start_handling_messages(
- this: Weak<Self>,
- mut incoming_rx: mpsc::UnboundedReceiver<Envelope>,
- cx: &AsyncApp,
- ) -> Task<Result<()>> {
- cx.spawn(async move |cx| {
- let peer_id = PeerId { owner_id: 0, id: 0 };
- while let Some(incoming) = incoming_rx.next().await {
- let Some(this) = this.upgrade() else {
- return anyhow::Ok(());
- };
- if let Some(ack_id) = incoming.ack_id {
- let mut buffer = this.buffer.lock();
- while buffer.front().is_some_and(|msg| msg.id <= ack_id) {
- buffer.pop_front();
- }
- }
- if let Some(proto::envelope::Payload::FlushBufferedMessages(_)) = &incoming.payload
- {
- log::debug!(
- "{}:ssh message received. name:FlushBufferedMessages",
- this.name
- );
- {
- let buffer = this.buffer.lock();
- for envelope in buffer.iter() {
- this.outgoing_tx
- .lock()
- .unbounded_send(envelope.clone())
- .ok();
- }
- }
- let mut envelope = proto::Ack {}.into_envelope(0, Some(incoming.id), None);
- envelope.id = this.next_message_id.fetch_add(1, SeqCst);
- this.outgoing_tx.lock().unbounded_send(envelope).ok();
- continue;
- }
-
- this.max_received.store(incoming.id, SeqCst);
-
- if let Some(request_id) = incoming.responding_to {
- let request_id = MessageId(request_id);
- let sender = this.response_channels.lock().remove(&request_id);
- if let Some(sender) = sender {
- let (tx, rx) = oneshot::channel();
- if incoming.payload.is_some() {
- sender.send((incoming, tx)).ok();
- }
- rx.await.ok();
- }
- } else if let Some(envelope) =
- build_typed_envelope(peer_id, Instant::now(), incoming)
- {
- let type_name = envelope.payload_type_name();
- if let Some(future) = ProtoMessageHandlerSet::handle_message(
- &this.message_handlers,
- envelope,
- this.clone().into(),
- cx.clone(),
- ) {
- log::debug!("{}:ssh message received. name:{type_name}", this.name);
- cx.foreground_executor()
- .spawn(async move {
- match future.await {
- Ok(_) => {
- log::debug!(
- "{}:ssh message handled. name:{type_name}",
- this.name
- );
- }
- Err(error) => {
- log::error!(
- "{}:error handling message. type:{}, error:{}",
- this.name,
- type_name,
- format!("{error:#}").lines().fold(
- String::new(),
- |mut message, line| {
- if !message.is_empty() {
- message.push(' ');
- }
- message.push_str(line);
- message
- }
- )
- );
- }
- }
- })
- .detach()
- } else {
- log::error!("{}:unhandled ssh message name:{type_name}", this.name);
- }
- }
- }
- anyhow::Ok(())
- })
- }
-
- pub fn reconnect(
- self: &Arc<Self>,
- incoming_rx: UnboundedReceiver<Envelope>,
- outgoing_tx: UnboundedSender<Envelope>,
- cx: &AsyncApp,
- ) {
- *self.outgoing_tx.lock() = outgoing_tx;
- *self.task.lock() = Self::start_handling_messages(Arc::downgrade(self), incoming_rx, cx);
- }
-
- pub fn subscribe_to_entity<E: 'static>(&self, remote_id: u64, entity: &Entity<E>) {
- let id = (TypeId::of::<E>(), remote_id);
-
- let mut message_handlers = self.message_handlers.lock();
- if message_handlers
- .entities_by_type_and_remote_id
- .contains_key(&id)
- {
- panic!("already subscribed to entity");
- }
-
- message_handlers.entities_by_type_and_remote_id.insert(
- id,
- EntityMessageSubscriber::Entity {
- handle: entity.downgrade().into(),
- },
- );
- }
-
- pub fn request<T: RequestMessage>(
- &self,
- payload: T,
- ) -> impl 'static + Future<Output = Result<T::Response>> {
- self.request_internal(payload, true)
- }
-
- fn request_internal<T: RequestMessage>(
- &self,
- payload: T,
- use_buffer: bool,
- ) -> impl 'static + Future<Output = Result<T::Response>> {
- log::debug!("ssh request start. name:{}", T::NAME);
- let response =
- self.request_dynamic(payload.into_envelope(0, None, None), T::NAME, use_buffer);
- async move {
- let response = response.await?;
- log::debug!("ssh request finish. name:{}", T::NAME);
- T::Response::from_envelope(response).context("received a response of the wrong type")
- }
- }
-
- pub async fn resync(&self, timeout: Duration) -> Result<()> {
- smol::future::or(
- async {
- self.request_internal(proto::FlushBufferedMessages {}, false)
- .await?;
-
- for envelope in self.buffer.lock().iter() {
- self.outgoing_tx
- .lock()
- .unbounded_send(envelope.clone())
- .ok();
- }
- Ok(())
- },
- async {
- smol::Timer::after(timeout).await;
- anyhow::bail!("Timed out resyncing remote client")
- },
- )
- .await
- }
-
- pub async fn ping(&self, timeout: Duration) -> Result<()> {
- smol::future::or(
- async {
- self.request(proto::Ping {}).await?;
- Ok(())
- },
- async {
- smol::Timer::after(timeout).await;
- anyhow::bail!("Timed out pinging remote client")
- },
- )
- .await
- }
-
- pub fn send<T: EnvelopedMessage>(&self, payload: T) -> Result<()> {
- log::debug!("ssh send name:{}", T::NAME);
- self.send_dynamic(payload.into_envelope(0, None, None))
- }
-
- fn request_dynamic(
- &self,
- mut envelope: proto::Envelope,
- type_name: &'static str,
- use_buffer: bool,
- ) -> impl 'static + Future<Output = Result<proto::Envelope>> {
- envelope.id = self.next_message_id.fetch_add(1, SeqCst);
- let (tx, rx) = oneshot::channel();
- let mut response_channels_lock = self.response_channels.lock();
- response_channels_lock.insert(MessageId(envelope.id), tx);
- drop(response_channels_lock);
-
- let result = if use_buffer {
- self.send_buffered(envelope)
- } else {
- self.send_unbuffered(envelope)
- };
- async move {
- if let Err(error) = &result {
- log::error!("failed to send message: {error}");
- anyhow::bail!("failed to send message: {error}");
- }
-
- let response = rx.await.context("connection lost")?.0;
- if let Some(proto::envelope::Payload::Error(error)) = &response.payload {
- return Err(RpcError::from_proto(error, type_name));
- }
- Ok(response)
- }
- }
-
- pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> {
- envelope.id = self.next_message_id.fetch_add(1, SeqCst);
- self.send_buffered(envelope)
- }
-
- fn send_buffered(&self, mut envelope: proto::Envelope) -> Result<()> {
- envelope.ack_id = Some(self.max_received.load(SeqCst));
- self.buffer.lock().push_back(envelope.clone());
- // ignore errors on send (happen while we're reconnecting)
- // assume that the global "disconnected" overlay is sufficient.
- self.outgoing_tx.lock().unbounded_send(envelope).ok();
- Ok(())
- }
-
- fn send_unbuffered(&self, mut envelope: proto::Envelope) -> Result<()> {
- envelope.ack_id = Some(self.max_received.load(SeqCst));
- self.outgoing_tx.lock().unbounded_send(envelope).ok();
- Ok(())
- }
-}
-
-impl ProtoClient for ChannelClient {
- fn request(
- &self,
- envelope: proto::Envelope,
- request_type: &'static str,
- ) -> BoxFuture<'static, Result<proto::Envelope>> {
- self.request_dynamic(envelope, request_type, true).boxed()
- }
-
- fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> {
- self.send_dynamic(envelope)
- }
-
- fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> {
- self.send_dynamic(envelope)
- }
-
- fn message_handler_set(&self) -> &Mutex<ProtoMessageHandlerSet> {
- &self.message_handlers
- }
-
- fn is_via_collab(&self) -> bool {
- false
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-mod fake {
- use std::{path::PathBuf, sync::Arc};
-
- use anyhow::Result;
- use async_trait::async_trait;
- use futures::{
- FutureExt, SinkExt, StreamExt,
- channel::{
- mpsc::{self, Sender},
- oneshot,
- },
- select_biased,
- };
- use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task, TestAppContext};
- use release_channel::ReleaseChannel;
- use rpc::proto::Envelope;
- use util::paths::{PathStyle, RemotePathBuf};
-
- use super::{
- ChannelClient, RemoteConnection, SshArgs, SshClientDelegate, SshConnectionOptions,
- SshPlatform,
- };
-
- pub(super) struct FakeRemoteConnection {
- pub(super) connection_options: SshConnectionOptions,
- pub(super) server_channel: Arc<ChannelClient>,
- pub(super) server_cx: SendableCx,
- }
-
- pub(super) struct SendableCx(AsyncApp);
- impl SendableCx {
- // SAFETY: When run in test mode, GPUI is always single threaded.
- pub(super) fn new(cx: &TestAppContext) -> Self {
- Self(cx.to_async())
- }
-
- // SAFETY: Enforce that we're on the main thread by requiring a valid AsyncApp
- fn get(&self, _: &AsyncApp) -> AsyncApp {
- self.0.clone()
- }
- }
-
- // SAFETY: There is no way to access a SendableCx from a different thread, see [`SendableCx::new`] and [`SendableCx::get`]
- unsafe impl Send for SendableCx {}
- unsafe impl Sync for SendableCx {}
-
- #[async_trait(?Send)]
- impl RemoteConnection for FakeRemoteConnection {
- async fn kill(&self) -> Result<()> {
- Ok(())
- }
-
- fn has_been_killed(&self) -> bool {
- false
- }
-
- fn ssh_args(&self) -> SshArgs {
- SshArgs {
- arguments: Vec::new(),
- envs: None,
- }
- }
-
- fn upload_directory(
- &self,
- _src_path: PathBuf,
- _dest_path: RemotePathBuf,
- _cx: &App,
- ) -> Task<Result<()>> {
- unreachable!()
- }
-
- fn connection_options(&self) -> SshConnectionOptions {
- self.connection_options.clone()
- }
-
- fn simulate_disconnect(&self, cx: &AsyncApp) {
- let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
- let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
- self.server_channel
- .reconnect(incoming_rx, outgoing_tx, &self.server_cx.get(cx));
- }
-
- fn start_proxy(
- &self,
- _unique_identifier: String,
- _reconnect: bool,
- mut client_incoming_tx: mpsc::UnboundedSender<Envelope>,
- mut client_outgoing_rx: mpsc::UnboundedReceiver<Envelope>,
- mut connection_activity_tx: Sender<()>,
- _delegate: Arc<dyn SshClientDelegate>,
- cx: &mut AsyncApp,
- ) -> Task<Result<i32>> {
- let (mut server_incoming_tx, server_incoming_rx) = mpsc::unbounded::<Envelope>();
- let (server_outgoing_tx, mut server_outgoing_rx) = mpsc::unbounded::<Envelope>();
-
- self.server_channel.reconnect(
- server_incoming_rx,
- server_outgoing_tx,
- &self.server_cx.get(cx),
- );
-
- cx.background_spawn(async move {
- loop {
- select_biased! {
- server_to_client = server_outgoing_rx.next().fuse() => {
- let Some(server_to_client) = server_to_client else {
- return Ok(1)
- };
- connection_activity_tx.try_send(()).ok();
- client_incoming_tx.send(server_to_client).await.ok();
- }
- client_to_server = client_outgoing_rx.next().fuse() => {
- let Some(client_to_server) = client_to_server else {
- return Ok(1)
- };
- server_incoming_tx.send(client_to_server).await.ok();
- }
- }
- }
- })
- }
-
- fn path_style(&self) -> PathStyle {
- PathStyle::current()
- }
- }
-
- pub(super) struct Delegate;
-
- impl SshClientDelegate for Delegate {
- fn ask_password(&self, _: String, _: oneshot::Sender<String>, _: &mut AsyncApp) {
- unreachable!()
- }
-
- fn download_server_binary_locally(
- &self,
- _: SshPlatform,
- _: ReleaseChannel,
- _: Option<SemanticVersion>,
- _: &mut AsyncApp,
- ) -> Task<Result<PathBuf>> {
- unreachable!()
- }
-
- fn get_download_params(
- &self,
- _platform: SshPlatform,
- _release_channel: ReleaseChannel,
- _version: Option<SemanticVersion>,
- _cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>> {
- unreachable!()
- }
-
- fn set_status(&self, _: Option<&str>, _: &mut AsyncApp) {}
- }
-}
@@ -0,0 +1,336 @@
+use crate::{
+ json_log::LogRecord,
+ protocol::{MESSAGE_LEN_SIZE, message_len_from_buffer, read_message_with_len, write_message},
+};
+use anyhow::{Context as _, Result};
+use futures::{
+ AsyncReadExt as _, FutureExt as _, StreamExt as _,
+ channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender},
+};
+use gpui::{AppContext as _, AsyncApp, Task};
+use rpc::proto::Envelope;
+use smol::process::Child;
+
+pub mod ssh;
+pub mod wsl;
+
+fn handle_rpc_messages_over_child_process_stdio(
+ mut ssh_proxy_process: Child,
+ incoming_tx: UnboundedSender<Envelope>,
+ mut outgoing_rx: UnboundedReceiver<Envelope>,
+ mut connection_activity_tx: Sender<()>,
+ cx: &AsyncApp,
+) -> Task<Result<i32>> {
+ let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
+ let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
+ let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
+
+ let mut stdin_buffer = Vec::new();
+ let mut stdout_buffer = Vec::new();
+ let mut stderr_buffer = Vec::new();
+ let mut stderr_offset = 0;
+
+ let stdin_task = cx.background_spawn(async move {
+ while let Some(outgoing) = outgoing_rx.next().await {
+ write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
+ }
+ anyhow::Ok(())
+ });
+
+ let stdout_task = cx.background_spawn({
+ let mut connection_activity_tx = connection_activity_tx.clone();
+ async move {
+ loop {
+ stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
+ let len = child_stdout.read(&mut stdout_buffer).await?;
+
+ if len == 0 {
+ return anyhow::Ok(());
+ }
+
+ if len < MESSAGE_LEN_SIZE {
+ child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
+ }
+
+ let message_len = message_len_from_buffer(&stdout_buffer);
+ let envelope =
+ read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
+ .await?;
+ connection_activity_tx.try_send(()).ok();
+ incoming_tx.unbounded_send(envelope).ok();
+ }
+ }
+ });
+
+ let stderr_task: Task<anyhow::Result<()>> = cx.background_spawn(async move {
+ loop {
+ stderr_buffer.resize(stderr_offset + 1024, 0);
+
+ let len = child_stderr
+ .read(&mut stderr_buffer[stderr_offset..])
+ .await?;
+ if len == 0 {
+ return anyhow::Ok(());
+ }
+
+ stderr_offset += len;
+ let mut start_ix = 0;
+ while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
+ .iter()
+ .position(|b| b == &b'\n')
+ {
+ let line_ix = start_ix + ix;
+ let content = &stderr_buffer[start_ix..line_ix];
+ start_ix = line_ix + 1;
+ if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
+ record.log(log::logger())
+ } else {
+ eprintln!("(remote) {}", String::from_utf8_lossy(content));
+ }
+ }
+ stderr_buffer.drain(0..start_ix);
+ stderr_offset -= start_ix;
+
+ connection_activity_tx.try_send(()).ok();
+ }
+ });
+
+ cx.background_spawn(async move {
+ let result = futures::select! {
+ result = stdin_task.fuse() => {
+ result.context("stdin")
+ }
+ result = stdout_task.fuse() => {
+ result.context("stdout")
+ }
+ result = stderr_task.fuse() => {
+ result.context("stderr")
+ }
+ };
+
+ let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
+ match result {
+ Ok(_) => Ok(status),
+ Err(error) => Err(error),
+ }
+ })
+}
+
+#[cfg(debug_assertions)]
+async fn build_remote_server_from_source(
+ platform: &crate::RemotePlatform,
+ delegate: &dyn crate::RemoteClientDelegate,
+ cx: &mut AsyncApp,
+) -> Result<Option<std::path::PathBuf>> {
+ use std::path::Path;
+
+ let Some(build_remote_server) = std::env::var("ZED_BUILD_REMOTE_SERVER").ok() else {
+ return Ok(None);
+ };
+
+ use smol::process::{Command, Stdio};
+ use std::env::VarError;
+
+ async fn run_cmd(command: &mut Command) -> Result<()> {
+ let output = command
+ .kill_on_drop(true)
+ .stderr(Stdio::inherit())
+ .output()
+ .await?;
+ anyhow::ensure!(
+ output.status.success(),
+ "Failed to run command: {command:?}"
+ );
+ Ok(())
+ }
+
+ let use_musl = !build_remote_server.contains("nomusl");
+ let triple = format!(
+ "{}-{}",
+ platform.arch,
+ match platform.os {
+ "linux" =>
+ if use_musl {
+ "unknown-linux-musl"
+ } else {
+ "unknown-linux-gnu"
+ },
+ "macos" => "apple-darwin",
+ _ => anyhow::bail!("can't cross compile for: {:?}", platform),
+ }
+ );
+ let mut rust_flags = match std::env::var("RUSTFLAGS") {
+ Ok(val) => val,
+ Err(VarError::NotPresent) => String::new(),
+ Err(e) => {
+ log::error!("Failed to get env var `RUSTFLAGS` value: {e}");
+ String::new()
+ }
+ };
+ if platform.os == "linux" && use_musl {
+ rust_flags.push_str(" -C target-feature=+crt-static");
+ }
+ if build_remote_server.contains("mold") {
+ rust_flags.push_str(" -C link-arg=-fuse-ld=mold");
+ }
+
+ if platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS {
+ delegate.set_status(Some("Building remote server binary from source"), cx);
+ log::info!("building remote server binary from source");
+ run_cmd(
+ Command::new("cargo")
+ .args([
+ "build",
+ "--package",
+ "remote_server",
+ "--features",
+ "debug-embed",
+ "--target-dir",
+ "target/remote_server",
+ "--target",
+ &triple,
+ ])
+ .env("RUSTFLAGS", &rust_flags),
+ )
+ .await?;
+ } else if build_remote_server.contains("cross") {
+ #[cfg(target_os = "windows")]
+ use util::paths::SanitizedPath;
+
+ delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx);
+ log::info!("installing cross");
+ run_cmd(Command::new("cargo").args([
+ "install",
+ "cross",
+ "--git",
+ "https://github.com/cross-rs/cross",
+ ]))
+ .await?;
+
+ delegate.set_status(
+ Some(&format!(
+ "Building remote server binary from source for {} with Docker",
+ &triple
+ )),
+ cx,
+ );
+ log::info!("building remote server binary from source for {}", &triple);
+
+ // On Windows, the binding needs to be set to the canonical path
+ #[cfg(target_os = "windows")]
+ let src = SanitizedPath::new(&smol::fs::canonicalize("./target").await?).to_glob_string();
+ #[cfg(not(target_os = "windows"))]
+ let src = "./target";
+
+ run_cmd(
+ Command::new("cross")
+ .args([
+ "build",
+ "--package",
+ "remote_server",
+ "--features",
+ "debug-embed",
+ "--target-dir",
+ "target/remote_server",
+ "--target",
+ &triple,
+ ])
+ .env(
+ "CROSS_CONTAINER_OPTS",
+ format!("--mount type=bind,src={src},dst=/app/target"),
+ )
+ .env("RUSTFLAGS", &rust_flags),
+ )
+ .await?;
+ } else {
+ let which = cx
+ .background_spawn(async move { which::which("zig") })
+ .await;
+
+ if which.is_err() {
+ #[cfg(not(target_os = "windows"))]
+ {
+ anyhow::bail!(
+ "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
+ )
+ }
+ #[cfg(target_os = "windows")]
+ {
+ anyhow::bail!(
+ "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
+ )
+ }
+ }
+
+ delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
+ log::info!("adding rustup target");
+ run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?;
+
+ delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
+ log::info!("installing cargo-zigbuild");
+ run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?;
+
+ delegate.set_status(
+ Some(&format!(
+ "Building remote binary from source for {triple} with Zig"
+ )),
+ cx,
+ );
+ log::info!("building remote binary from source for {triple} with Zig");
+ run_cmd(
+ Command::new("cargo")
+ .args([
+ "zigbuild",
+ "--package",
+ "remote_server",
+ "--features",
+ "debug-embed",
+ "--target-dir",
+ "target/remote_server",
+ "--target",
+ &triple,
+ ])
+ .env("RUSTFLAGS", &rust_flags),
+ )
+ .await?;
+ };
+ let bin_path = Path::new("target")
+ .join("remote_server")
+ .join(&triple)
+ .join("debug")
+ .join("remote_server");
+
+ let path = if !build_remote_server.contains("nocompress") {
+ delegate.set_status(Some("Compressing binary"), cx);
+
+ #[cfg(not(target_os = "windows"))]
+ {
+ run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ // On Windows, we use 7z to compress the binary
+ let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
+ let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple);
+ if smol::fs::metadata(&gz_path).await.is_ok() {
+ smol::fs::remove_file(&gz_path).await?;
+ }
+ run_cmd(Command::new(seven_zip).args([
+ "a",
+ "-tgzip",
+ &gz_path,
+ &bin_path.to_string_lossy(),
+ ]))
+ .await?;
+ }
+
+ let mut archive_path = bin_path;
+ archive_path.set_extension("gz");
+ std::env::current_dir()?.join(archive_path)
+ } else {
+ bin_path
+ };
+
+ Ok(Some(path))
+}
@@ -0,0 +1,1039 @@
+use crate::{
+ RemoteClientDelegate, RemotePlatform,
+ remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
+};
+use anyhow::{Context as _, Result, anyhow};
+use async_trait::async_trait;
+use collections::HashMap;
+use futures::{
+ AsyncReadExt as _, FutureExt as _,
+ channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender},
+ select_biased,
+};
+use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task};
+use itertools::Itertools;
+use parking_lot::Mutex;
+use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
+use rpc::proto::Envelope;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use smol::{
+ fs,
+ process::{self, Child, Stdio},
+};
+use std::{
+ iter,
+ path::{Path, PathBuf},
+ sync::Arc,
+ time::Instant,
+};
+use tempfile::TempDir;
+use util::paths::{PathStyle, RemotePathBuf};
+
+pub(crate) struct SshRemoteConnection {
+ socket: SshSocket,
+ master_process: Mutex<Option<Child>>,
+ remote_binary_path: Option<RemotePathBuf>,
+ ssh_platform: RemotePlatform,
+ ssh_path_style: PathStyle,
+ ssh_shell: String,
+ _temp_dir: TempDir,
+}
+
+#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+pub struct SshConnectionOptions {
+ pub host: String,
+ pub username: Option<String>,
+ pub port: Option<u16>,
+ pub password: Option<String>,
+ pub args: Option<Vec<String>>,
+ pub port_forwards: Option<Vec<SshPortForwardOption>>,
+
+ pub nickname: Option<String>,
+ pub upload_binary_over_ssh: bool,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize, JsonSchema)]
+pub struct SshPortForwardOption {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub local_host: Option<String>,
+ pub local_port: u16,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub remote_host: Option<String>,
+ pub remote_port: u16,
+}
+
+#[derive(Clone)]
+struct SshSocket {
+ connection_options: SshConnectionOptions,
+ #[cfg(not(target_os = "windows"))]
+ socket_path: PathBuf,
+ envs: HashMap<String, String>,
+}
+
+macro_rules! shell_script {
+ ($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{
+ format!(
+ $fmt,
+ $(
+ $name = shlex::try_quote($arg).unwrap()
+ ),+
+ )
+ }};
+}
+
+#[async_trait(?Send)]
+impl RemoteConnection for SshRemoteConnection {
+ async fn kill(&self) -> Result<()> {
+ let Some(mut process) = self.master_process.lock().take() else {
+ return Ok(());
+ };
+ process.kill().ok();
+ process.status().await?;
+ Ok(())
+ }
+
+ fn has_been_killed(&self) -> bool {
+ self.master_process.lock().is_none()
+ }
+
+ fn connection_options(&self) -> RemoteConnectionOptions {
+ RemoteConnectionOptions::Ssh(self.socket.connection_options.clone())
+ }
+
+ fn shell(&self) -> String {
+ self.ssh_shell.clone()
+ }
+
+ fn build_command(
+ &self,
+ input_program: Option<String>,
+ input_args: &[String],
+ input_env: &HashMap<String, String>,
+ working_dir: Option<String>,
+ port_forward: Option<(u16, String, u16)>,
+ ) -> Result<CommandTemplate> {
+ use std::fmt::Write as _;
+
+ let mut script = String::new();
+ if let Some(working_dir) = working_dir {
+ let working_dir =
+ RemotePathBuf::new(working_dir.into(), self.ssh_path_style).to_string();
+
+ // shlex will wrap the command in single quotes (''), disabling ~ expansion,
+            // replace it with something that works
+ const TILDE_PREFIX: &'static str = "~/";
+ let working_dir = if working_dir.starts_with(TILDE_PREFIX) {
+ let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
+ format!("$HOME/{working_dir}")
+ } else {
+ working_dir
+ };
+ write!(&mut script, "cd \"{working_dir}\"; ",).unwrap();
+ } else {
+ write!(&mut script, "cd; ").unwrap();
+ };
+
+ for (k, v) in input_env.iter() {
+ if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
+ write!(&mut script, "{}={} ", k, v).unwrap();
+ }
+ }
+
+ let shell = &self.ssh_shell;
+
+ if let Some(input_program) = input_program {
+ let command = shlex::try_quote(&input_program)?;
+ script.push_str(&command);
+ for arg in input_args {
+ let arg = shlex::try_quote(&arg)?;
+ script.push_str(" ");
+ script.push_str(&arg);
+ }
+ } else {
+ write!(&mut script, "exec {shell} -l").unwrap();
+ };
+
+ let shell_invocation = format!("{shell} -c {}", shlex::try_quote(&script).unwrap());
+
+ let mut args = Vec::new();
+ args.extend(self.socket.ssh_args());
+
+ if let Some((local_port, host, remote_port)) = port_forward {
+ args.push("-L".into());
+ args.push(format!("{local_port}:{host}:{remote_port}"));
+ }
+
+ args.push("-t".into());
+ args.push(shell_invocation);
+ Ok(CommandTemplate {
+ program: "ssh".into(),
+ args,
+ env: self.socket.envs.clone(),
+ })
+ }
+
+ fn upload_directory(
+ &self,
+ src_path: PathBuf,
+ dest_path: RemotePathBuf,
+ cx: &App,
+ ) -> Task<Result<()>> {
+ let mut command = util::command::new_smol_command("scp");
+ let output = self
+ .socket
+ .ssh_options(&mut command)
+ .args(
+ self.socket
+ .connection_options
+ .port
+ .map(|port| vec!["-P".to_string(), port.to_string()])
+ .unwrap_or_default(),
+ )
+ .arg("-C")
+ .arg("-r")
+ .arg(&src_path)
+ .arg(format!(
+ "{}:{}",
+ self.socket.connection_options.scp_url(),
+ dest_path
+ ))
+ .output();
+
+ cx.background_spawn(async move {
+ let output = output.await?;
+
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to upload directory {} -> {}: {}",
+ src_path.display(),
+ dest_path.to_string(),
+ String::from_utf8_lossy(&output.stderr)
+ );
+
+ Ok(())
+ })
+ }
+
+ fn start_proxy(
+ &self,
+ unique_identifier: String,
+ reconnect: bool,
+ incoming_tx: UnboundedSender<Envelope>,
+ outgoing_rx: UnboundedReceiver<Envelope>,
+ connection_activity_tx: Sender<()>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<i32>> {
+ delegate.set_status(Some("Starting proxy"), cx);
+
+ let Some(remote_binary_path) = self.remote_binary_path.clone() else {
+ return Task::ready(Err(anyhow!("Remote binary path not set")));
+ };
+
+ let mut start_proxy_command = shell_script!(
+ "exec {binary_path} proxy --identifier {identifier}",
+ binary_path = &remote_binary_path.to_string(),
+ identifier = &unique_identifier,
+ );
+
+ for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
+ if let Some(value) = std::env::var(env_var).ok() {
+ start_proxy_command = format!(
+ "{}={} {} ",
+ env_var,
+ shlex::try_quote(&value).unwrap(),
+ start_proxy_command,
+ );
+ }
+ }
+
+ if reconnect {
+ start_proxy_command.push_str(" --reconnect");
+ }
+
+ let ssh_proxy_process = match self
+ .socket
+ .ssh_command("sh", &["-c", &start_proxy_command])
+ // IMPORTANT: we kill this process when we drop the task that uses it.
+ .kill_on_drop(true)
+ .spawn()
+ {
+ Ok(process) => process,
+ Err(error) => {
+ return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error)));
+ }
+ };
+
+ super::handle_rpc_messages_over_child_process_stdio(
+ ssh_proxy_process,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ cx,
+ )
+ }
+
+ fn path_style(&self) -> PathStyle {
+ self.ssh_path_style
+ }
+}
+
+impl SshRemoteConnection {
+ pub(crate) async fn new(
+ connection_options: SshConnectionOptions,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<Self> {
+ use askpass::AskPassResult;
+
+ delegate.set_status(Some("Connecting"), cx);
+
+ let url = connection_options.ssh_url();
+
+ let temp_dir = tempfile::Builder::new()
+ .prefix("zed-ssh-session")
+ .tempdir()?;
+ let askpass_delegate = askpass::AskPassDelegate::new(cx, {
+ let delegate = delegate.clone();
+ move |prompt, tx, cx| delegate.ask_password(prompt, tx, cx)
+ });
+
+ let mut askpass =
+ askpass::AskPassSession::new(cx.background_executor(), askpass_delegate).await?;
+
+        // Start the master SSH process, which does nothing except establish
+ // the connection and keep it open, allowing other ssh commands to reuse it
+ // via a control socket.
+ #[cfg(not(target_os = "windows"))]
+ let socket_path = temp_dir.path().join("ssh.sock");
+
+ let mut master_process = {
+ #[cfg(not(target_os = "windows"))]
+ let args = [
+ "-N",
+ "-o",
+ "ControlPersist=no",
+ "-o",
+ "ControlMaster=yes",
+ "-o",
+ ];
+ // On Windows, `ControlMaster` and `ControlPath` are not supported:
+ // https://github.com/PowerShell/Win32-OpenSSH/issues/405
+ // https://github.com/PowerShell/Win32-OpenSSH/wiki/Project-Scope
+ #[cfg(target_os = "windows")]
+ let args = ["-N"];
+ let mut master_process = util::command::new_smol_command("ssh");
+ master_process
+ .kill_on_drop(true)
+ .stdin(Stdio::null())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .env("SSH_ASKPASS_REQUIRE", "force")
+ .env("SSH_ASKPASS", askpass.script_path())
+ .args(connection_options.additional_args())
+ .args(args);
+ #[cfg(not(target_os = "windows"))]
+ master_process.arg(format!("ControlPath={}", socket_path.display()));
+ master_process.arg(&url).spawn()?
+ };
+ // Wait for this ssh process to close its stdout, indicating that authentication
+ // has completed.
+ let mut stdout = master_process.stdout.take().unwrap();
+ let mut output = Vec::new();
+
+ let result = select_biased! {
+ result = askpass.run().fuse() => {
+ match result {
+ AskPassResult::CancelledByUser => {
+ master_process.kill().ok();
+ anyhow::bail!("SSH connection canceled")
+ }
+ AskPassResult::Timedout => {
+ anyhow::bail!("connecting to host timed out")
+ }
+ }
+ }
+ _ = stdout.read_to_end(&mut output).fuse() => {
+ anyhow::Ok(())
+ }
+ };
+
+ if let Err(e) = result {
+ return Err(e.context("Failed to connect to host"));
+ }
+
+ if master_process.try_status()?.is_some() {
+ output.clear();
+ let mut stderr = master_process.stderr.take().unwrap();
+ stderr.read_to_end(&mut output).await?;
+
+ let error_message = format!(
+ "failed to connect: {}",
+ String::from_utf8_lossy(&output).trim()
+ );
+ anyhow::bail!(error_message);
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ let socket = SshSocket::new(connection_options, socket_path)?;
+ #[cfg(target_os = "windows")]
+ let socket = SshSocket::new(connection_options, &temp_dir, askpass.get_password())?;
+ drop(askpass);
+
+ let ssh_platform = socket.platform().await?;
+ let ssh_path_style = match ssh_platform.os {
+ "windows" => PathStyle::Windows,
+ _ => PathStyle::Posix,
+ };
+ let ssh_shell = socket.shell().await;
+
+ let mut this = Self {
+ socket,
+ master_process: Mutex::new(Some(master_process)),
+ _temp_dir: temp_dir,
+ remote_binary_path: None,
+ ssh_path_style,
+ ssh_platform,
+ ssh_shell,
+ };
+
+ let (release_channel, version, commit) = cx.update(|cx| {
+ (
+ ReleaseChannel::global(cx),
+ AppVersion::global(cx),
+ AppCommitSha::try_global(cx),
+ )
+ })?;
+ this.remote_binary_path = Some(
+ this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
+ .await?,
+ );
+
+ Ok(this)
+ }
+
+ async fn ensure_server_binary(
+ &self,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ release_channel: ReleaseChannel,
+ version: SemanticVersion,
+ commit: Option<AppCommitSha>,
+ cx: &mut AsyncApp,
+ ) -> Result<RemotePathBuf> {
+ let version_str = match release_channel {
+ ReleaseChannel::Nightly => {
+ let commit = commit.map(|s| s.full()).unwrap_or_default();
+ format!("{}-{}", version, commit)
+ }
+ ReleaseChannel::Dev => "build".to_string(),
+ _ => version.to_string(),
+ };
+ let binary_name = format!(
+ "zed-remote-server-{}-{}",
+ release_channel.dev_name(),
+ version_str
+ );
+ let dst_path = RemotePathBuf::new(
+ paths::remote_server_dir_relative().join(binary_name),
+ self.ssh_path_style,
+ );
+
+ #[cfg(debug_assertions)]
+ if let Some(remote_server_path) =
+ super::build_remote_server_from_source(&self.ssh_platform, delegate.as_ref(), cx)
+ .await?
+ {
+ let tmp_path = RemotePathBuf::new(
+ paths::remote_server_dir_relative().join(format!(
+ "download-{}-{}",
+ std::process::id(),
+ remote_server_path.file_name().unwrap().to_string_lossy()
+ )),
+ self.ssh_path_style,
+ );
+ self.upload_local_server_binary(&remote_server_path, &tmp_path, delegate, cx)
+ .await?;
+ self.extract_server_binary(&dst_path, &tmp_path, delegate, cx)
+ .await?;
+ return Ok(dst_path);
+ }
+
+ if self
+ .socket
+ .run_command(&dst_path.to_string(), &["version"])
+ .await
+ .is_ok()
+ {
+ return Ok(dst_path);
+ }
+
+ let wanted_version = cx.update(|cx| match release_channel {
+ ReleaseChannel::Nightly => Ok(None),
+ ReleaseChannel::Dev => {
+ anyhow::bail!(
+ "ZED_BUILD_REMOTE_SERVER is not set and no remote server exists at ({:?})",
+ dst_path
+ )
+ }
+ _ => Ok(Some(AppVersion::global(cx))),
+ })??;
+
+ let tmp_path_gz = RemotePathBuf::new(
+ PathBuf::from(format!("{}-download-{}.gz", dst_path, std::process::id())),
+ self.ssh_path_style,
+ );
+ if !self.socket.connection_options.upload_binary_over_ssh
+ && let Some((url, body)) = delegate
+ .get_download_params(self.ssh_platform, release_channel, wanted_version, cx)
+ .await?
+ {
+ match self
+ .download_binary_on_server(&url, &body, &tmp_path_gz, delegate, cx)
+ .await
+ {
+ Ok(_) => {
+ self.extract_server_binary(&dst_path, &tmp_path_gz, delegate, cx)
+ .await?;
+ return Ok(dst_path);
+ }
+ Err(e) => {
+ log::error!(
+ "Failed to download binary on server, attempting to upload server: {}",
+ e
+ )
+ }
+ }
+ }
+
+ let src_path = delegate
+ .download_server_binary_locally(self.ssh_platform, release_channel, wanted_version, cx)
+ .await?;
+ self.upload_local_server_binary(&src_path, &tmp_path_gz, delegate, cx)
+ .await?;
+ self.extract_server_binary(&dst_path, &tmp_path_gz, delegate, cx)
+ .await?;
+ Ok(dst_path)
+ }
+
+ async fn download_binary_on_server(
+ &self,
+ url: &str,
+ body: &str,
+ tmp_path_gz: &RemotePathBuf,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<()> {
+ if let Some(parent) = tmp_path_gz.parent() {
+ self.socket
+ .run_command(
+ "sh",
+ &[
+ "-c",
+ &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()),
+ ],
+ )
+ .await?;
+ }
+
+ delegate.set_status(Some("Downloading remote development server on host"), cx);
+
+ match self
+ .socket
+ .run_command(
+ "curl",
+ &[
+ "-f",
+ "-L",
+ "-X",
+ "GET",
+ "-H",
+ "Content-Type: application/json",
+ "-d",
+ body,
+ url,
+ "-o",
+ &tmp_path_gz.to_string(),
+ ],
+ )
+ .await
+ {
+ Ok(_) => {}
+ Err(e) => {
+ if self.socket.run_command("which", &["curl"]).await.is_ok() {
+ return Err(e);
+ }
+
+ match self
+ .socket
+ .run_command(
+ "wget",
+ &[
+ "--method=GET",
+ "--header=Content-Type: application/json",
+ "--body-data",
+ body,
+ url,
+ "-O",
+ &tmp_path_gz.to_string(),
+ ],
+ )
+ .await
+ {
+ Ok(_) => {}
+ Err(e) => {
+ if self.socket.run_command("which", &["wget"]).await.is_ok() {
+ return Err(e);
+ } else {
+ anyhow::bail!("Neither curl nor wget is available");
+ }
+ }
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ async fn upload_local_server_binary(
+ &self,
+ src_path: &Path,
+ tmp_path_gz: &RemotePathBuf,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<()> {
+ if let Some(parent) = tmp_path_gz.parent() {
+ self.socket
+ .run_command(
+ "sh",
+ &[
+ "-c",
+ &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()),
+ ],
+ )
+ .await?;
+ }
+
+ let src_stat = fs::metadata(&src_path).await?;
+ let size = src_stat.len();
+
+ let t0 = Instant::now();
+ delegate.set_status(Some("Uploading remote development server"), cx);
+ log::info!(
+ "uploading remote development server to {:?} ({}kb)",
+ tmp_path_gz,
+ size / 1024
+ );
+ self.upload_file(src_path, tmp_path_gz)
+ .await
+ .context("failed to upload server binary")?;
+ log::info!("uploaded remote development server in {:?}", t0.elapsed());
+ Ok(())
+ }
+
+ async fn extract_server_binary(
+ &self,
+ dst_path: &RemotePathBuf,
+ tmp_path: &RemotePathBuf,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<()> {
+ delegate.set_status(Some("Extracting remote development server"), cx);
+ let server_mode = 0o755;
+
+ let orig_tmp_path = tmp_path.to_string();
+ let script = if let Some(tmp_path) = orig_tmp_path.strip_suffix(".gz") {
+ shell_script!(
+ "gunzip -f {orig_tmp_path} && chmod {server_mode} {tmp_path} && mv {tmp_path} {dst_path}",
+ server_mode = &format!("{:o}", server_mode),
+ dst_path = &dst_path.to_string(),
+ )
+ } else {
+ shell_script!(
+ "chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",
+ server_mode = &format!("{:o}", server_mode),
+ dst_path = &dst_path.to_string()
+ )
+ };
+ self.socket.run_command("sh", &["-c", &script]).await?;
+ Ok(())
+ }
+
+ async fn upload_file(&self, src_path: &Path, dest_path: &RemotePathBuf) -> Result<()> {
+ log::debug!("uploading file {:?} to {:?}", src_path, dest_path);
+ let mut command = util::command::new_smol_command("scp");
+ let output = self
+ .socket
+ .ssh_options(&mut command)
+ .args(
+ self.socket
+ .connection_options
+ .port
+ .map(|port| vec!["-P".to_string(), port.to_string()])
+ .unwrap_or_default(),
+ )
+ .arg(src_path)
+ .arg(format!(
+ "{}:{}",
+ self.socket.connection_options.scp_url(),
+ dest_path
+ ))
+ .output()
+ .await?;
+
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to upload file {} -> {}: {}",
+ src_path.display(),
+ dest_path.to_string(),
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(())
+ }
+}
+
+impl SshSocket {
+ #[cfg(not(target_os = "windows"))]
+ fn new(options: SshConnectionOptions, socket_path: PathBuf) -> Result<Self> {
+ Ok(Self {
+ connection_options: options,
+ envs: HashMap::default(),
+ socket_path,
+ })
+ }
+
+ #[cfg(target_os = "windows")]
+ fn new(options: SshConnectionOptions, temp_dir: &TempDir, secret: String) -> Result<Self> {
+ let askpass_script = temp_dir.path().join("askpass.bat");
+ std::fs::write(&askpass_script, "@ECHO OFF\necho %ZED_SSH_ASKPASS%")?;
+ let mut envs = HashMap::default();
+ envs.insert("SSH_ASKPASS_REQUIRE".into(), "force".into());
+ envs.insert("SSH_ASKPASS".into(), askpass_script.display().to_string());
+ envs.insert("ZED_SSH_ASKPASS".into(), secret);
+ Ok(Self {
+ connection_options: options,
+ envs,
+ })
+ }
+
+ // :WARNING: ssh unquotes arguments when executing on the remote :WARNING:
+ // e.g. $ ssh host sh -c 'ls -l' is equivalent to $ ssh host sh -c ls -l
+ // and passes -l as an argument to sh, not to ls.
+ // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing
+ // into a machine. You must use `cd` to get back to $HOME.
+ // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
+ fn ssh_command(&self, program: &str, args: &[&str]) -> process::Command {
+ let mut command = util::command::new_smol_command("ssh");
+ let to_run = iter::once(&program)
+ .chain(args.iter())
+ .map(|token| {
+ // We're trying to work with: sh, bash, zsh, fish, tcsh, ...?
+ debug_assert!(
+ !token.contains('\n'),
+ "multiline arguments do not work in all shells"
+ );
+ shlex::try_quote(token).unwrap()
+ })
+ .join(" ");
+ let to_run = format!("cd; {to_run}");
+ log::debug!("ssh {} {:?}", self.connection_options.ssh_url(), to_run);
+ self.ssh_options(&mut command)
+ .arg(self.connection_options.ssh_url())
+ .arg(to_run);
+ command
+ }
+
+ async fn run_command(&self, program: &str, args: &[&str]) -> Result<String> {
+ let output = self.ssh_command(program, args).output().await?;
+ anyhow::ensure!(
+ output.status.success(),
+ "failed to run command: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ Ok(String::from_utf8_lossy(&output.stdout).to_string())
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
+ command
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .args(self.connection_options.additional_args())
+ .args(["-o", "ControlMaster=no", "-o"])
+ .arg(format!("ControlPath={}", self.socket_path.display()))
+ }
+
+ #[cfg(target_os = "windows")]
+ fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command {
+ command
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .args(self.connection_options.additional_args())
+ .envs(self.envs.clone())
+ }
+
+ // On Windows, we need to use `SSH_ASKPASS` to provide the password to ssh.
+    // On Linux, we use the `ControlPath` option to create a socket file that ssh can use to
+    // reuse the authenticated master connection instead of reconnecting each time.
+ #[cfg(not(target_os = "windows"))]
+ fn ssh_args(&self) -> Vec<String> {
+ let mut arguments = self.connection_options.additional_args();
+ arguments.extend(vec![
+ "-o".to_string(),
+ "ControlMaster=no".to_string(),
+ "-o".to_string(),
+ format!("ControlPath={}", self.socket_path.display()),
+ self.connection_options.ssh_url(),
+ ]);
+ arguments
+ }
+
+ #[cfg(target_os = "windows")]
+ fn ssh_args(&self) -> Vec<String> {
+ let mut arguments = self.connection_options.additional_args();
+ arguments.push(self.connection_options.ssh_url());
+ arguments
+ }
+
+ async fn platform(&self) -> Result<RemotePlatform> {
+ let uname = self.run_command("sh", &["-c", "uname -sm"]).await?;
+ let Some((os, arch)) = uname.split_once(" ") else {
+ anyhow::bail!("unknown uname: {uname:?}")
+ };
+
+ let os = match os.trim() {
+ "Darwin" => "macos",
+ "Linux" => "linux",
+ _ => anyhow::bail!(
+ "Prebuilt remote servers are not yet available for {os:?}. See https://zed.dev/docs/remote-development"
+ ),
+ };
+ // exclude armv5,6,7 as they are 32-bit.
+ let arch = if arch.starts_with("armv8")
+ || arch.starts_with("armv9")
+ || arch.starts_with("arm64")
+ || arch.starts_with("aarch64")
+ {
+ "aarch64"
+ } else if arch.starts_with("x86") {
+ "x86_64"
+ } else {
+ anyhow::bail!(
+ "Prebuilt remote servers are not yet available for {arch:?}. See https://zed.dev/docs/remote-development"
+ )
+ };
+
+ Ok(RemotePlatform { os, arch })
+ }
+
+ async fn shell(&self) -> String {
+ match self.run_command("sh", &["-c", "echo $SHELL"]).await {
+ Ok(shell) => shell.trim().to_owned(),
+ Err(e) => {
+ log::error!("Failed to get shell: {e}");
+ "sh".to_owned()
+ }
+ }
+ }
+}
+
+fn parse_port_number(port_str: &str) -> Result<u16> {
+ port_str
+ .parse()
+ .with_context(|| format!("parsing port number: {port_str}"))
+}
+
+fn parse_port_forward_spec(spec: &str) -> Result<SshPortForwardOption> {
+ let parts: Vec<&str> = spec.split(':').collect();
+
+ match parts.len() {
+ 4 => {
+ let local_port = parse_port_number(parts[1])?;
+ let remote_port = parse_port_number(parts[3])?;
+
+ Ok(SshPortForwardOption {
+ local_host: Some(parts[0].to_string()),
+ local_port,
+ remote_host: Some(parts[2].to_string()),
+ remote_port,
+ })
+ }
+ 3 => {
+ let local_port = parse_port_number(parts[0])?;
+ let remote_port = parse_port_number(parts[2])?;
+
+ Ok(SshPortForwardOption {
+ local_host: None,
+ local_port,
+ remote_host: Some(parts[1].to_string()),
+ remote_port,
+ })
+ }
+ _ => anyhow::bail!("Invalid port forward format"),
+ }
+}
+
+impl SshConnectionOptions {
+ pub fn parse_command_line(input: &str) -> Result<Self> {
+ let input = input.trim_start_matches("ssh ");
+ let mut hostname: Option<String> = None;
+ let mut username: Option<String> = None;
+ let mut port: Option<u16> = None;
+ let mut args = Vec::new();
+ let mut port_forwards: Vec<SshPortForwardOption> = Vec::new();
+
+ // disallowed: -E, -e, -F, -f, -G, -g, -M, -N, -n, -O, -q, -S, -s, -T, -t, -V, -v, -W
+ const ALLOWED_OPTS: &[&str] = &[
+ "-4", "-6", "-A", "-a", "-C", "-K", "-k", "-X", "-x", "-Y", "-y",
+ ];
+ const ALLOWED_ARGS: &[&str] = &[
+ "-B", "-b", "-c", "-D", "-F", "-I", "-i", "-J", "-l", "-m", "-o", "-P", "-p", "-R",
+ "-w",
+ ];
+
+ let mut tokens = shlex::split(input).context("invalid input")?.into_iter();
+
+ 'outer: while let Some(arg) = tokens.next() {
+ if ALLOWED_OPTS.contains(&(&arg as &str)) {
+ args.push(arg.to_string());
+ continue;
+ }
+ if arg == "-p" {
+ port = tokens.next().and_then(|arg| arg.parse().ok());
+ continue;
+ } else if let Some(p) = arg.strip_prefix("-p") {
+ port = p.parse().ok();
+ continue;
+ }
+ if arg == "-l" {
+ username = tokens.next();
+ continue;
+ } else if let Some(l) = arg.strip_prefix("-l") {
+ username = Some(l.to_string());
+ continue;
+ }
+ if arg == "-L" || arg.starts_with("-L") {
+ let forward_spec = if arg == "-L" {
+ tokens.next()
+ } else {
+ Some(arg.strip_prefix("-L").unwrap().to_string())
+ };
+
+ if let Some(spec) = forward_spec {
+ port_forwards.push(parse_port_forward_spec(&spec)?);
+ } else {
+ anyhow::bail!("Missing port forward format");
+ }
+ }
+
+ for a in ALLOWED_ARGS {
+ if arg == *a {
+ args.push(arg);
+ if let Some(next) = tokens.next() {
+ args.push(next);
+ }
+ continue 'outer;
+ } else if arg.starts_with(a) {
+ args.push(arg);
+ continue 'outer;
+ }
+ }
+ if arg.starts_with("-") || hostname.is_some() {
+ anyhow::bail!("unsupported argument: {:?}", arg);
+ }
+ let mut input = &arg as &str;
+ // Destination might be: username1@username2@ip2@ip1
+ if let Some((u, rest)) = input.rsplit_once('@') {
+ input = rest;
+ username = Some(u.to_string());
+ }
+ if let Some((rest, p)) = input.split_once(':') {
+ input = rest;
+ port = p.parse().ok()
+ }
+ hostname = Some(input.to_string())
+ }
+
+ let Some(hostname) = hostname else {
+ anyhow::bail!("missing hostname");
+ };
+
+ let port_forwards = match port_forwards.len() {
+ 0 => None,
+ _ => Some(port_forwards),
+ };
+
+ Ok(Self {
+ host: hostname,
+ username,
+ port,
+ port_forwards,
+ args: Some(args),
+ password: None,
+ nickname: None,
+ upload_binary_over_ssh: false,
+ })
+ }
+
+ pub fn ssh_url(&self) -> String {
+ let mut result = String::from("ssh://");
+ if let Some(username) = &self.username {
+ // Username might be: username1@username2@ip2
+ let username = urlencoding::encode(username);
+ result.push_str(&username);
+ result.push('@');
+ }
+ result.push_str(&self.host);
+ if let Some(port) = self.port {
+ result.push(':');
+ result.push_str(&port.to_string());
+ }
+ result
+ }
+
+ pub fn additional_args(&self) -> Vec<String> {
+ let mut args = self.args.iter().flatten().cloned().collect::<Vec<String>>();
+
+ if let Some(forwards) = &self.port_forwards {
+ args.extend(forwards.iter().map(|pf| {
+ let local_host = match &pf.local_host {
+ Some(host) => host,
+ None => "localhost",
+ };
+ let remote_host = match &pf.remote_host {
+ Some(host) => host,
+ None => "localhost",
+ };
+
+ format!(
+ "-L{}:{}:{}:{}",
+ local_host, pf.local_port, remote_host, pf.remote_port
+ )
+ }));
+ }
+
+ args
+ }
+
+ fn scp_url(&self) -> String {
+ if let Some(username) = &self.username {
+ format!("{}@{}", username, self.host)
+ } else {
+ self.host.clone()
+ }
+ }
+
+ pub fn connection_string(&self) -> String {
+ let host = if let Some(username) = &self.username {
+ format!("{}@{}", username, self.host)
+ } else {
+ self.host.clone()
+ };
+ if let Some(port) = &self.port {
+ format!("{}:{}", host, port)
+ } else {
+ host
+ }
+ }
+}
@@ -0,0 +1,491 @@
+use crate::{
+ RemoteClientDelegate, RemotePlatform,
+ remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
+};
+use anyhow::{Result, anyhow, bail};
+use async_trait::async_trait;
+use collections::HashMap;
+use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender};
+use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task};
+use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
+use rpc::proto::Envelope;
+use smol::{fs, process};
+use std::{
+ fmt::Write as _,
+ path::{Path, PathBuf},
+ process::Stdio,
+ sync::Arc,
+ time::Instant,
+};
+use util::paths::{PathStyle, RemotePathBuf};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WslConnectionOptions {
+ pub distro_name: String,
+ pub user: Option<String>,
+}
+
+pub(crate) struct WslRemoteConnection {
+ remote_binary_path: Option<RemotePathBuf>,
+ platform: RemotePlatform,
+ shell: String,
+ connection_options: WslConnectionOptions,
+}
+
+impl WslRemoteConnection {
+ pub(crate) async fn new(
+ connection_options: WslConnectionOptions,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<Self> {
+ log::info!(
+ "Connecting to WSL distro {} with user {:?}",
+ connection_options.distro_name,
+ connection_options.user
+ );
+ let (release_channel, version, commit) = cx.update(|cx| {
+ (
+ ReleaseChannel::global(cx),
+ AppVersion::global(cx),
+ AppCommitSha::try_global(cx),
+ )
+ })?;
+
+ let mut this = Self {
+ connection_options,
+ remote_binary_path: None,
+ platform: RemotePlatform { os: "", arch: "" },
+ shell: String::new(),
+ };
+ delegate.set_status(Some("Detecting WSL environment"), cx);
+ this.platform = this.detect_platform().await?;
+ this.shell = this.detect_shell().await?;
+ this.remote_binary_path = Some(
+ this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
+ .await?,
+ );
+
+ Ok(this)
+ }
+
+ async fn detect_platform(&self) -> Result<RemotePlatform> {
+ let arch_str = self.run_wsl_command("uname", &["-m"]).await?;
+ let arch_str = arch_str.trim().to_string();
+ let arch = match arch_str.as_str() {
+ "x86_64" => "x86_64",
+ "aarch64" | "arm64" => "aarch64",
+ _ => "x86_64",
+ };
+ Ok(RemotePlatform { os: "linux", arch })
+ }
+
+ async fn detect_shell(&self) -> Result<String> {
+ Ok(self
+ .run_wsl_command("sh", &["-c", "echo $SHELL"])
+ .await
+ .ok()
+ .and_then(|shell_path| shell_path.trim().split('/').next_back().map(str::to_string))
+ .unwrap_or_else(|| "bash".to_string()))
+ }
+
+ async fn windows_path_to_wsl_path(&self, source: &Path) -> Result<String> {
+ windows_path_to_wsl_path_impl(&self.connection_options, source).await
+ }
+
+ fn wsl_command(&self, program: &str, args: &[&str]) -> process::Command {
+ wsl_command_impl(&self.connection_options, program, args)
+ }
+
+ async fn run_wsl_command(&self, program: &str, args: &[&str]) -> Result<String> {
+ run_wsl_command_impl(&self.connection_options, program, args).await
+ }
+
+ async fn ensure_server_binary(
+ &self,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ release_channel: ReleaseChannel,
+ version: SemanticVersion,
+ commit: Option<AppCommitSha>,
+ cx: &mut AsyncApp,
+ ) -> Result<RemotePathBuf> {
+ let version_str = match release_channel {
+ ReleaseChannel::Nightly => {
+ let commit = commit.map(|s| s.full()).unwrap_or_default();
+ format!("{}-{}", version, commit)
+ }
+ ReleaseChannel::Dev => "build".to_string(),
+ _ => version.to_string(),
+ };
+
+ let binary_name = format!(
+ "zed-remote-server-{}-{}",
+ release_channel.dev_name(),
+ version_str
+ );
+
+ let dst_path = RemotePathBuf::new(
+ paths::remote_wsl_server_dir_relative().join(binary_name),
+ PathStyle::Posix,
+ );
+
+ if let Some(parent) = dst_path.parent() {
+ self.run_wsl_command("mkdir", &["-p", &parent.to_string()])
+ .await
+ .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
+ }
+
+ #[cfg(debug_assertions)]
+ if let Some(remote_server_path) =
+ super::build_remote_server_from_source(&self.platform, delegate.as_ref(), cx).await?
+ {
+ let tmp_path = RemotePathBuf::new(
+ paths::remote_wsl_server_dir_relative().join(format!(
+ "download-{}-{}",
+ std::process::id(),
+ remote_server_path.file_name().unwrap().to_string_lossy()
+ )),
+ PathStyle::Posix,
+ );
+ self.upload_file(&remote_server_path, &tmp_path, delegate, cx)
+ .await?;
+ self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
+ .await?;
+ return Ok(dst_path);
+ }
+
+ if self
+ .run_wsl_command(&dst_path.to_string(), &["version"])
+ .await
+ .is_ok()
+ {
+ return Ok(dst_path);
+ }
+
+ delegate.set_status(Some("Installing remote server"), cx);
+
+ let wanted_version = match release_channel {
+ ReleaseChannel::Nightly | ReleaseChannel::Dev => None,
+ _ => Some(cx.update(|cx| AppVersion::global(cx))?),
+ };
+
+ let src_path = delegate
+ .download_server_binary_locally(self.platform, release_channel, wanted_version, cx)
+ .await?;
+
+ let tmp_path = RemotePathBuf::new(
+ PathBuf::from(format!("{}.{}.gz", dst_path, std::process::id())),
+ PathStyle::Posix,
+ );
+
+ self.upload_file(&src_path, &tmp_path, delegate, cx).await?;
+ self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
+ .await?;
+
+ Ok(dst_path)
+ }
+
+ async fn upload_file(
+ &self,
+ src_path: &Path,
+ dst_path: &RemotePathBuf,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<()> {
+ delegate.set_status(Some("Uploading remote server to WSL"), cx);
+
+ if let Some(parent) = dst_path.parent() {
+ self.run_wsl_command("mkdir", &["-p", &parent.to_string()])
+ .await
+ .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
+ }
+
+ let t0 = Instant::now();
+ let src_stat = fs::metadata(&src_path).await?;
+ let size = src_stat.len();
+ log::info!(
+ "uploading remote server to WSL {:?} ({}kb)",
+ dst_path,
+ size / 1024
+ );
+
+ let src_path_in_wsl = self.windows_path_to_wsl_path(src_path).await?;
+ self.run_wsl_command("cp", &["-f", &src_path_in_wsl, &dst_path.to_string()])
+ .await
+ .map_err(|e| {
+ anyhow!(
+ "Failed to copy file {}({}) to WSL {:?}: {}",
+ src_path.display(),
+ src_path_in_wsl,
+ dst_path,
+ e
+ )
+ })?;
+
+ log::info!("uploaded remote server in {:?}", t0.elapsed());
+ Ok(())
+ }
+
+ async fn extract_and_install(
+ &self,
+ tmp_path: &RemotePathBuf,
+ dst_path: &RemotePathBuf,
+ delegate: &Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Result<()> {
+ delegate.set_status(Some("Extracting remote server"), cx);
+
+ let tmp_path_str = tmp_path.to_string();
+ let dst_path_str = dst_path.to_string();
+
+ // Build extraction script with proper error handling
+ let script = if tmp_path_str.ends_with(".gz") {
+ let uncompressed = tmp_path_str.trim_end_matches(".gz");
+ format!(
+ "set -e; gunzip -f '{}' && chmod 755 '{}' && mv -f '{}' '{}'",
+ tmp_path_str, uncompressed, uncompressed, dst_path_str
+ )
+ } else {
+ format!(
+ "set -e; chmod 755 '{}' && mv -f '{}' '{}'",
+ tmp_path_str, tmp_path_str, dst_path_str
+ )
+ };
+
+ self.run_wsl_command("sh", &["-c", &script])
+ .await
+ .map_err(|e| anyhow!("Failed to extract server binary: {}", e))?;
+ Ok(())
+ }
+}
+
+#[async_trait(?Send)]
+impl RemoteConnection for WslRemoteConnection {
+ fn start_proxy(
+ &self,
+ unique_identifier: String,
+ reconnect: bool,
+ incoming_tx: UnboundedSender<Envelope>,
+ outgoing_rx: UnboundedReceiver<Envelope>,
+ connection_activity_tx: Sender<()>,
+ delegate: Arc<dyn RemoteClientDelegate>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<i32>> {
+ delegate.set_status(Some("Starting proxy"), cx);
+
+ let Some(remote_binary_path) = &self.remote_binary_path else {
+ return Task::ready(Err(anyhow!("Remote binary path not set")));
+ };
+
+ let mut proxy_command = format!(
+ "exec {} proxy --identifier {}",
+ remote_binary_path, unique_identifier
+ );
+
+ if reconnect {
+ proxy_command.push_str(" --reconnect");
+ }
+
+ for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
+ if let Some(value) = std::env::var(env_var).ok() {
+ proxy_command = format!("{}='{}' {}", env_var, value, proxy_command);
+ }
+ }
+ let proxy_process = match self
+ .wsl_command("sh", &["-lc", &proxy_command])
+ .kill_on_drop(true)
+ .spawn()
+ {
+ Ok(process) => process,
+ Err(error) => {
+ return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error)));
+ }
+ };
+
+ super::handle_rpc_messages_over_child_process_stdio(
+ proxy_process,
+ incoming_tx,
+ outgoing_rx,
+ connection_activity_tx,
+ cx,
+ )
+ }
+
+ fn upload_directory(
+ &self,
+ src_path: PathBuf,
+ dest_path: RemotePathBuf,
+ cx: &App,
+ ) -> Task<Result<()>> {
+ cx.background_spawn({
+ let options = self.connection_options.clone();
+ async move {
+ let wsl_src = windows_path_to_wsl_path_impl(&options, &src_path).await?;
+
+ run_wsl_command_impl(&options, "cp", &["-r", &wsl_src, &dest_path.to_string()])
+ .await
+ .map_err(|e| {
+ anyhow!(
+ "failed to upload directory {} -> {}: {}",
+ src_path.display(),
+ dest_path.to_string(),
+ e
+ )
+ })?;
+
+ Ok(())
+ }
+ })
+ }
+
+ async fn kill(&self) -> Result<()> {
+ Ok(())
+ }
+
+ fn has_been_killed(&self) -> bool {
+ false
+ }
+
+ fn shares_network_interface(&self) -> bool {
+ true
+ }
+
+ fn build_command(
+ &self,
+ program: Option<String>,
+ args: &[String],
+ env: &HashMap<String, String>,
+ working_dir: Option<String>,
+ port_forward: Option<(u16, String, u16)>,
+ ) -> Result<CommandTemplate> {
+ if port_forward.is_some() {
+ bail!("WSL shares the network interface with the host system");
+ }
+
+ let working_dir = working_dir
+ .map(|working_dir| RemotePathBuf::new(working_dir.into(), PathStyle::Posix).to_string())
+ .unwrap_or("~".to_string());
+
+ let mut script = String::new();
+
+ for (k, v) in env.iter() {
+ write!(&mut script, "{}='{}' ", k, v).unwrap();
+ }
+
+ if let Some(program) = program {
+ let command = shlex::try_quote(&program)?;
+ script.push_str(&command);
+ for arg in args {
+ let arg = shlex::try_quote(&arg)?;
+ script.push_str(" ");
+ script.push_str(&arg);
+ }
+ } else {
+ write!(&mut script, "exec {} -l", self.shell).unwrap();
+ }
+
+ let wsl_args = if let Some(user) = &self.connection_options.user {
+ vec![
+ "--distribution".to_string(),
+ self.connection_options.distro_name.clone(),
+ "--user".to_string(),
+ user.clone(),
+ "--cd".to_string(),
+ working_dir,
+ "--".to_string(),
+ self.shell.clone(),
+ "-c".to_string(),
+ shlex::try_quote(&script)?.to_string(),
+ ]
+ } else {
+ vec![
+ "--distribution".to_string(),
+ self.connection_options.distro_name.clone(),
+ "--cd".to_string(),
+ working_dir,
+ "--".to_string(),
+ self.shell.clone(),
+ "-c".to_string(),
+ shlex::try_quote(&script)?.to_string(),
+ ]
+ };
+
+ Ok(CommandTemplate {
+ program: "wsl.exe".to_string(),
+ args: wsl_args,
+ env: HashMap::default(),
+ })
+ }
+
+ fn connection_options(&self) -> RemoteConnectionOptions {
+ RemoteConnectionOptions::Wsl(self.connection_options.clone())
+ }
+
+ fn path_style(&self) -> PathStyle {
+ PathStyle::Posix
+ }
+
+ fn shell(&self) -> String {
+ self.shell.clone()
+ }
+}
+
+/// `wslpath` is an executable available inside WSL; it's a Linux binary,
+/// so it doesn't support Windows-style paths.
+async fn sanitize_path(path: &Path) -> Result<String> {
+ let path = smol::fs::canonicalize(path).await?;
+ let path_str = path.to_string_lossy();
+
+ let sanitized = path_str.strip_prefix(r"\\?\").unwrap_or(&path_str);
+ Ok(sanitized.replace('\\', "/"))
+}
+
+async fn windows_path_to_wsl_path_impl(
+ options: &WslConnectionOptions,
+ source: &Path,
+) -> Result<String> {
+ let source = sanitize_path(source).await?;
+ run_wsl_command_impl(options, "wslpath", &["-u", &source]).await
+}
+
+fn wsl_command_impl(
+ options: &WslConnectionOptions,
+ program: &str,
+ args: &[&str],
+) -> process::Command {
+ let mut command = util::command::new_smol_command("wsl.exe");
+
+ if let Some(user) = &options.user {
+ command.arg("--user").arg(user);
+ }
+
+ command
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .arg("--distribution")
+ .arg(&options.distro_name)
+ .arg("--cd")
+ .arg("~")
+ .arg(program)
+ .args(args);
+
+ command
+}
+
+async fn run_wsl_command_impl(
+ options: &WslConnectionOptions,
+ program: &str,
+ args: &[&str],
+) -> Result<String> {
+ let output = wsl_command_impl(options, program, args).output().await?;
+
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Command '{}' failed: {}",
+ program,
+ String::from_utf8_lossy(&output.stderr).trim()
+ ));
+ }
+
+ Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
+}
@@ -65,6 +65,7 @@ telemetry_events.workspace = true
util.workspace = true
watch.workspace = true
worktree.workspace = true
+thiserror.workspace = true
[target.'cfg(not(windows))'.dependencies]
crashes.workspace = true
@@ -1,5 +1,6 @@
use ::proto::{FromProto, ToProto};
use anyhow::{Context as _, Result, anyhow};
+use lsp::LanguageServerId;
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
@@ -11,18 +12,19 @@ use node_runtime::NodeRuntime;
use project::{
LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
ToolchainStore, WorktreeId,
+ agent_server_store::AgentServerStore,
buffer_store::{BufferStore, BufferStoreEvent},
debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
git_store::GitStore,
+ lsp_store::log_store::{self, GlobalLogStore, LanguageServerKind},
project_settings::SettingsObserver,
search::SearchQuery,
task_store::TaskStore,
worktree_store::WorktreeStore,
};
-use remote::ssh_session::ChannelClient;
use rpc::{
AnyProtoClient, TypedEnvelope,
- proto::{self, SSH_PEER_ID, SSH_PROJECT_ID},
+ proto::{self, REMOTE_SERVER_PEER_ID, REMOTE_SERVER_PROJECT_ID},
};
use settings::initial_server_settings_content;
@@ -31,6 +33,7 @@ use std::{
path::{Path, PathBuf},
sync::{Arc, atomic::AtomicUsize},
};
+use sysinfo::System;
use util::ResultExt;
use worktree::Worktree;
@@ -42,15 +45,19 @@ pub struct HeadlessProject {
pub lsp_store: Entity<LspStore>,
pub task_store: Entity<TaskStore>,
pub dap_store: Entity<DapStore>,
+ pub agent_server_store: Entity<AgentServerStore>,
pub settings_observer: Entity<SettingsObserver>,
pub next_entry_id: Arc<AtomicUsize>,
pub languages: Arc<LanguageRegistry>,
pub extensions: Entity<HeadlessExtensionStore>,
pub git_store: Entity<GitStore>,
+ // Used mostly to keep alive the toolchain store for RPC handlers.
+ // Local variant is used within LSP store, but that's a separate entity.
+ pub _toolchain_store: Entity<ToolchainStore>,
}
pub struct HeadlessAppState {
- pub session: Arc<ChannelClient>,
+ pub session: AnyProtoClient,
pub fs: Arc<dyn Fs>,
pub http_client: Arc<dyn HttpClient>,
pub node_runtime: NodeRuntime,
@@ -63,6 +70,7 @@ impl HeadlessProject {
settings::init(cx);
language::init(cx);
project::Project::init_settings(cx);
+ log_store::init(true, cx);
}
pub fn new(
@@ -81,7 +89,7 @@ impl HeadlessProject {
let worktree_store = cx.new(|cx| {
let mut store = WorktreeStore::local(true, fs.clone());
- store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
store
});
@@ -99,7 +107,7 @@ impl HeadlessProject {
let buffer_store = cx.new(|cx| {
let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
- buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ buffer_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
buffer_store
});
@@ -117,7 +125,7 @@ impl HeadlessProject {
breakpoint_store.clone(),
cx,
);
- dap_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ dap_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
dap_store
});
@@ -129,7 +137,7 @@ impl HeadlessProject {
fs.clone(),
cx,
);
- store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
store
});
@@ -152,7 +160,7 @@ impl HeadlessProject {
environment.clone(),
cx,
);
- task_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
task_store
});
let settings_observer = cx.new(|cx| {
@@ -162,7 +170,7 @@ impl HeadlessProject {
task_store.clone(),
cx,
);
- observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ observer.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
observer
});
@@ -176,17 +184,24 @@ impl HeadlessProject {
.as_local_store()
.expect("Toolchain store to be local")
.clone(),
- environment,
+ environment.clone(),
manifest_tree,
languages.clone(),
http_client.clone(),
fs.clone(),
cx,
);
- lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
+ lsp_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
lsp_store
});
+ let agent_server_store = cx.new(|cx| {
+ let mut agent_server_store =
+ AgentServerStore::local(node_runtime.clone(), fs.clone(), environment, cx);
+ agent_server_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone());
+ agent_server_store
+ });
+
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
language_extension::init(
language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
@@ -194,15 +209,11 @@ impl HeadlessProject {
languages.clone(),
);
- cx.subscribe(
- &buffer_store,
- |_this, _buffer_store, event, cx| match event {
- BufferStoreEvent::BufferAdded(buffer) => {
- cx.subscribe(buffer, Self::on_buffer_event).detach();
- }
- _ => {}
- },
- )
+ cx.subscribe(&buffer_store, |_this, _buffer_store, event, cx| {
+ if let BufferStoreEvent::BufferAdded(buffer) = event {
+ cx.subscribe(buffer, Self::on_buffer_event).detach();
+ }
+ })
.detach();
let extensions = HeadlessExtensionStore::new(
@@ -214,57 +225,60 @@ impl HeadlessProject {
cx,
);
- let client: AnyProtoClient = session.clone().into();
-
// local_machine -> ssh handlers
- session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
- session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &dap_store);
- session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);
- session.subscribe_to_entity(SSH_PROJECT_ID, &git_store);
-
- client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
- client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
- client.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
- client.add_request_handler(cx.weak_entity(), Self::handle_ping);
-
- client.add_entity_request_handler(Self::handle_add_worktree);
- client.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);
-
- client.add_entity_request_handler(Self::handle_open_buffer_by_path);
- client.add_entity_request_handler(Self::handle_open_new_buffer);
- client.add_entity_request_handler(Self::handle_find_search_candidates);
- client.add_entity_request_handler(Self::handle_open_server_settings);
-
- client.add_entity_request_handler(BufferStore::handle_update_buffer);
- client.add_entity_message_handler(BufferStore::handle_close_buffer);
-
- client.add_request_handler(
- extensions.clone().downgrade(),
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &worktree_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &buffer_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &cx.entity());
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &lsp_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &task_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &toolchain_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &dap_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &settings_observer);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &git_store);
+ session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &agent_server_store);
+
+ session.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
+ session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
+ session.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
+ session.add_request_handler(cx.weak_entity(), Self::handle_ping);
+ session.add_request_handler(cx.weak_entity(), Self::handle_get_processes);
+
+ session.add_entity_request_handler(Self::handle_add_worktree);
+ session.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);
+
+ session.add_entity_request_handler(Self::handle_open_buffer_by_path);
+ session.add_entity_request_handler(Self::handle_open_new_buffer);
+ session.add_entity_request_handler(Self::handle_find_search_candidates);
+ session.add_entity_request_handler(Self::handle_open_server_settings);
+ session.add_entity_message_handler(Self::handle_toggle_lsp_logs);
+
+ session.add_entity_request_handler(BufferStore::handle_update_buffer);
+ session.add_entity_message_handler(BufferStore::handle_close_buffer);
+
+ session.add_request_handler(
+ extensions.downgrade(),
HeadlessExtensionStore::handle_sync_extensions,
);
- client.add_request_handler(
- extensions.clone().downgrade(),
+ session.add_request_handler(
+ extensions.downgrade(),
HeadlessExtensionStore::handle_install_extension,
);
- BufferStore::init(&client);
- WorktreeStore::init(&client);
- SettingsObserver::init(&client);
- LspStore::init(&client);
- TaskStore::init(Some(&client));
- ToolchainStore::init(&client);
- DapStore::init(&client, cx);
+ BufferStore::init(&session);
+ WorktreeStore::init(&session);
+ SettingsObserver::init(&session);
+ LspStore::init(&session);
+ TaskStore::init(Some(&session));
+ ToolchainStore::init(&session);
+ DapStore::init(&session, cx);
// todo(debugger): Re init breakpoint store when we set it up for collab
// BreakpointStore::init(&client);
- GitStore::init(&client);
+ GitStore::init(&session);
+ AgentServerStore::init_headless(&session);
HeadlessProject {
- session: client,
+ next_entry_id: Default::default(),
+ session,
settings_observer,
fs,
worktree_store,
@@ -272,10 +286,11 @@ impl HeadlessProject {
lsp_store,
task_store,
dap_store,
- next_entry_id: Default::default(),
+ agent_server_store,
languages,
extensions,
git_store,
+ _toolchain_store: toolchain_store,
}
}
@@ -285,28 +300,56 @@ impl HeadlessProject {
event: &BufferEvent,
cx: &mut Context<Self>,
) {
- match event {
- BufferEvent::Operation {
- operation,
- is_local: true,
- } => cx
- .background_spawn(self.session.request(proto::UpdateBuffer {
- project_id: SSH_PROJECT_ID,
- buffer_id: buffer.read(cx).remote_id().to_proto(),
- operations: vec![serialize_operation(operation)],
- }))
- .detach(),
- _ => {}
+ if let BufferEvent::Operation {
+ operation,
+ is_local: true,
+ } = event
+ {
+ cx.background_spawn(self.session.request(proto::UpdateBuffer {
+ project_id: REMOTE_SERVER_PROJECT_ID,
+ buffer_id: buffer.read(cx).remote_id().to_proto(),
+ operations: vec![serialize_operation(operation)],
+ }))
+ .detach()
}
}
fn on_lsp_store_event(
&mut self,
- _lsp_store: Entity<LspStore>,
+ lsp_store: Entity<LspStore>,
event: &LspStoreEvent,
cx: &mut Context<Self>,
) {
match event {
+ LspStoreEvent::LanguageServerAdded(id, name, worktree_id) => {
+ let log_store = cx
+ .try_global::<GlobalLogStore>()
+ .map(|lsp_logs| lsp_logs.0.clone());
+ if let Some(log_store) = log_store {
+ log_store.update(cx, |log_store, cx| {
+ log_store.add_language_server(
+ LanguageServerKind::LocalSsh {
+ lsp_store: self.lsp_store.downgrade(),
+ },
+ *id,
+ Some(name.clone()),
+ *worktree_id,
+ lsp_store.read(cx).language_server_for_id(*id),
+ cx,
+ );
+ });
+ }
+ }
+ LspStoreEvent::LanguageServerRemoved(id) => {
+ let log_store = cx
+ .try_global::<GlobalLogStore>()
+ .map(|lsp_logs| lsp_logs.0.clone());
+ if let Some(log_store) = log_store {
+ log_store.update(cx, |log_store, cx| {
+ log_store.remove_language_server(*id, cx);
+ });
+ }
+ }
LspStoreEvent::LanguageServerUpdate {
language_server_id,
name,
@@ -314,7 +357,7 @@ impl HeadlessProject {
} => {
self.session
.send(proto::UpdateLanguageServer {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
server_name: name.as_ref().map(|name| name.to_string()),
language_server_id: language_server_id.to_proto(),
variant: Some(message.clone()),
@@ -324,25 +367,15 @@ impl HeadlessProject {
LspStoreEvent::Notification(message) => {
self.session
.send(proto::Toast {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
notification_id: "lsp".to_string(),
message: message.clone(),
})
.log_err();
}
- LspStoreEvent::LanguageServerLog(language_server_id, log_type, message) => {
- self.session
- .send(proto::LanguageServerLog {
- project_id: SSH_PROJECT_ID,
- language_server_id: language_server_id.to_proto(),
- message: message.clone(),
- log_type: Some(log_type.to_proto()),
- })
- .log_err();
- }
LspStoreEvent::LanguageServerPrompt(prompt) => {
let request = self.session.request(proto::LanguageServerPromptRequest {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
actions: prompt
.actions
.iter()
@@ -478,7 +511,7 @@ impl HeadlessProject {
let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store
- .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
+ .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
.detach_and_log_err(cx);
})?;
@@ -496,7 +529,7 @@ impl HeadlessProject {
let buffer_store = this.buffer_store.clone();
let buffer = this
.buffer_store
- .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
+ .update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx));
anyhow::Ok((buffer_store, buffer))
})??;
@@ -504,7 +537,7 @@ impl HeadlessProject {
let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store
- .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
+ .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
.detach_and_log_err(cx);
})?;
@@ -513,7 +546,33 @@ impl HeadlessProject {
})
}
- pub async fn handle_open_server_settings(
+ async fn handle_toggle_lsp_logs(
+ _: Entity<Self>,
+ envelope: TypedEnvelope<proto::ToggleLspLogs>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
+ let lsp_logs = cx
+ .update(|cx| {
+ cx.try_global::<GlobalLogStore>()
+ .map(|lsp_logs| lsp_logs.0.clone())
+ })?
+ .context("lsp logs store is missing")?;
+
+ lsp_logs.update(&mut cx, |lsp_logs, _| {
+ // RPC logs are very noisy and we need to toggle it on the headless server too.
+ // The rest of the logs for the ssh project are very important to have toggled always,
+ // to e.g. send language server error logs to the client before anything is toggled.
+ if envelope.payload.enabled {
+ lsp_logs.enable_rpc_trace_for_language_server(server_id);
+ } else {
+ lsp_logs.disable_rpc_trace_for_language_server(server_id);
+ }
+ })?;
+ Ok(())
+ }
+
+ async fn handle_open_server_settings(
this: Entity<Self>,
_: TypedEnvelope<proto::OpenServerSettings>,
mut cx: AsyncApp,
@@ -554,7 +613,7 @@ impl HeadlessProject {
buffer_store.update(cx, |buffer_store, cx| {
buffer_store
- .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
+ .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
.detach_and_log_err(cx);
});
@@ -566,7 +625,7 @@ impl HeadlessProject {
})
}
- pub async fn handle_find_search_candidates(
+ async fn handle_find_search_candidates(
this: Entity<Self>,
envelope: TypedEnvelope<proto::FindSearchCandidates>,
mut cx: AsyncApp,
@@ -590,7 +649,7 @@ impl HeadlessProject {
response.buffer_ids.push(buffer_id.to_proto());
buffer_store
.update(&mut cx, |buffer_store, cx| {
- buffer_store.create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
+ buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
})?
.await?;
}
@@ -598,7 +657,7 @@ impl HeadlessProject {
Ok(response)
}
- pub async fn handle_list_remote_directory(
+ async fn handle_list_remote_directory(
this: Entity<Self>,
envelope: TypedEnvelope<proto::ListRemoteDirectory>,
cx: AsyncApp,
@@ -630,7 +689,7 @@ impl HeadlessProject {
})
}
- pub async fn handle_get_path_metadata(
+ async fn handle_get_path_metadata(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GetPathMetadata>,
cx: AsyncApp,
@@ -648,7 +707,7 @@ impl HeadlessProject {
})
}
- pub async fn handle_shutdown_remote_server(
+ async fn handle_shutdown_remote_server(
_this: Entity<Self>,
_envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
cx: AsyncApp,
@@ -674,6 +733,34 @@ impl HeadlessProject {
log::debug!("Received ping from client");
Ok(proto::Ack {})
}
+
+ async fn handle_get_processes(
+ _this: Entity<Self>,
+ _envelope: TypedEnvelope<proto::GetProcesses>,
+ _cx: AsyncApp,
+ ) -> Result<proto::GetProcessesResponse> {
+ let mut processes = Vec::new();
+ let system = System::new_all();
+
+ for (_pid, process) in system.processes() {
+ let name = process.name().to_string_lossy().into_owned();
+ let command = process
+ .cmd()
+ .iter()
+ .map(|s| s.to_string_lossy().to_string())
+ .collect::<Vec<_>>();
+
+ processes.push(proto::ProcessInfo {
+ pid: process.pid().as_u32(),
+ name,
+ command,
+ });
+ }
+
+ processes.sort_by_key(|p| p.name.clone());
+
+ Ok(proto::GetProcessesResponse { processes })
+ }
}
fn prompt_to_proto(
@@ -1,6 +1,7 @@
#![cfg_attr(target_os = "windows", allow(unused, dead_code))]
-use clap::{Parser, Subcommand};
+use clap::Parser;
+use remote_server::Commands;
use std::path::PathBuf;
#[derive(Parser)]
@@ -21,105 +22,34 @@ struct Cli {
printenv: bool,
}
-#[derive(Subcommand)]
-enum Commands {
- Run {
- #[arg(long)]
- log_file: PathBuf,
- #[arg(long)]
- pid_file: PathBuf,
- #[arg(long)]
- stdin_socket: PathBuf,
- #[arg(long)]
- stdout_socket: PathBuf,
- #[arg(long)]
- stderr_socket: PathBuf,
- },
- Proxy {
- #[arg(long)]
- reconnect: bool,
- #[arg(long)]
- identifier: String,
- },
- Version,
-}
-
#[cfg(windows)]
fn main() {
unimplemented!()
}
#[cfg(not(windows))]
-fn main() {
- use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
- use remote::proxy::ProxyLaunchError;
- use remote_server::unix::{execute_proxy, execute_run};
-
+fn main() -> anyhow::Result<()> {
let cli = Cli::parse();
if let Some(socket_path) = &cli.askpass {
askpass::main(socket_path);
- return;
+ return Ok(());
}
if let Some(socket) = &cli.crash_handler {
crashes::crash_server(socket.as_path());
- return;
+ return Ok(());
}
if cli.printenv {
util::shell_env::print_env();
- return;
+ return Ok(());
}
- let result = match cli.command {
- Some(Commands::Run {
- log_file,
- pid_file,
- stdin_socket,
- stdout_socket,
- stderr_socket,
- }) => execute_run(
- log_file,
- pid_file,
- stdin_socket,
- stdout_socket,
- stderr_socket,
- ),
- Some(Commands::Proxy {
- identifier,
- reconnect,
- }) => match execute_proxy(identifier, reconnect) {
- Ok(_) => Ok(()),
- Err(err) => {
- if let Some(err) = err.downcast_ref::<ProxyLaunchError>() {
- std::process::exit(err.to_exit_code());
- }
- Err(err)
- }
- },
- Some(Commands::Version) => {
- let release_channel = *RELEASE_CHANNEL;
- match release_channel {
- ReleaseChannel::Stable | ReleaseChannel::Preview => {
- println!("{}", env!("ZED_PKG_VERSION"))
- }
- ReleaseChannel::Nightly | ReleaseChannel::Dev => {
- println!(
- "{}",
- option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name())
- )
- }
- };
- std::process::exit(0);
- }
- None => {
- eprintln!("usage: remote <run|proxy|version>");
- std::process::exit(1);
- }
- };
- if let Err(error) = result {
- log::error!("exiting due to error: {}", error);
+ if let Some(command) = cli.command {
+ remote_server::run(command)
+ } else {
+ eprintln!("usage: remote <run|proxy|version>");
std::process::exit(1);
}
}
@@ -22,7 +22,7 @@ use project::{
Project, ProjectPath,
search::{SearchQuery, SearchResult},
};
-use remote::SshRemoteClient;
+use remote::RemoteClient;
use serde_json::json;
use settings::{Settings, SettingsLocation, SettingsStore, initial_server_settings_content};
use smol::stream::StreamExt;
@@ -280,7 +280,8 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo
AllLanguageSettings::get_global(cx)
.language(None, Some(&"Rust".into()), cx)
.language_servers,
- ["..."] // local settings are ignored
+ ["from-local-settings"],
+ "User language settings should be synchronized with the server settings"
)
});
@@ -300,7 +301,8 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo
AllLanguageSettings::get_global(cx)
.language(None, Some(&"Rust".into()), cx)
.language_servers,
- ["from-server-settings".to_string()]
+ ["from-server-settings".to_string()],
+ "Server language settings should take precedence over the user settings"
)
});
@@ -533,7 +535,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
Ok(Some(lsp::WorkspaceEdit {
changes: Some(
[(
- lsp::Url::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(),
+ lsp::Uri::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 6)),
"two".to_string(),
@@ -1119,7 +1121,7 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext)
buffer.edit([(ix..ix + 1, "100")], None, cx);
});
- let client = cx.read(|cx| project.read(cx).ssh_client().unwrap());
+ let client = cx.read(|cx| project.read(cx).remote_client().unwrap());
client
.update(cx, |client, cx| client.simulate_disconnect(cx))
.detach();
@@ -1207,7 +1209,7 @@ async fn test_remote_rename_entry(cx: &mut TestAppContext, server_cx: &mut TestA
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
cx.run_until_parked();
@@ -1782,7 +1784,7 @@ pub async fn init_test(
});
init_logger();
- let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx);
+ let (opts, ssh_server_client) = RemoteClient::fake_server(cx, server_cx);
let http_client = Arc::new(BlockedHttpClient);
let node_runtime = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(cx.executor()));
@@ -1804,7 +1806,7 @@ pub async fn init_test(
)
});
- let ssh = SshRemoteClient::fake_client(opts, cx).await;
+ let ssh = RemoteClient::fake_client(opts, cx).await;
let project = build_project(ssh, cx);
project
.update(cx, {
@@ -1819,7 +1821,7 @@ fn init_logger() {
zlog::init_test();
}
-fn build_project(ssh: Entity<SshRemoteClient>, cx: &mut TestAppContext) -> Entity<Project> {
+fn build_project(ssh: Entity<RemoteClient>, cx: &mut TestAppContext) -> Entity<Project> {
cx.update(|cx| {
if !cx.has_global::<SettingsStore>() {
let settings_store = SettingsStore::test(cx);
@@ -1845,5 +1847,5 @@ fn build_project(ssh: Entity<SshRemoteClient>, cx: &mut TestAppContext) -> Entit
language::init(cx);
});
- cx.update(|cx| Project::ssh(ssh, client, node, user_store, languages, fs, cx))
+ cx.update(|cx| Project::remote(ssh, client, node, user_store, languages, fs, cx))
}
@@ -6,4 +6,78 @@ pub mod unix;
#[cfg(test)]
mod remote_editing_tests;
+use clap::Subcommand;
+use std::path::PathBuf;
+
pub use headless_project::{HeadlessAppState, HeadlessProject};
+
+#[derive(Subcommand)]
+pub enum Commands {
+ Run {
+ #[arg(long)]
+ log_file: PathBuf,
+ #[arg(long)]
+ pid_file: PathBuf,
+ #[arg(long)]
+ stdin_socket: PathBuf,
+ #[arg(long)]
+ stdout_socket: PathBuf,
+ #[arg(long)]
+ stderr_socket: PathBuf,
+ },
+ Proxy {
+ #[arg(long)]
+ reconnect: bool,
+ #[arg(long)]
+ identifier: String,
+ },
+ Version,
+}
+
+#[cfg(not(windows))]
+pub fn run(command: Commands) -> anyhow::Result<()> {
+ use anyhow::Context;
+ use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
+ use unix::{ExecuteProxyError, execute_proxy, execute_run};
+
+ match command {
+ Commands::Run {
+ log_file,
+ pid_file,
+ stdin_socket,
+ stdout_socket,
+ stderr_socket,
+ } => execute_run(
+ log_file,
+ pid_file,
+ stdin_socket,
+ stdout_socket,
+ stderr_socket,
+ ),
+ Commands::Proxy {
+ identifier,
+ reconnect,
+ } => execute_proxy(identifier, reconnect)
+ .inspect_err(|err| {
+ if let ExecuteProxyError::ServerNotRunning(err) = err {
+ std::process::exit(err.to_exit_code());
+ }
+ })
+ .context("running proxy on the remote server"),
+ Commands::Version => {
+ let release_channel = *RELEASE_CHANNEL;
+ match release_channel {
+ ReleaseChannel::Stable | ReleaseChannel::Preview => {
+ println!("{}", env!("ZED_PKG_VERSION"))
+ }
+ ReleaseChannel::Nightly | ReleaseChannel::Dev => {
+ println!(
+ "{}",
+ option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name())
+ )
+ }
+ };
+ Ok(())
+ }
+ }
+}
@@ -19,14 +19,14 @@ use project::project_settings::ProjectSettings;
use proto::CrashReport;
use release_channel::{AppVersion, RELEASE_CHANNEL, ReleaseChannel};
-use remote::proxy::ProxyLaunchError;
-use remote::ssh_session::ChannelClient;
+use remote::RemoteClient;
use remote::{
json_log::LogRecord,
protocol::{read_message, write_message},
+ proxy::ProxyLaunchError,
};
use reqwest_client::ReqwestClient;
-use rpc::proto::{self, Envelope, SSH_PROJECT_ID};
+use rpc::proto::{self, Envelope, REMOTE_SERVER_PROJECT_ID};
use rpc::{AnyProtoClient, TypedEnvelope};
use settings::{Settings, SettingsStore, watch_config_file};
use smol::channel::{Receiver, Sender};
@@ -36,6 +36,7 @@ use smol::Async;
use smol::{net::unix::UnixListener, stream::StreamExt as _};
use std::ffi::OsStr;
use std::ops::ControlFlow;
+use std::process::ExitStatus;
use std::str::FromStr;
use std::sync::LazyLock;
use std::{env, thread};
@@ -46,6 +47,7 @@ use std::{
sync::Arc,
};
use telemetry_events::LocationData;
+use thiserror::Error;
use util::ResultExt;
pub static VERSION: LazyLock<&str> = LazyLock::new(|| match *RELEASE_CHANNEL {
@@ -84,7 +86,7 @@ fn init_logging_server(log_file_path: PathBuf) -> Result<Receiver<Vec<u8>>> {
fn flush(&mut self) -> std::io::Result<()> {
self.channel
.send_blocking(self.buffer.clone())
- .map_err(|error| std::io::Error::new(std::io::ErrorKind::Other, error))?;
+ .map_err(std::io::Error::other)?;
self.buffer.clear();
self.file.flush()
}
@@ -160,7 +162,7 @@ fn init_panic_hook(session_id: String) {
let panic_data = telemetry_events::Panic {
thread: thread_name.into(),
- payload: payload.clone(),
+ payload,
location_data: info.location().map(|location| LocationData {
file: location.file().into(),
line: location.line(),
@@ -199,8 +201,7 @@ fn init_panic_hook(session_id: String) {
}));
}
-fn handle_crash_files_requests(project: &Entity<HeadlessProject>, client: &Arc<ChannelClient>) {
- let client: AnyProtoClient = client.clone().into();
+fn handle_crash_files_requests(project: &Entity<HeadlessProject>, client: &AnyProtoClient) {
client.add_request_handler(
project.downgrade(),
|_, _: TypedEnvelope<proto::GetCrashFiles>, _cx| async move {
@@ -276,7 +277,7 @@ fn start_server(
listeners: ServerListeners,
log_rx: Receiver<Vec<u8>>,
cx: &mut App,
-) -> Arc<ChannelClient> {
+) -> AnyProtoClient {
// This is the server idle timeout. If no connection comes in this timeout, the server will shut down.
const IDLE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10 * 60);
@@ -334,7 +335,7 @@ fn start_server(
let (mut stdin_msg_tx, mut stdin_msg_rx) = mpsc::unbounded::<Envelope>();
cx.background_spawn(async move {
while let Ok(msg) = read_message(&mut stdin_stream, &mut input_buffer).await {
- if let Err(_) = stdin_msg_tx.send(msg).await {
+ if (stdin_msg_tx.send(msg).await).is_err() {
break;
}
}
@@ -395,7 +396,7 @@ fn start_server(
})
.detach();
- ChannelClient::new(incoming_rx, outgoing_tx, cx, "server")
+ RemoteClient::proto_client_from_channels(incoming_rx, outgoing_tx, cx, "server")
}
fn init_paths() -> anyhow::Result<()> {
@@ -527,7 +528,23 @@ pub fn execute_run(
Ok(())
}
-#[derive(Clone)]
+#[derive(Debug, Error)]
+pub(crate) enum ServerPathError {
+ #[error("Failed to create server_dir `{path}`")]
+ CreateServerDir {
+ #[source]
+ source: std::io::Error,
+ path: PathBuf,
+ },
+ #[error("Failed to create logs_dir `{path}`")]
+ CreateLogsDir {
+ #[source]
+ source: std::io::Error,
+ path: PathBuf,
+ },
+}
+
+#[derive(Clone, Debug)]
struct ServerPaths {
log_file: PathBuf,
pid_file: PathBuf,
@@ -537,10 +554,19 @@ struct ServerPaths {
}
impl ServerPaths {
- fn new(identifier: &str) -> Result<Self> {
+ fn new(identifier: &str) -> Result<Self, ServerPathError> {
let server_dir = paths::remote_server_state_dir().join(identifier);
- std::fs::create_dir_all(&server_dir)?;
- std::fs::create_dir_all(&logs_dir())?;
+ std::fs::create_dir_all(&server_dir).map_err(|source| {
+ ServerPathError::CreateServerDir {
+ source,
+ path: server_dir.clone(),
+ }
+ })?;
+ let log_dir = logs_dir();
+ std::fs::create_dir_all(log_dir).map_err(|source| ServerPathError::CreateLogsDir {
+            source,
+ path: log_dir.clone(),
+ })?;
let pid_file = server_dir.join("server.pid");
let stdin_socket = server_dir.join("stdin.sock");
@@ -558,7 +584,43 @@ impl ServerPaths {
}
}
-pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
+#[derive(Debug, Error)]
+pub(crate) enum ExecuteProxyError {
+ #[error("Failed to init server paths")]
+ ServerPath(#[from] ServerPathError),
+
+ #[error(transparent)]
+ ServerNotRunning(#[from] ProxyLaunchError),
+
+ #[error("Failed to check PidFile '{path}'")]
+ CheckPidFile {
+ #[source]
+ source: CheckPidError,
+ path: PathBuf,
+ },
+
+ #[error("Failed to kill existing server with pid '{pid}'")]
+ KillRunningServer {
+ #[source]
+ source: std::io::Error,
+ pid: u32,
+ },
+
+ #[error("failed to spawn server")]
+ SpawnServer(#[source] SpawnServerError),
+
+ #[error("stdin_task failed")]
+ StdinTask(#[source] anyhow::Error),
+ #[error("stdout_task failed")]
+ StdoutTask(#[source] anyhow::Error),
+ #[error("stderr_task failed")]
+ StderrTask(#[source] anyhow::Error),
+}
+
+pub(crate) fn execute_proxy(
+ identifier: String,
+ is_reconnecting: bool,
+) -> Result<(), ExecuteProxyError> {
init_logging_proxy();
let server_paths = ServerPaths::new(&identifier)?;
@@ -575,12 +637,19 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
log::info!("starting proxy process. PID: {}", std::process::id());
- let server_pid = check_pid_file(&server_paths.pid_file)?;
+ let server_pid = check_pid_file(&server_paths.pid_file).map_err(|source| {
+ ExecuteProxyError::CheckPidFile {
+ source,
+ path: server_paths.pid_file.clone(),
+ }
+ })?;
let server_running = server_pid.is_some();
if is_reconnecting {
if !server_running {
log::error!("attempted to reconnect, but no server running");
- anyhow::bail!(ProxyLaunchError::ServerNotRunning);
+ return Err(ExecuteProxyError::ServerNotRunning(
+ ProxyLaunchError::ServerNotRunning,
+ ));
}
} else {
if let Some(pid) = server_pid {
@@ -591,7 +660,7 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
kill_running_server(pid, &server_paths)?;
}
- spawn_server(&server_paths)?;
+ spawn_server(&server_paths).map_err(ExecuteProxyError::SpawnServer)?;
};
let stdin_task = smol::spawn(async move {
@@ -631,9 +700,9 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
if let Err(forwarding_result) = smol::block_on(async move {
futures::select! {
- result = stdin_task.fuse() => result.context("stdin_task failed"),
- result = stdout_task.fuse() => result.context("stdout_task failed"),
- result = stderr_task.fuse() => result.context("stderr_task failed"),
+ result = stdin_task.fuse() => result.map_err(ExecuteProxyError::StdinTask),
+ result = stdout_task.fuse() => result.map_err(ExecuteProxyError::StdoutTask),
+ result = stderr_task.fuse() => result.map_err(ExecuteProxyError::StderrTask),
}
}) {
log::error!(
@@ -646,12 +715,12 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> {
Ok(())
}
-fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<()> {
+fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> {
log::info!("killing existing server with PID {}", pid);
std::process::Command::new("kill")
.arg(pid.to_string())
.output()
- .context("failed to kill existing server")?;
+ .map_err(|source| ExecuteProxyError::KillRunningServer { source, pid })?;
for file in [
&paths.pid_file,
@@ -665,18 +734,39 @@ fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<()> {
Ok(())
}
-fn spawn_server(paths: &ServerPaths) -> Result<()> {
+#[derive(Debug, Error)]
+pub(crate) enum SpawnServerError {
+ #[error("failed to remove stdin socket")]
+ RemoveStdinSocket(#[source] std::io::Error),
+
+ #[error("failed to remove stdout socket")]
+ RemoveStdoutSocket(#[source] std::io::Error),
+
+ #[error("failed to remove stderr socket")]
+ RemoveStderrSocket(#[source] std::io::Error),
+
+ #[error("failed to get current_exe")]
+ CurrentExe(#[source] std::io::Error),
+
+ #[error("failed to launch server process")]
+ ProcessStatus(#[source] std::io::Error),
+
+ #[error("failed to launch and detach server process: {status}\n{paths}")]
+ LaunchStatus { status: ExitStatus, paths: String },
+}
+
+fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> {
if paths.stdin_socket.exists() {
- std::fs::remove_file(&paths.stdin_socket)?;
+ std::fs::remove_file(&paths.stdin_socket).map_err(SpawnServerError::RemoveStdinSocket)?;
}
if paths.stdout_socket.exists() {
- std::fs::remove_file(&paths.stdout_socket)?;
+ std::fs::remove_file(&paths.stdout_socket).map_err(SpawnServerError::RemoveStdoutSocket)?;
}
if paths.stderr_socket.exists() {
- std::fs::remove_file(&paths.stderr_socket)?;
+ std::fs::remove_file(&paths.stderr_socket).map_err(SpawnServerError::RemoveStderrSocket)?;
}
- let binary_name = std::env::current_exe()?;
+ let binary_name = std::env::current_exe().map_err(SpawnServerError::CurrentExe)?;
let mut server_process = std::process::Command::new(binary_name);
server_process
.arg("run")
@@ -693,11 +783,17 @@ fn spawn_server(paths: &ServerPaths) -> Result<()> {
let status = server_process
.status()
- .context("failed to launch server process")?;
- anyhow::ensure!(
- status.success(),
- "failed to launch and detach server process"
- );
+ .map_err(SpawnServerError::ProcessStatus)?;
+
+ if !status.success() {
+ return Err(SpawnServerError::LaunchStatus {
+ status,
+ paths: format!(
+ "log file: {:?}, pid file: {:?}",
+ paths.log_file, paths.pid_file,
+ ),
+ });
+ }
let mut total_time_waited = std::time::Duration::from_secs(0);
let wait_duration = std::time::Duration::from_millis(20);
@@ -718,7 +814,15 @@ fn spawn_server(paths: &ServerPaths) -> Result<()> {
Ok(())
}
-fn check_pid_file(path: &Path) -> Result<Option<u32>> {
+#[derive(Debug, Error)]
+#[error("Failed to remove PID file for missing process (pid `{pid}`)")]
+pub(crate) struct CheckPidError {
+ #[source]
+ source: std::io::Error,
+ pid: u32,
+}
+
+fn check_pid_file(path: &Path) -> Result<Option<u32>, CheckPidError> {
let Some(pid) = std::fs::read_to_string(&path)
.ok()
.and_then(|contents| contents.parse::<u32>().ok())
@@ -743,7 +847,7 @@ fn check_pid_file(path: &Path) -> Result<Option<u32>> {
log::debug!(
"Found PID file, but process with that PID does not exist. Removing PID file."
);
- std::fs::remove_file(&path).context("Failed to remove PID file")?;
+ std::fs::remove_file(&path).map_err(|source| CheckPidError { source, pid })?;
Ok(None)
}
}
@@ -763,36 +867,23 @@ where
R: AsyncRead + Unpin,
W: AsyncWrite + Unpin,
{
- use remote::protocol::read_message_raw;
+ use remote::protocol::{read_message_raw, write_size_prefixed_buffer};
let mut buffer = Vec::new();
loop {
read_message_raw(&mut reader, &mut buffer)
.await
.with_context(|| format!("failed to read message from {}", socket_name))?;
-
write_size_prefixed_buffer(&mut writer, &mut buffer)
.await
.with_context(|| format!("failed to write message to {}", socket_name))?;
-
writer.flush().await?;
-
buffer.clear();
}
}
-async fn write_size_prefixed_buffer<S: AsyncWrite + Unpin>(
- stream: &mut S,
- buffer: &mut Vec<u8>,
-) -> Result<()> {
- let len = buffer.len() as u32;
- stream.write_all(len.to_le_bytes().as_slice()).await?;
- stream.write_all(buffer).await?;
- Ok(())
-}
-
fn initialize_settings(
- session: Arc<ChannelClient>,
+ session: AnyProtoClient,
fs: Arc<dyn Fs>,
cx: &mut App,
) -> watch::Receiver<Option<NodeBinaryOptions>> {
@@ -800,14 +891,13 @@ fn initialize_settings(
watch_config_file(cx.background_executor(), fs, paths::settings_file().clone());
handle_settings_file_changes(user_settings_file_rx, cx, {
- let session = session.clone();
move |err, _cx| {
if let Some(e) = err {
log::info!("Server settings failed to change: {}", e);
session
.send(proto::Toast {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
notification_id: "server-settings-failed".to_string(),
message: format!(
"Error in settings on remote host {:?}: {}",
@@ -819,7 +909,7 @@ fn initialize_settings(
} else {
session
.send(proto::HideToast {
- project_id: SSH_PROJECT_ID,
+ project_id: REMOTE_SERVER_PROJECT_ID,
notification_id: "server-settings-failed".to_string(),
})
.log_err();
@@ -828,29 +918,33 @@ fn initialize_settings(
});
let (mut tx, rx) = watch::channel(None);
+ let mut node_settings = None;
cx.observe_global::<SettingsStore>(move |cx| {
- let settings = &ProjectSettings::get_global(cx).node;
- log::info!("Got new node settings: {:?}", settings);
- let options = NodeBinaryOptions {
- allow_path_lookup: !settings.ignore_system_version,
- // TODO: Implement this setting
- allow_binary_download: true,
- use_paths: settings.path.as_ref().map(|node_path| {
- let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref());
- let npm_path = settings
- .npm_path
- .as_ref()
- .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref()));
- (
- node_path.clone(),
- npm_path.unwrap_or_else(|| {
- let base_path = PathBuf::new();
- node_path.parent().unwrap_or(&base_path).join("npm")
- }),
- )
- }),
- };
- tx.send(Some(options)).log_err();
+ let new_node_settings = &ProjectSettings::get_global(cx).node;
+ if Some(new_node_settings) != node_settings.as_ref() {
+ log::info!("Got new node settings: {new_node_settings:?}");
+ let options = NodeBinaryOptions {
+ allow_path_lookup: !new_node_settings.ignore_system_version,
+ // TODO: Implement this setting
+ allow_binary_download: true,
+ use_paths: new_node_settings.path.as_ref().map(|node_path| {
+ let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref());
+ let npm_path = new_node_settings
+ .npm_path
+ .as_ref()
+ .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref()));
+ (
+ node_path.clone(),
+ npm_path.unwrap_or_else(|| {
+ let base_path = PathBuf::new();
+ node_path.parent().unwrap_or(&base_path).join("npm")
+ }),
+ )
+ }),
+ };
+ node_settings = Some(new_node_settings.clone());
+ tx.send(Some(options)).ok();
+ }
})
.detach();
@@ -891,7 +985,8 @@ pub fn handle_settings_file_changes(
fn read_proxy_settings(cx: &mut Context<HeadlessProject>) -> Option<Url> {
let proxy_str = ProxySettings::get_global(cx).proxy.to_owned();
- let proxy_url = proxy_str
+
+ proxy_str
.as_ref()
.and_then(|input: &String| {
input
@@ -899,8 +994,7 @@ fn read_proxy_settings(cx: &mut Context<HeadlessProject>) -> Option<Url> {
.inspect_err(|e| log::error!("Error parsing proxy settings: {}", e))
.ok()
})
- .or_else(read_proxy_from_env);
- proxy_url
+ .or_else(read_proxy_from_env)
}
fn daemonize() -> Result<ControlFlow<()>> {
@@ -187,7 +187,7 @@ impl PickerDelegate for KernelPickerDelegate {
.size(LabelSize::Default),
),
)
- .when_some(path_or_url.clone(), |flex, path| {
+ .when_some(path_or_url, |flex, path| {
flex.text_ellipsis().child(
Label::new(path)
.size(LabelSize::Small)
@@ -269,10 +269,9 @@ where
};
let picker_view = cx.new(|cx| {
- let picker = Picker::uniform_list(delegate, window, cx)
+ Picker::uniform_list(delegate, window, cx)
.width(rems(30.))
- .max_height(Some(rems(20.).into()));
- picker
+ .max_height(Some(rems(20.).into()))
});
PopoverMenu::new("kernel-switcher")
@@ -4,7 +4,7 @@ use editor::EditorSettings;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Debug, Default)]
pub struct JupyterSettings {
@@ -20,7 +20,8 @@ impl JupyterSettings {
}
}
-#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)]
+#[settings_key(key = "jupyter")]
pub struct JupyterSettingsContent {
/// Default kernels to select for each language.
///
@@ -37,8 +38,6 @@ impl Default for JupyterSettingsContent {
}
impl Settings for JupyterSettings {
- const KEY: Option<&'static str> = Some("jupyter");
-
type FileContent = JupyterSettingsContent;
fn load(
@@ -11,7 +11,7 @@ use language::LanguageName;
pub use native_kernel::*;
mod remote_kernels;
-use project::{Project, ProjectPath, WorktreeId};
+use project::{Project, ProjectPath, Toolchains, WorktreeId};
pub use remote_kernels::*;
use anyhow::Result;
@@ -92,49 +92,58 @@ pub fn python_env_kernel_specifications(
let background_executor = cx.background_executor().clone();
async move {
- let toolchains = if let Some((toolchains, _)) = toolchains.await {
- toolchains
+ let (toolchains, user_toolchains) = if let Some(Toolchains {
+ toolchains,
+ root_path: _,
+ user_toolchains,
+ }) = toolchains.await
+ {
+ (toolchains, user_toolchains)
} else {
return Ok(Vec::new());
};
- let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| {
- background_executor.spawn(async move {
- let python_path = toolchain.path.to_string();
-
- // Check if ipykernel is installed
- let ipykernel_check = util::command::new_smol_command(&python_path)
- .args(&["-c", "import ipykernel"])
- .output()
- .await;
-
- if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() {
- // Create a default kernelspec for this environment
- let default_kernelspec = JupyterKernelspec {
- argv: vec![
- python_path.clone(),
- "-m".to_string(),
- "ipykernel_launcher".to_string(),
- "-f".to_string(),
- "{connection_file}".to_string(),
- ],
- display_name: toolchain.name.to_string(),
- language: "python".to_string(),
- interrupt_mode: None,
- metadata: None,
- env: None,
- };
-
- Some(KernelSpecification::PythonEnv(LocalKernelSpecification {
- name: toolchain.name.to_string(),
- path: PathBuf::from(&python_path),
- kernelspec: default_kernelspec,
- }))
- } else {
- None
- }
- })
- });
+ let kernelspecs = user_toolchains
+ .into_values()
+ .flatten()
+ .chain(toolchains.toolchains)
+ .map(|toolchain| {
+ background_executor.spawn(async move {
+ let python_path = toolchain.path.to_string();
+
+ // Check if ipykernel is installed
+ let ipykernel_check = util::command::new_smol_command(&python_path)
+ .args(&["-c", "import ipykernel"])
+ .output()
+ .await;
+
+ if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() {
+ // Create a default kernelspec for this environment
+ let default_kernelspec = JupyterKernelspec {
+ argv: vec![
+ python_path.clone(),
+ "-m".to_string(),
+ "ipykernel_launcher".to_string(),
+ "-f".to_string(),
+ "{connection_file}".to_string(),
+ ],
+ display_name: toolchain.name.to_string(),
+ language: "python".to_string(),
+ interrupt_mode: None,
+ metadata: None,
+ env: None,
+ };
+
+ Some(KernelSpecification::PythonEnv(LocalKernelSpecification {
+ name: toolchain.name.to_string(),
+ path: PathBuf::from(&python_path),
+ kernelspec: default_kernelspec,
+ }))
+ } else {
+ None
+ }
+ })
+ });
let kernel_specs = futures::future::join_all(kernelspecs)
.await
@@ -169,10 +178,7 @@ pub enum KernelStatus {
impl KernelStatus {
pub fn is_connected(&self) -> bool {
- match self {
- KernelStatus::Idle | KernelStatus::Busy => true,
- _ => false,
- }
+ matches!(self, KernelStatus::Idle | KernelStatus::Busy)
}
}
@@ -95,7 +95,7 @@ pub async fn list_remote_kernelspecs(
.kernelspecs
.into_iter()
.map(|(name, spec)| RemoteKernelSpecification {
- name: name.clone(),
+ name,
url: remote_server.base_url.clone(),
token: remote_server.token.clone(),
kernelspec: spec.spec,
@@ -103,7 +103,7 @@ pub async fn list_remote_kernelspecs(
.collect::<Vec<RemoteKernelSpecification>>();
anyhow::ensure!(!remote_kernelspecs.is_empty(), "No kernel specs found");
- Ok(remote_kernelspecs.clone())
+ Ok(remote_kernelspecs)
}
impl PartialEq for RemoteKernelSpecification {
@@ -91,7 +91,7 @@ fn convert_outputs(
cx: &mut App,
) -> Vec<Output> {
outputs
- .into_iter()
+ .iter()
.map(|output| match output {
nbformat::v4::Output::Stream { text, .. } => Output::Stream {
content: cx.new(|cx| TerminalOutput::from(&text.0, window, cx)),
@@ -584,8 +584,8 @@ impl project::ProjectItem for NotebookItem {
Ok(nbformat::Notebook::Legacy(legacy_notebook)) => {
// TODO: Decide if we want to mutate the notebook by including Cell IDs
// and any other conversions
- let notebook = nbformat::upgrade_legacy_notebook(legacy_notebook)?;
- notebook
+
+ nbformat::upgrade_legacy_notebook(legacy_notebook)?
}
// Bad notebooks and notebooks v4.0 and below are not supported
Err(e) => {
@@ -594,9 +594,10 @@ impl project::ProjectItem for NotebookItem {
};
let id = project
- .update(cx, |project, cx| project.entry_for_path(&path, cx))?
- .context("Entry not found")?
- .id;
+ .update(cx, |project, cx| {
+ project.entry_for_path(&path, cx).map(|entry| entry.id)
+ })?
+ .context("Entry not found")?;
cx.new(|_| NotebookItem {
path: abs_path,
@@ -33,16 +33,13 @@
//! This module is designed to work with Jupyter message protocols,
//! interpreting and displaying various types of Jupyter output.
-use std::time::Duration;
-
use editor::{Editor, MultiBuffer};
-use gpui::{
- Animation, AnimationExt, AnyElement, ClipboardItem, Entity, Render, Transformation, WeakEntity,
- percentage,
-};
+use gpui::{AnyElement, ClipboardItem, Entity, Render, WeakEntity};
use language::Buffer;
use runtimelib::{ExecutionState, JupyterMessageContent, MimeBundle, MimeType};
-use ui::{Context, IntoElement, Styled, Tooltip, Window, div, prelude::*, v_flex};
+use ui::{
+ CommonAnimationExt, Context, IntoElement, Styled, Tooltip, Window, div, prelude::*, v_flex,
+};
mod image;
use image::ImageView;
@@ -228,26 +225,23 @@ impl Output {
.child(div().flex_1().children(content))
.children(match self {
Self::Plain { content, .. } => {
- Self::render_output_controls(content.clone(), workspace.clone(), window, cx)
+ Self::render_output_controls(content.clone(), workspace, window, cx)
}
Self::Markdown { content, .. } => {
- Self::render_output_controls(content.clone(), workspace.clone(), window, cx)
+ Self::render_output_controls(content.clone(), workspace, window, cx)
}
Self::Stream { content, .. } => {
- Self::render_output_controls(content.clone(), workspace.clone(), window, cx)
+ Self::render_output_controls(content.clone(), workspace, window, cx)
}
Self::Image { content, .. } => {
- Self::render_output_controls(content.clone(), workspace.clone(), window, cx)
+ Self::render_output_controls(content.clone(), workspace, window, cx)
+ }
+ Self::ErrorOutput(err) => {
+ Self::render_output_controls(err.traceback.clone(), workspace, window, cx)
}
- Self::ErrorOutput(err) => Self::render_output_controls(
- err.traceback.clone(),
- workspace.clone(),
- window,
- cx,
- ),
Self::Message(_) => None,
Self::Table { content, .. } => {
- Self::render_output_controls(content.clone(), workspace.clone(), window, cx)
+ Self::render_output_controls(content.clone(), workspace, window, cx)
}
Self::ClearOutputWaitMarker => None,
})
@@ -484,11 +478,7 @@ impl Render for ExecutionView {
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
.color(Color::Muted)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(3)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- ),
+ .with_rotate_animation(3),
)
.child(Label::new("Executing...").color(Color::Muted))
.into_any_element(),
@@ -35,7 +35,7 @@ impl MarkdownView {
});
Self {
- raw_text: text.clone(),
+ raw_text: text,
image_cache: RetainAllImageCache::new(cx),
contents: None,
parsing_markdown_task: Some(task),
@@ -68,7 +68,7 @@ pub fn text_style(window: &mut Window, cx: &mut App) -> TextStyle {
let theme = cx.theme();
- let text_style = TextStyle {
+ TextStyle {
font_family,
font_features,
font_weight,
@@ -81,9 +81,7 @@ pub fn text_style(window: &mut Window, cx: &mut App) -> TextStyle {
// These are going to be overridden per-cell
color: theme.colors().terminal_foreground,
..Default::default()
- };
-
- text_style
+ }
}
/// Returns the default terminal size for the terminal output.
@@ -202,7 +202,7 @@ pub fn session(editor: WeakEntity<Editor>, cx: &mut App) -> SessionSupport {
return SessionSupport::Unsupported;
};
- let worktree_id = worktree_id_for_editor(editor.clone(), cx);
+ let worktree_id = worktree_id_for_editor(editor, cx);
let Some(worktree_id) = worktree_id else {
return SessionSupport::Unsupported;
@@ -216,7 +216,7 @@ pub fn session(editor: WeakEntity<Editor>, cx: &mut App) -> SessionSupport {
Some(kernelspec) => SessionSupport::Inactive(kernelspec),
None => {
// For language_supported, need to check available kernels for language
- if language_supported(&language.clone(), cx) {
+ if language_supported(&language, cx) {
SessionSupport::RequiresSetup(language.name())
} else {
SessionSupport::Unsupported
@@ -326,7 +326,7 @@ pub fn setup_editor_session_actions(editor: &mut Editor, editor_handle: WeakEnti
editor
.register_action({
- let editor_handle = editor_handle.clone();
+ let editor_handle = editor_handle;
move |_: &Restart, window, cx| {
if !JupyterSettings::enabled(cx) {
return;
@@ -420,7 +420,7 @@ fn runnable_ranges(
if let Some(language) = buffer.language()
&& language.name() == "Markdown".into()
{
- return (markdown_code_blocks(buffer, range.clone(), cx), None);
+ return (markdown_code_blocks(buffer, range, cx), None);
}
let (jupytext_snippets, next_cursor) = jupytext_cells(buffer, range.clone());
@@ -685,8 +685,8 @@ mod tests {
let python = languages::language("python", tree_sitter_python::LANGUAGE.into());
let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
language_registry.add(markdown.clone());
- language_registry.add(typescript.clone());
- language_registry.add(python.clone());
+ language_registry.add(typescript);
+ language_registry.add(python);
// Two code blocks intersecting with selection
let buffer = cx.new(|cx| {
@@ -129,7 +129,6 @@ pub fn init(cx: &mut App) {
editor
.register_action({
- let editor_handle = editor_handle.clone();
move |_: &RunInPlace, window, cx| {
if !JupyterSettings::enabled(cx) {
return;
@@ -460,7 +460,6 @@ impl Session {
Kernel::StartingKernel(task) => {
// Queue up the execution as a task to run after the kernel starts
let task = task.clone();
- let message = message.clone();
cx.spawn(async move |this, cx| {
task.await;
@@ -568,7 +567,7 @@ impl Session {
match kernel {
Kernel::RunningKernel(mut kernel) => {
- let mut request_tx = kernel.request_tx().clone();
+ let mut request_tx = kernel.request_tx();
let forced = kernel.force_shutdown(window, cx);
@@ -605,7 +604,7 @@ impl Session {
// Do nothing if already restarting
}
Kernel::RunningKernel(mut kernel) => {
- let mut request_tx = kernel.request_tx().clone();
+ let mut request_tx = kernel.request_tx();
let forced = kernel.force_shutdown(window, cx);
@@ -15,10 +15,6 @@ test-support = []
path = "src/reqwest_client.rs"
doctest = true
-[[example]]
-name = "client"
-path = "examples/client.rs"
-
[dependencies]
anyhow.workspace = true
bytes.workspace = true
@@ -26,7 +22,6 @@ futures.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
serde.workspace = true
-smol.workspace = true
log.workspace = true
tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
regex.workspace = true
@@ -1,41 +0,0 @@
-use std::time::Instant;
-
-use futures::AsyncReadExt as _;
-use futures::stream::FuturesUnordered;
-use http_client::AsyncBody;
-use http_client::HttpClient;
-use reqwest_client::ReqwestClient;
-use smol::stream::StreamExt;
-
-fn main() {
- let app = gpui::Application::new();
- app.run(|cx| {
- cx.spawn(async move |cx| {
- let client = ReqwestClient::new();
- let start = Instant::now();
- let requests = [
- client.get("https://www.google.com/", AsyncBody::empty(), true),
- client.get("https://zed.dev/", AsyncBody::empty(), true),
- client.get("https://docs.rs/", AsyncBody::empty(), true),
- ];
- let mut requests = requests.into_iter().collect::<FuturesUnordered<_>>();
- while let Some(response) = requests.next().await {
- let mut body = String::new();
- response
- .unwrap()
- .into_body()
- .read_to_string(&mut body)
- .await
- .unwrap();
- println!("{}", &body.len());
- }
- println!("{:?}", start.elapsed());
-
- cx.update(|cx| {
- cx.quit();
- })
- .ok();
- })
- .detach();
- })
-}
@@ -264,7 +264,7 @@ impl http_client::HttpClient for ReqwestClient {
let bytes = response
.bytes_stream()
- .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e))
+ .map_err(futures::io::Error::other)
.into_async_read();
let body = http_client::AsyncBody::from_reader(bytes);
@@ -28,11 +28,11 @@ fn generate_random_rope_ranges(mut rng: StdRng, rope: &Rope) -> Vec<Range<usize>
let mut start = 0;
for _ in 0..num_ranges {
let range_start = rope.clip_offset(
- rng.gen_range(start..=(start + range_max_len)),
+ rng.random_range(start..=(start + range_max_len)),
sum_tree::Bias::Left,
);
let range_end = rope.clip_offset(
- rng.gen_range(range_start..(range_start + range_max_len)),
+ rng.random_range(range_start..(range_start + range_max_len)),
sum_tree::Bias::Right,
);
@@ -52,7 +52,7 @@ fn generate_random_rope_points(mut rng: StdRng, rope: &Rope) -> Vec<Point> {
let mut points = Vec::new();
for _ in 0..num_points {
- points.push(rope.offset_to_point(rng.gen_range(0..rope.len())));
+ points.push(rope.offset_to_point(rng.random_range(0..rope.len())));
}
points
}
@@ -92,7 +92,7 @@ impl Into<Chunk> for ChunkSlice<'_> {
impl<'a> ChunkSlice<'a> {
#[inline(always)]
- pub fn is_empty(self) -> bool {
+ pub fn is_empty(&self) -> bool {
self.text.is_empty()
}
@@ -408,7 +408,7 @@ impl<'a> ChunkSlice<'a> {
}
let row_offset_range = self.offset_range_for_row(point.0.row);
- let line = self.slice(row_offset_range.clone());
+ let line = self.slice(row_offset_range);
if point.0.column == 0 {
Point::new(point.0.row, 0)
} else if point.0.column >= line.len_utf16().0 as u32 {
@@ -543,7 +543,7 @@ impl Iterator for Tabs {
// Since tabs are 1 byte the tab offset is the same as the byte offset
let position = TabPosition {
byte_offset: tab_offset,
- char_offset: char_offset,
+ char_offset,
};
// Remove the tab we've just seen
self.tabs ^= 1 << tab_offset;
@@ -612,7 +612,7 @@ mod tests {
#[gpui::test(iterations = 100)]
fn test_random_chunks(mut rng: StdRng) {
- let chunk_len = rng.gen_range(0..=MAX_BASE);
+ let chunk_len = rng.random_range(0..=MAX_BASE);
let text = RandomCharIter::new(&mut rng)
.take(chunk_len)
.collect::<String>();
@@ -627,8 +627,8 @@ mod tests {
verify_chunk(chunk.as_slice(), text);
for _ in 0..10 {
- let mut start = rng.gen_range(0..=chunk.text.len());
- let mut end = rng.gen_range(start..=chunk.text.len());
+ let mut start = rng.random_range(0..=chunk.text.len());
+ let mut end = rng.random_range(start..=chunk.text.len());
while !chunk.text.is_char_boundary(start) {
start -= 1;
}
@@ -645,7 +645,7 @@ mod tests {
#[gpui::test(iterations = 1000)]
fn test_nth_set_bit_random(mut rng: StdRng) {
- let set_count = rng.gen_range(0..=128);
+ let set_count = rng.random_range(0..=128);
let mut set_bits = (0..128).choose_multiple(&mut rng, set_count);
set_bits.sort();
let mut n = 0;
@@ -639,18 +639,20 @@ impl<'a> Chunks<'a> {
pub fn seek(&mut self, mut offset: usize) {
offset = offset.clamp(self.range.start, self.range.end);
- let bias = if self.reversed {
- Bias::Left
+ if self.reversed {
+ if offset > self.chunks.end() {
+ self.chunks.seek_forward(&offset, Bias::Left);
+ } else if offset <= *self.chunks.start() {
+ self.chunks.seek(&offset, Bias::Left);
+ }
} else {
- Bias::Right
+ if offset >= self.chunks.end() {
+ self.chunks.seek_forward(&offset, Bias::Right);
+ } else if offset < *self.chunks.start() {
+ self.chunks.seek(&offset, Bias::Right);
+ }
};
- if offset >= self.chunks.end() {
- self.chunks.seek_forward(&offset, bias);
- } else {
- self.chunks.seek(&offset, bias);
- }
-
self.offset = offset;
}
@@ -936,24 +938,36 @@ impl Lines<'_> {
self.current_line.clear();
while let Some(chunk) = self.chunks.peek() {
- let lines = chunk.split('\n');
+ let chunk_lines = chunk.split('\n');
if self.reversed {
- let mut lines = lines.rev().peekable();
- while let Some(line) = lines.next() {
- self.current_line.insert_str(0, line);
- if lines.peek().is_some() {
+ let mut chunk_lines = chunk_lines.rev().peekable();
+ if let Some(chunk_line) = chunk_lines.next() {
+ let done = chunk_lines.peek().is_some();
+ if done {
self.chunks
- .seek(self.chunks.offset() - line.len() - "\n".len());
+ .seek(self.chunks.offset() - chunk_line.len() - "\n".len());
+ if self.current_line.is_empty() {
+ return Some(chunk_line);
+ }
+ }
+ self.current_line.insert_str(0, chunk_line);
+ if done {
return Some(&self.current_line);
}
}
} else {
- let mut lines = lines.peekable();
- while let Some(line) = lines.next() {
- self.current_line.push_str(line);
- if lines.peek().is_some() {
+ let mut chunk_lines = chunk_lines.peekable();
+ if let Some(chunk_line) = chunk_lines.next() {
+ let done = chunk_lines.peek().is_some();
+ if done {
self.chunks
- .seek(self.chunks.offset() + line.len() + "\n".len());
+ .seek(self.chunks.offset() + chunk_line.len() + "\n".len());
+ if self.current_line.is_empty() {
+ return Some(chunk_line);
+ }
+ }
+ self.current_line.push_str(chunk_line);
+ if done {
return Some(&self.current_line);
}
}
@@ -1573,6 +1587,20 @@ mod tests {
assert_eq!(lines.next(), Some("defg"));
assert_eq!(lines.next(), Some("abc"));
assert_eq!(lines.next(), None);
+
+ let rope = Rope::from("abc\nlonger line test\nhi");
+ let mut lines = rope.chunks().lines();
+ assert_eq!(lines.next(), Some("abc"));
+ assert_eq!(lines.next(), Some("longer line test"));
+ assert_eq!(lines.next(), Some("hi"));
+ assert_eq!(lines.next(), None);
+
+ let rope = Rope::from("abc\nlonger line test\nhi");
+ let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines();
+ assert_eq!(lines.next(), Some("hi"));
+ assert_eq!(lines.next(), Some("longer line test"));
+ assert_eq!(lines.next(), Some("abc"));
+ assert_eq!(lines.next(), None);
}
#[gpui::test(iterations = 100)]
@@ -1584,9 +1612,9 @@ mod tests {
let mut expected = String::new();
let mut actual = Rope::new();
for _ in 0..operations {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
- let len = rng.gen_range(0..=64);
+ let end_ix = clip_offset(&expected, rng.random_range(0..=expected.len()), Right);
+ let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left);
+ let len = rng.random_range(0..=64);
let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();
let mut new_actual = Rope::new();
@@ -1603,8 +1631,8 @@ mod tests {
log::info!("text: {:?}", expected);
for _ in 0..5 {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
+ let end_ix = clip_offset(&expected, rng.random_range(0..=expected.len()), Right);
+ let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left);
let actual_text = actual.chunks_in_range(start_ix..end_ix).collect::<String>();
assert_eq!(actual_text, &expected[start_ix..end_ix]);
@@ -1669,14 +1697,14 @@ mod tests {
);
// Check that next_line/prev_line work correctly from random positions
- let mut offset = rng.gen_range(start_ix..=end_ix);
+ let mut offset = rng.random_range(start_ix..=end_ix);
while !expected.is_char_boundary(offset) {
offset -= 1;
}
chunks.seek(offset);
for _ in 0..5 {
- if rng.r#gen() {
+ if rng.random() {
let expected_next_line_start = expected[offset..end_ix]
.find('\n')
.map(|newline_ix| offset + newline_ix + 1);
@@ -1765,8 +1793,8 @@ mod tests {
}
assert!((start_ix..=end_ix).contains(&chunks.offset()));
- if rng.r#gen() {
- offset = rng.gen_range(start_ix..=end_ix);
+ if rng.random() {
+ offset = rng.random_range(start_ix..=end_ix);
while !expected.is_char_boundary(offset) {
offset -= 1;
}
@@ -1850,8 +1878,8 @@ mod tests {
}
for _ in 0..5 {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
+ let end_ix = clip_offset(&expected, rng.random_range(0..=expected.len()), Right);
+ let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left);
assert_eq!(
actual.cursor(start_ix).summary::<TextSummary>(end_ix),
TextSummary::from(&expected[start_ix..end_ix])
@@ -1,6 +1,6 @@
use anyhow::{Context as _, Result};
use base64::prelude::*;
-use rand::{Rng as _, thread_rng};
+use rand::prelude::*;
use rsa::pkcs1::{DecodeRsaPublicKey, EncodeRsaPublicKey};
use rsa::traits::PaddingScheme;
use rsa::{Oaep, Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey};
@@ -31,7 +31,7 @@ pub struct PrivateKey(RsaPrivateKey);
/// Generate a public and private key for asymmetric encryption.
pub fn keypair() -> Result<(PublicKey, PrivateKey)> {
- let mut rng = thread_rng();
+ let mut rng = RsaRngCompat::new();
let bits = 2048;
let private_key = RsaPrivateKey::new(&mut rng, bits)?;
let public_key = RsaPublicKey::from(&private_key);
@@ -40,10 +40,10 @@ pub fn keypair() -> Result<(PublicKey, PrivateKey)> {
/// Generate a random 64-character base64 string.
pub fn random_token() -> String {
- let mut rng = thread_rng();
+ let mut rng = rand::rng();
let mut token_bytes = [0; 48];
for byte in token_bytes.iter_mut() {
- *byte = rng.r#gen();
+ *byte = rng.random();
}
BASE64_URL_SAFE.encode(token_bytes)
}
@@ -52,7 +52,7 @@ impl PublicKey {
/// Convert a string to a base64-encoded string that can only be decoded with the corresponding
/// private key.
pub fn encrypt_string(&self, string: &str, format: EncryptionFormat) -> Result<String> {
- let mut rng = thread_rng();
+ let mut rng = RsaRngCompat::new();
let bytes = string.as_bytes();
let encrypted_bytes = match format {
EncryptionFormat::V0 => self.0.encrypt(&mut rng, Pkcs1v15Encrypt, bytes),
@@ -107,6 +107,36 @@ impl TryFrom<String> for PublicKey {
}
}
+// TODO: remove once we rsa v0.10 is released.
+struct RsaRngCompat(rand::rngs::ThreadRng);
+
+impl RsaRngCompat {
+ fn new() -> Self {
+ Self(rand::rng())
+ }
+}
+
+impl rsa::signature::rand_core::RngCore for RsaRngCompat {
+ fn next_u32(&mut self) -> u32 {
+ self.0.next_u32()
+ }
+
+ fn next_u64(&mut self) -> u64 {
+ self.0.next_u64()
+ }
+
+ fn fill_bytes(&mut self, dest: &mut [u8]) {
+ self.0.fill_bytes(dest);
+ }
+
+ fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rsa::signature::rand_core::Error> {
+ self.fill_bytes(dest);
+ Ok(())
+ }
+}
+
+impl rsa::signature::rand_core::CryptoRng for RsaRngCompat {}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -56,7 +56,7 @@ impl Connection {
) {
use anyhow::anyhow;
use futures::channel::mpsc;
- use std::io::{Error, ErrorKind};
+ use std::io::Error;
let (tx, rx) = mpsc::unbounded::<WebSocketMessage>();
@@ -71,7 +71,7 @@ impl Connection {
// Writes to a half-open TCP connection will error.
if killed.load(SeqCst) {
- std::io::Result::Err(Error::new(ErrorKind::Other, "connection lost"))?;
+ std::io::Result::Err(Error::other("connection lost"))?;
}
Ok(msg)
@@ -80,7 +80,6 @@ impl Connection {
});
let rx = rx.then({
- let executor = executor.clone();
move |msg| {
let killed = killed.clone();
let executor = executor.clone();
@@ -26,7 +26,6 @@ use std::{
time::Duration,
time::Instant,
};
-use tracing::instrument;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
pub struct ConnectionId {
@@ -109,7 +108,6 @@ impl Peer {
self.epoch.load(SeqCst)
}
- #[instrument(skip_all)]
pub fn add_connection<F, Fut, Out>(
self: &Arc<Self>,
connection: Connection,
@@ -380,7 +378,6 @@ impl Peer {
impl Future<Output = anyhow::Result<()>> + Send + use<>,
BoxStream<'static, Box<dyn AnyTypedEnvelope>>,
) {
- let executor = executor.clone();
self.add_connection(connection, move |duration| executor.timer(duration))
}
@@ -1,35 +1,48 @@
-use anyhow::Context;
+use anyhow::{Context, Result};
use collections::HashMap;
use futures::{
Future, FutureExt as _,
+ channel::oneshot,
future::{BoxFuture, LocalBoxFuture},
};
-use gpui::{AnyEntity, AnyWeakEntity, AsyncApp, Entity};
+use gpui::{AnyEntity, AnyWeakEntity, AsyncApp, BackgroundExecutor, Entity, FutureExt as _};
+use parking_lot::Mutex;
use proto::{
- AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, RequestMessage, TypedEnvelope,
- error::ErrorExt as _,
+ AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, LspRequestId, LspRequestMessage,
+ RequestMessage, TypedEnvelope, error::ErrorExt as _,
};
use std::{
any::{Any, TypeId},
- sync::{Arc, Weak},
+ sync::{
+ Arc, OnceLock,
+ atomic::{self, AtomicU64},
+ },
+ time::Duration,
};
#[derive(Clone)]
-pub struct AnyProtoClient(Arc<dyn ProtoClient>);
+pub struct AnyProtoClient(Arc<State>);
-impl AnyProtoClient {
- pub fn downgrade(&self) -> AnyWeakProtoClient {
- AnyWeakProtoClient(Arc::downgrade(&self.0))
- }
-}
+type RequestIds = Arc<
+ Mutex<
+ HashMap<
+ LspRequestId,
+ oneshot::Sender<
+ Result<
+ Option<TypedEnvelope<Vec<proto::ProtoLspResponse<Box<dyn AnyTypedEnvelope>>>>>,
+ >,
+ >,
+ >,
+ >,
+>;
-#[derive(Clone)]
-pub struct AnyWeakProtoClient(Weak<dyn ProtoClient>);
+static NEXT_LSP_REQUEST_ID: OnceLock<Arc<AtomicU64>> = OnceLock::new();
+static REQUEST_IDS: OnceLock<RequestIds> = OnceLock::new();
-impl AnyWeakProtoClient {
- pub fn upgrade(&self) -> Option<AnyProtoClient> {
- self.0.upgrade().map(AnyProtoClient)
- }
+struct State {
+ client: Arc<dyn ProtoClient>,
+ next_lsp_request_id: Arc<AtomicU64>,
+ request_ids: RequestIds,
}
pub trait ProtoClient: Send + Sync {
@@ -37,11 +50,11 @@ pub trait ProtoClient: Send + Sync {
&self,
envelope: Envelope,
request_type: &'static str,
- ) -> BoxFuture<'static, anyhow::Result<Envelope>>;
+ ) -> BoxFuture<'static, Result<Envelope>>;
- fn send(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>;
+ fn send(&self, envelope: Envelope, message_type: &'static str) -> Result<()>;
- fn send_response(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>;
+ fn send_response(&self, envelope: Envelope, message_type: &'static str) -> Result<()>;
fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet>;
@@ -65,7 +78,7 @@ pub type ProtoMessageHandler = Arc<
Box<dyn AnyTypedEnvelope>,
AnyProtoClient,
AsyncApp,
- ) -> LocalBoxFuture<'static, anyhow::Result<()>>,
+ ) -> LocalBoxFuture<'static, Result<()>>,
>;
impl ProtoMessageHandlerSet {
@@ -113,7 +126,7 @@ impl ProtoMessageHandlerSet {
message: Box<dyn AnyTypedEnvelope>,
client: AnyProtoClient,
cx: AsyncApp,
- ) -> Option<LocalBoxFuture<'static, anyhow::Result<()>>> {
+ ) -> Option<LocalBoxFuture<'static, Result<()>>> {
let payload_type_id = message.payload_type_id();
let mut this = this.lock();
let handler = this.message_handlers.get(&payload_type_id)?.clone();
@@ -169,43 +182,195 @@ where
T: ProtoClient + 'static,
{
fn from(client: Arc<T>) -> Self {
- Self(client)
+ Self::new(client)
}
}
impl AnyProtoClient {
pub fn new<T: ProtoClient + 'static>(client: Arc<T>) -> Self {
- Self(client)
+ Self(Arc::new(State {
+ client,
+ next_lsp_request_id: NEXT_LSP_REQUEST_ID
+ .get_or_init(|| Arc::new(AtomicU64::new(0)))
+ .clone(),
+ request_ids: REQUEST_IDS.get_or_init(RequestIds::default).clone(),
+ }))
}
pub fn is_via_collab(&self) -> bool {
- self.0.is_via_collab()
+ self.0.client.is_via_collab()
}
pub fn request<T: RequestMessage>(
&self,
request: T,
- ) -> impl Future<Output = anyhow::Result<T::Response>> + use<T> {
+ ) -> impl Future<Output = Result<T::Response>> + use<T> {
let envelope = request.into_envelope(0, None, None);
- let response = self.0.request(envelope, T::NAME);
+ let response = self.0.client.request(envelope, T::NAME);
async move {
T::Response::from_envelope(response.await?)
.context("received response of the wrong type")
}
}
- pub fn send<T: EnvelopedMessage>(&self, request: T) -> anyhow::Result<()> {
+ pub fn send<T: EnvelopedMessage>(&self, request: T) -> Result<()> {
let envelope = request.into_envelope(0, None, None);
- self.0.send(envelope, T::NAME)
+ self.0.client.send(envelope, T::NAME)
}
- pub fn send_response<T: EnvelopedMessage>(
+ pub fn send_response<T: EnvelopedMessage>(&self, request_id: u32, request: T) -> Result<()> {
+ let envelope = request.into_envelope(0, Some(request_id), None);
+ self.0.client.send(envelope, T::NAME)
+ }
+
+ pub fn request_lsp<T>(
&self,
- request_id: u32,
+ project_id: u64,
+ timeout: Duration,
+ executor: BackgroundExecutor,
request: T,
- ) -> anyhow::Result<()> {
- let envelope = request.into_envelope(0, Some(request_id), None);
- self.0.send(envelope, T::NAME)
+ ) -> impl Future<
+ Output = Result<Option<TypedEnvelope<Vec<proto::ProtoLspResponse<T::Response>>>>>,
+ > + use<T>
+ where
+ T: LspRequestMessage,
+ {
+ let new_id = LspRequestId(
+ self.0
+ .next_lsp_request_id
+ .fetch_add(1, atomic::Ordering::Acquire),
+ );
+ let (tx, rx) = oneshot::channel();
+ {
+ self.0.request_ids.lock().insert(new_id, tx);
+ }
+
+ let query = proto::LspQuery {
+ project_id,
+ lsp_request_id: new_id.0,
+ request: Some(request.to_proto_query()),
+ };
+ let request = self.request(query);
+ let request_ids = self.0.request_ids.clone();
+ async move {
+ match request.await {
+ Ok(_request_enqueued) => {}
+ Err(e) => {
+ request_ids.lock().remove(&new_id);
+ return Err(e).context("sending LSP proto request");
+ }
+ }
+
+ let response = rx.with_timeout(timeout, &executor).await;
+ {
+ request_ids.lock().remove(&new_id);
+ }
+ match response {
+ Ok(Ok(response)) => {
+ let response = response
+ .context("waiting for LSP proto response")?
+ .map(|response| {
+ anyhow::Ok(TypedEnvelope {
+ payload: response
+ .payload
+ .into_iter()
+ .map(|lsp_response| lsp_response.into_response::<T>())
+ .collect::<Result<Vec<_>>>()?,
+ sender_id: response.sender_id,
+ original_sender_id: response.original_sender_id,
+ message_id: response.message_id,
+ received_at: response.received_at,
+ })
+ })
+ .transpose()
+ .context("converting LSP proto response")?;
+ Ok(response)
+ }
+ Err(_cancelled_due_timeout) => Ok(None),
+ Ok(Err(_channel_dropped)) => Ok(None),
+ }
+ }
+ }
+
+ pub fn send_lsp_response<T: LspRequestMessage>(
+ &self,
+ project_id: u64,
+ lsp_request_id: LspRequestId,
+ server_responses: HashMap<u64, T::Response>,
+ ) -> Result<()> {
+ self.send(proto::LspQueryResponse {
+ project_id,
+ lsp_request_id: lsp_request_id.0,
+ responses: server_responses
+ .into_iter()
+ .map(|(server_id, response)| proto::LspResponse {
+ server_id,
+ response: Some(T::response_to_proto_query(response)),
+ })
+ .collect(),
+ })
+ }
+
+ pub fn handle_lsp_response(&self, mut envelope: TypedEnvelope<proto::LspQueryResponse>) {
+ let request_id = LspRequestId(envelope.payload.lsp_request_id);
+ let mut response_senders = self.0.request_ids.lock();
+ if let Some(tx) = response_senders.remove(&request_id) {
+ let responses = envelope.payload.responses.drain(..).collect::<Vec<_>>();
+ tx.send(Ok(Some(proto::TypedEnvelope {
+ sender_id: envelope.sender_id,
+ original_sender_id: envelope.original_sender_id,
+ message_id: envelope.message_id,
+ received_at: envelope.received_at,
+ payload: responses
+ .into_iter()
+ .filter_map(|response| {
+ use proto::lsp_response::Response;
+
+ let server_id = response.server_id;
+ let response = match response.response? {
+ Response::GetReferencesResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetDocumentColorResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetHoverResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetCodeActionsResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetSignatureHelpResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetCodeLensResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetDocumentDiagnosticsResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetDefinitionResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetDeclarationResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetTypeDefinitionResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ Response::GetImplementationResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
+ };
+ Some(proto::ProtoLspResponse {
+ server_id,
+ response,
+ })
+ })
+ .collect(),
+ })))
+ .ok();
+ }
}
pub fn add_request_handler<M, E, H, F>(&self, entity: gpui::WeakEntity<E>, handler: H)
@@ -213,31 +378,35 @@ impl AnyProtoClient {
M: RequestMessage,
E: 'static,
H: 'static + Sync + Fn(Entity<E>, TypedEnvelope<M>, AsyncApp) -> F + Send + Sync,
- F: 'static + Future<Output = anyhow::Result<M::Response>>,
+ F: 'static + Future<Output = Result<M::Response>>,
{
- self.0.message_handler_set().lock().add_message_handler(
- TypeId::of::<M>(),
- entity.into(),
- Arc::new(move |entity, envelope, client, cx| {
- let entity = entity.downcast::<E>().unwrap();
- let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
- let request_id = envelope.message_id();
- handler(entity, *envelope, cx)
- .then(move |result| async move {
- match result {
- Ok(response) => {
- client.send_response(request_id, response)?;
- Ok(())
- }
- Err(error) => {
- client.send_response(request_id, error.to_proto())?;
- Err(error)
+ self.0
+ .client
+ .message_handler_set()
+ .lock()
+ .add_message_handler(
+ TypeId::of::<M>(),
+ entity.into(),
+ Arc::new(move |entity, envelope, client, cx| {
+ let entity = entity.downcast::<E>().unwrap();
+ let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
+ let request_id = envelope.message_id();
+ handler(entity, *envelope, cx)
+ .then(move |result| async move {
+ match result {
+ Ok(response) => {
+ client.send_response(request_id, response)?;
+ Ok(())
+ }
+ Err(error) => {
+ client.send_response(request_id, error.to_proto())?;
+ Err(error)
+ }
}
- }
- })
- .boxed_local()
- }),
- )
+ })
+ .boxed_local()
+ }),
+ )
}
pub fn add_entity_request_handler<M, E, H, F>(&self, handler: H)
@@ -245,7 +414,7 @@ impl AnyProtoClient {
M: EnvelopedMessage + RequestMessage + EntityMessage,
E: 'static,
H: 'static + Sync + Send + Fn(gpui::Entity<E>, TypedEnvelope<M>, AsyncApp) -> F,
- F: 'static + Future<Output = anyhow::Result<M::Response>>,
+ F: 'static + Future<Output = Result<M::Response>>,
{
let message_type_id = TypeId::of::<M>();
let entity_type_id = TypeId::of::<E>();
@@ -257,6 +426,7 @@ impl AnyProtoClient {
.remote_entity_id()
};
self.0
+ .client
.message_handler_set()
.lock()
.add_entity_message_handler(
@@ -290,7 +460,7 @@ impl AnyProtoClient {
M: EnvelopedMessage + EntityMessage,
E: 'static,
H: 'static + Sync + Send + Fn(gpui::Entity<E>, TypedEnvelope<M>, AsyncApp) -> F,
- F: 'static + Future<Output = anyhow::Result<()>>,
+ F: 'static + Future<Output = Result<()>>,
{
let message_type_id = TypeId::of::<M>();
let entity_type_id = TypeId::of::<E>();
@@ -302,6 +472,7 @@ impl AnyProtoClient {
.remote_entity_id()
};
self.0
+ .client
.message_handler_set()
.lock()
.add_entity_message_handler(
@@ -315,4 +486,36 @@ impl AnyProtoClient {
}),
);
}
+
+ pub fn subscribe_to_entity<E: 'static>(&self, remote_id: u64, entity: &Entity<E>) {
+ let id = (TypeId::of::<E>(), remote_id);
+
+ let mut message_handlers = self.0.client.message_handler_set().lock();
+ if message_handlers
+ .entities_by_type_and_remote_id
+ .contains_key(&id)
+ {
+ panic!("already subscribed to entity");
+ }
+
+ message_handlers.entities_by_type_and_remote_id.insert(
+ id,
+ EntityMessageSubscriber::Entity {
+ handle: entity.downgrade().into(),
+ },
+ );
+ }
+}
+
+fn to_any_envelope<T: EnvelopedMessage>(
+ envelope: &TypedEnvelope<proto::LspQueryResponse>,
+ response: T,
+) -> Box<dyn AnyTypedEnvelope> {
+ Box::new(proto::TypedEnvelope {
+ sender_id: envelope.sender_id,
+ original_sender_id: envelope.original_sender_id,
+ message_id: envelope.message_id,
+ received_at: envelope.received_at,
+ payload: response,
+ }) as Box<_>
}
@@ -49,7 +49,7 @@ actions!(
]
);
-const BUILT_IN_TOOLTIP_TEXT: &'static str = concat!(
+const BUILT_IN_TOOLTIP_TEXT: &str = concat!(
"This rule supports special functionality.\n",
"It's read-only, but you can remove it from your default rules."
);
@@ -414,11 +414,11 @@ impl RulesLibrary {
});
Self {
title_bar: if !cfg!(target_os = "macos") {
- Some(cx.new(|_| PlatformTitleBar::new("rules-library-title-bar")))
+ Some(cx.new(|cx| PlatformTitleBar::new("rules-library-title-bar", cx)))
} else {
None
},
- store: store.clone(),
+ store,
language_registry,
rule_editors: HashMap::default(),
active_rule_id: None,
@@ -1136,7 +1136,7 @@ impl RulesLibrary {
.child(
Label::new(format!(
"{} tokens",
- label_token_count.clone()
+ label_token_count
))
.color(Color::Muted),
)
@@ -0,0 +1,25 @@
+[package]
+name = "scheduler"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "Apache-2.0"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/scheduler.rs"
+doctest = false
+
+[features]
+test-support = []
+
+[dependencies]
+async-task.workspace = true
+chrono.workspace = true
+futures.workspace = true
+parking.workspace = true
+parking_lot.workspace = true
+rand.workspace = true
+workspace-hack.workspace = true
@@ -0,0 +1,34 @@
+use chrono::{DateTime, Duration, Utc};
+use parking_lot::Mutex;
+
+pub trait Clock {
+ fn now(&self) -> DateTime<Utc>;
+}
+
+pub struct TestClock {
+ now: Mutex<DateTime<Utc>>,
+}
+
+impl TestClock {
+ pub fn new() -> Self {
+ const START_TIME: &str = "2025-07-01T23:59:58-00:00";
+ let now = DateTime::parse_from_rfc3339(START_TIME).unwrap().to_utc();
+ Self {
+ now: Mutex::new(now),
+ }
+ }
+
+ pub fn set_now(&self, now: DateTime<Utc>) {
+ *self.now.lock() = now;
+ }
+
+ pub fn advance(&self, duration: Duration) {
+ *self.now.lock() += duration;
+ }
+}
+
+impl Clock for TestClock {
+ fn now(&self) -> DateTime<Utc> {
+ *self.now.lock()
+ }
+}
@@ -0,0 +1,137 @@
+use crate::{Scheduler, SessionId, Timer};
+use std::{
+ future::Future,
+ marker::PhantomData,
+ pin::Pin,
+ rc::Rc,
+ sync::Arc,
+ task::{Context, Poll},
+ time::Duration,
+};
+
+#[derive(Clone)]
+pub struct ForegroundExecutor {
+ session_id: SessionId,
+ scheduler: Arc<dyn Scheduler>,
+ not_send: PhantomData<Rc<()>>,
+}
+
+impl ForegroundExecutor {
+ pub fn spawn<F>(&self, future: F) -> Task<F::Output>
+ where
+ F: Future + 'static,
+ F::Output: 'static,
+ {
+ let session_id = self.session_id;
+ let scheduler = Arc::clone(&self.scheduler);
+ let (runnable, task) = async_task::spawn_local(future, move |runnable| {
+ scheduler.schedule_foreground(session_id, runnable);
+ });
+ runnable.schedule();
+ Task(TaskState::Spawned(task))
+ }
+
+ pub fn timer(&self, duration: Duration) -> Timer {
+ self.scheduler.timer(duration)
+ }
+}
+
+impl ForegroundExecutor {
+ pub fn new(session_id: SessionId, scheduler: Arc<dyn Scheduler>) -> Self {
+ assert!(
+ scheduler.is_main_thread(),
+ "ForegroundExecutor must be created on the same thread as the Scheduler"
+ );
+ Self {
+ session_id,
+ scheduler,
+ not_send: PhantomData,
+ }
+ }
+}
+
+impl BackgroundExecutor {
+ pub fn new(scheduler: Arc<dyn Scheduler>) -> Self {
+ Self { scheduler }
+ }
+}
+
+pub struct BackgroundExecutor {
+ scheduler: Arc<dyn Scheduler>,
+}
+
+impl BackgroundExecutor {
+ pub fn spawn<F>(&self, future: F) -> Task<F::Output>
+ where
+ F: Future + Send + 'static,
+ F::Output: Send + 'static,
+ {
+ let scheduler = Arc::clone(&self.scheduler);
+ let (runnable, task) = async_task::spawn(future, move |runnable| {
+ scheduler.schedule_background(runnable);
+ });
+ runnable.schedule();
+ Task(TaskState::Spawned(task))
+ }
+
+ pub fn block_on<Fut: Future>(&self, future: Fut) -> Fut::Output {
+ self.scheduler.block_on(future)
+ }
+
+ pub fn block_with_timeout<Fut: Unpin + Future>(
+ &self,
+ future: &mut Fut,
+ timeout: Duration,
+ ) -> Option<Fut::Output> {
+ self.scheduler.block_with_timeout(future, timeout)
+ }
+
+ pub fn timer(&self, duration: Duration) -> Timer {
+ self.scheduler.timer(duration)
+ }
+}
+
+/// Task is a primitive that allows work to happen in the background.
+///
+/// It implements [`Future`] so you can `.await` on it.
+///
+/// If you drop a task it will be cancelled immediately. Calling [`Task::detach`] allows
+/// the task to continue running, but with no way to return a value.
+#[must_use]
+#[derive(Debug)]
+pub struct Task<T>(TaskState<T>);
+
+#[derive(Debug)]
+enum TaskState<T> {
+ /// A task that is ready to return a value
+ Ready(Option<T>),
+
+ /// A task that is currently running.
+ Spawned(async_task::Task<T>),
+}
+
+impl<T> Task<T> {
+ /// Creates a new task that will resolve with the value
+ pub fn ready(val: T) -> Self {
+ Task(TaskState::Ready(Some(val)))
+ }
+
+ /// Detaching a task runs it to completion in the background
+ pub fn detach(self) {
+ match self {
+ Task(TaskState::Ready(_)) => {}
+ Task(TaskState::Spawned(task)) => task.detach(),
+ }
+ }
+}
+
+impl<T> Future for Task<T> {
+ type Output = T;
+
+ fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
+ match unsafe { self.get_unchecked_mut() } {
+ Task(TaskState::Ready(val)) => Poll::Ready(val.take().unwrap()),
+ Task(TaskState::Spawned(task)) => Pin::new(task).poll(cx),
+ }
+ }
+}
@@ -0,0 +1,63 @@
+mod clock;
+mod executor;
+mod test_scheduler;
+#[cfg(test)]
+mod tests;
+
+pub use clock::*;
+pub use executor::*;
+pub use test_scheduler::*;
+
+use async_task::Runnable;
+use futures::{FutureExt as _, channel::oneshot, future::LocalBoxFuture};
+use std::{
+ future::Future,
+ pin::Pin,
+ task::{Context, Poll},
+ time::Duration,
+};
+
+pub trait Scheduler: Send + Sync {
+ fn block(&self, future: LocalBoxFuture<()>, timeout: Option<Duration>);
+ fn schedule_foreground(&self, session_id: SessionId, runnable: Runnable);
+ fn schedule_background(&self, runnable: Runnable);
+ fn timer(&self, timeout: Duration) -> Timer;
+ fn is_main_thread(&self) -> bool;
+}
+
+impl dyn Scheduler {
+ pub fn block_on<Fut: Future>(&self, future: Fut) -> Fut::Output {
+ let mut output = None;
+ self.block(async { output = Some(future.await) }.boxed_local(), None);
+ output.unwrap()
+ }
+
+ pub fn block_with_timeout<Fut: Unpin + Future>(
+ &self,
+ future: &mut Fut,
+ timeout: Duration,
+ ) -> Option<Fut::Output> {
+ let mut output = None;
+ self.block(
+ async { output = Some(future.await) }.boxed_local(),
+ Some(timeout),
+ );
+ output
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub struct SessionId(u16);
+
+pub struct Timer(oneshot::Receiver<()>);
+
+impl Future for Timer {
+ type Output = ();
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<()> {
+ match self.0.poll_unpin(cx) {
+ Poll::Ready(_) => Poll::Ready(()),
+ Poll::Pending => Poll::Pending,
+ }
+ }
+}
@@ -0,0 +1,352 @@
+use crate::{
+ BackgroundExecutor, Clock as _, ForegroundExecutor, Scheduler, SessionId, TestClock, Timer,
+};
+use async_task::Runnable;
+use chrono::{DateTime, Duration as ChronoDuration, Utc};
+use futures::{FutureExt as _, channel::oneshot, future::LocalBoxFuture};
+use parking_lot::Mutex;
+use rand::prelude::*;
+use std::{
+ collections::VecDeque,
+ future::Future,
+ panic::{self, AssertUnwindSafe},
+ pin::Pin,
+ sync::{
+ Arc,
+ atomic::{AtomicBool, Ordering::SeqCst},
+ },
+ task::{Context, Poll, Wake, Waker},
+ thread,
+ time::{Duration, Instant},
+};
+
+pub struct TestScheduler {
+ clock: Arc<TestClock>,
+ rng: Arc<Mutex<StdRng>>,
+ state: Mutex<SchedulerState>,
+ pub thread_id: thread::ThreadId,
+ pub config: SchedulerConfig,
+}
+
+impl TestScheduler {
+ /// Run a test once with default configuration (seed 0)
+ pub fn once<R>(f: impl AsyncFnOnce(Arc<TestScheduler>) -> R) -> R {
+ Self::with_seed(0, f)
+ }
+
+ /// Run a test multiple times with sequential seeds (0, 1, 2, ...)
+ pub fn many<R>(iterations: usize, mut f: impl AsyncFnMut(Arc<TestScheduler>) -> R) -> Vec<R> {
+ (0..iterations as u64)
+ .map(|seed| {
+ let mut unwind_safe_f = AssertUnwindSafe(&mut f);
+ match panic::catch_unwind(move || Self::with_seed(seed, &mut *unwind_safe_f)) {
+ Ok(result) => result,
+ Err(error) => {
+ eprintln!("Failing Seed: {seed}");
+ panic::resume_unwind(error);
+ }
+ }
+ })
+ .collect()
+ }
+
+ /// Run a test once with a specific seed
+ pub fn with_seed<R>(seed: u64, f: impl AsyncFnOnce(Arc<TestScheduler>) -> R) -> R {
+ let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::with_seed(seed)));
+ let future = f(scheduler.clone());
+ let result = scheduler.block_on(future);
+ scheduler.run();
+ result
+ }
+
+ pub fn new(config: SchedulerConfig) -> Self {
+ Self {
+ rng: Arc::new(Mutex::new(StdRng::seed_from_u64(config.seed))),
+ state: Mutex::new(SchedulerState {
+ runnables: VecDeque::new(),
+ timers: Vec::new(),
+ randomize_order: config.randomize_order,
+ allow_parking: config.allow_parking,
+ next_session_id: SessionId(0),
+ }),
+ thread_id: thread::current().id(),
+ clock: Arc::new(TestClock::new()),
+ config,
+ }
+ }
+
+ pub fn clock(&self) -> Arc<TestClock> {
+ self.clock.clone()
+ }
+
+ pub fn rng(&self) -> Arc<Mutex<StdRng>> {
+ self.rng.clone()
+ }
+
+ /// Create a foreground executor for this scheduler
+ pub fn foreground(self: &Arc<Self>) -> ForegroundExecutor {
+ let session_id = {
+ let mut state = self.state.lock();
+ state.next_session_id.0 += 1;
+ state.next_session_id
+ };
+ ForegroundExecutor::new(session_id, self.clone())
+ }
+
+ /// Create a background executor for this scheduler
+ pub fn background(self: &Arc<Self>) -> BackgroundExecutor {
+ BackgroundExecutor::new(self.clone())
+ }
+
+ pub fn block_on<Fut: Future>(&self, future: Fut) -> Fut::Output {
+ (self as &dyn Scheduler).block_on(future)
+ }
+
+ pub fn yield_random(&self) -> Yield {
+ Yield(self.rng.lock().random_range(0..20))
+ }
+
+ pub fn run(&self) {
+ while self.step() || self.advance_clock() {
+ // Continue until no work remains
+ }
+ }
+
+ fn step(&self) -> bool {
+ let elapsed_timers = {
+ let mut state = self.state.lock();
+ let end_ix = state
+ .timers
+ .partition_point(|timer| timer.expiration <= self.clock.now());
+ state.timers.drain(..end_ix).collect::<Vec<_>>()
+ };
+
+ if !elapsed_timers.is_empty() {
+ return true;
+ }
+
+ let runnable = self.state.lock().runnables.pop_front();
+ if let Some(runnable) = runnable {
+ runnable.run();
+ return true;
+ }
+
+ false
+ }
+
+ fn advance_clock(&self) -> bool {
+ if let Some(timer) = self.state.lock().timers.first() {
+ self.clock.set_now(timer.expiration);
+ true
+ } else {
+ false
+ }
+ }
+}
+
+impl Scheduler for TestScheduler {
+ fn is_main_thread(&self) -> bool {
+ thread::current().id() == self.thread_id
+ }
+
+ fn schedule_foreground(&self, session_id: SessionId, runnable: Runnable) {
+ let mut state = self.state.lock();
+ let ix = if state.randomize_order {
+ let start_ix = state
+ .runnables
+ .iter()
+ .rposition(|task| task.session_id == Some(session_id))
+ .map_or(0, |ix| ix + 1);
+ self.rng
+ .lock()
+ .random_range(start_ix..=state.runnables.len())
+ } else {
+ state.runnables.len()
+ };
+ state.runnables.insert(
+ ix,
+ ScheduledRunnable {
+ session_id: Some(session_id),
+ runnable,
+ },
+ );
+ }
+
+ fn schedule_background(&self, runnable: Runnable) {
+ let mut state = self.state.lock();
+ let ix = if state.randomize_order {
+ self.rng.lock().random_range(0..=state.runnables.len())
+ } else {
+ state.runnables.len()
+ };
+ state.runnables.insert(
+ ix,
+ ScheduledRunnable {
+ session_id: None,
+ runnable,
+ },
+ );
+ }
+
+ fn timer(&self, duration: Duration) -> Timer {
+ let (tx, rx) = oneshot::channel();
+ let expiration = self.clock.now() + ChronoDuration::from_std(duration).unwrap();
+ let state = &mut *self.state.lock();
+ state.timers.push(ScheduledTimer {
+ expiration,
+ _notify: tx,
+ });
+ state.timers.sort_by_key(|timer| timer.expiration);
+ Timer(rx)
+ }
+
+ /// Block until the given future completes, with an optional timeout. If the
+ /// future is unable to make progress at any moment before the timeout and
+ /// no other tasks or timers remain, we panic unless parking is allowed. If
+ /// parking is allowed, we block up to the timeout or indefinitely if none
+ /// is provided. This is to allow testing a mix of deterministic and
+ /// non-deterministic async behavior, such as when interacting with I/O in
+ /// an otherwise deterministic test.
+ fn block(&self, mut future: LocalBoxFuture<()>, timeout: Option<Duration>) {
+ let (parker, unparker) = parking::pair();
+ let deadline = timeout.map(|timeout| Instant::now() + timeout);
+ let awoken = Arc::new(AtomicBool::new(false));
+ let waker = Waker::from(Arc::new(WakerFn::new({
+ let awoken = awoken.clone();
+ move || {
+ awoken.store(true, SeqCst);
+ unparker.unpark();
+ }
+ })));
+ let max_ticks = if timeout.is_some() {
+ self.rng
+ .lock()
+ .random_range(0..=self.config.max_timeout_ticks)
+ } else {
+ usize::MAX
+ };
+ let mut cx = Context::from_waker(&waker);
+
+ for _ in 0..max_ticks {
+ let Poll::Pending = future.poll_unpin(&mut cx) else {
+ break;
+ };
+
+ let mut stepped = None;
+ while self.rng.lock().random() && stepped.unwrap_or(true) {
+ *stepped.get_or_insert(false) |= self.step();
+ }
+
+ let stepped = stepped.unwrap_or(true);
+ let awoken = awoken.swap(false, SeqCst);
+ if !stepped && !awoken && !self.advance_clock() {
+ if self.state.lock().allow_parking {
+ if !park(&parker, deadline) {
+ break;
+ }
+ } else if deadline.is_some() {
+ break;
+ } else {
+ panic!("Parking forbidden");
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct SchedulerConfig {
+ pub seed: u64,
+ pub randomize_order: bool,
+ pub allow_parking: bool,
+ pub max_timeout_ticks: usize,
+}
+
+impl SchedulerConfig {
+ pub fn with_seed(seed: u64) -> Self {
+ Self {
+ seed,
+ ..Default::default()
+ }
+ }
+}
+
+impl Default for SchedulerConfig {
+ fn default() -> Self {
+ Self {
+ seed: 0,
+ randomize_order: true,
+ allow_parking: false,
+ max_timeout_ticks: 1000,
+ }
+ }
+}
+
+struct ScheduledRunnable {
+ session_id: Option<SessionId>,
+ runnable: Runnable,
+}
+
+impl ScheduledRunnable {
+ fn run(self) {
+ self.runnable.run();
+ }
+}
+
+struct ScheduledTimer {
+ expiration: DateTime<Utc>,
+ _notify: oneshot::Sender<()>,
+}
+
+struct SchedulerState {
+ runnables: VecDeque<ScheduledRunnable>,
+ timers: Vec<ScheduledTimer>,
+ randomize_order: bool,
+ allow_parking: bool,
+ next_session_id: SessionId,
+}
+
+struct WakerFn<F> {
+ f: F,
+}
+
+impl<F: Fn()> WakerFn<F> {
+ fn new(f: F) -> Self {
+ Self { f }
+ }
+}
+
+impl<F: Fn()> Wake for WakerFn<F> {
+ fn wake(self: Arc<Self>) {
+ (self.f)();
+ }
+
+ fn wake_by_ref(self: &Arc<Self>) {
+ (self.f)();
+ }
+}
+
+pub struct Yield(usize);
+
+impl Future for Yield {
+ type Output = ();
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
+ if self.0 == 0 {
+ Poll::Ready(())
+ } else {
+ self.0 -= 1;
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ }
+ }
+}
+
+fn park(parker: &parking::Parker, deadline: Option<Instant>) -> bool {
+ if let Some(deadline) = deadline {
+ parker.park_deadline(deadline)
+ } else {
+ parker.park();
+ true
+ }
+}
@@ -0,0 +1,348 @@
+use super::*;
+use futures::{
+ FutureExt,
+ channel::{mpsc, oneshot},
+ executor::block_on,
+ future,
+ sink::SinkExt,
+ stream::{FuturesUnordered, StreamExt},
+};
+use std::{
+ cell::RefCell,
+ collections::{BTreeSet, HashSet},
+ pin::Pin,
+ rc::Rc,
+ sync::Arc,
+ task::{Context, Poll},
+};
+
+#[test]
+fn test_foreground_executor_spawn() {
+ let result = TestScheduler::once(async |scheduler| {
+ let task = scheduler.foreground().spawn(async move { 42 });
+ task.await
+ });
+ assert_eq!(result, 42);
+}
+
+#[test]
+fn test_background_executor_spawn() {
+ TestScheduler::once(async |scheduler| {
+ let task = scheduler.background().spawn(async move { 42 });
+ let result = task.await;
+ assert_eq!(result, 42);
+ });
+}
+
+#[test]
+fn test_foreground_ordering() {
+ let mut traces = HashSet::new();
+
+ TestScheduler::many(100, async |scheduler| {
+ #[derive(Hash, PartialEq, Eq)]
+ struct TraceEntry {
+ session: usize,
+ task: usize,
+ }
+
+ let trace = Rc::new(RefCell::new(Vec::new()));
+
+ let foreground_1 = scheduler.foreground();
+ for task in 0..10 {
+ foreground_1
+ .spawn({
+ let trace = trace.clone();
+ async move {
+ trace.borrow_mut().push(TraceEntry { session: 0, task });
+ }
+ })
+ .detach();
+ }
+
+ let foreground_2 = scheduler.foreground();
+ for task in 0..10 {
+ foreground_2
+ .spawn({
+ let trace = trace.clone();
+ async move {
+ trace.borrow_mut().push(TraceEntry { session: 1, task });
+ }
+ })
+ .detach();
+ }
+
+ scheduler.run();
+
+ assert_eq!(
+ trace
+ .borrow()
+ .iter()
+ .filter(|entry| entry.session == 0)
+ .map(|entry| entry.task)
+ .collect::<Vec<_>>(),
+ (0..10).collect::<Vec<_>>()
+ );
+ assert_eq!(
+ trace
+ .borrow()
+ .iter()
+ .filter(|entry| entry.session == 1)
+ .map(|entry| entry.task)
+ .collect::<Vec<_>>(),
+ (0..10).collect::<Vec<_>>()
+ );
+
+ traces.insert(trace.take());
+ });
+
+ assert!(traces.len() > 1, "Expected at least two traces");
+}
+
+#[test]
+fn test_timer_ordering() {
+ TestScheduler::many(1, async |scheduler| {
+ let background = scheduler.background();
+ let futures = FuturesUnordered::new();
+ futures.push(
+ async {
+ background.timer(Duration::from_millis(100)).await;
+ 2
+ }
+ .boxed(),
+ );
+ futures.push(
+ async {
+ background.timer(Duration::from_millis(50)).await;
+ 1
+ }
+ .boxed(),
+ );
+ futures.push(
+ async {
+ background.timer(Duration::from_millis(150)).await;
+ 3
+ }
+ .boxed(),
+ );
+ assert_eq!(futures.collect::<Vec<_>>().await, vec![1, 2, 3]);
+ });
+}
+
+#[test]
+fn test_send_from_bg_to_fg() {
+ TestScheduler::once(async |scheduler| {
+ let foreground = scheduler.foreground();
+ let background = scheduler.background();
+
+ let (sender, receiver) = oneshot::channel::<i32>();
+
+ background
+ .spawn(async move {
+ sender.send(42).unwrap();
+ })
+ .detach();
+
+ let task = foreground.spawn(async move { receiver.await.unwrap() });
+ let result = task.await;
+ assert_eq!(result, 42);
+ });
+}
+
+#[test]
+fn test_randomize_order() {
+ // Test deterministic mode: different seeds should produce same execution order
+ let mut deterministic_results = HashSet::new();
+ for seed in 0..10 {
+ let config = SchedulerConfig {
+ seed,
+ randomize_order: false,
+ ..Default::default()
+ };
+ let order = block_on(capture_execution_order(config));
+ assert_eq!(order.len(), 6);
+ deterministic_results.insert(order);
+ }
+
+ // All deterministic runs should produce the same result
+ assert_eq!(
+ deterministic_results.len(),
+ 1,
+ "Deterministic mode should always produce same execution order"
+ );
+
+ // Test randomized mode: different seeds can produce different execution orders
+ let mut randomized_results = HashSet::new();
+ for seed in 0..20 {
+ let config = SchedulerConfig::with_seed(seed);
+ let order = block_on(capture_execution_order(config));
+ assert_eq!(order.len(), 6);
+ randomized_results.insert(order);
+ }
+
+ // Randomized mode should produce multiple different execution orders
+ assert!(
+ randomized_results.len() > 1,
+ "Randomized mode should produce multiple different orders"
+ );
+}
+
+async fn capture_execution_order(config: SchedulerConfig) -> Vec<String> {
+ let scheduler = Arc::new(TestScheduler::new(config));
+ let foreground = scheduler.foreground();
+ let background = scheduler.background();
+
+ let (sender, receiver) = mpsc::unbounded::<String>();
+
+ // Spawn foreground tasks
+ for i in 0..3 {
+ let mut sender = sender.clone();
+ foreground
+ .spawn(async move {
+ sender.send(format!("fg-{}", i)).await.ok();
+ })
+ .detach();
+ }
+
+ // Spawn background tasks
+ for i in 0..3 {
+ let mut sender = sender.clone();
+ background
+ .spawn(async move {
+ sender.send(format!("bg-{}", i)).await.ok();
+ })
+ .detach();
+ }
+
+ drop(sender); // Close sender to signal no more messages
+ scheduler.run();
+
+ receiver.collect().await
+}
+
+#[test]
+fn test_block() {
+ let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::default()));
+ let executor = BackgroundExecutor::new(scheduler);
+ let (tx, rx) = oneshot::channel();
+
+ // Spawn background task to send value
+ let _ = executor
+ .spawn(async move {
+ tx.send(42).unwrap();
+ })
+ .detach();
+
+ // Block on receiving the value
+ let result = executor.block_on(async { rx.await.unwrap() });
+ assert_eq!(result, 42);
+}
+
+#[test]
+#[should_panic(expected = "Parking forbidden")]
+fn test_parking_panics() {
+ let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::default()));
+ let executor = BackgroundExecutor::new(scheduler);
+ executor.block_on(future::pending::<()>());
+}
+
+#[test]
+fn test_block_with_parking() {
+ let config = SchedulerConfig {
+ allow_parking: true,
+ ..Default::default()
+ };
+ let scheduler = Arc::new(TestScheduler::new(config));
+ let executor = BackgroundExecutor::new(scheduler);
+ let (tx, rx) = oneshot::channel();
+
+ // Spawn background task to send value
+ let _ = executor
+ .spawn(async move {
+ tx.send(42).unwrap();
+ })
+ .detach();
+
+ // Block on receiving the value (will park if needed)
+ let result = executor.block_on(async { rx.await.unwrap() });
+ assert_eq!(result, 42);
+}
+
+#[test]
+fn test_helper_methods() {
+ // Test the once method
+ let result = TestScheduler::once(async |scheduler: Arc<TestScheduler>| {
+ let background = scheduler.background();
+ background.spawn(async { 42 }).await
+ });
+ assert_eq!(result, 42);
+
+ // Test the many method
+ let results = TestScheduler::many(3, async |scheduler: Arc<TestScheduler>| {
+ let background = scheduler.background();
+ background.spawn(async { 10 }).await
+ });
+ assert_eq!(results, vec![10, 10, 10]);
+
+ // Test the with_seed method
+ let result = TestScheduler::with_seed(123, async |scheduler: Arc<TestScheduler>| {
+ let background = scheduler.background();
+
+ // Spawn a background task and wait for its result
+ let task = background.spawn(async { 99 });
+ task.await
+ });
+ assert_eq!(result, 99);
+}
+
+#[test]
+fn test_block_with_timeout() {
+ // Test case: future completes within timeout
+ TestScheduler::once(async |scheduler| {
+ let background = scheduler.background();
+ let mut future = future::ready(42);
+ let output = background.block_with_timeout(&mut future, Duration::from_millis(100));
+ assert_eq!(output, Some(42));
+ });
+
+ // Test case: future times out
+ TestScheduler::once(async |scheduler| {
+ let background = scheduler.background();
+ let mut future = future::pending::<()>();
+ let output = background.block_with_timeout(&mut future, Duration::from_millis(50));
+ assert_eq!(output, None);
+ });
+
+ // Test case: future makes progress via timer but still times out
+ let mut results = BTreeSet::new();
+ TestScheduler::many(100, async |scheduler| {
+ let background = scheduler.background();
+ let mut task = background.spawn(async move {
+ Yield { polls: 10 }.await;
+ 42
+ });
+ let output = background.block_with_timeout(&mut task, Duration::from_millis(50));
+ results.insert(output);
+ });
+ assert_eq!(
+ results.into_iter().collect::<Vec<_>>(),
+ vec![None, Some(42)]
+ );
+}
+
+struct Yield {
+ polls: usize,
+}
+
+impl Future for Yield {
+ type Output = ();
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+ self.polls -= 1;
+ if self.polls == 0 {
+ Poll::Ready(())
+ } else {
+ cx.waker().wake_by_ref();
+ Poll::Pending
+ }
+ }
+}
@@ -716,10 +716,10 @@ impl BufferSearchBar {
self.replace_enabled = deploy.replace_enabled;
self.selection_search_enabled = deploy.selection_search_enabled;
if deploy.focus {
- let mut handle = self.query_editor.focus_handle(cx).clone();
+ let mut handle = self.query_editor.focus_handle(cx);
let mut select_query = true;
if deploy.replace_enabled && handle.is_focused(window) {
- handle = self.replacement_editor.focus_handle(cx).clone();
+ handle = self.replacement_editor.focus_handle(cx);
select_query = false;
};
@@ -749,14 +749,16 @@ impl BufferSearchBar {
return false;
};
- self.configured_options =
+ let configured_options =
SearchOptions::from_settings(&EditorSettings::get_global(cx).search);
- if self.dismissed
- && (self.configured_options != self.default_options
- || self.configured_options != self.search_options)
- {
- self.search_options = self.configured_options;
- self.default_options = self.configured_options;
+ let settings_changed = configured_options != self.configured_options;
+
+ if self.dismissed && settings_changed {
+ // Only update configuration options when search bar is dismissed,
+ // so we don't miss updates even after calling show twice
+ self.configured_options = configured_options;
+ self.search_options = configured_options;
+ self.default_options = configured_options;
}
self.dismissed = false;
@@ -1514,18 +1516,25 @@ mod tests {
cx,
)
});
- let cx = cx.add_empty_window();
- let editor =
- cx.new_window_entity(|window, cx| Editor::for_buffer(buffer.clone(), None, window, cx));
-
- let search_bar = cx.new_window_entity(|window, cx| {
+ let mut editor = None;
+ let window = cx.add_window(|window, cx| {
+ let default_key_bindings = settings::KeymapFile::load_asset_allow_partial_failure(
+ "keymaps/default-macos.json",
+ cx,
+ )
+ .unwrap();
+ cx.bind_keys(default_key_bindings);
+ editor = Some(cx.new(|cx| Editor::for_buffer(buffer.clone(), None, window, cx)));
let mut search_bar = BufferSearchBar::new(None, window, cx);
- search_bar.set_active_pane_item(Some(&editor), window, cx);
+ search_bar.set_active_pane_item(Some(&editor.clone().unwrap()), window, cx);
search_bar.show(window, cx);
search_bar
});
+ let search_bar = window.root(cx).unwrap();
+
+ let cx = VisualTestContext::from_window(*window, cx).into_mut();
- (editor, search_bar, cx)
+ (editor.unwrap(), search_bar, cx)
}
#[gpui::test]
@@ -2750,11 +2759,6 @@ mod tests {
"Search bar should be present and visible"
);
search_bar.deploy(&deploy, window, cx);
- assert_eq!(
- search_bar.configured_options,
- SearchOptions::NONE,
- "Should have configured search options matching the settings"
- );
assert_eq!(
search_bar.search_options,
SearchOptions::WHOLE_WORD,
@@ -2765,21 +2769,22 @@ mod tests {
search_bar.deploy(&deploy, window, cx);
assert_eq!(
search_bar.search_options,
- SearchOptions::NONE,
- "After hiding and showing the search bar, default options should be used"
+ SearchOptions::WHOLE_WORD,
+ "After hiding and showing the search bar, search options should be preserved"
);
search_bar.toggle_search_option(SearchOptions::REGEX, window, cx);
search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, window, cx);
assert_eq!(
search_bar.search_options,
- SearchOptions::REGEX | SearchOptions::WHOLE_WORD,
+ SearchOptions::REGEX,
"Should enable the options toggled"
);
assert!(
!search_bar.dismissed,
"Search bar should be present and visible"
);
+ search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, window, cx);
});
update_search_settings(
@@ -2800,11 +2805,6 @@ mod tests {
);
search_bar.deploy(&deploy, window, cx);
- assert_eq!(
- search_bar.configured_options,
- SearchOptions::CASE_SENSITIVE,
- "Should have configured search options matching the settings"
- );
assert_eq!(
search_bar.search_options,
SearchOptions::REGEX | SearchOptions::WHOLE_WORD,
@@ -2812,10 +2812,37 @@ mod tests {
);
search_bar.dismiss(&Dismiss, window, cx);
search_bar.deploy(&deploy, window, cx);
+ assert_eq!(
+ search_bar.configured_options,
+ SearchOptions::CASE_SENSITIVE,
+ "After a settings update and toggling the search bar, configured options should be updated"
+ );
assert_eq!(
search_bar.search_options,
SearchOptions::CASE_SENSITIVE,
- "After hiding and showing the search bar, default options should be used"
+ "After a settings update and toggling the search bar, configured options should be used"
+ );
+ });
+
+ update_search_settings(
+ SearchSettings {
+ button: true,
+ whole_word: true,
+ case_sensitive: true,
+ include_ignored: false,
+ regex: false,
+ },
+ cx,
+ );
+
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.deploy(&deploy, window, cx);
+ search_bar.dismiss(&Dismiss, window, cx);
+ search_bar.show(window, cx);
+ assert_eq!(
+ search_bar.search_options,
+ SearchOptions::CASE_SENSITIVE | SearchOptions::WHOLE_WORD,
+ "Calling deploy on an already deployed search bar should not prevent settings updates from being detected"
);
});
}
@@ -42,7 +42,6 @@ impl<T: 'static> SearchActionsRegistrar for DivRegistrar<'_, '_, T> {
self.div = self.div.take().map(|div| {
div.on_action(self.cx.listener(move |this, action, window, cx| {
let should_notify = (getter)(this, window, cx)
- .clone()
.map(|search_bar| {
search_bar.update(cx, |search_bar, cx| {
callback.execute(search_bar, action, window, cx)
@@ -11,6 +11,7 @@ use editor::{
Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, SelectionEffects,
actions::{Backtab, SelectAll, Tab},
items::active_match_index,
+ multibuffer_context_lines,
};
use futures::{StreamExt, stream::FuturesOrdered};
use gpui::{
@@ -345,7 +346,7 @@ impl ProjectSearch {
excerpts.set_anchored_excerpts_for_path(
buffer,
ranges,
- editor::DEFAULT_MULTIBUFFER_CONTEXT,
+ multibuffer_context_lines(cx),
cx,
)
})
@@ -1113,8 +1114,8 @@ impl ProjectSearchView {
.await
.log_err();
}
- let should_search = result != 2;
- should_search
+
+ result != 2
} else {
true
};
@@ -1139,7 +1140,7 @@ impl ProjectSearchView {
fn build_search_query(&mut self, cx: &mut Context<Self>) -> Option<SearchQuery> {
// Do not bail early in this function, as we want to fill out `self.panels_with_errors`.
- let text = self.query_editor.read(cx).text(cx);
+ let text = self.search_query_text(cx);
let open_buffers = if self.included_opened_only {
Some(self.open_buffers(cx))
} else {
@@ -1383,6 +1384,9 @@ impl ProjectSearchView {
let match_ranges = self.entity.read(cx).match_ranges.clone();
if match_ranges.is_empty() {
self.active_match_index = None;
+ self.results_editor.update(cx, |editor, cx| {
+ editor.clear_background_highlights::<Self>(cx);
+ });
} else {
self.active_match_index = Some(0);
self.update_match_index(cx);
@@ -2337,7 +2341,7 @@ pub fn perform_project_search(
#[cfg(test)]
pub mod tests {
- use std::{ops::Deref as _, sync::Arc};
+ use std::{ops::Deref as _, sync::Arc, time::Duration};
use super::*;
use editor::{DisplayPoint, display_map::DisplayRow};
@@ -2380,6 +2384,7 @@ pub mod tests {
"\n\nconst THREE: usize = one::ONE + two::TWO;\n\n\nconst TWO: usize = one::ONE + one::ONE;"
);
let match_background_color = cx.theme().colors().search_match_background;
+ let selection_background_color = cx.theme().colors().editor_document_highlight_bracket_background;
assert_eq!(
search_view
.results_editor
@@ -2389,14 +2394,23 @@ pub mod tests {
DisplayPoint::new(DisplayRow(2), 32)..DisplayPoint::new(DisplayRow(2), 35),
match_background_color
),
+ (
+ DisplayPoint::new(DisplayRow(2), 37)..DisplayPoint::new(DisplayRow(2), 40),
+ selection_background_color
+ ),
(
DisplayPoint::new(DisplayRow(2), 37)..DisplayPoint::new(DisplayRow(2), 40),
match_background_color
),
+ (
+ DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 9),
+ selection_background_color
+ ),
(
DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 9),
match_background_color
- )
+ ),
+
]
);
assert_eq!(search_view.active_match_index, Some(0));
@@ -3187,6 +3201,7 @@ pub mod tests {
.read(cx)
.entry_for_path(&(worktree_id, "a").into(), cx)
.expect("no entry for /a/ directory")
+ .clone()
});
assert!(a_dir_entry.is_dir());
window
@@ -3716,7 +3731,7 @@ pub mod tests {
window
.update(cx, |_, _, cx| {
search_view.update(cx, |search_view, cx| {
- search_view.query_editor.read(cx).text(cx).to_string()
+ search_view.query_editor.read(cx).text(cx)
})
})
.unwrap()
@@ -3883,7 +3898,6 @@ pub mod tests {
// Add a project search item to the second pane
window
.update(cx, {
- let search_bar = search_bar.clone();
|workspace, window, cx| {
assert_eq!(workspace.panes().len(), 2);
second_pane.update(cx, |pane, cx| {
@@ -3906,7 +3920,7 @@ pub mod tests {
assert_eq!(workspace.active_pane(), &second_pane);
second_pane.update(cx, |this, cx| {
assert_eq!(this.active_item_index(), 1);
- this.activate_prev_item(false, window, cx);
+ this.activate_previous_item(&Default::default(), window, cx);
assert_eq!(this.active_item_index(), 0);
});
workspace.activate_pane_in_direction(workspace::SplitDirection::Left, window, cx);
@@ -3941,7 +3955,9 @@ pub mod tests {
// Focus the second pane's non-search item
window
.update(cx, |_workspace, window, cx| {
- second_pane.update(cx, |pane, cx| pane.activate_next_item(true, window, cx));
+ second_pane.update(cx, |pane, cx| {
+ pane.activate_next_item(&Default::default(), window, cx)
+ });
})
.unwrap();
@@ -4154,6 +4170,10 @@ pub mod tests {
search_view.search(cx);
})
.unwrap();
+ // Ensure editor highlights appear after the search is done
+ cx.executor().advance_clock(
+ editor::SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT + Duration::from_millis(100),
+ );
cx.background_executor.run_until_parked();
}
}
@@ -116,8 +116,8 @@ impl SearchOption {
}
}
- pub fn to_toggle_action(&self) -> &'static dyn Action {
- match *self {
+ pub fn to_toggle_action(self) -> &'static dyn Action {
+ match self {
SearchOption::WholeWord => &ToggleWholeWord,
SearchOption::CaseSensitive => &ToggleCaseSensitive,
SearchOption::IncludeIgnored => &ToggleIncludeIgnored,
@@ -1,69 +0,0 @@
-[package]
-name = "semantic_index"
-description = "Process, chunk, and embed text as vectors for semantic search."
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/semantic_index.rs"
-
-[[example]]
-name = "index"
-path = "examples/index.rs"
-crate-type = ["bin"]
-
-[dependencies]
-anyhow.workspace = true
-arrayvec.workspace = true
-blake3.workspace = true
-client.workspace = true
-clock.workspace = true
-collections.workspace = true
-feature_flags.workspace = true
-fs.workspace = true
-futures-batch.workspace = true
-futures.workspace = true
-gpui.workspace = true
-heed.workspace = true
-http_client.workspace = true
-language.workspace = true
-language_model.workspace = true
-log.workspace = true
-open_ai.workspace = true
-parking_lot.workspace = true
-project.workspace = true
-serde.workspace = true
-serde_json.workspace = true
-settings.workspace = true
-sha2.workspace = true
-smol.workspace = true
-streaming-iterator.workspace = true
-theme.workspace = true
-tree-sitter.workspace = true
-ui.workspace = true
-unindent.workspace = true
-util.workspace = true
-workspace.workspace = true
-worktree.workspace = true
-workspace-hack.workspace = true
-
-[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
-fs = { workspace = true, features = ["test-support"] }
-futures.workspace = true
-gpui = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
-language = { workspace = true, features = ["test-support"] }
-languages.workspace = true
-project = { workspace = true, features = ["test-support"] }
-tempfile.workspace = true
-reqwest_client.workspace = true
-util = { workspace = true, features = ["test-support"] }
-workspace = { workspace = true, features = ["test-support"] }
-worktree = { workspace = true, features = ["test-support"] }
-zlog.workspace = true
@@ -1,140 +0,0 @@
-use client::Client;
-use futures::channel::oneshot;
-use gpui::Application;
-use http_client::HttpClientWithUrl;
-use language::language_settings::AllLanguageSettings;
-use project::Project;
-use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb};
-use settings::SettingsStore;
-use std::{
- path::{Path, PathBuf},
- sync::Arc,
-};
-
-fn main() {
- zlog::init();
-
- use clock::FakeSystemClock;
-
- Application::new().run(|cx| {
- let store = SettingsStore::test(cx);
- cx.set_global(store);
- language::init(cx);
- Project::init_settings(cx);
- SettingsStore::update(cx, |store, cx| {
- store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
- });
-
- let clock = Arc::new(FakeSystemClock::new());
-
- let http = Arc::new(HttpClientWithUrl::new(
- Arc::new(
- reqwest_client::ReqwestClient::user_agent("Zed semantic index example").unwrap(),
- ),
- "http://localhost:11434",
- None,
- ));
- let client = client::Client::new(clock, http.clone(), cx);
- Client::set_global(client.clone(), cx);
-
- let args: Vec<String> = std::env::args().collect();
- if args.len() < 2 {
- eprintln!("Usage: cargo run --example index -p semantic_index -- <project_path>");
- cx.quit();
- return;
- }
-
- // let embedding_provider = semantic_index::FakeEmbeddingProvider;
-
- let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
-
- let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new(
- http.clone(),
- OpenAiEmbeddingModel::TextEmbedding3Small,
- open_ai::OPEN_AI_API_URL.to_string(),
- api_key,
- ));
-
- cx.spawn(async move |cx| {
- let semantic_index = SemanticDb::new(
- PathBuf::from("/tmp/semantic-index-db.mdb"),
- embedding_provider,
- cx,
- );
-
- let mut semantic_index = semantic_index.await.unwrap();
-
- let project_path = Path::new(&args[1]);
-
- let project = Project::example([project_path], cx).await;
-
- cx.update(|cx| {
- let language_registry = project.read(cx).languages().clone();
- let node_runtime = project.read(cx).node_runtime().unwrap().clone();
- languages::init(language_registry, node_runtime, cx);
- })
- .unwrap();
-
- let project_index = cx
- .update(|cx| semantic_index.project_index(project.clone(), cx))
- .unwrap()
- .unwrap();
-
- let (tx, rx) = oneshot::channel();
- let mut tx = Some(tx);
- let subscription = cx.update(|cx| {
- cx.subscribe(&project_index, move |_, event, _| {
- if let Some(tx) = tx.take() {
- _ = tx.send(*event);
- }
- })
- });
-
- let index_start = std::time::Instant::now();
- rx.await.expect("no event emitted");
- drop(subscription);
- println!("Index time: {:?}", index_start.elapsed());
-
- let results = cx
- .update(|cx| {
- let project_index = project_index.read(cx);
- let query = "converting an anchor to a point";
- project_index.search(vec![query.into()], 4, cx)
- })
- .unwrap()
- .await
- .unwrap();
-
- for search_result in results {
- let path = search_result.path.clone();
-
- let content = cx
- .update(|cx| {
- let worktree = search_result.worktree.read(cx);
- let entry_abs_path = worktree.abs_path().join(search_result.path.clone());
- let fs = project.read(cx).fs().clone();
- cx.spawn(async move |_| fs.load(&entry_abs_path).await.unwrap())
- })
- .unwrap()
- .await;
-
- let range = search_result.range.clone();
- let content = content[search_result.range].to_owned();
-
- println!(
- "✄✄✄✄✄✄✄✄✄✄✄✄✄✄ {:?} @ {} ✄✄✄✄✄✄✄✄✄✄✄✄✄✄",
- path, search_result.score
- );
- println!("{:?}:{:?}:{:?}", path, range.start, range.end);
- println!("{}", content);
- }
-
- cx.background_executor()
- .timer(std::time::Duration::from_secs(100000))
- .await;
-
- cx.update(|cx| cx.quit()).unwrap();
- })
- .detach();
- });
-}
@@ -1,3 +0,0 @@
-fn main() {
- println!("Hello Indexer!");
-}
@@ -1,43 +0,0 @@
-# Searching for a needle in a haystack
-
-When you have a large amount of text, it can be useful to search for a specific word or phrase. This is often referred to as "finding a needle in a haystack." In this markdown document, we're "hiding" a key phrase for our text search to find. Can you find it?
-
-## Instructions
-
-1. Use the search functionality in your text editor or markdown viewer to find the hidden phrase in this document.
-
-2. Once you've found the **phrase**, write it down and proceed to the next step.
-
-Honestly, I just want to fill up plenty of characters so that we chunk this markdown into several chunks.
-
-## Tips
-
-- Relax
-- Take a deep breath
-- Focus on the task at hand
-- Don't get distracted by other text
-- Use the search functionality to your advantage
-
-## Example code
-
-```python
-def search_for_needle(haystack, needle):
- if needle in haystack:
- return True
- else:
- return False
-```
-
-```javascript
-function searchForNeedle(haystack, needle) {
- return haystack.includes(needle);
-}
-```
-
-## Background
-
-When creating an index for a book or searching for a specific term in a large document, the ability to quickly find a specific word or phrase is essential. This is where search functionality comes in handy. However, one should _remember_ that the search is only as good as the index that was built. As they say, garbage in, garbage out!
-
-## Conclusion
-
-Searching for a needle in a haystack can be a challenging task, but with the right tools and techniques, it becomes much easier. Whether you're looking for a specific word in a document or trying to find a key piece of information in a large dataset, the ability to search efficiently is a valuable skill to have.
@@ -1,415 +0,0 @@
-use language::{Language, with_parser, with_query_cursor};
-use serde::{Deserialize, Serialize};
-use sha2::{Digest, Sha256};
-use std::{
- cmp::{self, Reverse},
- ops::Range,
- path::Path,
- sync::Arc,
-};
-use streaming_iterator::StreamingIterator;
-use tree_sitter::QueryCapture;
-use util::ResultExt as _;
-
-#[derive(Copy, Clone)]
-struct ChunkSizeRange {
- min: usize,
- max: usize,
-}
-
-const CHUNK_SIZE_RANGE: ChunkSizeRange = ChunkSizeRange {
- min: 1024,
- max: 8192,
-};
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct Chunk {
- pub range: Range<usize>,
- pub digest: [u8; 32],
-}
-
-pub fn chunk_text(text: &str, language: Option<&Arc<Language>>, path: &Path) -> Vec<Chunk> {
- chunk_text_with_size_range(text, language, path, CHUNK_SIZE_RANGE)
-}
-
-fn chunk_text_with_size_range(
- text: &str,
- language: Option<&Arc<Language>>,
- path: &Path,
- size_config: ChunkSizeRange,
-) -> Vec<Chunk> {
- let ranges = syntactic_ranges(text, language, path).unwrap_or_default();
- chunk_text_with_syntactic_ranges(text, &ranges, size_config)
-}
-
-fn syntactic_ranges(
- text: &str,
- language: Option<&Arc<Language>>,
- path: &Path,
-) -> Option<Vec<Range<usize>>> {
- let language = language?;
- let grammar = language.grammar()?;
- let outline = grammar.outline_config.as_ref()?;
- let tree = with_parser(|parser| {
- parser.set_language(&grammar.ts_language).log_err()?;
- parser.parse(text, None)
- });
-
- let Some(tree) = tree else {
- log::error!("failed to parse file {path:?} for chunking");
- return None;
- };
-
- struct RowInfo {
- offset: usize,
- is_comment: bool,
- }
-
- let scope = language.default_scope();
- let line_comment_prefixes = scope.line_comment_prefixes();
- let row_infos = text
- .split('\n')
- .map({
- let mut offset = 0;
- move |line| {
- let line = line.trim_start();
- let is_comment = line_comment_prefixes
- .iter()
- .any(|prefix| line.starts_with(prefix.as_ref()));
- let result = RowInfo { offset, is_comment };
- offset += line.len() + 1;
- result
- }
- })
- .collect::<Vec<_>>();
-
- // Retrieve a list of ranges of outline items (types, functions, etc) in the document.
- // Omit single-line outline items (e.g. struct fields, constant declarations), because
- // we'll already be attempting to split on lines.
- let mut ranges = with_query_cursor(|cursor| {
- cursor
- .matches(&outline.query, tree.root_node(), text.as_bytes())
- .filter_map_deref(|mat| {
- mat.captures
- .iter()
- .find_map(|QueryCapture { node, index }| {
- if *index == outline.item_capture_ix {
- let mut start_offset = node.start_byte();
- let mut start_row = node.start_position().row;
- let end_offset = node.end_byte();
- let end_row = node.end_position().row;
-
- // Expand the range to include any preceding comments.
- while start_row > 0 && row_infos[start_row - 1].is_comment {
- start_offset = row_infos[start_row - 1].offset;
- start_row -= 1;
- }
-
- if end_row > start_row {
- return Some(start_offset..end_offset);
- }
- }
- None
- })
- })
- .collect::<Vec<_>>()
- });
-
- ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
- Some(ranges)
-}
-
-fn chunk_text_with_syntactic_ranges(
- text: &str,
- mut syntactic_ranges: &[Range<usize>],
- size_config: ChunkSizeRange,
-) -> Vec<Chunk> {
- let mut chunks = Vec::new();
- let mut range = 0..0;
- let mut range_end_nesting_depth = 0;
-
- // Try to split the text at line boundaries.
- let mut line_ixs = text
- .match_indices('\n')
- .map(|(ix, _)| ix + 1)
- .chain(if text.ends_with('\n') {
- None
- } else {
- Some(text.len())
- })
- .peekable();
-
- while let Some(&line_ix) = line_ixs.peek() {
- // If the current position is beyond the maximum chunk size, then
- // start a new chunk.
- if line_ix - range.start > size_config.max {
- if range.is_empty() {
- range.end = cmp::min(range.start + size_config.max, line_ix);
- while !text.is_char_boundary(range.end) {
- range.end -= 1;
- }
- }
-
- chunks.push(Chunk {
- range: range.clone(),
- digest: Sha256::digest(&text[range.clone()]).into(),
- });
- range_end_nesting_depth = 0;
- range.start = range.end;
- continue;
- }
-
- // Discard any syntactic ranges that end before the current position.
- while let Some(first_item) = syntactic_ranges.first() {
- if first_item.end < line_ix {
- syntactic_ranges = &syntactic_ranges[1..];
- continue;
- } else {
- break;
- }
- }
-
- // Count how many syntactic ranges contain the current position.
- let mut nesting_depth = 0;
- for range in syntactic_ranges {
- if range.start > line_ix {
- break;
- }
- if range.start < line_ix && range.end > line_ix {
- nesting_depth += 1;
- }
- }
-
- // Extend the current range to this position, unless an earlier candidate
- // end position was less nested syntactically.
- if range.len() < size_config.min || nesting_depth <= range_end_nesting_depth {
- range.end = line_ix;
- range_end_nesting_depth = nesting_depth;
- }
-
- line_ixs.next();
- }
-
- if !range.is_empty() {
- chunks.push(Chunk {
- range: range.clone(),
- digest: Sha256::digest(&text[range]).into(),
- });
- }
-
- chunks
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
- use unindent::Unindent as _;
-
- #[test]
- fn test_chunk_text_with_syntax() {
- let language = rust_language();
-
- let text = "
- struct Person {
- first_name: String,
- last_name: String,
- age: u32,
- }
-
- impl Person {
- fn new(first_name: String, last_name: String, age: u32) -> Self {
- Self { first_name, last_name, age }
- }
-
- /// Returns the first name
- /// something something something
- fn first_name(&self) -> &str {
- &self.first_name
- }
-
- fn last_name(&self) -> &str {
- &self.last_name
- }
-
- fn age(&self) -> u32 {
- self.age
- }
- }
- "
- .unindent();
-
- let chunks = chunk_text_with_size_range(
- &text,
- Some(&language),
- Path::new("lib.rs"),
- ChunkSizeRange {
- min: text.find('}').unwrap(),
- max: text.find("Self {").unwrap(),
- },
- );
-
- // The entire impl cannot fit in a chunk, so it is split.
- // Within the impl, two methods can fit in a chunk.
- assert_chunks(
- &text,
- &chunks,
- &[
- "struct Person {", // ...
- "impl Person {",
- " /// Returns the first name",
- " fn last_name",
- ],
- );
-
- let text = "
- struct T {}
- struct U {}
- struct V {}
- struct W {
- a: T,
- b: U,
- }
- "
- .unindent();
-
- let chunks = chunk_text_with_size_range(
- &text,
- Some(&language),
- Path::new("lib.rs"),
- ChunkSizeRange {
- min: text.find('{').unwrap(),
- max: text.find('V').unwrap(),
- },
- );
-
- // Two single-line structs can fit in a chunk.
- // The last struct cannot fit in a chunk together
- // with the previous single-line struct.
- assert_chunks(
- &text,
- &chunks,
- &[
- "struct T", // ...
- "struct V", // ...
- "struct W", // ...
- "}",
- ],
- );
- }
-
- #[test]
- fn test_chunk_with_long_lines() {
- let language = rust_language();
-
- let text = "
- struct S { a: u32 }
- struct T { a: u64 }
- struct U { a: u64, b: u64, c: u64, d: u64, e: u64, f: u64, g: u64, h: u64, i: u64, j: u64 }
- struct W { a: u64, b: u64, c: u64, d: u64, e: u64, f: u64, g: u64, h: u64, i: u64, j: u64 }
- "
- .unindent();
-
- let chunks = chunk_text_with_size_range(
- &text,
- Some(&language),
- Path::new("lib.rs"),
- ChunkSizeRange { min: 32, max: 64 },
- );
-
- // The line is too long to fit in one chunk
- assert_chunks(
- &text,
- &chunks,
- &[
- "struct S {", // ...
- "struct U",
- "4, h: u64, i: u64", // ...
- "struct W",
- "4, h: u64, i: u64", // ...
- ],
- );
- }
-
- #[track_caller]
- fn assert_chunks(text: &str, chunks: &[Chunk], expected_chunk_text_prefixes: &[&str]) {
- check_chunk_invariants(text, chunks);
-
- assert_eq!(
- chunks.len(),
- expected_chunk_text_prefixes.len(),
- "unexpected number of chunks: {chunks:?}",
- );
-
- let mut prev_chunk_end = 0;
- for (ix, chunk) in chunks.iter().enumerate() {
- let expected_prefix = expected_chunk_text_prefixes[ix];
- let chunk_text = &text[chunk.range.clone()];
- if !chunk_text.starts_with(expected_prefix) {
- let chunk_prefix_offset = text[prev_chunk_end..].find(expected_prefix);
- if let Some(chunk_prefix_offset) = chunk_prefix_offset {
- panic!(
- "chunk {ix} starts at unexpected offset {}. expected {}",
- chunk.range.start,
- chunk_prefix_offset + prev_chunk_end
- );
- } else {
- panic!("invalid expected chunk prefix {ix}: {expected_prefix:?}");
- }
- }
- prev_chunk_end = chunk.range.end;
- }
- }
-
- #[track_caller]
- fn check_chunk_invariants(text: &str, chunks: &[Chunk]) {
- for (ix, chunk) in chunks.iter().enumerate() {
- if ix > 0 && chunk.range.start != chunks[ix - 1].range.end {
- panic!("chunk ranges are not contiguous: {:?}", chunks);
- }
- }
-
- if text.is_empty() {
- assert!(chunks.is_empty())
- } else if chunks.first().unwrap().range.start != 0
- || chunks.last().unwrap().range.end != text.len()
- {
- panic!("chunks don't cover entire text {:?}", chunks);
- }
- }
-
- #[test]
- fn test_chunk_text() {
- let text = "a\n".repeat(1000);
- let chunks = chunk_text(&text, None, Path::new("lib.rs"));
- assert_eq!(
- chunks.len(),
- ((2000_f64) / (CHUNK_SIZE_RANGE.max as f64)).ceil() as usize
- );
- }
-
- fn rust_language() -> Arc<Language> {
- Arc::new(
- Language::new(
- LanguageConfig {
- name: "Rust".into(),
- matcher: LanguageMatcher {
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- ..Default::default()
- },
- Some(tree_sitter_rust::LANGUAGE.into()),
- )
- .with_outline_query(
- "
- (function_item name: (_) @name) @item
- (impl_item type: (_) @name) @item
- (struct_item name: (_) @name) @item
- (field_declaration name: (_) @name) @item
- ",
- )
- .unwrap(),
- )
- }
-}
@@ -1,134 +0,0 @@
-mod lmstudio;
-mod ollama;
-mod open_ai;
-
-pub use lmstudio::*;
-pub use ollama::*;
-pub use open_ai::*;
-use sha2::{Digest, Sha256};
-
-use anyhow::Result;
-use futures::{FutureExt, future::BoxFuture};
-use serde::{Deserialize, Serialize};
-use std::{fmt, future};
-
-/// Trait for embedding providers. Texts in, vectors out.
-pub trait EmbeddingProvider: Sync + Send {
- fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>>;
- fn batch_size(&self) -> usize;
-}
-
-#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
-pub struct Embedding(Vec<f32>);
-
-impl Embedding {
- pub fn new(mut embedding: Vec<f32>) -> Self {
- let len = embedding.len();
- let mut norm = 0f32;
-
- for i in 0..len {
- norm += embedding[i] * embedding[i];
- }
-
- norm = norm.sqrt();
- for dimension in &mut embedding {
- *dimension /= norm;
- }
-
- Self(embedding)
- }
-
- fn len(&self) -> usize {
- self.0.len()
- }
-
- pub fn similarity(&self, others: &[Embedding]) -> (f32, usize) {
- debug_assert!(others.iter().all(|other| self.0.len() == other.0.len()));
- others
- .iter()
- .enumerate()
- .map(|(index, other)| {
- let dot_product: f32 = self
- .0
- .iter()
- .copied()
- .zip(other.0.iter().copied())
- .map(|(a, b)| a * b)
- .sum();
- (dot_product, index)
- })
- .max_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal))
- .unwrap_or((0.0, 0))
- }
-}
-
-impl fmt::Display for Embedding {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let digits_to_display = 3;
-
- // Start the Embedding display format
- write!(f, "Embedding(sized: {}; values: [", self.len())?;
-
- for (index, value) in self.0.iter().enumerate().take(digits_to_display) {
- // Lead with comma if not the first element
- if index != 0 {
- write!(f, ", ")?;
- }
- write!(f, "{:.3}", value)?;
- }
- if self.len() > digits_to_display {
- write!(f, "...")?;
- }
- write!(f, "])")
- }
-}
-
-#[derive(Debug)]
-pub struct TextToEmbed<'a> {
- pub text: &'a str,
- pub digest: [u8; 32],
-}
-
-impl<'a> TextToEmbed<'a> {
- pub fn new(text: &'a str) -> Self {
- let digest = Sha256::digest(text.as_bytes());
- Self {
- text,
- digest: digest.into(),
- }
- }
-}
-
-pub struct FakeEmbeddingProvider;
-
-impl EmbeddingProvider for FakeEmbeddingProvider {
- fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>> {
- let embeddings = texts
- .iter()
- .map(|_text| {
- let mut embedding = vec![0f32; 1536];
- for i in 0..embedding.len() {
- embedding[i] = i as f32;
- }
- Embedding::new(embedding)
- })
- .collect();
- future::ready(Ok(embeddings)).boxed()
- }
-
- fn batch_size(&self) -> usize {
- 16
- }
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[gpui::test]
- fn test_normalize_embedding() {
- let normalized = Embedding::new(vec![1.0, 1.0, 1.0]);
- let value: f32 = 1.0 / 3.0_f32.sqrt();
- assert_eq!(normalized, Embedding(vec![value; 3]));
- }
-}
@@ -1,70 +0,0 @@
-use anyhow::{Context as _, Result};
-use futures::{AsyncReadExt as _, FutureExt, future::BoxFuture};
-use http_client::HttpClient;
-use serde::{Deserialize, Serialize};
-use std::sync::Arc;
-
-use crate::{Embedding, EmbeddingProvider, TextToEmbed};
-
-pub enum LmStudioEmbeddingModel {
- NomicEmbedText,
-}
-
-pub struct LmStudioEmbeddingProvider {
- client: Arc<dyn HttpClient>,
- model: LmStudioEmbeddingModel,
-}
-
-#[derive(Serialize)]
-struct LmStudioEmbeddingRequest {
- model: String,
- prompt: String,
-}
-
-#[derive(Deserialize)]
-struct LmStudioEmbeddingResponse {
- embedding: Vec<f32>,
-}
-
-impl LmStudioEmbeddingProvider {
- pub fn new(client: Arc<dyn HttpClient>, model: LmStudioEmbeddingModel) -> Self {
- Self { client, model }
- }
-}
-
-impl EmbeddingProvider for LmStudioEmbeddingProvider {
- fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>> {
- let model = match self.model {
- LmStudioEmbeddingModel::NomicEmbedText => "nomic-embed-text",
- };
-
- futures::future::try_join_all(texts.iter().map(|to_embed| {
- let request = LmStudioEmbeddingRequest {
- model: model.to_string(),
- prompt: to_embed.text.to_string(),
- };
-
- let request = serde_json::to_string(&request).unwrap();
-
- async {
- let response = self
- .client
- .post_json("http://localhost:1234/api/v0/embeddings", request.into())
- .await?;
-
- let mut body = String::new();
- response.into_body().read_to_string(&mut body).await?;
-
- let response: LmStudioEmbeddingResponse =
- serde_json::from_str(&body).context("Unable to parse response")?;
-
- Ok(Embedding::new(response.embedding))
- }
- }))
- .boxed()
- }
-
- fn batch_size(&self) -> usize {
- 256
- }
-}
@@ -1,74 +0,0 @@
-use anyhow::{Context as _, Result};
-use futures::{AsyncReadExt as _, FutureExt, future::BoxFuture};
-use http_client::HttpClient;
-use serde::{Deserialize, Serialize};
-use std::sync::Arc;
-
-use crate::{Embedding, EmbeddingProvider, TextToEmbed};
-
-pub enum OllamaEmbeddingModel {
- NomicEmbedText,
- MxbaiEmbedLarge,
-}
-
-pub struct OllamaEmbeddingProvider {
- client: Arc<dyn HttpClient>,
- model: OllamaEmbeddingModel,
-}
-
-#[derive(Serialize)]
-struct OllamaEmbeddingRequest {
- model: String,
- prompt: String,
-}
-
-#[derive(Deserialize)]
-struct OllamaEmbeddingResponse {
- embedding: Vec<f32>,
-}
-
-impl OllamaEmbeddingProvider {
- pub fn new(client: Arc<dyn HttpClient>, model: OllamaEmbeddingModel) -> Self {
- Self { client, model }
- }
-}
-
-impl EmbeddingProvider for OllamaEmbeddingProvider {
- fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>> {
- //
- let model = match self.model {
- OllamaEmbeddingModel::NomicEmbedText => "nomic-embed-text",
- OllamaEmbeddingModel::MxbaiEmbedLarge => "mxbai-embed-large",
- };
-
- futures::future::try_join_all(texts.iter().map(|to_embed| {
- let request = OllamaEmbeddingRequest {
- model: model.to_string(),
- prompt: to_embed.text.to_string(),
- };
-
- let request = serde_json::to_string(&request).unwrap();
-
- async {
- let response = self
- .client
- .post_json("http://localhost:11434/api/embeddings", request.into())
- .await?;
-
- let mut body = String::new();
- response.into_body().read_to_string(&mut body).await?;
-
- let response: OllamaEmbeddingResponse =
- serde_json::from_str(&body).context("Unable to pull response")?;
-
- Ok(Embedding::new(response.embedding))
- }
- }))
- .boxed()
- }
-
- fn batch_size(&self) -> usize {
- // TODO: Figure out decent value
- 10
- }
-}
@@ -1,55 +0,0 @@
-use crate::{Embedding, EmbeddingProvider, TextToEmbed};
-use anyhow::Result;
-use futures::{FutureExt, future::BoxFuture};
-use http_client::HttpClient;
-pub use open_ai::OpenAiEmbeddingModel;
-use std::sync::Arc;
-
-pub struct OpenAiEmbeddingProvider {
- client: Arc<dyn HttpClient>,
- model: OpenAiEmbeddingModel,
- api_url: String,
- api_key: String,
-}
-
-impl OpenAiEmbeddingProvider {
- pub fn new(
- client: Arc<dyn HttpClient>,
- model: OpenAiEmbeddingModel,
- api_url: String,
- api_key: String,
- ) -> Self {
- Self {
- client,
- model,
- api_url,
- api_key,
- }
- }
-}
-
-impl EmbeddingProvider for OpenAiEmbeddingProvider {
- fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result<Vec<Embedding>>> {
- let embed = open_ai::embed(
- self.client.as_ref(),
- &self.api_url,
- &self.api_key,
- self.model,
- texts.iter().map(|to_embed| to_embed.text),
- );
- async move {
- let response = embed.await?;
- Ok(response
- .data
- .into_iter()
- .map(|data| Embedding::new(data.embedding))
- .collect())
- }
- .boxed()
- }
-
- fn batch_size(&self) -> usize {
- // From https://platform.openai.com/docs/api-reference/embeddings/create
- 2048
- }
-}
@@ -1,471 +0,0 @@
-use crate::{
- chunking::{self, Chunk},
- embedding::{Embedding, EmbeddingProvider, TextToEmbed},
- indexing::{IndexingEntryHandle, IndexingEntrySet},
-};
-use anyhow::{Context as _, Result};
-use collections::Bound;
-use feature_flags::FeatureFlagAppExt;
-use fs::Fs;
-use fs::MTime;
-use futures::{FutureExt as _, stream::StreamExt};
-use futures_batch::ChunksTimeoutStreamExt;
-use gpui::{App, AppContext as _, Entity, Task};
-use heed::types::{SerdeBincode, Str};
-use language::LanguageRegistry;
-use log;
-use project::{Entry, UpdatedEntriesSet, Worktree};
-use serde::{Deserialize, Serialize};
-use smol::channel;
-use std::{cmp::Ordering, future::Future, iter, path::Path, pin::pin, sync::Arc, time::Duration};
-use util::ResultExt;
-use worktree::Snapshot;
-
-pub struct EmbeddingIndex {
- worktree: Entity<Worktree>,
- db_connection: heed::Env,
- db: heed::Database<Str, SerdeBincode<EmbeddedFile>>,
- fs: Arc<dyn Fs>,
- language_registry: Arc<LanguageRegistry>,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- entry_ids_being_indexed: Arc<IndexingEntrySet>,
-}
-
-impl EmbeddingIndex {
- pub fn new(
- worktree: Entity<Worktree>,
- fs: Arc<dyn Fs>,
- db_connection: heed::Env,
- embedding_db: heed::Database<Str, SerdeBincode<EmbeddedFile>>,
- language_registry: Arc<LanguageRegistry>,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- entry_ids_being_indexed: Arc<IndexingEntrySet>,
- ) -> Self {
- Self {
- worktree,
- fs,
- db_connection,
- db: embedding_db,
- language_registry,
- embedding_provider,
- entry_ids_being_indexed,
- }
- }
-
- pub fn db(&self) -> &heed::Database<Str, SerdeBincode<EmbeddedFile>> {
- &self.db
- }
-
- pub fn index_entries_changed_on_disk(
- &self,
- cx: &App,
- ) -> impl Future<Output = Result<()>> + use<> {
- if !cx.is_staff() {
- return async move { Ok(()) }.boxed();
- }
-
- let worktree = self.worktree.read(cx).snapshot();
- let worktree_abs_path = worktree.abs_path().clone();
- let scan = self.scan_entries(worktree, cx);
- let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx);
- let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx);
- let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
- async move {
- futures::try_join!(scan.task, chunk.task, embed.task, persist)?;
- Ok(())
- }
- .boxed()
- }
-
- pub fn index_updated_entries(
- &self,
- updated_entries: UpdatedEntriesSet,
- cx: &App,
- ) -> impl Future<Output = Result<()>> + use<> {
- if !cx.is_staff() {
- return async move { Ok(()) }.boxed();
- }
-
- let worktree = self.worktree.read(cx).snapshot();
- let worktree_abs_path = worktree.abs_path().clone();
- let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
- let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx);
- let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx);
- let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx);
- async move {
- futures::try_join!(scan.task, chunk.task, embed.task, persist)?;
- Ok(())
- }
- .boxed()
- }
-
- fn scan_entries(&self, worktree: Snapshot, cx: &App) -> ScanEntries {
- let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
- let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
- let db_connection = self.db_connection.clone();
- let db = self.db;
- let entries_being_indexed = self.entry_ids_being_indexed.clone();
- let task = cx.background_spawn(async move {
- let txn = db_connection
- .read_txn()
- .context("failed to create read transaction")?;
- let mut db_entries = db
- .iter(&txn)
- .context("failed to create iterator")?
- .move_between_keys()
- .peekable();
-
- let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None;
- for entry in worktree.files(false, 0) {
- log::trace!("scanning for embedding index: {:?}", &entry.path);
-
- let entry_db_key = db_key_for_path(&entry.path);
-
- let mut saved_mtime = None;
- while let Some(db_entry) = db_entries.peek() {
- match db_entry {
- Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) {
- Ordering::Less => {
- if let Some(deletion_range) = deletion_range.as_mut() {
- deletion_range.1 = Bound::Included(db_path);
- } else {
- deletion_range =
- Some((Bound::Included(db_path), Bound::Included(db_path)));
- }
-
- db_entries.next();
- }
- Ordering::Equal => {
- if let Some(deletion_range) = deletion_range.take() {
- deleted_entry_ranges_tx
- .send((
- deletion_range.0.map(ToString::to_string),
- deletion_range.1.map(ToString::to_string),
- ))
- .await?;
- }
- saved_mtime = db_embedded_file.mtime;
- db_entries.next();
- break;
- }
- Ordering::Greater => {
- break;
- }
- },
- Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?,
- }
- }
-
- if entry.mtime != saved_mtime {
- let handle = entries_being_indexed.insert(entry.id);
- updated_entries_tx.send((entry.clone(), handle)).await?;
- }
- }
-
- if let Some(db_entry) = db_entries.next() {
- let (db_path, _) = db_entry?;
- deleted_entry_ranges_tx
- .send((Bound::Included(db_path.to_string()), Bound::Unbounded))
- .await?;
- }
-
- Ok(())
- });
-
- ScanEntries {
- updated_entries: updated_entries_rx,
- deleted_entry_ranges: deleted_entry_ranges_rx,
- task,
- }
- }
-
- fn scan_updated_entries(
- &self,
- worktree: Snapshot,
- updated_entries: UpdatedEntriesSet,
- cx: &App,
- ) -> ScanEntries {
- let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
- let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
- let entries_being_indexed = self.entry_ids_being_indexed.clone();
- let task = cx.background_spawn(async move {
- for (path, entry_id, status) in updated_entries.iter() {
- match status {
- project::PathChange::Added
- | project::PathChange::Updated
- | project::PathChange::AddedOrUpdated => {
- if let Some(entry) = worktree.entry_for_id(*entry_id)
- && entry.is_file()
- {
- let handle = entries_being_indexed.insert(entry.id);
- updated_entries_tx.send((entry.clone(), handle)).await?;
- }
- }
- project::PathChange::Removed => {
- let db_path = db_key_for_path(path);
- deleted_entry_ranges_tx
- .send((Bound::Included(db_path.clone()), Bound::Included(db_path)))
- .await?;
- }
- project::PathChange::Loaded => {
- // Do nothing.
- }
- }
- }
-
- Ok(())
- });
-
- ScanEntries {
- updated_entries: updated_entries_rx,
- deleted_entry_ranges: deleted_entry_ranges_rx,
- task,
- }
- }
-
- fn chunk_files(
- &self,
- worktree_abs_path: Arc<Path>,
- entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
- cx: &App,
- ) -> ChunkFiles {
- let language_registry = self.language_registry.clone();
- let fs = self.fs.clone();
- let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048);
- let task = cx.spawn(async move |cx| {
- cx.background_executor()
- .scoped(|cx| {
- for _ in 0..cx.num_cpus() {
- cx.spawn(async {
- while let Ok((entry, handle)) = entries.recv().await {
- let entry_abs_path = worktree_abs_path.join(&entry.path);
- if let Some(text) = fs.load(&entry_abs_path).await.ok() {
- let language = language_registry
- .language_for_file_path(&entry.path)
- .await
- .ok();
- let chunked_file = ChunkedFile {
- chunks: chunking::chunk_text(
- &text,
- language.as_ref(),
- &entry.path,
- ),
- handle,
- path: entry.path,
- mtime: entry.mtime,
- text,
- };
-
- if chunked_files_tx.send(chunked_file).await.is_err() {
- return;
- }
- }
- }
- });
- }
- })
- .await;
- Ok(())
- });
-
- ChunkFiles {
- files: chunked_files_rx,
- task,
- }
- }
-
- pub fn embed_files(
- embedding_provider: Arc<dyn EmbeddingProvider>,
- chunked_files: channel::Receiver<ChunkedFile>,
- cx: &App,
- ) -> EmbedFiles {
- let embedding_provider = embedding_provider.clone();
- let (embedded_files_tx, embedded_files_rx) = channel::bounded(512);
- let task = cx.background_spawn(async move {
- let mut chunked_file_batches =
- pin!(chunked_files.chunks_timeout(512, Duration::from_secs(2)));
- while let Some(chunked_files) = chunked_file_batches.next().await {
- // View the batch of files as a vec of chunks
- // Flatten out to a vec of chunks that we can subdivide into batch sized pieces
- // Once those are done, reassemble them back into the files in which they belong
- // If any embeddings fail for a file, the entire file is discarded
-
- let chunks: Vec<TextToEmbed> = chunked_files
- .iter()
- .flat_map(|file| {
- file.chunks.iter().map(|chunk| TextToEmbed {
- text: &file.text[chunk.range.clone()],
- digest: chunk.digest,
- })
- })
- .collect::<Vec<_>>();
-
- let mut embeddings: Vec<Option<Embedding>> = Vec::new();
- for embedding_batch in chunks.chunks(embedding_provider.batch_size()) {
- if let Some(batch_embeddings) =
- embedding_provider.embed(embedding_batch).await.log_err()
- {
- if batch_embeddings.len() == embedding_batch.len() {
- embeddings.extend(batch_embeddings.into_iter().map(Some));
- continue;
- }
- log::error!(
- "embedding provider returned unexpected embedding count {}, expected {}",
- batch_embeddings.len(), embedding_batch.len()
- );
- }
-
- embeddings.extend(iter::repeat(None).take(embedding_batch.len()));
- }
-
- let mut embeddings = embeddings.into_iter();
- for chunked_file in chunked_files {
- let mut embedded_file = EmbeddedFile {
- path: chunked_file.path,
- mtime: chunked_file.mtime,
- chunks: Vec::new(),
- };
-
- let mut embedded_all_chunks = true;
- for (chunk, embedding) in
- chunked_file.chunks.into_iter().zip(embeddings.by_ref())
- {
- if let Some(embedding) = embedding {
- embedded_file
- .chunks
- .push(EmbeddedChunk { chunk, embedding });
- } else {
- embedded_all_chunks = false;
- }
- }
-
- if embedded_all_chunks {
- embedded_files_tx
- .send((embedded_file, chunked_file.handle))
- .await?;
- }
- }
- }
- Ok(())
- });
-
- EmbedFiles {
- files: embedded_files_rx,
- task,
- }
- }
-
- fn persist_embeddings(
- &self,
- deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
- embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
- cx: &App,
- ) -> Task<Result<()>> {
- let db_connection = self.db_connection.clone();
- let db = self.db;
-
- cx.background_spawn(async move {
- let mut deleted_entry_ranges = pin!(deleted_entry_ranges);
- let mut embedded_files = pin!(embedded_files);
- loop {
- // Interleave deletions and persists of embedded files
- futures::select_biased! {
- deletion_range = deleted_entry_ranges.next() => {
- if let Some(deletion_range) = deletion_range {
- let mut txn = db_connection.write_txn()?;
- let start = deletion_range.0.as_ref().map(|start| start.as_str());
- let end = deletion_range.1.as_ref().map(|end| end.as_str());
- log::debug!("deleting embeddings in range {:?}", &(start, end));
- db.delete_range(&mut txn, &(start, end))?;
- txn.commit()?;
- }
- },
- file = embedded_files.next() => {
- if let Some((file, _)) = file {
- let mut txn = db_connection.write_txn()?;
- log::debug!("saving embedding for file {:?}", file.path);
- let key = db_key_for_path(&file.path);
- db.put(&mut txn, &key, &file)?;
- txn.commit()?;
- }
- },
- complete => break,
- }
- }
-
- Ok(())
- })
- }
-
- pub fn paths(&self, cx: &App) -> Task<Result<Vec<Arc<Path>>>> {
- let connection = self.db_connection.clone();
- let db = self.db;
- cx.background_spawn(async move {
- let tx = connection
- .read_txn()
- .context("failed to create read transaction")?;
- let result = db
- .iter(&tx)?
- .map(|entry| Ok(entry?.1.path.clone()))
- .collect::<Result<Vec<Arc<Path>>>>();
- drop(tx);
- result
- })
- }
-
- pub fn chunks_for_path(&self, path: Arc<Path>, cx: &App) -> Task<Result<Vec<EmbeddedChunk>>> {
- let connection = self.db_connection.clone();
- let db = self.db;
- cx.background_spawn(async move {
- let tx = connection
- .read_txn()
- .context("failed to create read transaction")?;
- Ok(db
- .get(&tx, &db_key_for_path(&path))?
- .context("no such path")?
- .chunks
- .clone())
- })
- }
-}
-
-struct ScanEntries {
- updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>,
- deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
- task: Task<Result<()>>,
-}
-
-struct ChunkFiles {
- files: channel::Receiver<ChunkedFile>,
- task: Task<Result<()>>,
-}
-
-pub struct ChunkedFile {
- pub path: Arc<Path>,
- pub mtime: Option<MTime>,
- pub handle: IndexingEntryHandle,
- pub text: String,
- pub chunks: Vec<Chunk>,
-}
-
-pub struct EmbedFiles {
- pub files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
- pub task: Task<Result<()>>,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct EmbeddedFile {
- pub path: Arc<Path>,
- pub mtime: Option<MTime>,
- pub chunks: Vec<EmbeddedChunk>,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct EmbeddedChunk {
- pub chunk: Chunk,
- pub embedding: Embedding,
-}
-
-fn db_key_for_path(path: &Arc<Path>) -> String {
- path.to_string_lossy().replace('/', "\0")
-}
@@ -1,49 +0,0 @@
-use collections::HashSet;
-use parking_lot::Mutex;
-use project::ProjectEntryId;
-use smol::channel;
-use std::sync::{Arc, Weak};
-
-/// The set of entries that are currently being indexed.
-pub struct IndexingEntrySet {
- entry_ids: Mutex<HashSet<ProjectEntryId>>,
- tx: channel::Sender<()>,
-}
-
-/// When dropped, removes the entry from the set of entries that are being indexed.
-#[derive(Clone)]
-pub(crate) struct IndexingEntryHandle {
- entry_id: ProjectEntryId,
- set: Weak<IndexingEntrySet>,
-}
-
-impl IndexingEntrySet {
- pub fn new(tx: channel::Sender<()>) -> Self {
- Self {
- entry_ids: Default::default(),
- tx,
- }
- }
-
- pub fn insert(self: &Arc<Self>, entry_id: ProjectEntryId) -> IndexingEntryHandle {
- self.entry_ids.lock().insert(entry_id);
- self.tx.send_blocking(()).ok();
- IndexingEntryHandle {
- entry_id,
- set: Arc::downgrade(self),
- }
- }
-
- pub fn len(&self) -> usize {
- self.entry_ids.lock().len()
- }
-}
-
-impl Drop for IndexingEntryHandle {
- fn drop(&mut self) {
- if let Some(set) = self.set.upgrade() {
- set.tx.send_blocking(()).ok();
- set.entry_ids.lock().remove(&self.entry_id);
- }
- }
-}
@@ -1,548 +0,0 @@
-use crate::{
- embedding::{EmbeddingProvider, TextToEmbed},
- summary_index::FileSummary,
- worktree_index::{WorktreeIndex, WorktreeIndexHandle},
-};
-use anyhow::{Context as _, Result, anyhow};
-use collections::HashMap;
-use fs::Fs;
-use futures::FutureExt;
-use gpui::{
- App, AppContext as _, Context, Entity, EntityId, EventEmitter, Subscription, Task, WeakEntity,
-};
-use language::LanguageRegistry;
-use log;
-use project::{Project, Worktree, WorktreeId};
-use serde::{Deserialize, Serialize};
-use smol::channel;
-use std::{
- cmp::Ordering,
- future::Future,
- num::NonZeroUsize,
- ops::{Range, RangeInclusive},
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::ResultExt;
-
-#[derive(Debug)]
-pub struct SearchResult {
- pub worktree: Entity<Worktree>,
- pub path: Arc<Path>,
- pub range: Range<usize>,
- pub score: f32,
- pub query_index: usize,
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct LoadedSearchResult {
- pub path: Arc<Path>,
- pub full_path: PathBuf,
- pub excerpt_content: String,
- pub row_range: RangeInclusive<u32>,
- pub query_index: usize,
-}
-
-pub struct WorktreeSearchResult {
- pub worktree_id: WorktreeId,
- pub path: Arc<Path>,
- pub range: Range<usize>,
- pub query_index: usize,
- pub score: f32,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
-pub enum Status {
- Idle,
- Loading,
- Scanning { remaining_count: NonZeroUsize },
-}
-
-pub struct ProjectIndex {
- db_connection: heed::Env,
- project: WeakEntity<Project>,
- worktree_indices: HashMap<EntityId, WorktreeIndexHandle>,
- language_registry: Arc<LanguageRegistry>,
- fs: Arc<dyn Fs>,
- last_status: Status,
- status_tx: channel::Sender<()>,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- _maintain_status: Task<()>,
- _subscription: Subscription,
-}
-
-impl ProjectIndex {
- pub fn new(
- project: Entity<Project>,
- db_connection: heed::Env,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- cx: &mut Context<Self>,
- ) -> Self {
- let language_registry = project.read(cx).languages().clone();
- let fs = project.read(cx).fs().clone();
- let (status_tx, status_rx) = channel::unbounded();
- let mut this = ProjectIndex {
- db_connection,
- project: project.downgrade(),
- worktree_indices: HashMap::default(),
- language_registry,
- fs,
- status_tx,
- last_status: Status::Idle,
- embedding_provider,
- _subscription: cx.subscribe(&project, Self::handle_project_event),
- _maintain_status: cx.spawn(async move |this, cx| {
- while status_rx.recv().await.is_ok() {
- if this.update(cx, |this, cx| this.update_status(cx)).is_err() {
- break;
- }
- }
- }),
- };
- this.update_worktree_indices(cx);
- this
- }
-
- pub fn status(&self) -> Status {
- self.last_status
- }
-
- pub fn project(&self) -> WeakEntity<Project> {
- self.project.clone()
- }
-
- pub fn fs(&self) -> Arc<dyn Fs> {
- self.fs.clone()
- }
-
- fn handle_project_event(
- &mut self,
- _: Entity<Project>,
- event: &project::Event,
- cx: &mut Context<Self>,
- ) {
- match event {
- project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
- self.update_worktree_indices(cx);
- }
- _ => {}
- }
- }
-
- fn update_worktree_indices(&mut self, cx: &mut Context<Self>) {
- let Some(project) = self.project.upgrade() else {
- return;
- };
-
- let worktrees = project
- .read(cx)
- .visible_worktrees(cx)
- .filter_map(|worktree| {
- if worktree.read(cx).is_local() {
- Some((worktree.entity_id(), worktree))
- } else {
- None
- }
- })
- .collect::<HashMap<_, _>>();
-
- self.worktree_indices
- .retain(|worktree_id, _| worktrees.contains_key(worktree_id));
- for (worktree_id, worktree) in worktrees {
- self.worktree_indices.entry(worktree_id).or_insert_with(|| {
- let worktree_index = WorktreeIndex::load(
- worktree.clone(),
- self.db_connection.clone(),
- self.language_registry.clone(),
- self.fs.clone(),
- self.status_tx.clone(),
- self.embedding_provider.clone(),
- cx,
- );
-
- let load_worktree = cx.spawn(async move |this, cx| {
- let result = match worktree_index.await {
- Ok(worktree_index) => {
- this.update(cx, |this, _| {
- this.worktree_indices.insert(
- worktree_id,
- WorktreeIndexHandle::Loaded {
- index: worktree_index.clone(),
- },
- );
- })?;
- Ok(worktree_index)
- }
- Err(error) => {
- this.update(cx, |this, _cx| {
- this.worktree_indices.remove(&worktree_id)
- })?;
- Err(Arc::new(error))
- }
- };
-
- this.update(cx, |this, cx| this.update_status(cx))?;
-
- result
- });
-
- WorktreeIndexHandle::Loading {
- index: load_worktree.shared(),
- }
- });
- }
-
- self.update_status(cx);
- }
-
- fn update_status(&mut self, cx: &mut Context<Self>) {
- let mut indexing_count = 0;
- let mut any_loading = false;
-
- for index in self.worktree_indices.values_mut() {
- match index {
- WorktreeIndexHandle::Loading { .. } => {
- any_loading = true;
- break;
- }
- WorktreeIndexHandle::Loaded { index, .. } => {
- indexing_count += index.read(cx).entry_ids_being_indexed().len();
- }
- }
- }
-
- let status = if any_loading {
- Status::Loading
- } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) {
- Status::Scanning { remaining_count }
- } else {
- Status::Idle
- };
-
- if status != self.last_status {
- self.last_status = status;
- cx.emit(status);
- }
- }
-
- pub fn search(
- &self,
- queries: Vec<String>,
- limit: usize,
- cx: &App,
- ) -> Task<Result<Vec<SearchResult>>> {
- let (chunks_tx, chunks_rx) = channel::bounded(1024);
- let mut worktree_scan_tasks = Vec::new();
- for worktree_index in self.worktree_indices.values() {
- let worktree_index = worktree_index.clone();
- let chunks_tx = chunks_tx.clone();
- worktree_scan_tasks.push(cx.spawn(async move |cx| {
- let index = match worktree_index {
- WorktreeIndexHandle::Loading { index } => {
- index.clone().await.map_err(|error| anyhow!(error))?
- }
- WorktreeIndexHandle::Loaded { index } => index.clone(),
- };
-
- index
- .read_with(cx, |index, cx| {
- let worktree_id = index.worktree().read(cx).id();
- let db_connection = index.db_connection().clone();
- let db = *index.embedding_index().db();
- cx.background_spawn(async move {
- let txn = db_connection
- .read_txn()
- .context("failed to create read transaction")?;
- let db_entries = db.iter(&txn).context("failed to iterate database")?;
- for db_entry in db_entries {
- let (_key, db_embedded_file) = db_entry?;
- for chunk in db_embedded_file.chunks {
- chunks_tx
- .send((worktree_id, db_embedded_file.path.clone(), chunk))
- .await?;
- }
- }
- anyhow::Ok(())
- })
- })?
- .await
- }));
- }
- drop(chunks_tx);
-
- let project = self.project.clone();
- let embedding_provider = self.embedding_provider.clone();
- cx.spawn(async move |cx| {
- #[cfg(debug_assertions)]
- let embedding_query_start = std::time::Instant::now();
- log::info!("Searching for {queries:?}");
- let queries: Vec<TextToEmbed> = queries
- .iter()
- .map(|s| TextToEmbed::new(s.as_str()))
- .collect();
-
- let query_embeddings = embedding_provider.embed(&queries[..]).await?;
- anyhow::ensure!(
- query_embeddings.len() == queries.len(),
- "The number of query embeddings does not match the number of queries"
- );
-
- let mut results_by_worker = Vec::new();
- for _ in 0..cx.background_executor().num_cpus() {
- results_by_worker.push(Vec::<WorktreeSearchResult>::new());
- }
-
- #[cfg(debug_assertions)]
- let search_start = std::time::Instant::now();
- cx.background_executor()
- .scoped(|cx| {
- for results in results_by_worker.iter_mut() {
- cx.spawn(async {
- while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await {
- let (score, query_index) =
- chunk.embedding.similarity(&query_embeddings);
-
- let ix = match results.binary_search_by(|probe| {
- score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal)
- }) {
- Ok(ix) | Err(ix) => ix,
- };
- if ix < limit {
- results.insert(
- ix,
- WorktreeSearchResult {
- worktree_id,
- path: path.clone(),
- range: chunk.chunk.range.clone(),
- query_index,
- score,
- },
- );
- if results.len() > limit {
- results.pop();
- }
- }
- }
- });
- }
- })
- .await;
-
- for scan_task in futures::future::join_all(worktree_scan_tasks).await {
- scan_task.log_err();
- }
-
- project.read_with(cx, |project, cx| {
- let mut search_results = Vec::with_capacity(results_by_worker.len() * limit);
- for worker_results in results_by_worker {
- search_results.extend(worker_results.into_iter().filter_map(|result| {
- Some(SearchResult {
- worktree: project.worktree_for_id(result.worktree_id, cx)?,
- path: result.path,
- range: result.range,
- score: result.score,
- query_index: result.query_index,
- })
- }));
- }
- search_results.sort_unstable_by(|a, b| {
- b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal)
- });
- search_results.truncate(limit);
-
- #[cfg(debug_assertions)]
- {
- let search_elapsed = search_start.elapsed();
- log::debug!(
- "searched {} entries in {:?}",
- search_results.len(),
- search_elapsed
- );
- let embedding_query_elapsed = embedding_query_start.elapsed();
- log::debug!("embedding query took {:?}", embedding_query_elapsed);
- }
-
- search_results
- })
- })
- }
-
- #[cfg(test)]
- pub fn path_count(&self, cx: &App) -> Result<u64> {
- let mut result = 0;
- for worktree_index in self.worktree_indices.values() {
- if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index {
- result += index.read(cx).path_count()?;
- }
- }
- Ok(result)
- }
-
- pub(crate) fn worktree_index(
- &self,
- worktree_id: WorktreeId,
- cx: &App,
- ) -> Option<Entity<WorktreeIndex>> {
- for index in self.worktree_indices.values() {
- if let WorktreeIndexHandle::Loaded { index, .. } = index
- && index.read(cx).worktree().read(cx).id() == worktree_id
- {
- return Some(index.clone());
- }
- }
- None
- }
-
- pub(crate) fn worktree_indices(&self, cx: &App) -> Vec<Entity<WorktreeIndex>> {
- let mut result = self
- .worktree_indices
- .values()
- .filter_map(|index| {
- if let WorktreeIndexHandle::Loaded { index, .. } = index {
- Some(index.clone())
- } else {
- None
- }
- })
- .collect::<Vec<_>>();
- result.sort_by_key(|index| index.read(cx).worktree().read(cx).id());
- result
- }
-
- pub fn all_summaries(&self, cx: &App) -> Task<Result<Vec<FileSummary>>> {
- let (summaries_tx, summaries_rx) = channel::bounded(1024);
- let mut worktree_scan_tasks = Vec::new();
- for worktree_index in self.worktree_indices.values() {
- let worktree_index = worktree_index.clone();
- let summaries_tx: channel::Sender<(String, String)> = summaries_tx.clone();
- worktree_scan_tasks.push(cx.spawn(async move |cx| {
- let index = match worktree_index {
- WorktreeIndexHandle::Loading { index } => {
- index.clone().await.map_err(|error| anyhow!(error))?
- }
- WorktreeIndexHandle::Loaded { index } => index.clone(),
- };
-
- index
- .read_with(cx, |index, cx| {
- let db_connection = index.db_connection().clone();
- let summary_index = index.summary_index();
- let file_digest_db = summary_index.file_digest_db();
- let summary_db = summary_index.summary_db();
-
- cx.background_spawn(async move {
- let txn = db_connection
- .read_txn()
- .context("failed to create db read transaction")?;
- let db_entries = file_digest_db
- .iter(&txn)
- .context("failed to iterate database")?;
- for db_entry in db_entries {
- let (file_path, db_file) = db_entry?;
-
- match summary_db.get(&txn, &db_file.digest) {
- Ok(opt_summary) => {
- // Currently, we only use summaries we already have. If the file hasn't been
- // summarized yet, then we skip it and don't include it in the inferred context.
- // If we want to do just-in-time summarization, this would be the place to do it!
- if let Some(summary) = opt_summary {
- summaries_tx
- .send((file_path.to_string(), summary.to_string()))
- .await?;
- } else {
- log::warn!("No summary found for {:?}", &db_file);
- }
- }
- Err(err) => {
- log::error!(
- "Error reading from summary database: {:?}",
- err
- );
- }
- }
- }
- anyhow::Ok(())
- })
- })?
- .await
- }));
- }
- drop(summaries_tx);
-
- let project = self.project.clone();
- cx.spawn(async move |cx| {
- let mut results_by_worker = Vec::new();
- for _ in 0..cx.background_executor().num_cpus() {
- results_by_worker.push(Vec::<FileSummary>::new());
- }
-
- cx.background_executor()
- .scoped(|cx| {
- for results in results_by_worker.iter_mut() {
- cx.spawn(async {
- while let Ok((filename, summary)) = summaries_rx.recv().await {
- results.push(FileSummary { filename, summary });
- }
- });
- }
- })
- .await;
-
- for scan_task in futures::future::join_all(worktree_scan_tasks).await {
- scan_task.log_err();
- }
-
- project.read_with(cx, |_project, _cx| {
- results_by_worker.into_iter().flatten().collect()
- })
- })
- }
-
- /// Empty out the backlogs of all the worktrees in the project
- pub fn flush_summary_backlogs(&self, cx: &App) -> impl Future<Output = ()> {
- let flush_start = std::time::Instant::now();
-
- futures::future::join_all(self.worktree_indices.values().map(|worktree_index| {
- let worktree_index = worktree_index.clone();
-
- cx.spawn(async move |cx| {
- let index = match worktree_index {
- WorktreeIndexHandle::Loading { index } => {
- index.clone().await.map_err(|error| anyhow!(error))?
- }
- WorktreeIndexHandle::Loaded { index } => index.clone(),
- };
- let worktree_abs_path =
- cx.update(|cx| index.read(cx).worktree().read(cx).abs_path())?;
-
- index
- .read_with(cx, |index, cx| {
- cx.background_spawn(
- index.summary_index().flush_backlog(worktree_abs_path, cx),
- )
- })?
- .await
- })
- }))
- .map(move |results| {
- // Log any errors, but don't block the user. These summaries are supposed to
- // improve quality by providing extra context, but they aren't hard requirements!
- for result in results {
- if let Err(err) = result {
- log::error!("Error flushing summary backlog: {:?}", err);
- }
- }
-
- log::info!("Summary backlog flushed in {:?}", flush_start.elapsed());
- })
- }
-
- pub fn remaining_summaries(&self, cx: &mut Context<Self>) -> usize {
- self.worktree_indices(cx)
- .iter()
- .map(|index| index.read(cx).summary_index().backlog_len())
- .sum()
- }
-}
-
-impl EventEmitter<Status> for ProjectIndex {}
@@ -1,306 +0,0 @@
-use crate::ProjectIndex;
-use gpui::{
- AnyElement, App, CursorStyle, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
- ListOffset, ListState, MouseMoveEvent, Render, UniformListScrollHandle, canvas, div, list,
- uniform_list,
-};
-use project::WorktreeId;
-use settings::Settings;
-use std::{ops::Range, path::Path, sync::Arc};
-use theme::ThemeSettings;
-use ui::prelude::*;
-use workspace::item::Item;
-
-pub struct ProjectIndexDebugView {
- index: Entity<ProjectIndex>,
- rows: Vec<Row>,
- selected_path: Option<PathState>,
- hovered_row_ix: Option<usize>,
- focus_handle: FocusHandle,
- list_scroll_handle: UniformListScrollHandle,
- _subscription: gpui::Subscription,
-}
-
-struct PathState {
- path: Arc<Path>,
- chunks: Vec<SharedString>,
- list_state: ListState,
-}
-
-enum Row {
- Worktree(Arc<Path>),
- Entry(WorktreeId, Arc<Path>),
-}
-
-impl ProjectIndexDebugView {
- pub fn new(index: Entity<ProjectIndex>, window: &mut Window, cx: &mut Context<Self>) -> Self {
- let mut this = Self {
- rows: Vec::new(),
- list_scroll_handle: UniformListScrollHandle::new(),
- selected_path: None,
- hovered_row_ix: None,
- focus_handle: cx.focus_handle(),
- _subscription: cx.subscribe_in(&index, window, |this, _, _, window, cx| {
- this.update_rows(window, cx)
- }),
- index,
- };
- this.update_rows(window, cx);
- this
- }
-
- fn update_rows(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let worktree_indices = self.index.read(cx).worktree_indices(cx);
- cx.spawn_in(window, async move |this, cx| {
- let mut rows = Vec::new();
-
- for index in worktree_indices {
- let (root_path, worktree_id, worktree_paths) =
- index.read_with(cx, |index, cx| {
- let worktree = index.worktree().read(cx);
- (
- worktree.abs_path(),
- worktree.id(),
- index.embedding_index().paths(cx),
- )
- })?;
- rows.push(Row::Worktree(root_path));
- rows.extend(
- worktree_paths
- .await?
- .into_iter()
- .map(|path| Row::Entry(worktree_id, path)),
- );
- }
-
- this.update(cx, |this, cx| {
- this.rows = rows;
- cx.notify();
- })
- })
- .detach();
- }
-
- fn handle_path_click(
- &mut self,
- worktree_id: WorktreeId,
- file_path: Arc<Path>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Option<()> {
- let project_index = self.index.read(cx);
- let fs = project_index.fs().clone();
- let worktree_index = project_index.worktree_index(worktree_id, cx)?.read(cx);
- let root_path = worktree_index.worktree().read(cx).abs_path();
- let chunks = worktree_index
- .embedding_index()
- .chunks_for_path(file_path.clone(), cx);
-
- cx.spawn_in(window, async move |this, cx| {
- let chunks = chunks.await?;
- let content = fs.load(&root_path.join(&file_path)).await?;
- let chunks = chunks
- .into_iter()
- .map(|chunk| {
- let mut start = chunk.chunk.range.start.min(content.len());
- let mut end = chunk.chunk.range.end.min(content.len());
- while !content.is_char_boundary(start) {
- start += 1;
- }
- while !content.is_char_boundary(end) {
- end -= 1;
- }
- content[start..end].to_string().into()
- })
- .collect::<Vec<_>>();
-
- this.update(cx, |this, cx| {
- this.selected_path = Some(PathState {
- path: file_path,
- list_state: ListState::new(chunks.len(), gpui::ListAlignment::Top, px(100.)),
- chunks,
- });
- cx.notify();
- })
- })
- .detach();
- None
- }
-
- fn render_chunk(&mut self, ix: usize, cx: &mut Context<Self>) -> AnyElement {
- let buffer_font = ThemeSettings::get_global(cx).buffer_font.clone();
- let Some(state) = &self.selected_path else {
- return div().into_any();
- };
-
- let colors = cx.theme().colors();
- let chunk = &state.chunks[ix];
-
- div()
- .text_ui(cx)
- .w_full()
- .font(buffer_font)
- .child(
- h_flex()
- .justify_between()
- .child(format!(
- "chunk {} of {}. length: {}",
- ix + 1,
- state.chunks.len(),
- chunk.len(),
- ))
- .child(
- h_flex()
- .child(
- Button::new(("prev", ix), "prev")
- .disabled(ix == 0)
- .on_click(cx.listener(move |this, _, _, _| {
- this.scroll_to_chunk(ix.saturating_sub(1))
- })),
- )
- .child(
- Button::new(("next", ix), "next")
- .disabled(ix + 1 == state.chunks.len())
- .on_click(cx.listener(move |this, _, _, _| {
- this.scroll_to_chunk(ix + 1)
- })),
- ),
- ),
- )
- .child(
- div()
- .bg(colors.editor_background)
- .text_xs()
- .child(chunk.clone()),
- )
- .into_any_element()
- }
-
- fn scroll_to_chunk(&mut self, ix: usize) {
- if let Some(state) = self.selected_path.as_mut() {
- state.list_state.scroll_to(ListOffset {
- item_ix: ix,
- offset_in_item: px(0.),
- })
- }
- }
-}
-
-impl Render for ProjectIndexDebugView {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- if let Some(selected_path) = self.selected_path.as_ref() {
- v_flex()
- .child(
- div()
- .id("selected-path-name")
- .child(
- h_flex()
- .justify_between()
- .child(selected_path.path.to_string_lossy().to_string())
- .child("x"),
- )
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .cursor(CursorStyle::PointingHand)
- .on_click(cx.listener(|this, _, _, cx| {
- this.selected_path.take();
- cx.notify();
- })),
- )
- .child(
- list(
- selected_path.list_state.clone(),
- cx.processor(|this, ix, _, cx| this.render_chunk(ix, cx)),
- )
- .size_full(),
- )
- .size_full()
- .into_any_element()
- } else {
- let mut list = uniform_list(
- "ProjectIndexDebugView",
- self.rows.len(),
- cx.processor(move |this, range: Range<usize>, _, cx| {
- this.rows[range]
- .iter()
- .enumerate()
- .map(|(ix, row)| match row {
- Row::Worktree(root_path) => div()
- .id(ix)
- .child(Label::new(root_path.to_string_lossy().to_string())),
- Row::Entry(worktree_id, file_path) => div()
- .id(ix)
- .pl_8()
- .child(Label::new(file_path.to_string_lossy().to_string()))
- .on_mouse_move(cx.listener(
- move |this, _: &MouseMoveEvent, _, cx| {
- if this.hovered_row_ix != Some(ix) {
- this.hovered_row_ix = Some(ix);
- cx.notify();
- }
- },
- ))
- .cursor(CursorStyle::PointingHand)
- .on_click(cx.listener({
- let worktree_id = *worktree_id;
- let file_path = file_path.clone();
- move |this, _, window, cx| {
- this.handle_path_click(
- worktree_id,
- file_path.clone(),
- window,
- cx,
- );
- }
- })),
- })
- .collect()
- }),
- )
- .track_scroll(self.list_scroll_handle.clone())
- .size_full()
- .text_bg(cx.theme().colors().background)
- .into_any_element();
-
- canvas(
- move |bounds, window, cx| {
- list.prepaint_as_root(bounds.origin, bounds.size.into(), window, cx);
- list
- },
- |_, mut list, window, cx| {
- list.paint(window, cx);
- },
- )
- .size_full()
- .into_any_element()
- }
- }
-}
-
-impl EventEmitter<()> for ProjectIndexDebugView {}
-
-impl Item for ProjectIndexDebugView {
- type Event = ();
-
- fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
- "Project Index (Debug)".into()
- }
-
- fn clone_on_split(
- &self,
- _: Option<workspace::WorkspaceId>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Option<Entity<Self>>
- where
- Self: Sized,
- {
- Some(cx.new(|cx| Self::new(self.index.clone(), window, cx)))
- }
-}
-
-impl Focusable for ProjectIndexDebugView {
- fn focus_handle(&self, _: &App) -> gpui::FocusHandle {
- self.focus_handle.clone()
- }
-}
@@ -1,632 +0,0 @@
-mod chunking;
-mod embedding;
-mod embedding_index;
-mod indexing;
-mod project_index;
-mod project_index_debug_view;
-mod summary_backlog;
-mod summary_index;
-mod worktree_index;
-
-use anyhow::{Context as _, Result};
-use collections::HashMap;
-use fs::Fs;
-use gpui::{App, AppContext as _, AsyncApp, BorrowAppContext, Context, Entity, Global, WeakEntity};
-use language::LineEnding;
-use project::{Project, Worktree};
-use std::{
- cmp::Ordering,
- path::{Path, PathBuf},
- sync::Arc,
-};
-use util::ResultExt as _;
-use workspace::Workspace;
-
-pub use embedding::*;
-pub use project_index::{LoadedSearchResult, ProjectIndex, SearchResult, Status};
-pub use project_index_debug_view::ProjectIndexDebugView;
-pub use summary_index::FileSummary;
-
-pub struct SemanticDb {
- embedding_provider: Arc<dyn EmbeddingProvider>,
- db_connection: Option<heed::Env>,
- project_indices: HashMap<WeakEntity<Project>, Entity<ProjectIndex>>,
-}
-
-impl Global for SemanticDb {}
-
-impl SemanticDb {
- pub async fn new(
- db_path: PathBuf,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- cx: &mut AsyncApp,
- ) -> Result<Self> {
- let db_connection = cx
- .background_spawn(async move {
- std::fs::create_dir_all(&db_path)?;
- unsafe {
- heed::EnvOpenOptions::new()
- .map_size(1024 * 1024 * 1024)
- .max_dbs(3000)
- .open(db_path)
- }
- })
- .await
- .context("opening database connection")?;
-
- cx.update(|cx| {
- cx.observe_new(
- |workspace: &mut Workspace, _window, cx: &mut Context<Workspace>| {
- let project = workspace.project().clone();
-
- if cx.has_global::<SemanticDb>() {
- cx.update_global::<SemanticDb, _>(|this, cx| {
- this.create_project_index(project, cx);
- })
- } else {
- log::info!("No SemanticDb, skipping project index")
- }
- },
- )
- .detach();
- })
- .ok();
-
- Ok(SemanticDb {
- db_connection: Some(db_connection),
- embedding_provider,
- project_indices: HashMap::default(),
- })
- }
-
- pub async fn load_results(
- mut results: Vec<SearchResult>,
- fs: &Arc<dyn Fs>,
- cx: &AsyncApp,
- ) -> Result<Vec<LoadedSearchResult>> {
- let mut max_scores_by_path = HashMap::<_, (f32, usize)>::default();
- for result in &results {
- let (score, query_index) = max_scores_by_path
- .entry((result.worktree.clone(), result.path.clone()))
- .or_default();
- if result.score > *score {
- *score = result.score;
- *query_index = result.query_index;
- }
- }
-
- results.sort_by(|a, b| {
- let max_score_a = max_scores_by_path[&(a.worktree.clone(), a.path.clone())].0;
- let max_score_b = max_scores_by_path[&(b.worktree.clone(), b.path.clone())].0;
- max_score_b
- .partial_cmp(&max_score_a)
- .unwrap_or(Ordering::Equal)
- .then_with(|| a.worktree.entity_id().cmp(&b.worktree.entity_id()))
- .then_with(|| a.path.cmp(&b.path))
- .then_with(|| a.range.start.cmp(&b.range.start))
- });
-
- let mut last_loaded_file: Option<(Entity<Worktree>, Arc<Path>, PathBuf, String)> = None;
- let mut loaded_results = Vec::<LoadedSearchResult>::new();
- for result in results {
- let full_path;
- let file_content;
- if let Some(last_loaded_file) =
- last_loaded_file
- .as_ref()
- .filter(|(last_worktree, last_path, _, _)| {
- last_worktree == &result.worktree && last_path == &result.path
- })
- {
- full_path = last_loaded_file.2.clone();
- file_content = &last_loaded_file.3;
- } else {
- let output = result.worktree.read_with(cx, |worktree, _cx| {
- let entry_abs_path = worktree.abs_path().join(&result.path);
- let mut entry_full_path = PathBuf::from(worktree.root_name());
- entry_full_path.push(&result.path);
- let file_content = async {
- let entry_abs_path = entry_abs_path;
- fs.load(&entry_abs_path).await
- };
- (entry_full_path, file_content)
- })?;
- full_path = output.0;
- let Some(content) = output.1.await.log_err() else {
- continue;
- };
- last_loaded_file = Some((
- result.worktree.clone(),
- result.path.clone(),
- full_path.clone(),
- content,
- ));
- file_content = &last_loaded_file.as_ref().unwrap().3;
- };
-
- let query_index = max_scores_by_path[&(result.worktree.clone(), result.path.clone())].1;
-
- let mut range_start = result.range.start.min(file_content.len());
- let mut range_end = result.range.end.min(file_content.len());
- while !file_content.is_char_boundary(range_start) {
- range_start += 1;
- }
- while !file_content.is_char_boundary(range_end) {
- range_end += 1;
- }
-
- let start_row = file_content[0..range_start].matches('\n').count() as u32;
- let mut end_row = file_content[0..range_end].matches('\n').count() as u32;
- let start_line_byte_offset = file_content[0..range_start]
- .rfind('\n')
- .map(|pos| pos + 1)
- .unwrap_or_default();
- let mut end_line_byte_offset = range_end;
- if file_content[..end_line_byte_offset].ends_with('\n') {
- end_row -= 1;
- } else {
- end_line_byte_offset = file_content[range_end..]
- .find('\n')
- .map(|pos| range_end + pos + 1)
- .unwrap_or_else(|| file_content.len());
- }
- let mut excerpt_content =
- file_content[start_line_byte_offset..end_line_byte_offset].to_string();
- LineEnding::normalize(&mut excerpt_content);
-
- if let Some(prev_result) = loaded_results.last_mut()
- && prev_result.full_path == full_path
- && *prev_result.row_range.end() + 1 == start_row
- {
- prev_result.row_range = *prev_result.row_range.start()..=end_row;
- prev_result.excerpt_content.push_str(&excerpt_content);
- continue;
- }
-
- loaded_results.push(LoadedSearchResult {
- path: result.path,
- full_path,
- excerpt_content,
- row_range: start_row..=end_row,
- query_index,
- });
- }
-
- for result in &mut loaded_results {
- while result.excerpt_content.ends_with("\n\n") {
- result.excerpt_content.pop();
- result.row_range =
- *result.row_range.start()..=result.row_range.end().saturating_sub(1)
- }
- }
-
- Ok(loaded_results)
- }
-
- pub fn project_index(
- &mut self,
- project: Entity<Project>,
- _cx: &mut App,
- ) -> Option<Entity<ProjectIndex>> {
- self.project_indices.get(&project.downgrade()).cloned()
- }
-
- pub fn remaining_summaries(
- &self,
- project: &WeakEntity<Project>,
- cx: &mut App,
- ) -> Option<usize> {
- self.project_indices.get(project).map(|project_index| {
- project_index.update(cx, |project_index, cx| {
- project_index.remaining_summaries(cx)
- })
- })
- }
-
- pub fn create_project_index(
- &mut self,
- project: Entity<Project>,
- cx: &mut App,
- ) -> Entity<ProjectIndex> {
- let project_index = cx.new(|cx| {
- ProjectIndex::new(
- project.clone(),
- self.db_connection.clone().unwrap(),
- self.embedding_provider.clone(),
- cx,
- )
- });
-
- let project_weak = project.downgrade();
- self.project_indices
- .insert(project_weak.clone(), project_index.clone());
-
- cx.observe_release(&project, move |_, cx| {
- if cx.has_global::<SemanticDb>() {
- cx.update_global::<SemanticDb, _>(|this, _| {
- this.project_indices.remove(&project_weak);
- })
- }
- })
- .detach();
-
- project_index
- }
-}
-
-impl Drop for SemanticDb {
- fn drop(&mut self) {
- self.db_connection.take().unwrap().prepare_for_closing();
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use chunking::Chunk;
- use embedding_index::{ChunkedFile, EmbeddingIndex};
- use feature_flags::FeatureFlagAppExt;
- use fs::FakeFs;
- use futures::{FutureExt, future::BoxFuture};
- use gpui::TestAppContext;
- use indexing::IndexingEntrySet;
- use language::language_settings::AllLanguageSettings;
- use project::{Project, ProjectEntryId};
- use serde_json::json;
- use settings::SettingsStore;
- use smol::channel;
- use std::{future, path::Path, sync::Arc};
- use util::path;
-
- fn init_test(cx: &mut TestAppContext) {
- zlog::init_test();
-
- cx.update(|cx| {
- let store = SettingsStore::test(cx);
- cx.set_global(store);
- language::init(cx);
- cx.update_flags(false, vec![]);
- Project::init_settings(cx);
- SettingsStore::update(cx, |store, cx| {
- store.update_user_settings::<AllLanguageSettings>(cx, |_| {});
- });
- });
- }
-
- pub struct TestEmbeddingProvider {
- batch_size: usize,
- compute_embedding: Box<dyn Fn(&str) -> Result<Embedding> + Send + Sync>,
- }
-
- impl TestEmbeddingProvider {
- pub fn new(
- batch_size: usize,
- compute_embedding: impl 'static + Fn(&str) -> Result<Embedding> + Send + Sync,
- ) -> Self {
- Self {
- batch_size,
- compute_embedding: Box::new(compute_embedding),
- }
- }
- }
-
- impl EmbeddingProvider for TestEmbeddingProvider {
- fn embed<'a>(
- &'a self,
- texts: &'a [TextToEmbed<'a>],
- ) -> BoxFuture<'a, Result<Vec<Embedding>>> {
- let embeddings = texts
- .iter()
- .map(|to_embed| (self.compute_embedding)(to_embed.text))
- .collect();
- future::ready(embeddings).boxed()
- }
-
- fn batch_size(&self) -> usize {
- self.batch_size
- }
- }
-
- #[gpui::test]
- async fn test_search(cx: &mut TestAppContext) {
- cx.executor().allow_parking();
-
- init_test(cx);
-
- cx.update(|cx| {
- // This functionality is staff-flagged.
- cx.update_flags(true, vec![]);
- });
-
- let temp_dir = tempfile::tempdir().unwrap();
-
- let mut semantic_index = SemanticDb::new(
- temp_dir.path().into(),
- Arc::new(TestEmbeddingProvider::new(16, |text| {
- let mut embedding = vec![0f32; 2];
- // if the text contains garbage, give it a 1 in the first dimension
- if text.contains("garbage in") {
- embedding[0] = 0.9;
- } else {
- embedding[0] = -0.9;
- }
-
- if text.contains("garbage out") {
- embedding[1] = 0.9;
- } else {
- embedding[1] = -0.9;
- }
-
- Ok(Embedding::new(embedding))
- })),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- let fs = FakeFs::new(cx.executor());
- let project_path = Path::new("/fake_project");
-
- fs.insert_tree(
- project_path,
- json!({
- "fixture": {
- "main.rs": include_str!("../fixture/main.rs"),
- "needle.md": include_str!("../fixture/needle.md"),
- }
- }),
- )
- .await;
-
- let project = Project::test(fs, [project_path], cx).await;
-
- let project_index = cx.update(|cx| {
- let language_registry = project.read(cx).languages().clone();
- let node_runtime = project.read(cx).node_runtime().unwrap().clone();
- languages::init(language_registry, node_runtime, cx);
- semantic_index.create_project_index(project.clone(), cx)
- });
-
- cx.run_until_parked();
- while cx
- .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx))
- .unwrap()
- > 0
- {
- cx.run_until_parked();
- }
-
- let results = cx
- .update(|cx| {
- let project_index = project_index.read(cx);
- let query = "garbage in, garbage out";
- project_index.search(vec![query.into()], 4, cx)
- })
- .await
- .unwrap();
-
- assert!(
- results.len() > 1,
- "should have found some results, but only found {:?}",
- results
- );
-
- for result in &results {
- println!("result: {:?}", result.path);
- println!("score: {:?}", result.score);
- }
-
- // Find result that is greater than 0.5
- let search_result = results.iter().find(|result| result.score > 0.9).unwrap();
-
- assert_eq!(
- search_result.path.to_string_lossy(),
- path!("fixture/needle.md")
- );
-
- let content = cx
- .update(|cx| {
- let worktree = search_result.worktree.read(cx);
- let entry_abs_path = worktree.abs_path().join(&search_result.path);
- let fs = project.read(cx).fs().clone();
- cx.background_spawn(async move { fs.load(&entry_abs_path).await.unwrap() })
- })
- .await;
-
- let range = search_result.range.clone();
- let content = content[range.clone()].to_owned();
-
- assert!(content.contains("garbage in, garbage out"));
- }
-
- #[gpui::test]
- async fn test_embed_files(cx: &mut TestAppContext) {
- cx.executor().allow_parking();
-
- let provider = Arc::new(TestEmbeddingProvider::new(3, |text| {
- anyhow::ensure!(
- !text.contains('g'),
- "cannot embed text containing a 'g' character"
- );
- Ok(Embedding::new(
- ('a'..='z')
- .map(|char| text.chars().filter(|c| *c == char).count() as f32)
- .collect(),
- ))
- }));
-
- let (indexing_progress_tx, _) = channel::unbounded();
- let indexing_entries = Arc::new(IndexingEntrySet::new(indexing_progress_tx));
-
- let (chunked_files_tx, chunked_files_rx) = channel::unbounded::<ChunkedFile>();
- chunked_files_tx
- .send_blocking(ChunkedFile {
- path: Path::new("test1.md").into(),
- mtime: None,
- handle: indexing_entries.insert(ProjectEntryId::from_proto(0)),
- text: "abcdefghijklmnop".to_string(),
- chunks: [0..4, 4..8, 8..12, 12..16]
- .into_iter()
- .map(|range| Chunk {
- range,
- digest: Default::default(),
- })
- .collect(),
- })
- .unwrap();
- chunked_files_tx
- .send_blocking(ChunkedFile {
- path: Path::new("test2.md").into(),
- mtime: None,
- handle: indexing_entries.insert(ProjectEntryId::from_proto(1)),
- text: "qrstuvwxyz".to_string(),
- chunks: [0..4, 4..8, 8..10]
- .into_iter()
- .map(|range| Chunk {
- range,
- digest: Default::default(),
- })
- .collect(),
- })
- .unwrap();
- chunked_files_tx.close();
-
- let embed_files_task =
- cx.update(|cx| EmbeddingIndex::embed_files(provider.clone(), chunked_files_rx, cx));
- embed_files_task.task.await.unwrap();
-
- let embedded_files_rx = embed_files_task.files;
- let mut embedded_files = Vec::new();
- while let Ok((embedded_file, _)) = embedded_files_rx.recv().await {
- embedded_files.push(embedded_file);
- }
-
- assert_eq!(embedded_files.len(), 1);
- assert_eq!(embedded_files[0].path.as_ref(), Path::new("test2.md"));
- assert_eq!(
- embedded_files[0]
- .chunks
- .iter()
- .map(|embedded_chunk| { embedded_chunk.embedding.clone() })
- .collect::<Vec<Embedding>>(),
- vec![
- (provider.compute_embedding)("qrst").unwrap(),
- (provider.compute_embedding)("uvwx").unwrap(),
- (provider.compute_embedding)("yz").unwrap(),
- ],
- );
- }
-
- #[gpui::test]
- async fn test_load_search_results(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- let project_path = Path::new("/fake_project");
-
- let file1_content = "one\ntwo\nthree\nfour\nfive\n";
- let file2_content = "aaa\nbbb\nccc\nddd\neee\n";
-
- fs.insert_tree(
- project_path,
- json!({
- "file1.txt": file1_content,
- "file2.txt": file2_content,
- }),
- )
- .await;
-
- let fs = fs as Arc<dyn Fs>;
- let project = Project::test(fs.clone(), [project_path], cx).await;
- let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-
- // chunk that is already newline-aligned
- let search_results = vec![SearchResult {
- worktree: worktree.clone(),
- path: Path::new("file1.txt").into(),
- range: 0..file1_content.find("four").unwrap(),
- score: 0.5,
- query_index: 0,
- }];
- assert_eq!(
- SemanticDb::load_results(search_results, &fs, &cx.to_async())
- .await
- .unwrap(),
- &[LoadedSearchResult {
- path: Path::new("file1.txt").into(),
- full_path: "fake_project/file1.txt".into(),
- excerpt_content: "one\ntwo\nthree\n".into(),
- row_range: 0..=2,
- query_index: 0,
- }]
- );
-
- // chunk that is *not* newline-aligned
- let search_results = vec![SearchResult {
- worktree: worktree.clone(),
- path: Path::new("file1.txt").into(),
- range: file1_content.find("two").unwrap() + 1..file1_content.find("four").unwrap() + 2,
- score: 0.5,
- query_index: 0,
- }];
- assert_eq!(
- SemanticDb::load_results(search_results, &fs, &cx.to_async())
- .await
- .unwrap(),
- &[LoadedSearchResult {
- path: Path::new("file1.txt").into(),
- full_path: "fake_project/file1.txt".into(),
- excerpt_content: "two\nthree\nfour\n".into(),
- row_range: 1..=3,
- query_index: 0,
- }]
- );
-
- // chunks that are adjacent
-
- let search_results = vec![
- SearchResult {
- worktree: worktree.clone(),
- path: Path::new("file1.txt").into(),
- range: file1_content.find("two").unwrap()..file1_content.len(),
- score: 0.6,
- query_index: 0,
- },
- SearchResult {
- worktree: worktree.clone(),
- path: Path::new("file1.txt").into(),
- range: 0..file1_content.find("two").unwrap(),
- score: 0.5,
- query_index: 1,
- },
- SearchResult {
- worktree: worktree.clone(),
- path: Path::new("file2.txt").into(),
- range: 0..file2_content.len(),
- score: 0.8,
- query_index: 1,
- },
- ];
- assert_eq!(
- SemanticDb::load_results(search_results, &fs, &cx.to_async())
- .await
- .unwrap(),
- &[
- LoadedSearchResult {
- path: Path::new("file2.txt").into(),
- full_path: "fake_project/file2.txt".into(),
- excerpt_content: file2_content.into(),
- row_range: 0..=4,
- query_index: 1,
- },
- LoadedSearchResult {
- path: Path::new("file1.txt").into(),
- full_path: "fake_project/file1.txt".into(),
- excerpt_content: file1_content.into(),
- row_range: 0..=4,
- query_index: 0,
- }
- ]
- );
- }
-}
@@ -1,49 +0,0 @@
-use collections::HashMap;
-use fs::MTime;
-use std::{path::Path, sync::Arc};
-
-const MAX_FILES_BEFORE_RESUMMARIZE: usize = 4;
-const MAX_BYTES_BEFORE_RESUMMARIZE: u64 = 1_000_000; // 1 MB
-
-#[derive(Default, Debug)]
-pub struct SummaryBacklog {
- /// Key: path to a file that needs summarization, but that we haven't summarized yet. Value: that file's size on disk, in bytes, and its mtime.
- files: HashMap<Arc<Path>, (u64, Option<MTime>)>,
- /// Cache of the sum of all values in `files`, so we don't have to traverse the whole map to check if we're over the byte limit.
- total_bytes: u64,
-}
-
-impl SummaryBacklog {
- /// Store the given path in the backlog, along with how many bytes are in it.
- pub fn insert(&mut self, path: Arc<Path>, bytes_on_disk: u64, mtime: Option<MTime>) {
- let (prev_bytes, _) = self
- .files
- .insert(path, (bytes_on_disk, mtime))
- .unwrap_or_default(); // Default to 0 prev_bytes
-
- // Update the cached total by subtracting out the old amount and adding the new one.
- self.total_bytes = self.total_bytes - prev_bytes + bytes_on_disk;
- }
-
- /// Returns true if the total number of bytes in the backlog exceeds a predefined threshold.
- pub fn needs_drain(&self) -> bool {
- self.files.len() > MAX_FILES_BEFORE_RESUMMARIZE ||
- // The whole purpose of the cached total_bytes is to make this comparison cheap.
- // Otherwise we'd have to traverse the entire dictionary every time we wanted this answer.
- self.total_bytes > MAX_BYTES_BEFORE_RESUMMARIZE
- }
-
- /// Remove all the entries in the backlog and return the file paths as an iterator.
- #[allow(clippy::needless_lifetimes)] // Clippy thinks this 'a can be elided, but eliding it gives a compile error
- pub fn drain<'a>(&'a mut self) -> impl Iterator<Item = (Arc<Path>, Option<MTime>)> + 'a {
- self.total_bytes = 0;
-
- self.files
- .drain()
- .map(|(path, (_size, mtime))| (path, mtime))
- }
-
- pub fn len(&self) -> usize {
- self.files.len()
- }
-}
@@ -1,696 +0,0 @@
-use anyhow::{Context as _, Result, anyhow};
-use arrayvec::ArrayString;
-use fs::{Fs, MTime};
-use futures::{TryFutureExt, stream::StreamExt};
-use futures_batch::ChunksTimeoutStreamExt;
-use gpui::{App, AppContext as _, Entity, Task};
-use heed::{
- RoTxn,
- types::{SerdeBincode, Str},
-};
-use language_model::{
- LanguageModelCompletionEvent, LanguageModelId, LanguageModelRegistry, LanguageModelRequest,
- LanguageModelRequestMessage, Role,
-};
-use log;
-use parking_lot::Mutex;
-use project::{Entry, UpdatedEntriesSet, Worktree};
-use serde::{Deserialize, Serialize};
-use smol::channel;
-use std::{
- future::Future,
- path::Path,
- pin::pin,
- sync::Arc,
- time::{Duration, Instant},
-};
-use util::ResultExt;
-use worktree::Snapshot;
-
-use crate::{indexing::IndexingEntrySet, summary_backlog::SummaryBacklog};
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct FileSummary {
- pub filename: String,
- pub summary: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct UnsummarizedFile {
- // Path to the file on disk
- path: Arc<Path>,
- // The mtime of the file on disk
- mtime: Option<MTime>,
- // BLAKE3 hash of the source file's contents
- digest: Blake3Digest,
- // The source file's contents
- contents: String,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-struct SummarizedFile {
- // Path to the file on disk
- path: String,
- // The mtime of the file on disk
- mtime: Option<MTime>,
- // BLAKE3 hash of the source file's contents
- digest: Blake3Digest,
- // The LLM's summary of the file's contents
- summary: String,
-}
-
-/// This is what blake3's to_hex() method returns - see https://docs.rs/blake3/1.5.3/src/blake3/lib.rs.html#246
-pub type Blake3Digest = ArrayString<{ blake3::OUT_LEN * 2 }>;
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct FileDigest {
- pub mtime: Option<MTime>,
- pub digest: Blake3Digest,
-}
-
-struct NeedsSummary {
- files: channel::Receiver<UnsummarizedFile>,
- task: Task<Result<()>>,
-}
-
-struct SummarizeFiles {
- files: channel::Receiver<SummarizedFile>,
- task: Task<Result<()>>,
-}
-
-pub struct SummaryIndex {
- worktree: Entity<Worktree>,
- fs: Arc<dyn Fs>,
- db_connection: heed::Env,
- file_digest_db: heed::Database<Str, SerdeBincode<FileDigest>>, // Key: file path. Val: BLAKE3 digest of its contents.
- summary_db: heed::Database<SerdeBincode<Blake3Digest>, Str>, // Key: BLAKE3 digest of a file's contents. Val: LLM summary of those contents.
- backlog: Arc<Mutex<SummaryBacklog>>,
- _entry_ids_being_indexed: Arc<IndexingEntrySet>, // TODO can this be removed?
-}
-
-struct Backlogged {
- paths_to_digest: channel::Receiver<Vec<(Arc<Path>, Option<MTime>)>>,
- task: Task<Result<()>>,
-}
-
-struct MightNeedSummaryFiles {
- files: channel::Receiver<UnsummarizedFile>,
- task: Task<Result<()>>,
-}
-
-impl SummaryIndex {
- pub fn new(
- worktree: Entity<Worktree>,
- fs: Arc<dyn Fs>,
- db_connection: heed::Env,
- file_digest_db: heed::Database<Str, SerdeBincode<FileDigest>>,
- summary_db: heed::Database<SerdeBincode<Blake3Digest>, Str>,
- _entry_ids_being_indexed: Arc<IndexingEntrySet>,
- ) -> Self {
- Self {
- worktree,
- fs,
- db_connection,
- file_digest_db,
- summary_db,
- _entry_ids_being_indexed,
- backlog: Default::default(),
- }
- }
-
- pub fn file_digest_db(&self) -> heed::Database<Str, SerdeBincode<FileDigest>> {
- self.file_digest_db
- }
-
- pub fn summary_db(&self) -> heed::Database<SerdeBincode<Blake3Digest>, Str> {
- self.summary_db
- }
-
- pub fn index_entries_changed_on_disk(
- &self,
- is_auto_available: bool,
- cx: &App,
- ) -> impl Future<Output = Result<()>> + use<> {
- let start = Instant::now();
- let backlogged;
- let digest;
- let needs_summary;
- let summaries;
- let persist;
-
- if is_auto_available {
- let worktree = self.worktree.read(cx).snapshot();
- let worktree_abs_path = worktree.abs_path().clone();
-
- backlogged = self.scan_entries(worktree, cx);
- digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx);
- needs_summary = self.check_summary_cache(digest.files, cx);
- summaries = self.summarize_files(needs_summary.files, cx);
- persist = self.persist_summaries(summaries.files, cx);
- } else {
- // This feature is only staff-shipped, so make the rest of these no-ops.
- backlogged = Backlogged {
- paths_to_digest: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- digest = MightNeedSummaryFiles {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- needs_summary = NeedsSummary {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- summaries = SummarizeFiles {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- persist = Task::ready(Ok(()));
- }
-
- async move {
- futures::try_join!(
- backlogged.task,
- digest.task,
- needs_summary.task,
- summaries.task,
- persist
- )?;
-
- if is_auto_available {
- log::info!(
- "Summarizing everything that changed on disk took {:?}",
- start.elapsed()
- );
- }
-
- Ok(())
- }
- }
-
- pub fn index_updated_entries(
- &mut self,
- updated_entries: UpdatedEntriesSet,
- is_auto_available: bool,
- cx: &App,
- ) -> impl Future<Output = Result<()>> + use<> {
- let start = Instant::now();
- let backlogged;
- let digest;
- let needs_summary;
- let summaries;
- let persist;
-
- if is_auto_available {
- let worktree = self.worktree.read(cx).snapshot();
- let worktree_abs_path = worktree.abs_path().clone();
-
- backlogged = self.scan_updated_entries(worktree, updated_entries.clone(), cx);
- digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx);
- needs_summary = self.check_summary_cache(digest.files, cx);
- summaries = self.summarize_files(needs_summary.files, cx);
- persist = self.persist_summaries(summaries.files, cx);
- } else {
- // This feature is only staff-shipped, so make the rest of these no-ops.
- backlogged = Backlogged {
- paths_to_digest: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- digest = MightNeedSummaryFiles {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- needs_summary = NeedsSummary {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- summaries = SummarizeFiles {
- files: channel::unbounded().1,
- task: Task::ready(Ok(())),
- };
- persist = Task::ready(Ok(()));
- }
-
- async move {
- futures::try_join!(
- backlogged.task,
- digest.task,
- needs_summary.task,
- summaries.task,
- persist
- )?;
-
- log::debug!("Summarizing updated entries took {:?}", start.elapsed());
-
- Ok(())
- }
- }
-
- fn check_summary_cache(
- &self,
- might_need_summary: channel::Receiver<UnsummarizedFile>,
- cx: &App,
- ) -> NeedsSummary {
- let db_connection = self.db_connection.clone();
- let db = self.summary_db;
- let (needs_summary_tx, needs_summary_rx) = channel::bounded(512);
- let task = cx.background_spawn(async move {
- let mut might_need_summary = pin!(might_need_summary);
- while let Some(file) = might_need_summary.next().await {
- let tx = db_connection
- .read_txn()
- .context("Failed to create read transaction for checking which hashes are in summary cache")?;
-
- match db.get(&tx, &file.digest) {
- Ok(opt_answer) => {
- if opt_answer.is_none() {
- // It's not in the summary cache db, so we need to summarize it.
- log::debug!("File {:?} (digest {:?}) was NOT in the db cache and needs to be resummarized.", file.path.display(), &file.digest);
- needs_summary_tx.send(file).await?;
- } else {
- log::debug!("File {:?} (digest {:?}) was in the db cache and does not need to be resummarized.", file.path.display(), &file.digest);
- }
- }
- Err(err) => {
- log::error!("Reading from the summaries database failed: {:?}", err);
- }
- }
- }
-
- Ok(())
- });
-
- NeedsSummary {
- files: needs_summary_rx,
- task,
- }
- }
-
- fn scan_entries(&self, worktree: Snapshot, cx: &App) -> Backlogged {
- let (tx, rx) = channel::bounded(512);
- let db_connection = self.db_connection.clone();
- let digest_db = self.file_digest_db;
- let backlog = Arc::clone(&self.backlog);
- let task = cx.background_spawn(async move {
- let txn = db_connection
- .read_txn()
- .context("failed to create read transaction")?;
-
- for entry in worktree.files(false, 0) {
- let needs_summary =
- Self::add_to_backlog(Arc::clone(&backlog), digest_db, &txn, entry);
-
- if !needs_summary.is_empty() {
- tx.send(needs_summary).await?;
- }
- }
-
- // TODO delete db entries for deleted files
-
- Ok(())
- });
-
- Backlogged {
- paths_to_digest: rx,
- task,
- }
- }
-
- fn add_to_backlog(
- backlog: Arc<Mutex<SummaryBacklog>>,
- digest_db: heed::Database<Str, SerdeBincode<FileDigest>>,
- txn: &RoTxn<'_>,
- entry: &Entry,
- ) -> Vec<(Arc<Path>, Option<MTime>)> {
- let entry_db_key = db_key_for_path(&entry.path);
-
- match digest_db.get(txn, &entry_db_key) {
- Ok(opt_saved_digest) => {
- // The file path is the same, but the mtime is different. (Or there was no mtime.)
- // It needs updating, so add it to the backlog! Then, if the backlog is full, drain it and summarize its contents.
- if entry.mtime != opt_saved_digest.and_then(|digest| digest.mtime) {
- let mut backlog = backlog.lock();
-
- log::info!(
- "Inserting {:?} ({:?} bytes) into backlog",
- &entry.path,
- entry.size,
- );
- backlog.insert(Arc::clone(&entry.path), entry.size, entry.mtime);
-
- if backlog.needs_drain() {
- log::info!("Draining summary backlog...");
- return backlog.drain().collect();
- }
- }
- }
- Err(err) => {
- log::error!(
- "Error trying to get file digest db entry {:?}: {:?}",
- &entry_db_key,
- err
- );
- }
- }
-
- Vec::new()
- }
-
- fn scan_updated_entries(
- &self,
- worktree: Snapshot,
- updated_entries: UpdatedEntriesSet,
- cx: &App,
- ) -> Backlogged {
- log::info!("Scanning for updated entries that might need summarization...");
- let (tx, rx) = channel::bounded(512);
- // let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
- let db_connection = self.db_connection.clone();
- let digest_db = self.file_digest_db;
- let backlog = Arc::clone(&self.backlog);
- let task = cx.background_spawn(async move {
- let txn = db_connection
- .read_txn()
- .context("failed to create read transaction")?;
-
- for (path, entry_id, status) in updated_entries.iter() {
- match status {
- project::PathChange::Loaded
- | project::PathChange::Added
- | project::PathChange::Updated
- | project::PathChange::AddedOrUpdated => {
- if let Some(entry) = worktree.entry_for_id(*entry_id)
- && entry.is_file()
- {
- let needs_summary =
- Self::add_to_backlog(Arc::clone(&backlog), digest_db, &txn, entry);
-
- if !needs_summary.is_empty() {
- tx.send(needs_summary).await?;
- }
- }
- }
- project::PathChange::Removed => {
- let _db_path = db_key_for_path(path);
- // TODO delete db entries for deleted files
- // deleted_entry_ranges_tx
- // .send((Bound::Included(db_path.clone()), Bound::Included(db_path)))
- // .await?;
- }
- }
- }
-
- Ok(())
- });
-
- Backlogged {
- paths_to_digest: rx,
- // deleted_entry_ranges: deleted_entry_ranges_rx,
- task,
- }
- }
-
- fn digest_files(
- &self,
- paths: channel::Receiver<Vec<(Arc<Path>, Option<MTime>)>>,
- worktree_abs_path: Arc<Path>,
- cx: &App,
- ) -> MightNeedSummaryFiles {
- let fs = self.fs.clone();
- let (rx, tx) = channel::bounded(2048);
- let task = cx.spawn(async move |cx| {
- cx.background_executor()
- .scoped(|cx| {
- for _ in 0..cx.num_cpus() {
- cx.spawn(async {
- while let Ok(pairs) = paths.recv().await {
- // Note: we could process all these files concurrently if desired. Might or might not speed things up.
- for (path, mtime) in pairs {
- let entry_abs_path = worktree_abs_path.join(&path);
-
- // Load the file's contents and compute its hash digest.
- let unsummarized_file = {
- let Some(contents) = fs
- .load(&entry_abs_path)
- .await
- .with_context(|| {
- format!("failed to read path {entry_abs_path:?}")
- })
- .log_err()
- else {
- continue;
- };
-
- let digest = {
- let mut hasher = blake3::Hasher::new();
- // Incorporate both the (relative) file path as well as the contents of the file into the hash.
- // This is because in some languages and frameworks, identical files can do different things
- // depending on their paths (e.g. Rails controllers). It's also why we send the path to the model.
- hasher.update(path.display().to_string().as_bytes());
- hasher.update(contents.as_bytes());
- hasher.finalize().to_hex()
- };
-
- UnsummarizedFile {
- digest,
- contents,
- path,
- mtime,
- }
- };
-
- if let Err(err) = rx
- .send(unsummarized_file)
- .map_err(|error| anyhow!(error))
- .await
- {
- log::error!("Error: {:?}", err);
-
- return;
- }
- }
- }
- });
- }
- })
- .await;
- Ok(())
- });
-
- MightNeedSummaryFiles { files: tx, task }
- }
-
- fn summarize_files(
- &self,
- unsummarized_files: channel::Receiver<UnsummarizedFile>,
- cx: &App,
- ) -> SummarizeFiles {
- let (summarized_tx, summarized_rx) = channel::bounded(512);
- let task = cx.spawn(async move |cx| {
- while let Ok(file) = unsummarized_files.recv().await {
- log::debug!("Summarizing {:?}", file);
- let summary = cx
- .update(|cx| Self::summarize_code(&file.contents, &file.path, cx))?
- .await
- .unwrap_or_else(|err| {
- // Log a warning because we'll continue anyway.
- // In the future, we may want to try splitting it up into multiple requests and concatenating the summaries,
- // but this might give bad summaries due to cutting off source code files in the middle.
- log::warn!("Failed to summarize {} - {:?}", file.path.display(), err);
-
- String::new()
- });
-
- // Note that the summary could be empty because of an error talking to a cloud provider,
- // e.g. because the context limit was exceeded. In that case, we return Ok(String::new()).
- if !summary.is_empty() {
- summarized_tx
- .send(SummarizedFile {
- path: file.path.display().to_string(),
- digest: file.digest,
- summary,
- mtime: file.mtime,
- })
- .await?
- }
- }
-
- Ok(())
- });
-
- SummarizeFiles {
- files: summarized_rx,
- task,
- }
- }
-
- fn summarize_code(
- code: &str,
- path: &Path,
- cx: &App,
- ) -> impl Future<Output = Result<String>> + use<> {
- let start = Instant::now();
- let (summary_model_id, use_cache): (LanguageModelId, bool) = (
- "Qwen/Qwen2-7B-Instruct".to_string().into(), // TODO read this from the user's settings.
- false, // qwen2 doesn't have a cache, but we should probably infer this from the model
- );
- let Some(model) = LanguageModelRegistry::read_global(cx)
- .available_models(cx)
- .find(|model| &model.id() == &summary_model_id)
- else {
- return cx.background_spawn(async move {
- anyhow::bail!("Couldn't find the preferred summarization model ({summary_model_id:?}) in the language registry's available models")
- });
- };
- let utf8_path = path.to_string_lossy();
- const PROMPT_BEFORE_CODE: &str = "Summarize what the code in this file does in 3 sentences, using no newlines or bullet points in the summary:";
- let prompt = format!("{PROMPT_BEFORE_CODE}\n{utf8_path}:\n{code}");
-
- log::debug!(
- "Summarizing code by sending this prompt to {:?}: {:?}",
- model.name(),
- &prompt
- );
-
- let request = LanguageModelRequest {
- thread_id: None,
- prompt_id: None,
- mode: None,
- intent: None,
- messages: vec![LanguageModelRequestMessage {
- role: Role::User,
- content: vec![prompt.into()],
- cache: use_cache,
- }],
- tools: Vec::new(),
- tool_choice: None,
- stop: Vec::new(),
- temperature: None,
- thinking_allowed: true,
- };
-
- let code_len = code.len();
- cx.spawn(async move |cx| {
- let stream = model.stream_completion(request, cx);
- cx.background_spawn(async move {
- let answer: String = stream
- .await?
- .filter_map(|event| async {
- if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
- Some(text)
- } else {
- None
- }
- })
- .collect()
- .await;
-
- log::info!(
- "It took {:?} to summarize {:?} bytes of code.",
- start.elapsed(),
- code_len
- );
-
- log::debug!("Summary was: {:?}", &answer);
-
- Ok(answer)
- })
- .await
-
- // TODO if summarization failed, put it back in the backlog!
- })
- }
-
- fn persist_summaries(
- &self,
- summaries: channel::Receiver<SummarizedFile>,
- cx: &App,
- ) -> Task<Result<()>> {
- let db_connection = self.db_connection.clone();
- let digest_db = self.file_digest_db;
- let summary_db = self.summary_db;
- cx.background_spawn(async move {
- let mut summaries = pin!(summaries.chunks_timeout(4096, Duration::from_secs(2)));
- while let Some(summaries) = summaries.next().await {
- let mut txn = db_connection.write_txn()?;
- for file in &summaries {
- log::debug!(
- "Saving summary of {:?} - which is {} bytes of summary for content digest {:?}",
- &file.path,
- file.summary.len(),
- file.digest
- );
- digest_db.put(
- &mut txn,
- &file.path,
- &FileDigest {
- mtime: file.mtime,
- digest: file.digest,
- },
- )?;
- summary_db.put(&mut txn, &file.digest, &file.summary)?;
- }
- txn.commit()?;
-
- drop(summaries);
- log::debug!("committed summaries");
- }
-
- Ok(())
- })
- }
-
- /// Empty out the backlog of files that haven't been resummarized, and resummarize them immediately.
- pub(crate) fn flush_backlog(
- &self,
- worktree_abs_path: Arc<Path>,
- cx: &App,
- ) -> impl Future<Output = Result<()>> + use<> {
- let start = Instant::now();
- let backlogged = {
- let (tx, rx) = channel::bounded(512);
- let needs_summary: Vec<(Arc<Path>, Option<MTime>)> = {
- let mut backlog = self.backlog.lock();
-
- backlog.drain().collect()
- };
-
- let task = cx.background_spawn(async move {
- tx.send(needs_summary).await?;
- Ok(())
- });
-
- Backlogged {
- paths_to_digest: rx,
- task,
- }
- };
-
- let digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx);
- let needs_summary = self.check_summary_cache(digest.files, cx);
- let summaries = self.summarize_files(needs_summary.files, cx);
- let persist = self.persist_summaries(summaries.files, cx);
-
- async move {
- futures::try_join!(
- backlogged.task,
- digest.task,
- needs_summary.task,
- summaries.task,
- persist
- )?;
-
- log::info!("Summarizing backlogged entries took {:?}", start.elapsed());
-
- Ok(())
- }
- }
-
- pub(crate) fn backlog_len(&self) -> usize {
- self.backlog.lock().len()
- }
-}
-
-fn db_key_for_path(path: &Arc<Path>) -> String {
- path.to_string_lossy().replace('/', "\0")
-}
@@ -1,205 +0,0 @@
-use crate::embedding::EmbeddingProvider;
-use crate::embedding_index::EmbeddingIndex;
-use crate::indexing::IndexingEntrySet;
-use crate::summary_index::SummaryIndex;
-use anyhow::Result;
-use fs::Fs;
-use futures::future::Shared;
-use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
-use language::LanguageRegistry;
-use log;
-use project::{UpdatedEntriesSet, Worktree};
-use smol::channel;
-use std::sync::Arc;
-use util::ResultExt;
-
-#[derive(Clone)]
-pub enum WorktreeIndexHandle {
- Loading {
- index: Shared<Task<Result<Entity<WorktreeIndex>, Arc<anyhow::Error>>>>,
- },
- Loaded {
- index: Entity<WorktreeIndex>,
- },
-}
-
-pub struct WorktreeIndex {
- worktree: Entity<Worktree>,
- db_connection: heed::Env,
- embedding_index: EmbeddingIndex,
- summary_index: SummaryIndex,
- entry_ids_being_indexed: Arc<IndexingEntrySet>,
- _index_entries: Task<Result<()>>,
- _subscription: Subscription,
-}
-
-impl WorktreeIndex {
- pub fn load(
- worktree: Entity<Worktree>,
- db_connection: heed::Env,
- language_registry: Arc<LanguageRegistry>,
- fs: Arc<dyn Fs>,
- status_tx: channel::Sender<()>,
- embedding_provider: Arc<dyn EmbeddingProvider>,
- cx: &mut App,
- ) -> Task<Result<Entity<Self>>> {
- let worktree_for_index = worktree.clone();
- let worktree_for_summary = worktree.clone();
- let worktree_abs_path = worktree.read(cx).abs_path();
- let embedding_fs = Arc::clone(&fs);
- let summary_fs = fs;
- cx.spawn(async move |cx| {
- let entries_being_indexed = Arc::new(IndexingEntrySet::new(status_tx));
- let (embedding_index, summary_index) = cx
- .background_spawn({
- let entries_being_indexed = Arc::clone(&entries_being_indexed);
- let db_connection = db_connection.clone();
- async move {
- let mut txn = db_connection.write_txn()?;
- let embedding_index = {
- let db_name = worktree_abs_path.to_string_lossy();
- let db = db_connection.create_database(&mut txn, Some(&db_name))?;
-
- EmbeddingIndex::new(
- worktree_for_index,
- embedding_fs,
- db_connection.clone(),
- db,
- language_registry,
- embedding_provider,
- Arc::clone(&entries_being_indexed),
- )
- };
- let summary_index = {
- let file_digest_db = {
- let db_name =
- // Prepend something that wouldn't be found at the beginning of an
- // absolute path, so we don't get db key namespace conflicts with
- // embeddings, which use the abs path as a key.
- format!("digests-{}", worktree_abs_path.to_string_lossy());
- db_connection.create_database(&mut txn, Some(&db_name))?
- };
- let summary_db = {
- let db_name =
- // Prepend something that wouldn't be found at the beginning of an
- // absolute path, so we don't get db key namespace conflicts with
- // embeddings, which use the abs path as a key.
- format!("summaries-{}", worktree_abs_path.to_string_lossy());
- db_connection.create_database(&mut txn, Some(&db_name))?
- };
- SummaryIndex::new(
- worktree_for_summary,
- summary_fs,
- db_connection.clone(),
- file_digest_db,
- summary_db,
- Arc::clone(&entries_being_indexed),
- )
- };
- txn.commit()?;
- anyhow::Ok((embedding_index, summary_index))
- }
- })
- .await?;
-
- cx.new(|cx| {
- Self::new(
- worktree,
- db_connection,
- embedding_index,
- summary_index,
- entries_being_indexed,
- cx,
- )
- })
- })
- }
-
- pub fn new(
- worktree: Entity<Worktree>,
- db_connection: heed::Env,
- embedding_index: EmbeddingIndex,
- summary_index: SummaryIndex,
- entry_ids_being_indexed: Arc<IndexingEntrySet>,
- cx: &mut Context<Self>,
- ) -> Self {
- let (updated_entries_tx, updated_entries_rx) = channel::unbounded();
- let _subscription = cx.subscribe(&worktree, move |_this, _worktree, event, _cx| {
- if let worktree::Event::UpdatedEntries(update) = event {
- log::debug!("Updating entries...");
- _ = updated_entries_tx.try_send(update.clone());
- }
- });
-
- Self {
- db_connection,
- embedding_index,
- summary_index,
- worktree,
- entry_ids_being_indexed,
- _index_entries: cx.spawn(async move |this, cx| {
- Self::index_entries(this, updated_entries_rx, cx).await
- }),
- _subscription,
- }
- }
-
- pub fn entry_ids_being_indexed(&self) -> &IndexingEntrySet {
- self.entry_ids_being_indexed.as_ref()
- }
-
- pub fn worktree(&self) -> &Entity<Worktree> {
- &self.worktree
- }
-
- pub fn db_connection(&self) -> &heed::Env {
- &self.db_connection
- }
-
- pub fn embedding_index(&self) -> &EmbeddingIndex {
- &self.embedding_index
- }
-
- pub fn summary_index(&self) -> &SummaryIndex {
- &self.summary_index
- }
-
- async fn index_entries(
- this: WeakEntity<Self>,
- updated_entries: channel::Receiver<UpdatedEntriesSet>,
- cx: &mut AsyncApp,
- ) -> Result<()> {
- let index = this.update(cx, |this, cx| {
- futures::future::try_join(
- this.embedding_index.index_entries_changed_on_disk(cx),
- this.summary_index.index_entries_changed_on_disk(false, cx),
- )
- })?;
- index.await.log_err();
-
- while let Ok(updated_entries) = updated_entries.recv().await {
- let index = this.update(cx, |this, cx| {
- futures::future::try_join(
- this.embedding_index
- .index_updated_entries(updated_entries.clone(), cx),
- this.summary_index
- .index_updated_entries(updated_entries, false, cx),
- )
- })?;
- index.await.log_err();
- }
-
- Ok(())
- }
-
- #[cfg(test)]
- pub fn path_count(&self) -> Result<u64> {
- use anyhow::Context as _;
-
- let txn = self
- .db_connection
- .read_txn()
- .context("failed to create read transaction")?;
- Ok(self.embedding_index().db().len(&txn)?)
- }
-}
@@ -31,7 +31,9 @@ schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
+settings_ui_macros.workspace = true
serde_json_lenient.workspace = true
+serde_path_to_error.workspace = true
smallvec.workspace = true
tree-sitter-json.workspace = true
tree-sitter.workspace = true
@@ -1,13 +1,17 @@
use std::fmt::{Display, Formatter};
-use crate::{Settings, SettingsSources, VsCodeSettings};
+use crate::{self as settings};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use settings::{Settings, SettingsSources, VsCodeSettings};
+use settings_ui_macros::{SettingsKey, SettingsUi};
/// Base key bindings scheme. Base keymaps can be overridden with user keymaps.
///
/// Default: VSCode
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)]
+#[derive(
+ Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default, SettingsUi,
+)]
pub enum BaseKeymap {
#[default]
VSCode,
@@ -96,25 +100,45 @@ impl BaseKeymap {
}
}
-impl Settings for BaseKeymap {
- const KEY: Option<&'static str> = Some("base_keymap");
+#[derive(
+ Copy,
+ Clone,
+ Debug,
+ Serialize,
+ Deserialize,
+ JsonSchema,
+ PartialEq,
+ Eq,
+ Default,
+ SettingsUi,
+ SettingsKey,
+)]
+// extracted so that it can be an option, and still work with derive(SettingsUi)
+#[settings_key(None)]
+pub struct BaseKeymapSetting {
+ pub base_keymap: Option<BaseKeymap>,
+}
- type FileContent = Option<Self>;
+impl Settings for BaseKeymap {
+ type FileContent = BaseKeymapSetting;
fn load(
sources: SettingsSources<Self::FileContent>,
_: &mut gpui::App,
) -> anyhow::Result<Self> {
- if let Some(Some(user_value)) = sources.user.copied() {
+ if let Some(Some(user_value)) = sources.user.map(|setting| setting.base_keymap) {
return Ok(user_value);
}
- if let Some(Some(server_value)) = sources.server.copied() {
+ if let Some(Some(server_value)) = sources.server.map(|setting| setting.base_keymap) {
return Ok(server_value);
}
- sources.default.ok_or_else(Self::missing_default)
+ sources
+ .default
+ .base_keymap
+ .ok_or_else(Self::missing_default)
}
fn import_from_vscode(_vscode: &VsCodeSettings, current: &mut Self::FileContent) {
- *current = Some(BaseKeymap::VSCode);
+ current.base_keymap = Some(BaseKeymap::VSCode);
}
}
@@ -1,1424 +0,0 @@
-use collections::HashMap;
-
-// On some keyboards (e.g. German QWERTZ) it is not possible to type the full ASCII range
-// without using option. This means that some of our built in keyboard shortcuts do not work
-// for those users.
-//
-// The way macOS solves this problem is to move shortcuts around so that they are all reachable,
-// even if the mnemonic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct
-//
-// For example, cmd-> is the "switch window" shortcut because the > key is right above tab.
-// To ensure this doesn't cause problems for shortcuts defined for a QWERTY layout, apple moves
-// any shortcuts defined as cmd-> to cmd-:. Coincidentally this s also the same keyboard position
-// as cmd-> on a QWERTY layout.
-//
-// Another example is cmd-[ and cmd-], as they cannot be typed without option, those keys are remapped to cmd-ö
-// and cmd-ä. These shortcuts are not in the same position as a QWERTY keyboard, because on a QWERTZ keyboard
-// the + key is in the way; and shortcuts bound to cmd-+ are still typed as cmd-+ on either keyboard (though the
-// specific key moves)
-//
-// As far as I can tell, there's no way to query the mappings Apple uses except by rendering a menu with every
-// possible key combination, and inspecting the UI to see what it rendered. So that's what we did...
-//
-// These mappings were generated by running https://github.com/ConradIrwin/keyboard-inspector, tidying up the
-// output to remove languages with no mappings and other oddities, and converting it to a less verbose representation with:
-// jq -s 'map(to_entries | map({key: .key, value: [(.value | to_entries | map(.key) | join("")), (.value | to_entries | map(.value) | join(""))]}) | from_entries) | add'
-// From there I used multi-cursor to produce this match statement.
-#[cfg(target_os = "macos")]
-pub fn get_key_equivalents(layout: &str) -> Option<HashMap<char, char>> {
- let mappings: &[(char, char)] = match layout {
- "com.apple.keylayout.ABC-AZERTY" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('.', ';'),
- ('/', ':'),
- ('0', 'à'),
- ('1', '&'),
- ('2', 'é'),
- ('3', '"'),
- ('4', '\''),
- ('5', '('),
- ('6', '§'),
- ('7', 'è'),
- ('8', '!'),
- ('9', 'ç'),
- (':', '°'),
- (';', ')'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', '^'),
- ('\'', 'ù'),
- ('\\', '`'),
- (']', '$'),
- ('^', '6'),
- ('`', '<'),
- ('{', '¨'),
- ('|', '£'),
- ('}', '*'),
- ('~', '>'),
- ],
- "com.apple.keylayout.ABC-QWERTZ" => &[
- ('"', '`'),
- ('#', '§'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', 'ß'),
- (':', 'Ü'),
- (';', 'ü'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '´'),
- ('\\', '#'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '\''),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Albanian" => &[
- ('"', '\''),
- (':', 'Ç'),
- (';', 'ç'),
- ('<', ';'),
- ('>', ':'),
- ('@', '"'),
- ('\'', '@'),
- ('\\', 'ë'),
- ('`', '<'),
- ('|', 'Ë'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Austrian" => &[
- ('"', '`'),
- ('#', '§'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', 'ß'),
- (':', 'Ü'),
- (';', 'ü'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '´'),
- ('\\', '#'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '\''),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Azeri" => &[
- ('"', 'Ə'),
- (',', 'ç'),
- ('.', 'ş'),
- ('/', '.'),
- (':', 'I'),
- (';', 'ı'),
- ('<', 'Ç'),
- ('>', 'Ş'),
- ('?', ','),
- ('W', 'Ü'),
- ('[', 'ö'),
- ('\'', 'ə'),
- (']', 'ğ'),
- ('w', 'ü'),
- ('{', 'Ö'),
- ('|', '/'),
- ('}', 'Ğ'),
- ],
- "com.apple.keylayout.Belgian" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('.', ';'),
- ('/', ':'),
- ('0', 'à'),
- ('1', '&'),
- ('2', 'é'),
- ('3', '"'),
- ('4', '\''),
- ('5', '('),
- ('6', '§'),
- ('7', 'è'),
- ('8', '!'),
- ('9', 'ç'),
- (':', '°'),
- (';', ')'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', '^'),
- ('\'', 'ù'),
- ('\\', '`'),
- (']', '$'),
- ('^', '6'),
- ('`', '<'),
- ('{', '¨'),
- ('|', '£'),
- ('}', '*'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Brazilian-ABNT2" => &[
- ('"', '`'),
- ('/', 'ç'),
- ('?', 'Ç'),
- ('\'', '´'),
- ('\\', '~'),
- ('^', '¨'),
- ('`', '\''),
- ('|', '^'),
- ('~', '"'),
- ],
- "com.apple.keylayout.Brazilian-Pro" => &[('^', 'ˆ'), ('~', '˜')],
- "com.apple.keylayout.British" => &[('#', '£')],
- "com.apple.keylayout.Canadian-CSA" => &[
- ('"', 'È'),
- ('/', 'é'),
- ('<', '\''),
- ('>', '"'),
- ('?', 'É'),
- ('[', '^'),
- ('\'', 'è'),
- ('\\', 'à'),
- (']', 'ç'),
- ('`', 'ù'),
- ('{', '¨'),
- ('|', 'À'),
- ('}', 'Ç'),
- ('~', 'Ù'),
- ],
- "com.apple.keylayout.Croatian" => &[
- ('"', 'Ć'),
- ('&', '\''),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- (':', 'Č'),
- (';', 'č'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'š'),
- ('\'', 'ć'),
- ('\\', 'ž'),
- (']', 'đ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Š'),
- ('|', 'Ž'),
- ('}', 'Đ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Croatian-PC" => &[
- ('"', 'Ć'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '\''),
- (':', 'Č'),
- (';', 'č'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'š'),
- ('\'', 'ć'),
- ('\\', 'ž'),
- (']', 'đ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Š'),
- ('|', 'Ž'),
- ('}', 'Đ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Czech" => &[
- ('!', '1'),
- ('"', '!'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('+', '%'),
- ('/', '\''),
- ('0', 'é'),
- ('1', '+'),
- ('2', 'ě'),
- ('3', 'š'),
- ('4', 'č'),
- ('5', 'ř'),
- ('6', 'ž'),
- ('7', 'ý'),
- ('8', 'á'),
- ('9', 'í'),
- (':', '"'),
- (';', 'ů'),
- ('<', '?'),
- ('>', ':'),
- ('?', 'ˇ'),
- ('@', '2'),
- ('[', 'ú'),
- ('\'', '§'),
- (']', ')'),
- ('^', '6'),
- ('`', '¨'),
- ('{', 'Ú'),
- ('}', '('),
- ('~', '`'),
- ],
- "com.apple.keylayout.Czech-QWERTY" => &[
- ('!', '1'),
- ('"', '!'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('+', '%'),
- ('/', '\''),
- ('0', 'é'),
- ('1', '+'),
- ('2', 'ě'),
- ('3', 'š'),
- ('4', 'č'),
- ('5', 'ř'),
- ('6', 'ž'),
- ('7', 'ý'),
- ('8', 'á'),
- ('9', 'í'),
- (':', '"'),
- (';', 'ů'),
- ('<', '?'),
- ('>', ':'),
- ('?', 'ˇ'),
- ('@', '2'),
- ('[', 'ú'),
- ('\'', '§'),
- (']', ')'),
- ('^', '6'),
- ('`', '¨'),
- ('{', 'Ú'),
- ('}', '('),
- ('~', '`'),
- ],
- "com.apple.keylayout.Danish" => &[
- ('"', '^'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'æ'),
- ('\'', '¨'),
- ('\\', '\''),
- (']', 'ø'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Æ'),
- ('|', '*'),
- ('}', 'Ø'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Faroese" => &[
- ('"', 'Ø'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Æ'),
- (';', 'æ'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'å'),
- ('\'', 'ø'),
- ('\\', '\''),
- (']', 'ð'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Å'),
- ('|', '*'),
- ('}', 'Ð'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Finnish" => &[
- ('"', '^'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '\''),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.FinnishExtended" => &[
- ('"', 'ˆ'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '\''),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.FinnishSami-PC" => &[
- ('"', 'ˆ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '@'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.French" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('.', ';'),
- ('/', ':'),
- ('0', 'à'),
- ('1', '&'),
- ('2', 'é'),
- ('3', '"'),
- ('4', '\''),
- ('5', '('),
- ('6', '§'),
- ('7', 'è'),
- ('8', '!'),
- ('9', 'ç'),
- (':', '°'),
- (';', ')'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', '^'),
- ('\'', 'ù'),
- ('\\', '`'),
- (']', '$'),
- ('^', '6'),
- ('`', '<'),
- ('{', '¨'),
- ('|', '£'),
- ('}', '*'),
- ('~', '>'),
- ],
- "com.apple.keylayout.French-PC" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('-', ')'),
- ('.', ';'),
- ('/', ':'),
- ('0', 'à'),
- ('1', '&'),
- ('2', 'é'),
- ('3', '"'),
- ('4', '\''),
- ('5', '('),
- ('6', '-'),
- ('7', 'è'),
- ('8', '_'),
- ('9', 'ç'),
- (':', '§'),
- (';', '!'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', '^'),
- ('\'', 'ù'),
- ('\\', '*'),
- (']', '$'),
- ('^', '6'),
- ('_', '°'),
- ('`', '<'),
- ('{', '¨'),
- ('|', 'μ'),
- ('}', '£'),
- ('~', '>'),
- ],
- "com.apple.keylayout.French-numerical" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('.', ';'),
- ('/', ':'),
- ('0', 'à'),
- ('1', '&'),
- ('2', 'é'),
- ('3', '"'),
- ('4', '\''),
- ('5', '('),
- ('6', '§'),
- ('7', 'è'),
- ('8', '!'),
- ('9', 'ç'),
- (':', '°'),
- (';', ')'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', '^'),
- ('\'', 'ù'),
- ('\\', '`'),
- (']', '$'),
- ('^', '6'),
- ('`', '<'),
- ('{', '¨'),
- ('|', '£'),
- ('}', '*'),
- ('~', '>'),
- ],
- "com.apple.keylayout.German" => &[
- ('"', '`'),
- ('#', '§'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', 'ß'),
- (':', 'Ü'),
- (';', 'ü'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '´'),
- ('\\', '#'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '\''),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.German-DIN-2137" => &[
- ('"', '`'),
- ('#', '§'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', 'ß'),
- (':', 'Ü'),
- (';', 'ü'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '´'),
- ('\\', '#'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '\''),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Hawaiian" => &[('\'', 'ʻ')],
- "com.apple.keylayout.Hungarian" => &[
- ('!', '\''),
- ('"', 'Á'),
- ('#', '+'),
- ('$', '!'),
- ('&', '='),
- ('(', ')'),
- (')', 'Ö'),
- ('*', '('),
- ('+', 'Ó'),
- ('/', 'ü'),
- ('0', 'ö'),
- (':', 'É'),
- (';', 'é'),
- ('<', 'Ü'),
- ('=', 'ó'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ő'),
- ('\'', 'á'),
- ('\\', 'ű'),
- (']', 'ú'),
- ('^', '/'),
- ('`', 'í'),
- ('{', 'Ő'),
- ('|', 'Ű'),
- ('}', 'Ú'),
- ('~', 'Í'),
- ],
- "com.apple.keylayout.Hungarian-QWERTY" => &[
- ('!', '\''),
- ('"', 'Á'),
- ('#', '+'),
- ('$', '!'),
- ('&', '='),
- ('(', ')'),
- (')', 'Ö'),
- ('*', '('),
- ('+', 'Ó'),
- ('/', 'ü'),
- ('0', 'ö'),
- (':', 'É'),
- (';', 'é'),
- ('<', 'Ü'),
- ('=', 'ó'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ő'),
- ('\'', 'á'),
- ('\\', 'ű'),
- (']', 'ú'),
- ('^', '/'),
- ('`', 'í'),
- ('{', 'Ő'),
- ('|', 'Ű'),
- ('}', 'Ú'),
- ('~', 'Í'),
- ],
- "com.apple.keylayout.Icelandic" => &[
- ('"', 'Ö'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '\''),
- (':', 'Ð'),
- (';', 'ð'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'æ'),
- ('\'', 'ö'),
- ('\\', 'þ'),
- (']', '´'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Æ'),
- ('|', 'Þ'),
- ('}', '´'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Irish" => &[('#', '£')],
- "com.apple.keylayout.IrishExtended" => &[('#', '£')],
- "com.apple.keylayout.Italian" => &[
- ('!', '1'),
- ('"', '%'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- (',', ';'),
- ('.', ':'),
- ('/', ','),
- ('0', 'é'),
- ('1', '&'),
- ('2', '"'),
- ('3', '\''),
- ('4', '('),
- ('5', 'ç'),
- ('6', 'è'),
- ('7', ')'),
- ('8', '£'),
- ('9', 'à'),
- (':', '!'),
- (';', 'ò'),
- ('<', '.'),
- ('>', '/'),
- ('@', '2'),
- ('[', 'ì'),
- ('\'', 'ù'),
- ('\\', '§'),
- (']', '$'),
- ('^', '6'),
- ('`', '<'),
- ('{', '^'),
- ('|', '°'),
- ('}', '*'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Italian-Pro" => &[
- ('"', '^'),
- ('#', '£'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '\''),
- (':', 'é'),
- (';', 'è'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ò'),
- ('\'', 'ì'),
- ('\\', 'ù'),
- (']', 'à'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'ç'),
- ('|', '§'),
- ('}', '°'),
- ('~', '>'),
- ],
- "com.apple.keylayout.LatinAmerican" => &[
- ('"', '¨'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '\''),
- (':', 'Ñ'),
- (';', 'ñ'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', '{'),
- ('\'', '´'),
- ('\\', '¿'),
- (']', '}'),
- ('^', '&'),
- ('`', '<'),
- ('{', '['),
- ('|', '¡'),
- ('}', ']'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Lithuanian" => &[
- ('!', 'Ą'),
- ('#', 'Ę'),
- ('$', 'Ė'),
- ('%', 'Į'),
- ('&', 'Ų'),
- ('*', 'Ū'),
- ('+', 'Ž'),
- ('1', 'ą'),
- ('2', 'č'),
- ('3', 'ę'),
- ('4', 'ė'),
- ('5', 'į'),
- ('6', 'š'),
- ('7', 'ų'),
- ('8', 'ū'),
- ('=', 'ž'),
- ('@', 'Č'),
- ('^', 'Š'),
- ],
- "com.apple.keylayout.Maltese" => &[
- ('#', '£'),
- ('[', 'ġ'),
- (']', 'ħ'),
- ('`', 'ż'),
- ('{', 'Ġ'),
- ('}', 'Ħ'),
- ('~', 'Ż'),
- ],
- "com.apple.keylayout.NorthernSami" => &[
- ('"', 'Ŋ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('Q', 'Á'),
- ('W', 'Š'),
- ('X', 'Č'),
- ('[', 'ø'),
- ('\'', 'ŋ'),
- ('\\', 'đ'),
- (']', 'æ'),
- ('^', '&'),
- ('`', 'ž'),
- ('q', 'á'),
- ('w', 'š'),
- ('x', 'č'),
- ('{', 'Ø'),
- ('|', 'Đ'),
- ('}', 'Æ'),
- ('~', 'Ž'),
- ],
- "com.apple.keylayout.Norwegian" => &[
- ('"', '^'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ø'),
- ('\'', '¨'),
- ('\\', '@'),
- (']', 'æ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ø'),
- ('|', '*'),
- ('}', 'Æ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.NorwegianExtended" => &[
- ('"', 'ˆ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ø'),
- ('\\', '@'),
- (']', 'æ'),
- ('`', '<'),
- ('}', 'Æ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.NorwegianSami-PC" => &[
- ('"', 'ˆ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ø'),
- ('\'', '¨'),
- ('\\', '@'),
- (']', 'æ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ø'),
- ('|', '*'),
- ('}', 'Æ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Polish" => &[
- ('!', '§'),
- ('"', 'ę'),
- ('#', '!'),
- ('$', '?'),
- ('%', '+'),
- ('&', ':'),
- ('(', '/'),
- (')', '"'),
- ('*', '_'),
- ('+', ']'),
- (',', '.'),
- ('.', ','),
- ('/', 'ż'),
- (':', 'Ł'),
- (';', 'ł'),
- ('<', 'ś'),
- ('=', '['),
- ('>', 'ń'),
- ('?', 'Ż'),
- ('@', '%'),
- ('[', 'ó'),
- ('\'', 'ą'),
- ('\\', ';'),
- (']', '('),
- ('^', '='),
- ('_', 'ć'),
- ('`', '<'),
- ('{', 'ź'),
- ('|', '$'),
- ('}', ')'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Portuguese" => &[
- ('"', '`'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '\''),
- (':', 'ª'),
- (';', 'º'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ç'),
- ('\'', '´'),
- (']', '~'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ç'),
- ('}', '^'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Sami-PC" => &[
- ('"', 'Ŋ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('Q', 'Á'),
- ('W', 'Š'),
- ('X', 'Č'),
- ('[', 'ø'),
- ('\'', 'ŋ'),
- ('\\', 'đ'),
- (']', 'æ'),
- ('^', '&'),
- ('`', 'ž'),
- ('q', 'á'),
- ('w', 'š'),
- ('x', 'č'),
- ('{', 'Ø'),
- ('|', 'Đ'),
- ('}', 'Æ'),
- ('~', 'Ž'),
- ],
- "com.apple.keylayout.Serbian-Latin" => &[
- ('"', 'Ć'),
- ('&', '\''),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- (':', 'Č'),
- (';', 'č'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'š'),
- ('\'', 'ć'),
- ('\\', 'ž'),
- (']', 'đ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Š'),
- ('|', 'Ž'),
- ('}', 'Đ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Slovak" => &[
- ('!', '1'),
- ('"', '!'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('+', '%'),
- ('/', '\''),
- ('0', 'é'),
- ('1', '+'),
- ('2', 'ľ'),
- ('3', 'š'),
- ('4', 'č'),
- ('5', 'ť'),
- ('6', 'ž'),
- ('7', 'ý'),
- ('8', 'á'),
- ('9', 'í'),
- (':', '"'),
- (';', 'ô'),
- ('<', '?'),
- ('>', ':'),
- ('?', 'ˇ'),
- ('@', '2'),
- ('[', 'ú'),
- ('\'', '§'),
- (']', 'ä'),
- ('^', '6'),
- ('`', 'ň'),
- ('{', 'Ú'),
- ('}', 'Ä'),
- ('~', 'Ň'),
- ],
- "com.apple.keylayout.Slovak-QWERTY" => &[
- ('!', '1'),
- ('"', '!'),
- ('#', '3'),
- ('$', '4'),
- ('%', '5'),
- ('&', '7'),
- ('(', '9'),
- (')', '0'),
- ('*', '8'),
- ('+', '%'),
- ('/', '\''),
- ('0', 'é'),
- ('1', '+'),
- ('2', 'ľ'),
- ('3', 'š'),
- ('4', 'č'),
- ('5', 'ť'),
- ('6', 'ž'),
- ('7', 'ý'),
- ('8', 'á'),
- ('9', 'í'),
- (':', '"'),
- (';', 'ô'),
- ('<', '?'),
- ('>', ':'),
- ('?', 'ˇ'),
- ('@', '2'),
- ('[', 'ú'),
- ('\'', '§'),
- (']', 'ä'),
- ('^', '6'),
- ('`', 'ň'),
- ('{', 'Ú'),
- ('}', 'Ä'),
- ('~', 'Ň'),
- ],
- "com.apple.keylayout.Slovenian" => &[
- ('"', 'Ć'),
- ('&', '\''),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- (':', 'Č'),
- (';', 'č'),
- ('<', ';'),
- ('=', '*'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'š'),
- ('\'', 'ć'),
- ('\\', 'ž'),
- (']', 'đ'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Š'),
- ('|', 'Ž'),
- ('}', 'Đ'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Spanish" => &[
- ('!', '¡'),
- ('"', '¨'),
- ('.', 'ç'),
- ('/', '.'),
- (':', 'º'),
- (';', '´'),
- ('<', '¿'),
- ('>', 'Ç'),
- ('@', '!'),
- ('[', 'ñ'),
- ('\'', '`'),
- ('\\', '\''),
- (']', ';'),
- ('^', '/'),
- ('`', '<'),
- ('{', 'Ñ'),
- ('|', '"'),
- ('}', ':'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Spanish-ISO" => &[
- ('"', '¨'),
- ('#', '·'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('.', 'ç'),
- ('/', '.'),
- (':', 'º'),
- (';', '´'),
- ('<', '¿'),
- ('>', 'Ç'),
- ('@', '"'),
- ('[', 'ñ'),
- ('\'', '`'),
- ('\\', '\''),
- (']', ';'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ñ'),
- ('|', '"'),
- ('}', '`'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Swedish" => &[
- ('"', '^'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '\''),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Swedish-Pro" => &[
- ('"', '^'),
- ('$', '€'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '\''),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.SwedishSami-PC" => &[
- ('"', 'ˆ'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('/', '´'),
- (':', 'Å'),
- (';', 'å'),
- ('<', ';'),
- ('=', '`'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '¨'),
- ('\\', '@'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ö'),
- ('|', '*'),
- ('}', 'Ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.SwissFrench" => &[
- ('!', '+'),
- ('"', '`'),
- ('#', '*'),
- ('$', 'ç'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('+', '!'),
- ('/', '\''),
- (':', 'ü'),
- (';', 'è'),
- ('<', ';'),
- ('=', '¨'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'é'),
- ('\'', '^'),
- ('\\', '$'),
- (']', 'à'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'ö'),
- ('|', '£'),
- ('}', 'ä'),
- ('~', '>'),
- ],
- "com.apple.keylayout.SwissGerman" => &[
- ('!', '+'),
- ('"', '`'),
- ('#', '*'),
- ('$', 'ç'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('+', '!'),
- ('/', '\''),
- (':', 'è'),
- (';', 'ü'),
- ('<', ';'),
- ('=', '¨'),
- ('>', ':'),
- ('@', '"'),
- ('[', 'ö'),
- ('\'', '^'),
- ('\\', '$'),
- (']', 'ä'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'é'),
- ('|', '£'),
- ('}', 'à'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Turkish" => &[
- ('"', '-'),
- ('#', '"'),
- ('$', '\''),
- ('%', '('),
- ('&', ')'),
- ('(', '%'),
- (')', ':'),
- ('*', '_'),
- (',', 'ö'),
- ('-', 'ş'),
- ('.', 'ç'),
- ('/', '.'),
- (':', '$'),
- ('<', 'Ö'),
- ('>', 'Ç'),
- ('@', '*'),
- ('[', 'ğ'),
- ('\'', ','),
- ('\\', 'ü'),
- (']', 'ı'),
- ('^', '/'),
- ('_', 'Ş'),
- ('`', '<'),
- ('{', 'Ğ'),
- ('|', 'Ü'),
- ('}', 'I'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Turkish-QWERTY-PC" => &[
- ('"', 'I'),
- ('#', '^'),
- ('$', '+'),
- ('&', '/'),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- ('+', ':'),
- (',', 'ö'),
- ('.', 'ç'),
- ('/', '*'),
- (':', 'Ş'),
- (';', 'ş'),
- ('<', 'Ö'),
- ('=', '.'),
- ('>', 'Ç'),
- ('@', '\''),
- ('[', 'ğ'),
- ('\'', 'ı'),
- ('\\', ','),
- (']', 'ü'),
- ('^', '&'),
- ('`', '<'),
- ('{', 'Ğ'),
- ('|', ';'),
- ('}', 'Ü'),
- ('~', '>'),
- ],
- "com.apple.keylayout.Turkish-Standard" => &[
- ('"', 'Ş'),
- ('#', '^'),
- ('&', '\''),
- ('(', ')'),
- (')', '='),
- ('*', '('),
- (',', '.'),
- ('.', ','),
- (':', 'Ç'),
- (';', 'ç'),
- ('<', ':'),
- ('=', '*'),
- ('>', ';'),
- ('@', '"'),
- ('[', 'ğ'),
- ('\'', 'ş'),
- ('\\', 'ü'),
- (']', 'ı'),
- ('^', '&'),
- ('`', 'ö'),
- ('{', 'Ğ'),
- ('|', 'Ü'),
- ('}', 'I'),
- ('~', 'Ö'),
- ],
- "com.apple.keylayout.Turkmen" => &[
- ('C', 'Ç'),
- ('Q', 'Ä'),
- ('V', 'Ý'),
- ('X', 'Ü'),
- ('[', 'ň'),
- ('\\', 'ş'),
- (']', 'ö'),
- ('^', '№'),
- ('`', 'ž'),
- ('c', 'ç'),
- ('q', 'ä'),
- ('v', 'ý'),
- ('x', 'ü'),
- ('{', 'Ň'),
- ('|', 'Ş'),
- ('}', 'Ö'),
- ('~', 'Ž'),
- ],
- "com.apple.keylayout.USInternational-PC" => &[('^', 'ˆ'), ('~', '˜')],
- "com.apple.keylayout.Welsh" => &[('#', '£')],
-
- _ => return None,
- };
-
- Some(HashMap::from_iter(mappings.into_iter().cloned()))
-}
-
-#[cfg(not(target_os = "macos"))]
-pub fn get_key_equivalents(_layout: &str) -> Option<HashMap<char, char>> {
- None
-}
@@ -3,7 +3,8 @@ use collections::{BTreeMap, HashMap, IndexMap};
use fs::Fs;
use gpui::{
Action, ActionBuildError, App, InvalidKeystrokeError, KEYSTROKE_PARSE_EXPECTED_MESSAGE,
- KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, Keystroke, NoAction, SharedString,
+ KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, KeybindingKeystroke, Keystroke,
+ NoAction, SharedString,
};
use schemars::{JsonSchema, json_schema};
use serde::Deserialize;
@@ -211,9 +212,6 @@ impl KeymapFile {
}
pub fn load(content: &str, cx: &App) -> KeymapFileLoadResult {
- let key_equivalents =
- crate::key_equivalents::get_key_equivalents(cx.keyboard_layout().id());
-
if content.is_empty() {
return KeymapFileLoadResult::Success {
key_bindings: Vec::new(),
@@ -255,12 +253,6 @@ impl KeymapFile {
}
};
- let key_equivalents = if *use_key_equivalents {
- key_equivalents.as_ref()
- } else {
- None
- };
-
let mut section_errors = String::new();
if !unrecognized_fields.is_empty() {
@@ -278,7 +270,7 @@ impl KeymapFile {
keystrokes,
action,
context_predicate.clone(),
- key_equivalents,
+ *use_key_equivalents,
cx,
);
match result {
@@ -336,7 +328,7 @@ impl KeymapFile {
keystrokes: &str,
action: &KeymapAction,
context: Option<Rc<KeyBindingContextPredicate>>,
- key_equivalents: Option<&HashMap<char, char>>,
+ use_key_equivalents: bool,
cx: &App,
) -> std::result::Result<KeyBinding, String> {
let (build_result, action_input_string) = match &action.0 {
@@ -404,8 +396,9 @@ impl KeymapFile {
keystrokes,
action,
context,
- key_equivalents,
+ use_key_equivalents,
action_input_string.map(SharedString::from),
+ cx.keyboard_mapper().as_ref(),
) {
Ok(key_binding) => key_binding,
Err(InvalidKeystrokeError { keystroke }) => {
@@ -607,6 +600,7 @@ impl KeymapFile {
mut operation: KeybindUpdateOperation<'a>,
mut keymap_contents: String,
tab_size: usize,
+ keyboard_mapper: &dyn gpui::PlatformKeyboardMapper,
) -> Result<String> {
match operation {
// if trying to replace a keybinding that is not user-defined, treat it as an add operation
@@ -646,7 +640,7 @@ impl KeymapFile {
.action_value()
.context("Failed to generate target action JSON value")?;
let Some((index, keystrokes_str)) =
- find_binding(&keymap, &target, &target_action_value)
+ find_binding(&keymap, &target, &target_action_value, keyboard_mapper)
else {
anyhow::bail!("Failed to find keybinding to remove");
};
@@ -681,7 +675,7 @@ impl KeymapFile {
.context("Failed to generate source action JSON value")?;
if let Some((index, keystrokes_str)) =
- find_binding(&keymap, &target, &target_action_value)
+ find_binding(&keymap, &target, &target_action_value, keyboard_mapper)
{
if target.context == source.context {
// if we are only changing the keybinding (common case)
@@ -781,7 +775,7 @@ impl KeymapFile {
}
let use_key_equivalents = from.and_then(|from| {
let action_value = from.action_value().context("Failed to serialize action value. `use_key_equivalents` on new keybinding may be incorrect.").log_err()?;
- let (index, _) = find_binding(&keymap, &from, &action_value)?;
+ let (index, _) = find_binding(&keymap, &from, &action_value, keyboard_mapper)?;
Some(keymap.0[index].use_key_equivalents)
}).unwrap_or(false);
if use_key_equivalents {
@@ -808,6 +802,7 @@ impl KeymapFile {
keymap: &'b KeymapFile,
target: &KeybindUpdateTarget<'a>,
target_action_value: &Value,
+ keyboard_mapper: &dyn gpui::PlatformKeyboardMapper,
) -> Option<(usize, &'b str)> {
let target_context_parsed =
KeyBindingContextPredicate::parse(target.context.unwrap_or("")).ok();
@@ -823,8 +818,15 @@ impl KeymapFile {
for (keystrokes_str, action) in bindings {
let Ok(keystrokes) = keystrokes_str
.split_whitespace()
- .map(Keystroke::parse)
- .collect::<Result<Vec<_>, _>>()
+ .map(|source| {
+ let keystroke = Keystroke::parse(source)?;
+ Ok(KeybindingKeystroke::new_with_mapper(
+ keystroke,
+ false,
+ keyboard_mapper,
+ ))
+ })
+ .collect::<Result<Vec<_>, InvalidKeystrokeError>>()
else {
continue;
};
@@ -832,7 +834,7 @@ impl KeymapFile {
|| !keystrokes
.iter()
.zip(target.keystrokes)
- .all(|(a, b)| a.should_match(b))
+ .all(|(a, b)| a.inner().should_match(b))
{
continue;
}
@@ -847,7 +849,7 @@ impl KeymapFile {
}
}
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub enum KeybindUpdateOperation<'a> {
Replace {
/// Describes the keybind to create
@@ -916,7 +918,7 @@ impl<'a> KeybindUpdateOperation<'a> {
#[derive(Debug, Clone)]
pub struct KeybindUpdateTarget<'a> {
pub context: Option<&'a str>,
- pub keystrokes: &'a [Keystroke],
+ pub keystrokes: &'a [KeybindingKeystroke],
pub action_name: &'a str,
pub action_arguments: Option<&'a str>,
}
@@ -941,6 +943,9 @@ impl<'a> KeybindUpdateTarget<'a> {
fn keystrokes_unparsed(&self) -> String {
let mut keystrokes = String::with_capacity(self.keystrokes.len() * 8);
for keystroke in self.keystrokes {
+ // The reason use `keystroke.unparse()` instead of `keystroke.inner.unparse()`
+ // here is that, we want the user to use `ctrl-shift-4` instead of `ctrl-$`
+ // by default on Windows.
keystrokes.push_str(&keystroke.unparse());
keystrokes.push(' ');
}
@@ -959,7 +964,7 @@ impl<'a> KeybindUpdateTarget<'a> {
}
}
-#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum KeybindSource {
User,
Vim,
@@ -1020,7 +1025,7 @@ impl From<KeybindSource> for KeyBindingMetaIndex {
#[cfg(test)]
mod tests {
- use gpui::Keystroke;
+ use gpui::{DummyKeyboardMapper, KeybindingKeystroke, Keystroke};
use unindent::Unindent;
use crate::{
@@ -1049,16 +1054,27 @@ mod tests {
operation: KeybindUpdateOperation,
expected: impl ToString,
) {
- let result = KeymapFile::update_keybinding(operation, input.to_string(), 4)
- .expect("Update succeeded");
+ let result = KeymapFile::update_keybinding(
+ operation,
+ input.to_string(),
+ 4,
+ &gpui::DummyKeyboardMapper,
+ )
+ .expect("Update succeeded");
pretty_assertions::assert_eq!(expected.to_string(), result);
}
#[track_caller]
- fn parse_keystrokes(keystrokes: &str) -> Vec<Keystroke> {
+ fn parse_keystrokes(keystrokes: &str) -> Vec<KeybindingKeystroke> {
keystrokes
.split(' ')
- .map(|s| Keystroke::parse(s).expect("Keystrokes valid"))
+ .map(|s| {
+ KeybindingKeystroke::new_with_mapper(
+ Keystroke::parse(s).expect("Keystrokes valid"),
+ false,
+ &DummyKeyboardMapper,
+ )
+ })
.collect()
}
@@ -1084,6 +1100,24 @@ mod tests {
.unindent(),
);
+ check_keymap_update(
+ "[]",
+ KeybindUpdateOperation::add(KeybindUpdateTarget {
+ keystrokes: &parse_keystrokes("\\ a"),
+ action_name: "zed::SomeAction",
+ context: None,
+ action_arguments: None,
+ }),
+ r#"[
+ {
+ "bindings": {
+ "\\ a": "zed::SomeAction"
+ }
+ }
+ ]"#
+ .unindent(),
+ );
+
check_keymap_update(
"[]",
KeybindUpdateOperation::add(KeybindUpdateTarget {
@@ -1286,6 +1320,79 @@ mod tests {
.unindent(),
);
+ check_keymap_update(
+ r#"[
+ {
+ "bindings": {
+ "\\ a": "zed::SomeAction"
+ }
+ }
+ ]"#
+ .unindent(),
+ KeybindUpdateOperation::Replace {
+ target: KeybindUpdateTarget {
+ keystrokes: &parse_keystrokes("\\ a"),
+ action_name: "zed::SomeAction",
+ context: None,
+ action_arguments: None,
+ },
+ source: KeybindUpdateTarget {
+ keystrokes: &parse_keystrokes("\\ b"),
+ action_name: "zed::SomeOtherAction",
+ context: None,
+ action_arguments: Some(r#"{"foo": "bar"}"#),
+ },
+ target_keybind_source: KeybindSource::User,
+ },
+ r#"[
+ {
+ "bindings": {
+ "\\ b": [
+ "zed::SomeOtherAction",
+ {
+ "foo": "bar"
+ }
+ ]
+ }
+ }
+ ]"#
+ .unindent(),
+ );
+
+ check_keymap_update(
+ r#"[
+ {
+ "bindings": {
+ "\\ a": "zed::SomeAction"
+ }
+ }
+ ]"#
+ .unindent(),
+ KeybindUpdateOperation::Replace {
+ target: KeybindUpdateTarget {
+ keystrokes: &parse_keystrokes("\\ a"),
+ action_name: "zed::SomeAction",
+ context: None,
+ action_arguments: None,
+ },
+ source: KeybindUpdateTarget {
+ keystrokes: &parse_keystrokes("\\ a"),
+ action_name: "zed::SomeAction",
+ context: None,
+ action_arguments: None,
+ },
+ target_keybind_source: KeybindSource::User,
+ },
+ r#"[
+ {
+ "bindings": {
+ "\\ a": "zed::SomeAction"
+ }
+ }
+ ]"#
+ .unindent(),
+ );
+
check_keymap_update(
r#"[
{
@@ -1478,6 +1585,37 @@ mod tests {
.unindent(),
);
+ check_keymap_update(
+ r#"[
+ {
+ "context": "SomeContext",
+ "bindings": {
+ "\\ a": "foo::bar",
+ "c": "foo::baz",
+ }
+ },
+ ]"#
+ .unindent(),
+ KeybindUpdateOperation::Remove {
+ target: KeybindUpdateTarget {
+ context: Some("SomeContext"),
+ keystrokes: &parse_keystrokes("\\ a"),
+ action_name: "foo::bar",
+ action_arguments: None,
+ },
+ target_keybind_source: KeybindSource::User,
+ },
+ r#"[
+ {
+ "context": "SomeContext",
+ "bindings": {
+ "c": "foo::baz",
+ }
+ },
+ ]"#
+ .unindent(),
+ );
+
check_keymap_update(
r#"[
{
@@ -1,10 +1,10 @@
mod base_keymap_setting;
mod editable_setting_control;
-mod key_equivalents;
mod keymap_file;
mod settings_file;
mod settings_json;
mod settings_store;
+mod settings_ui_core;
mod vscode_import;
use gpui::{App, Global};
@@ -14,7 +14,6 @@ use util::asset_str;
pub use base_keymap_setting::*;
pub use editable_setting_control::*;
-pub use key_equivalents::*;
pub use keymap_file::{
KeyBindingValidator, KeyBindingValidatorRegistration, KeybindSource, KeybindUpdateOperation,
KeybindUpdateTarget, KeymapFile, KeymapFileLoadResult,
@@ -22,9 +21,12 @@ pub use keymap_file::{
pub use settings_file::*;
pub use settings_json::*;
pub use settings_store::{
- InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources,
- SettingsStore,
+ InvalidSettingsError, LocalSettingsKind, Settings, SettingsKey, SettingsLocation,
+ SettingsSources, SettingsStore,
};
+pub use settings_ui_core::*;
+// Re-export the derive macro
+pub use settings_ui_macros::{SettingsKey, SettingsUi};
pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource};
#[derive(Clone, Debug, PartialEq)]
@@ -32,7 +34,7 @@ pub struct ActiveSettingsProfileName(pub String);
impl Global for ActiveSettingsProfileName {}
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)]
pub struct WorktreeId(usize);
impl From<WorktreeId> for usize {
@@ -50,11 +52,11 @@ impl WorktreeId {
Self(id as usize)
}
- pub fn to_proto(&self) -> u64 {
+ pub fn to_proto(self) -> u64 {
self.0 as u64
}
- pub fn to_usize(&self) -> usize {
+ pub fn to_usize(self) -> usize {
self.0
}
}
@@ -89,7 +91,10 @@ pub fn default_settings() -> Cow<'static, str> {
#[cfg(target_os = "macos")]
pub const DEFAULT_KEYMAP_PATH: &str = "keymaps/default-macos.json";
-#[cfg(not(target_os = "macos"))]
+#[cfg(target_os = "windows")]
+pub const DEFAULT_KEYMAP_PATH: &str = "keymaps/default-windows.json";
+
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
pub const DEFAULT_KEYMAP_PATH: &str = "keymaps/default-linux.json";
pub fn default_keymap() -> Cow<'static, str> {
@@ -87,9 +87,9 @@ pub fn update_value_in_json_text<'a>(
}
/// * `replace_key` - When an exact key match according to `key_path` is found, replace the key with `replace_key` if `Some`.
-fn replace_value_in_json_text(
+pub fn replace_value_in_json_text<T: AsRef<str>>(
text: &str,
- key_path: &[&str],
+ key_path: &[T],
tab_size: usize,
new_value: Option<&Value>,
replace_key: Option<&str>,
@@ -140,8 +140,10 @@ fn replace_value_in_json_text(
let found_key = text
.get(key_range.clone())
- .map(|key_text| {
- depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
+ .and_then(|key_text| {
+ serde_json::to_string(key_path[depth].as_ref())
+ .ok()
+ .map(|key_path| depth < key_path.len() && key_text == key_path)
})
.unwrap_or(false);
@@ -163,8 +165,8 @@ fn replace_value_in_json_text(
if depth == key_path.len() {
if let Some(new_value) = new_value {
let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
- if let Some(replace_key) = replace_key {
- let new_key = format!("\"{}\": ", replace_key);
+ if let Some(replace_key) = replace_key.and_then(|str| serde_json::to_string(str).ok()) {
+ let new_key = format!("{}: ", replace_key);
if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
if let Some(prev_key_start) = text[..key_start].rfind('"') {
existing_value_range.start = prev_key_start;
@@ -209,7 +211,7 @@ fn replace_value_in_json_text(
if ch == ',' {
removal_end = existing_value_range.end + offset + 1;
// Also consume whitespace after the comma
- while let Some((_, next_ch)) = chars.next() {
+ for (_, next_ch) in chars.by_ref() {
if next_ch.is_whitespace() {
removal_end += next_ch.len_utf8();
} else {
@@ -226,13 +228,13 @@ fn replace_value_in_json_text(
}
} else {
// We have key paths, construct the sub objects
- let new_key = key_path[depth];
+ let new_key = key_path[depth].as_ref();
// We don't have the key, construct the nested objects
let mut new_value =
serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
for key in key_path[(depth + 1)..].iter().rev() {
- new_value = serde_json::json!({ key.to_string(): new_value });
+ new_value = serde_json::json!({ key.as_ref().to_string(): new_value });
}
if let Some(first_key_start) = first_key_start {
@@ -295,9 +297,9 @@ fn replace_value_in_json_text(
}
}
-const TS_DOCUMENT_KIND: &'static str = "document";
-const TS_ARRAY_KIND: &'static str = "array";
-const TS_COMMENT_KIND: &'static str = "comment";
+const TS_DOCUMENT_KIND: &str = "document";
+const TS_ARRAY_KIND: &str = "array";
+const TS_COMMENT_KIND: &str = "comment";
pub fn replace_top_level_array_value_in_json_text(
text: &str,
@@ -361,7 +363,7 @@ pub fn replace_top_level_array_value_in_json_text(
let needs_indent = range.start_point.row > 0;
if new_value.is_none() && key_path.is_empty() {
- let mut remove_range = text_range.clone();
+ let mut remove_range = text_range;
if index == 0 {
while cursor.goto_next_sibling()
&& (cursor.node().is_extra() || cursor.node().is_missing())
@@ -465,7 +467,7 @@ pub fn append_top_level_array_value_in_json_text(
}
let (mut replace_range, mut replace_value) =
- replace_value_in_json_text("", &[], tab_size, Some(new_value), None);
+ replace_value_in_json_text::<&str>("", &[], tab_size, Some(new_value), None);
replace_range.start = close_bracket_start;
replace_range.end = close_bracket_start;
@@ -563,7 +565,8 @@ pub fn to_pretty_json(
}
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
- Ok(serde_json_lenient::from_str(content)?)
+ let mut deserializer = serde_json_lenient::Deserializer::from_str(content);
+ Ok(serde_path_to_error::deserialize(&mut deserializer)?)
}
#[cfg(test)]
@@ -582,7 +585,7 @@ mod tests {
expected: String,
) {
let result = replace_value_in_json_text(&input, key_path, 4, value.as_ref(), None);
- let mut result_str = input.to_string();
+ let mut result_str = input;
result_str.replace_range(result.0, &result.1);
pretty_assertions::assert_eq!(expected, result_str);
}
@@ -7,11 +7,11 @@ use futures::{
channel::{mpsc, oneshot},
future::LocalBoxFuture,
};
-use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal};
+use gpui::{App, AsyncApp, BorrowAppContext, Global, SharedString, Task, UpdateGlobal};
use paths::{EDITORCONFIG_NAME, local_settings_file_relative_path, task_file_name};
use schemars::JsonSchema;
-use serde::{Deserialize, Serialize, de::DeserializeOwned};
+use serde::{Serialize, de::DeserializeOwned};
use serde_json::{Value, json};
use smallvec::SmallVec;
use std::{
@@ -31,21 +31,24 @@ use util::{
pub type EditorconfigProperties = ec4rs::Properties;
use crate::{
- ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, VsCodeSettings,
- WorktreeId, parse_json_with_comments, update_value_in_json_text,
+ ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, SettingsUiEntry,
+ VsCodeSettings, WorktreeId, parse_json_with_comments, replace_value_in_json_text,
+ settings_ui_core::SettingsUi, update_value_in_json_text,
};
-/// A value that can be defined as a user setting.
-///
-/// Settings can be loaded from a combination of multiple JSON files.
-pub trait Settings: 'static + Send + Sync {
+pub trait SettingsKey: 'static + Send + Sync {
/// The name of a key within the JSON file from which this setting should
/// be deserialized. If this is `None`, then the setting will be deserialized
/// from the root object.
const KEY: Option<&'static str>;
const FALLBACK_KEY: Option<&'static str> = None;
+}
+/// A value that can be defined as a user setting.
+///
+/// Settings can be loaded from a combination of multiple JSON files.
+pub trait Settings: 'static + Send + Sync {
/// The name of the keys in the [`FileContent`](Self::FileContent) that should
/// always be written to a settings file, even if their value matches the default
/// value.
@@ -56,16 +59,32 @@ pub trait Settings: 'static + Send + Sync {
const PRESERVED_KEYS: Option<&'static [&'static str]> = None;
/// The type that is stored in an individual JSON file.
- type FileContent: Clone + Default + Serialize + DeserializeOwned + JsonSchema;
-
+ type FileContent: Clone
+ + Default
+ + Serialize
+ + DeserializeOwned
+ + JsonSchema
+ + SettingsUi
+ + SettingsKey;
+
+ /*
+ * let path = Settings
+ *
+ *
+ */
/// The logic for combining together values from one or more JSON files into the
/// final value for this setting.
+ ///
+ /// # Warning
+ /// `Self::FileContent`'s deserialized field names should match `Self`'s deserialized field names;
+ /// otherwise the field won't be deserialized properly and you will get the error:
+ /// "A default setting must be added to the `default.json` file"
fn load(sources: SettingsSources<Self::FileContent>, cx: &mut App) -> Result<Self>
where
Self: Sized;
fn missing_default() -> anyhow::Error {
- anyhow::anyhow!("missing default")
+ anyhow::anyhow!("missing default for: {}", std::any::type_name::<Self>())
}
/// Use [the helpers in the vscode_import module](crate::vscode_import) to apply known
@@ -98,6 +117,18 @@ pub trait Settings: 'static + Send + Sync {
cx.global::<SettingsStore>().get(None)
}
+ #[track_caller]
+ fn try_get(cx: &App) -> Option<&Self>
+ where
+ Self: Sized,
+ {
+ if cx.has_global::<SettingsStore>() {
+ cx.global::<SettingsStore>().try_get(None)
+ } else {
+ None
+ }
+ }
+
#[track_caller]
fn try_read_global<R>(cx: &AsyncApp, f: impl FnOnce(&Self) -> R) -> Option<R>
where
@@ -267,6 +298,7 @@ trait AnySettingValue: 'static + Send + Sync {
text: &mut String,
edits: &mut Vec<(Range<usize>, String)>,
);
+ fn settings_ui_item(&self) -> SettingsUiEntry;
}
struct DeserializedSetting(Box<dyn Any>);
@@ -402,6 +434,16 @@ impl SettingsStore {
.expect("no default value for setting type")
}
+ /// Get the value of a setting.
+ ///
+ /// Does not panic
+ pub fn try_get<T: Settings>(&self, path: Option<SettingsLocation>) -> Option<&T> {
+ self.setting_values
+ .get(&TypeId::of::<T>())
+ .map(|value| value.value_for_path(path))
+ .and_then(|value| value.downcast_ref::<T>())
+ }
+
/// Get all values from project specific settings
pub fn get_all_locals<T: Settings>(&self) -> Vec<(WorktreeId, Arc<Path>, &T)> {
self.setting_values
@@ -438,6 +480,13 @@ impl SettingsStore {
&self.raw_user_settings
}
+ /// Replaces current settings with the values from the given JSON.
+ pub fn set_raw_user_settings(&mut self, new_settings: Value, cx: &mut App) -> Result<()> {
+ self.raw_user_settings = new_settings;
+ self.recompute_values(None, cx)?;
+ Ok(())
+ }
+
/// Get the configured settings profile names.
pub fn configured_settings_profiles(&self) -> impl Iterator<Item = &str> {
self.raw_user_settings
@@ -453,6 +502,11 @@ impl SettingsStore {
self.raw_global_settings.as_ref()
}
+ /// Access the raw JSON value of the default settings.
+ pub fn raw_default_settings(&self) -> &Value {
+ &self.raw_default_settings
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut App) -> Self {
let mut this = Self::new(cx);
@@ -491,63 +545,10 @@ impl SettingsStore {
}
}
- pub async fn load_global_settings(fs: &Arc<dyn Fs>) -> Result<String> {
- match fs.load(paths::global_settings_file()).await {
- result @ Ok(_) => result,
- Err(err) => {
- if let Some(e) = err.downcast_ref::<std::io::Error>()
- && e.kind() == std::io::ErrorKind::NotFound
- {
- return Ok("{}".to_string());
- }
- Err(err)
- }
- }
- }
-
- pub fn update_settings_file<T: Settings>(
+ fn update_settings_file_inner(
&self,
fs: Arc<dyn Fs>,
- update: impl 'static + Send + FnOnce(&mut T::FileContent, &App),
- ) {
- self.setting_file_updates_tx
- .unbounded_send(Box::new(move |cx: AsyncApp| {
- async move {
- let old_text = Self::load_settings(&fs).await?;
- let new_text = cx.read_global(|store: &SettingsStore, cx| {
- store.new_text_for_update::<T>(old_text, |content| update(content, cx))
- })?;
- let settings_path = paths::settings_file().as_path();
- if fs.is_file(settings_path).await {
- let resolved_path =
- fs.canonicalize(settings_path).await.with_context(|| {
- format!("Failed to canonicalize settings path {:?}", settings_path)
- })?;
-
- fs.atomic_write(resolved_path.clone(), new_text)
- .await
- .with_context(|| {
- format!("Failed to write settings to file {:?}", resolved_path)
- })?;
- } else {
- fs.atomic_write(settings_path.to_path_buf(), new_text)
- .await
- .with_context(|| {
- format!("Failed to write settings to file {:?}", settings_path)
- })?;
- }
-
- anyhow::Ok(())
- }
- .boxed_local()
- }))
- .ok();
- }
-
- pub fn import_vscode_settings(
- &self,
- fs: Arc<dyn Fs>,
- vscode_settings: VsCodeSettings,
+ update: impl 'static + Send + FnOnce(String, AsyncApp) -> Result<String>,
) -> oneshot::Receiver<Result<()>> {
let (tx, rx) = oneshot::channel::<Result<()>>();
self.setting_file_updates_tx
@@ -555,9 +556,7 @@ impl SettingsStore {
async move {
let res = async move {
let old_text = Self::load_settings(&fs).await?;
- let new_text = cx.read_global(|store: &SettingsStore, _cx| {
- store.get_vscode_edits(old_text, &vscode_settings)
- })?;
+ let new_text = update(old_text, cx)?;
let settings_path = paths::settings_file().as_path();
if fs.is_file(settings_path).await {
let resolved_path =
@@ -580,7 +579,6 @@ impl SettingsStore {
format!("Failed to write settings to file {:?}", settings_path)
})?;
}
-
anyhow::Ok(())
}
.await;
@@ -595,9 +593,67 @@ impl SettingsStore {
}
.boxed_local()
}))
- .ok();
+ .map_err(|err| anyhow::format_err!("Failed to update settings file: {}", err))
+ .log_with_level(log::Level::Warn);
+ return rx;
+ }
+
+ pub fn update_settings_file_at_path(
+ &self,
+ fs: Arc<dyn Fs>,
+ path: &[&str],
+ new_value: serde_json::Value,
+ ) -> oneshot::Receiver<Result<()>> {
+ let key_path = path
+ .into_iter()
+ .cloned()
+ .map(SharedString::new)
+ .collect::<Vec<_>>();
+ let update = move |mut old_text: String, cx: AsyncApp| {
+ cx.read_global(|store: &SettingsStore, _cx| {
+ // todo(settings_ui) use `update_value_in_json_text` for merging new and old objects with comment preservation, needs old value though...
+ let (range, replacement) = replace_value_in_json_text(
+ &old_text,
+ key_path.as_slice(),
+ store.json_tab_size(),
+ Some(&new_value),
+ None,
+ );
+ old_text.replace_range(range, &replacement);
+ old_text
+ })
+ };
+ self.update_settings_file_inner(fs, update)
+ }
+
+ pub fn update_settings_file<T: Settings>(
+ &self,
+ fs: Arc<dyn Fs>,
+ update: impl 'static + Send + FnOnce(&mut T::FileContent, &App),
+ ) {
+ _ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
+ cx.read_global(|store: &SettingsStore, cx| {
+ store.new_text_for_update::<T>(old_text, |content| update(content, cx))
+ })
+ });
+ }
- rx
+ pub fn import_vscode_settings(
+ &self,
+ fs: Arc<dyn Fs>,
+ vscode_settings: VsCodeSettings,
+ ) -> oneshot::Receiver<Result<()>> {
+ self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
+ cx.read_global(|store: &SettingsStore, _cx| {
+ store.get_vscode_edits(old_text, &vscode_settings)
+ })
+ })
+ }
+
+ pub fn settings_ui_items(&self) -> impl IntoIterator<Item = SettingsUiEntry> {
+ self.setting_values
+ .values()
+ .map(|item| item.settings_ui_item())
}
}
@@ -1350,7 +1406,7 @@ impl Debug for SettingsStore {
impl<T: Settings> AnySettingValue for SettingValue<T> {
fn key(&self) -> Option<&'static str> {
- T::KEY
+ T::FileContent::KEY
}
fn setting_type_name(&self) -> &'static str {
@@ -1402,21 +1458,46 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
mut json: &Value,
) -> (Option<&'static str>, Result<DeserializedSetting>) {
let mut key = None;
- if let Some(k) = T::KEY {
+ if let Some(k) = T::FileContent::KEY {
if let Some(value) = json.get(k) {
json = value;
key = Some(k);
- } else if let Some((k, value)) = T::FALLBACK_KEY.and_then(|k| Some((k, json.get(k)?))) {
+ } else if let Some((k, value)) =
+ T::FileContent::FALLBACK_KEY.and_then(|k| Some((k, json.get(k)?)))
+ {
json = value;
key = Some(k);
} else {
let value = T::FileContent::default();
- return (T::KEY, Ok(DeserializedSetting(Box::new(value))));
+ return (
+ T::FileContent::KEY,
+ Ok(DeserializedSetting(Box::new(value))),
+ );
}
}
- let value = T::FileContent::deserialize(json)
+ let value = serde_path_to_error::deserialize::<_, T::FileContent>(json)
.map(|value| DeserializedSetting(Box::new(value)))
- .map_err(anyhow::Error::from);
+ .map_err(|err| {
+ // construct a path using the key and reported error path if possible.
+ // Unfortunately, serde_path_to_error does not expose the necessary
+ // methods and data to simply add the key to the path
+ let mut path = String::new();
+ if let Some(key) = key {
+ path.push_str(key);
+ }
+ let err_path = err.path().to_string();
+ // when the path is empty, serde_path_to_error stringifies the path as ".",
+ // when the path is unknown, serde_path_to_error stringifies the path as an empty string
+ if !err_path.is_empty() && !err_path.starts_with(".") {
+ path.push('.');
+ path.push_str(&err_path);
+ }
+ if path.is_empty() {
+ anyhow::Error::from(err.into_inner())
+ } else {
+ anyhow::anyhow!("'{}': {}", err.into_inner(), path)
+ }
+ });
(key, value)
}
@@ -1435,6 +1516,7 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
}
}
}
+
self.global_value
.as_ref()
.unwrap_or_else(|| panic!("no default value for setting {}", self.setting_type_name()))
@@ -1493,6 +1575,10 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
edits,
);
}
+
+ fn settings_ui_item(&self) -> SettingsUiEntry {
+ <<T as Settings>::FileContent as SettingsUi>::settings_ui_entry()
+ }
}
#[cfg(test)]
@@ -1500,7 +1586,10 @@ mod tests {
use crate::VsCodeSettingsSource;
use super::*;
+ // This is so the SettingsUi macro can still work properly
+ use crate as settings;
use serde_derive::Deserialize;
+ use settings_ui_macros::{SettingsKey, SettingsUi};
use unindent::Unindent;
#[gpui::test]
@@ -2043,14 +2132,15 @@ mod tests {
pretty_assertions::assert_eq!(new, expected);
}
- #[derive(Debug, PartialEq, Deserialize)]
+ #[derive(Debug, PartialEq, Deserialize, SettingsUi)]
struct UserSettings {
name: String,
age: u32,
staff: bool,
}
- #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
+ #[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+ #[settings_key(key = "user")]
struct UserSettingsContent {
name: Option<String>,
age: Option<u32>,
@@ -2058,7 +2148,6 @@ mod tests {
}
impl Settings for UserSettings {
- const KEY: Option<&'static str> = Some("user");
type FileContent = UserSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -2073,12 +2162,37 @@ mod tests {
#[derive(Debug, Deserialize, PartialEq)]
struct TurboSetting(bool);
+ #[derive(
+ Copy,
+ Clone,
+ PartialEq,
+ Eq,
+ Debug,
+ Default,
+ serde::Serialize,
+ serde::Deserialize,
+ SettingsUi,
+ SettingsKey,
+ JsonSchema,
+ )]
+ #[serde(default)]
+ #[settings_key(None)]
+ pub struct TurboSettingContent {
+ turbo: Option<bool>,
+ }
+
impl Settings for TurboSetting {
- const KEY: Option<&'static str> = Some("turbo");
- type FileContent = Option<bool>;
+ type FileContent = TurboSettingContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
- sources.json_merge()
+ Ok(Self(
+ sources
+ .user
+ .or(sources.server)
+ .unwrap_or(sources.default)
+ .turbo
+ .unwrap_or_default(),
+ ))
}
fn import_from_vscode(_vscode: &VsCodeSettings, _current: &mut Self::FileContent) {}
@@ -2092,15 +2206,14 @@ mod tests {
key2: String,
}
- #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+ #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+ #[settings_key(None)]
struct MultiKeySettingsJson {
key1: Option<String>,
key2: Option<String>,
}
impl Settings for MultiKeySettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = MultiKeySettingsJson;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -2130,15 +2243,16 @@ mod tests {
Hour24,
}
- #[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
+ #[derive(
+ Clone, Default, Debug, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey,
+ )]
+ #[settings_key(key = "journal")]
struct JournalSettingsJson {
pub path: Option<String>,
pub hour_format: Option<HourFormat>,
}
impl Settings for JournalSettings {
- const KEY: Option<&'static str> = Some("journal");
-
type FileContent = JournalSettingsJson;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -2218,7 +2332,10 @@ mod tests {
);
}
- #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+ #[derive(
+ Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey,
+ )]
+ #[settings_key(None)]
struct LanguageSettings {
#[serde(default)]
languages: HashMap<String, LanguageSettingEntry>,
@@ -2231,8 +2348,6 @@ mod tests {
}
impl Settings for LanguageSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -0,0 +1,187 @@
+use std::any::TypeId;
+
+use anyhow::Context as _;
+use fs::Fs;
+use gpui::{AnyElement, App, AppContext as _, ReadGlobal as _, Window};
+use smallvec::SmallVec;
+
+use crate::SettingsStore;
+
+pub trait SettingsUi {
+ fn settings_ui_item() -> SettingsUiItem {
+ // todo(settings_ui): remove this default impl; only `settings_ui_entry` should have a default impl,
+ // because it's expected that the macro or a custom impl uses the item and the known paths to create the entry
+ SettingsUiItem::None
+ }
+
+ fn settings_ui_entry() -> SettingsUiEntry {
+ SettingsUiEntry {
+ path: None,
+ title: "None entry",
+ item: SettingsUiItem::None,
+ documentation: None,
+ }
+ }
+}
+
+pub struct SettingsUiEntry {
+ /// The path in the settings JSON file for this setting, relative to the parent.
+ /// `None` implies `#[serde(flatten)]`, or `Settings::KEY.is_none()` for top-level settings.
+ pub path: Option<&'static str>,
+ /// What is displayed for the text for this entry
+ pub title: &'static str,
+ /// Documentation for this entry, constructed from the documentation comment above the struct or field.
+ pub documentation: Option<&'static str>,
+ pub item: SettingsUiItem,
+}
+
+pub enum SettingsUiItemSingle {
+ SwitchField,
+ /// A numeric stepper for a specific type of number
+ NumericStepper(NumType),
+ ToggleGroup {
+ /// Must be the same length as `labels`
+ variants: &'static [&'static str],
+ /// Must be the same length as `variants`
+ labels: &'static [&'static str],
+ },
+ /// This should be used when toggle group size > 6
+ DropDown {
+ /// Must be the same length as `labels`
+ variants: &'static [&'static str],
+ /// Must be the same length as `variants`
+ labels: &'static [&'static str],
+ },
+ Custom(Box<dyn Fn(SettingsValue<serde_json::Value>, &mut Window, &mut App) -> AnyElement>),
+}
+
+pub struct SettingsValue<T> {
+ pub title: &'static str,
+ pub documentation: Option<&'static str>,
+ pub path: SmallVec<[&'static str; 1]>,
+ pub value: Option<T>,
+ pub default_value: T,
+}
+
+impl<T> SettingsValue<T> {
+ pub fn read(&self) -> &T {
+ match &self.value {
+ Some(value) => value,
+ None => &self.default_value,
+ }
+ }
+}
+
+impl SettingsValue<serde_json::Value> {
+ pub fn write_value(path: &SmallVec<[&'static str; 1]>, value: serde_json::Value, cx: &mut App) {
+ let settings_store = SettingsStore::global(cx);
+ let fs = <dyn Fs>::global(cx);
+
+ let rx = settings_store.update_settings_file_at_path(fs.clone(), path.as_slice(), value);
+
+ let path = path.clone();
+ cx.background_spawn(async move {
+ rx.await?
+ .with_context(|| format!("Failed to update setting at path `{:?}`", path.join(".")))
+ })
+ .detach_and_log_err(cx);
+ }
+}
+
+impl<T: serde::Serialize> SettingsValue<T> {
+ pub fn write(
+ path: &SmallVec<[&'static str; 1]>,
+ value: T,
+ cx: &mut App,
+ ) -> Result<(), serde_json::Error> {
+ SettingsValue::write_value(path, serde_json::to_value(value)?, cx);
+ Ok(())
+ }
+}
+
+pub struct SettingsUiItemDynamic {
+ pub options: Vec<SettingsUiEntry>,
+ pub determine_option: fn(&serde_json::Value, &App) -> usize,
+}
+
+pub struct SettingsUiItemGroup {
+ pub items: Vec<SettingsUiEntry>,
+}
+
+pub enum SettingsUiItem {
+ Group(SettingsUiItemGroup),
+ Single(SettingsUiItemSingle),
+ Dynamic(SettingsUiItemDynamic),
+ None,
+}
+
+impl SettingsUi for bool {
+ fn settings_ui_item() -> SettingsUiItem {
+ SettingsUiItem::Single(SettingsUiItemSingle::SwitchField)
+ }
+}
+
+impl SettingsUi for Option<bool> {
+ fn settings_ui_item() -> SettingsUiItem {
+ SettingsUiItem::Single(SettingsUiItemSingle::SwitchField)
+ }
+}
+
+#[repr(u8)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum NumType {
+ U64 = 0,
+ U32 = 1,
+ F32 = 2,
+ USIZE = 3,
+}
+
+pub static NUM_TYPE_NAMES: std::sync::LazyLock<[&'static str; NumType::COUNT]> =
+ std::sync::LazyLock::new(|| NumType::ALL.map(NumType::type_name));
+pub static NUM_TYPE_IDS: std::sync::LazyLock<[TypeId; NumType::COUNT]> =
+ std::sync::LazyLock::new(|| NumType::ALL.map(NumType::type_id));
+
+impl NumType {
+ const COUNT: usize = 3;
+ const ALL: [NumType; Self::COUNT] = [NumType::U64, NumType::U32, NumType::F32];
+
+ pub fn type_id(self) -> TypeId {
+ match self {
+ NumType::U64 => TypeId::of::<u64>(),
+ NumType::U32 => TypeId::of::<u32>(),
+ NumType::F32 => TypeId::of::<f32>(),
+ NumType::USIZE => TypeId::of::<usize>(),
+ }
+ }
+
+ pub fn type_name(self) -> &'static str {
+ match self {
+ NumType::U64 => std::any::type_name::<u64>(),
+ NumType::U32 => std::any::type_name::<u32>(),
+ NumType::F32 => std::any::type_name::<f32>(),
+ NumType::USIZE => std::any::type_name::<usize>(),
+ }
+ }
+}
+
+macro_rules! numeric_stepper_for_num_type {
+ ($type:ty, $num_type:ident) => {
+ impl SettingsUi for $type {
+ fn settings_ui_item() -> SettingsUiItem {
+ SettingsUiItem::Single(SettingsUiItemSingle::NumericStepper(NumType::$num_type))
+ }
+ }
+
+ impl SettingsUi for Option<$type> {
+ fn settings_ui_item() -> SettingsUiItem {
+ SettingsUiItem::Single(SettingsUiItemSingle::NumericStepper(NumType::$num_type))
+ }
+ }
+ };
+}
+
+numeric_stepper_for_num_type!(u64, U64);
+numeric_stepper_for_num_type!(u32, U32);
+// todo(settings_ui) is there a better ui for f32?
+numeric_stepper_for_num_type!(f32, F32);
+numeric_stepper_for_num_type!(usize, USIZE);
@@ -2,7 +2,7 @@ use anyhow::{Context as _, Result, anyhow};
use fs::Fs;
use paths::{cursor_settings_file_paths, vscode_settings_file_paths};
use serde_json::{Map, Value};
-use std::{path::Path, rc::Rc, sync::Arc};
+use std::{path::Path, sync::Arc};
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum VsCodeSettingsSource {
@@ -21,7 +21,7 @@ impl std::fmt::Display for VsCodeSettingsSource {
pub struct VsCodeSettings {
pub source: VsCodeSettingsSource,
- pub path: Rc<Path>,
+ pub path: Arc<Path>,
content: Map<String, Value>,
}
@@ -135,7 +135,7 @@ impl SettingsProfileSelectorDelegate {
) -> Option<String> {
if let Some(profile_name) = profile_name {
cx.set_global(ActiveSettingsProfileName(profile_name.clone()));
- return Some(profile_name.clone());
+ return Some(profile_name);
}
if cx.has_global::<ActiveSettingsProfileName>() {
@@ -11,45 +11,31 @@ workspace = true
[lib]
path = "src/settings_ui.rs"
+[features]
+default = []
+test-support = []
+
[dependencies]
anyhow.workspace = true
-collections.workspace = true
-command_palette.workspace = true
command_palette_hooks.workspace = true
-component.workspace = true
-db.workspace = true
editor.workspace = true
feature_flags.workspace = true
-fs.workspace = true
-fuzzy.workspace = true
gpui.workspace = true
-itertools.workspace = true
-language.workspace = true
-log.workspace = true
-menu.workspace = true
-notifications.workspace = true
-paths.workspace = true
-project.workspace = true
-search.workspace = true
-serde.workspace = true
serde_json.workspace = true
+serde.workspace = true
settings.workspace = true
-telemetry.workspace = true
-tempfile.workspace = true
+smallvec.workspace = true
theme.workspace = true
-tree-sitter-json.workspace = true
-tree-sitter-rust.workspace = true
ui.workspace = true
-ui_input.workspace = true
-util.workspace = true
-vim.workspace = true
-workspace-hack.workspace = true
workspace.workspace = true
-zed_actions.workspace = true
+workspace-hack.workspace = true
+
[dev-dependencies]
-db = {"workspace"= true, "features" = ["test-support"]}
-fs = { workspace = true, features = ["test-support"] }
-gpui = { workspace = true, features = ["test-support"] }
-project = { workspace = true, features = ["test-support"] }
-workspace = { workspace = true, features = ["test-support"] }
+debugger_ui.workspace = true
+
+# Uncomment other workspace dependencies as needed
+# assistant.workspace = true
+# client.workspace = true
+# project.workspace = true
+# settings.workspace = true
@@ -83,7 +83,7 @@ impl RenderOnce for ThemeControl {
DropdownMenu::new(
"theme",
- value.clone(),
+ value,
ContextMenu::build(window, cx, |mut menu, _, cx| {
let theme_registry = ThemeRegistry::global(cx);
@@ -204,7 +204,7 @@ impl RenderOnce for UiFontFamilyControl {
.child(Icon::new(IconName::Font))
.child(DropdownMenu::new(
"ui-font-family",
- value.clone(),
+ value,
ContextMenu::build(window, cx, |mut menu, _, cx| {
let font_family_cache = FontFamilyCache::global(cx);
@@ -1,20 +1,27 @@
mod appearance_settings_controls;
use std::any::TypeId;
+use std::ops::{Not, Range};
+use anyhow::Context as _;
use command_palette_hooks::CommandPaletteFilter;
use editor::EditorSettingsControls;
use feature_flags::{FeatureFlag, FeatureFlagViewExt};
-use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, actions};
-use ui::prelude::*;
-use workspace::item::{Item, ItemEvent};
-use workspace::{Workspace, with_active_or_new_workspace};
+use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, ReadGlobal, ScrollHandle, actions};
+use settings::{
+ NumType, SettingsStore, SettingsUiEntry, SettingsUiItem, SettingsUiItemDynamic,
+ SettingsUiItemGroup, SettingsUiItemSingle, SettingsValue,
+};
+use smallvec::SmallVec;
+use ui::{NumericStepper, SwitchField, ToggleButtonGroup, ToggleButtonSimple, prelude::*};
+use workspace::{
+ Workspace,
+ item::{Item, ItemEvent},
+ with_active_or_new_workspace,
+};
use crate::appearance_settings_controls::AppearanceSettingsControls;
-pub mod keybindings;
-pub mod ui_components;
-
pub struct SettingsUiFeatureFlag;
impl FeatureFlag for SettingsUiFeatureFlag {
@@ -63,7 +70,7 @@ pub fn init(cx: &mut App) {
move |is_enabled, _workspace, _, cx| {
if is_enabled {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
- filter.show_action_types(settings_ui_actions.iter());
+ filter.show_action_types(&settings_ui_actions);
});
} else {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
@@ -75,18 +82,18 @@ pub fn init(cx: &mut App) {
.detach();
})
.detach();
-
- keybindings::init(cx);
}
pub struct SettingsPage {
focus_handle: FocusHandle,
+ settings_tree: SettingsUiTree,
}
impl SettingsPage {
pub fn new(_workspace: &Workspace, cx: &mut Context<Workspace>) -> Entity<Self> {
cx.new(|cx| Self {
focus_handle: cx.focus_handle(),
+ settings_tree: SettingsUiTree::new(cx),
})
}
}
@@ -119,26 +126,661 @@ impl Item for SettingsPage {
}
}
+// We want to iterate over the side bar with root groups
+// - this is a loop over top level groups, and if any are expanded, recursively displaying their items
+// - Should be able to get all items from a group (flatten a group)
+// - Should be able to toggle/untoggle groups in UI (at least in sidebar)
+// - Search should be available
+// - there should be an index of text -> item mappings, for using fuzzy::match
+// - Do we want to show the parent groups when a item is matched?
+
+struct UiEntry {
+ title: &'static str,
+ path: Option<&'static str>,
+ documentation: Option<&'static str>,
+ _depth: usize,
+ // a
+ // b < a descendant range < a total descendant range
+ // f | |
+ // g | |
+ // c < |
+ // d |
+ // e <
+ descendant_range: Range<usize>,
+ total_descendant_range: Range<usize>,
+ next_sibling: Option<usize>,
+ // expanded: bool,
+ render: Option<SettingsUiItemSingle>,
+ /// For dynamic items this is a way to select a value from a list of values
+ /// this is always none for non-dynamic items
+ select_descendant: Option<fn(&serde_json::Value, &App) -> usize>,
+}
+
+impl UiEntry {
+ fn first_descendant_index(&self) -> Option<usize> {
+ return self
+ .descendant_range
+ .is_empty()
+ .not()
+ .then_some(self.descendant_range.start);
+ }
+
+ fn nth_descendant_index(&self, tree: &[UiEntry], n: usize) -> Option<usize> {
+ let first_descendant_index = self.first_descendant_index()?;
+ let mut current_index = 0;
+ let mut current_descendant_index = Some(first_descendant_index);
+ while let Some(descendant_index) = current_descendant_index
+ && current_index < n
+ {
+ current_index += 1;
+ current_descendant_index = tree[descendant_index].next_sibling;
+ }
+ current_descendant_index
+ }
+}
+
+pub struct SettingsUiTree {
+ root_entry_indices: Vec<usize>,
+ entries: Vec<UiEntry>,
+ active_entry_index: usize,
+}
+
+fn build_tree_item(
+ tree: &mut Vec<UiEntry>,
+ entry: SettingsUiEntry,
+ depth: usize,
+ prev_index: Option<usize>,
+) {
+ let index = tree.len();
+ tree.push(UiEntry {
+ title: entry.title,
+ path: entry.path,
+ documentation: entry.documentation,
+ _depth: depth,
+ descendant_range: index + 1..index + 1,
+ total_descendant_range: index + 1..index + 1,
+ render: None,
+ next_sibling: None,
+ select_descendant: None,
+ });
+ if let Some(prev_index) = prev_index {
+ tree[prev_index].next_sibling = Some(index);
+ }
+ match entry.item {
+ SettingsUiItem::Group(SettingsUiItemGroup { items: group_items }) => {
+ for group_item in group_items {
+ let prev_index = tree[index]
+ .descendant_range
+ .is_empty()
+ .not()
+ .then_some(tree[index].descendant_range.end - 1);
+ tree[index].descendant_range.end = tree.len() + 1;
+ build_tree_item(tree, group_item, depth + 1, prev_index);
+ tree[index].total_descendant_range.end = tree.len();
+ }
+ }
+ SettingsUiItem::Single(item) => {
+ tree[index].render = Some(item);
+ }
+ SettingsUiItem::Dynamic(SettingsUiItemDynamic {
+ options,
+ determine_option,
+ }) => {
+ tree[index].select_descendant = Some(determine_option);
+ for option in options {
+ let prev_index = tree[index]
+ .descendant_range
+ .is_empty()
+ .not()
+ .then_some(tree[index].descendant_range.end - 1);
+ tree[index].descendant_range.end = tree.len() + 1;
+ build_tree_item(tree, option, depth + 1, prev_index);
+ tree[index].total_descendant_range.end = tree.len();
+ }
+ }
+ SettingsUiItem::None => {
+ return;
+ }
+ }
+}
+
+impl SettingsUiTree {
+ pub fn new(cx: &App) -> Self {
+ let settings_store = SettingsStore::global(cx);
+ let mut tree = vec![];
+ let mut root_entry_indices = vec![];
+ for item in settings_store.settings_ui_items() {
+ if matches!(item.item, SettingsUiItem::None)
+ // todo(settings_ui): How to handle top level single items? BaseKeymap is in this category. Probably need a way to
+ // link them to other groups
+ || matches!(item.item, SettingsUiItem::Single(_))
+ {
+ continue;
+ }
+
+ let prev_root_entry_index = root_entry_indices.last().copied();
+ root_entry_indices.push(tree.len());
+ build_tree_item(&mut tree, item, 0, prev_root_entry_index);
+ }
+
+ root_entry_indices.sort_by_key(|i| tree[*i].title);
+
+ let active_entry_index = root_entry_indices[0];
+ Self {
+ entries: tree,
+ root_entry_indices,
+ active_entry_index,
+ }
+ }
+
+ // todo(settings_ui): Make sure `Item::None` paths are added to the paths tree,
+ // so that we can keep none/skip and still test in CI that all settings have
+ #[cfg(feature = "test-support")]
+ pub fn all_paths(&self, cx: &App) -> Vec<Vec<&'static str>> {
+ fn all_paths_rec(
+ tree: &[UiEntry],
+ paths: &mut Vec<Vec<&'static str>>,
+ current_path: &mut Vec<&'static str>,
+ idx: usize,
+ cx: &App,
+ ) {
+ let child = &tree[idx];
+ let mut pushed_path = false;
+ if let Some(path) = child.path.as_ref() {
+ current_path.push(path);
+ paths.push(current_path.clone());
+ pushed_path = true;
+ }
+ // todo(settings_ui): handle dynamic nodes here
+ let selected_descendant_index = child
+ .select_descendant
+ .map(|select_descendant| {
+ read_settings_value_from_path(
+ SettingsStore::global(cx).raw_default_settings(),
+ ¤t_path,
+ )
+ .map(|value| select_descendant(value, cx))
+ })
+ .and_then(|selected_descendant_index| {
+ selected_descendant_index.map(|index| child.nth_descendant_index(tree, index))
+ });
+
+ if let Some(selected_descendant_index) = selected_descendant_index {
+ // just silently fail if we didn't find a setting value for the path
+ if let Some(descendant_index) = selected_descendant_index {
+ all_paths_rec(tree, paths, current_path, descendant_index, cx);
+ }
+ } else if let Some(desc_idx) = child.first_descendant_index() {
+ let mut desc_idx = Some(desc_idx);
+ while let Some(descendant_index) = desc_idx {
+ all_paths_rec(&tree, paths, current_path, descendant_index, cx);
+ desc_idx = tree[descendant_index].next_sibling;
+ }
+ }
+ if pushed_path {
+ current_path.pop();
+ }
+ }
+
+ let mut paths = Vec::new();
+ for &index in &self.root_entry_indices {
+ all_paths_rec(&self.entries, &mut paths, &mut Vec::new(), index, cx);
+ }
+ paths
+ }
+}
+
+fn render_nav(tree: &SettingsUiTree, _window: &mut Window, cx: &mut Context<SettingsPage>) -> Div {
+ let mut nav = v_flex().p_4().gap_2();
+ for &index in &tree.root_entry_indices {
+ nav = nav.child(
+ div()
+ .id(index)
+ .on_click(cx.listener(move |settings, _, _, _| {
+ settings.settings_tree.active_entry_index = index;
+ }))
+ .child(
+ Label::new(SharedString::new_static(tree.entries[index].title))
+ .size(LabelSize::Large)
+ .when(tree.active_entry_index == index, |this| {
+ this.color(Color::Selected)
+ }),
+ ),
+ );
+ }
+ nav
+}
+
+fn render_content(
+ tree: &SettingsUiTree,
+ window: &mut Window,
+ cx: &mut Context<SettingsPage>,
+) -> Div {
+ let content = v_flex().size_full().gap_4();
+
+ let mut path = smallvec::smallvec![];
+
+ fn render_recursive(
+ tree: &SettingsUiTree,
+ index: usize,
+ path: &mut SmallVec<[&'static str; 1]>,
+ mut element: Div,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Div {
+ let Some(child) = tree.entries.get(index) else {
+ return element.child(
+ Label::new(SharedString::new_static("No settings found")).color(Color::Error),
+ );
+ };
+
+ element =
+ element.child(Label::new(SharedString::new_static(child.title)).size(LabelSize::Large));
+
+ // todo(settings_ui): subgroups?
+ let mut pushed_path = false;
+ if let Some(child_path) = child.path {
+ path.push(child_path);
+ pushed_path = true;
+ }
+ let settings_value = settings_value_from_settings_and_path(
+ path.clone(),
+ child.title,
+ child.documentation,
+ // PERF: how to structure this better? There feels like there's a way to avoid the clone
+ // and every value lookup
+ SettingsStore::global(cx).raw_user_settings(),
+ SettingsStore::global(cx).raw_default_settings(),
+ );
+ if let Some(select_descendant) = child.select_descendant {
+ let selected_descendant = child
+ .nth_descendant_index(&tree.entries, select_descendant(settings_value.read(), cx));
+ if let Some(descendant_index) = selected_descendant {
+ element = render_recursive(&tree, descendant_index, path, element, window, cx);
+ }
+ }
+ if let Some(child_render) = child.render.as_ref() {
+ element = element.child(div().child(render_item_single(
+ settings_value,
+ child_render,
+ window,
+ cx,
+ )));
+ } else if let Some(child_index) = child.first_descendant_index() {
+ let mut index = Some(child_index);
+ while let Some(sub_child_index) = index {
+ element = render_recursive(tree, sub_child_index, path, element, window, cx);
+ index = tree.entries[sub_child_index].next_sibling;
+ }
+ } else {
+ element =
+ element.child(div().child(Label::new("// skipped (for now)").color(Color::Muted)))
+ }
+
+ if pushed_path {
+ path.pop();
+ }
+ return element;
+ }
+
+ return render_recursive(
+ tree,
+ tree.active_entry_index,
+ &mut path,
+ content,
+ window,
+ cx,
+ );
+}
+
impl Render for SettingsPage {
- fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- v_flex()
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let scroll_handle = window.use_state(cx, |_, _| ScrollHandle::new());
+ div()
+ .grid()
+ .grid_cols(16)
.p_4()
+ .bg(cx.theme().colors().editor_background)
.size_full()
- .gap_4()
- .child(Label::new("Settings").size(LabelSize::Large))
.child(
- v_flex().gap_1().child(Label::new("Appearance")).child(
- v_flex()
- .elevation_2(cx)
- .child(AppearanceSettingsControls::new()),
- ),
+ div()
+ .id("settings-ui-nav")
+ .col_span(2)
+ .h_full()
+ .child(render_nav(&self.settings_tree, window, cx)),
)
.child(
- v_flex().gap_1().child(Label::new("Editor")).child(
- v_flex()
- .elevation_2(cx)
- .child(EditorSettingsControls::new()),
+ div().col_span(6).h_full().child(
+ render_content(&self.settings_tree, window, cx)
+ .id("settings-ui-content")
+ .track_scroll(scroll_handle.read(cx))
+ .overflow_y_scroll(),
),
)
}
}
+
+// todo(settings_ui): remove, only here as inspiration
+#[allow(dead_code)]
+fn render_old_appearance_settings(cx: &mut App) -> impl IntoElement {
+ v_flex()
+ .p_4()
+ .size_full()
+ .gap_4()
+ .child(Label::new("Settings").size(LabelSize::Large))
+ .child(
+ v_flex().gap_1().child(Label::new("Appearance")).child(
+ v_flex()
+ .elevation_2(cx)
+ .child(AppearanceSettingsControls::new()),
+ ),
+ )
+ .child(
+ v_flex().gap_1().child(Label::new("Editor")).child(
+ v_flex()
+ .elevation_2(cx)
+ .child(EditorSettingsControls::new()),
+ ),
+ )
+}
+
+fn element_id_from_path(path: &[&'static str]) -> ElementId {
+ if path.len() == 0 {
+ panic!("Path length must not be zero");
+ } else if path.len() == 1 {
+ ElementId::Name(SharedString::new_static(path[0]))
+ } else {
+ ElementId::from((
+ ElementId::from(SharedString::new_static(path[path.len() - 2])),
+ SharedString::new_static(path[path.len() - 1]),
+ ))
+ }
+}
+
+fn render_item_single(
+ settings_value: SettingsValue<serde_json::Value>,
+ item: &SettingsUiItemSingle,
+ window: &mut Window,
+ cx: &mut App,
+) -> AnyElement {
+ match item {
+ SettingsUiItemSingle::Custom(_) => div()
+ .child(format!("Item: {}", settings_value.path.join(".")))
+ .into_any_element(),
+ SettingsUiItemSingle::SwitchField => {
+ render_any_item(settings_value, render_switch_field, window, cx)
+ }
+ SettingsUiItemSingle::NumericStepper(num_type) => {
+ render_any_numeric_stepper(settings_value, *num_type, window, cx)
+ }
+ SettingsUiItemSingle::ToggleGroup {
+ variants: values,
+ labels: titles,
+ } => render_toggle_button_group(settings_value, values, titles, window, cx),
+ SettingsUiItemSingle::DropDown { .. } => {
+ unimplemented!("This")
+ }
+ }
+}
+
+pub fn read_settings_value_from_path<'a>(
+ settings_contents: &'a serde_json::Value,
+ path: &[&str],
+) -> Option<&'a serde_json::Value> {
+ // todo(settings_ui) make non recursive, and move to `settings` alongside SettingsValue, and add method to SettingsValue to get nested
+ let Some((key, remaining)) = path.split_first() else {
+ return Some(settings_contents);
+ };
+ let Some(value) = settings_contents.get(key) else {
+ return None;
+ };
+
+ read_settings_value_from_path(value, remaining)
+}
+
+fn downcast_any_item<T: serde::de::DeserializeOwned>(
+ settings_value: SettingsValue<serde_json::Value>,
+) -> SettingsValue<T> {
+ let value = settings_value
+ .value
+ .map(|value| serde_json::from_value::<T>(value).expect("value is not a T"));
+ // todo(settings_ui) Create test that constructs UI tree, and asserts that all elements have default values
+ let default_value = serde_json::from_value::<T>(settings_value.default_value)
+ .with_context(|| format!("path: {:?}", settings_value.path.join(".")))
+ .expect("default value is not an Option<T>");
+ let deserialized_setting_value = SettingsValue {
+ title: settings_value.title,
+ path: settings_value.path,
+ documentation: settings_value.documentation,
+ value,
+ default_value,
+ };
+ deserialized_setting_value
+}
+
+fn render_any_item<T: serde::de::DeserializeOwned>(
+ settings_value: SettingsValue<serde_json::Value>,
+ render_fn: impl Fn(SettingsValue<T>, &mut Window, &mut App) -> AnyElement + 'static,
+ window: &mut Window,
+ cx: &mut App,
+) -> AnyElement {
+ let deserialized_setting_value = downcast_any_item(settings_value);
+ render_fn(deserialized_setting_value, window, cx)
+}
+
+fn render_any_numeric_stepper(
+ settings_value: SettingsValue<serde_json::Value>,
+ num_type: NumType,
+ window: &mut Window,
+ cx: &mut App,
+) -> AnyElement {
+ match num_type {
+ NumType::U64 => render_numeric_stepper::<u64>(
+ downcast_any_item(settings_value),
+ u64::saturating_sub,
+ u64::saturating_add,
+ |n| {
+ serde_json::Number::try_from(n)
+ .context("Failed to convert u64 to serde_json::Number")
+ },
+ window,
+ cx,
+ ),
+ NumType::U32 => render_numeric_stepper::<u32>(
+ downcast_any_item(settings_value),
+ u32::saturating_sub,
+ u32::saturating_add,
+ |n| {
+ serde_json::Number::try_from(n)
+ .context("Failed to convert u32 to serde_json::Number")
+ },
+ window,
+ cx,
+ ),
+ NumType::F32 => render_numeric_stepper::<f32>(
+ downcast_any_item(settings_value),
+ |a, b| a - b,
+ |a, b| a + b,
+ |n| {
+ serde_json::Number::from_f64(n as f64)
+ .context("Failed to convert f32 to serde_json::Number")
+ },
+ window,
+ cx,
+ ),
+ NumType::USIZE => render_numeric_stepper::<usize>(
+ downcast_any_item(settings_value),
+ usize::saturating_sub,
+ usize::saturating_add,
+ |n| {
+ serde_json::Number::try_from(n)
+ .context("Failed to convert usize to serde_json::Number")
+ },
+ window,
+ cx,
+ ),
+ }
+}
+
+fn render_numeric_stepper<
+ T: serde::de::DeserializeOwned + std::fmt::Display + Copy + From<u8> + 'static,
+>(
+ value: SettingsValue<T>,
+ saturating_sub: fn(T, T) -> T,
+ saturating_add: fn(T, T) -> T,
+ to_serde_number: fn(T) -> anyhow::Result<serde_json::Number>,
+ _window: &mut Window,
+ _cx: &mut App,
+) -> AnyElement {
+ let id = element_id_from_path(&value.path);
+ let path = value.path.clone();
+ let num = *value.read();
+
+ NumericStepper::new(
+ id,
+ num.to_string(),
+ {
+ let path = value.path.clone();
+ move |_, _, cx| {
+ let Some(number) = to_serde_number(saturating_sub(num, 1.into())).ok() else {
+ return;
+ };
+ let new_value = serde_json::Value::Number(number);
+ SettingsValue::write_value(&path, new_value, cx);
+ }
+ },
+ move |_, _, cx| {
+ let Some(number) = to_serde_number(saturating_add(num, 1.into())).ok() else {
+ return;
+ };
+
+ let new_value = serde_json::Value::Number(number);
+
+ SettingsValue::write_value(&path, new_value, cx);
+ },
+ )
+ .style(ui::NumericStepperStyle::Outlined)
+ .into_any_element()
+}
+
+fn render_switch_field(
+ value: SettingsValue<bool>,
+ _window: &mut Window,
+ _cx: &mut App,
+) -> AnyElement {
+ let id = element_id_from_path(&value.path);
+ let path = value.path.clone();
+ SwitchField::new(
+ id,
+ SharedString::new_static(value.title),
+ value.documentation.map(SharedString::new_static),
+ match value.read() {
+ true => ToggleState::Selected,
+ false => ToggleState::Unselected,
+ },
+ move |toggle_state, _, cx| {
+ let new_value = serde_json::Value::Bool(match toggle_state {
+ ToggleState::Indeterminate => {
+ return;
+ }
+ ToggleState::Selected => true,
+ ToggleState::Unselected => false,
+ });
+
+ SettingsValue::write_value(&path, new_value, cx);
+ },
+ )
+ .into_any_element()
+}
+
+fn render_toggle_button_group(
+ value: SettingsValue<serde_json::Value>,
+ variants: &'static [&'static str],
+ labels: &'static [&'static str],
+ _: &mut Window,
+ _: &mut App,
+) -> AnyElement {
+ let value = downcast_any_item::<String>(value);
+
+ fn make_toggle_group<const LEN: usize>(
+ group_name: &'static str,
+ value: SettingsValue<String>,
+ variants: &'static [&'static str],
+ labels: &'static [&'static str],
+ ) -> AnyElement {
+ let mut variants_array: [(&'static str, &'static str); LEN] = [("unused", "unused"); LEN];
+ for i in 0..LEN {
+ variants_array[i] = (variants[i], labels[i]);
+ }
+ let active_value = value.read();
+
+ let selected_idx = variants_array
+ .iter()
+ .enumerate()
+ .find_map(|(idx, (variant, _))| {
+ if variant == &active_value {
+ Some(idx)
+ } else {
+ None
+ }
+ });
+
+ let mut idx = 0;
+ ToggleButtonGroup::single_row(
+ group_name,
+ variants_array.map(|(variant, label)| {
+ let path = value.path.clone();
+ idx += 1;
+ ToggleButtonSimple::new(label, move |_, _, cx| {
+ SettingsValue::write_value(
+ &path,
+ serde_json::Value::String(variant.to_string()),
+ cx,
+ );
+ })
+ }),
+ )
+ .when_some(selected_idx, |this, ix| this.selected_index(ix))
+ .style(ui::ToggleButtonGroupStyle::Filled)
+ .into_any_element()
+ }
+
+ macro_rules! templ_toggl_with_const_param {
+ ($len:expr) => {
+ if variants.len() == $len {
+ return make_toggle_group::<$len>(value.title, value, variants, labels);
+ }
+ };
+ }
+ templ_toggl_with_const_param!(1);
+ templ_toggl_with_const_param!(2);
+ templ_toggl_with_const_param!(3);
+ templ_toggl_with_const_param!(4);
+ templ_toggl_with_const_param!(5);
+ templ_toggl_with_const_param!(6);
+ unreachable!("Too many variants");
+}
+
+fn settings_value_from_settings_and_path(
+ path: SmallVec<[&'static str; 1]>,
+ title: &'static str,
+ documentation: Option<&'static str>,
+ user_settings: &serde_json::Value,
+ default_settings: &serde_json::Value,
+) -> SettingsValue<serde_json::Value> {
+ let default_value = read_settings_value_from_path(default_settings, &path)
+ .with_context(|| format!("No default value for item at path {:?}", path.join(".")))
+ .expect("Default value set for item")
+ .clone();
+
+ let value = read_settings_value_from_path(user_settings, &path).cloned();
+ let settings_value = SettingsValue {
+ default_value,
+ value,
+ documentation,
+ path: path.clone(),
+ // todo(settings_ui) is title required inside SettingsValue?
+ title,
+ };
+ return settings_value;
+}
@@ -0,0 +1,23 @@
+[package]
+name = "settings_ui_macros"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lib]
+path = "src/settings_ui_macros.rs"
+proc-macro = true
+
+[lints]
+workspace = true
+
+[features]
+default = []
+
+[dependencies]
+heck.workspace = true
+proc-macro2.workspace = true
+quote.workspace = true
+syn.workspace = true
+workspace-hack.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,456 @@
+use std::ops::Not;
+
+use heck::{ToSnakeCase as _, ToTitleCase as _};
+use proc_macro2::TokenStream;
+use quote::{ToTokens, quote};
+use syn::{Data, DeriveInput, LitStr, Token, parse_macro_input};
+
+/// Derive macro for the `SettingsUi` marker trait.
+///
+/// This macro automatically implements the `SettingsUi` trait for the annotated type.
+/// The `SettingsUi` trait is a marker trait used to indicate that a type can be
+/// displayed in the settings UI.
+///
+/// # Example
+///
+/// ```
+/// use settings::SettingsUi;
+///
+/// #[derive(SettingsUi)]
+/// #[settings_ui(group = "Standard")]
+/// struct MySettings {
+/// enabled: bool,
+/// count: usize,
+/// }
+/// ```
+#[proc_macro_derive(SettingsUi, attributes(settings_ui))]
+pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+ let input = parse_macro_input!(input as DeriveInput);
+ let name = &input.ident;
+
+ // Handle generic parameters if present
+ let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+
+ let mut group_name = Option::<String>::None;
+ let mut path_name = Option::<String>::None;
+
+ for attr in &input.attrs {
+ if attr.path().is_ident("settings_ui") {
+ attr.parse_nested_meta(|meta| {
+ if meta.path.is_ident("group") {
+ if group_name.is_some() {
+ return Err(meta.error("Only one 'group' path can be specified"));
+ }
+ meta.input.parse::<Token![=]>()?;
+ let lit: LitStr = meta.input.parse()?;
+ group_name = Some(lit.value());
+ } else if meta.path.is_ident("path") {
+ // todo(settings_ui) rely entirely on settings_key, remove path attribute
+ if path_name.is_some() {
+ return Err(meta.error("Only one 'path' can be specified, either with `path` in `settings_ui` or with `settings_key`"));
+ }
+ meta.input.parse::<Token![=]>()?;
+ let lit: LitStr = meta.input.parse()?;
+ path_name = Some(lit.value());
+ }
+ Ok(())
+ })
+ .unwrap_or_else(|e| panic!("in #[settings_ui] attribute: {}", e));
+ } else if let Some(settings_key) = parse_setting_key_attr(attr) {
+ // todo(settings_ui) either remove fallback key or handle it here
+ if path_name.is_some() && settings_key.key.is_some() {
+ panic!("Both 'path' and 'settings_key' are specified. Must specify only one");
+ }
+ path_name = settings_key.key;
+ }
+ }
+
+ let doc_str = parse_documentation_from_attrs(&input.attrs);
+
+ let ui_item_fn_body = generate_ui_item_body(group_name.as_ref(), path_name.as_ref(), &input);
+
+ // todo(settings_ui): make group name optional, repurpose group as tag indicating item is group, and have "title" tag for custom title
+ let title = group_name.unwrap_or(input.ident.to_string().to_title_case());
+
+ let ui_entry_fn_body = map_ui_item_to_entry(
+ path_name.as_deref(),
+ &title,
+ doc_str.as_deref(),
+ quote! { Self },
+ );
+
+ let expanded = quote! {
+ impl #impl_generics settings::SettingsUi for #name #ty_generics #where_clause {
+ fn settings_ui_item() -> settings::SettingsUiItem {
+ #ui_item_fn_body
+ }
+
+ fn settings_ui_entry() -> settings::SettingsUiEntry {
+ #ui_entry_fn_body
+ }
+ }
+ };
+
+ proc_macro::TokenStream::from(expanded)
+}
+
+fn extract_type_from_option(ty: TokenStream) -> TokenStream {
+ match option_inner_type(ty.clone()) {
+ Some(inner_type) => inner_type,
+ None => ty,
+ }
+}
+
+fn option_inner_type(ty: TokenStream) -> Option<TokenStream> {
+ let ty = syn::parse2::<syn::Type>(ty).ok()?;
+ let syn::Type::Path(path) = ty else {
+ return None;
+ };
+ let segment = path.path.segments.last()?;
+ if segment.ident != "Option" {
+ return None;
+ }
+ let syn::PathArguments::AngleBracketed(args) = &segment.arguments else {
+ return None;
+ };
+ let arg = args.args.first()?;
+ let syn::GenericArgument::Type(ty) = arg else {
+ return None;
+ };
+ return Some(ty.to_token_stream());
+}
+
+fn map_ui_item_to_entry(
+ path: Option<&str>,
+ title: &str,
+ doc_str: Option<&str>,
+ ty: TokenStream,
+) -> TokenStream {
+ let ty = extract_type_from_option(ty);
+ // todo(settings_ui): does quote! just work with options?
+ let path = path.map_or_else(|| quote! {None}, |path| quote! {Some(#path)});
+ let doc_str = doc_str.map_or_else(|| quote! {None}, |doc_str| quote! {Some(#doc_str)});
+ quote! {
+ settings::SettingsUiEntry {
+ title: #title,
+ path: #path,
+ item: #ty::settings_ui_item(),
+ documentation: #doc_str,
+ }
+ }
+}
+
+fn generate_ui_item_body(
+ group_name: Option<&String>,
+ path_name: Option<&String>,
+ input: &syn::DeriveInput,
+) -> TokenStream {
+ match (group_name, path_name, &input.data) {
+ (_, _, Data::Union(_)) => unimplemented!("Derive SettingsUi for Unions"),
+ (None, _, Data::Struct(_)) => quote! {
+ settings::SettingsUiItem::None
+ },
+ (Some(_), _, Data::Struct(data_struct)) => {
+ let struct_serde_attrs = parse_serde_attributes(&input.attrs);
+ let fields = data_struct
+ .fields
+ .iter()
+ .filter(|field| {
+ !field.attrs.iter().any(|attr| {
+ let mut has_skip = false;
+ if attr.path().is_ident("settings_ui") {
+ let _ = attr.parse_nested_meta(|meta| {
+ if meta.path.is_ident("skip") {
+ has_skip = true;
+ }
+ Ok(())
+ });
+ }
+
+ has_skip
+ })
+ })
+ .map(|field| {
+ let field_serde_attrs = parse_serde_attributes(&field.attrs);
+ let name = field.ident.clone().expect("tuple fields").to_string();
+ let doc_str = parse_documentation_from_attrs(&field.attrs);
+
+ (
+ name.to_title_case(),
+ doc_str,
+ field_serde_attrs.flatten.not().then(|| {
+ struct_serde_attrs.apply_rename_to_field(&field_serde_attrs, &name)
+ }),
+ field.ty.to_token_stream(),
+ )
+ })
+ // todo(settings_ui): Re-format field name as nice title, and support setting different title with attr
+ .map(|(title, doc_str, path, ty)| {
+ map_ui_item_to_entry(path.as_deref(), &title, doc_str.as_deref(), ty)
+ });
+
+ quote! {
+ settings::SettingsUiItem::Group(settings::SettingsUiItemGroup{ items: vec![#(#fields),*] })
+ }
+ }
+ (None, _, Data::Enum(data_enum)) => {
+ let serde_attrs = parse_serde_attributes(&input.attrs);
+ let length = data_enum.variants.len();
+
+ let variants = data_enum.variants.iter().map(|variant| {
+ let string = variant.ident.clone().to_string();
+
+ let title = string.to_title_case();
+ let string = serde_attrs.rename_all.apply(&string);
+
+ (string, title)
+ });
+
+ let (variants, labels): (Vec<_>, Vec<_>) = variants.unzip();
+
+ if length > 6 {
+ quote! {
+ settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::DropDown{ variants: &[#(#variants),*], labels: &[#(#labels),*] })
+ }
+ } else {
+ quote! {
+ settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::ToggleGroup{ variants: &[#(#variants),*], labels: &[#(#labels),*] })
+ }
+ }
+ }
+ // todo(settings_ui) discriminated unions
+ (_, _, Data::Enum(_)) => quote! {
+ settings::SettingsUiItem::None
+ },
+ }
+}
+
+struct SerdeOptions {
+ rename_all: SerdeRenameAll,
+ rename: Option<String>,
+ flatten: bool,
+ _alias: Option<String>, // todo(settings_ui)
+}
+
+#[derive(PartialEq)]
+enum SerdeRenameAll {
+ Lowercase,
+ SnakeCase,
+ None,
+}
+
+impl SerdeRenameAll {
+ fn apply(&self, name: &str) -> String {
+ match self {
+ SerdeRenameAll::Lowercase => name.to_lowercase(),
+ SerdeRenameAll::SnakeCase => name.to_snake_case(),
+ SerdeRenameAll::None => name.to_string(),
+ }
+ }
+}
+
+impl SerdeOptions {
+ fn apply_rename_to_field(&self, field_options: &Self, name: &str) -> String {
+ // field renames take precedence over struct rename all cases
+ if let Some(rename) = &field_options.rename {
+ return rename.clone();
+ }
+ return self.rename_all.apply(name);
+ }
+}
+
+fn parse_serde_attributes(attrs: &[syn::Attribute]) -> SerdeOptions {
+ let mut options = SerdeOptions {
+ rename_all: SerdeRenameAll::None,
+ rename: None,
+ flatten: false,
+ _alias: None,
+ };
+
+ for attr in attrs {
+ if !attr.path().is_ident("serde") {
+ continue;
+ }
+ attr.parse_nested_meta(|meta| {
+ if meta.path.is_ident("rename_all") {
+ meta.input.parse::<Token![=]>()?;
+ let lit = meta.input.parse::<LitStr>()?.value();
+
+ if options.rename_all != SerdeRenameAll::None {
+ return Err(meta.error("duplicate `rename_all` attribute"));
+ } else if lit == "lowercase" {
+ options.rename_all = SerdeRenameAll::Lowercase;
+ } else if lit == "snake_case" {
+ options.rename_all = SerdeRenameAll::SnakeCase;
+ } else {
+ return Err(meta.error(format!("invalid `rename_all` attribute: {}", lit)));
+ }
+ // todo(settings_ui): Other options?
+ } else if meta.path.is_ident("flatten") {
+ options.flatten = true;
+ } else if meta.path.is_ident("rename") {
+ if options.rename.is_some() {
+ return Err(meta.error("Can only have one rename attribute"));
+ }
+
+ meta.input.parse::<Token![=]>()?;
+ let lit = meta.input.parse::<LitStr>()?.value();
+ options.rename = Some(lit);
+ }
+ Ok(())
+ })
+ .unwrap();
+ }
+
+ return options;
+}
+
+fn parse_documentation_from_attrs(attrs: &[syn::Attribute]) -> Option<String> {
+ let mut doc_str = Option::<String>::None;
+ for attr in attrs {
+ if attr.path().is_ident("doc") {
+ // /// ...
+ // becomes
+ // #[doc = "..."]
+ use syn::{Expr::Lit, ExprLit, Lit::Str, Meta, MetaNameValue};
+ if let Meta::NameValue(MetaNameValue {
+ value:
+ Lit(ExprLit {
+ lit: Str(ref lit_str),
+ ..
+ }),
+ ..
+ }) = attr.meta
+ {
+ let doc = lit_str.value();
+ let doc_str = doc_str.get_or_insert_default();
+ doc_str.push_str(doc.trim());
+ doc_str.push('\n');
+ }
+ }
+ }
+ return doc_str;
+}
+
+struct SettingsKey {
+ key: Option<String>,
+ fallback_key: Option<String>,
+}
+
+fn parse_setting_key_attr(attr: &syn::Attribute) -> Option<SettingsKey> {
+ if !attr.path().is_ident("settings_key") {
+ return None;
+ }
+
+ let mut settings_key = SettingsKey {
+ key: None,
+ fallback_key: None,
+ };
+
+ let mut found_none = false;
+
+ attr.parse_nested_meta(|meta| {
+ if meta.path.is_ident("None") {
+ found_none = true;
+ } else if meta.path.is_ident("key") {
+ if settings_key.key.is_some() {
+ return Err(meta.error("Only one 'group' path can be specified"));
+ }
+ meta.input.parse::<Token![=]>()?;
+ let lit: LitStr = meta.input.parse()?;
+ settings_key.key = Some(lit.value());
+ } else if meta.path.is_ident("fallback_key") {
+ if found_none {
+ return Err(meta.error("Cannot specify 'fallback_key' and 'None'"));
+ }
+
+ if settings_key.fallback_key.is_some() {
+ return Err(meta.error("Only one 'fallback_key' can be specified"));
+ }
+
+ meta.input.parse::<Token![=]>()?;
+ let lit: LitStr = meta.input.parse()?;
+ settings_key.fallback_key = Some(lit.value());
+ }
+ Ok(())
+ })
+ .unwrap_or_else(|e| panic!("in #[settings_key] attribute: {}", e));
+
+ if found_none && settings_key.fallback_key.is_some() {
+ panic!("in #[settings_key] attribute: Cannot specify 'None' and 'fallback_key'");
+ }
+ if found_none && settings_key.key.is_some() {
+ panic!("in #[settings_key] attribute: Cannot specify 'None' and 'key'");
+ }
+ if !found_none && settings_key.key.is_none() {
+ panic!("in #[settings_key] attribute: 'key' must be specified");
+ }
+
+ return Some(settings_key);
+}
+
+#[proc_macro_derive(SettingsKey, attributes(settings_key))]
+pub fn derive_settings_key(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+ let input = parse_macro_input!(input as DeriveInput);
+ let name = &input.ident;
+
+ // Handle generic parameters if present
+ let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+
+ let mut settings_key = Option::<SettingsKey>::None;
+
+ for attr in &input.attrs {
+ let parsed_settings_key = parse_setting_key_attr(attr);
+ if parsed_settings_key.is_some() && settings_key.is_some() {
+ panic!("Duplicate #[settings_key] attribute");
+ }
+ settings_key = settings_key.or(parsed_settings_key);
+ }
+
+ let Some(SettingsKey { key, fallback_key }) = settings_key else {
+ panic!("Missing #[settings_key] attribute");
+ };
+
+ let key = key.map_or_else(|| quote! {None}, |key| quote! {Some(#key)});
+ let fallback_key = fallback_key.map_or_else(
+ || quote! {None},
+ |fallback_key| quote! {Some(#fallback_key)},
+ );
+
+ let expanded = quote! {
+ impl #impl_generics settings::SettingsKey for #name #ty_generics #where_clause {
+ const KEY: Option<&'static str> = #key;
+
+ const FALLBACK_KEY: Option<&'static str> = #fallback_key;
+ };
+ };
+
+ proc_macro::TokenStream::from(expanded)
+}
+
+#[cfg(test)]
+mod tests {
+ use syn::{Attribute, parse_quote};
+
+ use super::*;
+
+ #[test]
+ fn test_extract_key() {
+ let input: Attribute = parse_quote!(
+ #[settings_key(key = "my_key")]
+ );
+ let settings_key = parse_setting_key_attr(&input).unwrap();
+ assert_eq!(settings_key.key, Some("my_key".to_string()));
+ assert_eq!(settings_key.fallback_key, None);
+ }
+
+ #[test]
+ fn test_empty_key() {
+ let input: Attribute = parse_quote!(
+ #[settings_key(None)]
+ );
+ let settings_key = parse_setting_key_attr(&input).unwrap();
+ assert_eq!(settings_key.key, None);
+ assert_eq!(settings_key.fallback_key, None);
+ }
+}
@@ -14,6 +14,7 @@ collections.workspace = true
futures.workspace = true
indoc.workspace = true
libsqlite3-sys.workspace = true
+log.workspace = true
parking_lot.workspace = true
smol.workspace = true
sqlformat.workspace = true
@@ -92,91 +92,97 @@ impl Connection {
let mut remaining_sql = sql.as_c_str();
let sql_start = remaining_sql.as_ptr();
- unsafe {
- let mut alter_table = None;
- while {
- let remaining_sql_str = remaining_sql.to_str().unwrap().trim();
- let any_remaining_sql = remaining_sql_str != ";" && !remaining_sql_str.is_empty();
- if any_remaining_sql {
- alter_table = parse_alter_table(remaining_sql_str);
+ let mut alter_table = None;
+ while {
+ let remaining_sql_str = remaining_sql.to_str().unwrap().trim();
+ let any_remaining_sql = remaining_sql_str != ";" && !remaining_sql_str.is_empty();
+ if any_remaining_sql {
+ alter_table = parse_alter_table(remaining_sql_str);
+ }
+ any_remaining_sql
+ } {
+ let mut raw_statement = ptr::null_mut::<sqlite3_stmt>();
+ let mut remaining_sql_ptr = ptr::null();
+
+ let (res, offset, message, _conn) = if let Some((table_to_alter, column)) = alter_table
+ {
+ // ALTER TABLE is a weird statement. When preparing the statement the table's
+ // existence is checked *before* syntax checking any other part of the statement.
+ // Therefore, we need to make sure that the table has been created before calling
+ // prepare. As we don't want to trash whatever database this is connected to, we
+ // create a new in-memory DB to test.
+
+ let temp_connection = Connection::open_memory(None);
+ //This should always succeed, if it doesn't then you really should know about it
+ temp_connection
+ .exec(&format!("CREATE TABLE {table_to_alter}({column})"))
+ .unwrap()()
+ .unwrap();
+
+ unsafe {
+ sqlite3_prepare_v2(
+ temp_connection.sqlite3,
+ remaining_sql.as_ptr(),
+ -1,
+ &mut raw_statement,
+ &mut remaining_sql_ptr,
+ )
+ };
+
+ #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+ let offset = unsafe { sqlite3_error_offset(temp_connection.sqlite3) };
+
+ #[cfg(any(target_os = "linux", target_os = "freebsd"))]
+ let offset = 0;
+
+ unsafe {
+ (
+ sqlite3_errcode(temp_connection.sqlite3),
+ offset,
+ sqlite3_errmsg(temp_connection.sqlite3),
+ Some(temp_connection),
+ )
}
- any_remaining_sql
- } {
- let mut raw_statement = ptr::null_mut::<sqlite3_stmt>();
- let mut remaining_sql_ptr = ptr::null();
-
- let (res, offset, message, _conn) =
- if let Some((table_to_alter, column)) = alter_table {
- // ALTER TABLE is a weird statement. When preparing the statement the table's
- // existence is checked *before* syntax checking any other part of the statement.
- // Therefore, we need to make sure that the table has been created before calling
- // prepare. As we don't want to trash whatever database this is connected to, we
- // create a new in-memory DB to test.
-
- let temp_connection = Connection::open_memory(None);
- //This should always succeed, if it doesn't then you really should know about it
- temp_connection
- .exec(&format!("CREATE TABLE {table_to_alter}({column})"))
- .unwrap()()
- .unwrap();
-
- sqlite3_prepare_v2(
- temp_connection.sqlite3,
- remaining_sql.as_ptr(),
- -1,
- &mut raw_statement,
- &mut remaining_sql_ptr,
- );
-
- #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
- let offset = sqlite3_error_offset(temp_connection.sqlite3);
-
- #[cfg(any(target_os = "linux", target_os = "freebsd"))]
- let offset = 0;
-
- (
- sqlite3_errcode(temp_connection.sqlite3),
- offset,
- sqlite3_errmsg(temp_connection.sqlite3),
- Some(temp_connection),
- )
- } else {
- sqlite3_prepare_v2(
- self.sqlite3,
- remaining_sql.as_ptr(),
- -1,
- &mut raw_statement,
- &mut remaining_sql_ptr,
- );
-
- #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
- let offset = sqlite3_error_offset(self.sqlite3);
-
- #[cfg(any(target_os = "linux", target_os = "freebsd"))]
- let offset = 0;
-
- (
- sqlite3_errcode(self.sqlite3),
- offset,
- sqlite3_errmsg(self.sqlite3),
- None,
- )
- };
-
- sqlite3_finalize(raw_statement);
-
- if res == 1 && offset >= 0 {
- let sub_statement_correction =
- remaining_sql.as_ptr() as usize - sql_start as usize;
- let err_msg =
- String::from_utf8_lossy(CStr::from_ptr(message as *const _).to_bytes())
- .into_owned();
-
- return Some((err_msg, offset as usize + sub_statement_correction));
+ } else {
+ unsafe {
+ sqlite3_prepare_v2(
+ self.sqlite3,
+ remaining_sql.as_ptr(),
+ -1,
+ &mut raw_statement,
+ &mut remaining_sql_ptr,
+ )
+ };
+
+ #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+ let offset = unsafe { sqlite3_error_offset(self.sqlite3) };
+
+ #[cfg(any(target_os = "linux", target_os = "freebsd"))]
+ let offset = 0;
+
+ unsafe {
+ (
+ sqlite3_errcode(self.sqlite3),
+ offset,
+ sqlite3_errmsg(self.sqlite3),
+ None,
+ )
}
- remaining_sql = CStr::from_ptr(remaining_sql_ptr);
- alter_table = None;
+ };
+
+ unsafe { sqlite3_finalize(raw_statement) };
+
+ if res == 1 && offset >= 0 {
+ let sub_statement_correction = remaining_sql.as_ptr() as usize - sql_start as usize;
+ let err_msg = String::from_utf8_lossy(unsafe {
+ CStr::from_ptr(message as *const _).to_bytes()
+ })
+ .into_owned();
+
+ return Some((err_msg, offset as usize + sub_statement_correction));
}
+ remaining_sql = unsafe { CStr::from_ptr(remaining_sql_ptr) };
+ alter_table = None;
}
None
}
@@ -1,8 +1,12 @@
use crate::connection::Connection;
pub trait Domain: 'static {
- fn name() -> &'static str;
- fn migrations() -> &'static [&'static str];
+ const NAME: &str;
+ const MIGRATIONS: &[&str];
+
+ fn should_allow_migration_change(_index: usize, _old: &str, _new: &str) -> bool {
+ false
+ }
}
pub trait Migrator: 'static {
@@ -17,7 +21,11 @@ impl Migrator for () {
impl<D: Domain> Migrator for D {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
- connection.migrate(Self::name(), Self::migrations())
+ connection.migrate(
+ Self::NAME,
+ Self::MIGRATIONS,
+ Self::should_allow_migration_change,
+ )
}
}
@@ -34,7 +34,12 @@ impl Connection {
/// Note: Unlike everything else in SQLez, migrations are run eagerly, without first
/// preparing the SQL statements. This makes it possible to do multi-statement schema
/// updates in a single string without running into prepare errors.
- pub fn migrate(&self, domain: &'static str, migrations: &[&'static str]) -> Result<()> {
+ pub fn migrate(
+ &self,
+ domain: &'static str,
+ migrations: &[&'static str],
+ mut should_allow_migration_change: impl FnMut(usize, &str, &str) -> bool,
+ ) -> Result<()> {
self.with_savepoint("migrating", || {
// Setup the migrations table unconditionally
self.exec(indoc! {"
@@ -54,6 +59,7 @@ impl Connection {
let mut store_completed_migration = self
.exec_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;
+ let mut did_migrate = false;
for (index, migration) in migrations.iter().enumerate() {
let migration =
sqlformat::format(migration, &sqlformat::QueryParams::None, Default::default());
@@ -68,6 +74,9 @@ impl Connection {
if completed_migration == migration {
// Migration already run. Continue
continue;
+ } else if should_allow_migration_change(index, &completed_migration, &migration)
+ {
+ continue;
} else {
anyhow::bail!(formatdoc! {"
Migration changed for {domain} at step {index}
@@ -81,12 +90,58 @@ impl Connection {
}
self.eager_exec(&migration)?;
+ did_migrate = true;
store_completed_migration((domain, index, migration))?;
}
+ if did_migrate {
+ self.delete_rows_with_orphaned_foreign_key_references()?;
+ self.exec("PRAGMA foreign_key_check;")?()?;
+ }
+
Ok(())
})
}
+
+ /// Delete any rows that were orphaned by a migration. This is needed
+ /// because we disable foreign key constraints during migrations, so
+ /// that it's possible to re-create a table with the same name, without
+ /// deleting all associated data.
+ fn delete_rows_with_orphaned_foreign_key_references(&self) -> Result<()> {
+ let foreign_key_info: Vec<(String, String, String, String)> = self.select(
+ r#"
+ SELECT DISTINCT
+ schema.name as child_table,
+ foreign_keys.[from] as child_key,
+ foreign_keys.[table] as parent_table,
+ foreign_keys.[to] as parent_key
+ FROM sqlite_schema schema
+ JOIN pragma_foreign_key_list(schema.name) foreign_keys
+ WHERE
+ schema.type = 'table' AND
+ schema.name NOT LIKE "sqlite_%"
+ "#,
+ )?()?;
+
+ if !foreign_key_info.is_empty() {
+ log::info!(
+ "Found {} foreign key relationships to check",
+ foreign_key_info.len()
+ );
+ }
+
+ for (child_table, child_key, parent_table, parent_key) in foreign_key_info {
+ self.exec(&format!(
+ "
+ DELETE FROM {child_table}
+ WHERE {child_key} IS NOT NULL and {child_key} NOT IN
+ (SELECT {parent_key} FROM {parent_table})
+ "
+ ))?()?;
+ }
+
+ Ok(())
+ }
}
#[cfg(test)]
@@ -108,6 +163,7 @@ mod test {
a TEXT,
b TEXT
)"}],
+ disallow_migration_change,
)
.unwrap();
@@ -136,6 +192,7 @@ mod test {
d TEXT
)"},
],
+ disallow_migration_change,
)
.unwrap();
@@ -214,7 +271,11 @@ mod test {
// Run the migration verifying that the row got dropped
connection
- .migrate("test", &["DELETE FROM test_table"])
+ .migrate(
+ "test",
+ &["DELETE FROM test_table"],
+ disallow_migration_change,
+ )
.unwrap();
assert_eq!(
connection
@@ -232,7 +293,11 @@ mod test {
// Run the same migration again and verify that the table was left unchanged
connection
- .migrate("test", &["DELETE FROM test_table"])
+ .migrate(
+ "test",
+ &["DELETE FROM test_table"],
+ disallow_migration_change,
+ )
.unwrap();
assert_eq!(
connection
@@ -252,27 +317,28 @@ mod test {
.migrate(
"test migration",
&[
- indoc! {"
- CREATE TABLE test (
- col INTEGER
- )"},
- indoc! {"
- INSERT INTO test (col) VALUES (1)"},
+ "CREATE TABLE test (col INTEGER)",
+ "INSERT INTO test (col) VALUES (1)",
],
+ disallow_migration_change,
)
.unwrap();
+ let mut migration_changed = false;
+
// Create another migration with the same domain but different steps
let second_migration_result = connection.migrate(
"test migration",
&[
- indoc! {"
- CREATE TABLE test (
- color INTEGER
- )"},
- indoc! {"
- INSERT INTO test (color) VALUES (1)"},
+ "CREATE TABLE test (color INTEGER )",
+ "INSERT INTO test (color) VALUES (1)",
],
+ |_, old, new| {
+ assert_eq!(old, "CREATE TABLE test (col INTEGER)");
+ assert_eq!(new, "CREATE TABLE test (color INTEGER)");
+ migration_changed = true;
+ false
+ },
);
// Verify new migration returns error when run
@@ -284,7 +350,11 @@ mod test {
let connection = Connection::open_memory(Some("test_create_alter_drop"));
connection
- .migrate("first_migration", &["CREATE TABLE table1(a TEXT) STRICT;"])
+ .migrate(
+ "first_migration",
+ &["CREATE TABLE table1(a TEXT) STRICT;"],
+ disallow_migration_change,
+ )
.unwrap();
connection
@@ -305,6 +375,7 @@ mod test {
ALTER TABLE table2 RENAME TO table1;
"}],
+ disallow_migration_change,
)
.unwrap();
@@ -312,4 +383,8 @@ mod test {
assert_eq!(res, "test text");
}
+
+ fn disallow_migration_change(_: usize, _: &str, _: &str) -> bool {
+ false
+ }
}
@@ -44,41 +44,41 @@ impl<'a> Statement<'a> {
connection,
phantom: PhantomData,
};
- unsafe {
- let sql = CString::new(query.as_ref()).context("Error creating cstr")?;
- let mut remaining_sql = sql.as_c_str();
- while {
- let remaining_sql_str = remaining_sql
- .to_str()
- .context("Parsing remaining sql")?
- .trim();
- remaining_sql_str != ";" && !remaining_sql_str.is_empty()
- } {
- let mut raw_statement = ptr::null_mut::<sqlite3_stmt>();
- let mut remaining_sql_ptr = ptr::null();
+ let sql = CString::new(query.as_ref()).context("Error creating cstr")?;
+ let mut remaining_sql = sql.as_c_str();
+ while {
+ let remaining_sql_str = remaining_sql
+ .to_str()
+ .context("Parsing remaining sql")?
+ .trim();
+ remaining_sql_str != ";" && !remaining_sql_str.is_empty()
+ } {
+ let mut raw_statement = ptr::null_mut::<sqlite3_stmt>();
+ let mut remaining_sql_ptr = ptr::null();
+ unsafe {
sqlite3_prepare_v2(
connection.sqlite3,
remaining_sql.as_ptr(),
-1,
&mut raw_statement,
&mut remaining_sql_ptr,
- );
+ )
+ };
- connection.last_error().with_context(|| {
- format!("Prepare call failed for query:\n{}", query.as_ref())
- })?;
+ connection
+ .last_error()
+ .with_context(|| format!("Prepare call failed for query:\n{}", query.as_ref()))?;
- remaining_sql = CStr::from_ptr(remaining_sql_ptr);
- statement.raw_statements.push(raw_statement);
+ remaining_sql = unsafe { CStr::from_ptr(remaining_sql_ptr) };
+ statement.raw_statements.push(raw_statement);
- if !connection.can_write() && sqlite3_stmt_readonly(raw_statement) == 0 {
- let sql = CStr::from_ptr(sqlite3_sql(raw_statement));
+ if !connection.can_write() && unsafe { sqlite3_stmt_readonly(raw_statement) == 0 } {
+ let sql = unsafe { CStr::from_ptr(sqlite3_sql(raw_statement)) };
- bail!(
- "Write statement prepared with connection that is not write capable. SQL:\n{} ",
- sql.to_str()?
- )
- }
+ bail!(
+ "Write statement prepared with connection that is not write capable. SQL:\n{} ",
+ sql.to_str()?
+ )
}
}
@@ -271,23 +271,21 @@ impl<'a> Statement<'a> {
}
fn step(&mut self) -> Result<StepResult> {
- unsafe {
- match sqlite3_step(self.current_statement()) {
- SQLITE_ROW => Ok(StepResult::Row),
- SQLITE_DONE => {
- if self.current_statement >= self.raw_statements.len() - 1 {
- Ok(StepResult::Done)
- } else {
- self.current_statement += 1;
- self.step()
- }
- }
- SQLITE_MISUSE => anyhow::bail!("Statement step returned SQLITE_MISUSE"),
- _other_error => {
- self.connection.last_error()?;
- unreachable!("Step returned error code and last error failed to catch it");
+ match unsafe { sqlite3_step(self.current_statement()) } {
+ SQLITE_ROW => Ok(StepResult::Row),
+ SQLITE_DONE => {
+ if self.current_statement >= self.raw_statements.len() - 1 {
+ Ok(StepResult::Done)
+ } else {
+ self.current_statement += 1;
+ self.step()
}
}
+ SQLITE_MISUSE => anyhow::bail!("Statement step returned SQLITE_MISUSE"),
+ _other_error => {
+ self.connection.last_error()?;
+ unreachable!("Step returned error code and last error failed to catch it");
+ }
}
}
@@ -95,6 +95,14 @@ impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
let mut migration_result =
anyhow::Result::<()>::Err(anyhow::anyhow!("Migration never run"));
+ let foreign_keys_enabled: bool =
+ connection.select_row::<i32>("PRAGMA foreign_keys")?()
+ .unwrap_or(None)
+ .map(|enabled| enabled != 0)
+ .unwrap_or(false);
+
+ connection.exec("PRAGMA foreign_keys = OFF;")?()?;
+
for _ in 0..MIGRATION_RETRIES {
migration_result = connection
.with_savepoint("thread_safe_multi_migration", || M::migrate(connection));
@@ -104,6 +112,9 @@ impl<M: Migrator> ThreadSafeConnectionBuilder<M> {
}
}
+ if foreign_keys_enabled {
+ connection.exec("PRAGMA foreign_keys = ON;")?()?;
+ }
migration_result
})
.await?;
@@ -278,12 +289,8 @@ mod test {
enum TestDomain {}
impl Domain for TestDomain {
- fn name() -> &'static str {
- "test"
- }
- fn migrations() -> &'static [&'static str] {
- &["CREATE TABLE test(col1 TEXT, col2 TEXT) STRICT;"]
- }
+ const NAME: &str = "test";
+ const MIGRATIONS: &[&str] = &["CREATE TABLE test(col1 TEXT, col2 TEXT) STRICT;"];
}
for _ in 0..100 {
@@ -312,12 +319,9 @@ mod test {
fn wild_zed_lost_failure() {
enum TestWorkspace {}
impl Domain for TestWorkspace {
- fn name() -> &'static str {
- "workspace"
- }
+ const NAME: &str = "workspace";
- fn migrations() -> &'static [&'static str] {
- &["
+ const MIGRATIONS: &[&str] = &["
CREATE TABLE workspaces(
workspace_id INTEGER PRIMARY KEY,
dock_visible INTEGER, -- Boolean
@@ -336,8 +340,7 @@ mod test {
ON DELETE CASCADE
ON UPDATE CASCADE
) STRICT;
- "]
- }
+ "];
}
let builder =
@@ -194,7 +194,7 @@ impl RenderOnce for StorySection {
// Section title
.py_2()
// Section description
- .when_some(self.description.clone(), |section, description| {
+ .when_some(self.description, |section, description| {
section.child(Story::description(description, cx))
})
.child(div().flex().flex_col().gap_2().children(children))
@@ -109,15 +109,13 @@ static ALL_STORY_SELECTORS: OnceLock<Vec<StorySelector>> = OnceLock::new();
impl ValueEnum for StorySelector {
fn value_variants<'a>() -> &'a [Self] {
- let stories = ALL_STORY_SELECTORS.get_or_init(|| {
+ (ALL_STORY_SELECTORS.get_or_init(|| {
let component_stories = ComponentStory::iter().map(StorySelector::Component);
component_stories
.chain(std::iter::once(StorySelector::KitchenSink))
.collect::<Vec<_>>()
- });
-
- stories
+ })) as _
}
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
@@ -945,7 +945,7 @@ mod tests {
let mut new_len = 0;
while new_len < new.len() {
- let mut chunk_len = rng.gen_range(1..=new.len() - new_len);
+ let mut chunk_len = rng.random_range(1..=new.len() - new_len);
while !new.is_char_boundary(new_len + chunk_len) {
chunk_len += 1;
}
@@ -1034,14 +1034,14 @@ mod tests {
fn randomly_edit(text: &str, rng: &mut impl Rng) -> String {
let mut result = String::from(text);
- let edit_count = rng.gen_range(1..=5);
+ let edit_count = rng.random_range(1..=5);
fn random_char_range(text: &str, rng: &mut impl Rng) -> (usize, usize) {
- let mut start = rng.gen_range(0..=text.len());
+ let mut start = rng.random_range(0..=text.len());
while !text.is_char_boundary(start) {
start -= 1;
}
- let mut end = rng.gen_range(start..=text.len());
+ let mut end = rng.random_range(start..=text.len());
while !text.is_char_boundary(end) {
end += 1;
}
@@ -1049,11 +1049,11 @@ mod tests {
}
for _ in 0..edit_count {
- match rng.gen_range(0..3) {
+ match rng.random_range(0..3) {
0 => {
// Insert
let (pos, _) = random_char_range(&result, rng);
- let insert_len = rng.gen_range(1..=5);
+ let insert_len = rng.random_range(1..=5);
let insert_text: String = random_text(rng, insert_len);
result.insert_str(pos, &insert_text);
}
@@ -909,7 +909,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
- use rand::{distributions, prelude::*};
+ use rand::{distr::StandardUniform, prelude::*};
use std::cmp;
#[ctor::ctor]
@@ -951,24 +951,24 @@ mod tests {
let rng = &mut rng;
let mut tree = SumTree::<u8>::default();
- let count = rng.gen_range(0..10);
- if rng.r#gen() {
- tree.extend(rng.sample_iter(distributions::Standard).take(count), &());
+ let count = rng.random_range(0..10);
+ if rng.random() {
+ tree.extend(rng.sample_iter(StandardUniform).take(count), &());
} else {
let items = rng
- .sample_iter(distributions::Standard)
+ .sample_iter(StandardUniform)
.take(count)
.collect::<Vec<_>>();
tree.par_extend(items, &());
}
for _ in 0..num_operations {
- let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
- let splice_start = rng.gen_range(0..splice_end + 1);
- let count = rng.gen_range(0..10);
+ let splice_end = rng.random_range(0..tree.extent::<Count>(&()).0 + 1);
+ let splice_start = rng.random_range(0..splice_end + 1);
+ let count = rng.random_range(0..10);
let tree_end = tree.extent::<Count>(&());
let new_items = rng
- .sample_iter(distributions::Standard)
+ .sample_iter(StandardUniform)
.take(count)
.collect::<Vec<u8>>();
@@ -978,7 +978,7 @@ mod tests {
tree = {
let mut cursor = tree.cursor::<Count>(&());
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right);
- if rng.r#gen() {
+ if rng.random() {
new_tree.extend(new_items, &());
} else {
new_tree.par_extend(new_items, &());
@@ -1005,7 +1005,7 @@ mod tests {
.filter(|(_, item)| (item & 1) == 0)
.collect::<Vec<_>>();
- let mut item_ix = if rng.r#gen() {
+ let mut item_ix = if rng.random() {
filter_cursor.next();
0
} else {
@@ -1022,12 +1022,12 @@ mod tests {
filter_cursor.next();
item_ix += 1;
- while item_ix > 0 && rng.gen_bool(0.2) {
+ while item_ix > 0 && rng.random_bool(0.2) {
log::info!("prev");
filter_cursor.prev();
item_ix -= 1;
- if item_ix == 0 && rng.gen_bool(0.2) {
+ if item_ix == 0 && rng.random_bool(0.2) {
filter_cursor.prev();
assert_eq!(filter_cursor.item(), None);
assert_eq!(filter_cursor.start().0, 0);
@@ -1039,9 +1039,9 @@ mod tests {
let mut before_start = false;
let mut cursor = tree.cursor::<Count>(&());
- let start_pos = rng.gen_range(0..=reference_items.len());
+ let start_pos = rng.random_range(0..=reference_items.len());
cursor.seek(&Count(start_pos), Bias::Right);
- let mut pos = rng.gen_range(start_pos..=reference_items.len());
+ let mut pos = rng.random_range(start_pos..=reference_items.len());
cursor.seek_forward(&Count(pos), Bias::Right);
for i in 0..10 {
@@ -1084,10 +1084,18 @@ mod tests {
}
for _ in 0..10 {
- let end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
- let start = rng.gen_range(0..end + 1);
- let start_bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
- let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right };
+ let end = rng.random_range(0..tree.extent::<Count>(&()).0 + 1);
+ let start = rng.random_range(0..end + 1);
+ let start_bias = if rng.random() {
+ Bias::Left
+ } else {
+ Bias::Right
+ };
+ let end_bias = if rng.random() {
+ Bias::Left
+ } else {
+ Bias::Right
+ };
let mut cursor = tree.cursor::<Count>(&());
cursor.seek(&Count(start), start_bias);
@@ -2,6 +2,7 @@ use std::{cmp::Ordering, fmt::Debug};
use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary};
+/// A cheaply-cloneable ordered map based on a [SumTree](crate::SumTree).
#[derive(Clone, PartialEq, Eq)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
@@ -384,9 +384,7 @@ impl SupermavenAgent {
match message {
SupermavenMessage::ActivationRequest(request) => {
self.account_status = match request.activate_url {
- Some(activate_url) => AccountStatus::NeedsActivation {
- activate_url: activate_url.clone(),
- },
+ Some(activate_url) => AccountStatus::NeedsActivation { activate_url },
None => AccountStatus::Ready,
};
}
@@ -19,8 +19,10 @@ pub struct SupermavenCompletionProvider {
supermaven: Entity<Supermaven>,
buffer_id: Option<EntityId>,
completion_id: Option<SupermavenCompletionStateId>,
+ completion_text: Option<String>,
file_extension: Option<String>,
pending_refresh: Option<Task<Result<()>>>,
+ completion_position: Option<language::Anchor>,
}
impl SupermavenCompletionProvider {
@@ -29,25 +31,26 @@ impl SupermavenCompletionProvider {
supermaven,
buffer_id: None,
completion_id: None,
+ completion_text: None,
file_extension: None,
pending_refresh: None,
+ completion_position: None,
}
}
}
// Computes the edit prediction from the difference between the completion text.
-// this is defined by greedily matching the buffer text against the completion text, with any leftover buffer placed at the end.
-// for example, given the completion text "moo cows are cool" and the buffer text "cowsre pool", the completion state would be
-// the inlays "moo ", " a", and "cool" which will render as "[moo ]cows[ a]re [cool]pool" in the editor.
+// This is defined by greedily matching the buffer text against the completion text.
+// Inlays are inserted for parts of the completion text that are not present in the buffer text.
+// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]".
+// The parts in brackets are the inlays.
fn completion_from_diff(
snapshot: BufferSnapshot,
completion_text: &str,
position: Anchor,
delete_range: Range<Anchor>,
) -> EditPrediction {
- let buffer_text = snapshot
- .text_for_range(delete_range.clone())
- .collect::<String>();
+ let buffer_text = snapshot.text_for_range(delete_range).collect::<String>();
let mut edits: Vec<(Range<language::Anchor>, String)> = Vec::new();
@@ -135,6 +138,14 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
debounce: bool,
cx: &mut Context<Self>,
) {
+ // Only make new completion requests when debounce is true (i.e., when text is typed)
+ // When debounce is false (i.e., cursor movement), we should not make new requests
+ if !debounce {
+ return;
+ }
+
+ reset_completion_cache(self, cx);
+
let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| {
supermaven.complete(&buffer_handle, cursor_position, cx)
}) else {
@@ -148,6 +159,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
while let Some(()) = completion.updates.next().await {
this.update(cx, |this, cx| {
+ // Get the completion text and cache it
+ if let Some(text) =
+ this.supermaven
+ .read(cx)
+ .completion(&buffer_handle, cursor_position, cx)
+ {
+ this.completion_text = Some(text.to_string());
+
+ this.completion_position = Some(cursor_position);
+ }
+
this.completion_id = Some(completion.id);
this.buffer_id = Some(buffer_handle.entity_id());
this.file_extension = buffer_handle.read(cx).file().and_then(|file| {
@@ -158,7 +180,6 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
.to_string(),
)
});
- this.pending_refresh = None;
cx.notify();
})?;
}
@@ -176,13 +197,11 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
}
fn accept(&mut self, _cx: &mut Context<Self>) {
- self.pending_refresh = None;
- self.completion_id = None;
+ reset_completion_cache(self, _cx);
}
fn discard(&mut self, _cx: &mut Context<Self>) {
- self.pending_refresh = None;
- self.completion_id = None;
+ reset_completion_cache(self, _cx);
}
fn suggest(
@@ -191,10 +210,34 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
cursor_position: Anchor,
cx: &mut Context<Self>,
) -> Option<EditPrediction> {
- let completion_text = self
- .supermaven
- .read(cx)
- .completion(buffer, cursor_position, cx)?;
+ if self.buffer_id != Some(buffer.entity_id()) {
+ return None;
+ }
+
+ if self.completion_id.is_none() {
+ return None;
+ }
+
+ let completion_text = if let Some(cached_text) = &self.completion_text {
+ cached_text.as_str()
+ } else {
+ let text = self
+ .supermaven
+ .read(cx)
+ .completion(buffer, cursor_position, cx)?;
+ self.completion_text = Some(text.to_string());
+ text
+ };
+
+ // Check if the cursor is still at the same position as the completion request
+ // If we don't have a completion position stored, don't show the completion
+ if let Some(completion_position) = self.completion_position {
+ if cursor_position != completion_position {
+ return None;
+ }
+ } else {
+ return None;
+ }
let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text);
@@ -202,15 +245,20 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
if !completion_text.trim().is_empty() {
let snapshot = buffer.read(cx).snapshot();
- let mut point = cursor_position.to_point(&snapshot);
- point.column = snapshot.line_len(point.row);
- let range = cursor_position..snapshot.anchor_after(point);
+
+ // Calculate the range from cursor to end of line correctly
+ let cursor_point = cursor_position.to_point(&snapshot);
+ let end_of_line = snapshot.anchor_after(language::Point::new(
+ cursor_point.row,
+ snapshot.line_len(cursor_point.row),
+ ));
+ let delete_range = cursor_position..end_of_line;
Some(completion_from_diff(
snapshot,
completion_text,
cursor_position,
- range,
+ delete_range,
))
} else {
None
@@ -218,6 +266,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
}
}
+fn reset_completion_cache(
+ provider: &mut SupermavenCompletionProvider,
+ _cx: &mut Context<SupermavenCompletionProvider>,
+) {
+ provider.pending_refresh = None;
+ provider.completion_id = None;
+ provider.completion_text = None;
+ provider.completion_position = None;
+ provider.buffer_id = None;
+}
+
fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str {
if has_leading_newline(text) {
text
@@ -157,18 +157,15 @@ impl SvgPreviewView {
&active_editor,
window,
|this: &mut SvgPreviewView, _editor, event: &EditorEvent, window, cx| {
- match event {
- EditorEvent::Saved => {
- // Remove cached image to force reload
- if let Some(svg_path) = &this.svg_path {
- let resource = Resource::Path(svg_path.clone().into());
- this.image_cache.update(cx, |cache, cx| {
- cache.remove(&resource, window, cx);
- });
- }
- cx.notify();
+ if event == &EditorEvent::Saved {
+ // Remove cached image to force reload
+ if let Some(svg_path) = &this.svg_path {
+ let resource = Resource::Path(svg_path.clone().into());
+ this.image_cache.update(cx, |cache, cx| {
+ cache.remove(&resource, window, cx);
+ });
}
- _ => {}
+ cx.notify();
}
},
);
@@ -184,22 +181,18 @@ impl SvgPreviewView {
event: &workspace::Event,
_window,
cx| {
- match event {
- workspace::Event::ActiveItemChanged => {
- let workspace_read = workspace.read(cx);
- if let Some(active_item) = workspace_read.active_item(cx)
- && let Some(editor_entity) =
- active_item.downcast::<Editor>()
- && Self::is_svg_file(&editor_entity, cx)
- {
- let new_path = Self::get_svg_path(&editor_entity, cx);
- if this.svg_path != new_path {
- this.svg_path = new_path;
- cx.notify();
- }
+ if let workspace::Event::ActiveItemChanged = event {
+ let workspace_read = workspace.read(cx);
+ if let Some(active_item) = workspace_read.active_item(cx)
+ && let Some(editor_entity) = active_item.downcast::<Editor>()
+ && Self::is_svg_file(&editor_entity, cx)
+ {
+ let new_path = Self::get_svg_path(&editor_entity, cx);
+ if this.svg_path != new_path {
+ this.svg_path = new_path;
+ cx.notify();
}
}
- _ => {}
}
},
)
@@ -0,0 +1,28 @@
+[package]
+name = "system_specs"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/system_specs.rs"
+
+[features]
+default = []
+
+[dependencies]
+anyhow.workspace = true
+client.workspace = true
+gpui.workspace = true
+human_bytes.workspace = true
+release_channel.workspace = true
+serde.workspace = true
+sysinfo.workspace = true
+workspace-hack.workspace = true
+
+[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
+pciid-parser.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -1,11 +1,22 @@
+//! # system_specs
+
use client::telemetry;
-use gpui::{App, AppContext as _, SemanticVersion, Task, Window};
+pub use gpui::GpuSpecs;
+use gpui::{App, AppContext as _, SemanticVersion, Task, Window, actions};
use human_bytes::human_bytes;
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use serde::Serialize;
use std::{env, fmt::Display};
use sysinfo::{MemoryRefreshKind, RefreshKind, System};
+actions!(
+ zed,
+ [
+ /// Copies system specifications to the clipboard for bug reports.
+ CopySystemSpecsIntoClipboard,
+ ]
+);
+
#[derive(Clone, Debug, Serialize)]
pub struct SystemSpecs {
app_version: String,
@@ -31,7 +42,7 @@ impl SystemSpecs {
let architecture = env::consts::ARCH;
let commit_sha = match release_channel {
ReleaseChannel::Dev | ReleaseChannel::Nightly => {
- AppCommitSha::try_global(cx).map(|sha| sha.full().clone())
+ AppCommitSha::try_global(cx).map(|sha| sha.full())
}
_ => None,
};
@@ -158,6 +169,115 @@ fn try_determine_available_gpus() -> Option<String> {
}
}
+#[derive(Debug, PartialEq, Eq, serde::Deserialize, serde::Serialize, Clone)]
+pub struct GpuInfo {
+ pub device_name: Option<String>,
+ pub device_pci_id: u16,
+ pub vendor_name: Option<String>,
+ pub vendor_pci_id: u16,
+ pub driver_version: Option<String>,
+ pub driver_name: Option<String>,
+}
+
+#[cfg(any(target_os = "linux", target_os = "freebsd"))]
+pub fn read_gpu_info_from_sys_class_drm() -> anyhow::Result<Vec<GpuInfo>> {
+ use anyhow::Context as _;
+ use pciid_parser;
+ let dir_iter = std::fs::read_dir("/sys/class/drm").context("Failed to read /sys/class/drm")?;
+ let mut pci_addresses = vec![];
+ let mut gpus = Vec::<GpuInfo>::new();
+ let pci_db = pciid_parser::Database::read().ok();
+ for entry in dir_iter {
+ let Ok(entry) = entry else {
+ continue;
+ };
+
+ let device_path = entry.path().join("device");
+ let Some(pci_address) = device_path.read_link().ok().and_then(|pci_address| {
+ pci_address
+ .file_name()
+ .and_then(std::ffi::OsStr::to_str)
+ .map(str::trim)
+ .map(str::to_string)
+ }) else {
+ continue;
+ };
+ let Ok(device_pci_id) = read_pci_id_from_path(device_path.join("device")) else {
+ continue;
+ };
+ let Ok(vendor_pci_id) = read_pci_id_from_path(device_path.join("vendor")) else {
+ continue;
+ };
+ let driver_name = std::fs::read_link(device_path.join("driver"))
+ .ok()
+ .and_then(|driver_link| {
+ driver_link
+ .file_name()
+ .and_then(std::ffi::OsStr::to_str)
+ .map(str::trim)
+ .map(str::to_string)
+ });
+ let driver_version = driver_name
+ .as_ref()
+ .and_then(|driver_name| {
+ std::fs::read_to_string(format!("/sys/module/{driver_name}/version")).ok()
+ })
+ .as_deref()
+ .map(str::trim)
+ .map(str::to_string);
+
+ let already_found = gpus
+ .iter()
+ .zip(&pci_addresses)
+ .any(|(gpu, gpu_pci_address)| {
+ gpu_pci_address == &pci_address
+ && gpu.driver_version == driver_version
+ && gpu.driver_name == driver_name
+ });
+
+ if already_found {
+ continue;
+ }
+
+ let vendor = pci_db
+ .as_ref()
+ .and_then(|db| db.vendors.get(&vendor_pci_id));
+ let vendor_name = vendor.map(|vendor| vendor.name.clone());
+ let device_name = vendor
+ .and_then(|vendor| vendor.devices.get(&device_pci_id))
+ .map(|device| device.name.clone());
+
+ gpus.push(GpuInfo {
+ device_name,
+ device_pci_id,
+ vendor_name,
+ vendor_pci_id,
+ driver_version,
+ driver_name,
+ });
+ pci_addresses.push(pci_address);
+ }
+
+ Ok(gpus)
+}
+
+#[cfg(any(target_os = "linux", target_os = "freebsd"))]
+fn read_pci_id_from_path(path: impl AsRef<std::path::Path>) -> anyhow::Result<u16> {
+ use anyhow::Context as _;
+ let id = std::fs::read_to_string(path)?;
+ let id = id
+ .trim()
+ .strip_prefix("0x")
+ .context("Not a device ID")
+ .context(id.clone())?;
+ anyhow::ensure!(
+ id.len() == 4,
+ "Not a device id, expected 4 digits, found {}",
+ id.len()
+ );
+ u16::from_str_radix(id, 16).context("Failed to parse device ID")
+}
+
/// Returns value of `ZED_BUNDLE_TYPE` set at compiletime or else at runtime.
///
/// The compiletime value is used by flatpak since it doesn't seem to have a way to provide a
@@ -2,12 +2,14 @@
mod tab_switcher_tests;
use collections::HashMap;
-use editor::items::entry_git_aware_label_color;
+use editor::items::{
+ entry_diagnostic_aware_icon_decoration_and_color, entry_git_aware_label_color,
+};
use fuzzy::StringMatchCandidate;
use gpui::{
Action, AnyElement, App, Context, DismissEvent, Entity, EntityId, EventEmitter, FocusHandle,
- Focusable, Modifiers, ModifiersChangedEvent, MouseButton, MouseUpEvent, ParentElement, Render,
- Styled, Task, WeakEntity, Window, actions, rems,
+ Focusable, Modifiers, ModifiersChangedEvent, MouseButton, MouseUpEvent, ParentElement, Point,
+ Render, Styled, Task, WeakEntity, Window, actions, rems,
};
use picker::{Picker, PickerDelegate};
use project::Project;
@@ -15,11 +17,14 @@ use schemars::JsonSchema;
use serde::Deserialize;
use settings::Settings;
use std::{cmp::Reverse, sync::Arc};
-use ui::{ListItem, ListItemSpacing, Tooltip, prelude::*};
+use ui::{
+ DecoratedIcon, IconDecoration, IconDecorationKind, ListItem, ListItemSpacing, Tooltip,
+ prelude::*,
+};
use util::ResultExt;
use workspace::{
ModalView, Pane, SaveIntent, Workspace,
- item::{ItemHandle, ItemSettings, TabContentParams},
+ item::{ItemHandle, ItemSettings, ShowDiagnostics, TabContentParams},
pane::{Event as PaneEvent, render_item_indicator, tab_details},
};
@@ -113,7 +118,13 @@ impl TabSwitcher {
}
let weak_workspace = workspace.weak_handle();
+
let project = workspace.project().clone();
+ let original_items: Vec<_> = workspace
+ .panes()
+ .iter()
+ .map(|p| (p.clone(), p.read(cx).active_item_index()))
+ .collect();
workspace.toggle_modal(window, cx, |window, cx| {
let delegate = TabSwitcherDelegate::new(
project,
@@ -124,6 +135,7 @@ impl TabSwitcher {
is_global,
window,
cx,
+ original_items,
);
TabSwitcher::new(delegate, window, is_global, cx)
});
@@ -221,7 +233,80 @@ pub struct TabSwitcherDelegate {
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
matches: Vec<TabMatch>,
+ original_items: Vec<(Entity<Pane>, usize)>,
is_all_panes: bool,
+ restored_items: bool,
+}
+
+impl TabMatch {
+ fn icon(
+ &self,
+ project: &Entity<Project>,
+ selected: bool,
+ window: &Window,
+ cx: &App,
+ ) -> Option<DecoratedIcon> {
+ let icon = self.item.tab_icon(window, cx)?;
+ let item_settings = ItemSettings::get_global(cx);
+ let show_diagnostics = item_settings.show_diagnostics;
+ let git_status_color = item_settings
+ .git_status
+ .then(|| {
+ let path = self.item.project_path(cx)?;
+ let project = project.read(cx);
+ let entry = project.entry_for_path(&path, cx)?;
+ let git_status = project
+ .project_path_git_status(&path, cx)
+ .map(|status| status.summary())
+ .unwrap_or_default();
+ Some(entry_git_aware_label_color(
+ git_status,
+ entry.is_ignored,
+ selected,
+ ))
+ })
+ .flatten();
+ let colored_icon = icon.color(git_status_color.unwrap_or_default());
+
+ let most_severe_diagnostic_level = if show_diagnostics == ShowDiagnostics::Off {
+ None
+ } else {
+ let buffer_store = project.read(cx).buffer_store().read(cx);
+ let buffer = self
+ .item
+ .project_path(cx)
+ .and_then(|path| buffer_store.get_by_path(&path))
+ .map(|buffer| buffer.read(cx));
+ buffer.and_then(|buffer| {
+ buffer
+ .buffer_diagnostics(None)
+ .iter()
+ .map(|diagnostic_entry| diagnostic_entry.diagnostic.severity)
+ .min()
+ })
+ };
+
+ let decorations =
+ entry_diagnostic_aware_icon_decoration_and_color(most_severe_diagnostic_level)
+ .filter(|(d, _)| {
+ *d != IconDecorationKind::Triangle
+ || show_diagnostics != ShowDiagnostics::Errors
+ })
+ .map(|(icon, color)| {
+ let knockout_item_color = if selected {
+ cx.theme().colors().element_selected
+ } else {
+ cx.theme().colors().element_background
+ };
+ IconDecoration::new(icon, knockout_item_color, cx)
+ .color(color.color(cx))
+ .position(Point {
+ x: px(-2.),
+ y: px(-2.),
+ })
+ });
+ Some(DecoratedIcon::new(colored_icon, decorations))
+ }
}
impl TabSwitcherDelegate {
@@ -235,6 +320,7 @@ impl TabSwitcherDelegate {
is_all_panes: bool,
window: &mut Window,
cx: &mut Context<TabSwitcher>,
+ original_items: Vec<(Entity<Pane>, usize)>,
) -> Self {
Self::subscribe_to_updates(&pane, window, cx);
Self {
@@ -246,6 +332,8 @@ impl TabSwitcherDelegate {
project,
matches: Vec::new(),
is_all_panes,
+ original_items,
+ restored_items: false,
}
}
@@ -272,7 +360,12 @@ impl TabSwitcherDelegate {
.detach();
}
- fn update_all_pane_matches(&mut self, query: String, window: &mut Window, cx: &mut App) {
+ fn update_all_pane_matches(
+ &mut self,
+ query: String,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
@@ -300,14 +393,6 @@ impl TabSwitcherDelegate {
let matches = if query.is_empty() {
let history = workspace.read(cx).recently_activated_items(cx);
- for item in &all_items {
- eprintln!(
- "{:?} {:?}",
- item.item.tab_content_text(0, cx),
- (Reverse(history.get(&item.item.item_id())), item.item_index)
- )
- }
- eprintln!("");
all_items
.sort_by_key(|tab| (Reverse(history.get(&tab.item.item_id())), tab.item_index));
all_items
@@ -338,7 +423,7 @@ impl TabSwitcherDelegate {
let selected_item_id = self.selected_item_id();
self.matches = matches;
- self.selected_index = self.compute_selected_index(selected_item_id);
+ self.selected_index = self.compute_selected_index(selected_item_id, window, cx);
}
fn update_matches(
@@ -397,7 +482,7 @@ impl TabSwitcherDelegate {
a_score.cmp(&b_score)
});
- self.selected_index = self.compute_selected_index(selected_item_id);
+ self.selected_index = self.compute_selected_index(selected_item_id, window, cx);
}
fn selected_item_id(&self) -> Option<EntityId> {
@@ -406,7 +491,12 @@ impl TabSwitcherDelegate {
.map(|tab_match| tab_match.item.item_id())
}
- fn compute_selected_index(&mut self, prev_selected_item_id: Option<EntityId>) -> usize {
+ fn compute_selected_index(
+ &mut self,
+ prev_selected_item_id: Option<EntityId>,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> usize {
if self.matches.is_empty() {
return 0;
}
@@ -428,8 +518,10 @@ impl TabSwitcherDelegate {
return self.matches.len() - 1;
}
+ // This only runs when initially opening the picker
+ // Index 0 is already active, so don't preselect it for switching.
if self.matches.len() > 1 {
- // Index 0 is active, so don't preselect it for switching.
+ self.set_selected_index(1, window, cx);
return 1;
}
@@ -474,8 +566,25 @@ impl PickerDelegate for TabSwitcherDelegate {
self.selected_index
}
- fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
+ fn set_selected_index(
+ &mut self,
+ ix: usize,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) {
self.selected_index = ix;
+
+ let Some(selected_match) = self.matches.get(self.selected_index()) else {
+ return;
+ };
+ selected_match
+ .pane
+ .update(cx, |pane, cx| {
+ if let Some(index) = pane.index_for_item(selected_match.item.as_ref()) {
+ pane.activate_item(index, false, false, window, cx);
+ }
+ })
+ .ok();
cx.notify();
}
@@ -502,6 +611,13 @@ impl PickerDelegate for TabSwitcherDelegate {
let Some(selected_match) = self.matches.get(self.selected_index()) else {
return;
};
+
+ self.restored_items = true;
+ for (pane, index) in self.original_items.iter() {
+ pane.update(cx, |this, cx| {
+ this.activate_item(*index, false, false, window, cx);
+ })
+ }
selected_match
.pane
.update(cx, |pane, cx| {
@@ -512,7 +628,15 @@ impl PickerDelegate for TabSwitcherDelegate {
.ok();
}
- fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<TabSwitcherDelegate>>) {
+ fn dismissed(&mut self, window: &mut Window, cx: &mut Context<Picker<TabSwitcherDelegate>>) {
+ if !self.restored_items {
+ for (pane, index) in self.original_items.iter() {
+ pane.update(cx, |this, cx| {
+ this.activate_item(*index, false, false, window, cx);
+ })
+ }
+ }
+
self.tab_switcher
.update(cx, |_, cx| cx.emit(DismissEvent))
.log_err();
@@ -538,31 +662,7 @@ impl PickerDelegate for TabSwitcherDelegate {
};
let label = tab_match.item.tab_content(params, window, cx);
- let icon = tab_match.item.tab_icon(window, cx).map(|icon| {
- let git_status_color = ItemSettings::get_global(cx)
- .git_status
- .then(|| {
- tab_match
- .item
- .project_path(cx)
- .as_ref()
- .and_then(|path| {
- let project = self.project.read(cx);
- let entry = project.entry_for_path(path, cx)?;
- let git_status = project
- .project_path_git_status(path, cx)
- .map(|status| status.summary())
- .unwrap_or_default();
- Some((entry, git_status))
- })
- .map(|(entry, git_status)| {
- entry_git_aware_label_color(git_status, entry.is_ignored, selected)
- })
- })
- .flatten();
-
- icon.color(git_status_color.unwrap_or_default())
- });
+ let icon = tab_match.icon(&self.project, selected, window, cx);
let indicator = render_item_indicator(tab_match.item.boxed_clone(), cx);
let indicator_color = if let Some(ref indicator) = indicator {
@@ -604,7 +704,7 @@ impl PickerDelegate for TabSwitcherDelegate {
.inset(true)
.toggle_state(selected)
.child(h_flex().w_full().child(label))
- .start_slot::<Icon>(icon)
+ .start_slot::<DecoratedIcon>(icon)
.map(|el| {
if self.selected_index == ix {
el.end_slot::<AnyElement>(close_button)
@@ -1,16 +1,43 @@
+use std::fmt;
+
+use util::get_system_shell;
+
use crate::Shell;
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
-enum ShellKind {
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ShellKind {
#[default]
Posix,
+ Csh,
+ Fish,
Powershell,
Nushell,
Cmd,
}
+impl fmt::Display for ShellKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ShellKind::Posix => write!(f, "sh"),
+ ShellKind::Csh => write!(f, "csh"),
+ ShellKind::Fish => write!(f, "fish"),
+ ShellKind::Powershell => write!(f, "powershell"),
+ ShellKind::Nushell => write!(f, "nu"),
+ ShellKind::Cmd => write!(f, "cmd"),
+ }
+ }
+}
+
impl ShellKind {
- fn new(program: &str) -> Self {
+ pub fn system() -> Self {
+ Self::new(&get_system_shell())
+ }
+
+ pub fn new(program: &str) -> Self {
+ #[cfg(windows)]
+ let (_, program) = program.rsplit_once('\\').unwrap_or(("", program));
+ #[cfg(not(windows))]
+ let (_, program) = program.rsplit_once('/').unwrap_or(("", program));
if program == "powershell"
|| program.ends_with("powershell.exe")
|| program == "pwsh"
@@ -21,18 +48,24 @@ impl ShellKind {
ShellKind::Cmd
} else if program == "nu" {
ShellKind::Nushell
+ } else if program == "fish" {
+ ShellKind::Fish
+ } else if program == "csh" {
+ ShellKind::Csh
} else {
- // Someother shell detected, the user might install and use a
+ // Some other shell detected, the user might install and use a
// unix-like shell.
ShellKind::Posix
}
}
- fn to_shell_variable(&self, input: &str) -> String {
+ fn to_shell_variable(self, input: &str) -> String {
match self {
Self::Powershell => Self::to_powershell_variable(input),
Self::Cmd => Self::to_cmd_variable(input),
Self::Posix => input.to_owned(),
+ Self::Fish => input.to_owned(),
+ Self::Csh => input.to_owned(),
Self::Nushell => Self::to_nushell_variable(input),
}
}
@@ -153,7 +186,7 @@ impl ShellKind {
match self {
ShellKind::Powershell => vec!["-C".to_owned(), combined_command],
ShellKind::Cmd => vec!["/C".to_owned(), combined_command],
- ShellKind::Posix | ShellKind::Nushell => interactive
+ ShellKind::Posix | ShellKind::Nushell | ShellKind::Fish | ShellKind::Csh => interactive
.then(|| "-i".to_owned())
.into_iter()
.chain(["-c".to_owned(), combined_command])
@@ -162,18 +195,6 @@ impl ShellKind {
}
}
-fn system_shell() -> String {
- if cfg!(target_os = "windows") {
- // `alacritty_terminal` uses this as default on Windows. See:
- // https://github.com/alacritty/alacritty/blob/0d4ab7bca43213d96ddfe40048fc0f922543c6f8/alacritty_terminal/src/tty/windows/mod.rs#L130
- // We could use `util::get_windows_system_shell()` here, but we are running tasks here, so leave it to `powershell.exe`
- // should be okay.
- "powershell.exe".to_string()
- } else {
- std::env::var("SHELL").unwrap_or("/bin/sh".to_string())
- }
-}
-
/// ShellBuilder is used to turn a user-requested task into a
/// program that can be executed by the shell.
pub struct ShellBuilder {
@@ -184,22 +205,18 @@ pub struct ShellBuilder {
kind: ShellKind,
}
-pub static DEFAULT_REMOTE_SHELL: &str = "\"${SHELL:-sh}\"";
-
impl ShellBuilder {
/// Create a new ShellBuilder as configured.
- pub fn new(is_local: bool, shell: &Shell) -> Self {
- let (program, args) = match shell {
- Shell::System => {
- if is_local {
- (system_shell(), Vec::new())
- } else {
- (DEFAULT_REMOTE_SHELL.to_string(), Vec::new())
- }
- }
- Shell::Program(shell) => (shell.clone(), Vec::new()),
- Shell::WithArguments { program, args, .. } => (program.clone(), args.clone()),
+ pub fn new(remote_system_shell: Option<&str>, shell: &Shell) -> Self {
+ let (program, args) = match remote_system_shell {
+ Some(program) => (program.to_string(), Vec::new()),
+ None => match shell {
+ Shell::System => (get_system_shell(), Vec::new()),
+ Shell::Program(shell) => (shell.clone(), Vec::new()),
+ Shell::WithArguments { program, args, .. } => (program.clone(), args.clone()),
+ },
};
+
let kind = ShellKind::new(&program);
Self {
program,
@@ -212,6 +229,7 @@ impl ShellBuilder {
self.interactive = false;
self
}
+
/// Returns the label to show in the terminal tab
pub fn command_label(&self, command_label: &str) -> String {
match self.kind {
@@ -221,7 +239,7 @@ impl ShellBuilder {
ShellKind::Cmd => {
format!("{} /C '{}'", self.program, command_label)
}
- ShellKind::Posix | ShellKind::Nushell => {
+ ShellKind::Posix | ShellKind::Nushell | ShellKind::Fish | ShellKind::Csh => {
let interactivity = self.interactive.then_some("-i ").unwrap_or_default();
format!(
"{} {interactivity}-c '$\"{}\"'",
@@ -234,16 +252,14 @@ impl ShellBuilder {
pub fn build(
mut self,
task_command: Option<String>,
- task_args: &Vec<String>,
+ task_args: &[String],
) -> (String, Vec<String>) {
if let Some(task_command) = task_command {
- let combined_command = task_args
- .into_iter()
- .fold(task_command, |mut command, arg| {
- command.push(' ');
- command.push_str(&self.kind.to_shell_variable(arg));
- command
- });
+ let combined_command = task_args.iter().fold(task_command, |mut command, arg| {
+ command.push(' ');
+ command.push_str(&self.kind.to_shell_variable(arg));
+ command
+ });
self.args
.extend(self.kind.args_for_shell(self.interactive, combined_command));
@@ -260,11 +276,11 @@ mod test {
#[test]
fn test_nu_shell_variable_substitution() {
let shell = Shell::Program("nu".to_owned());
- let shell_builder = ShellBuilder::new(true, &shell);
+ let shell_builder = ShellBuilder::new(None, &shell);
let (program, args) = shell_builder.build(
Some("echo".into()),
- &vec![
+ &[
"${hello}".to_string(),
"$world".to_string(),
"nothing".to_string(),
@@ -75,7 +75,6 @@ impl<T: PartialEq + 'static + Sync> TrackedFile<T> {
{
let parsed_contents: Arc<RwLock<T>> = Arc::default();
cx.background_spawn({
- let parsed_contents = parsed_contents.clone();
async move {
while let Some(new_contents) = tracker.next().await {
if Arc::strong_count(&parsed_contents) == 1 {
@@ -22,7 +22,7 @@ pub use debug_format::{
AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest,
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
-pub use shell_builder::{DEFAULT_REMOTE_SHELL, ShellBuilder};
+pub use shell_builder::{ShellBuilder, ShellKind};
pub use task_template::{
DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates,
substitute_variables_in_map, substitute_variables_in_str,
@@ -100,7 +100,7 @@ impl SpawnInTerminal {
command: proto.command.clone(),
args: proto.args.clone(),
env: proto.env.into_iter().collect(),
- cwd: proto.cwd.map(PathBuf::from).clone(),
+ cwd: proto.cwd.map(PathBuf::from),
..Default::default()
}
}
@@ -183,6 +183,10 @@ impl TaskTemplate {
&mut substituted_variables,
)?
} else {
+ #[allow(
+ clippy::redundant_clone,
+ reason = "We want to clone the full_label to avoid borrowing it in the fold closure"
+ )]
full_label.clone()
}
.lines()
@@ -329,15 +333,16 @@ fn substitute_all_template_variables_in_str<A: AsRef<str>>(
if let Some(substituted_variable) = variable_names.get(variable_name) {
substituted_variables.insert(substituted_variable.clone());
}
-
- let mut name = name.as_ref().to_owned();
- // Got a task variable hit
+ // Got a task variable hit - use the variable value, ignore default
+ return Ok(Some(name.as_ref().to_owned()));
+ } else if variable_name.starts_with(ZED_VARIABLE_NAME_PREFIX) {
+ // Unknown ZED variable - use default if available
if !default.is_empty() {
- name.push_str(default);
+ // Strip the colon and return the default value
+ return Ok(Some(default[1..].to_owned()));
+ } else {
+ bail!("Unknown variable name: {variable_name}");
}
- return Ok(Some(name));
- } else if variable_name.starts_with(ZED_VARIABLE_NAME_PREFIX) {
- bail!("Unknown variable name: {variable_name}");
}
// This is an unknown variable.
// We should not error out, as they may come from user environment (e.g. $PATH). That means that the variable substitution might not be perfect.
@@ -453,7 +458,7 @@ mod tests {
TaskTemplate {
label: "".to_string(),
command: "".to_string(),
- ..task_with_all_properties.clone()
+ ..task_with_all_properties
},
] {
assert_eq!(
@@ -521,7 +526,7 @@ mod tests {
);
let cx = TaskContext {
- cwd: Some(context_cwd.clone()),
+ cwd: Some(context_cwd),
task_variables: TaskVariables::default(),
project_env: HashMap::default(),
};
@@ -768,7 +773,7 @@ mod tests {
"test_env_key".to_string(),
format!("test_env_var_{}", VariableName::Symbol.template_value()),
)]),
- ..task_with_all_properties.clone()
+ ..task_with_all_properties
},
]
.into_iter()
@@ -871,7 +876,7 @@ mod tests {
let context = TaskContext {
cwd: None,
- task_variables: TaskVariables::from_iter(all_variables.clone()),
+ task_variables: TaskVariables::from_iter(all_variables),
project_env,
};
@@ -888,4 +893,81 @@ mod tests {
"overwritten"
);
}
+
+ #[test]
+ fn test_variable_default_values() {
+ let task_with_defaults = TaskTemplate {
+ label: "test with defaults".to_string(),
+ command: format!(
+ "echo ${{{}}}",
+ VariableName::File.to_string() + ":fallback.txt"
+ ),
+ args: vec![
+ "${ZED_MISSING_VAR:default_value}".to_string(),
+ format!("${{{}}}", VariableName::Row.to_string() + ":42"),
+ ],
+ ..TaskTemplate::default()
+ };
+
+ // Test 1: When ZED_FILE exists, should use actual value and ignore default
+ let context_with_file = TaskContext {
+ cwd: None,
+ task_variables: TaskVariables::from_iter(vec![
+ (VariableName::File, "actual_file.rs".to_string()),
+ (VariableName::Row, "123".to_string()),
+ ]),
+ project_env: HashMap::default(),
+ };
+
+ let resolved = task_with_defaults
+ .resolve_task(TEST_ID_BASE, &context_with_file)
+ .expect("Should resolve task with existing variables");
+
+ assert_eq!(
+ resolved.resolved.command.unwrap(),
+ "echo actual_file.rs",
+ "Should use actual ZED_FILE value, not default"
+ );
+ assert_eq!(
+ resolved.resolved.args,
+ vec!["default_value", "123"],
+ "Should use default for missing var, actual value for existing var"
+ );
+
+ // Test 2: When ZED_FILE doesn't exist, should use default value
+ let context_without_file = TaskContext {
+ cwd: None,
+ task_variables: TaskVariables::from_iter(vec![(VariableName::Row, "456".to_string())]),
+ project_env: HashMap::default(),
+ };
+
+ let resolved = task_with_defaults
+ .resolve_task(TEST_ID_BASE, &context_without_file)
+ .expect("Should resolve task using default values");
+
+ assert_eq!(
+ resolved.resolved.command.unwrap(),
+ "echo fallback.txt",
+ "Should use default value when ZED_FILE is missing"
+ );
+ assert_eq!(
+ resolved.resolved.args,
+ vec!["default_value", "456"],
+ "Should use defaults for missing vars"
+ );
+
+ // Test 3: Missing ZED variable without default should fail
+ let task_no_default = TaskTemplate {
+ label: "test no default".to_string(),
+ command: "${ZED_MISSING_NO_DEFAULT}".to_string(),
+ ..TaskTemplate::default()
+ };
+
+ assert!(
+ task_no_default
+ .resolve_task(TEST_ID_BASE, &TaskContext::default())
+ .is_none(),
+ "Should fail when ZED variable has no default and doesn't exist"
+ );
+ }
}
@@ -461,7 +461,7 @@ impl PickerDelegate for TasksModalDelegate {
tooltip_label_text.push_str(&resolved_task.resolved.command_label);
}
- if template.tags.len() > 0 {
+ if !template.tags.is_empty() {
tooltip_label_text.push('\n');
tooltip_label_text.push_str(
template
@@ -550,7 +550,7 @@ impl PickerDelegate for TasksModalDelegate {
list_item.tooltip(move |_, _| item_label.clone())
})
.map(|item| {
- let item = if matches!(source_kind, TaskSourceKind::UserInput)
+ if matches!(source_kind, TaskSourceKind::UserInput)
|| Some(ix) <= self.divider_index
{
let task_index = hit.candidate_id;
@@ -579,8 +579,7 @@ impl PickerDelegate for TasksModalDelegate {
item.end_hover_slot(delete_button)
} else {
item
- };
- item
+ }
})
.toggle_state(selected)
.child(highlighted_location.render(window, cx)),
@@ -148,9 +148,9 @@ pub fn toggle_modal(
) -> Task<()> {
let task_store = workspace.project().read(cx).task_store().clone();
let workspace_handle = workspace.weak_handle();
- let can_open_modal = workspace.project().update(cx, |project, cx| {
- project.is_local() || project.ssh_connection_string(cx).is_some() || project.is_via_ssh()
- });
+ let can_open_modal = workspace
+ .project()
+ .read_with(cx, |project, _| !project.is_via_collab());
if can_open_modal {
let task_contexts = task_contexts(workspace, window, cx);
cx.spawn_in(window, async move |workspace, cx| {
@@ -434,7 +434,7 @@ mod tests {
)
.await;
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
- let worktree_store = project.read_with(cx, |project, _| project.worktree_store().clone());
+ let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
let rust_language = Arc::new(
Language::new(
LanguageConfig::default(),
@@ -344,7 +344,6 @@ pub struct TerminalBuilder {
impl TerminalBuilder {
pub fn new(
working_directory: Option<PathBuf>,
- python_venv_directory: Option<PathBuf>,
task: Option<TaskState>,
shell: Shell,
mut env: HashMap<String, String>,
@@ -353,8 +352,9 @@ impl TerminalBuilder {
max_scroll_history_lines: Option<usize>,
is_ssh_terminal: bool,
window_id: u64,
- completion_tx: Sender<Option<ExitStatus>>,
+ completion_tx: Option<Sender<Option<ExitStatus>>>,
cx: &App,
+ activation_script: Vec<String>,
) -> Result<TerminalBuilder> {
// If the parent environment doesn't have a locale set
// (As is the case when launched from a .app on MacOS),
@@ -428,13 +428,10 @@ impl TerminalBuilder {
.clone()
.or_else(|| Some(home_dir().to_path_buf())),
drain_on_exit: true,
- env: env.into_iter().collect(),
+ env: env.clone().into_iter().collect(),
}
};
- // Setup Alacritty's env, which modifies the current process's environment
- alacritty_terminal::tty::setup_env();
-
let default_cursor_style = AlacCursorStyle::from(cursor_shape);
let scrolling_history = if task.is_some() {
// Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling.
@@ -486,7 +483,7 @@ impl TerminalBuilder {
//And connect them together
let event_loop = EventLoop::new(
term.clone(),
- ZedListener(events_tx.clone()),
+ ZedListener(events_tx),
pty,
pty_options.drain_on_exit,
false,
@@ -496,7 +493,9 @@ impl TerminalBuilder {
let pty_tx = event_loop.channel();
let _io_thread = event_loop.spawn(); // DANGER
- let terminal = Terminal {
+ let no_task = task.is_none();
+
+ let mut terminal = Terminal {
task,
pty_tx: Notifier(pty_tx),
completion_tx,
@@ -517,13 +516,29 @@ impl TerminalBuilder {
hyperlink_regex_searches: RegexSearches::new(),
vi_mode_enabled: false,
is_ssh_terminal,
- python_venv_directory,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
shell_program,
+ activation_script: activation_script.clone(),
+ template: CopyTemplate {
+ shell,
+ env,
+ cursor_shape,
+ alternate_scroll,
+ max_scroll_history_lines,
+ window_id,
+ },
};
+ if cfg!(not(target_os = "windows")) && !activation_script.is_empty() && no_task {
+ for activation_script in activation_script {
+ terminal.input(activation_script.into_bytes());
+ terminal.write_to_pty(b"\n");
+ }
+ terminal.clear();
+ }
+
Ok(TerminalBuilder {
terminal,
events_rx,
@@ -683,7 +698,7 @@ pub enum SelectionPhase {
pub struct Terminal {
pty_tx: Notifier,
- completion_tx: Sender<Option<ExitStatus>>,
+ completion_tx: Option<Sender<Option<ExitStatus>>>,
term: Arc<FairMutex<Term<ZedListener>>>,
term_config: Config,
events: VecDeque<InternalEvent>,
@@ -695,7 +710,6 @@ pub struct Terminal {
pub breadcrumb_text: String,
pub pty_info: PtyProcessInfo,
title_override: Option<SharedString>,
- pub python_venv_directory: Option<PathBuf>,
scroll_px: Pixels,
next_link_id: usize,
selection_phase: SelectionPhase,
@@ -707,6 +721,17 @@ pub struct Terminal {
last_hyperlink_search_position: Option<Point<Pixels>>,
#[cfg(windows)]
shell_program: Option<String>,
+ template: CopyTemplate,
+ activation_script: Vec<String>,
+}
+
+struct CopyTemplate {
+ shell: Shell,
+ env: HashMap<String, String>,
+ cursor_shape: CursorShape,
+ alternate_scroll: AlternateScroll,
+ max_scroll_history_lines: Option<usize>,
+ window_id: u64,
}
pub struct TaskState {
@@ -1116,11 +1141,6 @@ impl Terminal {
}
}
- pub fn clear_matches(&mut self) {
- self.matches.clear();
- self.set_selection(None);
- }
-
pub fn select_matches(&mut self, matches: &[RangeInclusive<AlacPoint>]) {
let matches_to_select = self
.matches
@@ -1661,7 +1681,7 @@ impl Terminal {
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
MouseButton::Middle => {
if let Some(item) = _cx.read_from_primary() {
- let text = item.text().unwrap_or_default().to_string();
+ let text = item.text().unwrap_or_default();
self.input(text.into_bytes());
}
}
@@ -1895,7 +1915,9 @@ impl Terminal {
}
});
- self.completion_tx.try_send(e).ok();
+ if let Some(tx) = &self.completion_tx {
+ tx.try_send(e).ok();
+ }
let task = match &mut self.task {
Some(task) => task,
None => {
@@ -1950,6 +1972,28 @@ impl Terminal {
pub fn vi_mode_enabled(&self) -> bool {
self.vi_mode_enabled
}
+
+ pub fn clone_builder(
+ &self,
+ cx: &App,
+ cwd: impl FnOnce() -> Option<PathBuf>,
+ ) -> Result<TerminalBuilder> {
+ let working_directory = self.working_directory().or_else(cwd);
+ TerminalBuilder::new(
+ working_directory,
+ None,
+ self.template.shell.clone(),
+ self.template.env.clone(),
+ self.template.cursor_shape,
+ self.template.alternate_scroll,
+ self.template.max_scroll_history_lines,
+ self.is_ssh_terminal,
+ self.template.window_id,
+ None,
+ cx,
+ self.activation_script.clone(),
+ )
+ }
}
// Helper function to convert a grid row to a string
@@ -2154,7 +2198,7 @@ mod tests {
};
use collections::HashMap;
use gpui::{Pixels, Point, TestAppContext, bounds, point, size};
- use rand::{Rng, distributions::Alphanumeric, rngs::ThreadRng, thread_rng};
+ use rand::{Rng, distr, rngs::ThreadRng};
#[ignore = "Test is flaky on macOS, and doesn't run on Windows"]
#[gpui::test]
@@ -2164,7 +2208,6 @@ mod tests {
let (completion_tx, completion_rx) = smol::channel::unbounded();
let terminal = cx.new(|cx| {
TerminalBuilder::new(
- None,
None,
None,
task::Shell::WithArguments {
@@ -2178,8 +2221,9 @@ mod tests {
None,
false,
0,
- completion_tx,
+ Some(completion_tx),
cx,
+ vec![],
)
.unwrap()
.subscribe(cx)
@@ -2205,13 +2249,14 @@ mod tests {
#[test]
fn test_mouse_to_cell_test() {
- let mut rng = thread_rng();
+ let mut rng = rand::rng();
const ITERATIONS: usize = 10;
const PRECISION: usize = 1000;
for _ in 0..ITERATIONS {
- let viewport_cells = rng.gen_range(15..20);
- let cell_size = rng.gen_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32;
+ let viewport_cells = rng.random_range(15..20);
+ let cell_size =
+ rng.random_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32;
let size = crate::TerminalBounds {
cell_width: Pixels::from(cell_size),
@@ -2233,8 +2278,8 @@ mod tests {
for col in 0..(viewport_cells - 1) {
let col = col as usize;
- let row_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
- let col_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
+ let row_offset = rng.random_range(0..PRECISION) as f32 / PRECISION as f32;
+ let col_offset = rng.random_range(0..PRECISION) as f32 / PRECISION as f32;
let mouse_pos = point(
Pixels::from(col as f32 * cell_size + col_offset),
@@ -2254,7 +2299,7 @@ mod tests {
#[test]
fn test_mouse_to_cell_clamp() {
- let mut rng = thread_rng();
+ let mut rng = rand::rng();
let size = crate::TerminalBounds {
cell_width: Pixels::from(10.),
@@ -2292,7 +2337,7 @@ mod tests {
for _ in 0..((size.height() / size.line_height()) as usize) {
let mut row_vec = Vec::new();
for _ in 0..((size.width() / size.cell_width()) as usize) {
- let cell_char = rng.sample(Alphanumeric) as char;
+ let cell_char = rng.sample(distr::Alphanumeric) as char;
row_vec.push(cell_char)
}
cells.push(row_vec)
@@ -1,1221 +1,1221 @@
-use alacritty_terminal::{
- Term,
- event::EventListener,
- grid::Dimensions,
- index::{Boundary, Column, Direction as AlacDirection, Line, Point as AlacPoint},
- term::search::{Match, RegexIter, RegexSearch},
-};
-use regex::Regex;
-use std::{ops::Index, sync::LazyLock};
-
-const URL_REGEX: &str = r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`']+"#;
-// Optional suffix matches MSBuild diagnostic suffixes for path parsing in PathLikeWithPosition
-// https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks
-const WORD_REGEX: &str =
- r#"[\$\+\w.\[\]:/\\@\-~()]+(?:\((?:\d+|\d+,\d+)\))|[\$\+\w.\[\]:/\\@\-~()]+"#;
-
-const PYTHON_FILE_LINE_REGEX: &str = r#"File "(?P<file>[^"]+)", line (?P<line>\d+)"#;
-
-static PYTHON_FILE_LINE_MATCHER: LazyLock<Regex> =
- LazyLock::new(|| Regex::new(PYTHON_FILE_LINE_REGEX).unwrap());
-
-fn python_extract_path_and_line(input: &str) -> Option<(&str, u32)> {
- if let Some(captures) = PYTHON_FILE_LINE_MATCHER.captures(input) {
- let path_part = captures.name("file")?.as_str();
-
- let line_number: u32 = captures.name("line")?.as_str().parse().ok()?;
- return Some((path_part, line_number));
- }
- None
-}
-
-pub(super) struct RegexSearches {
- url_regex: RegexSearch,
- word_regex: RegexSearch,
- python_file_line_regex: RegexSearch,
-}
-
-impl RegexSearches {
- pub(super) fn new() -> Self {
- Self {
- url_regex: RegexSearch::new(URL_REGEX).unwrap(),
- word_regex: RegexSearch::new(WORD_REGEX).unwrap(),
- python_file_line_regex: RegexSearch::new(PYTHON_FILE_LINE_REGEX).unwrap(),
- }
- }
-}
-
-pub(super) fn find_from_grid_point<T: EventListener>(
- term: &Term<T>,
- point: AlacPoint,
- regex_searches: &mut RegexSearches,
-) -> Option<(String, bool, Match)> {
- let grid = term.grid();
- let link = grid.index(point).hyperlink();
- let found_word = if let Some(ref url) = link {
- let mut min_index = point;
- loop {
- let new_min_index = min_index.sub(term, Boundary::Cursor, 1);
- if new_min_index == min_index || grid.index(new_min_index).hyperlink() != link {
- break;
- } else {
- min_index = new_min_index
- }
- }
-
- let mut max_index = point;
- loop {
- let new_max_index = max_index.add(term, Boundary::Cursor, 1);
- if new_max_index == max_index || grid.index(new_max_index).hyperlink() != link {
- break;
- } else {
- max_index = new_max_index
- }
- }
-
- let url = url.uri().to_owned();
- let url_match = min_index..=max_index;
-
- Some((url, true, url_match))
- } else if let Some(url_match) = regex_match_at(term, point, &mut regex_searches.url_regex) {
- let url = term.bounds_to_string(*url_match.start(), *url_match.end());
- Some((url, true, url_match))
- } else if let Some(python_match) =
- regex_match_at(term, point, &mut regex_searches.python_file_line_regex)
- {
- let matching_line = term.bounds_to_string(*python_match.start(), *python_match.end());
- python_extract_path_and_line(&matching_line).map(|(file_path, line_number)| {
- (format!("{file_path}:{line_number}"), false, python_match)
- })
- } else if let Some(word_match) = regex_match_at(term, point, &mut regex_searches.word_regex) {
- let file_path = term.bounds_to_string(*word_match.start(), *word_match.end());
-
- let (sanitized_match, sanitized_word) = 'sanitize: {
- let mut word_match = word_match;
- let mut file_path = file_path;
-
- if is_path_surrounded_by_common_symbols(&file_path) {
- word_match = Match::new(
- word_match.start().add(term, Boundary::Grid, 1),
- word_match.end().sub(term, Boundary::Grid, 1),
- );
- file_path = file_path[1..file_path.len() - 1].to_owned();
- }
-
- while file_path.ends_with(':') {
- file_path.pop();
- word_match = Match::new(
- *word_match.start(),
- word_match.end().sub(term, Boundary::Grid, 1),
- );
- }
- let mut colon_count = 0;
- for c in file_path.chars() {
- if c == ':' {
- colon_count += 1;
- }
- }
- // strip trailing comment after colon in case of
- // file/at/path.rs:row:column:description or error message
- // so that the file path is `file/at/path.rs:row:column`
- if colon_count > 2 {
- let last_index = file_path.rfind(':').unwrap();
- let prev_is_digit = last_index > 0
- && file_path
- .chars()
- .nth(last_index - 1)
- .is_some_and(|c| c.is_ascii_digit());
- let next_is_digit = last_index < file_path.len() - 1
- && file_path
- .chars()
- .nth(last_index + 1)
- .is_none_or(|c| c.is_ascii_digit());
- if prev_is_digit && !next_is_digit {
- let stripped_len = file_path.len() - last_index;
- word_match = Match::new(
- *word_match.start(),
- word_match.end().sub(term, Boundary::Grid, stripped_len),
- );
- file_path = file_path[0..last_index].to_owned();
- }
- }
-
- break 'sanitize (word_match, file_path);
- };
-
- Some((sanitized_word, false, sanitized_match))
- } else {
- None
- };
-
- found_word.map(|(maybe_url_or_path, is_url, word_match)| {
- if is_url {
- // Treat "file://" IRIs like file paths to ensure
- // that line numbers at the end of the path are
- // handled correctly
- if let Some(path) = maybe_url_or_path.strip_prefix("file://") {
- (path.to_string(), false, word_match)
- } else {
- (maybe_url_or_path, true, word_match)
- }
- } else {
- (maybe_url_or_path, false, word_match)
- }
- })
-}
-
-fn is_path_surrounded_by_common_symbols(path: &str) -> bool {
- // Avoid detecting `[]` or `()` strings as paths, surrounded by common symbols
- path.len() > 2
- // The rest of the brackets and various quotes cannot be matched by the [`WORD_REGEX`] hence not checked for.
- && (path.starts_with('[') && path.ends_with(']')
- || path.starts_with('(') && path.ends_with(')'))
-}
-
-/// Based on alacritty/src/display/hint.rs > regex_match_at
-/// Retrieve the match, if the specified point is inside the content matching the regex.
-fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &mut RegexSearch) -> Option<Match> {
- visible_regex_match_iter(term, regex).find(|rm| rm.contains(&point))
-}
-
-/// Copied from alacritty/src/display/hint.rs:
-/// Iterate over all visible regex matches.
-fn visible_regex_match_iter<'a, T>(
- term: &'a Term<T>,
- regex: &'a mut RegexSearch,
-) -> impl Iterator<Item = Match> + 'a {
- const MAX_SEARCH_LINES: usize = 100;
-
- let viewport_start = Line(-(term.grid().display_offset() as i32));
- let viewport_end = viewport_start + term.bottommost_line();
- let mut start = term.line_search_left(AlacPoint::new(viewport_start, Column(0)));
- let mut end = term.line_search_right(AlacPoint::new(viewport_end, Column(0)));
- start.line = start.line.max(viewport_start - MAX_SEARCH_LINES);
- end.line = end.line.min(viewport_end + MAX_SEARCH_LINES);
-
- RegexIter::new(start, end, AlacDirection::Right, term, regex)
- .skip_while(move |rm| rm.end().line < viewport_start)
- .take_while(move |rm| rm.start().line <= viewport_end)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use alacritty_terminal::{
- event::VoidListener,
- index::{Boundary, Point as AlacPoint},
- term::{Config, cell::Flags, test::TermSize},
- vte::ansi::Handler,
- };
- use std::{cell::RefCell, ops::RangeInclusive, path::PathBuf};
- use url::Url;
- use util::paths::PathWithPosition;
-
- fn re_test(re: &str, hay: &str, expected: Vec<&str>) {
- let results: Vec<_> = regex::Regex::new(re)
- .unwrap()
- .find_iter(hay)
- .map(|m| m.as_str())
- .collect();
- assert_eq!(results, expected);
- }
-
- #[test]
- fn test_url_regex() {
- re_test(
- URL_REGEX,
- "test http://example.com test 'https://website1.com' test mailto:bob@example.com train",
- vec![
- "http://example.com",
- "https://website1.com",
- "mailto:bob@example.com",
- ],
- );
- }
-
- #[test]
- fn test_word_regex() {
- re_test(
- WORD_REGEX,
- "hello, world! \"What\" is this?",
- vec!["hello", "world", "What", "is", "this"],
- );
- }
-
- #[test]
- fn test_word_regex_with_linenum() {
- // filename(line) and filename(line,col) as used in MSBuild output
- // should be considered a single "word", even though comma is
- // usually a word separator
- re_test(WORD_REGEX, "a Main.cs(20) b", vec!["a", "Main.cs(20)", "b"]);
- re_test(
- WORD_REGEX,
- "Main.cs(20,5) Error desc",
- vec!["Main.cs(20,5)", "Error", "desc"],
- );
- // filename:line:col is a popular format for unix tools
- re_test(
- WORD_REGEX,
- "a Main.cs:20:5 b",
- vec!["a", "Main.cs:20:5", "b"],
- );
- // Some tools output "filename:line:col:message", which currently isn't
- // handled correctly, but might be in the future
- re_test(
- WORD_REGEX,
- "Main.cs:20:5:Error desc",
- vec!["Main.cs:20:5:Error", "desc"],
- );
- }
-
- #[test]
- fn test_python_file_line_regex() {
- re_test(
- PYTHON_FILE_LINE_REGEX,
- "hay File \"/zed/bad_py.py\", line 8 stack",
- vec!["File \"/zed/bad_py.py\", line 8"],
- );
- re_test(PYTHON_FILE_LINE_REGEX, "unrelated", vec![]);
- }
-
- #[test]
- fn test_python_file_line() {
- let inputs: Vec<(&str, Option<(&str, u32)>)> = vec![
- (
- "File \"/zed/bad_py.py\", line 8",
- Some(("/zed/bad_py.py", 8u32)),
- ),
- ("File \"path/to/zed/bad_py.py\"", None),
- ("unrelated", None),
- ("", None),
- ];
- let actual = inputs
- .iter()
- .map(|input| python_extract_path_and_line(input.0))
- .collect::<Vec<_>>();
- let expected = inputs.iter().map(|(_, output)| *output).collect::<Vec<_>>();
- assert_eq!(actual, expected);
- }
-
- // We use custom columns in many tests to workaround this issue by ensuring a wrapped
- // line never ends on a wide char:
- //
- // <https://github.com/alacritty/alacritty/issues/8586>
- //
- // This issue was recently fixed, as soon as we update to a version containing the fix we
- // can remove all the custom columns from these tests.
- //
- macro_rules! test_hyperlink {
- ($($lines:expr),+; $hyperlink_kind:ident) => { {
- use crate::terminal_hyperlinks::tests::line_cells_count;
- use std::cmp;
-
- let test_lines = vec![$($lines),+];
- let (total_cells, longest_line_cells) =
- test_lines.iter().copied()
- .map(line_cells_count)
- .fold((0, 0), |state, cells| (state.0 + cells, cmp::max(state.1, cells)));
-
- test_hyperlink!(
- // Alacritty has issues with 2 columns, use 3 as the minimum for now.
- [3, longest_line_cells / 2, longest_line_cells + 1];
- total_cells;
- test_lines.iter().copied();
- $hyperlink_kind
- )
- } };
-
- ($($columns:literal),+; $($lines:expr),+; $hyperlink_kind:ident) => { {
- use crate::terminal_hyperlinks::tests::line_cells_count;
-
- let test_lines = vec![$($lines),+];
- let total_cells = test_lines.iter().copied().map(line_cells_count).sum();
-
- test_hyperlink!(
- [ $($columns),+ ]; total_cells; test_lines.iter().copied(); $hyperlink_kind
- )
- } };
-
- ([ $($columns:expr),+ ]; $total_cells:expr; $lines:expr; $hyperlink_kind:ident) => { {
- use crate::terminal_hyperlinks::tests::{ test_hyperlink, HyperlinkKind };
-
- let source_location = format!("{}:{}", std::file!(), std::line!());
- for columns in vec![ $($columns),+] {
- test_hyperlink(columns, $total_cells, $lines, HyperlinkKind::$hyperlink_kind,
- &source_location);
- }
- } };
- }
-
- mod path {
- /// 👉 := **hovered** on following char
- ///
- /// 👈 := **hovered** on wide char spacer of previous full width char
- ///
- /// **`‹›`** := expected **hyperlink** match
- ///
- /// **`«»`** := expected **path**, **row**, and **column** capture groups
- ///
- /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
- /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
- ///
- macro_rules! test_path {
- ($($lines:literal),+) => { test_hyperlink!($($lines),+; Path) };
- ($($columns:literal),+; $($lines:literal),+) => {
- test_hyperlink!($($columns),+; $($lines),+; Path)
- };
- }
-
- #[test]
- fn simple() {
- // Rust paths
- // Just the path
- test_path!("‹«/👉test/cool.rs»›");
- test_path!("‹«/test/cool👉.rs»›");
-
- // path and line
- test_path!("‹«/👉test/cool.rs»:«4»›");
- test_path!("‹«/test/cool.rs»👉:«4»›");
- test_path!("‹«/test/cool.rs»:«👉4»›");
- test_path!("‹«/👉test/cool.rs»(«4»)›");
- test_path!("‹«/test/cool.rs»👉(«4»)›");
- test_path!("‹«/test/cool.rs»(«👉4»)›");
- test_path!("‹«/test/cool.rs»(«4»👉)›");
-
- // path, line, and column
- test_path!("‹«/👉test/cool.rs»:«4»:«2»›");
- test_path!("‹«/test/cool.rs»:«4»:«👉2»›");
- test_path!("‹«/👉test/cool.rs»(«4»,«2»)›");
- test_path!("‹«/test/cool.rs»(«4»👉,«2»)›");
-
- // path, line, column, and ':' suffix
- test_path!("‹«/👉test/cool.rs»:«4»:«2»›:");
- test_path!("‹«/test/cool.rs»:«4»:«👉2»›:");
- test_path!("‹«/👉test/cool.rs»(«4»,«2»)›:");
- test_path!("‹«/test/cool.rs»(«4»,«2»👉)›:");
-
- // path, line, column, and description
- test_path!("‹«/test/cool.rs»:«4»:«2»›👉:Error!");
- test_path!("‹«/test/cool.rs»:«4»:«2»›:👉Error!");
- test_path!("‹«/test/co👉ol.rs»(«4»,«2»)›:Error!");
-
- // Cargo output
- test_path!(" Compiling Cool 👉(‹«/test/Cool»›)");
- test_path!(" Compiling Cool (‹«/👉test/Cool»›)");
- test_path!(" Compiling Cool (‹«/test/Cool»›👉)");
-
- // Python
- test_path!("‹«awe👉some.py»›");
-
- test_path!(" ‹F👉ile \"«/awesome.py»\", line «42»›: Wat?");
- test_path!(" ‹File \"«/awe👉some.py»\", line «42»›: Wat?");
- test_path!(" ‹File \"«/awesome.py»👉\", line «42»›: Wat?");
- test_path!(" ‹File \"«/awesome.py»\", line «4👉2»›: Wat?");
- }
-
- #[test]
- fn colons_galore() {
- test_path!("‹«/test/co👉ol.rs»:«4»›");
- test_path!("‹«/test/co👉ol.rs»:«4»›:");
- test_path!("‹«/test/co👉ol.rs»:«4»:«2»›");
- test_path!("‹«/test/co👉ol.rs»:«4»:«2»›:");
- test_path!("‹«/test/co👉ol.rs»(«1»)›");
- test_path!("‹«/test/co👉ol.rs»(«1»)›:");
- test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›");
- test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›:");
- test_path!("‹«/test/co👉ol.rs»::«42»›");
- test_path!("‹«/test/co👉ol.rs»::«42»›:");
- test_path!("‹«/test/co👉ol.rs:4:2»(«1»,«618»)›");
- test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›::");
- }
-
- #[test]
- fn word_wide_chars() {
- // Rust paths
- test_path!(4, 6, 12; "‹«/👉例/cool.rs»›");
- test_path!(4, 6, 12; "‹«/例👈/cool.rs»›");
- test_path!(4, 8, 16; "‹«/例/cool.rs»:«👉4»›");
- test_path!(4, 8, 16; "‹«/例/cool.rs»:«4»:«👉2»›");
-
- // Cargo output
- test_path!(4, 27, 30; " Compiling Cool (‹«/👉例/Cool»›)");
- test_path!(4, 27, 30; " Compiling Cool (‹«/例👈/Cool»›)");
-
- // Python
- test_path!(4, 11; "‹«👉例wesome.py»›");
- test_path!(4, 11; "‹«例👈wesome.py»›");
- test_path!(6, 17, 40; " ‹File \"«/👉例wesome.py»\", line «42»›: Wat?");
- test_path!(6, 17, 40; " ‹File \"«/例👈wesome.py»\", line «42»›: Wat?");
- }
-
- #[test]
- fn non_word_wide_chars() {
- // Mojo diagnostic message
- test_path!(4, 18, 38; " ‹File \"«/awe👉some.🔥»\", line «42»›: Wat?");
- test_path!(4, 18, 38; " ‹File \"«/awesome👉.🔥»\", line «42»›: Wat?");
- test_path!(4, 18, 38; " ‹File \"«/awesome.👉🔥»\", line «42»›: Wat?");
- test_path!(4, 18, 38; " ‹File \"«/awesome.🔥👈»\", line «42»›: Wat?");
- }
-
- /// These likely rise to the level of being worth fixing.
- mod issues {
- #[test]
- #[cfg_attr(not(target_os = "windows"), should_panic(expected = "Path = «例»"))]
- #[cfg_attr(target_os = "windows", should_panic(expected = r#"Path = «C:\\例»"#))]
- // <https://github.com/alacritty/alacritty/issues/8586>
- fn issue_alacritty_8586() {
- // Rust paths
- test_path!("‹«/👉例/cool.rs»›");
- test_path!("‹«/例👈/cool.rs»›");
- test_path!("‹«/例/cool.rs»:«👉4»›");
- test_path!("‹«/例/cool.rs»:«4»:«👉2»›");
-
- // Cargo output
- test_path!(" Compiling Cool (‹«/👉例/Cool»›)");
- test_path!(" Compiling Cool (‹«/例👈/Cool»›)");
-
- // Python
- test_path!("‹«👉例wesome.py»›");
- test_path!("‹«例👈wesome.py»›");
- test_path!(" ‹File \"«/👉例wesome.py»\", line «42»›: Wat?");
- test_path!(" ‹File \"«/例👈wesome.py»\", line «42»›: Wat?");
- }
-
- #[test]
- #[should_panic(expected = "No hyperlink found")]
- // <https://github.com/zed-industries/zed/issues/12338>
- fn issue_12338() {
- // Issue #12338
- test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test👉、2.txt»›");
- test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test、👈2.txt»›");
- test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test👉。3.txt»›");
- test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test。👈3.txt»›");
-
- // Rust paths
- test_path!("‹«/👉🏃/🦀.rs»›");
- test_path!("‹«/🏃👈/🦀.rs»›");
- test_path!("‹«/🏃/👉🦀.rs»:«4»›");
- test_path!("‹«/🏃/🦀👈.rs»:«4»:«2»›");
-
- // Cargo output
- test_path!(" Compiling Cool (‹«/👉🏃/Cool»›)");
- test_path!(" Compiling Cool (‹«/🏃👈/Cool»›)");
-
- // Python
- test_path!("‹«👉🏃wesome.py»›");
- test_path!("‹«🏃👈wesome.py»›");
- test_path!(" ‹File \"«/👉🏃wesome.py»\", line «42»›: Wat?");
- test_path!(" ‹File \"«/🏃👈wesome.py»\", line «42»›: Wat?");
-
- // Mojo
- test_path!("‹«/awe👉some.🔥»› is some good Mojo!");
- test_path!("‹«/awesome👉.🔥»› is some good Mojo!");
- test_path!("‹«/awesome.👉🔥»› is some good Mojo!");
- test_path!("‹«/awesome.🔥👈»› is some good Mojo!");
- test_path!(" ‹File \"«/👉🏃wesome.🔥»\", line «42»›: Wat?");
- test_path!(" ‹File \"«/🏃👈wesome.🔥»\", line «42»›: Wat?");
- }
-
- #[test]
- #[cfg_attr(
- not(target_os = "windows"),
- should_panic(
- expected = "Path = «test/controllers/template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)"
- )
- )]
- #[cfg_attr(
- target_os = "windows",
- should_panic(
- expected = r#"Path = «test\\controllers\\template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)"#
- )
- )]
- // <https://github.com/zed-industries/zed/issues/28194>
- //
- // #28194 was closed, but the link includes the description part (":in" here), which
- // seems wrong...
- fn issue_28194() {
- test_path!(
- "‹«test/c👉ontrollers/template_items_controller_test.rb»:«20»›:in 'block (2 levels) in <class:TemplateItemsControllerTest>'"
- );
- test_path!(
- "‹«test/controllers/template_items_controller_test.rb»:«19»›:i👉n 'block in <class:TemplateItemsControllerTest>'"
- );
- }
- }
-
- /// Minor issues arguably not important enough to fix/workaround...
- mod nits {
- #[test]
- #[cfg_attr(
- not(target_os = "windows"),
- should_panic(expected = "Path = «/test/cool.rs(4»")
- )]
- #[cfg_attr(
- target_os = "windows",
- should_panic(expected = r#"Path = «C:\\test\\cool.rs(4»"#)
- )]
- fn alacritty_bugs_with_two_columns() {
- test_path!(2; "‹«/👉test/cool.rs»(«4»)›");
- test_path!(2; "‹«/test/cool.rs»(«👉4»)›");
- test_path!(2; "‹«/test/cool.rs»(«4»,«👉2»)›");
-
- // Python
- test_path!(2; "‹«awe👉some.py»›");
- }
-
- #[test]
- #[cfg_attr(
- not(target_os = "windows"),
- should_panic(
- expected = "Path = «/test/cool.rs», line = 1, at grid cells (0, 0)..=(9, 0)"
- )
- )]
- #[cfg_attr(
- target_os = "windows",
- should_panic(
- expected = r#"Path = «C:\\test\\cool.rs», line = 1, at grid cells (0, 0)..=(9, 2)"#
- )
- )]
- fn invalid_row_column_should_be_part_of_path() {
- test_path!("‹«/👉test/cool.rs:1:618033988749»›");
- test_path!("‹«/👉test/cool.rs(1,618033988749)»›");
- }
-
- #[test]
- #[should_panic(expected = "Path = «»")]
- fn colon_suffix_succeeds_in_finding_an_empty_maybe_path() {
- test_path!("‹«/test/cool.rs»:«4»:«2»›👉:", "What is this?");
- test_path!("‹«/test/cool.rs»(«4»,«2»)›👉:", "What is this?");
- }
-
- #[test]
- #[cfg_attr(
- not(target_os = "windows"),
- should_panic(expected = "Path = «/test/cool.rs»")
- )]
- #[cfg_attr(
- target_os = "windows",
- should_panic(expected = r#"Path = «C:\\test\\cool.rs»"#)
- )]
- fn many_trailing_colons_should_be_parsed_as_part_of_the_path() {
- test_path!("‹«/test/cool.rs:::👉:»›");
- test_path!("‹«/te:st/👉co:ol.r:s:4:2::::::»›");
- }
- }
-
- #[cfg(target_os = "windows")]
- mod windows {
- // Lots of fun to be had with long file paths (verbatim) and UNC paths on Windows.
- // See <https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation>
- // See <https://users.rust-lang.org/t/understanding-windows-paths/58583>
- // See <https://github.com/rust-lang/cargo/issues/13919>
-
- #[test]
- fn unc() {
- test_path!(r#"‹«\\server\share\👉test\cool.rs»›"#);
- test_path!(r#"‹«\\server\share\test\cool👉.rs»›"#);
- }
-
- mod issues {
- #[test]
- #[should_panic(
- expected = r#"Path = «C:\\test\\cool.rs», at grid cells (0, 0)..=(6, 0)"#
- )]
- fn issue_verbatim() {
- test_path!(r#"‹«\\?\C:\👉test\cool.rs»›"#);
- test_path!(r#"‹«\\?\C:\test\cool👉.rs»›"#);
- }
-
- #[test]
- #[should_panic(
- expected = r#"Path = «\\\\server\\share\\test\\cool.rs», at grid cells (0, 0)..=(10, 2)"#
- )]
- fn issue_verbatim_unc() {
- test_path!(r#"‹«\\?\UNC\server\share\👉test\cool.rs»›"#);
- test_path!(r#"‹«\\?\UNC\server\share\test\cool👉.rs»›"#);
- }
- }
- }
- }
-
- mod file_iri {
- // File IRIs have a ton of use cases, most of which we currently do not support. A few of
- // those cases are documented here as tests which are expected to fail.
- // See https://en.wikipedia.org/wiki/File_URI_scheme
-
- /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
- /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
- ///
- macro_rules! test_file_iri {
- ($file_iri:literal) => { { test_hyperlink!(concat!("‹«👉", $file_iri, "»›"); FileIri) } };
- ($($columns:literal),+; $file_iri:literal) => { {
- test_hyperlink!($($columns),+; concat!("‹«👉", $file_iri, "»›"); FileIri)
- } };
- }
-
- #[cfg(not(target_os = "windows"))]
- #[test]
- fn absolute_file_iri() {
- test_file_iri!("file:///test/cool/index.rs");
- test_file_iri!("file:///test/cool/");
- }
-
- mod issues {
- #[cfg(not(target_os = "windows"))]
- #[test]
- #[should_panic(expected = "Path = «/test/Ῥόδος/», at grid cells (0, 0)..=(15, 1)")]
- fn issue_file_iri_with_percent_encoded_characters() {
- // Non-space characters
- // file:///test/Ῥόδος/
- test_file_iri!("file:///test/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82/"); // URI
-
- // Spaces
- test_file_iri!("file:///te%20st/co%20ol/index.rs");
- test_file_iri!("file:///te%20st/co%20ol/");
- }
- }
-
- #[cfg(target_os = "windows")]
- mod windows {
- mod issues {
- // The test uses Url::to_file_path(), but it seems that the Url crate doesn't
- // support relative file IRIs.
- #[test]
- #[should_panic(
- expected = r#"Failed to interpret file IRI `file:/test/cool/index.rs` as a path"#
- )]
- fn issue_relative_file_iri() {
- test_file_iri!("file:/test/cool/index.rs");
- test_file_iri!("file:/test/cool/");
- }
-
- // See https://en.wikipedia.org/wiki/File_URI_scheme
- #[test]
- #[should_panic(
- expected = r#"Path = «C:\\test\\cool\\index.rs», at grid cells (0, 0)..=(9, 1)"#
- )]
- fn issue_absolute_file_iri() {
- test_file_iri!("file:///C:/test/cool/index.rs");
- test_file_iri!("file:///C:/test/cool/");
- }
-
- #[test]
- #[should_panic(
- expected = r#"Path = «C:\\test\\Ῥόδος\\», at grid cells (0, 0)..=(16, 1)"#
- )]
- fn issue_file_iri_with_percent_encoded_characters() {
- // Non-space characters
- // file:///test/Ῥόδος/
- test_file_iri!("file:///C:/test/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82/"); // URI
-
- // Spaces
- test_file_iri!("file:///C:/te%20st/co%20ol/index.rs");
- test_file_iri!("file:///C:/te%20st/co%20ol/");
- }
- }
- }
- }
-
- mod iri {
- /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
- /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
- ///
- macro_rules! test_iri {
- ($iri:literal) => { { test_hyperlink!(concat!("‹«👉", $iri, "»›"); Iri) } };
- ($($columns:literal),+; $iri:literal) => { {
- test_hyperlink!($($columns),+; concat!("‹«👉", $iri, "»›"); Iri)
- } };
- }
-
- #[test]
- fn simple() {
- // In the order they appear in URL_REGEX, except 'file://' which is treated as a path
- test_iri!("ipfs://test/cool.ipfs");
- test_iri!("ipns://test/cool.ipns");
- test_iri!("magnet://test/cool.git");
- test_iri!("mailto:someone@somewhere.here");
- test_iri!("gemini://somewhere.here");
- test_iri!("gopher://somewhere.here");
- test_iri!("http://test/cool/index.html");
- test_iri!("http://10.10.10.10:1111/cool.html");
- test_iri!("http://test/cool/index.html?amazing=1");
- test_iri!("http://test/cool/index.html#right%20here");
- test_iri!("http://test/cool/index.html?amazing=1#right%20here");
- test_iri!("https://test/cool/index.html");
- test_iri!("https://10.10.10.10:1111/cool.html");
- test_iri!("https://test/cool/index.html?amazing=1");
- test_iri!("https://test/cool/index.html#right%20here");
- test_iri!("https://test/cool/index.html?amazing=1#right%20here");
- test_iri!("news://test/cool.news");
- test_iri!("git://test/cool.git");
- test_iri!("ssh://user@somewhere.over.here:12345/test/cool.git");
- test_iri!("ftp://test/cool.ftp");
- }
-
- #[test]
- fn wide_chars() {
- // In the order they appear in URL_REGEX, except 'file://' which is treated as a path
- test_iri!(4, 20; "ipfs://例🏃🦀/cool.ipfs");
- test_iri!(4, 20; "ipns://例🏃🦀/cool.ipns");
- test_iri!(6, 20; "magnet://例🏃🦀/cool.git");
- test_iri!(4, 20; "mailto:someone@somewhere.here");
- test_iri!(4, 20; "gemini://somewhere.here");
- test_iri!(4, 20; "gopher://somewhere.here");
- test_iri!(4, 20; "http://例🏃🦀/cool/index.html");
- test_iri!(4, 20; "http://10.10.10.10:1111/cool.html");
- test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1");
- test_iri!(4, 20; "http://例🏃🦀/cool/index.html#right%20here");
- test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1#right%20here");
- test_iri!(4, 20; "https://例🏃🦀/cool/index.html");
- test_iri!(4, 20; "https://10.10.10.10:1111/cool.html");
- test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1");
- test_iri!(4, 20; "https://例🏃🦀/cool/index.html#right%20here");
- test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1#right%20here");
- test_iri!(4, 20; "news://例🏃🦀/cool.news");
- test_iri!(5, 20; "git://例/cool.git");
- test_iri!(5, 20; "ssh://user@somewhere.over.here:12345/例🏃🦀/cool.git");
- test_iri!(7, 20; "ftp://例🏃🦀/cool.ftp");
- }
-
- // There are likely more tests needed for IRI vs URI
- #[test]
- fn iris() {
- // These refer to the same location, see example here:
- // <https://en.wikipedia.org/wiki/Internationalized_Resource_Identifier#Compatibility>
- test_iri!("https://en.wiktionary.org/wiki/Ῥόδος"); // IRI
- test_iri!("https://en.wiktionary.org/wiki/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82"); // URI
- }
-
- #[test]
- #[should_panic(expected = "Expected a path, but was a iri")]
- fn file_is_a_path() {
- test_iri!("file://test/cool/index.rs");
- }
- }
-
- #[derive(Debug, PartialEq)]
- enum HyperlinkKind {
- FileIri,
- Iri,
- Path,
- }
-
- struct ExpectedHyperlink {
- hovered_grid_point: AlacPoint,
- hovered_char: char,
- hyperlink_kind: HyperlinkKind,
- iri_or_path: String,
- row: Option<u32>,
- column: Option<u32>,
- hyperlink_match: RangeInclusive<AlacPoint>,
- }
-
- /// Converts to Windows style paths on Windows, like path!(), but at runtime for improved test
- /// readability.
- fn build_term_from_test_lines<'a>(
- hyperlink_kind: HyperlinkKind,
- term_size: TermSize,
- test_lines: impl Iterator<Item = &'a str>,
- ) -> (Term<VoidListener>, ExpectedHyperlink) {
- #[derive(Default, Eq, PartialEq)]
- enum HoveredState {
- #[default]
- HoveredScan,
- HoveredNextChar,
- Done,
- }
-
- #[derive(Default, Eq, PartialEq)]
- enum MatchState {
- #[default]
- MatchScan,
- MatchNextChar,
- Match(AlacPoint),
- Done,
- }
-
- #[derive(Default, Eq, PartialEq)]
- enum CapturesState {
- #[default]
- PathScan,
- PathNextChar,
- Path(AlacPoint),
- RowScan,
- Row(String),
- ColumnScan,
- Column(String),
- Done,
- }
-
- fn prev_input_point_from_term(term: &Term<VoidListener>) -> AlacPoint {
- let grid = term.grid();
- let cursor = &grid.cursor;
- let mut point = cursor.point;
-
- if !cursor.input_needs_wrap {
- point.column -= 1;
- }
-
- if grid.index(point).flags.contains(Flags::WIDE_CHAR_SPACER) {
- point.column -= 1;
- }
-
- point
- }
-
- let mut hovered_grid_point: Option<AlacPoint> = None;
- let mut hyperlink_match = AlacPoint::default()..=AlacPoint::default();
- let mut iri_or_path = String::default();
- let mut row = None;
- let mut column = None;
- let mut prev_input_point = AlacPoint::default();
- let mut hovered_state = HoveredState::default();
- let mut match_state = MatchState::default();
- let mut captures_state = CapturesState::default();
- let mut term = Term::new(Config::default(), &term_size, VoidListener);
-
- for text in test_lines {
- let chars: Box<dyn Iterator<Item = char>> =
- if cfg!(windows) && hyperlink_kind == HyperlinkKind::Path {
- Box::new(text.chars().map(|c| if c == '/' { '\\' } else { c })) as _
- } else {
- Box::new(text.chars()) as _
- };
- let mut chars = chars.peekable();
- while let Some(c) = chars.next() {
- match c {
- '👉' => {
- hovered_state = HoveredState::HoveredNextChar;
- }
- '👈' => {
- hovered_grid_point = Some(prev_input_point.add(&term, Boundary::Grid, 1));
- }
- '«' | '»' => {
- captures_state = match captures_state {
- CapturesState::PathScan => CapturesState::PathNextChar,
- CapturesState::PathNextChar => {
- panic!("Should have been handled by char input")
- }
- CapturesState::Path(start_point) => {
- iri_or_path = term.bounds_to_string(start_point, prev_input_point);
- CapturesState::RowScan
- }
- CapturesState::RowScan => CapturesState::Row(String::new()),
- CapturesState::Row(number) => {
- row = Some(number.parse::<u32>().unwrap());
- CapturesState::ColumnScan
- }
- CapturesState::ColumnScan => CapturesState::Column(String::new()),
- CapturesState::Column(number) => {
- column = Some(number.parse::<u32>().unwrap());
- CapturesState::Done
- }
- CapturesState::Done => {
- panic!("Extra '«', '»'")
- }
- }
- }
- '‹' | '›' => {
- match_state = match match_state {
- MatchState::MatchScan => MatchState::MatchNextChar,
- MatchState::MatchNextChar => {
- panic!("Should have been handled by char input")
- }
- MatchState::Match(start_point) => {
- hyperlink_match = start_point..=prev_input_point;
- MatchState::Done
- }
- MatchState::Done => {
- panic!("Extra '‹', '›'")
- }
- }
- }
- _ => {
- if let CapturesState::Row(number) | CapturesState::Column(number) =
- &mut captures_state
- {
- number.push(c)
- }
-
- let is_windows_abs_path_start = captures_state
- == CapturesState::PathNextChar
- && cfg!(windows)
- && hyperlink_kind == HyperlinkKind::Path
- && c == '\\'
- && chars.peek().is_some_and(|c| *c != '\\');
-
- if is_windows_abs_path_start {
- // Convert Unix abs path start into Windows abs path start so that the
- // same test can be used for both OSes.
- term.input('C');
- prev_input_point = prev_input_point_from_term(&term);
- term.input(':');
- term.input(c);
- } else {
- term.input(c);
- prev_input_point = prev_input_point_from_term(&term);
- }
-
- if hovered_state == HoveredState::HoveredNextChar {
- hovered_grid_point = Some(prev_input_point);
- hovered_state = HoveredState::Done;
- }
- if captures_state == CapturesState::PathNextChar {
- captures_state = CapturesState::Path(prev_input_point);
- }
- if match_state == MatchState::MatchNextChar {
- match_state = MatchState::Match(prev_input_point);
- }
- }
- }
- }
- term.move_down_and_cr(1);
- }
-
- if hyperlink_kind == HyperlinkKind::FileIri {
- let Ok(url) = Url::parse(&iri_or_path) else {
- panic!("Failed to parse file IRI `{iri_or_path}`");
- };
- let Ok(path) = url.to_file_path() else {
- panic!("Failed to interpret file IRI `{iri_or_path}` as a path");
- };
- iri_or_path = path.to_string_lossy().to_string();
- }
-
- if cfg!(windows) {
- // Handle verbatim and UNC paths for Windows
- if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\UNC\"#) {
- iri_or_path = format!(r#"\\{stripped}"#);
- } else if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\"#) {
- iri_or_path = stripped.to_string();
- }
- }
-
- let hovered_grid_point = hovered_grid_point.expect("Missing hovered point (👉 or 👈)");
- let hovered_char = term.grid().index(hovered_grid_point).c;
- (
- term,
- ExpectedHyperlink {
- hovered_grid_point,
- hovered_char,
- hyperlink_kind,
- iri_or_path,
- row,
- column,
- hyperlink_match,
- },
- )
- }
-
- fn line_cells_count(line: &str) -> usize {
- // This avoids taking a dependency on the unicode-width crate
- fn width(c: char) -> usize {
- match c {
- // Fullwidth unicode characters used in tests
- '例' | '🏃' | '🦀' | '🔥' => 2,
- _ => 1,
- }
- }
- const CONTROL_CHARS: &str = "‹«👉👈»›";
- line.chars()
- .filter(|c| !CONTROL_CHARS.contains(*c))
- .map(width)
- .sum::<usize>()
- }
-
- struct CheckHyperlinkMatch<'a> {
- term: &'a Term<VoidListener>,
- expected_hyperlink: &'a ExpectedHyperlink,
- source_location: &'a str,
- }
-
- impl<'a> CheckHyperlinkMatch<'a> {
- fn new(
- term: &'a Term<VoidListener>,
- expected_hyperlink: &'a ExpectedHyperlink,
- source_location: &'a str,
- ) -> Self {
- Self {
- term,
- expected_hyperlink,
- source_location,
- }
- }
-
- fn check_path_with_position_and_match(
- &self,
- path_with_position: PathWithPosition,
- hyperlink_match: &Match,
- ) {
- let format_path_with_position_and_match =
- |path_with_position: &PathWithPosition, hyperlink_match: &Match| {
- let mut result =
- format!("Path = «{}»", &path_with_position.path.to_string_lossy());
- if let Some(row) = path_with_position.row {
- result += &format!(", line = {row}");
- if let Some(column) = path_with_position.column {
- result += &format!(", column = {column}");
- }
- }
-
- result += &format!(
- ", at grid cells {}",
- Self::format_hyperlink_match(hyperlink_match)
- );
- result
- };
-
- assert_ne!(
- self.expected_hyperlink.hyperlink_kind,
- HyperlinkKind::Iri,
- "\n at {}\nExpected a path, but was a iri:\n{}",
- self.source_location,
- self.format_renderable_content()
- );
-
- assert_eq!(
- format_path_with_position_and_match(
- &PathWithPosition {
- path: PathBuf::from(self.expected_hyperlink.iri_or_path.clone()),
- row: self.expected_hyperlink.row,
- column: self.expected_hyperlink.column
- },
- &self.expected_hyperlink.hyperlink_match
- ),
- format_path_with_position_and_match(&path_with_position, hyperlink_match),
- "\n at {}:\n{}",
- self.source_location,
- self.format_renderable_content()
- );
- }
-
- fn check_iri_and_match(&self, iri: String, hyperlink_match: &Match) {
- let format_iri_and_match = |iri: &String, hyperlink_match: &Match| {
- format!(
- "Url = «{iri}», at grid cells {}",
- Self::format_hyperlink_match(hyperlink_match)
- )
- };
-
- assert_eq!(
- self.expected_hyperlink.hyperlink_kind,
- HyperlinkKind::Iri,
- "\n at {}\nExpected a iri, but was a path:\n{}",
- self.source_location,
- self.format_renderable_content()
- );
-
- assert_eq!(
- format_iri_and_match(
- &self.expected_hyperlink.iri_or_path,
- &self.expected_hyperlink.hyperlink_match
- ),
- format_iri_and_match(&iri, hyperlink_match),
- "\n at {}:\n{}",
- self.source_location,
- self.format_renderable_content()
- );
- }
-
- fn format_hyperlink_match(hyperlink_match: &Match) -> String {
- format!(
- "({}, {})..=({}, {})",
- hyperlink_match.start().line.0,
- hyperlink_match.start().column.0,
- hyperlink_match.end().line.0,
- hyperlink_match.end().column.0
- )
- }
-
- fn format_renderable_content(&self) -> String {
- let mut result = format!("\nHovered on '{}'\n", self.expected_hyperlink.hovered_char);
-
- let mut first_header_row = String::new();
- let mut second_header_row = String::new();
- let mut marker_header_row = String::new();
- for index in 0..self.term.columns() {
- let remainder = index % 10;
- first_header_row.push_str(
- &(index > 0 && remainder == 0)
- .then_some((index / 10).to_string())
- .unwrap_or(" ".into()),
- );
- second_header_row += &remainder.to_string();
- if index == self.expected_hyperlink.hovered_grid_point.column.0 {
- marker_header_row.push('↓');
- } else {
- marker_header_row.push(' ');
- }
- }
-
- result += &format!("\n [{}]\n", first_header_row);
- result += &format!(" [{}]\n", second_header_row);
- result += &format!(" {}", marker_header_row);
-
- let spacers: Flags = Flags::LEADING_WIDE_CHAR_SPACER | Flags::WIDE_CHAR_SPACER;
- for cell in self
- .term
- .renderable_content()
- .display_iter
- .filter(|cell| !cell.flags.intersects(spacers))
- {
- if cell.point.column.0 == 0 {
- let prefix =
- if cell.point.line == self.expected_hyperlink.hovered_grid_point.line {
- '→'
- } else {
- ' '
- };
- result += &format!("\n{prefix}[{:>3}] ", cell.point.line.to_string());
- }
-
- result.push(cell.c);
- }
-
- result
- }
- }
-
- fn test_hyperlink<'a>(
- columns: usize,
- total_cells: usize,
- test_lines: impl Iterator<Item = &'a str>,
- hyperlink_kind: HyperlinkKind,
- source_location: &str,
- ) {
- thread_local! {
- static TEST_REGEX_SEARCHES: RefCell<RegexSearches> = RefCell::new(RegexSearches::new());
- }
-
- let term_size = TermSize::new(columns, total_cells / columns + 2);
- let (term, expected_hyperlink) =
- build_term_from_test_lines(hyperlink_kind, term_size, test_lines);
- let hyperlink_found = TEST_REGEX_SEARCHES.with(|regex_searches| {
- find_from_grid_point(
- &term,
- expected_hyperlink.hovered_grid_point,
- &mut regex_searches.borrow_mut(),
- )
- });
- let check_hyperlink_match =
- CheckHyperlinkMatch::new(&term, &expected_hyperlink, source_location);
- match hyperlink_found {
- Some((hyperlink_word, false, hyperlink_match)) => {
- check_hyperlink_match.check_path_with_position_and_match(
- PathWithPosition::parse_str(&hyperlink_word),
- &hyperlink_match,
- );
- }
- Some((hyperlink_word, true, hyperlink_match)) => {
- check_hyperlink_match.check_iri_and_match(hyperlink_word, &hyperlink_match);
- }
- _ => {
- assert!(
- false,
- "No hyperlink found\n at {source_location}:\n{}",
- check_hyperlink_match.format_renderable_content()
- )
- }
- }
- }
-}
+use alacritty_terminal::{
+ Term,
+ event::EventListener,
+ grid::Dimensions,
+ index::{Boundary, Column, Direction as AlacDirection, Line, Point as AlacPoint},
+ term::search::{Match, RegexIter, RegexSearch},
+};
+use regex::Regex;
+use std::{ops::Index, sync::LazyLock};
+
+const URL_REGEX: &str = r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`']+"#;
+// Optional suffix matches MSBuild diagnostic suffixes for path parsing in PathLikeWithPosition
+// https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks
+const WORD_REGEX: &str =
+ r#"[\$\+\w.\[\]:/\\@\-~()]+(?:\((?:\d+|\d+,\d+)\))|[\$\+\w.\[\]:/\\@\-~()]+"#;
+
+const PYTHON_FILE_LINE_REGEX: &str = r#"File "(?P<file>[^"]+)", line (?P<line>\d+)"#;
+
+static PYTHON_FILE_LINE_MATCHER: LazyLock<Regex> =
+ LazyLock::new(|| Regex::new(PYTHON_FILE_LINE_REGEX).unwrap());
+
+fn python_extract_path_and_line(input: &str) -> Option<(&str, u32)> {
+ if let Some(captures) = PYTHON_FILE_LINE_MATCHER.captures(input) {
+ let path_part = captures.name("file")?.as_str();
+
+ let line_number: u32 = captures.name("line")?.as_str().parse().ok()?;
+ return Some((path_part, line_number));
+ }
+ None
+}
+
+pub(super) struct RegexSearches {
+ url_regex: RegexSearch,
+ word_regex: RegexSearch,
+ python_file_line_regex: RegexSearch,
+}
+
+impl RegexSearches {
+ pub(super) fn new() -> Self {
+ Self {
+ url_regex: RegexSearch::new(URL_REGEX).unwrap(),
+ word_regex: RegexSearch::new(WORD_REGEX).unwrap(),
+ python_file_line_regex: RegexSearch::new(PYTHON_FILE_LINE_REGEX).unwrap(),
+ }
+ }
+}
+
+pub(super) fn find_from_grid_point<T: EventListener>(
+ term: &Term<T>,
+ point: AlacPoint,
+ regex_searches: &mut RegexSearches,
+) -> Option<(String, bool, Match)> {
+ let grid = term.grid();
+ let link = grid.index(point).hyperlink();
+ let found_word = if let Some(ref url) = link {
+ let mut min_index = point;
+ loop {
+ let new_min_index = min_index.sub(term, Boundary::Cursor, 1);
+ if new_min_index == min_index || grid.index(new_min_index).hyperlink() != link {
+ break;
+ } else {
+ min_index = new_min_index
+ }
+ }
+
+ let mut max_index = point;
+ loop {
+ let new_max_index = max_index.add(term, Boundary::Cursor, 1);
+ if new_max_index == max_index || grid.index(new_max_index).hyperlink() != link {
+ break;
+ } else {
+ max_index = new_max_index
+ }
+ }
+
+ let url = url.uri().to_owned();
+ let url_match = min_index..=max_index;
+
+ Some((url, true, url_match))
+ } else if let Some(url_match) = regex_match_at(term, point, &mut regex_searches.url_regex) {
+ let url = term.bounds_to_string(*url_match.start(), *url_match.end());
+ Some((url, true, url_match))
+ } else if let Some(python_match) =
+ regex_match_at(term, point, &mut regex_searches.python_file_line_regex)
+ {
+ let matching_line = term.bounds_to_string(*python_match.start(), *python_match.end());
+ python_extract_path_and_line(&matching_line).map(|(file_path, line_number)| {
+ (format!("{file_path}:{line_number}"), false, python_match)
+ })
+ } else if let Some(word_match) = regex_match_at(term, point, &mut regex_searches.word_regex) {
+ let file_path = term.bounds_to_string(*word_match.start(), *word_match.end());
+
+ let (sanitized_match, sanitized_word) = 'sanitize: {
+ let mut word_match = word_match;
+ let mut file_path = file_path;
+
+ if is_path_surrounded_by_common_symbols(&file_path) {
+ word_match = Match::new(
+ word_match.start().add(term, Boundary::Grid, 1),
+ word_match.end().sub(term, Boundary::Grid, 1),
+ );
+ file_path = file_path[1..file_path.len() - 1].to_owned();
+ }
+
+ while file_path.ends_with(':') {
+ file_path.pop();
+ word_match = Match::new(
+ *word_match.start(),
+ word_match.end().sub(term, Boundary::Grid, 1),
+ );
+ }
+ let mut colon_count = 0;
+ for c in file_path.chars() {
+ if c == ':' {
+ colon_count += 1;
+ }
+ }
+ // strip trailing comment after colon in case of
+ // file/at/path.rs:row:column:description or error message
+ // so that the file path is `file/at/path.rs:row:column`
+ if colon_count > 2 {
+ let last_index = file_path.rfind(':').unwrap();
+ let prev_is_digit = last_index > 0
+ && file_path
+ .chars()
+ .nth(last_index - 1)
+ .is_some_and(|c| c.is_ascii_digit());
+ let next_is_digit = last_index < file_path.len() - 1
+ && file_path
+ .chars()
+ .nth(last_index + 1)
+ .is_none_or(|c| c.is_ascii_digit());
+ if prev_is_digit && !next_is_digit {
+ let stripped_len = file_path.len() - last_index;
+ word_match = Match::new(
+ *word_match.start(),
+ word_match.end().sub(term, Boundary::Grid, stripped_len),
+ );
+ file_path = file_path[0..last_index].to_owned();
+ }
+ }
+
+ break 'sanitize (word_match, file_path);
+ };
+
+ Some((sanitized_word, false, sanitized_match))
+ } else {
+ None
+ };
+
+ found_word.map(|(maybe_url_or_path, is_url, word_match)| {
+ if is_url {
+ // Treat "file://" IRIs like file paths to ensure
+ // that line numbers at the end of the path are
+ // handled correctly
+ if let Some(path) = maybe_url_or_path.strip_prefix("file://") {
+ (path.to_string(), false, word_match)
+ } else {
+ (maybe_url_or_path, true, word_match)
+ }
+ } else {
+ (maybe_url_or_path, false, word_match)
+ }
+ })
+}
+
+fn is_path_surrounded_by_common_symbols(path: &str) -> bool {
+ // Avoid detecting `[]` or `()` strings as paths, surrounded by common symbols
+ path.len() > 2
+ // The rest of the brackets and various quotes cannot be matched by the [`WORD_REGEX`] hence not checked for.
+ && (path.starts_with('[') && path.ends_with(']')
+ || path.starts_with('(') && path.ends_with(')'))
+}
+
+/// Based on alacritty/src/display/hint.rs > regex_match_at
+/// Retrieve the match, if the specified point is inside the content matching the regex.
+fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &mut RegexSearch) -> Option<Match> {
+ visible_regex_match_iter(term, regex).find(|rm| rm.contains(&point))
+}
+
+/// Copied from alacritty/src/display/hint.rs:
+/// Iterate over all visible regex matches.
+fn visible_regex_match_iter<'a, T>(
+ term: &'a Term<T>,
+ regex: &'a mut RegexSearch,
+) -> impl Iterator<Item = Match> + 'a {
+ const MAX_SEARCH_LINES: usize = 100;
+
+ let viewport_start = Line(-(term.grid().display_offset() as i32));
+ let viewport_end = viewport_start + term.bottommost_line();
+ let mut start = term.line_search_left(AlacPoint::new(viewport_start, Column(0)));
+ let mut end = term.line_search_right(AlacPoint::new(viewport_end, Column(0)));
+ start.line = start.line.max(viewport_start - MAX_SEARCH_LINES);
+ end.line = end.line.min(viewport_end + MAX_SEARCH_LINES);
+
+ RegexIter::new(start, end, AlacDirection::Right, term, regex)
+ .skip_while(move |rm| rm.end().line < viewport_start)
+ .take_while(move |rm| rm.start().line <= viewport_end)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use alacritty_terminal::{
+ event::VoidListener,
+ index::{Boundary, Point as AlacPoint},
+ term::{Config, cell::Flags, test::TermSize},
+ vte::ansi::Handler,
+ };
+ use std::{cell::RefCell, ops::RangeInclusive, path::PathBuf};
+ use url::Url;
+ use util::paths::PathWithPosition;
+
+ fn re_test(re: &str, hay: &str, expected: Vec<&str>) {
+ let results: Vec<_> = regex::Regex::new(re)
+ .unwrap()
+ .find_iter(hay)
+ .map(|m| m.as_str())
+ .collect();
+ assert_eq!(results, expected);
+ }
+
+ #[test]
+ fn test_url_regex() {
+ re_test(
+ URL_REGEX,
+ "test http://example.com test 'https://website1.com' test mailto:bob@example.com train",
+ vec![
+ "http://example.com",
+ "https://website1.com",
+ "mailto:bob@example.com",
+ ],
+ );
+ }
+
+ #[test]
+ fn test_word_regex() {
+ re_test(
+ WORD_REGEX,
+ "hello, world! \"What\" is this?",
+ vec!["hello", "world", "What", "is", "this"],
+ );
+ }
+
+ #[test]
+ fn test_word_regex_with_linenum() {
+ // filename(line) and filename(line,col) as used in MSBuild output
+ // should be considered a single "word", even though comma is
+ // usually a word separator
+ re_test(WORD_REGEX, "a Main.cs(20) b", vec!["a", "Main.cs(20)", "b"]);
+ re_test(
+ WORD_REGEX,
+ "Main.cs(20,5) Error desc",
+ vec!["Main.cs(20,5)", "Error", "desc"],
+ );
+ // filename:line:col is a popular format for unix tools
+ re_test(
+ WORD_REGEX,
+ "a Main.cs:20:5 b",
+ vec!["a", "Main.cs:20:5", "b"],
+ );
+ // Some tools output "filename:line:col:message", which currently isn't
+ // handled correctly, but might be in the future
+ re_test(
+ WORD_REGEX,
+ "Main.cs:20:5:Error desc",
+ vec!["Main.cs:20:5:Error", "desc"],
+ );
+ }
+
+ #[test]
+ fn test_python_file_line_regex() {
+ re_test(
+ PYTHON_FILE_LINE_REGEX,
+ "hay File \"/zed/bad_py.py\", line 8 stack",
+ vec!["File \"/zed/bad_py.py\", line 8"],
+ );
+ re_test(PYTHON_FILE_LINE_REGEX, "unrelated", vec![]);
+ }
+
+ #[test]
+ fn test_python_file_line() {
+ let inputs: Vec<(&str, Option<(&str, u32)>)> = vec![
+ (
+ "File \"/zed/bad_py.py\", line 8",
+ Some(("/zed/bad_py.py", 8u32)),
+ ),
+ ("File \"path/to/zed/bad_py.py\"", None),
+ ("unrelated", None),
+ ("", None),
+ ];
+ let actual = inputs
+ .iter()
+ .map(|input| python_extract_path_and_line(input.0))
+ .collect::<Vec<_>>();
+ let expected = inputs.iter().map(|(_, output)| *output).collect::<Vec<_>>();
+ assert_eq!(actual, expected);
+ }
+
+ // We use custom columns in many tests to workaround this issue by ensuring a wrapped
+ // line never ends on a wide char:
+ //
+ // <https://github.com/alacritty/alacritty/issues/8586>
+ //
+ // This issue was recently fixed, as soon as we update to a version containing the fix we
+ // can remove all the custom columns from these tests.
+ //
+ macro_rules! test_hyperlink {
+ ($($lines:expr),+; $hyperlink_kind:ident) => { {
+ use crate::terminal_hyperlinks::tests::line_cells_count;
+ use std::cmp;
+
+ let test_lines = vec![$($lines),+];
+ let (total_cells, longest_line_cells) =
+ test_lines.iter().copied()
+ .map(line_cells_count)
+ .fold((0, 0), |state, cells| (state.0 + cells, cmp::max(state.1, cells)));
+
+ test_hyperlink!(
+ // Alacritty has issues with 2 columns, use 3 as the minimum for now.
+ [3, longest_line_cells / 2, longest_line_cells + 1];
+ total_cells;
+ test_lines.iter().copied();
+ $hyperlink_kind
+ )
+ } };
+
+ ($($columns:literal),+; $($lines:expr),+; $hyperlink_kind:ident) => { {
+ use crate::terminal_hyperlinks::tests::line_cells_count;
+
+ let test_lines = vec![$($lines),+];
+ let total_cells = test_lines.iter().copied().map(line_cells_count).sum();
+
+ test_hyperlink!(
+ [ $($columns),+ ]; total_cells; test_lines.iter().copied(); $hyperlink_kind
+ )
+ } };
+
+ ([ $($columns:expr),+ ]; $total_cells:expr; $lines:expr; $hyperlink_kind:ident) => { {
+ use crate::terminal_hyperlinks::tests::{ test_hyperlink, HyperlinkKind };
+
+ let source_location = format!("{}:{}", std::file!(), std::line!());
+ for columns in vec![ $($columns),+] {
+ test_hyperlink(columns, $total_cells, $lines, HyperlinkKind::$hyperlink_kind,
+ &source_location);
+ }
+ } };
+ }
+
+ mod path {
+ /// 👉 := **hovered** on following char
+ ///
+ /// 👈 := **hovered** on wide char spacer of previous full width char
+ ///
+ /// **`‹›`** := expected **hyperlink** match
+ ///
+ /// **`«»`** := expected **path**, **row**, and **column** capture groups
+ ///
+ /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
+ /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
+ ///
+ macro_rules! test_path {
+ ($($lines:literal),+) => { test_hyperlink!($($lines),+; Path) };
+ ($($columns:literal),+; $($lines:literal),+) => {
+ test_hyperlink!($($columns),+; $($lines),+; Path)
+ };
+ }
+
+ #[test]
+ fn simple() {
+ // Rust paths
+ // Just the path
+ test_path!("‹«/👉test/cool.rs»›");
+ test_path!("‹«/test/cool👉.rs»›");
+
+ // path and line
+ test_path!("‹«/👉test/cool.rs»:«4»›");
+ test_path!("‹«/test/cool.rs»👉:«4»›");
+ test_path!("‹«/test/cool.rs»:«👉4»›");
+ test_path!("‹«/👉test/cool.rs»(«4»)›");
+ test_path!("‹«/test/cool.rs»👉(«4»)›");
+ test_path!("‹«/test/cool.rs»(«👉4»)›");
+ test_path!("‹«/test/cool.rs»(«4»👉)›");
+
+ // path, line, and column
+ test_path!("‹«/👉test/cool.rs»:«4»:«2»›");
+ test_path!("‹«/test/cool.rs»:«4»:«👉2»›");
+ test_path!("‹«/👉test/cool.rs»(«4»,«2»)›");
+ test_path!("‹«/test/cool.rs»(«4»👉,«2»)›");
+
+ // path, line, column, and ':' suffix
+ test_path!("‹«/👉test/cool.rs»:«4»:«2»›:");
+ test_path!("‹«/test/cool.rs»:«4»:«👉2»›:");
+ test_path!("‹«/👉test/cool.rs»(«4»,«2»)›:");
+ test_path!("‹«/test/cool.rs»(«4»,«2»👉)›:");
+
+ // path, line, column, and description
+ test_path!("‹«/test/cool.rs»:«4»:«2»›👉:Error!");
+ test_path!("‹«/test/cool.rs»:«4»:«2»›:👉Error!");
+ test_path!("‹«/test/co👉ol.rs»(«4»,«2»)›:Error!");
+
+ // Cargo output
+ test_path!(" Compiling Cool 👉(‹«/test/Cool»›)");
+ test_path!(" Compiling Cool (‹«/👉test/Cool»›)");
+ test_path!(" Compiling Cool (‹«/test/Cool»›👉)");
+
+ // Python
+ test_path!("‹«awe👉some.py»›");
+
+ test_path!(" ‹F👉ile \"«/awesome.py»\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/awe👉some.py»\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/awesome.py»👉\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/awesome.py»\", line «4👉2»›: Wat?");
+ }
+
+ #[test]
+ fn colons_galore() {
+ test_path!("‹«/test/co👉ol.rs»:«4»›");
+ test_path!("‹«/test/co👉ol.rs»:«4»›:");
+ test_path!("‹«/test/co👉ol.rs»:«4»:«2»›");
+ test_path!("‹«/test/co👉ol.rs»:«4»:«2»›:");
+ test_path!("‹«/test/co👉ol.rs»(«1»)›");
+ test_path!("‹«/test/co👉ol.rs»(«1»)›:");
+ test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›");
+ test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›:");
+ test_path!("‹«/test/co👉ol.rs»::«42»›");
+ test_path!("‹«/test/co👉ol.rs»::«42»›:");
+ test_path!("‹«/test/co👉ol.rs:4:2»(«1»,«618»)›");
+ test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›::");
+ }
+
+ #[test]
+ fn word_wide_chars() {
+ // Rust paths
+ test_path!(4, 6, 12; "‹«/👉例/cool.rs»›");
+ test_path!(4, 6, 12; "‹«/例👈/cool.rs»›");
+ test_path!(4, 8, 16; "‹«/例/cool.rs»:«👉4»›");
+ test_path!(4, 8, 16; "‹«/例/cool.rs»:«4»:«👉2»›");
+
+ // Cargo output
+ test_path!(4, 27, 30; " Compiling Cool (‹«/👉例/Cool»›)");
+ test_path!(4, 27, 30; " Compiling Cool (‹«/例👈/Cool»›)");
+
+ // Python
+ test_path!(4, 11; "‹«👉例wesome.py»›");
+ test_path!(4, 11; "‹«例👈wesome.py»›");
+ test_path!(6, 17, 40; " ‹File \"«/👉例wesome.py»\", line «42»›: Wat?");
+ test_path!(6, 17, 40; " ‹File \"«/例👈wesome.py»\", line «42»›: Wat?");
+ }
+
+ #[test]
+ fn non_word_wide_chars() {
+ // Mojo diagnostic message
+ test_path!(4, 18, 38; " ‹File \"«/awe👉some.🔥»\", line «42»›: Wat?");
+ test_path!(4, 18, 38; " ‹File \"«/awesome👉.🔥»\", line «42»›: Wat?");
+ test_path!(4, 18, 38; " ‹File \"«/awesome.👉🔥»\", line «42»›: Wat?");
+ test_path!(4, 18, 38; " ‹File \"«/awesome.🔥👈»\", line «42»›: Wat?");
+ }
+
+ /// These likely rise to the level of being worth fixing.
+ mod issues {
+ #[test]
+ #[cfg_attr(not(target_os = "windows"), should_panic(expected = "Path = «例»"))]
+ #[cfg_attr(target_os = "windows", should_panic(expected = r#"Path = «C:\\例»"#))]
+ // <https://github.com/alacritty/alacritty/issues/8586>
+ fn issue_alacritty_8586() {
+ // Rust paths
+ test_path!("‹«/👉例/cool.rs»›");
+ test_path!("‹«/例👈/cool.rs»›");
+ test_path!("‹«/例/cool.rs»:«👉4»›");
+ test_path!("‹«/例/cool.rs»:«4»:«👉2»›");
+
+ // Cargo output
+ test_path!(" Compiling Cool (‹«/👉例/Cool»›)");
+ test_path!(" Compiling Cool (‹«/例👈/Cool»›)");
+
+ // Python
+ test_path!("‹«👉例wesome.py»›");
+ test_path!("‹«例👈wesome.py»›");
+ test_path!(" ‹File \"«/👉例wesome.py»\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/例👈wesome.py»\", line «42»›: Wat?");
+ }
+
+ #[test]
+ #[should_panic(expected = "No hyperlink found")]
+ // <https://github.com/zed-industries/zed/issues/12338>
+ fn issue_12338() {
+ // Issue #12338
+ test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test👉、2.txt»›");
+ test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test、👈2.txt»›");
+ test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test👉。3.txt»›");
+ test_path!(".rw-r--r-- 0 staff 05-27 14:03 ‹«test。👈3.txt»›");
+
+ // Rust paths
+ test_path!("‹«/👉🏃/🦀.rs»›");
+ test_path!("‹«/🏃👈/🦀.rs»›");
+ test_path!("‹«/🏃/👉🦀.rs»:«4»›");
+ test_path!("‹«/🏃/🦀👈.rs»:«4»:«2»›");
+
+ // Cargo output
+ test_path!(" Compiling Cool (‹«/👉🏃/Cool»›)");
+ test_path!(" Compiling Cool (‹«/🏃👈/Cool»›)");
+
+ // Python
+ test_path!("‹«👉🏃wesome.py»›");
+ test_path!("‹«🏃👈wesome.py»›");
+ test_path!(" ‹File \"«/👉🏃wesome.py»\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/🏃👈wesome.py»\", line «42»›: Wat?");
+
+ // Mojo
+ test_path!("‹«/awe👉some.🔥»› is some good Mojo!");
+ test_path!("‹«/awesome👉.🔥»› is some good Mojo!");
+ test_path!("‹«/awesome.👉🔥»› is some good Mojo!");
+ test_path!("‹«/awesome.🔥👈»› is some good Mojo!");
+ test_path!(" ‹File \"«/👉🏃wesome.🔥»\", line «42»›: Wat?");
+ test_path!(" ‹File \"«/🏃👈wesome.🔥»\", line «42»›: Wat?");
+ }
+
+ #[test]
+ #[cfg_attr(
+ not(target_os = "windows"),
+ should_panic(
+ expected = "Path = «test/controllers/template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)"
+ )
+ )]
+ #[cfg_attr(
+ target_os = "windows",
+ should_panic(
+ expected = r#"Path = «test\\controllers\\template_items_controller_test.rb», line = 20, at grid cells (0, 0)..=(17, 1)"#
+ )
+ )]
+ // <https://github.com/zed-industries/zed/issues/28194>
+ //
+ // #28194 was closed, but the link includes the description part (":in" here), which
+ // seems wrong...
+ fn issue_28194() {
+ test_path!(
+ "‹«test/c👉ontrollers/template_items_controller_test.rb»:«20»›:in 'block (2 levels) in <class:TemplateItemsControllerTest>'"
+ );
+ test_path!(
+ "‹«test/controllers/template_items_controller_test.rb»:«19»›:i👉n 'block in <class:TemplateItemsControllerTest>'"
+ );
+ }
+ }
+
+ /// Minor issues arguably not important enough to fix/workaround...
+ mod nits {
+ #[test]
+ #[cfg_attr(
+ not(target_os = "windows"),
+ should_panic(expected = "Path = «/test/cool.rs(4»")
+ )]
+ #[cfg_attr(
+ target_os = "windows",
+ should_panic(expected = r#"Path = «C:\\test\\cool.rs(4»"#)
+ )]
+ fn alacritty_bugs_with_two_columns() {
+ test_path!(2; "‹«/👉test/cool.rs»(«4»)›");
+ test_path!(2; "‹«/test/cool.rs»(«👉4»)›");
+ test_path!(2; "‹«/test/cool.rs»(«4»,«👉2»)›");
+
+ // Python
+ test_path!(2; "‹«awe👉some.py»›");
+ }
+
+ #[test]
+ #[cfg_attr(
+ not(target_os = "windows"),
+ should_panic(
+ expected = "Path = «/test/cool.rs», line = 1, at grid cells (0, 0)..=(9, 0)"
+ )
+ )]
+ #[cfg_attr(
+ target_os = "windows",
+ should_panic(
+ expected = r#"Path = «C:\\test\\cool.rs», line = 1, at grid cells (0, 0)..=(9, 2)"#
+ )
+ )]
+ fn invalid_row_column_should_be_part_of_path() {
+ test_path!("‹«/👉test/cool.rs:1:618033988749»›");
+ test_path!("‹«/👉test/cool.rs(1,618033988749)»›");
+ }
+
+ #[test]
+ #[should_panic(expected = "Path = «»")]
+ fn colon_suffix_succeeds_in_finding_an_empty_maybe_path() {
+ test_path!("‹«/test/cool.rs»:«4»:«2»›👉:", "What is this?");
+ test_path!("‹«/test/cool.rs»(«4»,«2»)›👉:", "What is this?");
+ }
+
+ #[test]
+ #[cfg_attr(
+ not(target_os = "windows"),
+ should_panic(expected = "Path = «/test/cool.rs»")
+ )]
+ #[cfg_attr(
+ target_os = "windows",
+ should_panic(expected = r#"Path = «C:\\test\\cool.rs»"#)
+ )]
+ fn many_trailing_colons_should_be_parsed_as_part_of_the_path() {
+ test_path!("‹«/test/cool.rs:::👉:»›");
+ test_path!("‹«/te:st/👉co:ol.r:s:4:2::::::»›");
+ }
+ }
+
+ #[cfg(target_os = "windows")]
+ mod windows {
+ // Lots of fun to be had with long file paths (verbatim) and UNC paths on Windows.
+ // See <https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation>
+ // See <https://users.rust-lang.org/t/understanding-windows-paths/58583>
+ // See <https://github.com/rust-lang/cargo/issues/13919>
+
+ #[test]
+ fn unc() {
+ test_path!(r#"‹«\\server\share\👉test\cool.rs»›"#);
+ test_path!(r#"‹«\\server\share\test\cool👉.rs»›"#);
+ }
+
+ mod issues {
+ #[test]
+ #[should_panic(
+ expected = r#"Path = «C:\\test\\cool.rs», at grid cells (0, 0)..=(6, 0)"#
+ )]
+ fn issue_verbatim() {
+ test_path!(r#"‹«\\?\C:\👉test\cool.rs»›"#);
+ test_path!(r#"‹«\\?\C:\test\cool👉.rs»›"#);
+ }
+
+ #[test]
+ #[should_panic(
+ expected = r#"Path = «\\\\server\\share\\test\\cool.rs», at grid cells (0, 0)..=(10, 2)"#
+ )]
+ fn issue_verbatim_unc() {
+ test_path!(r#"‹«\\?\UNC\server\share\👉test\cool.rs»›"#);
+ test_path!(r#"‹«\\?\UNC\server\share\test\cool👉.rs»›"#);
+ }
+ }
+ }
+ }
+
+ mod file_iri {
+ // File IRIs have a ton of use cases, most of which we currently do not support. A few of
+ // those cases are documented here as tests which are expected to fail.
+ // See https://en.wikipedia.org/wiki/File_URI_scheme
+
+ /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
+ /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
+ ///
+ macro_rules! test_file_iri {
+ ($file_iri:literal) => { { test_hyperlink!(concat!("‹«👉", $file_iri, "»›"); FileIri) } };
+ ($($columns:literal),+; $file_iri:literal) => { {
+ test_hyperlink!($($columns),+; concat!("‹«👉", $file_iri, "»›"); FileIri)
+ } };
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ #[test]
+ fn absolute_file_iri() {
+ test_file_iri!("file:///test/cool/index.rs");
+ test_file_iri!("file:///test/cool/");
+ }
+
+ mod issues {
+ #[cfg(not(target_os = "windows"))]
+ #[test]
+ #[should_panic(expected = "Path = «/test/Ῥόδος/», at grid cells (0, 0)..=(15, 1)")]
+ fn issue_file_iri_with_percent_encoded_characters() {
+ // Non-space characters
+ // file:///test/Ῥόδος/
+ test_file_iri!("file:///test/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82/"); // URI
+
+ // Spaces
+ test_file_iri!("file:///te%20st/co%20ol/index.rs");
+ test_file_iri!("file:///te%20st/co%20ol/");
+ }
+ }
+
+ #[cfg(target_os = "windows")]
+ mod windows {
+ mod issues {
+ // The test uses Url::to_file_path(), but it seems that the Url crate doesn't
+ // support relative file IRIs.
+ #[test]
+ #[should_panic(
+ expected = r#"Failed to interpret file IRI `file:/test/cool/index.rs` as a path"#
+ )]
+ fn issue_relative_file_iri() {
+ test_file_iri!("file:/test/cool/index.rs");
+ test_file_iri!("file:/test/cool/");
+ }
+
+ // See https://en.wikipedia.org/wiki/File_URI_scheme
+ #[test]
+ #[should_panic(
+ expected = r#"Path = «C:\\test\\cool\\index.rs», at grid cells (0, 0)..=(9, 1)"#
+ )]
+ fn issue_absolute_file_iri() {
+ test_file_iri!("file:///C:/test/cool/index.rs");
+ test_file_iri!("file:///C:/test/cool/");
+ }
+
+ #[test]
+ #[should_panic(
+ expected = r#"Path = «C:\\test\\Ῥόδος\\», at grid cells (0, 0)..=(16, 1)"#
+ )]
+ fn issue_file_iri_with_percent_encoded_characters() {
+ // Non-space characters
+ // file:///test/Ῥόδος/
+ test_file_iri!("file:///C:/test/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82/"); // URI
+
+ // Spaces
+ test_file_iri!("file:///C:/te%20st/co%20ol/index.rs");
+ test_file_iri!("file:///C:/te%20st/co%20ol/");
+ }
+ }
+ }
+ }
+
+ mod iri {
+ /// [**`c₀, c₁, …, cₙ;`**]ₒₚₜ := use specified terminal widths of `c₀, c₁, …, cₙ` **columns**
+ /// (defaults to `3, longest_line_cells / 2, longest_line_cells + 1;`)
+ ///
+ macro_rules! test_iri {
+ ($iri:literal) => { { test_hyperlink!(concat!("‹«👉", $iri, "»›"); Iri) } };
+ ($($columns:literal),+; $iri:literal) => { {
+ test_hyperlink!($($columns),+; concat!("‹«👉", $iri, "»›"); Iri)
+ } };
+ }
+
+ #[test]
+ fn simple() {
+ // In the order they appear in URL_REGEX, except 'file://' which is treated as a path
+ test_iri!("ipfs://test/cool.ipfs");
+ test_iri!("ipns://test/cool.ipns");
+ test_iri!("magnet://test/cool.git");
+ test_iri!("mailto:someone@somewhere.here");
+ test_iri!("gemini://somewhere.here");
+ test_iri!("gopher://somewhere.here");
+ test_iri!("http://test/cool/index.html");
+ test_iri!("http://10.10.10.10:1111/cool.html");
+ test_iri!("http://test/cool/index.html?amazing=1");
+ test_iri!("http://test/cool/index.html#right%20here");
+ test_iri!("http://test/cool/index.html?amazing=1#right%20here");
+ test_iri!("https://test/cool/index.html");
+ test_iri!("https://10.10.10.10:1111/cool.html");
+ test_iri!("https://test/cool/index.html?amazing=1");
+ test_iri!("https://test/cool/index.html#right%20here");
+ test_iri!("https://test/cool/index.html?amazing=1#right%20here");
+ test_iri!("news://test/cool.news");
+ test_iri!("git://test/cool.git");
+ test_iri!("ssh://user@somewhere.over.here:12345/test/cool.git");
+ test_iri!("ftp://test/cool.ftp");
+ }
+
+ #[test]
+ fn wide_chars() {
+ // In the order they appear in URL_REGEX, except 'file://' which is treated as a path
+ test_iri!(4, 20; "ipfs://例🏃🦀/cool.ipfs");
+ test_iri!(4, 20; "ipns://例🏃🦀/cool.ipns");
+ test_iri!(6, 20; "magnet://例🏃🦀/cool.git");
+ test_iri!(4, 20; "mailto:someone@somewhere.here");
+ test_iri!(4, 20; "gemini://somewhere.here");
+ test_iri!(4, 20; "gopher://somewhere.here");
+ test_iri!(4, 20; "http://例🏃🦀/cool/index.html");
+ test_iri!(4, 20; "http://10.10.10.10:1111/cool.html");
+ test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1");
+ test_iri!(4, 20; "http://例🏃🦀/cool/index.html#right%20here");
+ test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1#right%20here");
+ test_iri!(4, 20; "https://例🏃🦀/cool/index.html");
+ test_iri!(4, 20; "https://10.10.10.10:1111/cool.html");
+ test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1");
+ test_iri!(4, 20; "https://例🏃🦀/cool/index.html#right%20here");
+ test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1#right%20here");
+ test_iri!(4, 20; "news://例🏃🦀/cool.news");
+ test_iri!(5, 20; "git://例/cool.git");
+ test_iri!(5, 20; "ssh://user@somewhere.over.here:12345/例🏃🦀/cool.git");
+ test_iri!(7, 20; "ftp://例🏃🦀/cool.ftp");
+ }
+
+ // There are likely more tests needed for IRI vs URI
+ #[test]
+ fn iris() {
+ // These refer to the same location, see example here:
+ // <https://en.wikipedia.org/wiki/Internationalized_Resource_Identifier#Compatibility>
+ test_iri!("https://en.wiktionary.org/wiki/Ῥόδος"); // IRI
+ test_iri!("https://en.wiktionary.org/wiki/%E1%BF%AC%CF%8C%CE%B4%CE%BF%CF%82"); // URI
+ }
+
+ #[test]
+ #[should_panic(expected = "Expected a path, but was a iri")]
+ fn file_is_a_path() {
+ test_iri!("file://test/cool/index.rs");
+ }
+ }
+
+ #[derive(Debug, PartialEq)]
+ enum HyperlinkKind {
+ FileIri,
+ Iri,
+ Path,
+ }
+
+ struct ExpectedHyperlink {
+ hovered_grid_point: AlacPoint,
+ hovered_char: char,
+ hyperlink_kind: HyperlinkKind,
+ iri_or_path: String,
+ row: Option<u32>,
+ column: Option<u32>,
+ hyperlink_match: RangeInclusive<AlacPoint>,
+ }
+
+ /// Converts to Windows style paths on Windows, like path!(), but at runtime for improved test
+ /// readability.
+ fn build_term_from_test_lines<'a>(
+ hyperlink_kind: HyperlinkKind,
+ term_size: TermSize,
+ test_lines: impl Iterator<Item = &'a str>,
+ ) -> (Term<VoidListener>, ExpectedHyperlink) {
+ #[derive(Default, Eq, PartialEq)]
+ enum HoveredState {
+ #[default]
+ HoveredScan,
+ HoveredNextChar,
+ Done,
+ }
+
+ #[derive(Default, Eq, PartialEq)]
+ enum MatchState {
+ #[default]
+ MatchScan,
+ MatchNextChar,
+ Match(AlacPoint),
+ Done,
+ }
+
+ #[derive(Default, Eq, PartialEq)]
+ enum CapturesState {
+ #[default]
+ PathScan,
+ PathNextChar,
+ Path(AlacPoint),
+ RowScan,
+ Row(String),
+ ColumnScan,
+ Column(String),
+ Done,
+ }
+
+ fn prev_input_point_from_term(term: &Term<VoidListener>) -> AlacPoint {
+ let grid = term.grid();
+ let cursor = &grid.cursor;
+ let mut point = cursor.point;
+
+ if !cursor.input_needs_wrap {
+ point.column -= 1;
+ }
+
+ if grid.index(point).flags.contains(Flags::WIDE_CHAR_SPACER) {
+ point.column -= 1;
+ }
+
+ point
+ }
+
+ let mut hovered_grid_point: Option<AlacPoint> = None;
+ let mut hyperlink_match = AlacPoint::default()..=AlacPoint::default();
+ let mut iri_or_path = String::default();
+ let mut row = None;
+ let mut column = None;
+ let mut prev_input_point = AlacPoint::default();
+ let mut hovered_state = HoveredState::default();
+ let mut match_state = MatchState::default();
+ let mut captures_state = CapturesState::default();
+ let mut term = Term::new(Config::default(), &term_size, VoidListener);
+
+ for text in test_lines {
+ let chars: Box<dyn Iterator<Item = char>> =
+ if cfg!(windows) && hyperlink_kind == HyperlinkKind::Path {
+ Box::new(text.chars().map(|c| if c == '/' { '\\' } else { c })) as _
+ } else {
+ Box::new(text.chars()) as _
+ };
+ let mut chars = chars.peekable();
+ while let Some(c) = chars.next() {
+ match c {
+ '👉' => {
+ hovered_state = HoveredState::HoveredNextChar;
+ }
+ '👈' => {
+ hovered_grid_point = Some(prev_input_point.add(&term, Boundary::Grid, 1));
+ }
+ '«' | '»' => {
+ captures_state = match captures_state {
+ CapturesState::PathScan => CapturesState::PathNextChar,
+ CapturesState::PathNextChar => {
+ panic!("Should have been handled by char input")
+ }
+ CapturesState::Path(start_point) => {
+ iri_or_path = term.bounds_to_string(start_point, prev_input_point);
+ CapturesState::RowScan
+ }
+ CapturesState::RowScan => CapturesState::Row(String::new()),
+ CapturesState::Row(number) => {
+ row = Some(number.parse::<u32>().unwrap());
+ CapturesState::ColumnScan
+ }
+ CapturesState::ColumnScan => CapturesState::Column(String::new()),
+ CapturesState::Column(number) => {
+ column = Some(number.parse::<u32>().unwrap());
+ CapturesState::Done
+ }
+ CapturesState::Done => {
+ panic!("Extra '«', '»'")
+ }
+ }
+ }
+ '‹' | '›' => {
+ match_state = match match_state {
+ MatchState::MatchScan => MatchState::MatchNextChar,
+ MatchState::MatchNextChar => {
+ panic!("Should have been handled by char input")
+ }
+ MatchState::Match(start_point) => {
+ hyperlink_match = start_point..=prev_input_point;
+ MatchState::Done
+ }
+ MatchState::Done => {
+ panic!("Extra '‹', '›'")
+ }
+ }
+ }
+ _ => {
+ if let CapturesState::Row(number) | CapturesState::Column(number) =
+ &mut captures_state
+ {
+ number.push(c)
+ }
+
+ let is_windows_abs_path_start = captures_state
+ == CapturesState::PathNextChar
+ && cfg!(windows)
+ && hyperlink_kind == HyperlinkKind::Path
+ && c == '\\'
+ && chars.peek().is_some_and(|c| *c != '\\');
+
+ if is_windows_abs_path_start {
+ // Convert Unix abs path start into Windows abs path start so that the
+ // same test can be used for both OSes.
+ term.input('C');
+ prev_input_point = prev_input_point_from_term(&term);
+ term.input(':');
+ term.input(c);
+ } else {
+ term.input(c);
+ prev_input_point = prev_input_point_from_term(&term);
+ }
+
+ if hovered_state == HoveredState::HoveredNextChar {
+ hovered_grid_point = Some(prev_input_point);
+ hovered_state = HoveredState::Done;
+ }
+ if captures_state == CapturesState::PathNextChar {
+ captures_state = CapturesState::Path(prev_input_point);
+ }
+ if match_state == MatchState::MatchNextChar {
+ match_state = MatchState::Match(prev_input_point);
+ }
+ }
+ }
+ }
+ term.move_down_and_cr(1);
+ }
+
+ if hyperlink_kind == HyperlinkKind::FileIri {
+ let Ok(url) = Url::parse(&iri_or_path) else {
+ panic!("Failed to parse file IRI `{iri_or_path}`");
+ };
+ let Ok(path) = url.to_file_path() else {
+ panic!("Failed to interpret file IRI `{iri_or_path}` as a path");
+ };
+ iri_or_path = path.to_string_lossy().to_string();
+ }
+
+ if cfg!(windows) {
+ // Handle verbatim and UNC paths for Windows
+ if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\UNC\"#) {
+ iri_or_path = format!(r#"\\{stripped}"#);
+ } else if let Some(stripped) = iri_or_path.strip_prefix(r#"\\?\"#) {
+ iri_or_path = stripped.to_string();
+ }
+ }
+
+ let hovered_grid_point = hovered_grid_point.expect("Missing hovered point (👉 or 👈)");
+ let hovered_char = term.grid().index(hovered_grid_point).c;
+ (
+ term,
+ ExpectedHyperlink {
+ hovered_grid_point,
+ hovered_char,
+ hyperlink_kind,
+ iri_or_path,
+ row,
+ column,
+ hyperlink_match,
+ },
+ )
+ }
+
+ fn line_cells_count(line: &str) -> usize {
+ // This avoids taking a dependency on the unicode-width crate
+ fn width(c: char) -> usize {
+ match c {
+ // Fullwidth unicode characters used in tests
+ '例' | '🏃' | '🦀' | '🔥' => 2,
+ _ => 1,
+ }
+ }
+ const CONTROL_CHARS: &str = "‹«👉👈»›";
+ line.chars()
+ .filter(|c| !CONTROL_CHARS.contains(*c))
+ .map(width)
+ .sum::<usize>()
+ }
+
+ struct CheckHyperlinkMatch<'a> {
+ term: &'a Term<VoidListener>,
+ expected_hyperlink: &'a ExpectedHyperlink,
+ source_location: &'a str,
+ }
+
+ impl<'a> CheckHyperlinkMatch<'a> {
+ fn new(
+ term: &'a Term<VoidListener>,
+ expected_hyperlink: &'a ExpectedHyperlink,
+ source_location: &'a str,
+ ) -> Self {
+ Self {
+ term,
+ expected_hyperlink,
+ source_location,
+ }
+ }
+
+ fn check_path_with_position_and_match(
+ &self,
+ path_with_position: PathWithPosition,
+ hyperlink_match: &Match,
+ ) {
+ let format_path_with_position_and_match =
+ |path_with_position: &PathWithPosition, hyperlink_match: &Match| {
+ let mut result =
+ format!("Path = «{}»", &path_with_position.path.to_string_lossy());
+ if let Some(row) = path_with_position.row {
+ result += &format!(", line = {row}");
+ if let Some(column) = path_with_position.column {
+ result += &format!(", column = {column}");
+ }
+ }
+
+ result += &format!(
+ ", at grid cells {}",
+ Self::format_hyperlink_match(hyperlink_match)
+ );
+ result
+ };
+
+ assert_ne!(
+ self.expected_hyperlink.hyperlink_kind,
+ HyperlinkKind::Iri,
+ "\n at {}\nExpected a path, but was a iri:\n{}",
+ self.source_location,
+ self.format_renderable_content()
+ );
+
+ assert_eq!(
+ format_path_with_position_and_match(
+ &PathWithPosition {
+ path: PathBuf::from(self.expected_hyperlink.iri_or_path.clone()),
+ row: self.expected_hyperlink.row,
+ column: self.expected_hyperlink.column
+ },
+ &self.expected_hyperlink.hyperlink_match
+ ),
+ format_path_with_position_and_match(&path_with_position, hyperlink_match),
+ "\n at {}:\n{}",
+ self.source_location,
+ self.format_renderable_content()
+ );
+ }
+
+ fn check_iri_and_match(&self, iri: String, hyperlink_match: &Match) {
+ let format_iri_and_match = |iri: &String, hyperlink_match: &Match| {
+ format!(
+ "Url = «{iri}», at grid cells {}",
+ Self::format_hyperlink_match(hyperlink_match)
+ )
+ };
+
+ assert_eq!(
+ self.expected_hyperlink.hyperlink_kind,
+ HyperlinkKind::Iri,
+ "\n at {}\nExpected a iri, but was a path:\n{}",
+ self.source_location,
+ self.format_renderable_content()
+ );
+
+ assert_eq!(
+ format_iri_and_match(
+ &self.expected_hyperlink.iri_or_path,
+ &self.expected_hyperlink.hyperlink_match
+ ),
+ format_iri_and_match(&iri, hyperlink_match),
+ "\n at {}:\n{}",
+ self.source_location,
+ self.format_renderable_content()
+ );
+ }
+
+ fn format_hyperlink_match(hyperlink_match: &Match) -> String {
+ format!(
+ "({}, {})..=({}, {})",
+ hyperlink_match.start().line.0,
+ hyperlink_match.start().column.0,
+ hyperlink_match.end().line.0,
+ hyperlink_match.end().column.0
+ )
+ }
+
+ fn format_renderable_content(&self) -> String {
+ let mut result = format!("\nHovered on '{}'\n", self.expected_hyperlink.hovered_char);
+
+ let mut first_header_row = String::new();
+ let mut second_header_row = String::new();
+ let mut marker_header_row = String::new();
+ for index in 0..self.term.columns() {
+ let remainder = index % 10;
+ first_header_row.push_str(
+ &(index > 0 && remainder == 0)
+ .then_some((index / 10).to_string())
+ .unwrap_or(" ".into()),
+ );
+ second_header_row += &remainder.to_string();
+ if index == self.expected_hyperlink.hovered_grid_point.column.0 {
+ marker_header_row.push('↓');
+ } else {
+ marker_header_row.push(' ');
+ }
+ }
+
+ result += &format!("\n [{}]\n", first_header_row);
+ result += &format!(" [{}]\n", second_header_row);
+ result += &format!(" {}", marker_header_row);
+
+ let spacers: Flags = Flags::LEADING_WIDE_CHAR_SPACER | Flags::WIDE_CHAR_SPACER;
+ for cell in self
+ .term
+ .renderable_content()
+ .display_iter
+ .filter(|cell| !cell.flags.intersects(spacers))
+ {
+ if cell.point.column.0 == 0 {
+ let prefix =
+ if cell.point.line == self.expected_hyperlink.hovered_grid_point.line {
+ '→'
+ } else {
+ ' '
+ };
+ result += &format!("\n{prefix}[{:>3}] ", cell.point.line.to_string());
+ }
+
+ result.push(cell.c);
+ }
+
+ result
+ }
+ }
+
+ fn test_hyperlink<'a>(
+ columns: usize,
+ total_cells: usize,
+ test_lines: impl Iterator<Item = &'a str>,
+ hyperlink_kind: HyperlinkKind,
+ source_location: &str,
+ ) {
+ thread_local! {
+ static TEST_REGEX_SEARCHES: RefCell<RegexSearches> = RefCell::new(RegexSearches::new());
+ }
+
+ let term_size = TermSize::new(columns, total_cells / columns + 2);
+ let (term, expected_hyperlink) =
+ build_term_from_test_lines(hyperlink_kind, term_size, test_lines);
+ let hyperlink_found = TEST_REGEX_SEARCHES.with(|regex_searches| {
+ find_from_grid_point(
+ &term,
+ expected_hyperlink.hovered_grid_point,
+ &mut regex_searches.borrow_mut(),
+ )
+ });
+ let check_hyperlink_match =
+ CheckHyperlinkMatch::new(&term, &expected_hyperlink, source_location);
+ match hyperlink_found {
+ Some((hyperlink_word, false, hyperlink_match)) => {
+ check_hyperlink_match.check_path_with_position_and_match(
+ PathWithPosition::parse_str(&hyperlink_word),
+ &hyperlink_match,
+ );
+ }
+ Some((hyperlink_word, true, hyperlink_match)) => {
+ check_hyperlink_match.check_iri_and_match(hyperlink_word, &hyperlink_match);
+ }
+ _ => {
+ assert!(
+ false,
+ "No hyperlink found\n at {source_location}:\n{}",
+ check_hyperlink_match.format_renderable_content()
+ )
+ }
+ }
+ }
+}
@@ -6,7 +6,7 @@ use gpui::{AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, Pixels,
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::SettingsSources;
+use settings::{SettingsKey, SettingsSources, SettingsUi};
use std::path::PathBuf;
use task::Shell;
use theme::FontFamilyName;
@@ -135,7 +135,8 @@ pub enum ActivateScript {
Pyenv,
}
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "terminal")]
pub struct TerminalSettingsContent {
/// What shell to use when opening a terminal.
///
@@ -253,8 +254,6 @@ pub struct TerminalSettingsContent {
}
impl settings::Settings for TerminalSettings {
- const KEY: Option<&'static str> = Some("terminal");
-
type FileContent = TerminalSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -3,13 +3,17 @@ use async_recursion::async_recursion;
use collections::HashSet;
use futures::{StreamExt as _, stream::FuturesUnordered};
use gpui::{AppContext as _, AsyncWindowContext, Axis, Entity, Task, WeakEntity};
-use project::{Project, terminals::TerminalKind};
+use project::Project;
use serde::{Deserialize, Serialize};
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
use ui::{App, Context, Pixels, Window};
use util::ResultExt as _;
-use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql};
+use db::{
+ query,
+ sqlez::{domain::Domain, statement::Statement, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+};
use workspace::{
ItemHandle, ItemId, Member, Pane, PaneAxis, PaneGroup, SerializableItem as _, Workspace,
WorkspaceDb, WorkspaceId,
@@ -242,11 +246,9 @@ async fn deserialize_pane_group(
.update(cx, |workspace, cx| default_working_directory(workspace, cx))
.ok()
.flatten();
- let kind = TerminalKind::Shell(
- working_directory.as_deref().map(Path::to_path_buf),
- );
- let terminal =
- project.update(cx, |project, cx| project.create_terminal(kind, cx));
+ let terminal = project.update(cx, |project, cx| {
+ project.create_terminal_shell(working_directory, cx)
+ });
Some(Some(terminal))
} else {
Some(None)
@@ -375,9 +377,13 @@ impl<'de> Deserialize<'de> for SerializedAxis {
}
}
-define_connection! {
- pub static ref TERMINAL_DB: TerminalDb<WorkspaceDb> =
- &[sql!(
+pub struct TerminalDb(ThreadSafeConnection);
+
+impl Domain for TerminalDb {
+ const NAME: &str = stringify!(TerminalDb);
+
+ const MIGRATIONS: &[&str] = &[
+ sql!(
CREATE TABLE terminals (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -414,6 +420,8 @@ define_connection! {
];
}
+db::static_connection!(TERMINAL_DB, TerminalDb, [WorkspaceDb]);
+
impl TerminalDb {
query! {
pub async fn update_workspace_id(
@@ -1,5 +1,4 @@
-use crate::color_contrast;
-use editor::{CursorLayout, HighlightedRange, HighlightedRangeLine};
+use editor::{CursorLayout, EditorSettings, HighlightedRange, HighlightedRangeLine};
use gpui::{
AbsoluteLength, AnyElement, App, AvailableSpace, Bounds, ContentMask, Context, DispatchPhase,
Element, ElementId, Entity, FocusHandle, Font, FontFeatures, FontStyle, FontWeight,
@@ -27,6 +26,7 @@ use terminal::{
terminal_settings::TerminalSettings,
};
use theme::{ActiveTheme, Theme, ThemeSettings};
+use ui::utils::ensure_minimum_contrast;
use ui::{ParentElement, Tooltip};
use util::ResultExt;
use workspace::Workspace;
@@ -534,7 +534,7 @@ impl TerminalElement {
// Only apply contrast adjustment to non-decorative characters
if !Self::is_decorative_character(indexed.c) {
- fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast);
+ fg = ensure_minimum_contrast(fg, bg, minimum_contrast);
}
// Ghostty uses (175/255) as the multiplier (~0.69), Alacritty uses 0.66, Kitty
@@ -653,7 +653,7 @@ impl TerminalElement {
let terminal = self.terminal.clone();
let hitbox = hitbox.clone();
let focus = focus.clone();
- let terminal_view = terminal_view.clone();
+ let terminal_view = terminal_view;
move |e: &MouseMoveEvent, phase, window, cx| {
if phase != DispatchPhase::Bubble {
return;
@@ -1192,8 +1192,8 @@ impl Element for TerminalElement {
bounds.origin + Point::new(layout.gutter, px(0.)) - Point::new(px(0.), scroll_top);
let marked_text_cloned: Option<String> = {
- let ime_state = self.terminal_view.read(cx);
- ime_state.marked_text.clone()
+ let ime_state = &self.terminal_view.read(cx).ime_state;
+ ime_state.as_ref().map(|state| state.marked_text.clone())
};
let terminal_input_handler = TerminalInputHandler {
@@ -1257,12 +1257,17 @@ impl Element for TerminalElement {
if let Some((start_y, highlighted_range_lines)) =
to_highlighted_range_lines(relative_highlighted_range, layout, origin)
{
+ let corner_radius = if EditorSettings::get_global(cx).rounded_selection {
+ 0.15 * layout.dimensions.line_height
+ } else {
+ Pixels::ZERO
+ };
let hr = HighlightedRange {
start_y,
line_height: layout.dimensions.line_height,
lines: highlighted_range_lines,
color: *color,
- corner_radius: 0.15 * layout.dimensions.line_height,
+ corner_radius: corner_radius,
};
hr.paint(true, bounds, window);
}
@@ -1403,7 +1408,7 @@ impl InputHandler for TerminalInputHandler {
window.invalidate_character_coordinates();
let project = this.project().read(cx);
let telemetry = project.client().telemetry().clone();
- telemetry.log_edit_event("terminal", project.is_via_ssh());
+ telemetry.log_edit_event("terminal", project.is_via_remote_server());
})
.ok();
}
@@ -1416,11 +1421,9 @@ impl InputHandler for TerminalInputHandler {
_window: &mut Window,
cx: &mut App,
) {
- if let Some(range) = new_marked_range {
- self.terminal_view.update(cx, |view, view_cx| {
- view.set_marked_text(new_text.to_string(), range, view_cx);
- });
- }
+ self.terminal_view.update(cx, |view, view_cx| {
+ view.set_marked_text(new_text.to_string(), new_marked_range, view_cx);
+ });
}
fn unmark_text(&mut self, _window: &mut Window, cx: &mut App) {
@@ -1598,6 +1601,7 @@ pub fn convert_color(fg: &terminal::alacritty_terminal::vte::ansi::Color, theme:
mod tests {
use super::*;
use gpui::{AbsoluteLength, Hsla, font};
+ use ui::utils::apca_contrast;
#[test]
fn test_is_decorative_character() {
@@ -1713,7 +1717,7 @@ mod tests {
};
// Should have poor contrast
- let actual_contrast = color_contrast::apca_contrast(white_fg, light_gray_bg).abs();
+ let actual_contrast = apca_contrast(white_fg, light_gray_bg).abs();
assert!(
actual_contrast < 30.0,
"White on light gray should have poor APCA contrast: {}",
@@ -1721,12 +1725,12 @@ mod tests {
);
// After adjustment with minimum APCA contrast of 45, should be darker
- let adjusted = color_contrast::ensure_minimum_contrast(white_fg, light_gray_bg, 45.0);
+ let adjusted = ensure_minimum_contrast(white_fg, light_gray_bg, 45.0);
assert!(
adjusted.l < white_fg.l,
"Adjusted color should be darker than original"
);
- let adjusted_contrast = color_contrast::apca_contrast(adjusted, light_gray_bg).abs();
+ let adjusted_contrast = apca_contrast(adjusted, light_gray_bg).abs();
assert!(adjusted_contrast >= 45.0, "Should meet minimum contrast");
// Test case 2: Dark colors (poor contrast)
@@ -1744,7 +1748,7 @@ mod tests {
};
// Should have poor contrast
- let actual_contrast = color_contrast::apca_contrast(black_fg, dark_gray_bg).abs();
+ let actual_contrast = apca_contrast(black_fg, dark_gray_bg).abs();
assert!(
actual_contrast < 30.0,
"Black on dark gray should have poor APCA contrast: {}",
@@ -1752,16 +1756,16 @@ mod tests {
);
// After adjustment with minimum APCA contrast of 45, should be lighter
- let adjusted = color_contrast::ensure_minimum_contrast(black_fg, dark_gray_bg, 45.0);
+ let adjusted = ensure_minimum_contrast(black_fg, dark_gray_bg, 45.0);
assert!(
adjusted.l > black_fg.l,
"Adjusted color should be lighter than original"
);
- let adjusted_contrast = color_contrast::apca_contrast(adjusted, dark_gray_bg).abs();
+ let adjusted_contrast = apca_contrast(adjusted, dark_gray_bg).abs();
assert!(adjusted_contrast >= 45.0, "Should meet minimum contrast");
// Test case 3: Already good contrast
- let good_contrast = color_contrast::ensure_minimum_contrast(black_fg, white_fg, 45.0);
+ let good_contrast = ensure_minimum_contrast(black_fg, white_fg, 45.0);
assert_eq!(
good_contrast, black_fg,
"Good contrast should not be adjusted"
@@ -1788,11 +1792,11 @@ mod tests {
};
// With minimum contrast of 0.0, no adjustment should happen
- let no_adjust = color_contrast::ensure_minimum_contrast(white_fg, white_bg, 0.0);
+ let no_adjust = ensure_minimum_contrast(white_fg, white_bg, 0.0);
assert_eq!(no_adjust, white_fg, "No adjustment with min_contrast 0.0");
// With minimum APCA contrast of 15, it should adjust to a darker color
- let adjusted = color_contrast::ensure_minimum_contrast(white_fg, white_bg, 15.0);
+ let adjusted = ensure_minimum_contrast(white_fg, white_bg, 15.0);
assert!(
adjusted.l < white_fg.l,
"White on white should become darker, got l={}",
@@ -1800,7 +1804,7 @@ mod tests {
);
// Verify the contrast is now acceptable
- let new_contrast = color_contrast::apca_contrast(adjusted, white_bg).abs();
+ let new_contrast = apca_contrast(adjusted, white_bg).abs();
assert!(
new_contrast >= 15.0,
"Adjusted APCA contrast {} should be >= 15.0",
@@ -1838,8 +1842,7 @@ mod tests {
};
let font_size = AbsoluteLength::Pixels(px(12.0));
- let batch =
- BatchedTextRun::new_from_char(AlacPoint::new(0, 0), 'a', style1.clone(), font_size);
+ let batch = BatchedTextRun::new_from_char(AlacPoint::new(0, 0), 'a', style1, font_size);
// Should be able to append same style
assert!(batch.can_append(&style2));
@@ -16,7 +16,7 @@ use gpui::{
Task, WeakEntity, Window, actions,
};
use itertools::Itertools;
-use project::{Fs, Project, ProjectEntryId, terminals::TerminalKind};
+use project::{Fs, Project, ProjectEntryId};
use search::{BufferSearchBar, buffer_search::DivRegistrar};
use settings::Settings;
use task::{RevealStrategy, RevealTarget, ShellBuilder, SpawnInTerminal, TaskId};
@@ -41,7 +41,7 @@ use workspace::{
ui::IconName,
};
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Result, anyhow};
use zed_actions::assistant::InlineAssist;
const TERMINAL_PANEL_KEY: &str = "TerminalPanel";
@@ -49,6 +49,8 @@ const TERMINAL_PANEL_KEY: &str = "TerminalPanel";
actions!(
terminal_panel,
[
+ /// Toggles the terminal panel.
+ Toggle,
/// Toggles focus on the terminal panel.
ToggleFocus
]
@@ -64,6 +66,13 @@ pub fn init(cx: &mut App) {
workspace.toggle_panel_focus::<TerminalPanel>(window, cx);
}
});
+ workspace.register_action(|workspace, _: &Toggle, window, cx| {
+ if is_enabled_in_workspace(workspace, cx) {
+ if !workspace.toggle_panel_focus::<TerminalPanel>(window, cx) {
+ workspace.close_panel::<TerminalPanel>(window, cx);
+ }
+ }
+ });
},
)
.detach();
@@ -181,7 +190,6 @@ impl TerminalPanel {
.anchor(Corner::TopRight)
.with_handle(pane.split_item_context_menu_handle.clone())
.menu({
- let split_context = split_context.clone();
move |window, cx| {
ContextMenu::build(window, cx, |menu, _, _| {
menu.when_some(
@@ -236,7 +244,7 @@ impl TerminalPanel {
) -> Result<Entity<Self>> {
let mut terminal_panel = None;
- match workspace
+ if let Some((database_id, serialization_key)) = workspace
.read_with(&cx, |workspace, _| {
workspace
.database_id()
@@ -244,34 +252,29 @@ impl TerminalPanel {
})
.ok()
.flatten()
+ && let Some(serialized_panel) = cx
+ .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
+ .await
+ .log_err()
+ .flatten()
+ .map(|panel| serde_json::from_str::<SerializedTerminalPanel>(&panel))
+ .transpose()
+ .log_err()
+ .flatten()
+ && let Ok(serialized) = workspace
+ .update_in(&mut cx, |workspace, window, cx| {
+ deserialize_terminal_panel(
+ workspace.weak_handle(),
+ workspace.project().clone(),
+ database_id,
+ serialized_panel,
+ window,
+ cx,
+ )
+ })?
+ .await
{
- Some((database_id, serialization_key)) => {
- if let Some(serialized_panel) = cx
- .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
- .await
- .log_err()
- .flatten()
- .map(|panel| serde_json::from_str::<SerializedTerminalPanel>(&panel))
- .transpose()
- .log_err()
- .flatten()
- && let Ok(serialized) = workspace
- .update_in(&mut cx, |workspace, window, cx| {
- deserialize_terminal_panel(
- workspace.weak_handle(),
- workspace.project().clone(),
- database_id,
- serialized_panel,
- window,
- cx,
- )
- })?
- .await
- {
- terminal_panel = Some(serialized);
- }
- }
- _ => {}
+ terminal_panel = Some(serialized);
}
let terminal_panel = if let Some(panel) = terminal_panel {
@@ -382,14 +385,19 @@ impl TerminalPanel {
}
self.serialize(cx);
}
- pane::Event::Split(direction) => {
- let Some(new_pane) = self.new_pane_with_cloned_active_terminal(window, cx) else {
- return;
- };
+ &pane::Event::Split(direction) => {
+ let fut = self.new_pane_with_cloned_active_terminal(window, cx);
let pane = pane.clone();
- let direction = *direction;
- self.center.split(&pane, &new_pane, direction).log_err();
- window.focus(&new_pane.focus_handle(cx));
+ cx.spawn_in(window, async move |panel, cx| {
+ let Some(new_pane) = fut.await else {
+ return;
+ };
+ _ = panel.update_in(cx, |panel, window, cx| {
+ panel.center.split(&pane, &new_pane, direction).log_err();
+ window.focus(&new_pane.focus_handle(cx));
+ });
+ })
+ .detach();
}
pane::Event::Focus => {
self.active_pane = pane.clone();
@@ -406,57 +414,62 @@ impl TerminalPanel {
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Option<Entity<Pane>> {
- let workspace = self.workspace.upgrade()?;
+ ) -> Task<Option<Entity<Pane>>> {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return Task::ready(None);
+ };
let workspace = workspace.read(cx);
let database_id = workspace.database_id();
let weak_workspace = self.workspace.clone();
let project = workspace.project().clone();
- let (working_directory, python_venv_directory) = self
- .active_pane
+ let active_pane = &self.active_pane;
+ let terminal_view = active_pane
.read(cx)
.active_item()
- .and_then(|item| item.downcast::<TerminalView>())
- .map(|terminal_view| {
- let terminal = terminal_view.read(cx).terminal().read(cx);
- (
- terminal
- .working_directory()
- .or_else(|| default_working_directory(workspace, cx)),
- terminal.python_venv_directory.clone(),
- )
- })
- .unwrap_or((None, None));
- let kind = TerminalKind::Shell(working_directory);
- let terminal = project
- .update(cx, |project, cx| {
- project.create_terminal_with_venv(kind, python_venv_directory, cx)
- })
- .ok()?;
-
- let terminal_view = Box::new(cx.new(|cx| {
- TerminalView::new(
- terminal.clone(),
- weak_workspace.clone(),
- database_id,
- project.downgrade(),
- window,
- cx,
- )
- }));
- let pane = new_terminal_pane(
- weak_workspace,
- project,
- self.active_pane.read(cx).is_zoomed(),
- window,
- cx,
- );
- self.apply_tab_bar_buttons(&pane, cx);
- pane.update(cx, |pane, cx| {
- pane.add_item(terminal_view, true, true, None, window, cx);
+ .and_then(|item| item.downcast::<TerminalView>());
+ let working_directory = terminal_view.as_ref().and_then(|terminal_view| {
+ let terminal = terminal_view.read(cx).terminal().read(cx);
+ terminal
+ .working_directory()
+ .or_else(|| default_working_directory(workspace, cx))
});
+ let is_zoomed = active_pane.read(cx).is_zoomed();
+ cx.spawn_in(window, async move |panel, cx| {
+ let terminal = project
+ .update(cx, |project, cx| match terminal_view {
+ Some(view) => Task::ready(project.clone_terminal(
+ &view.read(cx).terminal.clone(),
+ cx,
+ || working_directory,
+ )),
+ None => project.create_terminal_shell(working_directory, cx),
+ })
+ .ok()?
+ .await
+ .ok()?;
- Some(pane)
+ panel
+ .update_in(cx, move |terminal_panel, window, cx| {
+ let terminal_view = Box::new(cx.new(|cx| {
+ TerminalView::new(
+ terminal.clone(),
+ weak_workspace.clone(),
+ database_id,
+ project.downgrade(),
+ window,
+ cx,
+ )
+ }));
+ let pane = new_terminal_pane(weak_workspace, project, is_zoomed, window, cx);
+ terminal_panel.apply_tab_bar_buttons(&pane, cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(terminal_view, true, true, None, window, cx);
+ });
+ Some(pane)
+ })
+ .ok()
+ .flatten()
+ })
}
pub fn open_terminal(
@@ -471,8 +484,8 @@ impl TerminalPanel {
terminal_panel
.update(cx, |panel, cx| {
- panel.add_terminal(
- TerminalKind::Shell(Some(action.working_directory.clone())),
+ panel.add_terminal_shell(
+ Some(action.working_directory.clone()),
RevealStrategy::Always,
window,
cx,
@@ -481,20 +494,34 @@ impl TerminalPanel {
.detach_and_log_err(cx);
}
- fn spawn_task(
+ pub fn spawn_task(
&mut self,
task: &SpawnInTerminal,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<WeakEntity<Terminal>>> {
- let Ok(is_local) = self
+ let remote_client = self
.workspace
- .update(cx, |workspace, cx| workspace.project().read(cx).is_local())
- else {
- return Task::ready(Err(anyhow!("Project is not local")));
+ .update(cx, |workspace, cx| {
+ let project = workspace.project().read(cx);
+ if project.is_via_collab() {
+ Err(anyhow!("cannot spawn tasks as a guest"))
+ } else {
+ Ok(project.remote_client())
+ }
+ })
+ .flatten();
+
+ let remote_client = match remote_client {
+ Ok(remote_client) => remote_client,
+ Err(e) => return Task::ready(Err(e)),
};
- let builder = ShellBuilder::new(is_local, &task.shell);
+ let remote_shell = remote_client
+ .as_ref()
+ .and_then(|remote_client| remote_client.read(cx).shell());
+
+ let builder = ShellBuilder::new(remote_shell.as_deref(), &task.shell);
let command_label = builder.command_label(&task.command_label);
let (command, args) = builder.build(task.command.clone(), &task.args);
@@ -563,15 +590,16 @@ impl TerminalPanel {
) -> Task<Result<WeakEntity<Terminal>>> {
let reveal = spawn_task.reveal;
let reveal_target = spawn_task.reveal_target;
- let kind = TerminalKind::Task(spawn_task);
match reveal_target {
RevealTarget::Center => self
.workspace
.update(cx, |workspace, cx| {
- Self::add_center_terminal(workspace, kind, window, cx)
+ Self::add_center_terminal(workspace, window, cx, |project, cx| {
+ project.create_terminal_task(spawn_task, cx)
+ })
})
.unwrap_or_else(|e| Task::ready(Err(e))),
- RevealTarget::Dock => self.add_terminal(kind, reveal, window, cx),
+ RevealTarget::Dock => self.add_terminal_task(spawn_task, reveal, window, cx),
}
}
@@ -586,11 +614,14 @@ impl TerminalPanel {
return;
};
- let kind = TerminalKind::Shell(default_working_directory(workspace, cx));
-
terminal_panel
.update(cx, |this, cx| {
- this.add_terminal(kind, RevealStrategy::Always, window, cx)
+ this.add_terminal_shell(
+ default_working_directory(workspace, cx),
+ RevealStrategy::Always,
+ window,
+ cx,
+ )
})
.detach_and_log_err(cx);
}
@@ -629,7 +660,7 @@ impl TerminalPanel {
workspace
.read(cx)
.panes()
- .into_iter()
+ .iter()
.cloned()
.flat_map(pane_terminal_views),
)
@@ -652,9 +683,13 @@ impl TerminalPanel {
pub fn add_center_terminal(
workspace: &mut Workspace,
- kind: TerminalKind,
window: &mut Window,
cx: &mut Context<Workspace>,
+ create_terminal: impl FnOnce(
+ &mut Project,
+ &mut Context<Project>,
+ ) -> Task<Result<Entity<Terminal>>>
+ + 'static,
) -> Task<Result<WeakEntity<Terminal>>> {
if !is_enabled_in_workspace(workspace, cx) {
return Task::ready(Err(anyhow!(
@@ -663,9 +698,7 @@ impl TerminalPanel {
}
let project = workspace.project().downgrade();
cx.spawn_in(window, async move |workspace, cx| {
- let terminal = project
- .update(cx, |project, cx| project.create_terminal(kind, cx))?
- .await?;
+ let terminal = project.update(cx, create_terminal)?.await?;
workspace.update_in(cx, |workspace, window, cx| {
let terminal_view = cx.new(|cx| {
@@ -684,9 +717,9 @@ impl TerminalPanel {
})
}
- pub fn add_terminal(
+ pub fn add_terminal_task(
&mut self,
- kind: TerminalKind,
+ task: SpawnInTerminal,
reveal_strategy: RevealStrategy,
window: &mut Window,
cx: &mut Context<Self>,
@@ -702,7 +735,66 @@ impl TerminalPanel {
})?;
let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?;
let terminal = project
- .update(cx, |project, cx| project.create_terminal(kind, cx))?
+ .update(cx, |project, cx| project.create_terminal_task(task, cx))?
+ .await?;
+ let result = workspace.update_in(cx, |workspace, window, cx| {
+ let terminal_view = Box::new(cx.new(|cx| {
+ TerminalView::new(
+ terminal.clone(),
+ workspace.weak_handle(),
+ workspace.database_id(),
+ workspace.project().downgrade(),
+ window,
+ cx,
+ )
+ }));
+
+ match reveal_strategy {
+ RevealStrategy::Always => {
+ workspace.focus_panel::<Self>(window, cx);
+ }
+ RevealStrategy::NoFocus => {
+ workspace.open_panel::<Self>(window, cx);
+ }
+ RevealStrategy::Never => {}
+ }
+
+ pane.update(cx, |pane, cx| {
+ let focus = pane.has_focus(window, cx)
+ || matches!(reveal_strategy, RevealStrategy::Always);
+ pane.add_item(terminal_view, true, focus, None, window, cx);
+ });
+
+ Ok(terminal.downgrade())
+ })?;
+ terminal_panel.update(cx, |terminal_panel, cx| {
+ terminal_panel.pending_terminals_to_add =
+ terminal_panel.pending_terminals_to_add.saturating_sub(1);
+ terminal_panel.serialize(cx)
+ })?;
+ result
+ })
+ }
+
+ pub fn add_terminal_shell(
+ &mut self,
+ cwd: Option<PathBuf>,
+ reveal_strategy: RevealStrategy,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<WeakEntity<Terminal>>> {
+ let workspace = self.workspace.clone();
+ cx.spawn_in(window, async move |terminal_panel, cx| {
+ if workspace.update(cx, |workspace, cx| !is_enabled_in_workspace(workspace, cx))? {
+ anyhow::bail!("terminal not yet supported for remote projects");
+ }
+ let pane = terminal_panel.update(cx, |terminal_panel, _| {
+ terminal_panel.pending_terminals_to_add += 1;
+ terminal_panel.active_pane.clone()
+ })?;
+ let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?;
+ let terminal = project
+ .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))?
.await?;
let result = workspace.update_in(cx, |workspace, window, cx| {
let terminal_view = Box::new(cx.new(|cx| {
@@ -811,7 +903,7 @@ impl TerminalPanel {
})??;
let new_terminal = project
.update(cx, |project, cx| {
- project.create_terminal(TerminalKind::Task(spawn_task), cx)
+ project.create_terminal_task(spawn_task, cx)
})?
.await?;
terminal_to_replace.update_in(cx, |terminal_to_replace, window, cx| {
@@ -822,11 +914,16 @@ impl TerminalPanel {
RevealStrategy::Always => match reveal_target {
RevealTarget::Center => {
task_workspace.update_in(cx, |workspace, window, cx| {
- workspace
- .active_item(cx)
- .context("retrieving active terminal item in the workspace")?
- .item_focus_handle(cx)
- .focus(window);
+ let did_activate = workspace.activate_item(
+ &terminal_to_replace,
+ true,
+ true,
+ window,
+ cx,
+ );
+
+ anyhow::ensure!(did_activate, "Failed to retrieve terminal pane");
+
anyhow::Ok(())
})??;
}
@@ -1240,18 +1337,29 @@ impl Render for TerminalPanel {
let panes = terminal_panel.center.panes();
if let Some(&pane) = panes.get(action.0) {
window.focus(&pane.read(cx).focus_handle(cx));
- } else if let Some(new_pane) =
- terminal_panel.new_pane_with_cloned_active_terminal(window, cx)
- {
- terminal_panel
- .center
- .split(
- &terminal_panel.active_pane,
- &new_pane,
- SplitDirection::Right,
- )
- .log_err();
- window.focus(&new_pane.focus_handle(cx));
+ } else {
+ let future =
+ terminal_panel.new_pane_with_cloned_active_terminal(window, cx);
+ cx.spawn_in(window, async move |terminal_panel, cx| {
+ if let Some(new_pane) = future.await {
+ _ = terminal_panel.update_in(
+ cx,
+ |terminal_panel, window, cx| {
+ terminal_panel
+ .center
+ .split(
+ &terminal_panel.active_pane,
+ &new_pane,
+ SplitDirection::Right,
+ )
+ .log_err();
+ let new_pane = new_pane.read(cx);
+ window.focus(&new_pane.focus_handle(cx));
+ },
+ );
+ }
+ })
+ .detach();
}
}),
)
@@ -1387,13 +1495,14 @@ impl Panel for TerminalPanel {
return;
}
cx.defer_in(window, |this, window, cx| {
- let Ok(kind) = this.workspace.update(cx, |workspace, cx| {
- TerminalKind::Shell(default_working_directory(workspace, cx))
- }) else {
+ let Ok(kind) = this
+ .workspace
+ .update(cx, |workspace, cx| default_working_directory(workspace, cx))
+ else {
return;
};
- this.add_terminal(kind, RevealStrategy::Always, window, cx)
+ this.add_terminal_shell(kind, RevealStrategy::Always, window, cx)
.detach_and_log_err(cx)
})
}
@@ -0,0 +1,825 @@
+use super::{HoverTarget, HoveredWord, TerminalView};
+use anyhow::{Context as _, Result};
+use editor::Editor;
+use gpui::{App, AppContext, Context, Task, WeakEntity, Window};
+use itertools::Itertools;
+use project::{Entry, Metadata};
+use std::path::PathBuf;
+use terminal::PathLikeTarget;
+use util::{ResultExt, debug_panic, paths::PathWithPosition};
+use workspace::{OpenOptions, OpenVisible, Workspace};
+
+#[derive(Debug, Clone)]
+enum OpenTarget {
+ Worktree(PathWithPosition, Entry),
+ File(PathWithPosition, Metadata),
+}
+
+impl OpenTarget {
+ fn is_file(&self) -> bool {
+ match self {
+ OpenTarget::Worktree(_, entry) => entry.is_file(),
+ OpenTarget::File(_, metadata) => !metadata.is_dir,
+ }
+ }
+
+ fn is_dir(&self) -> bool {
+ match self {
+ OpenTarget::Worktree(_, entry) => entry.is_dir(),
+ OpenTarget::File(_, metadata) => metadata.is_dir,
+ }
+ }
+
+ fn path(&self) -> &PathWithPosition {
+ match self {
+ OpenTarget::Worktree(path, _) => path,
+ OpenTarget::File(path, _) => path,
+ }
+ }
+}
+
+pub(super) fn hover_path_like_target(
+ workspace: &WeakEntity<Workspace>,
+ hovered_word: HoveredWord,
+ path_like_target: &PathLikeTarget,
+ cx: &mut Context<TerminalView>,
+) -> Task<()> {
+ let file_to_open_task = possible_open_target(workspace, path_like_target, cx);
+ cx.spawn(async move |terminal_view, cx| {
+ let file_to_open = file_to_open_task.await;
+ terminal_view
+ .update(cx, |terminal_view, _| match file_to_open {
+ Some(OpenTarget::File(path, _) | OpenTarget::Worktree(path, _)) => {
+ terminal_view.hover = Some(HoverTarget {
+ tooltip: path.to_string(|path| path.to_string_lossy().to_string()),
+ hovered_word,
+ });
+ }
+ None => {
+ terminal_view.hover = None;
+ }
+ })
+ .ok();
+ })
+}
+
+fn possible_open_target(
+ workspace: &WeakEntity<Workspace>,
+ path_like_target: &PathLikeTarget,
+ cx: &App,
+) -> Task<Option<OpenTarget>> {
+ let Some(workspace) = workspace.upgrade() else {
+ return Task::ready(None);
+ };
+ // We have to check for both paths, as on Unix, certain paths with positions are valid file paths too.
+    // We may be on the remote side of the FS, without a real filesystem, so we cannot canonicalize the path or check for its existence right away.
+ let mut potential_paths = Vec::new();
+ let cwd = path_like_target.terminal_dir.as_ref();
+ let maybe_path = &path_like_target.maybe_path;
+ let original_path = PathWithPosition::from_path(PathBuf::from(maybe_path));
+ let path_with_position = PathWithPosition::parse_str(maybe_path);
+ let worktree_candidates = workspace
+ .read(cx)
+ .worktrees(cx)
+ .sorted_by_key(|worktree| {
+ let worktree_root = worktree.read(cx).abs_path();
+ match cwd.and_then(|cwd| worktree_root.strip_prefix(cwd).ok()) {
+ Some(cwd_child) => cwd_child.components().count(),
+ None => usize::MAX,
+ }
+ })
+ .collect::<Vec<_>>();
+    // Since we do not check paths via the FS and joining, we need to strip potential `./`, `a/`, `b/` prefixes from the path.
+ const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"];
+ for prefix_str in GIT_DIFF_PATH_PREFIXES.iter().chain(std::iter::once(&".")) {
+ if let Some(stripped) = original_path.path.strip_prefix(prefix_str).ok() {
+ potential_paths.push(PathWithPosition {
+ path: stripped.to_owned(),
+ row: original_path.row,
+ column: original_path.column,
+ });
+ }
+ if let Some(stripped) = path_with_position.path.strip_prefix(prefix_str).ok() {
+ potential_paths.push(PathWithPosition {
+ path: stripped.to_owned(),
+ row: path_with_position.row,
+ column: path_with_position.column,
+ });
+ }
+ }
+
+ let insert_both_paths = original_path != path_with_position;
+ potential_paths.insert(0, original_path);
+ if insert_both_paths {
+ potential_paths.insert(1, path_with_position);
+ }
+
+    // If we don't find paths "easily", we can traverse the entire worktree to see what ends with the potential path suffix.
+ // That will be slow, though, so do the fast checks first.
+ let mut worktree_paths_to_check = Vec::new();
+ for worktree in &worktree_candidates {
+ let worktree_root = worktree.read(cx).abs_path();
+ let mut paths_to_check = Vec::with_capacity(potential_paths.len());
+
+ for path_with_position in &potential_paths {
+ let path_to_check = if worktree_root.ends_with(&path_with_position.path) {
+ let root_path_with_position = PathWithPosition {
+ path: worktree_root.to_path_buf(),
+ row: path_with_position.row,
+ column: path_with_position.column,
+ };
+ match worktree.read(cx).root_entry() {
+ Some(root_entry) => {
+ return Task::ready(Some(OpenTarget::Worktree(
+ root_path_with_position,
+ root_entry.clone(),
+ )));
+ }
+ None => root_path_with_position,
+ }
+ } else {
+ PathWithPosition {
+ path: path_with_position
+ .path
+ .strip_prefix(&worktree_root)
+ .unwrap_or(&path_with_position.path)
+ .to_owned(),
+ row: path_with_position.row,
+ column: path_with_position.column,
+ }
+ };
+
+ if path_to_check.path.is_relative()
+ && let Some(entry) = worktree.read(cx).entry_for_path(&path_to_check.path)
+ {
+ return Task::ready(Some(OpenTarget::Worktree(
+ PathWithPosition {
+ path: worktree_root.join(&entry.path),
+ row: path_to_check.row,
+ column: path_to_check.column,
+ },
+ entry.clone(),
+ )));
+ }
+
+ paths_to_check.push(path_to_check);
+ }
+
+ if !paths_to_check.is_empty() {
+ worktree_paths_to_check.push((worktree.clone(), paths_to_check));
+ }
+ }
+
+    // Before any full worktree traversal, attempt direct FS checks where available.
+ let fs_paths_to_check = if workspace.read(cx).project().read(cx).is_local() {
+ potential_paths
+ .into_iter()
+ .flat_map(|path_to_check| {
+ let mut paths_to_check = Vec::new();
+ let maybe_path = &path_to_check.path;
+ if maybe_path.starts_with("~") {
+ if let Some(home_path) =
+ maybe_path
+ .strip_prefix("~")
+ .ok()
+ .and_then(|stripped_maybe_path| {
+ Some(dirs::home_dir()?.join(stripped_maybe_path))
+ })
+ {
+ paths_to_check.push(PathWithPosition {
+ path: home_path,
+ row: path_to_check.row,
+ column: path_to_check.column,
+ });
+ }
+ } else {
+ paths_to_check.push(PathWithPosition {
+ path: maybe_path.clone(),
+ row: path_to_check.row,
+ column: path_to_check.column,
+ });
+ if maybe_path.is_relative() {
+ if let Some(cwd) = &cwd {
+ paths_to_check.push(PathWithPosition {
+ path: cwd.join(maybe_path),
+ row: path_to_check.row,
+ column: path_to_check.column,
+ });
+ }
+ for worktree in &worktree_candidates {
+ paths_to_check.push(PathWithPosition {
+ path: worktree.read(cx).abs_path().join(maybe_path),
+ row: path_to_check.row,
+ column: path_to_check.column,
+ });
+ }
+ }
+ }
+ paths_to_check
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
+
+ let worktree_check_task = cx.spawn(async move |cx| {
+ for (worktree, worktree_paths_to_check) in worktree_paths_to_check {
+ let found_entry = worktree
+ .update(cx, |worktree, _| {
+ let worktree_root = worktree.abs_path();
+ let traversal = worktree.traverse_from_path(true, true, false, "".as_ref());
+ for entry in traversal {
+ if let Some(path_in_worktree) = worktree_paths_to_check
+ .iter()
+ .find(|path_to_check| entry.path.ends_with(&path_to_check.path))
+ {
+ return Some(OpenTarget::Worktree(
+ PathWithPosition {
+ path: worktree_root.join(&entry.path),
+ row: path_in_worktree.row,
+ column: path_in_worktree.column,
+ },
+ entry.clone(),
+ ));
+ }
+ }
+ None
+ })
+ .ok()?;
+ if let Some(found_entry) = found_entry {
+ return Some(found_entry);
+ }
+ }
+ None
+ });
+
+ let fs = workspace.read(cx).project().read(cx).fs().clone();
+ cx.background_spawn(async move {
+ for mut path_to_check in fs_paths_to_check {
+ if let Some(fs_path_to_check) = fs.canonicalize(&path_to_check.path).await.ok()
+ && let Some(metadata) = fs.metadata(&fs_path_to_check).await.ok().flatten()
+ {
+ path_to_check.path = fs_path_to_check;
+ return Some(OpenTarget::File(path_to_check, metadata));
+ }
+ }
+
+ worktree_check_task.await
+ })
+}
+
+pub(super) fn open_path_like_target(
+ workspace: &WeakEntity<Workspace>,
+ terminal_view: &mut TerminalView,
+ path_like_target: &PathLikeTarget,
+ window: &mut Window,
+ cx: &mut Context<TerminalView>,
+) {
+ possibly_open_target(workspace, terminal_view, path_like_target, window, cx)
+ .detach_and_log_err(cx)
+}
+
+fn possibly_open_target(
+ workspace: &WeakEntity<Workspace>,
+ terminal_view: &mut TerminalView,
+ path_like_target: &PathLikeTarget,
+ window: &mut Window,
+ cx: &mut Context<TerminalView>,
+) -> Task<Result<Option<OpenTarget>>> {
+ if terminal_view.hover.is_none() {
+ return Task::ready(Ok(None));
+ }
+ let workspace = workspace.clone();
+ let path_like_target = path_like_target.clone();
+ cx.spawn_in(window, async move |terminal_view, cx| {
+ let Some(open_target) = terminal_view
+ .update(cx, |_, cx| {
+ possible_open_target(&workspace, &path_like_target, cx)
+ })?
+ .await
+ else {
+ return Ok(None);
+ };
+
+ let path_to_open = open_target.path();
+ let opened_items = workspace
+ .update_in(cx, |workspace, window, cx| {
+ workspace.open_paths(
+ vec![path_to_open.path.clone()],
+ OpenOptions {
+ visible: Some(OpenVisible::OnlyDirectories),
+ ..Default::default()
+ },
+ None,
+ window,
+ cx,
+ )
+ })
+ .context("workspace update")?
+ .await;
+ if opened_items.len() != 1 {
+ debug_panic!(
+ "Received {} items for one path {path_to_open:?}",
+ opened_items.len(),
+ );
+ }
+
+ if let Some(opened_item) = opened_items.first() {
+ if open_target.is_file() {
+ if let Some(Ok(opened_item)) = opened_item {
+ if let Some(row) = path_to_open.row {
+ let col = path_to_open.column.unwrap_or(0);
+ if let Some(active_editor) = opened_item.downcast::<Editor>() {
+ active_editor
+ .downgrade()
+ .update_in(cx, |editor, window, cx| {
+ editor.go_to_singleton_buffer_point(
+ language::Point::new(
+ row.saturating_sub(1),
+ col.saturating_sub(1),
+ ),
+ window,
+ cx,
+ )
+ })
+ .log_err();
+ }
+ }
+ return Ok(Some(open_target));
+ }
+ } else if open_target.is_dir() {
+ workspace.update(cx, |workspace, cx| {
+ workspace.project().update(cx, |_, cx| {
+ cx.emit(project::Event::ActivateProjectPanel);
+ })
+ })?;
+ return Ok(Some(open_target));
+ }
+ }
+ Ok(None)
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::TestAppContext;
+ use project::Project;
+ use serde_json::json;
+ use std::path::{Path, PathBuf};
+ use terminal::{HoveredWord, alacritty_terminal::index::Point as AlacPoint};
+ use util::path;
+ use workspace::AppState;
+
+ async fn init_test(
+ app_cx: &mut TestAppContext,
+ trees: impl IntoIterator<Item = (&str, serde_json::Value)>,
+ worktree_roots: impl IntoIterator<Item = &str>,
+ ) -> impl AsyncFnMut(HoveredWord, PathLikeTarget) -> (Option<HoverTarget>, Option<OpenTarget>)
+ {
+ let fs = app_cx.update(AppState::test).fs.as_fake().clone();
+
+ app_cx.update(|cx| {
+ terminal::init(cx);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ Project::init_settings(cx);
+ language::init(cx);
+ editor::init(cx);
+ });
+
+ for (path, tree) in trees {
+ fs.insert_tree(path, tree).await;
+ }
+
+ let project = Project::test(
+ fs.clone(),
+ worktree_roots
+ .into_iter()
+ .map(Path::new)
+ .collect::<Vec<_>>(),
+ app_cx,
+ )
+ .await;
+
+ let (workspace, cx) =
+ app_cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+ let terminal = project
+ .update(cx, |project: &mut Project, cx| {
+ project.create_terminal_shell(None, cx)
+ })
+ .await
+ .expect("Failed to create a terminal");
+
+ let workspace_a = workspace.clone();
+ let (terminal_view, cx) = app_cx.add_window_view(|window, cx| {
+ TerminalView::new(
+ terminal,
+ workspace_a.downgrade(),
+ None,
+ project.downgrade(),
+ window,
+ cx,
+ )
+ });
+
+ async move |hovered_word: HoveredWord,
+ path_like_target: PathLikeTarget|
+ -> (Option<HoverTarget>, Option<OpenTarget>) {
+ let workspace_a = workspace.clone();
+ terminal_view
+ .update(cx, |_, cx| {
+ hover_path_like_target(
+ &workspace_a.downgrade(),
+ hovered_word,
+ &path_like_target,
+ cx,
+ )
+ })
+ .await;
+
+ let hover_target =
+ terminal_view.read_with(cx, |terminal_view, _| terminal_view.hover.clone());
+
+ let open_target = terminal_view
+ .update_in(cx, |terminal_view, window, cx| {
+ possibly_open_target(
+ &workspace.downgrade(),
+ terminal_view,
+ &path_like_target,
+ window,
+ cx,
+ )
+ })
+ .await
+ .expect("Failed to possibly open target");
+
+ (hover_target, open_target)
+ }
+ }
+
+ async fn test_path_like_simple(
+ test_path_like: &mut impl AsyncFnMut(
+ HoveredWord,
+ PathLikeTarget,
+ ) -> (Option<HoverTarget>, Option<OpenTarget>),
+ maybe_path: &str,
+ tooltip: &str,
+ terminal_dir: Option<PathBuf>,
+ file: &str,
+ line: u32,
+ ) {
+ let (hover_target, open_target) = test_path_like(
+ HoveredWord {
+ word: maybe_path.to_string(),
+ word_match: AlacPoint::default()..=AlacPoint::default(),
+ id: 0,
+ },
+ PathLikeTarget {
+ maybe_path: maybe_path.to_string(),
+ terminal_dir,
+ },
+ )
+ .await;
+
+ let Some(hover_target) = hover_target else {
+ assert!(
+ hover_target.is_some(),
+ "Hover target should not be `None` at {file}:{line}:"
+ );
+ return;
+ };
+
+ assert_eq!(
+ hover_target.tooltip, tooltip,
+ "Tooltip mismatch at {file}:{line}:"
+ );
+ assert_eq!(
+ hover_target.hovered_word.word, maybe_path,
+ "Hovered word mismatch at {file}:{line}:"
+ );
+
+ let Some(open_target) = open_target else {
+ assert!(
+ open_target.is_some(),
+ "Open target should not be `None` at {file}:{line}:"
+ );
+ return;
+ };
+
+ assert_eq!(
+ open_target.path().path,
+ Path::new(tooltip),
+ "Open target path mismatch at {file}:{line}:"
+ );
+ }
+
+ macro_rules! none_or_some {
+ () => {
+ None
+ };
+ ($some:expr) => {
+ Some($some)
+ };
+ }
+
+ macro_rules! test_path_like {
+ ($test_path_like:expr, $maybe_path:literal, $tooltip:literal $(, $cwd:literal)?) => {
+ test_path_like_simple(
+ &mut $test_path_like,
+ path!($maybe_path),
+ path!($tooltip),
+ none_or_some!($($crate::PathBuf::from(path!($cwd)))?),
+ std::file!(),
+ std::line!(),
+ )
+ .await
+ };
+ }
+
+ #[doc = "test_path_likes!(<cx>, <trees>, <worktrees>, { $(<tests>;)+ })"]
+ macro_rules! test_path_likes {
+ ($cx:expr, $trees:expr, $worktrees:expr, { $($tests:expr;)+ }) => { {
+ let mut test_path_like = init_test($cx, $trees, $worktrees).await;
+ #[doc ="test!(<hovered maybe_path>, <expected tooltip>, <terminal cwd>)"]
+ macro_rules! test {
+ ($maybe_path:literal, $tooltip:literal) => {
+ test_path_like!(test_path_like, $maybe_path, $tooltip)
+ };
+ ($maybe_path:literal, $tooltip:literal, $cwd:literal) => {
+ test_path_like!(test_path_like, $maybe_path, $tooltip, $cwd)
+ }
+ }
+ $($tests);+
+ } }
+ }
+
+ #[gpui::test]
+ async fn one_folder_worktree(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![(
+ path!("/test"),
+ json!({
+ "lib.rs": "",
+ "test.rs": "",
+ }),
+ )],
+ vec![path!("/test")],
+ {
+ test!("lib.rs", "/test/lib.rs");
+ test!("test.rs", "/test/test.rs");
+ }
+ )
+ }
+
+ #[gpui::test]
+ async fn mixed_worktrees(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![
+ (
+ path!("/"),
+ json!({
+ "file.txt": "",
+ }),
+ ),
+ (
+ path!("/test"),
+ json!({
+ "lib.rs": "",
+ "test.rs": "",
+ "file.txt": "",
+ }),
+ ),
+ ],
+ vec![path!("/file.txt"), path!("/test")],
+ {
+ test!("file.txt", "/file.txt", "/");
+ test!("lib.rs", "/test/lib.rs", "/test");
+ test!("test.rs", "/test/test.rs", "/test");
+ test!("file.txt", "/test/file.txt", "/test");
+ }
+ )
+ }
+
+ #[gpui::test]
+ async fn worktree_file_preferred(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![
+ (
+ path!("/"),
+ json!({
+ "file.txt": "",
+ }),
+ ),
+ (
+ path!("/test"),
+ json!({
+ "file.txt": "",
+ }),
+ ),
+ ],
+ vec![path!("/test")],
+ {
+ test!("file.txt", "/test/file.txt", "/test");
+ }
+ )
+ }
+
+ mod issues {
+ use super::*;
+
+ // https://github.com/zed-industries/zed/issues/28407
+ #[gpui::test]
+ async fn issue_28407_siblings(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![(
+ path!("/dir1"),
+ json!({
+ "dir 2": {
+ "C.py": ""
+ },
+ "dir 3": {
+ "C.py": ""
+ },
+ }),
+ )],
+ vec![path!("/dir1")],
+ {
+ test!("C.py", "/dir1/dir 2/C.py", "/dir1");
+ test!("C.py", "/dir1/dir 2/C.py", "/dir1/dir 2");
+ test!("C.py", "/dir1/dir 3/C.py", "/dir1/dir 3");
+ }
+ )
+ }
+
+ // https://github.com/zed-industries/zed/issues/28407
+ // See https://github.com/zed-industries/zed/issues/34027
+ // See https://github.com/zed-industries/zed/issues/33498
+ #[gpui::test]
+ #[should_panic(expected = "Tooltip mismatch")]
+ async fn issue_28407_nesting(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![(
+ path!("/project"),
+ json!({
+ "lib": {
+ "src": {
+ "main.rs": ""
+ },
+ },
+ "src": {
+ "main.rs": ""
+ },
+ }),
+ )],
+ vec![path!("/project")],
+ {
+ // Failing currently
+ test!("main.rs", "/project/src/main.rs", "/project");
+ test!("main.rs", "/project/src/main.rs", "/project/src");
+ test!("main.rs", "/project/lib/src/main.rs", "/project/lib");
+ test!("main.rs", "/project/lib/src/main.rs", "/project/lib/src");
+
+ test!("src/main.rs", "/project/src/main.rs", "/project");
+ test!("src/main.rs", "/project/src/main.rs", "/project/src");
+ // Failing currently
+ test!("src/main.rs", "/project/lib/src/main.rs", "/project/lib");
+ // Failing currently
+ test!(
+ "src/main.rs",
+ "/project/lib/src/main.rs",
+ "/project/lib/src"
+ );
+
+ test!("lib/src/main.rs", "/project/lib/src/main.rs", "/project");
+ test!(
+ "lib/src/main.rs",
+ "/project/lib/src/main.rs",
+ "/project/src"
+ );
+ test!(
+ "lib/src/main.rs",
+ "/project/lib/src/main.rs",
+ "/project/lib"
+ );
+ test!(
+ "lib/src/main.rs",
+ "/project/lib/src/main.rs",
+ "/project/lib/src"
+ );
+ }
+ )
+ }
+
+ // https://github.com/zed-industries/zed/issues/28339
+ #[gpui::test]
+ async fn issue_28339(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![(
+ path!("/tmp"),
+ json!({
+ "issue28339": {
+ "foo": {
+ "bar.txt": ""
+ },
+ },
+ }),
+ )],
+ vec![path!("/tmp")],
+ {
+ test!(
+ "foo/./bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339"
+ );
+ test!(
+ "foo/../foo/bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339"
+ );
+ test!(
+ "foo/..///foo/bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339"
+ );
+ test!(
+ "issue28339/../issue28339/foo/../foo/bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339"
+ );
+ test!(
+ "./bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339/foo"
+ );
+ test!(
+ "../foo/bar.txt",
+ "/tmp/issue28339/foo/bar.txt",
+ "/tmp/issue28339/foo"
+ );
+ }
+ )
+ }
+
+ // https://github.com/zed-industries/zed/issues/34027
+ #[gpui::test]
+ #[should_panic(expected = "Tooltip mismatch")]
+ async fn issue_34027(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![(
+ path!("/tmp/issue34027"),
+ json!({
+ "test.txt": "",
+ "foo": {
+ "test.txt": "",
+ }
+ }),
+ ),],
+ vec![path!("/tmp/issue34027")],
+ {
+ test!("test.txt", "/tmp/issue34027/test.txt", "/tmp/issue34027");
+ test!(
+ "test.txt",
+ "/tmp/issue34027/foo/test.txt",
+ "/tmp/issue34027/foo"
+ );
+ }
+ )
+ }
+
+ // https://github.com/zed-industries/zed/issues/34027
+ #[gpui::test]
+ #[should_panic(expected = "Tooltip mismatch")]
+ async fn issue_34027_non_worktree_file(cx: &mut TestAppContext) {
+ test_path_likes!(
+ cx,
+ vec![
+ (
+ path!("/"),
+ json!({
+ "file.txt": "",
+ }),
+ ),
+ (
+ path!("/test"),
+ json!({
+ "file.txt": "",
+ }),
+ ),
+ ],
+ vec![path!("/test")],
+ {
+ test!("file.txt", "/file.txt", "/");
+ test!("file.txt", "/test/file.txt", "/test");
+ }
+ )
+ }
+ }
+}
@@ -104,7 +104,7 @@ impl SlashCommand for TerminalSlashCommand {
}],
run_commands_in_text: false,
}
- .to_event_stream()))
+ .into_event_stream()))
}
}
@@ -1,22 +1,21 @@
-mod color_contrast;
mod persistence;
pub mod terminal_element;
pub mod terminal_panel;
+mod terminal_path_like_target;
pub mod terminal_scrollbar;
mod terminal_slash_command;
pub mod terminal_tab_tooltip;
use assistant_slash_command::SlashCommandRegistry;
-use editor::{Editor, EditorSettings, actions::SelectAll, scroll::ScrollbarAutoHide};
+use editor::{EditorSettings, actions::SelectAll, scroll::ScrollbarAutoHide};
use gpui::{
Action, AnyElement, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Render,
ScrollWheelEvent, Stateful, Styled, Subscription, Task, WeakEntity, actions, anchored,
deferred, div,
};
-use itertools::Itertools;
use persistence::TERMINAL_DB;
-use project::{Entry, Metadata, Project, search::SearchQuery, terminals::TerminalKind};
+use project::{Project, search::SearchQuery};
use schemars::JsonSchema;
use task::TaskId;
use terminal::{
@@ -31,16 +30,17 @@ use terminal::{
};
use terminal_element::TerminalElement;
use terminal_panel::TerminalPanel;
+use terminal_path_like_target::{hover_path_like_target, open_path_like_target};
use terminal_scrollbar::TerminalScrollHandle;
use terminal_slash_command::TerminalSlashCommand;
use terminal_tab_tooltip::TerminalTooltip;
use ui::{
ContextMenu, Icon, IconName, Label, Scrollbar, ScrollbarState, Tooltip, h_flex, prelude::*,
};
-use util::{ResultExt, debug_panic, paths::PathWithPosition};
+use util::ResultExt;
use workspace::{
- CloseActiveItem, NewCenterTerminal, NewTerminal, OpenOptions, OpenVisible, ToolbarItemLocation,
- Workspace, WorkspaceId, delete_unloaded_items,
+ CloseActiveItem, NewCenterTerminal, NewTerminal, ToolbarItemLocation, Workspace, WorkspaceId,
+ delete_unloaded_items,
item::{
BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent,
},
@@ -48,7 +48,6 @@ use workspace::{
searchable::{Direction, SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle},
};
-use anyhow::Context as _;
use serde::Deserialize;
use settings::{Settings, SettingsStore};
use smol::Timer;
@@ -63,8 +62,12 @@ use std::{
time::Duration,
};
+struct ImeState {
+ marked_text: String,
+ marked_range_utf16: Option<Range<usize>>,
+}
+
const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
-const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"];
const TERMINAL_SCROLLBAR_WIDTH: Pixels = px(12.);
/// Event to transmit the scroll from the element to the view
@@ -140,8 +143,7 @@ pub struct TerminalView {
scroll_handle: TerminalScrollHandle,
show_scrollbar: bool,
hide_scrollbar_task: Option<Task<()>>,
- marked_text: Option<String>,
- marked_range_utf16: Option<Range<usize>>,
+ ime_state: Option<ImeState>,
_subscriptions: Vec<Subscription>,
_terminal_subscriptions: Vec<Subscription>,
}
@@ -181,6 +183,7 @@ impl ContentMode {
}
#[derive(Debug)]
+#[cfg_attr(test, derive(Clone, Eq, PartialEq))]
struct HoverTarget {
tooltip: String,
hovered_word: HoveredWord,
@@ -205,12 +208,9 @@ impl TerminalView {
cx: &mut Context<Workspace>,
) {
let working_directory = default_working_directory(workspace, cx);
- TerminalPanel::add_center_terminal(
- workspace,
- TerminalKind::Shell(working_directory),
- window,
- cx,
- )
+ TerminalPanel::add_center_terminal(workspace, window, cx, |project, cx| {
+ project.create_terminal_shell(working_directory, cx)
+ })
.detach_and_log_err(cx);
}
@@ -267,8 +267,7 @@ impl TerminalView {
show_scrollbar: !Self::should_autohide_scrollbar(cx),
hide_scrollbar_task: None,
cwd_serialized: false,
- marked_text: None,
- marked_range_utf16: None,
+ ime_state: None,
_subscriptions: vec![
focus_in,
focus_out,
@@ -327,24 +326,27 @@ impl TerminalView {
pub(crate) fn set_marked_text(
&mut self,
text: String,
- range: Range<usize>,
+ range: Option<Range<usize>>,
cx: &mut Context<Self>,
) {
- self.marked_text = Some(text);
- self.marked_range_utf16 = Some(range);
+ self.ime_state = Some(ImeState {
+ marked_text: text,
+ marked_range_utf16: range,
+ });
cx.notify();
}
/// Gets the current marked range (UTF-16).
pub(crate) fn marked_text_range(&self) -> Option<Range<usize>> {
- self.marked_range_utf16.clone()
+ self.ime_state
+ .as_ref()
+ .and_then(|state| state.marked_range_utf16.clone())
}
/// Clears the marked (pre-edit) text state.
pub(crate) fn clear_marked_text(&mut self, cx: &mut Context<Self>) {
- if self.marked_text.is_some() {
- self.marked_text = None;
- self.marked_range_utf16 = None;
+ if self.ime_state.is_some() {
+ self.ime_state = None;
cx.notify();
}
}
@@ -1066,37 +1068,13 @@ fn subscribe_for_terminal_events(
.as_ref()
.map(|hover| &hover.hovered_word)
{
- let valid_files_to_open_task = possible_open_target(
+ terminal_view.hover = None;
+ terminal_view.hover_tooltip_update = hover_path_like_target(
&workspace,
- &path_like_target.terminal_dir,
- &path_like_target.maybe_path,
+ hovered_word.clone(),
+ path_like_target,
cx,
);
- let hovered_word = hovered_word.clone();
-
- terminal_view.hover = None;
- terminal_view.hover_tooltip_update =
- cx.spawn(async move |terminal_view, cx| {
- let file_to_open = valid_files_to_open_task.await;
- terminal_view
- .update(cx, |terminal_view, _| match file_to_open {
- Some(
- OpenTarget::File(path, _)
- | OpenTarget::Worktree(path, _),
- ) => {
- terminal_view.hover = Some(HoverTarget {
- tooltip: path.to_string(|path| {
- path.to_string_lossy().to_string()
- }),
- hovered_word,
- });
- }
- None => {
- terminal_view.hover = None;
- }
- })
- .ok();
- });
cx.notify();
}
}
@@ -1110,86 +1088,13 @@ fn subscribe_for_terminal_events(
Event::Open(maybe_navigation_target) => match maybe_navigation_target {
MaybeNavigationTarget::Url(url) => cx.open_url(url),
-
- MaybeNavigationTarget::PathLike(path_like_target) => {
- if terminal_view.hover.is_none() {
- return;
- }
- let task_workspace = workspace.clone();
- let path_like_target = path_like_target.clone();
- cx.spawn_in(window, async move |terminal_view, cx| {
- let open_target = terminal_view
- .update(cx, |_, cx| {
- possible_open_target(
- &task_workspace,
- &path_like_target.terminal_dir,
- &path_like_target.maybe_path,
- cx,
- )
- })?
- .await;
- if let Some(open_target) = open_target {
- let path_to_open = open_target.path();
- let opened_items = task_workspace
- .update_in(cx, |workspace, window, cx| {
- workspace.open_paths(
- vec![path_to_open.path.clone()],
- OpenOptions {
- visible: Some(OpenVisible::OnlyDirectories),
- ..Default::default()
- },
- None,
- window,
- cx,
- )
- })
- .context("workspace update")?
- .await;
- if opened_items.len() != 1 {
- debug_panic!(
- "Received {} items for one path {path_to_open:?}",
- opened_items.len(),
- );
- }
-
- if let Some(opened_item) = opened_items.first() {
- if open_target.is_file() {
- if let Some(Ok(opened_item)) = opened_item
- && let Some(row) = path_to_open.row
- {
- let col = path_to_open.column.unwrap_or(0);
- if let Some(active_editor) =
- opened_item.downcast::<Editor>()
- {
- active_editor
- .downgrade()
- .update_in(cx, |editor, window, cx| {
- editor.go_to_singleton_buffer_point(
- language::Point::new(
- row.saturating_sub(1),
- col.saturating_sub(1),
- ),
- window,
- cx,
- )
- })
- .log_err();
- }
- }
- } else if open_target.is_dir() {
- task_workspace.update(cx, |workspace, cx| {
- workspace.project().update(cx, |_, cx| {
- cx.emit(project::Event::ActivateProjectPanel);
- })
- })?;
- }
- }
- }
-
- anyhow::Ok(())
- })
- .detach_and_log_err(cx)
- }
+ MaybeNavigationTarget::PathLike(path_like_target) => open_path_like_target(
+ &workspace,
+ terminal_view,
+ path_like_target,
+ window,
+ cx,
+ ),
},
Event::BreadcrumbsChanged => cx.emit(ItemEvent::UpdateBreadcrumbs),
Event::CloseTerminal => cx.emit(ItemEvent::CloseItem),
@@ -1203,241 +1108,6 @@ fn subscribe_for_terminal_events(
vec![terminal_subscription, terminal_events_subscription]
}
-#[derive(Debug, Clone)]
-enum OpenTarget {
- Worktree(PathWithPosition, Entry),
- File(PathWithPosition, Metadata),
-}
-
-impl OpenTarget {
- fn is_file(&self) -> bool {
- match self {
- OpenTarget::Worktree(_, entry) => entry.is_file(),
- OpenTarget::File(_, metadata) => !metadata.is_dir,
- }
- }
-
- fn is_dir(&self) -> bool {
- match self {
- OpenTarget::Worktree(_, entry) => entry.is_dir(),
- OpenTarget::File(_, metadata) => metadata.is_dir,
- }
- }
-
- fn path(&self) -> &PathWithPosition {
- match self {
- OpenTarget::Worktree(path, _) => path,
- OpenTarget::File(path, _) => path,
- }
- }
-}
-
-fn possible_open_target(
- workspace: &WeakEntity<Workspace>,
- cwd: &Option<PathBuf>,
- maybe_path: &str,
- cx: &App,
-) -> Task<Option<OpenTarget>> {
- let Some(workspace) = workspace.upgrade() else {
- return Task::ready(None);
- };
- // We have to check for both paths, as on Unix, certain paths with positions are valid file paths too.
- // We can be on FS remote part, without real FS, so cannot canonicalize or check for existence the path right away.
- let mut potential_paths = Vec::new();
- let original_path = PathWithPosition::from_path(PathBuf::from(maybe_path));
- let path_with_position = PathWithPosition::parse_str(maybe_path);
- let worktree_candidates = workspace
- .read(cx)
- .worktrees(cx)
- .sorted_by_key(|worktree| {
- let worktree_root = worktree.read(cx).abs_path();
- match cwd
- .as_ref()
- .and_then(|cwd| worktree_root.strip_prefix(cwd).ok())
- {
- Some(cwd_child) => cwd_child.components().count(),
- None => usize::MAX,
- }
- })
- .collect::<Vec<_>>();
- // Since we do not check paths via FS and joining, we need to strip off potential `./`, `a/`, `b/` prefixes out of it.
- for prefix_str in GIT_DIFF_PATH_PREFIXES.iter().chain(std::iter::once(&".")) {
- if let Some(stripped) = original_path.path.strip_prefix(prefix_str).ok() {
- potential_paths.push(PathWithPosition {
- path: stripped.to_owned(),
- row: original_path.row,
- column: original_path.column,
- });
- }
- if let Some(stripped) = path_with_position.path.strip_prefix(prefix_str).ok() {
- potential_paths.push(PathWithPosition {
- path: stripped.to_owned(),
- row: path_with_position.row,
- column: path_with_position.column,
- });
- }
- }
-
- let insert_both_paths = original_path != path_with_position;
- potential_paths.insert(0, original_path);
- if insert_both_paths {
- potential_paths.insert(1, path_with_position);
- }
-
- // If we won't find paths "easily", we can traverse the entire worktree to look what ends with the potential path suffix.
- // That will be slow, though, so do the fast checks first.
- let mut worktree_paths_to_check = Vec::new();
- for worktree in &worktree_candidates {
- let worktree_root = worktree.read(cx).abs_path();
- let mut paths_to_check = Vec::with_capacity(potential_paths.len());
-
- for path_with_position in &potential_paths {
- let path_to_check = if worktree_root.ends_with(&path_with_position.path) {
- let root_path_with_position = PathWithPosition {
- path: worktree_root.to_path_buf(),
- row: path_with_position.row,
- column: path_with_position.column,
- };
- match worktree.read(cx).root_entry() {
- Some(root_entry) => {
- return Task::ready(Some(OpenTarget::Worktree(
- root_path_with_position,
- root_entry.clone(),
- )));
- }
- None => root_path_with_position,
- }
- } else {
- PathWithPosition {
- path: path_with_position
- .path
- .strip_prefix(&worktree_root)
- .unwrap_or(&path_with_position.path)
- .to_owned(),
- row: path_with_position.row,
- column: path_with_position.column,
- }
- };
-
- if path_to_check.path.is_relative()
- && let Some(entry) = worktree.read(cx).entry_for_path(&path_to_check.path)
- {
- return Task::ready(Some(OpenTarget::Worktree(
- PathWithPosition {
- path: worktree_root.join(&entry.path),
- row: path_to_check.row,
- column: path_to_check.column,
- },
- entry.clone(),
- )));
- }
-
- paths_to_check.push(path_to_check);
- }
-
- if !paths_to_check.is_empty() {
- worktree_paths_to_check.push((worktree.clone(), paths_to_check));
- }
- }
-
- // Before entire worktree traversal(s), make an attempt to do FS checks if available.
- let fs_paths_to_check = if workspace.read(cx).project().read(cx).is_local() {
- potential_paths
- .into_iter()
- .flat_map(|path_to_check| {
- let mut paths_to_check = Vec::new();
- let maybe_path = &path_to_check.path;
- if maybe_path.starts_with("~") {
- if let Some(home_path) =
- maybe_path
- .strip_prefix("~")
- .ok()
- .and_then(|stripped_maybe_path| {
- Some(dirs::home_dir()?.join(stripped_maybe_path))
- })
- {
- paths_to_check.push(PathWithPosition {
- path: home_path,
- row: path_to_check.row,
- column: path_to_check.column,
- });
- }
- } else {
- paths_to_check.push(PathWithPosition {
- path: maybe_path.clone(),
- row: path_to_check.row,
- column: path_to_check.column,
- });
- if maybe_path.is_relative() {
- if let Some(cwd) = &cwd {
- paths_to_check.push(PathWithPosition {
- path: cwd.join(maybe_path),
- row: path_to_check.row,
- column: path_to_check.column,
- });
- }
- for worktree in &worktree_candidates {
- paths_to_check.push(PathWithPosition {
- path: worktree.read(cx).abs_path().join(maybe_path),
- row: path_to_check.row,
- column: path_to_check.column,
- });
- }
- }
- }
- paths_to_check
- })
- .collect()
- } else {
- Vec::new()
- };
-
- let worktree_check_task = cx.spawn(async move |cx| {
- for (worktree, worktree_paths_to_check) in worktree_paths_to_check {
- let found_entry = worktree
- .update(cx, |worktree, _| {
- let worktree_root = worktree.abs_path();
- let mut traversal = worktree.traverse_from_path(true, true, false, "".as_ref());
- while let Some(entry) = traversal.next() {
- if let Some(path_in_worktree) = worktree_paths_to_check
- .iter()
- .find(|path_to_check| entry.path.ends_with(&path_to_check.path))
- {
- return Some(OpenTarget::Worktree(
- PathWithPosition {
- path: worktree_root.join(&entry.path),
- row: path_in_worktree.row,
- column: path_in_worktree.column,
- },
- entry.clone(),
- ));
- }
- }
- None
- })
- .ok()?;
- if let Some(found_entry) = found_entry {
- return Some(found_entry);
- }
- }
- None
- });
-
- let fs = workspace.read(cx).project().read(cx).fs().clone();
- cx.background_spawn(async move {
- for mut path_to_check in fs_paths_to_check {
- if let Some(fs_path_to_check) = fs.canonicalize(&path_to_check.path).await.ok()
- && let Some(metadata) = fs.metadata(&fs_path_to_check).await.ok().flatten()
- {
- path_to_check.path = fs_path_to_check;
- return Some(OpenTarget::File(path_to_check, metadata));
- }
- }
-
- worktree_check_task.await
- })
-}
-
fn regex_search_for_query(query: &project::search::SearchQuery) -> Option<RegexSearch> {
let str = query.as_str();
if query.is_regex() {
@@ -1666,16 +1336,10 @@ impl Item for TerminalView {
let terminal = self
.project
.update(cx, |project, cx| {
- let terminal = self.terminal().read(cx);
- let working_directory = terminal
- .working_directory()
- .or_else(|| Some(project.active_project_directory(cx)?.to_path_buf()));
- let python_venv_directory = terminal.python_venv_directory.clone();
- project.create_terminal_with_venv(
- TerminalKind::Shell(working_directory),
- python_venv_directory,
- cx,
- )
+ let cwd = project
+ .active_project_directory(cx)
+ .map(|it| it.to_path_buf());
+ project.clone_terminal(self.terminal(), cx, || cwd)
})
.ok()?
.log_err()?;
@@ -1831,9 +1495,7 @@ impl SerializableItem for TerminalView {
.flatten();
let terminal = project
- .update(cx, |project, cx| {
- project.create_terminal(TerminalKind::Shell(cwd), cx)
- })?
+ .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))?
.await?;
cx.update(|window, cx| {
cx.new(|cx| {
@@ -1867,7 +1529,7 @@ impl SearchableItem for TerminalView {
/// Clear stored matches
fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
- self.terminal().update(cx, |term, _| term.clear_matches())
+ self.terminal().update(cx, |term, _| term.matches.clear())
}
/// Store matches returned from find_matches somewhere for rendering
@@ -1937,7 +1599,8 @@ impl SearchableItem for TerminalView {
// Selection head might have a value if there's a selection that isn't
// associated with a match. Therefore, if there are no matches, we should
// report None, no matter the state of the terminal
- let res = if !matches.is_empty() {
+
+ if !matches.is_empty() {
if let Some(selection_head) = self.terminal().read(cx).selection_head {
// If selection head is contained in a match. Return that match
match direction {
@@ -1977,9 +1640,7 @@ impl SearchableItem for TerminalView {
}
} else {
None
- };
-
- res
+ }
}
fn replace(
&mut self,
@@ -2193,7 +1854,7 @@ mod tests {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
(wt, entry)
@@ -106,13 +106,13 @@ mod tests {
let mut rhs = Default::default();
while lhs == rhs {
lhs = Locator(
- (0..rng.gen_range(1..=5))
- .map(|_| rng.gen_range(0..=100))
+ (0..rng.random_range(1..=5))
+ .map(|_| rng.random_range(0..=100))
.collect(),
);
rhs = Locator(
- (0..rng.gen_range(1..=5))
- .map(|_| rng.gen_range(0..=100))
+ (0..rng.random_range(1..=5))
+ .map(|_| rng.random_range(0..=100))
.collect(),
);
}
@@ -65,8 +65,8 @@ impl<T: Clone, R: rand::Rng> Network<T, R> {
for message in &messages {
// Insert one or more duplicates of this message, potentially *before* the previous
// message sent by this peer to simulate out-of-order delivery.
- for _ in 0..self.rng.gen_range(1..4) {
- let insertion_index = self.rng.gen_range(0..inbox.len() + 1);
+ for _ in 0..self.rng.random_range(1..4) {
+ let insertion_index = self.rng.random_range(0..inbox.len() + 1);
inbox.insert(
insertion_index,
Envelope {
@@ -85,7 +85,7 @@ impl<T: Clone, R: rand::Rng> Network<T, R> {
pub fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
let inbox = self.inboxes.get_mut(&receiver).unwrap();
- let count = self.rng.gen_range(0..inbox.len() + 1);
+ let count = self.rng.random_range(0..inbox.len() + 1);
inbox
.drain(0..count)
.map(|envelope| envelope.message)
@@ -497,8 +497,8 @@ mod tests {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(20);
- let initial_chars = (0..rng.gen_range(0..=100))
- .map(|_| rng.gen_range(b'a'..=b'z') as char)
+ let initial_chars = (0..rng.random_range(0..=100))
+ .map(|_| rng.random_range(b'a'..=b'z') as char)
.collect::<Vec<_>>();
log::info!("initial chars: {:?}", initial_chars);
@@ -517,11 +517,11 @@ mod tests {
break;
}
- let end = rng.gen_range(last_edit_end..=expected_chars.len());
- let start = rng.gen_range(last_edit_end..=end);
+ let end = rng.random_range(last_edit_end..=expected_chars.len());
+ let start = rng.random_range(last_edit_end..=end);
let old_len = end - start;
- let mut new_len = rng.gen_range(0..=3);
+ let mut new_len = rng.random_range(0..=3);
if start == end && new_len == 0 {
new_len += 1;
}
@@ -529,7 +529,7 @@ mod tests {
last_edit_end = start + new_len + 1;
let new_chars = (0..new_len)
- .map(|_| rng.gen_range(b'A'..=b'Z') as char)
+ .map(|_| rng.random_range(b'A'..=b'Z') as char)
.collect::<Vec<_>>();
log::info!(
" editing {:?}: {:?}",
@@ -36,14 +36,14 @@ fn test_random_edits(mut rng: StdRng) {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let reference_string_len = rng.gen_range(0..3);
+ let reference_string_len = rng.random_range(0..3);
let mut reference_string = RandomCharIter::new(&mut rng)
.take(reference_string_len)
.collect::<String>();
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), reference_string.clone());
LineEnding::normalize(&mut reference_string);
- buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200)));
let mut buffer_versions = Vec::new();
log::info!(
"buffer text {:?}, version: {:?}",
@@ -64,7 +64,7 @@ fn test_random_edits(mut rng: StdRng) {
buffer.version()
);
- if rng.gen_bool(0.25) {
+ if rng.random_bool(0.25) {
buffer.randomly_undo_redo(&mut rng);
reference_string = buffer.text();
log::info!(
@@ -82,7 +82,7 @@ fn test_random_edits(mut rng: StdRng) {
buffer.check_invariants();
- if rng.gen_bool(0.3) {
+ if rng.random_bool(0.3) {
buffer_versions.push((buffer.clone(), buffer.subscribe()));
}
}
@@ -112,8 +112,9 @@ fn test_random_edits(mut rng: StdRng) {
);
for _ in 0..5 {
- let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
- let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ let end_ix =
+ old_buffer.clip_offset(rng.random_range(0..=old_buffer.len()), Bias::Right);
+ let start_ix = old_buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left);
let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
let edits = buffer
@@ -731,7 +732,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
- let base_text_len = rng.gen_range(0..10);
+ let base_text_len = rng.random_range(0..10);
let base_text = RandomCharIter::new(&mut rng)
.take(base_text_len)
.collect::<String>();
@@ -741,7 +742,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
for i in 0..peers {
let mut buffer = Buffer::new(i as ReplicaId, BufferId::new(1).unwrap(), base_text.clone());
- buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
+ buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200));
buffers.push(buffer);
replica_ids.push(i as u16);
network.add_peer(i as u16);
@@ -751,10 +752,10 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
let mut mutation_count = operations;
loop {
- let replica_index = rng.gen_range(0..peers);
+ let replica_index = rng.random_range(0..peers);
let replica_id = replica_ids[replica_index];
let buffer = &mut buffers[replica_index];
- match rng.gen_range(0..=100) {
+ match rng.random_range(0..=100) {
0..=50 if mutation_count != 0 => {
let op = buffer.randomly_edit(&mut rng, 5).1;
network.broadcast(buffer.replica_id, vec![op]);
@@ -1818,8 +1818,8 @@ impl Buffer {
}
pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
- let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
- let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+ let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right);
+ let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right);
start..end
}
@@ -1841,7 +1841,7 @@ impl Buffer {
let range = self.random_byte_range(new_start, rng);
last_end = Some(range.end);
- let new_text_len = rng.gen_range(0..10);
+ let new_text_len = rng.random_range(0..10);
let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
edits.push((range, new_text.into()));
@@ -1877,7 +1877,7 @@ impl Buffer {
use rand::prelude::*;
let mut ops = Vec::new();
- for _ in 0..rng.gen_range(1..=5) {
+ for _ in 0..rng.random_range(1..=5) {
if let Some(entry) = self.history.undo_stack.choose(rng) {
let transaction = entry.transaction.clone();
log::info!(
@@ -16,7 +16,7 @@ struct FontFamilyCacheState {
/// so we do it once and then use the cached values each render.
#[derive(Default)]
pub struct FontFamilyCache {
- state: RwLock<FontFamilyCacheState>,
+ state: Arc<RwLock<FontFamilyCacheState>>,
}
#[derive(Default)]
@@ -52,4 +52,44 @@ impl FontFamilyCache {
lock.font_families.clone()
}
+
+ /// Returns the list of font families if they have been loaded
+ pub fn try_list_font_families(&self) -> Option<Vec<SharedString>> {
+ self.state
+ .try_read()
+ .filter(|state| state.loaded_at.is_some())
+ .map(|state| state.font_families.clone())
+ }
+
+ /// Prefetch all font names in the background
+ pub async fn prefetch(&self, cx: &gpui::AsyncApp) {
+ if self
+ .state
+ .try_read()
+ .is_none_or(|state| state.loaded_at.is_some())
+ {
+ return;
+ }
+
+ let Ok(text_system) = cx.update(|cx| App::text_system(cx).clone()) else {
+ return;
+ };
+
+ let state = self.state.clone();
+
+ cx.background_executor()
+ .spawn(async move {
+ // We take this lock in the background executor to ensure that synchronous calls to `list_font_families` are blocked while we are prefetching,
+ // while not blocking the main thread and risking deadlocks
+ let mut lock = state.write();
+ let all_font_names = text_system
+ .all_font_names()
+ .into_iter()
+ .map(SharedString::from)
+ .collect();
+ lock.font_families = all_font_names;
+ lock.loaded_at = Some(Instant::now());
+ })
+ .await;
+ }
}
@@ -398,7 +398,7 @@ static DEFAULT_ICON_THEME: LazyLock<Arc<IconTheme>> = LazyLock::new(|| {
},
file_stems: icon_keys_by_association(FILE_STEMS_BY_ICON_KEY),
file_suffixes: icon_keys_by_association(FILE_SUFFIXES_BY_ICON_KEY),
- file_icons: HashMap::from_iter(FILE_ICONS.into_iter().map(|(ty, path)| {
+ file_icons: HashMap::from_iter(FILE_ICONS.iter().map(|(ty, path)| {
(
ty.to_string(),
IconDefinition {
@@ -13,7 +13,7 @@ use gpui::{
use refineable::Refineable;
use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Serialize};
-use settings::{ParameterizedJsonSchema, Settings, SettingsSources};
+use settings::{ParameterizedJsonSchema, Settings, SettingsKey, SettingsSources, SettingsUi};
use std::sync::Arc;
use util::ResultExt as _;
use util::schemars::replace_subschema;
@@ -253,8 +253,9 @@ pub(crate) struct UiFontSize(Pixels);
impl Global for UiFontSize {}
+/// In-memory override for the font size in the agent panel.
#[derive(Default)]
-pub(crate) struct AgentFontSize(Pixels);
+pub struct AgentFontSize(Pixels);
impl Global for AgentFontSize {}
@@ -365,7 +366,8 @@ impl IconThemeSelection {
}
/// Settings for rendering text in UI and text buffers.
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct ThemeSettingsContent {
/// The default font size for text in the UI.
#[serde(default)]
@@ -817,8 +819,6 @@ fn clamp_font_weight(weight: f32) -> FontWeight {
}
impl settings::Settings for ThemeSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = ThemeSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, cx: &mut App) -> Result<Self> {
@@ -257,9 +257,9 @@ pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFam
let author = theme_family_content.author.clone();
let mut theme_family = ThemeFamily {
- id: id.clone(),
- name: name.clone().into(),
- author: author.clone().into(),
+ id,
+ name: name.into(),
+ author: author.into(),
themes: vec![],
scales: default_color_scales(),
};
@@ -158,7 +158,7 @@ impl VsCodeThemeConverter {
.tab
.active_background
.clone()
- .or(vscode_tab_inactive_background.clone()),
+ .or(vscode_tab_inactive_background),
search_match_background: vscode_colors.editor.find_match_background.clone(),
panel_background: vscode_colors.panel.background.clone(),
pane_group_border: vscode_colors.editor_group.border.clone(),
@@ -171,22 +171,20 @@ impl VsCodeThemeConverter {
.scrollbar_slider
.active_background
.clone(),
- scrollbar_thumb_border: vscode_scrollbar_slider_background.clone(),
+ scrollbar_thumb_border: vscode_scrollbar_slider_background,
scrollbar_track_background: vscode_editor_background.clone(),
scrollbar_track_border: vscode_colors.editor_overview_ruler.border.clone(),
minimap_thumb_background: vscode_colors.minimap_slider.background.clone(),
minimap_thumb_hover_background: vscode_colors.minimap_slider.hover_background.clone(),
minimap_thumb_active_background: vscode_colors.minimap_slider.active_background.clone(),
- editor_foreground: vscode_editor_foreground
- .clone()
- .or(vscode_token_colors_foreground.clone()),
+ editor_foreground: vscode_editor_foreground.or(vscode_token_colors_foreground),
editor_background: vscode_editor_background.clone(),
- editor_gutter_background: vscode_editor_background.clone(),
+ editor_gutter_background: vscode_editor_background,
editor_active_line_background: vscode_colors.editor.line_highlight_background.clone(),
editor_line_number: vscode_colors.editor_line_number.foreground.clone(),
editor_active_line_number: vscode_colors.editor.foreground.clone(),
editor_wrap_guide: vscode_panel_border.clone(),
- editor_active_wrap_guide: vscode_panel_border.clone(),
+ editor_active_wrap_guide: vscode_panel_border,
editor_document_highlight_bracket_background: vscode_colors
.editor_bracket_match
.background
@@ -42,7 +42,7 @@ rpc.workspace = true
schemars.workspace = true
serde.workspace = true
settings.workspace = true
-settings_ui.workspace = true
+keymap_editor.workspace = true
smallvec.workspace = true
story = { workspace = true, optional = true }
telemetry.workspace = true
@@ -186,7 +186,7 @@ impl ApplicationMenu {
.trigger(
Button::new(
SharedString::from(format!("{}-menu-trigger", menu_name)),
- menu_name.clone(),
+ menu_name,
)
.style(ButtonStyle::Subtle)
.label_size(LabelSize::Small),
@@ -41,7 +41,8 @@ fn toggle_screen_sharing(
let Some(room) = call.room().cloned() else {
return;
};
- let toggle_screen_sharing = room.update(cx, |room, cx| {
+
+ room.update(cx, |room, cx| {
let clicked_on_currently_shared_screen =
room.shared_screen_id().is_some_and(|screen_id| {
Some(screen_id)
@@ -78,8 +79,7 @@ fn toggle_screen_sharing(
} else {
Task::ready(Ok(()))
}
- });
- toggle_screen_sharing
+ })
}
Err(e) => Task::ready(Err(e)),
};
@@ -155,7 +155,7 @@ impl TitleBar {
.gap_1()
.overflow_x_scroll()
.when_some(
- current_user.clone().zip(client.peer_id()).zip(room.clone()),
+ current_user.zip(client.peer_id()).zip(room),
|this, ((current_user, peer_id), room)| {
let player_colors = cx.theme().players();
let room = room.read(cx);
@@ -337,7 +337,7 @@ impl TitleBar {
let room = room.read(cx);
let project = self.project.read(cx);
- let is_local = project.is_local() || project.is_via_ssh();
+ let is_local = project.is_local() || project.is_via_remote_server();
let is_shared = is_local && project.is_shared();
let is_muted = room.is_muted();
let muted_by_user = room.muted_by_user();
@@ -7,6 +7,7 @@ pub struct OnboardingBanner {
dismissed: bool,
source: String,
details: BannerDetails,
+ visible_when: Option<Box<dyn Fn(&mut App) -> bool>>,
}
#[derive(Clone)]
@@ -42,12 +43,18 @@ impl OnboardingBanner {
label: label.into(),
subtitle: subtitle.or(Some(SharedString::from("Introducing:"))),
},
+ visible_when: None,
dismissed: get_dismissed(source),
}
}
- fn should_show(&self, _cx: &mut App) -> bool {
- !self.dismissed
+ pub fn visible_when(mut self, predicate: impl Fn(&mut App) -> bool + 'static) -> Self {
+ self.visible_when = Some(Box::new(predicate));
+ self
+ }
+
+ fn should_show(&self, cx: &mut App) -> bool {
+ !self.dismissed && self.visible_when.as_ref().map_or(true, |f| f(cx))
}
fn dismiss(&mut self, cx: &mut Context<Self>) {
@@ -119,7 +126,7 @@ impl Render for OnboardingBanner {
h_flex()
.h_full()
.gap_1()
- .child(Icon::new(self.details.icon_name).size(IconSize::Small))
+ .child(Icon::new(self.details.icon_name).size(IconSize::XSmall))
.child(
h_flex()
.gap_0p5()
@@ -1,28 +1,35 @@
use gpui::{
- AnyElement, Context, Decorations, Hsla, InteractiveElement, IntoElement, MouseButton,
+ AnyElement, Context, Decorations, Entity, Hsla, InteractiveElement, IntoElement, MouseButton,
ParentElement, Pixels, StatefulInteractiveElement, Styled, Window, WindowControlArea, div, px,
};
use smallvec::SmallVec;
use std::mem;
use ui::prelude::*;
-use crate::platforms::{platform_linux, platform_mac, platform_windows};
+use crate::{
+ platforms::{platform_linux, platform_mac, platform_windows},
+ system_window_tabs::SystemWindowTabs,
+};
pub struct PlatformTitleBar {
id: ElementId,
platform_style: PlatformStyle,
children: SmallVec<[AnyElement; 2]>,
should_move: bool,
+ system_window_tabs: Entity<SystemWindowTabs>,
}
impl PlatformTitleBar {
- pub fn new(id: impl Into<ElementId>) -> Self {
+ pub fn new(id: impl Into<ElementId>, cx: &mut Context<Self>) -> Self {
let platform_style = PlatformStyle::platform();
+ let system_window_tabs = cx.new(|_cx| SystemWindowTabs::new());
+
Self {
id: id.into(),
platform_style,
children: SmallVec::new(),
should_move: false,
+ system_window_tabs,
}
}
@@ -66,7 +73,7 @@ impl Render for PlatformTitleBar {
let close_action = Box::new(workspace::CloseWindow);
let children = mem::take(&mut self.children);
- h_flex()
+ let title_bar = h_flex()
.window_control_area(WindowControlArea::Drag)
.w_full()
.h(height)
@@ -162,7 +169,12 @@ impl Render for PlatformTitleBar {
title_bar.child(platform_windows::WindowsWindowControls::new(height))
}
}
- })
+ });
+
+ v_flex()
+ .w_full()
+ .child(title_bar)
+ .child(self.system_window_tabs.clone().into_any_element())
}
}
@@ -0,0 +1,520 @@
+use settings::{Settings, SettingsStore};
+
+use gpui::{
+ AnyWindowHandle, Context, Hsla, InteractiveElement, MouseButton, ParentElement, ScrollHandle,
+ Styled, SystemWindowTab, SystemWindowTabController, Window, WindowId, actions, canvas, div,
+};
+
+use theme::ThemeSettings;
+use ui::{
+ Color, ContextMenu, DynamicSpacing, IconButton, IconButtonShape, IconName, IconSize, Label,
+ LabelSize, Tab, h_flex, prelude::*, right_click_menu,
+};
+use workspace::{
+ CloseWindow, ItemSettings, Workspace, WorkspaceSettings,
+ item::{ClosePosition, ShowCloseButton},
+};
+
+actions!(
+ window,
+ [
+ ShowNextWindowTab,
+ ShowPreviousWindowTab,
+ MergeAllWindows,
+ MoveTabToNewWindow
+ ]
+);
+
+#[derive(Clone)]
+pub struct DraggedWindowTab {
+ pub id: WindowId,
+ pub ix: usize,
+ pub handle: AnyWindowHandle,
+ pub title: String,
+ pub width: Pixels,
+ pub is_active: bool,
+ pub active_background_color: Hsla,
+ pub inactive_background_color: Hsla,
+}
+
+pub struct SystemWindowTabs {
+ tab_bar_scroll_handle: ScrollHandle,
+ measured_tab_width: Pixels,
+ last_dragged_tab: Option<DraggedWindowTab>,
+}
+
+impl SystemWindowTabs {
+ pub fn new() -> Self {
+ Self {
+ tab_bar_scroll_handle: ScrollHandle::new(),
+ measured_tab_width: px(0.),
+ last_dragged_tab: None,
+ }
+ }
+
+ pub fn init(cx: &mut App) {
+ let mut was_use_system_window_tabs =
+ WorkspaceSettings::get_global(cx).use_system_window_tabs;
+
+ cx.observe_global::<SettingsStore>(move |cx| {
+ let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs;
+ if use_system_window_tabs == was_use_system_window_tabs {
+ return;
+ }
+ was_use_system_window_tabs = use_system_window_tabs;
+
+ let tabbing_identifier = if use_system_window_tabs {
+ Some(String::from("zed"))
+ } else {
+ None
+ };
+
+ if use_system_window_tabs {
+ SystemWindowTabController::init(cx);
+ }
+
+ cx.windows().iter().for_each(|handle| {
+ let _ = handle.update(cx, |_, window, cx| {
+ window.set_tabbing_identifier(tabbing_identifier.clone());
+ if use_system_window_tabs {
+ let tabs = if let Some(tabs) = window.tabbed_windows() {
+ tabs
+ } else {
+ vec![SystemWindowTab::new(
+ SharedString::from(window.window_title()),
+ window.window_handle(),
+ )]
+ };
+
+ SystemWindowTabController::add_tab(cx, handle.window_id(), tabs);
+ }
+ });
+ });
+ })
+ .detach();
+
+ cx.observe_new(|workspace: &mut Workspace, _, _| {
+ workspace.register_action_renderer(|div, _, window, cx| {
+ let window_id = window.window_handle().window_id();
+ let controller = cx.global::<SystemWindowTabController>();
+
+ let tab_groups = controller.tab_groups();
+ let tabs = controller.tabs(window_id);
+ let Some(tabs) = tabs else {
+ return div;
+ };
+
+ div.when(tabs.len() > 1, |div| {
+ div.on_action(move |_: &ShowNextWindowTab, window, cx| {
+ SystemWindowTabController::select_next_tab(
+ cx,
+ window.window_handle().window_id(),
+ );
+ })
+ .on_action(move |_: &ShowPreviousWindowTab, window, cx| {
+ SystemWindowTabController::select_previous_tab(
+ cx,
+ window.window_handle().window_id(),
+ );
+ })
+ .on_action(move |_: &MoveTabToNewWindow, window, cx| {
+ SystemWindowTabController::move_tab_to_new_window(
+ cx,
+ window.window_handle().window_id(),
+ );
+ window.move_tab_to_new_window();
+ })
+ })
+ .when(tab_groups.len() > 1, |div| {
+ div.on_action(move |_: &MergeAllWindows, window, cx| {
+ SystemWindowTabController::merge_all_windows(
+ cx,
+ window.window_handle().window_id(),
+ );
+ window.merge_all_windows();
+ })
+ })
+ });
+ })
+ .detach();
+ }
+
+ fn render_tab(
+ &self,
+ ix: usize,
+ item: SystemWindowTab,
+ tabs: Vec<SystemWindowTab>,
+ active_background_color: Hsla,
+ inactive_background_color: Hsla,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement + use<> {
+ let entity = cx.entity();
+ let settings = ItemSettings::get_global(cx);
+ let close_side = &settings.close_position;
+ let show_close_button = &settings.show_close_button;
+
+ let rem_size = window.rem_size();
+ let width = self.measured_tab_width.max(rem_size * 10);
+ let is_active = window.window_handle().window_id() == item.id;
+ let title = item.title.to_string();
+
+ let label = Label::new(&title)
+ .size(LabelSize::Small)
+ .truncate()
+ .color(if is_active {
+ Color::Default
+ } else {
+ Color::Muted
+ });
+
+ let tab = h_flex()
+ .id(ix)
+ .group("tab")
+ .w_full()
+ .overflow_hidden()
+ .h(Tab::content_height(cx))
+ .relative()
+ .px(DynamicSpacing::Base16.px(cx))
+ .justify_center()
+ .border_l_1()
+ .border_color(cx.theme().colors().border)
+ .cursor_pointer()
+ .on_drag(
+ DraggedWindowTab {
+ id: item.id,
+ ix,
+ handle: item.handle,
+ title: item.title.to_string(),
+ width,
+ is_active,
+ active_background_color,
+ inactive_background_color,
+ },
+ move |tab, _, _, cx| {
+ entity.update(cx, |this, _cx| {
+ this.last_dragged_tab = Some(tab.clone());
+ });
+ cx.new(|_| tab.clone())
+ },
+ )
+ .drag_over::<DraggedWindowTab>({
+ let tab_ix = ix;
+ move |element, dragged_tab: &DraggedWindowTab, _, cx| {
+ let mut styled_tab = element
+ .bg(cx.theme().colors().drop_target_background)
+ .border_color(cx.theme().colors().drop_target_border)
+ .border_0();
+
+ if tab_ix < dragged_tab.ix {
+ styled_tab = styled_tab.border_l_2();
+ } else if tab_ix > dragged_tab.ix {
+ styled_tab = styled_tab.border_r_2();
+ }
+
+ styled_tab
+ }
+ })
+ .on_drop({
+ let tab_ix = ix;
+ cx.listener(move |this, dragged_tab: &DraggedWindowTab, _window, cx| {
+ this.last_dragged_tab = None;
+ Self::handle_tab_drop(dragged_tab, tab_ix, cx);
+ })
+ })
+ .on_click(move |_, _, cx| {
+ let _ = item.handle.update(cx, |_, window, _| {
+ window.activate_window();
+ });
+ })
+ .child(label)
+ .map(|this| match show_close_button {
+ ShowCloseButton::Hidden => this,
+ _ => this.child(
+ div()
+ .absolute()
+ .top_2()
+ .w_4()
+ .h_4()
+ .map(|this| match close_side {
+ ClosePosition::Left => this.left_1(),
+ ClosePosition::Right => this.right_1(),
+ })
+ .child(
+ IconButton::new("close", IconName::Close)
+ .shape(IconButtonShape::Square)
+ .icon_color(Color::Muted)
+ .icon_size(IconSize::XSmall)
+ .on_click({
+ move |_, window, cx| {
+ if item.handle.window_id()
+ == window.window_handle().window_id()
+ {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ } else {
+ let _ = item.handle.update(cx, |_, window, cx| {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ });
+ }
+ }
+ })
+ .map(|this| match show_close_button {
+ ShowCloseButton::Hover => this.visible_on_hover("tab"),
+ _ => this,
+ }),
+ ),
+ ),
+ })
+ .into_any();
+
+ let menu = right_click_menu(ix)
+ .trigger(|_, _, _| tab)
+ .menu(move |window, cx| {
+ let focus_handle = cx.focus_handle();
+ let tabs = tabs.clone();
+ let other_tabs = tabs.clone();
+ let move_tabs = tabs.clone();
+ let merge_tabs = tabs.clone();
+
+ ContextMenu::build(window, cx, move |mut menu, _window_, _cx| {
+ menu = menu.entry("Close Tab", None, move |window, cx| {
+ Self::handle_right_click_action(
+ cx,
+ window,
+ &tabs,
+ |tab| tab.id == item.id,
+ |window, cx| {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ },
+ );
+ });
+
+ menu = menu.entry("Close Other Tabs", None, move |window, cx| {
+ Self::handle_right_click_action(
+ cx,
+ window,
+ &other_tabs,
+ |tab| tab.id != item.id,
+ |window, cx| {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ },
+ );
+ });
+
+ menu = menu.entry("Move Tab to New Window", None, move |window, cx| {
+ Self::handle_right_click_action(
+ cx,
+ window,
+ &move_tabs,
+ |tab| tab.id == item.id,
+ |window, cx| {
+ SystemWindowTabController::move_tab_to_new_window(
+ cx,
+ window.window_handle().window_id(),
+ );
+ window.move_tab_to_new_window();
+ },
+ );
+ });
+
+ menu = menu.entry("Show All Tabs", None, move |window, cx| {
+ Self::handle_right_click_action(
+ cx,
+ window,
+ &merge_tabs,
+ |tab| tab.id == item.id,
+ |window, _cx| {
+ window.toggle_window_tab_overview();
+ },
+ );
+ });
+
+ menu.context(focus_handle)
+ })
+ });
+
+ div()
+ .flex_1()
+ .min_w(rem_size * 10)
+ .when(is_active, |this| this.bg(active_background_color))
+ .border_t_1()
+ .border_color(if is_active {
+ active_background_color
+ } else {
+ cx.theme().colors().border
+ })
+ .child(menu)
+ }
+
+ fn handle_tab_drop(dragged_tab: &DraggedWindowTab, ix: usize, cx: &mut Context<Self>) {
+ SystemWindowTabController::update_tab_position(cx, dragged_tab.id, ix);
+ }
+
+ fn handle_right_click_action<F, P>(
+ cx: &mut App,
+ window: &mut Window,
+ tabs: &Vec<SystemWindowTab>,
+ predicate: P,
+ mut action: F,
+ ) where
+ P: Fn(&SystemWindowTab) -> bool,
+ F: FnMut(&mut Window, &mut App),
+ {
+ for tab in tabs {
+ if predicate(tab) {
+ if tab.id == window.window_handle().window_id() {
+ action(window, cx);
+ } else {
+ let _ = tab.handle.update(cx, |_view, window, cx| {
+ action(window, cx);
+ });
+ }
+ }
+ }
+ }
+}
+
+impl Render for SystemWindowTabs {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs;
+ let active_background_color = cx.theme().colors().title_bar_background;
+ let inactive_background_color = cx.theme().colors().tab_bar_background;
+ let entity = cx.entity();
+
+ let controller = cx.global::<SystemWindowTabController>();
+ let visible = controller.is_visible();
+ let current_window_tab = vec![SystemWindowTab::new(
+ SharedString::from(window.window_title()),
+ window.window_handle(),
+ )];
+ let tabs = controller
+ .tabs(window.window_handle().window_id())
+ .unwrap_or(¤t_window_tab)
+ .clone();
+
+ let tab_items = tabs
+ .iter()
+ .enumerate()
+ .map(|(ix, item)| {
+ self.render_tab(
+ ix,
+ item.clone(),
+ tabs.clone(),
+ active_background_color,
+ inactive_background_color,
+ window,
+ cx,
+ )
+ })
+ .collect::<Vec<_>>();
+
+ let number_of_tabs = tab_items.len().max(1);
+ if (!window.tab_bar_visible() && !visible)
+ || (!use_system_window_tabs && number_of_tabs == 1)
+ {
+ return h_flex().into_any_element();
+ }
+
+ h_flex()
+ .w_full()
+ .h(Tab::container_height(cx))
+ .bg(inactive_background_color)
+ .on_mouse_up_out(
+ MouseButton::Left,
+ cx.listener(|this, _event, window, cx| {
+ if let Some(tab) = this.last_dragged_tab.take() {
+ SystemWindowTabController::move_tab_to_new_window(cx, tab.id);
+ if tab.id == window.window_handle().window_id() {
+ window.move_tab_to_new_window();
+ } else {
+ let _ = tab.handle.update(cx, |_, window, _cx| {
+ window.move_tab_to_new_window();
+ });
+ }
+ }
+ }),
+ )
+ .child(
+ h_flex()
+ .id("window tabs")
+ .w_full()
+ .h(Tab::container_height(cx))
+ .bg(inactive_background_color)
+ .overflow_x_scroll()
+ .track_scroll(&self.tab_bar_scroll_handle)
+ .children(tab_items)
+ .child(
+ canvas(
+ |_, _, _| (),
+ move |bounds, _, _, cx| {
+ let entity = entity.clone();
+ entity.update(cx, |this, cx| {
+ let width = bounds.size.width / number_of_tabs as f32;
+ if width != this.measured_tab_width {
+ this.measured_tab_width = width;
+ cx.notify();
+ }
+ });
+ },
+ )
+ .absolute()
+ .size_full(),
+ ),
+ )
+ .child(
+ h_flex()
+ .h_full()
+ .px(DynamicSpacing::Base06.rems(cx))
+ .border_t_1()
+ .border_l_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ IconButton::new("plus", IconName::Plus)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .on_click(|_event, window, cx| {
+ window.dispatch_action(
+ Box::new(zed_actions::OpenRecent {
+ create_new_window: true,
+ }),
+ cx,
+ );
+ }),
+ ),
+ )
+ .into_any_element()
+ }
+}
+
+impl Render for DraggedWindowTab {
+ fn render(
+ &mut self,
+ _window: &mut gpui::Window,
+ cx: &mut gpui::Context<Self>,
+ ) -> impl gpui::IntoElement {
+ let ui_font = ThemeSettings::get_global(cx).ui_font.clone();
+ let label = Label::new(self.title.clone())
+ .size(LabelSize::Small)
+ .truncate()
+ .color(if self.is_active {
+ Color::Default
+ } else {
+ Color::Muted
+ });
+
+ h_flex()
+ .h(Tab::container_height(cx))
+ .w(self.width)
+ .px(DynamicSpacing::Base16.px(cx))
+ .justify_center()
+ .bg(if self.is_active {
+ self.active_background_color
+ } else {
+ self.inactive_background_color
+ })
+ .border_1()
+ .border_color(cx.theme().colors().border)
+ .font(ui_font)
+ .child(label)
+ }
+}
@@ -3,6 +3,7 @@ mod collab;
mod onboarding_banner;
pub mod platform_title_bar;
mod platforms;
+mod system_window_tabs;
mod title_bar_settings;
#[cfg(feature = "stories")]
@@ -11,6 +12,7 @@ mod stories;
use crate::{
application_menu::{ApplicationMenu, show_menus},
platform_title_bar::PlatformTitleBar,
+ system_window_tabs::SystemWindowTabs,
};
#[cfg(not(target_os = "macos"))]
@@ -27,10 +29,11 @@ use gpui::{
IntoElement, MouseButton, ParentElement, Render, StatefulInteractiveElement, Styled,
Subscription, WeakEntity, Window, actions, div,
};
+use keymap_editor;
use onboarding_banner::OnboardingBanner;
use project::Project;
+use remote::RemoteConnectionOptions;
use settings::Settings as _;
-use settings_ui::keybindings;
use std::sync::Arc;
use theme::ActiveTheme;
use title_bar_settings::TitleBarSettings;
@@ -65,6 +68,7 @@ actions!(
pub fn init(cx: &mut App) {
TitleBarSettings::register(cx);
+ SystemWindowTabs::init(cx);
cx.observe_new(|workspace: &mut Workspace, window, cx| {
let Some(window) = window else {
@@ -275,16 +279,18 @@ impl TitleBar {
let banner = cx.new(|cx| {
OnboardingBanner::new(
- "Debugger Onboarding",
- IconName::Debug,
- "The Debugger",
- None,
- zed_actions::debugger::OpenOnboardingModal.boxed_clone(),
+ "ACP Claude Code Onboarding",
+ IconName::AiClaude,
+ "Claude Code",
+ Some("Introducing:".into()),
+ zed_actions::agent::OpenClaudeCodeOnboardingModal.boxed_clone(),
cx,
)
+ // When updating this to a non-AI feature release, remove this line.
+ .visible_when(|cx| !project::DisableAiSettings::get_global(cx).disable_ai)
});
- let platform_titlebar = cx.new(|_| PlatformTitleBar::new(id));
+ let platform_titlebar = cx.new(|cx| PlatformTitleBar::new(id, cx));
Self {
platform_titlebar,
@@ -299,17 +305,18 @@ impl TitleBar {
}
}
- fn render_ssh_project_host(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
- let options = self.project.read(cx).ssh_connection_options(cx)?;
- let host: SharedString = options.connection_string().into();
+ fn render_remote_project_connection(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
+ let options = self.project.read(cx).remote_connection_options(cx)?;
+ let host: SharedString = options.display_name().into();
- let nickname = options
- .nickname
- .clone()
- .map(|nick| nick.into())
- .unwrap_or_else(|| host.clone());
+ let nickname = if let RemoteConnectionOptions::Ssh(options) = options {
+ options.nickname.map(|nick| nick.into())
+ } else {
+ None
+ };
+ let nickname = nickname.unwrap_or_else(|| host.clone());
- let (indicator_color, meta) = match self.project.read(cx).ssh_connection_state(cx)? {
+ let (indicator_color, meta) = match self.project.read(cx).remote_connection_state(cx)? {
remote::ConnectionState::Connecting => (Color::Info, format!("Connecting to: {host}")),
remote::ConnectionState::Connected => (Color::Success, format!("Connected to: {host}")),
remote::ConnectionState::HeartbeatMissed => (
@@ -325,7 +332,7 @@ impl TitleBar {
}
};
- let icon_color = match self.project.read(cx).ssh_connection_state(cx)? {
+ let icon_color = match self.project.read(cx).remote_connection_state(cx)? {
remote::ConnectionState::Connecting => Color::Info,
remote::ConnectionState::Connected => Color::Default,
remote::ConnectionState::HeartbeatMissed => Color::Warning,
@@ -351,11 +358,7 @@ impl TitleBar {
.indicator_border_color(Some(cx.theme().colors().title_bar_background))
.into_any_element(),
)
- .child(
- Label::new(nickname.clone())
- .size(LabelSize::Small)
- .truncate(),
- ),
+ .child(Label::new(nickname).size(LabelSize::Small).truncate()),
)
.tooltip(move |window, cx| {
Tooltip::with_meta(
@@ -384,8 +387,8 @@ impl TitleBar {
}
pub fn render_project_host(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
- if self.project.read(cx).is_via_ssh() {
- return self.render_ssh_project_host(cx);
+ if self.project.read(cx).is_via_remote_server() {
+ return self.render_remote_project_connection(cx);
}
if self.project.read(cx).is_disconnected(cx) {
@@ -568,8 +571,8 @@ impl TitleBar {
match status {
client::Status::ConnectionError
| client::Status::ConnectionLost
- | client::Status::Reauthenticating { .. }
- | client::Status::Reconnecting { .. }
+ | client::Status::Reauthenticating
+ | client::Status::Reconnecting
| client::Status::ReconnectionError { .. } => Some(
div()
.id("disconnected")
@@ -686,7 +689,7 @@ impl TitleBar {
"Settings Profiles",
zed_actions::settings_profile_selector::Toggle.boxed_clone(),
)
- .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor))
+ .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor))
.action(
"Themes…",
zed_actions::theme_selector::Toggle::default().boxed_clone(),
@@ -734,7 +737,7 @@ impl TitleBar {
"Settings Profiles",
zed_actions::settings_profile_selector::Toggle.boxed_clone(),
)
- .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor))
+ .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor))
.action(
"Themes…",
zed_actions::theme_selector::Toggle::default().boxed_clone(),
@@ -1,7 +1,7 @@
use db::anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Copy, Clone, Deserialize, Debug)]
pub struct TitleBarSettings {
@@ -14,7 +14,11 @@ pub struct TitleBarSettings {
pub show_menus: bool,
}
-#[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(
+ Copy, Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey,
+)]
+#[settings_ui(group = "Title Bar")]
+#[settings_key(key = "title_bar")]
pub struct TitleBarSettingsContent {
/// Whether to show the branch icon beside branch switcher in the title bar.
///
@@ -47,8 +51,6 @@ pub struct TitleBarSettingsContent {
}
impl Settings for TitleBarSettings {
- const KEY: Option<&'static str> = Some("title_bar");
-
type FileContent = TitleBarSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut gpui::App) -> anyhow::Result<Self>
@@ -6,10 +6,15 @@ publish.workspace = true
license = "GPL-3.0-or-later"
[dependencies]
+anyhow.workspace = true
+convert_case.workspace = true
editor.workspace = true
+file_finder.workspace = true
+futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
+menu.workspace = true
picker.workspace = true
project.workspace = true
ui.workspace = true
@@ -5,8 +5,8 @@ use gpui::{
AsyncWindowContext, Context, Entity, IntoElement, ParentElement, Render, Subscription, Task,
WeakEntity, Window, div,
};
-use language::{Buffer, BufferEvent, LanguageName, Toolchain};
-use project::{Project, ProjectPath, WorktreeId, toolchain_store::ToolchainStoreEvent};
+use language::{Buffer, BufferEvent, LanguageName, Toolchain, ToolchainScope};
+use project::{Project, ProjectPath, Toolchains, WorktreeId, toolchain_store::ToolchainStoreEvent};
use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, SharedString, Tooltip};
use util::maybe;
use workspace::{StatusItemView, Workspace, item::ItemHandle};
@@ -38,7 +38,6 @@ impl ActiveToolchain {
.ok()
.flatten();
if let Some(editor) = editor {
- this.active_toolchain.take();
this.update_lister(editor, window, cx);
}
},
@@ -70,15 +69,15 @@ impl ActiveToolchain {
.read_with(cx, |this, _| Some(this.language()?.name()))
.ok()
.flatten()?;
- let term = workspace
+ let meta = workspace
.update(cx, |workspace, cx| {
let languages = workspace.project().read(cx).languages();
- Project::toolchain_term(languages.clone(), language_name.clone())
+ Project::toolchain_metadata(languages.clone(), language_name.clone())
})
.ok()?
.await?;
let _ = this.update(cx, |this, cx| {
- this.term = term;
+ this.term = meta.term;
cx.notify();
});
let (worktree_id, path) = active_file
@@ -124,16 +123,6 @@ impl ActiveToolchain {
if let Some((_, buffer, _)) = editor.active_excerpt(cx)
&& let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx))
{
- if self
- .active_buffer
- .as_ref()
- .is_some_and(|(old_worktree_id, old_buffer, _)| {
- (old_worktree_id, old_buffer.entity_id()) == (&worktree_id, buffer.entity_id())
- })
- {
- return;
- }
-
let subscription = cx.subscribe_in(
&buffer,
window,
@@ -181,7 +170,11 @@ impl ActiveToolchain {
let project = workspace
.read_with(cx, |this, _| this.project().clone())
.ok()?;
- let (toolchains, relative_path) = cx
+ let Toolchains {
+ toolchains,
+ root_path: relative_path,
+ user_toolchains,
+ } = cx
.update(|_, cx| {
project.read(cx).available_toolchains(
ProjectPath {
@@ -194,8 +187,20 @@ impl ActiveToolchain {
})
.ok()?
.await?;
- if let Some(toolchain) = toolchains.toolchains.first() {
- // Since we don't have a selected toolchain, pick one for user here.
+ // Since we don't have a selected toolchain, pick one for user here.
+ let default_choice = user_toolchains
+ .iter()
+ .find_map(|(scope, toolchains)| {
+ if scope == &ToolchainScope::Global {
+ // Ignore global toolchains when making a default choice. They're unlikely to be the right choice.
+ None
+ } else {
+ toolchains.first()
+ }
+ })
+ .or_else(|| toolchains.toolchains.first())
+ .cloned();
+ if let Some(toolchain) = &default_choice {
workspace::WORKSPACE_DB
.set_toolchain(
workspace_id,
@@ -220,7 +225,7 @@ impl ActiveToolchain {
.await;
}
- toolchains.toolchains.first().cloned()
+ default_choice
}
})
}
@@ -1,25 +1,39 @@
mod active_toolchain;
pub use active_toolchain::ActiveToolchain;
+use convert_case::Casing as _;
use editor::Editor;
+use file_finder::OpenPathDelegate;
+use futures::channel::oneshot;
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
use gpui::{
- App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ParentElement,
- Render, Styled, Task, WeakEntity, Window, actions,
+ Action, Animation, AnimationExt, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle,
+ Focusable, KeyContext, ParentElement, Render, Styled, Subscription, Task, WeakEntity, Window,
+ actions, pulsating_between,
};
-use language::{LanguageName, Toolchain, ToolchainList};
+use language::{Language, LanguageName, Toolchain, ToolchainScope};
use picker::{Picker, PickerDelegate};
-use project::{Project, ProjectPath, WorktreeId};
-use std::{borrow::Cow, path::Path, sync::Arc};
-use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*};
-use util::ResultExt;
+use project::{DirectoryLister, Project, ProjectPath, Toolchains, WorktreeId};
+use std::{
+ borrow::Cow,
+ path::{Path, PathBuf},
+ sync::Arc,
+ time::Duration,
+};
+use ui::{
+ Divider, HighlightedLabel, KeyBinding, List, ListItem, ListItemSpacing, Navigable,
+ NavigableEntry, prelude::*,
+};
+use util::{ResultExt, maybe, paths::PathStyle};
use workspace::{ModalView, Workspace};
actions!(
toolchain,
[
/// Selects a toolchain for the current project.
- Select
+ Select,
+ /// Adds a new toolchain for the current project.
+ AddToolchain
]
);
@@ -28,9 +42,513 @@ pub fn init(cx: &mut App) {
}
pub struct ToolchainSelector {
+ state: State,
+ create_search_state: Arc<dyn Fn(&mut Window, &mut Context<Self>) -> SearchState + 'static>,
+ language: Option<Arc<Language>>,
+ project: Entity<Project>,
+ language_name: LanguageName,
+ worktree_id: WorktreeId,
+ relative_path: Arc<Path>,
+}
+
+#[derive(Clone)]
+struct SearchState {
picker: Entity<Picker<ToolchainSelectorDelegate>>,
}
+struct AddToolchainState {
+ state: AddState,
+ project: Entity<Project>,
+ language_name: LanguageName,
+ root_path: ProjectPath,
+ weak: WeakEntity<ToolchainSelector>,
+}
+
+struct ScopePickerState {
+ entries: [NavigableEntry; 3],
+ selected_scope: ToolchainScope,
+}
+
+#[expect(
+ dead_code,
+ reason = "These tasks have to be kept alive to run to completion"
+)]
+enum PathInputState {
+ WaitingForPath(Task<()>),
+ Resolving(Task<()>),
+}
+
+enum AddState {
+ Path {
+ picker: Entity<Picker<file_finder::OpenPathDelegate>>,
+ error: Option<Arc<str>>,
+ input_state: PathInputState,
+ _subscription: Subscription,
+ },
+ Name {
+ toolchain: Toolchain,
+ editor: Entity<Editor>,
+ scope_picker: ScopePickerState,
+ },
+}
+
+impl AddToolchainState {
+ fn new(
+ project: Entity<Project>,
+ language_name: LanguageName,
+ root_path: ProjectPath,
+ window: &mut Window,
+ cx: &mut Context<ToolchainSelector>,
+ ) -> Entity<Self> {
+ let weak = cx.weak_entity();
+
+ cx.new(|cx| {
+ let (lister, rx) = Self::create_path_browser_delegate(project.clone(), cx);
+ let picker = cx.new(|cx| Picker::uniform_list(lister, window, cx));
+ Self {
+ state: AddState::Path {
+ _subscription: cx.subscribe(&picker, |_, _, _: &DismissEvent, cx| {
+ cx.stop_propagation();
+ }),
+ picker,
+ error: None,
+ input_state: Self::wait_for_path(rx, window, cx),
+ },
+ project,
+ language_name,
+ root_path,
+ weak,
+ }
+ })
+ }
+
+ fn create_path_browser_delegate(
+ project: Entity<Project>,
+ cx: &mut Context<Self>,
+ ) -> (OpenPathDelegate, oneshot::Receiver<Option<Vec<PathBuf>>>) {
+ let (tx, rx) = oneshot::channel();
+ let weak = cx.weak_entity();
+ let lister = OpenPathDelegate::new(
+ tx,
+ DirectoryLister::Project(project),
+ false,
+ PathStyle::current(),
+ )
+ .show_hidden()
+ .with_footer(Arc::new(move |_, cx| {
+ let error = weak
+ .read_with(cx, |this, _| {
+ if let AddState::Path { error, .. } = &this.state {
+ error.clone()
+ } else {
+ None
+ }
+ })
+ .ok()
+ .flatten();
+ let is_loading = weak
+ .read_with(cx, |this, _| {
+ matches!(
+ this.state,
+ AddState::Path {
+ input_state: PathInputState::Resolving(_),
+ ..
+ }
+ )
+ })
+ .unwrap_or_default();
+ Some(
+ v_flex()
+ .child(Divider::horizontal())
+ .child(
+ h_flex()
+ .p_1()
+ .justify_between()
+ .gap_2()
+ .child(Label::new("Select Toolchain Path").color(Color::Muted).map(
+ |this| {
+ if is_loading {
+ this.with_animation(
+ "select-toolchain-label",
+ Animation::new(Duration::from_secs(2))
+ .repeat()
+ .with_easing(pulsating_between(0.4, 0.8)),
+ |label, delta| label.alpha(delta),
+ )
+ .into_any()
+ } else {
+ this.into_any_element()
+ }
+ },
+ ))
+ .when_some(error, |this, error| {
+ this.child(Label::new(error).color(Color::Error))
+ }),
+ )
+ .into_any(),
+ )
+ }));
+
+ (lister, rx)
+ }
+ fn resolve_path(
+ path: PathBuf,
+ root_path: ProjectPath,
+ language_name: LanguageName,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> PathInputState {
+ PathInputState::Resolving(cx.spawn_in(window, async move |this, cx| {
+ _ = maybe!(async move {
+ let toolchain = project
+ .update(cx, |this, cx| {
+ this.resolve_toolchain(path.clone(), language_name, cx)
+ })?
+ .await;
+ let Ok(toolchain) = toolchain else {
+ // Go back to the path input state
+ _ = this.update_in(cx, |this, window, cx| {
+ if let AddState::Path {
+ input_state,
+ picker,
+ error,
+ ..
+ } = &mut this.state
+ && matches!(input_state, PathInputState::Resolving(_))
+ {
+ let Err(e) = toolchain else { unreachable!() };
+ *error = Some(Arc::from(e.to_string()));
+ let (delegate, rx) =
+ Self::create_path_browser_delegate(this.project.clone(), cx);
+ picker.update(cx, |picker, cx| {
+ *picker = Picker::uniform_list(delegate, window, cx);
+ picker.set_query(
+ Arc::from(path.to_string_lossy().as_ref()),
+ window,
+ cx,
+ );
+ });
+ *input_state = Self::wait_for_path(rx, window, cx);
+ this.focus_handle(cx).focus(window);
+ }
+ });
+ return Err(anyhow::anyhow!("Failed to resolve toolchain"));
+ };
+ let resolved_toolchain_path = project.read_with(cx, |this, cx| {
+ this.find_project_path(&toolchain.path.as_ref(), cx)
+ })?;
+
+ // Suggest a default scope based on the applicability.
+ let scope = if let Some(project_path) = resolved_toolchain_path {
+ if root_path.path.as_ref() != Path::new("")
+ && project_path.starts_with(&root_path)
+ {
+ ToolchainScope::Subproject(root_path.worktree_id, root_path.path)
+ } else {
+ ToolchainScope::Project
+ }
+ } else {
+ // This path lies outside of the project.
+ ToolchainScope::Global
+ };
+
+ _ = this.update_in(cx, |this, window, cx| {
+ let scope_picker = ScopePickerState {
+ entries: std::array::from_fn(|_| NavigableEntry::focusable(cx)),
+ selected_scope: scope,
+ };
+ this.state = AddState::Name {
+ editor: cx.new(|cx| {
+ let mut editor = Editor::single_line(window, cx);
+ editor.set_text(toolchain.name.as_ref(), window, cx);
+ editor
+ }),
+ toolchain,
+ scope_picker,
+ };
+ this.focus_handle(cx).focus(window);
+ });
+
+ Result::<_, anyhow::Error>::Ok(())
+ })
+ .await;
+ }))
+ }
+
+ fn wait_for_path(
+ rx: oneshot::Receiver<Option<Vec<PathBuf>>>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> PathInputState {
+ let task = cx.spawn_in(window, async move |this, cx| {
+ maybe!(async move {
+ let result = rx.await.log_err()?;
+
+ let path = result
+ .into_iter()
+ .flat_map(|paths| paths.into_iter())
+ .next()?;
+ this.update_in(cx, |this, window, cx| {
+ if let AddState::Path {
+ input_state, error, ..
+ } = &mut this.state
+ && matches!(input_state, PathInputState::WaitingForPath(_))
+ {
+ error.take();
+ *input_state = Self::resolve_path(
+ path,
+ this.root_path.clone(),
+ this.language_name.clone(),
+ this.project.clone(),
+ window,
+ cx,
+ );
+ }
+ })
+ .ok()?;
+ Some(())
+ })
+ .await;
+ });
+ PathInputState::WaitingForPath(task)
+ }
+
+ fn confirm_toolchain(
+ &mut self,
+ _: &menu::Confirm,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let AddState::Name {
+ toolchain,
+ editor,
+ scope_picker,
+ } = &mut self.state
+ else {
+ return;
+ };
+
+ let text = editor.read(cx).text(cx);
+ if text.is_empty() {
+ return;
+ }
+
+ toolchain.name = SharedString::from(text);
+ self.project.update(cx, |this, cx| {
+ this.add_toolchain(toolchain.clone(), scope_picker.selected_scope.clone(), cx);
+ });
+ _ = self.weak.update(cx, |this, cx| {
+ this.state = State::Search((this.create_search_state)(window, cx));
+ this.focus_handle(cx).focus(window);
+ cx.notify();
+ });
+ }
+}
+impl Focusable for AddToolchainState {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ match &self.state {
+ AddState::Path { picker, .. } => picker.focus_handle(cx),
+ AddState::Name { editor, .. } => editor.focus_handle(cx),
+ }
+ }
+}
+
+impl AddToolchainState {
+ fn select_scope(&mut self, scope: ToolchainScope, cx: &mut Context<Self>) {
+ if let AddState::Name { scope_picker, .. } = &mut self.state {
+ scope_picker.selected_scope = scope;
+ cx.notify();
+ }
+ }
+}
+
+impl Focusable for State {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ match self {
+ State::Search(state) => state.picker.focus_handle(cx),
+ State::AddToolchain(state) => state.focus_handle(cx),
+ }
+ }
+}
+impl Render for AddToolchainState {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let theme = cx.theme().clone();
+ let weak = self.weak.upgrade();
+ let label = SharedString::new_static("Add");
+
+ v_flex()
+ .size_full()
+ // todo: These modal styles shouldn't be needed as the modal picker already has `elevation_3`
+ // They get duplicated in the middle state of adding a virtual env, but then are needed for this last state
+ .bg(cx.theme().colors().elevated_surface_background)
+ .border_1()
+ .border_color(cx.theme().colors().border_variant)
+ .rounded_lg()
+ .when_some(weak, |this, weak| {
+ this.on_action(window.listener_for(
+ &weak,
+ |this: &mut ToolchainSelector, _: &menu::Cancel, window, cx| {
+ this.state = State::Search((this.create_search_state)(window, cx));
+ this.state.focus_handle(cx).focus(window);
+ cx.notify();
+ },
+ ))
+ })
+ .on_action(cx.listener(Self::confirm_toolchain))
+ .map(|this| match &self.state {
+ AddState::Path { picker, .. } => this.child(picker.clone()),
+ AddState::Name {
+ editor,
+ scope_picker,
+ ..
+ } => {
+ let scope_options = [
+ ToolchainScope::Global,
+ ToolchainScope::Project,
+ ToolchainScope::Subproject(
+ self.root_path.worktree_id,
+ self.root_path.path.clone(),
+ ),
+ ];
+
+ let mut navigable_scope_picker = Navigable::new(
+ v_flex()
+ .child(
+ h_flex()
+ .w_full()
+ .p_2()
+ .border_b_1()
+ .border_color(theme.colors().border)
+ .child(editor.clone()),
+ )
+ .child(
+ v_flex()
+ .child(
+ Label::new("Scope")
+ .size(LabelSize::Small)
+ .color(Color::Muted)
+ .mt_1()
+ .ml_2(),
+ )
+ .child(List::new().children(
+ scope_options.iter().enumerate().map(|(i, scope)| {
+ let is_selected = *scope == scope_picker.selected_scope;
+ let label = scope.label();
+ let description = scope.description();
+ let scope_clone_for_action = scope.clone();
+ let scope_clone_for_click = scope.clone();
+
+ div()
+ .id(SharedString::from(format!("scope-option-{i}")))
+ .track_focus(&scope_picker.entries[i].focus_handle)
+ .on_action(cx.listener(
+ move |this, _: &menu::Confirm, _, cx| {
+ this.select_scope(
+ scope_clone_for_action.clone(),
+ cx,
+ );
+ },
+ ))
+ .child(
+ ListItem::new(SharedString::from(format!(
+ "scope-{i}"
+ )))
+ .toggle_state(
+ is_selected
+ || scope_picker.entries[i]
+ .focus_handle
+ .contains_focused(window, cx),
+ )
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .child(
+ h_flex()
+ .gap_2()
+ .child(Label::new(label))
+ .child(
+ Label::new(description)
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ .on_click(cx.listener(move |this, _, _, cx| {
+ this.select_scope(
+ scope_clone_for_click.clone(),
+ cx,
+ );
+ })),
+ )
+ }),
+ ))
+ .child(Divider::horizontal())
+ .child(h_flex().p_1p5().justify_end().map(|this| {
+ let is_disabled = editor.read(cx).is_empty(cx);
+ let handle = self.focus_handle(cx);
+ this.child(
+ Button::new("add-toolchain", label)
+ .disabled(is_disabled)
+ .key_binding(KeyBinding::for_action_in(
+ &menu::Confirm,
+ &handle,
+ window,
+ cx,
+ ))
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.confirm_toolchain(
+ &menu::Confirm,
+ window,
+ cx,
+ );
+ }))
+ .map(|this| {
+ if false {
+ this.with_animation(
+ "inspecting-user-toolchain",
+ Animation::new(Duration::from_millis(
+ 500,
+ ))
+ .repeat()
+ .with_easing(pulsating_between(
+ 0.4, 0.8,
+ )),
+ |label, delta| label.alpha(delta),
+ )
+ .into_any()
+ } else {
+ this.into_any_element()
+ }
+ }),
+ )
+ })),
+ )
+ .into_any_element(),
+ );
+
+ for entry in &scope_picker.entries {
+ navigable_scope_picker = navigable_scope_picker.entry(entry.clone());
+ }
+
+ this.child(navigable_scope_picker.render(window, cx))
+ }
+ })
+ }
+}
+
+#[derive(Clone)]
+enum State {
+ Search(SearchState),
+ AddToolchain(Entity<AddToolchainState>),
+}
+
+impl RenderOnce for State {
+ fn render(self, _: &mut Window, _: &mut App) -> impl IntoElement {
+ match self {
+ State::Search(state) => state.picker.into_any_element(),
+ State::AddToolchain(state) => state.into_any_element(),
+ }
+ }
+}
impl ToolchainSelector {
fn register(
workspace: &mut Workspace,
@@ -40,6 +558,16 @@ impl ToolchainSelector {
workspace.register_action(move |workspace, _: &Select, window, cx| {
Self::toggle(workspace, window, cx);
});
+ workspace.register_action(move |workspace, _: &AddToolchain, window, cx| {
+ let Some(toolchain_selector) = workspace.active_modal::<Self>(cx) else {
+ Self::toggle(workspace, window, cx);
+ return;
+ };
+
+ toolchain_selector.update(cx, |toolchain_selector, cx| {
+ toolchain_selector.handle_add_toolchain(&AddToolchain, window, cx);
+ });
+ });
}
fn toggle(
@@ -105,35 +633,100 @@ impl ToolchainSelector {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
- let toolchain_selector = cx.entity().downgrade();
- let picker = cx.new(|cx| {
- let delegate = ToolchainSelectorDelegate::new(
- active_toolchain,
- toolchain_selector,
- workspace,
- worktree_id,
- worktree_root,
- project,
- relative_path,
- language_name,
+ let language_registry = project.read(cx).languages().clone();
+ cx.spawn({
+ let language_name = language_name.clone();
+ async move |this, cx| {
+ let language = language_registry
+ .language_for_name(&language_name.0)
+ .await
+ .ok();
+ this.update(cx, |this, cx| {
+ this.language = language;
+ cx.notify();
+ })
+ .ok();
+ }
+ })
+ .detach();
+ let project_clone = project.clone();
+ let language_name_clone = language_name.clone();
+ let relative_path_clone = relative_path.clone();
+
+ let create_search_state = Arc::new(move |window: &mut Window, cx: &mut Context<Self>| {
+ let toolchain_selector = cx.entity().downgrade();
+ let picker = cx.new(|cx| {
+ let delegate = ToolchainSelectorDelegate::new(
+ active_toolchain.clone(),
+ toolchain_selector,
+ workspace.clone(),
+ worktree_id,
+ worktree_root.clone(),
+ project_clone.clone(),
+ relative_path_clone.clone(),
+ language_name_clone.clone(),
+ window,
+ cx,
+ );
+ Picker::uniform_list(delegate, window, cx)
+ });
+ let picker_focus_handle = picker.focus_handle(cx);
+ picker.update(cx, |picker, _| {
+ picker.delegate.focus_handle = picker_focus_handle.clone();
+ });
+ SearchState { picker }
+ });
+
+ Self {
+ state: State::Search(create_search_state(window, cx)),
+ create_search_state,
+ language: None,
+ project,
+ language_name,
+ worktree_id,
+ relative_path,
+ }
+ }
+
+ fn handle_add_toolchain(
+ &mut self,
+ _: &AddToolchain,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if matches!(self.state, State::Search(_)) {
+ self.state = State::AddToolchain(AddToolchainState::new(
+ self.project.clone(),
+ self.language_name.clone(),
+ ProjectPath {
+ worktree_id: self.worktree_id,
+ path: self.relative_path.clone(),
+ },
window,
cx,
- );
- Picker::uniform_list(delegate, window, cx)
- });
- Self { picker }
+ ));
+ self.state.focus_handle(cx).focus(window);
+ cx.notify();
+ }
}
}
impl Render for ToolchainSelector {
- fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
- v_flex().w(rems(34.)).child(self.picker.clone())
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let mut key_context = KeyContext::new_with_defaults();
+ key_context.add("ToolchainSelector");
+
+ v_flex()
+ .key_context(key_context)
+ .w(rems(34.))
+ .on_action(cx.listener(Self::handle_add_toolchain))
+ .child(self.state.clone().render(window, cx))
}
}
impl Focusable for ToolchainSelector {
fn focus_handle(&self, cx: &App) -> FocusHandle {
- self.picker.focus_handle(cx)
+ self.state.focus_handle(cx)
}
}
@@ -142,7 +735,7 @@ impl ModalView for ToolchainSelector {}
pub struct ToolchainSelectorDelegate {
toolchain_selector: WeakEntity<ToolchainSelector>,
- candidates: ToolchainList,
+ candidates: Arc<[(Toolchain, Option<ToolchainScope>)]>,
matches: Vec<StringMatch>,
selected_index: usize,
workspace: WeakEntity<Workspace>,
@@ -150,6 +743,9 @@ pub struct ToolchainSelectorDelegate {
worktree_abs_path_root: Arc<Path>,
relative_path: Arc<Path>,
placeholder_text: Arc<str>,
+ add_toolchain_text: Arc<str>,
+ project: Entity<Project>,
+ focus_handle: FocusHandle,
_fetch_candidates_task: Task<Option<()>>,
}
@@ -166,20 +762,33 @@ impl ToolchainSelectorDelegate {
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Self {
+ let _project = project.clone();
+
let _fetch_candidates_task = cx.spawn_in(window, {
- let project = project.clone();
async move |this, cx| {
- let term = project
+ let meta = _project
.read_with(cx, |this, _| {
- Project::toolchain_term(this.languages().clone(), language_name.clone())
+ Project::toolchain_metadata(this.languages().clone(), language_name.clone())
})
.ok()?
.await?;
let relative_path = this
- .read_with(cx, |this, _| this.delegate.relative_path.clone())
+ .update(cx, |this, cx| {
+ this.delegate.add_toolchain_text = format!(
+ "Add {}",
+ meta.term.as_ref().to_case(convert_case::Case::Title)
+ )
+ .into();
+ cx.notify();
+ this.delegate.relative_path.clone()
+ })
.ok()?;
- let (available_toolchains, relative_path) = project
+ let Toolchains {
+ toolchains: available_toolchains,
+ root_path: relative_path,
+ user_toolchains,
+ } = _project
.update(cx, |this, cx| {
this.available_toolchains(
ProjectPath {
@@ -201,7 +810,7 @@ impl ToolchainSelectorDelegate {
}
};
let placeholder_text =
- format!("Select a {} for {pretty_path}…", term.to_lowercase(),).into();
+ format!("Select a {} for {pretty_path}…", meta.term.to_lowercase(),).into();
let _ = this.update_in(cx, move |this, window, cx| {
this.delegate.relative_path = relative_path;
this.delegate.placeholder_text = placeholder_text;
@@ -209,15 +818,27 @@ impl ToolchainSelectorDelegate {
});
let _ = this.update_in(cx, move |this, window, cx| {
- this.delegate.candidates = available_toolchains;
+ this.delegate.candidates = user_toolchains
+ .into_iter()
+ .flat_map(|(scope, toolchains)| {
+ toolchains
+ .into_iter()
+ .map(move |toolchain| (toolchain, Some(scope.clone())))
+ })
+ .chain(
+ available_toolchains
+ .toolchains
+ .into_iter()
+ .map(|toolchain| (toolchain, None)),
+ )
+ .collect();
if let Some(active_toolchain) = active_toolchain
&& let Some(position) = this
.delegate
.candidates
- .toolchains
.iter()
- .position(|toolchain| *toolchain == active_toolchain)
+ .position(|(toolchain, _)| *toolchain == active_toolchain)
{
this.delegate.set_selected_index(position, window, cx);
}
@@ -239,6 +860,9 @@ impl ToolchainSelectorDelegate {
placeholder_text,
relative_path,
_fetch_candidates_task,
+ project,
+ focus_handle: cx.focus_handle(),
+ add_toolchain_text: Arc::from("Add Toolchain"),
}
}
fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString {
@@ -264,7 +888,7 @@ impl PickerDelegate for ToolchainSelectorDelegate {
fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
if let Some(string_match) = self.matches.get(self.selected_index) {
- let toolchain = self.candidates.toolchains[string_match.candidate_id].clone();
+ let (toolchain, _) = self.candidates[string_match.candidate_id].clone();
if let Some(workspace_id) = self
.workspace
.read_with(cx, |this, _| this.database_id())
@@ -331,11 +955,11 @@ impl PickerDelegate for ToolchainSelectorDelegate {
cx.spawn_in(window, async move |this, cx| {
let matches = if query.is_empty() {
candidates
- .toolchains
.into_iter()
.enumerate()
- .map(|(index, candidate)| {
- let path = Self::relativize_path(candidate.path, &worktree_root_path);
+ .map(|(index, (candidate, _))| {
+ let path =
+ Self::relativize_path(candidate.path.clone(), &worktree_root_path);
let string = format!("{}{}", candidate.name, path);
StringMatch {
candidate_id: index,
@@ -347,11 +971,11 @@ impl PickerDelegate for ToolchainSelectorDelegate {
.collect()
} else {
let candidates = candidates
- .toolchains
.into_iter()
.enumerate()
- .map(|(candidate_id, toolchain)| {
- let path = Self::relativize_path(toolchain.path, &worktree_root_path);
+ .map(|(candidate_id, (toolchain, _))| {
+ let path =
+ Self::relativize_path(toolchain.path.clone(), &worktree_root_path);
let string = format!("{}{}", toolchain.name, path);
StringMatchCandidate::new(candidate_id, &string)
})
@@ -384,11 +1008,11 @@ impl PickerDelegate for ToolchainSelectorDelegate {
&self,
ix: usize,
selected: bool,
- _window: &mut Window,
- _: &mut Context<Picker<Self>>,
+ _: &mut Window,
+ cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let mat = &self.matches[ix];
- let toolchain = &self.candidates.toolchains[mat.candidate_id];
+ let (toolchain, scope) = &self.candidates[mat.candidate_id];
let label = toolchain.name.clone();
let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root);
@@ -400,8 +1024,9 @@ impl PickerDelegate for ToolchainSelectorDelegate {
path_highlights.iter_mut().for_each(|index| {
*index -= label.len();
});
+ let id: SharedString = format!("toolchain-{ix}",).into();
Some(
- ListItem::new(ix)
+ ListItem::new(id)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.toggle_state(selected)
@@ -410,7 +1035,89 @@ impl PickerDelegate for ToolchainSelectorDelegate {
HighlightedLabel::new(path, path_highlights)
.size(LabelSize::Small)
.color(Color::Muted),
- ),
+ )
+ .when_some(scope.as_ref(), |this, scope| {
+ let id: SharedString = format!(
+ "delete-custom-toolchain-{}-{}",
+ toolchain.name, toolchain.path
+ )
+ .into();
+ let toolchain = toolchain.clone();
+ let scope = scope.clone();
+
+ this.end_slot(IconButton::new(id, IconName::Trash))
+ .on_click(cx.listener(move |this, _, _, cx| {
+ this.delegate.project.update(cx, |this, cx| {
+ this.remove_toolchain(toolchain.clone(), scope.clone(), cx)
+ });
+
+ this.delegate.matches.retain_mut(|m| {
+ if m.candidate_id == ix {
+ return false;
+ } else if m.candidate_id > ix {
+ m.candidate_id -= 1;
+ }
+ true
+ });
+
+ this.delegate.candidates = this
+ .delegate
+ .candidates
+ .iter()
+ .enumerate()
+ .filter_map(|(i, toolchain)| (ix != i).then_some(toolchain.clone()))
+ .collect();
+
+ if this.delegate.selected_index >= ix {
+ this.delegate.selected_index =
+ this.delegate.selected_index.saturating_sub(1);
+ }
+ cx.stop_propagation();
+ cx.notify();
+ }))
+ }),
+ )
+ }
+ fn render_footer(
+ &self,
+ _window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Option<AnyElement> {
+ Some(
+ v_flex()
+ .rounded_b_md()
+ .child(Divider::horizontal())
+ .child(
+ h_flex()
+ .p_1p5()
+ .gap_0p5()
+ .justify_end()
+ .child(
+ Button::new("xd", self.add_toolchain_text.clone())
+ .key_binding(KeyBinding::for_action_in(
+ &AddToolchain,
+ &self.focus_handle,
+ _window,
+ cx,
+ ))
+ .on_click(|_, window, cx| {
+ window.dispatch_action(Box::new(AddToolchain), cx)
+ }),
+ )
+ .child(
+ Button::new("select", "Select")
+ .key_binding(KeyBinding::for_action_in(
+ &menu::Confirm,
+ &self.focus_handle,
+ _window,
+ cx,
+ ))
+ .on_click(|_, window, cx| {
+ window.dispatch_action(menu::Confirm.boxed_clone(), cx)
+ }),
+ ),
+ )
+ .into_any_element(),
)
}
}
@@ -582,13 +582,9 @@ impl RenderOnce for ButtonLike {
.when_some(self.width, |this, width| {
this.w(width).justify_center().text_center()
})
- .when(
- match self.style {
- ButtonStyle::Outlined => true,
- _ => false,
- },
- |this| this.border_1(),
- )
+ .when(matches!(self.style, ButtonStyle::Outlined), |this| {
+ this.border_1()
+ })
.when_some(self.rounding, |this, rounding| match rounding {
ButtonLikeRounding::All => this.rounded_sm(),
ButtonLikeRounding::Left => this.rounded_l_sm(),
@@ -81,7 +81,8 @@ impl Callout {
self
}
- /// Sets an optional tertiary call-to-action button.
+ /// Sets an optional dismiss button, which is usually an icon button with a close icon.
+ /// This button is always rendered as the last one to the far right.
pub fn dismiss_action(mut self, action: impl IntoElement) -> Self {
self.dismiss_action = Some(action.into_any_element());
self
@@ -131,6 +132,7 @@ impl RenderOnce for Callout {
h_flex()
.min_w_0()
+ .w_full()
.p_2()
.gap_2()
.items_start()
@@ -561,7 +561,7 @@ impl ContextMenu {
action: Some(action.boxed_clone()),
handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)),
icon: Some(IconName::ArrowUpRight),
- icon_size: IconSize::Small,
+ icon_size: IconSize::XSmall,
icon_position: IconPosition::End,
icon_color: None,
disabled: false,
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use gpui::{ClickEvent, CursorStyle};
+use gpui::{ClickEvent, CursorStyle, SharedString};
use crate::{Color, IconButton, IconButtonShape, IconName, IconSize, prelude::*};
@@ -14,6 +14,7 @@ pub struct Disclosure {
cursor_style: CursorStyle,
opened_icon: IconName,
closed_icon: IconName,
+ visible_on_hover: Option<SharedString>,
}
impl Disclosure {
@@ -27,6 +28,7 @@ impl Disclosure {
cursor_style: CursorStyle::PointingHand,
opened_icon: IconName::ChevronDown,
closed_icon: IconName::ChevronRight,
+ visible_on_hover: None,
}
}
@@ -73,6 +75,13 @@ impl Clickable for Disclosure {
}
}
+impl VisibleOnHover for Disclosure {
+ fn visible_on_hover(mut self, group_name: impl Into<SharedString>) -> Self {
+ self.visible_on_hover = Some(group_name.into());
+ self
+ }
+}
+
impl RenderOnce for Disclosure {
fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
IconButton::new(
@@ -87,6 +96,9 @@ impl RenderOnce for Disclosure {
.icon_size(IconSize::Small)
.disabled(self.disabled)
.toggle_state(self.selected)
+ .when_some(self.visible_on_hover.clone(), |this, group_name| {
+ this.visible_on_hover(group_name)
+ })
.when_some(self.on_toggle, move |this, on_toggle| {
this.on_click(move |event, window, cx| on_toggle(event, window, cx))
})
@@ -96,7 +96,7 @@ impl RenderOnce for DropdownMenu {
.style(self.style),
)
.attach(Corner::BottomLeft)
- .when_some(self.handle.clone(), |el, handle| el.with_handle(handle))
+ .when_some(self.handle, |el, handle| el.with_handle(handle))
}
}
@@ -169,7 +169,7 @@ impl Component for DropdownMenu {
"States",
vec![single_example(
"Disabled",
- DropdownMenu::new("disabled", "Disabled Dropdown", menu.clone())
+ DropdownMenu::new("disabled", "Disabled Dropdown", menu)
.disabled(true)
.into_any_element(),
)],
@@ -78,7 +78,7 @@ impl RenderOnce for Facepile {
}
}
-pub const EXAMPLE_FACES: [&'static str; 6] = [
+pub const EXAMPLE_FACES: [&str; 6] = [
"https://avatars.githubusercontent.com/u/326587?s=60&v=4",
"https://avatars.githubusercontent.com/u/2280405?s=60&v=4",
"https://avatars.githubusercontent.com/u/1789?s=60&v=4",
@@ -9,6 +9,7 @@ use gpui::{AnimationElement, AnyElement, Hsla, IntoElement, Rems, Transformation
pub use icon_decoration::*;
pub use icons::*;
+use crate::traits::transformable::Transformable;
use crate::{Indicator, prelude::*};
#[derive(IntoElement)]
@@ -180,8 +181,10 @@ impl Icon {
self.size = size;
self
}
+}
- pub fn transform(mut self, transformation: Transformation) -> Self {
+impl Transformable for Icon {
+ fn transform(mut self, transformation: Transformation) -> Self {
self.transformation = transformation;
self
}
@@ -7,12 +7,16 @@ use strum::{EnumIter, EnumString, IntoStaticStr};
use crate::Color;
use crate::prelude::*;
+use crate::traits::transformable::Transformable;
#[derive(
Debug, PartialEq, Eq, Copy, Clone, EnumIter, EnumString, IntoStaticStr, Serialize, Deserialize,
)]
#[strum(serialize_all = "snake_case")]
pub enum VectorName {
+ AcpGrid,
+ AcpLogo,
+ AcpLogoSerif,
AiGrid,
DebuggerGrid,
Grid,
@@ -71,8 +75,10 @@ impl Vector {
self.size = size;
self
}
+}
- pub fn transform(mut self, transformation: Transformation) -> Self {
+impl Transformable for Vector {
+ fn transform(mut self, transformation: Transformation) -> Self {
self.transformation = transformation;
self
}
@@ -195,7 +195,7 @@ mod uniform_list {
impl UniformListDecoration for IndentGuides {
fn compute(
&self,
- visible_range: Range<usize>,
+ mut visible_range: Range<usize>,
bounds: Bounds<Pixels>,
_scroll_offset: Point<Pixels>,
item_height: Pixels,
@@ -203,7 +203,6 @@ mod uniform_list {
window: &mut Window,
cx: &mut App,
) -> AnyElement {
- let mut visible_range = visible_range.clone();
let includes_trailing_indent = visible_range.end < item_count;
// Check if we have entries after the visible range,
// if so extend the visible range so we can fetch a trailing indent,
@@ -1,8 +1,8 @@
use crate::PlatformStyle;
use crate::{Icon, IconName, IconSize, h_flex, prelude::*};
use gpui::{
- Action, AnyElement, App, FocusHandle, Global, IntoElement, Keystroke, Modifiers, Window,
- relative,
+ Action, AnyElement, App, FocusHandle, Global, IntoElement, KeybindingKeystroke, Keystroke,
+ Modifiers, Window, relative,
};
use itertools::Itertools;
@@ -13,7 +13,7 @@ pub struct KeyBinding {
/// More than one keystroke produces a chord.
///
/// This should always contain at least one keystroke.
- pub keystrokes: Vec<Keystroke>,
+ pub keystrokes: Vec<KeybindingKeystroke>,
/// The [`PlatformStyle`] to use when displaying this keybinding.
platform_style: PlatformStyle,
@@ -59,7 +59,7 @@ impl KeyBinding {
cx.try_global::<VimStyle>().is_some_and(|g| g.0)
}
- pub fn new(keystrokes: Vec<Keystroke>, cx: &App) -> Self {
+ pub fn new(keystrokes: Vec<KeybindingKeystroke>, cx: &App) -> Self {
Self {
keystrokes,
platform_style: PlatformStyle::platform(),
@@ -99,16 +99,16 @@ impl KeyBinding {
}
fn render_key(
- keystroke: &Keystroke,
+ key: &str,
color: Option<Color>,
platform_style: PlatformStyle,
size: impl Into<Option<AbsoluteLength>>,
) -> AnyElement {
- let key_icon = icon_for_key(keystroke, platform_style);
+ let key_icon = icon_for_key(key, platform_style);
match key_icon {
Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
None => {
- let key = util::capitalize(&keystroke.key);
+ let key = util::capitalize(key);
Key::new(&key, color).size(size).into_any_element()
}
}
@@ -124,7 +124,7 @@ impl RenderOnce for KeyBinding {
"KEY_BINDING-{}",
self.keystrokes
.iter()
- .map(|k| k.key.to_string())
+ .map(|k| k.key().to_string())
.collect::<Vec<_>>()
.join(" ")
)
@@ -137,7 +137,7 @@ impl RenderOnce for KeyBinding {
.py_0p5()
.rounded_xs()
.text_color(cx.theme().colors().text_muted)
- .children(render_keystroke(
+ .children(render_keybinding_keystroke(
keystroke,
color,
self.size,
@@ -148,8 +148,8 @@ impl RenderOnce for KeyBinding {
}
}
-pub fn render_keystroke(
- keystroke: &Keystroke,
+pub fn render_keybinding_keystroke(
+ keystroke: &KeybindingKeystroke,
color: Option<Color>,
size: impl Into<Option<AbsoluteLength>>,
platform_style: PlatformStyle,
@@ -163,26 +163,34 @@ pub fn render_keystroke(
let size = size.into();
if use_text {
- let element = Key::new(keystroke_text(keystroke, platform_style, vim_mode), color)
- .size(size)
- .into_any_element();
+ let element = Key::new(
+ keystroke_text(
+ keystroke.modifiers(),
+ keystroke.key(),
+ platform_style,
+ vim_mode,
+ ),
+ color,
+ )
+ .size(size)
+ .into_any_element();
vec![element]
} else {
let mut elements = Vec::new();
elements.extend(render_modifiers(
- &keystroke.modifiers,
+ keystroke.modifiers(),
platform_style,
color,
size,
true,
));
- elements.push(render_key(keystroke, color, platform_style, size));
+ elements.push(render_key(keystroke.key(), color, platform_style, size));
elements
}
}
-fn icon_for_key(keystroke: &Keystroke, platform_style: PlatformStyle) -> Option<IconName> {
- match keystroke.key.as_str() {
+fn icon_for_key(key: &str, platform_style: PlatformStyle) -> Option<IconName> {
+ match key {
"left" => Some(IconName::ArrowLeft),
"right" => Some(IconName::ArrowRight),
"up" => Some(IconName::ArrowUp),
@@ -325,7 +333,7 @@ impl RenderOnce for Key {
.text_size(size)
.line_height(relative(1.))
.text_color(self.color.unwrap_or(Color::Muted).color(cx))
- .child(self.key.clone())
+ .child(self.key)
}
}
@@ -379,7 +387,7 @@ impl KeyIcon {
/// Returns a textual representation of the key binding for the given [`Action`].
pub fn text_for_action(action: &dyn Action, window: &Window, cx: &App) -> Option<String> {
let key_binding = window.highest_precedence_binding_for_action(action)?;
- Some(text_for_keystrokes(key_binding.keystrokes(), cx))
+ Some(text_for_keybinding_keystrokes(key_binding.keystrokes(), cx))
}
pub fn text_for_keystrokes(keystrokes: &[Keystroke], cx: &App) -> String {
@@ -387,22 +395,50 @@ pub fn text_for_keystrokes(keystrokes: &[Keystroke], cx: &App) -> String {
let vim_enabled = cx.try_global::<VimStyle>().is_some();
keystrokes
.iter()
- .map(|keystroke| keystroke_text(keystroke, platform_style, vim_enabled))
+ .map(|keystroke| {
+ keystroke_text(
+ &keystroke.modifiers,
+ &keystroke.key,
+ platform_style,
+ vim_enabled,
+ )
+ })
+ .join(" ")
+}
+
+pub fn text_for_keybinding_keystrokes(keystrokes: &[KeybindingKeystroke], cx: &App) -> String {
+ let platform_style = PlatformStyle::platform();
+ let vim_enabled = cx.try_global::<VimStyle>().is_some();
+ keystrokes
+ .iter()
+ .map(|keystroke| {
+ keystroke_text(
+ keystroke.modifiers(),
+ keystroke.key(),
+ platform_style,
+ vim_enabled,
+ )
+ })
.join(" ")
}
-pub fn text_for_keystroke(keystroke: &Keystroke, cx: &App) -> String {
+pub fn text_for_keystroke(modifiers: &Modifiers, key: &str, cx: &App) -> String {
let platform_style = PlatformStyle::platform();
let vim_enabled = cx.try_global::<VimStyle>().is_some();
- keystroke_text(keystroke, platform_style, vim_enabled)
+ keystroke_text(modifiers, key, platform_style, vim_enabled)
}
/// Returns a textual representation of the given [`Keystroke`].
-fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode: bool) -> String {
+fn keystroke_text(
+ modifiers: &Modifiers,
+ key: &str,
+ platform_style: PlatformStyle,
+ vim_mode: bool,
+) -> String {
let mut text = String::new();
let delimiter = '-';
- if keystroke.modifiers.function {
+ if modifiers.function {
match vim_mode {
false => text.push_str("Fn"),
true => text.push_str("fn"),
@@ -411,7 +447,7 @@ fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode
text.push(delimiter);
}
- if keystroke.modifiers.control {
+ if modifiers.control {
match (platform_style, vim_mode) {
(PlatformStyle::Mac, false) => text.push_str("Control"),
(PlatformStyle::Linux | PlatformStyle::Windows, false) => text.push_str("Ctrl"),
@@ -421,7 +457,7 @@ fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode
text.push(delimiter);
}
- if keystroke.modifiers.platform {
+ if modifiers.platform {
match (platform_style, vim_mode) {
(PlatformStyle::Mac, false) => text.push_str("Command"),
(PlatformStyle::Mac, true) => text.push_str("cmd"),
@@ -434,7 +470,7 @@ fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode
text.push(delimiter);
}
- if keystroke.modifiers.alt {
+ if modifiers.alt {
match (platform_style, vim_mode) {
(PlatformStyle::Mac, false) => text.push_str("Option"),
(PlatformStyle::Linux | PlatformStyle::Windows, false) => text.push_str("Alt"),
@@ -444,7 +480,7 @@ fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode
text.push(delimiter);
}
- if keystroke.modifiers.shift {
+ if modifiers.shift {
match (platform_style, vim_mode) {
(_, false) => text.push_str("Shift"),
(_, true) => text.push_str("shift"),
@@ -453,9 +489,9 @@ fn keystroke_text(keystroke: &Keystroke, platform_style: PlatformStyle, vim_mode
}
if vim_mode {
- text.push_str(&keystroke.key)
+ text.push_str(key)
} else {
- let key = match keystroke.key.as_str() {
+ let key = match key {
"pageup" => "PageUp",
"pagedown" => "PageDown",
key => &util::capitalize(key),
@@ -562,9 +598,11 @@ mod tests {
#[test]
fn test_text_for_keystroke() {
+ let keystroke = Keystroke::parse("cmd-c").unwrap();
assert_eq!(
keystroke_text(
- &Keystroke::parse("cmd-c").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Mac,
false
),
@@ -572,7 +610,8 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("cmd-c").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Linux,
false
),
@@ -580,16 +619,19 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("cmd-c").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Windows,
false
),
"Win-C".to_string()
);
+ let keystroke = Keystroke::parse("ctrl-alt-delete").unwrap();
assert_eq!(
keystroke_text(
- &Keystroke::parse("ctrl-alt-delete").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Mac,
false
),
@@ -597,7 +639,8 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("ctrl-alt-delete").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Linux,
false
),
@@ -605,16 +648,19 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("ctrl-alt-delete").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Windows,
false
),
"Ctrl-Alt-Delete".to_string()
);
+ let keystroke = Keystroke::parse("shift-pageup").unwrap();
assert_eq!(
keystroke_text(
- &Keystroke::parse("shift-pageup").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Mac,
false
),
@@ -622,7 +668,8 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("shift-pageup").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Linux,
false,
),
@@ -630,7 +677,8 @@ mod tests {
);
assert_eq!(
keystroke_text(
- &Keystroke::parse("shift-pageup").unwrap(),
+ &keystroke.modifiers,
+ &keystroke.key,
PlatformStyle::Windows,
false
),
@@ -269,7 +269,7 @@ impl Component for KeybindingHint {
),
single_example(
"Large",
- KeybindingHint::new(enter.clone(), bg_color)
+ KeybindingHint::new(enter, bg_color)
.size(Pixels::from(20.0))
.prefix("Large:")
.suffix("Size")
@@ -2,8 +2,10 @@ mod highlighted_label;
mod label;
mod label_like;
mod loading_label;
+mod spinner_label;
pub use highlighted_label::*;
pub use label::*;
pub use label_like::*;
pub use loading_label::*;
+pub use spinner_label::*;
@@ -0,0 +1,192 @@
+use crate::prelude::*;
+use gpui::{Animation, AnimationExt, FontWeight};
+use std::time::Duration;
+
+/// Different types of spinner animations
+#[derive(Debug, Default, Clone, Copy, PartialEq)]
+pub enum SpinnerVariant {
+ #[default]
+ Dots,
+ DotsVariant,
+}
+
+/// A spinner indicator, based on the label component, that loops through
+/// frames of the specified animation. It implements `LabelCommon` as well.
+///
+/// # Default Example
+///
+/// ```
+/// use ui::{SpinnerLabel};
+///
+/// SpinnerLabel::new();
+/// ```
+///
+/// # Variant Example
+///
+/// ```
+/// use ui::{SpinnerLabel};
+///
+/// SpinnerLabel::dots_variant();
+/// ```
+#[derive(IntoElement, RegisterComponent)]
+pub struct SpinnerLabel {
+ base: Label,
+ variant: SpinnerVariant,
+ frames: Vec<&'static str>,
+ duration: Duration,
+}
+
+impl SpinnerVariant {
+ fn frames(&self) -> Vec<&'static str> {
+ match self {
+ SpinnerVariant::Dots => vec!["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"],
+ SpinnerVariant::DotsVariant => vec!["⣼", "⣹", "⢻", "⠿", "⡟", "⣏", "⣧", "⣶"],
+ }
+ }
+
+ fn duration(&self) -> Duration {
+ match self {
+ SpinnerVariant::Dots => Duration::from_millis(1000),
+ SpinnerVariant::DotsVariant => Duration::from_millis(1000),
+ }
+ }
+
+ fn animation_id(&self) -> &'static str {
+ match self {
+ SpinnerVariant::Dots => "spinner_label_dots",
+ SpinnerVariant::DotsVariant => "spinner_label_dots_variant",
+ }
+ }
+}
+
+impl SpinnerLabel {
+ pub fn new() -> Self {
+ Self::with_variant(SpinnerVariant::default())
+ }
+
+ pub fn with_variant(variant: SpinnerVariant) -> Self {
+ let frames = variant.frames();
+ let duration = variant.duration();
+
+ SpinnerLabel {
+ base: Label::new(frames[0]),
+ variant,
+ frames,
+ duration,
+ }
+ }
+
+ pub fn dots() -> Self {
+ Self::with_variant(SpinnerVariant::Dots)
+ }
+
+ pub fn dots_variant() -> Self {
+ Self::with_variant(SpinnerVariant::DotsVariant)
+ }
+}
+
+impl LabelCommon for SpinnerLabel {
+ fn size(mut self, size: LabelSize) -> Self {
+ self.base = self.base.size(size);
+ self
+ }
+
+ fn weight(mut self, weight: FontWeight) -> Self {
+ self.base = self.base.weight(weight);
+ self
+ }
+
+ fn line_height_style(mut self, line_height_style: LineHeightStyle) -> Self {
+ self.base = self.base.line_height_style(line_height_style);
+ self
+ }
+
+ fn color(mut self, color: Color) -> Self {
+ self.base = self.base.color(color);
+ self
+ }
+
+ fn strikethrough(mut self) -> Self {
+ self.base = self.base.strikethrough();
+ self
+ }
+
+ fn italic(mut self) -> Self {
+ self.base = self.base.italic();
+ self
+ }
+
+ fn alpha(mut self, alpha: f32) -> Self {
+ self.base = self.base.alpha(alpha);
+ self
+ }
+
+ fn underline(mut self) -> Self {
+ self.base = self.base.underline();
+ self
+ }
+
+ fn truncate(mut self) -> Self {
+ self.base = self.base.truncate();
+ self
+ }
+
+ fn single_line(mut self) -> Self {
+ self.base = self.base.single_line();
+ self
+ }
+
+ fn buffer_font(mut self, cx: &App) -> Self {
+ self.base = self.base.buffer_font(cx);
+ self
+ }
+
+ fn inline_code(mut self, cx: &App) -> Self {
+ self.base = self.base.inline_code(cx);
+ self
+ }
+}
+
+impl RenderOnce for SpinnerLabel {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ let frames = self.frames.clone();
+ let duration = self.duration;
+
+ self.base.color(Color::Muted).with_animation(
+ self.variant.animation_id(),
+ Animation::new(duration).repeat(),
+ move |mut label, delta| {
+ let frame_index = (delta * frames.len() as f32) as usize % frames.len();
+
+ label.set_text(frames[frame_index]);
+ label
+ },
+ )
+ }
+}
+
+impl Component for SpinnerLabel {
+ fn scope() -> ComponentScope {
+ ComponentScope::Loading
+ }
+
+ fn name() -> &'static str {
+ "Spinner Label"
+ }
+
+ fn sort_name() -> &'static str {
+ "Spinner Label"
+ }
+
+ fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
+ let examples = vec![
+ single_example("Default", SpinnerLabel::new().into_any_element()),
+ single_example(
+ "Dots Variant",
+ SpinnerLabel::dots_variant().into_any_element(),
+ ),
+ ];
+
+ Some(example_group(examples).vertical().into_any_element())
+ }
+}
@@ -64,7 +64,7 @@ impl RenderOnce for AlertModal {
)
.child(Button::new(
self.primary_action.clone(),
- self.primary_action.clone(),
+ self.primary_action,
)),
),
)
@@ -28,7 +28,7 @@ where
T: StickyCandidate + Clone + 'static,
{
let entity_compute = entity.clone();
- let entity_render = entity.clone();
+ let entity_render = entity;
let compute_fn = Rc::new(
move |range: Range<usize>, window: &mut Window, cx: &mut App| -> SmallVec<[T; 8]> {
@@ -616,7 +616,7 @@ impl SwitchField {
Self {
id: id.into(),
label: label.into(),
- description: description,
+ description,
toggle_state: toggle_state.into(),
on_click: Arc::new(on_click),
disabled: false,
@@ -175,7 +175,7 @@ impl Tooltip {
move |_, cx| {
let title = title.clone();
cx.new(|_| Self {
- title: title,
+ title,
meta: None,
key_binding: None,
})
@@ -1,6 +1,8 @@
+pub mod animation_ext;
pub mod clickable;
pub mod disableable;
pub mod fixed;
pub mod styled_ext;
pub mod toggleable;
+pub mod transformable;
pub mod visible_on_hover;
@@ -0,0 +1,42 @@
+use std::time::Duration;
+
+use gpui::{Animation, AnimationElement, AnimationExt, Transformation, percentage};
+
+use crate::{prelude::*, traits::transformable::Transformable};
+
+/// An extension trait for adding common animations to animatable components.
+pub trait CommonAnimationExt: AnimationExt {
+ /// Render this component as rotating over the given duration.
+ ///
+ /// NOTE: This method uses the location of the caller to generate an ID for this state.
+ /// If this is not sufficient to identify your state (e.g. you're rendering a list item),
+    /// you can provide a custom ElementId using the `with_keyed_rotate_animation` method.
+ #[track_caller]
+ fn with_rotate_animation(self, duration: u64) -> AnimationElement<Self>
+ where
+ Self: Transformable + Sized,
+ {
+ self.with_keyed_rotate_animation(
+ ElementId::CodeLocation(*std::panic::Location::caller()),
+ duration,
+ )
+ }
+
+ /// Render this component as rotating with the given element ID over the given duration.
+ fn with_keyed_rotate_animation(
+ self,
+ id: impl Into<ElementId>,
+ duration: u64,
+ ) -> AnimationElement<Self>
+ where
+ Self: Transformable + Sized,
+ {
+ self.with_animation(
+ id,
+ Animation::new(Duration::from_secs(duration)).repeat(),
+ |component, delta| component.transform(Transformation::rotate(percentage(delta))),
+ )
+ }
+}
+
+impl<T: AnimationExt> CommonAnimationExt for T {}
@@ -0,0 +1,7 @@
+use gpui::Transformation;
+
+/// A trait for components that can be transformed.
+pub trait Transformable {
+ /// Sets the transformation for the element.
+ fn transform(self, transformation: Transformation) -> Self;
+}
@@ -17,3 +17,4 @@ pub mod utils;
pub use components::*;
pub use prelude::*;
pub use styles::*;
+pub use traits::animation_ext::*;
@@ -3,12 +3,14 @@
use gpui::App;
use theme::ActiveTheme;
+mod apca_contrast;
mod color_contrast;
mod corner_solver;
mod format_distance;
mod search_input;
mod with_rem_size;
+pub use apca_contrast::*;
pub use color_contrast::*;
pub use corner_solver::{CornerSolver, inner_corner_radius};
pub use format_distance::*;
@@ -13,9 +13,9 @@ impl DateTimeType {
///
/// If the [`DateTimeType`] is already a [`NaiveDateTime`], it will be returned as is.
/// If the [`DateTimeType`] is a [`DateTime<Local>`], it will be converted to a [`NaiveDateTime`].
- pub fn to_naive(&self) -> NaiveDateTime {
+ pub fn to_naive(self) -> NaiveDateTime {
match self {
- DateTimeType::Naive(naive) => *naive,
+ DateTimeType::Naive(naive) => naive,
DateTimeType::Local(local) => local.naive_local(),
}
}
@@ -159,7 +159,6 @@ fn distance_string(
} else {
format!("about {} hours", hours)
}
- .to_string()
} else if distance < 172_800 {
"1 day".to_string()
} else if distance < 2_592_000 {
@@ -206,21 +205,16 @@ fn distance_string(
} else {
format!("about {} years", years)
}
- .to_string()
} else if remaining_months < 9 {
if hide_prefix {
format!("{} years", years)
} else {
format!("over {} years", years)
}
- .to_string()
+ } else if hide_prefix {
+ format!("{} years", years + 1)
} else {
- if hide_prefix {
- format!("{} years", years + 1)
- } else {
- format!("almost {} years", years + 1)
- }
- .to_string()
+ format!("almost {} years", years + 1)
}
};
@@ -202,11 +202,11 @@ impl Component for SingleLineInput {
.children(vec![example_group(vec![
single_example(
"Small Label (Default)",
- div().child(input_small.clone()).into_any_element(),
+ div().child(input_small).into_any_element(),
),
single_example(
"Regular Label",
- div().child(input_regular.clone()).into_any_element(),
+ div().child(input_regular).into_any_element(),
),
])])
.into_any_element(),
@@ -154,7 +154,7 @@ mod tests {
let mut builder =
ZipEntryBuilder::new(filename.into(), async_zip::Compression::Deflate);
use std::os::unix::fs::PermissionsExt;
- let metadata = std::fs::metadata(&path)?;
+ let metadata = std::fs::metadata(path)?;
let perms = metadata.permissions().mode() as u16;
builder = builder.unix_permissions(perms);
writer.write_entry_whole(builder, &data).await?;
@@ -23,7 +23,7 @@ impl Display for MarkdownString {
/// the other characters involved are escaped:
///
/// * `!`, `]`, `(`, and `)` are used in link syntax, but `[` is escaped so these are parsed as
-/// plaintext.
+/// plaintext.
///
/// * `;` is used in HTML entity syntax, but `&` is escaped, so they are parsed as plaintext.
///
@@ -2,6 +2,8 @@ use globset::{Glob, GlobSet, GlobSetBuilder};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
+use std::fmt::{Display, Formatter};
+use std::mem;
use std::path::StripPrefixError;
use std::sync::{Arc, OnceLock};
use std::{
@@ -98,23 +100,84 @@ impl<T: AsRef<Path>> PathExt for T {
}
}
-/// Due to the issue of UNC paths on Windows, which can cause bugs in various parts of Zed, introducing this `SanitizedPath`
-/// leverages Rust's type system to ensure that all paths entering Zed are always "sanitized" by removing the `\\\\?\\` prefix.
-/// On non-Windows operating systems, this struct is effectively a no-op.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct SanitizedPath(pub Arc<Path>);
+/// In memory, this is identical to `Path`. On non-Windows, conversions to this type are no-ops. On
+/// Windows, these conversions sanitize UNC paths by removing the `\\\\?\\` prefix.
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd)]
+#[repr(transparent)]
+pub struct SanitizedPath(Path);
impl SanitizedPath {
- pub fn starts_with(&self, prefix: &SanitizedPath) -> bool {
+ pub fn new<T: AsRef<Path> + ?Sized>(path: &T) -> &Self {
+ #[cfg(not(target_os = "windows"))]
+ return Self::unchecked_new(path.as_ref());
+
+ #[cfg(target_os = "windows")]
+ return Self::unchecked_new(dunce::simplified(path.as_ref()));
+ }
+
+ pub fn unchecked_new<T: AsRef<Path> + ?Sized>(path: &T) -> &Self {
+ // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+ unsafe { mem::transmute::<&Path, &Self>(path.as_ref()) }
+ }
+
+ pub fn from_arc(path: Arc<Path>) -> Arc<Self> {
+ // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+ #[cfg(not(target_os = "windows"))]
+ return unsafe { mem::transmute::<Arc<Path>, Arc<Self>>(path) };
+
+ // TODO: could avoid allocating here if dunce::simplified results in the same path
+ #[cfg(target_os = "windows")]
+ return Self::new(&path).into();
+ }
+
+ pub fn new_arc<T: AsRef<Path> + ?Sized>(path: &T) -> Arc<Self> {
+ Self::new(path).into()
+ }
+
+ pub fn cast_arc(path: Arc<Self>) -> Arc<Path> {
+ // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+ unsafe { mem::transmute::<Arc<Self>, Arc<Path>>(path) }
+ }
+
+ pub fn cast_arc_ref(path: &Arc<Self>) -> &Arc<Path> {
+ // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+ unsafe { mem::transmute::<&Arc<Self>, &Arc<Path>>(path) }
+ }
+
+ pub fn starts_with(&self, prefix: &Self) -> bool {
self.0.starts_with(&prefix.0)
}
- pub fn as_path(&self) -> &Arc<Path> {
+ pub fn as_path(&self) -> &Path {
&self.0
}
- pub fn to_string(&self) -> String {
- self.0.to_string_lossy().to_string()
+ pub fn file_name(&self) -> Option<&std::ffi::OsStr> {
+ self.0.file_name()
+ }
+
+ pub fn extension(&self) -> Option<&std::ffi::OsStr> {
+ self.0.extension()
+ }
+
+ pub fn join<P: AsRef<Path>>(&self, path: P) -> PathBuf {
+ self.0.join(path)
+ }
+
+ pub fn parent(&self) -> Option<&Self> {
+ self.0.parent().map(Self::unchecked_new)
+ }
+
+ pub fn strip_prefix(&self, base: &Self) -> Result<&Path, StripPrefixError> {
+ self.0.strip_prefix(base.as_path())
+ }
+
+ pub fn to_str(&self) -> Option<&str> {
+ self.0.to_str()
+ }
+
+ pub fn to_path_buf(&self) -> PathBuf {
+ self.0.to_path_buf()
}
pub fn to_glob_string(&self) -> String {
@@ -127,43 +190,41 @@ impl SanitizedPath {
self.0.to_string_lossy().to_string()
}
}
+}
- pub fn join(&self, path: &Self) -> Self {
- self.0.join(&path.0).into()
- }
-
- pub fn strip_prefix(&self, base: &Self) -> Result<&Path, StripPrefixError> {
- self.0.strip_prefix(base.as_path())
+impl std::fmt::Debug for SanitizedPath {
+ fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Debug::fmt(&self.0, formatter)
}
}
-impl From<SanitizedPath> for Arc<Path> {
- fn from(sanitized_path: SanitizedPath) -> Self {
- sanitized_path.0
+impl Display for SanitizedPath {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0.display())
}
}
-impl From<SanitizedPath> for PathBuf {
- fn from(sanitized_path: SanitizedPath) -> Self {
- sanitized_path.0.as_ref().into()
+impl From<&SanitizedPath> for Arc<SanitizedPath> {
+ fn from(sanitized_path: &SanitizedPath) -> Self {
+ let path: Arc<Path> = sanitized_path.0.into();
+ // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+ unsafe { mem::transmute(path) }
}
}
-impl<T: AsRef<Path>> From<T> for SanitizedPath {
- #[cfg(not(target_os = "windows"))]
- fn from(path: T) -> Self {
- let path = path.as_ref();
- SanitizedPath(path.into())
+impl From<&SanitizedPath> for PathBuf {
+ fn from(sanitized_path: &SanitizedPath) -> Self {
+ sanitized_path.as_path().into()
}
+}
- #[cfg(target_os = "windows")]
- fn from(path: T) -> Self {
- let path = path.as_ref();
- SanitizedPath(dunce::simplified(path).into())
+impl AsRef<Path> for SanitizedPath {
+ fn as_ref(&self) -> &Path {
+ &self.0
}
}
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathStyle {
Posix,
Windows,
@@ -220,12 +281,8 @@ impl RemotePathBuf {
Self::new(path_buf, style)
}
- pub fn to_string(&self) -> String {
- self.string.clone()
- }
-
#[cfg(target_os = "windows")]
- pub fn to_proto(self) -> String {
+ pub fn to_proto(&self) -> String {
match self.path_style() {
PathStyle::Posix => self.to_string(),
PathStyle::Windows => self.inner.to_string_lossy().replace('\\', "/"),
@@ -233,7 +290,7 @@ impl RemotePathBuf {
}
#[cfg(not(target_os = "windows"))]
- pub fn to_proto(self) -> String {
+ pub fn to_proto(&self) -> String {
match self.path_style() {
PathStyle::Posix => self.inner.to_string_lossy().to_string(),
PathStyle::Windows => self.to_string(),
@@ -255,6 +312,12 @@ impl RemotePathBuf {
}
}
+impl Display for RemotePathBuf {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.string)
+ }
+}
+
/// A delimiter to use in `path_query:row_number:column_number` strings parsing.
pub const FILE_ROW_COLUMN_DELIMITER: char = ':';
@@ -1190,14 +1253,14 @@ mod tests {
#[cfg(target_os = "windows")]
fn test_sanitized_path() {
let path = Path::new("C:\\Users\\someone\\test_file.rs");
- let sanitized_path = SanitizedPath::from(path);
+ let sanitized_path = SanitizedPath::new(path);
assert_eq!(
sanitized_path.to_string(),
"C:\\Users\\someone\\test_file.rs"
);
let path = Path::new("\\\\?\\C:\\Users\\someone\\test_file.rs");
- let sanitized_path = SanitizedPath::from(path);
+ let sanitized_path = SanitizedPath::new(path);
assert_eq!(
sanitized_path.to_string(),
"C:\\Users\\someone\\test_file.rs"
@@ -7,7 +7,7 @@ const DEFS_PATH: &str = "#/$defs/";
///
/// This asserts that JsonSchema::schema_name() + "2" does not exist because this indicates that
/// there are multiple types that use this name, and unfortunately schemars APIs do not support
-/// resolving this ambiguity - see https://github.com/GREsau/schemars/issues/449
+/// resolving this ambiguity - see <https://github.com/GREsau/schemars/issues/449>
///
/// This takes a closure for `schema` because some settings types are not available on the remote
/// server, and so will crash when attempting to access e.g. GlobalThemeRegistry.
@@ -256,6 +256,9 @@ fn load_shell_from_passwd() -> Result<()> {
&mut result,
)
};
+ anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid);
+
+ // SAFETY: If `getpwuid_r` doesn't error, we have the entry here.
let entry = unsafe { pwd.assume_init() };
anyhow::ensure!(
@@ -264,7 +267,6 @@ fn load_shell_from_passwd() -> Result<()> {
uid,
status
);
- anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid);
anyhow::ensure!(
entry.pw_uid == uid,
"passwd entry has different uid ({}) than getuid ({}) returned",
@@ -329,7 +331,7 @@ pub fn load_login_shell_environment() -> Result<()> {
/// Configures the process to start a new session, to prevent interactive shells from taking control
/// of the terminal.
///
-/// For more details: https://registerspill.thorstenball.com/p/how-to-lose-control-of-your-shell
+/// For more details: <https://registerspill.thorstenball.com/p/how-to-lose-control-of-your-shell>
pub fn set_pre_exec_to_start_new_session(
command: &mut std::process::Command,
) -> &mut std::process::Command {
@@ -815,7 +817,8 @@ pub fn defer<F: FnOnce()>(f: F) -> Deferred<F> {
#[cfg(any(test, feature = "test-support"))]
mod rng {
- use rand::{Rng, seq::SliceRandom};
+ use rand::prelude::*;
+
pub struct RandomCharIter<T: Rng> {
rng: T,
simple_text: bool,
@@ -840,18 +843,18 @@ mod rng {
fn next(&mut self) -> Option<Self::Item> {
if self.simple_text {
- return if self.rng.gen_range(0..100) < 5 {
+ return if self.rng.random_range(0..100) < 5 {
Some('\n')
} else {
- Some(self.rng.gen_range(b'a'..b'z' + 1).into())
+ Some(self.rng.random_range(b'a'..b'z' + 1).into())
};
}
- match self.rng.gen_range(0..100) {
+ match self.rng.random_range(0..100) {
// whitespace
0..=19 => [' ', '\n', '\r', '\t'].choose(&mut self.rng).copied(),
// two-byte greek letters
- 20..=32 => char::from_u32(self.rng.gen_range(('α' as u32)..('ω' as u32 + 1))),
+ 20..=32 => char::from_u32(self.rng.random_range(('α' as u32)..('ω' as u32 + 1))),
// // three-byte characters
33..=45 => ['✋', '✅', '❌', '❎', '⭐']
.choose(&mut self.rng)
@@ -859,7 +862,7 @@ mod rng {
// // four-byte characters
46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.rng).copied(),
// ascii letters
- _ => Some(self.rng.gen_range(b'a'..b'z' + 1).into()),
+ _ => Some(self.rng.random_range(b'a'..b'z' + 1).into()),
}
}
}
@@ -1057,6 +1060,18 @@ pub fn get_system_shell() -> String {
}
}
+pub fn get_default_system_shell() -> String {
+ #[cfg(target_os = "windows")]
+ {
+ get_windows_system_shell()
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ {
+ "/bin/sh".to_string()
+ }
+}
+
#[derive(Debug)]
pub enum ConnectionResult<O> {
Timeout,
@@ -1265,8 +1265,8 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"),
VimCommand::str(("S", "explore"), "project_panel::ToggleFocus"),
VimCommand::str(("Ve", "xplore"), "project_panel::ToggleFocus"),
- VimCommand::str(("te", "rm"), "terminal_panel::ToggleFocus"),
- VimCommand::str(("T", "erm"), "terminal_panel::ToggleFocus"),
+ VimCommand::str(("te", "rm"), "terminal_panel::Toggle"),
+ VimCommand::str(("T", "erm"), "terminal_panel::Toggle"),
VimCommand::str(("C", "ollab"), "collab_panel::ToggleFocus"),
VimCommand::str(("Ch", "at"), "chat_panel::ToggleFocus"),
VimCommand::str(("No", "tifications"), "notification_panel::ToggleFocus"),
@@ -1408,11 +1408,7 @@ pub fn command_interceptor(mut input: &str, cx: &App) -> Vec<CommandInterceptRes
start: Position::Line { row: 0, offset: 0 },
end: Some(Position::LastLine { offset: 0 }),
});
- if let Some(action) = OnMatchingLines::parse(query, invert, range, cx) {
- Some(action.boxed_clone())
- } else {
- None
- }
+ OnMatchingLines::parse(query, invert, range, cx).map(|action| action.boxed_clone())
} else if query.contains('!') {
ShellExec::parse(query, range.clone())
} else {
@@ -1496,7 +1492,7 @@ impl OnMatchingLines {
let mut search = String::new();
let mut escaped = false;
- while let Some(c) = chars.next() {
+ for c in chars.by_ref() {
if escaped {
escaped = false;
// unescape escaped parens
@@ -1648,7 +1644,7 @@ impl OnMatchingLines {
});
window.dispatch_action(action, cx);
cx.defer_in(window, move |editor, window, cx| {
- let newest = editor.selections.newest::<Point>(cx).clone();
+ let newest = editor.selections.newest::<Point>(cx);
editor.change_selections(
SelectionEffects::no_scroll(),
window,
@@ -1928,7 +1924,9 @@ impl ShellExec {
let Some(range) = input_range else { return };
- let mut process = project.read(cx).exec_in_shell(command, cx);
+ let Some(mut process) = project.read(cx).exec_in_shell(command, cx).log_err() else {
+ return;
+ };
process.stdout(Stdio::piped());
process.stderr(Stdio::piped());
@@ -89,7 +89,7 @@ impl Vim {
return;
};
- if prefix.len() > 0 {
+ if !prefix.is_empty() {
self.handle_literal_input(prefix, "", window, cx);
} else {
self.pop_operator(window, cx);
@@ -1,8 +1,14 @@
+mod boundary;
+mod object;
+mod select;
+
use editor::display_map::DisplaySnapshot;
-use editor::{DisplayPoint, Editor, SelectionEffects, ToOffset, ToPoint, movement};
+use editor::{
+ DisplayPoint, Editor, HideMouseCursorOrigin, SelectionEffects, ToOffset, ToPoint, movement,
+};
use gpui::{Action, actions};
use gpui::{Context, Window};
-use language::{CharClassifier, CharKind};
+use language::{CharClassifier, CharKind, Point};
use text::{Bias, SelectionGoal};
use crate::motion;
@@ -23,14 +29,20 @@ actions!(
HelixInsert,
/// Appends at the end of the selection.
HelixAppend,
+ /// Goes to the location of the last modification.
+ HelixGotoLastModification,
+ /// Select entire line or multiple lines, extending downwards.
+ HelixSelectLine,
]
);
pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, Vim::helix_normal_after);
+ Vim::action(editor, cx, Vim::helix_select_lines);
Vim::action(editor, cx, Vim::helix_insert);
Vim::action(editor, cx, Vim::helix_append);
Vim::action(editor, cx, Vim::helix_yank);
+ Vim::action(editor, cx, Vim::helix_goto_last_modification);
}
impl Vim {
@@ -201,10 +213,7 @@ impl Vim {
let right_kind = classifier.kind_with(right, ignore_punctuation);
let at_newline = (left == '\n') ^ (right == '\n');
- let found = (left_kind != right_kind && right_kind != CharKind::Whitespace)
- || at_newline;
-
- found
+ (left_kind != right_kind && right_kind != CharKind::Whitespace) || at_newline
})
}
Motion::NextWordEnd { ignore_punctuation } => {
@@ -213,10 +222,7 @@ impl Vim {
let right_kind = classifier.kind_with(right, ignore_punctuation);
let at_newline = (left == '\n') ^ (right == '\n');
- let found = (left_kind != right_kind && left_kind != CharKind::Whitespace)
- || at_newline;
-
- found
+ (left_kind != right_kind && left_kind != CharKind::Whitespace) || at_newline
})
}
Motion::PreviousWordStart { ignore_punctuation } => {
@@ -225,10 +231,7 @@ impl Vim {
let right_kind = classifier.kind_with(right, ignore_punctuation);
let at_newline = (left == '\n') ^ (right == '\n');
- let found = (left_kind != right_kind && left_kind != CharKind::Whitespace)
- || at_newline;
-
- found
+ (left_kind != right_kind && left_kind != CharKind::Whitespace) || at_newline
})
}
Motion::PreviousWordEnd { ignore_punctuation } => {
@@ -237,10 +240,7 @@ impl Vim {
let right_kind = classifier.kind_with(right, ignore_punctuation);
let at_newline = (left == '\n') ^ (right == '\n');
- let found = (left_kind != right_kind && right_kind != CharKind::Whitespace)
- || at_newline;
-
- found
+ (left_kind != right_kind && right_kind != CharKind::Whitespace) || at_newline
})
}
Motion::FindForward {
@@ -442,6 +442,56 @@ impl Vim {
});
self.switch_mode(Mode::HelixNormal, true, window, cx);
}
+
+ pub fn helix_goto_last_modification(
+ &mut self,
+ _: &HelixGotoLastModification,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.jump(".".into(), false, false, window, cx);
+ }
+
+ pub fn helix_select_lines(
+ &mut self,
+ _: &HelixSelectLine,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let count = Vim::take_count(cx).unwrap_or(1);
+ self.update_editor(cx, |_, editor, cx| {
+ editor.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx);
+ let display_map = editor.display_map.update(cx, |map, cx| map.snapshot(cx));
+ let mut selections = editor.selections.all::<Point>(cx);
+ let max_point = display_map.buffer_snapshot.max_point();
+ let buffer_snapshot = &display_map.buffer_snapshot;
+
+ for selection in &mut selections {
+ // Start always goes to column 0 of the first selected line
+ let start_row = selection.start.row;
+ let current_end_row = selection.end.row;
+
+ // Check if cursor is on empty line by checking first character
+ let line_start_offset = buffer_snapshot.point_to_offset(Point::new(start_row, 0));
+ let first_char = buffer_snapshot.chars_at(line_start_offset).next();
+ let extra_line = if first_char == Some('\n') { 1 } else { 0 };
+
+ let end_row = current_end_row + count as u32 + extra_line;
+
+ selection.start = Point::new(start_row, 0);
+ selection.end = if end_row > max_point.row {
+ max_point
+ } else {
+ Point::new(end_row, 0)
+ };
+ selection.reversed = false;
+ }
+
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.select(selections);
+ });
+ });
+ }
}
#[cfg(test)]
@@ -453,6 +503,7 @@ mod test {
#[gpui::test]
async fn test_word_motions(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
// «
// ˇ
// »
@@ -514,6 +565,7 @@ mod test {
#[gpui::test]
async fn test_delete(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
// test delete a selection
cx.set_state(
@@ -594,6 +646,7 @@ mod test {
#[gpui::test]
async fn test_f_and_t(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
cx.set_state(
indoc! {"
@@ -647,6 +700,7 @@ mod test {
#[gpui::test]
async fn test_newline_char(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
cx.set_state("aa«\nˇ»bb cc", Mode::HelixNormal);
@@ -664,6 +718,7 @@ mod test {
#[gpui::test]
async fn test_insert_selected(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
cx.set_state(
indoc! {"
«The ˇ»quick brown
@@ -686,6 +741,7 @@ mod test {
#[gpui::test]
async fn test_append(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
// test from the end of the selection
cx.set_state(
indoc! {"
@@ -728,6 +784,7 @@ mod test {
#[gpui::test]
async fn test_replace(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
// No selection (single character)
cx.set_state("ˇaa", Mode::HelixNormal);
@@ -775,4 +832,210 @@ mod test {
cx.shared_clipboard().assert_eq("worl");
cx.assert_state("hello «worlˇ»d", Mode::HelixNormal);
}
+ #[gpui::test]
+ async fn test_shift_r_paste(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // First copy some text to clipboard
+ cx.set_state("«hello worldˇ»", Mode::HelixNormal);
+ cx.simulate_keystrokes("y");
+
+ // Test paste with shift-r on single cursor
+ cx.set_state("foo ˇbar", Mode::HelixNormal);
+ cx.simulate_keystrokes("shift-r");
+
+ cx.assert_state("foo hello worldˇbar", Mode::HelixNormal);
+
+ // Test paste with shift-r on selection
+ cx.set_state("foo «barˇ» baz", Mode::HelixNormal);
+ cx.simulate_keystrokes("shift-r");
+
+ cx.assert_state("foo hello worldˇ baz", Mode::HelixNormal);
+ }
+
+ #[gpui::test]
+ async fn test_insert_mode_stickiness(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // Make a modification at a specific location
+ cx.set_state("ˇhello", Mode::HelixNormal);
+ assert_eq!(cx.mode(), Mode::HelixNormal);
+ cx.simulate_keystrokes("i");
+ assert_eq!(cx.mode(), Mode::Insert);
+ cx.simulate_keystrokes("escape");
+ assert_eq!(cx.mode(), Mode::HelixNormal);
+ }
+
+ #[gpui::test]
+ async fn test_goto_last_modification(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // Make a modification at a specific location
+ cx.set_state("line one\nline ˇtwo\nline three", Mode::HelixNormal);
+ cx.assert_state("line one\nline ˇtwo\nline three", Mode::HelixNormal);
+ cx.simulate_keystrokes("i");
+ cx.simulate_keystrokes("escape");
+ cx.simulate_keystrokes("i");
+ cx.simulate_keystrokes("m o d i f i e d space");
+ cx.simulate_keystrokes("escape");
+
+ // TODO: this fails, because state is no longer helix
+ cx.assert_state(
+ "line one\nline modified ˇtwo\nline three",
+ Mode::HelixNormal,
+ );
+
+ // Move cursor away from the modification
+ cx.simulate_keystrokes("up");
+
+ // Use "g ." to go back to last modification
+ cx.simulate_keystrokes("g .");
+
+ // Verify we're back at the modification location and still in HelixNormal mode
+ cx.assert_state(
+ "line one\nline modifiedˇ two\nline three",
+ Mode::HelixNormal,
+ );
+ }
+
+ #[gpui::test]
+ async fn test_helix_select_lines(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.set_state(
+ "line one\nline ˇtwo\nline three\nline four",
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("2 x");
+ cx.assert_state(
+ "line one\n«line two\nline three\nˇ»line four",
+ Mode::HelixNormal,
+ );
+
+ // Test extending existing line selection
+ cx.set_state(
+ indoc! {"
+ li«ˇne one
+ li»ne two
+ line three
+ line four"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ «line one
+ line two
+ ˇ»line three
+ line four"},
+ Mode::HelixNormal,
+ );
+
+    // Pressing x on an empty line selects the next line (because helix considers the cursor a selection)
+ cx.set_state(
+ indoc! {"
+ line one
+ ˇ
+ line three
+ line four"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ line one
+ «
+ line three
+ ˇ»line four"},
+ Mode::HelixNormal,
+ );
+
+ // Empty line with count selects extra + count lines
+ cx.set_state(
+ indoc! {"
+ line one
+ ˇ
+ line three
+ line four
+ line five"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("2 x");
+ cx.assert_state(
+ indoc! {"
+ line one
+ «
+ line three
+ line four
+ ˇ»line five"},
+ Mode::HelixNormal,
+ );
+
+ // Compare empty vs non-empty line behavior
+ cx.set_state(
+ indoc! {"
+ ˇnon-empty line
+ line two
+ line three"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ «non-empty line
+ ˇ»line two
+ line three"},
+ Mode::HelixNormal,
+ );
+
+ // Same test but with empty line - should select one extra
+ cx.set_state(
+ indoc! {"
+ ˇ
+ line two
+ line three"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ «
+ line two
+ ˇ»line three"},
+ Mode::HelixNormal,
+ );
+
+ // Test selecting multiple lines with count
+ cx.set_state(
+ indoc! {"
+ ˇline one
+ line two
+ line threeˇ
+ line four
+ line five"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ «line one
+ ˇ»line two
+ «line three
+ ˇ»line four
+ line five"},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("x");
+ cx.assert_state(
+ indoc! {"
+ «line one
+ line two
+ line three
+ line four
+ ˇ»line five"},
+ Mode::HelixNormal,
+ );
+ }
}
@@ -0,0 +1,740 @@
+use std::{
+ cmp::Ordering,
+ ops::{Deref, DerefMut, Range},
+};
+
+use editor::{
+ DisplayPoint,
+ display_map::{DisplaySnapshot, ToDisplayPoint},
+ movement,
+};
+use language::{CharClassifier, CharKind};
+use text::Bias;
+
+use crate::helix::object::HelixTextObject;
+
+/// Text objects (after helix definition) that can easily be
+/// found by reading a buffer and comparing two neighboring chars
+/// until a start / end is found
+trait BoundedObject {
+ /// The next start since `from` (inclusive).
+ /// If outer is true it is the start of "a" object (m a) rather than "inner" object (m i).
+ fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
+ /// The next end since `from` (inclusive).
+ /// If outer is true it is the end of "a" object (m a) rather than "inner" object (m i).
+ fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
+ /// The previous start since `from` (inclusive).
+ /// If outer is true it is the start of "a" object (m a) rather than "inner" object (m i).
+ fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
+ /// The previous end since `from` (inclusive).
+ /// If outer is true it is the end of "a" object (m a) rather than "inner" object (m i).
+ fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
+
+ /// Whether the range inside the object can be zero characters wide.
+ /// If so, the trait assumes that these ranges can't be directly adjacent to each other.
+ fn inner_range_can_be_zero_width(&self) -> bool;
+ /// Whether the "ma" can exceed the "mi" range on both sides at the same time
+ fn surround_on_both_sides(&self) -> bool;
+ /// Whether the outer range of an object could overlap with the outer range of the neighboring
+ /// object. If so, they can't be nested.
+ fn ambiguous_outer(&self) -> bool;
+
+ fn can_be_zero_width(&self, around: bool) -> bool {
+ if around {
+ false
+ } else {
+ self.inner_range_can_be_zero_width()
+ }
+ }
+
+ /// Switches from an "mi" range to an "ma" one.
+ /// Assumes the inner range is valid.
+ fn around(&self, map: &DisplaySnapshot, inner_range: Range<Offset>) -> Range<Offset> {
+ if self.surround_on_both_sides() {
+ let start = self
+ .previous_start(map, inner_range.start, true)
+ .unwrap_or(inner_range.start);
+ let end = self
+ .next_end(map, inner_range.end, true)
+ .unwrap_or(inner_range.end);
+
+ return start..end;
+ }
+
+ let mut start = inner_range.start;
+ let end = self
+ .next_end(map, inner_range.end, true)
+ .unwrap_or(inner_range.end);
+ if end == inner_range.end {
+ start = self
+ .previous_start(map, inner_range.start, true)
+ .unwrap_or(inner_range.start)
+ }
+
+ start..end
+ }
+ /// Switches from an "ma" range to an "mi" one.
+ /// Assumes the inner range is valid.
+ fn inside(&self, map: &DisplaySnapshot, outer_range: Range<Offset>) -> Range<Offset> {
+ let inner_start = self
+ .next_start(map, outer_range.start, false)
+ .unwrap_or_else(|| {
+ log::warn!("The motion might not have found the text object correctly");
+ outer_range.start
+ });
+ let inner_end = self
+ .previous_end(map, outer_range.end, false)
+ .unwrap_or_else(|| {
+ log::warn!("The motion might not have found the text object correctly");
+ outer_range.end
+ });
+ inner_start..inner_end
+ }
+
+ /// The next end since `start` (inclusive) on the same nesting level.
+ fn close_at_end(&self, start: Offset, map: &DisplaySnapshot, outer: bool) -> Option<Offset> {
+ let mut end_search_start = if self.can_be_zero_width(outer) {
+ start
+ } else {
+ start.next(map)?
+ };
+ let mut start_search_start = start.next(map)?;
+
+ loop {
+ let next_end = self.next_end(map, end_search_start, outer)?;
+ let maybe_next_start = self.next_start(map, start_search_start, outer);
+ if let Some(next_start) = maybe_next_start
+ && (*next_start < *next_end
+ || *next_start == *next_end && self.can_be_zero_width(outer))
+ && !self.ambiguous_outer()
+ {
+ let closing = self.close_at_end(next_start, map, outer)?;
+ end_search_start = closing.next(map)?;
+ start_search_start = if self.can_be_zero_width(outer) {
+ closing.next(map)?
+ } else {
+ closing
+ };
+ } else {
+ return Some(next_end);
+ }
+ }
+ }
+ /// The previous start since `end` (inclusive) on the same nesting level.
+ fn close_at_start(&self, end: Offset, map: &DisplaySnapshot, outer: bool) -> Option<Offset> {
+ let mut start_search_end = if self.can_be_zero_width(outer) {
+ end
+ } else {
+ end.previous(map)?
+ };
+ let mut end_search_end = end.previous(map)?;
+
+ loop {
+ let previous_start = self.previous_start(map, start_search_end, outer)?;
+ let maybe_previous_end = self.previous_end(map, end_search_end, outer);
+ if let Some(previous_end) = maybe_previous_end
+ && (*previous_end > *previous_start
+ || *previous_end == *previous_start && self.can_be_zero_width(outer))
+ && !self.ambiguous_outer()
+ {
+ let closing = self.close_at_start(previous_end, map, outer)?;
+ start_search_end = closing.previous(map)?;
+ end_search_end = if self.can_be_zero_width(outer) {
+ closing.previous(map)?
+ } else {
+ closing
+ };
+ } else {
+ return Some(previous_start);
+ }
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+struct Offset(usize);
+impl Deref for Offset {
+ type Target = usize;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+impl DerefMut for Offset {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+impl Offset {
+ fn next(self, map: &DisplaySnapshot) -> Option<Self> {
+ let next = Self(map.buffer_snapshot.clip_offset(*self + 1, Bias::Right));
+ (*next > *self).then(|| next)
+ }
+ fn previous(self, map: &DisplaySnapshot) -> Option<Self> {
+ if *self == 0 {
+ return None;
+ }
+ Some(Self(map.buffer_snapshot.clip_offset(*self - 1, Bias::Left)))
+ }
+ fn range(
+ start: (DisplayPoint, Bias),
+ end: (DisplayPoint, Bias),
+ map: &DisplaySnapshot,
+ ) -> Range<Self> {
+ Self(start.0.to_offset(map, start.1))..Self(end.0.to_offset(map, end.1))
+ }
+}
+
+impl<B: BoundedObject> HelixTextObject for B {
+ fn range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>> {
+ let relative_to = Offset::range(
+ (relative_to.start, Bias::Left),
+ (relative_to.end, Bias::Left),
+ map,
+ );
+
+ relative_range(self, around, map, |find_outer| {
+ let search_start = if self.can_be_zero_width(find_outer) {
+ relative_to.end
+ } else {
+                // If the objects can be directly next to each other, an object end at the
+                // cursor (relative_to) end would not count for close_at_end, so the search
+                // needs to start one character to the left.
+ relative_to.end.previous(map)?
+ };
+ let max_end = self.close_at_end(search_start, map, find_outer)?;
+ let min_start = self.close_at_start(max_end, map, find_outer)?;
+
+ (*min_start <= *relative_to.start).then(|| min_start..max_end)
+ })
+ }
+
+ fn next_range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>> {
+ let relative_to = Offset::range(
+ (relative_to.start, Bias::Left),
+ (relative_to.end, Bias::Left),
+ map,
+ );
+
+ relative_range(self, around, map, |find_outer| {
+ let min_start = self.next_start(map, relative_to.end, find_outer)?;
+ let max_end = self.close_at_end(min_start, map, find_outer)?;
+
+ Some(min_start..max_end)
+ })
+ }
+
+ fn previous_range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>> {
+ let relative_to = Offset::range(
+ (relative_to.start, Bias::Left),
+ (relative_to.end, Bias::Left),
+ map,
+ );
+
+ relative_range(self, around, map, |find_outer| {
+ let max_end = self.previous_end(map, relative_to.start, find_outer)?;
+ let min_start = self.close_at_start(max_end, map, find_outer)?;
+
+ Some(min_start..max_end)
+ })
+ }
+}
+
+fn relative_range<B: BoundedObject>(
+ object: &B,
+ outer: bool,
+ map: &DisplaySnapshot,
+ find_range: impl Fn(bool) -> Option<Range<Offset>>,
+) -> Option<Range<DisplayPoint>> {
+ // The cursor could be inside the outer range, but not the inner range.
+ // Whether that should count as found.
+ let find_outer = object.surround_on_both_sides() && !object.ambiguous_outer();
+ let range = find_range(find_outer)?;
+ let min_start = range.start;
+ let max_end = range.end;
+
+ let wanted_range = if outer && !find_outer {
+ // max_end is not yet the outer end
+ object.around(map, min_start..max_end)
+ } else if !outer && find_outer {
+ // max_end is the outer end, but the final result should have the inner end
+ object.inside(map, min_start..max_end)
+ } else {
+ min_start..max_end
+ };
+
+ let start = wanted_range.start.clone().to_display_point(map);
+ let end = wanted_range.end.clone().to_display_point(map);
+
+ Some(start..end)
+}
+
+/// A textobject whose boundaries can easily be found between two chars
+pub enum ImmediateBoundary {
+ Word { ignore_punctuation: bool },
+ Subword { ignore_punctuation: bool },
+ AngleBrackets,
+ BackQuotes,
+ CurlyBrackets,
+ DoubleQuotes,
+ Parentheses,
+ SingleQuotes,
+ SquareBrackets,
+ VerticalBars,
+}
+
+/// A textobject whose start and end can be found from an easy-to-find
+/// boundary between two chars by following a simple path from there
+pub enum FuzzyBoundary {
+ Sentence,
+ Paragraph,
+}
+
+impl ImmediateBoundary {
+ fn is_inner_start(&self, left: char, right: char, classifier: CharClassifier) -> bool {
+ match self {
+ Self::Word { ignore_punctuation } => {
+ let classifier = classifier.ignore_punctuation(*ignore_punctuation);
+ is_word_start(left, right, &classifier)
+ || (is_buffer_start(left) && classifier.kind(right) != CharKind::Whitespace)
+ }
+ Self::Subword { ignore_punctuation } => {
+ let classifier = classifier.ignore_punctuation(*ignore_punctuation);
+ movement::is_subword_start(left, right, &classifier)
+ || (is_buffer_start(left) && classifier.kind(right) != CharKind::Whitespace)
+ }
+ Self::AngleBrackets => left == '<',
+ Self::BackQuotes => left == '`',
+ Self::CurlyBrackets => left == '{',
+ Self::DoubleQuotes => left == '"',
+ Self::Parentheses => left == '(',
+ Self::SingleQuotes => left == '\'',
+ Self::SquareBrackets => left == '[',
+ Self::VerticalBars => left == '|',
+ }
+ }
+ fn is_inner_end(&self, left: char, right: char, classifier: CharClassifier) -> bool {
+ match self {
+ Self::Word { ignore_punctuation } => {
+ let classifier = classifier.ignore_punctuation(*ignore_punctuation);
+ is_word_end(left, right, &classifier)
+ || (is_buffer_end(right) && classifier.kind(left) != CharKind::Whitespace)
+ }
+ Self::Subword { ignore_punctuation } => {
+ let classifier = classifier.ignore_punctuation(*ignore_punctuation);
+ movement::is_subword_start(left, right, &classifier)
+ || (is_buffer_end(right) && classifier.kind(left) != CharKind::Whitespace)
+ }
+ Self::AngleBrackets => right == '>',
+ Self::BackQuotes => right == '`',
+ Self::CurlyBrackets => right == '}',
+ Self::DoubleQuotes => right == '"',
+ Self::Parentheses => right == ')',
+ Self::SingleQuotes => right == '\'',
+ Self::SquareBrackets => right == ']',
+ Self::VerticalBars => right == '|',
+ }
+ }
+ fn is_outer_start(&self, left: char, right: char, classifier: CharClassifier) -> bool {
+ match self {
+ word @ Self::Word { .. } => word.is_inner_end(left, right, classifier) || left == '\n',
+ subword @ Self::Subword { .. } => {
+ subword.is_inner_end(left, right, classifier) || left == '\n'
+ }
+ Self::AngleBrackets => right == '<',
+ Self::BackQuotes => right == '`',
+ Self::CurlyBrackets => right == '{',
+ Self::DoubleQuotes => right == '"',
+ Self::Parentheses => right == '(',
+ Self::SingleQuotes => right == '\'',
+ Self::SquareBrackets => right == '[',
+ Self::VerticalBars => right == '|',
+ }
+ }
+ fn is_outer_end(&self, left: char, right: char, classifier: CharClassifier) -> bool {
+ match self {
+ word @ Self::Word { .. } => {
+ word.is_inner_start(left, right, classifier) || right == '\n'
+ }
+ subword @ Self::Subword { .. } => {
+ subword.is_inner_start(left, right, classifier) || right == '\n'
+ }
+ Self::AngleBrackets => left == '>',
+ Self::BackQuotes => left == '`',
+ Self::CurlyBrackets => left == '}',
+ Self::DoubleQuotes => left == '"',
+ Self::Parentheses => left == ')',
+ Self::SingleQuotes => left == '\'',
+ Self::SquareBrackets => left == ']',
+ Self::VerticalBars => left == '|',
+ }
+ }
+}
+
+impl BoundedObject for ImmediateBoundary {
+ fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ try_find_boundary(map, from, |left, right| {
+ let classifier = map.buffer_snapshot.char_classifier_at(*from);
+ if outer {
+ self.is_outer_start(left, right, classifier)
+ } else {
+ self.is_inner_start(left, right, classifier)
+ }
+ })
+ }
+ fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ try_find_boundary(map, from, |left, right| {
+ let classifier = map.buffer_snapshot.char_classifier_at(*from);
+ if outer {
+ self.is_outer_end(left, right, classifier)
+ } else {
+ self.is_inner_end(left, right, classifier)
+ }
+ })
+ }
+ fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ try_find_preceding_boundary(map, from, |left, right| {
+ let classifier = map.buffer_snapshot.char_classifier_at(*from);
+ if outer {
+ self.is_outer_start(left, right, classifier)
+ } else {
+ self.is_inner_start(left, right, classifier)
+ }
+ })
+ }
+ fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ try_find_preceding_boundary(map, from, |left, right| {
+ let classifier = map.buffer_snapshot.char_classifier_at(*from);
+ if outer {
+ self.is_outer_end(left, right, classifier)
+ } else {
+ self.is_inner_end(left, right, classifier)
+ }
+ })
+ }
+ fn inner_range_can_be_zero_width(&self) -> bool {
+ match self {
+ Self::Subword { .. } | Self::Word { .. } => false,
+ _ => true,
+ }
+ }
+ fn surround_on_both_sides(&self) -> bool {
+ match self {
+ Self::Subword { .. } | Self::Word { .. } => false,
+ _ => true,
+ }
+ }
+ fn ambiguous_outer(&self) -> bool {
+ match self {
+ Self::BackQuotes
+ | Self::DoubleQuotes
+ | Self::SingleQuotes
+ | Self::VerticalBars
+ | Self::Subword { .. }
+ | Self::Word { .. } => true,
+ _ => false,
+ }
+ }
+}
+
+impl FuzzyBoundary {
+ /// When between two chars that form an easy-to-find identifier boundary,
+ /// what's the way to get to the actual start of the object, if any
+ fn is_near_potential_inner_start<'a>(
+ &self,
+ left: char,
+ right: char,
+ classifier: &CharClassifier,
+ ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
+ if is_buffer_start(left) {
+ return Some(Box::new(|identifier, _| Some(identifier)));
+ }
+ match self {
+ Self::Paragraph => {
+ if left != '\n' || right != '\n' {
+ return None;
+ }
+ Some(Box::new(|identifier, map| {
+ try_find_boundary(map, identifier, |left, right| left == '\n' && right != '\n')
+ }))
+ }
+ Self::Sentence => {
+ if let Some(find_paragraph_start) =
+ Self::Paragraph.is_near_potential_inner_start(left, right, classifier)
+ {
+ return Some(find_paragraph_start);
+ } else if !is_sentence_end(left, right, classifier) {
+ return None;
+ }
+ Some(Box::new(|identifier, map| {
+ let word = ImmediateBoundary::Word {
+ ignore_punctuation: false,
+ };
+ word.next_start(map, identifier, false)
+ }))
+ }
+ }
+ }
+ /// When between two chars that form an easy-to-find identifier boundary,
+ /// what's the way to get to the actual end of the object, if any
+ fn is_near_potential_inner_end<'a>(
+ &self,
+ left: char,
+ right: char,
+ classifier: &CharClassifier,
+ ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
+ if is_buffer_end(right) {
+ return Some(Box::new(|identifier, _| Some(identifier)));
+ }
+ match self {
+ Self::Paragraph => {
+ if left != '\n' || right != '\n' {
+ return None;
+ }
+ Some(Box::new(|identifier, map| {
+ try_find_preceding_boundary(map, identifier, |left, right| {
+ left != '\n' && right == '\n'
+ })
+ }))
+ }
+ Self::Sentence => {
+ if let Some(find_paragraph_end) =
+ Self::Paragraph.is_near_potential_inner_end(left, right, classifier)
+ {
+ return Some(find_paragraph_end);
+ } else if !is_sentence_end(left, right, classifier) {
+ return None;
+ }
+ Some(Box::new(|identifier, _| Some(identifier)))
+ }
+ }
+ }
+ /// When between two chars that form an easy-to-find identifier boundary,
+    /// what's the way to get to the actual outer start of the object, if any
+ fn is_near_potential_outer_start<'a>(
+ &self,
+ left: char,
+ right: char,
+ classifier: &CharClassifier,
+ ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
+ match self {
+ paragraph @ Self::Paragraph => {
+ paragraph.is_near_potential_inner_end(left, right, classifier)
+ }
+ sentence @ Self::Sentence => {
+ sentence.is_near_potential_inner_end(left, right, classifier)
+ }
+ }
+ }
+ /// When between two chars that form an easy-to-find identifier boundary,
+ /// what's the way to get to the actual end of the object, if any
+ fn is_near_potential_outer_end<'a>(
+ &self,
+ left: char,
+ right: char,
+ classifier: &CharClassifier,
+ ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
+ match self {
+ paragraph @ Self::Paragraph => {
+ paragraph.is_near_potential_inner_start(left, right, classifier)
+ }
+ sentence @ Self::Sentence => {
+ sentence.is_near_potential_inner_start(left, right, classifier)
+ }
+ }
+ }
+
+ // The boundary can be on the other side of `from` than the identifier, so the search needs to go both ways.
+ // Also, the distance (and direction) between identifier and boundary could vary, so a few ones need to be
+ // compared, even if one boundary was already found on the right side of `from`.
+ fn to_boundary(
+ &self,
+ map: &DisplaySnapshot,
+ from: Offset,
+ outer: bool,
+ backward: bool,
+ boundary_kind: Boundary,
+ ) -> Option<Offset> {
+ let generate_boundary_data = |left, right, point: Offset| {
+ let classifier = map.buffer_snapshot.char_classifier_at(*from);
+ let reach_boundary = if outer && boundary_kind == Boundary::Start {
+ self.is_near_potential_outer_start(left, right, &classifier)
+ } else if !outer && boundary_kind == Boundary::Start {
+ self.is_near_potential_inner_start(left, right, &classifier)
+ } else if outer && boundary_kind == Boundary::End {
+ self.is_near_potential_outer_end(left, right, &classifier)
+ } else {
+ self.is_near_potential_inner_end(left, right, &classifier)
+ };
+
+ reach_boundary.map(|reach_start| (point, reach_start))
+ };
+
+ let forwards = try_find_boundary_data(map, from, generate_boundary_data);
+ let backwards = try_find_preceding_boundary_data(map, from, generate_boundary_data);
+ let boundaries = [forwards, backwards]
+ .into_iter()
+ .flatten()
+ .filter_map(|(identifier, reach_boundary)| reach_boundary(identifier, map))
+ .filter(|boundary| match boundary.cmp(&from) {
+ Ordering::Equal => true,
+ Ordering::Less => backward,
+ Ordering::Greater => !backward,
+ });
+ if backward {
+ boundaries.max_by_key(|boundary| **boundary)
+ } else {
+ boundaries.min_by_key(|boundary| **boundary)
+ }
+ }
+}
+
+#[derive(PartialEq)]
+enum Boundary {
+ Start,
+ End,
+}
+
+impl BoundedObject for FuzzyBoundary {
+ fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ self.to_boundary(map, from, outer, false, Boundary::Start)
+ }
+ fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ self.to_boundary(map, from, outer, false, Boundary::End)
+ }
+ fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ self.to_boundary(map, from, outer, true, Boundary::Start)
+ }
+ fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
+ self.to_boundary(map, from, outer, true, Boundary::End)
+ }
+ fn inner_range_can_be_zero_width(&self) -> bool {
+ false
+ }
+ fn surround_on_both_sides(&self) -> bool {
+ false
+ }
+ fn ambiguous_outer(&self) -> bool {
+ false
+ }
+}
+
+/// Returns the first boundary after or at `from` in text direction.
+/// The start and end of the file are the chars `'\0'`.
+fn try_find_boundary(
+ map: &DisplaySnapshot,
+ from: Offset,
+ is_boundary: impl Fn(char, char) -> bool,
+) -> Option<Offset> {
+ let boundary = try_find_boundary_data(map, from, |left, right, point| {
+ if is_boundary(left, right) {
+ Some(point)
+ } else {
+ None
+ }
+ })?;
+ Some(boundary)
+}
+
+/// Returns information (of type `T`) about the first boundary
+/// found at or after `from`, searching in text direction.
+/// The start and end of the file are the chars `'\0'`.
+fn try_find_boundary_data<T>(
+ map: &DisplaySnapshot,
+ mut from: Offset,
+ boundary_information: impl Fn(char, char, Offset) -> Option<T>,
+) -> Option<T> {
+ let mut prev_ch = map
+ .buffer_snapshot
+ .reversed_chars_at(*from)
+ .next()
+ .unwrap_or('\0');
+
+ for ch in map.buffer_snapshot.chars_at(*from).chain(['\0']) {
+ if let Some(boundary_information) = boundary_information(prev_ch, ch, from) {
+ return Some(boundary_information);
+ }
+ *from += ch.len_utf8();
+ prev_ch = ch;
+ }
+
+ None
+}
+
+/// Returns the first boundary at or before `from`, searching against text direction.
+/// The start and end of the file are the chars `'\0'`.
+fn try_find_preceding_boundary(
+ map: &DisplaySnapshot,
+ from: Offset,
+ is_boundary: impl Fn(char, char) -> bool,
+) -> Option<Offset> {
+ let boundary = try_find_preceding_boundary_data(map, from, |left, right, point| {
+ if is_boundary(left, right) {
+ Some(point)
+ } else {
+ None
+ }
+ })?;
+ Some(boundary)
+}
+
+/// Returns information (of type `T`) about the first boundary
+/// found at or before `from`, searching against text direction.
+/// The start and end of the file are the chars `'\0'`.
+fn try_find_preceding_boundary_data<T>(
+    map: &DisplaySnapshot,
+    mut from: Offset,
+    boundary_information: impl Fn(char, char, Offset) -> Option<T>,
+) -> Option<T> {
+    // Seed `prev_ch` with the char at/after `from` so the first pair tested
+    // straddles the starting position; '\0' stands in for the buffer end.
+    let mut prev_ch = map.buffer_snapshot.chars_at(*from).next().unwrap_or('\0');
+
+    for ch in map.buffer_snapshot.reversed_chars_at(*from).chain(['\0']) {
+        if let Some(boundary_information) = boundary_information(ch, prev_ch, from) {
+            return Some(boundary_information);
+        }
+        from.0 = from.0.saturating_sub(ch.len_utf8());
+        prev_ch = ch;
+    }
+
+    None
+}
+
+fn is_buffer_start(left: char) -> bool {
+ left == '\0'
+}
+
+fn is_buffer_end(right: char) -> bool {
+ right == '\0'
+}
+
+fn is_word_start(left: char, right: char, classifier: &CharClassifier) -> bool {
+ classifier.kind(left) != classifier.kind(right)
+ && classifier.kind(right) != CharKind::Whitespace
+}
+
+fn is_word_end(left: char, right: char, classifier: &CharClassifier) -> bool {
+ classifier.kind(left) != classifier.kind(right) && classifier.kind(left) != CharKind::Whitespace
+}
+
+fn is_sentence_end(left: char, right: char, classifier: &CharClassifier) -> bool {
+ const ENDS: [char; 1] = ['.'];
+
+ if classifier.kind(right) != CharKind::Whitespace {
+ return false;
+ }
+ ENDS.into_iter().any(|end| left == end)
+}
@@ -0,0 +1,182 @@
+use std::{
+ error::Error,
+ fmt::{self, Display},
+ ops::Range,
+};
+
+use editor::{DisplayPoint, display_map::DisplaySnapshot, movement};
+use text::Selection;
+
+use crate::{
+ helix::boundary::{FuzzyBoundary, ImmediateBoundary},
+ object::Object as VimObject,
+};
+
+/// A text object from helix or an extra one
+pub trait HelixTextObject {
+ fn range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>>;
+
+ fn next_range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>>;
+
+ fn previous_range(
+ &self,
+ map: &DisplaySnapshot,
+ relative_to: Range<DisplayPoint>,
+ around: bool,
+ ) -> Option<Range<DisplayPoint>>;
+}
+
+impl VimObject {
+ /// Returns the range of the object the cursor is over.
+ /// Follows helix convention.
+ pub fn helix_range(
+ self,
+ map: &DisplaySnapshot,
+ selection: Selection<DisplayPoint>,
+ around: bool,
+ ) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
+ let cursor = cursor_range(&selection, map);
+ if let Some(helix_object) = self.to_helix_object() {
+ Ok(helix_object.range(map, cursor, around))
+ } else {
+ Err(VimToHelixError)
+ }
+ }
+ /// Returns the range of the next object the cursor is not over.
+ /// Follows helix convention.
+ pub fn helix_next_range(
+ self,
+ map: &DisplaySnapshot,
+ selection: Selection<DisplayPoint>,
+ around: bool,
+ ) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
+ let cursor = cursor_range(&selection, map);
+ if let Some(helix_object) = self.to_helix_object() {
+ Ok(helix_object.next_range(map, cursor, around))
+ } else {
+ Err(VimToHelixError)
+ }
+ }
+ /// Returns the range of the previous object the cursor is not over.
+ /// Follows helix convention.
+ pub fn helix_previous_range(
+ self,
+ map: &DisplaySnapshot,
+ selection: Selection<DisplayPoint>,
+ around: bool,
+ ) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
+ let cursor = cursor_range(&selection, map);
+ if let Some(helix_object) = self.to_helix_object() {
+ Ok(helix_object.previous_range(map, cursor, around))
+ } else {
+ Err(VimToHelixError)
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct VimToHelixError;
+impl Display for VimToHelixError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "Not all vim text objects have an implemented helix equivalent"
+ )
+ }
+}
+impl Error for VimToHelixError {}
+
+impl VimObject {
+ fn to_helix_object(self) -> Option<Box<dyn HelixTextObject>> {
+ Some(match self {
+ Self::AngleBrackets => Box::new(ImmediateBoundary::AngleBrackets),
+ Self::BackQuotes => Box::new(ImmediateBoundary::BackQuotes),
+ Self::CurlyBrackets => Box::new(ImmediateBoundary::CurlyBrackets),
+ Self::DoubleQuotes => Box::new(ImmediateBoundary::DoubleQuotes),
+ Self::Paragraph => Box::new(FuzzyBoundary::Paragraph),
+ Self::Parentheses => Box::new(ImmediateBoundary::Parentheses),
+ Self::Quotes => Box::new(ImmediateBoundary::SingleQuotes),
+ Self::Sentence => Box::new(FuzzyBoundary::Sentence),
+ Self::SquareBrackets => Box::new(ImmediateBoundary::SquareBrackets),
+ Self::Subword { ignore_punctuation } => {
+ Box::new(ImmediateBoundary::Subword { ignore_punctuation })
+ }
+ Self::VerticalBars => Box::new(ImmediateBoundary::VerticalBars),
+ Self::Word { ignore_punctuation } => {
+ Box::new(ImmediateBoundary::Word { ignore_punctuation })
+ }
+ _ => return None,
+ })
+ }
+}
+
+/// Returns the one-char-wide range covered by a selection's cursor, collapsed or not.
+pub(crate) fn cursor_range(
+    selection: &Selection<DisplayPoint>,
+    map: &DisplaySnapshot,
+) -> Range<DisplayPoint> {
+    if selection.is_empty() || selection.reversed {
+        selection.head()..movement::right(map, selection.head())
+    } else {
+        movement::left(map, selection.head())..selection.head()
+    }
+}
+
+#[cfg(test)]
+mod test {
+ use db::indoc;
+
+ use crate::{state::Mode, test::VimTestContext};
+
+ #[gpui::test]
+ async fn test_select_word_object(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ let start = indoc! {"
+ The quick brˇowˇnˇ
+ fox «ˇjumps» ov«er
+ the laˇ»zy dogˇ
+
+ "
+ };
+
+ cx.set_state(start, Mode::HelixNormal);
+
+ cx.simulate_keystrokes("m i w");
+
+ cx.assert_state(
+ indoc! {"
+ The quick «brownˇ»
+ fox «jumpsˇ» over
+ the «lazyˇ» dogˇ
+
+ "
+ },
+ Mode::HelixNormal,
+ );
+
+ cx.set_state(start, Mode::HelixNormal);
+
+ cx.simulate_keystrokes("m a w");
+
+ cx.assert_state(
+ indoc! {"
+ The quick« brownˇ»
+ fox «jumps ˇ»over
+ the «lazy ˇ»dogˇ
+
+ "
+ },
+ Mode::HelixNormal,
+ );
+ }
+}
@@ -0,0 +1,84 @@
+use text::SelectionGoal;
+use ui::{Context, Window};
+
+use crate::{Vim, helix::object::cursor_range, object::Object};
+
+impl Vim {
+ /// Selects the object each cursor is over.
+ /// Follows helix convention.
+ pub fn select_current_object(
+ &mut self,
+ object: Object,
+ around: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.stop_recording(cx);
+ self.update_editor(cx, |_, editor, cx| {
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.move_with(|map, selection| {
+ let Some(range) = object
+ .helix_range(map, selection.clone(), around)
+ .unwrap_or({
+ let vim_range = object.range(map, selection.clone(), around, None);
+ vim_range.filter(|r| r.start <= cursor_range(selection, map).start)
+ })
+ else {
+ return;
+ };
+
+ selection.set_head_tail(range.end, range.start, SelectionGoal::None);
+ });
+ });
+ });
+ }
+
+ /// Selects the next object from each cursor which the cursor is not over.
+ /// Follows helix convention.
+ pub fn select_next_object(
+ &mut self,
+ object: Object,
+ around: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.stop_recording(cx);
+ self.update_editor(cx, |_, editor, cx| {
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.move_with(|map, selection| {
+ let Ok(Some(range)) = object.helix_next_range(map, selection.clone(), around)
+ else {
+ return;
+ };
+
+ selection.set_head_tail(range.end, range.start, SelectionGoal::None);
+ });
+ });
+ });
+ }
+
+ /// Selects the previous object from each cursor which the cursor is not over.
+ /// Follows helix convention.
+ pub fn select_previous_object(
+ &mut self,
+ object: Object,
+ around: bool,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.stop_recording(cx);
+ self.update_editor(cx, |_, editor, cx| {
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.move_with(|map, selection| {
+ let Ok(Some(range)) =
+ object.helix_previous_range(map, selection.clone(), around)
+ else {
+ return;
+ };
+
+ selection.set_head_tail(range.start, range.end, SelectionGoal::None);
+ });
+ });
+ });
+ }
+}
@@ -74,11 +74,7 @@ impl ModeIndicator {
.map(|count| format!("{}", count)),
)
.chain(vim.selected_register.map(|reg| format!("\"{reg}")))
- .chain(
- vim.operator_stack
- .iter()
- .map(|item| item.status().to_string()),
- )
+ .chain(vim.operator_stack.iter().map(|item| item.status()))
.chain(
cx.global::<VimGlobals>()
.post_count
@@ -719,21 +719,14 @@ impl Vim {
target: Some(SurroundsType::Motion(motion)),
});
} else {
- self.normal_motion(
- motion.clone(),
- active_operator.clone(),
- count,
- forced_motion,
- window,
- cx,
- )
+ self.normal_motion(motion, active_operator, count, forced_motion, window, cx)
}
}
Mode::Visual | Mode::VisualLine | Mode::VisualBlock => {
- self.visual_motion(motion.clone(), count, window, cx)
+ self.visual_motion(motion, count, window, cx)
}
- Mode::HelixNormal => self.helix_normal_motion(motion.clone(), count, window, cx),
+ Mode::HelixNormal => self.helix_normal_motion(motion, count, window, cx),
}
self.clear_operator(window, cx);
if let Some(operator) = waiting_operator {
@@ -816,10 +809,7 @@ impl Motion {
}
fn skip_exclusive_special_case(&self) -> bool {
- match self {
- Motion::WrappingLeft | Motion::WrappingRight => true,
- _ => false,
- }
+ matches!(self, Motion::WrappingLeft | Motion::WrappingRight)
}
pub(crate) fn push_to_jump_list(&self) -> bool {
@@ -1330,7 +1320,7 @@ impl Motion {
pub fn range(
&self,
map: &DisplaySnapshot,
- selection: Selection<DisplayPoint>,
+ mut selection: Selection<DisplayPoint>,
times: Option<usize>,
text_layout_details: &TextLayoutDetails,
forced_motion: bool,
@@ -1375,7 +1365,6 @@ impl Motion {
(None, true) => Some((selection.head(), selection.goal)),
}?;
- let mut selection = selection.clone();
selection.set_head(new_head, goal);
let mut kind = match (self.default_kind(), forced_motion) {
@@ -1621,10 +1610,20 @@ fn up_down_buffer_rows(
map.line_len(begin_folded_line.row())
};
- (
- map.clip_point(DisplayPoint::new(begin_folded_line.row(), new_col), bias),
- goal,
- )
+ let point = DisplayPoint::new(begin_folded_line.row(), new_col);
+ let mut clipped_point = map.clip_point(point, bias);
+
+ // When navigating vertically in vim mode with inlay hints present,
+ // we need to handle the case where clipping moves us to a different row.
+ // This can happen when moving down (Bias::Right) and hitting an inlay hint.
+ // Re-clip with opposite bias to stay on the intended line.
+ //
+ // See: https://github.com/zed-industries/zed/issues/29134
+ if clipped_point.row() > point.row() {
+ clipped_point = map.clip_point(point, Bias::Left);
+ }
+
+ (clipped_point, goal)
}
fn down_display(
@@ -2404,9 +2403,7 @@ fn matching(map: &DisplaySnapshot, display_point: DisplayPoint) -> DisplayPoint
let line_range = map.prev_line_boundary(point).0..line_end;
let visible_line_range =
line_range.start..Point::new(line_range.end.row, line_range.end.column.saturating_sub(1));
- let ranges = map
- .buffer_snapshot
- .bracket_ranges(visible_line_range.clone());
+ let ranges = map.buffer_snapshot.bracket_ranges(visible_line_range);
if let Some(ranges) = ranges {
let line_range = line_range.start.to_offset(&map.buffer_snapshot)
..line_range.end.to_offset(&map.buffer_snapshot);
@@ -3855,6 +3852,84 @@ mod test {
);
}
+ #[gpui::test]
+ async fn test_visual_mode_with_inlay_hints_on_empty_line(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ // Test the exact scenario from issue #29134
+ cx.set_state(
+ indoc! {"
+ fn main() {
+ let this_is_a_long_name = Vec::<u32>::new();
+ let new_oneˇ = this_is_a_long_name
+ .iter()
+ .map(|i| i + 1)
+ .map(|i| i * 2)
+ .collect::<Vec<_>>();
+ }
+ "},
+ Mode::Normal,
+ );
+
+ // Add type hint inlay on the empty line (line 3, after "this_is_a_long_name")
+ cx.update_editor(|editor, _window, cx| {
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ // The empty line is at line 3 (0-indexed)
+ let line_start = snapshot.anchor_after(Point::new(3, 0));
+ let inlay_text = ": Vec<u32>";
+ let inlay = Inlay::edit_prediction(1, line_start, inlay_text);
+ editor.splice_inlays(&[], vec![inlay], cx);
+ });
+
+ // Enter visual mode
+ cx.simulate_keystrokes("v");
+ cx.assert_state(
+ indoc! {"
+ fn main() {
+ let this_is_a_long_name = Vec::<u32>::new();
+ let new_one« ˇ»= this_is_a_long_name
+ .iter()
+ .map(|i| i + 1)
+ .map(|i| i * 2)
+ .collect::<Vec<_>>();
+ }
+ "},
+ Mode::Visual,
+ );
+
+ // Move down - should go to the beginning of line 4, not skip to line 5
+ cx.simulate_keystrokes("j");
+ cx.assert_state(
+ indoc! {"
+ fn main() {
+ let this_is_a_long_name = Vec::<u32>::new();
+ let new_one« = this_is_a_long_name
+ ˇ» .iter()
+ .map(|i| i + 1)
+ .map(|i| i * 2)
+ .collect::<Vec<_>>();
+ }
+ "},
+ Mode::Visual,
+ );
+
+ // Test with multiple movements
+ cx.set_state("let aˇ = 1;\nlet b = 2;\n\nlet c = 3;", Mode::Normal);
+
+ // Add type hint on the empty line
+ cx.update_editor(|editor, _window, cx| {
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ let empty_line_start = snapshot.anchor_after(Point::new(2, 0));
+ let inlay_text = ": i32";
+ let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text);
+ editor.splice_inlays(&[], vec![inlay], cx);
+ });
+
+ // Enter visual mode and move down twice
+ cx.simulate_keystrokes("v j j");
+ cx.assert_state("let a« = 1;\nlet b = 2;\n\nˇ»let c = 3;", Mode::Visual);
+ }
+
#[gpui::test]
async fn test_go_to_percentage(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
@@ -4099,7 +4174,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇhe quick brown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick bˇrown fox
@@ -4109,7 +4184,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick brown foˇx
@@ -4119,7 +4194,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇ
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
}
#[gpui::test]
@@ -4134,7 +4209,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇbrown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick bˇrown fox
@@ -4144,7 +4219,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
the quickˇown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick brown foˇx
@@ -4154,7 +4229,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
the quicˇk
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
ˇthe quick brown fox
@@ -4164,7 +4239,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇ fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
ˇthe quick brown fox
@@ -4174,7 +4249,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇuick brown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
}
#[gpui::test]
@@ -4189,7 +4264,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
the quick brown foˇx
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
ˇthe quick brown fox
@@ -4199,7 +4274,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇx
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
}
#[gpui::test]
@@ -4215,7 +4290,7 @@ mod test {
the quick brown fox
ˇthe quick brown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick bˇrown fox
@@ -4226,7 +4301,7 @@ mod test {
the quick brˇrown fox
jumped overown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick brown foˇx
@@ -4237,7 +4312,7 @@ mod test {
the quick brown foxˇx
jumped over the la
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick brown fox
@@ -4248,7 +4323,7 @@ mod test {
thˇhe quick brown fox
je quick brown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
}
#[gpui::test]
@@ -4263,7 +4338,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
ˇe quick brown fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick bˇrown fox
@@ -4273,7 +4348,7 @@ mod test {
cx.shared_state().await.assert_eq(indoc! {"
the quick bˇn fox
jumped over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
cx.set_shared_state(indoc! {"
the quick brown foˇx
@@ -4282,6 +4357,6 @@ mod test {
cx.simulate_shared_keystrokes("d v e").await;
cx.shared_state().await.assert_eq(indoc! {"
the quick brown foˇd over the lazy dog"});
- assert_eq!(cx.cx.forced_motion(), false);
+ assert!(!cx.cx.forced_motion());
}
}
@@ -495,10 +495,19 @@ impl Vim {
self.replace_with_register_object(object, around, window, cx)
}
Some(Operator::Exchange) => self.exchange_object(object, around, window, cx),
+ Some(Operator::HelixMatch) => {
+ self.select_current_object(object, around, window, cx)
+ }
_ => {
// Can't do anything for namespace operators. Ignoring
}
},
+ Some(Operator::HelixNext { around }) => {
+ self.select_next_object(object, around, window, cx);
+ }
+ Some(Operator::HelixPrevious { around }) => {
+ self.select_previous_object(object, around, window, cx);
+ }
Some(Operator::DeleteSurrounds) => {
waiting_operator = Some(Operator::DeleteSurrounds);
}
@@ -155,12 +155,11 @@ fn expand_changed_word_selection(
let classifier = map
.buffer_snapshot
.char_classifier_at(selection.start.to_point(map));
- let in_word = map
- .buffer_chars_at(selection.head().to_offset(map, Bias::Left))
+
+ map.buffer_chars_at(selection.head().to_offset(map, Bias::Left))
.next()
.map(|(c, _)| !classifier.is_whitespace(c))
- .unwrap_or_default();
- in_word
+ .unwrap_or_default()
};
if (times.is_none() || times.unwrap() == 1) && is_in_word() {
let next_char = map
@@ -70,8 +70,19 @@ impl Vim {
} else {
Point::new(row, 0)
};
+ let end = if row == selection.end.row {
+ selection.end
+ } else {
+ Point::new(row, snapshot.line_len(multi_buffer::MultiBufferRow(row)))
+ };
+
+ let number_result = if !selection.is_empty() {
+ find_number_in_range(&snapshot, start, end)
+ } else {
+ find_number(&snapshot, start)
+ };
- if let Some((range, num, radix)) = find_number(&snapshot, start) {
+ if let Some((range, num, radix)) = number_result {
let replace = match radix {
10 => increment_decimal_string(&num, delta),
16 => increment_hex_string(&num, delta),
@@ -189,6 +200,90 @@ fn increment_binary_string(num: &str, delta: i64) -> String {
format!("{:0width$b}", result, width = num.len())
}
+fn find_number_in_range(
+ snapshot: &MultiBufferSnapshot,
+ start: Point,
+ end: Point,
+) -> Option<(Range<Point>, String, u32)> {
+ let start_offset = start.to_offset(snapshot);
+ let end_offset = end.to_offset(snapshot);
+
+ let mut offset = start_offset;
+
+ // Backward scan to find the start of the number, but stop at start_offset
+ for ch in snapshot.reversed_chars_at(offset) {
+ if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' {
+ if offset == 0 {
+ break;
+ }
+ offset -= ch.len_utf8();
+ if offset < start_offset {
+ offset = start_offset;
+ break;
+ }
+ } else {
+ break;
+ }
+ }
+
+ let mut begin = None;
+ let mut end_num = None;
+ let mut num = String::new();
+ let mut radix = 10;
+
+ let mut chars = snapshot.chars_at(offset).peekable();
+
+ while let Some(ch) = chars.next() {
+ if offset >= end_offset {
+ break; // stop at end of selection
+ }
+
+ if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) {
+ radix = 2;
+ begin = None;
+ num = String::new();
+ } else if num == "0"
+ && ch == 'x'
+ && chars.peek().is_some()
+ && chars.peek().unwrap().is_ascii_hexdigit()
+ {
+ radix = 16;
+ begin = None;
+ num = String::new();
+ }
+
+ if ch.is_digit(radix)
+ || (begin.is_none()
+ && ch == '-'
+ && chars.peek().is_some()
+ && chars.peek().unwrap().is_digit(radix))
+ {
+ if begin.is_none() {
+ begin = Some(offset);
+ }
+ num.push(ch);
+ } else if begin.is_some() {
+ end_num = Some(offset);
+ break;
+ } else if ch == '\n' {
+ break;
+ }
+
+ offset += ch.len_utf8();
+ }
+
+ if let Some(begin) = begin {
+ let end_num = end_num.unwrap_or(offset);
+ Some((
+ begin.to_point(snapshot)..end_num.to_point(snapshot),
+ num,
+ radix,
+ ))
+ } else {
+ None
+ }
+}
+
fn find_number(
snapshot: &MultiBufferSnapshot,
start: Point,
@@ -274,9 +369,9 @@ fn find_boolean(snapshot: &MultiBufferSnapshot, start: Point) -> Option<(Range<P
let mut end = None;
let mut word = String::new();
- let mut chars = snapshot.chars_at(offset);
+ let chars = snapshot.chars_at(offset);
- while let Some(ch) = chars.next() {
+ for ch in chars {
if ch.is_ascii_alphabetic() {
if begin.is_none() {
begin = Some(offset);
@@ -764,4 +859,18 @@ mod test {
cx.simulate_keystrokes("v b ctrl-a");
cx.assert_state("let enabled = ˇOff;", Mode::Normal);
}
+
+ #[gpui::test]
+ async fn test_increment_visual_partial_number(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_state("ˇ123").await;
+ cx.simulate_shared_keystrokes("v l ctrl-a").await;
+ cx.shared_state().await.assert_eq(indoc! {"ˇ133"});
+ cx.simulate_shared_keystrokes("l v l ctrl-a").await;
+ cx.shared_state().await.assert_eq(indoc! {"1ˇ34"});
+ cx.simulate_shared_keystrokes("shift-v y p p ctrl-v k k l ctrl-a")
+ .await;
+ cx.shared_state().await.assert_eq(indoc! {"ˇ144\n144\n144"});
+ }
}
@@ -474,8 +474,7 @@ mod test {
Mode::Normal,
);
assert_eq!(
- cx.read_from_clipboard()
- .map(|item| item.text().unwrap().to_string()),
+ cx.read_from_clipboard().map(|item| item.text().unwrap()),
Some("jumps".into())
);
cx.simulate_keystrokes("d d p");
@@ -487,8 +486,7 @@ mod test {
Mode::Normal,
);
assert_eq!(
- cx.read_from_clipboard()
- .map(|item| item.text().unwrap().to_string()),
+ cx.read_from_clipboard().map(|item| item.text().unwrap()),
Some("jumps".into())
);
cx.write_to_clipboard(ClipboardItem::new_string("test-copy".to_string()));
@@ -98,7 +98,7 @@ impl Vim {
Vim::take_forced_motion(cx);
self.exit_temporary_normal(window, cx);
self.update_editor(cx, |_, editor, cx| {
- scroll_editor(editor, move_cursor, &amount, window, cx)
+ scroll_editor(editor, move_cursor, amount, window, cx)
});
}
}
@@ -106,7 +106,7 @@ impl Vim {
fn scroll_editor(
editor: &mut Editor,
preserve_cursor_position: bool,
- amount: &ScrollAmount,
+ amount: ScrollAmount,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -126,7 +126,7 @@ fn scroll_editor(
ScrollAmount::Line(amount.lines(visible_line_count) - 1.0)
}
}
- _ => amount.clone(),
+ _ => amount,
};
editor.scroll_screen(&amount, window, cx);
@@ -203,7 +203,10 @@ impl Vim {
// hook into the existing to clear out any vim search state on cmd+f or edit -> find.
fn search_deploy(&mut self, _: &buffer_search::Deploy, _: &mut Window, cx: &mut Context<Self>) {
+ // Preserve the current mode when resetting search state
+ let current_mode = self.mode;
self.search = Default::default();
+ self.search.prior_mode = current_mode;
cx.propagate();
}
@@ -187,9 +187,7 @@ fn find_mini_delimiters(
};
// Try to find delimiters in visible range first
- let ranges = map
- .buffer_snapshot
- .bracket_ranges(visible_line_range.clone());
+ let ranges = map.buffer_snapshot.bracket_ranges(visible_line_range);
if let Some(candidate) = cover_or_next(ranges, display_point, map, Some(&bracket_filter)) {
return Some(
DelimiterRange {
@@ -399,11 +397,11 @@ impl Vim {
let count = Self::take_count(cx);
match self.mode {
- Mode::Normal => self.normal_object(object, count, window, cx),
+ Mode::Normal | Mode::HelixNormal => self.normal_object(object, count, window, cx),
Mode::Visual | Mode::VisualLine | Mode::VisualBlock => {
self.visual_object(object, count, window, cx)
}
- Mode::Insert | Mode::Replace | Mode::HelixNormal => {
+ Mode::Insert | Mode::Replace => {
// Shouldn't execute a text object in insert mode. Ignoring
}
}
@@ -1366,7 +1364,7 @@ fn is_sentence_end(map: &DisplaySnapshot, offset: usize) -> bool {
/// Expands the passed range to include whitespace on one side or the other in a line. Attempts to add the
/// whitespace to the end first and falls back to the start if there was none.
-fn expand_to_include_whitespace(
+pub fn expand_to_include_whitespace(
map: &DisplaySnapshot,
range: Range<DisplayPoint>,
stop_at_newline: bool,
@@ -1512,7 +1510,7 @@ pub fn end_of_paragraph(map: &DisplaySnapshot, display_point: DisplayPoint) -> D
map.max_point()
}
-fn surrounding_markers(
+pub fn surrounding_markers(
map: &DisplaySnapshot,
relative_to: DisplayPoint,
around: bool,
@@ -7,8 +7,10 @@ use crate::{motion::Motion, object::Object};
use anyhow::Result;
use collections::HashMap;
use command_palette_hooks::{CommandPaletteFilter, CommandPaletteInterceptor};
-use db::define_connection;
-use db::sqlez_macros::sql;
+use db::{
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+};
use editor::display_map::{is_invisible, replacement};
use editor::{Anchor, ClipboardSelection, Editor, MultiBuffer, ToPoint as EditorToPoint};
use gpui::{
@@ -132,6 +134,13 @@ pub enum Operator {
ToggleComments,
ReplaceWithRegister,
Exchange,
+ HelixMatch,
+ HelixNext {
+ around: bool,
+ },
+ HelixPrevious {
+ around: bool,
+ },
}
#[derive(Default, Clone, Debug)]
@@ -255,16 +264,11 @@ impl MarksState {
pub fn new(workspace: &Workspace, cx: &mut App) -> Entity<MarksState> {
cx.new(|cx| {
let buffer_store = workspace.project().read(cx).buffer_store().clone();
- let subscription =
- cx.subscribe(
- &buffer_store,
- move |this: &mut Self, _, event, cx| match event {
- project::buffer_store::BufferStoreEvent::BufferAdded(buffer) => {
- this.on_buffer_loaded(buffer, cx);
- }
- _ => {}
- },
- );
+ let subscription = cx.subscribe(&buffer_store, move |this: &mut Self, _, event, cx| {
+ if let project::buffer_store::BufferStoreEvent::BufferAdded(buffer) = event {
+ this.on_buffer_loaded(buffer, cx);
+ }
+ });
let mut this = Self {
workspace: workspace.weak_handle(),
@@ -405,7 +409,7 @@ impl MarksState {
} else {
HashMap::default()
};
- let old_points = self.serialized_marks.get(&path.clone());
+ let old_points = self.serialized_marks.get(&path);
if old_points == Some(&new_points) {
return;
}
@@ -548,7 +552,7 @@ impl MarksState {
.insert(name.clone(), anchors);
if self.is_global_mark(&name) {
self.global_marks
- .insert(name.clone(), MarkLocation::Buffer(multibuffer.entity_id()));
+ .insert(name, MarkLocation::Buffer(multibuffer.entity_id()));
}
if let Some(buffer) = buffer {
let buffer_id = buffer.read(cx).remote_id();
@@ -564,7 +568,7 @@ impl MarksState {
let buffer_id = buffer.read(cx).remote_id();
self.buffer_marks.entry(buffer_id).or_default().insert(
- name.clone(),
+ name,
anchors
.into_iter()
.map(|anchor| anchor.text_anchor)
@@ -596,7 +600,7 @@ impl MarksState {
if let Some(anchors) = self.buffer_marks.get(&buffer_id) {
let text_anchors = anchors.get(name)?;
let anchors = text_anchors
- .into_iter()
+ .iter()
.map(|anchor| Anchor::in_buffer(excerpt_id, buffer_id, *anchor))
.collect();
return Some(Mark::Local(anchors));
@@ -659,9 +663,9 @@ impl MarksState {
return;
}
};
- self.global_marks.remove(&mark_name.clone());
+ self.global_marks.remove(&mark_name);
self.serialized_marks
- .get_mut(&path.clone())
+ .get_mut(&path)
.map(|m| m.remove(&mark_name.clone()));
if let Some(workspace_id) = self.workspace_id(cx) {
cx.background_spawn(async move { DB.delete_mark(workspace_id, path, mark_name).await })
@@ -1023,6 +1027,9 @@ impl Operator {
Operator::RecordRegister => "q",
Operator::ReplayRegister => "@",
Operator::ToggleComments => "gc",
+ Operator::HelixMatch => "helix_m",
+ Operator::HelixNext { .. } => "helix_next",
+ Operator::HelixPrevious { .. } => "helix_previous",
}
}
@@ -1044,6 +1051,9 @@ impl Operator {
} => format!("^V{}", make_visible(prefix)),
Operator::AutoIndent => "=".to_string(),
Operator::ShellCommand => "=".to_string(),
+ Operator::HelixMatch => "m".to_string(),
+ Operator::HelixNext { .. } => "]".to_string(),
+ Operator::HelixPrevious { .. } => "[".to_string(),
_ => self.id().to_string(),
}
}
@@ -1082,7 +1092,10 @@ impl Operator {
| Operator::Object { .. }
| Operator::ChangeSurrounds { target: None }
| Operator::OppositeCase
- | Operator::ToggleComments => false,
+ | Operator::ToggleComments
+ | Operator::HelixMatch
+ | Operator::HelixNext { .. }
+ | Operator::HelixPrevious { .. } => false,
}
}
@@ -1106,7 +1119,9 @@ impl Operator {
| Operator::AddSurrounds { target: None }
| Operator::ChangeSurrounds { target: None }
| Operator::DeleteSurrounds
- | Operator::Exchange => true,
+ | Operator::Exchange
+ | Operator::HelixNext { .. }
+ | Operator::HelixPrevious { .. } => true,
Operator::Yank
| Operator::Object { .. }
| Operator::FindForward { .. }
@@ -1121,7 +1136,8 @@ impl Operator {
| Operator::Jump { .. }
| Operator::Register
| Operator::RecordRegister
- | Operator::ReplayRegister => false,
+ | Operator::ReplayRegister
+ | Operator::HelixMatch => false,
}
}
}
@@ -1287,7 +1303,7 @@ impl RegistersView {
if let Some(register) = register {
matches.push(RegisterMatch {
name: '%',
- contents: register.text.clone(),
+ contents: register.text,
})
}
}
@@ -1379,7 +1395,7 @@ impl PickerDelegate for MarksViewDelegate {
_: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> gpui::Task<()> {
- let Some(workspace) = self.workspace.upgrade().clone() else {
+ let Some(workspace) = self.workspace.upgrade() else {
return Task::ready(());
};
cx.spawn(async move |picker, cx| {
@@ -1673,8 +1689,12 @@ impl MarksView {
}
}
-define_connection! (
- pub static ref DB: VimDb<WorkspaceDb> = &[
+pub struct VimDb(ThreadSafeConnection);
+
+impl Domain for VimDb {
+ const NAME: &str = stringify!(VimDb);
+
+ const MIGRATIONS: &[&str] = &[
sql! (
CREATE TABLE vim_marks (
workspace_id INTEGER,
@@ -1694,7 +1714,9 @@ define_connection! (
ON vim_global_marks_paths(workspace_id, mark_name);
),
];
-);
+}
+
+db::static_connection!(DB, VimDb, [WorkspaceDb]);
struct SerializedMark {
path: Arc<Path>,
@@ -1710,26 +1732,25 @@ impl VimDb {
marks: HashMap<String, Vec<Point>>,
) -> Result<()> {
log::debug!("Setting path {path:?} for {} marks", marks.len());
- let result = self
- .write(move |conn| {
- let mut query = conn.exec_bound(sql!(
- INSERT OR REPLACE INTO vim_marks
- (workspace_id, mark_name, path, value)
- VALUES
- (?, ?, ?, ?)
- ))?;
- for (mark_name, value) in marks {
- let pairs: Vec<(u32, u32)> = value
- .into_iter()
- .map(|point| (point.row, point.column))
- .collect();
- let serialized = serde_json::to_string(&pairs)?;
- query((workspace_id, mark_name, path.clone(), serialized))?;
- }
- Ok(())
- })
- .await;
- result
+
+ self.write(move |conn| {
+ let mut query = conn.exec_bound(sql!(
+ INSERT OR REPLACE INTO vim_marks
+ (workspace_id, mark_name, path, value)
+ VALUES
+ (?, ?, ?, ?)
+ ))?;
+ for (mark_name, value) in marks {
+ let pairs: Vec<(u32, u32)> = value
+ .into_iter()
+ .map(|point| (point.row, point.column))
+ .collect();
+ let serialized = serde_json::to_string(&pairs)?;
+ query((workspace_id, mark_name, path.clone(), serialized))?;
+ }
+ Ok(())
+ })
+ .await
}
fn get_marks(&self, workspace_id: WorkspaceId) -> Result<Vec<SerializedMark>> {
@@ -1,7 +1,7 @@
use crate::{
Vim,
motion::{self, Motion},
- object::Object,
+ object::{Object, surrounding_markers},
state::Mode,
};
use editor::{Bias, movement};
@@ -224,7 +224,7 @@ impl Vim {
window: &mut Window,
cx: &mut Context<Self>,
) {
- if let Some(will_replace_pair) = object_to_bracket_pair(target) {
+ if let Some(will_replace_pair) = self.object_to_bracket_pair(target, cx) {
self.stop_recording(cx);
self.update_editor(cx, |_, editor, cx| {
editor.transact(window, cx, |editor, window, cx| {
@@ -240,7 +240,24 @@ impl Vim {
newline: false,
},
};
- let surround = pair.end != surround_alias((*text).as_ref());
+
+ // Determines whether space should be added/removed after
+ // and before the surround pairs.
+ // For example, using `cs{[` will add a space before and
+ // after the pair, while using `cs{]` will not, notice the
+ // use of the closing bracket instead of the opening bracket
+ // on the target object.
+ // In the case of quotes, the opening and closing is the
+ // same, so no space will ever be added or removed.
+ let surround = match target {
+ Object::Quotes
+ | Object::BackQuotes
+ | Object::AnyQuotes
+ | Object::MiniQuotes
+ | Object::DoubleQuotes => true,
+ _ => pair.end != surround_alias((*text).as_ref()),
+ };
+
let (display_map, selections) = editor.selections.all_adjusted_display(cx);
let mut edits = Vec::new();
let mut anchors = Vec::new();
@@ -341,7 +358,7 @@ impl Vim {
cx: &mut Context<Self>,
) -> bool {
let mut valid = false;
- if let Some(pair) = object_to_bracket_pair(object) {
+ if let Some(pair) = self.object_to_bracket_pair(object, cx) {
self.update_editor(cx, |_, editor, cx| {
editor.transact(window, cx, |editor, window, cx| {
editor.set_clip_at_line_ends(false, cx);
@@ -388,6 +405,140 @@ impl Vim {
}
valid
}
+
+ fn object_to_bracket_pair(
+ &self,
+ object: Object,
+ cx: &mut Context<Self>,
+ ) -> Option<BracketPair> {
+ match object {
+ Object::Quotes => Some(BracketPair {
+ start: "'".to_string(),
+ end: "'".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::BackQuotes => Some(BracketPair {
+ start: "`".to_string(),
+ end: "`".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::DoubleQuotes => Some(BracketPair {
+ start: "\"".to_string(),
+ end: "\"".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::VerticalBars => Some(BracketPair {
+ start: "|".to_string(),
+ end: "|".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::Parentheses => Some(BracketPair {
+ start: "(".to_string(),
+ end: ")".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::SquareBrackets => Some(BracketPair {
+ start: "[".to_string(),
+ end: "]".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::CurlyBrackets => Some(BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::AngleBrackets => Some(BracketPair {
+ start: "<".to_string(),
+ end: ">".to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ }),
+ Object::AnyBrackets => {
+ // If we're dealing with `AnyBrackets`, which can map to multiple
+ // bracket pairs, we'll need to first determine which `BracketPair` to
+ // target.
+ // As such, we keep track of the smallest range size, so
+ // that in cases like `({ name: "John" })` if the cursor is
+ // inside the curly brackets, we target the curly brackets
+ // instead of the parentheses.
+ let mut bracket_pair = None;
+ let mut min_range_size = usize::MAX;
+
+ let _ = self.editor.update(cx, |editor, cx| {
+ let (display_map, selections) = editor.selections.all_adjusted_display(cx);
+ // Even if there's multiple cursors, we'll simply rely on
+ // the first one to understand what bracket pair to map to.
+ // I believe we could, if worth it, go one step above and
+ // have a `BracketPair` per selection, so that `AnyBracket`
+ // could work in situations where the transformation below
+ // could be done.
+ //
+ // ```
+ // (< name:ˇ'Zed' >)
+ // <[ name:ˇ'DeltaDB' ]>
+ // ```
+ //
+ // After using `csb{`:
+ //
+ // ```
+ // (ˇ{ name:'Zed' })
+ // <ˇ{ name:'DeltaDB' }>
+ // ```
+ if let Some(selection) = selections.first() {
+ let relative_to = selection.head();
+ let bracket_pairs = [('(', ')'), ('[', ']'), ('{', '}'), ('<', '>')];
+ let cursor_offset = relative_to.to_offset(&display_map, Bias::Left);
+
+ for &(open, close) in bracket_pairs.iter() {
+ if let Some(range) = surrounding_markers(
+ &display_map,
+ relative_to,
+ true,
+ false,
+ open,
+ close,
+ ) {
+ let start_offset = range.start.to_offset(&display_map, Bias::Left);
+ let end_offset = range.end.to_offset(&display_map, Bias::Right);
+
+ if cursor_offset >= start_offset && cursor_offset <= end_offset {
+ let size = end_offset - start_offset;
+ if size < min_range_size {
+ min_range_size = size;
+ bracket_pair = Some(BracketPair {
+ start: open.to_string(),
+ end: close.to_string(),
+ close: true,
+ surround: true,
+ newline: false,
+ })
+ }
+ }
+ }
+ }
+ }
+ });
+
+ bracket_pair
+ }
+ _ => None,
+ }
+ }
}
fn find_surround_pair<'a>(pairs: &'a [BracketPair], ch: &str) -> Option<&'a BracketPair> {
@@ -488,74 +639,12 @@ fn pair_to_object(pair: &BracketPair) -> Option<Object> {
}
}
-fn object_to_bracket_pair(object: Object) -> Option<BracketPair> {
- match object {
- Object::Quotes => Some(BracketPair {
- start: "'".to_string(),
- end: "'".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::BackQuotes => Some(BracketPair {
- start: "`".to_string(),
- end: "`".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::DoubleQuotes => Some(BracketPair {
- start: "\"".to_string(),
- end: "\"".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::VerticalBars => Some(BracketPair {
- start: "|".to_string(),
- end: "|".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::Parentheses => Some(BracketPair {
- start: "(".to_string(),
- end: ")".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::SquareBrackets => Some(BracketPair {
- start: "[".to_string(),
- end: "]".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::CurlyBrackets => Some(BracketPair {
- start: "{".to_string(),
- end: "}".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- Object::AngleBrackets => Some(BracketPair {
- start: "<".to_string(),
- end: ">".to_string(),
- close: true,
- surround: true,
- newline: false,
- }),
- _ => None,
- }
-}
-
#[cfg(test)]
mod test {
use gpui::KeyBinding;
use indoc::indoc;
- use crate::{PushAddSurrounds, state::Mode, test::VimTestContext};
+ use crate::{PushAddSurrounds, object::AnyBrackets, state::Mode, test::VimTestContext};
#[gpui::test]
async fn test_add_surrounds(cx: &mut gpui::TestAppContext) {
@@ -1128,6 +1217,81 @@ mod test {
];"},
Mode::Normal,
);
+
+ // test change quotes.
+ cx.set_state(indoc! {"' ˇstr '"}, Mode::Normal);
+ cx.simulate_keystrokes("c s ' \"");
+ cx.assert_state(indoc! {"ˇ\" str \""}, Mode::Normal);
+
+ // test multi cursor change quotes
+ cx.set_state(
+ indoc! {"
+ ' ˇstr '
+ some example text here
+ ˇ' str '
+ "},
+ Mode::Normal,
+ );
+ cx.simulate_keystrokes("c s ' \"");
+ cx.assert_state(
+ indoc! {"
+ ˇ\" str \"
+ some example text here
+ ˇ\" str \"
+ "},
+ Mode::Normal,
+ );
+ }
+
+ #[gpui::test]
+ async fn test_change_surrounds_any_brackets(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ // Update keybindings so that using `csb` triggers Vim's `AnyBrackets`
+ // action.
+ cx.update(|_, cx| {
+ cx.bind_keys([KeyBinding::new(
+ "b",
+ AnyBrackets,
+ Some("vim_operator == a || vim_operator == i || vim_operator == cs"),
+ )]);
+ });
+
+ cx.set_state(indoc! {"{braˇcketed}"}, Mode::Normal);
+ cx.simulate_keystrokes("c s b [");
+ cx.assert_state(indoc! {"ˇ[ bracketed ]"}, Mode::Normal);
+
+ cx.set_state(indoc! {"[braˇcketed]"}, Mode::Normal);
+ cx.simulate_keystrokes("c s b {");
+ cx.assert_state(indoc! {"ˇ{ bracketed }"}, Mode::Normal);
+
+ cx.set_state(indoc! {"<braˇcketed>"}, Mode::Normal);
+ cx.simulate_keystrokes("c s b [");
+ cx.assert_state(indoc! {"ˇ[ bracketed ]"}, Mode::Normal);
+
+ cx.set_state(indoc! {"(braˇcketed)"}, Mode::Normal);
+ cx.simulate_keystrokes("c s b [");
+ cx.assert_state(indoc! {"ˇ[ bracketed ]"}, Mode::Normal);
+
+ cx.set_state(indoc! {"(< name: ˇ'Zed' >)"}, Mode::Normal);
+ cx.simulate_keystrokes("c s b {");
+ cx.assert_state(indoc! {"(ˇ{ name: 'Zed' })"}, Mode::Normal);
+
+ cx.set_state(
+ indoc! {"
+ (< name: ˇ'Zed' >)
+ (< nˇame: 'DeltaDB' >)
+ "},
+ Mode::Normal,
+ );
+ cx.simulate_keystrokes("c s b {");
+ cx.set_state(
+ indoc! {"
+ (ˇ{ name: 'Zed' })
+ (ˇ{ name: 'DeltaDB' })
+ "},
+ Mode::Normal,
+ );
}
#[gpui::test]
@@ -8,13 +8,15 @@ use collections::HashMap;
use command_palette::CommandPalette;
use editor::{
AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, actions::DeleteLine,
- display_map::DisplayRow, test::editor_test_context::EditorTestContext,
+ code_context_menus::CodeContextMenu, display_map::DisplayRow,
+ test::editor_test_context::EditorTestContext,
};
use futures::StreamExt;
-use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext};
+use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px};
use language::Point;
pub use neovim_backed_test_context::*;
use settings::SettingsStore;
+use ui::Pixels;
use util::test::marked_text_ranges;
pub use vim_test_context::*;
@@ -971,6 +973,87 @@ async fn test_comma_w(cx: &mut gpui::TestAppContext) {
.assert_eq("hellˇo hello\nhello hello");
}
+#[gpui::test]
+async fn test_completion_menu_scroll_aside(cx: &mut TestAppContext) {
+ let mut cx = VimTestContext::new_typescript(cx).await;
+
+ cx.lsp
+ .set_request_handler::<lsp::request::Completion, _, _>(move |_, _| async move {
+ Ok(Some(lsp::CompletionResponse::Array(vec![
+ lsp::CompletionItem {
+ label: "Test Item".to_string(),
+ documentation: Some(lsp::Documentation::String(
+ "This is some very long documentation content that will be displayed in the aside panel for scrolling.\n".repeat(50)
+ )),
+ ..Default::default()
+ },
+ ])))
+ });
+
+ cx.set_state("variableˇ", Mode::Insert);
+ cx.simulate_keystroke(".");
+ cx.executor().run_until_parked();
+
+ let mut initial_offset: Pixels = px(0.0);
+
+ cx.update_editor(|editor, _, _| {
+ let binding = editor.context_menu().borrow();
+ let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+ panic!("Should have completions menu open");
+ };
+
+ initial_offset = menu.scroll_handle_aside.offset().y;
+ });
+
+ // The `ctrl-e` shortcut should scroll the completion menu's aside content
+ // down, so the updated offset should be lower than the initial offset.
+ cx.simulate_keystroke("ctrl-e");
+ cx.update_editor(|editor, _, _| {
+ let binding = editor.context_menu().borrow();
+ let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+ panic!("Should have completions menu open");
+ };
+
+ assert!(menu.scroll_handle_aside.offset().y < initial_offset);
+ });
+
+ // The `ctrl-y` shortcut should do the inverse scrolling as `ctrl-e`, so the
+ // offset should now be the same as the initial offset.
+ cx.simulate_keystroke("ctrl-y");
+ cx.update_editor(|editor, _, _| {
+ let binding = editor.context_menu().borrow();
+ let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+ panic!("Should have completions menu open");
+ };
+
+ assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset);
+ });
+
+ // The `ctrl-d` shortcut should scroll the completion menu's aside content
+ // down, so the updated offset should be lower than the initial offset.
+ cx.simulate_keystroke("ctrl-d");
+ cx.update_editor(|editor, _, _| {
+ let binding = editor.context_menu().borrow();
+ let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+ panic!("Should have completions menu open");
+ };
+
+ assert!(menu.scroll_handle_aside.offset().y < initial_offset);
+ });
+
+ // The `ctrl-u` shortcut should do the inverse scrolling as `ctrl-d`, so the
+ // offset should now be the same as the initial offset.
+ cx.simulate_keystroke("ctrl-u");
+ cx.update_editor(|editor, _, _| {
+ let binding = editor.context_menu().borrow();
+ let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+ panic!("Should have completions menu open");
+ };
+
+ assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset);
+ });
+}
+
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new_typescript(cx).await;
@@ -292,12 +292,7 @@ impl NeovimBackedTestContext {
register: '"',
state: self.shared_state().await,
neovim: self.neovim.read_register('"').await,
- editor: self
- .read_from_clipboard()
- .unwrap()
- .text()
- .unwrap()
- .to_owned(),
+ editor: self.read_from_clipboard().unwrap().text().unwrap(),
}
}
@@ -299,10 +299,10 @@ impl NeovimConnection {
if let Some(NeovimData::Get { .. }) = self.data.front() {
self.data.pop_front();
};
- if let Some(NeovimData::ReadRegister { name, value }) = self.data.pop_front() {
- if name == register {
- return value;
- }
+ if let Some(NeovimData::ReadRegister { name, value }) = self.data.pop_front()
+ && name == register
+ {
+ return value;
}
panic!("operation does not match recorded script. re-record with --features=neovim")
@@ -453,7 +453,7 @@ impl NeovimConnection {
};
if self.data.back() != Some(&state) {
- self.data.push_back(state.clone());
+ self.data.push_back(state);
}
(mode, ranges)
@@ -590,7 +590,7 @@ fn parse_state(marked_text: &str) -> (String, Vec<Range<Point>>) {
#[cfg(feature = "neovim")]
fn encode_ranges(text: &str, point_ranges: &Vec<Range<Point>>) -> String {
let byte_ranges = point_ranges
- .into_iter()
+ .iter()
.map(|range| {
let mut byte_range = 0..0;
let mut ix = 0;
@@ -49,6 +49,10 @@ impl VimTestContext {
Self::new_with_lsp(
EditorLspTestContext::new_typescript(
lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string()]),
+ ..Default::default()
+ }),
rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
prepare_provider: Some(true),
work_done_progress_options: Default::default(),
@@ -64,7 +68,7 @@ impl VimTestContext {
pub fn init_keybindings(enabled: bool, cx: &mut App) {
SettingsStore::update_global(cx, |store, cx| {
- store.update_user_settings::<VimModeSetting>(cx, |s| *s = Some(enabled));
+ store.update_user_settings::<VimModeSetting>(cx, |s| s.vim_mode = Some(enabled));
});
let default_key_bindings = settings::KeymapFile::load_asset_allow_partial_failure(
"keymaps/default-macos.json",
@@ -130,7 +134,7 @@ impl VimTestContext {
pub fn enable_vim(&mut self) {
self.cx.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
- store.update_user_settings::<VimModeSetting>(cx, |s| *s = Some(true));
+ store.update_user_settings::<VimModeSetting>(cx, |s| s.vim_mode = Some(true));
});
})
}
@@ -138,7 +142,7 @@ impl VimTestContext {
pub fn disable_vim(&mut self) {
self.cx.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
- store.update_user_settings::<VimModeSetting>(cx, |s| *s = Some(false));
+ store.update_user_settings::<VimModeSetting>(cx, |s| s.vim_mode = Some(false));
});
})
}
@@ -147,7 +151,7 @@ impl VimTestContext {
self.cx.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<vim_mode_setting::HelixModeSetting>(cx, |s| {
- *s = Some(true)
+ s.helix_mode = Some(true)
});
});
})
@@ -225,7 +229,7 @@ impl VimTestContext {
VimClipboard {
editor: self
.read_from_clipboard()
- .map(|item| item.text().unwrap().to_string())
+ .map(|item| item.text().unwrap())
.unwrap_or_default(),
}
}
@@ -39,7 +39,9 @@ use object::Object;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
-use settings::{Settings, SettingsSources, SettingsStore, update_settings_file};
+use settings::{
+ Settings, SettingsKey, SettingsSources, SettingsStore, SettingsUi, update_settings_file,
+};
use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals};
use std::{mem, ops::Range, sync::Arc};
use surrounds::SurroundsType;
@@ -84,6 +86,22 @@ struct PushFindBackward {
multiline: bool,
}
+#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
+#[action(namespace = vim)]
+#[serde(deny_unknown_fields)]
+/// Selects the next object.
+struct PushHelixNext {
+ around: bool,
+}
+
+#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
+#[action(namespace = vim)]
+#[serde(deny_unknown_fields)]
+/// Selects the previous object.
+struct PushHelixPrevious {
+ around: bool,
+}
+
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -222,6 +240,8 @@ actions!(
PushReplaceWithRegister,
/// Toggles comments.
PushToggleComments,
+ /// Starts a match operation.
+ PushHelixMatch,
]
);
@@ -247,7 +267,7 @@ pub fn init(cx: &mut App) {
let fs = workspace.app_state().fs.clone();
let currently_enabled = Vim::enabled(cx);
update_settings_file::<VimModeSetting>(fs, cx, move |setting, _| {
- *setting = Some(!currently_enabled)
+ setting.vim_mode = Some(!currently_enabled)
})
});
@@ -759,6 +779,27 @@ impl Vim {
Vim::action(editor, cx, |vim, _: &Enter, window, cx| {
vim.input_ignored("\n".into(), window, cx)
});
+ Vim::action(editor, cx, |vim, _: &PushHelixMatch, window, cx| {
+ vim.push_operator(Operator::HelixMatch, window, cx)
+ });
+ Vim::action(editor, cx, |vim, action: &PushHelixNext, window, cx| {
+ vim.push_operator(
+ Operator::HelixNext {
+ around: action.around,
+ },
+ window,
+ cx,
+ );
+ });
+ Vim::action(editor, cx, |vim, action: &PushHelixPrevious, window, cx| {
+ vim.push_operator(
+ Operator::HelixPrevious {
+ around: action.around,
+ },
+ window,
+ cx,
+ );
+ });
normal::register(editor, cx);
insert::register(editor, cx);
@@ -1693,7 +1734,7 @@ impl Vim {
}) {
editor.do_paste(
®ister.text.to_string(),
- register.clipboard_selections.clone(),
+ register.clipboard_selections,
false,
window,
cx,
@@ -1734,10 +1775,7 @@ impl Vim {
editor.set_autoindent(vim.should_autoindent());
editor.selections.line_mode = matches!(vim.mode, Mode::VisualLine);
- let hide_edit_predictions = match vim.mode {
- Mode::Insert | Mode::Replace => false,
- _ => true,
- };
+ let hide_edit_predictions = !matches!(vim.mode, Mode::Insert | Mode::Replace);
editor.set_edit_predictions_hidden_for_vim_mode(hide_edit_predictions, window, cx);
});
cx.notify()
@@ -1788,7 +1826,8 @@ struct VimSettings {
pub cursor_shape: CursorShapeSettings,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "vim")]
struct VimSettingsContent {
pub default_mode: Option<ModeContent>,
pub toggle_relative_line_numbers: Option<bool>,
@@ -1827,8 +1866,6 @@ impl From<ModeContent> for Mode {
}
impl Settings for VimSettings {
- const KEY: Option<&'static str> = Some("vim");
-
type FileContent = VimSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -216,7 +216,6 @@ impl Vim {
// If the file ends with a newline (which is common) we don't do this.
// so that if you go to the end of such a file you can use "up" to go
// to the previous line and have it work somewhat as expected.
- #[allow(clippy::nonminimal_bool)]
if !selection.reversed
&& !selection.is_empty()
&& !(selection.end.column() == 0 && selection.end == map.max_point())
@@ -1203,7 +1202,7 @@ mod test {
the lazy dog"});
assert_eq!(
cx.read_from_clipboard()
- .map(|item| item.text().unwrap().to_string())
+ .map(|item| item.text().unwrap())
.unwrap(),
"The q"
);
@@ -0,0 +1,20 @@
+{"Put":{"state":"ˇ123"}}
+{"Key":"v"}
+{"Key":"l"}
+{"Key":"ctrl-a"}
+{"Get":{"state":"ˇ133","mode":"Normal"}}
+{"Key":"l"}
+{"Key":"v"}
+{"Key":"l"}
+{"Key":"ctrl-a"}
+{"Get":{"state":"1ˇ34","mode":"Normal"}}
+{"Key":"shift-v"}
+{"Key":"y"}
+{"Key":"p"}
+{"Key":"p"}
+{"Key":"ctrl-v"}
+{"Key":"k"}
+{"Key":"k"}
+{"Key":"l"}
+{"Key":"ctrl-a"}
+{"Get":{"state":"ˇ144\n144\n144","mode":"Normal"}}
@@ -14,5 +14,7 @@ path = "src/vim_mode_setting.rs"
[dependencies]
anyhow.workspace = true
gpui.workspace = true
+schemars.workspace = true
+serde.workspace = true
settings.workspace = true
workspace-hack.workspace = true
@@ -6,7 +6,8 @@
use anyhow::Result;
use gpui::App;
-use settings::{Settings, SettingsSources};
+use schemars::JsonSchema;
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
/// Initializes the `vim_mode_setting` crate.
pub fn init(cx: &mut App) {
@@ -14,24 +15,40 @@ pub fn init(cx: &mut App) {
HelixModeSetting::register(cx);
}
-/// Whether or not to enable Vim mode.
-///
-/// Default: false
pub struct VimModeSetting(pub bool);
-impl Settings for VimModeSetting {
- const KEY: Option<&'static str> = Some("vim_mode");
+#[derive(
+ Copy,
+ Clone,
+ PartialEq,
+ Eq,
+ Debug,
+ Default,
+ serde::Serialize,
+ serde::Deserialize,
+ SettingsUi,
+ SettingsKey,
+ JsonSchema,
+)]
+#[settings_key(None)]
+pub struct VimModeSettingContent {
+ /// Whether or not to enable Vim mode.
+ ///
+ /// Default: false
+ pub vim_mode: Option<bool>,
+}
- type FileContent = Option<bool>;
+impl Settings for VimModeSetting {
+ type FileContent = VimModeSettingContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
Ok(Self(
sources
.user
- .or(sources.server)
- .copied()
- .flatten()
- .unwrap_or(sources.default.ok_or_else(Self::missing_default)?),
+ .and_then(|mode| mode.vim_mode)
+ .or(sources.server.and_then(|mode| mode.vim_mode))
+ .or(sources.default.vim_mode)
+ .ok_or_else(Self::missing_default)?,
))
}
@@ -40,24 +57,41 @@ impl Settings for VimModeSetting {
}
}
-/// Whether or not to enable Helix mode.
-///
-/// Default: false
+#[derive(Debug)]
pub struct HelixModeSetting(pub bool);
-impl Settings for HelixModeSetting {
- const KEY: Option<&'static str> = Some("helix_mode");
+#[derive(
+ Copy,
+ Clone,
+ PartialEq,
+ Eq,
+ Debug,
+ Default,
+ serde::Serialize,
+ serde::Deserialize,
+ SettingsUi,
+ SettingsKey,
+ JsonSchema,
+)]
+#[settings_key(None)]
+pub struct HelixModeSettingContent {
+ /// Whether or not to enable Helix mode.
+ ///
+ /// Default: false
+ pub helix_mode: Option<bool>,
+}
- type FileContent = Option<bool>;
+impl Settings for HelixModeSetting {
+ type FileContent = HelixModeSettingContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
Ok(Self(
sources
.user
- .or(sources.server)
- .copied()
- .flatten()
- .unwrap_or(sources.default.ok_or_else(Self::missing_default)?),
+ .and_then(|mode| mode.helix_mode)
+ .or(sources.server.and_then(|mode| mode.helix_mode))
+ .or(sources.default.helix_mode)
+ .ok_or_else(Self::missing_default)?,
))
}
@@ -162,6 +162,19 @@ impl<T> Receiver<T> {
pending_waker_id: None,
}
}
+
+ /// Creates a new [`Receiver`] holding an initial value that will never change.
+ pub fn constant(value: T) -> Self {
+ let state = Arc::new(RwLock::new(State {
+ value,
+ wakers: BTreeMap::new(),
+ next_waker_id: WakerId::default(),
+ version: 0,
+ closed: false,
+ }));
+
+ Self { state, version: 0 }
+ }
}
impl<T: Clone> Receiver<T> {
@@ -218,7 +231,7 @@ mod tests {
let mut tasks = Vec::new();
tasks.push(cx.background_spawn({
- let executor = cx.executor().clone();
+ let executor = cx.executor();
let next_id = next_id.clone();
let closed = closed.clone();
async move {
@@ -57,7 +57,7 @@ impl WebSearchRegistry {
) {
let id = provider.id();
let provider = Arc::new(provider);
- self.providers.insert(id.clone(), provider.clone());
+ self.providers.insert(id, provider.clone());
if self.active_provider.is_none() {
self.active_provider = Some(provider);
}
@@ -50,7 +50,7 @@ impl State {
}
}
-pub const ZED_WEB_SEARCH_PROVIDER_ID: &'static str = "zed.dev";
+pub const ZED_WEB_SEARCH_PROVIDER_ID: &str = "zed.dev";
impl WebSearchProvider for CloudWebSearchProvider {
fn id(&self) -> WebSearchProviderId {
@@ -27,11 +27,10 @@ fn register_web_search_providers(
cx.subscribe(
&LanguageModelRegistry::global(cx),
- move |this, registry, event, cx| match event {
- language_model::Event::DefaultModelChanged => {
+ move |this, registry, event, cx| {
+ if let language_model::Event::DefaultModelChanged = event {
register_zed_web_search_provider(this, client.clone(), ®istry, cx)
}
- _ => {}
},
)
.detach();
@@ -29,7 +29,6 @@ test-support = [
any_vec.workspace = true
anyhow.workspace = true
async-recursion.workspace = true
-bincode = "1.2.1"
call.workspace = true
client.workspace = true
clock.workspace = true
@@ -80,5 +79,6 @@ project = { workspace = true, features = ["test-support"] }
session = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
+pretty_assertions.workspace = true
tempfile.workspace = true
zlog.workspace = true
@@ -171,7 +171,7 @@ where
}
fn panel_focus_handle(&self, cx: &App) -> FocusHandle {
- self.read(cx).focus_handle(cx).clone()
+ self.read(cx).focus_handle(cx)
}
fn activation_priority(&self, cx: &App) -> u32 {
@@ -340,7 +340,7 @@ impl Dock {
pub fn panel<T: Panel>(&self) -> Option<Entity<T>> {
self.panel_entries
.iter()
- .find_map(|entry| entry.panel.to_any().clone().downcast().ok())
+ .find_map(|entry| entry.panel.to_any().downcast().ok())
}
pub fn panel_index_for_type<T: Panel>(&self) -> Option<usize> {
@@ -915,6 +915,11 @@ impl Render for PanelButtons {
.on_click({
let action = action.boxed_clone();
move |_, window, cx| {
+ telemetry::event!(
+ "Panel Button Clicked",
+ name = name,
+ toggle_state = !is_open
+ );
window.focus(&focus_handle);
window.dispatch_action(action.boxed_clone(), cx)
}
@@ -5,7 +5,9 @@ use smallvec::SmallVec;
use ui::App;
use util::{ResultExt, paths::PathExt};
-use crate::{NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId};
+use crate::{
+ NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId, path_list::PathList,
+};
pub fn init(cx: &mut App) {
let manager = cx.new(|_| HistoryManager::new());
@@ -44,7 +46,13 @@ impl HistoryManager {
.unwrap_or_default()
.into_iter()
.rev()
- .map(|(id, location)| HistoryManagerEntry::new(id, &location))
+ .filter_map(|(id, location, paths)| {
+ if matches!(location, SerializedWorkspaceLocation::Local) {
+ Some(HistoryManagerEntry::new(id, &paths))
+ } else {
+ None
+ }
+ })
.collect::<Vec<_>>();
this.update(cx, |this, cx| {
this.history = recent_folders;
@@ -118,9 +126,9 @@ impl HistoryManager {
}
impl HistoryManagerEntry {
- pub fn new(id: WorkspaceId, location: &SerializedWorkspaceLocation) -> Self {
- let path = location
- .sorted_paths()
+ pub fn new(id: WorkspaceId, paths: &PathList) -> Self {
+ let path = paths
+ .paths()
.iter()
.map(|path| path.compact())
.collect::<SmallVec<[PathBuf; 2]>>();
@@ -0,0 +1,117 @@
+use std::{path::Path, sync::Arc};
+
+use gpui::{EventEmitter, FocusHandle, Focusable};
+use ui::{
+ App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement,
+ KeyBinding, Label, LabelCommon, LabelSize, ParentElement, Render, SharedString, Styled as _,
+ Window, h_flex, v_flex,
+};
+use zed_actions::workspace::OpenWithSystem;
+
+use crate::Item;
+
+/// A view to display when a certain buffer fails to open.
+pub struct InvalidBufferView {
+ /// Which path was attempted to open.
+ pub abs_path: Arc<Path>,
+ /// An error message, happened when opening the buffer.
+ pub error: SharedString,
+ is_local: bool,
+ focus_handle: FocusHandle,
+}
+
+impl InvalidBufferView {
+ pub fn new(
+ abs_path: &Path,
+ is_local: bool,
+ e: &anyhow::Error,
+ _: &mut Window,
+ cx: &mut App,
+ ) -> Self {
+ Self {
+ is_local,
+ abs_path: Arc::from(abs_path),
+ error: format!("{}", e.root_cause()).into(),
+ focus_handle: cx.focus_handle(),
+ }
+ }
+}
+
+impl Item for InvalidBufferView {
+ type Event = ();
+
+ fn tab_content_text(&self, mut detail: usize, _: &App) -> SharedString {
+ // Ensure we always render at least the filename.
+ detail += 1;
+
+ let path = self.abs_path.as_ref();
+
+ let mut prefix = path;
+ while detail > 0 {
+ if let Some(parent) = prefix.parent() {
+ prefix = parent;
+ detail -= 1;
+ } else {
+ break;
+ }
+ }
+
+ let path = if detail > 0 {
+ path
+ } else {
+ path.strip_prefix(prefix).unwrap_or(path)
+ };
+
+ SharedString::new(path.to_string_lossy())
+ }
+}
+
+impl EventEmitter<()> for InvalidBufferView {}
+
+impl Focusable for InvalidBufferView {
+ fn focus_handle(&self, _: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl Render for InvalidBufferView {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl gpui::IntoElement {
+ let abs_path = self.abs_path.clone();
+ v_flex()
+ .size_full()
+ .track_focus(&self.focus_handle(cx))
+ .flex_none()
+ .justify_center()
+ .overflow_hidden()
+ .key_context("InvalidBuffer")
+ .child(
+ h_flex().size_full().justify_center().child(
+ v_flex()
+ .justify_center()
+ .gap_2()
+ .child(h_flex().justify_center().child("Could not open file"))
+ .child(
+ h_flex()
+ .justify_center()
+ .child(Label::new(self.error.clone()).size(LabelSize::Small)),
+ )
+ .when(self.is_local, |contents| {
+ contents.child(
+ h_flex().justify_center().child(
+ Button::new("open-with-system", "Open in Default App")
+ .on_click(move |_, _, cx| {
+ cx.open_with_system(&abs_path);
+ })
+ .style(ButtonStyle::Outlined)
+ .key_binding(KeyBinding::for_action(
+ &OpenWithSystem,
+ window,
+ cx,
+ )),
+ ),
+ )
+ }),
+ ),
+ )
+ }
+}
@@ -1,6 +1,7 @@
use crate::{
CollaboratorId, DelayedDebouncedEditAction, FollowableViewRegistry, ItemNavHistory,
SerializableItemRegistry, ToolbarItemLocation, ViewId, Workspace, WorkspaceId,
+ invalid_buffer_view::InvalidBufferView,
pane::{self, Pane},
persistence::model::ItemId,
searchable::SearchableItemHandle,
@@ -16,12 +17,13 @@ use gpui::{
use project::{Project, ProjectEntryId, ProjectPath};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsLocation, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsLocation, SettingsSources, SettingsUi};
use smallvec::SmallVec;
use std::{
any::{Any, TypeId},
cell::RefCell,
ops::Range,
+ path::Path,
rc::Rc,
sync::Arc,
time::Duration,
@@ -99,7 +101,8 @@ pub enum ActivateOnClose {
LeftNeighbour,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "tabs")]
pub struct ItemSettingsContent {
/// Whether to show the Git file status on a tab item.
///
@@ -128,7 +131,8 @@ pub struct ItemSettingsContent {
show_close_button: Option<ShowCloseButton>,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "preview_tabs")]
pub struct PreviewTabsSettingsContent {
/// Whether to show opened editors as preview tabs.
/// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic.
@@ -146,8 +150,6 @@ pub struct PreviewTabsSettingsContent {
}
impl Settings for ItemSettings {
- const KEY: Option<&'static str> = Some("tabs");
-
type FileContent = ItemSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -185,8 +187,6 @@ impl Settings for ItemSettings {
}
impl Settings for PreviewTabsSettings {
- const KEY: Option<&'static str> = Some("preview_tabs");
-
type FileContent = PreviewTabsSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -539,7 +539,6 @@ pub trait ItemHandle: 'static + Send {
cx: &mut Context<Workspace>,
);
fn deactivated(&self, window: &mut Window, cx: &mut App);
- fn discarded(&self, project: Entity<Project>, window: &mut Window, cx: &mut App);
fn on_removed(&self, cx: &App);
fn workspace_deactivated(&self, window: &mut Window, cx: &mut App);
fn navigate(&self, data: Box<dyn Any>, window: &mut Window, cx: &mut App) -> bool;
@@ -973,10 +972,6 @@ impl<T: Item> ItemHandle for Entity<T> {
});
}
- fn discarded(&self, project: Entity<Project>, window: &mut Window, cx: &mut App) {
- self.update(cx, |this, cx| this.discarded(project, window, cx));
- }
-
fn deactivated(&self, window: &mut Window, cx: &mut App) {
self.update(cx, |this, cx| this.deactivated(window, cx));
}
@@ -1161,6 +1156,22 @@ pub trait ProjectItem: Item {
) -> Self
where
Self: Sized;
+
+ /// A fallback handler, which will be called after [`project::ProjectItem::try_open`] fails,
+ /// with the error from that failure as an argument.
+ /// Allows to open an item that can gracefully display and handle errors.
+ fn for_broken_project_item(
+ _abs_path: &Path,
+ _is_local: bool,
+ _e: &anyhow::Error,
+ _window: &mut Window,
+ _cx: &mut App,
+ ) -> Option<InvalidBufferView>
+ where
+ Self: Sized,
+ {
+ None
+ }
}
#[derive(Debug)]
@@ -1012,7 +1012,6 @@ where
let message: SharedString = format!("Error: {err}").into();
log::error!("Showing error notification in app: {message}");
show_app_notification(workspace_error_notification_id(), cx, {
- let message = message.clone();
move |cx| {
cx.new({
let message = message.clone();
@@ -2,6 +2,7 @@ use crate::{
CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible,
SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace,
WorkspaceItemBuilder,
+ invalid_buffer_view::InvalidBufferView,
item::{
ActivateOnClose, ClosePosition, Item, ItemHandle, ItemSettings, PreviewTabsSettings,
ProjectItemKind, SaveOptions, ShowCloseButton, ShowDiagnostics, TabContentParams,
@@ -253,9 +254,6 @@ pub enum Event {
Remove {
focus_on_pane: Option<Entity<Pane>>,
},
- RemoveItem {
- idx: usize,
- },
RemovedItem {
item: Box<dyn ItemHandle>,
},
@@ -286,7 +284,6 @@ impl fmt::Debug for Event {
.field("local", local)
.finish(),
Event::Remove { .. } => f.write_str("Remove"),
- Event::RemoveItem { idx } => f.debug_struct("RemoveItem").field("idx", idx).finish(),
Event::RemovedItem { item } => f
.debug_struct("RemovedItem")
.field("item", &item.item_id())
@@ -480,7 +477,7 @@ impl Pane {
forward_stack: Default::default(),
closed_stack: Default::default(),
paths_by_item: Default::default(),
- pane: handle.clone(),
+ pane: handle,
next_timestamp,
}))),
toolbar: cx.new(|_| Toolbar::new()),
@@ -513,7 +510,7 @@ impl Pane {
}
}
- fn alternate_file(&mut self, window: &mut Window, cx: &mut Context<Pane>) {
+ fn alternate_file(&mut self, _: &AlternateFile, window: &mut Window, cx: &mut Context<Pane>) {
let (_, alternative) = &self.alternate_file_items;
if let Some(alternative) = alternative {
let existing = self
@@ -787,7 +784,7 @@ impl Pane {
!self.nav_history.0.lock().forward_stack.is_empty()
}
- pub fn navigate_backward(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ pub fn navigate_backward(&mut self, _: &GoBack, window: &mut Window, cx: &mut Context<Self>) {
if let Some(workspace) = self.workspace.upgrade() {
let pane = cx.entity().downgrade();
window.defer(cx, move |window, cx| {
@@ -798,7 +795,7 @@ impl Pane {
}
}
- fn navigate_forward(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ fn navigate_forward(&mut self, _: &GoForward, window: &mut Window, cx: &mut Context<Self>) {
if let Some(workspace) = self.workspace.upgrade() {
let pane = cx.entity().downgrade();
window.defer(cx, move |window, cx| {
@@ -897,19 +894,43 @@ impl Pane {
}
}
}
- if let Some((index, existing_item)) = existing_item {
- // If the item is already open, and the item is a preview item
- // and we are not allowing items to open as preview, mark the item as persistent.
- if let Some(preview_item_id) = self.preview_item_id
- && let Some(tab) = self.items.get(index)
- && tab.item_id() == preview_item_id
- && !allow_preview
- {
- self.set_preview_item_id(None, cx);
- }
- if activate {
- self.activate_item(index, focus_item, focus_item, window, cx);
+
+ let set_up_existing_item =
+ |index: usize, pane: &mut Self, window: &mut Window, cx: &mut Context<Self>| {
+ // If the item is already open, and the item is a preview item
+ // and we are not allowing items to open as preview, mark the item as persistent.
+ if let Some(preview_item_id) = pane.preview_item_id
+ && let Some(tab) = pane.items.get(index)
+ && tab.item_id() == preview_item_id
+ && !allow_preview
+ {
+ pane.set_preview_item_id(None, cx);
+ }
+ if activate {
+ pane.activate_item(index, focus_item, focus_item, window, cx);
+ }
+ };
+ let set_up_new_item = |new_item: Box<dyn ItemHandle>,
+ destination_index: Option<usize>,
+ pane: &mut Self,
+ window: &mut Window,
+ cx: &mut Context<Self>| {
+ if allow_preview {
+ pane.set_preview_item_id(Some(new_item.item_id()), cx);
}
+ pane.add_item_inner(
+ new_item,
+ true,
+ focus_item,
+ activate,
+ destination_index,
+ window,
+ cx,
+ );
+ };
+
+ if let Some((index, existing_item)) = existing_item {
+ set_up_existing_item(index, self, window, cx);
existing_item
} else {
// If the item is being opened as preview and we have an existing preview tab,
@@ -921,21 +942,46 @@ impl Pane {
};
let new_item = build_item(self, window, cx);
+ // A special case that won't ever get a `project_entry_id` but has to be deduplicated nonetheless.
+ if let Some(invalid_buffer_view) = new_item.downcast::<InvalidBufferView>() {
+ let mut already_open_view = None;
+ let mut views_to_close = HashSet::default();
+ for existing_error_view in self
+ .items_of_type::<InvalidBufferView>()
+ .filter(|item| item.read(cx).abs_path == invalid_buffer_view.read(cx).abs_path)
+ {
+ if already_open_view.is_none()
+ && existing_error_view.read(cx).error == invalid_buffer_view.read(cx).error
+ {
+ already_open_view = Some(existing_error_view);
+ } else {
+ views_to_close.insert(existing_error_view.item_id());
+ }
+ }
- if allow_preview {
- self.set_preview_item_id(Some(new_item.item_id()), cx);
- }
- self.add_item_inner(
- new_item.clone(),
- true,
- focus_item,
- activate,
- destination_index,
- window,
- cx,
- );
+ let resulting_item = match already_open_view {
+ Some(already_open_view) => {
+ if let Some(index) = self.index_for_item_id(already_open_view.item_id()) {
+ set_up_existing_item(index, self, window, cx);
+ }
+ Box::new(already_open_view) as Box<_>
+ }
+ None => {
+ set_up_new_item(new_item.clone(), destination_index, self, window, cx);
+ new_item
+ }
+ };
+
+ self.close_items(window, cx, SaveIntent::Skip, |existing_item| {
+ views_to_close.contains(&existing_item)
+ })
+ .detach();
- new_item
+ resulting_item
+ } else {
+ set_up_new_item(new_item.clone(), destination_index, self, window, cx);
+ new_item
+ }
}
}
@@ -1233,9 +1279,9 @@ impl Pane {
}
}
- pub fn activate_prev_item(
+ pub fn activate_previous_item(
&mut self,
- activate_pane: bool,
+ _: &ActivatePreviousItem,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -1245,12 +1291,12 @@ impl Pane {
} else if !self.items.is_empty() {
index = self.items.len() - 1;
}
- self.activate_item(index, activate_pane, activate_pane, window, cx);
+ self.activate_item(index, true, true, window, cx);
}
pub fn activate_next_item(
&mut self,
- activate_pane: bool,
+ _: &ActivateNextItem,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -1260,10 +1306,15 @@ impl Pane {
} else {
index = 0;
}
- self.activate_item(index, activate_pane, activate_pane, window, cx);
+ self.activate_item(index, true, true, window, cx);
}
- pub fn swap_item_left(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ pub fn swap_item_left(
+ &mut self,
+ _: &SwapItemLeft,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
let index = self.active_item_index;
if index == 0 {
return;
@@ -1273,9 +1324,14 @@ impl Pane {
self.activate_item(index - 1, true, true, window, cx);
}
- pub fn swap_item_right(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ pub fn swap_item_right(
+ &mut self,
+ _: &SwapItemRight,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
let index = self.active_item_index;
- if index + 1 == self.items.len() {
+ if index + 1 >= self.items.len() {
return;
}
@@ -1283,6 +1339,16 @@ impl Pane {
self.activate_item(index + 1, true, true, window, cx);
}
+ pub fn activate_last_item(
+ &mut self,
+ _: &ActivateLastItem,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let index = self.items.len().saturating_sub(1);
+ self.activate_item(index, true, true, window, cx);
+ }
+
pub fn close_active_item(
&mut self,
action: &CloseActiveItem,
@@ -2026,11 +2092,10 @@ impl Pane {
Ok(0) => {}
Ok(1) => {
// Don't save this file
- pane.update_in(cx, |pane, window, cx| {
+ pane.update_in(cx, |pane, _, cx| {
if pane.is_tab_pinned(item_ix) && !item.can_save(cx) {
pane.pinned_tab_count -= 1;
}
- item.discarded(project, window, cx)
})
.log_err();
return Ok(true);
@@ -2516,7 +2581,7 @@ impl Pane {
this.handle_external_paths_drop(paths, window, cx)
}))
.when_some(item.tab_tooltip_content(cx), |tab, content| match content {
- TabTooltipContent::Text(text) => tab.tooltip(Tooltip::text(text.clone())),
+ TabTooltipContent::Text(text) => tab.tooltip(Tooltip::text(text)),
TabTooltipContent::Custom(element_fn) => {
tab.tooltip(move |window, cx| element_fn(window, cx))
}
@@ -2583,10 +2648,8 @@ impl Pane {
.children(
std::iter::once(if let Some(decorated_icon) = decorated_icon {
Some(div().child(decorated_icon.into_any_element()))
- } else if let Some(icon) = icon {
- Some(div().child(icon.into_any_element()))
} else {
- None
+ icon.map(|icon| div().child(icon.into_any_element()))
})
.flatten(),
)
@@ -2833,7 +2896,9 @@ impl Pane {
.on_click({
let entity = cx.entity();
move |_, window, cx| {
- entity.update(cx, |pane, cx| pane.navigate_backward(window, cx))
+ entity.update(cx, |pane, cx| {
+ pane.navigate_backward(&Default::default(), window, cx)
+ })
}
})
.disabled(!self.can_navigate_backward())
@@ -2848,7 +2913,11 @@ impl Pane {
.icon_size(IconSize::Small)
.on_click({
let entity = cx.entity();
- move |_, window, cx| entity.update(cx, |pane, cx| pane.navigate_forward(window, cx))
+ move |_, window, cx| {
+ entity.update(cx, |pane, cx| {
+ pane.navigate_forward(&Default::default(), window, cx)
+ })
+ }
})
.disabled(!self.can_navigate_forward())
.tooltip({
@@ -3084,7 +3153,7 @@ impl Pane {
.read(cx)
.items()
.find(|item| item.item_id() == item_id)
- .map(|item| item.clone())
+ .cloned()
else {
return;
};
@@ -3480,9 +3549,6 @@ impl Render for Pane {
.size_full()
.flex_none()
.overflow_hidden()
- .on_action(cx.listener(|pane, _: &AlternateFile, window, cx| {
- pane.alternate_file(window, cx);
- }))
.on_action(
cx.listener(|pane, _: &SplitLeft, _, cx| pane.split(SplitDirection::Left, cx)),
)
@@ -3499,12 +3565,6 @@ impl Render for Pane {
.on_action(
cx.listener(|pane, _: &SplitDown, _, cx| pane.split(SplitDirection::Down, cx)),
)
- .on_action(
- cx.listener(|pane, _: &GoBack, window, cx| pane.navigate_backward(window, cx)),
- )
- .on_action(
- cx.listener(|pane, _: &GoForward, window, cx| pane.navigate_forward(window, cx)),
- )
.on_action(cx.listener(|_, _: &JoinIntoNext, _, cx| {
cx.emit(Event::JoinIntoNext);
}))
@@ -3512,6 +3572,8 @@ impl Render for Pane {
cx.emit(Event::JoinAll);
}))
.on_action(cx.listener(Pane::toggle_zoom))
+ .on_action(cx.listener(Self::navigate_backward))
+ .on_action(cx.listener(Self::navigate_forward))
.on_action(
cx.listener(|pane: &mut Pane, action: &ActivateItem, window, cx| {
pane.activate_item(
@@ -3523,33 +3585,14 @@ impl Render for Pane {
);
}),
)
- .on_action(
- cx.listener(|pane: &mut Pane, _: &ActivateLastItem, window, cx| {
- pane.activate_item(pane.items.len().saturating_sub(1), true, true, window, cx);
- }),
- )
- .on_action(
- cx.listener(|pane: &mut Pane, _: &ActivatePreviousItem, window, cx| {
- pane.activate_prev_item(true, window, cx);
- }),
- )
- .on_action(
- cx.listener(|pane: &mut Pane, _: &ActivateNextItem, window, cx| {
- pane.activate_next_item(true, window, cx);
- }),
- )
- .on_action(
- cx.listener(|pane, _: &SwapItemLeft, window, cx| pane.swap_item_left(window, cx)),
- )
- .on_action(
- cx.listener(|pane, _: &SwapItemRight, window, cx| pane.swap_item_right(window, cx)),
- )
- .on_action(cx.listener(|pane, action, window, cx| {
- pane.toggle_pin_tab(action, window, cx);
- }))
- .on_action(cx.listener(|pane, action, window, cx| {
- pane.unpin_all_tabs(action, window, cx);
- }))
+ .on_action(cx.listener(Self::alternate_file))
+ .on_action(cx.listener(Self::activate_last_item))
+ .on_action(cx.listener(Self::activate_previous_item))
+ .on_action(cx.listener(Self::activate_next_item))
+ .on_action(cx.listener(Self::swap_item_left))
+ .on_action(cx.listener(Self::swap_item_right))
+ .on_action(cx.listener(Self::toggle_pin_tab))
+ .on_action(cx.listener(Self::unpin_all_tabs))
.when(PreviewTabsSettings::get_global(cx).enabled, |this| {
this.on_action(cx.listener(|pane: &mut Pane, _: &TogglePreviewTab, _, cx| {
if let Some(active_item_id) = pane.active_item().map(|i| i.item_id()) {
@@ -6404,6 +6447,57 @@ mod tests {
.unwrap();
}
+ #[gpui::test]
+ async fn test_item_swapping_actions(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, None, cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+
+ let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+ assert_item_labels(&pane, [], cx);
+
+ // Test that these actions do not panic
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_right(&Default::default(), window, cx);
+ });
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_left(&Default::default(), window, cx);
+ });
+
+ add_labeled_item(&pane, "A", false, cx);
+ add_labeled_item(&pane, "B", false, cx);
+ add_labeled_item(&pane, "C", false, cx);
+ assert_item_labels(&pane, ["A", "B", "C*"], cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_right(&Default::default(), window, cx);
+ });
+ assert_item_labels(&pane, ["A", "B", "C*"], cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_left(&Default::default(), window, cx);
+ });
+ assert_item_labels(&pane, ["A", "C*", "B"], cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_left(&Default::default(), window, cx);
+ });
+ assert_item_labels(&pane, ["C*", "A", "B"], cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_left(&Default::default(), window, cx);
+ });
+ assert_item_labels(&pane, ["C*", "A", "B"], cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ pane.swap_item_right(&Default::default(), window, cx);
+ });
+ assert_item_labels(&pane, ["A", "C*", "B"], cx);
+ }
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -1175,7 +1175,7 @@ mod element {
bounding_boxes.clear();
let mut layout = PaneAxisLayout {
- dragged_handle: dragged_handle.clone(),
+ dragged_handle,
children: Vec::new(),
};
for (ix, mut child) in mem::take(&mut self.children).into_iter().enumerate() {
@@ -0,0 +1,123 @@
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+
+use util::paths::SanitizedPath;
+
+/// A list of absolute paths, in a specific order.
+///
+/// The paths are stored in lexicographic order, so that they can be compared to
+/// other path lists without regard to the order of the paths.
+#[derive(Default, PartialEq, Eq, Debug, Clone)]
+pub struct PathList {
+ paths: Arc<[PathBuf]>,
+ order: Arc<[usize]>,
+}
+
+#[derive(Debug)]
+pub struct SerializedPathList {
+ pub paths: String,
+ pub order: String,
+}
+
+impl PathList {
+ pub fn new<P: AsRef<Path>>(paths: &[P]) -> Self {
+ let mut indexed_paths: Vec<(usize, PathBuf)> = paths
+ .iter()
+ .enumerate()
+ .map(|(ix, path)| (ix, SanitizedPath::new(path).into()))
+ .collect();
+ indexed_paths.sort_by(|(_, a), (_, b)| a.cmp(b));
+ let order = indexed_paths.iter().map(|e| e.0).collect::<Vec<_>>().into();
+ let paths = indexed_paths
+ .into_iter()
+ .map(|e| e.1)
+ .collect::<Vec<_>>()
+ .into();
+ Self { order, paths }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.paths.is_empty()
+ }
+
+ pub fn paths(&self) -> &[PathBuf] {
+ self.paths.as_ref()
+ }
+
+ pub fn order(&self) -> &[usize] {
+ self.order.as_ref()
+ }
+
+ pub fn is_lexicographically_ordered(&self) -> bool {
+ self.order.iter().enumerate().all(|(i, &j)| i == j)
+ }
+
+ pub fn deserialize(serialized: &SerializedPathList) -> Self {
+ let mut paths: Vec<PathBuf> = if serialized.paths.is_empty() {
+ Vec::new()
+ } else {
+ serialized.paths.split('\n').map(PathBuf::from).collect()
+ };
+
+ let mut order: Vec<usize> = serialized
+ .order
+ .split(',')
+ .filter_map(|s| s.parse().ok())
+ .collect();
+
+ if !paths.is_sorted() || order.len() != paths.len() {
+ order = (0..paths.len()).collect();
+ paths.sort();
+ }
+
+ Self {
+ paths: paths.into(),
+ order: order.into(),
+ }
+ }
+
+ pub fn serialize(&self) -> SerializedPathList {
+ use std::fmt::Write as _;
+
+ let mut paths = String::new();
+ for path in self.paths.iter() {
+ if !paths.is_empty() {
+ paths.push('\n');
+ }
+ paths.push_str(&path.to_string_lossy());
+ }
+
+ let mut order = String::new();
+ for ix in self.order.iter() {
+ if !order.is_empty() {
+ order.push(',');
+ }
+ write!(&mut order, "{}", *ix).unwrap();
+ }
+ SerializedPathList { paths, order }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_path_list() {
+ let list1 = PathList::new(&["a/d", "a/c"]);
+ let list2 = PathList::new(&["a/c", "a/d"]);
+
+ assert_eq!(list1.paths(), list2.paths());
+ assert_ne!(list1, list2);
+ assert_eq!(list1.order(), &[1, 0]);
+ assert_eq!(list2.order(), &[0, 1]);
+
+ let list1_deserialized = PathList::deserialize(&list1.serialize());
+ assert_eq!(list1_deserialized, list1);
+
+ let list2_deserialized = PathList::deserialize(&list2.serialize());
+ assert_eq!(list2_deserialized, list2);
+ }
+}
@@ -9,33 +9,40 @@ use std::{
};
use anyhow::{Context as _, Result, bail};
-use client::DevServerProjectId;
-use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
+use collections::{HashMap, IndexSet};
+use db::{
+ query,
+ sqlez::{connection::Connection, domain::Domain},
+ sqlez_macros::sql,
+};
use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size};
-use itertools::Itertools;
use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint};
-use language::{LanguageName, Toolchain};
+use language::{LanguageName, Toolchain, ToolchainScope};
use project::WorktreeId;
-use remote::ssh_session::SshProjectId;
+use remote::{RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions};
use sqlez::{
bindable::{Bind, Column, StaticColumnCount},
- statement::{SqlType, Statement},
+ statement::Statement,
thread_safe_connection::ThreadSafeConnection,
};
-use ui::{App, px};
+use ui::{App, SharedString, px};
use util::{ResultExt, maybe};
use uuid::Uuid;
-use crate::WorkspaceId;
+use crate::{
+ WorkspaceId,
+ path_list::{PathList, SerializedPathList},
+ persistence::model::RemoteConnectionKind,
+};
use model::{
- GroupId, ItemId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
- SerializedSshProject, SerializedWorkspace,
+ GroupId, ItemId, PaneId, RemoteConnectionId, SerializedItem, SerializedPane,
+ SerializedPaneGroup, SerializedWorkspace,
};
-use self::model::{DockStructure, LocalPathsOrder, SerializedWorkspaceLocation};
+use self::model::{DockStructure, SerializedWorkspaceLocation};
#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) struct SerializedAxis(pub(crate) gpui::Axis);
@@ -162,6 +169,7 @@ impl From<BreakpointState> for BreakpointStateWrapper<'static> {
BreakpointStateWrapper(Cow::Owned(kind))
}
}
+
impl StaticColumnCount for BreakpointStateWrapper<'_> {
fn column_count() -> usize {
1
@@ -186,11 +194,6 @@ impl Column for BreakpointStateWrapper<'_> {
}
}
-/// This struct is used to implement traits on Vec<breakpoint>
-#[derive(Debug)]
-#[allow(dead_code)]
-struct Breakpoints(Vec<Breakpoint>);
-
impl sqlez::bindable::StaticColumnCount for Breakpoint {
fn column_count() -> usize {
// Position, log message, condition message, and hit condition message
@@ -239,26 +242,6 @@ impl Column for Breakpoint {
}
}
-impl Column for Breakpoints {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let mut breakpoints = Vec::new();
- let mut index = start_index;
-
- loop {
- match statement.column_type(index) {
- Ok(SqlType::Null) => break,
- _ => {
- let (breakpoint, next_index) = Breakpoint::column(statement, index)?;
-
- breakpoints.push(breakpoint);
- index = next_index;
- }
- }
- }
- Ok((Breakpoints(breakpoints), index))
- }
-}
-
#[derive(Clone, Debug, PartialEq)]
struct SerializedPixels(gpui::Pixels);
impl sqlez::bindable::StaticColumnCount for SerializedPixels {}
@@ -274,247 +257,189 @@ impl sqlez::bindable::Bind for SerializedPixels {
}
}
-define_connection! {
- // Current schema shape using pseudo-rust syntax:
- //
- // workspaces(
- // workspace_id: usize, // Primary key for workspaces
- // local_paths: Bincode<Vec<PathBuf>>,
- // local_paths_order: Bincode<Vec<usize>>,
- // dock_visible: bool, // Deprecated
- // dock_anchor: DockAnchor, // Deprecated
- // dock_pane: Option<usize>, // Deprecated
- // left_sidebar_open: boolean,
- // timestamp: String, // UTC YYYY-MM-DD HH:MM:SS
- // window_state: String, // WindowBounds Discriminant
- // window_x: Option<f32>, // WindowBounds::Fixed RectF x
- // window_y: Option<f32>, // WindowBounds::Fixed RectF y
- // window_width: Option<f32>, // WindowBounds::Fixed RectF width
- // window_height: Option<f32>, // WindowBounds::Fixed RectF height
- // display: Option<Uuid>, // Display id
- // fullscreen: Option<bool>, // Is the window fullscreen?
- // centered_layout: Option<bool>, // Is the Centered Layout mode activated?
- // session_id: Option<String>, // Session id
- // window_id: Option<u64>, // Window Id
- // )
- //
- // pane_groups(
- // group_id: usize, // Primary key for pane_groups
- // workspace_id: usize, // References workspaces table
- // parent_group_id: Option<usize>, // None indicates that this is the root node
- // position: Option<usize>, // None indicates that this is the root node
- // axis: Option<Axis>, // 'Vertical', 'Horizontal'
- // flexes: Option<Vec<f32>>, // A JSON array of floats
- // )
- //
- // panes(
- // pane_id: usize, // Primary key for panes
- // workspace_id: usize, // References workspaces table
- // active: bool,
- // )
- //
- // center_panes(
- // pane_id: usize, // Primary key for center_panes
- // parent_group_id: Option<usize>, // References pane_groups. If none, this is the root
- // position: Option<usize>, // None indicates this is the root
- // )
- //
- // CREATE TABLE items(
- // item_id: usize, // This is the item's view id, so this is not unique
- // workspace_id: usize, // References workspaces table
- // pane_id: usize, // References panes table
- // kind: String, // Indicates which view this connects to. This is the key in the item_deserializers global
- // position: usize, // Position of the item in the parent pane. This is equivalent to panes' position column
- // active: bool, // Indicates if this item is the active one in the pane
- // preview: bool // Indicates if this item is a preview item
- // )
- //
- // CREATE TABLE breakpoints(
- // workspace_id: usize Foreign Key, // References workspace table
- // path: PathBuf, // The absolute path of the file that this breakpoint belongs to
- // breakpoint_location: Vec<u32>, // A list of the locations of breakpoints
- // kind: int, // The kind of breakpoint (standard, log)
- // log_message: String, // log message for log breakpoints, otherwise it's Null
- // )
- pub static ref DB: WorkspaceDb<()> =
- &[
+pub struct WorkspaceDb(ThreadSafeConnection);
+
+impl Domain for WorkspaceDb {
+ const NAME: &str = stringify!(WorkspaceDb);
+
+ const MIGRATIONS: &[&str] = &[
+ sql!(
+ CREATE TABLE workspaces(
+ workspace_id INTEGER PRIMARY KEY,
+ workspace_location BLOB UNIQUE,
+ dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed.
+ dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed.
+ dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed.
+ left_sidebar_open INTEGER, // Boolean
+ timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ FOREIGN KEY(dock_pane) REFERENCES panes(pane_id)
+ ) STRICT;
+
+ CREATE TABLE pane_groups(
+ group_id INTEGER PRIMARY KEY,
+ workspace_id INTEGER NOT NULL,
+ parent_group_id INTEGER, // NULL indicates that this is a root node
+ position INTEGER, // NULL indicates that this is a root node
+ axis TEXT NOT NULL, // Enum: 'Vertical' / 'Horizontal'
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE,
+ FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+ ) STRICT;
+
+ CREATE TABLE panes(
+ pane_id INTEGER PRIMARY KEY,
+ workspace_id INTEGER NOT NULL,
+ active INTEGER NOT NULL, // Boolean
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE
+ ) STRICT;
+
+ CREATE TABLE center_panes(
+ pane_id INTEGER PRIMARY KEY,
+ parent_group_id INTEGER, // NULL means that this is a root pane
+ position INTEGER, // NULL means that this is a root pane
+ FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
+ ON DELETE CASCADE,
+ FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+ ) STRICT;
+
+ CREATE TABLE items(
+ item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique
+ workspace_id INTEGER NOT NULL,
+ pane_id INTEGER NOT NULL,
+ kind TEXT NOT NULL,
+ position INTEGER NOT NULL,
+ active INTEGER NOT NULL,
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ON UPDATE CASCADE,
+ FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
+ ON DELETE CASCADE,
+ PRIMARY KEY(item_id, workspace_id)
+ ) STRICT;
+ ),
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN window_state TEXT;
+ ALTER TABLE workspaces ADD COLUMN window_x REAL;
+ ALTER TABLE workspaces ADD COLUMN window_y REAL;
+ ALTER TABLE workspaces ADD COLUMN window_width REAL;
+ ALTER TABLE workspaces ADD COLUMN window_height REAL;
+ ALTER TABLE workspaces ADD COLUMN display BLOB;
+ ),
+ // Drop foreign key constraint from workspaces.dock_pane to panes table.
+ sql!(
+ CREATE TABLE workspaces_2(
+ workspace_id INTEGER PRIMARY KEY,
+ workspace_location BLOB UNIQUE,
+ dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed.
+ dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed.
+ dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed.
+ left_sidebar_open INTEGER, // Boolean
+ timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ window_state TEXT,
+ window_x REAL,
+ window_y REAL,
+ window_width REAL,
+ window_height REAL,
+ display BLOB
+ ) STRICT;
+ INSERT INTO workspaces_2 SELECT * FROM workspaces;
+ DROP TABLE workspaces;
+ ALTER TABLE workspaces_2 RENAME TO workspaces;
+ ),
+        // Add panel-related information
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN left_dock_visible INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN left_dock_active_panel TEXT;
+ ALTER TABLE workspaces ADD COLUMN right_dock_visible INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
+ ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
+ ),
+ // Add panel zoom persistence
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
+ ),
+ // Add pane group flex data
+ sql!(
+ ALTER TABLE pane_groups ADD COLUMN flexes TEXT;
+ ),
+ // Add fullscreen field to workspace
+ // Deprecated, `WindowBounds` holds the fullscreen state now.
+ // Preserving so users can downgrade Zed.
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN fullscreen INTEGER; //bool
+ ),
+ // Add preview field to items
+ sql!(
+ ALTER TABLE items ADD COLUMN preview INTEGER; //bool
+ ),
+ // Add centered_layout field to workspace
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN centered_layout INTEGER; //bool
+ ),
+ sql!(
+ CREATE TABLE remote_projects (
+ remote_project_id INTEGER NOT NULL UNIQUE,
+ path TEXT,
+ dev_server_name TEXT
+ );
+ ALTER TABLE workspaces ADD COLUMN remote_project_id INTEGER;
+ ALTER TABLE workspaces RENAME COLUMN workspace_location TO local_paths;
+ ),
+ sql!(
+ DROP TABLE remote_projects;
+ CREATE TABLE dev_server_projects (
+ id INTEGER NOT NULL UNIQUE,
+ path TEXT,
+ dev_server_name TEXT
+ );
+ ALTER TABLE workspaces DROP COLUMN remote_project_id;
+ ALTER TABLE workspaces ADD COLUMN dev_server_project_id INTEGER;
+ ),
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN local_paths_order BLOB;
+ ),
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN session_id TEXT DEFAULT NULL;
+ ),
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL;
+ ),
+ sql!(
+ ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0;
+ ),
+ sql!(
+ CREATE TABLE ssh_projects (
+ id INTEGER PRIMARY KEY,
+ host TEXT NOT NULL,
+ port INTEGER,
+ path TEXT NOT NULL,
+ user TEXT
+ );
+ ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE;
+ ),
+ sql!(
+ ALTER TABLE ssh_projects RENAME COLUMN path TO paths;
+ ),
+ sql!(
+ CREATE TABLE toolchains (
+ workspace_id INTEGER,
+ worktree_id INTEGER,
+ language_name TEXT NOT NULL,
+ name TEXT NOT NULL,
+ path TEXT NOT NULL,
+ PRIMARY KEY (workspace_id, worktree_id, language_name)
+ );
+ ),
+ sql!(
+ ALTER TABLE toolchains ADD COLUMN raw_json TEXT DEFAULT "{}";
+ ),
sql!(
- CREATE TABLE workspaces(
- workspace_id INTEGER PRIMARY KEY,
- workspace_location BLOB UNIQUE,
- dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed.
- dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed.
- dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed.
- left_sidebar_open INTEGER, // Boolean
- timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
- FOREIGN KEY(dock_pane) REFERENCES panes(pane_id)
- ) STRICT;
-
- CREATE TABLE pane_groups(
- group_id INTEGER PRIMARY KEY,
- workspace_id INTEGER NOT NULL,
- parent_group_id INTEGER, // NULL indicates that this is a root node
- position INTEGER, // NULL indicates that this is a root node
- axis TEXT NOT NULL, // Enum: 'Vertical' / 'Horizontal'
- FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
- ON DELETE CASCADE
- ON UPDATE CASCADE,
- FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
- ) STRICT;
-
- CREATE TABLE panes(
- pane_id INTEGER PRIMARY KEY,
- workspace_id INTEGER NOT NULL,
- active INTEGER NOT NULL, // Boolean
- FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
- ON DELETE CASCADE
- ON UPDATE CASCADE
- ) STRICT;
-
- CREATE TABLE center_panes(
- pane_id INTEGER PRIMARY KEY,
- parent_group_id INTEGER, // NULL means that this is a root pane
- position INTEGER, // NULL means that this is a root pane
- FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
- ON DELETE CASCADE,
- FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
- ) STRICT;
-
- CREATE TABLE items(
- item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique
- workspace_id INTEGER NOT NULL,
- pane_id INTEGER NOT NULL,
- kind TEXT NOT NULL,
- position INTEGER NOT NULL,
- active INTEGER NOT NULL,
- FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
- ON DELETE CASCADE
- ON UPDATE CASCADE,
- FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
- ON DELETE CASCADE,
- PRIMARY KEY(item_id, workspace_id)
- ) STRICT;
- ),
- sql!(
- ALTER TABLE workspaces ADD COLUMN window_state TEXT;
- ALTER TABLE workspaces ADD COLUMN window_x REAL;
- ALTER TABLE workspaces ADD COLUMN window_y REAL;
- ALTER TABLE workspaces ADD COLUMN window_width REAL;
- ALTER TABLE workspaces ADD COLUMN window_height REAL;
- ALTER TABLE workspaces ADD COLUMN display BLOB;
- ),
- // Drop foreign key constraint from workspaces.dock_pane to panes table.
- sql!(
- CREATE TABLE workspaces_2(
- workspace_id INTEGER PRIMARY KEY,
- workspace_location BLOB UNIQUE,
- dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed.
- dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed.
- dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed.
- left_sidebar_open INTEGER, // Boolean
- timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
- window_state TEXT,
- window_x REAL,
- window_y REAL,
- window_width REAL,
- window_height REAL,
- display BLOB
- ) STRICT;
- INSERT INTO workspaces_2 SELECT * FROM workspaces;
- DROP TABLE workspaces;
- ALTER TABLE workspaces_2 RENAME TO workspaces;
- ),
- // Add panels related information
- sql!(
- ALTER TABLE workspaces ADD COLUMN left_dock_visible INTEGER; //bool
- ALTER TABLE workspaces ADD COLUMN left_dock_active_panel TEXT;
- ALTER TABLE workspaces ADD COLUMN right_dock_visible INTEGER; //bool
- ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
- ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
- ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
- ),
- // Add panel zoom persistence
- sql!(
- ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
- ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
- ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
- ),
- // Add pane group flex data
- sql!(
- ALTER TABLE pane_groups ADD COLUMN flexes TEXT;
- ),
- // Add fullscreen field to workspace
- // Deprecated, `WindowBounds` holds the fullscreen state now.
- // Preserving so users can downgrade Zed.
- sql!(
- ALTER TABLE workspaces ADD COLUMN fullscreen INTEGER; //bool
- ),
- // Add preview field to items
- sql!(
- ALTER TABLE items ADD COLUMN preview INTEGER; //bool
- ),
- // Add centered_layout field to workspace
- sql!(
- ALTER TABLE workspaces ADD COLUMN centered_layout INTEGER; //bool
- ),
- sql!(
- CREATE TABLE remote_projects (
- remote_project_id INTEGER NOT NULL UNIQUE,
- path TEXT,
- dev_server_name TEXT
- );
- ALTER TABLE workspaces ADD COLUMN remote_project_id INTEGER;
- ALTER TABLE workspaces RENAME COLUMN workspace_location TO local_paths;
- ),
- sql!(
- DROP TABLE remote_projects;
- CREATE TABLE dev_server_projects (
- id INTEGER NOT NULL UNIQUE,
- path TEXT,
- dev_server_name TEXT
- );
- ALTER TABLE workspaces DROP COLUMN remote_project_id;
- ALTER TABLE workspaces ADD COLUMN dev_server_project_id INTEGER;
- ),
- sql!(
- ALTER TABLE workspaces ADD COLUMN local_paths_order BLOB;
- ),
- sql!(
- ALTER TABLE workspaces ADD COLUMN session_id TEXT DEFAULT NULL;
- ),
- sql!(
- ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL;
- ),
- sql!(
- ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0;
- ),
- sql!(
- CREATE TABLE ssh_projects (
- id INTEGER PRIMARY KEY,
- host TEXT NOT NULL,
- port INTEGER,
- path TEXT NOT NULL,
- user TEXT
- );
- ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE;
- ),
- sql!(
- ALTER TABLE ssh_projects RENAME COLUMN path TO paths;
- ),
- sql!(
- CREATE TABLE toolchains (
- workspace_id INTEGER,
- worktree_id INTEGER,
- language_name TEXT NOT NULL,
- name TEXT NOT NULL,
- path TEXT NOT NULL,
- PRIMARY KEY (workspace_id, worktree_id, language_name)
- );
- ),
- sql!(
- ALTER TABLE toolchains ADD COLUMN raw_json TEXT DEFAULT "{}";
- ),
- sql!(
CREATE TABLE breakpoints (
workspace_id INTEGER NOT NULL,
path TEXT NOT NULL,
@@ -526,39 +451,269 @@ define_connection! {
ON UPDATE CASCADE
);
),
- sql!(
- ALTER TABLE workspaces ADD COLUMN local_paths_array TEXT;
- CREATE UNIQUE INDEX local_paths_array_uq ON workspaces(local_paths_array);
- ALTER TABLE workspaces ADD COLUMN local_paths_order_array TEXT;
- ),
- sql!(
- ALTER TABLE breakpoints ADD COLUMN state INTEGER DEFAULT(0) NOT NULL
- ),
- sql!(
- ALTER TABLE breakpoints DROP COLUMN kind
- ),
- sql!(ALTER TABLE toolchains ADD COLUMN relative_worktree_path TEXT DEFAULT "" NOT NULL),
- sql!(
- ALTER TABLE breakpoints ADD COLUMN condition TEXT;
- ALTER TABLE breakpoints ADD COLUMN hit_condition TEXT;
- ),
- sql!(CREATE TABLE toolchains2 (
- workspace_id INTEGER,
- worktree_id INTEGER,
- language_name TEXT NOT NULL,
- name TEXT NOT NULL,
- path TEXT NOT NULL,
- raw_json TEXT NOT NULL,
- relative_worktree_path TEXT NOT NULL,
- PRIMARY KEY (workspace_id, worktree_id, language_name, relative_worktree_path)) STRICT;
- INSERT INTO toolchains2
- SELECT * FROM toolchains;
- DROP TABLE toolchains;
- ALTER TABLE toolchains2 RENAME TO toolchains;
- )
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN local_paths_array TEXT;
+ CREATE UNIQUE INDEX local_paths_array_uq ON workspaces(local_paths_array);
+ ALTER TABLE workspaces ADD COLUMN local_paths_order_array TEXT;
+ ),
+ sql!(
+ ALTER TABLE breakpoints ADD COLUMN state INTEGER DEFAULT(0) NOT NULL
+ ),
+ sql!(
+ ALTER TABLE breakpoints DROP COLUMN kind
+ ),
+ sql!(ALTER TABLE toolchains ADD COLUMN relative_worktree_path TEXT DEFAULT "" NOT NULL),
+ sql!(
+ ALTER TABLE breakpoints ADD COLUMN condition TEXT;
+ ALTER TABLE breakpoints ADD COLUMN hit_condition TEXT;
+ ),
+ sql!(CREATE TABLE toolchains2 (
+ workspace_id INTEGER,
+ worktree_id INTEGER,
+ language_name TEXT NOT NULL,
+ name TEXT NOT NULL,
+ path TEXT NOT NULL,
+ raw_json TEXT NOT NULL,
+ relative_worktree_path TEXT NOT NULL,
+ PRIMARY KEY (workspace_id, worktree_id, language_name, relative_worktree_path)) STRICT;
+ INSERT INTO toolchains2
+ SELECT * FROM toolchains;
+ DROP TABLE toolchains;
+ ALTER TABLE toolchains2 RENAME TO toolchains;
+ ),
+ sql!(
+ CREATE TABLE ssh_connections (
+ id INTEGER PRIMARY KEY,
+ host TEXT NOT NULL,
+ port INTEGER,
+ user TEXT
+ );
+
+ INSERT INTO ssh_connections (host, port, user)
+ SELECT DISTINCT host, port, user
+ FROM ssh_projects;
+
+ CREATE TABLE workspaces_2(
+ workspace_id INTEGER PRIMARY KEY,
+ paths TEXT,
+ paths_order TEXT,
+ ssh_connection_id INTEGER REFERENCES ssh_connections(id),
+ timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ window_state TEXT,
+ window_x REAL,
+ window_y REAL,
+ window_width REAL,
+ window_height REAL,
+ display BLOB,
+ left_dock_visible INTEGER,
+ left_dock_active_panel TEXT,
+ right_dock_visible INTEGER,
+ right_dock_active_panel TEXT,
+ bottom_dock_visible INTEGER,
+ bottom_dock_active_panel TEXT,
+ left_dock_zoom INTEGER,
+ right_dock_zoom INTEGER,
+ bottom_dock_zoom INTEGER,
+ fullscreen INTEGER,
+ centered_layout INTEGER,
+ session_id TEXT,
+ window_id INTEGER
+ ) STRICT;
+
+ INSERT
+ INTO workspaces_2
+ SELECT
+ workspaces.workspace_id,
+ CASE
+ WHEN ssh_projects.id IS NOT NULL THEN ssh_projects.paths
+ ELSE
+ CASE
+ WHEN workspaces.local_paths_array IS NULL OR workspaces.local_paths_array = "" THEN
+ NULL
+ ELSE
+ replace(workspaces.local_paths_array, ',', CHAR(10))
+ END
+ END as paths,
+
+ CASE
+ WHEN ssh_projects.id IS NOT NULL THEN ""
+ ELSE workspaces.local_paths_order_array
+ END as paths_order,
+
+ CASE
+ WHEN ssh_projects.id IS NOT NULL THEN (
+ SELECT ssh_connections.id
+ FROM ssh_connections
+ WHERE
+ ssh_connections.host IS ssh_projects.host AND
+ ssh_connections.port IS ssh_projects.port AND
+ ssh_connections.user IS ssh_projects.user
+ )
+ ELSE NULL
+ END as ssh_connection_id,
+
+ workspaces.timestamp,
+ workspaces.window_state,
+ workspaces.window_x,
+ workspaces.window_y,
+ workspaces.window_width,
+ workspaces.window_height,
+ workspaces.display,
+ workspaces.left_dock_visible,
+ workspaces.left_dock_active_panel,
+ workspaces.right_dock_visible,
+ workspaces.right_dock_active_panel,
+ workspaces.bottom_dock_visible,
+ workspaces.bottom_dock_active_panel,
+ workspaces.left_dock_zoom,
+ workspaces.right_dock_zoom,
+ workspaces.bottom_dock_zoom,
+ workspaces.fullscreen,
+ workspaces.centered_layout,
+ workspaces.session_id,
+ workspaces.window_id
+ FROM
+ workspaces LEFT JOIN
+ ssh_projects ON
+ workspaces.ssh_project_id = ssh_projects.id;
+
+ DELETE FROM workspaces_2
+ WHERE workspace_id NOT IN (
+ SELECT MAX(workspace_id)
+ FROM workspaces_2
+ GROUP BY ssh_connection_id, paths
+ );
+
+ DROP TABLE ssh_projects;
+ DROP TABLE workspaces;
+ ALTER TABLE workspaces_2 RENAME TO workspaces;
+
+ CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(ssh_connection_id, paths);
+ ),
+ // Fix any data from when workspaces.paths were briefly encoded as JSON arrays
+ sql!(
+ UPDATE workspaces
+ SET paths = CASE
+ WHEN substr(paths, 1, 2) = '[' || '"' AND substr(paths, -2, 2) = '"' || ']' THEN
+ replace(
+ substr(paths, 3, length(paths) - 4),
+ '"' || ',' || '"',
+ CHAR(10)
+ )
+ ELSE
+ replace(paths, ',', CHAR(10))
+ END
+ WHERE paths IS NOT NULL
+ ),
+ sql!(
+ CREATE TABLE remote_connections(
+ id INTEGER PRIMARY KEY,
+ kind TEXT NOT NULL,
+ host TEXT,
+ port INTEGER,
+ user TEXT,
+ distro TEXT
+ );
+
+ CREATE TABLE workspaces_2(
+ workspace_id INTEGER PRIMARY KEY,
+ paths TEXT,
+ paths_order TEXT,
+ remote_connection_id INTEGER REFERENCES remote_connections(id),
+ timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ window_state TEXT,
+ window_x REAL,
+ window_y REAL,
+ window_width REAL,
+ window_height REAL,
+ display BLOB,
+ left_dock_visible INTEGER,
+ left_dock_active_panel TEXT,
+ right_dock_visible INTEGER,
+ right_dock_active_panel TEXT,
+ bottom_dock_visible INTEGER,
+ bottom_dock_active_panel TEXT,
+ left_dock_zoom INTEGER,
+ right_dock_zoom INTEGER,
+ bottom_dock_zoom INTEGER,
+ fullscreen INTEGER,
+ centered_layout INTEGER,
+ session_id TEXT,
+ window_id INTEGER
+ ) STRICT;
+
+ INSERT INTO remote_connections
+ SELECT
+ id,
+ "ssh" as kind,
+ host,
+ port,
+ user,
+ NULL as distro
+ FROM ssh_connections;
+
+ INSERT
+ INTO workspaces_2
+ SELECT
+ workspace_id,
+ paths,
+ paths_order,
+ ssh_connection_id as remote_connection_id,
+ timestamp,
+ window_state,
+ window_x,
+ window_y,
+ window_width,
+ window_height,
+ display,
+ left_dock_visible,
+ left_dock_active_panel,
+ right_dock_visible,
+ right_dock_active_panel,
+ bottom_dock_visible,
+ bottom_dock_active_panel,
+ left_dock_zoom,
+ right_dock_zoom,
+ bottom_dock_zoom,
+ fullscreen,
+ centered_layout,
+ session_id,
+ window_id
+ FROM
+ workspaces;
+
+ DROP TABLE workspaces;
+ ALTER TABLE workspaces_2 RENAME TO workspaces;
+
+ CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(remote_connection_id, paths);
+ ),
+ sql!(CREATE TABLE user_toolchains (
+ remote_connection_id INTEGER,
+ workspace_id INTEGER NOT NULL,
+ worktree_id INTEGER NOT NULL,
+ relative_worktree_path TEXT NOT NULL,
+ language_name TEXT NOT NULL,
+ name TEXT NOT NULL,
+ path TEXT NOT NULL,
+ raw_json TEXT NOT NULL,
+
+ PRIMARY KEY (workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json)
+ ) STRICT;),
+ sql!(
+ DROP TABLE ssh_connections;
+ ),
];
+
+ // Allow recovering from bad migration that was initially shipped to nightly
+ // when introducing the ssh_connections table.
+ fn should_allow_migration_change(_index: usize, old: &str, new: &str) -> bool {
+ old.starts_with("CREATE TABLE ssh_connections")
+ && new.starts_with("CREATE TABLE ssh_connections")
+ }
}
+db::static_connection!(DB, WorkspaceDb, []);
+
impl WorkspaceDb {
/// Returns a serialized workspace for the given worktree_roots. If the passed array
/// is empty, the most recent workspace is returned instead. If no workspace for the
@@ -566,17 +721,33 @@ impl WorkspaceDb {
pub(crate) fn workspace_for_roots<P: AsRef<Path>>(
&self,
worktree_roots: &[P],
+ ) -> Option<SerializedWorkspace> {
+ self.workspace_for_roots_internal(worktree_roots, None)
+ }
+
+ pub(crate) fn remote_workspace_for_roots<P: AsRef<Path>>(
+ &self,
+ worktree_roots: &[P],
+ ssh_project_id: RemoteConnectionId,
+ ) -> Option<SerializedWorkspace> {
+ self.workspace_for_roots_internal(worktree_roots, Some(ssh_project_id))
+ }
+
+ pub(crate) fn workspace_for_roots_internal<P: AsRef<Path>>(
+ &self,
+ worktree_roots: &[P],
+ remote_connection_id: Option<RemoteConnectionId>,
) -> Option<SerializedWorkspace> {
// paths are sorted before db interactions to ensure that the order of the paths
// doesn't affect the workspace selection for existing workspaces
- let local_paths = LocalPaths::new(worktree_roots);
+ let root_paths = PathList::new(worktree_roots);
// Note that we re-assign the workspace_id here in case it's empty
// and we've grabbed the most recent workspace
let (
workspace_id,
- local_paths,
- local_paths_order,
+ paths,
+ paths_order,
window_bounds,
display,
centered_layout,
@@ -584,8 +755,8 @@ impl WorkspaceDb {
window_id,
): (
WorkspaceId,
- Option<LocalPaths>,
- Option<LocalPathsOrder>,
+ String,
+ String,
Option<SerializedWindowBounds>,
Option<Uuid>,
Option<bool>,
@@ -595,8 +766,8 @@ impl WorkspaceDb {
.select_row_bound(sql! {
SELECT
workspace_id,
- local_paths,
- local_paths_order,
+ paths,
+ paths_order,
window_state,
window_x,
window_y,
@@ -615,25 +786,31 @@ impl WorkspaceDb {
bottom_dock_zoom,
window_id
FROM workspaces
- WHERE local_paths = ?
+ WHERE
+ paths IS ? AND
+ remote_connection_id IS ?
+ LIMIT 1
+ })
+ .map(|mut prepared_statement| {
+ (prepared_statement)((
+ root_paths.serialize().paths,
+ remote_connection_id.map(|id| id.0 as i32),
+ ))
+ .unwrap()
})
- .and_then(|mut prepared_statement| (prepared_statement)(&local_paths))
.context("No workspaces found")
.warn_on_err()
.flatten()?;
- let local_paths = local_paths?;
- let location = match local_paths_order {
- Some(order) => SerializedWorkspaceLocation::Local(local_paths, order),
- None => {
- let order = LocalPathsOrder::default_for_paths(&local_paths);
- SerializedWorkspaceLocation::Local(local_paths, order)
- }
- };
+ let paths = PathList::deserialize(&SerializedPathList {
+ paths,
+ order: paths_order,
+ });
Some(SerializedWorkspace {
id: workspace_id,
- location,
+ location: SerializedWorkspaceLocation::Local,
+ paths,
center_group: self
.get_center_pane_group(workspace_id)
.context("Getting center group")
@@ -645,63 +822,7 @@ impl WorkspaceDb {
session_id: None,
breakpoints: self.breakpoints(workspace_id),
window_id,
- })
- }
-
- pub(crate) fn workspace_for_ssh_project(
- &self,
- ssh_project: &SerializedSshProject,
- ) -> Option<SerializedWorkspace> {
- let (workspace_id, window_bounds, display, centered_layout, docks, window_id): (
- WorkspaceId,
- Option<SerializedWindowBounds>,
- Option<Uuid>,
- Option<bool>,
- DockStructure,
- Option<u64>,
- ) = self
- .select_row_bound(sql! {
- SELECT
- workspace_id,
- window_state,
- window_x,
- window_y,
- window_width,
- window_height,
- display,
- centered_layout,
- left_dock_visible,
- left_dock_active_panel,
- left_dock_zoom,
- right_dock_visible,
- right_dock_active_panel,
- right_dock_zoom,
- bottom_dock_visible,
- bottom_dock_active_panel,
- bottom_dock_zoom,
- window_id
- FROM workspaces
- WHERE ssh_project_id = ?
- })
- .and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0))
- .context("No workspaces found")
- .warn_on_err()
- .flatten()?;
-
- Some(SerializedWorkspace {
- id: workspace_id,
- location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()),
- center_group: self
- .get_center_pane_group(workspace_id)
- .context("Getting center group")
- .log_err()?,
- window_bounds,
- centered_layout: centered_layout.unwrap_or(false),
- breakpoints: self.breakpoints(workspace_id),
- display,
- docks,
- session_id: None,
- window_id,
+ user_toolchains: self.user_toolchains(workspace_id, remote_connection_id),
})
}
@@ -751,19 +872,101 @@ impl WorkspaceDb {
}
}
+ fn user_toolchains(
+ &self,
+ workspace_id: WorkspaceId,
+ remote_connection_id: Option<RemoteConnectionId>,
+ ) -> BTreeMap<ToolchainScope, IndexSet<Toolchain>> {
+ type RowKind = (WorkspaceId, u64, String, String, String, String, String);
+
+ let toolchains: Vec<RowKind> = self
+ .select_bound(sql! {
+ SELECT workspace_id, worktree_id, relative_worktree_path,
+ language_name, name, path, raw_json
+ FROM user_toolchains WHERE remote_connection_id IS ?1 AND (
+ workspace_id IN (0, ?2)
+ )
+ })
+ .and_then(|mut statement| {
+ (statement)((remote_connection_id.map(|id| id.0), workspace_id))
+ })
+ .unwrap_or_default();
+ let mut ret = BTreeMap::<_, IndexSet<_>>::default();
+
+ for (
+ _workspace_id,
+ worktree_id,
+ relative_worktree_path,
+ language_name,
+ name,
+ path,
+ raw_json,
+ ) in toolchains
+ {
+            // INTEGER primary keys (workspace ids, remote connection ids, etc.) start at 1, so we can safely use 0 as a sentinel workspace_id for globally-scoped toolchains.
+ let scope = if _workspace_id == WorkspaceId(0) {
+ debug_assert_eq!(worktree_id, u64::MAX);
+ debug_assert_eq!(relative_worktree_path, String::default());
+ ToolchainScope::Global
+ } else {
+ debug_assert_eq!(workspace_id, _workspace_id);
+ debug_assert_eq!(
+ worktree_id == u64::MAX,
+ relative_worktree_path == String::default()
+ );
+
+ if worktree_id != u64::MAX && relative_worktree_path != String::default() {
+ ToolchainScope::Subproject(
+ WorktreeId::from_usize(worktree_id as usize),
+ Arc::from(relative_worktree_path.as_ref()),
+ )
+ } else {
+ ToolchainScope::Project
+ }
+ };
+ let Ok(as_json) = serde_json::from_str(&raw_json) else {
+ continue;
+ };
+ let toolchain = Toolchain {
+ name: SharedString::from(name),
+ path: SharedString::from(path),
+ language_name: LanguageName::from_proto(language_name),
+ as_json,
+ };
+ ret.entry(scope).or_default().insert(toolchain);
+ }
+
+ ret
+ }
+
/// Saves a workspace using the worktree roots. Will garbage collect any workspaces
/// that used this workspace previously
pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) {
+ let paths = workspace.paths.serialize();
log::debug!("Saving workspace at location: {:?}", workspace.location);
self.write(move |conn| {
conn.with_savepoint("update_worktrees", || {
+ let remote_connection_id = match workspace.location.clone() {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(connection_options) => {
+ Some(Self::get_or_create_remote_connection_internal(
+ conn,
+ connection_options
+ )?.0)
+ }
+ };
+
// Clear out panes and pane_groups
conn.exec_bound(sql!(
DELETE FROM pane_groups WHERE workspace_id = ?1;
DELETE FROM panes WHERE workspace_id = ?1;))?(workspace.id)
.context("Clearing old panes")?;
- conn.exec_bound(sql!(DELETE FROM breakpoints WHERE workspace_id = ?1))?(workspace.id).context("Clearing old breakpoints")?;
+ conn.exec_bound(
+ sql!(
+ DELETE FROM breakpoints WHERE workspace_id = ?1;
+ )
+ )?(workspace.id).context("Clearing old breakpoints")?;
for (path, breakpoints) in workspace.breakpoints {
for bp in breakpoints {
@@ -1,256 +1,49 @@
use super::{SerializedAxis, SerializedWindowBounds};
use crate::{
Member, Pane, PaneAxis, SerializableItemRegistry, Workspace, WorkspaceId, item::ItemHandle,
+ path_list::PathList,
};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use async_recursion::async_recursion;
+use collections::IndexSet;
use db::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
use gpui::{AsyncWindowContext, Entity, WeakEntity};
-use itertools::Itertools as _;
+
+use language::{Toolchain, ToolchainScope};
use project::{Project, debugger::breakpoint_store::SourceBreakpoint};
-use remote::ssh_session::SshProjectId;
-use serde::{Deserialize, Serialize};
+use remote::RemoteConnectionOptions;
use std::{
collections::BTreeMap,
path::{Path, PathBuf},
sync::Arc,
};
-use util::{ResultExt, paths::SanitizedPath};
+use util::ResultExt;
use uuid::Uuid;
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
-pub struct SerializedSshProject {
- pub id: SshProjectId,
- pub host: String,
- pub port: Option<u16>,
- pub paths: Vec<String>,
- pub user: Option<String>,
-}
-
-impl SerializedSshProject {
- pub fn ssh_urls(&self) -> Vec<PathBuf> {
- self.paths
- .iter()
- .map(|path| {
- let mut result = String::new();
- if let Some(user) = &self.user {
- result.push_str(user);
- result.push('@');
- }
- result.push_str(&self.host);
- if let Some(port) = &self.port {
- result.push(':');
- result.push_str(&port.to_string());
- }
- result.push_str(path);
- PathBuf::from(result)
- })
- .collect()
- }
-}
-
-impl StaticColumnCount for SerializedSshProject {
- fn column_count() -> usize {
- 5
- }
-}
-
-impl Bind for &SerializedSshProject {
- fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
- let next_index = statement.bind(&self.id.0, start_index)?;
- let next_index = statement.bind(&self.host, next_index)?;
- let next_index = statement.bind(&self.port, next_index)?;
- let raw_paths = serde_json::to_string(&self.paths)?;
- let next_index = statement.bind(&raw_paths, next_index)?;
- statement.bind(&self.user, next_index)
- }
-}
-
-impl Column for SerializedSshProject {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let id = statement.column_int64(start_index)?;
- let host = statement.column_text(start_index + 1)?.to_string();
- let (port, _) = Option::<u16>::column(statement, start_index + 2)?;
- let raw_paths = statement.column_text(start_index + 3)?.to_string();
- let paths: Vec<String> = serde_json::from_str(&raw_paths)?;
-
- let (user, _) = Option::<String>::column(statement, start_index + 4)?;
-
- Ok((
- Self {
- id: SshProjectId(id as u64),
- host,
- port,
- paths,
- user,
- },
- start_index + 5,
- ))
- }
-}
-
-#[derive(Debug, PartialEq, Clone)]
-pub struct LocalPaths(Arc<Vec<PathBuf>>);
-
-impl LocalPaths {
- pub fn new<P: AsRef<Path>>(paths: impl IntoIterator<Item = P>) -> Self {
- let mut paths: Vec<PathBuf> = paths
- .into_iter()
- .map(|p| SanitizedPath::from(p).into())
- .collect();
- // Ensure all future `zed workspace1 workspace2` and `zed workspace2 workspace1` calls are using the same workspace.
- // The actual workspace order is stored in the `LocalPathsOrder` struct.
- paths.sort();
- Self(Arc::new(paths))
- }
-
- pub fn paths(&self) -> &Arc<Vec<PathBuf>> {
- &self.0
- }
-}
-
-impl StaticColumnCount for LocalPaths {}
-impl Bind for &LocalPaths {
- fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
- statement.bind(&bincode::serialize(&self.0)?, start_index)
- }
-}
-
-impl Column for LocalPaths {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let path_blob = statement.column_blob(start_index)?;
- let paths: Arc<Vec<PathBuf>> = if path_blob.is_empty() {
- Default::default()
- } else {
- bincode::deserialize(path_blob).context("Bincode deserialization of paths failed")?
- };
-
- Ok((Self(paths), start_index + 1))
- }
-}
-
-#[derive(Debug, PartialEq, Clone)]
-pub struct LocalPathsOrder(Vec<usize>);
-
-impl LocalPathsOrder {
- pub fn new(order: impl IntoIterator<Item = usize>) -> Self {
- Self(order.into_iter().collect())
- }
-
- pub fn order(&self) -> &[usize] {
- self.0.as_slice()
- }
+#[derive(
+ Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
+)]
+pub(crate) struct RemoteConnectionId(pub u64);
- pub fn default_for_paths(paths: &LocalPaths) -> Self {
- Self::new(0..paths.0.len())
- }
-}
-
-impl StaticColumnCount for LocalPathsOrder {}
-impl Bind for &LocalPathsOrder {
- fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
- statement.bind(&bincode::serialize(&self.0)?, start_index)
- }
-}
-
-impl Column for LocalPathsOrder {
- fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
- let order_blob = statement.column_blob(start_index)?;
- let order = if order_blob.is_empty() {
- Vec::new()
- } else {
- bincode::deserialize(order_blob).context("deserializing workspace root order")?
- };
-
- Ok((Self(order), start_index + 1))
- }
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub(crate) enum RemoteConnectionKind {
+ Ssh,
+ Wsl,
}
#[derive(Debug, PartialEq, Clone)]
pub enum SerializedWorkspaceLocation {
- Local(LocalPaths, LocalPathsOrder),
- Ssh(SerializedSshProject),
+ Local,
+ Remote(RemoteConnectionOptions),
}
impl SerializedWorkspaceLocation {
- /// Create a new `SerializedWorkspaceLocation` from a list of local paths.
- ///
- /// The paths will be sorted and the order will be stored in the `LocalPathsOrder` struct.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::path::Path;
- /// use zed_workspace::SerializedWorkspaceLocation;
- ///
- /// let location = SerializedWorkspaceLocation::from_local_paths(vec![
- /// Path::new("path/to/workspace1"),
- /// Path::new("path/to/workspace2"),
- /// ]);
- /// assert_eq!(location, SerializedWorkspaceLocation::Local(
- /// LocalPaths::new(vec![
- /// Path::new("path/to/workspace1"),
- /// Path::new("path/to/workspace2"),
- /// ]),
- /// LocalPathsOrder::new(vec![0, 1]),
- /// ));
- /// ```
- ///
- /// ```
- /// use std::path::Path;
- /// use zed_workspace::SerializedWorkspaceLocation;
- ///
- /// let location = SerializedWorkspaceLocation::from_local_paths(vec![
- /// Path::new("path/to/workspace2"),
- /// Path::new("path/to/workspace1"),
- /// ]);
- ///
- /// assert_eq!(location, SerializedWorkspaceLocation::Local(
- /// LocalPaths::new(vec![
- /// Path::new("path/to/workspace1"),
- /// Path::new("path/to/workspace2"),
- /// ]),
- /// LocalPathsOrder::new(vec![1, 0]),
- /// ));
- /// ```
- pub fn from_local_paths<P: AsRef<Path>>(paths: impl IntoIterator<Item = P>) -> Self {
- let mut indexed_paths: Vec<_> = paths
- .into_iter()
- .map(|p| p.as_ref().to_path_buf())
- .enumerate()
- .collect();
-
- indexed_paths.sort_by(|(_, a), (_, b)| a.cmp(b));
-
- let sorted_paths: Vec<_> = indexed_paths.iter().map(|(_, path)| path.clone()).collect();
- let order: Vec<_> = indexed_paths.iter().map(|(index, _)| *index).collect();
-
- Self::Local(LocalPaths::new(sorted_paths), LocalPathsOrder::new(order))
- }
-
/// Get sorted paths
pub fn sorted_paths(&self) -> Arc<Vec<PathBuf>> {
- match self {
- SerializedWorkspaceLocation::Local(paths, order) => {
- if order.order().len() == 0 {
- paths.paths().clone()
- } else {
- Arc::new(
- order
- .order()
- .iter()
- .zip(paths.paths().iter())
- .sorted_by_key(|(i, _)| **i)
- .map(|(_, p)| p.clone())
- .collect(),
- )
- }
- }
- SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()),
- }
+ unimplemented!()
}
}
@@ -258,6 +51,7 @@ impl SerializedWorkspaceLocation {
pub(crate) struct SerializedWorkspace {
pub(crate) id: WorkspaceId,
pub(crate) location: SerializedWorkspaceLocation,
+ pub(crate) paths: PathList,
pub(crate) center_group: SerializedPaneGroup,
pub(crate) window_bounds: Option<SerializedWindowBounds>,
pub(crate) centered_layout: bool,
@@ -265,6 +59,7 @@ pub(crate) struct SerializedWorkspace {
pub(crate) docks: DockStructure,
pub(crate) session_id: Option<String>,
pub(crate) breakpoints: BTreeMap<Arc<Path>, Vec<SourceBreakpoint>>,
+ pub(crate) user_toolchains: BTreeMap<ToolchainScope, IndexSet<Toolchain>>,
pub(crate) window_id: Option<u64>,
}
@@ -275,6 +70,23 @@ pub struct DockStructure {
pub(crate) bottom: DockData,
}
+impl RemoteConnectionKind {
+ pub(crate) fn serialize(&self) -> &'static str {
+ match self {
+ RemoteConnectionKind::Ssh => "ssh",
+ RemoteConnectionKind::Wsl => "wsl",
+ }
+ }
+
+ pub(crate) fn deserialize(text: &str) -> Option<Self> {
+ match text {
+ "ssh" => Some(Self::Ssh),
+ "wsl" => Some(Self::Wsl),
+ _ => None,
+ }
+ }
+}
+
impl Column for DockStructure {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (left, next_index) = DockData::column(statement, start_index)?;
@@ -581,80 +393,3 @@ impl Column for SerializedItem {
))
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_serialize_local_paths() {
- let paths = vec!["b", "a", "c"];
- let serialized = SerializedWorkspaceLocation::from_local_paths(paths);
-
- assert_eq!(
- serialized,
- SerializedWorkspaceLocation::Local(
- LocalPaths::new(vec!["a", "b", "c"]),
- LocalPathsOrder::new(vec![1, 0, 2])
- )
- );
- }
-
- #[test]
- fn test_sorted_paths() {
- let paths = vec!["b", "a", "c"];
- let serialized = SerializedWorkspaceLocation::from_local_paths(paths);
- assert_eq!(
- serialized.sorted_paths(),
- Arc::new(vec![
- PathBuf::from("b"),
- PathBuf::from("a"),
- PathBuf::from("c"),
- ])
- );
-
- let paths = Arc::new(vec![
- PathBuf::from("a"),
- PathBuf::from("b"),
- PathBuf::from("c"),
- ]);
- let order = vec![2, 0, 1];
- let serialized =
- SerializedWorkspaceLocation::Local(LocalPaths(paths.clone()), LocalPathsOrder(order));
- assert_eq!(
- serialized.sorted_paths(),
- Arc::new(vec![
- PathBuf::from("b"),
- PathBuf::from("c"),
- PathBuf::from("a"),
- ])
- );
-
- let paths = Arc::new(vec![
- PathBuf::from("a"),
- PathBuf::from("b"),
- PathBuf::from("c"),
- ]);
- let order = vec![];
- let serialized =
- SerializedWorkspaceLocation::Local(LocalPaths(paths.clone()), LocalPathsOrder(order));
- assert_eq!(serialized.sorted_paths(), paths);
-
- let urls = ["/a", "/b", "/c"];
- let serialized = SerializedWorkspaceLocation::Ssh(SerializedSshProject {
- id: SshProjectId(0),
- host: "host".to_string(),
- port: Some(22),
- paths: urls.iter().map(|s| s.to_string()).collect(),
- user: Some("user".to_string()),
- });
- assert_eq!(
- serialized.sorted_paths(),
- Arc::new(
- urls.iter()
- .map(|p| PathBuf::from(format!("user@host:22{}", p)))
- .collect()
- )
- );
- }
-}
@@ -371,13 +371,13 @@ impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
impl From<Box<dyn SearchableItemHandle>> for AnyView {
fn from(this: Box<dyn SearchableItemHandle>) -> Self {
- this.to_any().clone()
+ this.to_any()
}
}
impl From<&Box<dyn SearchableItemHandle>> for AnyView {
fn from(this: &Box<dyn SearchableItemHandle>) -> Self {
- this.to_any().clone()
+ this.to_any()
}
}
@@ -33,13 +33,12 @@ impl SharedScreen {
cx: &mut Context<Self>,
) -> Self {
let my_sid = track.sid();
- cx.subscribe(&room, move |_, _, ev, cx| match ev {
- call::room::Event::RemoteVideoTrackUnsubscribed { sid } => {
- if sid == &my_sid {
- cx.emit(Event::Close)
- }
+ cx.subscribe(&room, move |_, _, ev, cx| {
+ if let call::room::Event::RemoteVideoTrackUnsubscribed { sid } = ev
+ && sid == &my_sid
+ {
+ cx.emit(Event::Close)
}
- _ => {}
})
.detach();
@@ -108,7 +108,7 @@ impl StatusBar {
self.left_items
.iter()
.chain(self.right_items.iter())
- .find_map(|item| item.to_any().clone().downcast().log_err())
+ .find_map(|item| item.to_any().downcast().log_err())
}
pub fn position_of_item<T>(&self) -> Option<usize>
@@ -217,6 +217,6 @@ impl<T: StatusItemView> StatusItemViewHandle for Entity<T> {
impl From<&dyn StatusItemViewHandle> for AnyView {
fn from(val: &dyn StatusItemViewHandle) -> Self {
- val.to_any().clone()
+ val.to_any()
}
}
@@ -20,7 +20,7 @@ impl Workspace {
window: &mut Window,
cx: &mut Context<Self>,
) {
- match self.project.read(cx).ssh_connection_state(cx) {
+ match self.project.read(cx).remote_connection_state(cx) {
None | Some(ConnectionState::Connected) => {}
Some(
ConnectionState::Connecting
@@ -303,7 +303,6 @@ impl ThemePreview {
.gap_1()
.children(all_colors.into_iter().map(|(color, name)| {
let id = ElementId::Name(format!("{:?}-preview", color).into());
- let name = name.clone();
div().size_8().flex_none().child(
ButtonLike::new(id)
.child(
@@ -1,10 +1,12 @@
pub mod dock;
pub mod history_manager;
+pub mod invalid_buffer_view;
pub mod item;
mod modal_layer;
pub mod notifications;
pub mod pane;
pub mod pane_group;
+mod path_list;
mod persistence;
pub mod searchable;
pub mod shared_screen;
@@ -17,6 +19,7 @@ mod workspace_settings;
pub use crate::notifications::NotificationFrame;
pub use dock::Panel;
+pub use path_list::PathList;
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
use anyhow::{Context as _, Result, anyhow};
@@ -39,9 +42,9 @@ use gpui::{
Action, AnyEntity, AnyView, AnyWeakView, App, AsyncApp, AsyncWindowContext, Bounds, Context,
CursorStyle, Decorations, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle,
Focusable, Global, HitboxBehavior, Hsla, KeyContext, Keystroke, ManagedView, MouseButton,
- PathPromptOptions, Point, PromptLevel, Render, ResizeEdge, Size, Stateful, Subscription, Task,
- Tiling, WeakEntity, WindowBounds, WindowHandle, WindowId, WindowOptions, actions, canvas,
- point, relative, size, transparent_black,
+ PathPromptOptions, Point, PromptLevel, Render, ResizeEdge, Size, Stateful, Subscription,
+ SystemWindowTabController, Task, Tiling, WeakEntity, WindowBounds, WindowHandle, WindowId,
+ WindowOptions, actions, canvas, point, relative, size, transparent_black,
};
pub use history_manager::*;
pub use item::{
@@ -61,20 +64,18 @@ use notifications::{
};
pub use pane::*;
pub use pane_group::*;
-use persistence::{
- DB, SerializedWindowBounds,
- model::{SerializedSshProject, SerializedWorkspace},
-};
+use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace};
pub use persistence::{
DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
- model::{ItemId, LocalPaths, SerializedWorkspaceLocation},
+ model::{ItemId, SerializedWorkspaceLocation},
};
use postage::stream::Stream;
use project::{
DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId,
debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus},
+ toolchain_store::ToolchainStoreEvent,
};
-use remote::{SshClientDelegate, SshConnectionOptions, ssh_session::ConnectionIdentifier};
+use remote::{RemoteClientDelegate, RemoteConnectionOptions, remote_client::ConnectionIdentifier};
use schemars::JsonSchema;
use serde::Deserialize;
use session::AppSession;
@@ -612,21 +613,67 @@ impl ProjectItemRegistry {
);
self.build_project_item_for_path_fns
.push(|project, project_path, window, cx| {
+ let project_path = project_path.clone();
+ let is_file = project
+ .read(cx)
+ .entry_for_path(&project_path, cx)
+ .is_some_and(|entry| entry.is_file());
+ let entry_abs_path = project.read(cx).absolute_path(&project_path, cx);
+ let is_local = project.read(cx).is_local();
let project_item =
- <T::Item as project::ProjectItem>::try_open(project, project_path, cx)?;
+ <T::Item as project::ProjectItem>::try_open(project, &project_path, cx)?;
let project = project.clone();
Some(window.spawn(cx, async move |cx| {
- let project_item = project_item.await?;
- let project_entry_id: Option<ProjectEntryId> =
- project_item.read_with(cx, project::ProjectItem::entry_id)?;
- let build_workspace_item = Box::new(
- |pane: &mut Pane, window: &mut Window, cx: &mut Context<Pane>| {
- Box::new(cx.new(|cx| {
- T::for_project_item(project, Some(pane), project_item, window, cx)
- })) as Box<dyn ItemHandle>
- },
- ) as Box<_>;
- Ok((project_entry_id, build_workspace_item))
+ match project_item.await.with_context(|| {
+ format!(
+ "opening project path {:?}",
+ entry_abs_path.as_deref().unwrap_or(&project_path.path)
+ )
+ }) {
+ Ok(project_item) => {
+ let project_item = project_item;
+ let project_entry_id: Option<ProjectEntryId> =
+ project_item.read_with(cx, project::ProjectItem::entry_id)?;
+ let build_workspace_item = Box::new(
+ |pane: &mut Pane, window: &mut Window, cx: &mut Context<Pane>| {
+ Box::new(cx.new(|cx| {
+ T::for_project_item(
+ project,
+ Some(pane),
+ project_item,
+ window,
+ cx,
+ )
+ })) as Box<dyn ItemHandle>
+ },
+ ) as Box<_>;
+ Ok((project_entry_id, build_workspace_item))
+ }
+ Err(e) => {
+ if e.error_code() == ErrorCode::Internal {
+ if let Some(abs_path) =
+ entry_abs_path.as_deref().filter(|_| is_file)
+ {
+ if let Some(broken_project_item_view) =
+ cx.update(|window, cx| {
+ T::for_broken_project_item(
+ abs_path, is_local, &e, window, cx,
+ )
+ })?
+ {
+ let build_workspace_item = Box::new(
+ move |_: &mut Pane, _: &mut Window, cx: &mut Context<Pane>| {
+ cx.new(|_| broken_project_item_view).boxed_clone()
+ },
+ )
+ as Box<_>;
+ return Ok((None, build_workspace_item));
+ }
+ }
+ }
+ Err(e)
+ }
+ }
}))
});
}
@@ -903,7 +950,7 @@ impl AppState {
let languages = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let clock = Arc::new(clock::FakeSystemClock::new());
let http_client = http_client::FakeHttpClient::with_404_response();
- let client = Client::new(clock, http_client.clone(), cx);
+ let client = Client::new(clock, http_client, cx);
let session = cx.new(|cx| AppSession::new(Session::test(), cx));
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
@@ -984,8 +1031,10 @@ pub enum Event {
ItemAdded {
item: Box<dyn ItemHandle>,
},
- ItemRemoved,
ActiveItemChanged,
+ ItemRemoved {
+ item_id: EntityId,
+ },
UserSavedItem {
pane: WeakEntity<Pane>,
item: Box<dyn WeakItemHandle>,
@@ -1000,7 +1049,6 @@ pub enum Event {
},
ZoomChanged,
ModalOpened,
- ClearActivityIndicator,
}
#[derive(Debug)]
@@ -1013,7 +1061,7 @@ pub enum OpenVisible {
enum WorkspaceLocation {
// Valid local paths or SSH project to serialize
- Location(SerializedWorkspaceLocation),
+ Location(SerializedWorkspaceLocation, PathList),
// No valid location found hence clear session id
DetachFromSession,
// No valid location found to serialize
@@ -1097,7 +1145,6 @@ pub struct Workspace {
terminal_provider: Option<Box<dyn TerminalProvider>>,
debugger_provider: Option<Arc<dyn DebuggerProvider>>,
serializable_items_tx: UnboundedSender<Box<dyn SerializableItemHandle>>,
- serialized_ssh_project: Option<SerializedSshProject>,
_items_serializer: Task<Result<()>>,
session_id: Option<String>,
scheduled_tasks: Vec<Task<()>>,
@@ -1146,8 +1193,6 @@ impl Workspace {
project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded(_) => {
this.update_window_title(window, cx);
- this.update_ssh_paths(cx);
- this.serialize_ssh_paths(window, cx);
this.serialize_workspace(window, cx);
// This event could be triggered by `AddFolderToProject` or `RemoveFromProject`.
this.update_history(cx);
@@ -1231,6 +1276,19 @@ impl Workspace {
},
)
.detach();
+ if let Some(toolchain_store) = project.read(cx).toolchain_store() {
+ cx.subscribe_in(
+ &toolchain_store,
+ window,
+ |workspace, _, event, window, cx| match event {
+ ToolchainStoreEvent::CustomToolchainsModified => {
+ workspace.serialize_workspace(window, cx);
+ }
+ _ => {}
+ },
+ )
+ .detach();
+ }
cx.on_focus_lost(window, |this, window, cx| {
let focus_handle = this.focus_handle(cx);
@@ -1323,7 +1381,6 @@ impl Workspace {
let mut active_call = None;
if let Some(call) = ActiveCall::try_global(cx) {
- let call = call.clone();
let subscriptions = vec![cx.subscribe_in(&call, window, Self::on_active_call_event)];
active_call = Some((call, subscriptions));
}
@@ -1433,7 +1490,7 @@ impl Workspace {
serializable_items_tx,
_items_serializer,
session_id: Some(session_id),
- serialized_ssh_project: None,
+
scheduled_tasks: Vec::new(),
}
}
@@ -1473,20 +1530,9 @@ impl Workspace {
let serialized_workspace =
persistence::DB.workspace_for_roots(paths_to_open.as_slice());
- let workspace_location = serialized_workspace
- .as_ref()
- .map(|ws| &ws.location)
- .and_then(|loc| match loc {
- SerializedWorkspaceLocation::Local(_, order) => {
- Some((loc.sorted_paths(), order.order()))
- }
- _ => None,
- });
-
- if let Some((paths, order)) = workspace_location {
- paths_to_open = paths.iter().cloned().collect();
-
- if order.iter().enumerate().any(|(i, &j)| i != j) {
+ if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) {
+ paths_to_open = paths.paths().to_vec();
+ if !paths.is_lexicographically_ordered() {
project_handle
.update(cx, |project, cx| {
project.set_worktrees_reordered(true, cx);
@@ -1533,6 +1579,16 @@ impl Workspace {
})?
.await;
}
+ if let Some(workspace) = serialized_workspace.as_ref() {
+ project_handle.update(cx, |this, cx| {
+ for (scope, toolchains) in &workspace.user_toolchains {
+ for toolchain in toolchains {
+ this.add_toolchain(toolchain.clone(), scope.clone(), cx);
+ }
+ }
+ })?;
+ }
+
let window = if let Some(window) = requesting_window {
let centered_layout = serialized_workspace
.as_ref()
@@ -2006,14 +2062,6 @@ impl Workspace {
self.debugger_provider.clone()
}
- pub fn serialized_ssh_project(&self) -> Option<SerializedSshProject> {
- self.serialized_ssh_project.clone()
- }
-
- pub fn set_serialized_ssh_project(&mut self, serialized_ssh_project: SerializedSshProject) {
- self.serialized_ssh_project = Some(serialized_ssh_project);
- }
-
pub fn prompt_for_open_path(
&mut self,
path_prompt_options: PathPromptOptions,
@@ -2068,7 +2116,7 @@ impl Workspace {
cx: &mut Context<Self>,
) -> oneshot::Receiver<Option<Vec<PathBuf>>> {
if self.project.read(cx).is_via_collab()
- || self.project.read(cx).is_via_ssh()
+ || self.project.read(cx).is_via_remote_server()
|| !WorkspaceSettings::get_global(cx).use_system_path_prompts
{
let prompt = self.on_prompt_for_new_path.take().unwrap();
@@ -2250,27 +2298,43 @@ impl Workspace {
})?;
if let Some(active_call) = active_call
- && close_intent != CloseIntent::Quit
&& workspace_count == 1
&& active_call.read_with(cx, |call, _| call.room().is_some())?
{
- let answer = cx.update(|window, cx| {
- window.prompt(
- PromptLevel::Warning,
- "Do you want to leave the current call?",
- None,
- &["Close window and hang up", "Cancel"],
- cx,
- )
- })?;
+ if close_intent == CloseIntent::CloseWindow {
+ let answer = cx.update(|window, cx| {
+ window.prompt(
+ PromptLevel::Warning,
+ "Do you want to leave the current call?",
+ None,
+ &["Close window and hang up", "Cancel"],
+ cx,
+ )
+ })?;
- if answer.await.log_err() == Some(1) {
- return anyhow::Ok(false);
- } else {
- active_call
- .update(cx, |call, cx| call.hang_up(cx))?
- .await
- .log_err();
+ if answer.await.log_err() == Some(1) {
+ return anyhow::Ok(false);
+ } else {
+ active_call
+ .update(cx, |call, cx| call.hang_up(cx))?
+ .await
+ .log_err();
+ }
+ }
+ if close_intent == CloseIntent::ReplaceWindow {
+ _ = active_call.update(cx, |this, cx| {
+ let workspace = cx
+ .windows()
+ .iter()
+ .filter_map(|window| window.downcast::<Workspace>())
+ .next()
+ .unwrap();
+ let project = workspace.read(cx)?.project.clone();
+ if project.read(cx).is_shared() {
+ this.unshare_project(project, cx)?;
+ }
+ Ok::<_, anyhow::Error>(())
+ })?;
}
}
@@ -2503,8 +2567,6 @@ impl Workspace {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>>> {
- log::info!("open paths {abs_paths:?}");
-
let fs = self.app_state.fs.clone();
// Sort the paths to ensure we add worktrees for parents before their children.
@@ -2546,7 +2608,7 @@ impl Workspace {
};
let this = this.clone();
- let abs_path: Arc<Path> = SanitizedPath::from(abs_path.clone()).into();
+ let abs_path: Arc<Path> = SanitizedPath::new(&abs_path).as_path().into();
let fs = fs.clone();
let pane = pane.clone();
let task = cx.spawn(async move |cx| {
@@ -3055,6 +3117,16 @@ impl Workspace {
}
}
+ pub fn close_panel<T: Panel>(&self, window: &mut Window, cx: &mut Context<Self>) {
+ for dock in self.all_docks().iter() {
+ dock.update(cx, |dock, cx| {
+ if dock.panel::<T>().is_some() {
+ dock.set_open(false, window, cx)
+ }
+ })
+ }
+ }
+
pub fn panel<T: Panel>(&self, cx: &App) -> Option<Entity<T>> {
self.all_docks()
.iter()
@@ -3283,7 +3355,8 @@ impl Workspace {
let task = self.load_path(project_path.clone(), window, cx);
window.spawn(cx, async move |cx| {
let (project_entry_id, build_item) = task.await?;
- let result = pane.update_in(cx, |pane, window, cx| {
+
+ pane.update_in(cx, |pane, window, cx| {
pane.open_item(
project_entry_id,
project_path,
@@ -3295,8 +3368,7 @@ impl Workspace {
cx,
build_item,
)
- });
- result
+ })
})
}
@@ -3366,9 +3438,8 @@ impl Workspace {
window: &mut Window,
cx: &mut App,
) -> Task<Result<(Option<ProjectEntryId>, WorkspaceItemBuilder)>> {
- let project = self.project().clone();
let registry = cx.default_global::<ProjectItemRegistry>().clone();
- registry.open_path(&project, &path, window, cx)
+ registry.open_path(self.project(), &path, window, cx)
}
pub fn find_project_item<T>(
@@ -3903,7 +3974,6 @@ impl Workspace {
}
serialize_workspace = false;
}
- pane::Event::RemoveItem { .. } => {}
pane::Event::RemovedItem { item } => {
cx.emit(Event::ActiveItemChanged);
self.update_window_edited(window, cx);
@@ -3912,6 +3982,9 @@ impl Workspace {
{
entry.remove();
}
+ cx.emit(Event::ItemRemoved {
+ item_id: item.item_id(),
+ });
}
pane::Event::Focus => {
window.invalidate_character_coordinates();
@@ -3993,52 +4066,6 @@ impl Workspace {
maybe_pane_handle
}
- pub fn split_pane_with_item(
- &mut self,
- pane_to_split: WeakEntity<Pane>,
- split_direction: SplitDirection,
- from: WeakEntity<Pane>,
- item_id_to_move: EntityId,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let Some(pane_to_split) = pane_to_split.upgrade() else {
- return;
- };
- let Some(from) = from.upgrade() else {
- return;
- };
-
- let new_pane = self.add_pane(window, cx);
- move_item(&from, &new_pane, item_id_to_move, 0, true, window, cx);
- self.center
- .split(&pane_to_split, &new_pane, split_direction)
- .unwrap();
- cx.notify();
- }
-
- pub fn split_pane_with_project_entry(
- &mut self,
- pane_to_split: WeakEntity<Pane>,
- split_direction: SplitDirection,
- project_entry: ProjectEntryId,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Option<Task<Result<()>>> {
- let pane_to_split = pane_to_split.upgrade()?;
- let new_pane = self.add_pane(window, cx);
- self.center
- .split(&pane_to_split, &new_pane, split_direction)
- .unwrap();
-
- let path = self.project.read(cx).path_for_entry(project_entry, cx)?;
- let task = self.open_path(path, Some(new_pane.downgrade()), true, window, cx);
- Some(cx.foreground_executor().spawn(async move {
- task.await?;
- Ok(())
- }))
- }
-
pub fn join_all_panes(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let active_item = self.active_pane.read(cx).active_item();
for pane in &self.panes {
@@ -4118,7 +4145,6 @@ impl Workspace {
.unwrap_or_else(|| {
self.split_pane(self.active_pane.clone(), SplitDirection::Right, window, cx)
})
- .clone()
}
pub fn pane_for(&self, handle: &dyn ItemHandle) -> Option<Entity<Pane>> {
@@ -4393,6 +4419,11 @@ impl Workspace {
return;
}
window.set_window_title(&title);
+ SystemWindowTabController::update_tab_title(
+ cx,
+ window.window_handle().window_id(),
+ SharedString::from(&title),
+ );
self.last_window_title = Some(title);
}
@@ -4736,14 +4767,12 @@ impl Workspace {
})
});
- if let Some(view) = view {
- Some(entry.insert(FollowerView {
+ view.map(|view| {
+ entry.insert(FollowerView {
view,
location: None,
- }))
- } else {
- None
- }
+ })
+ })
}
};
@@ -5050,59 +5079,12 @@ impl Workspace {
self.session_id.clone()
}
- fn local_paths(&self, cx: &App) -> Option<Vec<Arc<Path>>> {
- let project = self.project().read(cx);
-
- if project.is_local() {
- Some(
- project
- .visible_worktrees(cx)
- .map(|worktree| worktree.read(cx).abs_path())
- .collect::<Vec<_>>(),
- )
- } else {
- None
- }
- }
-
- fn update_ssh_paths(&mut self, cx: &App) {
+ pub fn root_paths(&self, cx: &App) -> Vec<Arc<Path>> {
let project = self.project().read(cx);
- if !project.is_local() {
- let paths: Vec<String> = project
- .visible_worktrees(cx)
- .map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string())
- .collect();
- if let Some(ssh_project) = &mut self.serialized_ssh_project {
- ssh_project.paths = paths;
- }
- }
- }
-
- fn serialize_ssh_paths(&mut self, window: &mut Window, cx: &mut Context<Workspace>) {
- if self._schedule_serialize_ssh_paths.is_none() {
- self._schedule_serialize_ssh_paths =
- Some(cx.spawn_in(window, async move |this, cx| {
- cx.background_executor()
- .timer(SERIALIZATION_THROTTLE_TIME)
- .await;
- this.update_in(cx, |this, window, cx| {
- let task = if let Some(ssh_project) = &this.serialized_ssh_project {
- let ssh_project_id = ssh_project.id;
- let ssh_project_paths = ssh_project.paths.clone();
- window.spawn(cx, async move |_| {
- persistence::DB
- .update_ssh_project_paths(ssh_project_id, ssh_project_paths)
- .await
- })
- } else {
- Task::ready(Err(anyhow::anyhow!("No SSH project to serialize")))
- };
- task.detach();
- this._schedule_serialize_ssh_paths.take();
- })
- .log_err();
- }));
- }
+ project
+ .visible_worktrees(cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ .collect::<Vec<_>>()
}
fn remove_panes(&mut self, member: Member, window: &mut Window, cx: &mut Context<Workspace>) {
@@ -5275,20 +5257,27 @@ impl Workspace {
}
match self.serialize_workspace_location(cx) {
- WorkspaceLocation::Location(location) => {
+ WorkspaceLocation::Location(location, paths) => {
let breakpoints = self.project.update(cx, |project, cx| {
project
.breakpoint_store()
.read(cx)
.all_source_breakpoints(cx)
});
+ let user_toolchains = self
+ .project
+ .read(cx)
+ .user_toolchains(cx)
+ .unwrap_or_default();
let center_group = build_serialized_pane_group(&self.center.root, window, cx);
let docks = build_serialized_docks(self, window, cx);
let window_bounds = Some(SerializedWindowBounds(window.window_bounds()));
+
let serialized_workspace = SerializedWorkspace {
id: database_id,
location,
+ paths,
center_group,
window_bounds,
display: Default::default(),
@@ -5297,6 +5286,7 @@ impl Workspace {
session_id: self.session_id.clone(),
breakpoints,
window_id: Some(window.window_handle().window_id().as_u64()),
+ user_toolchains,
};
window.spawn(cx, async move |_| {
@@ -5314,13 +5304,12 @@ impl Workspace {
}
fn serialize_workspace_location(&self, cx: &App) -> WorkspaceLocation {
- if let Some(ssh_project) = &self.serialized_ssh_project {
- WorkspaceLocation::Location(SerializedWorkspaceLocation::Ssh(ssh_project.clone()))
- } else if let Some(local_paths) = self.local_paths(cx) {
- if !local_paths.is_empty() {
- WorkspaceLocation::Location(SerializedWorkspaceLocation::from_local_paths(
- local_paths,
- ))
+ let paths = PathList::new(&self.root_paths(cx));
+ if let Some(connection) = self.project.read(cx).remote_connection_options(cx) {
+ WorkspaceLocation::Location(SerializedWorkspaceLocation::Remote(connection), paths)
+ } else if self.project.read(cx).is_local() {
+ if !paths.is_empty() {
+ WorkspaceLocation::Location(SerializedWorkspaceLocation::Local, paths)
} else {
WorkspaceLocation::DetachFromSession
}
@@ -5333,13 +5322,13 @@ impl Workspace {
let Some(id) = self.database_id() else {
return;
};
- let location = match self.serialize_workspace_location(cx) {
- WorkspaceLocation::Location(location) => location,
- _ => return,
- };
+ if !self.project.read(cx).is_local() {
+ return;
+ }
if let Some(manager) = HistoryManager::global(cx) {
+ let paths = PathList::new(&self.root_paths(cx));
manager.update(cx, |this, cx| {
- this.update_history(id, HistoryManagerEntry::new(id, &location), cx);
+ this.update_history(id, HistoryManagerEntry::new(id, &paths), cx);
});
}
}
@@ -5857,17 +5846,22 @@ impl Workspace {
return;
};
let windows = cx.windows();
- let Some(next_window) = windows
- .iter()
- .cycle()
- .skip_while(|window| window.window_id() != current_window_id)
- .nth(1)
- else {
- return;
- };
- next_window
- .update(cx, |_, window, _| window.activate_window())
- .ok();
+ let next_window =
+ SystemWindowTabController::get_next_tab_group_window(cx, current_window_id).or_else(
+ || {
+ windows
+ .iter()
+ .cycle()
+ .skip_while(|window| window.window_id() != current_window_id)
+ .nth(1)
+ },
+ );
+
+ if let Some(window) = next_window {
+ window
+ .update(cx, |_, window, _| window.activate_window())
+ .ok();
+ }
}
pub fn activate_previous_window(&mut self, cx: &mut Context<Self>) {
@@ -5875,18 +5869,23 @@ impl Workspace {
return;
};
let windows = cx.windows();
- let Some(prev_window) = windows
- .iter()
- .rev()
- .cycle()
- .skip_while(|window| window.window_id() != current_window_id)
- .nth(1)
- else {
- return;
- };
- prev_window
- .update(cx, |_, window, _| window.activate_window())
- .ok();
+ let prev_window =
+ SystemWindowTabController::get_prev_tab_group_window(cx, current_window_id).or_else(
+ || {
+ windows
+ .iter()
+ .rev()
+ .cycle()
+ .skip_while(|window| window.window_id() != current_window_id)
+ .nth(1)
+ },
+ );
+
+ if let Some(window) = prev_window {
+ window
+ .update(cx, |_, window, _| window.activate_window())
+ .ok();
+ }
}
pub fn cancel(&mut self, _: &menu::Cancel, window: &mut Window, cx: &mut Context<Self>) {
@@ -6647,15 +6646,29 @@ impl Render for Workspace {
}
})
.children(self.zoomed.as_ref().and_then(|view| {
- Some(div()
+ let zoomed_view = view.upgrade()?;
+ let div = div()
.occlude()
.absolute()
.overflow_hidden()
.border_color(colors.border)
.bg(colors.background)
- .child(view.upgrade()?)
+ .child(zoomed_view)
.inset_0()
- .shadow_lg())
+ .shadow_lg();
+
+ if !WorkspaceSettings::get_global(cx).zoomed_padding {
+ return Some(div);
+ }
+
+ Some(match self.zoomed_position {
+ Some(DockPosition::Left) => div.right_2().border_r_1(),
+ Some(DockPosition::Right) => div.left_2().border_l_1(),
+ Some(DockPosition::Bottom) => div.top_2().border_t_1(),
+ None => {
+ div.top_2().bottom_2().left_2().right_2().border_1()
+ }
+ })
}))
.children(self.render_notifications(window, cx)),
)
@@ -6715,7 +6728,7 @@ impl WorkspaceStore {
.update(cx, |workspace, window, cx| {
let handler_response =
workspace.handle_follow(follower.project_id, window, cx);
- if let Some(active_view) = handler_response.active_view.clone()
+ if let Some(active_view) = handler_response.active_view
&& workspace.project.read(cx).remote_id() == follower.project_id
{
response.active_view = Some(active_view)
@@ -6805,14 +6818,14 @@ impl WorkspaceHandle for Entity<Workspace> {
}
}
-pub async fn last_opened_workspace_location() -> Option<SerializedWorkspaceLocation> {
+pub async fn last_opened_workspace_location() -> Option<(SerializedWorkspaceLocation, PathList)> {
DB.last_workspace().await.log_err().flatten()
}
pub fn last_session_workspace_locations(
last_session_id: &str,
last_session_window_stack: Option<Vec<WindowId>>,
-) -> Option<Vec<SerializedWorkspaceLocation>> {
+) -> Option<Vec<(SerializedWorkspaceLocation, PathList)>> {
DB.last_session_workspace_locations(last_session_id, last_session_window_stack)
.log_err()
}
@@ -6921,7 +6934,8 @@ async fn join_channel_internal(
| Status::Authenticating
| Status::Authenticated
| Status::Reconnecting
- | Status::Reauthenticating => continue,
+ | Status::Reauthenticating
+ | Status::Reauthenticated => continue,
Status::Connected { .. } => break 'outer,
Status::SignedOut | Status::AuthenticationError => {
return Err(ErrorCode::SignedOut.into());
@@ -6963,7 +6977,7 @@ async fn join_channel_internal(
return None;
}
- if (project.is_local() || project.is_via_ssh())
+ if (project.is_local() || project.is_via_remote_server())
&& project.visible_worktrees(cx).any(|tree| {
tree.read(cx)
.root_entry()
@@ -7305,22 +7319,22 @@ pub fn create_and_open_local_file(
})
}
-pub fn open_ssh_project_with_new_connection(
+pub fn open_remote_project_with_new_connection(
window: WindowHandle<Workspace>,
- connection_options: SshConnectionOptions,
+ connection_options: RemoteConnectionOptions,
cancel_rx: oneshot::Receiver<()>,
- delegate: Arc<dyn SshClientDelegate>,
+ delegate: Arc<dyn RemoteClientDelegate>,
app_state: Arc<AppState>,
paths: Vec<PathBuf>,
cx: &mut App,
) -> Task<Result<()>> {
cx.spawn(async move |cx| {
- let (serialized_ssh_project, workspace_id, serialized_workspace) =
- serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
+ let (workspace_id, serialized_workspace) =
+ serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?;
let session = match cx
.update(|cx| {
- remote::SshRemoteClient::new(
+ remote::RemoteClient::new(
ConnectionIdentifier::Workspace(workspace_id.0),
connection_options,
cancel_rx,
@@ -7335,7 +7349,7 @@ pub fn open_ssh_project_with_new_connection(
};
let project = cx.update(|cx| {
- project::Project::ssh(
+ project::Project::remote(
session,
app_state.client.clone(),
app_state.node_runtime.clone(),
@@ -7346,10 +7360,9 @@ pub fn open_ssh_project_with_new_connection(
)
})?;
- open_ssh_project_inner(
+ open_remote_project_inner(
project,
paths,
- serialized_ssh_project,
workspace_id,
serialized_workspace,
app_state,
@@ -7360,8 +7373,8 @@ pub fn open_ssh_project_with_new_connection(
})
}
-pub fn open_ssh_project_with_existing_connection(
- connection_options: SshConnectionOptions,
+pub fn open_remote_project_with_existing_connection(
+ connection_options: RemoteConnectionOptions,
project: Entity<Project>,
paths: Vec<PathBuf>,
app_state: Arc<AppState>,
@@ -7369,13 +7382,12 @@ pub fn open_ssh_project_with_existing_connection(
cx: &mut AsyncApp,
) -> Task<Result<()>> {
cx.spawn(async move |cx| {
- let (serialized_ssh_project, workspace_id, serialized_workspace) =
- serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
+ let (workspace_id, serialized_workspace) =
+ serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?;
- open_ssh_project_inner(
+ open_remote_project_inner(
project,
paths,
- serialized_ssh_project,
workspace_id,
serialized_workspace,
app_state,
@@ -7386,10 +7398,9 @@ pub fn open_ssh_project_with_existing_connection(
})
}
-async fn open_ssh_project_inner(
+async fn open_remote_project_inner(
project: Entity<Project>,
paths: Vec<PathBuf>,
- serialized_ssh_project: SerializedSshProject,
workspace_id: WorkspaceId,
serialized_workspace: Option<SerializedWorkspace>,
app_state: Arc<AppState>,
@@ -7442,7 +7453,6 @@ async fn open_ssh_project_inner(
let mut workspace =
Workspace::new(Some(workspace_id), project, app_state.clone(), window, cx);
- workspace.set_serialized_ssh_project(serialized_ssh_project);
workspace.update_history(cx);
if let Some(ref serialized) = serialized_workspace {
@@ -7475,32 +7485,18 @@ async fn open_ssh_project_inner(
Ok(())
}
-fn serialize_ssh_project(
- connection_options: SshConnectionOptions,
+fn serialize_remote_project(
+ connection_options: RemoteConnectionOptions,
paths: Vec<PathBuf>,
cx: &AsyncApp,
-) -> Task<
- Result<(
- SerializedSshProject,
- WorkspaceId,
- Option<SerializedWorkspace>,
- )>,
-> {
+) -> Task<Result<(WorkspaceId, Option<SerializedWorkspace>)>> {
cx.background_spawn(async move {
- let serialized_ssh_project = persistence::DB
- .get_or_create_ssh_project(
- connection_options.host.clone(),
- connection_options.port,
- paths
- .iter()
- .map(|path| path.to_string_lossy().to_string())
- .collect::<Vec<_>>(),
- connection_options.username.clone(),
- )
+ let remote_connection_id = persistence::DB
+ .get_or_create_remote_connection(connection_options)
.await?;
let serialized_workspace =
- persistence::DB.workspace_for_ssh_project(&serialized_ssh_project);
+ persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
let workspace_id = if let Some(workspace_id) =
serialized_workspace.as_ref().map(|workspace| workspace.id)
@@ -7510,7 +7506,7 @@ fn serialize_ssh_project(
persistence::DB.next_id().await?
};
- Ok((serialized_ssh_project, workspace_id, serialized_workspace))
+ Ok((workspace_id, serialized_workspace))
})
}
@@ -7674,7 +7670,7 @@ pub fn client_side_decorations(
match decorations {
Decorations::Client { .. } => window.set_client_inset(theme::CLIENT_SIDE_DECORATION_SHADOW),
- Decorations::Server { .. } => window.set_client_inset(px(0.0)),
+ Decorations::Server => window.set_client_inset(px(0.0)),
}
struct GlobalResizeEdge(ResizeEdge);
@@ -8050,25 +8046,20 @@ pub struct WorkspacePosition {
pub centered_layout: bool,
}
-pub fn ssh_workspace_position_from_db(
- host: String,
- port: Option<u16>,
- user: Option<String>,
+pub fn remote_workspace_position_from_db(
+ connection_options: RemoteConnectionOptions,
paths_to_open: &[PathBuf],
cx: &App,
) -> Task<Result<WorkspacePosition>> {
- let paths = paths_to_open
- .iter()
- .map(|path| path.to_string_lossy().to_string())
- .collect::<Vec<_>>();
+ let paths = paths_to_open.to_vec();
cx.background_spawn(async move {
- let serialized_ssh_project = persistence::DB
- .get_or_create_ssh_project(host, port, paths, user)
+ let remote_connection_id = persistence::DB
+ .get_or_create_remote_connection(connection_options)
.await
.context("fetching serialized ssh project")?;
let serialized_workspace =
- persistence::DB.workspace_for_ssh_project(&serialized_ssh_project);
+ persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() {
(Some(WindowBounds::Windowed(bounds)), None)
@@ -6,7 +6,7 @@ use collections::HashMap;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
#[derive(Deserialize)]
pub struct WorkspaceSettings {
@@ -29,6 +29,8 @@ pub struct WorkspaceSettings {
pub on_last_window_closed: OnLastWindowClosed,
pub resize_all_panels_in_dock: Vec<DockPosition>,
pub close_on_file_delete: bool,
+ pub use_system_window_tabs: bool,
+ pub zoomed_padding: bool,
}
#[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)]
@@ -116,7 +118,8 @@ pub enum RestoreOnStartupBehavior {
LastSession,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct WorkspaceSettingsContent {
/// Active pane styling settings.
pub active_pane_modifiers: Option<ActivePanelModifiers>,
@@ -202,6 +205,16 @@ pub struct WorkspaceSettingsContent {
///
/// Default: false
pub close_on_file_delete: Option<bool>,
+ /// Whether to allow windows to tab together based on the user’s tabbing preference (macOS only).
+ ///
+ /// Default: false
+ pub use_system_window_tabs: Option<bool>,
+ /// Whether to show padding for zoomed panels.
+ /// When enabled, zoomed bottom panels will have some top padding,
+ /// while zoomed left/right panels will have padding to the right/left (respectively).
+ ///
+ /// Default: true
+ pub zoomed_padding: Option<bool>,
}
#[derive(Deserialize)]
@@ -211,7 +224,8 @@ pub struct TabBarSettings {
pub show_tab_bar_buttons: bool,
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(key = "tab_bar")]
pub struct TabBarSettingsContent {
/// Whether or not to show the tab bar in the editor.
///
@@ -254,7 +268,7 @@ pub enum PaneSplitDirectionVertical {
Right,
}
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi)]
#[serde(rename_all = "snake_case")]
pub struct CenteredLayoutSettings {
/// The relative width of the left padding of the central pane from the
@@ -270,8 +284,6 @@ pub struct CenteredLayoutSettings {
}
impl Settings for WorkspaceSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = WorkspaceSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -350,6 +362,8 @@ impl Settings for WorkspaceSettings {
current.max_tabs = Some(n)
}
+ vscode.bool_setting("window.nativeTabs", &mut current.use_system_window_tabs);
+
// some combination of "window.restoreWindows" and "workbench.startupEditor" might
// map to our "restore_on_startup"
@@ -359,8 +373,6 @@ impl Settings for WorkspaceSettings {
}
impl Settings for TabBarSettings {
- const KEY: Option<&'static str> = Some("tab_bar");
-
type FileContent = TabBarSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
@@ -158,7 +158,7 @@ pub struct RemoteWorktree {
#[derive(Clone)]
pub struct Snapshot {
id: WorktreeId,
- abs_path: SanitizedPath,
+ abs_path: Arc<SanitizedPath>,
root_name: String,
root_char_bag: CharBag,
entries_by_path: SumTree<Entry>,
@@ -457,7 +457,7 @@ enum ScanState {
scanning: bool,
},
RootUpdated {
- new_path: Option<SanitizedPath>,
+ new_path: Option<Arc<SanitizedPath>>,
},
}
@@ -763,8 +763,8 @@ impl Worktree {
pub fn abs_path(&self) -> Arc<Path> {
match self {
- Worktree::Local(worktree) => worktree.abs_path.clone().into(),
- Worktree::Remote(worktree) => worktree.abs_path.clone().into(),
+ Worktree::Local(worktree) => SanitizedPath::cast_arc(worktree.abs_path.clone()),
+ Worktree::Remote(worktree) => SanitizedPath::cast_arc(worktree.abs_path.clone()),
}
}
@@ -1775,36 +1775,54 @@ impl LocalWorktree {
};
absolutize_path
};
- let abs_path = abs_new_path.clone();
+
let fs = self.fs.clone();
+ let abs_path = abs_new_path.clone();
let case_sensitive = self.fs_case_sensitive;
- let rename = cx.background_spawn(async move {
- let abs_old_path = abs_old_path?;
- let abs_new_path = abs_new_path;
-
- let abs_old_path_lower = abs_old_path.to_str().map(|p| p.to_lowercase());
- let abs_new_path_lower = abs_new_path.to_str().map(|p| p.to_lowercase());
-
- // If we're on a case-insensitive FS and we're doing a case-only rename (i.e. `foobar` to `FOOBAR`)
- // we want to overwrite, because otherwise we run into a file-already-exists error.
- let overwrite = !case_sensitive
- && abs_old_path != abs_new_path
- && abs_old_path_lower == abs_new_path_lower;
+ let do_rename = async move |fs: &dyn Fs, old_path: &Path, new_path: &Path, overwrite| {
fs.rename(
- &abs_old_path,
- &abs_new_path,
+ &old_path,
+ &new_path,
fs::RenameOptions {
overwrite,
- ..Default::default()
+ ..fs::RenameOptions::default()
},
)
.await
- .with_context(|| format!("Renaming {abs_old_path:?} into {abs_new_path:?}"))
+ .with_context(|| format!("renaming {old_path:?} into {new_path:?}"))
+ };
+
+ let rename_task = cx.background_spawn(async move {
+ let abs_old_path = abs_old_path?;
+
+ // If we're on a case-insensitive FS and we're doing a case-only rename (i.e. `foobar` to `FOOBAR`)
+ // we want to overwrite, because otherwise we run into a file-already-exists error.
+ let overwrite = !case_sensitive
+ && abs_old_path != abs_new_path
+ && abs_old_path.to_str().map(|p| p.to_lowercase())
+ == abs_new_path.to_str().map(|p| p.to_lowercase());
+
+ // The directory we're renaming into might not exist yet
+ if let Err(e) = do_rename(fs.as_ref(), &abs_old_path, &abs_new_path, overwrite).await {
+ if let Some(err) = e.downcast_ref::<std::io::Error>()
+ && err.kind() == std::io::ErrorKind::NotFound
+ {
+ if let Some(parent) = abs_new_path.parent() {
+ fs.create_dir(parent)
+ .await
+ .with_context(|| format!("creating parent directory {parent:?}"))?;
+ return do_rename(fs.as_ref(), &abs_old_path, &abs_new_path, overwrite)
+ .await;
+ }
+ }
+ return Err(e);
+ }
+ Ok(())
});
cx.spawn(async move |this, cx| {
- rename.await?;
+ rename_task.await?;
Ok(this
.update(cx, |this, cx| {
let local = this.as_local_mut().unwrap();
@@ -1813,11 +1831,16 @@ impl LocalWorktree {
// Otherwise, the FS watcher would do it on the `RootUpdated` event,
// but with a noticeable delay, so we handle it proactively.
local.update_abs_path_and_refresh(
- Some(SanitizedPath::from(abs_path.clone())),
+ Some(SanitizedPath::new_arc(&abs_path)),
cx,
);
Task::ready(Ok(this.root_entry().cloned()))
} else {
+ // First refresh the parent directory (in case it was newly created)
+ if let Some(parent) = new_path.parent() {
+ let _ = local.refresh_entries_for_paths(vec![parent.into()]);
+ }
+ // Then refresh the new path
local.refresh_entry(new_path.clone(), Some(old_path), cx)
}
})?
@@ -1968,7 +1991,7 @@ impl LocalWorktree {
cx: &Context<Worktree>,
) -> Option<Task<Result<()>>> {
let path = self.entry_for_id(entry_id).unwrap().path.clone();
- let mut rx = self.add_path_prefix_to_scan(path.clone());
+ let mut rx = self.add_path_prefix_to_scan(path);
Some(cx.background_spawn(async move {
rx.next().await;
Ok(())
@@ -2090,7 +2113,7 @@ impl LocalWorktree {
fn update_abs_path_and_refresh(
&mut self,
- new_path: Option<SanitizedPath>,
+ new_path: Option<Arc<SanitizedPath>>,
cx: &Context<Worktree>,
) {
if let Some(new_path) = new_path {
@@ -2340,7 +2363,7 @@ impl Snapshot {
pub fn new(id: u64, root_name: String, abs_path: Arc<Path>) -> Self {
Snapshot {
id: WorktreeId::from_usize(id as usize),
- abs_path: abs_path.into(),
+ abs_path: SanitizedPath::from_arc(abs_path),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
root_name,
always_included_entries: Default::default(),
@@ -2368,7 +2391,7 @@ impl Snapshot {
//
// This is definitely a bug, but it's not clear if we should handle it here or not.
pub fn abs_path(&self) -> &Arc<Path> {
- self.abs_path.as_path()
+ SanitizedPath::cast_arc_ref(&self.abs_path)
}
fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
@@ -2464,7 +2487,7 @@ impl Snapshot {
Some(removed_entry.path)
}
- fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) {
+ fn update_abs_path(&mut self, abs_path: Arc<SanitizedPath>, root_name: String) {
self.abs_path = abs_path;
if root_name != self.root_name {
self.root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
@@ -2483,7 +2506,7 @@ impl Snapshot {
update.removed_entries.len()
);
self.update_abs_path(
- SanitizedPath::from(PathBuf::from_proto(update.abs_path)),
+ SanitizedPath::new_arc(&PathBuf::from_proto(update.abs_path)),
update.root_name,
);
@@ -3151,16 +3174,6 @@ impl BackgroundScannerState {
.work_directory_abs_path(&work_directory)
.log_err()?;
- if self
- .snapshot
- .git_repositories
- .get(&work_dir_entry.id)
- .is_some()
- {
- log::trace!("existing git repository for {work_directory:?}");
- return None;
- }
-
let dot_git_abs_path: Arc<Path> = self
.snapshot
.abs_path
@@ -3859,7 +3872,11 @@ impl BackgroundScanner {
root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref());
}
- state.enqueue_scan_dir(root_abs_path.into(), &root_entry, &scan_job_tx);
+ state.enqueue_scan_dir(
+ SanitizedPath::cast_arc(root_abs_path),
+ &root_entry,
+ &scan_job_tx,
+ );
}
};
@@ -3940,8 +3957,9 @@ impl BackgroundScanner {
self.forcibly_load_paths(&request.relative_paths).await;
let root_path = self.state.lock().snapshot.abs_path.clone();
- let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
- Ok(path) => SanitizedPath::from(path),
+ let root_canonical_path = self.fs.canonicalize(root_path.as_path()).await;
+ let root_canonical_path = match &root_canonical_path {
+ Ok(path) => SanitizedPath::new(path),
Err(err) => {
log::error!("failed to canonicalize root path {root_path:?}: {err}");
return true;
@@ -3952,7 +3970,7 @@ impl BackgroundScanner {
.iter()
.map(|path| {
if path.file_name().is_some() {
- root_canonical_path.as_path().join(path).to_path_buf()
+ root_canonical_path.as_path().join(path)
} else {
root_canonical_path.as_path().to_path_buf()
}
@@ -3969,8 +3987,8 @@ impl BackgroundScanner {
}
self.reload_entries_for_paths(
- root_path,
- root_canonical_path,
+ &root_path,
+ &root_canonical_path,
&request.relative_paths,
abs_paths,
None,
@@ -3982,8 +4000,9 @@ impl BackgroundScanner {
async fn process_events(&self, mut abs_paths: Vec<PathBuf>) {
let root_path = self.state.lock().snapshot.abs_path.clone();
- let root_canonical_path = match self.fs.canonicalize(root_path.as_path()).await {
- Ok(path) => SanitizedPath::from(path),
+ let root_canonical_path = self.fs.canonicalize(root_path.as_path()).await;
+ let root_canonical_path = match &root_canonical_path {
+ Ok(path) => SanitizedPath::new(path),
Err(err) => {
let new_path = self
.state
@@ -3992,7 +4011,7 @@ impl BackgroundScanner {
.root_file_handle
.clone()
.and_then(|handle| handle.current_path(&self.fs).log_err())
- .map(SanitizedPath::from)
+ .map(|path| SanitizedPath::new_arc(&path))
.filter(|new_path| *new_path != root_path);
if let Some(new_path) = new_path.as_ref() {
@@ -4021,7 +4040,7 @@ impl BackgroundScanner {
abs_paths.sort_unstable();
abs_paths.dedup_by(|a, b| a.starts_with(b));
abs_paths.retain(|abs_path| {
- let abs_path = SanitizedPath::from(abs_path);
+ let abs_path = &SanitizedPath::new(abs_path);
let snapshot = &self.state.lock().snapshot;
{
@@ -4064,7 +4083,7 @@ impl BackgroundScanner {
return false;
};
- if abs_path.0.file_name() == Some(*GITIGNORE) {
+ if abs_path.file_name() == Some(*GITIGNORE) {
for (_, repo) in snapshot.git_repositories.iter().filter(|(_, repo)| repo.directory_contains(&relative_path)) {
if !dot_git_abs_paths.iter().any(|dot_git_abs_path| dot_git_abs_path == repo.common_dir_abs_path.as_ref()) {
dot_git_abs_paths.push(repo.common_dir_abs_path.to_path_buf());
@@ -4103,8 +4122,8 @@ impl BackgroundScanner {
let (scan_job_tx, scan_job_rx) = channel::unbounded();
log::debug!("received fs events {:?}", relative_paths);
self.reload_entries_for_paths(
- root_path,
- root_canonical_path,
+ &root_path,
+ &root_canonical_path,
&relative_paths,
abs_paths,
Some(scan_job_tx.clone()),
@@ -4451,8 +4470,8 @@ impl BackgroundScanner {
/// All list arguments should be sorted before calling this function
async fn reload_entries_for_paths(
&self,
- root_abs_path: SanitizedPath,
- root_canonical_path: SanitizedPath,
+ root_abs_path: &SanitizedPath,
+ root_canonical_path: &SanitizedPath,
relative_paths: &[Arc<Path>],
abs_paths: Vec<PathBuf>,
scan_queue_tx: Option<Sender<ScanJob>>,
@@ -4480,7 +4499,7 @@ impl BackgroundScanner {
}
}
- anyhow::Ok(Some((metadata, SanitizedPath::from(canonical_path))))
+ anyhow::Ok(Some((metadata, SanitizedPath::new_arc(&canonical_path))))
} else {
Ok(None)
}
@@ -5509,7 +5528,7 @@ impl ProjectEntryId {
Self(id as usize)
}
- pub fn to_proto(&self) -> u64 {
+ pub fn to_proto(self) -> u64 {
self.0 as u64
}
@@ -5517,14 +5536,14 @@ impl ProjectEntryId {
ProjectEntryId(id)
}
- pub fn to_usize(&self) -> usize {
+ pub fn to_usize(self) -> usize {
self.0
}
}
#[cfg(any(test, feature = "test-support"))]
impl CreatedEntry {
- pub fn to_included(self) -> Option<Entry> {
+ pub fn into_included(self) -> Option<Entry> {
match self {
CreatedEntry::Included(entry) => Some(entry),
CreatedEntry::Excluded { .. } => None,
@@ -4,7 +4,7 @@ use anyhow::Context as _;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
use util::paths::PathMatcher;
#[derive(Clone, PartialEq, Eq)]
@@ -31,7 +31,8 @@ impl WorktreeSettings {
}
}
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)]
+#[settings_key(None)]
pub struct WorktreeSettingsContent {
/// Completely ignore files matching globs from `file_scan_exclusions`. Overrides
/// `file_scan_inclusions`.
@@ -65,8 +66,6 @@ pub struct WorktreeSettingsContent {
}
impl Settings for WorktreeSettings {
- const KEY: Option<&'static str> = None;
-
type FileContent = WorktreeSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
@@ -1254,7 +1254,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
let snapshot = Arc::new(Mutex::new(tree.snapshot()));
tree.observe_updates(0, cx, {
let snapshot = snapshot.clone();
- let settings = tree.settings().clone();
+ let settings = tree.settings();
move |update| {
snapshot
.lock()
@@ -1274,7 +1274,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
assert!(entry.is_dir());
@@ -1323,7 +1323,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
assert!(entry.is_file());
@@ -1357,7 +1357,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
assert!(entry.is_file());
@@ -1377,7 +1377,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
assert!(entry.is_file());
@@ -1395,7 +1395,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
})
.await
.unwrap()
- .to_included()
+ .into_included()
.unwrap();
assert!(entry.is_file());
@@ -1464,7 +1464,7 @@ async fn test_random_worktree_operations_during_initial_scan(
tree.as_local().unwrap().snapshot().check_invariants(true)
});
- if rng.gen_bool(0.6) {
+ if rng.random_bool(0.6) {
snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
}
}
@@ -1551,7 +1551,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
let mut snapshots = Vec::new();
let mut mutations_len = operations;
while mutations_len > 1 {
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
worktree
.update(cx, |worktree, cx| {
randomly_mutate_worktree(worktree, &mut rng, cx)
@@ -1563,8 +1563,8 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
}
let buffered_event_count = fs.as_fake().buffered_event_count();
- if buffered_event_count > 0 && rng.gen_bool(0.3) {
- let len = rng.gen_range(0..=buffered_event_count);
+ if buffered_event_count > 0 && rng.random_bool(0.3) {
+ let len = rng.random_range(0..=buffered_event_count);
log::info!("flushing {} events", len);
fs.as_fake().flush_events(len);
} else {
@@ -1573,7 +1573,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
}
cx.executor().run_until_parked();
- if rng.gen_bool(0.2) {
+ if rng.random_bool(0.2) {
log::info!("storing snapshot {}", snapshots.len());
let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
snapshots.push(snapshot);
@@ -1701,7 +1701,7 @@ fn randomly_mutate_worktree(
let snapshot = worktree.snapshot();
let entry = snapshot.entries(false, 0).choose(rng).unwrap();
- match rng.gen_range(0_u32..100) {
+ match rng.random_range(0_u32..100) {
0..=33 if entry.path.as_ref() != Path::new("") => {
log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
worktree.delete_entry(entry.id, false, cx).unwrap()
@@ -1726,14 +1726,14 @@ fn randomly_mutate_worktree(
);
let task = worktree.rename_entry(entry.id, new_path, cx);
cx.background_spawn(async move {
- task.await?.to_included().unwrap();
+ task.await?.into_included().unwrap();
Ok(())
})
}
_ => {
if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng));
- let is_dir = rng.gen_bool(0.3);
+ let is_dir = rng.random_bool(0.3);
log::info!(
"creating {} at {:?}",
if is_dir { "dir" } else { "file" },
@@ -1776,11 +1776,11 @@ async fn randomly_mutate_fs(
}
}
- if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+ if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) {
let path = dirs.choose(rng).unwrap();
let new_path = path.join(random_filename(rng));
- if rng.r#gen() {
+ if rng.random() {
log::info!(
"creating dir {:?}",
new_path.strip_prefix(root_path).unwrap()
@@ -1793,7 +1793,7 @@ async fn randomly_mutate_fs(
);
fs.create_file(&new_path, Default::default()).await.unwrap();
}
- } else if rng.gen_bool(0.05) {
+ } else if rng.random_bool(0.05) {
let ignore_dir_path = dirs.choose(rng).unwrap();
let ignore_path = ignore_dir_path.join(*GITIGNORE);
@@ -1808,11 +1808,11 @@ async fn randomly_mutate_fs(
.cloned()
.collect::<Vec<_>>();
let files_to_ignore = {
- let len = rng.gen_range(0..=subfiles.len());
+ let len = rng.random_range(0..=subfiles.len());
subfiles.choose_multiple(rng, len)
};
let dirs_to_ignore = {
- let len = rng.gen_range(0..subdirs.len());
+ let len = rng.random_range(0..subdirs.len());
subdirs.choose_multiple(rng, len)
};
@@ -1848,7 +1848,7 @@ async fn randomly_mutate_fs(
file_path.into_iter().chain(dir_path).choose(rng).unwrap()
};
- let is_rename = rng.r#gen();
+ let is_rename = rng.random();
if is_rename {
let new_path_parent = dirs
.iter()
@@ -1857,7 +1857,7 @@ async fn randomly_mutate_fs(
.unwrap();
let overwrite_existing_dir =
- !old_path.starts_with(new_path_parent) && rng.gen_bool(0.3);
+ !old_path.starts_with(new_path_parent) && rng.random_bool(0.3);
let new_path = if overwrite_existing_dir {
fs.remove_dir(
new_path_parent,
@@ -1919,11 +1919,106 @@ async fn randomly_mutate_fs(
fn random_filename(rng: &mut impl Rng) -> String {
(0..6)
- .map(|_| rng.sample(rand::distributions::Alphanumeric))
+ .map(|_| rng.sample(rand::distr::Alphanumeric))
.map(char::from)
.collect()
}
+#[gpui::test]
+async fn test_rename_file_to_new_directory(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ let expected_contents = "content";
+ fs.as_fake()
+ .insert_tree(
+ "/root",
+ json!({
+ "test.txt": expected_contents
+ }),
+ )
+ .await;
+ let worktree = Worktree::local(
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Arc::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ let entry_id = worktree.read_with(cx, |worktree, _| {
+ worktree.entry_for_path("test.txt").unwrap().id
+ });
+ let _result = worktree
+ .update(cx, |worktree, cx| {
+ worktree.rename_entry(entry_id, Path::new("dir1/dir2/dir3/test.txt"), cx)
+ })
+ .await
+ .unwrap();
+ worktree.read_with(cx, |worktree, _| {
+ assert!(
+ worktree.entry_for_path("test.txt").is_none(),
+ "Old file should have been removed"
+ );
+ assert!(
+ worktree.entry_for_path("dir1/dir2/dir3/test.txt").is_some(),
+ "Whole directory hierarchy and the new file should have been created"
+ );
+ });
+ assert_eq!(
+ worktree
+ .update(cx, |worktree, cx| {
+ worktree.load_file("dir1/dir2/dir3/test.txt".as_ref(), cx)
+ })
+ .await
+ .unwrap()
+ .text,
+ expected_contents,
+ "Moved file's contents should be preserved"
+ );
+
+ let entry_id = worktree.read_with(cx, |worktree, _| {
+ worktree
+ .entry_for_path("dir1/dir2/dir3/test.txt")
+ .unwrap()
+ .id
+ });
+ let _result = worktree
+ .update(cx, |worktree, cx| {
+ worktree.rename_entry(entry_id, Path::new("dir1/dir2/test.txt"), cx)
+ })
+ .await
+ .unwrap();
+ worktree.read_with(cx, |worktree, _| {
+ assert!(
+ worktree.entry_for_path("test.txt").is_none(),
+ "First file should not reappear"
+ );
+ assert!(
+ worktree.entry_for_path("dir1/dir2/dir3/test.txt").is_none(),
+ "Old file should have been removed"
+ );
+ assert!(
+ worktree.entry_for_path("dir1/dir2/test.txt").is_some(),
+ "No error should have occurred after moving into existing directory"
+ );
+ });
+ assert_eq!(
+ worktree
+ .update(cx, |worktree, cx| {
+ worktree.load_file("dir1/dir2/test.txt".as_ref(), cx)
+ })
+ .await
+ .unwrap()
+ .text,
+ expected_contents,
+ "Moved file's contents should be preserved"
+ );
+}
+
#[gpui::test]
async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
init_test(cx);
@@ -20,6 +20,8 @@ pub enum Model {
Grok3MiniFast,
#[serde(rename = "grok-4-latest")]
Grok4,
+ #[serde(rename = "grok-code-fast-1")]
+ GrokCodeFast1,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -43,6 +45,7 @@ impl Model {
"grok-3-mini" => Ok(Self::Grok3Mini),
"grok-3-fast" => Ok(Self::Grok3Fast),
"grok-3-mini-fast" => Ok(Self::Grok3MiniFast),
+ "grok-code-fast-1" => Ok(Self::GrokCodeFast1),
_ => anyhow::bail!("invalid model id '{id}'"),
}
}
@@ -55,6 +58,7 @@ impl Model {
Self::Grok3Fast => "grok-3-fast",
Self::Grok3MiniFast => "grok-3-mini-fast",
Self::Grok4 => "grok-4",
+ Self::GrokCodeFast1 => "grok-code-fast-1",
Self::Custom { name, .. } => name,
}
}
@@ -67,6 +71,7 @@ impl Model {
Self::Grok3Fast => "Grok 3 Fast",
Self::Grok3MiniFast => "Grok 3 Mini Fast",
Self::Grok4 => "Grok 4",
+ Self::GrokCodeFast1 => "Grok Code Fast 1",
Self::Custom {
name, display_name, ..
} => display_name.as_ref().unwrap_or(name),
@@ -76,7 +81,7 @@ impl Model {
pub fn max_token_count(&self) -> u64 {
match self {
Self::Grok3 | Self::Grok3Mini | Self::Grok3Fast | Self::Grok3MiniFast => 131_072,
- Self::Grok4 => 256_000,
+ Self::Grok4 | Self::GrokCodeFast1 => 256_000,
Self::Grok2Vision => 8_192,
Self::Custom { max_tokens, .. } => *max_tokens,
}
@@ -85,7 +90,7 @@ impl Model {
pub fn max_output_tokens(&self) -> Option<u64> {
match self {
Self::Grok3 | Self::Grok3Mini | Self::Grok3Fast | Self::Grok3MiniFast => Some(8_192),
- Self::Grok4 => Some(64_000),
+ Self::Grok4 | Self::GrokCodeFast1 => Some(64_000),
Self::Grok2Vision => Some(4_096),
Self::Custom {
max_output_tokens, ..
@@ -101,7 +106,7 @@ impl Model {
| Self::Grok3Fast
| Self::Grok3MiniFast
| Self::Grok4 => true,
- Model::Custom { .. } => false,
+ Self::GrokCodeFast1 | Model::Custom { .. } => false,
}
}
@@ -116,15 +121,13 @@ impl Model {
| Self::Grok3Mini
| Self::Grok3Fast
| Self::Grok3MiniFast
- | Self::Grok4 => true,
+ | Self::Grok4
+ | Self::GrokCodeFast1 => true,
Model::Custom { .. } => false,
}
}
pub fn supports_images(&self) -> bool {
- match self {
- Self::Grok2Vision => true,
- _ => false,
- }
+ matches!(self, Self::Grok2Vision)
}
}
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
-version = "0.201.0"
+version = "0.204.0"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -20,10 +20,10 @@ path = "src/main.rs"
[dependencies]
activity_indicator.workspace = true
+acp_tools.workspace = true
agent.workspace = true
agent_ui.workspace = true
agent_settings.workspace = true
-agent_servers.workspace = true
anyhow.workspace = true
askpass.workspace = true
assets.workspace = true
@@ -33,6 +33,7 @@ audio.workspace = true
auto_update.workspace = true
auto_update_ui.workspace = true
backtrace = "0.3"
+bincode.workspace = true
breadcrumbs.workspace = true
call.workspace = true
channel.workspace = true
@@ -60,6 +61,7 @@ extensions_ui.workspace = true
feature_flags.workspace = true
feedback.workspace = true
file_finder.workspace = true
+system_specs.workspace = true
fs.workspace = true
futures.workspace = true
git.workspace = true
@@ -87,9 +89,11 @@ language.workspace = true
language_extension.workspace = true
language_model.workspace = true
language_models.workspace = true
+language_onboarding.workspace = true
language_selector.workspace = true
language_tools.workspace = true
languages = { workspace = true, features = ["load-grammars"] }
+line_ending_selector.workspace = true
libc.workspace = true
log.workspace = true
markdown.workspace = true
@@ -128,6 +132,7 @@ serde_json.workspace = true
session.workspace = true
settings.workspace = true
settings_ui.workspace = true
+keymap_editor.workspace = true
shellexpand.workspace = true
smol.workspace = true
snippet_provider.workspace = true
@@ -161,6 +166,7 @@ web_search_providers.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
+zed_env_vars.workspace = true
zeta.workspace = true
zlog.workspace = true
zlog_settings.workspace = true
@@ -185,6 +191,7 @@ itertools.workspace = true
language = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
+settings_ui = { workspace = true, features = ["test-support"] }
terminal_view = { workspace = true, features = ["test-support"] }
tree-sitter-md.workspace = true
tree-sitter-rust.workspace = true
@@ -0,0 +1,4 @@
+<key>CFBundleSupportedPlatforms</key>
+<array>
+ <string>MacOSX</string>
+</array>
@@ -0,0 +1,25 @@
+#!/usr/bin/env sh
+
+if [ "$ZED_WSL_DEBUG_INFO" = true ]; then
+ set -x
+fi
+
+ZED_PATH="$(dirname "$(realpath "$0")")"
+
+IN_WSL=false
+if [ -n "$WSL_DISTRO_NAME" ]; then
+    # $WSL_DISTRO_NAME has been available since WSL build 18362, including WSL2
+ IN_WSL=true
+fi
+
+if [ $IN_WSL = true ]; then
+ WSL_USER="$USER"
+ if [ -z "$WSL_USER" ]; then
+ WSL_USER="$USERNAME"
+ fi
+ "$ZED_PATH/zed.exe" --wsl "$WSL_USER@$WSL_DISTRO_NAME" "$@"
+ exit $?
+else
+ "$ZED_PATH/zed.exe" "$@"
+ exit $?
+fi
@@ -2,7 +2,7 @@ mod reliability;
mod zed;
use agent_ui::AgentPanel;
-use anyhow::{Context as _, Result};
+use anyhow::{Context as _, Error, Result};
use clap::{Parser, command};
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
use client::{Client, ProxySettings, UserStore, parse_zed_link};
@@ -16,7 +16,7 @@ use extension_host::ExtensionStore;
use fs::{Fs, RealFs};
use futures::{FutureExt, StreamExt, channel::oneshot, future, select_biased};
use git::GitHostingProviderRegistry;
-use gpui::{App, AppContext as _, Application, AsyncApp, Focusable as _, UpdateGlobal as _};
+use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, UpdateGlobal as _};
use postage::stream::Stream as _;
use gpui_tokio::Tokio;
@@ -24,13 +24,14 @@ use http_client::{Url, read_proxy_from_env};
use language::LanguageRegistry;
use onboarding::{FIRST_OPEN, show_onboarding_view};
use prompt_store::PromptBuilder;
+use remote::RemoteConnectionOptions;
use reqwest_client::ReqwestClient;
use assets::Assets;
use node_runtime::{NodeBinaryOptions, NodeRuntime};
use parking_lot::Mutex;
use project::project_settings::ProjectSettings;
-use recent_projects::{SshSettings, open_ssh_project};
+use recent_projects::{SshSettings, open_remote_project};
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use session::{AppSession, Session};
use settings::{BaseKeymap, Settings, SettingsStore, watch_config_file};
@@ -48,8 +49,8 @@ use theme::{
use util::{ResultExt, TryFutureExt, maybe};
use uuid::Uuid;
use workspace::{
- AppState, SerializedWorkspaceLocation, Toast, Workspace, WorkspaceSettings, WorkspaceStore,
- notifications::NotificationId,
+ AppState, PathList, SerializedWorkspaceLocation, Toast, Workspace, WorkspaceSettings,
+ WorkspaceStore, notifications::NotificationId,
};
use zed::{
OpenListener, OpenRequest, RawOpenRequest, app_menus, build_window_options,
@@ -81,12 +82,9 @@ fn files_not_created_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
#[cfg(unix)]
{
- match kind {
- io::ErrorKind::PermissionDenied => {
- error_kind_details.push_str("\n\nConsider using chown and chmod tools for altering the directories permissions if your user has corresponding rights.\
- \nFor example, `sudo chown $(whoami):staff ~/.config` and `chmod +uwrx ~/.config`");
- }
- _ => {}
+ if kind == io::ErrorKind::PermissionDenied {
+ error_kind_details.push_str("\n\nConsider using chown and chmod tools for altering the directories permissions if your user has corresponding rights.\
+ \nFor example, `sudo chown $(whoami):staff ~/.config` and `chmod +uwrx ~/.config`");
}
}
@@ -244,9 +242,9 @@ pub fn main() {
option_env!("ZED_COMMIT_SHA").map(|commit_sha| AppCommitSha::new(commit_sha.to_string()));
if args.system_specs {
- let system_specs = feedback::system_specs::SystemSpecs::new_stateless(
+ let system_specs = system_specs::SystemSpecs::new_stateless(
app_version,
- app_commit_sha.clone(),
+ app_commit_sha,
*release_channel::RELEASE_CHANNEL,
);
println!("Zed System Specs (from CLI):\n{}", system_specs);
@@ -291,7 +289,7 @@ pub fn main() {
let (open_listener, mut open_rx) = OpenListener::new();
- let failed_single_instance_check = if *db::ZED_STATELESS
+ let failed_single_instance_check = if *zed_env_vars::ZED_STATELESS
|| *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev
{
false
@@ -364,6 +362,7 @@ pub fn main() {
open_listener.open(RawOpenRequest {
urls,
diff_paths: Vec::new(),
+ ..Default::default()
})
}
});
@@ -371,7 +370,7 @@ pub fn main() {
if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade())
{
cx.spawn({
- let app_state = app_state.clone();
+ let app_state = app_state;
async move |cx| {
if let Err(e) = restore_or_create_workspace(app_state, cx).await {
fail_to_open_window_async(e, cx)
@@ -527,13 +526,13 @@ pub fn main() {
let app_session = cx.new(|cx| AppSession::new(session, cx));
let app_state = Arc::new(AppState {
- languages: languages.clone(),
+ languages,
client: client.clone(),
- user_store: user_store.clone(),
+ user_store,
fs: fs.clone(),
build_window_options,
workspace_store,
- node_runtime: node_runtime.clone(),
+ node_runtime,
session: app_session,
});
AppState::set_global(Arc::downgrade(&app_state), cx);
@@ -569,7 +568,7 @@ pub fn main() {
language_model::init(app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
agent_settings::init(cx);
- agent_servers::init(cx);
+ acp_tools::init(cx);
web_search::init(cx);
web_search_providers::init(app_state.client.clone(), cx);
snippet_provider::init(cx);
@@ -602,7 +601,7 @@ pub fn main() {
repl::notebook::init(cx);
diagnostics::init(cx);
- audio::init(Assets, cx);
+ audio::init(cx);
workspace::init(app_state.clone(), cx);
ui_prompt::init(cx);
@@ -621,6 +620,7 @@ pub fn main() {
terminal_view::init(cx);
journal::init(app_state.clone(), cx);
language_selector::init(cx);
+ line_ending_selector::init(cx);
toolchain_selector::init(cx);
theme_selector::init(cx);
settings_profile_selector::init(cx);
@@ -635,6 +635,7 @@ pub fn main() {
svg_preview::init(cx);
onboarding::init(cx);
settings_ui::init(cx);
+ keymap_editor::init(cx);
extensions_ui::init(cx);
zeta::init(cx);
inspector_ui::init(app_state.clone(), cx);
@@ -698,7 +699,7 @@ pub fn main() {
let urls: Vec<_> = args
.paths_or_urls
.iter()
- .filter_map(|arg| parse_url_arg(arg, cx).log_err())
+ .map(|arg| parse_url_arg(arg, cx))
.collect();
let diff_paths: Vec<[String; 2]> = args
@@ -707,8 +708,17 @@ pub fn main() {
.map(|chunk| [chunk[0].clone(), chunk[1].clone()])
.collect();
+ #[cfg(target_os = "windows")]
+ let wsl = args.wsl;
+ #[cfg(not(target_os = "windows"))]
+ let wsl = None;
+
if !urls.is_empty() || !diff_paths.is_empty() {
- open_listener.open(RawOpenRequest { urls, diff_paths })
+ open_listener.open(RawOpenRequest {
+ urls,
+ diff_paths,
+ wsl,
+ })
}
match open_rx
@@ -858,7 +868,6 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
if let Some(kind) = request.kind {
match kind {
OpenRequestKind::CliConnection(connection) => {
- let app_state = app_state.clone();
cx.spawn(async move |cx| handle_cli_connection(connection, app_state, cx).await)
.detach();
}
@@ -898,10 +907,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
return;
}
- if let Some(connection_options) = request.ssh_connection {
+ if let Some(connection_options) = request.remote_connection {
cx.spawn(async move |cx| {
let paths: Vec<PathBuf> = request.open_paths.into_iter().map(PathBuf::from).collect();
- open_ssh_project(
+ open_remote_project(
connection_options,
paths,
app_state,
@@ -1054,17 +1063,20 @@ async fn installation_id() -> Result<IdType> {
async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp) -> Result<()> {
if let Some(locations) = restorable_workspace_locations(cx, &app_state).await {
+ let use_system_window_tabs = cx
+ .update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs)
+ .unwrap_or(false);
+ let mut results: Vec<Result<(), Error>> = Vec::new();
let mut tasks = Vec::new();
- for location in locations {
+ for (index, (location, paths)) in locations.into_iter().enumerate() {
match location {
- SerializedWorkspaceLocation::Local(location, _) => {
+ SerializedWorkspaceLocation::Local => {
let app_state = app_state.clone();
- let paths = location.paths().to_vec();
let task = cx.spawn(async move |cx| {
let open_task = cx.update(|cx| {
workspace::open_paths(
- &paths,
+ &paths.paths(),
app_state,
workspace::OpenOptions::default(),
cx,
@@ -1072,33 +1084,33 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
})?;
open_task.await.map(|_| ())
});
- tasks.push(task);
+
+ // If we're using system window tabs and this is the first workspace,
+ // wait for it to finish so that the other windows can be added as tabs.
+ if use_system_window_tabs && index == 0 {
+ results.push(task.await);
+ } else {
+ tasks.push(task);
+ }
}
- SerializedWorkspaceLocation::Ssh(ssh) => {
+ SerializedWorkspaceLocation::Remote(mut connection_options) => {
let app_state = app_state.clone();
- let ssh_host = ssh.host.clone();
- let task = cx.spawn(async move |cx| {
- let connection_options = cx.update(|cx| {
+ if let RemoteConnectionOptions::Ssh(options) = &mut connection_options {
+ cx.update(|cx| {
SshSettings::get_global(cx)
- .connection_options_for(ssh.host, ssh.port, ssh.user)
- });
-
- match connection_options {
- Ok(connection_options) => recent_projects::open_ssh_project(
- connection_options,
- ssh.paths.into_iter().map(PathBuf::from).collect(),
- app_state,
- workspace::OpenOptions::default(),
- cx,
- )
- .await
- .map_err(|e| anyhow::anyhow!(e)),
- Err(e) => Err(anyhow::anyhow!(
- "Failed to get SSH connection options for {}: {}",
- ssh_host,
- e
- )),
- }
+ .fill_connection_options_from_settings(options)
+ })?;
+ }
+ let task = cx.spawn(async move |cx| {
+ recent_projects::open_remote_project(
+ connection_options,
+ paths.paths().into_iter().map(PathBuf::from).collect(),
+ app_state,
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ .await
+ .map_err(|e| anyhow::anyhow!(e))
});
tasks.push(task);
}
@@ -1106,7 +1118,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
}
// Wait for all workspaces to open concurrently
- let results = future::join_all(tasks).await;
+ results.extend(future::join_all(tasks).await);
// Show notifications for any errors that occurred
let mut error_count = 0;
@@ -1177,7 +1189,7 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
pub(crate) async fn restorable_workspace_locations(
cx: &mut AsyncApp,
app_state: &Arc<AppState>,
-) -> Option<Vec<SerializedWorkspaceLocation>> {
+) -> Option<Vec<(SerializedWorkspaceLocation, PathList)>> {
let mut restore_behavior = cx
.update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup)
.ok()?;
@@ -1280,6 +1292,19 @@ struct Args {
#[arg(long, value_name = "DIR")]
user_data_dir: Option<String>,
+ /// The username and WSL distribution to use when opening paths. If not specified,
+ /// Zed will attempt to open the paths directly.
+ ///
+ /// The username is optional, and if not specified, the default user for the distribution
+ /// will be used.
+ ///
+ /// Example: `me@Ubuntu` or `Ubuntu`.
+ ///
+ /// WARN: You should not fill in this field by hand.
+ #[cfg(target_os = "windows")]
+ #[arg(long, value_name = "USER@DISTRO")]
+ wsl: Option<String>,
+
/// Instructs zed to run as a dev server on this machine. (not implemented)
#[arg(long)]
dev_server_token: Option<String>,
@@ -1338,18 +1363,18 @@ impl ToString for IdType {
}
}
-fn parse_url_arg(arg: &str, cx: &App) -> Result<String> {
+fn parse_url_arg(arg: &str, cx: &App) -> String {
match std::fs::canonicalize(Path::new(&arg)) {
- Ok(path) => Ok(format!("file://{}", path.display())),
- Err(error) => {
+ Ok(path) => format!("file://{}", path.display()),
+ Err(_) => {
if arg.starts_with("file://")
|| arg.starts_with("zed-cli://")
|| arg.starts_with("ssh://")
|| parse_zed_link(arg, cx).is_some()
{
- Ok(arg.into())
+ arg.into()
} else {
- anyhow::bail!("error parsing path argument: {error}")
+ format!("file://{arg}")
}
}
}
@@ -1420,7 +1445,6 @@ fn eager_load_active_theme_and_icon_theme(fs: Arc<dyn Fs>, cx: &App) {
.path_to_extension_icon_theme(icon_theme_name)
{
cx.spawn({
- let theme_registry = theme_registry.clone();
let fs = fs.clone();
async move |cx| {
theme_registry
@@ -1442,9 +1466,7 @@ fn load_user_themes_in_background(fs: Arc<dyn fs::Fs>, cx: &mut App) {
cx.spawn({
let fs = fs.clone();
async move |cx| {
- if let Some(theme_registry) =
- cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
- {
+ if let Some(theme_registry) = cx.update(|cx| ThemeRegistry::global(cx)).log_err() {
let themes_dir = paths::themes_dir().as_ref();
match fs
.metadata(themes_dir)
@@ -1483,7 +1505,7 @@ fn watch_themes(fs: Arc<dyn fs::Fs>, cx: &mut App) {
for event in paths {
if fs.metadata(&event.path).await.ok().flatten().is_some()
&& let Some(theme_registry) =
- cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
+ cx.update(|cx| ThemeRegistry::global(cx)).log_err()
&& let Some(()) = theme_registry
.load_user_theme(&event.path, fs.clone())
.await
@@ -60,7 +60,9 @@ pub fn init_panic_hook(
.or_else(|| info.payload().downcast_ref::<String>().cloned())
.unwrap_or_else(|| "Box<Any>".to_string());
- crashes::handle_panic(payload.clone(), info.location());
+ if *release_channel::RELEASE_CHANNEL != ReleaseChannel::Dev {
+ crashes::handle_panic(payload.clone(), info.location());
+ }
let thread = thread::current();
let thread_name = thread.name().unwrap_or("<unnamed>");
@@ -87,7 +89,9 @@ pub fn init_panic_hook(
},
backtrace,
);
- std::process::exit(-1);
+ if MINIDUMP_ENDPOINT.is_none() {
+ std::process::exit(-1);
+ }
}
let main_module_base_address = get_main_module_base_address();
@@ -146,7 +150,9 @@ pub fn init_panic_hook(
}
zlog::flush();
- if !is_pty && let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
+ if (!is_pty || MINIDUMP_ENDPOINT.is_some())
+ && let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err()
+ {
let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic"));
let panic_file = fs::OpenOptions::new()
@@ -214,10 +220,10 @@ pub fn init(
let installation_id = installation_id.clone();
let system_id = system_id.clone();
- let Some(ssh_client) = project.ssh_client() else {
+ let Some(remote_client) = project.remote_client() else {
return;
};
- ssh_client.update(cx, |client, cx| {
+ remote_client.update(cx, |client, cx| {
if !TelemetrySettings::get_global(cx).diagnostics {
return;
}
@@ -251,6 +257,7 @@ pub fn init(
endpoint,
minidump_contents,
&metadata,
+ installation_id.clone(),
)
.await
.log_err();
@@ -478,7 +485,9 @@ fn upload_panics_and_crashes(
return;
}
cx.background_spawn(async move {
- upload_previous_minidumps(http.clone()).await.warn_on_err();
+ upload_previous_minidumps(http.clone(), installation_id.clone())
+ .await
+ .warn_on_err();
let most_recent_panic = upload_previous_panics(http.clone(), &panic_report_url)
.await
.log_err()
@@ -546,7 +555,10 @@ async fn upload_previous_panics(
Ok(most_recent_panic)
}
-pub async fn upload_previous_minidumps(http: Arc<HttpClientWithUrl>) -> anyhow::Result<()> {
+pub async fn upload_previous_minidumps(
+ http: Arc<HttpClientWithUrl>,
+ installation_id: Option<String>,
+) -> anyhow::Result<()> {
let Some(minidump_endpoint) = MINIDUMP_ENDPOINT.as_ref() else {
log::warn!("Minidump endpoint not set");
return Ok(());
@@ -569,6 +581,7 @@ pub async fn upload_previous_minidumps(http: Arc<HttpClientWithUrl>) -> anyhow::
.await
.context("Failed to read minidump")?,
&metadata,
+ installation_id.clone(),
)
.await
.log_err()
@@ -586,6 +599,7 @@ async fn upload_minidump(
endpoint: &str,
minidump: Vec<u8>,
metadata: &crashes::CrashInfo,
+ installation_id: Option<String>,
) -> Result<()> {
let mut form = Form::new()
.part(
@@ -601,12 +615,83 @@ async fn upload_minidump(
.text("sentry[tags][version]", metadata.init.zed_version.clone())
.text("sentry[release]", metadata.init.commit_sha.clone())
.text("platform", "rust");
+ let mut panic_message = "".to_owned();
if let Some(panic_info) = metadata.panic.as_ref() {
- form = form.text("sentry[logentry][formatted]", panic_info.message.clone());
- form = form.text("span", panic_info.span.clone());
- // TODO: add gpu-context, feature-flag-context, and more of device-context like gpu
- // name, screen resolution, available ram, device model, etc
+ panic_message = panic_info.message.clone();
+ form = form
+ .text("sentry[logentry][formatted]", panic_info.message.clone())
+ .text("span", panic_info.span.clone());
+ }
+ if let Some(minidump_error) = metadata.minidump_error.clone() {
+ form = form.text("minidump_error", minidump_error);
}
+ if let Some(id) = installation_id.clone() {
+ form = form.text("sentry[user][id]", id)
+ }
+
+ ::telemetry::event!(
+ "Minidump Uploaded",
+ panic_message = panic_message,
+ crashed_version = metadata.init.zed_version.clone(),
+ commit_sha = metadata.init.commit_sha.clone(),
+ );
+
+ let gpu_count = metadata.gpus.len();
+ for (index, gpu) in metadata.gpus.iter().cloned().enumerate() {
+ let system_specs::GpuInfo {
+ device_name,
+ device_pci_id,
+ vendor_name,
+ vendor_pci_id,
+ driver_version,
+ driver_name,
+ } = gpu;
+ let num = if gpu_count == 1 && metadata.active_gpu.is_none() {
+ String::new()
+ } else {
+ index.to_string()
+ };
+ let name = format!("gpu{num}");
+ let root = format!("sentry[contexts][{name}]");
+ form = form
+ .text(
+ format!("{root}[Description]"),
+ "A GPU found on the users system. May or may not be the GPU Zed is running on",
+ )
+ .text(format!("{root}[type]"), "gpu")
+ .text(format!("{root}[name]"), device_name.unwrap_or(name))
+ .text(format!("{root}[id]"), format!("{:#06x}", device_pci_id))
+ .text(
+ format!("{root}[vendor_id]"),
+ format!("{:#06x}", vendor_pci_id),
+ )
+ .text_if_some(format!("{root}[vendor_name]"), vendor_name)
+ .text_if_some(format!("{root}[driver_version]"), driver_version)
+ .text_if_some(format!("{root}[driver_name]"), driver_name);
+ }
+ if let Some(active_gpu) = metadata.active_gpu.clone() {
+ form = form
+ .text(
+ "sentry[contexts][Active_GPU][Description]",
+ "The GPU Zed is running on",
+ )
+ .text("sentry[contexts][Active_GPU][type]", "gpu")
+ .text("sentry[contexts][Active_GPU][name]", active_gpu.device_name)
+ .text(
+ "sentry[contexts][Active_GPU][driver_version]",
+ active_gpu.driver_info,
+ )
+ .text(
+ "sentry[contexts][Active_GPU][driver_name]",
+ active_gpu.driver_name,
+ )
+ .text(
+ "sentry[contexts][Active_GPU][is_software_emulated]",
+ active_gpu.is_software_emulated.to_string(),
+ );
+ }
+
+    // TODO: feature-flag-context, and more of device-context like screen resolution, available RAM, device model, etc.
let mut response_text = String::new();
let mut response = http.send_multipart_form(endpoint, form).await?;
@@ -621,6 +706,27 @@ async fn upload_minidump(
Ok(())
}
+trait FormExt {
+ fn text_if_some(
+ self,
+ label: impl Into<std::borrow::Cow<'static, str>>,
+ value: Option<impl Into<std::borrow::Cow<'static, str>>>,
+ ) -> Self;
+}
+
+impl FormExt for Form {
+ fn text_if_some(
+ self,
+ label: impl Into<std::borrow::Cow<'static, str>>,
+ value: Option<impl Into<std::borrow::Cow<'static, str>>>,
+ ) -> Self {
+ match value {
+ Some(value) => self.text(label.into(), value.into()),
+ None => self,
+ }
+ }
+}
+
async fn upload_panic(
http: &Arc<HttpClientWithUrl>,
panic_report_url: &Url,
@@ -32,7 +32,9 @@ use gpui::{
};
use image_viewer::ImageInfo;
use language::Capability;
-use language_tools::lsp_tool::{self, LspTool};
+use language_onboarding::BasedPyrightBanner;
+use language_tools::lsp_button::{self, LspButton};
+use language_tools::lsp_log_view::LspLogToolbarItemView;
use migrate::{MigrationBanner, MigrationEvent, MigrationNotification, MigrationType};
use migrator::{migrate_keymap, migrate_settings};
use onboarding::DOCS_URL;
@@ -47,7 +49,7 @@ use project::{DirectoryLister, ProjectItem};
use project_panel::ProjectPanel;
use prompt_store::PromptBuilder;
use quick_action_bar::QuickActionBar;
-use recent_projects::open_ssh_project;
+use recent_projects::open_remote_project;
use release_channel::{AppCommitSha, ReleaseChannel};
use rope::Rope;
use search::project_search::ProjectSearchBar;
@@ -282,6 +284,8 @@ pub fn build_window_options(display_uuid: Option<Uuid>, cx: &mut App) -> WindowO
_ => gpui::WindowDecorations::Client,
};
+ let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs;
+
WindowOptions {
titlebar: Some(TitlebarOptions {
title: None,
@@ -301,6 +305,12 @@ pub fn build_window_options(display_uuid: Option<Uuid>, cx: &mut App) -> WindowO
width: px(360.0),
height: px(240.0),
}),
+ tabbing_identifier: if use_system_window_tabs {
+ Some(String::from("zed"))
+ } else {
+ None
+ },
+ ..Default::default()
}
}
@@ -344,7 +354,17 @@ pub fn initialize_workspace(
if let Some(specs) = window.gpu_specs() {
log::info!("Using GPU: {:?}", specs);
- show_software_emulation_warning_if_needed(specs, window, cx);
+ show_software_emulation_warning_if_needed(specs.clone(), window, cx);
+ if let Some((crash_server, message)) = crashes::CRASH_HANDLER
+ .get()
+ .zip(bincode::serialize(&specs).ok())
+ && let Err(err) = crash_server.send_message(3, message)
+ {
+ log::warn!(
+ "Failed to store active gpu info for crash reporting: {}",
+ err
+ );
+ }
}
let edit_prediction_menu_handle = PopoverMenuHandle::default();
@@ -378,12 +398,12 @@ pub fn initialize_workspace(
let vim_mode_indicator = cx.new(|cx| vim::ModeIndicator::new(window, cx));
let image_info = cx.new(|_cx| ImageInfo::new(workspace));
- let lsp_tool_menu_handle = PopoverMenuHandle::default();
- let lsp_tool =
- cx.new(|cx| LspTool::new(workspace, lsp_tool_menu_handle.clone(), window, cx));
+ let lsp_button_menu_handle = PopoverMenuHandle::default();
+ let lsp_button =
+ cx.new(|cx| LspButton::new(workspace, lsp_button_menu_handle.clone(), window, cx));
workspace.register_action({
- move |_, _: &lsp_tool::ToggleMenu, window, cx| {
- lsp_tool_menu_handle.toggle(window, cx);
+ move |_, _: &lsp_button::ToggleMenu, window, cx| {
+ lsp_button_menu_handle.toggle(window, cx);
}
});
@@ -391,7 +411,7 @@ pub fn initialize_workspace(
cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
workspace.status_bar().update(cx, |status_bar, cx| {
status_bar.add_left_item(search_button, window, cx);
- status_bar.add_left_item(lsp_tool, window, cx);
+ status_bar.add_left_item(lsp_button, window, cx);
status_bar.add_left_item(diagnostic_summary, window, cx);
status_bar.add_left_item(activity_indicator, window, cx);
status_bar.add_right_item(edit_prediction_button, window, cx);
@@ -526,8 +546,6 @@ fn initialize_panels(
window: &mut Window,
cx: &mut Context<Workspace>,
) {
- let prompt_builder = prompt_builder.clone();
-
cx.spawn_in(window, async move |workspace_handle, cx| {
let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone());
let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone());
@@ -910,7 +928,7 @@ fn register_actions(
capture_audio(workspace, window, cx);
});
- if workspace.project().read(cx).is_via_ssh() {
+ if workspace.project().read(cx).is_via_remote_server() {
workspace.register_action({
move |workspace, _: &OpenServerSettings, window, cx| {
let open_server_settings = workspace
@@ -972,7 +990,7 @@ fn initialize_pane(
toolbar.add_item(diagnostic_editor_controls, window, cx);
let project_search_bar = cx.new(|_| ProjectSearchBar::new());
toolbar.add_item(project_search_bar, window, cx);
- let lsp_log_item = cx.new(|_| language_tools::LspLogToolbarItemView::new());
+ let lsp_log_item = cx.new(|_| LspLogToolbarItemView::new());
toolbar.add_item(lsp_log_item, window, cx);
let dap_log_item = cx.new(|_| debugger_tools::DapLogToolbarItemView::new());
toolbar.add_item(dap_log_item, window, cx);
@@ -984,6 +1002,8 @@ fn initialize_pane(
toolbar.add_item(project_diff_toolbar, window, cx);
let agent_diff_toolbar = cx.new(AgentDiffToolbar::new);
toolbar.add_item(agent_diff_toolbar, window, cx);
+ let basedpyright_banner = cx.new(|cx| BasedPyrightBanner::new(workspace, cx));
+ toolbar.add_item(basedpyright_banner, window, cx);
})
});
}
@@ -1150,7 +1170,7 @@ fn open_log_file(workspace: &mut Workspace, window: &mut Window, cx: &mut Contex
};
let project = workspace.project().clone();
let buffer = project.update(cx, |project, cx| {
- project.create_local_buffer(&log, None, cx)
+ project.create_local_buffer(&log, None, false, cx)
});
let buffer = cx
@@ -1300,15 +1320,31 @@ pub fn handle_keymap_file_changes(
})
.detach();
- let mut current_mapping = settings::get_key_equivalents(cx.keyboard_layout().id());
- cx.on_keyboard_layout_change(move |cx| {
- let next_mapping = settings::get_key_equivalents(cx.keyboard_layout().id());
- if next_mapping != current_mapping {
- current_mapping = next_mapping;
- keyboard_layout_tx.unbounded_send(()).ok();
- }
- })
- .detach();
+ #[cfg(target_os = "windows")]
+ {
+ let mut current_layout_id = cx.keyboard_layout().id().to_string();
+ cx.on_keyboard_layout_change(move |cx| {
+ let next_layout_id = cx.keyboard_layout().id();
+ if next_layout_id != current_layout_id {
+ current_layout_id = next_layout_id.to_string();
+ keyboard_layout_tx.unbounded_send(()).ok();
+ }
+ })
+ .detach();
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ {
+ let mut current_mapping = cx.keyboard_mapper().get_key_equivalents().cloned();
+ cx.on_keyboard_layout_change(move |cx| {
+ let next_mapping = cx.keyboard_mapper().get_key_equivalents();
+ if current_mapping.as_ref() != next_mapping {
+ current_mapping = next_mapping.cloned();
+ keyboard_layout_tx.unbounded_send(()).ok();
+ }
+ })
+ .detach();
+ }
load_default_keymap(cx);
@@ -1394,7 +1430,7 @@ fn show_keymap_file_load_error(
cx: &mut App,
) {
show_markdown_app_notification(
- notification_id.clone(),
+ notification_id,
error_message,
"Open Keymap File".into(),
|window, cx| {
@@ -1474,7 +1510,7 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec<KeyBinding>) {
workspace::NewWindow,
)]);
// todo: nicer api here?
- settings_ui::keybindings::KeymapEventChannel::trigger_keymap_changed(cx);
+ keymap_editor::KeymapEventChannel::trigger_keymap_changed(cx);
}
pub fn load_default_keymap(cx: &mut App) {
@@ -1535,12 +1571,12 @@ pub fn open_new_ssh_project_from_project(
cx: &mut Context<Workspace>,
) -> Task<anyhow::Result<()>> {
let app_state = workspace.app_state().clone();
- let Some(ssh_client) = workspace.project().read(cx).ssh_client() else {
+ let Some(ssh_client) = workspace.project().read(cx).remote_client() else {
return Task::ready(Err(anyhow::anyhow!("Not an ssh project")));
};
let connection_options = ssh_client.read(cx).connection_options();
cx.spawn_in(window, async move |_, cx| {
- open_ssh_project(
+ open_remote_project(
connection_options,
paths,
app_state,
@@ -1620,13 +1656,12 @@ fn open_local_file(
.read_with(cx, |tree, _| tree.abs_path().join(settings_relative_path))?;
let fs = project.read_with(cx, |project, _| project.fs().clone())?;
- let file_exists = fs
- .metadata(&full_path)
+
+ fs.metadata(&full_path)
.await
.ok()
.flatten()
- .is_some_and(|metadata| !metadata.is_dir && !metadata.is_fifo);
- file_exists
+ .is_some_and(|metadata| !metadata.is_dir && !metadata.is_fifo)
};
if !file_exists {
@@ -1717,7 +1752,7 @@ fn open_telemetry_log_file(
workspace.update_in( cx, |workspace, window, cx| {
let project = workspace.project().clone();
- let buffer = project.update(cx, |project, cx| project.create_local_buffer(&content, json, cx));
+ let buffer = project.update(cx, |project, cx| project.create_local_buffer(&content, json,false, cx));
let buffer = cx.new(|cx| {
MultiBuffer::singleton(buffer, cx).with_title("Telemetry Log".into())
});
@@ -1756,7 +1791,8 @@ fn open_bundled_file(
workspace.with_local_workspace(window, cx, |workspace, window, cx| {
let project = workspace.project();
let buffer = project.update(cx, move |project, cx| {
- let buffer = project.create_local_buffer(text.as_ref(), language, cx);
+ let buffer =
+ project.create_local_buffer(text.as_ref(), language, false, cx);
buffer.update(cx, |buffer, cx| {
buffer.set_capability(Capability::ReadOnly, cx);
});
@@ -4349,6 +4385,8 @@ mod tests {
| "vim::PushJump"
| "vim::PushDigraph"
| "vim::PushLiteral"
+ | "vim::PushHelixNext"
+ | "vim::PushHelixPrevious"
| "vim::Number"
| "vim::SelectRegister"
| "git::StageAndNext"
@@ -4365,6 +4403,7 @@ mod tests {
| "workspace::MoveItemToPaneInDirection"
| "workspace::OpenTerminal"
| "workspace::SendKeystrokes"
+ | "agent::NewNativeAgentThreadFromSummary"
| "zed::OpenBrowser"
| "zed::OpenZedUrl" => {}
_ => {
@@ -4379,7 +4418,7 @@ mod tests {
}
}
}
- if errors.len() > 0 {
+ if !errors.is_empty() {
panic!(
"Failed to build actions using {{}} as input: {:?}. Errors:\n{}",
failing_names,
@@ -4463,6 +4502,7 @@ mod tests {
"keymap_editor",
"keystroke_input",
"language_selector",
+ "line_ending",
"lsp_tool",
"markdown",
"menu",
@@ -4485,6 +4525,7 @@ mod tests {
"snippets",
"supermaven",
"svg",
+ "syntax_tree_view",
"tab_switcher",
"task",
"terminal",
@@ -4494,6 +4535,7 @@ mod tests {
"toolchain",
"variable_list",
"vim",
+ "window",
"workspace",
"zed",
"zed_predict_onboarding",
@@ -4616,7 +4658,7 @@ mod tests {
gpui_tokio::init(cx);
vim_mode_setting::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- audio::init((), cx);
+ audio::init(cx);
channel::init(&app_state.client, app_state.user_store.clone(), cx);
call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx);
@@ -4721,7 +4763,7 @@ mod tests {
// and key strokes contain the given key
bindings
.into_iter()
- .any(|binding| binding.keystrokes().iter().any(|k| k.key == key)),
+ .any(|binding| binding.keystrokes().iter().any(|k| k.key() == key)),
"On {} Failed to find {} with key binding {}",
line,
action.name(),
@@ -4787,7 +4829,7 @@ mod tests {
cx.background_executor.run_until_parked();
// 5. Critical: Verify .zed is actually excluded from worktree
- let worktree = cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap().clone());
+ let worktree = cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap());
let has_zed_entry = cx.update(|cx| worktree.read(cx).entry_for_path(".zed").is_some());
@@ -4823,7 +4865,7 @@ mod tests {
.await
.unwrap();
- let new_content_str = new_content.clone();
+ let new_content_str = new_content;
eprintln!("New settings content: {}", new_content_str);
// The bug causes the settings to be overwritten with empty settings
@@ -4837,4 +4879,34 @@ mod tests {
"BUG FOUND: Project settings were overwritten when opening via command - original custom content was lost"
);
}
+
+ #[gpui::test]
+ fn test_settings_defaults(cx: &mut TestAppContext) {
+ cx.update(|cx| {
+ settings::init(cx);
+ workspace::init_settings(cx);
+ title_bar::init(cx);
+ editor::init_settings(cx);
+ debugger_ui::init(cx);
+ });
+ let default_json =
+ cx.read(|cx| cx.global::<SettingsStore>().raw_default_settings().clone());
+
+ let all_paths = cx.read(|cx| settings_ui::SettingsUiTree::new(cx).all_paths(cx));
+ let mut failures = Vec::new();
+ for path in all_paths {
+ if settings_ui::read_settings_value_from_path(&default_json, &path).is_none() {
+ failures.push(path);
+ }
+ }
+ if !failures.is_empty() {
+ panic!(
+ "No default value found for paths: {:#?}",
+ failures
+ .into_iter()
+ .map(|path| path.join("."))
+ .collect::<Vec<_>>()
+ );
+ }
+ }
}
@@ -1,6 +1,5 @@
use collab_ui::collab_panel;
use gpui::{Menu, MenuItem, OsAction};
-use settings_ui::keybindings;
use terminal_view::terminal_panel;
pub fn app_menus() -> Vec<Menu> {
@@ -17,7 +16,7 @@ pub fn app_menus() -> Vec<Menu> {
name: "Settings".into(),
items: vec![
MenuItem::action("Open Settings", super::OpenSettings),
- MenuItem::action("Open Key Bindings", keybindings::OpenKeymapEditor),
+ MenuItem::action("Open Key Bindings", keymap_editor::OpenKeymapEditor),
MenuItem::action("Open Default Settings", super::OpenDefaultSettings),
MenuItem::action(
"Open Default Key Bindings",
@@ -33,8 +33,6 @@ use workspace::{
pub fn init(app_state: Arc<AppState>, cx: &mut App) {
workspace::register_serializable_item::<ComponentPreview>(cx);
- let app_state = app_state.clone();
-
cx.observe_new(move |workspace: &mut Workspace, _window, cx| {
let app_state = app_state.clone();
let project = workspace.project().clone();
@@ -462,12 +460,12 @@ impl ComponentPreview {
Vec::new()
};
if valid_positions.is_empty() {
- Label::new(name.clone()).into_any_element()
+ Label::new(name).into_any_element()
} else {
- HighlightedLabel::new(name.clone(), valid_positions).into_any_element()
+ HighlightedLabel::new(name, valid_positions).into_any_element()
}
} else {
- Label::new(name.clone()).into_any_element()
+ Label::new(name).into_any_element()
})
.selectable(true)
.toggle_state(selected)
@@ -685,7 +683,7 @@ impl ComponentPreview {
.h_full()
.py_8()
.bg(cx.theme().colors().panel_background)
- .children(self.active_thread.clone().map(|thread| thread.clone()))
+ .children(self.active_thread.clone())
.when_none(&self.active_thread.clone(), |this| {
this.child("No active thread")
}),
@@ -716,7 +714,7 @@ impl Render for ComponentPreview {
if input.is_empty(cx) {
String::new()
} else {
- input.editor().read(cx).text(cx).to_string()
+ input.editor().read(cx).text(cx)
}
});
@@ -929,7 +927,7 @@ impl SerializableItem for ComponentPreview {
Err(_) => ActivePageId::default(),
};
- let user_store = project.read(cx).user_store().clone();
+ let user_store = project.read(cx).user_store();
let language_registry = project.read(cx).languages().clone();
let preview_page = if deserialized_active_page.0 == ActivePageId::default().0 {
Some(PreviewPage::default())
@@ -940,7 +938,7 @@ impl SerializableItem for ComponentPreview {
let found_component = all_components.iter().find(|c| c.id().0 == component_str);
if let Some(component) = found_component {
- Some(PreviewPage::Component(component.id().clone()))
+ Some(PreviewPage::Component(component.id()))
} else {
Some(PreviewPage::default())
}
@@ -1057,7 +1055,7 @@ impl ComponentPreviewPage {
.rounded_sm()
.bg(color.color(cx).alpha(0.12))
.child(
- Label::new(status.clone().to_string())
+ Label::new(status.to_string())
.size(LabelSize::Small)
.color(color),
),
@@ -1,10 +1,17 @@
use anyhow::Result;
-use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql};
+use db::{
+ query,
+ sqlez::{domain::Domain, statement::Statement, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+};
use workspace::{ItemId, WorkspaceDb, WorkspaceId};
-define_connection! {
- pub static ref COMPONENT_PREVIEW_DB: ComponentPreviewDb<WorkspaceDb> =
- &[sql!(
+pub struct ComponentPreviewDb(ThreadSafeConnection);
+
+impl Domain for ComponentPreviewDb {
+ const NAME: &str = stringify!(ComponentPreviewDb);
+
+ const MIGRATIONS: &[&str] = &[sql!(
CREATE TABLE component_previews (
workspace_id INTEGER,
item_id INTEGER UNIQUE,
@@ -13,9 +20,11 @@ define_connection! {
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
- )];
+ )];
}
+db::static_connection!(COMPONENT_PREVIEW_DB, ComponentPreviewDb, [WorkspaceDb]);
+
impl ComponentPreviewDb {
pub async fn save_active_page(
&self,
@@ -8,8 +8,7 @@ use settings::SettingsStore;
use std::{cell::RefCell, rc::Rc, sync::Arc};
use supermaven::{Supermaven, SupermavenCompletionProvider};
use ui::Window;
-use workspace::Workspace;
-use zeta::{ProviderDataCollection, ZetaEditPredictionProvider};
+use zeta::ZetaEditPredictionProvider;
pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
let editors: Rc<RefCell<HashMap<WeakEntity<Editor>, AnyWindowHandle>>> = Rc::default();
@@ -60,36 +59,25 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
cx.subscribe(&user_store, {
let editors = editors.clone();
let client = client.clone();
- move |user_store, event, cx| match event {
- client::user::Event::PrivateUserInfoUpdated => {
- assign_edit_prediction_providers(
- &editors,
- provider,
- &client,
- user_store.clone(),
- cx,
- );
+
+ move |user_store, event, cx| {
+ if let client::user::Event::PrivateUserInfoUpdated = event {
+ assign_edit_prediction_providers(&editors, provider, &client, user_store, cx);
}
- _ => {}
}
})
.detach();
cx.observe_global::<SettingsStore>({
- let editors = editors.clone();
- let client = client.clone();
let user_store = user_store.clone();
move |cx| {
let new_provider = all_language_settings(None, cx).edit_predictions.provider;
if new_provider != provider {
- let tos_accepted = user_store.read(cx).has_accepted_terms_of_service();
-
telemetry::event!(
"Edit Prediction Provider Changed",
from = provider,
to = new_provider,
- zed_ai_tos_accepted = tos_accepted,
);
provider = new_provider;
@@ -100,28 +88,6 @@ pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
user_store.clone(),
cx,
);
-
- if !tos_accepted {
- match provider {
- EditPredictionProvider::Zed => {
- let Some(window) = cx.active_window() else {
- return;
- };
-
- window
- .update(cx, |_, window, cx| {
- window.dispatch_action(
- Box::new(zed_actions::OpenZedPredictOnboarding),
- cx,
- );
- })
- .ok();
- }
- EditPredictionProvider::None
- | EditPredictionProvider::Copilot
- | EditPredictionProvider::Supermaven => {}
- }
- }
}
}
})
@@ -237,27 +203,19 @@ fn assign_edit_prediction_provider(
}
}
- let workspace = window
- .root::<Workspace>()
- .flatten()
- .map(|workspace| workspace.downgrade());
-
- let zeta =
- zeta::Zeta::register(workspace, worktree, client.clone(), user_store, cx);
+ let zeta = zeta::Zeta::register(worktree, client.clone(), user_store, cx);
if let Some(buffer) = &singleton_buffer
&& buffer.read(cx).file().is_some()
+ && let Some(project) = editor.project()
{
zeta.update(cx, |zeta, cx| {
- zeta.register_buffer(buffer, cx);
+ zeta.register_buffer(buffer, project, cx);
});
}
- let data_collection =
- ProviderDataCollection::new(zeta.clone(), singleton_buffer, cx);
-
let provider =
- cx.new(|_| zeta::ZetaEditPredictionProvider::new(zeta, data_collection));
+ cx.new(|_| zeta::ZetaEditPredictionProvider::new(zeta, singleton_buffer));
editor.set_edit_prediction_provider(Some(provider), window, cx);
}
@@ -17,8 +17,8 @@ use gpui::{App, AsyncApp, Global, WindowHandle};
use language::Point;
use onboarding::FIRST_OPEN;
use onboarding::show_onboarding_view;
-use recent_projects::{SshSettings, open_ssh_project};
-use remote::SshConnectionOptions;
+use recent_projects::{SshSettings, open_remote_project};
+use remote::{RemoteConnectionOptions, WslConnectionOptions};
use settings::Settings;
use std::path::{Path, PathBuf};
use std::sync::Arc;
@@ -26,6 +26,7 @@ use std::thread;
use std::time::Duration;
use util::ResultExt;
use util::paths::PathWithPosition;
+use workspace::PathList;
use workspace::item::ItemHandle;
use workspace::{AppState, OpenOptions, SerializedWorkspaceLocation, Workspace};
@@ -36,7 +37,7 @@ pub struct OpenRequest {
pub diff_paths: Vec<[String; 2]>,
pub open_channel_notes: Vec<(u64, Option<String>)>,
pub join_channel: Option<u64>,
- pub ssh_connection: Option<SshConnectionOptions>,
+ pub remote_connection: Option<RemoteConnectionOptions>,
}
#[derive(Debug)]
@@ -50,6 +51,23 @@ pub enum OpenRequestKind {
impl OpenRequest {
pub fn parse(request: RawOpenRequest, cx: &App) -> Result<Self> {
let mut this = Self::default();
+
+ this.diff_paths = request.diff_paths;
+ if let Some(wsl) = request.wsl {
+ let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') {
+ if user.is_empty() {
+ anyhow::bail!("user is empty in wsl argument");
+ }
+ (Some(user.to_string()), distro.to_string())
+ } else {
+ (None, wsl)
+ };
+ this.remote_connection = Some(RemoteConnectionOptions::Wsl(WslConnectionOptions {
+ distro_name,
+ user,
+ }));
+ }
+
for url in request.urls {
if let Some(server_name) = url.strip_prefix("zed-cli://") {
this.kind = Some(OpenRequestKind::CliConnection(connect_to_cli(server_name)?));
@@ -79,8 +97,6 @@ impl OpenRequest {
}
}
- this.diff_paths = request.diff_paths;
-
Ok(this)
}
@@ -102,21 +118,20 @@ impl OpenRequest {
self.open_paths.is_empty(),
"cannot open both local and ssh paths"
);
- let mut connection_options = SshSettings::get_global(cx).connection_options_for(
- host.clone(),
- port,
- username.clone(),
- );
+ let mut connection_options =
+ SshSettings::get_global(cx).connection_options_for(host, port, username);
if let Some(password) = url.password() {
connection_options.password = Some(password.to_string());
}
- if let Some(ssh_connection) = &self.ssh_connection {
+
+ let connection_options = RemoteConnectionOptions::Ssh(connection_options);
+ if let Some(ssh_connection) = &self.remote_connection {
anyhow::ensure!(
*ssh_connection == connection_options,
- "cannot open multiple ssh connections"
+ "cannot open multiple different remote connections"
);
}
- self.ssh_connection = Some(connection_options);
+ self.remote_connection = Some(connection_options);
self.parse_file_path(url.path());
Ok(())
}
@@ -154,6 +169,7 @@ pub struct OpenListener(UnboundedSender<RawOpenRequest>);
pub struct RawOpenRequest {
pub urls: Vec<String>,
pub diff_paths: Vec<[String; 2]>,
+ pub wsl: Option<String>,
}
impl Global for OpenListener {}
@@ -305,13 +321,21 @@ pub async fn handle_cli_connection(
paths,
diff_paths,
wait,
+ wsl,
open_new_workspace,
env,
user_data_dir: _,
} => {
if !urls.is_empty() {
cx.update(|cx| {
- match OpenRequest::parse(RawOpenRequest { urls, diff_paths }, cx) {
+ match OpenRequest::parse(
+ RawOpenRequest {
+ urls,
+ diff_paths,
+ wsl,
+ },
+ cx,
+ ) {
Ok(open_request) => {
handle_open_request(open_request, app_state.clone(), cx);
responses.send(CliResponse::Exit { status: 0 }).log_err();
@@ -364,12 +388,14 @@ async fn open_workspaces(
if open_new_workspace == Some(true) {
Vec::new()
} else {
- let locations = restorable_workspace_locations(cx, &app_state).await;
- locations.unwrap_or_default()
+ restorable_workspace_locations(cx, &app_state)
+ .await
+ .unwrap_or_default()
}
} else {
- vec![SerializedWorkspaceLocation::from_local_paths(
- paths.into_iter().map(PathBuf::from),
+ vec![(
+ SerializedWorkspaceLocation::Local,
+ PathList::new(&paths.into_iter().map(PathBuf::from).collect::<Vec<_>>()),
)]
};
@@ -397,9 +423,9 @@ async fn open_workspaces(
// If there are paths to open, open a workspace for each grouping of paths
let mut errored = false;
- for location in grouped_locations {
+ for (location, workspace_paths) in grouped_locations {
match location {
- SerializedWorkspaceLocation::Local(workspace_paths, _) => {
+ SerializedWorkspaceLocation::Local => {
let workspace_paths = workspace_paths
.paths()
.iter()
@@ -422,30 +448,26 @@ async fn open_workspaces(
errored = true
}
}
- SerializedWorkspaceLocation::Ssh(ssh) => {
+ SerializedWorkspaceLocation::Remote(mut connection) => {
let app_state = app_state.clone();
- let connection_options = cx.update(|cx| {
- SshSettings::get_global(cx)
- .connection_options_for(ssh.host, ssh.port, ssh.user)
- });
- if let Ok(connection_options) = connection_options {
- cx.spawn(async move |cx| {
- open_ssh_project(
- connection_options,
- ssh.paths.into_iter().map(PathBuf::from).collect(),
- app_state,
- OpenOptions::default(),
- cx,
- )
- .await
- .log_err();
- })
- .detach();
- // We don't set `errored` here if `open_ssh_project` fails, because for ssh projects, the
- // error is displayed in the window.
- } else {
- errored = false;
+ if let RemoteConnectionOptions::Ssh(options) = &mut connection {
+ cx.update(|cx| {
+ SshSettings::get_global(cx)
+ .fill_connection_options_from_settings(options)
+ })?;
}
+ cx.spawn(async move |cx| {
+ open_remote_project(
+ connection,
+ workspace_paths.paths().to_vec(),
+ app_state,
+ OpenOptions::default(),
+ cx,
+ )
+ .await
+ .log_err();
+ })
+ .detach();
}
}
}
@@ -587,6 +609,7 @@ mod tests {
};
use editor::Editor;
use gpui::TestAppContext;
+ use remote::SshConnectionOptions;
use serde_json::json;
use std::sync::Arc;
use util::path;
@@ -609,8 +632,8 @@ mod tests {
.unwrap()
});
assert_eq!(
- request.ssh_connection.unwrap(),
- SshConnectionOptions {
+ request.remote_connection.unwrap(),
+ RemoteConnectionOptions::Ssh(SshConnectionOptions {
host: "localhost".into(),
username: Some("me".into()),
port: None,
@@ -619,7 +642,7 @@ mod tests {
port_forwards: None,
nickname: None,
upload_binary_over_ssh: false,
- }
+ })
);
assert_eq!(request.open_paths, vec!["/"]);
}
@@ -161,7 +161,7 @@ impl Render for QuickActionBar {
IconName::ZedAssistant,
false,
Box::new(InlineAssist::default()),
- focus_handle.clone(),
+ focus_handle,
"Inline Assist",
move |_, window, cx| {
window.dispatch_action(Box::new(InlineAssist::default()), cx);
@@ -215,7 +215,7 @@ impl Render for QuickActionBar {
)
})
.on_click({
- let focus = focus.clone();
+ let focus = focus;
move |_, window, cx| {
focus.dispatch_action(
&ToggleCodeActions {
@@ -72,7 +72,10 @@ impl QuickActionBar {
Tooltip::with_meta(
tooltip_text,
Some(open_action_for_tooltip),
- format!("{} to open in a split", text_for_keystroke(&alt_click, cx)),
+ format!(
+ "{} to open in a split",
+ text_for_keystroke(&alt_click.modifiers, &alt_click.key, cx)
+ ),
window,
cx,
)
@@ -1,7 +1,5 @@
-use std::time::Duration;
-
use gpui::ElementId;
-use gpui::{Animation, AnimationExt, AnyElement, Entity, Transformation, percentage};
+use gpui::{AnyElement, Entity};
use picker::Picker;
use repl::{
ExecutionState, JupyterSettings, Kernel, KernelSpecification, KernelStatus, Session,
@@ -10,8 +8,8 @@ use repl::{
worktree_id_for_editor,
};
use ui::{
- ButtonLike, ContextMenu, IconWithIndicator, Indicator, IntoElement, PopoverMenu,
- PopoverMenuHandle, Tooltip, prelude::*,
+ ButtonLike, CommonAnimationExt, ContextMenu, IconWithIndicator, Indicator, IntoElement,
+ PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*,
};
use util::ResultExt;
@@ -196,7 +194,6 @@ impl QuickActionBar {
.into_any_element()
},
{
- let editor = editor.clone();
move |window, cx| {
repl::restart(editor.clone(), window, cx);
}
@@ -225,11 +222,7 @@ impl QuickActionBar {
.child(if menu_state.icon_is_animating {
Icon::new(menu_state.icon)
.color(menu_state.icon_color)
- .with_animation(
- "arrow-circle",
- Animation::new(Duration::from_secs(5)).repeat(),
- |icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
- )
+ .with_rotate_animation(5)
.into_any_element()
} else {
IconWithIndicator::new(
@@ -346,7 +339,7 @@ impl QuickActionBar {
),
Tooltip::text("Select Kernel"),
)
- .with_handle(menu_handle.clone())
+ .with_handle(menu_handle)
.into_any_element()
}
@@ -362,7 +355,7 @@ impl QuickActionBar {
.shape(ui::IconButtonShape::Square)
.icon_size(ui::IconSize::Small)
.icon_color(Color::Muted)
- .tooltip(Tooltip::text(tooltip.clone()))
+ .tooltip(Tooltip::text(tooltip))
.on_click(|_, _window, cx| {
cx.open_url(&format!("{}#installation", ZED_REPL_DOCUMENTATION))
}),
@@ -153,6 +153,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> {
urls,
diff_paths,
wait: false,
+ wsl: args.wsl.clone(),
open_new_workspace: None,
env: None,
user_data_dir: args.user_data_dir.clone(),
@@ -156,7 +156,10 @@ pub mod workspace {
#[action(deprecated_aliases = ["editor::CopyPath", "outline_panel::CopyPath", "project_panel::CopyPath"])]
CopyPath,
#[action(deprecated_aliases = ["editor::CopyRelativePath", "outline_panel::CopyRelativePath", "project_panel::CopyRelativePath"])]
- CopyRelativePath
+ CopyRelativePath,
+ /// Opens the selected file with the system's default application.
+ #[action(deprecated_aliases = ["project_panel::OpenWithSystem"])]
+ OpenWithSystem,
]
);
}
@@ -281,13 +284,19 @@ pub mod agent {
OpenSettings,
/// Opens the agent onboarding modal.
OpenOnboardingModal,
+ /// Opens the ACP onboarding modal.
+ OpenAcpOnboardingModal,
+ /// Opens the Claude Code onboarding modal.
+ OpenClaudeCodeOnboardingModal,
/// Resets the agent onboarding state.
ResetOnboarding,
/// Starts a chat conversation with the agent.
Chat,
/// Toggles the language model selector dropdown.
#[action(deprecated_aliases = ["assistant::ToggleModelSelector", "assistant2::ToggleModelSelector"])]
- ToggleModelSelector
+ ToggleModelSelector,
+ /// Triggers re-authentication on Gemini
+ ReauthenticateAgent
]
);
}
@@ -0,0 +1,18 @@
+[package]
+name = "zed_env_vars"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/zed_env_vars.rs"
+
+[features]
+default = []
+
+[dependencies]
+workspace-hack.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,6 @@
+use std::sync::LazyLock;
+
+/// Whether Zed is running in stateless mode.
+/// When true, Zed will use in-memory databases instead of persistent storage.
+pub static ZED_STATELESS: LazyLock<bool> =
+ LazyLock::new(|| std::env::var("ZED_STATELESS").is_ok_and(|v| !v.is_empty()));
@@ -34,6 +34,7 @@ futures.workspace = true
gpui.workspace = true
http_client.workspace = true
indoc.workspace = true
+itertools.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
@@ -46,6 +47,7 @@ release_channel.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
+strum.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
theme.workspace = true
@@ -70,13 +72,13 @@ gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
+parking_lot.workspace = true
reqwest_client = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
tree-sitter-go.workspace = true
tree-sitter-rust.workspace = true
-unindent.workspace = true
workspace = { workspace = true, features = ["test-support"] }
worktree = { workspace = true, features = ["test-support"] }
zlog.workspace = true
@@ -0,0 +1,13 @@
+Zero-Clause BSD
+=============
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
+FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
+DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
@@ -1,109 +1,110 @@
- ^Apache License
- Version 2\.0, January 2004
- http://www\.apache\.org/licenses/
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
- 1\. Definitions\.
+ 1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document\.
+ and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License\.
+ the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
- control with that entity\. For the purposes of this definition,
- "control" means \(i\) the power, direct or indirect, to cause the
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
- otherwise, or \(ii\) ownership of fifty percent \(50%\) or more of the
- outstanding shares, or \(iii\) beneficial ownership of such entity\.
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
- "You" \(or "Your"\) shall mean an individual or Legal Entity
- exercising permissions granted by this License\.
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
- source, and configuration files\.
+ source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
- and conversions to other media types\.
+ and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
- \(an example is provided in the Appendix below\)\.
+ (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on \(or derived from\) the Work and for which the
+ form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship\. For the purposes
+ represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
- separable from, or merely link \(or bind by name\) to the interfaces of,
- the Work and Derivative Works thereof\.
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner\. For the purposes of this definition, "submitted"
+ the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution\."
+ designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work\.
+ subsequently incorporated within the Work.
- 2\. Grant of Copyright License\. Subject to the terms and conditions of
+ 2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
- worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form\.
+ Work and such Derivative Works in Source or Object form.
- 3\. Grant of Patent License\. Subject to the terms and conditions of
+ 3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
- worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable
- \(except as stated in this section\) patent license to make, have made,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
- Contribution\(s\) alone or by combination of their Contribution\(s\)
- with the Work to which such Contribution\(s\) was submitted\. If You
- institute patent litigation against any entity \(including a
- cross\-claim or counterclaim in a lawsuit\) alleging that the Work
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
- as of the date such litigation is filed\.
+ as of the date such litigation is filed.
- 4\. Redistribution\. You may reproduce and distribute copies of the
+ 4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
- \(a\) You must give any other recipients of the Work or
+ (a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
- \(b\) You must cause any modified files to carry prominent notices
+ (b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
- \(c\) You must retain, in the Source form of any Derivative Works
+ (c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
- \(d\) If the Work includes a "NOTICE" text file as part of its
+ (d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
@@ -112,90 +113,77 @@
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
- wherever such third\-party notices normally appear\. The contents
+ wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
- do not modify the License\. You may add Your own attribution
+ do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
- as modifying the License\.
+ as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License\.
+ the conditions stated in this License.
- 5\. Submission of Contributions\. Unless You explicitly state otherwise,
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions\.
+ this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions\.
+ with Licensor regarding such Contributions.
- 6\. Trademarks\. This License does not grant permission to use the trade
+ 6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file\.
+ origin of the Work and reproducing the content of the NOTICE file.
- 7\. Disclaimer of Warranty\. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work \(and each
- Contributor provides its Contributions\) on an "AS IS" BASIS,
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
- of TITLE, NON\-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE\. You are solely responsible for determining the
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License\.
+ risks associated with Your exercise of permissions under this License.
- 8\. Limitation of Liability\. In no event and under no legal theory,
- whether in tort \(including negligence\), contract, or otherwise,
- unless required by applicable law \(such as deliberate and grossly
- negligent acts\) or agreed to in writing, shall any Contributor be
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
- Work \(including but not limited to damages for loss of goodwill,
+ Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses\), even if such Contributor
- has been advised of the possibility of such damages\.
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
- 9\. Accepting Warranty or Additional Liability\. While redistributing
+ 9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
- License\. However, in accepting such obligations, You may act only
+ License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability\.(:?
-
- END OF TERMS AND CONDITIONS)?(:?
-
- APPENDIX: How to apply the Apache License to your work\.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "\[\]"
- replaced with your own identifying information\. \(Don't include
- the brackets!\) The text should be enclosed in the appropriate
- comment syntax for the file format\. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third\-party archives\.)?(:?
+ of your accepting any such warranty or additional liability.
- Copyright .*)?(:?
+ END OF TERMS AND CONDITIONS
- Licensed under the Apache License, Version 2\.0 \(the "License"\);
- you may not use this file except in compliance with the License\.
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www\.apache\.org/licenses/LICENSE\-2\.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
- limitations under the License\.)?$
+ limitations under the License.
@@ -0,0 +1,13 @@
+Copyright 2011 Someone
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
@@ -0,0 +1,187 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright (c) 2017, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
@@ -0,0 +1,20 @@
+Copyright (c) 2024 John Doe
+Some Organization
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list
+of conditions and the following disclaimer.
+
+THIS SOFTWARE IS PROVIDED BY [Name of Organization] “AS IS” AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+SHALL [Name of Organisation] BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+OF SUCH DAMAGE.
@@ -0,0 +1,26 @@
+Copyright (c) 2024
+
+John Doe (john.doe@gmail.com)
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2025, John Doe
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,27 @@
+// Copyright 2024 (this is copy modified from chromium)
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of da company nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,31 @@
+The Glasgow Haskell Compiler License
+
+Copyright 2002, The University Court of the University of Glasgow.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+- Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+- Neither name of the University nor the names of its contributors may be
+used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF
+GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
@@ -0,0 +1,30 @@
+Copyright (c) 2019 Someone
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ * Neither the name of Someone nor the names of other
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,27 @@
+Copyright (c) 2009-2011, Mozilla Foundation and contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the names of the Mozilla Foundation nor the names of project
+ contributors may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2024, John Doe
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -0,0 +1,26 @@
+Copyright (c) 2006-2009 Someone
+Copyright (c) 2009-2013 Some organization
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,22 @@
+(The MIT License)
+
+Copyright (c) someone
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Someone.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
@@ -0,0 +1,35 @@
+Copyright (c) 2025, John Doe
+
+The Universal Permissive License (UPL), Version 1.0
+
+Subject to the condition set forth below, permission is hereby granted to any person
+obtaining a copy of this software, associated documentation and/or data (collectively
+the "Software"), free of charge and under any and all copyright rights in the
+Software, and any and all patent rights owned or freely licensable by each licensor
+hereunder covering either (i) the unmodified Software as contributed to or provided
+by such licensor, or (ii) the Larger Works (as defined below), to deal in both
+
+(a) the Software, and
+
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
+ included with the Software (each a "Larger Work" to which the Software is
+ contributed by such licensors),
+
+without restriction, including without limitation the rights to copy, create
+derivative works of, display, perform, and distribute the Software and make, use,
+sell, offer for sale, import, export, have made, and have sold the Software and the
+Larger Work(s), and to sublicense the foregoing rights on either these or other
+terms.
+
+This license is subject to the following condition:
+
+The above copyright notice and either this complete permission notice or at a minimum
+a reference to the UPL must be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,19 @@
+Copyright (c) 2021 Someone
+
+This software is provided 'as-is', without any express or implied warranty. In
+no event will the authors be held liable for any damages arising from the use of
+this software.
+
+Permission is granted to anyone to use this software for any purpose, including
+commercial applications, and to alter it and redistribute it freely, subject to
+the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not claim
+ that you wrote the original software. If you use this software in a product,
+ an acknowledgment in the product documentation would be appreciated but is
+ not required.
+
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+
+ 3. This notice may not be removed or altered from any source distribution.
@@ -0,0 +1,11 @@
+-- 0..512
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
+FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
+DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -0,0 +1,109 @@
+-- 0..512
+-- 0..0 optional:
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http
+-- 0..1 optional:
+://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-- 0..5
+Apache License
+
+Version 2.0, January 2004
+
+http
+-- 0..1
+://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+-- 1..5
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+-- 1..5
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+-- 1..5
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+-- 1..5
@@ -0,0 +1,14 @@
+-- 0..512
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http
+-- 0..1
+://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
@@ -0,0 +1,32 @@
+-- 0..512
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+-- 1..5
+Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+-- 1..5 optional:
+Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+-- 1..128 optional:
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+-- 1..5
+THIS SOFTWARE IS PROVIDED
+-- 1..128
+“AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL
+-- 1..128
+BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -1,15 +1,12 @@
-^.*ISC License.*
-
-Copyright.*
-
+-- 0..512
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies\.
+copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS\. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\.$
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -1,21 +1,18 @@
-^.*MIT License.*
-
-Copyright.*
-
+-- 0..512
Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files \(the "Software"\), to deal
+of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software\.
+copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE\.$
+SOFTWARE.
@@ -1,35 +1,32 @@
-^Copyright.*
-
-The Universal Permissive License.*
-
+-- 0..512
Subject to the condition set forth below, permission is hereby granted to any person
-obtaining a copy of this software, associated documentation and/or data \(collectively
-the "Software"\), free of charge and under any and all copyright rights in the
+obtaining a copy of this software, associated documentation and/or data (collectively
+the "Software"), free of charge and under any and all copyright rights in the
Software, and any and all patent rights owned or freely licensable by each licensor
-hereunder covering either \(i\) the unmodified Software as contributed to or provided
-by such licensor, or \(ii\) the Larger Works \(as defined below\), to deal in both
+hereunder covering either (i) the unmodified Software as contributed to or provided
+by such licensor, or (ii) the Larger Works (as defined below), to deal in both
-\(a\) the Software, and
+(a) the Software, and
-\(b\) any piece of software and/or hardware listed in the lrgrwrks\.txt file if one is
- included with the Software \(each a "Larger Work" to which the Software is
- contributed by such licensors\),
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
+ included with the Software (each a "Larger Work" to which the Software is
+ contributed by such licensors),
without restriction, including without limitation the rights to copy, create
derivative works of, display, perform, and distribute the Software and make, use,
sell, offer for sale, import, export, have made, and have sold the Software and the
-Larger Work\(s\), and to sublicense the foregoing rights on either these or other
-terms\.
+Larger Work(s), and to sublicense the foregoing rights on either these or other
+terms.
This license is subject to the following condition:
The above copyright notice and either this complete permission notice or at a minimum
a reference to the UPL must be included in all copies or substantial portions of the
-Software\.
+Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
-OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\.$
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,21 @@
+-- 0..512
+This software is provided 'as-is', without any express or implied warranty. In
+no event will the authors be held liable for any damages arising from the use of
+this software.
+
+Permission is granted to anyone to use this software for any purpose, including
+commercial applications, and to alter it and redistribute it freely, subject to
+the following restrictions:
+
+-- 1..5
+The origin of this software must not be misrepresented; you must not claim
+that you wrote the original software. If you use this software in a product,
+an acknowledgment in the product documentation would be appreciated but is
+not required.
+
+-- 1..5
+Altered source versions must be plainly marked as such, and must not be
+misrepresented as being the original software.
+
+-- 1..5
+This notice may not be removed or altered from any source distribution.
@@ -86,7 +86,7 @@ fn feature_gate_predict_edits_actions(cx: &mut App) {
if is_ai_disabled {
filter.hide_action_types(&zeta_all_action_types);
} else if has_feature_flag {
- filter.show_action_types(rate_completion_action_types.iter());
+ filter.show_action_types(&rate_completion_action_types);
} else {
filter.hide_action_types(&rate_completion_action_types);
}
@@ -98,7 +98,7 @@ fn feature_gate_predict_edits_actions(cx: &mut App) {
if !DisableAiSettings::get_global(cx).disable_ai {
if is_enabled {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
- filter.show_action_types(rate_completion_action_types.iter());
+ filter.show_action_types(&rate_completion_action_types);
});
} else {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
@@ -1,6 +1,6 @@
use crate::{
CURSOR_MARKER, EDITABLE_REGION_END_MARKER, EDITABLE_REGION_START_MARKER, START_OF_FILE_MARKER,
- tokens_for_bytes,
+ guess_token_count,
};
use language::{BufferSnapshot, Point};
use std::{fmt::Write, ops::Range};
@@ -22,7 +22,7 @@ pub fn excerpt_for_cursor_position(
let mut remaining_edit_tokens = editable_region_token_limit;
while let Some(parent) = snapshot.syntax_ancestor(scope_range.clone()) {
- let parent_tokens = tokens_for_bytes(parent.byte_range().len());
+ let parent_tokens = guess_token_count(parent.byte_range().len());
let parent_point_range = Point::new(
parent.start_position().row as u32,
parent.start_position().column as u32,
@@ -90,7 +90,7 @@ fn expand_range(
range: Range<Point>,
mut remaining_tokens: usize,
) -> Range<Point> {
- let mut expanded_range = range.clone();
+ let mut expanded_range = range;
expanded_range.start.column = 0;
expanded_range.end.column = snapshot.line_len(expanded_range.end.row);
loop {
@@ -99,7 +99,7 @@ fn expand_range(
if remaining_tokens > 0 && expanded_range.start.row > 0 {
expanded_range.start.row -= 1;
let line_tokens =
- tokens_for_bytes(snapshot.line_len(expanded_range.start.row) as usize);
+ guess_token_count(snapshot.line_len(expanded_range.start.row) as usize);
remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
expanded = true;
}
@@ -107,7 +107,7 @@ fn expand_range(
if remaining_tokens > 0 && expanded_range.end.row < snapshot.max_point().row {
expanded_range.end.row += 1;
expanded_range.end.column = snapshot.line_len(expanded_range.end.row);
- let line_tokens = tokens_for_bytes(expanded_range.end.column as usize);
+ let line_tokens = guess_token_count(expanded_range.end.column as usize);
remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
expanded = true;
}
@@ -149,7 +149,7 @@ mod tests {
let mut rng = rand::thread_rng();
let mut numbers = Vec::new();
for _ in 0..5 {
- numbers.push(rng.gen_range(1..101));
+ numbers.push(rng.random_range(1..101));
}
numbers
}
@@ -208,7 +208,7 @@ mod tests {
<|editable_region_end|>
let mut numbers = Vec::new();
for _ in 0..5 {
- numbers.push(rng.gen_range(1..101));
+ numbers.push(rng.random_range(1..101));
```"#}
);
}
@@ -1,24 +1,35 @@
use std::{
collections::BTreeSet,
+ fmt::{Display, Formatter},
+ ops::Range,
path::{Path, PathBuf},
sync::{Arc, LazyLock},
};
+use anyhow::{Result, anyhow};
use fs::Fs;
use futures::StreamExt as _;
use gpui::{App, AppContext as _, Entity, Subscription, Task};
+use itertools::Itertools;
use postage::watch;
use project::Worktree;
-use regex::Regex;
-use util::ResultExt as _;
+use strum::VariantArray;
+use util::{ResultExt as _, maybe};
use worktree::ChildEntriesOptions;
/// Matches the most common license locations, with US and UK English spelling.
static LICENSE_FILE_NAME_REGEX: LazyLock<regex::bytes::Regex> = LazyLock::new(|| {
regex::bytes::RegexBuilder::new(
"^ \
- (?: license | licence) \
- (?: [\\-._] (?: apache | isc | mit | upl))? \
+ (?: license | licence)? \
+ (?: [\\-._]? \
+ (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \
+ 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \
+ isc | \
+ mit | \
+ upl | \
+ zlib))? \
+ (?: [\\-._]? (?: license | licence))? \
(?: \\.txt | \\.md)? \
$",
)
@@ -28,40 +39,199 @@ static LICENSE_FILE_NAME_REGEX: LazyLock<regex::bytes::Regex> = LazyLock::new(||
.unwrap()
});
-fn is_license_eligible_for_data_collection(license: &str) -> bool {
- static LICENSE_REGEXES: LazyLock<Vec<Regex>> = LazyLock::new(|| {
- [
- include_str!("license_detection/apache.regex"),
- include_str!("license_detection/isc.regex"),
- include_str!("license_detection/mit.regex"),
- include_str!("license_detection/upl.regex"),
- ]
- .into_iter()
- .map(|pattern| Regex::new(&canonicalize_license_text(pattern)).unwrap())
- .collect()
- });
-
- let license = canonicalize_license_text(license);
- LICENSE_REGEXES.iter().any(|regex| regex.is_match(&license))
+#[derive(Debug, Clone, Copy, Eq, Ord, PartialOrd, PartialEq, VariantArray)]
+pub enum OpenSourceLicense {
+ Apache2_0,
+ BSDZero,
+ BSD,
+ ISC,
+ MIT,
+ UPL1_0,
+ Zlib,
}
-/// Canonicalizes the whitespace of license text and license regexes.
+impl Display for OpenSourceLicense {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.spdx_identifier())
+ }
+}
+
+impl OpenSourceLicense {
+ /// These are SPDX identifiers for the licenses, except for BSD, where the variants are not
+ /// distinguished.
+ pub fn spdx_identifier(&self) -> &'static str {
+ match self {
+ OpenSourceLicense::Apache2_0 => "apache-2.0",
+ OpenSourceLicense::BSDZero => "0bsd",
+ OpenSourceLicense::BSD => "bsd",
+ OpenSourceLicense::ISC => "isc",
+ OpenSourceLicense::MIT => "mit",
+ OpenSourceLicense::UPL1_0 => "upl-1.0",
+ OpenSourceLicense::Zlib => "zlib",
+ }
+ }
+
+ pub fn patterns(&self) -> &'static [&'static str] {
+ match self {
+ OpenSourceLicense::Apache2_0 => &[
+ include_str!("../license_patterns/apache-2.0-pattern"),
+ include_str!("../license_patterns/apache-2.0-reference-pattern"),
+ ],
+ OpenSourceLicense::BSDZero => &[include_str!("../license_patterns/0bsd-pattern")],
+ OpenSourceLicense::BSD => &[include_str!("../license_patterns/bsd-pattern")],
+ OpenSourceLicense::ISC => &[include_str!("../license_patterns/isc-pattern")],
+ OpenSourceLicense::MIT => &[include_str!("../license_patterns/mit-pattern")],
+ OpenSourceLicense::UPL1_0 => &[include_str!("../license_patterns/upl-1.0-pattern")],
+ OpenSourceLicense::Zlib => &[include_str!("../license_patterns/zlib-pattern")],
+ }
+ }
+}
+
+// TODO: Consider using databake or similar to not parse at runtime.
+static LICENSE_PATTERNS: LazyLock<LicensePatterns> = LazyLock::new(|| {
+ let mut approximate_max_length = 0;
+ let mut patterns = Vec::new();
+ for license in OpenSourceLicense::VARIANTS {
+ for pattern in license.patterns() {
+ let (pattern, length) = parse_pattern(pattern).unwrap();
+ patterns.push((*license, pattern));
+ approximate_max_length = approximate_max_length.max(length);
+ }
+ }
+ LicensePatterns {
+ patterns,
+ approximate_max_length,
+ }
+});
+
+fn detect_license(text: &str) -> Option<OpenSourceLicense> {
+ let text = canonicalize_license_text(text);
+ for (license, pattern) in LICENSE_PATTERNS.patterns.iter() {
+ log::trace!("Checking if license is {}", license);
+ if check_pattern(&pattern, &text) {
+ return Some(*license);
+ }
+ }
+
+ None
+}
+
+struct LicensePatterns {
+ patterns: Vec<(OpenSourceLicense, Vec<PatternPart>)>,
+ approximate_max_length: usize,
+}
+
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+struct PatternPart {
+ /// Indicates that matching `text` is optional. Skipping `match_any_chars` is conditional on
+ /// matching `text`.
+ optional: bool,
+ /// Indicates the number of characters that can be skipped before matching `text`.
+ match_any_chars: Range<usize>,
+ /// The text to match, may be empty.
+ text: String,
+}
+
+/// Lines that start with "-- " begin a `PatternPart`. `-- 1..10` specifies `match_any_chars:
+/// 1..10`. `-- 1..10 optional:` additionally specifies `optional: true`. It's a parse error for a
+/// line to start with `--` without matching this format.
+///
+/// Text that does not have `--` prefixes participate in the `text` field and are canonicalized by
+/// lowercasing, replacing all runs of whitespace with a single space, and otherwise only keeping
+/// ascii alphanumeric characters.
+fn parse_pattern(pattern_source: &str) -> Result<(Vec<PatternPart>, usize)> {
+ let mut pattern = Vec::new();
+ let mut part = PatternPart::default();
+ let mut approximate_max_length = 0;
+ for line in pattern_source.lines() {
+ if let Some(directive) = line.trim().strip_prefix("--") {
+ if part != PatternPart::default() {
+ pattern.push(part);
+ part = PatternPart::default();
+ }
+ let valid = maybe!({
+ let directive_chunks = directive.split_whitespace().collect::<Vec<_>>();
+ if !(1..=2).contains(&directive_chunks.len()) {
+ return None;
+ }
+ if directive_chunks.len() == 2 {
+ part.optional = true;
+ }
+ let range_chunks = directive_chunks[0].split("..").collect::<Vec<_>>();
+ if range_chunks.len() != 2 {
+ return None;
+ }
+ part.match_any_chars.start = range_chunks[0].parse::<usize>().ok()?;
+ part.match_any_chars.end = range_chunks[1].parse::<usize>().ok()?;
+ if part.match_any_chars.start > part.match_any_chars.end {
+ return None;
+ }
+ approximate_max_length += part.match_any_chars.end;
+ Some(())
+ });
+ if valid.is_none() {
+ return Err(anyhow!("Invalid pattern directive: {}", line));
+ }
+ continue;
+ }
+ approximate_max_length += line.len() + 1;
+ let line = canonicalize_license_text(line);
+ if line.is_empty() {
+ continue;
+ }
+ if !part.text.is_empty() {
+ part.text.push(' ');
+ }
+ part.text.push_str(&line);
+ }
+ if part != PatternPart::default() {
+ pattern.push(part);
+ }
+ Ok((pattern, approximate_max_length))
+}
+
+/// Checks a pattern against text by iterating over the pattern parts in reverse order, and checking
+/// matches with the end of a prefix of the input. Assumes that `canonicalize_license_text` has
+/// already been applied to the input.
+fn check_pattern(pattern: &[PatternPart], input: &str) -> bool {
+ let mut input_ix = input.len();
+ let mut match_any_chars = 0..0;
+ for part in pattern.iter().rev() {
+ if part.text.is_empty() {
+ match_any_chars.start += part.match_any_chars.start;
+ match_any_chars.end += part.match_any_chars.end;
+ continue;
+ }
+ let search_range_start = input_ix.saturating_sub(match_any_chars.end + part.text.len());
+ let search_range_end = input_ix.saturating_sub(match_any_chars.start);
+ let found_ix = &input[search_range_start..search_range_end].rfind(&part.text);
+ if let Some(found_ix) = found_ix {
+ input_ix = search_range_start + found_ix;
+ match_any_chars = part.match_any_chars.clone();
+ } else if !part.optional {
+ log::trace!(
+ "Failed to match pattern `...{}` against input `...{}`",
+ &part.text[part.text.len().saturating_sub(128)..],
+ &input[input_ix.saturating_sub(128)..]
+ );
+ return false;
+ }
+ }
+ match_any_chars.contains(&input_ix)
+}
+
+/// Canonicalizes license text by removing all non-alphanumeric characters, lowercasing, and turning
+/// runs of whitespace into a single space. Unicode alphanumeric characters are intentionally
+/// preserved since these should cause license mismatch when not within a portion of the license
+/// where arbitrary text is allowed.
fn canonicalize_license_text(license: &str) -> String {
- static PARAGRAPH_SEPARATOR_REGEX: LazyLock<Regex> =
- LazyLock::new(|| Regex::new(r"\s*\n\s*\n\s*").unwrap());
-
- PARAGRAPH_SEPARATOR_REGEX
- .split(license)
- .filter(|paragraph| !paragraph.trim().is_empty())
- .map(|paragraph| {
- paragraph
- .trim()
- .split_whitespace()
- .collect::<Vec<_>>()
- .join(" ")
- })
- .collect::<Vec<_>>()
- .join("\n\n")
+ license
+ .chars()
+ .filter(|c| c.is_ascii_whitespace() || c.is_alphanumeric())
+ .map(|c| c.to_ascii_lowercase())
+ .collect::<String>()
+ .split_ascii_whitespace()
+ .join(" ")
}
pub enum LicenseDetectionWatcher {
@@ -143,27 +313,32 @@ impl LicenseDetectionWatcher {
}
async fn is_path_eligible(fs: &Arc<dyn Fs>, abs_path: PathBuf) -> Option<bool> {
- log::info!("checking if `{abs_path:?}` is an open source license");
- // Resolve symlinks so that the file size from metadata is correct.
+ log::debug!("checking if `{abs_path:?}` is an open source license");
+ // resolve symlinks so that the file size from metadata is correct
let Some(abs_path) = fs.canonicalize(&abs_path).await.ok() else {
- log::info!(
+ log::debug!(
"`{abs_path:?}` license file probably deleted (error canonicalizing the path)"
);
return None;
};
let metadata = fs.metadata(&abs_path).await.log_err()??;
- // If the license file is >32kb it's unlikely to legitimately match any eligible license.
- if metadata.len > 32768 {
+ if metadata.len > LICENSE_PATTERNS.approximate_max_length as u64 {
+ log::debug!(
+ "`{abs_path:?}` license file was skipped \
+ because its size of {} bytes was larger than the max size of {} bytes",
+ metadata.len,
+ LICENSE_PATTERNS.approximate_max_length
+ );
return None;
}
let text = fs.load(&abs_path).await.log_err()?;
- let is_eligible = is_license_eligible_for_data_collection(&text);
+ let is_eligible = detect_license(&text).is_some();
if is_eligible {
- log::info!(
+ log::debug!(
"`{abs_path:?}` matches a license that is eligible for data collection (if enabled)"
);
} else {
- log::info!(
+ log::debug!(
"`{abs_path:?}` does not match a license that is eligible for data collection"
);
}
@@ -183,238 +358,224 @@ impl LicenseDetectionWatcher {
#[cfg(test)]
mod tests {
-
use fs::FakeFs;
use gpui::TestAppContext;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
- use unindent::unindent;
use worktree::WorktreeSettings;
use super::*;
- const MIT_LICENSE: &str = include_str!("license_detection/mit-text");
- const APACHE_LICENSE: &str = include_str!("license_detection/apache-text");
+ const APACHE_2_0_TXT: &str = include_str!("../license_examples/apache-2.0-ex0.txt");
+ const ISC_TXT: &str = include_str!("../license_examples/isc.txt");
+ const MIT_TXT: &str = include_str!("../license_examples/mit-ex0.txt");
+ const UPL_1_0_TXT: &str = include_str!("../license_examples/upl-1.0.txt");
+ const BSD_0_TXT: &str = include_str!("../license_examples/0bsd.txt");
- #[test]
- fn test_mit_positive_detection() {
- assert!(is_license_eligible_for_data_collection(MIT_LICENSE));
+ #[track_caller]
+ fn assert_matches_license(text: &str, license: OpenSourceLicense) {
+ assert_eq!(detect_license(text), Some(license));
+ assert!(text.len() < LICENSE_PATTERNS.approximate_max_length);
}
+ /*
+ // Uncomment this and run with `cargo test -p zeta -- --no-capture &> licenses-output` to
+ // traverse your entire home directory and run license detection on every file that has a
+ // license-like name.
#[test]
- fn test_mit_negative_detection() {
- let example_license = format!(
- r#"{MIT_LICENSE}
-
- This project is dual licensed under the MIT License and the Apache License, Version 2.0."#
+ fn test_check_all_licenses_in_home_dir() {
+ let mut detected = Vec::new();
+ let mut unrecognized = Vec::new();
+ let mut walked_entries = 0;
+ let homedir = std::env::home_dir().unwrap();
+ for entry in walkdir::WalkDir::new(&homedir) {
+ walked_entries += 1;
+ if walked_entries % 10000 == 0 {
+ println!(
+ "So far visited {} files in {}",
+ walked_entries,
+ homedir.display()
+ );
+ }
+ let Ok(entry) = entry else {
+ continue;
+ };
+ if !LICENSE_FILE_NAME_REGEX.is_match(entry.file_name().as_encoded_bytes()) {
+ continue;
+ }
+ let Ok(contents) = std::fs::read_to_string(entry.path()) else {
+ continue;
+ };
+ let path_string = entry.path().to_string_lossy().to_string();
+ let license = detect_license(&contents);
+ match license {
+ Some(license) => detected.push((license, path_string)),
+ None => unrecognized.push(path_string),
+ }
+ }
+ println!("\nDetected licenses:\n");
+ detected.sort();
+ for (license, path) in &detected {
+ println!("{}: {}", license.spdx_identifier(), path);
+ }
+ println!("\nUnrecognized licenses:\n");
+ for path in &unrecognized {
+ println!("{}", path);
+ }
+ panic!(
+ "{} licenses detected, {} unrecognized",
+ detected.len(),
+ unrecognized.len()
);
- assert!(!is_license_eligible_for_data_collection(&example_license));
+ println!("This line has a warning to make sure this test is always commented out");
}
+ */
#[test]
- fn test_isc_positive_detection() {
- let example_license = unindent(
- r#"
- ISC License
-
- Copyright (c) 2024, John Doe
-
- Permission to use, copy, modify, and/or distribute this software for any
- purpose with or without fee is hereby granted, provided that the above
- copyright notice and this permission notice appear in all copies.
-
- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
- WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
- MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
- ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
- WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
- ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
- OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- "#
- .trim(),
+ fn test_apache_positive_detection() {
+ assert_matches_license(APACHE_2_0_TXT, OpenSourceLicense::Apache2_0);
+ assert_matches_license(
+ include_str!("../license_examples/apache-2.0-ex1.txt"),
+ OpenSourceLicense::Apache2_0,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/apache-2.0-ex2.txt"),
+ OpenSourceLicense::Apache2_0,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/apache-2.0-ex3.txt"),
+ OpenSourceLicense::Apache2_0,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/apache-2.0-ex4.txt"),
+ OpenSourceLicense::Apache2_0,
+ );
+ assert_matches_license(
+ include_str!("../../../LICENSE-APACHE"),
+ OpenSourceLicense::Apache2_0,
);
-
- assert!(is_license_eligible_for_data_collection(&example_license));
}
#[test]
- fn test_isc_negative_detection() {
- let example_license = unindent(
- r#"
- ISC License
-
- Copyright (c) 2024, John Doe
-
- Permission to use, copy, modify, and/or distribute this software for any
- purpose with or without fee is hereby granted, provided that the above
- copyright notice and this permission notice appear in all copies.
-
- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
- WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
- MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
- ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
- WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
- ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
- OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
- This project is dual licensed under the ISC License and the MIT License.
- "#
- .trim(),
+ fn test_apache_negative_detection() {
+ assert_eq!(
+ detect_license(&format!(
+ "{APACHE_2_0_TXT}\n\nThe terms in this license are void if P=NP."
+ )),
+ None
);
-
- assert!(!is_license_eligible_for_data_collection(&example_license));
}
#[test]
- fn test_upl_positive_detection() {
- let example_license = unindent(
- r#"
- Copyright (c) 2025, John Doe
-
- The Universal Permissive License (UPL), Version 1.0
-
- Subject to the condition set forth below, permission is hereby granted to any person
- obtaining a copy of this software, associated documentation and/or data (collectively
- the "Software"), free of charge and under any and all copyright rights in the
- Software, and any and all patent rights owned or freely licensable by each licensor
- hereunder covering either (i) the unmodified Software as contributed to or provided
- by such licensor, or (ii) the Larger Works (as defined below), to deal in both
-
- (a) the Software, and
-
- (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
- included with the Software (each a "Larger Work" to which the Software is
- contributed by such licensors),
-
- without restriction, including without limitation the rights to copy, create
- derivative works of, display, perform, and distribute the Software and make, use,
- sell, offer for sale, import, export, have made, and have sold the Software and the
- Larger Work(s), and to sublicense the foregoing rights on either these or other
- terms.
-
- This license is subject to the following condition:
-
- The above copyright notice and either this complete permission notice or at a minimum
- a reference to the UPL must be included in all copies or substantial portions of the
- Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
- CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
- OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- "#
- .trim(),
+ fn test_bsd_1_clause_positive_detection() {
+ assert_matches_license(
+ include_str!("../license_examples/bsd-1-clause.txt"),
+ OpenSourceLicense::BSD,
);
-
- assert!(is_license_eligible_for_data_collection(&example_license));
}
#[test]
- fn test_upl_negative_detection() {
- let example_license = unindent(
- r#"
- UPL License
-
- Copyright (c) 2024, John Doe
-
- The Universal Permissive License (UPL), Version 1.0
-
- Subject to the condition set forth below, permission is hereby granted to any person
- obtaining a copy of this software, associated documentation and/or data (collectively
- the "Software"), free of charge and under any and all copyright rights in the
- Software, and any and all patent rights owned or freely licensable by each licensor
- hereunder covering either (i) the unmodified Software as contributed to or provided
- by such licensor, or (ii) the Larger Works (as defined below), to deal in both
-
- (a) the Software, and
-
- (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
- included with the Software (each a "Larger Work" to which the Software is
- contributed by such licensors),
+ fn test_bsd_2_clause_positive_detection() {
+ assert_matches_license(
+ include_str!("../license_examples/bsd-2-clause-ex0.txt"),
+ OpenSourceLicense::BSD,
+ );
+ }
- without restriction, including without limitation the rights to copy, create
- derivative works of, display, perform, and distribute the Software and make, use,
- sell, offer for sale, import, export, have made, and have sold the Software and the
- Larger Work(s), and to sublicense the foregoing rights on either these or other
- terms.
+ #[test]
+ fn test_bsd_3_clause_positive_detection() {
+ assert_matches_license(
+ include_str!("../license_examples/bsd-3-clause-ex0.txt"),
+ OpenSourceLicense::BSD,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/bsd-3-clause-ex1.txt"),
+ OpenSourceLicense::BSD,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/bsd-3-clause-ex2.txt"),
+ OpenSourceLicense::BSD,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/bsd-3-clause-ex3.txt"),
+ OpenSourceLicense::BSD,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/bsd-3-clause-ex4.txt"),
+ OpenSourceLicense::BSD,
+ );
+ }
- This license is subject to the following condition:
+ #[test]
+ fn test_bsd_0_positive_detection() {
+ assert_matches_license(BSD_0_TXT, OpenSourceLicense::BSDZero);
+ }
- The above copyright notice and either this complete permission notice or at a minimum
- a reference to the UPL must be included in all copies or substantial portions of the
- Software.
+ #[test]
+ fn test_isc_positive_detection() {
+ assert_matches_license(ISC_TXT, OpenSourceLicense::ISC);
+ }
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
- CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
- OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ #[test]
+ fn test_isc_negative_detection() {
+ let license_text = format!(
+ r#"{ISC_TXT}
- This project is dual licensed under the ISC License and the MIT License.
- "#
- .trim(),
+ This project is dual licensed under the ISC License and the MIT License."#
);
- assert!(!is_license_eligible_for_data_collection(&example_license));
+ assert_eq!(detect_license(&license_text), None);
}
#[test]
- fn test_apache_positive_detection() {
- assert!(is_license_eligible_for_data_collection(APACHE_LICENSE));
-
- let license_with_appendix = format!(
- r#"{APACHE_LICENSE}
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
+ fn test_mit_positive_detection() {
+ assert_matches_license(MIT_TXT, OpenSourceLicense::MIT);
+ assert_matches_license(
+ include_str!("../license_examples/mit-ex1.txt"),
+ OpenSourceLicense::MIT,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/mit-ex2.txt"),
+ OpenSourceLicense::MIT,
+ );
+ assert_matches_license(
+ include_str!("../license_examples/mit-ex3.txt"),
+ OpenSourceLicense::MIT,
+ );
+ }
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
+ #[test]
+ fn test_mit_negative_detection() {
+ let license_text = format!(
+ r#"{MIT_TXT}
- Copyright [yyyy] [name of copyright owner]
+ This project is dual licensed under the MIT License and the Apache License, Version 2.0."#
+ );
+ assert_eq!(detect_license(&license_text), None);
+ }
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
+ #[test]
+ fn test_upl_positive_detection() {
+ assert_matches_license(UPL_1_0_TXT, OpenSourceLicense::UPL1_0);
+ }
- http://www.apache.org/licenses/LICENSE-2.0
+ #[test]
+ fn test_upl_negative_detection() {
+ let license_text = format!(
+ r#"{UPL_1_0_TXT}
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License."#
- );
- assert!(is_license_eligible_for_data_collection(
- &license_with_appendix
- ));
-
- // Sometimes people fill in the appendix with copyright info.
- let license_with_copyright = license_with_appendix.replace(
- "Copyright [yyyy] [name of copyright owner]",
- "Copyright 2025 John Doe",
+ This project is dual licensed under the UPL License and the MIT License."#
);
- assert!(license_with_copyright != license_with_appendix);
- assert!(is_license_eligible_for_data_collection(
- &license_with_copyright
- ));
+
+ assert_eq!(detect_license(&license_text), None);
}
#[test]
- fn test_apache_negative_detection() {
- assert!(!is_license_eligible_for_data_collection(&format!(
- "{APACHE_LICENSE}\n\nThe terms in this license are void if P=NP."
- )));
+ fn test_zlib_positive_detection() {
+ assert_matches_license(
+ include_str!("../license_examples/zlib-ex0.txt"),
+ OpenSourceLicense::Zlib,
+ );
}
#[test]
@@ -439,10 +600,22 @@ mod tests {
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-ISC"));
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-UPL"));
+ // Test with "license" coming after
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-LICENSE"));
+
+ // Test version numbers
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2.0"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-1"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-2"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3-CLAUSE"));
+
// Test combinations
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-MIT.txt"));
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENCE.ISC.md"));
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"license_upl"));
+ assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.APACHE.2.0"));
// Test case insensitive
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"License"));
@@ -461,82 +634,20 @@ mod tests {
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.old"));
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-GPL"));
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSEABC"));
- assert!(!LICENSE_FILE_NAME_REGEX.is_match(b""));
}
#[test]
fn test_canonicalize_license_text() {
- // Test basic whitespace normalization
- let input = "Line 1\n Line 2 \n\n\n Line 3 ";
- let expected = "Line 1 Line 2\n\nLine 3";
- assert_eq!(canonicalize_license_text(input), expected);
-
- // Test paragraph separation
- let input = "Paragraph 1\nwith multiple lines\n\n\n\nParagraph 2\nwith more lines";
- let expected = "Paragraph 1 with multiple lines\n\nParagraph 2 with more lines";
- assert_eq!(canonicalize_license_text(input), expected);
-
- // Test empty paragraphs are filtered out
- let input = "\n\n\nParagraph 1\n\n\n \n\n\nParagraph 2\n\n\n";
- let expected = "Paragraph 1\n\nParagraph 2";
- assert_eq!(canonicalize_license_text(input), expected);
-
- // Test single line
- let input = " Single line with spaces ";
- let expected = "Single line with spaces";
- assert_eq!(canonicalize_license_text(input), expected);
-
- // Test multiple consecutive spaces within lines
- let input = "Word1 Word2\n\nWord3 Word4";
- let expected = "Word1 Word2\n\nWord3 Word4";
+ let input = " Paragraph 1\nwith multiple lines\n\n\n\nParagraph 2\nwith more lines\n ";
+ let expected = "paragraph 1 with multiple lines paragraph 2 with more lines";
assert_eq!(canonicalize_license_text(input), expected);
// Test tabs and mixed whitespace
let input = "Word1\t\tWord2\n\n Word3\r\n\r\n\r\nWord4 ";
- let expected = "Word1 Word2\n\nWord3\n\nWord4";
+ let expected = "word1 word2 word3 word4";
assert_eq!(canonicalize_license_text(input), expected);
}
- #[test]
- fn test_license_detection_canonicalizes_whitespace() {
- let mit_with_weird_spacing = unindent(
- r#"
- MIT License
-
-
- Copyright (c) 2024 John Doe
-
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
-
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
-
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
- "#
- .trim(),
- );
-
- assert!(is_license_eligible_for_data_collection(
- &mit_with_weird_spacing
- ));
- }
-
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -590,14 +701,14 @@ mod tests {
assert!(matches!(watcher, LicenseDetectionWatcher::Local { .. }));
assert!(!watcher.is_project_open_source());
- fs.write(Path::new("/root/LICENSE-MIT"), MIT_LICENSE.as_bytes())
+ fs.write(Path::new("/root/LICENSE-MIT"), MIT_TXT.as_bytes())
.await
.unwrap();
cx.background_executor.run_until_parked();
assert!(watcher.is_project_open_source());
- fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_LICENSE.as_bytes())
+ fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_2_0_TXT.as_bytes())
.await
.unwrap();
@@ -630,7 +741,7 @@ mod tests {
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/root",
- json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_LICENSE }),
+ json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_TXT }),
)
.await;
@@ -14,7 +14,7 @@ use settings::update_settings_file;
use ui::{Vector, VectorName, prelude::*};
use workspace::{ModalView, Workspace};
-/// Introduces user to Zed's Edit Prediction feature and terms of service
+/// Introduces user to Zed's Edit Prediction feature
pub struct ZedPredictModal {
onboarding: Entity<EditPredictionOnboarding>,
focus_handle: FocusHandle,
@@ -86,7 +86,16 @@ impl Focusable for ZedPredictModal {
}
}
-impl ModalView for ZedPredictModal {}
+impl ModalView for ZedPredictModal {
+ fn on_before_dismiss(
+ &mut self,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> workspace::DismissDecision {
+ ZedPredictUpsell::set_dismissed(true, cx);
+ workspace::DismissDecision::Dismiss(true)
+ }
+}
impl Render for ZedPredictModal {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
@@ -24,23 +24,24 @@ use collections::{HashMap, HashSet, VecDeque};
use futures::AsyncReadExt;
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EntityId, Global, SemanticVersion,
- Subscription, Task, WeakEntity, actions,
+ SharedString, Subscription, Task, actions,
};
use http_client::{AsyncBody, HttpClient, Method, Request, Response};
use input_excerpt::excerpt_for_cursor_position;
use language::{
- Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, ToOffset, ToPoint, text_diff,
+ Anchor, Buffer, BufferSnapshot, EditPreview, File, OffsetRangeExt, ToOffset, ToPoint, text_diff,
};
use language_model::{LlmApiToken, RefreshLlmTokenListener};
use project::{Project, ProjectPath};
use release_channel::AppVersion;
use settings::WorktreeId;
+use std::collections::hash_map;
+use std::mem;
use std::str::FromStr;
use std::{
cmp,
fmt::Write,
future::Future,
- mem,
ops::Range,
path::Path,
rc::Rc,
@@ -51,21 +52,19 @@ use telemetry_events::EditPredictionRating;
use thiserror::Error;
use util::ResultExt;
use uuid::Uuid;
-use workspace::Workspace;
-use workspace::notifications::{ErrorMessagePrompt, NotificationId};
+use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification};
use worktree::Worktree;
-const CURSOR_MARKER: &'static str = "<|user_cursor_is_here|>";
-const START_OF_FILE_MARKER: &'static str = "<|start_of_file|>";
-const EDITABLE_REGION_START_MARKER: &'static str = "<|editable_region_start|>";
-const EDITABLE_REGION_END_MARKER: &'static str = "<|editable_region_end|>";
+const CURSOR_MARKER: &str = "<|user_cursor_is_here|>";
+const START_OF_FILE_MARKER: &str = "<|start_of_file|>";
+const EDITABLE_REGION_START_MARKER: &str = "<|editable_region_start|>";
+const EDITABLE_REGION_END_MARKER: &str = "<|editable_region_end|>";
const BUFFER_CHANGE_GROUPING_INTERVAL: Duration = Duration::from_secs(1);
const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice";
const MAX_CONTEXT_TOKENS: usize = 150;
const MAX_REWRITE_TOKENS: usize = 350;
const MAX_EVENT_TOKENS: usize = 500;
-const MAX_DIAGNOSTIC_GROUPS: usize = 10;
/// Maximum number of events to track.
const MAX_EVENT_COUNT: usize = 16;
@@ -118,12 +117,8 @@ impl Dismissable for ZedPredictUpsell {
}
}
-pub fn should_show_upsell_modal(user_store: &Entity<UserStore>, cx: &App) -> bool {
- if user_store.read(cx).has_accepted_terms_of_service() {
- !ZedPredictUpsell::dismissed()
- } else {
- true
- }
+pub fn should_show_upsell_modal() -> bool {
+ !ZedPredictUpsell::dismissed()
}
#[derive(Clone)]
@@ -166,7 +161,7 @@ fn interpolate(
) -> Option<Vec<(Range<Anchor>, String)>> {
let mut edits = Vec::new();
- let mut model_edits = current_edits.into_iter().peekable();
+ let mut model_edits = current_edits.iter().peekable();
for user_edit in new_snapshot.edits_since::<usize>(&old_snapshot.version) {
while let Some((model_old_range, _)) = model_edits.peek() {
let model_old_range = model_old_range.to_offset(old_snapshot);
@@ -216,13 +211,11 @@ impl std::fmt::Debug for EditPrediction {
}
pub struct Zeta {
- workspace: Option<WeakEntity<Workspace>>,
+ projects: HashMap<EntityId, ZetaProject>,
client: Arc<Client>,
- events: VecDeque<Event>,
- registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
shown_completions: VecDeque<EditPrediction>,
rated_completions: HashSet<EditPredictionId>,
- data_collection_choice: Entity<DataCollectionChoice>,
+ data_collection_choice: DataCollectionChoice,
llm_token: LlmApiToken,
_llm_token_subscription: Subscription,
/// Whether an update to a newer version of Zed is required to continue using Zeta.
@@ -231,20 +224,24 @@ pub struct Zeta {
license_detection_watchers: HashMap<WorktreeId, Rc<LicenseDetectionWatcher>>,
}
+struct ZetaProject {
+ events: VecDeque<Event>,
+ registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
+}
+
impl Zeta {
pub fn global(cx: &mut App) -> Option<Entity<Self>> {
cx.try_global::<ZetaGlobal>().map(|global| global.0.clone())
}
pub fn register(
- workspace: Option<WeakEntity<Workspace>>,
worktree: Option<Entity<Worktree>>,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut App,
) -> Entity<Self> {
let this = Self::global(cx).unwrap_or_else(|| {
- let entity = cx.new(|cx| Self::new(workspace, client, user_store, cx));
+ let entity = cx.new(|cx| Self::new(client, user_store, cx));
cx.set_global(ZetaGlobal(entity.clone()));
entity
});
@@ -262,31 +259,23 @@ impl Zeta {
}
pub fn clear_history(&mut self) {
- self.events.clear();
+ for zeta_project in self.projects.values_mut() {
+ zeta_project.events.clear();
+ }
}
pub fn usage(&self, cx: &App) -> Option<EditPredictionUsage> {
self.user_store.read(cx).edit_prediction_usage()
}
- fn new(
- workspace: Option<WeakEntity<Workspace>>,
- client: Arc<Client>,
- user_store: Entity<UserStore>,
- cx: &mut Context<Self>,
- ) -> Self {
+ fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);
-
- let data_collection_choice = Self::load_data_collection_choices();
- let data_collection_choice = cx.new(|_| data_collection_choice);
-
+ let data_collection_choice = Self::load_data_collection_choice();
Self {
- workspace,
+ projects: HashMap::default(),
client,
- events: VecDeque::new(),
shown_completions: VecDeque::new(),
rated_completions: HashSet::default(),
- registered_buffers: HashMap::default(),
data_collection_choice,
llm_token: LlmApiToken::default(),
_llm_token_subscription: cx.subscribe(
@@ -307,12 +296,35 @@ impl Zeta {
}
}
- fn push_event(&mut self, event: Event) {
+ fn get_or_init_zeta_project(
+ &mut self,
+ project: &Entity<Project>,
+ cx: &mut Context<Self>,
+ ) -> &mut ZetaProject {
+ let project_id = project.entity_id();
+ match self.projects.entry(project_id) {
+ hash_map::Entry::Occupied(entry) => entry.into_mut(),
+ hash_map::Entry::Vacant(entry) => {
+ cx.observe_release(project, move |this, _, _cx| {
+ this.projects.remove(&project_id);
+ })
+ .detach();
+ entry.insert(ZetaProject {
+ events: VecDeque::with_capacity(MAX_EVENT_COUNT),
+ registered_buffers: HashMap::default(),
+ })
+ }
+ }
+ }
+
+ fn push_event(zeta_project: &mut ZetaProject, event: Event) {
+ let events = &mut zeta_project.events;
+
if let Some(Event::BufferChange {
new_snapshot: last_new_snapshot,
timestamp: last_timestamp,
..
- }) = self.events.back_mut()
+ }) = events.back_mut()
{
// Coalesce edits for the same buffer when they happen one after the other.
let Event::BufferChange {
@@ -331,54 +343,67 @@ impl Zeta {
}
}
- self.events.push_back(event);
- if self.events.len() >= MAX_EVENT_COUNT {
+ if events.len() >= MAX_EVENT_COUNT {
// These are halved instead of popping to improve prompt caching.
- self.events.drain(..MAX_EVENT_COUNT / 2);
+ events.drain(..MAX_EVENT_COUNT / 2);
}
- }
- pub fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
- let buffer_id = buffer.entity_id();
- let weak_buffer = buffer.downgrade();
-
- if let std::collections::hash_map::Entry::Vacant(entry) =
- self.registered_buffers.entry(buffer_id)
- {
- let snapshot = buffer.read(cx).snapshot();
-
- entry.insert(RegisteredBuffer {
- snapshot,
- _subscriptions: [
- cx.subscribe(buffer, move |this, buffer, event, cx| {
- this.handle_buffer_event(buffer, event, cx);
- }),
- cx.observe_release(buffer, move |this, _buffer, _cx| {
- this.registered_buffers.remove(&weak_buffer.entity_id());
- }),
- ],
- });
- };
+ events.push_back(event);
}
- fn handle_buffer_event(
+ pub fn register_buffer(
&mut self,
- buffer: Entity<Buffer>,
- event: &language::BufferEvent,
+ buffer: &Entity<Buffer>,
+ project: &Entity<Project>,
cx: &mut Context<Self>,
) {
- if let language::BufferEvent::Edited = event {
- self.report_changes_for_buffer(&buffer, cx);
+ let zeta_project = self.get_or_init_zeta_project(project, cx);
+ Self::register_buffer_impl(zeta_project, buffer, project, cx);
+ }
+
+ fn register_buffer_impl<'a>(
+ zeta_project: &'a mut ZetaProject,
+ buffer: &Entity<Buffer>,
+ project: &Entity<Project>,
+ cx: &mut Context<Self>,
+ ) -> &'a mut RegisteredBuffer {
+ let buffer_id = buffer.entity_id();
+ match zeta_project.registered_buffers.entry(buffer_id) {
+ hash_map::Entry::Occupied(entry) => entry.into_mut(),
+ hash_map::Entry::Vacant(entry) => {
+ let snapshot = buffer.read(cx).snapshot();
+ let project_entity_id = project.entity_id();
+ entry.insert(RegisteredBuffer {
+ snapshot,
+ _subscriptions: [
+ cx.subscribe(buffer, {
+ let project = project.downgrade();
+ move |this, buffer, event, cx| {
+ if let language::BufferEvent::Edited = event
+ && let Some(project) = project.upgrade()
+ {
+ this.report_changes_for_buffer(&buffer, &project, cx);
+ }
+ }
+ }),
+ cx.observe_release(buffer, move |this, _buffer, _cx| {
+ let Some(zeta_project) = this.projects.get_mut(&project_entity_id)
+ else {
+ return;
+ };
+ zeta_project.registered_buffers.remove(&buffer_id);
+ }),
+ ],
+ })
+ }
}
}
fn request_completion_impl<F, R>(
&mut self,
- workspace: Option<Entity<Workspace>>,
- project: Option<&Entity<Project>>,
+ project: &Entity<Project>,
buffer: &Entity<Buffer>,
cursor: language::Anchor,
- can_collect_data: bool,
cx: &mut Context<Self>,
perform_predict_edits: F,
) -> Task<Result<Option<EditPrediction>>>
@@ -390,19 +415,27 @@ impl Zeta {
{
let buffer = buffer.clone();
let buffer_snapshotted_at = Instant::now();
- let snapshot = self.report_changes_for_buffer(&buffer, cx);
+ let snapshot = self.report_changes_for_buffer(&buffer, project, cx);
let zeta = cx.entity();
- let events = self.events.clone();
let client = self.client.clone();
let llm_token = self.llm_token.clone();
let app_version = AppVersion::global(cx);
- let git_info = if let (true, Some(project), Some(file)) =
- (can_collect_data, project, snapshot.file())
- {
- git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx)
+ let zeta_project = self.get_or_init_zeta_project(project, cx);
+ let mut events = Vec::with_capacity(zeta_project.events.len());
+ events.extend(zeta_project.events.iter().cloned());
+ let events = Arc::new(events);
+
+ let (git_info, can_collect_file) = if let Some(file) = snapshot.file() {
+ let can_collect_file = self.can_collect_file(file, cx);
+ let git_info = if can_collect_file {
+ git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx)
+ } else {
+ None
+ };
+ (git_info, can_collect_file)
} else {
- None
+ (None, false)
};
let full_path: Arc<Path> = snapshot
@@ -412,25 +445,35 @@ impl Zeta {
let full_path_str = full_path.to_string_lossy().to_string();
let cursor_point = cursor.to_point(&snapshot);
let cursor_offset = cursor_point.to_offset(&snapshot);
- let make_events_prompt = move || prompt_for_events(&events, MAX_EVENT_TOKENS);
+ let prompt_for_events = {
+ let events = events.clone();
+ move || prompt_for_events_impl(&events, MAX_EVENT_TOKENS)
+ };
let gather_task = gather_context(
- project,
full_path_str,
&snapshot,
cursor_point,
- make_events_prompt,
- can_collect_data,
- git_info,
+ prompt_for_events,
cx,
);
cx.spawn(async move |this, cx| {
let GatherContextOutput {
- body,
+ mut body,
editable_range,
+ included_events_count,
} = gather_task.await?;
let done_gathering_context_at = Instant::now();
+ let included_events = &events[events.len() - included_events_count..events.len()];
+ body.can_collect_data = can_collect_file
+ && this
+ .read_with(cx, |this, cx| this.can_collect_events(included_events, cx))
+ .unwrap_or(false);
+ if body.can_collect_data {
+ body.git_info = git_info;
+ }
+
log::debug!(
"Events:\n{}\nExcerpt:\n{:?}",
body.input_events,
@@ -457,23 +500,20 @@ impl Zeta {
zeta.update_required = true;
});
- if let Some(workspace) = workspace {
- workspace.update(cx, |workspace, cx| {
- workspace.show_notification(
- NotificationId::unique::<ZedUpdateRequiredError>(),
- cx,
- |cx| {
- cx.new(|cx| {
- ErrorMessagePrompt::new(err.to_string(), cx)
- .with_link_button(
- "Update Zed",
- "https://zed.dev/releases",
- )
- })
- },
- );
- });
- }
+ let error_message: SharedString = err.to_string().into();
+ show_app_notification(
+ NotificationId::unique::<ZedUpdateRequiredError>(),
+ cx,
+ move |cx| {
+ cx.new(|cx| {
+ ErrorMessagePrompt::new(error_message.clone(), cx)
+ .with_link_button(
+ "Update Zed",
+ "https://zed.dev/releases",
+ )
+ })
+ },
+ );
})
.ok();
}
@@ -529,196 +569,28 @@ impl Zeta {
})
}
- // Generates several example completions of various states to fill the Zeta completion modal
- #[cfg(any(test, feature = "test-support"))]
- pub fn fill_with_fake_completions(&mut self, cx: &mut Context<Self>) -> Task<()> {
- use language::Point;
-
- let test_buffer_text = indoc::indoc! {r#"a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
- And maybe a short line
-
- Then a few lines
-
- and then another
- "#};
-
- let project = None;
- let buffer = cx.new(|cx| Buffer::local(test_buffer_text, cx));
- let position = buffer.read(cx).anchor_before(Point::new(1, 0));
-
- let completion_tasks = vec![
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("e7861db5-0cea-4761-b1c5-ad083ac53a80").unwrap(),
- output_excerpt: format!("{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-[here's an edit]
-And maybe a short line
-Then a few lines
-and then another
-{EDITABLE_REGION_END_MARKER}
- ", ),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("077c556a-2c49-44e2-bbc6-dafc09032a5e").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-[and another edit]
-Then a few lines
-and then another
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("df8c7b23-3d1d-4f99-a306-1f6264a41277").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-
-Then a few lines
-
-and then another
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("c743958d-e4d8-44a8-aa5b-eb1e305c5f5c").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-
-Then a few lines
-
-and then another
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("ff5cd7ab-ad06-4808-986e-d3391e7b8355").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-Then a few lines
-[a third completion]
-and then another
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("83cafa55-cdba-4b27-8474-1865ea06be94").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-and then another
-[fourth completion example]
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- self.fake_completion(
- project,
- &buffer,
- position,
- PredictEditsResponse {
- request_id: Uuid::parse_str("d5bd3afd-8723-47c7-bd77-15a3a926867b").unwrap(),
- output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER}
-a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line
-And maybe a short line
-Then a few lines
-and then another
-[fifth and final completion]
-{EDITABLE_REGION_END_MARKER}
- "#),
- },
- cx,
- ),
- ];
-
- cx.spawn(async move |zeta, cx| {
- for task in completion_tasks {
- task.await.unwrap();
- }
-
- zeta.update(cx, |zeta, _cx| {
- zeta.shown_completions.get_mut(2).unwrap().edits = Arc::new([]);
- zeta.shown_completions.get_mut(3).unwrap().edits = Arc::new([]);
- })
- .ok();
- })
- }
-
#[cfg(any(test, feature = "test-support"))]
pub fn fake_completion(
&mut self,
- project: Option<&Entity<Project>>,
+ project: &Entity<Project>,
buffer: &Entity<Buffer>,
position: language::Anchor,
response: PredictEditsResponse,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
- use std::future::ready;
-
- self.request_completion_impl(None, project, buffer, position, false, cx, |_params| {
- ready(Ok((response, None)))
+ self.request_completion_impl(project, buffer, position, cx, |_params| {
+ std::future::ready(Ok((response, None)))
})
}
pub fn request_completion(
&mut self,
- project: Option<&Entity<Project>>,
+ project: &Entity<Project>,
buffer: &Entity<Buffer>,
position: language::Anchor,
- can_collect_data: bool,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
- let workspace = self
- .workspace
- .as_ref()
- .and_then(|workspace| workspace.upgrade());
- self.request_completion_impl(
- workspace,
- project,
- buffer,
- position,
- can_collect_data,
- cx,
- Self::perform_predict_edits,
- )
+ self.request_completion_impl(project, buffer, position, cx, Self::perform_predict_edits)
}
pub fn perform_predict_edits(
@@ -1065,29 +937,80 @@ and then another
fn report_changes_for_buffer(
&mut self,
buffer: &Entity<Buffer>,
+ project: &Entity<Project>,
cx: &mut Context<Self>,
) -> BufferSnapshot {
- self.register_buffer(buffer, cx);
+ let zeta_project = self.get_or_init_zeta_project(project, cx);
+ let registered_buffer = Self::register_buffer_impl(zeta_project, buffer, project, cx);
- let registered_buffer = self
- .registered_buffers
- .get_mut(&buffer.entity_id())
- .unwrap();
let new_snapshot = buffer.read(cx).snapshot();
-
if new_snapshot.version != registered_buffer.snapshot.version {
let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone());
- self.push_event(Event::BufferChange {
- old_snapshot,
- new_snapshot: new_snapshot.clone(),
- timestamp: Instant::now(),
- });
+ Self::push_event(
+ zeta_project,
+ Event::BufferChange {
+ old_snapshot,
+ new_snapshot: new_snapshot.clone(),
+ timestamp: Instant::now(),
+ },
+ );
}
new_snapshot
}
- fn load_data_collection_choices() -> DataCollectionChoice {
+ fn can_collect_file(&self, file: &Arc<dyn File>, cx: &App) -> bool {
+ self.data_collection_choice.is_enabled() && self.is_file_open_source(file, cx)
+ }
+
+ fn can_collect_events(&self, events: &[Event], cx: &App) -> bool {
+ if !self.data_collection_choice.is_enabled() {
+ return false;
+ }
+ let mut last_checked_file = None;
+ for event in events {
+ match event {
+ Event::BufferChange {
+ old_snapshot,
+ new_snapshot,
+ ..
+ } => {
+ if let Some(old_file) = old_snapshot.file()
+ && let Some(new_file) = new_snapshot.file()
+ {
+ if let Some(last_checked_file) = last_checked_file
+ && Arc::ptr_eq(last_checked_file, old_file)
+ && Arc::ptr_eq(last_checked_file, new_file)
+ {
+ continue;
+ }
+ if !self.can_collect_file(old_file, cx) {
+ return false;
+ }
+ if !Arc::ptr_eq(old_file, new_file) && !self.can_collect_file(new_file, cx)
+ {
+ return false;
+ }
+ last_checked_file = Some(new_file);
+ } else {
+ return false;
+ }
+ }
+ }
+ }
+ true
+ }
+
+ fn is_file_open_source(&self, file: &Arc<dyn File>, cx: &App) -> bool {
+ if !file.is_local() || file.is_private() {
+ return false;
+ }
+ self.license_detection_watchers
+ .get(&file.worktree_id(cx))
+ .is_some_and(|watcher| watcher.is_project_open_source())
+ }
+
+ fn load_data_collection_choice() -> DataCollectionChoice {
let choice = KEY_VALUE_STORE
.read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
.log_err()
@@ -1103,6 +1026,17 @@ and then another
None => DataCollectionChoice::NotAnswered,
}
}
+
+ fn toggle_data_collection_choice(&mut self, cx: &mut Context<Self>) {
+ self.data_collection_choice = self.data_collection_choice.toggle();
+ let new_choice = self.data_collection_choice;
+ db::write_and_log(cx, move || {
+ KEY_VALUE_STORE.write_kvp(
+ ZED_PREDICT_DATA_COLLECTION_CHOICE.into(),
+ new_choice.is_enabled().to_string(),
+ )
+ });
+ }
}
pub struct PerformPredictEditsParams {
@@ -1159,49 +1093,19 @@ fn git_info_for_file(
pub struct GatherContextOutput {
pub body: PredictEditsBody,
pub editable_range: Range<usize>,
+ pub included_events_count: usize,
}
pub fn gather_context(
- project: Option<&Entity<Project>>,
full_path_str: String,
snapshot: &BufferSnapshot,
cursor_point: language::Point,
- make_events_prompt: impl FnOnce() -> String + Send + 'static,
- can_collect_data: bool,
- git_info: Option<PredictEditsGitInfo>,
+ prompt_for_events: impl FnOnce() -> (String, usize) + Send + 'static,
cx: &App,
) -> Task<Result<GatherContextOutput>> {
- let local_lsp_store =
- project.and_then(|project| project.read(cx).lsp_store().read(cx).as_local());
- let diagnostic_groups: Vec<(String, serde_json::Value)> =
- if can_collect_data && let Some(local_lsp_store) = local_lsp_store {
- snapshot
- .diagnostic_groups(None)
- .into_iter()
- .filter_map(|(language_server_id, diagnostic_group)| {
- let language_server =
- local_lsp_store.running_language_server_for_id(language_server_id)?;
- let diagnostic_group = diagnostic_group.resolve::<usize>(snapshot);
- let language_server_name = language_server.name().to_string();
- let serialized = serde_json::to_value(diagnostic_group).unwrap();
- Some((language_server_name, serialized))
- })
- .collect::<Vec<_>>()
- } else {
- Vec::new()
- };
-
cx.background_spawn({
let snapshot = snapshot.clone();
async move {
- let diagnostic_groups = if diagnostic_groups.is_empty()
- || diagnostic_groups.len() >= MAX_DIAGNOSTIC_GROUPS
- {
- None
- } else {
- Some(diagnostic_groups)
- };
-
let input_excerpt = excerpt_for_cursor_position(
cursor_point,
&full_path_str,
@@ -1209,15 +1113,15 @@ pub fn gather_context(
MAX_REWRITE_TOKENS,
MAX_CONTEXT_TOKENS,
);
- let input_events = make_events_prompt();
+ let (input_events, included_events_count) = prompt_for_events();
let editable_range = input_excerpt.editable_range.to_offset(&snapshot);
let body = PredictEditsBody {
input_events,
input_excerpt: input_excerpt.prompt,
- can_collect_data,
- diagnostic_groups,
- git_info,
+ can_collect_data: false,
+ diagnostic_groups: None,
+ git_info: None,
outline: None,
speculated_output: None,
};
@@ -1225,18 +1129,19 @@ pub fn gather_context(
Ok(GatherContextOutput {
body,
editable_range,
+ included_events_count,
})
}
})
}
-fn prompt_for_events(events: &VecDeque<Event>, mut remaining_tokens: usize) -> String {
+fn prompt_for_events_impl(events: &[Event], mut remaining_tokens: usize) -> (String, usize) {
let mut result = String::new();
- for event in events.iter().rev() {
+ for (ix, event) in events.iter().rev().enumerate() {
let event_string = event.to_prompt();
- let event_tokens = tokens_for_bytes(event_string.len());
+ let event_tokens = guess_token_count(event_string.len());
if event_tokens > remaining_tokens {
- break;
+ return (result, ix);
}
if !result.is_empty() {
@@ -1245,7 +1150,7 @@ fn prompt_for_events(events: &VecDeque<Event>, mut remaining_tokens: usize) -> S
result.insert_str(0, &event_string);
remaining_tokens -= event_tokens;
}
- result
+ return (result, events.len());
}
struct RegisteredBuffer {
@@ -1356,6 +1261,7 @@ impl DataCollectionChoice {
}
}
+ #[must_use]
pub fn toggle(&self) -> DataCollectionChoice {
match self {
Self::Enabled => Self::Disabled,
@@ -1374,79 +1280,6 @@ impl From<bool> for DataCollectionChoice {
}
}
-pub struct ProviderDataCollection {
- /// When set to None, data collection is not possible in the provider buffer
- choice: Option<Entity<DataCollectionChoice>>,
- license_detection_watcher: Option<Rc<LicenseDetectionWatcher>>,
-}
-
-impl ProviderDataCollection {
- pub fn new(zeta: Entity<Zeta>, buffer: Option<Entity<Buffer>>, cx: &mut App) -> Self {
- let choice_and_watcher = buffer.and_then(|buffer| {
- let file = buffer.read(cx).file()?;
-
- if !file.is_local() || file.is_private() {
- return None;
- }
-
- let zeta = zeta.read(cx);
- let choice = zeta.data_collection_choice.clone();
-
- let license_detection_watcher = zeta
- .license_detection_watchers
- .get(&file.worktree_id(cx))
- .cloned()?;
-
- Some((choice, license_detection_watcher))
- });
-
- if let Some((choice, watcher)) = choice_and_watcher {
- ProviderDataCollection {
- choice: Some(choice),
- license_detection_watcher: Some(watcher),
- }
- } else {
- ProviderDataCollection {
- choice: None,
- license_detection_watcher: None,
- }
- }
- }
-
- pub fn can_collect_data(&self, cx: &App) -> bool {
- self.is_data_collection_enabled(cx) && self.is_project_open_source()
- }
-
- pub fn is_data_collection_enabled(&self, cx: &App) -> bool {
- self.choice
- .as_ref()
- .is_some_and(|choice| choice.read(cx).is_enabled())
- }
-
- fn is_project_open_source(&self) -> bool {
- self.license_detection_watcher
- .as_ref()
- .is_some_and(|watcher| watcher.is_project_open_source())
- }
-
- pub fn toggle(&mut self, cx: &mut App) {
- if let Some(choice) = self.choice.as_mut() {
- let new_choice = choice.update(cx, |choice, _cx| {
- let new_choice = choice.toggle();
- *choice = new_choice;
- new_choice
- });
-
- db::write_and_log(cx, move || {
- KEY_VALUE_STORE.write_kvp(
- ZED_PREDICT_DATA_COLLECTION_CHOICE.into(),
- new_choice.is_enabled().to_string(),
- )
- });
- }
- }
-}
-
async fn llm_token_retry(
llm_token: &LlmApiToken,
client: &Arc<Client>,
@@ -1477,24 +1310,23 @@ async fn llm_token_retry(
pub struct ZetaEditPredictionProvider {
zeta: Entity<Zeta>,
+ singleton_buffer: Option<Entity<Buffer>>,
pending_completions: ArrayVec<PendingCompletion, 2>,
next_pending_completion_id: usize,
current_completion: Option<CurrentEditPrediction>,
- /// None if this is entirely disabled for this provider
- provider_data_collection: ProviderDataCollection,
last_request_timestamp: Instant,
}
impl ZetaEditPredictionProvider {
pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300);
- pub fn new(zeta: Entity<Zeta>, provider_data_collection: ProviderDataCollection) -> Self {
+ pub fn new(zeta: Entity<Zeta>, singleton_buffer: Option<Entity<Buffer>>) -> Self {
Self {
zeta,
+ singleton_buffer,
pending_completions: ArrayVec::new(),
next_pending_completion_id: 0,
current_completion: None,
- provider_data_collection,
last_request_timestamp: Instant::now(),
}
}
@@ -1518,21 +1350,29 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider {
}
fn data_collection_state(&self, cx: &App) -> DataCollectionState {
- let is_project_open_source = self.provider_data_collection.is_project_open_source();
-
- if self.provider_data_collection.is_data_collection_enabled(cx) {
- DataCollectionState::Enabled {
- is_project_open_source,
+ if let Some(buffer) = &self.singleton_buffer
+ && let Some(file) = buffer.read(cx).file()
+ {
+ let is_project_open_source = self.zeta.read(cx).is_file_open_source(file, cx);
+ if self.zeta.read(cx).data_collection_choice.is_enabled() {
+ DataCollectionState::Enabled {
+ is_project_open_source,
+ }
+ } else {
+ DataCollectionState::Disabled {
+ is_project_open_source,
+ }
}
} else {
- DataCollectionState::Disabled {
- is_project_open_source,
- }
+ return DataCollectionState::Disabled {
+ is_project_open_source: false,
+ };
}
}
fn toggle_data_collection(&mut self, cx: &mut App) {
- self.provider_data_collection.toggle(cx);
+ self.zeta
+ .update(cx, |zeta, cx| zeta.toggle_data_collection_choice(cx));
}
fn usage(&self, cx: &App) -> Option<EditPredictionUsage> {
@@ -1547,16 +1387,6 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider {
) -> bool {
true
}
-
- fn needs_terms_acceptance(&self, cx: &App) -> bool {
- !self
- .zeta
- .read(cx)
- .user_store
- .read(cx)
- .has_accepted_terms_of_service()
- }
-
fn is_refreshing(&self) -> bool {
!self.pending_completions.is_empty()
}
@@ -107,11 +107,7 @@ pub fn init(cx: &mut App) -> ZetaCliAppState {
language::init(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
- language_extension::init(
- LspAccess::Noop,
- extension_host_proxy.clone(),
- languages.clone(),
- );
+ language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), node_runtime.clone(), cx);
@@ -10,7 +10,7 @@ use language::Bias;
use language::Buffer;
use language::Point;
use language_model::LlmApiToken;
-use project::{Project, ProjectPath};
+use project::{Project, ProjectPath, Worktree};
use release_channel::AppVersion;
use reqwest_client::ReqwestClient;
use std::path::{Path, PathBuf};
@@ -129,15 +129,33 @@ async fn get_context(
return Err(anyhow!("Absolute paths are not supported in --cursor"));
}
- let (project, _lsp_open_handle, buffer) = if use_language_server {
- let (project, lsp_open_handle, buffer) =
- open_buffer_with_language_server(&worktree_path, &cursor.path, app_state, cx).await?;
- (Some(project), Some(lsp_open_handle), buffer)
+ let project = cx.update(|cx| {
+ Project::local(
+ app_state.client.clone(),
+ app_state.node_runtime.clone(),
+ app_state.user_store.clone(),
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ None,
+ cx,
+ )
+ })?;
+
+ let worktree = project
+ .update(cx, |project, cx| {
+ project.create_worktree(&worktree_path, true, cx)
+ })?
+ .await?;
+
+ let (_lsp_open_handle, buffer) = if use_language_server {
+ let (lsp_open_handle, buffer) =
+ open_buffer_with_language_server(&project, &worktree, &cursor.path, cx).await?;
+ (Some(lsp_open_handle), buffer)
} else {
let abs_path = worktree_path.join(&cursor.path);
let content = smol::fs::read_to_string(&abs_path).await?;
let buffer = cx.new(|cx| Buffer::local(content, cx))?;
- (None, None, buffer)
+ (None, buffer)
};
let worktree_name = worktree_path
@@ -171,57 +189,25 @@ async fn get_context(
Some(events) => events.read_to_string().await?,
None => String::new(),
};
- // Enable gathering extra data not currently needed for edit predictions
- let can_collect_data = true;
- let git_info = None;
- let mut gather_context_output = cx
- .update(|cx| {
- gather_context(
- project.as_ref(),
- full_path_str,
- &snapshot,
- clipped_cursor,
- move || events,
- can_collect_data,
- git_info,
- cx,
- )
- })?
- .await;
-
- // Disable data collection for these requests, as this is currently just used for evals
- match gather_context_output.as_mut() {
- Ok(gather_context_output) => gather_context_output.body.can_collect_data = false,
- Err(_) => {}
- }
-
- gather_context_output
+ let prompt_for_events = move || (events, 0);
+ cx.update(|cx| {
+ gather_context(
+ full_path_str,
+ &snapshot,
+ clipped_cursor,
+ prompt_for_events,
+ cx,
+ )
+ })?
+ .await
}
pub async fn open_buffer_with_language_server(
- worktree_path: &Path,
+ project: &Entity<Project>,
+ worktree: &Entity<Worktree>,
path: &Path,
- app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
-) -> Result<(Entity<Project>, Entity<Entity<Buffer>>, Entity<Buffer>)> {
- let project = cx.update(|cx| {
- Project::local(
- app_state.client.clone(),
- app_state.node_runtime.clone(),
- app_state.user_store.clone(),
- app_state.languages.clone(),
- app_state.fs.clone(),
- None,
- cx,
- )
- })?;
-
- let worktree = project
- .update(cx, |project, cx| {
- project.create_worktree(worktree_path, true, cx)
- })?
- .await?;
-
+) -> Result<(Entity<Entity<Buffer>>, Entity<Buffer>)> {
let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath {
worktree_id: worktree.id(),
path: path.to_path_buf().into(),
@@ -238,7 +224,7 @@ pub async fn open_buffer_with_language_server(
let log_prefix = path.to_string_lossy().to_string();
wait_for_lang_server(&project, &buffer, log_prefix, cx).await?;
- Ok((project, lsp_open_handle, buffer))
+ Ok((lsp_open_handle, buffer))
}
// TODO: Dedupe with similar function in crates/eval/src/instance.rs
@@ -277,8 +263,8 @@ pub fn wait_for_lang_server(
let subscriptions = [
cx.subscribe(&lsp_store, {
let log_prefix = log_prefix.clone();
- move |_, event, _| match event {
- project::LspStoreEvent::LanguageServerUpdate {
+ move |_, event, _| {
+ if let project::LspStoreEvent::LanguageServerUpdate {
message:
client::proto::update_language_server::Variant::WorkProgress(
client::proto::LspWorkProgress {
@@ -287,8 +273,10 @@ pub fn wait_for_lang_server(
},
),
..
- } => println!("{}⟲ {message}", log_prefix),
- _ => {}
+ } = event
+ {
+ println!("{}⟲ {message}", log_prefix)
+ }
}
}),
cx.subscribe(project, {
@@ -4,7 +4,6 @@ use std::{
OnceLock, RwLock,
atomic::{AtomicU8, Ordering},
},
- usize,
};
use crate::{SCOPE_DEPTH_MAX, SCOPE_STRING_SEP_STR, Scope, ScopeAlloc, env_config, private};
@@ -23,7 +22,7 @@ pub const LEVEL_ENABLED_MAX_DEFAULT: log::LevelFilter = log::LevelFilter::Info;
/// crate that the max level is everything, so that we can dynamically enable
/// logs that are more verbose than this level without the `log` crate throwing
/// them away before we see them
-static mut LEVEL_ENABLED_MAX_STATIC: log::LevelFilter = LEVEL_ENABLED_MAX_DEFAULT;
+static LEVEL_ENABLED_MAX_STATIC: AtomicU8 = AtomicU8::new(LEVEL_ENABLED_MAX_DEFAULT as u8);
/// A cache of the true maximum log level that _could_ be printed. This is based
/// on the maximally verbose level that is configured by the user, and is used
@@ -47,7 +46,7 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[
pub fn init_env_filter(filter: env_config::EnvFilter) {
if let Some(level_max) = filter.level_global {
- unsafe { LEVEL_ENABLED_MAX_STATIC = level_max }
+ LEVEL_ENABLED_MAX_STATIC.store(level_max as u8, Ordering::Release)
}
if ENV_FILTER.set(filter).is_err() {
panic!("Environment filter cannot be initialized twice");
@@ -55,7 +54,7 @@ pub fn init_env_filter(filter: env_config::EnvFilter) {
}
pub fn is_possibly_enabled_level(level: log::Level) -> bool {
- level as u8 <= LEVEL_ENABLED_MAX_CONFIG.load(Ordering::Relaxed)
+ level as u8 <= LEVEL_ENABLED_MAX_CONFIG.load(Ordering::Acquire)
}
pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Level) -> bool {
@@ -67,7 +66,7 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le
// scope map
return false;
}
- let is_enabled_by_default = level <= unsafe { LEVEL_ENABLED_MAX_STATIC };
+ let is_enabled_by_default = level as u8 <= LEVEL_ENABLED_MAX_STATIC.load(Ordering::Acquire);
let global_scope_map = SCOPE_MAP.read().unwrap_or_else(|err| {
SCOPE_MAP.clear_poison();
err.into_inner()
@@ -93,13 +92,13 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le
pub fn refresh_from_settings(settings: &HashMap<String, String>) {
let env_config = ENV_FILTER.get();
let map_new = ScopeMap::new_from_settings_and_env(settings, env_config, DEFAULT_FILTERS);
- let mut level_enabled_max = unsafe { LEVEL_ENABLED_MAX_STATIC };
+ let mut level_enabled_max = LEVEL_ENABLED_MAX_STATIC.load(Ordering::Acquire);
for entry in &map_new.entries {
if let Some(level) = entry.enabled {
- level_enabled_max = level_enabled_max.max(level);
+ level_enabled_max = level_enabled_max.max(level as u8);
}
}
- LEVEL_ENABLED_MAX_CONFIG.store(level_enabled_max as u8, Ordering::Release);
+ LEVEL_ENABLED_MAX_CONFIG.store(level_enabled_max, Ordering::Release);
{
let mut global_map = SCOPE_MAP.write().unwrap_or_else(|err| {
@@ -152,7 +151,7 @@ fn scope_alloc_from_scope_str(scope_str: &str) -> Option<ScopeAlloc> {
if index == 0 {
return None;
}
- if let Some(_) = scope_iter.next() {
+ if scope_iter.next().is_some() {
crate::warn!(
"Invalid scope key, too many nested scopes: '{scope_str}'. Max depth is {SCOPE_DEPTH_MAX}",
);
@@ -204,12 +203,10 @@ impl ScopeMap {
.map(|(scope_str, level_filter)| (scope_str.as_str(), *level_filter))
});
- let new_filters = items_input_map
- .into_iter()
- .filter_map(|(scope_str, level_str)| {
- let level_filter = level_filter_from_str(level_str)?;
- Some((scope_str.as_str(), level_filter))
- });
+ let new_filters = items_input_map.iter().filter_map(|(scope_str, level_str)| {
+ let level_filter = level_filter_from_str(level_str)?;
+ Some((scope_str.as_str(), level_filter))
+ });
let all_filters = default_filters
.iter()
@@ -296,7 +293,7 @@ impl ScopeMap {
sub_items_start + 1,
sub_items_end,
"Expected one item: got: {:?}",
- &items[items_range.clone()]
+ &items[items_range]
);
enabled = Some(items[sub_items_start].1);
} else {
@@ -4,7 +4,7 @@ use std::{
path::PathBuf,
sync::{
Mutex, OnceLock,
- atomic::{AtomicU64, Ordering},
+ atomic::{AtomicBool, AtomicU64, Ordering},
},
};
@@ -19,17 +19,17 @@ const ANSI_GREEN: &str = "\x1b[32m";
const ANSI_BLUE: &str = "\x1b[34m";
const ANSI_MAGENTA: &str = "\x1b[35m";
-/// Whether stdout output is enabled.
-static mut ENABLED_SINKS_STDOUT: bool = false;
-/// Whether stderr output is enabled.
-static mut ENABLED_SINKS_STDERR: bool = false;
-
/// Is Some(file) if file output is enabled.
static ENABLED_SINKS_FILE: Mutex<Option<std::fs::File>> = Mutex::new(None);
static SINK_FILE_PATH: OnceLock<&'static PathBuf> = OnceLock::new();
static SINK_FILE_PATH_ROTATE: OnceLock<&'static PathBuf> = OnceLock::new();
+
+// NB: Since this can be accessed in tests, we probably should stick to atomics here.
+/// Whether stdout output is enabled.
+static ENABLED_SINKS_STDOUT: AtomicBool = AtomicBool::new(false);
+/// Whether stderr output is enabled.
+static ENABLED_SINKS_STDERR: AtomicBool = AtomicBool::new(false);
/// Atomic counter for the size of the log file in bytes.
-// TODO: make non-atomic if writing single threaded
static SINK_FILE_SIZE_BYTES: AtomicU64 = AtomicU64::new(0);
/// Maximum size of the log file before it will be rotated, in bytes.
const SINK_FILE_SIZE_BYTES_MAX: u64 = 1024 * 1024; // 1 MB
@@ -42,15 +42,13 @@ pub struct Record<'a> {
}
pub fn init_output_stdout() {
- unsafe {
- ENABLED_SINKS_STDOUT = true;
- }
+ // Use atomics here instead of just a `static mut`, since in the context
+ // of tests these accesses can be multi-threaded.
+ ENABLED_SINKS_STDOUT.store(true, Ordering::Release);
}
pub fn init_output_stderr() {
- unsafe {
- ENABLED_SINKS_STDERR = true;
- }
+ ENABLED_SINKS_STDERR.store(true, Ordering::Release);
}
pub fn init_output_file(
@@ -79,7 +77,7 @@ pub fn init_output_file(
if size_bytes >= SINK_FILE_SIZE_BYTES_MAX {
rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES);
} else {
- SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Relaxed);
+ SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release);
}
*enabled_sinks_file = Some(file);
@@ -108,7 +106,7 @@ static LEVEL_ANSI_COLORS: [&str; 6] = [
// PERF: batching
pub fn submit(record: Record) {
- if unsafe { ENABLED_SINKS_STDOUT } {
+ if ENABLED_SINKS_STDOUT.load(Ordering::Acquire) {
let mut stdout = std::io::stdout().lock();
_ = writeln!(
&mut stdout,
@@ -123,7 +121,7 @@ pub fn submit(record: Record) {
},
record.message
);
- } else if unsafe { ENABLED_SINKS_STDERR } {
+ } else if ENABLED_SINKS_STDERR.load(Ordering::Acquire) {
let mut stdout = std::io::stderr().lock();
_ = writeln!(
&mut stdout,
@@ -173,7 +171,7 @@ pub fn submit(record: Record) {
},
record.message
);
- SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::Relaxed) + writer.written
+ SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written
};
if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX {
rotate_log_file(
@@ -187,7 +185,7 @@ pub fn submit(record: Record) {
}
pub fn flush() {
- if unsafe { ENABLED_SINKS_STDOUT } {
+ if ENABLED_SINKS_STDOUT.load(Ordering::Acquire) {
_ = std::io::stdout().lock().flush();
}
let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| {
@@ -265,7 +263,7 @@ fn rotate_log_file<PathRef>(
// according to the documentation, it only fails if:
// - the file is not writeable: should never happen,
// - the size would cause an overflow (implementation specific): 0 should never cause an overflow
- atomic_size.store(0, Ordering::Relaxed);
+ atomic_size.store(0, Ordering::Release);
}
#[cfg(test)]
@@ -298,7 +296,7 @@ mod tests {
std::fs::read_to_string(&rotation_log_file_path).unwrap(),
contents,
);
- assert_eq!(size.load(Ordering::Relaxed), 0);
+ assert_eq!(size.load(Ordering::Acquire), 0);
}
/// Regression test, ensuring that if log level values change we are made aware
@@ -10,12 +10,9 @@ pub use sink::{flush, init_output_file, init_output_stderr, init_output_stdout};
pub const SCOPE_DEPTH_MAX: usize = 4;
pub fn init() {
- match try_init() {
- Err(err) => {
- log::error!("{err}");
- eprintln!("{err}");
- }
- Ok(()) => {}
+ if let Err(err) = try_init() {
+ log::error!("{err}");
+ eprintln!("{err}");
}
}
@@ -268,7 +265,7 @@ pub mod private {
pub type Scope = [&'static str; SCOPE_DEPTH_MAX];
pub type ScopeAlloc = [String; SCOPE_DEPTH_MAX];
-const SCOPE_STRING_SEP_STR: &'static str = ".";
+const SCOPE_STRING_SEP_STR: &str = ".";
const SCOPE_STRING_SEP_CHAR: char = '.';
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -3,7 +3,7 @@ use anyhow::Result;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore};
+use settings::{Settings, SettingsKey, SettingsStore, SettingsUi};
pub fn init(cx: &mut App) {
ZlogSettings::register(cx);
@@ -15,15 +15,25 @@ pub fn init(cx: &mut App) {
.detach();
}
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(
+ Clone,
+ Debug,
+ Default,
+ Serialize,
+ Deserialize,
+ PartialEq,
+ Eq,
+ JsonSchema,
+ SettingsUi,
+ SettingsKey,
+)]
+#[settings_key(key = "log")]
pub struct ZlogSettings {
#[serde(default, flatten)]
pub scopes: std::collections::HashMap<String, String>,
}
impl Settings for ZlogSettings {
- const KEY: Option<&'static str> = Some("log");
-
type FileContent = Self;
fn load(sources: settings::SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self>
@@ -16,6 +16,7 @@
- [Configuring Zed](./configuring-zed.md)
- [Configuring Languages](./configuring-languages.md)
- [Key bindings](./key-bindings.md)
+ - [All Actions](./all-actions.md)
- [Snippets](./snippets.md)
- [Themes](./themes.md)
- [Icon Themes](./icon-themes.md)
@@ -46,6 +47,7 @@
- [Overview](./ai/overview.md)
- [Agent Panel](./ai/agent-panel.md)
- [Tools](./ai/tools.md)
+ - [External Agents](./ai/external-agents.md)
- [Inline Assistant](./ai/inline-assistant.md)
- [Edit Prediction](./ai/edit-prediction.md)
- [Text Threads](./ai/text-threads.md)
@@ -155,5 +157,6 @@
- [FreeBSD](./development/freebsd.md)
- [Local Collaboration](./development/local-collaboration.md)
- [Using Debuggers](./development/debuggers.md)
+ - [Glossary](./development/glossary.md)
- [Release Process](./development/releases.md)
- [Debugging Crashes](./development/debugging-crashes.md)
@@ -30,3 +30,8 @@ To sign out of Zed, you can use either of these methods:
Your Zed account's email address is the address provided by GitHub OAuth. If you have a public email address then it will be used, otherwise your primary GitHub email address will be used. Changes to your email address on GitHub can be synced to your Zed account by [signing in to zed.dev](https://zed.dev/sign_in).
Stripe is used for billing, and will use your Zed account's email address when starting a subscription. Changes to your Zed account email address do not currently update the email address used in Stripe. See [Updating Billing Information](./ai/billing.md#updating-billing-info) for how to change this email address.
+
+## Hiding Sign In button from the interface
+
+In case the Sign In feature is not used, it's possible to hide it from the interface by using the `show_sign_in` settings property.
+Refer to [Visual Customization page](./visual-customization.md) for more details.
@@ -1,21 +1,34 @@
# Agent Panel
-The Agent Panel provides you with a surface to interact with LLMs, enabling various types of tasks, such as generating code, asking questions about your codebase, and general inquiries like emails, documentation, and more.
+The Agent Panel allows you to interact with many LLMs and coding agents that can help with various types of tasks, such as generating code, codebase understanding, and other general inquiries like writing emails, documentation, and more.
To open it, use the `agent: new thread` action in [the Command Palette](../getting-started.md#command-palette) or click the ✨ (sparkles) icon in the status bar.
-If you're using the Agent Panel for the first time, you need to have at least one LLM provider configured.
+## Getting Started
+
+If you're using the Agent Panel for the first time, you need to have at least one LLM provider or external agent configured.
You can do that by:
1. [subscribing to our Pro plan](https://zed.dev/pricing), so you have access to our hosted models
-2. or by [bringing your own API keys](./llm-providers.md#use-your-own-keys) for your desired provider
+2. [bringing your own API keys](./llm-providers.md#use-your-own-keys) for your desired provider
+3. using an external agent like [Gemini CLI](./external-agents.md#gemini-cli) or [Claude Code](./external-agents.md#claude-code)
## Overview {#overview}
-After you've configured one or more LLM providers, type at the message editor and hit `enter` to submit your prompt.
+With an LLM provider or an external agent configured, type at the message editor and hit `enter` to submit your prompt.
If you need extra room to type, you can expand the message editor with {#kb agent::ExpandMessageEditor}.
You should start to see the responses stream in with indications of [which tools](./tools.md) the model is using to fulfill your prompt.
+From this point on, you can interact with the many supported features outlined below.
+
+> Note that for external agents, like [Gemini CLI](./external-agents.md#gemini-cli) or [Claude Code](./external-agents.md#claude-code), some of the features outlined below are _not_ currently supported—for example, _restoring threads from history_, _checkpoints_, _token usage display_, _model selection_, and others. All of them should hopefully be supported in the future.
+
+### Creating New Threads {#new-thread}
+
+By default, the Agent Panel uses Zed's first-party agent.
+
+To change that, go to the plus button in the top-right of the Agent Panel and choose another option.
+You can choose to create a new [Text Thread](./text-threads.md) or, if you have [external agents](./external-agents.md) connected, you can create new threads with them.
### Editing Messages {#editing-messages}
@@ -30,7 +43,7 @@ The checkpoint button appears even if you interrupt the thread midway through an
### Navigating History {#navigating-history}
-To quickly navigate through recently opened threads, use the {#kb agent::ToggleNavigationMenu} binding, when focused on the panel's editor, or click the menu icon button at the top left of the panel to open the dropdown that shows you the six most recent threads.
+To quickly navigate through recently opened threads, use the {#kb agent::ToggleNavigationMenu} binding, when focused on the panel's editor, or click the menu icon button at the top right of the panel to open the dropdown that shows you the six most recent threads.
The items in this menu function similarly to tabs, and closing them doesn’t delete the thread; instead, it simply removes them from the recent list.
@@ -70,16 +83,13 @@ So, if your active tab had edits made by the AI, you'll see diffs with the same
Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant files, directories, and other context, manually adding context is still encouraged as a way to speed up and improve the AI's response quality.
-If you have a tab open while using the Agent Panel, that tab appears as a suggested context in form of a dashed button.
-You can also add other forms of context by either mentioning them with `@` or hitting the `+` icon button.
-
-You can even add previous threads as context by mentioning them with `@thread`, or by selecting the "New From Summary" option from the `+` menu to continue a longer conversation, keeping it within the context window.
+To add any file, directory, symbol, previous threads, rules files, or even web pages as context, type `@` to mention them in the editor.
Pasting images as context is also supported by the Agent Panel.
### Token Usage {#token-usage}
-Zed surfaces how many tokens you are consuming for your currently active thread in the panel's toolbar.
+Zed surfaces how many tokens you are consuming for your currently active thread near the profile selector in the panel's message editor.
Depending on how many pieces of context you add, your token consumption can grow rapidly.
With that in mind, once you get close to the model's context window, a banner appears below the message editor suggesting to start a new thread with the current one summarized and added as context.
@@ -145,7 +155,7 @@ Zed's UI will inform about this via a warning icon that appears close to the mod
## Text Threads {#text-threads}
-["Text threads"](./text-threads.md) present your conversation with the LLM in a different format—as raw text.
+["Text Threads"](./text-threads.md) present your conversation with the LLM in a different format—as raw text.
With text threads, you have full control over the conversation data.
You can remove and edit responses from the LLM, swap roles, and include more context earlier in the conversation.
@@ -131,7 +131,7 @@ The default value is `false`.
```json
{
"agent": {
- "always_allow_tool_actions": "true"
+ "always_allow_tool_actions": true
}
}
```
@@ -146,7 +146,7 @@ The default value is `false`.
```json
{
"agent": {
- "single_file_review": "true"
+ "single_file_review": true
}
}
```
@@ -163,7 +163,7 @@ The default value is `false`.
```json
{
"agent": {
- "play_sound_when_agent_done": "true"
+ "play_sound_when_agent_done": true
}
}
```
@@ -179,7 +179,7 @@ The default value is `false`.
```json
{
"agent": {
- "use_modifier_to_send": "true"
+ "use_modifier_to_send": true
}
}
```
@@ -194,7 +194,7 @@ It is set to `true` by default, but if set to false, the card's height is capped
```json
{
"agent": {
- "expand_edit_card": "false"
+ "expand_edit_card": false
}
}
```
@@ -207,7 +207,7 @@ It is set to `true` by default, but if set to false, the card will be fully coll
```json
{
"agent": {
- "expand_terminal_card": "false"
+ "expand_terminal_card": false
}
}
```
@@ -220,7 +220,7 @@ The default value is `true`.
```json
{
"agent": {
- "enable_feedback": "false"
+ "enable_feedback": false
}
}
```
@@ -0,0 +1,145 @@
+# External Agents
+
+Zed supports terminal-based agents through the [Agent Client Protocol (ACP)](https://agentclientprotocol.com).
+
+Currently, [Gemini CLI](https://github.com/google-gemini/gemini-cli) serves as the reference implementation.
+[Claude Code](https://www.anthropic.com/claude-code) is also included by default, and you can [add custom ACP-compatible agents](#add-custom-agents) as well.
+
+## Gemini CLI {#gemini-cli}
+
+Zed provides the ability to run [Gemini CLI](https://github.com/google-gemini/gemini-cli) directly in the [agent panel](./agent-panel.md).
+
+Under the hood we run Gemini CLI in the background, and talk to it over ACP.
+This means that you're running the real Gemini CLI, with all of the advantages of that, but you can see and interact with files in your editor.
+
+### Getting Started
+
+As of [Zed Stable v0.201.5](https://zed.dev/releases/stable/0.201.5) you should be able to use Gemini CLI directly from Zed. First open the agent panel with {#kb agent::ToggleFocus}, and then use the `+` button in the top right to start a new Gemini CLI thread.
+
+If you'd like to bind this to a keyboard shortcut, you can do so by editing your `keymap.json` file via the `zed: open keymap` command to include:
+
+```json
+[
+ {
+ "bindings": {
+ "cmd-alt-g": ["agent::NewExternalAgentThread", { "agent": "gemini" }]
+ }
+ }
+]
+```
+
+#### Installation
+
+The first time you create a Gemini CLI thread, Zed will install [@google/gemini-cli](https://github.com/google-gemini/gemini-cli). This installation is only available to Zed and is kept up to date as you use the agent.
+
+By default, Zed will use this managed version of Gemini CLI even if you have it installed globally. However, you can configure it to use a version in your `PATH` by adding this to your settings:
+
+```json
+{
+ "agent_servers": {
+ "gemini": {
+ "ignore_system_version": false
+ }
+ }
+}
+```
+
+#### Authentication
+
+After you have Gemini CLI running, you'll be prompted to choose your authentication method.
+
+Most users should click "Log in with Google". This will cause a browser window to pop up and authenticate directly with Gemini CLI. Zed does not see your OAuth or access tokens in this case.
+
+You can also use the "Gemini API Key". If you select this, and have the `GEMINI_API_KEY` set, then we will use that. Otherwise Zed will prompt you for an API key which will be stored securely in your keychain, and used to start Gemini CLI from within Zed.
+
+The "Vertex AI" option is for those who are using [Vertex AI](https://cloud.google.com/vertex-ai), and have already configured their environment correctly.
+
+For more information, see the [Gemini CLI docs](https://github.com/google-gemini/gemini-cli/blob/main/docs/index.md).
+
+### Usage
+
+Similar to Zed's first-party agent, you can use Gemini CLI to do anything that you need.
+And to give it context, you can @-mention files, recent threads, symbols, or fetch the web.
+
+> Note that some first-party agent features don't yet work with Gemini CLI: editing past messages, resuming threads from history, checkpointing, and using the agent in SSH projects.
+> We hope to add these features in the near future.
+
+## Claude Code
+
+Similar to Gemini CLI, you can also run [Claude Code](https://www.anthropic.com/claude-code) directly via Zed's [agent panel](./agent-panel.md).
+Under the hood, Zed runs Claude Code and communicates with it over ACP, through [a dedicated adapter](https://github.com/zed-industries/claude-code-acp).
+
+### Getting Started
+
+Open the agent panel with {#kb agent::ToggleFocus}, and then use the `+` button in the top right to start a new Claude Code thread.
+
+If you'd like to bind this to a keyboard shortcut, you can do so by editing your `keymap.json` file via the `zed: open keymap` command to include:
+
+```json
+[
+ {
+ "bindings": {
+ "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "claude_code" }]
+ }
+ }
+]
+```
+
+### Authentication
+
+As of version `0.202.7` (stable) and `0.203.2` (preview), authentication to Zed's Claude Code installation is decoupled entirely from Zed's agent. That is to say, an Anthropic API key added via the [Zed Agent's settings](./llm-providers.md#anthropic) will _not_ be utilized by Claude Code for authentication and billing.
+
+To ensure you're using your billing method of choice, [open a new Claude Code thread](./agent-panel.md#new-thread). Then, run `/login`, and authenticate either via API key, or via `Log in with Claude Code` to use a Claude Pro/Max subscription.
+
+#### Installation
+
+The first time you create a Claude Code thread, Zed will install [@zed-industries/claude-code-acp](https://github.com/zed-industries/claude-code-acp). This installation is only available to Zed and is kept up to date as you use the agent.
+
+Zed will always use this managed version of Claude Code even if you have it installed globally.
+
+### Usage
+
+Similar to Zed's first-party agent, you can use Claude Code to do anything that you need.
+And to give it context, you can @-mention files, recent threads, symbols, or fetch the web.
+
+In complement to talking to it [over ACP](https://agentclientprotocol.com), Zed relies on the [Claude Code SDK](https://docs.anthropic.com/en/docs/claude-code/sdk/sdk-overview) to support some of its specific features.
+However, the SDK doesn't yet expose everything needed to fully support all of them:
+
+- Slash Commands: A subset of [built-in commands](https://docs.anthropic.com/en/docs/claude-code/slash-commands#built-in-slash-commands) is supported, while [custom slash commands](https://docs.anthropic.com/en/docs/claude-code/slash-commands#custom-slash-commands) are fully supported.
+- [Subagents](https://docs.anthropic.com/en/docs/claude-code/sub-agents) are supported.
+- [Hooks](https://docs.anthropic.com/en/docs/claude-code/hooks-guide) are currently _not_ supported.
+
+> Also note that some [first-party agent](./agent-panel.md) features don't yet work with Claude Code: editing past messages, resuming threads from history, checkpointing, and using the agent in SSH projects.
+> We hope to add these features in the near future.
+
+#### CLAUDE.md
+
+Claude Code in Zed will automatically use any `CLAUDE.md` file found in your project root, project subdirectories, or root `.claude` directory.
+
+If you don't have a `CLAUDE.md` file, you can ask Claude Code to create one for you through the `init` slash command.
+
+## Add Custom Agents {#add-custom-agents}
+
+You can run any agent speaking ACP in Zed by changing your settings as follows:
+
+```json
+{
+ "agent_servers": {
+ "Custom Agent": {
+ "command": "node",
+ "args": ["~/projects/agent/index.js", "--acp"],
+ "env": {}
+ }
+ }
+}
+```
+
+This can also be useful if you're in the middle of developing a new agent that speaks the protocol and you want to debug it.
+
+You can also specify a custom path, arguments, or environment for the builtin integrations by using the `claude` and `gemini` names.
+
+## Debugging Agents
+
+When using external agents in Zed, you can access the debug view with `dev: open acp logs` from the Command Palette. This lets you see the messages being sent and received between Zed and the agent.
+
+
@@ -40,7 +40,6 @@ Ensure your credentials have the following permissions set up:
- `bedrock:InvokeModelWithResponseStream`
- `bedrock:InvokeModel`
-- `bedrock:ConverseStream`
Your IAM policy should look similar to:
@@ -52,8 +51,7 @@ Your IAM policy should look similar to:
"Effect": "Allow",
"Action": [
"bedrock:InvokeModel",
- "bedrock:InvokeModelWithResponseStream",
- "bedrock:ConverseStream"
+ "bedrock:InvokeModelWithResponseStream"
],
"Resource": "*"
}
@@ -437,21 +435,24 @@ To do it via your `settings.json`, add the following snippet under `language_mod
```json
{
"language_models": {
- "openai": {
- "api_url": "https://api.together.xyz/v1", // Using Together AI as an example
- "available_models": [
- {
- "name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
- "display_name": "Together Mixtral 8x7B",
- "max_tokens": 32768,
- "capabilities": {
- "tools": true,
- "images": false,
- "parallel_tool_calls": false,
- "prompt_cache_key": false
+ "openai_compatible": {
+ // Using Together AI as an example
+ "Together AI": {
+ "api_url": "https://api.together.xyz/v1",
+ "available_models": [
+ {
+ "name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+ "display_name": "Together Mixtral 8x7B",
+ "max_tokens": 32768,
+ "capabilities": {
+ "tools": true,
+ "images": false,
+ "parallel_tool_calls": false,
+ "prompt_cache_key": false
+ }
}
- }
- ]
+ ]
+ }
}
}
}
@@ -465,7 +466,7 @@ By default, OpenAI-compatible models inherit the following capabilities:
- `prompt_cache_key`: false (does not support `prompt_cache_key` parameter)
Note that LLM API keys aren't stored in your settings file.
-So, ensure you have it set in your environment variables (`OPENAI_API_KEY=<your api key>`) so your settings can pick it up.
+So, ensure you have it set in your environment variables (`<PROVIDER_NAME>_API_KEY=<your api key>`) so your settings can pick it up. In the example above, it would be `TOGETHER_AI_API_KEY=<your api key>`.
### OpenRouter {#openrouter}
@@ -6,13 +6,15 @@ Learn how to get started using AI with Zed and all its capabilities.
- [Configuration](./configuration.md): Learn how to set up different language model providers like Anthropic, OpenAI, Ollama, Google AI, and more.
+- [External Agents](./external-agents.md): Learn how to plug in your favorite agent into Zed.
+
- [Subscription](./subscription.md): Learn about Zed's hosted model service and other billing-related information.
- [Privacy and Security](./privacy-and-security.md): Understand how Zed handles privacy and security with AI features.
## Agentic Editing
-- [Agent Panel](./agent-panel.md): Create and manage interactions with language models.
+- [Agent Panel](./agent-panel.md): Create and manage interactions with LLM agents.
- [Rules](./rules.md): How to define rules for AI interactions.
@@ -16,7 +16,7 @@ To begin, type a message in a `You` block.
As you type, the remaining tokens count for the selected model is updated.
-Inserting text from an editor is as simple as highlighting the text and running `assistant: quote selection` ({#kb assistant::QuoteSelection}); Zed will wrap it in a fenced code block if it is code.
+Inserting text from an editor is as simple as highlighting the text and running `agent: quote selection` ({#kb agent::QuoteSelection}); Zed will wrap it in a fenced code block if it is code.

@@ -148,7 +148,7 @@ Usage: `/terminal [<number>]`
The `/selection` command inserts the selected text in the editor into the context. This is useful for referencing specific parts of your code.
-This is equivalent to the `assistant: quote selection` command ({#kb assistant::QuoteSelection}).
+This is equivalent to the `agent: quote selection` command ({#kb agent::QuoteSelection}).
Usage: `/selection`
@@ -0,0 +1,3 @@
+## All Actions
+
+{#ACTIONS_TABLE#}
@@ -251,7 +251,7 @@ You can toggle language server support globally or per-language:
}
```
-This disables the language server for Markdown files, which can be useful for performance in large documentation projects. You can configure this globally in your `~/.zed/settings.json` or inside a `.zed/settings.json` in your project directory.
+This disables the language server for Markdown files, which can be useful for performance in large documentation projects. You can configure this globally in your `~/.config/zed/settings.json` or inside a `.zed/settings.json` in your project directory.
## Formatting and Linting
@@ -104,6 +104,70 @@ Non-negative `float` values
}
```
+## Agent Font Size
+
+- Description: The font size for text in the agent panel. Inherits the UI font size if unset.
+- Setting: `agent_font_size`
+- Default: `null`
+
+**Options**
+
+`integer` values from `6` to `100` pixels (inclusive)
+
+## Allow Rewrap
+
+- Description: Controls where the {#action editor::Rewrap} action is allowed in the current language scope
+- Setting: `allow_rewrap`
+- Default: `"in_comments"`
+
+**Options**
+
+1. Allow rewrap in comments only:
+
+```json
+{
+ "allow_rewrap": "in_comments"
+}
+```
+
+2. Allow rewrap everywhere:
+
+```json
+{
+ "allow_rewrap": "everywhere"
+}
+```
+
+3. Never allow rewrap:
+
+```json
+{
+ "allow_rewrap": "never"
+}
+```
+
+Note: This setting has no effect in Vim mode, as rewrap is already allowed everywhere.
+
+## Auto Indent
+
+- Description: Whether indentation should be adjusted based on the context whilst typing. This can be specified on a per-language basis.
+- Setting: `auto_indent`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Auto Indent On Paste
+
+- Description: Whether indentation of pasted content should be adjusted based on the context
+- Setting: `auto_indent_on_paste`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
## Auto Install extensions
- Description: Define extensions to be autoinstalled or never be installed.
@@ -182,42 +246,30 @@ Define extensions which should be installed (`true`) or never installed (`false`
}
```
-## Restore on Startup
+## Autoscroll on Clicks
-- Description: Controls session restoration on startup.
-- Setting: `restore_on_startup`
-- Default: `last_session`
+- Description: Whether to scroll when clicking near the edge of the visible text area.
+- Setting: `autoscroll_on_clicks`
+- Default: `false`
**Options**
-1. Restore all workspaces that were open when quitting Zed:
-
-```json
-{
- "restore_on_startup": "last_session"
-}
-```
+`boolean` values
-2. Restore the workspace that was closed last:
+## Auto Signature Help
-```json
-{
- "restore_on_startup": "last_workspace"
-}
-```
+- Description: Show method signatures in the editor, when inside parentheses
+- Setting: `auto_signature_help`
+- Default: `false`
-3. Always start with an empty editor:
+**Options**
-```json
-{
- "restore_on_startup": "none"
-}
-```
+`boolean` values
-## Autoscroll on Clicks
+### Show Signature Help After Edits
-- Description: Whether to scroll when clicking near the edge of the visible text area.
-- Setting: `autoscroll_on_clicks`
+- Description: Whether to show the signature help after a completion or a bracket pair is inserted. If `auto_signature_help` is enabled, this setting is also treated as enabled.
+- Setting: `show_signature_help_after_edits`
- Default: `false`
**Options**
@@ -378,6 +430,24 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting
`"standard"`, `"comfortable"` or `{ "custom": float }` (`1` is compact, `2` is loose)
+## Centered Layout
+
+- Description: Configuration for the centered layout mode.
+- Setting: `centered_layout`
+- Default:
+
+```json
+"centered_layout": {
+ "left_padding": 0.2,
+ "right_padding": 0.2,
+}
+```
+
+**Options**
+
+The `left_padding` and `right_padding` options define the relative width of the
+left and right padding of the central pane from the workspace when the centered layout mode is activated. Valid values range from `0` to `0.4`.
+
## Close on File Delete
- Description: Whether to automatically close editor tabs when their corresponding files are deleted from disk.
@@ -402,23 +472,63 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed
`boolean` values
-## Centered Layout
+## Diagnostics Max Severity
-- Description: Configuration for the centered layout mode.
-- Setting: `centered_layout`
-- Default:
+- Description: Which level to use to filter out diagnostics displayed in the editor
+- Setting: `diagnostics_max_severity`
+- Default: `null`
+
+**Options**
+
+1. Allow all diagnostics (default):
```json
-"centered_layout": {
- "left_padding": 0.2,
- "right_padding": 0.2,
+{
+ "diagnostics_max_severity": null
+}
+```
+
+2. Show only errors:
+
+```json
+{
+ "diagnostics_max_severity": "error"
+}
+```
+
+3. Show errors and warnings:
+
+```json
+{
+ "diagnostics_max_severity": "warning"
+}
+```
+
+4. Show errors, warnings, and information:
+
+```json
+{
+ "diagnostics_max_severity": "information"
+}
+```
+
+5. Show all including hints:
+
+```json
+{
+ "diagnostics_max_severity": "hint"
}
```
+## Disable AI
+
+- Description: Whether to disable all AI features in Zed
+- Setting: `disable_ai`
+- Default: `false`
+
**Options**
-The `left_padding` and `right_padding` options define the relative width of the
-left and right padding of the central pane from the workspace when the centered layout mode is activated. Valid values range is from `0` to `0.4`.
+`boolean` values
## Direnv Integration
@@ -435,6 +545,42 @@ There are two options to choose from:
1. `shell_hook`: Use the shell hook to load direnv. This relies on direnv to activate upon entering the directory. Supports POSIX shells and fish.
2. `direct`: Use `direnv export json` to load direnv. This will load direnv directly without relying on the shell hook and might cause some inconsistencies. This allows direnv to work with any shell.
+## Double Click In Multibuffer
+
+- Description: What to do when multibuffer is double clicked in some of its excerpts (parts of singleton buffers)
+- Setting: `double_click_in_multibuffer`
+- Default: `"select"`
+
+**Options**
+
+1. Behave as a regular buffer and select the whole word (default):
+
+```json
+{
+ "double_click_in_multibuffer": "select"
+}
+```
+
+2. Open the excerpt clicked as a new buffer in the new tab:
+
+```json
+{
+ "double_click_in_multibuffer": "open"
+}
+```
+
+For the case of "open", regular selection behavior can be achieved by holding `alt` when double clicking.
+
+## Drop Target Size
+
+- Description: Relative size of the drop target in the editor that will open dropped file as a split pane (0-0.5). For example, 0.25 means if you drop onto the top/bottom quarter of the pane a new vertical split will be used, if you drop onto the left/right quarter of the pane a new horizontal split will be used.
+- Setting: `drop_target_size`
+- Default: `0.2`
+
+**Options**
+
+`float` values between `0` and `0.5`
+
## Edit Predictions
- Description: Settings for edit predictions.
@@ -539,11 +685,11 @@ List of `string` values
- Setting: `selection_highlight`
- Default: `true`
-## LSP Highlight Debounce
+## Rounded Selection
-- Description: The debounce delay before querying highlights from the language server based on the current cursor location.
-- Setting: `lsp_highlight_debounce`
-- Default: `75`
+- Description: Whether the text selection should have rounded corners.
+- Setting: `rounded_selection`
+- Default: `true`
## Cursor Blink
@@ -587,6 +733,32 @@ List of `string` values
"cursor_shape": "hollow"
```
+## Gutter
+
+- Description: Settings for the editor gutter
+- Setting: `gutter`
+- Default:
+
+```json
+{
+ "gutter": {
+ "line_numbers": true,
+ "runnables": true,
+ "breakpoints": true,
+ "folds": true,
+ "min_line_number_digits": 4
+ }
+}
+```
+
+**Options**
+
+- `line_numbers`: Whether to show line numbers in the gutter
+- `runnables`: Whether to show runnable buttons in the gutter
+- `breakpoints`: Whether to show breakpoints in the gutter
+- `folds`: Whether to show fold buttons in the gutter
+- `min_line_number_digits`: Minimum number of characters to reserve space for in the gutter
+
## Hide Mouse
- Description: Determines when the mouse cursor should be hidden in an editor or input box.
@@ -1255,6 +1427,16 @@ or
Each option controls displaying of a particular toolbar element. If all elements are hidden, the editor toolbar is not displayed.
+## Use System Tabs
+
+- Description: Whether to allow windows to tab together based on the user’s tabbing preference (macOS only).
+- Setting: `use_system_window_tabs`
+- Default: `false`
+
+**Options**
+
+This setting enables integration with macOS’s native window tabbing feature. When set to `true`, Zed windows can be grouped together as tabs in a single macOS window, following the system-wide tabbing preferences set by the user (such as "Always", "In Full Screen", or "Never"). This setting is only available on macOS.
+
## Enable Language Server
- Description: Whether or not to use language servers to provide code intelligence.
@@ -1275,6 +1457,36 @@ Each option controls displaying of a particular toolbar element. If all elements
`boolean` values
+## Expand Excerpt Lines
+
+- Description: The default number of lines to expand excerpts in the multibuffer by
+- Setting: `expand_excerpt_lines`
+- Default: `5`
+
+**Options**
+
+Positive `integer` values
+
+## Excerpt Context Lines
+
+- Description: The number of lines of context to provide when showing excerpts in the multibuffer.
+- Setting: `excerpt_context_lines`
+- Default: `2`
+
+**Options**
+
+Positive `integer` value between 1 and 32. Values outside of this range will be clamped to this range.
+
+## Extend Comment On Newline
+
+- Description: Whether to start a new line with a comment when a previous line is a comment as well.
+- Setting: `extend_comment_on_newline`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
## Status Bar
- Description: Control various elements in the status bar. Note that some items in the status bar have their own settings set elsewhere.
@@ -1333,79 +1545,171 @@ While other options may be changed at a runtime and should be placed under `sett
}
```
-## LSP Highlight Debounce
+## Global LSP Settings
-- Description: The debounce delay in milliseconds before querying highlights from the language server based on the current cursor location.
-- Setting: `lsp_highlight_debounce`
-- Default: `75`
+- Description: Configuration for global LSP settings that apply to all language servers
+- Setting: `global_lsp_settings`
+- Default:
+
+```json
+{
+ "global_lsp_settings": {
+ "button": true
+ }
+}
+```
**Options**
-`integer` values representing milliseconds
+- `button`: Whether to show the LSP status button in the status bar
-## Format On Save
+## LSP Highlight Debounce
-- Description: Whether or not to perform a buffer format before saving.
-- Setting: `format_on_save`
-- Default: `on`
+- Description: The debounce delay in milliseconds before querying highlights from the language server based on the current cursor location.
+- Setting: `lsp_highlight_debounce`
+- Default: `75`
-**Options**
+## Global LSP Settings
-1. `on`, enables format on save obeying `formatter` setting:
+- Description: Common language server settings.
+- Setting: `global_lsp_settings`
+- Default:
```json
-{
- "format_on_save": "on"
+"global_lsp_settings": {
+ "button": true
}
```
-2. `off`, disables format on save:
+**Options**
+
+`integer` values representing milliseconds
+
+## Features
+
+- Description: Features that can be globally enabled or disabled
+- Setting: `features`
+- Default:
```json
{
- "format_on_save": "off"
+ "features": {
+ "edit_prediction_provider": "zed"
+ }
}
```
-## Formatter
+### Edit Prediction Provider
-- Description: How to perform a buffer format.
-- Setting: `formatter`
-- Default: `auto`
+- Description: Which edit prediction provider to use
+- Setting: `edit_prediction_provider`
+- Default: `"zed"`
**Options**
-1. To use the current language server, use `"language_server"`:
+1. Use Zeta as the edit prediction provider:
```json
{
- "formatter": "language_server"
+ "features": {
+ "edit_prediction_provider": "zed"
+ }
}
```
-2. Or to use an external command, use `"external"`. Specify the name of the formatting program to run, and an array of arguments to pass to the program. The buffer's text will be passed to the program on stdin, and the formatted output should be written to stdout. For example, the following command would strip trailing spaces using [`sed(1)`](https://linux.die.net/man/1/sed):
+2. Use Copilot as the edit prediction provider:
```json
{
- "formatter": {
- "external": {
- "command": "sed",
- "arguments": ["-e", "s/ *$//"]
- }
+ "features": {
+ "edit_prediction_provider": "copilot"
}
}
```
-3. External formatters may optionally include a `{buffer_path}` placeholder which at runtime will include the path of the buffer being formatted. Formatters operate by receiving file content via standard input, reformatting it and then outputting it to standard output and so normally don't know the filename of what they are formatting. Tools like Prettier support receiving the file path via a command line argument which can then used to impact formatting decisions.
-
-WARNING: `{buffer_path}` should not be used to direct your formatter to read from a filename. Your formatter should only read from standard input and should not read or write files directly.
+3. Use Supermaven as the edit prediction provider:
```json
- "formatter": {
- "external": {
- "command": "prettier",
- "arguments": ["--stdin-filepath", "{buffer_path}"]
- }
+{
+ "features": {
+ "edit_prediction_provider": "supermaven"
+ }
+}
+```
+
+4. Turn off edit predictions across all providers
+
+```json
+{
+ "features": {
+ "edit_prediction_provider": "none"
+ }
+}
+```
+
+## Format On Save
+
+- Description: Whether or not to perform a buffer format before saving.
+- Setting: `format_on_save`
+- Default: `on`
+
+**Options**
+
+1. `on`, enables format on save obeying `formatter` setting:
+
+```json
+{
+ "format_on_save": "on"
+}
+```
+
+2. `off`, disables format on save:
+
+```json
+{
+ "format_on_save": "off"
+}
+```
+
+## Formatter
+
+- Description: How to perform a buffer format.
+- Setting: `formatter`
+- Default: `auto`
+
+**Options**
+
+1. To use the current language server, use `"language_server"`:
+
+```json
+{
+ "formatter": "language_server"
+}
+```
+
+2. Or to use an external command, use `"external"`. Specify the name of the formatting program to run, and an array of arguments to pass to the program. The buffer's text will be passed to the program on stdin, and the formatted output should be written to stdout. For example, the following command would strip trailing spaces using [`sed(1)`](https://linux.die.net/man/1/sed):
+
+```json
+{
+ "formatter": {
+ "external": {
+ "command": "sed",
+ "arguments": ["-e", "s/ *$//"]
+ }
+ }
+}
+```
+
+3. External formatters may optionally include a `{buffer_path}` placeholder which at runtime will include the path of the buffer being formatted. Formatters operate by receiving file content via standard input, reformatting it and then outputting it to standard output and so normally don't know the filename of what they are formatting. Tools like Prettier support receiving the file path via a command line argument which can then used to impact formatting decisions.
+
+WARNING: `{buffer_path}` should not be used to direct your formatter to read from a filename. Your formatter should only read from standard input and should not read or write files directly.
+
+```json
+ "formatter": {
+ "external": {
+ "command": "prettier",
+ "arguments": ["--stdin-filepath", "{buffer_path}"]
+ }
}
```
@@ -1886,6 +2190,50 @@ Example:
}
```
+## Go to Definition Fallback
+
+- Description: What to do when the {#action editor::GoToDefinition} action fails to find a definition
+- Setting: `go_to_definition_fallback`
+- Default: `"find_all_references"`
+
+**Options**
+
+1. Do nothing:
+
+```json
+{
+ "go_to_definition_fallback": "none"
+}
+```
+
+2. Find references for the same symbol (default):
+
+```json
+{
+ "go_to_definition_fallback": "find_all_references"
+}
+```
+
+## Hard Tabs
+
+- Description: Whether to indent lines using tab characters or multiple spaces.
+- Setting: `hard_tabs`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+## Helix Mode
+
+- Description: Whether or not to enable Helix mode. Enabling `helix_mode` also enables `vim_mode`. See the [Helix documentation](./helix.md) for more details.
+- Setting: `helix_mode`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
## Indent Guides
- Description: Configuration related to indent guides. Indent guides can be configured separately for each language.
@@ -1955,40 +2303,6 @@ Example:
}
```
-## Hard Tabs
-
-- Description: Whether to indent lines using tab characters or multiple spaces.
-- Setting: `hard_tabs`
-- Default: `false`
-
-**Options**
-
-`boolean` values
-
-## Multi Cursor Modifier
-
-- Description: Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier.
-- Setting: `multi_cursor_modifier`
-- Default: `alt`
-
-**Options**
-
-1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
-
-```json
-{
- "multi_cursor_modifier": "alt"
-}
-```
-
-2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
-
-```json
-{
- "multi_cursor_modifier": "cmd_or_ctrl" // alias: "cmd", "ctrl"
-}
-```
-
## Hover Popover Enabled
- Description: Whether or not to show the informational hover box when moving the mouse over symbols in the editor.
@@ -2069,7 +2383,7 @@ Example:
**Options**
-Run the `icon theme selector: toggle` action in the command palette to see a current list of valid icon themes names.
+Run the {#action icon_theme_selector::Toggle} action in the command palette to see a current list of valid icon themes names.
### Light
@@ -2079,7 +2393,51 @@ Run the `icon theme selector: toggle` action in the command palette to see a cur
**Options**
-Run the `icon theme selector: toggle` action in the command palette to see a current list of valid icon themes names.
+Run the {#action icon_theme_selector::Toggle} action in the command palette to see a current list of valid icon themes names.
+
+## Image Viewer
+
+- Description: Settings for image viewer functionality
+- Setting: `image_viewer`
+- Default:
+
+```json
+{
+ "image_viewer": {
+ "unit": "binary"
+ }
+}
+```
+
+**Options**
+
+### Unit
+
+- Description: The unit for image file sizes
+- Setting: `unit`
+- Default: `"binary"`
+
+**Options**
+
+1. Use binary units (KiB, MiB):
+
+```json
+{
+ "image_viewer": {
+ "unit": "binary"
+ }
+}
+```
+
+2. Use decimal units (KB, MB):
+
+```json
+{
+ "image_viewer": {
+ "unit": "decimal"
+ }
+}
+```
## Inlay hints
@@ -2181,6 +2539,24 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif
}
```
+## JSX Tag Auto Close
+
+- Description: Whether to automatically close JSX tags
+- Setting: `jsx_tag_auto_close`
+- Default:
+
+```json
+{
+ "jsx_tag_auto_close": {
+ "enabled": true
+ }
+}
+```
+
+**Options**
+
+- `enabled`: Whether to enable automatic JSX tag closing
+
## Languages
- Description: Configuration for specific languages.
@@ -2222,139 +2598,544 @@ The following settings can be overridden for each specific language:
These values take in the same options as the root-level settings with the same name.
-## Network Proxy
+## Language Models
+
+- Description: Configuration for language model providers
+- Setting: `language_models`
+- Default:
+
+```json
+{
+ "language_models": {
+ "anthropic": {
+ "api_url": "https://api.anthropic.com"
+ },
+ "google": {
+ "api_url": "https://generativelanguage.googleapis.com"
+ },
+ "ollama": {
+ "api_url": "http://localhost:11434"
+ },
+ "openai": {
+ "api_url": "https://api.openai.com/v1"
+ }
+ }
+}
+```
+
+**Options**
+
+Configuration for various AI model providers including API URLs and authentication settings.
+
+## Line Indicator Format
+
+- Description: Format for line indicator in the status bar
+- Setting: `line_indicator_format`
+- Default: `"short"`
+
+**Options**
+
+1. Short format:
+
+```json
+{
+ "line_indicator_format": "short"
+}
+```
+
+2. Long format:
+
+```json
+{
+ "line_indicator_format": "long"
+}
+```
+
+## Linked Edits
+
+- Description: Whether to perform linked edits of associated ranges, if the language server supports it. For example, when editing opening `<html>` tag, the contents of the closing `</html>` tag will be edited as well.
+- Setting: `linked_edits`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## LSP Document Colors
+
+- Description: Whether to show document color information from the language server
+- Setting: `lsp_document_colors`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Max Tabs
+
+- Description: Maximum number of tabs to show in the tab bar
+- Setting: `max_tabs`
+- Default: `null`
+
+**Options**
+
+Positive `integer` values or `null` for unlimited tabs
+
+## Middle Click Paste (Linux only)
+
+- Description: Enable middle-click paste on Linux
+- Setting: `middle_click_paste`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Multi Cursor Modifier
+
+- Description: Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt so that they do not conflict with the multicursor modifier.
+- Setting: `multi_cursor_modifier`
+- Default: `alt`
+
+**Options**
+
+1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
+
+```json
+{
+ "multi_cursor_modifier": "alt"
+}
+```
+
+2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
+
+```json
+{
+ "multi_cursor_modifier": "cmd_or_ctrl" // alias: "cmd", "ctrl"
+}
+```
+
+## Node
+
+- Description: Configuration for Node.js integration
+- Setting: `node`
+- Default:
+
+```json
+{
+ "node": {
+ "ignore_system_version": false,
+ "path": null,
+ "npm_path": null
+ }
+}
+```
+
+**Options**
+
+- `ignore_system_version`: Whether to ignore the system Node.js version
+- `path`: Custom path to Node.js binary
+- `npm_path`: Custom path to npm binary
+
+## Network Proxy
+
+- Description: Configure a network proxy for Zed.
+- Setting: `proxy`
+- Default: `null`
+
+**Options**
+
+The proxy setting must contain a URL to the proxy.
+
+The following URI schemes are supported:
+
+- `http`
+- `https`
+- `socks4` - SOCKS4 proxy with local DNS
+- `socks4a` - SOCKS4 proxy with remote DNS
+- `socks5` - SOCKS5 proxy with local DNS
+- `socks5h` - SOCKS5 proxy with remote DNS
+
+`http` will be used when no scheme is specified.
+
+By default no proxy will be used, or Zed will attempt to retrieve proxy settings from environment variables, such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`, `no_proxy` and `NO_PROXY`.
+
+For example, to set an `http` proxy, add the following to your settings:
+
+```json
+{
+ "proxy": "http://127.0.0.1:10809"
+}
+```
+
+Or to set a `socks5` proxy:
+
+```json
+{
+ "proxy": "socks5h://localhost:10808"
+}
+```
+
+If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` environment variable. This accepts a comma-separated list of hostnames, host suffixes, IPv4/IPv6 addresses or blocks that should not use the proxy. For example if your environment included `NO_PROXY="google.com, 192.168.1.0/24"` all hosts in `192.168.1.*`, `google.com` and `*.google.com` would bypass the proxy. See [reqwest NoProxy docs](https://docs.rs/reqwest/latest/reqwest/struct.NoProxy.html#method.from_string) for more.
+
+## On Last Window Closed
+
+- Description: What to do when the last window is closed
+- Setting: `on_last_window_closed`
+- Default: `"platform_default"`
+
+**Options**
+
+1. Use platform default behavior:
+
+```json
+{
+ "on_last_window_closed": "platform_default"
+}
+```
+
+2. Always quit the application:
+
+```json
+{
+ "on_last_window_closed": "quit_app"
+}
+```
+
+## Profiles
+
+- Description: Configuration profiles that can be applied on top of existing settings
+- Setting: `profiles`
+- Default: `{}`
+
+**Options**
+
+Configuration object for defining settings profiles. Example:
+
+```json
+{
+ "profiles": {
+ "presentation": {
+ "buffer_font_size": 20,
+ "ui_font_size": 18,
+ "theme": "One Light"
+ }
+ }
+}
+```
+
+## Preview tabs
+
+- Description:
+ Preview tabs allow you to open files in preview mode, where they close automatically when you switch to another file unless you explicitly pin them. This is useful for quickly viewing files without cluttering your workspace. Preview tabs display their file names in italics. \
+ There are several ways to convert a preview tab into a regular tab:
+
+ - Double-clicking on the file
+ - Double-clicking on the tab header
+ - Using the {#action project_panel::OpenPermanent} action
+ - Editing the file
+ - Dragging the file to a different pane
+
+- Setting: `preview_tabs`
+- Default:
+
+```json
+"preview_tabs": {
+ "enabled": true,
+ "enable_preview_from_file_finder": false,
+ "enable_preview_from_code_navigation": false,
+}
+```
+
+### Enable preview from file finder
+
+- Description: Determines whether to open files in preview mode when selected from the file finder.
+- Setting: `enable_preview_from_file_finder`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+### Enable preview from code navigation
+
+- Description: Determines whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
+- Setting: `enable_preview_from_code_navigation`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+## File Finder
+
+### File Icons
+
+- Description: Whether to show file icons in the file finder.
+- Setting: `file_icons`
+- Default: `true`
+
+### Modal Max Width
+
+- Description: Max-width of the file finder modal. It can take one of these values: `small`, `medium`, `large`, `xlarge`, and `full`.
+- Setting: `modal_max_width`
+- Default: `small`
+
+### Skip Focus For Active In Search
+
+- Description: Determines whether the file finder should skip focus for the active file in search results.
+- Setting: `skip_focus_for_active_in_search`
+- Default: `true`
+
+## Pane Split Direction Horizontal
+
+- Description: The direction that you want to split panes horizontally
+- Setting: `pane_split_direction_horizontal`
+- Default: `"up"`
+
+**Options**
+
+1. Split upward:
+
+```json
+{
+ "pane_split_direction_horizontal": "up"
+}
+```
+
+2. Split downward:
+
+```json
+{
+ "pane_split_direction_horizontal": "down"
+}
+```
+
+## Pane Split Direction Vertical
+
+- Description: The direction that you want to split panes vertically
+- Setting: `pane_split_direction_vertical`
+- Default: `"left"`
+
+**Options**
+
+1. Split to the left:
+
+```json
+{
+ "pane_split_direction_vertical": "left"
+}
+```
+
+2. Split to the right:
+
+```json
+{
+ "pane_split_direction_vertical": "right"
+}
+```
+
+## Preferred Line Length
+
+- Description: The column at which to soft-wrap lines, for buffers where soft-wrap is enabled.
+- Setting: `preferred_line_length`
+- Default: `80`
+
+**Options**
+
+`integer` values
+
+## Private Files
+
+- Description: Globs to match against file paths to determine if a file is private
+- Setting: `private_files`
+- Default: `["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"]`
+
+**Options**
+
+List of `string` glob patterns
+
+## Projects Online By Default
+
+- Description: Whether or not to show the online projects view by default.
+- Setting: `projects_online_by_default`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Read SSH Config
+
+- Description: Whether to read SSH configuration files
+- Setting: `read_ssh_config`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Redact Private Values
+
+- Description: Hide the values of variables from visual display in private files
+- Setting: `redact_private_values`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+## Relative Line Numbers
+
+- Description: Whether to show relative line numbers in the gutter
+- Setting: `relative_line_numbers`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+## Remove Trailing Whitespace On Save
+
+- Description: Whether or not to remove any trailing whitespace from lines of a buffer before saving it.
+- Setting: `remove_trailing_whitespace_on_save`
+- Default: `true`
+
+**Options**
+
+`boolean` values
+
+## Resize All Panels In Dock
-- Description: Configure a network proxy for Zed.
-- Setting: `proxy`
-- Default: `null`
+- Description: Whether to resize all the panels in a dock when resizing the dock. Can be a combination of "left", "right" and "bottom".
+- Setting: `resize_all_panels_in_dock`
+- Default: `["left"]`
**Options**
-The proxy setting must contain a URL to the proxy.
+List of strings containing any combination of:
-The following URI schemes are supported:
+- `"left"`: Resize left dock panels together
+- `"right"`: Resize right dock panels together
+- `"bottom"`: Resize bottom dock panels together
-- `http`
-- `https`
-- `socks4` - SOCKS4 proxy with local DNS
-- `socks4a` - SOCKS4 proxy with remote DNS
-- `socks5` - SOCKS5 proxy with local DNS
-- `socks5h` - SOCKS5 proxy with remote DNS
+## Restore on File Reopen
-`http` will be used when no scheme is specified.
+- Description: Whether to attempt to restore previous file's state when opening it again. The state is stored per pane.
+- Setting: `restore_on_file_reopen`
+- Default: `true`
-By default no proxy will be used, or Zed will attempt to retrieve proxy settings from environment variables, such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`, `no_proxy` and `NO_PROXY`.
+**Options**
-For example, to set an `http` proxy, add the following to your settings:
+`boolean` values
+
+## Restore on Startup
+
+- Description: Controls session restoration on startup.
+- Setting: `restore_on_startup`
+- Default: `last_session`
+
+**Options**
+
+1. Restore all workspaces that were open when quitting Zed:
```json
{
- "proxy": "http://127.0.0.1:10809"
+ "restore_on_startup": "last_session"
}
```
-Or to set a `socks5` proxy:
+2. Restore the workspace that was closed last:
```json
{
- "proxy": "socks5h://localhost:10808"
+ "restore_on_startup": "last_workspace"
}
```
-If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` environment variable. This accepts a comma-separated list of hostnames, host suffixes, IPv4/IPv6 addresses or blocks that should not use the proxy. For example if your environment included `NO_PROXY="google.com, 192.168.1.0/24"` all hosts in `192.168.1.*`, `google.com` and `*.google.com` would bypass the proxy. See [reqwest NoProxy docs](https://docs.rs/reqwest/latest/reqwest/struct.NoProxy.html#method.from_string) for more.
-
-## Preview tabs
-
-- Description:
- Preview tabs allow you to open files in preview mode, where they close automatically when you switch to another file unless you explicitly pin them. This is useful for quickly viewing files without cluttering your workspace. Preview tabs display their file names in italics. \
- There are several ways to convert a preview tab into a regular tab:
-
- - Double-clicking on the file
- - Double-clicking on the tab header
- - Using the `project_panel::OpenPermanent` action
- - Editing the file
- - Dragging the file to a different pane
-
-- Setting: `preview_tabs`
-- Default:
+3. Always start with an empty editor:
```json
-"preview_tabs": {
- "enabled": true,
- "enable_preview_from_file_finder": false,
- "enable_preview_from_code_navigation": false,
+{
+ "restore_on_startup": "none"
}
```
-### Enable preview from file finder
+## Scroll Beyond Last Line
-- Description: Determines whether to open files in preview mode when selected from the file finder.
-- Setting: `enable_preview_from_file_finder`
-- Default: `false`
+- Description: Whether the editor will scroll beyond the last line
+- Setting: `scroll_beyond_last_line`
+- Default: `"one_page"`
**Options**
-`boolean` values
+1. The editor will scroll beyond the last line by one page:
-### Enable preview from code navigation
+```json
+{
+ "scroll_beyond_last_line": "one_page"
+}
+```
-- Description: Determines whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
-- Setting: `enable_preview_from_code_navigation`
-- Default: `false`
+2. The editor will scroll beyond the last line by the same amount of lines as `vertical_scroll_margin`:
-**Options**
+```json
+{
+ "scroll_beyond_last_line": "vertical_scroll_margin"
+}
+```
-`boolean` values
+3. The editor will not scroll beyond the last line:
-## File Finder
+```json
+{
+ "scroll_beyond_last_line": "off"
+}
+```
-### File Icons
+**Options**
-- Description: Whether to show file icons in the file finder.
-- Setting: `file_icons`
-- Default: `true`
+`boolean` values
-### Modal Max Width
+## Scroll Sensitivity
-- Description: Max-width of the file finder modal. It can take one of these values: `small`, `medium`, `large`, `xlarge`, and `full`.
-- Setting: `modal_max_width`
-- Default: `small`
+- Description: Scroll sensitivity multiplier. This multiplier is applied to both the horizontal and vertical delta values while scrolling.
+- Setting: `scroll_sensitivity`
+- Default: `1.0`
-### Skip Focus For Active In Search
+**Options**
-- Description: Determines whether the file finder should skip focus for the active file in search results.
-- Setting: `skip_focus_for_active_in_search`
-- Default: `true`
+Positive `float` values
-## Preferred Line Length
+### Fast Scroll Sensitivity
-- Description: The column at which to soft-wrap lines, for buffers where soft-wrap is enabled.
-- Setting: `preferred_line_length`
-- Default: `80`
+- Description: Scroll sensitivity multiplier for fast scrolling. This multiplier is applied to both the horizontal and vertical delta values while scrolling. Fast scrolling happens when a user holds the alt or option key while scrolling.
+- Setting: `fast_scroll_sensitivity`
+- Default: `4.0`
**Options**
-`integer` values
+Positive `float` values
-## Projects Online By Default
+### Horizontal Scroll Margin
-- Description: Whether or not to show the online projects view by default.
-- Setting: `projects_online_by_default`
-- Default: `true`
+- Description: The number of characters to keep on either side when scrolling with the mouse
+- Setting: `horizontal_scroll_margin`
+- Default: `5`
**Options**
-`boolean` values
+Non-negative `integer` values
-## Remove Trailing Whitespace On Save
+### Vertical Scroll Margin
-- Description: Whether or not to remove any trailing whitespace from lines of a buffer before saving it.
-- Setting: `remove_trailing_whitespace_on_save`
-- Default: `true`
+- Description: The number of lines to keep above/below the cursor when scrolling with the keyboard
+- Setting: `vertical_scroll_margin`
+- Default: `3`
**Options**
-`boolean` values
+Non-negative `integer` values
## Search
@@ -78,11 +78,10 @@ While configuration fields are debug adapter-dependent, most adapters support th
// The debug adapter that Zed should use to debug the program
"adapter": "Example adapter name",
// Request:
- // - launch: Zed will launch the program if specified or shows a debug terminal with the right configuration
- // - attach: Zed will attach to a running program to debug it or when the process_id is not specified we will show a process picker (only supported for node currently)
+ // - launch: Zed will launch the program if specified, or show a debug terminal with the right configuration
+ // - attach: Zed will attach to a running program to debug it, or when the process_id is not specified, will show a process picker (only supported for node currently)
"request": "launch",
- // program: The program that you want to debug
- // This field supports path resolution with ~ or . symbols
+ // The program to debug. This field supports path resolution with ~ or . symbols.
"program": "path_to_program",
// cwd: defaults to the current working directory of your project ($ZED_WORKTREE_ROOT)
"cwd": "$ZED_WORKTREE_ROOT"
@@ -148,6 +147,8 @@ The debug adapter will then stop whenever an exception of a given kind occurs. W
## Settings
+The settings for the debugger are grouped under the `debugger` key in `settings.json`:
+
- `dock`: Determines the position of the debug panel in the UI.
- `stepping_granularity`: Determines the stepping granularity.
- `save_breakpoints`: Whether the breakpoints should be reused across Zed sessions.
@@ -0,0 +1,116 @@
+# Zed Development: Glossary
+
+These are some terms and structures frequently used throughout the zed codebase.
+
+This is a best effort list and a work in progress.
+
+<!--
+TBD: Glossary Improvement
+
+Questions:
+
+- Can we generate this list from doc comments throughout zed?
+- We should have a section that shows the various UI parts and their names. (Can't do that in the channel.)
+-->
+
+## Naming conventions
+
+These are generally true for the whole codebase. Note that Name can be anything
+here. An example would be `AnyElement` and `LspStore`.
+
+- `AnyName`: A type erased version of _name_. Think `Box<dyn NameTrait>`.
+- `NameStore`: A wrapper type which abstracts over whether operations are running locally or on a remote.
+
+## GPUI
+
+### State management
+
+- `App`: A singleton which holds the full application state including all the entities. Crucially: `App` is not `Send`, which means that `App` only exists on the thread that created it (which is the main/UI thread, usually). Thus, if you see a `&mut App`, know that you're on UI thread.
+- `Context`: A wrapper around the `App` struct with specialized behavior for a specific `Entity`. Think of it as `(&mut App, Entity<V>)`. The specialized behavior is surfaced in the API surface of `Context`. E.g., `App::spawn` takes an `AsyncFnOnce(AsyncApp) -> Ret`, whereas `Context::spawn` takes an `AsyncFnOnce(WeakEntity<V>, AsyncApp) -> Ret`.
+- `AsyncApp`: An owned version of `App` for use in async contexts. This type is _still_ not `Send` (so `AsyncApp` = you're on the main thread) and any use of it may be fallible (to account for the fact that the `App` might've been terminated by the time this closure runs).
+ The convenience of `AsyncApp` lies in the fact that you usually interface with `App` via `&mut App`, which would be inconvenient to use with async closures; `AsyncApp` is owned, so you can use it in async closures with no sweat.
+- `AppContext` A trait which abstracts over `App`, `AsyncApp` & `Context` and their Test versions.
+- `Task`: A future running or scheduled to run on the background or foreground
+  executor. In contrast to regular Futures, Tasks do not need `.await` to start running. You do need to await them to get the result of the task.
+- `Executor`: Used to spawn tasks that run either on the foreground or background thread. Try to run the tasks on the background thread.
+ - `BackgroundExecutor`: A threadpool running `Task`s.
+ - `ForegroundExecutor`: The main thread running `Task`s.
+- `Entity`: A strong, well-typed reference to a struct which is managed by gpui. Effectively a pointer/map key into the `App::EntityMap`.
+- `WeakEntity`: A runtime checked reference to an `Entity` which may no longer exist. Similar to [`std::rc::Weak`](https://doc.rust-lang.org/std/rc/struct.Weak.html).
+- `Global`: A singleton type which has only one value, that is stored in the `App`.
+- `Event`: A datatype which can be sent by an `Entity` to subscribers
+- `Action`: An event that represents a user's keyboard input that can be handled by listeners
+ Example: `file finder: toggle`
+- `Observing`: reacting entities notifying they've changed
+- `Subscription`: An event handler that is used to react to the changes of state in the application.
+ 1. Emitted event handling
+ 2. Observing `{new,release,on notify}` of an entity
+
+### UI
+
+- `View`: An `Entity` which can produce an `Element` through its implementation of `Render`.
+- `Element`: A type that can be laid out and painted to the screen.
+- `element expression`: An expression that builds an element tree, example:
+
+```rust
+h_flex()
+ .id(text[i])
+ .relative()
+ .when(selected, |this| {
+ this.child(
+ div()
+ .h_4()
+ .absolute()
+ etc etc
+```
+
+- `Component`: A builder which can be rendered turning it into an `Element`.
+- `Dispatch tree`: TODO
+- `Focus`: The place where keystrokes are handled first
+- `Focus tree`: Path from the place that's the current focus to the UI Root. Example <img> TODO
+
+## Zed UI
+
+- `Window`: A struct in zed representing a zed window in your desktop environment (see image below). There can be multiple if you have multiple zed instances open. Mostly passed around for rendering.
+- `Modal`: A UI element that floats on top of the rest of the UI
+- `Picker`: A struct representing a list of items floating on top of the UI (Modal). You can select an item and confirm. What happens on select or confirm is determined by the picker's delegate. (The 'Model' in the image below is a picker.)
+- `PickerDelegate`: A trait used to specialize behavior for a `Picker`. The `Picker` stores the `PickerDelegate` in the field delegate.
+- `Center`: The middle of the zed window, the center is split into multiple `Pane`s. In the codebase this is a field on the `Workspace` struct. (see image below).
+- `Pane`: An area in the `Center` where we can place items, such as an editor, multi-buffer or terminal (see image below).
+- `Panel`: An `Entity` implementing the `Panel` trait. These can be placed in a `Dock`. In the image below we see the `ProjectPanel` in the left dock, the `DebugPanel` in the bottom dock, and `AgentPanel` in the right dock. Note `Editor` does not implement `Panel` and hence is not a `Panel`.
+- `Dock`: A UI element similar to a `Pane` which can be opened and hidden. There can be up to 3 docks open at a time: left, right, and below the center. A dock contains one or more `Panel`s, not `Pane`s. (see image).
+ <img width="1921" height="1080" alt="image" src="https://github.com/user-attachments/assets/2cb1170e-2850-450d-89bb-73622b5d07b2" />
+
+- `Project`: One or more `Worktree`s
+- `Worktree`: Represents either local or remote files.
+ <img width="552" height="1118" alt="image" src="https://github.com/user-attachments/assets/da5c58e4-b02e-4038-9736-27e3509fdbfa" />
+
+- [Multibuffer](https://zed.dev/docs/multibuffers): A list of Editors, a multi-buffer allows editing multiple files simultaneously. A multi-buffer opens when an operation in Zed returns multiple locations, examples: _search_ or _go to definition_. See project search in the image below.
+
+<img width="800" height="886" alt="image" src="https://github.com/user-attachments/assets/d59dcecd-8ab6-4172-8fb6-b1fc3c3eaf9d" />
+
+## Editor
+
+- `Editor`: _The_ text editor, nearly everything in zed is an `Editor`, even single line inputs. Each pane in the image above contains one or more `Editor` instances.
+- `Workspace`: The root of the window
+- `Entry`: A file, dir, pending dir or unloaded dir.
+- `Buffer`: The in-memory representation of a 'file' together with relevant data such as syntax trees, git status and diagnostics.
+- `pending selection`: You have the mouse button down and you're dragging, but you have not yet released.
+
+## Collab
+
+- `Collab session`: Multiple users working in a shared `Project`
+- `Upstream client`: The zed client which has shared their workspace
+- `Downstream client`: The zed client joining a shared workspace
+
+## Debugger
+
+- `DapStore`: Is an entity that manages debugger sessions
+- `debugger::Session`: Is an entity that manages the lifecycle of a debug session and communication with DAPS
+- `BreakpointStore`: Is an entity that manages breakpoints states in local and remote instances of Zed
+- `DebugSession`: Manages a debug session's UI and running state
+- `RunningState`: Directly manages all the views of a debug session
+- `VariableList`: The variable and watch list view of a debug session
+- `Console`: TODO
+- `Terminal`: TODO
+- `BreakpointList`: TODO
@@ -10,7 +10,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed).
- Install [Xcode](https://apps.apple.com/us/app/xcode/id497799835?mt=12) from the macOS App Store, or from the [Apple Developer](https://developer.apple.com/download/all/) website. Note this requires a developer account.
-> Ensure you launch Xcode after installing, and install the macOS components, which is the default option.
+> Ensure you launch Xcode after installing, and install the macOS components, which is the default option. If you are on macOS 26 (Tahoe) you will need to use `--features gpui/runtime_shaders` or add the feature in the root `Cargo.toml`
- Install [Xcode command line tools](https://developer.apple.com/xcode/resources/)
@@ -33,7 +33,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed).
### Backend Dependencies (optional) {#backend-dependencies}
-If you are looking to develop Zed collaboration features using a local collabortation server, please see: [Local Collaboration](./local-collaboration.md) docs.
+If you are looking to develop Zed collaboration features using a local collaboration server, please see: [Local Collaboration](./local-collaboration.md) docs.
## Building Zed from Source
@@ -69,6 +69,8 @@ xcrun: error: unable to find utility "metal", not a developer tool or in PATH
Try `sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer`
+If you're on macOS 26, try `xcodebuild -downloadComponent MetalToolchain`
+
### Cargo errors claiming that a dependency is using unstable features
Try `cargo clean` and `cargo build`.
@@ -1,49 +1,98 @@
# Zed Releases
-Zed currently maintains two public releases for macOS:
+Read about Zed's release channels [here](https://zed.dev/faq#what-are-the-release-channels).
-- [Stable](https://zed.dev/download): This is the primary version that people download and use.
-- [Preview](https://zed.dev/releases/preview): which receives updates a week ahead of Stable for early adopters.
+## Wednesday release process
-Typically we cut a new minor release every Wednesday. The current Preview becomes Stable, and the new Preview contains everything on main up until that point.
+You will need write access to the Zed repository to do this.
-If bugs are found and fixed during the week, they may be cherry-picked into the release branches and so new patch versions for preview and stable can become available throughout the week.
+Credentials for various services used in this process can be found in 1Password.
-## Wednesday release process
+---
-You will need write access to the Zed repository to do this:
+1. Checkout `main` and ensure your working copy is clean.
+
+1. Run `git fetch && git pull` to ensure you have the latest commits locally.
+
+1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote.
+
+1. Run `./script/get-stable-channel-release-notes`.
+
+ - Follow the instructions at the end of the script and aggregate the release notes into one structure.
+
+1. Run `./script/bump-zed-minor-versions`.
+
+ - Push the tags and branches as instructed.
+
+1. Run `./script/get-preview-channel-changes`.
+
+ - Take the script's output and build release notes by organizing each release note line into a category.
+ - Use a prior release for the initial outline.
+ - Make sure to append the `Credit` line, if present, to the end of the release note line.
+
+1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste both preview and stable release notes into each and **save**.
+
+ - **Do not publish the drafts, yet.**
+
+1. Check the release assets.
+
+ - Ensure the stable and preview release jobs have finished without error.
+ - Ensure each draft has the proper number of assets—releases currently have 10 assets each.
+ - Download the artifacts for each release draft and test that you can run them locally.
-- Checkout `main` and ensure your working copy is clean.
-- Run `./script/bump-zed-minor-versions` and push the tags
- and branches as instructed.
-- Wait for the builds to appear on [the Releases tab on GitHub](https://github.com/zed-industries/zed/releases) (typically takes around 30 minutes)
-- While you're waiting:
- - Start creating the new release notes for preview. You can start with the output of `./script/get-preview-channel-changes`.
- - Start drafting the release tweets.
-- Once the builds are ready:
- - Copy the release notes from the previous Preview release(s) to the current Stable release.
- - Download the artifacts for each release and test that you can run them locally.
- - Publish the releases on GitHub.
- - Tweet the tweets (Credentials are in 1Password).
+1. Publish stable / preview drafts, one at a time.
+
+ - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
+ The release will be public once the rebuild has completed.
+
+1. Publish the release email that has been sent to [Kit](https://kit.com).
+
+ - Make sure to double-check that the email is correct before publishing.
+ - We sometimes correct things here and there that didn't translate from GitHub's renderer to Kit's.
+
+1. Build social media posts based on the popular items in stable.
+ - You can use the [prior week's post chain](https://zed.dev/channel/tweets-23331) as your outline.
+ - Stage the copy and assets using [Buffer](https://buffer.com), for both X and BlueSky.
+ - Publish both, one at a time, ensuring both are posted to each respective platform.
## Patch release process
-If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches. If your PR fixes a regression in recently released code, you should cherry-pick it to preview.
+If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches.
+If your PR fixes a regression in recently released code, you should cherry-pick it to preview.
You will need write access to the Zed repository to do this:
-- Send a PR containing your change to `main` as normal.
-- Leave a comment on the PR `/cherry-pick v0.XXX.x`. Once your PR is merged, the GitHub bot will send a PR to the branch.
- - In case of a merge conflict, you will have to cherry-pick manually and push the change to the `v0.XXX.x` branch.
-- After the commits are cherry-picked onto the branch, run `./script/trigger-release {preview|stable}`. This will bump the version numbers, create a new release tag, and kick off a release build.
- - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml):
- 
-- Wait for the builds to appear on [the Releases tab on GitHub](https://github.com/zed-industries/zed/releases) (typically takes around 30 minutes)
-- Proof-read and edit the release notes as needed.
-- Download the artifacts for each release and test that you can run them locally.
-- Publish the release.
+---
+
+1. Send a PR containing your change to `main` as normal.
+
+1. Once it is merged, cherry-pick the commit locally to either of the release branches (`v0.XXX.x`).
+
+ - In some cases, you may have to handle a merge conflict.
+ More often than not, this will happen when cherry-picking to stable, as the stable branch is more "stale" than the preview branch.
+
+1. After the commit is cherry-picked, run `./script/trigger-release {preview|stable}`.
+ This will bump the version numbers, create a new release tag, and kick off a release build.
+
+ - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml):
+ 
+
+1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), proofread and edit the release notes as needed and **save**.
+
+ - **Do not publish the drafts, yet.**
+
+1. Check the release assets.
+
+ - Ensure the stable / preview release jobs have finished without error.
+ - Ensure each draft has the proper number of assets—releases currently have 10 assets each.
+ - Download the artifacts for each release draft and test that you can run them locally.
+
+1. Publish stable / preview drafts, one at a time.
+ - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
+ The release will be public once the rebuild has completed.
## Nightly release process
In addition to the public releases, we also have a nightly build that we encourage employees to use.
-Nightly is released by cron once a day, and can be shipped as often as you'd like. There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`.
+Nightly is released by cron once a day, and can be shipped as often as you'd like.
+There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`.
@@ -114,20 +114,9 @@ cargo test --workspace
## Installing from msys2
-[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). The package is available for UCRT64, CLANG64 and CLANGARM64 repositories. To download it, run
+Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed).
-```sh
-pacman -Syu
-pacman -S $MINGW_PACKAGE_PREFIX-zed
-```
-
-You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details on build process.
-
-> Please, report any issue in [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed) first.
-
-See also MSYS2 [documentation page](https://www.msys2.org/docs/ides-editors).
-
-Note that `collab` is not supported for MSYS2.
+Please refer to [MSYS2 documentation](https://www.msys2.org/docs/ides-editors/#zed) first.
## Troubleshooting
@@ -51,7 +51,7 @@ To configure, use
```json5
"project_panel": {
- "diagnostics": "all",
+ "show_diagnostics": "all",
}
```
@@ -9,3 +9,7 @@ For a guide on Vim-related features that are also available in Helix mode, pleas
To check the current status of Helix mode, or to request a missing Helix feature, checkout out the ["Are we Helix yet?" discussion](https://github.com/zed-industries/zed/discussions/33580).
For a detailed list of Helix's default keybindings, please visit the [official Helix documentation](https://docs.helix-editor.com/keymap.html).
+
+## Core differences
+
+Any text object that works with `m i` or `m a` also works with `]` and `[`, so for example `] (` selects the next pair of parentheses after the cursor.
@@ -8,14 +8,14 @@ C support is available natively in Zed.
## Clangd: Force detect as C
-Clangd out of the box assumes mixed C++/C projects. If you have a C-only project you may wish to instruct clangd to all files as C using the `-xc` flag. To do this, create a `.clangd` file in the root of your project with the following:
+Clangd out of the box assumes mixed C++/C projects. If you have a C-only project you may wish to instruct clangd to treat all files as C using the `-xc` flag. To do this, create a `.clangd` file in the root of your project with the following:
```yaml
CompileFlags:
Add: [-xc]
```
-By default clang and gcc by will recognize `*.C` and `*.H` (uppercase extensions) as C++ and not C and so Zed too follows this convention. If you are working with a C-only project (perhaps one with legacy uppercase pathing like `FILENAME.C`) you can override this behavior by adding this to your settings:
+By default clang and gcc will recognize `*.C` and `*.H` (uppercase extensions) as C++ and not C and so Zed too follows this convention. If you are working with a C-only project (perhaps one with legacy uppercase pathing like `FILENAME.C`) you can override this behavior by adding this to your settings:
```json
{
@@ -9,22 +9,23 @@ C++ support is available natively in Zed.
You can configure which `clangd` binary Zed should use.
-To use a binary in a custom location, add the following to your `settings.json`:
+By default, Zed will try to find a `clangd` in your `$PATH` and try to use that. If that binary successfully executes, it's used. Otherwise, Zed will fall back to installing its own `clangd` version and use that.
+
+If you want to install a pre-release `clangd` version instead you can instruct Zed to do so by setting `pre_release` to `true` in your `settings.json`:
```json
{
"lsp": {
"clangd": {
- "binary": {
- "path": "/path/to/clangd",
- "arguments": []
+ "fetch": {
+ "pre_release": true
}
}
}
}
```
-If you want to disable Zed looking for a `clangd` binary, you can set `ignore_system_version` to `true`:
+If you want to disable Zed looking for a `clangd` binary, you can set `ignore_system_version` to `true` in your `settings.json`:
```json
{
@@ -38,6 +39,23 @@ If you want to disable Zed looking for a `clangd` binary, you can set `ignore_sy
}
```
+If you want to use a binary in a custom location, you can specify a `path` and optional `arguments`:
+
+```json
+{
+ "lsp": {
+ "clangd": {
+ "binary": {
+ "path": "/path/to/clangd",
+ "arguments": []
+ }
+ }
+ }
+}
+```
+
+This `"path"` has to be an absolute path.
+
## Arguments
You can pass any number of arguments to clangd. To see a full set of available options, run `clangd --help` from the command line. For example with `--function-arg-placeholders=0` completions contain only parentheses for function calls, while the default (`--function-arg-placeholders=1`) completions also contain placeholders for method parameters.
@@ -6,35 +6,72 @@ Elixir support is available through the [Elixir extension](https://github.com/ze
- [elixir-lang/tree-sitter-elixir](https://github.com/elixir-lang/tree-sitter-elixir)
- [phoenixframework/tree-sitter-heex](https://github.com/phoenixframework/tree-sitter-heex)
- Language servers:
+ - [elixir-lang/expert](https://github.com/elixir-lang/expert)
- [elixir-lsp/elixir-ls](https://github.com/elixir-lsp/elixir-ls)
- [elixir-tools/next-ls](https://github.com/elixir-tools/next-ls)
- [lexical-lsp/lexical](https://github.com/lexical-lsp/lexical)
## Choosing a language server
-The Elixir extension offers language server support for `elixir-ls`, `next-ls`, and `lexical`.
+The Elixir extension offers language server support for `expert`, `elixir-ls`, `next-ls`, and `lexical`.
`elixir-ls` is enabled by default.
+### Expert
+
+To switch to `expert`, add the following to your `settings.json`:
+
+```json
+{
+ "languages": {
+ "Elixir": {
+ "language_servers": [
+ "expert",
+ "!elixir-ls",
+ "!next-ls",
+ "!lexical",
+ "..."
+ ]
+ }
+ }
+}
+```
+
+### Next LS
+
To switch to `next-ls`, add the following to your `settings.json`:
```json
{
"languages": {
"Elixir": {
- "language_servers": ["next-ls", "!elixir-ls", "..."]
+ "language_servers": [
+ "next-ls",
+ "!expert",
+ "!elixir-ls",
+ "!lexical",
+ "..."
+ ]
}
}
}
```
+### Lexical
+
To switch to `lexical`, add the following to your `settings.json`:
```json
{
"languages": {
"Elixir": {
- "language_servers": ["lexical", "!elixir-ls", "..."]
+ "language_servers": [
+ "lexical",
+ "!expert",
+ "!elixir-ls",
+ "!next-ls",
+ "..."
+ ]
}
}
}
@@ -299,6 +299,7 @@ To run tests in your Ruby project, you can set up custom tasks in your local `.z
"-n",
"\"$ZED_CUSTOM_RUBY_TEST_NAME\""
],
+ "cwd": "$ZED_WORKTREE_ROOT",
"tags": ["ruby-test"]
}
]
@@ -321,6 +322,7 @@ Plain minitest does not support running tests by line number, only by name, so w
"-n",
"\"$ZED_CUSTOM_RUBY_TEST_NAME\""
],
+ "cwd": "$ZED_WORKTREE_ROOT",
"tags": ["ruby-test"]
}
]
@@ -334,6 +336,7 @@ Plain minitest does not support running tests by line number, only by name, so w
"label": "test $ZED_RELATIVE_FILE:$ZED_ROW",
"command": "bundle",
"args": ["exec", "rspec", "\"$ZED_RELATIVE_FILE:$ZED_ROW\""],
+ "cwd": "$ZED_WORKTREE_ROOT",
"tags": ["ruby-test"]
}
]
@@ -369,7 +372,7 @@ The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name
"label": "Debug Rails server",
"adapter": "rdbg",
"request": "launch",
- "command": "$ZED_WORKTREE_ROOT/bin/rails",
+ "command": "./bin/rails",
"args": ["server"],
"cwd": "$ZED_WORKTREE_ROOT",
"env": {
@@ -63,7 +63,21 @@ A `true` setting will set the target directory to `target/rust-analyzer`. You ca
You can configure which `rust-analyzer` binary Zed should use.
-By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own `rust-analyzer` version and using that.
+By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own stable `rust-analyzer` version and use that.
+
+If you want to install a pre-release `rust-analyzer` version instead you can instruct Zed to do so by setting `pre_release` to `true` in your `settings.json`:
+
+```json
+{
+ "lsp": {
+ "rust-analyzer": {
+ "fetch": {
+ "pre_release": true
+ }
+ }
+ }
+}
+```
If you want to disable Zed looking for a `rust-analyzer` binary, you can set `ignore_system_version` to `true` in your `settings.json`:
@@ -98,7 +112,7 @@ This `"path"` has to be an absolute path.
## Alternate Targets
-If want rust-analyzer to provide diagnostics for a target other than you current platform (e.g. for windows when running on macOS) you can use the following Zed lsp settings:
+If you want rust-analyzer to provide diagnostics for a target other than your current platform (e.g. for windows when running on macOS) you can use the following Zed lsp settings:
```json
{
@@ -114,7 +128,7 @@ If want rust-analyzer to provide diagnostics for a target other than you current
}
```
-If you are using `rustup` and you can find a list of available target triples (`aarch64-apple-darwin`, `x86_64-unknown-linux-gnu`, etc) by running:
+If you are using `rustup`, you can find a list of available target triples (`aarch64-apple-darwin`, `x86_64-unknown-linux-gnu`, etc) by running:
```sh
rustup target list --installed
@@ -136,22 +150,7 @@ This is enabled by default and can be configured as
## Manual Cargo Diagnostics fetch
By default, rust-analyzer has `checkOnSave: true` enabled, which causes every buffer save to trigger a `cargo check --workspace --all-targets` command.
-For lager projects this might introduce excessive wait times, so a more fine-grained triggering could be enabled by altering the
-
-```json
-"diagnostics": {
- "cargo": {
- // When enabled, Zed disables rust-analyzer's check on save and starts to query
- // Cargo diagnostics separately.
- "fetch_cargo_diagnostics": false
- }
-}
-```
-
-default settings.
-
-This will stop rust-analyzer from running `cargo check ...` on save, yet still allow to run
-`editor: run/clear/cancel flycheck` commands in Rust files to refresh cargo diagnostics; the project diagnostics editor will also refresh cargo diagnostics with `editor: run flycheck` command when the setting is enabled.
+If disabled with `checkOnSave: false` (see the example of the server configuration json above), it's still possible to fetch the diagnostics manually, with the `editor: run/clear/cancel flycheck` commands in Rust files to refresh cargo diagnostics; the project diagnostics editor will also refresh cargo diagnostics with `editor: run flycheck` command when the setting is enabled.
## More server configuration
@@ -240,7 +239,7 @@ you can list them in `linkedProjects` in the local project settings:
### Snippets
-There's a way get custom completion items from rust-analyzer, that will transform the code according to the snippet body:
+There's a way to get custom completion items from rust-analyzer, that will transform the code according to the snippet body:
```json
{
@@ -1,22 +1,7 @@
# TOML
-TOML support is available through the [TOML extension](https://github.com/zed-industries/zed/tree/main/extensions/toml).
+TOML support is available through the [TOML extension](https://zed.dev/extensions/toml).
- Tree-sitter: [tree-sitter/tree-sitter-toml](https://github.com/tree-sitter/tree-sitter-toml)
-- Language Server: [tamasfe/taplo](https://github.com/tamasfe/taplo)
-## Configuration
-
-You can control the behavior of the Taplo TOML language server by adding a `.taplo.toml` file to the root of your project. See the [Taplo Configuration File](https://taplo.tamasfe.dev/configuration/file.html#configuration-file) and [Taplo Formatter Options](https://taplo.tamasfe.dev/configuration/formatter-options.html) documentation for more.
-
-```toml
-# .taplo.toml
-[formatting]
-align_comments = false
-reorder_keys = true
-
-include = ["Cargo.toml", "some_directory/**/*.toml"]
-# exclude = ["vendor/**/*.toml"]
-```
-
-Note: The taplo language server will not automatically pickup changes to `.taplo.toml`. You must manually trigger {#action editor::RestartLanguageServer} or reload Zed for it to pickup changes.
+A TOML language server is available in the [Tombi extension](https://zed.dev/extensions/tombi).
@@ -40,4 +40,20 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead
## See also
+The `feature_paths` option in `simple-completion-language-server` is disabled by default.
+
+If you want to enable it you can add the following to your `settings.json`:
+
+```json
+{
+ "lsp": {
+ "snippet-completion-server": {
+ "settings": {
+ "feature_paths": true
+ }
+ }
+ }
+}
+```
+
For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main).
@@ -45,9 +45,9 @@ Zed supports ways to spawn (and rerun) commands using its integrated terminal to
// Whether to show the task line in the output of the spawned task, defaults to `true`.
"show_summary": true,
// Whether to show the command line in the output of the spawned task, defaults to `true`.
- "show_output": true,
+ "show_command": true
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
- "tags": []
+ // "tags": []
}
]
```
@@ -8,7 +8,7 @@ See [Configuring Zed](./configuring-zed.md) for additional information and other
Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu.
-You can preview/choose amongsts your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
+You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
```json
{
@@ -321,6 +321,12 @@ TBD: Centered layout related settings
// Defaults to true.
"cursor_position_button": true,
},
+ "global_lsp_settings": {
+ // Show/hide the LSP button in the status bar.
+ // Activity from the LSP is still shown.
+ // Button is not shown if "enable_language_server" is false.
+ "button": true
+ },
```
### Multibuffer
@@ -328,7 +334,9 @@ TBD: Centered layout related settings
```json
{
// The default number of lines to expand excerpts in the multibuffer by.
- "expand_excerpt_lines": 5
+ "expand_excerpt_lines": 5,
+ // The default number of lines of context provided for excerpts in the multibuffer.
+ "excerpt_context_lines": 2
}
```
@@ -424,6 +432,8 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k
"indent_size": 20, // Pixels for each successive indent
"auto_reveal_entries": true, // Show file in panel when activating its buffer
"auto_fold_dirs": true, // Fold dirs with single subdir
+ "sticky_scroll": true, // Stick parent directories at top of the project panel.
+ "drag_and_drop": true, // Whether drag and drop is enabled
"scrollbar": { // Project panel scrollbar settings
"show": null // Show/hide: (auto, system, always, never)
},
@@ -119,7 +119,7 @@ impl zed::Extension for GlslExtension {
) -> Result<Option<serde_json::Value>> {
let settings = LspSettings::for_worktree("glsl_analyzer", worktree)
.ok()
- .and_then(|lsp_settings| lsp_settings.settings.clone())
+ .and_then(|lsp_settings| lsp_settings.settings)
.unwrap_or_default();
Ok(Some(serde_json::json!({
@@ -1,6 +1,6 @@
[package]
name = "zed_html"
-version = "0.2.1"
+version = "0.2.2"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "html"
name = "HTML"
description = "HTML support."
-version = "0.2.1"
+version = "0.2.2"
schema_version = 1
authors = ["Isaac Clayton <slightknack@gmail.com>"]
repository = "https://github.com/zed-industries/zed"
@@ -3,6 +3,7 @@ grammar = "html"
path_suffixes = ["html", "htm", "shtml"]
autoclose_before = ">})"
block_comment = { start = "<!--", prefix = "", end = "-->", tab_size = 0 }
+wrap_characters = { start_prefix = "<", start_suffix = ">", end_prefix = "</", end_suffix = ">" }
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
@@ -1 +1,5 @@
(comment) @annotation
+
+(element
+ (start_tag
+ (tag_name) @name)) @item
@@ -94,7 +94,7 @@ impl zed::Extension for HtmlExtension {
) -> Result<Option<zed::serde_json::Value>> {
let settings = LspSettings::for_worktree(server_id.as_ref(), worktree)
.ok()
- .and_then(|lsp_settings| lsp_settings.settings.clone())
+ .and_then(|lsp_settings| lsp_settings.settings)
.unwrap_or_default();
Ok(Some(settings))
}
@@ -151,7 +151,7 @@ impl zed::Extension for RuffExtension {
) -> Result<Option<zed_extension_api::serde_json::Value>> {
let settings = LspSettings::for_worktree(server_id.as_ref(), worktree)
.ok()
- .and_then(|lsp_settings| lsp_settings.initialization_options.clone())
+ .and_then(|lsp_settings| lsp_settings.initialization_options)
.unwrap_or_default();
Ok(Some(settings))
}
@@ -163,7 +163,7 @@ impl zed::Extension for RuffExtension {
) -> Result<Option<zed_extension_api::serde_json::Value>> {
let settings = LspSettings::for_worktree(server_id.as_ref(), worktree)
.ok()
- .and_then(|lsp_settings| lsp_settings.settings.clone())
+ .and_then(|lsp_settings| lsp_settings.settings)
.unwrap_or_default();
Ok(Some(settings))
}
@@ -1,6 +1,6 @@
[package]
name = "zed_snippets"
-version = "0.0.5"
+version = "0.0.6"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,9 +1,9 @@
id = "snippets"
name = "Snippets"
description = "Support for language-agnostic snippets, provided by simple-completion-language-server"
-version = "0.0.5"
+version = "0.0.6"
schema_version = 1
-authors = []
+authors = ["Zed Industries <hi@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
[language_servers.snippet-completion-server]
@@ -113,14 +113,16 @@ impl zed::Extension for SnippetExtension {
) -> Result<Option<zed_extension_api::serde_json::Value>> {
let settings = LspSettings::for_worktree(server_id.as_ref(), worktree)
.ok()
- .and_then(|lsp_settings| lsp_settings.settings.clone())
+ .and_then(|lsp_settings| lsp_settings.settings)
.unwrap_or_else(|| {
json!({
"max_completion_items": 20,
"snippets_first": true,
"feature_words": false,
"feature_snippets": true,
- "feature_paths": true
+ // We disable `feature_paths` by default, because it's bad UX to assume that any `/` that is typed
+ // is the start of a path.
+ "feature_paths": false
})
});
Ok(Some(settings))
@@ -1,16 +0,0 @@
-[package]
-name = "zed_toml"
-version = "0.1.4"
-edition.workspace = true
-publish.workspace = true
-license = "Apache-2.0"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/toml.rs"
-crate-type = ["cdylib"]
-
-[dependencies]
-zed_extension_api = "0.1.0"
@@ -1,18 +0,0 @@
-id = "toml"
-name = "TOML"
-description = "TOML support."
-version = "0.1.4"
-schema_version = 1
-authors = [
- "Max Brunsfeld <max@zed.dev>",
- "Ammar Arif <evergreenkary@gmail.com>"
-]
-repository = "https://github.com/zed-industries/zed"
-
-[language_servers.taplo]
-name = "Taplo"
-language = "TOML"
-
-[grammars.toml]
-repository = "https://github.com/tree-sitter/tree-sitter-toml"
-commit = "342d9be207c2dba869b9967124c679b5e6fd0ebe"
@@ -1,3 +0,0 @@
-("[" @open "]" @close)
-("{" @open "}" @close)
-("\"" @open "\"" @close)
@@ -1,11 +0,0 @@
-name = "TOML"
-grammar = "toml"
-path_suffixes = ["Cargo.lock", "toml", "Pipfile", "uv.lock"]
-line_comments = ["# "]
-autoclose_before = ",]}"
-brackets = [
- { start = "{", end = "}", close = true, newline = true },
- { start = "[", end = "]", close = true, newline = true },
- { start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] },
- { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] },
-]
@@ -1,38 +0,0 @@
-; Properties
-;-----------
-
-(bare_key) @property
-(quoted_key) @property
-
-; Literals
-;---------
-
-(boolean) @constant
-(comment) @comment
-(integer) @number
-(float) @number
-(string) @string
-(escape_sequence) @string.escape
-(offset_date_time) @string.special
-(local_date_time) @string.special
-(local_date) @string.special
-(local_time) @string.special
-
-; Punctuation
-;------------
-
-[
- "."
- ","
-] @punctuation.delimiter
-
-"=" @operator
-
-[
- "["
- "]"
- "[["
- "]]"
- "{"
- "}"
-] @punctuation.bracket
@@ -1,15 +0,0 @@
-(table
- .
- "["
- .
- (_) @name) @item
-
-(table_array_element
- .
- "[["
- .
- (_) @name) @item
-
-(pair
- .
- (_) @name) @item
@@ -1 +0,0 @@
-(pair (bare_key) "=" (_) @redact)
@@ -1,6 +0,0 @@
-(comment)+ @comment
-(table "[" (_) "]"
- (_)* @class.inside) @class.around
-
-(table_array_element "[[" (_) "]]"
- (_)* @class.inside) @class.around
@@ -1,152 +0,0 @@
-use std::fs;
-use zed::LanguageServerId;
-use zed_extension_api::settings::LspSettings;
-use zed_extension_api::{self as zed, Result};
-
-struct TaploBinary {
- path: String,
- args: Option<Vec<String>>,
-}
-
-struct TomlExtension {
- cached_binary_path: Option<String>,
-}
-
-impl TomlExtension {
- fn language_server_binary(
- &mut self,
- language_server_id: &LanguageServerId,
- worktree: &zed::Worktree,
- ) -> Result<TaploBinary> {
- let binary_settings = LspSettings::for_worktree("taplo", worktree)
- .ok()
- .and_then(|lsp_settings| lsp_settings.binary);
- let binary_args = binary_settings
- .as_ref()
- .and_then(|binary_settings| binary_settings.arguments.clone());
-
- if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) {
- return Ok(TaploBinary {
- path,
- args: binary_args,
- });
- }
-
- if let Some(path) = worktree.which("taplo") {
- return Ok(TaploBinary {
- path,
- args: binary_args,
- });
- }
-
- if let Some(path) = &self.cached_binary_path
- && fs::metadata(path).is_ok_and(|stat| stat.is_file())
- {
- return Ok(TaploBinary {
- path: path.clone(),
- args: binary_args,
- });
- }
-
- zed::set_language_server_installation_status(
- language_server_id,
- &zed::LanguageServerInstallationStatus::CheckingForUpdate,
- );
- let release = zed::latest_github_release(
- "tamasfe/taplo",
- zed::GithubReleaseOptions {
- require_assets: true,
- pre_release: false,
- },
- )?;
-
- let (platform, arch) = zed::current_platform();
- let asset_name = format!(
- "taplo-{os}-{arch}.gz",
- arch = match arch {
- zed::Architecture::Aarch64 => "aarch64",
- zed::Architecture::X86 => "x86",
- zed::Architecture::X8664 => "x86_64",
- },
- os = match platform {
- zed::Os::Mac => "darwin",
- zed::Os::Linux => "linux",
- zed::Os::Windows => "windows",
- },
- );
-
- let asset = release
- .assets
- .iter()
- .find(|asset| asset.name == asset_name)
- .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?;
-
- let version_dir = format!("taplo-{}", release.version);
- fs::create_dir_all(&version_dir)
- .map_err(|err| format!("failed to create directory '{version_dir}': {err}"))?;
-
- let binary_path = format!(
- "{version_dir}/{bin_name}",
- bin_name = match platform {
- zed::Os::Windows => "taplo.exe",
- zed::Os::Mac | zed::Os::Linux => "taplo",
- }
- );
-
- if !fs::metadata(&binary_path).is_ok_and(|stat| stat.is_file()) {
- zed::set_language_server_installation_status(
- language_server_id,
- &zed::LanguageServerInstallationStatus::Downloading,
- );
-
- zed::download_file(
- &asset.download_url,
- &binary_path,
- zed::DownloadedFileType::Gzip,
- )
- .map_err(|err| format!("failed to download file: {err}"))?;
-
- zed::make_file_executable(&binary_path)?;
-
- let entries = fs::read_dir(".")
- .map_err(|err| format!("failed to list working directory {err}"))?;
- for entry in entries {
- let entry = entry.map_err(|err| format!("failed to load directory entry {err}"))?;
- if entry.file_name().to_str() != Some(&version_dir) {
- fs::remove_dir_all(entry.path()).ok();
- }
- }
- }
-
- self.cached_binary_path = Some(binary_path.clone());
- Ok(TaploBinary {
- path: binary_path,
- args: binary_args,
- })
- }
-}
-
-impl zed::Extension for TomlExtension {
- fn new() -> Self {
- Self {
- cached_binary_path: None,
- }
- }
-
- fn language_server_command(
- &mut self,
- language_server_id: &LanguageServerId,
- worktree: &zed::Worktree,
- ) -> Result<zed::Command> {
- let taplo_binary = self.language_server_binary(language_server_id, worktree)?;
- Ok(zed::Command {
- command: taplo_binary.path,
- args: taplo_binary
- .args
- .unwrap_or_else(|| vec!["lsp".to_string(), "stdio".to_string()]),
- env: Default::default(),
- })
- }
-}
-
-zed::register_extension!(TomlExtension);
@@ -2,11 +2,11 @@
"nodes": {
"crane": {
"locked": {
- "lastModified": 1754269165,
- "narHash": "sha256-0tcS8FHd4QjbCVoxN9jI+PjHgA4vc/IjkUSp+N3zy0U=",
+ "lastModified": 1755993354,
+ "narHash": "sha256-FCRRAzSaL/+umLIm3RU3O/+fJ2ssaPHseI2SSFL8yZU=",
"owner": "ipetkov",
"repo": "crane",
- "rev": "444e81206df3f7d92780680e45858e31d2f07a08",
+ "rev": "25bd41b24426c7734278c2ff02e53258851db914",
"type": "github"
},
"original": {
@@ -33,10 +33,10 @@
"nixpkgs": {
"locked": {
"lastModified": 315532800,
- "narHash": "sha256-5VYevX3GccubYeccRGAXvCPA1ktrGmIX1IFC0icX07g=",
- "rev": "a683adc19ff5228af548c6539dbc3440509bfed3",
+ "narHash": "sha256-E8CyvVDZuIsF7puIw+OLkrFmhj3qUV+iwPcNbBhdcxM=",
+ "rev": "a918bb3594dd243c2f8534b3be01b3cb4ed35fd1",
"type": "tarball",
- "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre840248.a683adc19ff5/nixexprs.tar.xz"
+ "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre854010.a918bb3594dd/nixexprs.tar.xz"
},
"original": {
"type": "tarball",
@@ -58,11 +58,11 @@
]
},
"locked": {
- "lastModified": 1754575663,
- "narHash": "sha256-afOx8AG0KYtw7mlt6s6ahBBy7eEHZwws3iCRoiuRQS4=",
+ "lastModified": 1756607787,
+ "narHash": "sha256-ciwAdgtlAN1PCaidWK6RuWsTBL8DVuyDCGM+X3ein5Q=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "6db0fb0e9cec2e9729dc52bf4898e6c135bb8a0f",
+ "rev": "f46d294b87ebb9f7124f1ce13aa2a5f5acc0f3eb",
"type": "github"
},
"original": {
@@ -145,7 +145,6 @@ let
]
++ lib.optionals stdenv'.hostPlatform.isDarwin [
apple-sdk_15
- darwin.apple_sdk.frameworks.System
(darwinMinVersionHook "10.15")
];
@@ -150,6 +150,7 @@ function CollectFiles {
Move-Item -Path "$innoDir\zed_explorer_command_injector.appx" -Destination "$innoDir\appx\zed_explorer_command_injector.appx" -Force
Move-Item -Path "$innoDir\zed_explorer_command_injector.dll" -Destination "$innoDir\appx\zed_explorer_command_injector.dll" -Force
Move-Item -Path "$innoDir\cli.exe" -Destination "$innoDir\bin\zed.exe" -Force
+ Move-Item -Path "$innoDir\zed.sh" -Destination "$innoDir\bin\zed" -Force
Move-Item -Path "$innoDir\auto_update_helper.exe" -Destination "$innoDir\tools\auto_update_helper.exe" -Force
Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force
}
@@ -33,8 +33,8 @@ packages:
resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==}
engines: {node: '>= 18'}
- '@octokit/core@5.2.1':
- resolution: {integrity: sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==}
+ '@octokit/core@5.2.2':
+ resolution: {integrity: sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==}
engines: {node: '>= 18'}
'@octokit/endpoint@9.0.6':
@@ -131,8 +131,8 @@ packages:
commander@2.20.3:
resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
- core-js@3.41.0:
- resolution: {integrity: sha512-SJ4/EHwS36QMJd6h/Rg+GyR4A5xE0FSI3eZ+iBVpfqf1x0eTSg1smWLHrA+2jQThZSh97fmSgFSU8B61nxosxA==}
+ core-js@3.45.1:
+ resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==}
danger-plugin-pr-hygiene@0.6.1:
resolution: {integrity: sha512-nb+iUQvirE3BlKXI1WoOND6sujyGzHar590mJm5tt4RLi65HXFaU5hqONxgDoWFujJNHYnXse9yaZdxnxEi4QA==}
@@ -142,8 +142,8 @@ packages:
engines: {node: '>=18'}
hasBin: true
- debug@4.4.0:
- resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==}
+ debug@4.4.1:
+ resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
@@ -252,8 +252,8 @@ packages:
resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==}
engines: {node: '>=12', npm: '>=6'}
- jwa@1.4.1:
- resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==}
+ jwa@1.4.2:
+ resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==}
jws@3.2.2:
resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==}
@@ -385,8 +385,8 @@ packages:
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
- semver@7.7.1:
- resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==}
+ semver@7.7.2:
+ resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==}
engines: {node: '>=10'}
hasBin: true
@@ -460,7 +460,7 @@ snapshots:
'@octokit/auth-token@4.0.0': {}
- '@octokit/core@5.2.1':
+ '@octokit/core@5.2.2':
dependencies:
'@octokit/auth-token': 4.0.0
'@octokit/graphql': 7.1.1
@@ -483,18 +483,18 @@ snapshots:
'@octokit/openapi-types@24.2.0': {}
- '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.1)':
+ '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.2)':
dependencies:
- '@octokit/core': 5.2.1
+ '@octokit/core': 5.2.2
'@octokit/types': 13.10.0
- '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.1)':
+ '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.2)':
dependencies:
- '@octokit/core': 5.2.1
+ '@octokit/core': 5.2.2
- '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.1)':
+ '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.2)':
dependencies:
- '@octokit/core': 5.2.1
+ '@octokit/core': 5.2.2
'@octokit/types': 13.10.0
'@octokit/request-error@5.1.1':
@@ -512,10 +512,10 @@ snapshots:
'@octokit/rest@20.1.2':
dependencies:
- '@octokit/core': 5.2.1
- '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.1)
- '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.1)
- '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.1)
+ '@octokit/core': 5.2.2
+ '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.2)
+ '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.2)
+ '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.2)
'@octokit/types@13.10.0':
dependencies:
@@ -525,7 +525,7 @@ snapshots:
agent-base@6.0.2:
dependencies:
- debug: 4.4.0
+ debug: 4.4.1
transitivePeerDependencies:
- supports-color
@@ -571,7 +571,7 @@ snapshots:
commander@2.20.3: {}
- core-js@3.41.0: {}
+ core-js@3.45.1: {}
danger-plugin-pr-hygiene@0.6.1: {}
@@ -582,8 +582,8 @@ snapshots:
async-retry: 1.2.3
chalk: 2.4.2
commander: 2.20.3
- core-js: 3.41.0
- debug: 4.4.0
+ core-js: 3.45.1
+ debug: 4.4.1
fast-json-patch: 3.1.1
get-stdin: 6.0.0
http-proxy-agent: 5.0.0
@@ -618,7 +618,7 @@ snapshots:
- encoding
- supports-color
- debug@4.4.0:
+ debug@4.4.1:
dependencies:
ms: 2.1.3
@@ -688,14 +688,14 @@ snapshots:
dependencies:
'@tootallnate/once': 2.0.0
agent-base: 6.0.2
- debug: 4.4.0
+ debug: 4.4.1
transitivePeerDependencies:
- supports-color
https-proxy-agent@5.0.1:
dependencies:
agent-base: 6.0.2
- debug: 4.4.0
+ debug: 4.4.1
transitivePeerDependencies:
- supports-color
@@ -720,9 +720,9 @@ snapshots:
lodash.isstring: 4.0.1
lodash.once: 4.1.1
ms: 2.1.3
- semver: 7.7.1
+ semver: 7.7.2
- jwa@1.4.1:
+ jwa@1.4.2:
dependencies:
buffer-equal-constant-time: 1.0.1
ecdsa-sig-formatter: 1.0.11
@@ -730,7 +730,7 @@ snapshots:
jws@3.2.2:
dependencies:
- jwa: 1.4.1
+ jwa: 1.4.2
safe-buffer: 5.2.1
lodash.find@4.6.0: {}
@@ -823,7 +823,7 @@ snapshots:
safe-buffer@5.2.1: {}
- semver@7.7.1: {}
+ semver@7.7.2: {}
side-channel-list@1.0.0:
dependencies:
@@ -1,12 +1,11 @@
#!/usr/bin/env node --redirect-warnings=/dev/null
const { execFileSync } = require("child_process");
-let { GITHUB_ACCESS_TOKEN } = process.env;
+const { GITHUB_ACCESS_TOKEN } = process.env;
const GITHUB_URL = "https://github.com";
const SKIPPABLE_NOTE_REGEX = /^\s*-?\s*n\/?a\s*/ims;
const PULL_REQUEST_WEB_URL = "https://github.com/zed-industries/zed/pull";
-const PULL_REQUEST_API_URL =
- "https://api.github.com/repos/zed-industries/zed/pulls";
+const PULL_REQUEST_API_URL = "https://api.github.com/repos/zed-industries/zed/pulls";
const DIVIDER = "-".repeat(80);
main();
@@ -25,15 +24,12 @@ async function main() {
const STAFF_MEMBERS = new Set(
(
await (
- await fetch(
- "https://api.github.com/orgs/zed-industries/teams/staff/members",
- {
- headers: {
- Authorization: `token ${GITHUB_ACCESS_TOKEN}`,
- Accept: "application/vnd.github+json",
- },
+ await fetch("https://api.github.com/orgs/zed-industries/teams/staff/members", {
+ headers: {
+ Authorization: `token ${GITHUB_ACCESS_TOKEN}`,
+ Accept: "application/vnd.github+json",
},
- )
+ })
).json()
).map(({ login }) => login.toLowerCase()),
);
@@ -44,11 +40,7 @@ async function main() {
};
// Get the last two preview tags
- const [newTag, oldTag] = execFileSync(
- "git",
- ["tag", "--sort", "-committerdate"],
- { encoding: "utf8" },
- )
+ const [newTag, oldTag] = execFileSync("git", ["tag", "--sort", "-committerdate"], { encoding: "utf8" })
.split("\n")
.filter((t) => t.startsWith("v") && t.endsWith("-pre"));
@@ -59,14 +51,10 @@ async function main() {
const pullRequestNumbers = getPullRequestNumbers(oldTag, newTag);
// Get the PRs that were cherry-picked between main and the old tag.
- const existingPullRequestNumbers = new Set(
- getPullRequestNumbers("main", oldTag),
- );
+ const existingPullRequestNumbers = new Set(getPullRequestNumbers("main", oldTag));
// Filter out those existing PRs from the set of new PRs.
- const newPullRequestNumbers = pullRequestNumbers.filter(
- (number) => !existingPullRequestNumbers.has(number),
- );
+ const newPullRequestNumbers = pullRequestNumbers.filter((number) => !existingPullRequestNumbers.has(number));
// Fetch the pull requests from the GitHub API.
console.log("Merged Pull requests:");
@@ -84,8 +72,7 @@ async function main() {
const releaseNotesHeader = /^\s*Release Notes:(.+)/ims;
const releaseNotes = pullRequest.body || "";
- let contributor =
- pullRequest.user?.login ?? "Unable to identify contributor";
+ let contributor = pullRequest.user?.login ?? "Unable to identify contributor";
const captures = releaseNotesHeader.exec(releaseNotes);
let notes = captures ? captures[1] : "MISSING";
notes = notes.trim();
@@ -127,11 +114,7 @@ function getCreditString(pullRequestNumber, contributor, isStaff) {
}
function getPullRequestNumbers(oldTag, newTag) {
- const pullRequestNumbers = execFileSync(
- "git",
- ["log", `${oldTag}..${newTag}`, "--oneline"],
- { encoding: "utf8" },
- )
+ const pullRequestNumbers = execFileSync("git", ["log", `${oldTag}..${newTag}`, "--oneline"], { encoding: "utf8" })
.split("\n")
.filter((line) => line.length > 0)
.map((line) => {
@@ -0,0 +1,101 @@
+#!/usr/bin/env node --redirect-warnings=/dev/null
+
+// This script should be run before `bump-zed-minor-versions`
+
+// Prints the changelogs for all preview releases associated with the most
+// recent preview minor version.
+
+// Future TODO: Have the script perform deduplication of lines that were
+// included in both past stable and preview patches that shouldn't be mentioned
+// again in this week's stable minor release.
+
+// Future TODO: Get changelogs for latest cherry-picked commits on preview and
+// stable that didn't make it into a release, as they were cherry picked
+
+const { execFileSync } = require("child_process");
+let { GITHUB_ACCESS_TOKEN } = process.env;
+const GITHUB_TAGS_API_URL = "https://api.github.com/repos/zed-industries/zed/releases/tags";
+const DIVIDER = "-".repeat(80);
+
+main();
+
+async function main() {
+ if (!GITHUB_ACCESS_TOKEN) {
+ try {
+      GITHUB_ACCESS_TOKEN = execFileSync("gh", ["auth", "token"]).toString().trim();
+ } catch (error) {
+ console.log(error);
+ console.log("No GITHUB_ACCESS_TOKEN and no `gh auth token`");
+ process.exit(1);
+ }
+ }
+
+ const allTags = execFileSync("git", ["tag", "--sort", "-committerdate"], { encoding: "utf8" })
+ .split("\n")
+ .filter((t) => t.length > 0);
+ const latestPreviewTag = allTags.filter((t) => t.startsWith("v") && t.endsWith("-pre"))[0];
+ const latestPreviewMinorVersion = latestPreviewTag.split(".")[1];
+ const latestPreviewTagRegex = new RegExp(`^v(\\d+)\\.(${latestPreviewMinorVersion})\\.(\\d+)-pre$`);
+
+ const parsedPreviewTags = allTags
+ .map((tag) => {
+ const match = tag.match(latestPreviewTagRegex);
+ if (match) {
+ return {
+ tag,
+ version: {
+ major: parseInt(match[1]),
+ minor: parseInt(match[2]),
+ patch: parseInt(match[3]),
+ },
+ };
+ }
+ return null;
+ })
+ .filter((item) => item !== null)
+ .sort((a, b) => a.version.patch - b.version.patch);
+
+ const matchingPreviewTags = parsedPreviewTags.map((item) => item.tag);
+
+ console.log("Fetching release information for preview tags:");
+ console.log(DIVIDER);
+
+ for (const tag of matchingPreviewTags) {
+ const releaseApiUrl = `${GITHUB_TAGS_API_URL}/${tag}`;
+
+ try {
+ const response = await fetch(releaseApiUrl, {
+ headers: {
+ Authorization: `token ${GITHUB_ACCESS_TOKEN}`,
+ },
+ });
+
+ if (!response.ok) {
+ console.log(`Failed to fetch release for ${tag}: ${response.status}`);
+ continue;
+ }
+
+ const release = await response.json();
+
+ console.log(`\nRelease: ${release.name || tag}`);
+ console.log(`Tag: ${tag}`);
+ console.log(`Published: ${release.published_at}`);
+ console.log(`URL: ${release.html_url}`);
+ console.log("\nRelease Notes:");
+ console.log(release.body || "No release notes");
+ console.log(DIVIDER);
+ } catch (error) {
+ console.log(`Error fetching release for ${tag}:`, error.message);
+ }
+ }
+
+ const patchUpdateTags = parsedPreviewTags.filter((tag) => tag.version.patch != 0).map((tag) => tag.tag);
+
+ console.log();
+ console.log("Please review the release notes associated with the following patch versions:");
+ for (const tag of patchUpdateTags) {
+ console.log(`- ${tag}`);
+ }
+ console.log("Remove items that have already been mentioned in the current published stable versions.");
+ console.log("https://github.com/zed-industries/zed/releases?q=prerelease%3Afalse&expanded=true");
+}
@@ -9,14 +9,14 @@
"start": "node main.js"
},
"dependencies": {
- "@octokit/rest": "^21.1.0",
- "@slack/webhook": "^7.0.4",
+ "@octokit/rest": "^21.1.1",
+ "@slack/webhook": "^7.0.6",
"date-fns": "^4.1.0",
- "octokit": "^4.1.1"
+ "octokit": "^4.1.4"
},
"devDependencies": {
- "@octokit/types": "^13.8.0",
- "@slack/types": "^2.14.0",
+ "@octokit/types": "^13.10.0",
+ "@slack/types": "^2.16.0",
"@tsconfig/node20": "20.1.5",
"@tsconfig/strictest": "2.0.5",
"typescript": "5.7.3"
@@ -9,24 +9,24 @@ importers:
.:
dependencies:
'@octokit/rest':
- specifier: ^21.1.0
+ specifier: ^21.1.1
version: 21.1.1
'@slack/webhook':
- specifier: ^7.0.4
- version: 7.0.5
+ specifier: ^7.0.6
+ version: 7.0.6
date-fns:
specifier: ^4.1.0
version: 4.1.0
octokit:
- specifier: ^4.1.1
- version: 4.1.2
+ specifier: ^4.1.4
+ version: 4.1.4
devDependencies:
'@octokit/types':
- specifier: ^13.8.0
- version: 13.8.0
+ specifier: ^13.10.0
+ version: 13.10.0
'@slack/types':
- specifier: ^2.14.0
- version: 2.14.0
+ specifier: ^2.16.0
+ version: 2.16.0
'@tsconfig/node20':
specifier: 20.1.5
version: 20.1.5
@@ -39,44 +39,44 @@ importers:
packages:
- '@octokit/app@15.1.4':
- resolution: {integrity: sha512-PM1MqlPAnItjQIKWRmSoJu02+m7Eif4Am3w5C+Ctkw0//QETWMbW2ejBZhcw3aS7wRcFSbS+lH3NoYm614aZVQ==}
+ '@octokit/app@15.1.6':
+ resolution: {integrity: sha512-WELCamoCJo9SN0lf3SWZccf68CF0sBNPQuLYmZ/n87p5qvBJDe9aBtr5dHkh7T9nxWZ608pizwsUbypSzZAiUw==}
engines: {node: '>= 18'}
- '@octokit/auth-app@7.1.5':
- resolution: {integrity: sha512-boklS4E6LpbA3nRx+SU2fRKRGZJdOGoSZne/i3Y0B5rfHOcGwFgcXrwDLdtbv4igfDSnAkZaoNBv1GYjPDKRNw==}
+ '@octokit/auth-app@7.2.2':
+ resolution: {integrity: sha512-p6hJtEyQDCJEPN9ijjhEC/kpFHMHN4Gca9r+8S0S8EJi7NaWftaEmexjxxpT1DFBeJpN4u/5RE22ArnyypupJw==}
engines: {node: '>= 18'}
- '@octokit/auth-oauth-app@8.1.3':
- resolution: {integrity: sha512-4e6OjVe5rZ8yBe8w7byBjpKtSXFuro7gqeGAAZc7QYltOF8wB93rJl2FE0a4U1Mt88xxPv/mS+25/0DuLk0Ewg==}
+ '@octokit/auth-oauth-app@8.1.4':
+ resolution: {integrity: sha512-71iBa5SflSXcclk/OL3lJzdt4iFs56OJdpBGEBl1wULp7C58uiswZLV6TdRaiAzHP1LT8ezpbHlKuxADb+4NkQ==}
engines: {node: '>= 18'}
- '@octokit/auth-oauth-device@7.1.3':
- resolution: {integrity: sha512-BECO/N4B/Uikj0w3GCvjf/odMujtYTP3q82BJSjxC2J3rxTEiZIJ+z2xnRlDb0IE9dQSaTgRqUPVOieSbFcVzg==}
+ '@octokit/auth-oauth-device@7.1.5':
+ resolution: {integrity: sha512-lR00+k7+N6xeECj0JuXeULQ2TSBB/zjTAmNF2+vyGPDEFx1dgk1hTDmL13MjbSmzusuAmuJD8Pu39rjp9jH6yw==}
engines: {node: '>= 18'}
- '@octokit/auth-oauth-user@5.1.3':
- resolution: {integrity: sha512-zNPByPn9K7TC+OOHKGxU+MxrE9SZAN11UHYEFLsK2NRn3akJN2LHRl85q+Eypr3tuB2GrKx3rfj2phJdkYCvzw==}
+ '@octokit/auth-oauth-user@5.1.6':
+ resolution: {integrity: sha512-/R8vgeoulp7rJs+wfJ2LtXEVC7pjQTIqDab7wPKwVG6+2v/lUnCOub6vaHmysQBbb45FknM3tbHW8TOVqYHxCw==}
engines: {node: '>= 18'}
'@octokit/auth-token@5.1.2':
resolution: {integrity: sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==}
engines: {node: '>= 18'}
- '@octokit/auth-unauthenticated@6.1.2':
- resolution: {integrity: sha512-07DlUGcz/AAVdzu3EYfi/dOyMSHp9YsOxPl/MPmtlVXWiD//GlV8HgZsPhud94DEyx+RfrW0wSl46Lx+AWbOlg==}
+ '@octokit/auth-unauthenticated@6.1.3':
+ resolution: {integrity: sha512-d5gWJla3WdSl1yjbfMpET+hUSFCE15qM0KVSB0H1shyuJihf/RL1KqWoZMIaonHvlNojkL9XtLFp8QeLe+1iwA==}
engines: {node: '>= 18'}
- '@octokit/core@6.1.4':
- resolution: {integrity: sha512-lAS9k7d6I0MPN+gb9bKDt7X8SdxknYqAMh44S5L+lNqIN2NuV8nvv3g8rPp7MuRxcOpxpUIATWprO0C34a8Qmg==}
+ '@octokit/core@6.1.6':
+ resolution: {integrity: sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA==}
engines: {node: '>= 18'}
- '@octokit/endpoint@10.1.3':
- resolution: {integrity: sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==}
+ '@octokit/endpoint@10.1.4':
+ resolution: {integrity: sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==}
engines: {node: '>= 18'}
- '@octokit/graphql@8.2.1':
- resolution: {integrity: sha512-n57hXtOoHrhwTWdvhVkdJHdhTv0JstjDbDRhJfwIRNfFqmSo1DaK/mD2syoNUoLCyqSjBpGAKOG0BuwF392slw==}
+ '@octokit/graphql@8.2.2':
+ resolution: {integrity: sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==}
engines: {node: '>= 18'}
'@octokit/oauth-app@7.1.6':
@@ -87,15 +87,18 @@ packages:
resolution: {integrity: sha512-ooXV8GBSabSWyhLUowlMIVd9l1s2nsOGQdlP2SQ4LnkEsGXzeCvbSbCPdZThXhEFzleGPwbapT0Sb+YhXRyjCA==}
engines: {node: '>= 18'}
- '@octokit/oauth-methods@5.1.4':
- resolution: {integrity: sha512-Jc/ycnePClOvO1WL7tlC+TRxOFtyJBGuTDsL4dzXNiVZvzZdrPuNw7zHI3qJSUX2n6RLXE5L0SkFmYyNaVUFoQ==}
+ '@octokit/oauth-methods@5.1.5':
+ resolution: {integrity: sha512-Ev7K8bkYrYLhoOSZGVAGsLEscZQyq7XQONCBBAl2JdMg7IT3PQn/y8P0KjloPoYpI5UylqYrLeUcScaYWXwDvw==}
engines: {node: '>= 18'}
- '@octokit/openapi-types@23.0.1':
- resolution: {integrity: sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==}
+ '@octokit/openapi-types@24.2.0':
+ resolution: {integrity: sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==}
- '@octokit/openapi-webhooks-types@9.1.0':
- resolution: {integrity: sha512-bO1D2jLdU8qEvqmbWjNxJzDYSFT4wesiYKIKP6f4LaM0XUGtn/0LBv/20hu9YqcnpdX38X5o/xANTMtIAqdwYw==}
+ '@octokit/openapi-types@25.1.0':
+ resolution: {integrity: sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==}
+
+ '@octokit/openapi-webhooks-types@11.0.0':
+ resolution: {integrity: sha512-ZBzCFj98v3SuRM7oBas6BHZMJRadlnDoeFfvm1olVxZnYeU6Vh97FhPxyS5aLh5pN51GYv2I51l/hVUAVkGBlA==}
'@octokit/plugin-paginate-graphql@5.2.4':
resolution: {integrity: sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==}
@@ -103,8 +106,14 @@ packages:
peerDependencies:
'@octokit/core': '>=6'
- '@octokit/plugin-paginate-rest@11.4.2':
- resolution: {integrity: sha512-BXJ7XPCTDXFF+wxcg/zscfgw2O/iDPtNSkwwR1W1W5c4Mb3zav/M2XvxQ23nVmKj7jpweB4g8viMeCQdm7LMVA==}
+ '@octokit/plugin-paginate-rest@11.6.0':
+ resolution: {integrity: sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw==}
+ engines: {node: '>= 18'}
+ peerDependencies:
+ '@octokit/core': '>=6'
+
+ '@octokit/plugin-paginate-rest@12.0.0':
+ resolution: {integrity: sha512-MPd6WK1VtZ52lFrgZ0R2FlaoiWllzgqFHaSZxvp72NmoDeZ0m8GeJdg4oB6ctqMTYyrnDYp592Xma21mrgiyDA==}
engines: {node: '>= 18'}
peerDependencies:
'@octokit/core': '>=6'
@@ -115,53 +124,62 @@ packages:
peerDependencies:
'@octokit/core': '>=6'
- '@octokit/plugin-rest-endpoint-methods@13.3.1':
- resolution: {integrity: sha512-o8uOBdsyR+WR8MK9Cco8dCgvG13H1RlM1nWnK/W7TEACQBFux/vPREgKucxUfuDQ5yi1T3hGf4C5ZmZXAERgwQ==}
+ '@octokit/plugin-rest-endpoint-methods@13.5.0':
+ resolution: {integrity: sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw==}
+ engines: {node: '>= 18'}
+ peerDependencies:
+ '@octokit/core': '>=6'
+
+ '@octokit/plugin-rest-endpoint-methods@14.0.0':
+ resolution: {integrity: sha512-iQt6ovem4b7zZYZQtdv+PwgbL5VPq37th1m2x2TdkgimIDJpsi2A6Q/OI/23i/hR6z5mL0EgisNR4dcbmckSZQ==}
engines: {node: '>= 18'}
peerDependencies:
'@octokit/core': '>=6'
- '@octokit/plugin-retry@7.1.4':
- resolution: {integrity: sha512-7AIP4p9TttKN7ctygG4BtR7rrB0anZqoU9ThXFk8nETqIfvgPUANTSYHqWYknK7W3isw59LpZeLI8pcEwiJdRg==}
+ '@octokit/plugin-retry@7.2.1':
+ resolution: {integrity: sha512-wUc3gv0D6vNHpGxSaR3FlqJpTXGWgqmk607N9L3LvPL4QjaxDgX/1nY2mGpT37Khn+nlIXdljczkRnNdTTV3/A==}
engines: {node: '>= 18'}
peerDependencies:
'@octokit/core': '>=6'
- '@octokit/plugin-throttling@9.4.0':
- resolution: {integrity: sha512-IOlXxXhZA4Z3m0EEYtrrACkuHiArHLZ3CvqWwOez/pURNqRuwfoFlTPbN5Muf28pzFuztxPyiUiNwz8KctdZaQ==}
+ '@octokit/plugin-throttling@10.0.0':
+ resolution: {integrity: sha512-Kuq5/qs0DVYTHZuBAzCZStCzo2nKvVRo/TDNhCcpC2TKiOGz/DisXMCvjt3/b5kr6SCI1Y8eeeJTHBxxpFvZEg==}
engines: {node: '>= 18'}
peerDependencies:
'@octokit/core': ^6.1.3
- '@octokit/request-error@6.1.7':
- resolution: {integrity: sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==}
+ '@octokit/request-error@6.1.8':
+ resolution: {integrity: sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==}
engines: {node: '>= 18'}
- '@octokit/request@9.2.2':
- resolution: {integrity: sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==}
+ '@octokit/request@9.2.4':
+ resolution: {integrity: sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA==}
engines: {node: '>= 18'}
'@octokit/rest@21.1.1':
resolution: {integrity: sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg==}
engines: {node: '>= 18'}
- '@octokit/types@13.8.0':
- resolution: {integrity: sha512-x7DjTIbEpEWXK99DMd01QfWy0hd5h4EN+Q7shkdKds3otGQP+oWE/y0A76i1OvH9fygo4ddvNf7ZvF0t78P98A==}
+ '@octokit/types@13.10.0':
+ resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==}
+
+ '@octokit/types@14.1.0':
+ resolution: {integrity: sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==}
'@octokit/webhooks-methods@5.1.1':
resolution: {integrity: sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg==}
engines: {node: '>= 18'}
- '@octokit/webhooks@13.6.1':
- resolution: {integrity: sha512-vk0jnc5k0/mLMUI4IA9LfSYkLs3OHtfa7B3h4aRG6to912V3wIG8lS/wKwatwYxRkAug4oE8is0ERRI8pzoYTw==}
+ '@octokit/webhooks@13.9.1':
+ resolution: {integrity: sha512-Nss2b4Jyn4wB3EAqAPJypGuCJFalz/ZujKBQQ5934To7Xw9xjf4hkr/EAByxQY7hp7MKd790bWGz7XYSTsHmaw==}
engines: {node: '>= 18'}
- '@slack/types@2.14.0':
- resolution: {integrity: sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==}
+ '@slack/types@2.16.0':
+ resolution: {integrity: sha512-bICnyukvdklXhwxprR3uF1+ZFkTvWTZge4evlCS4G1H1HU6QLY68AcjqzQRymf7/5gNt6Y4OBb4NdviheyZcAg==}
engines: {node: '>= 12.13.0', npm: '>= 6.12.0'}
- '@slack/webhook@7.0.5':
- resolution: {integrity: sha512-PmbZx89+SmH4zt78FUwe4If8hWX2MAIRmGXjmlF0A8PwyJb/H7CWaQYV6DDlZn1+7Zs6CEytKH0ejEE/idVSDw==}
+ '@slack/webhook@7.0.6':
+ resolution: {integrity: sha512-RvNCcOjNbzl5uQ2TZsbTJ+A+5ptoWMwnyd/W4lKzeXFToIwebeaZiuntcP0usmhZHj1LH9H1T9WN6Bt1B/DLyg==}
engines: {node: '>= 18', npm: '>= 8.6.0'}
'@tsconfig/node20@20.1.5':
@@ -170,17 +188,17 @@ packages:
'@tsconfig/strictest@2.0.5':
resolution: {integrity: sha512-ec4tjL2Rr0pkZ5hww65c+EEPYwxOi4Ryv+0MtjeaSQRJyq322Q27eOQiFbuNgw2hpL4hB1/W/HBGk3VKS43osg==}
- '@types/aws-lambda@8.10.147':
- resolution: {integrity: sha512-nD0Z9fNIZcxYX5Mai2CTmFD7wX7UldCkW2ezCF8D1T5hdiLsnTWDGRpfRYntU6VjTdLQjOvyszru7I1c1oCQew==}
+ '@types/aws-lambda@8.10.152':
+ resolution: {integrity: sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw==}
- '@types/node@22.13.13':
- resolution: {integrity: sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==}
+ '@types/node@24.3.0':
+ resolution: {integrity: sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==}
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
- axios@1.8.4:
- resolution: {integrity: sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==}
+ axios@1.11.0:
+ resolution: {integrity: sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==}
before-after-hook@3.0.2:
resolution: {integrity: sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==}
@@ -226,8 +244,8 @@ packages:
fast-content-type-parse@2.0.1:
resolution: {integrity: sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==}
- follow-redirects@1.15.9:
- resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==}
+ follow-redirects@1.15.11:
+ resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
@@ -235,8 +253,8 @@ packages:
debug:
optional: true
- form-data@4.0.2:
- resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==}
+ form-data@4.0.4:
+ resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==}
engines: {node: '>= 6'}
function-bind@1.1.2:
@@ -278,8 +296,8 @@ packages:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
- octokit@4.1.2:
- resolution: {integrity: sha512-0kcTxJOK3yQrJsRb8wKa28hlTze4QOz4sLuUnfXXnhboDhFKgv8LxS86tFwbsafDW9JZ08ByuVAE8kQbYJIZkA==}
+ octokit@4.1.4:
+ resolution: {integrity: sha512-cRvxRte6FU3vAHRC9+PMSY3D+mRAs2Rd9emMoqp70UGRvJRM3sbAoim2IXRZNNsf8wVfn4sGxVBHRAP+JBVX/g==}
engines: {node: '>= 18'}
proxy-from-env@1.1.0:
@@ -294,182 +312,198 @@ packages:
engines: {node: '>=14.17'}
hasBin: true
- undici-types@6.20.0:
- resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==}
+ undici-types@7.10.0:
+ resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==}
- universal-github-app-jwt@2.2.0:
- resolution: {integrity: sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ==}
+ universal-github-app-jwt@2.2.2:
+ resolution: {integrity: sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw==}
- universal-user-agent@7.0.2:
- resolution: {integrity: sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==}
+ universal-user-agent@7.0.3:
+ resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==}
snapshots:
- '@octokit/app@15.1.4':
+ '@octokit/app@15.1.6':
dependencies:
- '@octokit/auth-app': 7.1.5
- '@octokit/auth-unauthenticated': 6.1.2
- '@octokit/core': 6.1.4
+ '@octokit/auth-app': 7.2.2
+ '@octokit/auth-unauthenticated': 6.1.3
+ '@octokit/core': 6.1.6
'@octokit/oauth-app': 7.1.6
- '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
- '@octokit/types': 13.8.0
- '@octokit/webhooks': 13.6.1
+ '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6)
+ '@octokit/types': 14.1.0
+ '@octokit/webhooks': 13.9.1
- '@octokit/auth-app@7.1.5':
+ '@octokit/auth-app@7.2.2':
dependencies:
- '@octokit/auth-oauth-app': 8.1.3
- '@octokit/auth-oauth-user': 5.1.3
- '@octokit/request': 9.2.2
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/auth-oauth-app': 8.1.4
+ '@octokit/auth-oauth-user': 5.1.6
+ '@octokit/request': 9.2.4
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
toad-cache: 3.7.0
- universal-github-app-jwt: 2.2.0
- universal-user-agent: 7.0.2
+ universal-github-app-jwt: 2.2.2
+ universal-user-agent: 7.0.3
- '@octokit/auth-oauth-app@8.1.3':
+ '@octokit/auth-oauth-app@8.1.4':
dependencies:
- '@octokit/auth-oauth-device': 7.1.3
- '@octokit/auth-oauth-user': 5.1.3
- '@octokit/request': 9.2.2
- '@octokit/types': 13.8.0
- universal-user-agent: 7.0.2
+ '@octokit/auth-oauth-device': 7.1.5
+ '@octokit/auth-oauth-user': 5.1.6
+ '@octokit/request': 9.2.4
+ '@octokit/types': 14.1.0
+ universal-user-agent: 7.0.3
- '@octokit/auth-oauth-device@7.1.3':
+ '@octokit/auth-oauth-device@7.1.5':
dependencies:
- '@octokit/oauth-methods': 5.1.4
- '@octokit/request': 9.2.2
- '@octokit/types': 13.8.0
- universal-user-agent: 7.0.2
+ '@octokit/oauth-methods': 5.1.5
+ '@octokit/request': 9.2.4
+ '@octokit/types': 14.1.0
+ universal-user-agent: 7.0.3
- '@octokit/auth-oauth-user@5.1.3':
+ '@octokit/auth-oauth-user@5.1.6':
dependencies:
- '@octokit/auth-oauth-device': 7.1.3
- '@octokit/oauth-methods': 5.1.4
- '@octokit/request': 9.2.2
- '@octokit/types': 13.8.0
- universal-user-agent: 7.0.2
+ '@octokit/auth-oauth-device': 7.1.5
+ '@octokit/oauth-methods': 5.1.5
+ '@octokit/request': 9.2.4
+ '@octokit/types': 14.1.0
+ universal-user-agent: 7.0.3
'@octokit/auth-token@5.1.2': {}
- '@octokit/auth-unauthenticated@6.1.2':
+ '@octokit/auth-unauthenticated@6.1.3':
dependencies:
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
- '@octokit/core@6.1.4':
+ '@octokit/core@6.1.6':
dependencies:
'@octokit/auth-token': 5.1.2
- '@octokit/graphql': 8.2.1
- '@octokit/request': 9.2.2
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/graphql': 8.2.2
+ '@octokit/request': 9.2.4
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
before-after-hook: 3.0.2
- universal-user-agent: 7.0.2
+ universal-user-agent: 7.0.3
- '@octokit/endpoint@10.1.3':
+ '@octokit/endpoint@10.1.4':
dependencies:
- '@octokit/types': 13.8.0
- universal-user-agent: 7.0.2
+ '@octokit/types': 14.1.0
+ universal-user-agent: 7.0.3
- '@octokit/graphql@8.2.1':
+ '@octokit/graphql@8.2.2':
dependencies:
- '@octokit/request': 9.2.2
- '@octokit/types': 13.8.0
- universal-user-agent: 7.0.2
+ '@octokit/request': 9.2.4
+ '@octokit/types': 14.1.0
+ universal-user-agent: 7.0.3
'@octokit/oauth-app@7.1.6':
dependencies:
- '@octokit/auth-oauth-app': 8.1.3
- '@octokit/auth-oauth-user': 5.1.3
- '@octokit/auth-unauthenticated': 6.1.2
- '@octokit/core': 6.1.4
+ '@octokit/auth-oauth-app': 8.1.4
+ '@octokit/auth-oauth-user': 5.1.6
+ '@octokit/auth-unauthenticated': 6.1.3
+ '@octokit/core': 6.1.6
'@octokit/oauth-authorization-url': 7.1.1
- '@octokit/oauth-methods': 5.1.4
- '@types/aws-lambda': 8.10.147
- universal-user-agent: 7.0.2
+ '@octokit/oauth-methods': 5.1.5
+ '@types/aws-lambda': 8.10.152
+ universal-user-agent: 7.0.3
'@octokit/oauth-authorization-url@7.1.1': {}
- '@octokit/oauth-methods@5.1.4':
+ '@octokit/oauth-methods@5.1.5':
dependencies:
'@octokit/oauth-authorization-url': 7.1.1
- '@octokit/request': 9.2.2
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/request': 9.2.4
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
+
+ '@octokit/openapi-types@24.2.0': {}
- '@octokit/openapi-types@23.0.1': {}
+ '@octokit/openapi-types@25.1.0': {}
- '@octokit/openapi-webhooks-types@9.1.0': {}
+ '@octokit/openapi-webhooks-types@11.0.0': {}
- '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.4)':
+ '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
+ '@octokit/core': 6.1.6
- '@octokit/plugin-paginate-rest@11.4.2(@octokit/core@6.1.4)':
+ '@octokit/plugin-paginate-rest@11.6.0(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
- '@octokit/types': 13.8.0
+ '@octokit/core': 6.1.6
+ '@octokit/types': 13.10.0
- '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.4)':
+ '@octokit/plugin-paginate-rest@12.0.0(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
+ '@octokit/core': 6.1.6
+ '@octokit/types': 14.1.0
- '@octokit/plugin-rest-endpoint-methods@13.3.1(@octokit/core@6.1.4)':
+ '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
- '@octokit/types': 13.8.0
+ '@octokit/core': 6.1.6
- '@octokit/plugin-retry@7.1.4(@octokit/core@6.1.4)':
+ '@octokit/plugin-rest-endpoint-methods@13.5.0(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/core': 6.1.6
+ '@octokit/types': 13.10.0
+
+ '@octokit/plugin-rest-endpoint-methods@14.0.0(@octokit/core@6.1.6)':
+ dependencies:
+ '@octokit/core': 6.1.6
+ '@octokit/types': 14.1.0
+
+ '@octokit/plugin-retry@7.2.1(@octokit/core@6.1.6)':
+ dependencies:
+ '@octokit/core': 6.1.6
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
bottleneck: 2.19.5
- '@octokit/plugin-throttling@9.4.0(@octokit/core@6.1.4)':
+ '@octokit/plugin-throttling@10.0.0(@octokit/core@6.1.6)':
dependencies:
- '@octokit/core': 6.1.4
- '@octokit/types': 13.8.0
+ '@octokit/core': 6.1.6
+ '@octokit/types': 14.1.0
bottleneck: 2.19.5
- '@octokit/request-error@6.1.7':
+ '@octokit/request-error@6.1.8':
dependencies:
- '@octokit/types': 13.8.0
+ '@octokit/types': 14.1.0
- '@octokit/request@9.2.2':
+ '@octokit/request@9.2.4':
dependencies:
- '@octokit/endpoint': 10.1.3
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/endpoint': 10.1.4
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
fast-content-type-parse: 2.0.1
- universal-user-agent: 7.0.2
+ universal-user-agent: 7.0.3
'@octokit/rest@21.1.1':
dependencies:
- '@octokit/core': 6.1.4
- '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
- '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.4)
- '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4)
+ '@octokit/core': 6.1.6
+ '@octokit/plugin-paginate-rest': 11.6.0(@octokit/core@6.1.6)
+ '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.6)
+ '@octokit/plugin-rest-endpoint-methods': 13.5.0(@octokit/core@6.1.6)
+
+ '@octokit/types@13.10.0':
+ dependencies:
+ '@octokit/openapi-types': 24.2.0
- '@octokit/types@13.8.0':
+ '@octokit/types@14.1.0':
dependencies:
- '@octokit/openapi-types': 23.0.1
+ '@octokit/openapi-types': 25.1.0
'@octokit/webhooks-methods@5.1.1': {}
- '@octokit/webhooks@13.6.1':
+ '@octokit/webhooks@13.9.1':
dependencies:
- '@octokit/openapi-webhooks-types': 9.1.0
- '@octokit/request-error': 6.1.7
+ '@octokit/openapi-webhooks-types': 11.0.0
+ '@octokit/request-error': 6.1.8
'@octokit/webhooks-methods': 5.1.1
- '@slack/types@2.14.0': {}
+ '@slack/types@2.16.0': {}
- '@slack/webhook@7.0.5':
+ '@slack/webhook@7.0.6':
dependencies:
- '@slack/types': 2.14.0
- '@types/node': 22.13.13
- axios: 1.8.4
+ '@slack/types': 2.16.0
+ '@types/node': 24.3.0
+ axios: 1.11.0
transitivePeerDependencies:
- debug
@@ -477,18 +511,18 @@ snapshots:
'@tsconfig/strictest@2.0.5': {}
- '@types/aws-lambda@8.10.147': {}
+ '@types/aws-lambda@8.10.152': {}
- '@types/node@22.13.13':
+ '@types/node@24.3.0':
dependencies:
- undici-types: 6.20.0
+ undici-types: 7.10.0
asynckit@0.4.0: {}
- axios@1.8.4:
+ axios@1.11.0:
dependencies:
- follow-redirects: 1.15.9
- form-data: 4.0.2
+ follow-redirects: 1.15.11
+ form-data: 4.0.4
proxy-from-env: 1.1.0
transitivePeerDependencies:
- debug
@@ -533,13 +567,14 @@ snapshots:
fast-content-type-parse@2.0.1: {}
- follow-redirects@1.15.9: {}
+ follow-redirects@1.15.11: {}
- form-data@4.0.2:
+ form-data@4.0.4:
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
es-set-tostringtag: 2.1.0
+ hasown: 2.0.2
mime-types: 2.1.35
function-bind@1.1.2: {}
@@ -582,18 +617,19 @@ snapshots:
dependencies:
mime-db: 1.52.0
- octokit@4.1.2:
+ octokit@4.1.4:
dependencies:
- '@octokit/app': 15.1.4
- '@octokit/core': 6.1.4
+ '@octokit/app': 15.1.6
+ '@octokit/core': 6.1.6
'@octokit/oauth-app': 7.1.6
- '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.4)
- '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
- '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4)
- '@octokit/plugin-retry': 7.1.4(@octokit/core@6.1.4)
- '@octokit/plugin-throttling': 9.4.0(@octokit/core@6.1.4)
- '@octokit/request-error': 6.1.7
- '@octokit/types': 13.8.0
+ '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.6)
+ '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6)
+ '@octokit/plugin-rest-endpoint-methods': 14.0.0(@octokit/core@6.1.6)
+ '@octokit/plugin-retry': 7.2.1(@octokit/core@6.1.6)
+ '@octokit/plugin-throttling': 10.0.0(@octokit/core@6.1.6)
+ '@octokit/request-error': 6.1.8
+ '@octokit/types': 14.1.0
+ '@octokit/webhooks': 13.9.1
proxy-from-env@1.1.0: {}
@@ -601,8 +637,8 @@ snapshots:
typescript@5.7.3: {}
- undici-types@6.20.0: {}
+ undici-types@7.10.0: {}
- universal-github-app-jwt@2.2.0: {}
+ universal-github-app-jwt@2.2.2: {}
- universal-user-agent@7.0.2: {}
+ universal-user-agent@7.0.3: {}
@@ -47,7 +47,10 @@ if [[ -n $apt ]]; then
musl-dev
build-essential
)
- if (grep -qP 'PRETTY_NAME="(Linux Mint 22|.+24\.(04|10))' /etc/os-release); then
+ if (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+13|' /etc/os-release); then
+ # libstdc++-14-dev is in build-essential
+ deps+=( mold )
+ elif (grep -qP 'PRETTY_NAME="(Linux Mint 22|.+24\.(04|10))' /etc/os-release); then
deps+=( mold libstdc++-14-dev )
elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|Linux Mint 21|.+22\.04)' /etc/os-release); then
deps+=( mold libstdc++-12-dev )
@@ -63,6 +63,7 @@ anyhow.workspace = true
gpui.workspace = true
ui.workspace = true
util.workspace = true
+workspace-hack.workspace = true
# Uncomment other workspace dependencies as needed
# assistant.workspace = true
@@ -15,13 +15,11 @@ SQUAWK_VERSION=0.26.0
SQUAWK_BIN="./target/squawk-$SQUAWK_VERSION"
SQUAWK_ARGS="--assume-in-transaction --config script/lib/squawk.toml"
-if [ ! -f "$SQUAWK_BIN" ]; then
- pkgutil --pkg-info com.apple.pkg.RosettaUpdateAuto || /usr/sbin/softwareupdate --install-rosetta --agree-to-license
- # When bootstrapping a brand new CI machine, the `target` directory may not exist yet.
- mkdir -p "./target"
- curl -L -o "$SQUAWK_BIN" "https://github.com/sbdchd/squawk/releases/download/v$SQUAWK_VERSION/squawk-darwin-x86_64"
- chmod +x "$SQUAWK_BIN"
-fi
+pkgutil --pkg-info com.apple.pkg.RosettaUpdateAuto || /usr/sbin/softwareupdate --install-rosetta --agree-to-license
+# When bootstrapping a brand new CI machine, the `target` directory may not exist yet.
+mkdir -p "./target"
+curl -L -o "$SQUAWK_BIN" "https://github.com/sbdchd/squawk/releases/download/v$SQUAWK_VERSION/squawk-darwin-x86_64"
+chmod +x "$SQUAWK_BIN"
if [ -n "$SQUAWK_GITHUB_TOKEN" ]; then
export SQUAWK_GITHUB_REPO_OWNER=$(echo $GITHUB_REPOSITORY | awk -F/ '{print $1}')
@@ -54,6 +54,7 @@ digest = { version = "0.10", features = ["mac", "oid", "std"] }
either = { version = "1", features = ["serde", "use_std"] }
euclid = { version = "0.22" }
event-listener = { version = "5" }
+event-listener-strategy = { version = "0.5" }
flate2 = { version = "1", features = ["zlib-rs"] }
form_urlencoded = { version = "1" }
futures = { version = "0.3", features = ["io-compat"] }
@@ -108,7 +109,6 @@ rustc-hash = { version = "1" }
rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", default-features = false, features = ["fs", "net", "std"] }
rustls = { version = "0.23", features = ["ring"] }
rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] }
-schemars = { version = "1", features = ["chrono04", "indexmap2", "semver1"] }
sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] }
sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] }
semver = { version = "1", features = ["serde"] }
@@ -183,6 +183,7 @@ digest = { version = "0.10", features = ["mac", "oid", "std"] }
either = { version = "1", features = ["serde", "use_std"] }
euclid = { version = "0.22" }
event-listener = { version = "5" }
+event-listener-strategy = { version = "0.5" }
flate2 = { version = "1", features = ["zlib-rs"] }
form_urlencoded = { version = "1" }
futures = { version = "0.3", features = ["io-compat"] }
@@ -242,7 +243,6 @@ rustc-hash = { version = "1" }
rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", default-features = false, features = ["fs", "net", "std"] }
rustls = { version = "0.23", features = ["ring"] }
rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] }
-schemars = { version = "1", features = ["chrono04", "indexmap2", "semver1"] }
sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] }
sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] }
semver = { version = "1", features = ["serde"] }
@@ -403,7 +403,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -444,7 +443,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -483,7 +481,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -524,7 +521,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -572,7 +568,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin
tokio-socks = { version = "0.5", features = ["futures-io"] }
tokio-stream = { version = "0.1", features = ["fs"] }
tower = { version = "0.5", default-features = false, features = ["timeout", "util"] }
-winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
+winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
windows-core = { version = "0.61" }
windows-numerics = { version = "0.2" }
windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] }
@@ -596,7 +592,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin
tokio-socks = { version = "0.5", features = ["futures-io"] }
tokio-stream = { version = "0.1", features = ["fs"] }
tower = { version = "0.5", default-features = false, features = ["timeout", "util"] }
-winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
+winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
windows-core = { version = "0.61" }
windows-numerics = { version = "0.2" }
windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] }
@@ -610,7 +606,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -651,7 +646,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] }
codespan-reporting = { version = "0.12" }
crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] }
-event-listener-strategy = { version = "0.5" }
flume = { version = "0.11" }
foldhash = { version = "0.1", default-features = false, features = ["std"] }
getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
@@ -36,7 +36,10 @@ extend-exclude = [
# glsl isn't recognized by this tool.
"extensions/glsl/languages/glsl/",
# Windows likes its abbreviations.
- "crates/gpui/src/platform/windows/",
+ "crates/gpui/src/platform/windows/directx_renderer.rs",
+ "crates/gpui/src/platform/windows/events.rs",
+ "crates/gpui/src/platform/windows/direct_write.rs",
+ "crates/gpui/src/platform/windows/window.rs",
# Some typos in the base mdBook CSS.
"docs/theme/css/",
# Spellcheck triggers on `|Fixe[sd]|` regex part.