diff --git a/.cargo/ci-config.toml b/.cargo/ci-config.toml index d5e312c2429ad8a4fa933d4080c8fcde217bd6eb..b31b79a59b262a5cc18cf1d2b32124a97bab4fc7 100644 --- a/.cargo/ci-config.toml +++ b/.cargo/ci-config.toml @@ -5,12 +5,16 @@ # Arrays are merged together though. See: https://doc.rust-lang.org/cargo/reference/config.html#hierarchical-structure # The intent for this file is to configure CI build process with a divergance from Zed developers experience; for example, in this config file # we use `-D warnings` for rustflags (which makes compilation fail in presence of warnings during build process). Placing that in developers `config.toml` -# would be incovenient. +# would be inconvenient. # The reason for not using the RUSTFLAGS environment variable is that doing so would override all the settings in the config.toml file, even if the contents of the latter are completely nonsensical. See: https://github.com/rust-lang/cargo/issues/5376 # Here, we opted to use `[target.'cfg(all())']` instead of `[build]` because `[target.'**']` is guaranteed to be cumulative. [target.'cfg(all())'] rustflags = ["-D", "warnings"] +# We don't need full debug information for dev builds (tests, etc.) in CI. +[profile.dev] +debug = "limited" + # Use Mold on Linux, because it's faster than GNU ld and LLD. # # We no longer set this in the default `config.toml` so that developers can opt in to Wild, which diff --git a/.config/hakari.toml b/.config/hakari.toml deleted file mode 100644 index 1e8386a14115be2e36b287ace0d47d464df9e620..0000000000000000000000000000000000000000 --- a/.config/hakari.toml +++ /dev/null @@ -1,42 +0,0 @@ -# This file contains settings for `cargo hakari`. -# See https://docs.rs/cargo-hakari/latest/cargo_hakari/config for a full list of options. - -hakari-package = "workspace-hack" - -resolver = "2" -dep-format-version = "4" -workspace-hack-line-style = "workspace-dotted" - -# this should be the same list as "targets" in ../rust-toolchain.toml platforms = [ - "x86_64-apple-darwin", - "aarch64-apple-darwin", - "x86_64-unknown-linux-gnu", - "aarch64-unknown-linux-gnu", - "x86_64-pc-windows-msvc", - "x86_64-unknown-linux-musl", # remote server -] - -[traversal-excludes] -workspace-members = [ - "remote_server", -] -third-party = [ - { name = "reqwest", version = "0.11.27" }, - # build of remote_server should not include scap / its x11 dependency - { name = "zed-scap", git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", version = "0.0.8-zed" }, - # build of remote_server should not need to include on libalsa through rodio - { name = "rodio", git = "https://github.com/RustAudio/rodio" }, -] - -[final-excludes] -workspace-members = [ - "zed_extension_api", - - # exclude all extensions - "zed_glsl", - "zed_html", - "zed_proto", - "slash_commands_example", - "zed_test_extension", -] diff --git a/.config/nextest.toml b/.config/nextest.toml index b05d68911fb5f50afaa623649fd426f7eb1e7bbe..49fb4d01f794613e430953e4565923a784368836 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -4,3 +4,17 @@ sequential-db-tests = { max-threads = 1 } [[profile.default.overrides]] filter = 'package(db)' test-group = 'sequential-db-tests' + +# Run slowest tests first. 
+# +[[profile.default.overrides]] +filter = 'package(worktree) and test(test_random_worktree_changes)' +priority = 100 + +[[profile.default.overrides]] +filter = 'package(collab) and (test(random_project_collaboration_tests) or test(random_channel_buffer_tests) or test(test_contact_requests) or test(test_basic_following))' +priority = 99 + +[[profile.default.overrides]] +filter = 'package(extension_host) and test(test_extension_store_with_test_extension)' +priority = 99 diff --git a/.github/ISSUE_TEMPLATE/06_bug_windows_beta.yml b/.github/ISSUE_TEMPLATE/06_bug_git.yml similarity index 84% rename from .github/ISSUE_TEMPLATE/06_bug_windows_beta.yml rename to .github/ISSUE_TEMPLATE/06_bug_git.yml index b2b2a0f9dfcd5ddaa0dda41650864b053c5bb933..7a01a728cd4592fb74144087110d475c9dd347a5 100644 --- a/.github/ISSUE_TEMPLATE/06_bug_windows_beta.yml +++ b/.github/ISSUE_TEMPLATE/06_bug_git.yml @@ -1,8 +1,8 @@ -name: Bug Report (Windows Beta) -description: Zed Windows Beta Related Bugs +name: Bug Report (Git) +description: Zed Git Related Bugs type: "Bug" -labels: ["windows"] -title: "Windows Beta: " +labels: ["git"] +title: "Git: " body: - type: textarea attributes: diff --git a/.github/ISSUE_TEMPLATE/11_crash_report.yml b/.github/ISSUE_TEMPLATE/11_crash_report.yml index aa736c75341512442720c202a4cadbf51bf253c8..1300809a39c6ecd9a10eb6a28e80ef4478dba6b5 100644 --- a/.github/ISSUE_TEMPLATE/11_crash_report.yml +++ b/.github/ISSUE_TEMPLATE/11_crash_report.yml @@ -33,9 +33,10 @@ body: required: true - type: textarea attributes: - label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue. + label: If applicable, attach your `Zed.log` file to this issue. description: | macOS: `~/Library/Logs/Zed/Zed.log` + Windows: `C:\Users\YOU\AppData\Local\Zed\logs\Zed.log` Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000. 
value: | diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index faf94017976f4b06fdaaa80a5db8083405a7950a..3bc28249f3b8b2a08a48be040177530c5ecfd407 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -15,8 +15,11 @@ runs: node-version: "18" - name: Limit target directory size + env: + MAX_SIZE: ${{ runner.os == 'macOS' && 300 || 100 }} shell: bash -euxo pipefail {0} - run: script/clear-target-dir-if-larger-than 100 + # Use the variable in the run command + run: script/clear-target-dir-if-larger-than ${{ env.MAX_SIZE }} - name: Run tests shell: bash -euxo pipefail {0} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f5bf5790e4b7daf02f4713d25b1017b494f88f1a..2ebbcaba49823787aafe40e5f3dd80eb67478b42 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -130,39 +130,6 @@ jobs: input: "crates/proto/proto/" against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/" - workspace_hack: - timeout-minutes: 60 - name: Check workspace-hack crate - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: - - namespace-profile-8x16-ubuntu-2204 - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - name: Install cargo-hakari - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2 - with: - command: install - args: cargo-hakari@0.9.35 - - - name: Check workspace-hack Cargo.toml is up-to-date - run: | - cargo hakari generate --diff || { - echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1"; - false - } - - name: Check all crates depend on workspace-hack - run: | - cargo hakari manage-deps --dry-run || { - echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1" - false - } - style: timeout-minutes: 60 name: Check formatting and spelling @@ -210,7 +177,7 @@ jobs: uses: ./.github/actions/check_style - name: Check for typos - uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6 + uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1 with: config: ./typos.toml @@ -507,7 +474,6 @@ jobs: - actionlint - migration_checks # run_tests: If adding required tests, add them here and to script below. 
- - workspace_hack - linux_tests - build_remote_server - macos_tests @@ -533,7 +499,6 @@ jobs: # Only check test jobs if they were supposed to run if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then - [[ "${{ needs.workspace_hack.result }}" != 'success' ]] && { RET_CODE=1; echo "Workspace Hack failed"; } [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; } [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; } [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; } @@ -882,7 +847,8 @@ jobs: auto-release-preview: name: Auto release preview if: | - startsWith(github.ref, 'refs/tags/v') + false + && startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64] runs-on: diff --git a/.github/workflows/community_release_actions.yml b/.github/workflows/community_release_actions.yml index 4a042a5e06b499b1ca278f152798c171971129ee..7724aa2096cfa31c0586c9a43678a805443b259a 100644 --- a/.github/workflows/community_release_actions.yml +++ b/.github/workflows/community_release_actions.yml @@ -38,6 +38,26 @@ jobs: webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }} content: ${{ steps.get-content.outputs.string }} + publish-winget: + runs-on: + - ubuntu-latest + steps: + - name: Set Package Name + id: set-package-name + run: | + if [ "${{ github.event.release.prerelease }}" == "true" ]; then + PACKAGE_NAME=ZedIndustries.Zed.Preview + else + PACKAGE_NAME=ZedIndustries.Zed + fi + + echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT" + - uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f # v2 + with: + identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }} + max-versions-to-keep: 5 + token: ${{ secrets.WINGET_TOKEN }} + send_release_notes_email: if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease runs-on: ubuntu-latest diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index df35d44ca9ceb00a0503e941110c472c0b418fa2..2650cce1406b16e691565077b95d07730845664b 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -22,6 +22,8 @@ jobs: - name: Build docs uses: ./.github/actions/build_docs + env: + DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} - name: Deploy Docs uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 diff --git a/.gitignore b/.gitignore index d248b1f7e5adf30cb286a1737c1cd4f72f0f5d20..2a91a65b6eaef906681bf3f6e315de07b094c4b1 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ /crates/collab/seed.json /crates/theme/schemas/theme.json /crates/zed/resources/flatpak/flatpak-cargo-sources.json +/crates/project_panel/benches/linux_repo_snapshot.txt /dev.zed.Zed*.json /node_modules/ /plugins/bin diff --git a/.zed/settings.json b/.zed/settings.json index 68e05a426f2474cb663aa5ff843905f375170e0f..2760be95819e9340acf55f60616a9c22105ff52a 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -48,7 +48,7 @@ "remove_trailing_whitespace_on_save": true, "ensure_final_newline_on_save": true, "file_scan_exclusions": [ - "crates/assistant_tools/src/edit_agent/evals/fixtures", + "crates/agent/src/edit_agent/evals/fixtures", "crates/eval/worktrees/", "crates/eval/repos/", "**/.git", diff --git a/Cargo.lock b/Cargo.lock 
index 23799cdb7421414af6cb76e2d4e9f12ecc52818c..8d1b799c501cc0978ca055e313aa575c462f1fc5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,6 +11,7 @@ dependencies = [ "agent_settings", "anyhow", "buffer_diff", + "collections", "editor", "env_logger 0.11.8", "file_icons", @@ -25,7 +26,7 @@ dependencies = [ "portable-pty", "project", "prompt_store", - "rand 0.9.1", + "rand 0.9.2", "serde", "serde_json", "settings", @@ -35,11 +36,9 @@ dependencies = [ "terminal", "ui", "url", + "util", "uuid", "watch", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -47,6 +46,7 @@ name = "acp_tools" version = "0.1.0" dependencies = [ "agent-client-protocol", + "collections", "gpui", "language", "markdown", @@ -56,10 +56,8 @@ dependencies = [ "settings", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -69,6 +67,7 @@ dependencies = [ "anyhow", "buffer_diff", "clock", + "collections", "ctor", "futures 0.3.31", "gpui", @@ -77,14 +76,12 @@ dependencies = [ "log", "pretty_assertions", "project", - "rand 0.9.1", + "rand 0.9.2", "serde_json", "settings", "text", + "util", "watch", - "workspace-hack", - "zed-collections", - "zed-util", "zlog", ] @@ -104,25 +101,24 @@ dependencies = [ "release_channel", "smallvec", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] name = "addr2line" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ - "gimli", + "gimli 0.32.3", ] [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aes" @@ -139,98 +135,23 @@ dependencies = [ [[package]] name = "agent" version = "0.1.0" -dependencies = [ - "action_log", - "agent_settings", - "anyhow", - "assistant_context", - "assistant_tool", - "assistant_tools", - "chrono", - "client", - "cloud_llm_client", - "component", - "context_server", - "convert_case 0.8.0", - "fs", - "futures 0.3.31", - "git", - "gpui", - "heed", - "icons", - "indoc", - "itertools 0.14.0", - "language", - "language_model", - "log", - "parking_lot", - "paths", - "postage", - "pretty_assertions", - "project", - "prompt_store", - "rand 0.9.1", - "ref-cast", - "rope", - "schemars 1.0.1", - "serde", - "serde_json", - "settings", - "smol", - "sqlez", - "telemetry", - "text", - "theme", - "thiserror 2.0.12", - "time", - "uuid", - "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", - "zed_env_vars", - "zstd", -] - -[[package]] -name = "agent-client-protocol" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3aaa2bd05a2401887945f8bfd70026e90bc3cf96c62ab9eba2779835bf21dc60" -dependencies = [ - "anyhow", - "async-broadcast", - "async-trait", - "futures 0.3.31", - "log", - "parking_lot", - "schemars 1.0.1", - "serde", - "serde_json", -] - -[[package]] -name = "agent2" -version = "0.1.0" dependencies = [ "acp_thread", "action_log", - "agent", "agent-client-protocol", "agent_servers", "agent_settings", "anyhow", - "assistant_context", - "assistant_tool", - "assistant_tools", + "assistant_text_thread", "chrono", "client", 
"clock", "cloud_llm_client", + "collections", "context_server", "ctor", "db", + "derive_more 0.99.20", "editor", "env_logger 0.11.8", "fs", @@ -240,6 +161,7 @@ dependencies = [ "gpui_tokio", "handlebars 4.5.0", "html_to_markdown", + "http_client", "indoc", "itertools 0.14.0", "language", @@ -253,35 +175,68 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", + "rand 0.9.2", + "regex", "reqwest_client", "rust-embed", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", + "smallvec", "smol", "sqlez", + "streaming_diff", + "strsim", "task", "telemetry", "tempfile", "terminal", "text", "theme", - "thiserror 2.0.12", + "thiserror 2.0.17", "tree-sitter-rust", "ui", "unindent", + "util", "uuid", "watch", "web_search", - "workspace-hack", "worktree", - "zed-collections", - "zed-http-client", - "zed-util", "zed_env_vars", "zlog", - "zstd", + "zstd 0.11.2+zstd.1.5.2", +] + +[[package]] +name = "agent-client-protocol" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f655394a107cd601bd2e5375c2d909ea83adc65678a0e0e8d77613d3c848a7d" +dependencies = [ + "agent-client-protocol-schema", + "anyhow", + "async-broadcast", + "async-trait", + "derive_more 2.0.1", + "futures 0.3.31", + "log", + "parking_lot", + "serde", + "serde_json", +] + +[[package]] +name = "agent-client-protocol-schema" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61be4454304d7df1a5b44c4ae55e707ffe72eac4dfb1ef8762510ce8d8f6d924" +dependencies = [ + "anyhow", + "derive_more 2.0.1", + "schemars 1.0.4", + "serde", + "serde_json", ] [[package]] @@ -296,11 +251,13 @@ dependencies = [ "anyhow", "async-trait", "client", + "collections", "env_logger 0.11.8", "fs", "futures 0.3.31", "gpui", "gpui_tokio", + "http_client", "indoc", "language", "language_model", @@ -317,14 +274,11 @@ dependencies = [ "task", "tempfile", "terminal", - "thiserror 2.0.12", + "thiserror 2.0.17", "ui", + "util", "uuid", "watch", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", ] [[package]] @@ -333,20 +287,19 @@ version = "0.1.0" dependencies = [ "anyhow", "cloud_llm_client", + "collections", "convert_case 0.8.0", "fs", "gpui", "language_model", "paths", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", "settings", - "workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -357,22 +310,20 @@ dependencies = [ "action_log", "agent", "agent-client-protocol", - "agent2", "agent_servers", "agent_settings", "ai_onboarding", "anyhow", "arrayvec", - "assistant_context", "assistant_slash_command", "assistant_slash_commands", - "assistant_tool", - "assistant_tools", + "assistant_text_thread", "audio", "buffer_diff", "chrono", "client", "cloud_llm_client", + "collections", "command_palette_hooks", "component", "context_server", @@ -387,6 +338,7 @@ dependencies = [ "fuzzy", "gpui", "html_to_markdown", + "http_client", "indoc", "itertools 0.14.0", "jsonschema", @@ -409,11 +361,12 @@ dependencies = [ "project", "prompt_store", "proto", - "rand 0.9.1", + "rand 0.9.2", + "ref-cast", "release_channel", "rope", "rules_library", - "schemars 1.0.1", + "schemars 1.0.4", "search", "serde", "serde_json", @@ -436,12 +389,9 @@ dependencies = [ "unindent", "url", "urlencoding", + "util", "watch", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", "zed_actions", ] @@ -451,24 +401,24 @@ version = "0.7.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "const-random", - "getrandom 0.2.15", + "getrandom 0.3.4", "once_cell", "serde", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -493,7 +443,6 @@ dependencies = [ "smallvec", "telemetry", "ui", - "workspace-hack", "zed_actions", ] @@ -504,7 +453,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cb5f4f1ef69bdb8b2095ddd14b09dd74ee0303aae8bd5372667a54cff689a1b" dependencies = [ "base64 0.22.1", - "bitflags 2.9.0", + "bitflags 2.9.4", "home", "libc", "log", @@ -513,7 +462,7 @@ dependencies = [ "piper", "polling", "regex-automata", - "rustix 1.0.7", + "rustix 1.1.2", "rustix-openpty", "serde", "signal-hook", @@ -530,9 +479,27 @@ checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" [[package]] name = "aligned-vec" -version = "0.5.0" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4aa90d7ce82d4be67b64039a3d588d38dbcc6736577de4a847025ce5b0c468d1" +checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" +dependencies = [ + "equator", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] [[package]] name = "allocator-api2" @@ -547,7 +514,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed7572b7ba83a31e20d1b48970ee402d2e3e0537dcfe0a3ff4d6eb7508617d43" dependencies = [ "alsa-sys", - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg-if", "libc", ] @@ -570,23 +537,17 @@ checksum = "e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b" [[package]] name = "ammonia" -version = "4.1.0" +version = "4.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ada2ee439075a3e70b6992fce18ac4e407cd05aea9ca3f75d2c0b0c20bbb364" +checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6" dependencies = [ "cssparser", - "html5ever 0.31.0", + "html5ever 0.35.0", "maplit", "tendril", "url", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -604,9 +565,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", 
"anstyle-parse", @@ -619,37 +580,37 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" dependencies = [ "anstyle", - "once_cell", - "windows-sys 0.59.0", + "once_cell_polyfill", + "windows-sys 0.60.2", ] [[package]] @@ -659,14 +620,13 @@ dependencies = [ "anyhow", "chrono", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", "settings", - "strum 0.27.1", - "thiserror 2.0.12", - "workspace-hack", - "zed-http-client", + "strum 0.27.2", + "thiserror 2.0.17", ] [[package]] @@ -677,9 +637,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "approx" @@ -692,9 +652,9 @@ dependencies = [ [[package]] name = "arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] @@ -707,9 +667,24 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", +] + +[[package]] +name = "argminmax" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f13d10a41ac8d2ec79ee34178d61e6f47a29c2edfe7ef1721c7383b0359e65" +dependencies = [ + "num-traits", ] +[[package]] +name = "array-init-cursor" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed51fe0f224d1d4ea768be38c51f9f831dee9d05c163c11fba0b8c44387b1fc3" + [[package]] name = "arraydeque" version = "0.5.1" @@ -774,13 +749,13 @@ dependencies = [ "enumflags2", "futures-channel", "futures-util", - "rand 0.9.1", + "rand 0.9.2", "serde", "serde_repr", "url", "wayland-backend", "wayland-client", - "wayland-protocols 0.32.6", + "wayland-protocols 0.32.9", "zbus", ] @@ -795,7 +770,7 @@ dependencies = [ "enumflags2", "futures-channel", 
"futures-util", - "rand 0.9.1", + "rand 0.9.2", "serde", "serde_repr", "url", @@ -813,9 +788,8 @@ dependencies = [ "net", "smol", "tempfile", - "windows 0.61.1", - "workspace-hack", - "zed-util", + "util", + "windows 0.61.3", "zeroize", ] @@ -826,55 +800,6 @@ dependencies = [ "anyhow", "gpui", "rust-embed", - "workspace-hack", -] - -[[package]] -name = "assistant_context" -version = "0.1.0" -dependencies = [ - "agent_settings", - "anyhow", - "assistant_slash_command", - "assistant_slash_commands", - "chrono", - "client", - "clock", - "cloud_llm_client", - "context_server", - "fs", - "futures 0.3.31", - "fuzzy", - "gpui", - "indoc", - "language", - "language_model", - "log", - "open_ai", - "parking_lot", - "paths", - "pretty_assertions", - "project", - "prompt_store", - "proto", - "rand 0.9.1", - "regex", - "rpc", - "serde", - "serde_json", - "settings", - "smallvec", - "smol", - "telemetry_events", - "text", - "ui", - "unindent", - "uuid", - "workspace", - "workspace-hack", - "zed-collections", - "zed-util", - "zed_env_vars", ] [[package]] @@ -883,7 +808,8 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "derive_more", + "collections", + "derive_more 0.99.20", "extension", "futures 0.3.31", "gpui", @@ -894,10 +820,8 @@ dependencies = [ "serde", "serde_json", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -907,6 +831,7 @@ dependencies = [ "anyhow", "assistant_slash_command", "chrono", + "collections", "context_server", "editor", "feature_flags", @@ -916,6 +841,7 @@ dependencies = [ "globset", "gpui", "html_to_markdown", + "http_client", "language", "pretty_assertions", "project", @@ -927,113 +853,57 @@ dependencies = [ "smol", "text", "ui", + "util", "workspace", - "workspace-hack", "worktree", - "zed-collections", - "zed-http-client", - "zed-util", - "zlog", -] - -[[package]] -name = "assistant_tool" -version = "0.1.0" -dependencies = [ - "action_log", - "anyhow", - "buffer_diff", - "clock", - "ctor", - "derive_more", - "gpui", - "icons", - "indoc", - "language", - "language_model", - "log", - "parking_lot", - "pretty_assertions", - "project", - "rand 0.9.1", - "regex", - "serde", - "serde_json", - "settings", - "text", - "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zlog", ] [[package]] -name = "assistant_tools" +name = "assistant_text_thread" version = "0.1.0" dependencies = [ - "action_log", "agent_settings", "anyhow", - "assistant_tool", - "buffer_diff", + "assistant_slash_command", + "assistant_slash_commands", "chrono", "client", "clock", "cloud_llm_client", - "component", - "derive_more", - "diffy", - "editor", - "feature_flags", + "collections", + "context_server", "fs", "futures 0.3.31", + "fuzzy", "gpui", - "gpui_tokio", - "handlebars 4.5.0", - "html_to_markdown", "indoc", - "itertools 0.14.0", "language", "language_model", - "language_models", "log", - "lsp", - "markdown", - "open", + "open_ai", + "parking_lot", "paths", - "portable-pty", "pretty_assertions", "project", "prompt_store", - "rand 0.9.1", + "proto", + "rand 0.9.2", "regex", - "reqwest_client", - "rust-embed", - "schemars 1.0.1", + "rpc", "serde", "serde_json", "settings", "smallvec", "smol", - "streaming_diff", - "strsim", - "task", - "tempfile", - "terminal", - "terminal_view", - "theme", - "tree-sitter-rust", + "telemetry_events", + "text", "ui", "unindent", - "watch", - "web_search", + "util", + "uuid", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", - "zlog", + "zed_env_vars", ] 
[[package]] @@ -1052,7 +922,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" dependencies = [ - "event-listener 5.4.0", + "event-listener 5.4.1", "event-listener-strategy", "futures-core", "pin-project-lite", @@ -1071,9 +941,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -1083,9 +953,9 @@ dependencies = [ [[package]] name = "async-compat" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bab94bde396a3f7b4962e396fdad640e241ed797d4d8d77fc8c237d14c58fc0" +checksum = "a1ba85bc55464dcbf728b56d97e119d673f4cf9062be330a9a26f3acf504a590" dependencies = [ "futures-core", "futures-io", @@ -1096,15 +966,14 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.22" +version = "0.4.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a194f9d963d8099596278594b3107448656ba73831c9d8c783e613ce86da64" +checksum = "5a89bce6054c720275ac2432fbba080a66a2106a44a1b804553930ca6909f4e0" dependencies = [ - "deflate64", - "flate2", + "compression-codecs", + "compression-core", "futures-core", "futures-io", - "memchr", "pin-project-lite", ] @@ -1120,26 +989,27 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.13.1" +version = "1.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" dependencies = [ "async-task", "concurrent-queue", "fastrand 2.3.0", - "futures-lite 2.6.0", + "futures-lite 2.6.1", + "pin-project-lite", "slab", ] [[package]] name = "async-fs" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f7e37c0ed80b2a977691c47dae8625cfb21e205827106c64f7c588766b2e50" +checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5" dependencies = [ - "async-lock", + "async-lock 3.4.1", "blocking", - "futures-lite 2.6.0", + "futures-lite 2.6.1", ] [[package]] @@ -1148,31 +1018,40 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-executor", "async-io", - "async-lock", + "async-lock 3.4.1", "blocking", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "once_cell", ] [[package]] name = "async-io" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19634d6336019ef220f09fd31168ce5c184b295cbf80345437cc36094ef223ca" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "parking", "polling", - "rustix 1.0.7", + "rustix 1.1.2", "slab", - "windows-sys 0.60.2", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", ] [[package]] @@ -1181,7 +1060,7 @@ version = "3.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" dependencies = [ - "event-listener 5.4.0", + "event-listener 5.4.1", "event-listener-strategy", "pin-project-lite", ] @@ -1194,7 +1073,7 @@ checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" dependencies = [ "async-io", "blocking", - "futures-lite 2.6.0", + "futures-lite 2.6.1", ] [[package]] @@ -1208,21 +1087,20 @@ dependencies = [ [[package]] name = "async-process" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-io", - "async-lock", + "async-lock 3.4.1", "async-signal", "async-task", "blocking", "cfg-if", - "event-listener 5.4.0", - "futures-lite 2.6.0", - "rustix 0.38.44", - "tracing", + "event-listener 5.4.1", + "futures-lite 2.6.1", + "rustix 1.1.2", ] [[package]] @@ -1233,44 +1111,44 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "async-signal" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" dependencies = [ "async-io", - "async-lock", + "async-lock 3.4.1", "atomic-waker", "cfg-if", "futures-core", "futures-io", - "rustix 0.38.44", + "rustix 1.1.2", "signal-hook-registry", "slab", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "async-std" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" dependencies = [ "async-attributes", "async-channel 1.9.0", "async-global-executor", "async-io", - "async-lock", + "async-lock 3.4.1", "async-process", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "gloo-timers", "kv-log-macro", "log", @@ -1301,14 +1179,14 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "async-tar" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a42f905d4f623faf634bbd1e001e84e0efc24694afa64be9ad239bf6ca49e1f8" +checksum = "d1937db2d56578aa3919b9bdb0e5100693fd7d1c0f145c53eb81fbb03e217550" dependencies = [ "async-std", "filetime", @@ -1332,14 +1210,14 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "async-tungstenite" -version = "0.29.1" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ef0f7efedeac57d9b26170f72965ecfd31473ca52ca7a64e925b0b6f5f079886" +checksum = "ee88b4c88ac8c9ea446ad43498955750a4bbe64c4392f21ccfe5d952865e318f" dependencies = [ "atomic-waker", "futures-core", @@ -1351,7 +1229,7 @@ dependencies = [ "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", - "tungstenite 0.26.2", + "tungstenite 0.27.0", ] [[package]] @@ -1362,7 +1240,7 @@ checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52" dependencies = [ "async-compression", "crc32fast", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "pin-project", "thiserror 1.0.69", ] @@ -1389,6 +1267,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atoi_simd" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a49e05797ca52e312a0c658938b7d00693ef037799ef7187678f212d7684cf" +dependencies = [ + "debug_unsafe", +] + [[package]] name = "atomic" version = "0.5.3" @@ -1407,6 +1294,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-tar", + "collections", "crossbeam", "denoise", "gpui", @@ -1417,10 +1305,8 @@ dependencies = [ "serde", "settings", "smol", - "thiserror 2.0.12", - "workspace-hack", - "zed-collections", - "zed-util", + "thiserror 2.0.17", + "util", ] [[package]] @@ -1443,6 +1329,7 @@ dependencies = [ "client", "db", "gpui", + "http_client", "log", "paths", "release_channel", @@ -1453,8 +1340,6 @@ dependencies = [ "tempfile", "which 6.0.3", "workspace", - "workspace-hack", - "zed-http-client", ] [[package]] @@ -1464,9 +1349,8 @@ dependencies = [ "anyhow", "log", "simplelog", - "windows 0.61.1", + "windows 0.61.3", "winresource", - "workspace-hack", ] [[package]] @@ -1478,28 +1362,27 @@ dependencies = [ "client", "editor", "gpui", + "http_client", "markdown_preview", "release_channel", "serde", "serde_json", "smol", + "util", "workspace", - "workspace-hack", - "zed-http-client", - "zed-util", ] [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "av1-grain" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6678909d8c5d46a42abcf571271e15fdbc0a225e3646cf23762cd415046c78bf" +checksum = "4f3efb2ca85bc610acfa917b5aaa36f3fcbebed5b3182d7f877b02531c4b80c8" dependencies = [ "anyhow", "arrayvec", @@ -1511,18 +1394,18 @@ dependencies = [ [[package]] name = "avif-serialize" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e" +checksum = "47c8fbc0f831f4519fe8b810b6a7a91410ec83031b8233f730a0480029f6a23f" dependencies = [ "arrayvec", ] [[package]] name = "aws-config" -version = "1.6.1" +version = "1.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c39646d1a6b51240a1a23bb57ea4eebede7e16fbc237fdc876980233dcecb4f" +checksum = "37cf2b6af2a95a20e266782b4f76f1a5e12bf412a9db2de9c1e9123b9d8c0ad8" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1550,9 +1433,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.2" +version = "1.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4471bef4c22a06d2c7a1b6492493d3fdf24a805323109d6874f9c94d5906ac14" +checksum = 
"faf26925f4a5b59eb76722b63c2892b1d70d06fa053c72e4a100ec308c1d47bc" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1562,9 +1445,9 @@ dependencies = [ [[package]] name = "aws-lc-rs" -version = "1.13.1" +version = "1.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fcc8f365936c834db5514fc45aee5b1202d677e6b40e48468aaaa8183ca8c7" +checksum = "879b6c89592deb404ba4dc0ae6b58ffd1795c78991cbb5b8bc441c48a070440d" dependencies = [ "aws-lc-sys", "zeroize", @@ -1572,11 +1455,11 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.29.0" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61b1d86e7705efe1be1b569bab41d4fa1e14e220b60a160f78de2db687add079" +checksum = "107a4e9d9cab9963e04e84bb8dee0e25f2a987f9a8bad5ed054abd439caa8f8c" dependencies = [ - "bindgen 0.69.5", + "bindgen 0.72.1", "cc", "cmake", "dunce", @@ -1585,9 +1468,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.6" +version = "1.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0aff45ffe35196e593ea3b9dd65b320e51e2dda95aff4390bc459e461d09c6ad" +checksum = "bfa006bb32360ed90ac51203feafb9d02e3d21046e1fd3a450a404b90ea73e5d" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1602,7 +1485,6 @@ dependencies = [ "fastrand 2.3.0", "http 0.2.12", "http-body 0.4.6", - "once_cell", "percent-encoding", "pin-project-lite", "tracing", @@ -1611,9 +1493,9 @@ dependencies = [ [[package]] name = "aws-sdk-bedrockruntime" -version = "1.82.0" +version = "1.109.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb95f77abd4321348dd2f52a25e1de199732f54d2a35860ad20f5df21c66b44" +checksum = "fbfdfd941dcb253c17bf70baddbf1e5b22f19e29d313d2e049bad4b1dadb2011" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1630,16 +1512,15 @@ dependencies = [ "fastrand 2.3.0", "http 0.2.12", "hyper 0.14.32", - "once_cell", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-kinesis" -version = "1.66.0" +version = "1.91.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e43e5fb05c78cdad4fef5be4503465e4b42292f472fc991823ea4c50078208e4" +checksum = "699a3d645a2ab5cb12ca02eb23979753953414429fd6584ea8841af6bc4e0516" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1654,16 +1535,15 @@ dependencies = [ "bytes 1.10.1", "fastrand 2.3.0", "http 0.2.12", - "once_cell", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-s3" -version = "1.82.0" +version = "1.108.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6eab2900764411ab01c8e91a76fd11a63b4e12bc3da97d9e14a0ce1343d86d3" +checksum = "200be4aed61e3c0669f7268bacb768f283f1c32a7014ce57225e1160be2f6ccb" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1686,7 +1566,6 @@ dependencies = [ "http 1.3.1", "http-body 0.4.6", "lru", - "once_cell", "percent-encoding", "regex-lite", "sha2", @@ -1696,9 +1575,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.64.0" +version = "1.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d4bdb0e5f80f0689e61c77ab678b2b9304af329616af38aef5b6b967b8e736" +checksum = "4a0abbfab841446cce6e87af853a3ba2cc1bc9afcd3f3550dd556c43d434c86d" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1712,16 +1591,15 @@ dependencies = [ "bytes 1.10.1", "fastrand 2.3.0", "http 0.2.12", - "once_cell", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-ssooidc" -version = 
"1.65.0" +version = "1.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbbb3ce8da257aedbccdcb1aadafbbb6a5fe9adf445db0e1ea897bdc7e22d08" +checksum = "9a68d675582afea0e94d38b6ca9c5aaae4ca14f1d36faa6edb19b42e687e70d7" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1735,16 +1613,15 @@ dependencies = [ "bytes 1.10.1", "fastrand 2.3.0", "http 0.2.12", - "once_cell", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-sts" -version = "1.65.0" +version = "1.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96a78a8f50a1630db757b60f679c8226a8a70ee2ab5f5e6e51dc67f6c61c7cfd" +checksum = "d30990923f4f675523c51eb1c0dec9b752fb267b36a61e83cbc219c9d86da715" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1759,16 +1636,15 @@ dependencies = [ "aws-types", "fastrand 2.3.0", "http 0.2.12", - "once_cell", "regex-lite", "tracing", ] [[package]] name = "aws-sigv4" -version = "1.3.0" +version = "1.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d03c3c05ff80d54ff860fe38c726f6f494c639ae975203a101335f223386db" +checksum = "bffc03068fbb9c8dd5ce1c6fb240678a5cffb86fb2b7b1985c999c4b83c8df68" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1782,7 +1658,6 @@ dependencies = [ "hmac", "http 0.2.12", "http 1.3.1", - "once_cell", "p256", "percent-encoding", "ring", @@ -1795,9 +1670,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.2.5" +version = "1.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e190749ea56f8c42bf15dd76c65e14f8f765233e6df9b0506d9d934ebef867c" +checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" dependencies = [ "futures-util", "pin-project-lite", @@ -1806,16 +1681,14 @@ dependencies = [ [[package]] name = "aws-smithy-checksums" -version = "0.63.1" +version = "0.63.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65d21e1ba6f2cdec92044f904356a19f5ad86961acf015741106cdfafd747c0" +checksum = "165d8583d8d906e2fb5511d29201d447cc710864f075debcdd9c31c265412806" dependencies = [ "aws-smithy-http", "aws-smithy-types", "bytes 1.10.1", - "crc32c", - "crc32fast", - "crc64fast-nvme", + "crc-fast", "hex", "http 0.2.12", "http-body 0.4.6", @@ -1828,9 +1701,9 @@ dependencies = [ [[package]] name = "aws-smithy-eventstream" -version = "0.60.8" +version = "0.60.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c45d3dddac16c5c59d553ece225a88870cf81b7b813c9cc17b78cf4685eac7a" +checksum = "9656b85088f8d9dc7ad40f9a6c7228e1e8447cdf4b046c87e152e0805dea02fa" dependencies = [ "aws-smithy-types", "bytes 1.10.1", @@ -1839,9 +1712,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.0" +version = "0.62.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5949124d11e538ca21142d1fba61ab0a2a2c1bc3ed323cdb3e4b878bfb83166" +checksum = "3feafd437c763db26aa04e0cc7591185d0961e64c61885bece0fb9d50ceac671" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -1852,7 +1725,6 @@ dependencies = [ "http 0.2.12", "http 1.3.1", "http-body 0.4.6", - "once_cell", "percent-encoding", "pin-project-lite", "pin-utils", @@ -1861,56 +1733,57 @@ dependencies = [ [[package]] name = "aws-smithy-http-client" -version = "1.0.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8aff1159006441d02e57204bf57a1b890ba68bedb6904ffd2873c1c4c11c546b" 
+checksum = "1053b5e587e6fa40ce5a79ea27957b04ba660baa02b28b7436f64850152234f1" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", "aws-smithy-types", - "h2 0.4.9", + "h2 0.3.27", + "h2 0.4.12", "http 0.2.12", "http 1.3.1", "http-body 0.4.6", "hyper 0.14.32", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-rustls 0.24.2", - "hyper-rustls 0.27.5", + "hyper-rustls 0.27.7", "hyper-util", "pin-project-lite", "rustls 0.21.12", - "rustls 0.23.26", - "rustls-native-certs 0.8.1", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", + "tokio-rustls 0.26.2", "tower 0.5.2", "tracing", ] [[package]] name = "aws-smithy-json" -version = "0.61.3" +version = "0.61.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92144e45819cae7dc62af23eac5a038a58aa544432d2102609654376a900bd07" +checksum = "cff418fc8ec5cadf8173b10125f05c2e7e1d46771406187b2c878557d4503390" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-observability" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445d065e76bc1ef54963db400319f1dd3ebb3e0a74af20f7f7630625b0cc7cc0" +checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" dependencies = [ "aws-smithy-runtime-api", - "once_cell", ] [[package]] name = "aws-smithy-query" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" dependencies = [ "aws-smithy-types", "urlencoding", @@ -1918,9 +1791,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.8.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0152749e17ce4d1b47c7747bdfec09dac1ccafdcbc741ebf9daa2a373356730f" +checksum = "40ab99739082da5347660c556689256438defae3bcefd66c52b095905730e404" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1934,7 +1807,6 @@ dependencies = [ "http 1.3.1", "http-body 0.4.6", "http-body 1.0.1", - "once_cell", "pin-project-lite", "pin-utils", "tokio", @@ -1943,9 +1815,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.7.4" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da37cf5d57011cb1753456518ec76e31691f1f474b73934a284eb2a1c76510f" +checksum = "3683c5b152d2ad753607179ed71988e8cfd52964443b4f74fd8e552d0bbfeb46" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1960,9 +1832,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.0" +version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836155caafba616c0ff9b07944324785de2ab016141c3550bd1c07882f8cee8f" +checksum = "9f5b3a7486f6690ba25952cabf1e7d75e34d69eaff5081904a47bc79074d6457" dependencies = [ "base64-simd", "bytes 1.10.1", @@ -1986,18 +1858,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.9" +version = "0.60.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab0b0166827aa700d3dc519f72f8b3a91c35d0b8d042dc5d643a91e6f80648fc" +checksum = "e9c34127e8c624bc2999f3b657e749c1393bedc9cd97b92a804db8ced4d2e163" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.6" +version = "1.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3873f8deed8927ce8d04487630dc9ff73193bab64742a61d050e57a68dec4125" +checksum = "e2fd329bf0e901ff3f60425691410c69094dc2a1f34b331f37bfc4e9ac1565a1" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -2013,8 +1885,7 @@ version = "0.1.0" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", - "workspace-hack", - "zed-http-client", + "http_client", ] [[package]] @@ -2093,17 +1964,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", - "object", + "object 0.37.3", "rustc-demangle", - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] @@ -2136,9 +2007,9 @@ dependencies = [ [[package]] name = "base64ct" -version = "1.7.3" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bedrock" @@ -2148,12 +2019,11 @@ dependencies = [ "aws-sdk-bedrockruntime", "aws-smithy-types", "futures 0.3.31", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", - "strum 0.27.1", - "thiserror 2.0.12", - "workspace-hack", + "strum 0.27.2", + "thiserror 2.0.17", ] [[package]] @@ -2180,56 +2050,55 @@ dependencies = [ ] [[package]] -name = "bindgen" -version = "0.69.5" +name = "bincode" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" dependencies = [ - "bitflags 2.9.0", - "cexpr", - "clang-sys", - "itertools 0.12.1", - "lazy_static", - "lazycell", - "log", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.101", - "which 4.4.2", + "bincode_derive", + "serde", + "unty", +] + +[[package]] +name = "bincode_derive" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" +dependencies = [ + "virtue", ] [[package]] name = "bindgen" -version = "0.70.1" +version = "0.71.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" +checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cexpr", "clang-sys", - "itertools 0.13.0", + "itertools 0.12.1", + "log", + "prettyplease", "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash 2.1.1", "shlex", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "bindgen" -version = "0.71.1" +version = "0.72.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3" +checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cexpr", "clang-sys", - "itertools 0.13.0", + "itertools 0.12.1", "log", "prettyplease", "proc-macro2", @@ -2237,7 +2106,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn 2.0.101", + "syn 2.0.106", ] 
[[package]] @@ -2272,9 +2141,9 @@ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" [[package]] name = "bit_field" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" +checksum = "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6" [[package]] name = "bitflags" @@ -2284,9 +2153,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" dependencies = [ "serde", ] @@ -2317,9 +2186,9 @@ checksum = "e4deb8f595ce7f00dee3543ebf6fd9a20ea86fc421ab79600dac30876250bdae" dependencies = [ "ash", "ash-window", - "bitflags 2.9.0", + "bitflags 2.9.4", "bytemuck", - "codespan-reporting", + "codespan-reporting 0.12.0", "glow", "gpu-alloc", "gpu-alloc-ash", @@ -2352,7 +2221,7 @@ checksum = "27142319e2f4c264581067eaccb9f80acccdde60d8b4bf57cc50cd3152f109ca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2367,6 +2236,19 @@ dependencies = [ "profiling", ] +[[package]] +name = "blake3" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq 0.3.1", +] + [[package]] name = "block" version = "0.1.6" @@ -2393,23 +2275,23 @@ dependencies = [ [[package]] name = "block2" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "340d2f0bdb2a43c1d3cd40513185b2bd7def0aa1052f956455114bc98f82dcf2" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" dependencies = [ "objc2", ] [[package]] name = "blocking" -version = "1.6.1" +version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-task", "futures-io", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "piper", ] @@ -2453,9 +2335,15 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] +[[package]] +name = "boxcar" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f64beae40a84da1b4b26ff2761a5b895c12adc41dc25aaee1c4f2bbfe97a6e" + [[package]] name = "breadcrumbs" version = "0.1.0" @@ -2467,10 +2355,30 @@ dependencies = [ "theme", "ui", "workspace", - "workspace-hack", "zed_actions", ] +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + 
[[package]] name = "bstr" version = "1.12.0" @@ -2495,14 +2403,13 @@ dependencies = [ "language", "log", "pretty_assertions", - "rand 0.9.1", + "rand 0.9.2", "rope", "serde_json", + "sum_tree", "text", "unindent", - "workspace-hack", - "zed-sum-tree", - "zed-util", + "util", "zlog", ] @@ -2514,9 +2421,9 @@ checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b" [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" dependencies = [ "allocator-api2", ] @@ -2551,28 +2458,28 @@ dependencies = [ [[package]] name = "bytecount" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" +checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" [[package]] name = "bytemuck" -version = "1.22.0" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6b1fc10dbac614ebc03540c9dbd60e83887fda27794998c6528f1782047d540" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.9.3" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1" +checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2602,6 +2509,9 @@ name = "bytes" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +dependencies = [ + "serde", +] [[package]] name = "bytes-utils" @@ -2639,12 +2549,12 @@ version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "801927ee168e17809ab8901d9f01f700cd7d8d6a6527997fee44e4b0327a253c" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "cached_proc_macro", "cached_proc_macro_types", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "once_cell", - "thiserror 2.0.12", + "thiserror 2.0.17", "web-time", ] @@ -2654,10 +2564,10 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9225bdcf4e4a9a4c08bf16607908eb2fbf746828d5e0b5e019726dbf6571f201" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2673,11 +2583,13 @@ dependencies = [ "anyhow", "audio", "client", + "collections", "feature_flags", "fs", "futures 0.3.31", "gpui", "gpui_tokio", + "http_client", "language", "livekit_client", "log", @@ -2686,10 +2598,7 @@ dependencies = [ "serde", "settings", "telemetry", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", + "util", ] [[package]] @@ -2698,7 +2607,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b99da2f8558ca23c71f4fd15dc57c906239752dd27ff3c00a1d56b685b7cbfec" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "log", "polling", "rustix 0.38.44", @@ -2720,11 +2629,11 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.9" +version = "1.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -2739,13 +2648,13 @@ dependencies = [ "memmap2", "num-traits", "num_cpus", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", "rayon", "safetensors", "thiserror 1.0.69", "ug", - "yoke", + "yoke 0.7.5", "zip 1.1.4", ] @@ -2794,7 +2703,7 @@ checksum = "9f83833816c66c986e913b22ac887cec216ea09301802054316fc5301809702c" dependencies = [ "cap-primitives", "cap-std", - "rustix 1.0.7", + "rustix 1.1.2", "smallvec", ] @@ -2810,7 +2719,7 @@ dependencies = [ "io-lifetimes", "ipnet", "maybe-owned", - "rustix 1.0.7", + "rustix 1.1.2", "rustix-linux-procfs", "windows-sys 0.59.0", "winx", @@ -2835,7 +2744,7 @@ dependencies = [ "cap-primitives", "io-extras", "io-lifetimes", - "rustix 1.0.7", + "rustix 1.1.2", ] [[package]] @@ -2848,7 +2757,7 @@ dependencies = [ "cap-primitives", "iana-time-zone", "once_cell", - "rustix 1.0.7", + "rustix 1.1.2", "winx", ] @@ -2872,7 +2781,7 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -2882,7 +2791,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fbd1fe9db3ebf71b89060adaf7b0504c2d6a425cf061313099547e382c2e472" dependencies = [ "serde", - "toml 0.8.20", + "toml 0.8.23", ] [[package]] @@ -2891,6 +2800,15 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "castaway" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" +dependencies = [ + "rustversion", +] + [[package]] name = "cbc" version = "0.1.2" @@ -2907,23 +2825,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eadd868a2ce9ca38de7eeafdcec9c7065ef89b42b32f0839278d55f35c54d1ff" dependencies = [ "heck 0.4.1", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "proc-macro2", "quote", "serde", "serde_json", - "syn 2.0.101", + "syn 2.0.106", "tempfile", - "toml 0.8.20", + "toml 0.8.23", ] [[package]] name = "cc" -version = "1.2.19" +version = "1.2.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3a13707ac958681c13b39b458c073d0d9bc8a22cb1b2f4c8e55eb72c13f362" +checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -2956,9 +2875,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" @@ -2988,8 +2907,10 @@ dependencies = [ "anyhow", "client", "clock", + "collections", "futures 0.3.31", "gpui", + "http_client", "language", "log", "postage", @@ -2998,25 +2919,31 @@ dependencies = [ "settings", "text", "time", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", + "util", ] [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", - "windows-link 0.1.1", + "windows-link 0.2.1", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf 0.12.1", ] [[package]] @@ -3065,9 +2992,9 @@ dependencies = [ [[package]] name = "circular-buffer" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23bdce1da528cadbac4654b5632bfcd8c6c63e25b1d42cea919a95958790b51d" +checksum = "14c638459986b83c2b885179bd4ea6a2cbb05697b001501a56adb3a3d230803b" [[package]] name = "clang-sys" @@ -3082,9 +3009,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.37" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" +checksum = "f4512b90fa68d3a9932cea5184017c5d200f5921df706d45e853537dea51508f" dependencies = [ "clap_builder", "clap_derive", @@ -3092,9 +3019,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.37" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" +checksum = "0025e98baa12e766c67ba13ff4695a887a1eba19569aad00a472546795bd6730" dependencies = [ "anstream", "anstyle", @@ -3105,30 +3032,30 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.47" +version = "4.5.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f5378ea264ad4f82bbc826628b5aad714a75abf6ece087e923010eb937fb6" +checksum = "2348487adcd4631696ced64ccdb40d38ac4d31cae7f2eec8817fcea1b9d1c43c" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "cli" @@ -3137,6 +3064,7 @@ dependencies = [ "anyhow", "askpass", "clap", + "collections", "core-foundation 0.10.0", "core-services", "exec", @@ -3148,10 +3076,8 @@ dependencies = [ "release_channel", "serde", "tempfile", - "windows 0.61.1", - "workspace-hack", - "zed-collections", - "zed-util", + "util", + "windows 0.61.3", ] [[package]] @@ -3165,13 +3091,15 @@ dependencies = [ "clock", "cloud_api_client", "cloud_llm_client", + "collections", "credentials_provider", - "derive_more", + "derive_more 0.99.20", "feature_flags", "fs", "futures 0.3.31", "gpui", "gpui_tokio", + "http_client", "http_client_tls", "httparse", "log", @@ -3179,7 +3107,7 @@ dependencies = [ "parking_lot", "paths", "postage", - "rand 0.9.1", + "rand 0.9.2", "regex", "release_channel", "rpc", @@ -3193,7 +3121,7 @@ dependencies = 
[ "telemetry", "telemetry_events", "text", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tiny_http", "tokio", @@ -3201,12 +3129,9 @@ dependencies = [ "tokio-rustls 0.26.2", "tokio-socks", "url", - "windows 0.61.1", - "workspace-hack", + "util", + "windows 0.61.3", "worktree", - "zed-collections", - "zed-http-client", - "zed-util", ] [[package]] @@ -3216,7 +3141,6 @@ dependencies = [ "parking_lot", "serde", "smallvec", - "workspace-hack", ] [[package]] @@ -3228,11 +3152,10 @@ dependencies = [ "futures 0.3.31", "gpui", "gpui_tokio", + "http_client", "parking_lot", "serde_json", - "workspace-hack", "yawc", - "zed-http-client", ] [[package]] @@ -3246,7 +3169,6 @@ dependencies = [ "pretty_assertions", "serde", "serde_json", - "workspace-hack", ] [[package]] @@ -3255,12 +3177,12 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", + "indoc", "pretty_assertions", "serde", "serde_json", - "strum 0.27.1", + "strum 0.27.2", "uuid", - "workspace-hack", ] [[package]] @@ -3273,8 +3195,7 @@ dependencies = [ "ordered-float 2.10.1", "rustc-hash 2.1.1", "serde", - "strum 0.27.1", - "workspace-hack", + "strum 0.27.2", ] [[package]] @@ -3288,9 +3209,12 @@ dependencies = [ [[package]] name = "cobs" -version = "0.2.3" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.17", +] [[package]] name = "cocoa" @@ -3314,7 +3238,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f79398230a6e2c08f5c9760610eb6924b52aa9e7950a619602baba59dcbbdbb2" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "block", "cocoa-foundation 0.2.0", "core-foundation 0.10.0", @@ -3344,7 +3268,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e14045fb83be07b5acf1c0884b2180461635b433455fa35d1cd6f17f1450679d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "block", "core-foundation 0.10.0", "core-graphics-types 0.2.0", @@ -3363,6 +3287,17 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "codespan-reporting" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba7a06c0b31fff5ff2e1e7d37dbf940864e2a974b336e1a2938d10af6e8fb283" +dependencies = [ + "serde", + "termcolor", + "unicode-width", +] + [[package]] name = "codestral" version = "0.1.0" @@ -3372,6 +3307,7 @@ dependencies = [ "edit_prediction_context", "futures 0.3.31", "gpui", + "http_client", "language", "language_models", "log", @@ -3380,8 +3316,6 @@ dependencies = [ "serde_json", "smol", "text", - "workspace-hack", - "zed-http-client", ] [[package]] @@ -3390,8 +3324,8 @@ version = "0.44.0" dependencies = [ "agent_settings", "anyhow", - "assistant_context", "assistant_slash_command", + "assistant_text_thread", "async-trait", "async-tungstenite", "audio", @@ -3408,6 +3342,7 @@ dependencies = [ "client", "clock", "collab_ui", + "collections", "command_palette_hooks", "context_server", "ctor", @@ -3428,6 +3363,7 @@ dependencies = [ "gpui", "gpui_tokio", "hex", + "http_client", "hyper 0.14.32", "indoc", "language", @@ -3447,7 +3383,7 @@ dependencies = [ "prometheus", "prompt_store", "prost 0.9.0", - "rand 0.9.1", + "rand 0.9.2", "recent_projects", "release_channel", "remote", @@ -3457,6 +3393,8 @@ dependencies = [ "rpc", "scrypt", "sea-orm", + "sea-orm-macros", + "semantic_version", "semver", 
"serde", "serde_json", @@ -3465,7 +3403,7 @@ dependencies = [ "sha2", "smol", "sqlx", - "strum 0.27.1", + "strum 0.27.2", "subtle", "supermaven_api", "task", @@ -3474,20 +3412,16 @@ dependencies = [ "theme", "time", "tokio", - "toml 0.8.20", + "toml 0.8.23", "tower 0.4.13", "tower-http 0.4.4", "tracing", "tracing-subscriber", "unindent", + "util", "uuid", "workspace", - "workspace-hack", "worktree", - "zed-collections", - "zed-http-client", - "zed-semantic-version", - "zed-util", "zlog", ] @@ -3500,11 +3434,13 @@ dependencies = [ "channel", "chrono", "client", + "collections", "db", "editor", "futures 0.3.31", "fuzzy", "gpui", + "http_client", "log", "menu", "notifications", @@ -3525,11 +3461,16 @@ dependencies = [ "title_bar", "tree-sitter-md", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", +] + +[[package]] +name = "collections" +version = "0.1.0" +dependencies = [ + "indexmap 2.11.4", + "rustc-hash 2.1.1", ] [[package]] @@ -3540,9 +3481,9 @@ checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "combine" @@ -3554,14 +3495,25 @@ dependencies = [ "memchr", ] +[[package]] +name = "comfy-table" +version = "7.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03b7db8e0b4b2fdad6c551e634134e99ec000e5c8c3b6856c65e8bbaded7a3b" +dependencies = [ + "crossterm", + "unicode-segmentation", + "unicode-width", +] + [[package]] name = "command-fds" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ec1052629a80c28594777d1252efc8a6b005d13f9edfd8c3fc0f44d5b32489a" +checksum = "f849b92c694fe237ecd8fafd1ba0df7ae0d45c1df6daeb7f68ed4220d51640bd" dependencies = [ "nix 0.30.1", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -3570,6 +3522,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "command_palette_hooks", "ctor", "db", @@ -3591,10 +3544,8 @@ dependencies = [ "theme", "time", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -3602,26 +3553,58 @@ dependencies = [ name = "command_palette_hooks" version = "0.1.0" dependencies = [ - "derive_more", + "collections", + "derive_more 0.99.20", "gpui", - "workspace-hack", - "zed-collections", + "workspace", +] + +[[package]] +name = "compact_str" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "rustversion", + "ryu", + "serde", + "static_assertions", ] [[package]] name = "component" version = "0.1.0" dependencies = [ + "collections", "documented", "gpui", "inventory", "parking_lot", - "strum 0.27.1", + "strum 0.27.2", "theme", - "workspace-hack", - "zed-collections", ] +[[package]] +name = "compression-codecs" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef8a506ec4b81c460798f572caead636d57d3d7e940f998160f52bd254bf2d23" +dependencies = [ + "compression-core", + "deflate64", + "flate2", + "memchr", +] + +[[package]] +name = "compression-core" +version = "0.4.29" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb" + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -3665,7 +3648,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "once_cell", "tiny-keccak", ] @@ -3676,28 +3659,33 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "context_server" version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "collections", "futures 0.3.31", "gpui", "log", "net", "parking_lot", "postage", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", "smol", "tempfile", "url", - "workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -3706,15 +3694,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" -[[package]] -name = "convert_case" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "convert_case" version = "0.8.0" @@ -3733,6 +3712,7 @@ dependencies = [ "chrono", "client", "clock", + "collections", "command_palette_hooks", "ctor", "dirs 4.0.0", @@ -3741,6 +3721,7 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", + "http_client", "indoc", "itertools 0.14.0", "language", @@ -3756,15 +3737,12 @@ dependencies = [ "serde", "serde_json", "settings", + "sum_tree", "task", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", "zlog", ] @@ -3813,7 +3791,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.10.0", "core-graphics-types 0.2.0", "foreign-types 0.5.0", @@ -3826,7 +3804,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32eb7c354ae9f6d437a6039099ce7ecd049337a8109b23d73e48e8ffba8e9cd5" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.9.4", "core-graphics-types 0.1.3", "foreign-types 0.5.0", @@ -3850,7 +3828,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.10.0", "libc", ] @@ -3861,7 +3839,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e4583956b9806b69f73fcb23aee05eb3620efc282972f08f6a6db7504f8334d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "block", "cfg-if", "core-foundation 0.10.0", @@ -3939,20 +3917,20 @@ dependencies = [ [[package]] name = "coreaudio-sys" -version = "0.2.16" +version = "0.2.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ce857aa0b77d77287acc1ac3e37a05a8c95a2af3647d23b15f263bdaeb7562b" +checksum = "ceec7a6067e62d6f931a2baf6f3a751f4a892595bcec1461a3c94ef9949864b6" dependencies = [ - "bindgen 0.70.1", + "bindgen 0.72.1", ] [[package]] name = "cosmic-text" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e1ecbb5db9a4c2ee642df67bcfa8f044dd867dbbaa21bfab139cbc204ffbf67" +checksum = "da46a9d5a8905cc538a4a5bceb6a4510de7a51049c5588c0114efce102bcbbe8" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "fontdb 0.16.2", "log", "rangemap", @@ -3981,7 +3959,7 @@ dependencies = [ "jni", "js-sys", "libc", - "mach2 0.4.2", + "mach2 0.4.3", "ndk", "ndk-context", "num-derive", @@ -3997,9 +3975,9 @@ dependencies = [ [[package]] name = "cpp_demangle" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96e58d342ad113c2b878f16d5d034c03be492ae460cdbc02b7f0f2284d310c7d" +checksum = "f2bb79cb74d735044c972aae58ed0aaa9a837e85b01106a54c39e42e97f62253" dependencies = [ "cfg-if", ] @@ -4046,7 +4024,7 @@ dependencies = [ "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.31.1", "hashbrown 0.14.5", "log", "postcard", @@ -4056,7 +4034,7 @@ dependencies = [ "serde_derive", "sha2", "smallvec", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", ] [[package]] @@ -4103,7 +4081,7 @@ dependencies = [ "cranelift-codegen", "log", "smallvec", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", ] [[package]] @@ -4120,7 +4098,7 @@ checksum = "b8dee82f3f1f2c4cba9177f1cc5e350fe98764379bcd29340caa7b01f85076c7" dependencies = [ "cranelift-codegen", "libc", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", ] [[package]] @@ -4131,7 +4109,7 @@ checksum = "031ed29858d90cfdf27fe49fae28028a1f20466db97962fa2f4ea34809aeebf3" dependencies = [ "cfg-if", "libc", - "mach2 0.4.2", + "mach2 0.4.3", ] [[package]] @@ -4143,7 +4121,7 @@ dependencies = [ "cfg-if", "crash-context", "libc", - "mach2 0.4.2", + "mach2 0.4.3", "parking_lot", ] @@ -4151,9 +4129,10 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "bincode", + "bincode 1.3.3", "cfg-if", "crash-handler", + "extension_host", "log", "mach2 0.5.0", "minidumper", @@ -4163,15 +4142,14 @@ dependencies = [ "serde_json", "smol", "system_specs", - "workspace-hack", - "zstd", + "zstd 0.11.2+zstd.1.5.2", ] [[package]] name = "crc" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" dependencies = [ "crc-catalog", ] @@ -4183,32 +4161,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] -name = "crc32c" -version = "0.6.8" +name = "crc-fast" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a47af21622d091a8f0fb295b88bc886ac74efcc613efc19f5d0b21de5c89e47" +checksum = "6bf62af4cc77d8fe1c22dde4e721d87f2f54056139d8c412e1366b740305f56f" dependencies = [ - "rustc_version", + "crc", + "digest", + "libc", + "rand 0.9.2", + "regex", ] [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] -[[package]] -name = "crc64fast-nvme" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4955638f00a809894c947f85a024020a20815b65a5eea633798ea7924edab2b3" -dependencies = [ - "crc", -] - [[package]] name = "credentials_provider" version = "0.1.0" @@ -4220,7 +4193,6 @@ dependencies = [ "release_channel", "serde", "serde_json", - "workspace-hack", ] [[package]] @@ -4316,16 +4288,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] -name = "crunchy" -version = "0.2.3" +name = "crossterm" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.9.4", + "crossterm_winapi", + "document-features", + "parking_lot", + "rustix 1.1.2", + "winapi", +] [[package]] -name = "crypto-bigint" -version = "0.4.9" +name = "crossterm_winapi" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -4363,7 +4358,7 @@ dependencies = [ "cssparser-macros", "dtoa-short", "itoa", - "phf", + "phf 0.11.3", "smallvec", ] @@ -4374,14 +4369,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "ctor" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4735f265ba6a1188052ca32d461028a7d1125868be18e287e756019da7607b5" +checksum = "ec09e802f5081de6157da9a75701d6c713d8dc3ba52571fd4bd25f412644e8a6" dependencies = [ "ctor-proc-macro", "dtor", @@ -4389,83 +4384,87 @@ dependencies = [ [[package]] name = "ctor-proc-macro" -version = "0.0.5" +version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f211af61d8efdd104f96e57adf5e426ba1bc3ed7a4ead616e15e5881fd79c4d" +checksum = "e2931af7e13dc045d8e9d26afccc6fa115d64e115c9c84b1166288b46f6782c2" [[package]] name = "ctrlc" -version = "3.4.6" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "697b5419f348fd5ae2478e8018cb016c00a5881c7f46c717de98ffd135a5651c" +checksum = "881c5d0a13b2f1498e2306e82cbada78390e152d4b1378fb28a84f4dcd0dc4f3" dependencies = [ - "nix 0.29.0", - "windows-sys 0.59.0", + "dispatch", + "nix 0.30.1", + "windows-sys 0.61.2", ] [[package]] name = "cursor-icon" -version = "1.1.0" +version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "96a6ac251f4a2aca6b3f91340350eab87ae57c3f127ffeb585e92bd336717991" +checksum = "f27ae1dd37df86211c42e150270f82743308803d90a6f6e6651cd730d5e1732f" [[package]] name = "cxx" -version = "1.0.157" +version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6354e975ea4ec28033ec3a36fa9baa1a02e3eb22ad740eeb4929370d4f5ba8" +checksum = "d8465678d499296e2cbf9d3acf14307458fd69b471a31b65b3c519efe8b5e187" dependencies = [ "cc", + "cxx-build", "cxxbridge-cmd", "cxxbridge-flags", "cxxbridge-macro", - "foldhash", + "foldhash 0.2.0", "link-cplusplus", ] [[package]] name = "cxx-build" -version = "1.0.157" +version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b4400e26ea4b99417e4263b1ce2d8452404d750ba0809a7bd043072593d430d" +checksum = "d74b6bcf49ebbd91f1b1875b706ea46545032a14003b5557b7dfa4bbeba6766e" dependencies = [ "cc", - "codespan-reporting", + "codespan-reporting 0.13.0", + "indexmap 2.11.4", "proc-macro2", "quote", "scratch", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "cxxbridge-cmd" -version = "1.0.157" +version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31860c98f69fc14da5742c5deaf78983e846c7b27804ca8c8319e32eef421bde" +checksum = "94ca2ad69673c4b35585edfa379617ac364bccd0ba0adf319811ba3a74ffa48a" dependencies = [ "clap", - "codespan-reporting", + "codespan-reporting 0.13.0", + "indexmap 2.11.4", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "cxxbridge-flags" -version = "1.0.157" +version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0402a66013f3b8d3d9f2d7c9994656cc81e671054822b0728d7454d9231892f" +checksum = "d29b52102aa395386d77d322b3a0522f2035e716171c2c60aa87cc5e9466e523" [[package]] name = "cxxbridge-macro" -version = "1.0.157" +version = "1.0.187" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c0b38f32d68f3324a981645ee39b2d686af36d03c98a386df3716108c9feae" +checksum = "2a8ebf0b6138325af3ec73324cb3a48b64d57721f17291b151206782e61f66cd" dependencies = [ + "indexmap 2.11.4", "proc-macro2", "quote", - "rustversion", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4478,10 +4477,12 @@ dependencies = [ "async-tar", "async-trait", "client", + "collections", "dap-types", "fs", "futures 0.3.31", "gpui", + "http_client", "language", "libc", "log", @@ -4489,7 +4490,7 @@ dependencies = [ "parking_lot", "paths", "proto", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", @@ -4499,10 +4500,7 @@ dependencies = [ "telemetry", "tree-sitter", "tree-sitter-go", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", + "util", "zlog", ] @@ -4511,7 +4509,7 @@ name = "dap-types" version = "0.0.1" source = "git+https://github.com/zed-industries/dap-types?rev=1b461b310481d01e02b2603c16d7144b926339f8#1b461b310481d01e02b2603c16d7144b926339f8" dependencies = [ - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", ] @@ -4522,6 +4520,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "collections", "dap", "dotenvy", "fs", @@ -4533,12 +4532,9 @@ dependencies = [ "paths", "serde", "serde_json", - "shlex", "smol", "task", - "workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -4547,8 +4543,18 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] @@ -4562,7 +4568,21 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.101", + "syn 2.0.106", +] + +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.106", ] [[package]] @@ -4571,9 +4591,20 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core", + "darling_core 0.20.11", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4617,9 +4648,9 @@ checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "data-url" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" +checksum = "be1e0bca6c3637f992fc1cc7cbc52a78c1ef6db076dbf1059c4323d6a2048376" [[package]] name = "db" @@ -4635,20 +4666,19 @@ dependencies = [ "sqlez", "sqlez_macros", "tempfile", - "workspace-hack", - "zed-util", + "util", "zed_env_vars", ] [[package]] name = "dbus" -version = "0.9.7" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bb21987b9fb1613058ba3843121dd18b163b254d8a6e797e144cbac14d96d1b" +checksum = "190b6255e8ab55a7b568df5a883e9497edc3e4821c06396612048b430e5ad1e9" dependencies = [ "libc", "libdbus-sys", - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -4657,15 +4687,21 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "collections", "dap", "extension", "gpui", "serde_json", "task", - "workspace-hack", - "zed-util", + "util", ] +[[package]] +name = "debug_unsafe" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85d3cef41d236720ed453e102153a53e4cc3d2fde848c0078a50cf249e8e3e5b" + [[package]] name = "debugger_tools" version = "0.1.0" @@ -4679,9 +4715,8 @@ dependencies = [ "serde_json", "settings", "smol", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -4690,8 +4725,9 @@ version = "0.1.0" dependencies = [ "alacritty_terminal", "anyhow", - "bitflags 2.9.0", + "bitflags 2.9.4", "client", + "collections", "command_palette_hooks", "dap", "dap_adapters", @@ -4716,13 +4752,12 @@ dependencies = [ "pretty_assertions", "project", "rpc", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", "settings", - "shlex", - "sysinfo", + "sysinfo 0.37.2", "task", "tasks_ui", "telemetry", @@ -4734,10 +4769,8 @@ dependencies = [ "tree-sitter-json", "ui", "unindent", + "util", 
"workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", "zlog", ] @@ -4757,18 +4790,17 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", - "workspace-hack", - "zed-http-client", ] [[package]] name = "deflate64" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" +checksum = "26bf8fc351c5ed29b5c2f0cbbac1b209b74f60ecd62e675a998df72c49af5204" [[package]] name = "denoise" @@ -4780,8 +4812,7 @@ dependencies = [ "realfft", "rodio", "rustfft", - "thiserror 2.0.12", - "workspace-hack", + "thiserror 2.0.17", ] [[package]] @@ -4807,12 +4838,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.4.0" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] @@ -4823,20 +4854,50 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "derive_more" -version = "0.99.19" +version = "0.99.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version", - "syn 2.0.101", + "syn 2.0.106", +] + +[[package]] +name = "derive_more" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "unicode-xid", +] + +[[package]] +name = "derive_refineable" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -4851,6 +4912,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "component", "ctor", "editor", @@ -4862,7 +4924,7 @@ dependencies = [ "markdown", "pretty_assertions", "project", - "rand 0.9.1", + "rand 0.9.2", "serde", "serde_json", "settings", @@ -4870,10 +4932,8 @@ dependencies = [ "theme", "ui", "unindent", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zlog", ] @@ -4976,8 +5036,8 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", - "redox_users 0.5.0", - "windows-sys 0.61.0", + "redox_users 0.5.2", + "windows-sys 0.61.2", ] [[package]] @@ -4992,7 +5052,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", ] @@ -5004,7 +5064,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] 
[[package]] @@ -5028,36 +5088,46 @@ dependencies = [ "serde", "serde_json", "settings", - "workspace-hack", + "task", + "theme", + "util", "zed", - "zed-util", "zlog", ] +[[package]] +name = "document-features" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +dependencies = [ + "litrs", +] + [[package]] name = "documented" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6db32f0995bc4553d2de888999075acd0dbeef75ba923503f6a724263dc6f3" +checksum = "ed6b3e31251e87acd1b74911aed84071c8364fc9087972748ade2f1094ccce34" dependencies = [ "documented-macros", - "phf", - "thiserror 1.0.69", + "phf 0.12.1", + "thiserror 2.0.17", ] [[package]] name = "documented-macros" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a394bb35929b58f9a5fd418f7c6b17a4b616efcc1e53e6995ca123948f87e5fa" +checksum = "1149cf7462e5e79e17a3c05fd5b1f9055092bbfa95e04c319395c3beacc9370f" dependencies = [ - "convert_case 0.6.0", - "itertools 0.13.0", + "convert_case 0.8.0", + "itertools 0.14.0", "optfield", "proc-macro2", "quote", - "strum 0.26.3", - "syn 2.0.101", + "strum 0.27.2", + "syn 2.0.106", ] [[package]] @@ -5078,7 +5148,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "415b6ec780d34dcf624666747194393603d0373b7141eef01d12ee58881507d9" dependencies = [ - "phf", + "phf 0.11.3", ] [[package]] @@ -5119,9 +5189,9 @@ checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "dwrote" -version = "0.11.3" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfe1f192fcce01590bd8d839aca53ce0d11d803bf291b2a6c4ad925a8f0024be" +checksum = "9e1b35532432acc8b19ceed096e35dfa088d3ea037fe4f3c085f1f97f33b4d02" dependencies = [ "lazy_static", "libc", @@ -5131,9 +5201,9 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "dyn-stack" @@ -5147,13 +5217,20 @@ dependencies = [ [[package]] name = "dyn-stack" -version = "0.13.0" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490bd48eb68fffcfed519b4edbfd82c69cbe741d175b84f0e0cbe8c57cbe0bdd" +checksum = "1c4713e43e2886ba72b8271aa66c93d722116acf7a75555cce11dcde84388fe8" dependencies = [ "bytemuck", + "dyn-stack-macros", ] +[[package]] +name = "dyn-stack-macros" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d926b4d407d372f141f93bb444696142c29d32962ccbd3531117cf3aa0bfa9" + [[package]] name = "ec4rs" version = "1.2.0" @@ -5179,7 +5256,6 @@ dependencies = [ "client", "gpui", "language", - "workspace-hack", ] [[package]] @@ -5210,7 +5286,6 @@ dependencies = [ "theme", "ui", "workspace", - "workspace-hack", "zed_actions", "zeta", ] @@ -5224,10 +5299,11 @@ dependencies = [ "arrayvec", "clap", "cloud_llm_client", + "collections", "criterion", "futures 0.3.31", "gpui", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "indoc", "itertools 0.14.0", "language", @@ -5236,22 +5312,20 @@ dependencies = [ "postage", "pretty_assertions", "project", - "rand 0.9.1", + 
"rand 0.9.2", "regex", "serde", "serde_json", "settings", "slotmap", "smallvec", - "strum 0.27.1", + "strum 0.27.2", "text", "tree-sitter", "tree-sitter-c", "tree-sitter-cpp", "tree-sitter-go", - "workspace-hack", - "zed-collections", - "zed-util", + "util", "zlog", ] @@ -5265,6 +5339,7 @@ dependencies = [ "buffer_diff", "client", "clock", + "collections", "convert_case 0.8.0", "criterion", "ctor", @@ -5278,6 +5353,7 @@ dependencies = [ "fuzzy", "git", "gpui", + "http_client", "indoc", "itertools 0.14.0", "language", @@ -5292,17 +5368,19 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", - "rand 0.9.1", + "rand 0.9.2", "regex", "release_channel", + "rope", "rpc", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", "smallvec", "smol", "snippet", + "sum_tree", "task", "telemetry", "tempfile", @@ -5321,14 +5399,10 @@ dependencies = [ "unicode-segmentation", "unindent", "url", + "util", "uuid", "vim_mode_setting", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", "zed_actions", "zlog", ] @@ -5385,16 +5459,16 @@ dependencies = [ [[package]] name = "embed-resource" -version = "3.0.2" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fbc6e0d8e0c03a655b53ca813f0463d2c956bc4db8138dbc89f120b066551e3" +checksum = "55a075fc573c64510038d7ee9abc7990635863992f83ebc52c8b433b8411a02e" dependencies = [ "cc", "memchr", "rustc_version", - "toml 0.8.20", + "toml 0.9.8", "vswhom", - "winreg 0.52.0", + "winreg 0.55.0", ] [[package]] @@ -5415,7 +5489,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99e1f1df1f181f2539bac8bf027d31ca5ffbf9e559e3f2d09413b9107b5c02f4" dependencies = [ - "phf", + "phf 0.11.3", ] [[package]] @@ -5448,14 +5522,14 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "enumflags2" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" +checksum = "1027f7680c853e056ebcec683615fb6fbbc07dbaa13b4d5d9442b146ded4ecef" dependencies = [ "enumflags2_derive", "serde", @@ -5463,20 +5537,20 @@ dependencies = [ [[package]] name = "enumflags2_derive" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" +checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "env_filter" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" dependencies = [ "log", "regex", @@ -5517,6 +5591,26 @@ dependencies = [ "serde", ] +[[package]] +name = "equator" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" +dependencies = [ + "proc-macro2", + "quote", 
+ "syn 2.0.106", +] + [[package]] name = "equivalent" version = "1.0.2" @@ -5525,11 +5619,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "erased-serde" -version = "0.4.6" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +checksum = "259d404d09818dec19332e31d94558aeb442fea04c817006456c24b5460bbd4b" dependencies = [ "serde", + "serde_core", "typeid", ] @@ -5546,12 +5641,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.11" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5585,6 +5680,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "ethnum" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca81e6b4777c89fd810c25a4be2b1bd93ea034fbe58e6a75216a34c6b82c539b" + [[package]] name = "euclid" version = "0.22.11" @@ -5598,18 +5699,18 @@ dependencies = [ name = "eval" version = "0.1.0" dependencies = [ + "acp_thread", "agent", + "agent-client-protocol", "agent_settings", "agent_ui", "anyhow", - "assistant_tool", - "assistant_tools", "async-trait", "buffer_diff", "chrono", "clap", "client", - "cloud_llm_client", + "collections", "debug_adapter_extension", "dirs 4.0.0", "dotenvy", @@ -5632,6 +5733,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", + "rand 0.9.2", "regex", "release_channel", "reqwest_client", @@ -5639,16 +5741,13 @@ dependencies = [ "serde_json", "settings", "shellexpand 2.1.2", - "smol", "telemetry", "terminal_view", - "toml 0.8.20", + "toml 0.8.23", "unindent", + "util", "uuid", "watch", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -5659,9 +5758,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "5.4.0" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -5674,7 +5773,7 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener 5.4.0", + "event-listener 5.4.1", "pin-project-lite", ] @@ -5692,10 +5791,9 @@ dependencies = [ name = "explorer_command_injector" version = "0.1.0" dependencies = [ - "windows 0.61.1", - "windows-core 0.61.0", - "windows-registry 0.5.1", - "workspace-hack", + "windows 0.61.3", + "windows-core 0.61.2", + "windows-registry 0.5.3", ] [[package]] @@ -5727,28 +5825,27 @@ dependencies = [ "async-compression", "async-tar", "async-trait", + "collections", "dap", "fs", "futures 0.3.31", "gpui", "heck 0.5.0", + "http_client", "language", "log", "lsp", "parking_lot", "pretty_assertions", + "semantic_version", "serde", "serde_json", "task", - "toml 0.8.20", + "toml 0.8.23", "url", + "util", "wasm-encoder 0.221.3", "wasmparser 0.221.3", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-semantic-version", - "zed-util", ] [[package]] 
@@ -5769,10 +5866,9 @@ dependencies = [ "serde_json", "theme", "tokio", - "toml 0.8.20", + "toml 0.8.23", "tree-sitter", "wasmtime", - "workspace-hack", ] [[package]] @@ -5784,6 +5880,7 @@ dependencies = [ "async-tar", "async-trait", "client", + "collections", "criterion", "ctor", "dap", @@ -5791,6 +5888,8 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", + "gpui_tokio", + "http_client", "language", "language_extension", "log", @@ -5800,10 +5899,11 @@ dependencies = [ "parking_lot", "paths", "project", - "rand 0.9.1", + "rand 0.9.2", "release_channel", "remote", "reqwest_client", + "semantic_version", "serde", "serde_json", "serde_json_lenient", @@ -5813,16 +5913,12 @@ dependencies = [ "tempfile", "theme", "theme_extension", - "toml 0.8.20", + "toml 0.8.23", "url", + "util", "wasmparser 0.221.3", "wasmtime", "wasmtime-wasi", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-semantic-version", - "zed-util", "zlog", ] @@ -5832,6 +5928,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "db", "editor", "extension", @@ -5845,19 +5942,17 @@ dependencies = [ "picker", "project", "release_channel", + "semantic_version", "serde", "settings", "smallvec", - "strum 0.27.1", + "strum 0.27.2", "telemetry", "theme", "ui", + "util", "vim_mode_setting", "workspace", - "workspace-hack", - "zed-collections", - "zed-semantic-version", - "zed-util", "zed_actions", ] @@ -5867,6 +5962,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" +[[package]] +name = "fallible-streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" + [[package]] name = "fancy-regex" version = "0.13.0" @@ -5889,6 +5990,12 @@ dependencies = [ "regex-syntax", ] +[[package]] +name = "fast-float2" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" + [[package]] name = "fast-srgb8" version = "1.0.0" @@ -5910,6 +6017,26 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "fax" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "fd-lock" version = "4.0.4" @@ -5917,7 +6044,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" dependencies = [ "cfg-if", - "rustix 1.0.7", + "rustix 1.1.2", "windows-sys 0.59.0", ] @@ -5937,7 +6064,6 @@ dependencies = [ "futures 0.3.31", "gpui", "smol", - "workspace-hack", ] [[package]] @@ -5948,9 +6074,8 @@ dependencies = [ "gpui", "system_specs", "urlencoding", + "util", "workspace", - "workspace-hack", - "zed-util", "zed_actions", ] @@ -5969,6 +6094,7 @@ name = "file_finder" version = "0.1.0" dependencies = [ "anyhow", + "collections", "ctor", "editor", "file_icons", @@ 
-5980,7 +6106,7 @@ dependencies = [ "picker", "pretty_assertions", "project", - "schemars 1.0.1", + "schemars 1.0.4", "search", "serde", "serde_json", @@ -5988,10 +6114,8 @@ dependencies = [ "text", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zlog", ] @@ -6002,8 +6126,7 @@ dependencies = [ "gpui", "serde", "theme", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -6019,16 +6142,22 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.25" +version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" dependencies = [ "cfg-if", "libc", "libredox", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -6037,11 +6166,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.1.1" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" +checksum = "dc5a4e564e38c699f2880d3fda590bedc2e69f3f84cd48b457bd892ce61d0aa9" dependencies = [ "crc32fast", + "libz-rs-sys", "miniz_oxide", ] @@ -6065,7 +6195,7 @@ checksum = "4203231de188ebbdfb85c11f3c20ca2b063945710de04e7b59268731e728b462" dependencies = [ "half", "num-traits", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", ] @@ -6110,20 +6240,26 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "font-types" -version = "0.8.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa6a5e5a77b5f3f7f9e32879f484aa5b3632ddfbe568a16266c904a6f32cdaf" +checksum = "511e2c18a516c666d27867d2f9821f76e7d591f762e9fc41dd6cc5c90fe54b0b" dependencies = [ "bytemuck", ] [[package]] name = "fontconfig-parser" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fcfcd44ca6e90c921fee9fa665d530b21ef1327a4c1a6c5250ea44b776ada7" +checksum = "bbc773e24e02d4ddd8395fd30dc147524273a83e54e0f312d986ea30de5f5646" dependencies = [ "roxmltree", ] @@ -6183,7 +6319,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -6209,9 +6345,9 @@ dependencies = [ [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -6246,6 +6382,7 @@ dependencies = [ "async-tar", "async-trait", "cocoa 0.26.0", + "collections", "fsevent", "futures 0.3.31", "git", @@ -6265,10 +6402,8 @@ dependencies = [ "tempfile", "text", 
"time", - "windows 0.61.1", - "workspace-hack", - "zed-collections", - "zed-util", + "util", + "windows 0.61.3", ] [[package]] @@ -6278,7 +6413,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94e7099f6313ecacbe1256e8ff9d617b75d1bcb16a6fddef94866d225a01a14a" dependencies = [ "io-lifetimes", - "rustix 1.0.7", + "rustix 1.1.2", "windows-sys 0.59.0", ] @@ -6292,6 +6427,24 @@ dependencies = [ "winapi", ] +[[package]] +name = "fs4" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8640e34b88f7652208ce9e88b1a37a2ae95227d84abec377ccd3c5cfeb141ed4" +dependencies = [ + "rustix 1.1.2", + "windows-sys 0.59.0", +] + +[[package]] +name = "fs_benchmarks" +version = "0.1.0" +dependencies = [ + "fs", + "gpui", +] + [[package]] name = "fs_extra" version = "1.3.0" @@ -6302,13 +6455,12 @@ checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" name = "fsevent" version = "0.1.0" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.10.0", "fsevent-sys 3.1.0", "log", "parking_lot", "tempfile", - "workspace-hack", ] [[package]] @@ -6427,9 +6579,9 @@ dependencies = [ [[package]] name = "futures-lite" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" dependencies = [ "fastrand 2.3.0", "futures-core", @@ -6446,7 +6598,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -6487,8 +6639,7 @@ version = "0.1.0" dependencies = [ "gpui", "log", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -6535,7 +6686,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab96b703d31950f1aeddded248bc95543c9efc7ac9c4a21fda8703a83ee35451" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "gemm-c32 0.18.2", "gemm-c64 0.18.2", "gemm-common 0.18.2", @@ -6570,7 +6721,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6db9fd9f40421d00eea9dd0770045a5603b8d684654816637732463f4073847" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "gemm-common 0.18.2", "num-complex", "num-traits", @@ -6600,7 +6751,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfcad8a3d35a43758330b635d02edad980c1e143dc2f21e6fd25f9e4eada8edf" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "gemm-common 0.18.2", "num-complex", "num-traits", @@ -6636,7 +6787,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a352d4a69cbe938b9e2a9cb7a3a63b7e72f9349174a2752a558a8a563510d0f3" dependencies = [ "bytemuck", - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "half", "libm", "num-complex", @@ -6674,7 +6825,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cff95ae3259432f3c3410eaa919033cd03791d81cebd18018393dc147952e109" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "gemm-common 0.18.2", "gemm-f32 0.18.2", "half", @@ -6707,7 +6858,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc8d3d4385393304f407392f754cd2dc4b315d05063f62cf09f47b58de276864" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 
0.13.2", "gemm-common 0.18.2", "num-complex", "num-traits", @@ -6737,7 +6888,7 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35b2a4f76ce4b8b16eadc11ccf2e083252d8237c1b589558a49b0183545015bd" dependencies = [ - "dyn-stack 0.13.0", + "dyn-stack 0.13.2", "gemm-common 0.18.2", "num-complex", "num-traits", @@ -6746,20 +6897,6 @@ dependencies = [ "seq-macro", ] -[[package]] -name = "generator" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d18470a76cb7f8ff746cf1f7470914f900252ec36bbc40b569d74b1258446827" -dependencies = [ - "cc", - "cfg-if", - "libc", - "log", - "rustversion", - "windows 0.61.1", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -6772,46 +6909,46 @@ dependencies = [ [[package]] name = "gethostname" -version = "0.4.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0176e0459c2e4a1fe232f984bca6890e681076abb9934f6cea7c326f3fc47818" +checksum = "1bd49230192a3797a9a4d6abe9b3eed6f7fa4c8a8a4947977c6f80025f92cbd8" dependencies = [ - "libc", - "windows-targets 0.48.5", + "rustix 1.1.2", + "windows-link 0.2.1", ] [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasip2", "wasm-bindgen", ] [[package]] name = "gif" -version = "0.13.1" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb2d69b19215e18bb912fa30f7ce15846e301408695e44e0ef719f1da9e19f2" +checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b" dependencies = [ "color_quant", "weezl", @@ -6824,10 +6961,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" dependencies = [ "fallible-iterator", - "indexmap 2.9.0", + "indexmap 2.11.4", "stable_deref_trait", ] +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + [[package]] name = "git" version = "0.1.0" @@ -6835,43 +6978,42 @@ dependencies = [ "anyhow", "askpass", "async-trait", - "derive_more", + "collections", + "derive_more 0.99.20", "futures 0.3.31", "git2", "gpui", + "http_client", "itertools 0.14.0", "log", "parking_lot", "pretty_assertions", - "rand 0.9.1", + "rand 0.9.2", "regex", "rope", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "smol", + "sum_tree", "tempfile", "text", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "unindent", "url", "urlencoding", + "util", "uuid", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", ] [[package]] name = "git2" -version = "0.20.1" +version = "0.20.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5220b8ba44c68a9a7f7a7659e864dd73692e417ef0211bea133c7b74e031eeb9" +checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "libc", "libgit2-sys", "log", @@ -6887,6 +7029,7 @@ dependencies = [ "futures 0.3.31", "git", "gpui", + "http_client", "indoc", "pretty_assertions", "regex", @@ -6894,9 +7037,7 @@ dependencies = [ "serde_json", "settings", "url", - "workspace-hack", - "zed-http-client", - "zed-util", + "util", ] [[package]] @@ -6910,6 +7051,7 @@ dependencies = [ "call", "chrono", "cloud_llm_client", + "collections", "command_palette_hooks", "component", "ctor", @@ -6931,26 +7073,23 @@ dependencies = [ "notifications", "panel", "picker", - "postage", "pretty_assertions", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", - "strum 0.27.1", + "strum 0.27.2", "telemetry", "theme", "time", "time_format", "ui", "unindent", + "util", "watch", - "windows 0.61.1", + "windows 0.61.3", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", "zeroize", "zlog", @@ -6958,15 +7097,15 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "globset" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" +checksum = "eab69130804d941f8075cfd713bf8848a2c3b3f201a9457a11e6f87e1ab62305" dependencies = [ "aho-corasick", "bstr", @@ -7018,9 +7157,8 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -7040,13 +7178,12 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", "settings", - "strum 0.27.1", - "workspace-hack", - "zed-http-client", + "strum 0.27.2", ] [[package]] @@ -7055,7 +7192,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbcd2dba93594b227a1f57ee09b8b9da8892c34d55aa332e034a228d0fe6a171" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "gpu-alloc-types", ] @@ -7076,12 +7213,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98ff03b468aa837d70984d55f5d3f846f6ec31fe34bbb97c4f85219caeee1ca4" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", ] [[package]] name = "gpui" -version = "0.2.0" +version = "0.2.2" dependencies = [ "anyhow", "as-raw-xcb-connection", @@ -7098,6 +7235,8 @@ dependencies = [ "calloop-wayland-source", "cbindgen", "cocoa 0.26.0", + "cocoa-foundation 0.2.0", + "collections", "core-foundation 0.10.0", "core-foundation-sys", "core-graphics 0.24.0", @@ -7105,7 +7244,7 @@ dependencies = [ "core-video", "cosmic-text", "ctor", - "derive_more", + "derive_more 0.99.20", "embed-resource", "env_logger 0.11.8", "etagere", @@ -7113,13 +7252,15 @@ dependencies = [ "flume", "foreign-types 0.5.0", "futures 0.3.31", - "gpui-macros", + "gpui_macros", + "http_client", "image", "inventory", "itertools 0.14.0", "libc", "log", "lyon", + "media", "metal", "naga", "num_cpus", @@ -7131,26 +7272,32 @@ dependencies 
= [ "parking", "parking_lot", "pathfinder_geometry", + "pin-project", "postage", "pretty_assertions", "profiling", - "rand 0.9.1", + "rand 0.9.2", "raw-window-handle", + "refineable", "reqwest_client", "resvg", - "schemars 1.0.1", + "schemars 1.0.4", "seahash", + "semantic_version", "serde", "serde_json", "slotmap", "smallvec", "smol", "stacksafe", - "strum 0.27.1", + "strum 0.27.2", + "sum_tree", "taffy", - "thiserror 2.0.12", + "thiserror 2.0.17", "unicode-segmentation", "usvg", + "util", + "util_macros", "uuid", "waker-fn", "wayland-backend", @@ -7158,37 +7305,27 @@ dependencies = [ "wayland-cursor", "wayland-protocols 0.31.2", "wayland-protocols-plasma", - "windows 0.61.1", - "windows-core 0.61.0", + "windows 0.61.3", + "windows-core 0.61.2", "windows-numerics", - "windows-registry 0.5.1", - "workspace-hack", + "windows-registry 0.5.3", "x11-clipboard", "x11rb", "xkbcommon", - "zed-collections", "zed-font-kit", - "zed-http-client", - "zed-media", - "zed-refineable", "zed-scap", - "zed-semantic-version", - "zed-sum-tree", - "zed-util", - "zed-util-macros", "zed-xim", ] [[package]] -name = "gpui-macros" +name = "gpui_macros" version = "0.1.0" dependencies = [ "gpui", "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.101", - "workspace-hack", + "syn 2.0.106", ] [[package]] @@ -7198,8 +7335,7 @@ dependencies = [ "anyhow", "gpui", "tokio", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -7221,9 +7357,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.26" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" dependencies = [ "bytes 1.10.1", "fnv", @@ -7231,7 +7367,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.9.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -7240,9 +7376,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75249d144030531f8dee69fe9cea04d3edf809a017ae445e2abdff6629e86633" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes 1.10.1", @@ -7250,7 +7386,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.3.1", - "indexmap 2.9.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -7259,16 +7395,17 @@ dependencies = [ [[package]] name = "half" -version = "2.6.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" dependencies = [ "bytemuck", "cfg-if", "crunchy", "num-traits", - "rand 0.9.1", + "rand 0.9.2", "rand_distr", + "zerocopy", ] [[package]] @@ -7315,19 +7452,20 @@ version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "allocator-api2", ] [[package]] name = "hashbrown" -version = "0.15.3" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", - "foldhash", 
+ "foldhash 0.1.5", + "rayon", "serde", ] @@ -7346,7 +7484,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.15.3", + "hashbrown 0.15.5", ] [[package]] @@ -7403,7 +7541,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd54745cfacb7b97dee45e8fdb91814b62bccddb481debb7de0f9ee6b7bf5b43" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "heed-traits", "heed-types", @@ -7428,7 +7566,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c255bdf46e07fb840d120a36dcc81f385140d7191c76a7391672675c01a55d" dependencies = [ - "bincode", + "bincode 1.3.3", "byteorder", "heed-traits", "serde", @@ -7437,15 +7575,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hermit-abi" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -7514,18 +7646,17 @@ dependencies = [ "markup5ever 0.12.1", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "html5ever" -version = "0.31.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953cbbe631aae7fc0a112702ad5d3aaf09da38beaf45ea84610d6e1c358f569c" +checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4" dependencies = [ "log", - "mac", - "markup5ever 0.16.1", + "markup5ever 0.35.0", "match_token", ] @@ -7539,7 +7670,6 @@ dependencies = [ "markup5ever_rcdom", "pretty_assertions", "regex", - "workspace-hack", ] [[package]] @@ -7605,16 +7735,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" [[package]] -name = "http_client_tls" +name = "http_client" version = "0.1.0" dependencies = [ - "rustls 0.23.26", - "rustls-platform-verifier", - "workspace-hack", -] - -[[package]] -name = "httparse" + "anyhow", + "async-compression", + "async-fs", + "async-tar", + "bytes 1.10.1", + "derive_more 0.99.20", + "futures 0.3.31", + "http 1.3.1", + "http-body 1.0.1", + "log", + "parking_lot", + "serde", + "serde_json", + "sha2", + "tempfile", + "url", + "util", + "zed-reqwest", +] + +[[package]] +name = "http_client_tls" +version = "0.1.0" +dependencies = [ + "rustls 0.23.33", + "rustls-platform-verifier", +] + +[[package]] +name = "httparse" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" @@ -7633,9 +7786,9 @@ checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e" [[package]] name = "humantime" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" [[package]] name = "hyper" @@ -7647,14 +7800,14 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", + 
"h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -7663,19 +7816,21 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes 1.10.1", "futures-channel", - "futures-util", - "h2 0.4.9", + "futures-core", + "h2 0.4.12", "http 1.3.1", "http-body 1.0.1", "httparse", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -7699,16 +7854,15 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.5" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "futures-util", "http 1.3.1", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-util", - "rustls 0.23.26", - "rustls-native-certs 0.8.1", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", @@ -7730,19 +7884,23 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.11" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" dependencies = [ + "base64 0.22.1", "bytes 1.10.1", "futures-channel", + "futures-core", "futures-util", "http 1.3.1", "http-body 1.0.1", - "hyper 1.6.0", + "hyper 1.7.0", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.1", "tokio", "tower-service", "tracing", @@ -7750,9 +7908,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.63" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -7760,7 +7918,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.61.0", + "windows-core 0.62.2", ] [[package]] @@ -7777,27 +7935,27 @@ name = "icons" version = "0.1.0" dependencies = [ "serde", - "strum 0.27.1", - "workspace-hack", + "strum 0.27.2", ] [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", - "yoke", + "potential_utf", + "yoke 0.8.0", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -7806,31 +7964,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -7838,67 +7976,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", - "yoke", + "yoke 0.8.0", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - [[package]] name = "id-arena" version = "2.2.1" @@ -7913,9 +8038,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -7924,9 +8049,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -7934,9 +8059,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.23" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" +checksum = "81776e6f9464432afcc28d03e52eb101c93b6f0566f52aef2427663e700f0403" dependencies = [ "crossbeam-deque", "globset", @@ -7950,9 +8075,9 @@ dependencies = [ [[package]] name = "image" -version = "0.25.6" +version = "0.25.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db35664ce6b9810857a38a906215e75a9c879f0696556a39f59c62829710251a" +checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7" dependencies = [ "bytemuck", "byteorder-lite", @@ -7960,8 +8085,9 @@ dependencies = [ "exr", "gif", "image-webp", + "moxcms", "num-traits", - "png", + "png 0.18.0", "qoi", "ravif", "rayon", @@ -7973,9 +8099,9 @@ dependencies = [ [[package]] name = "image-webp" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b77d01e822461baa8409e156015a1d91735549f0f2c17691bd2d996bef238f7f" +checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" dependencies = [ "byteorder-lite", "quick-error", @@ -7997,9 +8123,8 @@ dependencies = [ "settings", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -8014,14 +8139,14 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17d34b7d42178945f775e84bc4c36dde7c1c6cdfea656d3354d009056f2bb3d2" dependencies = [ - "hashbrown 0.15.3", + "hashbrown 0.15.5", ] [[package]] name = "imgref" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" +checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" [[package]] name = "indexmap" @@ -8036,13 +8161,14 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.9.0" +version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "serde", + "serde_core", ] [[package]] @@ -8053,13 +8179,13 @@ checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" [[package]] name = "inherent" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c38228f24186d9cc68c729accb4d413be9eaed6ad07ff79e0270d9e56f3de13" +checksum = "c727f80bfa4a6c6e2508d2f05b6f4bfce242030bd88ed15ae5331c5b5d30fba7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8079,7 +8205,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "inotify-sys", "libc", ] @@ -8119,10 +8245,9 @@ dependencies = [ "theme", "title_bar", "ui", + "util", + "util_macros", "workspace", - "workspace-hack", - "zed-util", - "zed-util-macros", 
"zed_actions", ] @@ -8135,9 +8260,8 @@ dependencies = [ "gpui", "release_channel", "smol", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -8157,14 +8281,14 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "inventory" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83" +checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e" dependencies = [ "rustversion", ] @@ -8187,15 +8311,14 @@ checksum = "06432fb54d3be7964ecd3649233cddf80db2832f47fec34c01f65b3d9d774983" [[package]] name = "io-surface" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8283575d5f0b2e7447ec0840363879d71c0fa325d4c699d5b45208ea4a51f45e" +checksum = "554b8c5d64ec09a3a520fe58e4d48a73e00ff32899cdcbe32a4877afd4968b8e" dependencies = [ "cgl", "core-foundation 0.10.0", "core-foundation-sys", "leaky-cow", - "libc", ] [[package]] @@ -8213,12 +8336,12 @@ version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb8251fb7bcd9ccd3725ed8deae9fe7db8e586495c9eb5b0c52e6233e5e75ea" dependencies = [ - "bincode", + "bincode 1.3.3", "crossbeam-channel", "fnv", "lazy_static", "libc", - "mio 1.0.3", + "mio 1.1.0", "rand 0.8.5", "serde", "tempfile", @@ -8232,6 +8355,16 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-docker" version = "0.2.0" @@ -8247,7 +8380,7 @@ version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ - "hermit-abi 0.5.0", + "hermit-abi", "libc", "windows-sys 0.59.0", ] @@ -8295,15 +8428,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.14.0" @@ -8321,9 +8445,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a064218214dc6a10fbae5ec5fa888d80c45d611aba169222fc272072bf7aef6" +checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" dependencies = [ "jiff-static", "log", @@ -8334,13 +8458,13 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "199b7932d97e325aff3a7030e141eafe7f2c6268e1d1b24859b753a627f45254" +checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8367,11 +8491,11 @@ checksum = 
"8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.4", "libc", ] @@ -8388,20 +8512,13 @@ dependencies = [ "settings", "shellexpand 2.1.2", "workspace", - "workspace-hack", ] -[[package]] -name = "jpeg-decoder" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" - [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" dependencies = [ "once_cell", "wasm-bindgen", @@ -8430,15 +8547,14 @@ dependencies = [ "language", "paths", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", "snippet_provider", "task", "theme", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -8447,7 +8563,7 @@ version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1b46a0365a611fbf1d2143104dcf910aada96fafd295bab16c60b802bf6fa1d" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "base64 0.22.1", "bytecount", "email_address", @@ -8462,7 +8578,7 @@ dependencies = [ "referencing", "regex", "regex-syntax", - "reqwest 0.12.15", + "reqwest 0.12.24", "serde", "serde_json", "uuid-simd", @@ -8519,6 +8635,7 @@ name = "keymap_editor" version = "0.1.0" dependencies = [ "anyhow", + "collections", "command_palette", "component", "db", @@ -8545,11 +8662,9 @@ dependencies = [ "tree-sitter-rust", "ui", "ui_input", + "util", "vim", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -8565,9 +8680,9 @@ dependencies = [ [[package]] name = "kqueue" -version = "1.0.8" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" dependencies = [ "kqueue-sys", "libc", @@ -8585,11 +8700,12 @@ dependencies = [ [[package]] name = "kurbo" -version = "0.11.1" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89234b2cc610a7dd927ebde6b41dd1a5d4214cffaef4cf1fb2195d592f92518f" +checksum = "c62026ae44756f8a599ba21140f350303d4f08dcdcc71b5ad9c9bb8128c13c62" dependencies = [ "arrayvec", + "euclid", "smallvec", ] @@ -8609,6 +8725,7 @@ dependencies = [ "anyhow", "async-trait", "clock", + "collections", "ctor", "diffy", "ec4rs", @@ -8617,6 +8734,7 @@ dependencies = [ "fuzzy", "globset", "gpui", + "http_client", "imara-diff", "indoc", "itertools 0.14.0", @@ -8625,10 +8743,10 @@ dependencies = [ "parking_lot", "postage", "pretty_assertions", - "rand 0.9.1", + "rand 0.9.2", "regex", "rpc", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", @@ -8637,10 +8755,11 @@ dependencies = [ "smol", "streaming-iterator", "strsim", + "sum_tree", "task", "text", "theme", - "toml 0.8.20", + "toml 0.8.23", "tree-sitter", "tree-sitter-elixir", "tree-sitter-embedded-template", @@ -8654,12 +8773,8 @@ dependencies 
= [ "tree-sitter-typescript", "unicase", "unindent", + "util", "watch", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", "zlog", ] @@ -8669,6 +8784,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "collections", "extension", "fs", "futures 0.3.31", @@ -8679,9 +8795,7 @@ dependencies = [ "project", "serde", "serde_json", - "workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -8694,25 +8808,23 @@ dependencies = [ "client", "cloud_api_types", "cloud_llm_client", + "collections", "futures 0.3.31", "gpui", + "http_client", "icons", "image", "log", "open_router", "parking_lot", "proto", - "schemars 1.0.1", "serde", "serde_json", "settings", "smol", "telemetry_events", - "thiserror 2.0.12", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", + "thiserror 2.0.17", + "util", ] [[package]] @@ -8729,6 +8841,7 @@ dependencies = [ "chrono", "client", "cloud_llm_client", + "collections", "component", "convert_case 0.8.0", "copilot", @@ -8740,6 +8853,7 @@ dependencies = [ "google_ai", "gpui", "gpui_tokio", + "http_client", "language", "language_model", "lmstudio", @@ -8752,23 +8866,20 @@ dependencies = [ "partial-json-fixer", "project", "release_channel", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", "smol", - "strum 0.27.1", - "thiserror 2.0.12", + "strum 0.27.2", + "thiserror 2.0.17", "tiktoken-rs", "tokio", "ui", "ui_input", + "util", "vercel", - "workspace-hack", "x_ai", - "zed-collections", - "zed-http-client", - "zed-util", "zed_env_vars", ] @@ -8782,7 +8893,6 @@ dependencies = [ "project", "ui", "workspace", - "workspace-hack", ] [[package]] @@ -8800,9 +8910,8 @@ dependencies = [ "project", "settings", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -8811,6 +8920,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "command_palette_hooks", "copilot", "editor", @@ -8827,10 +8937,8 @@ dependencies = [ "theme", "tree-sitter", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", "zlog", ] @@ -8845,8 +8953,10 @@ dependencies = [ "async-tar", "async-trait", "chrono", + "collections", "futures 0.3.31", "gpui", + "http_client", "itertools 0.14.0", "json_schema_store", "language", @@ -8874,7 +8984,7 @@ dependencies = [ "task", "text", "theme", - "toml 0.8.20", + "toml 0.8.23", "tree-sitter", "tree-sitter-bash", "tree-sitter-c", @@ -8895,11 +9005,8 @@ dependencies = [ "tree-sitter-yaml", "unindent", "url", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", ] [[package]] @@ -8911,12 +9018,6 @@ dependencies = [ "spin", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "leak" version = "0.1.2" @@ -8946,21 +9047,21 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "lebe" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" +checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" [[package]] name = "libc" -version = "0.2.172" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libdbus-sys" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06085512b750d640299b79be4bad3d2fa90a9c00b1fd9e1b46364f66f0485c72" +checksum = "5cbe856efeb50e4681f010e9aaa2bf0a644e10139e54cde10fc83a307c23bd9f" dependencies = [ "cc", "pkg-config", @@ -8968,9 +9069,9 @@ dependencies = [ [[package]] name = "libfuzzer-sys" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75" +checksum = "5037190e1f70cbeef565bd267599242926f724d3b8a9f510fd7e0b540cfa4404" dependencies = [ "arbitrary", "cc", @@ -8978,9 +9079,9 @@ dependencies = [ [[package]] name = "libgit2-sys" -version = "0.18.1+1.9.0" +version = "0.18.2+1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1dcb20f84ffcdd825c7a311ae347cce604a6f084a767dec4a4929829645290e" +checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" dependencies = [ "cc", "libc", @@ -8990,25 +9091,25 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.6" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] name = "libm" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libmimalloc-sys" -version = "0.1.42" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4" +checksum = "667f4fec20f29dfc6bc7357c582d91796c169ad7e2fce709468aefeb2c099870" dependencies = [ "cc", "libc", @@ -9016,13 +9117,13 @@ dependencies = [ [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "libc", - "redox_syscall 0.5.11", + "redox_syscall 0.5.18", ] [[package]] @@ -9059,6 +9160,15 @@ dependencies = [ "webrtc-sys", ] +[[package]] +name = "libz-rs-sys" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "840db8cf39d9ec4dd794376f38acc40d0fc65eec2a8f484f7fd375b84602becd" +dependencies = [ + "zlib-rs", +] + [[package]] name = "libz-sys" version = "1.1.22" @@ -9081,16 +9191,15 @@ dependencies = [ "picker", "project", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] name = "link-cplusplus" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a6f6da007f968f9def0d65a05b187e2960183de70c160204ecfccf0ee330212" +checksum = "7f78c730aaa7d0b9336a299029ea49f9ee53b0ed06e9202e8cb7db9bae7b8c82" dependencies = [ "cc", ] @@ -9112,15 +9221,21 @@ checksum = 
"d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.9.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.7.5" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "litrs" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" +checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" [[package]] name = "livekit" @@ -9158,7 +9273,7 @@ dependencies = [ "parking_lot", "pbjson-types", "prost 0.12.6", - "rand 0.9.1", + "rand 0.9.2", "reqwest 0.11.27", "scopeguard", "serde", @@ -9207,7 +9322,6 @@ dependencies = [ "prost-build 0.9.0", "prost-types 0.9.0", "serde", - "workspace-hack", "zed-reqwest", ] @@ -9218,6 +9332,7 @@ dependencies = [ "anyhow", "async-trait", "audio", + "collections", "core-foundation 0.10.0", "core-video", "coreaudio-rs 0.12.1", @@ -9245,10 +9360,8 @@ dependencies = [ "smallvec", "tokio-tungstenite 0.26.2", "ui", - "workspace-hack", - "zed-collections", + "util", "zed-scap", - "zed-util", ] [[package]] @@ -9268,46 +9381,31 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", - "workspace-hack", - "zed-http-client", ] [[package]] name = "lock_api" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" dependencies = [ "serde", "value-bag", ] -[[package]] -name = "loom" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" -dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "tracing", - "tracing-subscriber", -] - [[package]] name = "loop9" version = "0.1.5" @@ -9323,15 +9421,22 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.3", + "hashbrown 0.15.5", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lsp" version = "0.1.0" dependencies = [ "anyhow", "async-pipe", + "collections", "ctor", "futures 0.3.31", "gpui", @@ -9340,13 +9445,11 @@ dependencies = [ "parking_lot", "postage", "release_channel", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "smol", - "workspace-hack", - "zed-collections", - "zed-util", + "util", "zlog", ] @@ -9363,9 +9466,9 @@ 
dependencies = [ [[package]] name = "lyon" -version = "1.0.1" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7f9cda98b5430809e63ca5197b06c7d191bf7e26dfc467d5a3f0290e2a74f" +checksum = "dbcb7d54d54c8937364c9d41902d066656817dce1e03a44e5533afebd1ef4352" dependencies = [ "lyon_algorithms", "lyon_extra", @@ -9374,9 +9477,9 @@ dependencies = [ [[package]] name = "lyon_algorithms" -version = "1.0.5" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13c9be19d257c7d37e70608ed858e8eab4b2afcea2e3c9a622e892acbf43c08" +checksum = "f4c0829e28c4f336396f250d850c3987e16ce6db057ffe047ce0dd54aab6b647" dependencies = [ "lyon_path", "num-traits", @@ -9394,9 +9497,9 @@ dependencies = [ [[package]] name = "lyon_geom" -version = "1.0.6" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af69edc087272df438b3ee436c4bb6d7c04aa8af665cfd398feae627dbd8570" +checksum = "4e16770d760c7848b0c1c2d209101e408207a65168109509f8483837a36cf2e7" dependencies = [ "arrayvec", "euclid", @@ -9405,9 +9508,9 @@ dependencies = [ [[package]] name = "lyon_path" -version = "1.0.7" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0047f508cd7a85ad6bad9518f68cce7b1bf6b943fb71f6da0ee3bc1e8cb75f25" +checksum = "1aeca86bcfd632a15984ba029b539ffb811e0a70bf55e814ef8b0f54f506fdeb" dependencies = [ "lyon_geom", "num-traits", @@ -9415,15 +9518,34 @@ dependencies = [ [[package]] name = "lyon_tessellation" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579d42360a4b09846eff2feef28f538696c7d6c7439bfa65874ff3cbe0951b2c" +checksum = "f3f586142e1280335b1bc89539f7c97dd80f08fc43e9ab1b74ef0a42b04aa353" dependencies = [ "float_next_after", "lyon_path", "num-traits", ] +[[package]] +name = "lz4" +version = "1.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" +dependencies = [ + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.11.1+lz4-1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "mac" version = "0.1.1" @@ -9432,9 +9554,9 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "mach2" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" dependencies = [ "libc", ] @@ -9469,6 +9591,7 @@ version = "0.1.0" dependencies = [ "assets", "base64 0.22.1", + "collections", "env_logger 0.11.8", "fs", "futures 0.3.31", @@ -9480,12 +9603,10 @@ dependencies = [ "node_runtime", "pulldown-cmark 0.12.2", "settings", + "sum_tree", "theme", "ui", - "workspace-hack", - "zed-collections", - "zed-sum-tree", - "zed-util", + "util", ] [[package]] @@ -9494,6 +9615,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-recursion", + "collections", "editor", "fs", "gpui", @@ -9507,10 +9629,8 @@ dependencies = [ "settings", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -9520,7 +9640,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" dependencies = [ "log", - "phf", + "phf 0.11.3", "phf_codegen", "string_cache", "string_cache_codegen", @@ -9529,9 +9649,9 @@ dependencies = [ [[package]] name = "markup5ever" -version = "0.16.1" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a8096766c229e8c88a3900c9b44b7e06aa7f7343cc229158c3e58ef8f9973a" +checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3" dependencies = [ "log", "tendril", @@ -9552,13 +9672,13 @@ dependencies = [ [[package]] name = "match_token" -version = "0.1.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" +checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9638,26 +9758,40 @@ dependencies = [ "warp", ] +[[package]] +name = "media" +version = "0.1.0" +dependencies = [ + "anyhow", + "bindgen 0.71.1", + "core-foundation 0.10.0", + "core-video", + "ctor", + "foreign-types 0.5.0", + "metal", + "objc", +] + [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memfd" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" +checksum = "ad38eb12aea514a0466ea40a80fd8cc83637065948eb4a426e4aa46261175227" dependencies = [ - "rustix 0.38.44", + "rustix 1.1.2", ] [[package]] name = "memmap2" -version = "0.9.5" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" +checksum = "843a98750cd611cc2965a8213b53b43e715f13c37a9e096c6408e69990961db7" dependencies = [ "libc", "stable_deref_trait", @@ -9677,7 +9811,6 @@ name = "menu" version = "0.1.0" dependencies = [ "gpui", - "workspace-hack", ] [[package]] @@ -9686,7 +9819,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ecfd3296f8c56b7c1f6fbac3c71cefa9d78ce009850c45000015f206dc7fa21" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "block", "core-graphics-types 0.1.3", "foreign-types 0.5.0", @@ -9700,6 +9833,7 @@ name = "migrator" version = "0.1.0" dependencies = [ "anyhow", + "collections", "convert_case 0.8.0", "log", "pretty_assertions", @@ -9710,15 +9844,13 @@ dependencies = [ "tree-sitter", "tree-sitter-json", "unindent", - "workspace-hack", - "zed-collections", ] [[package]] name = "mimalloc" -version = "0.1.46" +version = "0.1.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af" +checksum = "e1ee66a4b64c74f4ef288bcbb9192ad9c3feaad75193129ac8509af543894fd8" dependencies = [ "libmimalloc-sys", ] @@ -9745,7 +9877,7 @@ version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c4d14bcca0fd3ed165a03000480aaa364c6860c34e900cb2dafdf3b95340e77" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "debugid", "num-derive", 
"num-traits", @@ -9760,14 +9892,14 @@ version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abcd9c8a1e6e1e9d56ce3627851f39a17ea83e17c96bc510f29d7e43d78a7d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "cfg-if", "crash-context", "goblin", "libc", "log", - "mach2 0.4.2", + "mach2 0.4.3", "memmap2", "memoffset", "minidump-common", @@ -9804,9 +9936,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", "simd-adler32", @@ -9826,29 +9958,29 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi", + "windows-sys 0.61.2", ] [[package]] name = "miow" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044" +checksum = "536bfad37a309d62069485248eeaba1e8d9853aaf951caaeaed0585a95346f08" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.2", ] [[package]] @@ -9857,33 +9989,41 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", - "strum 0.27.1", - "workspace-hack", - "zed-http-client", + "strum 0.27.2", ] [[package]] name = "moka" -version = "0.12.10" +version = "0.12.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926" +checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" dependencies = [ "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "loom", + "equivalent", "parking_lot", "portable-atomic", "rustc_version", "smallvec", "tagptr", - "thiserror 1.0.69", "uuid", ] +[[package]] +name = "moxcms" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c588e11a3082784af229e23e8e4ecf5bcc6fbe4f69101e0421ce8d79da7f0b40" +dependencies = [ + "num-traits", + "pxfm", +] + [[package]] name = "msvc_spectre_libs" version = "0.1.3" @@ -9900,6 +10040,7 @@ dependencies = [ "anyhow", "buffer_diff", "clock", + "collections", "ctor", "gpui", "indoc", @@ -9909,19 +10050,17 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", - "rand 0.9.1", + "rand 0.9.2", "rope", "serde", "settings", "smallvec", "smol", + "sum_tree", "text", "theme", "tree-sitter", - "workspace-hack", - "zed-collections", - "zed-sum-tree", - "zed-util", + "util", "zlog", ] @@ -9931,6 +10070,12 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "multimap" +version = "0.10.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" + [[package]] name = "naga" version = "25.0.1" @@ -9939,20 +10084,20 @@ checksum = "2b977c445f26e49757f9aca3631c3b8b836942cb278d69a92e7b80d3b24da632" dependencies = [ "arrayvec", "bit-set 0.8.0", - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg_aliases 0.2.1", - "codespan-reporting", + "codespan-reporting 0.12.0", "half", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "hexf-parse", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "num-traits", "once_cell", "rustc-hash 1.1.0", "spirv", "strum 0.26.3", - "thiserror 2.0.12", + "thiserror 2.0.17", "unicode-ident", ] @@ -9971,7 +10116,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -10013,7 +10158,6 @@ dependencies = [ "futures 0.3.31", "net", "smol", - "workspace-hack", ] [[package]] @@ -10022,7 +10166,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "jni-sys", "log", "ndk-sys", @@ -10053,8 +10197,7 @@ dependencies = [ "async-io", "smol", "tempfile", - "windows 0.61.1", - "workspace-hack", + "windows 0.61.3", ] [[package]] @@ -10069,7 +10212,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg-if", "cfg_aliases 0.1.1", "libc", @@ -10081,7 +10224,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg-if", "cfg_aliases 0.2.1", "libc", @@ -10093,7 +10236,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg-if", "cfg_aliases 0.2.1", "libc", @@ -10110,17 +10253,16 @@ dependencies = [ "async-tar", "async-trait", "futures 0.3.31", + "http_client", "log", "paths", "semver", "serde", "serde_json", "smol", + "util", "watch", "which 6.0.3", - "workspace-hack", - "zed-http-client", - "zed-util", ] [[package]] @@ -10150,11 +10292,11 @@ checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" [[package]] name = "normpath" -version = "1.3.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed" +checksum = "bf23ab2b905654b4cb177e30b629937b3868311d4e1cba859f899c041046e69b" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -10164,18 +10306,17 @@ dependencies = [ "anyhow", "channel", "client", + "collections", "component", "db", "gpui", "rpc", "settings", + "sum_tree", "time", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-sum-tree", - "zed-util", "zed_actions", ] @@ -10185,7 +10326,7 @@ version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - 
"bitflags 2.9.0", + "bitflags 2.9.4", "crossbeam-channel", "filetime", "fsevent-sys 4.1.0", @@ -10203,14 +10344,14 @@ name = "notify" version = "8.0.0" source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "filetime", "fsevent-sys 4.1.0", "inotify 0.11.0", "kqueue", "libc", "log", - "mio 1.0.3", + "mio 1.1.0", "notify-types", "walkdir", "windows-sys 0.59.0", @@ -10232,6 +10373,15 @@ name = "notify-types" version = "2.0.0" source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96" +[[package]] +name = "now" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d89e9874397a1f0a52fc1f197a8effd9735223cb2390e9dcc83ac6cd02923d0" +dependencies = [ + "chrono", +] + [[package]] name = "ntapi" version = "0.4.1" @@ -10243,11 +10393,11 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.50.1" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -10322,7 +10472,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10378,33 +10528,34 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", ] [[package]] name = "num_enum" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" +checksum = "a973b4e44ce6cad84ce69d797acf9a044532e4184c4f267913d1b546a0727b7a" dependencies = [ "num_enum_derive", + "rustversion", ] [[package]] name = "num_enum_derive" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" +checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10453,9 +10604,9 @@ dependencies = [ [[package]] name = "objc2" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88c6597e14493ab2e44ce58f2fdecf095a51f12ca57bec060a11c57332520551" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" dependencies = [ "objc2-encode", ] @@ -10466,7 +10617,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", "objc2-foundation", @@ -10479,7 +10630,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"10cbe18d879e20a4aea544f8befe38bcf52255eb63d3f23eca2842f3319e4c07" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "libc", "objc2", "objc2-core-audio", @@ -10490,9 +10641,9 @@ dependencies = [ [[package]] name = "objc2-core-audio" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca44961e888e19313b808f23497073e3f6b3c22bb485056674c8b49f3b025c82" +checksum = "e1eebcea8b0dbff5f7c8504f3107c68fc061a3eb44932051c8cf8a68d969c3b2" dependencies = [ "dispatch2", "objc2", @@ -10502,21 +10653,21 @@ dependencies = [ [[package]] name = "objc2-core-audio-types" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0f1cc99bb07ad2ddb6527ddf83db6a15271bb036b3eb94b801cd44fdc666ee1" +checksum = "5a89f2ec274a0cf4a32642b2991e8b351a404d290da87bb6a9a9d8632490bd1c" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", ] [[package]] name = "objc2-core-foundation" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "dispatch2", "objc2", ] @@ -10533,18 +10684,28 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", ] +[[package]] +name = "objc2-io-kit" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15" +dependencies = [ + "libc", + "objc2-core-foundation", +] + [[package]] name = "objc2-metal" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f246c183239540aab1782457b35ab2040d4259175bd1d0c58e46ada7b47a874" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "block2", "dispatch2", "objc2", @@ -10558,7 +10719,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90ffb6a0cd5f182dc964334388560b12a57f7b74b3e2dec5e2722aa2dfb2ccd5" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", "objc2-foundation", @@ -10571,7 +10732,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25b1312ad7bc8a0e92adae17aa10f90aae1fb618832f9b993b022b591027daed" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", "objc2-foundation", @@ -10603,23 +10764,66 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "crc32fast", - "hashbrown 0.15.3", - "indexmap 2.9.0", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "memchr", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ "memchr", ] +[[package]] +name = "object_store" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c1be0c6c22ec0817cdc77d3842f721a17fd30ab6965001415b5402a74e6b740" +dependencies = [ + "async-trait", + "base64 0.22.1", + "bytes 1.10.1", 
+ "chrono", + "form_urlencoded", + "futures 0.3.31", + "http 1.3.1", + "http-body-util", + "humantime", + "hyper 1.7.0", + "itertools 0.14.0", + "parking_lot", + "percent-encoding", + "quick-xml 0.38.3", + "rand 0.9.2", + "reqwest 0.12.24", + "ring", + "serde", + "serde_json", + "serde_urlencoded", + "thiserror 2.0.17", + "tokio", + "tracing", + "url", + "walkdir", + "wasm-bindgen-futures", + "web-time", +] + [[package]] name = "ollama" version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", "settings", - "workspace-hack", - "zed-http-client", ] [[package]] @@ -10639,17 +10843,15 @@ dependencies = [ "notifications", "picker", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "settings", "telemetry", "theme", "ui", - "ui_input", + "util", "vim_mode_setting", "workspace", - "workspace-hack", - "zed-util", "zed_actions", "zlog", ] @@ -10660,6 +10862,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" + [[package]] name = "oo7" version = "0.5.0" @@ -10670,22 +10878,22 @@ dependencies = [ "ashpd 0.12.0", "async-fs", "async-io", - "async-lock", + "async-lock 3.4.1", "blocking", "cbc", "cipher", "digest", "endi", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "futures-util", - "getrandom 0.3.2", + "getrandom 0.3.4", "hkdf", "hmac", "md-5", "num", "num-bigint-dig", "pbkdf2 0.12.2", - "rand 0.9.1", + "rand 0.9.2", "serde", "sha2", "subtle", @@ -10718,14 +10926,13 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", + "http_client", "log", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", - "strum 0.27.1", - "workspace-hack", - "zed-http-client", + "strum 0.27.2", ] [[package]] @@ -10734,14 +10941,13 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", - "schemars 1.0.1", + "http_client", + "schemars 1.0.4", "serde", "serde_json", "settings", - "strum 0.27.1", - "thiserror 2.0.12", - "workspace-hack", - "zed-http-client", + "strum 0.27.2", + "thiserror 2.0.17", ] [[package]] @@ -10758,11 +10964,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.72" +version = "0.10.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +checksum = "24ad14dd45412269e1a30f52ad8f0664f0f4f4a89ee8fe28c3b3527021ebb654" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cfg-if", "foreign-types 0.3.2", "libc", @@ -10779,7 +10985,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10790,9 +10996,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.107" +version = "0.9.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +checksum = "0a9f0075ba3c21b09f8e8b2026584b1d18d49388648f2fbbf3c97ea8deced8e2" dependencies = [ "cc", "libc", @@ -10802,13 +11008,13 @@ dependencies = [ [[package]] name = "optfield" -version = "0.3.0" +version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa59f025cde9c698fcb4fcb3533db4621795374065bee908215263488f2d2a1d" +checksum = "969ccca8ffc4fb105bd131a228107d5c9dd89d9d627edf3295cbe979156f9712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10866,7 +11072,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10890,9 +11096,8 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", "zed_actions", ] @@ -10901,6 +11106,7 @@ name = "outline_panel" version = "0.1.0" dependencies = [ "anyhow", + "collections", "db", "editor", "file_icons", @@ -10921,11 +11127,9 @@ dependencies = [ "smol", "theme", "ui", + "util", "workspace", - "workspace-hack", "worktree", - "zed-collections", - "zed-util", "zed_actions", ] @@ -10976,7 +11180,7 @@ dependencies = [ "by_address", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10989,7 +11193,6 @@ dependencies = [ "theme", "ui", "workspace", - "workspace-hack", ] [[package]] @@ -11000,9 +11203,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", @@ -11010,15 +11213,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.11" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.11", + "redox_syscall 0.5.18", "smallvec", - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] @@ -11095,8 +11298,7 @@ version = "0.1.0" dependencies = [ "dirs 4.0.0", "ignore", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -11166,12 +11368,12 @@ checksum = "0008e816fcdaf229cdd540e9b6ca2dc4a10d65c31624abb546c6420a02846e61" [[package]] name = "pem" -version = "3.0.5" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" dependencies = [ "base64 0.22.1", - "serde", + "serde_core", ] [[package]] @@ -11185,26 +11387,34 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "perf" +version = "0.1.0" +dependencies = [ + "collections", + "serde", + "serde_json", +] [[package]] name = "pest" -version = "2.8.0" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" +checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" dependencies = [ "memchr", - "thiserror 2.0.12", "ucd-trie", ] [[package]] name = "pest_derive" 
-version = "2.8.0" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5" +checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" dependencies = [ "pest", "pest_generator", @@ -11212,24 +11422,23 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.0" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841" +checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "pest_meta" -version = "2.8.0" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0" +checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" dependencies = [ - "once_cell", "pest", "sha2", ] @@ -11237,7 +11446,7 @@ dependencies = [ [[package]] name = "pet" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "clap", "env_logger 0.10.2", @@ -11274,7 +11483,7 @@ dependencies = [ [[package]] name = "pet-conda" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11293,7 +11502,7 @@ dependencies = [ [[package]] name = "pet-core" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "clap", "lazy_static", @@ -11308,7 +11517,7 @@ dependencies = [ [[package]] name = "pet-env-var-path" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "lazy_static", "log", @@ -11324,7 +11533,7 @@ dependencies = [ [[package]] name = "pet-fs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11333,7 +11542,7 @@ dependencies = [ [[package]] name = "pet-global-virtualenvs" version = "0.1.0" -source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11346,7 +11555,7 @@ dependencies = [ [[package]] name = "pet-homebrew" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "lazy_static", "log", @@ -11364,7 +11573,7 @@ dependencies = [ [[package]] name = "pet-jsonrpc" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "env_logger 0.10.2", "log", @@ -11377,7 +11586,7 @@ dependencies = [ [[package]] name = "pet-linux-global-python" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11390,7 +11599,7 @@ dependencies = [ [[package]] name = "pet-mac-commandlinetools" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11403,7 +11612,7 @@ dependencies = [ [[package]] name = "pet-mac-python-org" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11416,7 +11625,7 @@ dependencies = [ [[package]] name = "pet-mac-xcode" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11429,7 +11638,7 @@ dependencies = [ [[package]] name = "pet-pipenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ 
"log", "msvc_spectre_libs", @@ -11442,7 +11651,7 @@ dependencies = [ [[package]] name = "pet-pixi" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11454,7 +11663,7 @@ dependencies = [ [[package]] name = "pet-poetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "base64 0.22.1", "lazy_static", @@ -11469,13 +11678,13 @@ dependencies = [ "serde", "serde_json", "sha2", - "toml 0.8.20", + "toml 0.8.23", ] [[package]] name = "pet-pyenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "lazy_static", "log", @@ -11493,7 +11702,7 @@ dependencies = [ [[package]] name = "pet-python-utils" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11510,7 +11719,7 @@ dependencies = [ [[package]] name = "pet-reporter" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "env_logger 0.10.2", "log", @@ -11524,7 +11733,7 @@ dependencies = [ [[package]] name = "pet-telemetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -11539,7 +11748,7 @@ dependencies = [ [[package]] name = "pet-venv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11551,7 +11760,7 @@ dependencies = [ [[package]] name = "pet-virtualenv" version = "0.1.0" -source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11563,7 +11772,7 @@ dependencies = [ [[package]] name = "pet-virtualenvwrapper" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "log", "msvc_spectre_libs", @@ -11576,7 +11785,7 @@ dependencies = [ [[package]] name = "pet-windows-registry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "lazy_static", "log", @@ -11594,7 +11803,7 @@ dependencies = [ [[package]] name = "pet-windows-store" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=845945b830297a50de0e24020b980a65e4820559#845945b830297a50de0e24020b980a65e4820559" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c" dependencies = [ "lazy_static", "log", @@ -11614,14 +11823,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.9.0", + "indexmap 2.11.4", ] [[package]] name = "pgvector" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0e8871b6d7ca78348c6cd29b911b94851f3429f0cd403130ca17f26c1fb91a6" +checksum = "fc58e2d255979a31caa7cabfa7aac654af0354220719ab7a68520ae7a91e8c0b" dependencies = [ "serde", ] @@ -11632,8 +11841,18 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_macros", - "phf_shared", + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_macros 0.12.1", + "phf_shared 0.12.1", ] [[package]] @@ -11642,8 +11861,8 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -11652,21 +11871,44 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared", + "phf_shared 0.11.3", "rand 0.8.5", ] +[[package]] +name = "phf_generator" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b" +dependencies = [ + "fastrand 2.3.0", + "phf_shared 0.12.1", +] + [[package]] name = "phf_macros" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", +] + +[[package]] +name = "phf_macros" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368" +dependencies = [ + "phf_generator 0.12.1", + "phf_shared 0.12.1", + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -11678,6 +11920,15 @@ dependencies = [ "siphasher", ] +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + [[package]] name = "picker" version = "0.1.0" @@ -11688,13 +11939,12 @@ dependencies = [ "env_logger 0.11.8", "gpui", "menu", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "theme", "ui", "workspace", - "workspace-hack", ] [[package]] @@ -11720,7 +11970,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -11768,79 +12018,616 @@ dependencies = [ ] [[package]] -name = "pkcs8" -version = "0.10.2" +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der 0.7.10", + "spki 0.7.3", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "planus" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3daf8e3d4b712abe1d690838f6e29fb76b76ea19589c4afa39ec30e12f62af71" +dependencies = [ + "array-init-cursor", + "hashbrown 0.15.5", +] + +[[package]] +name = "plist" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" +dependencies = [ + "base64 0.22.1", + "indexmap 2.11.4", + "quick-xml 0.38.3", + "serde", + "time", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" 
+dependencies = [ + "plotters-backend", +] + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "png" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" +dependencies = [ + "bitflags 2.9.4", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "polars" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5f7feb5d56b954e691dff22a8b2d78d77433dcc93c35fe21c3777fdc121b697" +dependencies = [ + "getrandom 0.2.16", + "getrandom 0.3.4", + "polars-arrow", + "polars-core", + "polars-error", + "polars-io", + "polars-lazy", + "polars-ops", + "polars-parquet", + "polars-sql", + "polars-time", + "polars-utils", + "version_check", +] + +[[package]] +name = "polars-arrow" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b4fed2343961b3eea3db2cee165540c3e1ad9d5782350cc55a9e76cf440148" +dependencies = [ + "atoi_simd", + "bitflags 2.9.4", + "bytemuck", + "chrono", + "chrono-tz", + "dyn-clone", + "either", + "ethnum", + "getrandom 0.2.16", + "getrandom 0.3.4", + "hashbrown 0.15.5", + "itoa", + "lz4", + "num-traits", + "polars-arrow-format", + "polars-error", + "polars-schema", + "polars-utils", + "serde", + "simdutf8", + "streaming-iterator", + "strum_macros 0.27.2", + "version_check", + "zstd 0.13.3", +] + +[[package]] +name = "polars-arrow-format" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a556ac0ee744e61e167f34c1eb0013ce740e0ee6cd8c158b2ec0b518f10e6675" +dependencies = [ + "planus", + "serde", +] + +[[package]] +name = "polars-compute" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138785beda4e4a90a025219f09d0d15a671b2be9091513ede58e05db6ad4413f" +dependencies = [ + "atoi_simd", + "bytemuck", + "chrono", + "either", + "fast-float2", + "hashbrown 0.15.5", + "itoa", + "num-traits", + "polars-arrow", + "polars-error", + "polars-utils", + "rand 0.9.2", + "ryu", + "serde", + "skiplist", + "strength_reduce", + "strum_macros 0.27.2", + "version_check", +] + +[[package]] +name = "polars-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e77b1f08ef6dbb032bb1d0d3365464be950df9905f6827a95b24c4ca5518901d" +dependencies = [ + "bitflags 2.9.4", + "boxcar", + "bytemuck", + "chrono", + "chrono-tz", + "comfy-table", + "either", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "itoa", + "num-traits", + "polars-arrow", + "polars-compute", + "polars-dtype", + "polars-error", + "polars-row", + "polars-schema", + "polars-utils", + "rand 0.9.2", + "rand_distr", + "rayon", + "regex", + "serde", + "serde_json", + "strum_macros 0.27.2", + "uuid", + "version_check", + "xxhash-rust", +] + +[[package]] +name = "polars-dtype" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c43d0ea57168be4546c4d8064479ed8b29a9c79c31a0c7c367ee734b9b7158" +dependencies = [ + "boxcar", + "hashbrown 0.15.5", + "polars-arrow", + "polars-error", + "polars-utils", + "serde", + "uuid", +] + +[[package]] +name = "polars-error" +version = "0.51.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9cb5d98f59f8b94673ee391840440ad9f0d2170afced95fc98aa86f895563c0" +dependencies = [ + "object_store", + "parking_lot", + "polars-arrow-format", + "regex", + "signal-hook", + "simdutf8", +] + +[[package]] +name = "polars-expr" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343931b818cf136349135ba11dbc18c27683b52c3477b1ba8ca606cf5ab1965c" +dependencies = [ + "bitflags 2.9.4", + "hashbrown 0.15.5", + "num-traits", + "polars-arrow", + "polars-compute", + "polars-core", + "polars-io", + "polars-ops", + "polars-plan", + "polars-row", + "polars-time", + "polars-utils", + "rand 0.9.2", + "rayon", + "recursive", +] + +[[package]] +name = "polars-io" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10388c64b8155122488229a881d1c6f4fdc393bc988e764ab51b182fcb2307e4" +dependencies = [ + "async-trait", + "atoi_simd", + "blake3", + "bytes 1.10.1", + "chrono", + "fast-float2", + "fs4", + "futures 0.3.31", + "glob", + "hashbrown 0.15.5", + "home", + "itoa", + "memchr", + "memmap2", + "num-traits", + "object_store", + "percent-encoding", + "polars-arrow", + "polars-core", + "polars-error", + "polars-parquet", + "polars-schema", + "polars-time", + "polars-utils", + "rayon", + "regex", + "reqwest 0.12.24", + "ryu", + "serde", + "serde_json", + "simdutf8", + "tokio", + "tokio-util", + "url", +] + +[[package]] +name = "polars-lazy" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fb6e2c6c2fa4ea0c660df1c06cf56960c81e7c2683877995bae3d4e3d408147" +dependencies = [ + "bitflags 2.9.4", + "chrono", + "either", + "memchr", + "polars-arrow", + "polars-compute", + "polars-core", + "polars-expr", + "polars-io", + "polars-mem-engine", + "polars-ops", + "polars-plan", + "polars-stream", + "polars-time", + "polars-utils", + "rayon", + "version_check", +] + +[[package]] +name = "polars-mem-engine" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20a856e98e253587c28d8132a5e7e5a75cb2c44731ca090f1481d45f1d123771" +dependencies = [ + "futures 0.3.31", + "memmap2", + "polars-arrow", + "polars-core", + "polars-error", + "polars-expr", + "polars-io", + "polars-ops", + "polars-plan", + "polars-time", + "polars-utils", + "rayon", + "recursive", + "tokio", +] + +[[package]] +name = "polars-ops" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acf6062173fdc9ba05775548beb66e76643a148d9aeadc9984ed712bc4babd76" +dependencies = [ + "argminmax", + "base64 0.22.1", + "bytemuck", + "chrono", + "chrono-tz", + "either", + "hashbrown 0.15.5", + "hex", + "indexmap 2.11.4", + "libm", + "memchr", + "num-traits", + "polars-arrow", + "polars-compute", + "polars-core", + "polars-error", + "polars-schema", + "polars-utils", + "rayon", + "regex", + "regex-syntax", + "strum_macros 0.27.2", + "unicode-normalization", + "unicode-reverse", + "version_check", +] + +[[package]] +name = "polars-parquet" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1d769180dec070df0dc4b89299b364bf2cfe32b218ecc4ddd8f1a49ae60669" +dependencies = [ + "async-stream", + "base64 0.22.1", + "brotli", + "bytemuck", + "ethnum", + "flate2", + "futures 0.3.31", + "hashbrown 0.15.5", + "lz4", + "num-traits", + "polars-arrow", + "polars-compute", + "polars-error", + "polars-parquet-format", + "polars-utils", + "serde", + 
"simdutf8", + "snap", + "streaming-decompression", + "zstd 0.13.3", +] + +[[package]] +name = "polars-parquet-format" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +checksum = "c025243dcfe8dbc57e94d9f82eb3bef10b565ab180d5b99bed87fd8aea319ce1" dependencies = [ - "der 0.7.10", - "spki 0.7.3", + "async-trait", + "futures 0.3.31", ] [[package]] -name = "pkg-config" -version = "0.3.32" +name = "polars-plan" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +checksum = "1cd3a2e33ae4484fe407ab2d2ba5684f0889d1ccf3ad6b844103c03638e6d0a0" +dependencies = [ + "bitflags 2.9.4", + "bytemuck", + "bytes 1.10.1", + "chrono", + "chrono-tz", + "either", + "futures 0.3.31", + "hashbrown 0.15.5", + "memmap2", + "num-traits", + "percent-encoding", + "polars-arrow", + "polars-compute", + "polars-core", + "polars-error", + "polars-io", + "polars-ops", + "polars-parquet", + "polars-time", + "polars-utils", + "rayon", + "recursive", + "regex", + "sha2", + "strum_macros 0.27.2", + "version_check", +] [[package]] -name = "plain" -version = "0.2.3" +name = "polars-row" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" +checksum = "18734f17e0e348724df3ae65f3ee744c681117c04b041cac969dfceb05edabc0" +dependencies = [ + "bitflags 2.9.4", + "bytemuck", + "polars-arrow", + "polars-compute", + "polars-dtype", + "polars-error", + "polars-utils", +] [[package]] -name = "plist" -version = "1.7.1" +name = "polars-schema" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac26e981c03a6e53e0aee43c113e3202f5581d5360dae7bd2c70e800dd0451d" +checksum = "8e6c1ab13e04d5167661a9854ed1ea0482b2ed9b8a0f1118dabed7cd994a85e3" dependencies = [ - "base64 0.22.1", - "indexmap 2.9.0", - "quick-xml 0.32.0", + "indexmap 2.11.4", + "polars-error", + "polars-utils", "serde", - "time", + "version_check", ] [[package]] -name = "plotters" -version = "0.3.7" +name = "polars-sql" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +checksum = "c4e7766da02cc1d464994404d3e88a7a0ccd4933df3627c325480fbd9bbc0a11" dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", + "bitflags 2.9.4", + "hex", + "polars-core", + "polars-error", + "polars-lazy", + "polars-ops", + "polars-plan", + "polars-time", + "polars-utils", + "rand 0.9.2", + "regex", + "serde", + "sqlparser", ] [[package]] -name = "plotters-backend" -version = "0.3.7" +name = "polars-stream" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" +checksum = "31f6c6ca1ea01f9dea424d167e4f33f5ec44cd67fbfac9efd40575ed20521f14" +dependencies = [ + "async-channel 2.5.0", + "async-trait", + "atomic-waker", + "bitflags 2.9.4", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-queue", + "crossbeam-utils", + "futures 0.3.31", + "memmap2", + "parking_lot", + "percent-encoding", + "pin-project-lite", + "polars-arrow", + "polars-core", + "polars-error", + "polars-expr", + "polars-io", + "polars-mem-engine", + "polars-ops", + "polars-parquet", + "polars-plan", 
+ "polars-utils", + "rand 0.9.2", + "rayon", + "recursive", + "slotmap", + "tokio", + "tokio-util", + "version_check", +] [[package]] -name = "plotters-svg" -version = "0.3.7" +name = "polars-time" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +checksum = "f6a3a6e279a7a984a0b83715660f9e880590c6129ec2104396bfa710bcd76dee" dependencies = [ - "plotters-backend", + "atoi_simd", + "bytemuck", + "chrono", + "chrono-tz", + "now", + "num-traits", + "polars-arrow", + "polars-compute", + "polars-core", + "polars-error", + "polars-ops", + "polars-utils", + "rayon", + "regex", + "strum_macros 0.27.2", ] [[package]] -name = "png" -version = "0.17.16" +name = "polars-utils" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +checksum = "57b267021b0e5422d7fbc70fd79e51b9f9a8466c585779373a18b0199e973f29" dependencies = [ - "bitflags 1.3.2", - "crc32fast", - "fdeflate", + "bincode 2.0.1", + "bytemuck", + "bytes 1.10.1", + "compact_str", + "either", "flate2", - "miniz_oxide", + "foldhash 0.1.5", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "libc", + "memmap2", + "num-traits", + "polars-error", + "rand 0.9.2", + "raw-cpuid 11.6.0", + "rayon", + "regex", + "rmp-serde", + "serde", + "serde_json", + "serde_stacker", + "slotmap", + "stacker", + "uuid", + "version_check", ] [[package]] @@ -11851,10 +12638,10 @@ checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.5.0", + "hermit-abi", "pin-project-lite", - "rustix 1.0.7", - "windows-sys 0.61.0", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] @@ -11865,9 +12652,9 @@ checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" [[package]] name = "portable-atomic" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" [[package]] name = "portable-atomic-util" @@ -11918,9 +12705,9 @@ dependencies = [ [[package]] name = "postcard" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "170a2601f67cc9dba8edd8c4870b15f71a6a2dc196daec8c83f72b59dff628a8" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" dependencies = [ "cobs", "embedded-io 0.4.0", @@ -11928,6 +12715,15 @@ dependencies = [ "serde", ] +[[package]] +name = "potential_utf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +dependencies = [ + "zerovec", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -11940,7 +12736,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.8.24", + "zerocopy", ] [[package]] @@ -11954,6 +12750,7 @@ name = "prettier" version = "0.1.0" dependencies = [ "anyhow", + "collections", "fs", "gpui", "language", @@ -11964,9 +12761,7 @@ dependencies = [ "paths", "serde", "serde_json", - "workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -11981,12 +12776,12 @@ dependencies = 
[ [[package]] name = "prettyplease" -version = "0.2.32" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12000,11 +12795,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit", + "toml_edit 0.23.7", ] [[package]] @@ -12026,14 +12821,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -12046,7 +12841,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "version_check", "yansi", ] @@ -12057,27 +12852,27 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "hex", ] [[package]] name = "profiling" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afbdc74edc00b6f6a218ca6a5364d6226a259d4b8ea1af4a0ea063f27e179f4d" +checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" dependencies = [ "profiling-procmacros", ] [[package]] name = "profiling-procmacros" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a65f2e60fbf1063868558d69c6beacf412dc755f9fc020f514b7955fc914fe30" +checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" dependencies = [ "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12093,6 +12888,7 @@ dependencies = [ "circular-buffer", "client", "clock", + "collections", "context_server", "dap", "dap_adapters", @@ -12106,8 +12902,9 @@ dependencies = [ "git_hosting_providers", "globset", "gpui", + "http_client", "image", - "indexmap 2.9.0", + "indexmap 2.11.4", "itertools 0.14.0", "language", "log", @@ -12119,38 +12916,34 @@ dependencies = [ "postage", "prettier", "pretty_assertions", - "rand 0.9.1", + "rand 0.9.2", "regex", "release_channel", "remote", "rpc", - "schemars 1.0.1", + "schemars 1.0.4", "semver", "serde", "serde_json", "settings", "sha2", "shellexpand 2.1.2", - "shlex", "smallvec", "smol", "snippet", "snippet_provider", + "sum_tree", "task", "tempfile", "terminal", "text", - "toml 0.8.20", + "toml 0.8.23", "unindent", "url", + "util", "watch", "which 6.0.3", - "workspace-hack", "worktree", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", "zeroize", "zlog", ] @@ -12161,6 +12954,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "command_palette_hooks", "criterion", "db", @@ -12174,7 +12968,7 @@ dependencies = [ 
"pretty_assertions", "project", "rayon", - "schemars 1.0.1", + "schemars 1.0.4", "search", "serde", "serde_json", @@ -12183,11 +12977,9 @@ dependencies = [ "telemetry", "theme", "ui", + "util", "workspace", - "workspace-hack", "worktree", - "zed-collections", - "zed-util", "zed_actions", ] @@ -12209,9 +13001,8 @@ dependencies = [ "serde_json", "settings", "theme", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -12226,7 +13017,7 @@ dependencies = [ "memchr", "parking_lot", "protobuf", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -12236,6 +13027,7 @@ dependencies = [ "anyhow", "assets", "chrono", + "collections", "fs", "futures 0.3.31", "fuzzy", @@ -12250,10 +13042,8 @@ dependencies = [ "serde", "serde_json", "text", + "util", "uuid", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -12287,7 +13077,7 @@ dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap", + "multimap 0.8.3", "petgraph", "prost 0.9.0", "prost-types 0.9.0", @@ -12306,14 +13096,14 @@ dependencies = [ "heck 0.5.0", "itertools 0.12.1", "log", - "multimap", + "multimap 0.10.1", "once_cell", "petgraph", "prettyplease", "prost 0.12.6", "prost-types 0.12.6", "regex", - "syn 2.0.101", + "syn 2.0.106", "tempfile", ] @@ -12340,7 +13130,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12367,12 +13157,11 @@ name = "proto" version = "0.1.0" dependencies = [ "anyhow", + "collections", "prost 0.9.0", "prost-build 0.9.0", "serde", "typed-path", - "workspace-hack", - "zed-collections", ] [[package]] @@ -12397,9 +13186,9 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.25" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f58e5423e24c18cc840e1c98370b3993c6649cd1678b4d24318bcf0a083cbe88" +checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c" dependencies = [ "cc", ] @@ -12430,7 +13219,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "memchr", "pulldown-cmark-escape", "unicase", @@ -12442,7 +13231,7 @@ version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "memchr", "unicase", ] @@ -12491,6 +13280,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "pxfm" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3cbdf373972bf78df4d3b518d07003938e2c7d1fb5891e55f9cb6df57009d84" +dependencies = [ + "num-traits", +] + [[package]] name = "qoi" version = "0.4.1" @@ -12517,27 +13315,28 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.32.0" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" dependencies = [ "memchr", ] [[package]] name = "quick-xml" -version = "0.37.4" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4ce8c88de324ff838700f36fb6ab86c96df0e3c4ab6ef3a9b2044465cce1369" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ 
"memchr", + "serde", ] [[package]] name = "quinn" -version = "0.11.7" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3bd15a6f2967aef83887dcb9fec0014580467e33720d073560cf015a5683012" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ "bytes 1.10.1", "cfg_aliases 0.2.1", @@ -12545,9 +13344,9 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls 0.23.26", - "socket2", - "thiserror 2.0.12", + "rustls 0.23.33", + "socket2 0.6.1", + "thiserror 2.0.17", "tokio", "tracing", "web-time", @@ -12555,19 +13354,20 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.10" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ "bytes 1.10.1", - "getrandom 0.3.2", - "rand 0.9.1", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", "ring", "rustc-hash 2.1.1", - "rustls 0.23.26", + "rustls 0.23.33", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tracing", "web-time", @@ -12575,32 +13375,32 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.11" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "541d0f57c6ec747a90738a52741d3221f7960e8ac2f0ff4b1a63680e033b4ab5" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" dependencies = [ "cfg_aliases 0.2.1", "libc", "once_cell", - "socket2", + "socket2 0.6.1", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" dependencies = [ "proc-macro2", ] [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "radium" @@ -12621,9 +13421,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", @@ -12655,7 +13455,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -12664,7 +13464,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.4", ] [[package]] @@ -12674,7 +13474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8615d50dcf34fa31f7ab52692afec947c4dd0ab803cc87cb3b0b4570ff7463" dependencies = [ "num-traits", - "rand 0.9.1", + "rand 0.9.2", ] [[package]] @@ -12688,9 +13488,9 @@ dependencies = 
[ [[package]] name = "rangemap" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60fcc7d6849342eff22c4350c8b9a989ee8ceabc4b481253e8946b9fe83d684" +checksum = "f93e7e49bb0bf967717f7bd674458b3d6b0c5f48ec7e3038166026a69fc22223" [[package]] name = "rav1e" @@ -12729,9 +13529,9 @@ dependencies = [ [[package]] name = "ravif" -version = "0.11.12" +version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6a5f31fcf7500f9401fea858ea4ab5525c99f2322cfcee732c0e6c74208c0c6" +checksum = "5825c26fddd16ab9f515930d49028a630efec172e903483c94796cfe31893e6b" dependencies = [ "avif-serialize", "imgref", @@ -12757,7 +13557,7 @@ version = "11.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", ] [[package]] @@ -12780,9 +13580,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -12790,9 +13590,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -12800,9 +13600,9 @@ dependencies = [ [[package]] name = "read-fonts" -version = "0.25.3" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f9e8a4f503e5c8750e4cd3b32a4e090035c46374b305a15c70bad833dca05f" +checksum = "6717cf23b488adf64b9d711329542ba34de147df262370221940dfabc2c91358" dependencies = [ "bytemuck", "font-types", @@ -12856,13 +13656,32 @@ dependencies = [ "telemetry", "theme", "ui", - "windows-registry 0.6.0", + "util", + "windows-registry 0.6.1", "workspace", - "workspace-hack", - "zed-util", "zed_actions", ] +[[package]] +name = "recursive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0786a43debb760f491b1bc0269fe5e84155353c67482b9e60d0cfb596054b43e" +dependencies = [ + "recursive-proc-macro-impl", + "stacker", +] + +[[package]] +name = "recursive-proc-macro-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76009fbe0614077fc1a2ce255e3a1881a2e3a3527097d5dc6d8212c585e7e38b" +dependencies = [ + "quote", + "syn 2.0.106", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -12874,11 +13693,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.11" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", ] [[package]] @@ -12887,40 +13706,40 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", 
"thiserror 1.0.69", ] [[package]] name = "redox_users" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "ref-cast" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12929,7 +13748,7 @@ version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8eff4fa778b5c2a57e85c5f2fe3a709c52f0e60d23146e2151cbef5893f420e" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "fluent-uri", "once_cell", "parking_lot", @@ -12937,6 +13756,13 @@ dependencies = [ "serde_json", ] +[[package]] +name = "refineable" +version = "0.1.0" +dependencies = [ + "derive_refineable", +] + [[package]] name = "regalloc2" version = "0.11.2" @@ -12945,7 +13771,7 @@ checksum = "dc06e6b318142614e4a48bc725abbf08ff166694835c43c9dae5a9009704639a" dependencies = [ "allocator-api2", "bumpalo", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "log", "rustc-hash 2.1.1", "serde", @@ -12954,9 +13780,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -12966,9 +13792,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -12977,22 +13803,21 @@ dependencies = [ [[package]] name = "regex-lite" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" +checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "release_channel" version = "0.1.0" dependencies = [ "gpui", - "workspace-hack", ] [[package]] @@ -13002,6 +13827,7 @@ dependencies = [ "anyhow", "askpass", "async-trait", + "collections", "fs", "futures 0.3.31", "gpui", @@ -13014,15 +13840,12 @@ dependencies = [ "serde", 
"serde_json", "settings", - "shlex", "smol", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "urlencoding", + "util", "which 6.0.3", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -13030,14 +13853,14 @@ name = "remote_server" version = "0.1.0" dependencies = [ "action_log", + "agent", "anyhow", "askpass", - "assistant_tool", - "assistant_tools", "cargo_toml", "clap", "client", "clock", + "collections", "crash-handler", "crashes", "dap", @@ -13055,6 +13878,7 @@ dependencies = [ "git_hosting_providers", "gpui", "gpui_tokio", + "http_client", "json_schema_store", "language", "language_extension", @@ -13079,16 +13903,15 @@ dependencies = [ "settings", "shellexpand 2.1.2", "smol", - "sysinfo", - "thiserror 2.0.12", - "toml 0.8.20", + "sysinfo 0.37.2", + "task", + "thiserror 2.0.17", + "toml 0.8.23", "unindent", + "util", "watch", "workspace", "worktree", - "zed-collections", - "zed-http-client", - "zed-util", "zlog", ] @@ -13111,6 +13934,7 @@ dependencies = [ "async-tungstenite", "base64 0.22.1", "client", + "collections", "command_palette_hooks", "editor", "env_logger 0.11.8", @@ -13118,6 +13942,7 @@ dependencies = [ "file_icons", "futures 0.3.31", "gpui", + "http_client", "image", "indoc", "jupyter-protocol", @@ -13144,12 +13969,9 @@ dependencies = [ "tree-sitter-python", "tree-sitter-typescript", "ui", + "util", "uuid", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", ] [[package]] @@ -13163,7 +13985,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", + "h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", @@ -13198,39 +14020,45 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64 0.22.1", "bytes 1.10.1", "futures-channel", "futures-core", "futures-util", + "h2 0.4.12", "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", - "ipnet", "js-sys", "log", - "mime", - "once_cell", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", + "tokio-rustls 0.26.2", + "tokio-util", "tower 0.5.2", + "tower-http 0.6.6", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", - "windows-registry 0.4.0", ] [[package]] @@ -13241,13 +14069,12 @@ dependencies = [ "bytes 1.10.1", "futures 0.3.31", "gpui", + "http_client", "http_client_tls", "log", "regex", "serde", "tokio", - "workspace-hack", - "zed-http-client", "zed-reqwest", ] @@ -13278,9 +14105,9 @@ dependencies = [ [[package]] name = "rgb" -version = "0.8.50" +version = "0.8.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" +checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" dependencies = [ "bytemuck", ] @@ -13296,8 +14123,7 @@ dependencies = [ "pulldown-cmark 0.12.2", "theme", "ui", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -13308,7 +14134,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", 
+ "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", @@ -13354,6 +14180,17 @@ dependencies = [ "paste", ] +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + [[package]] name = "rmpv" version = "1.3.0" @@ -13367,7 +14204,7 @@ dependencies = [ [[package]] name = "rodio" version = "0.21.1" -source = "git+https://github.com/RustAudio/rodio#e2074c6c2acf07b57cf717e076bdda7a9ac6e70b" +source = "git+https://github.com/RustAudio/rodio?rev=e2074c6c2acf07b57cf717e076bdda7a9ac6e70b#e2074c6c2acf07b57cf717e076bdda7a9ac6e70b" dependencies = [ "cpal", "dasp_sample", @@ -13375,7 +14212,7 @@ dependencies = [ "num-rational", "rtrb", "symphonia", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -13387,13 +14224,13 @@ dependencies = [ "ctor", "gpui", "log", - "rand 0.9.1", + "rand 0.9.2", "rayon", + "regex", "smallvec", + "sum_tree", "unicode-segmentation", - "workspace-hack", - "zed-sum-tree", - "zed-util", + "util", "zlog", ] @@ -13411,22 +14248,21 @@ dependencies = [ "async-tungstenite", "base64 0.22.1", "chrono", + "collections", "futures 0.3.31", "gpui", "parking_lot", "proto", - "rand 0.9.1", + "rand 0.9.2", "rsa", "serde", "serde_json", "sha2", - "strum 0.27.1", + "strum 0.27.2", "tracing", - "workspace-hack", - "zed-collections", - "zed-util", + "util", "zlog", - "zstd", + "zstd 0.11.2+zstd.1.5.2", ] [[package]] @@ -13460,6 +14296,7 @@ name = "rules_library" version = "0.1.0" dependencies = [ "anyhow", + "collections", "editor", "gpui", "language", @@ -13475,10 +14312,8 @@ dependencies = [ "theme", "title_bar", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -13509,9 +14344,9 @@ dependencies = [ [[package]] name = "rust-embed" -version = "8.7.0" +version = "8.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5fbc0ee50fcb99af7cebb442e5df7b5b45e9460ffa3f8f549cd26b862bec49d" +checksum = "025908b8682a26ba8d12f6f2d66b987584a4a87bc024abc5bbc12553a8cd178a" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -13520,22 +14355,22 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "8.7.0" +version = "8.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf418c9a2e3f6663ca38b8a7134cc2c2167c9d69688860e8961e3faa731702e" +checksum = "6065f1a4392b71819ec1ea1df1120673418bf386f50de1d6f54204d836d4349c" dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.101", + "syn 2.0.106", "walkdir", ] [[package]] name = "rust-embed-utils" -version = "8.7.0" +version = "8.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d55b95147fe01265d06b3955db798bdaed52e60e2211c41137701b3aba8e21" +checksum = "f6cc0c81648b20b70c491ff8cce00c1c3b223bb8ed2b5d41f0e54c6c4c0a3594" dependencies = [ "globset", "sha2", @@ -13554,9 +14389,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.37.1" +version = "1.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faa7de2ba56ac291bd90c6b9bece784a52ae1411f9506544b3eae36dd2356d50" +checksum = "35affe401787a9bd846712274d97654355d21b2a2c092a3139aabe31e9022282" dependencies = [ "arrayvec", "borsh", @@ -13570,9 +14405,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.26" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustc-hash" @@ -13597,9 +14432,9 @@ dependencies = [ [[package]] name = "rustfft" -version = "6.4.0" +version = "6.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6f140db74548f7c9d7cce60912c9ac414e74df5e718dc947d514b051b42f3f4" +checksum = "21db5f9893e91f41798c88680037dba611ca6674703c1a18601b01a72c8adb89" dependencies = [ "num-complex", "num-integer", @@ -13615,8 +14450,8 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.0", - "errno 0.3.11", + "bitflags 2.9.4", + "errno 0.3.14", "libc", "linux-raw-sys 0.4.15", "windows-sys 0.59.0", @@ -13624,15 +14459,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.7" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.0", - "errno 0.3.11", + "bitflags 2.9.4", + "errno 0.3.14", "libc", - "linux-raw-sys 0.9.4", - "windows-sys 0.59.0", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.2", ] [[package]] @@ -13642,7 +14477,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fc84bf7e9aa16c4f2c758f27412dc9841341e16aa682d9c7ac308fe3ee12056" dependencies = [ "once_cell", - "rustix 1.0.7", + "rustix 1.1.2", ] [[package]] @@ -13651,9 +14486,9 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de16c7c59892b870a6336f185dc10943517f1327447096bbb7bb32cd85e2393" dependencies = [ - "errno 0.3.11", + "errno 0.3.14", "libc", - "rustix 1.0.7", + "rustix 1.1.2", ] [[package]] @@ -13670,16 +14505,16 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.26" +version = "0.23.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df51b5869f3a441595eac5e8ff14d486ff285f7b8c0df8770e49c3b56351f0f0" +checksum = "751e04a496ca00bb97a5e043158d23d66b5aabf2e1d5aa2a0aaebb1aafe6f82c" dependencies = [ "aws-lc-rs", "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.1", + "rustls-webpki 0.103.7", "subtle", "zeroize", ] @@ -13698,14 +14533,14 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.2.0", + "security-framework 3.5.1", ] [[package]] @@ -13738,20 +14573,20 @@ dependencies = [ [[package]] name = "rustls-platform-verifier" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5467026f437b4cb2a533865eaa73eb840019a0916f4b9ec563c6e617e086c9" +checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" dependencies = [ "core-foundation 0.10.0", "core-foundation-sys", "jni", "log", "once_cell", - "rustls 0.23.26", - "rustls-native-certs 0.8.1", + "rustls 0.23.33", + 
"rustls-native-certs 0.8.2", "rustls-platform-verifier-android", - "rustls-webpki 0.103.1", - "security-framework 3.2.0", + "rustls-webpki 0.103.7", + "security-framework 3.5.1", "security-framework-sys", "webpki-root-certs", "windows-sys 0.59.0", @@ -13775,9 +14610,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.1" +version = "0.103.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fef8b8769aaccf73098557a87cd1816b4f9c7c16811c9c77142aa695c16f2c03" +checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf" dependencies = [ "aws-lc-rs", "ring", @@ -13787,9 +14622,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rustybuzz" @@ -13797,7 +14632,7 @@ version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfb9cf8877777222e4a3bc7eb247e398b56baba500c38c1c46842431adc8b55c" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "bytemuck", "libm", "smallvec", @@ -13814,7 +14649,7 @@ version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3c7c96f8a08ee34eff8857b11b49b07d71d1c3f4e88f8a88d4c9e9f90b1702" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "bytemuck", "core_maths", "log", @@ -13862,11 +14697,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -13878,8 +14713,7 @@ dependencies = [ "chrono", "futures 0.3.31", "parking_lot", - "rand 0.9.1", - "workspace-hack", + "rand 0.9.2", ] [[package]] @@ -13889,11 +14723,10 @@ dependencies = [ "anyhow", "clap", "env_logger 0.11.8", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "theme", - "workspace-hack", ] [[package]] @@ -13910,12 +14743,12 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe8c9d1c68d67dd9f97ecbc6f932b60eb289c5dbddd8aa1405484a8fd2fcd984" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", - "indexmap 2.9.0", + "indexmap 2.11.4", "ref-cast", "schemars_derive", "serde", @@ -13924,14 +14757,14 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ca9fcb757952f8e8629b9ab066fc62da523c46c2b247b1708a3be06dd82530b" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -13948,9 +14781,9 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scratch" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f6280af86e5f559536da57a45ebc84948833b3bee313a7dd25232e09c878a52" +checksum = 
"d68f2ec51b097e4c1a75b681a8bec621909b5e91f15bb7b840c4f2f7b01148b2" [[package]] name = "screencapturekit" @@ -13992,7 +14825,7 @@ checksum = "1783eabc414609e28a5ba76aee5ddd52199f7107a0b24c2e9746a1ecc34a683d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -14027,7 +14860,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -14052,7 +14885,7 @@ dependencies = [ "serde_json", "sqlx", "strum 0.26.3", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "url", @@ -14069,15 +14902,15 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.101", + "syn 2.0.106", "unicode-ident", ] [[package]] name = "sea-query" -version = "0.32.4" +version = "0.32.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d99447c24da0cded00089e2021e1624af90878c65f7534319448d01da3df869d" +checksum = "8a5d1c518eaf5eda38e5773f902b26ab6d5e9e9e2bb2349ca6c64cf96f80448c" dependencies = [ "bigdecimal", "chrono", @@ -14117,15 +14950,17 @@ version = "0.1.0" dependencies = [ "any_vec", "anyhow", - "bitflags 2.9.0", + "bitflags 2.9.4", "client", + "collections", "editor", "futures 0.3.31", "gpui", "language", + "lsp", "menu", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", @@ -14133,10 +14968,9 @@ dependencies = [ "theme", "ui", "unindent", + "util", + "util_macros", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -14160,7 +14994,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -14169,11 +15003,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.2.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.10.0", "core-foundation-sys", "libc", @@ -14182,9 +15016,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -14196,13 +15030,22 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f7d95a54511e0c7be3f51e8867aa8cf35148d7b9445d44de2f943e2b206e749" +[[package]] +name = "semantic_version" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde", +] + [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] @@ -14213,9 +15056,9 @@ checksum = "1bc711410fbe7399f390ca1c3b60ad0f53f80e95c5eb935e52268a0e2cd49acc" [[package]] name = "serde" -version = "1.0.221" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "341877e04a22458705eb4e131a1508483c877dca2792b3781d4e5d8a6019ec43" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ "serde_core", "serde_derive", @@ -14223,22 +15066,22 @@ dependencies = [ [[package]] name = "serde_core" -version = "1.0.221" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c459bc0a14c840cb403fc14b148620de1e0778c96ecd6e0c8c3cacb6d8d00fe" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.221" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6185cf75117e20e62b1ff867b9518577271e58abe0037c40bb4794969355ab0" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -14249,7 +15092,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -14263,14 +15106,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56177480b00303e689183f110b4e727bb4211d692c62d4fcd16d02be93077d40" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.11.4", "itoa", "memchr", "ryu", + "serde", "serde_core", ] @@ -14280,7 +15124,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e033097bf0d2b59a62b42c18ebbb797503839b26afdda2c4e1415cb6c813540" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.11.4", "itoa", "memchr", "ryu", @@ -14289,12 +15133,13 @@ dependencies = [ [[package]] name = "serde_path_to_error" -version = "0.1.17" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" dependencies = [ "itoa", "serde", + "serde_core", ] [[package]] @@ -14305,16 +15150,36 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", ] [[package]] name = "serde_spanned" -version = "0.6.8" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_stacker" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "d4936375d50c4be7eff22293a9344f8e46f323ed2b3c243e52f89138d9bb0f4a" dependencies = [ "serde", + "serde_core", + "stacker", ] [[package]] @@ -14331,18 +15196,18 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.13.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bf65a400f8f66fb7b0552869ad70157166676db75ed8181f8104ea91cf9d0b42" +checksum = "6093cd8c01b25262b84927e0f7151692158fab02d961e04c979d3903eba7ecc5" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.9.0", + "indexmap 2.11.4", "schemars 0.9.0", - "serde", - "serde_derive", + "schemars 1.0.4", + "serde_core", "serde_json", "serde_with_macros", "time", @@ -14350,21 +15215,21 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.13.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81679d9ed988d5e9a5e6531dc3f2c28efbd639cbd1dfb628df08edea6004da77" +checksum = "a7e6c180db0816026a61afa1cff5344fb7ebded7e4d3062772179f2501481c27" dependencies = [ - "darling", + "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "serial2" -version = "0.2.29" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d1d08630509d69f90eff4afcd02c3bd974d979225cbd815ff5942351b14375" +checksum = "8cc76fa68e25e771492ca1e3c53d447ef0be3093e05cd3b47f4b712ba10c6f3c" dependencies = [ "cfg-if", "libc", @@ -14378,9 +15243,8 @@ dependencies = [ "db", "gpui", "serde_json", + "util", "uuid", - "workspace-hack", - "zed-util", ] [[package]] @@ -14388,7 +15252,8 @@ name = "settings" version = "0.1.0" dependencies = [ "anyhow", - "derive_more", + "collections", + "derive_more 0.99.20", "ec4rs", "fs", "futures 0.3.31", @@ -14400,7 +15265,7 @@ dependencies = [ "pretty_assertions", "release_channel", "rust-embed", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", @@ -14409,13 +15274,11 @@ dependencies = [ "serde_with", "settings_macros", "smallvec", - "strum 0.27.1", + "strum 0.27.2", "tree-sitter", "tree-sitter-json", "unindent", - "workspace-hack", - "zed-collections", - "zed-util", + "util", "zlog", ] @@ -14425,8 +15288,7 @@ version = "0.1.0" dependencies = [ "quote", "settings", - "syn 2.0.101", - "workspace-hack", + "syn 2.0.106", ] [[package]] @@ -14446,7 +15308,6 @@ dependencies = [ "theme", "ui", "workspace", - "workspace-hack", "zed_actions", ] @@ -14470,21 +15331,22 @@ dependencies = [ "menu", "node_runtime", "paths", + "picker", "pretty_assertions", "project", - "schemars 1.0.1", + "release_channel", + "schemars 1.0.4", "search", "serde", "session", "settings", - "strum 0.27.1", + "strum 0.27.2", "theme", "title_bar", "ui", "ui_input", + "util", "workspace", - "workspace-hack", - "zed-util", "zed_actions", "zlog", ] @@ -14508,9 +15370,9 @@ checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -14568,9 +15430,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" dependencies = [ "libc", "signal-hook-registry", @@ -14578,9 +15440,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.5" +version = "1.4.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] @@ -14634,7 +15496,7 @@ checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", "num-traits", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -14664,11 +15526,21 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" +[[package]] +name = "skiplist" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354fd282d3177c2951004953e2fdc4cb342fa159bbee8b829852b6a081c8ea1" +dependencies = [ + "rand 0.9.2", + "thiserror 2.0.17", +] + [[package]] name = "skrifa" -version = "0.26.6" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cc1aa86c26dbb1b63875a7180aa0819709b33348eb5b1491e4321fae388179d" +checksum = "8c31071dedf532758ecf3fed987cdb4bd9509f900e026ab684b4ecb81ea49841" dependencies = [ "bytemuck", "read-fonts", @@ -14676,12 +15548,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "slash_commands_example" @@ -14701,9 +15570,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" dependencies = [ "serde", ] @@ -14716,7 +15585,7 @@ checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -14725,22 +15594,28 @@ version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a33bd3e260892199c3ccfc487c88b2da2265080acb316cd920da72fdfd7c599f" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-executor", "async-fs", "async-io", - "async-lock", + "async-lock 3.4.1", "async-net", "async-process", "blocking", - "futures-lite 2.6.0", + "futures-lite 2.6.1", ] [[package]] name = "smol_str" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" +checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" + +[[package]] +name = "snap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" [[package]] name = "snippet" @@ -14748,7 +15623,6 @@ version = "0.1.0" dependencies = [ "anyhow", "smallvec", - "workspace-hack", ] [[package]] @@ -14756,6 +15630,7 @@ name = "snippet_provider" version = "0.1.0" dependencies = [ "anyhow", + "collections", "extension", "fs", "futures 0.3.31", @@ -14763,14 +15638,12 @@ dependencies = [ "indoc", "parking_lot", "paths", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", "snippet", - 
"workspace-hack", - "zed-collections", - "zed-util", + "util", ] [[package]] @@ -14786,26 +15659,55 @@ dependencies = [ "picker", "settings", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", +] + +[[package]] +name = "soa-rs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b75ae4668062b095fda87ba54118697bed601f07f6c68bf50289a25ca0c8c935" +dependencies = [ + "soa-rs-derive", +] + +[[package]] +name = "soa-rs-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c09121507da587d3434e5929ce3321162f36bd3eff403873cb163c06b176913" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + [[package]] name = "spdx" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" dependencies = [ "smallvec", ] @@ -14825,7 +15727,7 @@ version = "0.3.0+sdk-1.3.268.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", ] [[package]] @@ -14859,6 +15761,7 @@ name = "sqlez" version = "0.1.0" dependencies = [ "anyhow", + "collections", "futures 0.3.31", "indoc", "libsqlite3-sys", @@ -14867,10 +15770,8 @@ dependencies = [ "smol", "sqlformat", "thread_local", + "util", "uuid", - "workspace-hack", - "zed-collections", - "zed-util", ] [[package]] @@ -14879,8 +15780,7 @@ version = "0.1.0" dependencies = [ "sqlez", "sqlformat", - "syn 2.0.101", - "workspace-hack", + "syn 2.0.106", ] [[package]] @@ -14893,11 +15793,20 @@ dependencies = [ "unicode_categories", ] +[[package]] +name = "sqlparser" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05a528114c392209b3264855ad491fcce534b94a38771b0a0b97a79379275ce8" +dependencies = [ + "log", +] + [[package]] name = "sqlx" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" dependencies = [ "sqlx-core", "sqlx-macros", @@ -14908,9 +15817,9 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" dependencies = [ "base64 0.22.1", "bigdecimal", @@ -14919,25 +15828,25 @@ dependencies = [ "crc", "crossbeam-queue", "either", - "event-listener 5.4.0", + "event-listener 5.4.1", "futures-core", "futures-intrusive", "futures-io", 
"futures-util", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "hashlink 0.10.0", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "memchr", "once_cell", "percent-encoding", "rust_decimal", - "rustls 0.23.26", + "rustls 0.23.33", "serde", "serde_json", "sha2", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -14949,22 +15858,22 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" dependencies = [ "proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "sqlx-macros-core" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" dependencies = [ "dotenvy", "either", @@ -14980,22 +15889,21 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.101", - "tempfile", + "syn 2.0.106", "tokio", "url", ] [[package]] name = "sqlx-mysql" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "bytes 1.10.1", "chrono", @@ -15026,7 +15934,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "uuid", @@ -15035,14 +15943,14 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "chrono", "crc", @@ -15069,7 +15977,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "uuid", @@ -15078,9 +15986,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" dependencies = [ "atoi", "chrono", @@ -15096,7 +16004,7 @@ dependencies = [ "serde", "serde_urlencoded", "sqlx-core", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "url", @@ -15105,15 +16013,15 @@ dependencies = [ [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b" +checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" dependencies = [ "cc", "cfg-if", @@ -15140,7 +16048,7 @@ checksum = "172175341049678163e979d9107ca3508046d4d2a7c6682bee46ac541b17db69" dependencies = [ "proc-macro-error2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -15165,7 +16073,6 @@ dependencies = [ "gpui", "itertools 0.14.0", "smallvec", - "workspace-hack", ] [[package]] @@ -15191,12 +16098,20 @@ dependencies = [ "settings", "simplelog", "story", - "strum 0.27.1", + "strum 0.27.2", "theme", "title_bar", "ui", "workspace", - "workspace-hack", +] + +[[package]] +name = "streaming-decompression" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf6cc3b19bfb128a8ad11026086e31d3ce9ad23f8ea37354b31383a187c44cf3" +dependencies = [ + "fallible-streaming-iterator", ] [[package]] @@ -15210,10 +16125,9 @@ name = "streaming_diff" version = "0.1.0" dependencies = [ "ordered-float 2.10.1", - "rand 0.9.1", + "rand 0.9.2", "rope", - "workspace-hack", - "zed-util", + "util", ] [[package]] @@ -15239,7 +16153,7 @@ checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", - "phf_shared", + "phf_shared 0.11.3", "precomputed-hash", "serde", ] @@ -15250,8 +16164,8 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", "proc-macro2", "quote", ] @@ -15284,11 +16198,11 @@ dependencies = [ [[package]] name = "strum" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros 0.27.1", + "strum_macros 0.27.2", ] [[package]] @@ -15301,20 +16215,19 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "strum_macros" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "rustversion", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -15323,17 +16236,31 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "sum_tree" +version = "0.1.0" +dependencies = [ + "arrayvec", + "ctor", + "log", + "rand 0.9.2", + "rayon", + "zlog", +] + [[package]] name = "supermaven" version = "0.1.0" dependencies = [ "anyhow", "client", + "collections", "edit_prediction", "editor", "env_logger 0.11.8", "futures 0.3.31", "gpui", + "http_client", "language", "log", "postage", @@ -15347,10 +16274,7 @@ dependencies = [ "theme", "ui", "unicode-segmentation", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", + "util", ] [[package]] @@ -15359,26 +16283,25 @@ version = "0.1.0" dependencies = [ "anyhow", "futures 0.3.31", + "http_client", "paths", "serde", "serde_json", "smol", - "workspace-hack", - 
"zed-http-client", - "zed-util", + "util", ] [[package]] name = "sval" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cc9739f56c5d0c44a5ed45473ec868af02eb896af8c05f616673a31e1d1bb09" +checksum = "d94c4464e595f0284970fd9c7e9013804d035d4a61ab74b113242c874c05814d" [[package]] name = "sval_buffer" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39b07436a8c271b34dad5070c634d1d3d76d6776e938ee97b4a66a5e8003d0b" +checksum = "a0f46e34b20a39e6a2bf02b926983149b3af6609fd1ee8a6e63f6f340f3e2164" dependencies = [ "sval", "sval_ref", @@ -15386,18 +16309,18 @@ dependencies = [ [[package]] name = "sval_dynamic" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffcb072d857431bf885580dacecf05ed987bac931230736739a79051dbf3499b" +checksum = "03d0970e53c92ab5381d3b2db1828da8af945954d4234225f6dd9c3afbcef3f5" dependencies = [ "sval", ] [[package]] name = "sval_fmt" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f214f427ad94a553e5ca5514c95c6be84667cbc5568cce957f03f3477d03d5c" +checksum = "43e5e6e1613e1e7fc2e1a9fdd709622e54c122ceb067a60d170d75efd491a839" dependencies = [ "itoa", "ryu", @@ -15406,9 +16329,9 @@ dependencies = [ [[package]] name = "sval_json" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389ed34b32e638dec9a99c8ac92d0aa1220d40041026b625474c2b6a4d6f4feb" +checksum = "aec382f7bfa6e367b23c9611f129b94eb7daaf3d8fae45a8d0a0211eb4d4c8e6" dependencies = [ "itoa", "ryu", @@ -15417,9 +16340,9 @@ dependencies = [ [[package]] name = "sval_nested" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14bae8fcb2f24fee2c42c1f19037707f7c9a29a0cda936d2188d48a961c4bb2a" +checksum = "3049d0f99ce6297f8f7d9953b35a0103b7584d8f638de40e64edb7105fa578ae" dependencies = [ "sval", "sval_buffer", @@ -15428,20 +16351,20 @@ dependencies = [ [[package]] name = "sval_ref" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a4eaea3821d3046dcba81d4b8489421da42961889902342691fb7eab491d79e" +checksum = "f88913e77506085c0a8bf6912bb6558591a960faf5317df6c1d9b227224ca6e1" dependencies = [ "sval", ] [[package]] name = "sval_serde" -version = "2.14.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "172dd4aa8cb3b45c8ac8f3b4111d644cd26938b0643ede8f93070812b87fb339" +checksum = "f579fd7254f4be6cd7b450034f856b78523404655848789c451bacc6aa8b387d" dependencies = [ - "serde", + "serde_core", "sval", "sval_nested", ] @@ -15462,7 +16385,6 @@ dependencies = [ "multi_buffer", "ui", "workspace", - "workspace-hack", ] [[package]] @@ -15477,9 +16399,9 @@ dependencies = [ [[package]] name = "swash" -version = "0.2.2" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fae9a562c7b46107d9c78cd78b75bbe1e991c16734c0aee8ff0ee711fb8b620a" +checksum = "47846491253e976bdd07d0f9cc24b7daf24720d11309302ccbbc6e6b6e53550a" dependencies = [ "skrifa", "yazi", @@ -15488,9 +16410,9 @@ dependencies = [ [[package]] name = "symphonia" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"815c942ae7ee74737bb00f965fa5b5a2ac2ce7b6c01c0cc169bbeaf7abd5f5a9" +checksum = "5773a4c030a19d9bfaa090f49746ff35c75dfddfa700df7a5939d5e076a57039" dependencies = [ "lazy_static", "symphonia-bundle-flac", @@ -15507,9 +16429,9 @@ dependencies = [ [[package]] name = "symphonia-bundle-flac" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e34f34298a7308d4397a6c7fbf5b84c5d491231ce3dd379707ba673ab3bd97" +checksum = "c91565e180aea25d9b80a910c546802526ffd0072d0b8974e3ebe59b686c9976" dependencies = [ "log", "symphonia-core", @@ -15519,9 +16441,9 @@ dependencies = [ [[package]] name = "symphonia-bundle-mp3" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c01c2aae70f0f1fb096b6f0ff112a930b1fb3626178fba3ae68b09dce71706d4" +checksum = "4872dd6bb56bf5eac799e3e957aa1981086c3e613b27e0ac23b176054f7c57ed" dependencies = [ "lazy_static", "log", @@ -15531,9 +16453,9 @@ dependencies = [ [[package]] name = "symphonia-codec-aac" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdbf25b545ad0d3ee3e891ea643ad115aff4ca92f6aec472086b957a58522f70" +checksum = "4c263845aa86881416849c1729a54c7f55164f8b96111dba59de46849e73a790" dependencies = [ "lazy_static", "log", @@ -15542,9 +16464,9 @@ dependencies = [ [[package]] name = "symphonia-codec-pcm" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f395a67057c2ebc5e84d7bb1be71cce1a7ba99f64e0f0f0e303a03f79116f89b" +checksum = "4e89d716c01541ad3ebe7c91ce4c8d38a7cf266a3f7b2f090b108fb0cb031d95" dependencies = [ "log", "symphonia-core", @@ -15552,9 +16474,9 @@ dependencies = [ [[package]] name = "symphonia-codec-vorbis" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a98765fb46a0a6732b007f7e2870c2129b6f78d87db7987e6533c8f164a9f30" +checksum = "f025837c309cd69ffef572750b4a2257b59552c5399a5e49707cc5b1b85d1c73" dependencies = [ "log", "symphonia-core", @@ -15563,9 +16485,9 @@ dependencies = [ [[package]] name = "symphonia-core" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "798306779e3dc7d5231bd5691f5a813496dc79d3f56bf82e25789f2094e022c3" +checksum = "ea00cc4f79b7f6bb7ff87eddc065a1066f3a43fe1875979056672c9ef948c2af" dependencies = [ "arrayvec", "bitflags 1.3.2", @@ -15576,9 +16498,9 @@ dependencies = [ [[package]] name = "symphonia-format-isomp4" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abfdf178d697e50ce1e5d9b982ba1b94c47218e03ec35022d9f0e071a16dc844" +checksum = "243739585d11f81daf8dac8d9f3d18cc7898f6c09a259675fc364b382c30e0a5" dependencies = [ "encoding_rs", "log", @@ -15589,9 +16511,9 @@ dependencies = [ [[package]] name = "symphonia-format-ogg" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ada3505789516bcf00fc1157c67729eded428b455c27ca370e41f4d785bfa931" +checksum = "2b4955c67c1ed3aa8ae8428d04ca8397fbef6a19b2b051e73b5da8b1435639cb" dependencies = [ "log", "symphonia-core", @@ -15601,9 +16523,9 @@ dependencies = [ [[package]] name = "symphonia-format-riff" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f7be232f962f937f4b7115cbe62c330929345434c834359425e043bfd15f50" 
+checksum = "c2d7c3df0e7d94efb68401d81906eae73c02b40d5ec1a141962c592d0f11a96f" dependencies = [ "extended", "log", @@ -15613,9 +16535,9 @@ dependencies = [ [[package]] name = "symphonia-metadata" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc622b9841a10089c5b18e99eb904f4341615d5aa55bbf4eedde1be721a4023c" +checksum = "36306ff42b9ffe6e5afc99d49e121e0bd62fe79b9db7b9681d48e29fa19e6b16" dependencies = [ "encoding_rs", "lazy_static", @@ -15625,9 +16547,9 @@ dependencies = [ [[package]] name = "symphonia-utils-xiph" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "484472580fa49991afda5f6550ece662237b00c6f562c7d9638d1b086ed010fe" +checksum = "ee27c85ab799a338446b68eec77abf42e1a6f1bb490656e121c6e27bfbab9f16" dependencies = [ "symphonia-core", "symphonia-metadata", @@ -15646,9 +16568,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -15681,13 +16603,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -15705,7 +16627,7 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec7dddc5f0fee506baf8b9fdb989e242f17e4b11c61dfbb0635b705217199eea" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "enum-as-inner", "libc", @@ -15719,7 +16641,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01198a2debb237c62b6826ec7081082d951f46dbb64b0e8c7649a452230d1dfc" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "enum-as-inner", "libc", @@ -15741,6 +16663,20 @@ dependencies = [ "windows 0.57.0", ] +[[package]] +name = "sysinfo" +version = "0.37.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16607d5caffd1c07ce073528f9ed972d88db15dd44023fa57142963be3feb11f" +dependencies = [ + "libc", + "memchr", + "ntapi", + "objc2-core-foundation", + "objc2-io-kit", + "windows 0.61.3", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -15758,7 +16694,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -15792,7 +16728,7 @@ dependencies = [ "cfg-expr", "heck 0.5.0", "pkg-config", - "toml 0.8.20", + "toml 0.8.23", "version-compare", ] @@ -15802,7 +16738,7 @@ version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc4592f674ce18521c2a81483873a49596655b179f71c5e05d10c1fe66c78745" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "cap-fs-ext", "cap-std", "fd-lock", @@ -15823,8 +16759,7 @@ dependencies = [ "pciid-parser", "release_channel", "serde", - "sysinfo", - "workspace-hack", + "sysinfo 
0.37.2", ] [[package]] @@ -15832,6 +16767,7 @@ name = "tab_switcher" version = "0.1.0" dependencies = [ "anyhow", + "collections", "ctor", "editor", "fuzzy", @@ -15840,17 +16776,15 @@ dependencies = [ "menu", "picker", "project", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "settings", "smol", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zlog", ] @@ -15904,15 +16838,16 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "target-lexicon" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" +checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" [[package]] name = "task" version = "0.1.0" dependencies = [ "anyhow", + "collections", "futures 0.3.31", "gpui", "hex", @@ -15920,15 +16855,13 @@ dependencies = [ "parking_lot", "pretty_assertions", "proto", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", "sha2", "shellexpand 2.1.2", - "workspace-hack", - "zed-collections", - "zed-util", + "util", "zed_actions", ] @@ -15937,6 +16870,7 @@ name = "tasks_ui" version = "0.1.0" dependencies = [ "anyhow", + "collections", "editor", "file_icons", "fuzzy", @@ -15952,10 +16886,8 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -15967,30 +16899,28 @@ dependencies = [ "serde", "serde_json", "telemetry_events", - "workspace-hack", ] [[package]] name = "telemetry_events" version = "0.1.0" dependencies = [ + "semantic_version", "serde", "serde_json", - "workspace-hack", - "zed-semantic-version", ] [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand 2.3.0", - "getrandom 0.3.2", + "getrandom 0.3.4", "once_cell", - "rustix 1.0.7", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] @@ -16019,38 +16949,37 @@ version = "0.1.0" dependencies = [ "alacritty_terminal", "anyhow", + "collections", "futures 0.3.31", "gpui", "itertools 0.14.0", "libc", "log", - "rand 0.9.1", + "rand 0.9.2", "regex", "release_channel", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "settings", "smol", - "sysinfo", + "sysinfo 0.37.2", "task", "theme", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", "urlencoding", - "windows 0.61.1", - "workspace-hack", - "zed-collections", - "zed-util", + "util", + "windows 0.61.3", ] [[package]] name = "terminal_size" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" dependencies = [ - "rustix 1.0.7", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.60.2", ] [[package]] @@ -16062,6 +16991,7 @@ dependencies = [ "async-recursion", "breadcrumbs", "client", + "collections", "db", "dirs 4.0.0", "editor", @@ -16072,9 +17002,9 @@ dependencies = [ "log", "pretty_assertions", "project", - "rand 0.9.1", + "rand 0.9.2", "regex", - "schemars 1.0.1", + "schemars 1.0.4", "search", "serde", 
"serde_json", @@ -16085,10 +17015,8 @@ dependencies = [ "terminal", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zed_actions", ] @@ -16098,20 +17026,18 @@ version = "0.1.0" dependencies = [ "anyhow", "clock", + "collections", "ctor", "gpui", + "http_client", "log", "parking_lot", "postage", - "rand 0.9.1", - "regex", + "rand 0.9.2", "rope", "smallvec", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", + "sum_tree", + "util", "zlog", ] @@ -16120,25 +17046,24 @@ name = "theme" version = "0.1.0" dependencies = [ "anyhow", - "derive_more", + "collections", + "derive_more 0.99.20", "fs", "futures 0.3.31", "gpui", "log", "palette", "parking_lot", - "schemars 1.0.1", + "refineable", + "schemars 1.0.4", "serde", "serde_json", "serde_json_lenient", "settings", - "strum 0.27.1", - "thiserror 2.0.12", + "strum 0.27.2", + "thiserror 2.0.17", + "util", "uuid", - "workspace-hack", - "zed-collections", - "zed-refineable", - "zed-util", ] [[package]] @@ -16150,7 +17075,6 @@ dependencies = [ "fs", "gpui", "theme", - "workspace-hack", ] [[package]] @@ -16159,19 +17083,18 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", + "collections", "gpui", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "palette", "serde", "serde_json", "serde_json_lenient", "simplelog", - "strum 0.27.1", + "strum 0.27.2", "theme", "vscode_theme", - "workspace-hack", - "zed-collections", ] [[package]] @@ -16188,9 +17111,8 @@ dependencies = [ "telemetry", "theme", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", "zed_actions", ] @@ -16205,11 +17127,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -16220,39 +17142,41 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] name = "tiff" -version = "0.9.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" +checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" dependencies = [ + "fax", "flate2", - "jpeg-decoder", + "half", + "quick-error", "weezl", + "zune-jpeg", ] [[package]] @@ -16271,9 +17195,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.41" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -16288,15 +17212,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -16310,7 +17234,6 @@ dependencies = [ "core-foundation-sys", "sys-locale", "time", - "workspace-hack", ] [[package]] @@ -16333,7 +17256,7 @@ dependencies = [ "bytemuck", "cfg-if", "log", - "png", + "png 0.17.16", "tiny-skia-path", ] @@ -16363,9 +17286,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -16383,9 +17306,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -16406,14 +17329,16 @@ dependencies = [ "chrono", "client", "cloud_llm_client", + "collections", "db", "gpui", + "http_client", "notifications", "pretty_assertions", "project", "remote", "rpc", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "settings", "smallvec", @@ -16422,31 +17347,27 @@ dependencies = [ "theme", "tree-sitter-md", "ui", - "windows 0.61.1", + "util", + "windows 0.61.3", "workspace", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", "zed_actions", ] [[package]] name = "tokio" -version = "1.44.2" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes 1.10.1", "libc", - "mio 1.0.3", + "mio 1.1.0", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.6.1", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -16462,13 +17383,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -16497,7 +17418,7 @@ version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls 0.23.26", + "rustls 0.23.33", 
"tokio", ] @@ -16557,7 +17478,7 @@ checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" dependencies = [ "futures-util", "log", - "rustls 0.23.26", + "rustls 0.23.33", "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", @@ -16566,14 +17487,15 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.14" +version = "0.7.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" dependencies = [ "bytes 1.10.1", "futures-core", "futures-io", "futures-sink", + "futures-util", "pin-project-lite", "tokio", ] @@ -16589,44 +17511,95 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.20" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" +dependencies = [ + "indexmap 2.11.4", + "serde_core", + "serde_spanned 1.0.3", + "toml_datetime 0.7.3", + "toml_parser", + "toml_writer", + "winnow", ] [[package]] name = "toml_datetime" -version = "0.6.9" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] +[[package]] +name = "toml_datetime" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + [[package]] name = "toml_edit" -version = "0.22.26" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.11.4", "serde", - "serde_spanned", - "toml_datetime", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", "toml_write", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.23.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" +dependencies = [ + "indexmap 2.11.4", + "toml_datetime 0.7.3", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +dependencies = [ + "winnow", +] + [[package]] name = "toml_write" -version = "0.1.1" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "toml_writer" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" +checksum = 
"df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "toolchain_selector" @@ -16644,9 +17617,8 @@ dependencies = [ "picker", "project", "ui", + "util", "workspace", - "workspace-hack", - "zed-util", ] [[package]] @@ -16710,7 +17682,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "bytes 1.10.1", "futures-core", "futures-util", @@ -16723,6 +17695,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.4", + "bytes 1.10.1", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -16749,20 +17739,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -16818,7 +17808,7 @@ checksum = "70977707304198400eb4835a78f6a9f928bf41bba420deb8fdb175cd965d77a7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -17140,11 +18130,30 @@ dependencies = [ "http 1.3.1", "httparse", "log", - "rand 0.9.1", - "rustls 0.23.26", + "rand 0.9.2", + "rustls 0.23.33", + "rustls-pki-types", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +dependencies = [ + "bytes 1.10.1", + "data-encoding", + "http 1.3.1", + "httparse", + "log", + "rand 0.9.2", + "rustls 0.23.33", "rustls-pki-types", "sha1", - "thiserror 2.0.12", + "thiserror 2.0.17", "utf-8", ] @@ -17162,9 +18171,9 @@ checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "ucd-trie" @@ -17210,7 +18219,7 @@ dependencies = [ "serde", "thiserror 1.0.69", "tracing", - "yoke", + "yoke 0.7.5", ] [[package]] @@ -17221,21 +18230,20 @@ dependencies = [ "component", "documented", "gpui", - "gpui-macros", + "gpui_macros", "icons", "itertools 0.14.0", "menu", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "settings", "smallvec", "story", - "strum 0.27.1", + "strum 0.27.2", "theme", "ui_macros", - "windows 0.61.1", - "workspace-hack", - "zed-util", + "util", + "windows 0.61.3", ] 
[[package]] @@ -17244,14 +18252,11 @@ version = "0.1.0" dependencies = [ "component", "editor", - "fuzzy", "gpui", "menu", - "picker", "settings", "theme", "ui", - "workspace-hack", ] [[package]] @@ -17260,9 +18265,8 @@ version = "0.1.0" dependencies = [ "component", "quote", - "syn 2.0.101", + "syn 2.0.106", "ui", - "workspace-hack", ] [[package]] @@ -17276,7 +18280,6 @@ dependencies = [ "theme", "ui", "workspace", - "workspace-hack", ] [[package]] @@ -17317,9 +18320,9 @@ checksum = "ce61d488bcdc9bc8b5d1772c404828b17fc481c0a582b5581e95fb233aef503e" [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" [[package]] name = "unicode-linebreak" @@ -17342,6 +18345,15 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" +[[package]] +name = "unicode-reverse" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b6f4888ebc23094adfb574fdca9fdc891826287a6397d2cd28802ffd6f20c76" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "unicode-script" version = "0.5.7" @@ -17362,9 +18374,9 @@ checksum = "b1d386ff53b415b7fe27b50bb44679e2cc4660272694b7b6f3326d8480823a94" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unicode-xid" @@ -17390,11 +18402,17 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "unty" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" + [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -17436,38 +18454,82 @@ dependencies = [ ] [[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + 
+[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "util" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-fs", + "async_zip", + "collections", + "command-fds", + "dirs 4.0.0", + "dunce", + "futures 0.3.31", + "futures-lite 1.13.0", + "git2", + "globset", + "indoc", + "itertools 0.14.0", + "libc", + "log", + "nix 0.29.0", + "pretty_assertions", + "rand 0.9.2", + "regex", + "rust-embed", + "schemars 1.0.4", + "serde", + "serde_json", + "serde_json_lenient", + "shlex", + "smol", + "take-until", + "tempfile", + "tendril", + "unicase", + "util_macros", + "walkdir", + "which 6.0.3", +] + +[[package]] +name = "util_macros" +version = "0.1.0" +dependencies = [ + "perf", + "quote", + "syn 2.0.106", +] [[package]] name = "uuid" -version = "1.16.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.4", + "js-sys", "serde", "sha1_smol", + "wasm-bindgen", ] [[package]] @@ -17483,9 +18545,9 @@ dependencies = [ [[package]] name = "v_frame" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6f32aaa24bacd11e488aa9ba66369c7cd514885742c9fe08cfe85884db3e92b" +checksum = "666b7727c8875d6ab5db9533418d7c764233ac9c0cff1d469aec8fa127597be2" dependencies = [ "aligned-vec", "num-traits", @@ -17545,10 +18607,9 @@ name = "vercel" version = "0.1.0" dependencies = [ "anyhow", - "schemars 1.0.1", + "schemars 1.0.4", "serde", - "strum 0.27.1", - "workspace-hack", + "strum 0.27.2", ] [[package]] @@ -17571,12 +18632,14 @@ dependencies = [ "assets", "async-compat", "async-trait", + "collections", "command_palette", "command_palette_hooks", "db", "editor", "env_logger 0.11.8", "futures 0.3.31", + "fuzzy", "git_ui", "gpui", "indoc", @@ -17588,12 +18651,13 @@ dependencies = [ "multi_buffer", "nvim-rs", "parking_lot", + "perf", "picker", "project", "project_panel", "regex", "release_channel", - "schemars 1.0.1", + "schemars 1.0.4", "search", "serde", "serde_json", @@ -17603,13 +18667,10 @@ dependencies = [ "theme", "tokio", "ui", + "util", + "util_macros", "vim_mode_setting", "workspace", - "workspace-hack", - "zed-collections", - "zed-perf", - "zed-util", - "zed-util-macros", "zed_actions", ] @@ -17619,9 +18680,14 @@ version = "0.1.0" dependencies = [ "gpui", "settings", - "workspace-hack", ] +[[package]] +name = "virtue" +version = "0.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" + [[package]] name = "vscode_theme" version = "0.2.0" @@ -17664,7 +18730,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5924018406ce0063cd67f8e008104968b74b563ee1b85dde3ed1f7cb87d3dbd" dependencies = [ "arrayvec", - "bitflags 2.9.0", + "bitflags 2.9.4", "cursor-icon", "log", "memchr", @@ -17726,17 +18792,17 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = 
"0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt 0.39.0", + "wit-bindgen 0.46.0", ] [[package]] @@ -17747,35 +18813,36 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", + "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" dependencies = [ "cfg-if", "js-sys", @@ -17786,9 +18853,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -17796,22 +18863,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" dependencies = [ "unicode-ident", ] @@ -17852,7 +18919,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fd83062c17b9f4985d438603cde0a5e8c5c8198201a6937f778b607924c7da2" dependencies = [ "anyhow", - "indexmap 2.9.0", + "indexmap 2.11.4", "serde", "serde_derive", "serde_json", @@ -17870,7 +18937,7 @@ dependencies = [ "anyhow", "auditable-serde", "flate2", - "indexmap 2.9.0", + "indexmap 
2.11.4", "serde", "serde_derive", "serde_json", @@ -17899,8 +18966,8 @@ version = "0.201.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84e5df6dba6c0d7fafc63a450f1738451ed7a0b52295d83e868218fa286bf708" dependencies = [ - "bitflags 2.9.0", - "indexmap 2.9.0", + "bitflags 2.9.4", + "indexmap 2.11.4", "semver", ] @@ -17910,9 +18977,9 @@ version = "0.221.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d06bfa36ab3ac2be0dee563380147a5b81ba10dd8885d7fbbc9eb574be67d185" dependencies = [ - "bitflags 2.9.0", - "hashbrown 0.15.3", - "indexmap 2.9.0", + "bitflags 2.9.4", + "hashbrown 0.15.5", + "indexmap 2.11.4", "semver", "serde", ] @@ -17923,9 +18990,9 @@ version = "0.227.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" dependencies = [ - "bitflags 2.9.0", - "hashbrown 0.15.3", - "indexmap 2.9.0", + "bitflags 2.9.4", + "hashbrown 0.15.5", + "indexmap 2.11.4", "semver", ] @@ -17948,18 +19015,18 @@ checksum = "11976a250672556d1c4c04c6d5d7656ac9192ac9edc42a4587d6c21460010e69" dependencies = [ "anyhow", "async-trait", - "bitflags 2.9.0", + "bitflags 2.9.4", "bumpalo", "cc", "cfg-if", "encoding_rs", "hashbrown 0.14.5", - "indexmap 2.9.0", + "indexmap 2.11.4", "libc", "log", - "mach2 0.4.2", + "mach2 0.4.3", "memfd", - "object", + "object 0.36.7", "once_cell", "paste", "postcard", @@ -17972,7 +19039,7 @@ dependencies = [ "serde_derive", "smallvec", "sptr", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", "trait-variant", "wasmparser 0.221.3", "wasmtime-asm-macros", @@ -18030,7 +19097,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wasmtime-component-util", "wasmtime-wit-bindgen", "wit-parser 0.221.3", @@ -18055,12 +19122,12 @@ dependencies = [ "cranelift-entity", "cranelift-frontend", "cranelift-native", - "gimli", + "gimli 0.31.1", "itertools 0.12.1", "log", - "object", + "object 0.36.7", "smallvec", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", "thiserror 1.0.69", "wasmparser 0.221.3", "wasmtime-environ", @@ -18077,17 +19144,17 @@ dependencies = [ "cpp_demangle", "cranelift-bitset", "cranelift-entity", - "gimli", - "indexmap 2.9.0", + "gimli 0.31.1", + "indexmap 2.11.4", "log", - "object", + "object 0.36.7", "postcard", "rustc-demangle", "semver", "serde", "serde_derive", "smallvec", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", "wasm-encoder 0.221.3", "wasmparser 0.221.3", "wasmprinter", @@ -18144,7 +19211,7 @@ checksum = "86ff86db216dc0240462de40c8290887a613dddf9685508eb39479037ba97b5b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -18155,7 +19222,7 @@ checksum = "8d1be69bfcab1bdac74daa7a1f9695ab992b9c8e21b9b061e7d66434097e0ca4" dependencies = [ "anyhow", "async-trait", - "bitflags 2.9.0", + "bitflags 2.9.4", "bytes 1.10.1", "cap-fs-ext", "cap-net-ext", @@ -18186,9 +19253,9 @@ checksum = "fdbabfb8f20502d5e1d81092b9ead3682ae59988487aafcd7567387b7a43cf8f" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", - "object", - "target-lexicon 0.13.2", + "gimli 0.31.1", + "object 0.36.7", + "target-lexicon 0.13.3", "wasmparser 0.221.3", "wasmtime-cranelift", "wasmtime-environ", @@ -18203,7 +19270,7 @@ checksum = "8358319c2dd1e4db79e3c1c5d3a5af84956615343f9f89f4e4996a36816e06e6" dependencies = [ "anyhow", "heck 0.5.0", - "indexmap 2.9.0", + "indexmap 2.11.4", "wit-parser 0.221.3", ] @@ -18224,20 +19291,19 @@ dependencies = [ "futures 
0.3.31", "gpui", "parking_lot", - "rand 0.9.1", - "workspace-hack", + "rand 0.9.2", "zlog", ] [[package]] name = "wayland-backend" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7208998eaa3870dad37ec8836979581506e0c5c64c20c9e79e9d2a10d6f47bf" +checksum = "673a33c33048a5ade91a6b139580fa174e19fb0d23f396dca9fa15f2e1e49b35" dependencies = [ "cc", "downcast-rs", - "rustix 0.38.44", + "rustix 1.1.2", "scoped-tls", "smallvec", "wayland-sys", @@ -18245,23 +19311,23 @@ dependencies = [ [[package]] name = "wayland-client" -version = "0.31.8" +version = "0.31.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2120de3d33638aaef5b9f4472bff75f07c56379cf76ea320bd3a3d65ecaf73f" +checksum = "c66a47e840dc20793f2264eb4b3e4ecb4b75d91c0dd4af04b456128e0bdd449d" dependencies = [ - "bitflags 2.9.0", - "rustix 0.38.44", + "bitflags 2.9.4", + "rustix 1.1.2", "wayland-backend", "wayland-scanner", ] [[package]] name = "wayland-cursor" -version = "0.31.8" +version = "0.31.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a93029cbb6650748881a00e4922b076092a6a08c11e7fbdb923f064b23968c5d" +checksum = "447ccc440a881271b19e9989f75726d60faa09b95b0200a9b7eb5cc47c3eeb29" dependencies = [ - "rustix 0.38.44", + "rustix 1.1.2", "wayland-client", "xcursor", ] @@ -18272,7 +19338,7 @@ version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f81f365b8b4a97f422ac0e8737c438024b5951734506b0e1d775c73030561f4" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "wayland-backend", "wayland-client", "wayland-scanner", @@ -18280,11 +19346,11 @@ dependencies = [ [[package]] name = "wayland-protocols" -version = "0.32.6" +version = "0.32.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0781cf46869b37e36928f7b432273c0995aa8aed9552c556fb18754420541efc" +checksum = "efa790ed75fbfd71283bd2521a1cfdc022aabcc28bdcff00851f9e4ae88d9901" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "wayland-backend", "wayland-client", "wayland-scanner", @@ -18296,7 +19362,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23803551115ff9ea9bce586860c5c5a971e360825a0309264102a9495a5ff479" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "wayland-backend", "wayland-client", "wayland-protocols 0.31.2", @@ -18305,20 +19371,20 @@ dependencies = [ [[package]] name = "wayland-scanner" -version = "0.31.6" +version = "0.31.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "896fdafd5d28145fce7958917d69f2fd44469b1d4e861cb5961bcbeebc6d1484" +checksum = "54cb1e9dc49da91950bdfd8b848c49330536d9d1fb03d4bfec8cae50caa50ae3" dependencies = [ "proc-macro2", - "quick-xml 0.37.4", + "quick-xml 0.37.5", "quote", ] [[package]] name = "wayland-sys" -version = "0.31.6" +version = "0.31.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbcebb399c77d5aa9fa5db874806ee7b4eba4e73650948e8f93963f128896615" +checksum = "34949b42822155826b41db8e5d0c1be3a2bd296c747577a43a3e6daefc296142" dependencies = [ "dlib", "log", @@ -18328,9 +19394,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" dependencies = [ 
"js-sys", "wasm-bindgen", @@ -18348,11 +19414,11 @@ dependencies = [ [[package]] name = "web_atoms" -version = "0.1.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "954c5a41f2bcb7314344079d0891505458cc2f4b422bdea1d5bfbe6d1a04903b" +checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414" dependencies = [ - "phf", + "phf 0.11.3", "phf_codegen", "string_cache", "string_cache_codegen", @@ -18364,10 +19430,9 @@ version = "0.1.0" dependencies = [ "anyhow", "cloud_llm_client", + "collections", "gpui", "serde", - "workspace-hack", - "zed-collections", ] [[package]] @@ -18379,12 +19444,11 @@ dependencies = [ "cloud_llm_client", "futures 0.3.31", "gpui", + "http_client", "language_model", "serde", "serde_json", "web_search", - "workspace-hack", - "zed-http-client", ] [[package]] @@ -18433,9 +19497,9 @@ dependencies = [ [[package]] name = "weezl" -version = "0.1.8" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" +checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "which" @@ -18463,11 +19527,11 @@ dependencies = [ [[package]] name = "whoami" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ - "redox_syscall 0.5.11", + "libredox", "wasite", ] @@ -18479,7 +19543,7 @@ checksum = "4b9af35bc9629c52c261465320a9a07959164928b4241980ba1cf923b9e6751d" dependencies = [ "anyhow", "async-trait", - "bitflags 2.9.0", + "bitflags 2.9.4", "thiserror 1.0.69", "tracing", "wasmtime", @@ -18497,7 +19561,7 @@ dependencies = [ "proc-macro2", "quote", "shellexpand 2.1.2", - "syn 2.0.101", + "syn 2.0.106", "witx", ] @@ -18509,7 +19573,7 @@ checksum = "08c5c473d4198e6c2d377f3809f713ff0c110cab88a0805ae099a82119ee250c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wiggle-generate", ] @@ -18531,11 +19595,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -18552,10 +19616,10 @@ checksum = "2f849ef2c5f46cb0a20af4b4487aaa239846e52e2c03f13fa3c784684552859c" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.31.1", "regalloc2", "smallvec", - "target-lexicon 0.13.2", + "target-lexicon 0.13.3", "thiserror 1.0.69", "wasmparser 0.221.3", "wasmtime-cranelift", @@ -18594,14 +19658,14 @@ dependencies = [ [[package]] name = "windows" -version = "0.61.1" +version = "0.61.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" dependencies = [ "windows-collections", - "windows-core 0.61.0", + "windows-core 0.61.2", "windows-future", - "windows-link 0.1.1", + "windows-link 0.1.3", "windows-numerics", ] @@ -18614,8 +19678,8 @@ dependencies = [ "ctrlc", "parking_lot", "rayon", 
- "thiserror 2.0.12", - "windows 0.61.1", + "thiserror 2.0.17", + "windows 0.61.3", "windows-future", ] @@ -18625,7 +19689,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" dependencies = [ - "windows-core 0.61.0", + "windows-core 0.61.2", ] [[package]] @@ -18665,25 +19729,39 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.61.0" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-core" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ - "windows-implement 0.60.0", - "windows-interface 0.59.1", - "windows-link 0.1.1", - "windows-result 0.3.2", - "windows-strings 0.4.0", + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", ] [[package]] name = "windows-future" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ - "windows-core 0.61.0", - "windows-link 0.1.1", + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", ] [[package]] @@ -18694,7 +19772,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -18705,18 +19783,18 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "windows-implement" -version = "0.60.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -18727,7 +19805,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -18738,31 +19816,31 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = 
"5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-link" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-numerics" @@ -18770,8 +19848,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ - "windows-core 0.61.0", - "windows-link 0.1.1", + "windows-core 0.61.2", + "windows-link 0.1.3", ] [[package]] @@ -18780,31 +19858,31 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" dependencies = [ - "windows-result 0.3.2", + "windows-result 0.3.4", "windows-strings 0.3.1", - "windows-targets 0.53.2", + "windows-targets 0.53.5", ] [[package]] name = "windows-registry" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1da3e436dc7653dfdf3da67332e22bff09bb0e28b0239e1624499c7830842e" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ - "windows-link 0.1.1", - "windows-result 0.3.2", - "windows-strings 0.4.0", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", ] [[package]] name = "windows-registry" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f91f87ce112ffb7275000ea98eb1940912c21c1567c9312fde20261f3eadd29" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.2.0", - "windows-result 0.4.0", - "windows-strings 0.5.0", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", ] [[package]] @@ -18827,20 +19905,20 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link 0.1.1", + "windows-link 0.1.3", ] [[package]] name = "windows-result" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link 0.2.0", + "windows-link 0.2.1", ] [[package]] @@ -18859,25 +19937,25 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" dependencies = [ - "windows-link 0.1.1", + "windows-link 0.1.3", ] [[package]] name = "windows-strings" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link 0.1.1", + "windows-link 0.1.3", ] [[package]] name = "windows-strings" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link 0.2.0", + "windows-link 0.2.1", ] [[package]] @@ -18922,16 +20000,16 @@ version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.2", + "windows-targets 0.53.5", ] [[package]] name = "windows-sys" -version = "0.61.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link 0.2.0", + "windows-link 0.2.1", ] [[package]] @@ -18982,18 +20060,28 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.2" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link 0.2.1", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" dependencies = [ - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link 0.1.3", ] [[package]] @@ -19016,9 +20104,9 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -19040,9 +20128,9 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" @@ -19064,9 +20152,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -19076,9 +20164,9 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" 
+version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" @@ -19100,9 +20188,9 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" @@ -19124,9 +20212,9 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -19148,9 +20236,9 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -19172,15 +20260,15 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" -version = "0.7.6" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63d3fcd9bba44b03821e7d699eeee959f3126dcc4aa8e4ae18ec617c2a5cea10" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -19204,16 +20292,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "winreg" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - [[package]] name = "winreg" version = "0.55.0" @@ -19226,11 +20304,11 @@ dependencies = [ [[package]] name = "winresource" -version = "0.1.20" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4a67c78ee5782c0c1cb41bebc7e12c6e79644daa1650ebbc1de5d5b08593f7" +checksum = "edcacf11b6f48dd21b9ba002f991bdd5de29b2da8cc2800412f4b80f677e4957" dependencies = [ - "toml 0.8.20", + "toml 0.8.23", "version_check", ] @@ -19246,7 +20324,7 @@ version = "0.36.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "windows-sys 0.59.0", ] @@ -19265,7 +20343,7 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"288f992ea30e6b5c531b52cdd5f3be81c148554b09ea416f058d16556ba92c27" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "wit-bindgen-rt 0.22.0", "wit-bindgen-rust-macro 0.22.0", ] @@ -19280,6 +20358,12 @@ dependencies = [ "wit-bindgen-rust-macro 0.41.0", ] +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + [[package]] name = "wit-bindgen-core" version = "0.22.0" @@ -19307,22 +20391,13 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb8738270f32a2d6739973cbbb7c1b6dd8959ce515578a6e19165853272ee64" -[[package]] -name = "wit-bindgen-rt" -version = "0.39.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.0", -] - [[package]] name = "wit-bindgen-rt" version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "futures 0.3.31", "once_cell", ] @@ -19335,7 +20410,7 @@ checksum = "d8a39a15d1ae2077688213611209849cad40e9e5cccf6e61951a425850677ff3" dependencies = [ "anyhow", "heck 0.4.1", - "indexmap 2.9.0", + "indexmap 2.11.4", "wasm-metadata 0.201.0", "wit-bindgen-core 0.22.0", "wit-component 0.201.0", @@ -19349,9 +20424,9 @@ checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" dependencies = [ "anyhow", "heck 0.5.0", - "indexmap 2.9.0", + "indexmap 2.11.4", "prettyplease", - "syn 2.0.101", + "syn 2.0.106", "wasm-metadata 0.227.1", "wit-bindgen-core 0.41.0", "wit-component 0.227.1", @@ -19366,7 +20441,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wit-bindgen-core 0.22.0", "wit-bindgen-rust 0.22.0", ] @@ -19381,7 +20456,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wit-bindgen-core 0.41.0", "wit-bindgen-rust 0.41.0", ] @@ -19393,8 +20468,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "421c0c848a0660a8c22e2fd217929a0191f14476b68962afd2af89fd22e39825" dependencies = [ "anyhow", - "bitflags 2.9.0", - "indexmap 2.9.0", + "bitflags 2.9.4", + "indexmap 2.11.4", "log", "serde", "serde_derive", @@ -19412,8 +20487,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" dependencies = [ "anyhow", - "bitflags 2.9.0", - "indexmap 2.9.0", + "bitflags 2.9.4", + "indexmap 2.11.4", "log", "serde", "serde_derive", @@ -19432,7 +20507,7 @@ checksum = "196d3ecfc4b759a8573bf86a9b3f8996b304b3732e4c7de81655f875f6efdca6" dependencies = [ "anyhow", "id-arena", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "semver", "serde", @@ -19450,7 +20525,7 @@ checksum = "896112579ed56b4a538b07a3d16e562d101ff6265c46b515ce0c701eef16b2ac" dependencies = [ "anyhow", "id-arena", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "semver", "serde", @@ -19468,7 +20543,7 @@ checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" dependencies = [ "anyhow", "id-arena", - "indexmap 2.9.0", + "indexmap 2.11.4", "log", "semver", "serde", @@ -19500,12 +20575,14 @@ dependencies = [ "call", "client", "clock", + "collections", "component", "dap", "db", "fs", "futures 0.3.31", "gpui", + "http_client", 
"itertools 0.14.0", "language", "log", @@ -19516,223 +20593,24 @@ dependencies = [ "pretty_assertions", "project", "remote", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "serde_json", "session", - "settings", - "smallvec", - "sqlez", - "strum 0.27.1", - "task", - "telemetry", - "tempfile", - "theme", - "ui", - "uuid", - "windows 0.61.1", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-util", - "zed_actions", - "zlog", -] - -[[package]] -name = "workspace-hack" -version = "0.1.0" -dependencies = [ - "aes", - "ahash 0.8.11", - "aho-corasick", - "anstream", - "arrayvec", - "ashpd 0.11.0", - "async-compression", - "async-std", - "async-tungstenite", - "aws-config", - "aws-credential-types", - "aws-runtime", - "aws-sigv4", - "aws-smithy-async", - "aws-smithy-http", - "aws-smithy-runtime", - "aws-smithy-runtime-api", - "aws-smithy-types", - "base64 0.22.1", - "base64ct", - "bigdecimal", - "bit-set 0.8.0", - "bit-vec 0.8.0", - "bitflags 2.9.0", - "bstr", - "bytemuck", - "byteorder", - "bytes 1.10.1", - "cc", - "chrono", - "cipher", - "clap", - "clap_builder", - "codespan-reporting", - "concurrent-queue", - "core-foundation 0.9.4", - "core-foundation-sys", - "cranelift-codegen", - "crossbeam-channel", - "crossbeam-epoch", - "crossbeam-utils", - "crypto-common", - "deranged", - "digest", - "either", - "euclid", - "event-listener 5.4.0", - "event-listener-strategy", - "flate2", - "flume", - "foldhash", - "form_urlencoded", - "futures 0.3.31", - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", - "getrandom 0.2.15", - "getrandom 0.3.2", - "gimli", - "half", - "handlebars 4.5.0", - "hashbrown 0.14.5", - "hashbrown 0.15.3", - "heck 0.4.1", - "hmac", - "hyper 0.14.32", - "hyper-rustls 0.27.5", - "idna", - "indexmap 2.9.0", - "inout", - "itertools 0.12.1", - "itertools 0.13.0", - "lazy_static", - "libc", - "libsqlite3-sys", - "linux-raw-sys 0.4.15", - "linux-raw-sys 0.9.4", - "livekit-runtime", - "log", - "lyon", - "lyon_path", - "md-5", - "memchr", - "memmap2", - "mime_guess", - "miniz_oxide", - "mio 1.0.3", - "naga", - "nix 0.28.0", - "nix 0.29.0", - "nix 0.30.1", - "nom 7.1.3", - "num-bigint", - "num-bigint-dig", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", - "objc2", - "objc2-core-foundation", - "objc2-foundation", - "objc2-metal", - "object", - "once_cell", - "percent-encoding", - "phf", - "phf_shared", - "prettyplease", - "proc-macro2", - "prost 0.12.6", - "prost 0.9.0", - "prost-types 0.9.0", - "quote", - "rand 0.8.5", - "rand 0.9.1", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "rand_distr", - "regalloc2", - "regex", - "regex-automata", - "regex-syntax", - "ring", - "rust_decimal", - "rustc-hash 1.1.0", - "rustix 0.38.44", - "rustix 1.0.7", - "rustls 0.23.26", - "rustls-webpki 0.103.1", - "scopeguard", - "sea-orm", - "sea-query-binder", - "security-framework 3.2.0", - "security-framework-sys", - "semver", - "serde", - "serde_core", - "serde_json", - "simd-adler32", + "settings", "smallvec", - "spin", - "sqlx", - "sqlx-macros", - "sqlx-macros-core", - "sqlx-postgres", - "sqlx-sqlite", - "stable_deref_trait", - "strum 0.26.3", - "subtle", - "syn 1.0.109", - "syn 2.0.101", - "sync_wrapper 1.0.2", - "thiserror 2.0.12", - "time", - "time-macros", - "tokio", - "tokio-rustls 0.26.2", - "tokio-socks", - "tokio-stream", - "tokio-util", - "toml_datetime", - "toml_edit", - "tower 0.5.2", - "tracing", - "tracing-core", - "tungstenite 0.26.2", - "unicode-properties", - 
"url", + "sqlez", + "strum 0.27.2", + "task", + "telemetry", + "tempfile", + "theme", + "ui", + "util", "uuid", - "wasmparser 0.221.3", - "wasmtime", - "wasmtime-cranelift", - "wasmtime-environ", - "wayland-backend", - "wayland-sys", - "winapi", - "windows-core 0.61.0", - "windows-numerics", - "windows-sys 0.48.0", - "windows-sys 0.52.0", - "windows-sys 0.59.0", - "windows-sys 0.61.0", - "zbus_macros", - "zeroize", - "zvariant", + "windows 0.61.3", + "zed_actions", + "zlog", ] [[package]] @@ -19740,13 +20618,16 @@ name = "worktree" version = "0.1.0" dependencies = [ "anyhow", + "async-lock 2.8.0", "clock", + "collections", "fs", "futures 0.3.31", "fuzzy", "git", "git2", "gpui", + "http_client", "ignore", "language", "log", @@ -19754,33 +20635,24 @@ dependencies = [ "paths", "postage", "pretty_assertions", - "rand 0.9.1", + "rand 0.9.2", "rpc", "serde", "serde_json", "settings", "smallvec", "smol", + "sum_tree", "text", - "workspace-hack", - "zed-collections", - "zed-http-client", - "zed-sum-tree", - "zed-util", + "util", "zlog", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "wyz" @@ -19813,32 +20685,32 @@ dependencies = [ [[package]] name = "x11rb" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12" +checksum = "9993aa5be5a26815fe2c3eacfc1fde061fc1a1f094bf1ad2a18bf9c495dd7414" dependencies = [ "as-raw-xcb-connection", "gethostname", "libc", - "rustix 0.38.44", + "rustix 1.1.2", "x11rb-protocol", + "xcursor", ] [[package]] name = "x11rb-protocol" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d" +checksum = "ea6fc2961e4ef194dcbfe56bb845534d0dc8098940c7e5c012a258bfec6701bd" [[package]] name = "x_ai" version = "0.1.0" dependencies = [ "anyhow", - "schemars 1.0.1", + "schemars 1.0.4", "serde", - "strum 0.27.1", - "workspace-hack", + "strum 0.27.2", ] [[package]] @@ -19864,9 +20736,9 @@ dependencies = [ [[package]] name = "xcursor" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef33da6b1660b4ddbfb3aef0ade110c8b8a781a3b6382fa5f2b5b040fd55f61" +checksum = "bec9e4a500ca8864c5b47b8b482a73d62e4237670e5b5f1d6b9e3cae50f28f2b" [[package]] name = "xim-ctext" @@ -19881,7 +20753,7 @@ name = "xim-parser" version = "0.2.1" source = "git+https://github.com/zed-industries/xim-rs.git?rev=16f35a2c881b815a2b6cdfd6687988e84f8447d8#16f35a2c881b815a2b6cdfd6687988e84f8447d8" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", ] [[package]] @@ -19934,11 +20806,16 @@ dependencies = [ "cargo_toml", "clap", "indoc", - "toml 0.8.20", - "toml_edit", - "workspace-hack", + "toml 0.8.23", + "toml_edit 0.22.27", ] +[[package]] +name = "xxhash-rust" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" + [[package]] name = 
"yaml-rust2" version = "0.8.1" @@ -19967,7 +20844,7 @@ dependencies = [ "flate2", "futures 0.3.31", "http-body-util", - "hyper 1.6.0", + "hyper 1.7.0", "hyper-util", "js-sys", "nom 8.0.0", @@ -20010,7 +20887,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ "serde", "stable_deref_trait", - "yoke-derive", + "yoke-derive 0.7.5", + "zerofrom", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive 0.8.0", "zerofrom", ] @@ -20022,29 +20911,41 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", + "synstructure", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", "synstructure", ] [[package]] name = "zbus" -version = "5.11.0" +version = "5.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d07e46d035fb8e375b2ce63ba4e4ff90a7f73cf2ffb0138b29e1158d2eaadf7" +checksum = "b622b18155f7a93d1cd2dc8c01d2d6a44e08fb9ebb7b3f9e6ed101488bad6c91" dependencies = [ "async-broadcast", "async-executor", "async-io", - "async-lock", + "async-lock 3.4.1", "async-process", "async-recursion", "async-task", "async-trait", "blocking", "enumflags2", - "event-listener 5.4.0", + "event-listener 5.4.1", "futures-core", - "futures-lite 2.6.0", + "futures-lite 2.6.1", "hex", "nix 0.30.1", "ordered-stream", @@ -20052,7 +20953,8 @@ dependencies = [ "serde_repr", "tracing", "uds_windows", - "windows-sys 0.60.2", + "uuid", + "windows-sys 0.61.2", "winnow", "zbus_macros", "zbus_names", @@ -20061,14 +20963,14 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.11.0" +version = "5.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57e797a9c847ed3ccc5b6254e8bcce056494b375b511b3d6edcec0aeb4defaca" +checksum = "1cdb94821ca8a87ca9c298b5d1cbd80e2a8b67115d99f6e4551ac49e42b6a314" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "zbus_names", "zvariant", "zvariant_utils", @@ -20088,23 +20990,21 @@ dependencies = [ [[package]] name = "zed" -version = "0.209.0" +version = "0.211.0" dependencies = [ "acp_tools", "activity_indicator", - "agent", "agent_settings", "agent_ui", "anyhow", "ashpd 0.11.0", "askpass", "assets", - "assistant_tools", "audio", "auto_update", "auto_update_ui", "backtrace", - "bincode", + "bincode 1.3.3", "breadcrumbs", "call", "channel", @@ -20113,6 +21013,7 @@ dependencies = [ "client", "codestral", "collab_ui", + "collections", "command_palette", "component", "copilot", @@ -20141,6 +21042,7 @@ dependencies = [ "go_to_line", "gpui", "gpui_tokio", + "http_client", "image_viewer", "inspector_ui", "install_cli", @@ -20199,7 +21101,7 @@ dependencies = [ "snippets_ui", "supermaven", "svg_preview", - "sysinfo", + "sysinfo 0.37.2", "system_specs", "tab_switcher", "task", @@ -20220,20 +21122,17 @@ dependencies = [ "ui_prompt", "url", "urlencoding", + "util", "uuid", "vim", "vim_mode_setting", "watch", "web_search", "web_search_providers", - "windows 0.61.1", + "windows 0.61.3", "winresource", "workspace", - "workspace-hack", - 
"zed-collections", - "zed-http-client", "zed-reqwest", - "zed-util", "zed_actions", "zed_env_vars", "zeta", @@ -20243,31 +21142,12 @@ dependencies = [ "zlog_settings", ] -[[package]] -name = "zed-collections" -version = "0.1.0" -dependencies = [ - "indexmap 2.9.0", - "rustc-hash 2.1.1", - "workspace-hack", -] - -[[package]] -name = "zed-derive-refineable" -version = "0.1.0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "workspace-hack", -] - [[package]] name = "zed-font-kit" version = "0.14.1-zed" source = "git+https://github.com/zed-industries/font-kit?rev=110523127440aefb11ce0cf280ae7c5071337ec5#110523127440aefb11ce0cf280ae7c5071337ec5" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.4", "byteorder", "core-foundation 0.10.0", "core-graphics 0.24.0", @@ -20286,64 +21166,6 @@ dependencies = [ "yeslogic-fontconfig-sys", ] -[[package]] -name = "zed-http-client" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-compression", - "async-fs", - "async-tar", - "bytes 1.10.1", - "derive_more", - "futures 0.3.31", - "http 1.3.1", - "http-body 1.0.1", - "log", - "parking_lot", - "serde", - "serde_json", - "sha2", - "tempfile", - "url", - "workspace-hack", - "zed-reqwest", - "zed-util", -] - -[[package]] -name = "zed-media" -version = "0.1.0" -dependencies = [ - "anyhow", - "bindgen 0.71.1", - "core-foundation 0.10.0", - "core-video", - "ctor", - "foreign-types 0.5.0", - "metal", - "objc", - "workspace-hack", -] - -[[package]] -name = "zed-perf" -version = "0.1.0" -dependencies = [ - "serde", - "serde_json", - "workspace-hack", - "zed-collections", -] - -[[package]] -name = "zed-refineable" -version = "0.1.0" -dependencies = [ - "workspace-hack", - "zed-derive-refineable", -] - [[package]] name = "zed-reqwest" version = "0.12.15-zed" @@ -20354,12 +21176,12 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.4.9", + "h2 0.4.12", "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", - "hyper-rustls 0.27.5", + "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", "ipnet", "js-sys", @@ -20370,8 +21192,8 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.26", - "rustls-native-certs 0.8.1", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", @@ -20406,92 +21228,20 @@ dependencies = [ "rand 0.8.5", "screencapturekit", "screencapturekit-sys", - "sysinfo", + "sysinfo 0.31.4", "tao-core-video-sys", - "windows 0.61.1", + "windows 0.61.3", "windows-capture", "x11", "xcb", ] -[[package]] -name = "zed-semantic-version" -version = "0.1.0" -dependencies = [ - "anyhow", - "serde", - "workspace-hack", -] - -[[package]] -name = "zed-sum-tree" -version = "0.1.0" -dependencies = [ - "arrayvec", - "ctor", - "log", - "rand 0.9.1", - "rayon", - "workspace-hack", - "zlog", -] - -[[package]] -name = "zed-util" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-fs", - "async_zip", - "command-fds", - "dirs 4.0.0", - "dunce", - "futures 0.3.31", - "futures-lite 1.13.0", - "git2", - "globset", - "indoc", - "itertools 0.14.0", - "libc", - "log", - "nix 0.29.0", - "pretty_assertions", - "rand 0.9.1", - "regex", - "rust-embed", - "schemars 1.0.1", - "serde", - "serde_json", - "serde_json_lenient", - "shlex", - "smol", - "take-until", - "tempfile", - "tendril", - "unicase", - "walkdir", - "which 6.0.3", - "workspace-hack", - "zed-collections", - "zed-util-macros", -] - -[[package]] -name = "zed-util-macros" -version = "0.1.0" -dependencies = [ - "quote", - "syn 2.0.101", - 
"workspace-hack", - "zed-perf", -] - [[package]] name = "zed-xim" version = "0.4.0-zed" source = "git+https://github.com/zed-industries/xim-rs.git?rev=16f35a2c881b815a2b6cdfd6687988e84f8447d8#16f35a2c881b815a2b6cdfd6687988e84f8447d8" dependencies = [ - "ahash 0.8.11", + "ahash 0.8.12", "hashbrown 0.14.5", "log", "x11rb", @@ -20504,10 +21254,9 @@ name = "zed_actions" version = "0.1.0" dependencies = [ "gpui", - "schemars 1.0.1", + "schemars 1.0.4", "serde", "uuid", - "workspace-hack", ] [[package]] @@ -20515,7 +21264,6 @@ name = "zed_env_vars" version = "0.1.0" dependencies = [ "gpui", - "workspace-hack", ] [[package]] @@ -20579,48 +21327,28 @@ dependencies = [ [[package]] name = "zeno" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc0de2315dc13d00e5df3cd6b8d2124a6eaec6a2d4b6a1c5f37b7efad17fcc17" - -[[package]] -name = "zerocopy" -version = "0.7.35" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive 0.7.35", -] +checksum = "6df3dc4292935e51816d896edcd52aa30bc297907c26167fec31e2b0c6a32524" [[package]] name = "zerocopy" -version = "0.8.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" -dependencies = [ - "zerocopy-derive 0.8.24", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.24" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -20640,15 +21368,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "zeroize_derive", ] @@ -20661,7 +21389,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -20688,26 +21416,37 @@ dependencies = [ "uuid", ] +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke 0.8.0", + "zerofrom", +] + [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" 
dependencies = [ - "yoke", + "yoke 0.8.0", "zerofrom", "zerovec-derive", ] [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -20722,6 +21461,7 @@ dependencies = [ "clock", "cloud_api_types", "cloud_llm_client", + "collections", "command_palette_hooks", "copilot", "ctor", @@ -20732,6 +21472,7 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", + "http_client", "indoc", "itertools 0.14.0", "language", @@ -20741,7 +21482,7 @@ dependencies = [ "parking_lot", "postage", "project", - "rand 0.9.1", + "rand 0.9.2", "regex", "release_channel", "reqwest_client", @@ -20749,21 +21490,18 @@ dependencies = [ "serde", "serde_json", "settings", - "strum 0.27.1", + "strum 0.27.2", "telemetry", "telemetry_events", "theme", - "thiserror 2.0.12", + "thiserror 2.0.17", "tree-sitter-go", "tree-sitter-rust", "ui", + "util", "uuid", "workspace", - "workspace-hack", "worktree", - "zed-collections", - "zed-http-client", - "zed-util", "zed_actions", "zlog", ] @@ -20781,6 +21519,7 @@ dependencies = [ "cloud_zeta2_prompt", "edit_prediction", "edit_prediction_context", + "feature_flags", "futures 0.3.31", "gpui", "indoc", @@ -20791,14 +21530,14 @@ dependencies = [ "pretty_assertions", "project", "release_channel", + "serde", "serde_json", "settings", - "thiserror 2.0.12", + "thiserror 2.0.17", + "util", "uuid", "workspace", - "workspace-hack", "worktree", - "zed-util", ] [[package]] @@ -20809,8 +21548,10 @@ dependencies = [ "clap", "client", "cloud_llm_client", + "collections", "edit_prediction_context", "editor", + "feature_flags", "futures 0.3.31", "gpui", "indoc", @@ -20823,13 +21564,12 @@ dependencies = [ "serde", "serde_json", "settings", + "telemetry", "text", "ui", "ui_input", + "util", "workspace", - "workspace-hack", - "zed-collections", - "zed-util", "zeta2", "zlog", ] @@ -20839,10 +21579,12 @@ name = "zeta_cli" version = "0.1.0" dependencies = [ "anyhow", + "chrono", "clap", "client", "cloud_llm_client", "cloud_zeta2_prompt", + "collections", "debug_adapter_extension", "edit_prediction_context", "extension", @@ -20859,6 +21601,7 @@ dependencies = [ "node_runtime", "ordered-float 2.10.1", "paths", + "polars", "project", "prompt_store", "release_channel", @@ -20868,11 +21611,10 @@ dependencies = [ "settings", "shellexpand 2.1.2", "smol", + "soa-rs", "terminal_view", + "util", "watch", - "workspace-hack", - "zed-collections", - "zed-util", "zeta", "zeta2", "zlog", @@ -20887,7 +21629,7 @@ dependencies = [ "aes", "byteorder", "bzip2", - "constant_time_eq", + "constant_time_eq 0.1.5", "crc32fast", "crossbeam-utils", "flate2", @@ -20895,7 +21637,7 @@ dependencies = [ "pbkdf2 0.11.0", "sha1", "time", - "zstd", + "zstd 0.11.2+zstd.1.5.2", ] [[package]] @@ -20908,31 +21650,35 @@ dependencies = [ "crc32fast", "crossbeam-utils", "displaydoc", - "indexmap 2.9.0", + "indexmap 2.11.4", "num_enum", "thiserror 1.0.69", ] +[[package]] +name = "zlib-rs" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2" + [[package]] name = "zlog" version = "0.1.0" dependencies = [ "anyhow", "chrono", + "collections", "log", "tempfile", - "workspace-hack", - "zed-collections", ] [[package]] name = 
"zlog_settings" version = "0.1.0" dependencies = [ + "collections", "gpui", "settings", - "workspace-hack", - "zed-collections", "zlog", ] @@ -20942,7 +21688,16 @@ version = "0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ - "zstd-safe", + "zstd-safe 5.0.2+zstd.1.5.2", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe 7.2.4", ] [[package]] @@ -20955,11 +21710,20 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + [[package]] name = "zstd-sys" -version = "2.0.15+zstd.1.5.7" +version = "2.0.16+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" dependencies = [ "cc", "pkg-config", @@ -20982,18 +21746,18 @@ dependencies = [ [[package]] name = "zune-jpeg" -version = "0.4.14" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99a5bab8d7dedf81405c4bb1f2b83ea057643d9cb28778cea9eecddeedd2e028" +checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" dependencies = [ "zune-core", ] [[package]] name = "zvariant" -version = "5.7.0" +version = "5.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "999dd3be73c52b1fccd109a4a81e4fcd20fab1d3599c8121b38d04e1419498db" +checksum = "2be61892e4f2b1772727be11630a62664a1826b62efa43a6fe7449521cb8744c" dependencies = [ "endi", "enumflags2", @@ -21006,27 +21770,26 @@ dependencies = [ [[package]] name = "zvariant_derive" -version = "5.7.0" +version = "5.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6643fd0b26a46d226bd90d3f07c1b5321fe9bb7f04673cb37ac6d6883885b68e" +checksum = "da58575a1b2b20766513b1ec59d8e2e68db2745379f961f86650655e862d2006" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "zvariant_utils", ] [[package]] name = "zvariant_utils" -version = "3.2.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16edfee43e5d7b553b77872d99bc36afdda75c223ca7ad5e3fbecd82ca5fc34" +checksum = "c6949d142f89f6916deca2232cf26a8afacf2b9fdc35ce766105e104478be599" dependencies = [ "proc-macro2", "quote", "serde", - "static_assertions", - "syn 2.0.101", + "syn 2.0.106", "winnow", ] diff --git a/Cargo.toml b/Cargo.toml index 6b4be9c302e394aa219b6d74e13ccc547406c10d..41ae90bf7ec00ee07447819c16cd48f36bb4701f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,6 @@ members = [ "crates/action_log", "crates/activity_indicator", "crates/agent", - "crates/agent2", "crates/agent_servers", "crates/agent_settings", "crates/agent_ui", @@ -14,11 +13,9 @@ members = [ "crates/anthropic", "crates/askpass", "crates/assets", - "crates/assistant_context", + "crates/assistant_text_thread", "crates/assistant_slash_command", "crates/assistant_slash_commands", - "crates/assistant_tool", - "crates/assistant_tools", "crates/audio", "crates/auto_update", "crates/auto_update_helper", 
@@ -73,6 +70,7 @@ members = [ "crates/file_finder", "crates/file_icons", "crates/fs", + "crates/fs_benchmarks", "crates/fsevent", "crates/fuzzy", "crates/git", @@ -221,7 +219,6 @@ members = [ # "tooling/perf", - "tooling/workspace-hack", "tooling/xtask", ] default-members = ["crates/zed"] @@ -240,7 +237,6 @@ acp_tools = { path = "crates/acp_tools" } acp_thread = { path = "crates/acp_thread" } action_log = { path = "crates/action_log" } agent = { path = "crates/agent" } -agent2 = { path = "crates/agent2" } activity_indicator = { path = "crates/activity_indicator" } agent_ui = { path = "crates/agent_ui" } agent_settings = { path = "crates/agent_settings" } @@ -250,11 +246,9 @@ ai_onboarding = { path = "crates/ai_onboarding" } anthropic = { path = "crates/anthropic" } askpass = { path = "crates/askpass" } assets = { path = "crates/assets" } -assistant_context = { path = "crates/assistant_context" } +assistant_text_thread = { path = "crates/assistant_text_thread" } assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_slash_commands = { path = "crates/assistant_slash_commands" } -assistant_tool = { path = "crates/assistant_tool" } -assistant_tools = { path = "crates/assistant_tools" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } auto_update_helper = { path = "crates/auto_update_helper" } @@ -274,7 +268,7 @@ cloud_llm_client = { path = "crates/cloud_llm_client" } cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" } collab = { path = "crates/collab" } collab_ui = { path = "crates/collab_ui" } -collections = { path = "crates/collections", package = "zed-collections", version = "0.1.0" } +collections = { path = "crates/collections", version = "0.1.0" } command_palette = { path = "crates/command_palette" } command_palette_hooks = { path = "crates/command_palette_hooks" } component = { path = "crates/component" } @@ -290,7 +284,7 @@ debug_adapter_extension = { path = "crates/debug_adapter_extension" } debugger_tools = { path = "crates/debugger_tools" } debugger_ui = { path = "crates/debugger_ui" } deepseek = { path = "crates/deepseek" } -derive_refineable = { path = "crates/refineable/derive_refineable", package = "zed-derive-refineable", version = "0.1.0" } +derive_refineable = { path = "crates/refineable/derive_refineable" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } extension = { path = "crates/extension" } @@ -309,10 +303,10 @@ git_ui = { path = "crates/git_ui" } go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } gpui = { path = "crates/gpui", default-features = false } -gpui_macros = { path = "crates/gpui_macros", package = "gpui-macros", version = "0.1.0" } +gpui_macros = { path = "crates/gpui_macros" } gpui_tokio = { path = "crates/gpui_tokio" } html_to_markdown = { path = "crates/html_to_markdown" } -http_client = { path = "crates/http_client", package = "zed-http-client", version = "0.1.0" } +http_client = { path = "crates/http_client" } http_client_tls = { path = "crates/http_client_tls" } icons = { path = "crates/icons" } image_viewer = { path = "crates/image_viewer" } @@ -341,7 +335,7 @@ lsp = { path = "crates/lsp" } markdown = { path = "crates/markdown" } markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } -media = { path = "crates/media", package = "zed-media", version = "0.1.0" } +media = { path = "crates/media" } menu = { path = "crates/menu" } migrator = { path = "crates/migrator" } mistral = { 
path = "crates/mistral" } @@ -358,7 +352,7 @@ outline = { path = "crates/outline" } outline_panel = { path = "crates/outline_panel" } panel = { path = "crates/panel" } paths = { path = "crates/paths" } -perf = { path = "tooling/perf", package = "zed-perf", version = "0.1.0" } +perf = { path = "tooling/perf" } picker = { path = "crates/picker" } plugin = { path = "crates/plugin" } plugin_macros = { path = "crates/plugin_macros" } @@ -370,7 +364,7 @@ project_symbols = { path = "crates/project_symbols" } prompt_store = { path = "crates/prompt_store" } proto = { path = "crates/proto" } recent_projects = { path = "crates/recent_projects" } -refineable = { path = "crates/refineable", package = "zed-refineable", version = "0.1.0" } +refineable = { path = "crates/refineable" } release_channel = { path = "crates/release_channel" } scheduler = { path = "crates/scheduler" } remote = { path = "crates/remote" } @@ -378,12 +372,12 @@ remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } -rodio = { git = "https://github.com/RustAudio/rodio" } +rodio = { git = "https://github.com/RustAudio/rodio", rev ="e2074c6c2acf07b57cf717e076bdda7a9ac6e70b", features = ["wav", "playback", "wav_output", "recording"] } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } rules_library = { path = "crates/rules_library" } search = { path = "crates/search" } -semantic_version = { path = "crates/semantic_version", package = "zed-semantic-version", version = "0.1.0" } +semantic_version = { path = "crates/semantic_version" } session = { path = "crates/session" } settings = { path = "crates/settings" } settings_macros = { path = "crates/settings_macros" } @@ -396,7 +390,7 @@ sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } storybook = { path = "crates/storybook" } streaming_diff = { path = "crates/streaming_diff" } -sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" } +sum_tree = { path = "crates/sum_tree" } supermaven = { path = "crates/supermaven" } supermaven_api = { path = "crates/supermaven_api" } codestral = { path = "crates/codestral" } @@ -420,8 +414,8 @@ ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } ui_prompt = { path = "crates/ui_prompt" } -util = { path = "crates/util", package = "zed-util", version = "0.1.0" } -util_macros = { path = "crates/util_macros", package = "zed-util-macros", version = "0.1.0" } +util = { path = "crates/util" } +util_macros = { path = "crates/util_macros" } vercel = { path = "crates/vercel" } vim = { path = "crates/vim" } vim_mode_setting = { path = "crates/vim_mode_setting" } @@ -444,7 +438,7 @@ zlog_settings = { path = "crates/zlog_settings" } # External crates # -agent-client-protocol = { version = "0.4.3", features = ["unstable"] } +agent-client-protocol = { version = "0.5.0", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = "0.25.1-rc1" any_vec = "0.14" @@ -456,12 +450,13 @@ async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "2.1" +async-lock = "2.1" async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" } async-recursion = "1.0.0" -async-tar = "0.5.0" +async-tar = "0.5.1" async-task = "4.7" async-trait = "0.1" -async-tungstenite = "0.29.1" +async-tungstenite = 
"0.31.0" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } aws-config = { version = "1.6.1", features = ["behavior-version-latest"] } aws-credential-types = { version = "1.2.2", features = [ @@ -487,10 +482,10 @@ chrono = { version = "0.4", features = ["serde"] } ciborium = "0.2" circular-buffer = "1.0" clap = { version = "4.4", features = ["derive"] } -cocoa = "0.26" -cocoa-foundation = "0.2.0" +cocoa = "=0.26.0" +cocoa-foundation = "=0.2.0" convert_case = "0.8.0" -core-foundation = "0.10.0" +core-foundation = "=0.10.0" core-foundation-sys = "0.8.6" core-video = { version = "0.4.3", features = ["metal"] } cpal = "0.16" @@ -553,7 +548,7 @@ nix = "0.29" num-format = "0.4.4" num-traits = "0.2" objc = "0.2" -objc2-foundation = { version = "0.3", default-features = false, features = [ +objc2-foundation = { version = "=0.3.1", default-features = false, features = [ "NSArray", "NSAttributedString", "NSBundle", @@ -586,14 +581,14 @@ partial-json-fixer = "0.5.3" parse_int = "0.9" pciid-parser = "0.8.0" pathdiff = "0.2" -pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } -pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } +pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } portable-pty = "0.9.0" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } @@ -651,11 +646,11 @@ sqlformat = "0.2" stacksafe = "0.1" streaming-iterator = "0.1" strsim = "0.11" -strum = { version = "0.27.0", features = ["derive"] } +strum = { version = "0.27.2", features = ["derive"] } subtle = "2.5.0" 
syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] } sys-locale = "0.3.1" -sysinfo = "0.31.0" +sysinfo = "0.37.0" take-until = "0.2.0" tempfile = "3.20.0" thiserror = "2.0.12" @@ -719,7 +714,6 @@ wasmtime-wasi = "29" which = "6.0.0" windows-core = "0.61" wit-component = "0.221" -workspace-hack = "0.1.0" yawc = "0.2.5" zeroize = "1.8" zstd = "0.11" @@ -780,9 +774,6 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5a notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } -# Makes the workspace hack crate refer to the local one, but only when you're building locally -workspace-hack = { path = "tooling/workspace-hack" } - [profile.dev] split-debuginfo = "unpacked" codegen-units = 16 @@ -806,7 +797,7 @@ wasmtime = { opt-level = 3 } activity_indicator = { codegen-units = 1 } assets = { codegen-units = 1 } breadcrumbs = { codegen-units = 1 } -zed-collections = { codegen-units = 1 } +collections = { codegen-units = 1 } command_palette = { codegen-units = 1 } command_palette_hooks = { codegen-units = 1 } extension_cli = { codegen-units = 1 } @@ -826,11 +817,11 @@ outline = { codegen-units = 1 } paths = { codegen-units = 1 } prettier = { codegen-units = 1 } project_symbols = { codegen-units = 1 } -zed-refineable = { codegen-units = 1 } +refineable = { codegen-units = 1 } release_channel = { codegen-units = 1 } reqwest_client = { codegen-units = 1 } rich_text = { codegen-units = 1 } -zed-semantic-version = { codegen-units = 1 } +semantic_version = { codegen-units = 1 } session = { codegen-units = 1 } snippet = { codegen-units = 1 } snippets_ui = { codegen-units = 1 } @@ -910,5 +901,5 @@ ignored = [ "serde", "component", "documented", - "workspace-hack", + "sea-orm-macros", ] diff --git a/Cross.toml b/Cross.toml deleted file mode 100644 index b5f0f1103af2ba6956c7910a7196ddd13788bf46..0000000000000000000000000000000000000000 --- a/Cross.toml +++ /dev/null @@ -1,2 +0,0 @@ -[build] -dockerfile = "Dockerfile-cross" diff --git a/Dockerfile-cross b/Dockerfile-cross deleted file mode 100644 index 488309641caed52c4b15d7367fd42bfab1a14418..0000000000000000000000000000000000000000 --- a/Dockerfile-cross +++ /dev/null @@ -1,17 +0,0 @@ -# syntax=docker/dockerfile:1 - -ARG CROSS_BASE_IMAGE -FROM ${CROSS_BASE_IMAGE} -WORKDIR /app -ARG TZ=Etc/UTC \ - LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 \ - DEBIAN_FRONTEND=noninteractive -ENV CARGO_TERM_COLOR=always - -COPY script/install-mold script/ -RUN ./script/install-mold "2.34.0" -COPY script/remote-server script/ -RUN ./script/remote-server - -COPY . . diff --git a/README.md b/README.md index 38547c1ca441b918b773d8b1a884a1e3f48c785f..adc152b7af163b3c90c73a23e0f45bab1120bddc 100644 --- a/README.md +++ b/README.md @@ -9,11 +9,10 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of ### Installation -On macOS and Linux you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager). +On macOS, Linux, and Windows you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager). 
Other platforms are not yet available: -- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394)) - Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396)) ### Developing Zed diff --git a/REVIEWERS.conl b/REVIEWERS.conl new file mode 100644 index 0000000000000000000000000000000000000000..d5254c7aaf394f4fae33be391fde84c567c37a53 --- /dev/null +++ b/REVIEWERS.conl @@ -0,0 +1,106 @@ +; This file contains a list of people who're interested in reviewing pull requests +; to certain parts of the code-base. +; +; This is mostly used internally for PR assignment, and may change over time. +; +; If you have permission to merge PRs (mostly equivalent to "do you work at Zed Industries"), +; we strongly encourage you to put your name in the "all" bucket, but you can also add yourself +; to other areas too. + + + = @ConradIrwin + = @maxdeviant + = @SomeoneToIgnore + = @probably-neb + = @danilo-leal + = @Veykril + = @kubkon + = @p1n3appl3 + = @dinocosta + = @smitbarmase + = @cole-miller + +vim + = @ConradIrwin + = @probably-neb + = @p1n3appl3 + = @dinocosta + +gpui + = @mikayla-maki + +git + = @cole-miller + = @danilo-leal + +linux + = @dvdsk + = @smitbarmase + = @p1n3appl3 + = @cole-miller + +windows + = @reflectronic + = @localcc + +pickers + = @p1n3appl3 + = @dvdsk + = @SomeoneToIgnore + +audio + = @dvdsk + +helix + = @kubkon + +terminal + = @kubkon + = @Veykril + +debugger + = @kubkon + = @osiewicz + = @Anthony-Eid + +extension + = @kubkon + +settings_ui + = @probably-neb + = @danilo-leal + = @Anthony-Eid + +crashes + = @p1n3appl3 + = @Veykril + +ai + = @danilo-leal + = @benbrandt + +design + = @danilo-leal + +multi_buffer + = @Veykril + = @SomeoneToIgnore + +lsp + = @osiewicz + = @Veykril + = @smitbarmase + = @SomeoneToIgnore + +languages + = @osiewicz + = @Veykril + = @smitbarmase + = @SomeoneToIgnore + +project_panel + = @smitbarmase + +tasks + = @SomeoneToIgnore + = @Veykril diff --git a/assets/icons/undo.svg b/assets/icons/undo.svg index c714b58747e950ab75d3a02be7eebfe7cd83eda1..ccd45e246c6911c57cb2659764db6e1dc11bf0cb 100644 --- a/assets/icons/undo.svg +++ b/assets/icons/undo.svg @@ -1 +1,4 @@ - + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index ae15492b804274bc3ceacaae156a19093344a9b7..3d94edafcdfc1d9acec5328cade996459547996b 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -139,7 +139,7 @@ "find": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", - "ctrl->": "agent::QuoteSelection", + "ctrl->": "agent::AddSelectionToThread", "ctrl-<": "assistant::InsertIntoEditor", "ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", @@ -243,7 +243,7 @@ "ctrl-shift-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "ctrl->": "agent::QuoteSelection", + "ctrl->": "agent::AddSelectionToThread", "ctrl-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", @@ -269,14 +269,14 @@ } }, { - "context": "AgentPanel && prompt_editor", + "context": "AgentPanel && text_thread", "bindings": { "ctrl-n": "agent::NewTextThread", "ctrl-alt-t": "agent::NewThread" } }, { - "context": "AgentPanel && external_agent_thread", + "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewExternalAgentThread", @@ -366,7 
+366,7 @@ } }, { - "context": "PromptLibrary", + "context": "RulesLibrary", "bindings": { "new": "rules_library::NewRule", "ctrl-n": "rules_library::NewRule", @@ -491,8 +491,8 @@ "bindings": { "ctrl-[": "editor::Outdent", "ctrl-]": "editor::Indent", - "shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above - "shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below + "shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above + "shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below "ctrl-shift-k": "editor::DeleteLine", "alt-up": "editor::MoveLineUp", "alt-down": "editor::MoveLineDown", @@ -1080,7 +1080,8 @@ { "context": "StashList || (StashList > Picker > Editor)", "bindings": { - "ctrl-shift-backspace": "stash_picker::DropStashItem" + "ctrl-shift-backspace": "stash_picker::DropStashItem", + "ctrl-shift-v": "stash_picker::ShowStashItem" } }, { @@ -1249,6 +1250,7 @@ "escape": "workspace::CloseWindow", "ctrl-m": "settings_editor::Minimize", "ctrl-f": "search::FocusSearch", + "left": "settings_editor::ToggleFocusNav", "ctrl-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI "ctrl-1": ["settings_editor::FocusFile", 0], @@ -1265,10 +1267,22 @@ "ctrl-pagedown": "settings_editor::FocusNextFile" } }, + { + "context": "StashDiff > Editor", + "bindings": { + "ctrl-space": "git::ApplyCurrentStash", + "ctrl-shift-space": "git::PopCurrentStash", + "ctrl-shift-backspace": "git::DropCurrentStash" + } + }, { "context": "SettingsWindow > NavigationMenu", "use_key_equivalents": true, "bindings": { + "up": "settings_editor::FocusPreviousNavEntry", + "shift-tab": "settings_editor::FocusPreviousNavEntry", + "down": "settings_editor::FocusNextNavEntry", + "tab": "settings_editor::FocusNextNavEntry", "right": "settings_editor::ExpandNavEntry", "left": "settings_editor::CollapseNavEntry", "pageup": "settings_editor::FocusPreviousRootNavEntry", @@ -1276,5 +1290,13 @@ "home": "settings_editor::FocusFirstNavEntry", "end": "settings_editor::FocusLastNavEntry" } + }, + { + "context": "Zeta2Feedback > Editor", + "bindings": { + "enter": "editor::Newline", + "ctrl-enter up": "dev::Zeta2RatePredictionPositive", + "ctrl-enter down": "dev::Zeta2RatePredictionNegative" + } } ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 50528dca6f97e8b287333d4011ebc97bb1ed27a2..6c3f47cb45909c1e014e76c9d414b68f23632a14 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -163,7 +163,7 @@ "cmd-alt-f": "buffer_search::DeployReplace", "cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }], "cmd-e": ["buffer_search::Deploy", { "focus": false }], - "cmd->": "agent::QuoteSelection", + "cmd->": "agent::AddSelectionToThread", "cmd-<": "assistant::InsertIntoEditor", "cmd-alt-e": "editor::SelectEnclosingSymbol", "alt-enter": "editor::OpenSelectionsInMultibuffer" @@ -282,7 +282,7 @@ "cmd-shift-i": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "cmd->": "agent::QuoteSelection", + "cmd->": "agent::AddSelectionToThread", "cmd-alt-e": "agent::RemoveAllContext", "cmd-shift-e": "project_panel::ToggleFocus", "cmd-ctrl-b": "agent::ToggleBurnMode", @@ -307,7 +307,7 @@ } }, { - "context": "AgentPanel && prompt_editor", + "context": "AgentPanel && text_thread", "use_key_equivalents": true, "bindings": { 
"cmd-n": "agent::NewTextThread", @@ -315,7 +315,7 @@ } }, { - "context": "AgentPanel && external_agent_thread", + "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "cmd-n": "agent::NewExternalAgentThread", @@ -423,7 +423,7 @@ } }, { - "context": "PromptLibrary", + "context": "RulesLibrary", "use_key_equivalents": true, "bindings": { "cmd-n": "rules_library::NewRule", @@ -539,10 +539,10 @@ "bindings": { "cmd-[": "editor::Outdent", "cmd-]": "editor::Indent", - "cmd-ctrl-p": "editor::AddSelectionAbove", // Insert cursor above - "cmd-alt-up": "editor::AddSelectionAbove", - "cmd-ctrl-n": "editor::AddSelectionBelow", // Insert cursor below - "cmd-alt-down": "editor::AddSelectionBelow", + "cmd-ctrl-p": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], // Insert cursor above + "cmd-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], + "cmd-ctrl-n": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], // Insert cursor below + "cmd-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], "cmd-shift-k": "editor::DeleteLine", "alt-up": "editor::MoveLineUp", "alt-down": "editor::MoveLineDown", @@ -1153,7 +1153,8 @@ "context": "StashList || (StashList > Picker > Editor)", "use_key_equivalents": true, "bindings": { - "ctrl-shift-backspace": "stash_picker::DropStashItem" + "ctrl-shift-backspace": "stash_picker::DropStashItem", + "ctrl-shift-v": "stash_picker::ShowStashItem" } }, { @@ -1354,6 +1355,7 @@ "escape": "workspace::CloseWindow", "cmd-m": "settings_editor::Minimize", "cmd-f": "search::FocusSearch", + "left": "settings_editor::ToggleFocusNav", "cmd-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI "ctrl-1": ["settings_editor::FocusFile", 0], @@ -1370,10 +1372,23 @@ "cmd-}": "settings_editor::FocusNextFile" } }, + { + "context": "StashDiff > Editor", + "use_key_equivalents": true, + "bindings": { + "ctrl-space": "git::ApplyCurrentStash", + "ctrl-shift-space": "git::PopCurrentStash", + "ctrl-shift-backspace": "git::DropCurrentStash" + } + }, { "context": "SettingsWindow > NavigationMenu", "use_key_equivalents": true, "bindings": { + "up": "settings_editor::FocusPreviousNavEntry", + "shift-tab": "settings_editor::FocusPreviousNavEntry", + "down": "settings_editor::FocusNextNavEntry", + "tab": "settings_editor::FocusNextNavEntry", "right": "settings_editor::ExpandNavEntry", "left": "settings_editor::CollapseNavEntry", "pageup": "settings_editor::FocusPreviousRootNavEntry", @@ -1381,5 +1396,13 @@ "home": "settings_editor::FocusFirstNavEntry", "end": "settings_editor::FocusLastNavEntry" } + }, + { + "context": "Zeta2Feedback > Editor", + "bindings": { + "enter": "editor::Newline", + "cmd-enter up": "dev::Zeta2RatePredictionPositive", + "cmd-enter down": "dev::Zeta2RatePredictionNegative" + } } ] diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 570766e92ce8adddb6913fcc20acd71bf7ed240b..5b96d20633b573d939e49a3ea60c4afc5d7ca721 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -134,7 +134,7 @@ "ctrl-k z": "editor::ToggleSoftWrap", "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", - "ctrl-shift-.": "agent::QuoteSelection", + "ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-,": "assistant::InsertIntoEditor", "shift-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", @@ -244,7 +244,7 @@ 
"ctrl-shift-i": "agent::ToggleOptionsMenu", // "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "ctrl-shift-.": "agent::QuoteSelection", + "ctrl-shift-.": "agent::AddSelectionToThread", "shift-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", @@ -270,7 +270,7 @@ } }, { - "context": "AgentPanel && prompt_editor", + "context": "AgentPanel && text_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewTextThread", @@ -278,7 +278,7 @@ } }, { - "context": "AgentPanel && external_agent_thread", + "context": "AgentPanel && acp_thread", "use_key_equivalents": true, "bindings": { "ctrl-n": "agent::NewExternalAgentThread", @@ -375,7 +375,7 @@ } }, { - "context": "PromptLibrary", + "context": "RulesLibrary", "use_key_equivalents": true, "bindings": { "ctrl-n": "rules_library::NewRule", @@ -500,8 +500,8 @@ "bindings": { "ctrl-[": "editor::Outdent", "ctrl-]": "editor::Indent", - "ctrl-shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above - "ctrl-shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below + "ctrl-shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above + "ctrl-shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below "ctrl-shift-k": "editor::DeleteLine", "alt-up": "editor::MoveLineUp", "alt-down": "editor::MoveLineDown", @@ -1106,7 +1106,8 @@ "context": "StashList || (StashList > Picker > Editor)", "use_key_equivalents": true, "bindings": { - "ctrl-shift-backspace": "stash_picker::DropStashItem" + "ctrl-shift-backspace": "stash_picker::DropStashItem", + "ctrl-shift-v": "stash_picker::ShowStashItem" } }, { @@ -1117,6 +1118,7 @@ "ctrl-insert": "terminal::Copy", "ctrl-shift-c": "terminal::Copy", "shift-insert": "terminal::Paste", + "ctrl-v": "terminal::Paste", "ctrl-shift-v": "terminal::Paste", "ctrl-enter": "assistant::InlineAssist", "alt-b": ["terminal::SendText", "\u001bb"], @@ -1153,6 +1155,12 @@ "alt-t": "terminal::RerunTask" } }, + { + "context": "Terminal && selection", + "bindings": { + "ctrl-c": "terminal::Copy" + } + }, { "context": "ZedPredictModal", "use_key_equivalents": true, @@ -1270,6 +1278,7 @@ "escape": "workspace::CloseWindow", "ctrl-m": "settings_editor::Minimize", "ctrl-f": "search::FocusSearch", + "left": "settings_editor::ToggleFocusNav", "ctrl-shift-e": "settings_editor::ToggleFocusNav", // todo(settings_ui): cut this down based on the max files and overflow UI "ctrl-1": ["settings_editor::FocusFile", 0], @@ -1286,10 +1295,23 @@ "ctrl-pagedown": "settings_editor::FocusNextFile" } }, + { + "context": "StashDiff > Editor", + "use_key_equivalents": true, + "bindings": { + "ctrl-space": "git::ApplyCurrentStash", + "ctrl-shift-space": "git::PopCurrentStash", + "ctrl-shift-backspace": "git::DropCurrentStash" + } + }, { "context": "SettingsWindow > NavigationMenu", "use_key_equivalents": true, "bindings": { + "up": "settings_editor::FocusPreviousNavEntry", + "shift-tab": "settings_editor::FocusPreviousNavEntry", + "down": "settings_editor::FocusNextNavEntry", + "tab": "settings_editor::FocusNextNavEntry", "right": "settings_editor::ExpandNavEntry", "left": "settings_editor::CollapseNavEntry", "pageup": "settings_editor::FocusPreviousRootNavEntry", @@ -1297,5 +1319,13 @@ "home": "settings_editor::FocusFirstNavEntry", "end": "settings_editor::FocusLastNavEntry" } + }, + { + "context": "Zeta2Feedback > Editor", + "bindings": { + 
"enter": "editor::Newline", + "ctrl-enter up": "dev::Zeta2RatePredictionPositive", + "ctrl-enter down": "dev::Zeta2RatePredictionNegative" + } } ] diff --git a/assets/keymaps/linux/atom.json b/assets/keymaps/linux/atom.json index 86ee068b06ef38ccec8215e4296c718dd873c824..98992b19fac72055807063edae8b7b23652062d3 100644 --- a/assets/keymaps/linux/atom.json +++ b/assets/keymaps/linux/atom.json @@ -24,8 +24,8 @@ "ctrl-<": "editor::ScrollCursorCenter", // editor:scroll-to-cursor "f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next "shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous - "alt-shift-down": "editor::AddSelectionBelow", // editor:add-selection-below - "alt-shift-up": "editor::AddSelectionAbove", // editor:add-selection-above + "alt-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // editor:add-selection-below + "alt-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // editor:add-selection-above "ctrl-j": "editor::JoinLines", // editor:join-lines "ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines "ctrl-up": "editor::MoveLineUp", // editor:move-line-up diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index 2e27158e1167f0840cadfb0d86dc06614f6076c6..4d2d13a90d96c31f72b1bb0ccc74608f81004eda 100644 --- a/assets/keymaps/linux/cursor.json +++ b/assets/keymaps/linux/cursor.json @@ -17,8 +17,8 @@ "bindings": { "ctrl-i": "agent::ToggleFocus", "ctrl-shift-i": "agent::ToggleFocus", - "ctrl-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode - "ctrl-l": "agent::QuoteSelection", // In cursor uses "Agent" mode + "ctrl-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode + "ctrl-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "ctrl-k": "assistant::InlineAssist", "ctrl-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/linux/emacs.json b/assets/keymaps/linux/emacs.json index 0f936ba2f968abe0759e4bb294271a5e5f501848..c5cf22c81220bf286187252394f8fde26bdd6509 100755 --- a/assets/keymaps/linux/emacs.json +++ b/assets/keymaps/linux/emacs.json @@ -8,11 +8,23 @@ "ctrl-g": "menu::Cancel" } }, + { + // Workaround to avoid falling back to default bindings. + // Unbind so Zed ignores these keys and lets emacs handle them. + // NOTE: must be declared before the `Editor` override. + // NOTE: in macos the 'ctrl-x' 'ctrl-p' and 'ctrl-n' rebindings are not needed, since they default to 'cmd'. 
+ "context": "Editor", + "bindings": { + "ctrl-g": null, // currently activates `go_to_line::Toggle` when there is nothing to cancel + "ctrl-x": null, // currently activates `editor::Cut` if no following key is pressed for 1 second + "ctrl-p": null, // currently activates `file_finder::Toggle` when the cursor is on the first character of the buffer + "ctrl-n": null // currently activates `workspace::NewFile` when the cursor is on the last character of the buffer + } + }, { "context": "Editor", "bindings": { "ctrl-g": "editor::Cancel", - "ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer "alt-g g": "go_to_line::Toggle", // goto-line "alt-g alt-g": "go_to_line::Toggle", // goto-line "ctrl-space": "editor::SetMark", // set-mark @@ -29,8 +41,10 @@ "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line "shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line "alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation - "alt-f": "editor::MoveToNextSubwordEnd", // forward-word - "alt-b": "editor::MoveToPreviousSubwordStart", // backward-word + "alt-left": "editor::MoveToPreviousWordStart", // left-word + "alt-right": "editor::MoveToNextWordEnd", // right-word + "alt-f": "editor::MoveToNextWordEnd", // forward-word + "alt-b": "editor::MoveToPreviousWordStart", // backward-word "alt-u": "editor::ConvertToUpperCase", // upcase-word "alt-l": "editor::ConvertToLowerCase", // downcase-word "alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word @@ -43,6 +57,8 @@ "ctrl-x h": "editor::SelectAll", // mark-whole-buffer "ctrl-d": "editor::Delete", // delete-char "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word + "alt-backspace": "editor::DeleteToPreviousWordStart", // backward-kill-word + "alt-delete": "editor::DeleteToPreviousWordStart", // backward-kill-word "ctrl-k": "editor::KillRingCut", // kill-line "ctrl-w": "editor::Cut", // kill-region "alt-w": "editor::Copy", // kill-ring-save @@ -52,14 +68,19 @@ "ctrl-x u": "editor::Undo", // undo "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph + "ctrl-up": "editor::MoveToStartOfParagraph", // backward-paragraph + "ctrl-down": "editor::MoveToEndOfParagraph", // forward-paragraph "ctrl-v": "editor::MovePageDown", // scroll-up "alt-v": "editor::MovePageUp", // scroll-down "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer "ctrl-x ]": "editor::MoveToEnd", // end-of-buffer "alt-<": "editor::MoveToBeginning", // beginning-of-buffer "alt->": "editor::MoveToEnd", // end-of-buffer + "ctrl-home": "editor::MoveToBeginning", // beginning-of-buffer + "ctrl-end": "editor::MoveToEnd", // end-of-buffer "ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom "ctrl-s": "buffer_search::Deploy", // isearch-forward + "ctrl-r": "buffer_search::Deploy", // isearch-backward "alt-^": "editor::JoinLines", // join-line "alt-q": "editor::Rewrap" // fill-paragraph } @@ -85,10 +106,19 @@ "end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], "ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], + "alt-m": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], "alt-f": "editor::SelectToNextWordEnd", - 
"alt-b": "editor::SelectToPreviousSubwordStart", + "alt-b": "editor::SelectToPreviousWordStart", + "alt-{": "editor::SelectToStartOfParagraph", + "alt-}": "editor::SelectToEndOfParagraph", + "ctrl-up": "editor::SelectToStartOfParagraph", + "ctrl-down": "editor::SelectToEndOfParagraph", + "ctrl-x [": "editor::SelectToBeginning", + "ctrl-x ]": "editor::SelectToEnd", "alt-<": "editor::SelectToBeginning", "alt->": "editor::SelectToEnd", + "ctrl-home": "editor::SelectToBeginning", + "ctrl-end": "editor::SelectToEnd", "ctrl-g": "editor::Cancel" } }, @@ -106,15 +136,28 @@ "ctrl-n": "editor::SignatureHelpNext" } }, + // Example setting for using emacs-style tab + // (i.e. indent the current line / selection or perform symbol completion depending on context) + // { + // "context": "Editor && !showing_code_actions && !showing_completions", + // "bindings": { + // "tab": "editor::AutoIndent" // indent-for-tab-command + // } + // }, { "context": "Workspace", "bindings": { + "alt-x": "command_palette::Toggle", // execute-extended-command + "ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer + "ctrl-x ctrl-b": "tab_switcher::Toggle", // list-buffers + // "ctrl-x ctrl-c": "workspace::CloseWindow" // in case you only want to exit the current Zed instance "ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal "ctrl-x 5 0": "workspace::CloseWindow", // delete-frame "ctrl-x 5 2": "workspace::NewWindow", // make-frame-command "ctrl-x o": "workspace::ActivateNextPane", // other-window "ctrl-x k": "pane::CloseActiveItem", // kill-buffer "ctrl-x 0": "pane::CloseActiveItem", // delete-window + // "ctrl-x 1": "pane::JoinAll", // in case you prefer to delete the splits but keep the buffers open "ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows "ctrl-x 2": "pane::SplitDown", // split-window-below "ctrl-x 3": "pane::SplitRight", // split-window-right @@ -125,10 +168,19 @@ } }, { - // Workaround to enable using emacs in the Zed terminal. + // Workaround to enable using native emacs from the Zed terminal. // Unbind so Zed ignores these keys and lets emacs handle them. + // NOTE: + // "terminal::SendKeystroke" only works for a single key stroke (e.g. ctrl-x), + // so override with null for compound sequences (e.g. ctrl-x ctrl-c). "context": "Terminal", "bindings": { + // If you want to perfect your emacs-in-zed setup, also consider the following. + // You may need to enable "option_as_meta" from the Zed settings for "alt-x" to work. + // "alt-x": ["terminal::SendKeystroke", "alt-x"], + // "ctrl-x": ["terminal::SendKeystroke", "ctrl-x"], + // "ctrl-n": ["terminal::SendKeystroke", "ctrl-n"], + // ... 
"ctrl-x ctrl-c": null, // save-buffers-kill-terminal "ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-s": null, // save-buffer diff --git a/assets/keymaps/linux/sublime_text.json b/assets/keymaps/linux/sublime_text.json index f526db45ff29e0828ce58df6ca9816bd71a4cbe5..eefd59e5bd1aa48125d0c6e3d662f3cb4e270be7 100644 --- a/assets/keymaps/linux/sublime_text.json +++ b/assets/keymaps/linux/sublime_text.json @@ -28,8 +28,8 @@ { "context": "Editor", "bindings": { - "ctrl-alt-up": "editor::AddSelectionAbove", - "ctrl-alt-down": "editor::AddSelectionBelow", + "ctrl-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], + "ctrl-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], "ctrl-shift-up": "editor::MoveLineUp", "ctrl-shift-down": "editor::MoveLineDown", "ctrl-shift-m": "editor::SelectLargerSyntaxNode", diff --git a/assets/keymaps/macos/atom.json b/assets/keymaps/macos/atom.json index df48e51767e54524c6645630d1fcb6b1cdeba599..ca015b667faa05db53d8fdc3bd82352d9bcc62aa 100644 --- a/assets/keymaps/macos/atom.json +++ b/assets/keymaps/macos/atom.json @@ -25,8 +25,8 @@ "cmd-<": "editor::ScrollCursorCenter", "cmd-g": ["editor::SelectNext", { "replace_newest": true }], "cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }], - "ctrl-shift-down": "editor::AddSelectionBelow", - "ctrl-shift-up": "editor::AddSelectionAbove", + "ctrl-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], + "ctrl-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], "alt-enter": "editor::Newline", "cmd-shift-d": "editor::DuplicateLineDown", "ctrl-cmd-up": "editor::MoveLineUp", diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index 1d723bd75bb788aa1ea63335f9fa555cb50d2df0..97abc7dd819485850107eca6762fc1ed60ec0515 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -17,8 +17,8 @@ "bindings": { "cmd-i": "agent::ToggleFocus", "cmd-shift-i": "agent::ToggleFocus", - "cmd-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode - "cmd-l": "agent::QuoteSelection", // In cursor uses "Agent" mode + "cmd-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode + "cmd-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "cmd-k": "assistant::InlineAssist", "cmd-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/macos/emacs.json b/assets/keymaps/macos/emacs.json index 78e2235965335ac2914355fc0c51abe38d390897..ea831c0c059ea082d002f3af01b8d97be9e86616 100755 --- a/assets/keymaps/macos/emacs.json +++ b/assets/keymaps/macos/emacs.json @@ -9,11 +9,19 @@ "ctrl-g": "menu::Cancel" } }, + { + // Workaround to avoid falling back to default bindings. + // Unbind so Zed ignores these keys and lets emacs handle them. + // NOTE: must be declared before the `Editor` override. 
+ "context": "Editor", + "bindings": { + "ctrl-g": null // currently activates `go_to_line::Toggle` when there is nothing to cancel + } + }, { "context": "Editor", "bindings": { "ctrl-g": "editor::Cancel", - "ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer "alt-g g": "go_to_line::Toggle", // goto-line "alt-g alt-g": "go_to_line::Toggle", // goto-line "ctrl-space": "editor::SetMark", // set-mark @@ -30,8 +38,10 @@ "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line "shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line "alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation - "alt-f": "editor::MoveToNextSubwordEnd", // forward-word - "alt-b": "editor::MoveToPreviousSubwordStart", // backward-word + "alt-left": "editor::MoveToPreviousWordStart", // left-word + "alt-right": "editor::MoveToNextWordEnd", // right-word + "alt-f": "editor::MoveToNextWordEnd", // forward-word + "alt-b": "editor::MoveToPreviousWordStart", // backward-word "alt-u": "editor::ConvertToUpperCase", // upcase-word "alt-l": "editor::ConvertToLowerCase", // downcase-word "alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word @@ -44,6 +54,8 @@ "ctrl-x h": "editor::SelectAll", // mark-whole-buffer "ctrl-d": "editor::Delete", // delete-char "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word + "alt-backspace": "editor::DeleteToPreviousWordStart", // backward-kill-word + "alt-delete": "editor::DeleteToPreviousWordStart", // backward-kill-word "ctrl-k": "editor::KillRingCut", // kill-line "ctrl-w": "editor::Cut", // kill-region "alt-w": "editor::Copy", // kill-ring-save @@ -53,14 +65,19 @@ "ctrl-x u": "editor::Undo", // undo "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph + "ctrl-up": "editor::MoveToStartOfParagraph", // backward-paragraph + "ctrl-down": "editor::MoveToEndOfParagraph", // forward-paragraph "ctrl-v": "editor::MovePageDown", // scroll-up "alt-v": "editor::MovePageUp", // scroll-down "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer "ctrl-x ]": "editor::MoveToEnd", // end-of-buffer "alt-<": "editor::MoveToBeginning", // beginning-of-buffer "alt->": "editor::MoveToEnd", // end-of-buffer + "ctrl-home": "editor::MoveToBeginning", // beginning-of-buffer + "ctrl-end": "editor::MoveToEnd", // end-of-buffer "ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom "ctrl-s": "buffer_search::Deploy", // isearch-forward + "ctrl-r": "buffer_search::Deploy", // isearch-backward "alt-^": "editor::JoinLines", // join-line "alt-q": "editor::Rewrap" // fill-paragraph } @@ -86,10 +103,19 @@ "end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], "ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], + "alt-m": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], "alt-f": "editor::SelectToNextWordEnd", - "alt-b": "editor::SelectToPreviousSubwordStart", + "alt-b": "editor::SelectToPreviousWordStart", + "alt-{": "editor::SelectToStartOfParagraph", + "alt-}": "editor::SelectToEndOfParagraph", + "ctrl-up": "editor::SelectToStartOfParagraph", + "ctrl-down": "editor::SelectToEndOfParagraph", + "ctrl-x [": "editor::SelectToBeginning", + "ctrl-x 
]": "editor::SelectToEnd", "alt-<": "editor::SelectToBeginning", "alt->": "editor::SelectToEnd", + "ctrl-home": "editor::SelectToBeginning", + "ctrl-end": "editor::SelectToEnd", "ctrl-g": "editor::Cancel" } }, @@ -107,15 +133,28 @@ "ctrl-n": "editor::SignatureHelpNext" } }, + // Example setting for using emacs-style tab + // (i.e. indent the current line / selection or perform symbol completion depending on context) + // { + // "context": "Editor && !showing_code_actions && !showing_completions", + // "bindings": { + // "tab": "editor::AutoIndent" // indent-for-tab-command + // } + // }, { "context": "Workspace", "bindings": { + "alt-x": "command_palette::Toggle", // execute-extended-command + "ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer + "ctrl-x ctrl-b": "tab_switcher::Toggle", // list-buffers + // "ctrl-x ctrl-c": "workspace::CloseWindow" // in case you only want to exit the current Zed instance "ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal "ctrl-x 5 0": "workspace::CloseWindow", // delete-frame "ctrl-x 5 2": "workspace::NewWindow", // make-frame-command "ctrl-x o": "workspace::ActivateNextPane", // other-window "ctrl-x k": "pane::CloseActiveItem", // kill-buffer "ctrl-x 0": "pane::CloseActiveItem", // delete-window + // "ctrl-x 1": "pane::JoinAll", // in case you prefer to delete the splits but keep the buffers open "ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows "ctrl-x 2": "pane::SplitDown", // split-window-below "ctrl-x 3": "pane::SplitRight", // split-window-right @@ -126,10 +165,19 @@ } }, { - // Workaround to enable using emacs in the Zed terminal. + // Workaround to enable using native emacs from the Zed terminal. // Unbind so Zed ignores these keys and lets emacs handle them. + // NOTE: + // "terminal::SendKeystroke" only works for a single key stroke (e.g. ctrl-x), + // so override with null for compound sequences (e.g. ctrl-x ctrl-c). "context": "Terminal", "bindings": { + // If you want to perfect your emacs-in-zed setup, also consider the following. + // You may need to enable "option_as_meta" from the Zed settings for "alt-x" to work. + // "alt-x": ["terminal::SendKeystroke", "alt-x"], + // "ctrl-x": ["terminal::SendKeystroke", "ctrl-x"], + // "ctrl-n": ["terminal::SendKeystroke", "ctrl-n"], + // ... 
"ctrl-x ctrl-c": null, // save-buffers-kill-terminal "ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-s": null, // save-buffer diff --git a/assets/keymaps/macos/sublime_text.json b/assets/keymaps/macos/sublime_text.json index a1e61bf8859e2e4ea227ed3dbe22ec29eb35a149..d1bffca755b611d9046d4b7e794d2303835227a2 100644 --- a/assets/keymaps/macos/sublime_text.json +++ b/assets/keymaps/macos/sublime_text.json @@ -28,8 +28,8 @@ { "context": "Editor", "bindings": { - "ctrl-shift-up": "editor::AddSelectionAbove", - "ctrl-shift-down": "editor::AddSelectionBelow", + "ctrl-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], + "ctrl-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], "cmd-ctrl-up": "editor::MoveLineUp", "cmd-ctrl-down": "editor::MoveLineDown", "cmd-shift-space": "editor::SelectAll", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index c90b439c6abb60f4e3d826c171d7e2491fce1d90..da7491a0070cc74d8329d9bae65d445896b77386 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -95,8 +95,6 @@ "g g": "vim::StartOfDocument", "g h": "editor::Hover", "g B": "editor::BlameHover", - "g t": "vim::GoToTab", - "g shift-t": "vim::GoToPreviousTab", "g d": "editor::GoToDefinition", "g shift-d": "editor::GoToDeclaration", "g y": "editor::GoToTypeDefinition", @@ -424,56 +422,66 @@ { "context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu", "bindings": { - ";": "vim::HelixCollapseSelection", - ":": "command_palette::Toggle", - "m": "vim::PushHelixMatch", - "s": "vim::HelixSelectRegex", - "]": ["vim::PushHelixNext", { "around": true }], - "[": ["vim::PushHelixPrevious", { "around": true }], - "left": "vim::WrappingLeft", - "right": "vim::WrappingRight", + // Movement "h": "vim::WrappingLeft", + "left": "vim::WrappingLeft", "l": "vim::WrappingRight", - "y": "vim::HelixYank", - "p": "vim::HelixPaste", - "shift-p": ["vim::HelixPaste", { "before": true }], - "alt-;": "vim::OtherEnd", - "ctrl-r": "vim::Redo", - "f": ["vim::PushFindForward", { "before": false, "multiline": true }], + "right": "vim::WrappingRight", "t": ["vim::PushFindForward", { "before": true, "multiline": true }], - "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }], + "f": ["vim::PushFindForward", { "before": false, "multiline": true }], "shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }], - ">": "vim::Indent", - "<": "vim::Outdent", - "=": "vim::AutoIndent", + "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }], + "alt-.": "vim::RepeatFind", + + // Changes + "shift-r": "editor::Paste", "`": "vim::ConvertToLowerCase", "alt-`": "vim::ConvertToUpperCase", - "g q": "vim::PushRewrap", - "g w": "vim::PushRewrap", "insert": "vim::InsertBefore", - "alt-.": "vim::RepeatFind", + "shift-u": "editor::Redo", + "ctrl-r": "vim::Redo", + "y": "vim::HelixYank", + "p": "vim::HelixPaste", + "shift-p": ["vim::HelixPaste", { "before": true }], + ">": "vim::Indent", + "<": "vim::Outdent", + "=": "vim::AutoIndent", + "d": "vim::HelixDelete", + "c": "vim::HelixSubstitute", + "alt-c": "vim::HelixSubstituteNoYank", + + // Selection manipulation + "s": "vim::HelixSelectRegex", "alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }], + ";": "vim::HelixCollapseSelection", + "alt-;": "vim::OtherEnd", + ",": "vim::HelixKeepNewestSelection", + "shift-c": "vim::HelixDuplicateBelow", + "alt-shift-c": "vim::HelixDuplicateAbove", + "%": "editor::SelectAll", + "x": "vim::HelixSelectLine", + "shift-x": 
"editor::SelectLine", + "ctrl-c": "editor::ToggleComments", + "alt-o": "editor::SelectLargerSyntaxNode", + "alt-i": "editor::SelectSmallerSyntaxNode", + "alt-p": "editor::SelectPreviousSyntaxNode", + "alt-n": "editor::SelectNextSyntaxNode", + // Goto mode - "g n": "pane::ActivateNextItem", - "g p": "pane::ActivatePreviousItem", - // "tab": "pane::ActivateNextItem", - // "shift-tab": "pane::ActivatePrevItem", - "shift-h": "pane::ActivatePreviousItem", - "shift-l": "pane::ActivateNextItem", - "g l": "vim::EndOfLine", + "g e": "vim::EndOfDocument", "g h": "vim::StartOfLine", + "g l": "vim::EndOfLine", "g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s" - "g e": "vim::EndOfDocument", - "g .": "vim::HelixGotoLastModification", // go to last modification - "g r": "editor::FindAllReferences", // zed specific "g t": "vim::WindowTop", "g c": "vim::WindowMiddle", "g b": "vim::WindowBottom", - - "shift-r": "editor::Paste", - "x": "vim::HelixSelectLine", - "shift-x": "editor::SelectLine", - "%": "editor::SelectAll", + "g r": "editor::FindAllReferences", // zed specific + "g n": "pane::ActivateNextItem", + "shift-l": "pane::ActivateNextItem", + "g p": "pane::ActivatePreviousItem", + "shift-h": "pane::ActivatePreviousItem", + "g .": "vim::HelixGotoLastModification", // go to last modification + // Window mode "space w h": "workspace::ActivatePaneLeft", "space w l": "workspace::ActivatePaneRight", @@ -484,6 +492,7 @@ "space w r": "pane::SplitRight", "space w v": "pane::SplitDown", "space w d": "pane::SplitDown", + // Space mode "space f": "file_finder::Toggle", "space k": "editor::Hover", @@ -494,14 +503,18 @@ "space a": "editor::ToggleCodeActions", "space h": "editor::SelectAllMatches", "space c": "editor::ToggleComments", - "space y": "editor::Copy", "space p": "editor::Paste", - "shift-u": "editor::Redo", - "ctrl-c": "editor::ToggleComments", - "d": "vim::HelixDelete", - "c": "vim::Substitute", - "shift-c": "editor::AddSelectionBelow", - "alt-shift-c": "editor::AddSelectionAbove" + "space y": "editor::Copy", + + // Other + ":": "command_palette::Toggle", + "m": "vim::PushHelixMatch", + "]": ["vim::PushHelixNext", { "around": true }], + "[": ["vim::PushHelixPrevious", { "around": true }], + "g q": "vim::PushRewrap", + "g w": "vim::PushRewrap", + // "tab": "pane::ActivateNextItem", + // "shift-tab": "pane::ActivatePrevItem", } }, { @@ -811,7 +824,7 @@ } }, { - "context": "VimControl || !Editor && !Terminal", + "context": "VimControl && !menu || !Editor && !Terminal", "bindings": { // window related commands (ctrl-w X) "ctrl-w": null, @@ -831,10 +844,10 @@ "ctrl-w shift-right": "workspace::SwapPaneRight", "ctrl-w shift-up": "workspace::SwapPaneUp", "ctrl-w shift-down": "workspace::SwapPaneDown", - "ctrl-w shift-h": "workspace::SwapPaneLeft", - "ctrl-w shift-l": "workspace::SwapPaneRight", - "ctrl-w shift-k": "workspace::SwapPaneUp", - "ctrl-w shift-j": "workspace::SwapPaneDown", + "ctrl-w shift-h": "workspace::MovePaneLeft", + "ctrl-w shift-l": "workspace::MovePaneRight", + "ctrl-w shift-k": "workspace::MovePaneUp", + "ctrl-w shift-j": "workspace::MovePaneDown", "ctrl-w >": "vim::ResizePaneRight", "ctrl-w <": "vim::ResizePaneLeft", "ctrl-w -": "vim::ResizePaneDown", @@ -865,7 +878,9 @@ "ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes", "ctrl-w o": "workspace::CloseInactiveTabsAndPanes", "ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal", - "ctrl-w n": "workspace::NewFileSplitHorizontal" + "ctrl-w n": "workspace::NewFileSplitHorizontal", + "g t": "vim::GoToTab", + "g shift-t": 
"vim::GoToPreviousTab" } }, { @@ -968,7 +983,9 @@ "bindings": { "ctrl-h": "editor::Backspace", "ctrl-u": "editor::DeleteToBeginningOfLine", - "ctrl-w": "editor::DeleteToPreviousWordStart" + "ctrl-w": "editor::DeleteToPreviousWordStart", + "ctrl-p": "menu::SelectPrevious", + "ctrl-n": "menu::SelectNext" } }, { diff --git a/assets/settings/default.json b/assets/settings/default.json index 101b53c4a74e507cc8ff2853513cb6ea2f109b6a..10aa98498b09d4cbcf4f231393df3e9203a0512a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1,8 +1,8 @@ { "$schema": "zed://schemas/settings", - /// The displayed name of this project. If not set or empty, the root directory name + /// The displayed name of this project. If not set or null, the root directory name /// will be displayed. - "project_name": "", + "project_name": null, // The name of the Zed theme to use for the UI. // // `mode` is one of: @@ -311,11 +311,11 @@ "use_on_type_format": true, // Whether to automatically add matching closing characters when typing // opening parenthesis, bracket, brace, single or double quote characters. - // For example, when you type (, Zed will add a closing ) at the correct position. + // For example, when you type '(', Zed will add a closing ) at the correct position. "use_autoclose": true, // Whether to automatically surround selected text when typing opening parenthesis, // bracket, brace, single or double quote characters. - // For example, when you select text and type (, Zed will surround the text with (). + // For example, when you select text and type '(', Zed will surround the text with (). "use_auto_surround": true, // Whether indentation should be adjusted based on the context whilst typing. "auto_indent": true, @@ -722,7 +722,11 @@ // Whether to enable drag-and-drop operations in the project panel. "drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window. - "hide_root": false + "hide_root": false, + // Whether to hide the hidden entries in the project panel. + "hide_hidden": false, + // Whether to automatically open files when pasting them in the project panel. + "open_file_on_paste": true }, "outline_panel": { // Whether to show the outline panel button in the status bar @@ -880,8 +884,6 @@ // Note: This setting has no effect on external agents that support permission modes, such as Claude Code. // You can set `agent_servers.claude.default_mode` to `bypassPermissions` to skip all permission requests. "always_allow_tool_actions": false, - // When enabled, the agent will stream edits. - "stream_edits": false, // When enabled, agent edits will be displayed in single-file editors for review "single_file_review": true, // When enabled, show voting thumbs for feedback on agent edits. @@ -904,6 +906,7 @@ "now": true, "find_path": true, "read_file": true, + "open": true, "grep": true, "terminal": true, "thinking": true, @@ -915,7 +918,6 @@ // We don't know which of the context server tools are safe for the "Ask" profile, so we don't enable them by default. // "enable_all_context_servers": true, "tools": { - "contents": true, "diagnostics": true, "fetch": true, "list_directory": true, @@ -1089,10 +1091,10 @@ // Only the file Zed had indexed will be used, not necessary all the gitignored files. 
// // Can accept 3 values: - // * `true`: Use all gitignored files - // * `false`: Use only the files Zed had indexed - // * `null`: Be smart and search for ignored when called from a gitignored worktree - "include_ignored": null + // * "all": Use all gitignored files + // * "indexed": Use only the files Zed had indexed + // * "smart": Be smart and search for ignored when called from a gitignored worktree + "include_ignored": "smart" }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. @@ -1105,22 +1107,28 @@ // Whether or not to perform a buffer format before saving: [on, off] // Keep in mind, if the autosave with delay is enabled, format_on_save will be ignored "format_on_save": "on", - // How to perform a buffer format. This setting can take 4 values: + // How to perform a buffer format. This setting can take multiple values: // - // 1. Format code using the current language server: + // 1. Default. Format files using Zed's Prettier integration (if applicable), + // or falling back to formatting via language server: + // "formatter": "auto" + // 2. Format code using the current language server: // "formatter": "language_server" - // 2. Format code using an external command: + // 3. Format code using a specific language server: + // "formatter": {"language_server": {"name": "ruff"}} + // 4. Format code using an external command: // "formatter": { // "external": { // "command": "prettier", // "arguments": ["--stdin-filepath", "{buffer_path}"] // } // } - // 3. Format code using Zed's Prettier integration: + // 5. Format code using Zed's Prettier integration: // "formatter": "prettier" - // 4. Default. Format files using Zed's Prettier integration (if applicable), - // or falling back to formatting via language server: - // "formatter": "auto" + // 6. Format code using a code action + // "formatter": {"code_action": "source.fixAll.eslint"} + // 7. An array of any format step specified above to apply in order + // "formatter": [{"code_action": "source.fixAll.eslint"}, "prettier"] "formatter": "auto", // How to soft-wrap long lines of text. // Possible values: @@ -1342,7 +1350,9 @@ // Whether to show the active language button in the status bar. "active_language_button": true, // Whether to show the cursor position button in the status bar. - "cursor_position_button": true + "cursor_position_button": true, + // Whether to show active line endings button in the status bar. + "line_endings_button": false }, // Settings specific to the terminal "terminal": { @@ -1519,6 +1529,7 @@ // A value of 45 preserves colorful themes while ensuring legibility. "minimum_contrast": 45 }, + "code_actions_on_format": {}, // Settings related to running tasks. 
"tasks": { "variables": {}, @@ -1548,6 +1559,7 @@ // "file_types": { "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"], + "Markdown": [".rules", ".cursorrules", ".windsurfrules", ".clinerules"], "Shell Script": [".env.*"] }, // Settings for which version of Node.js and NPM to use when installing @@ -1688,7 +1700,9 @@ "preferred_line_length": 72 }, "Go": { - "formatter": [{ "code_action": "source.organizeImports" }, { "language_server": {} }], + "code_actions_on_format": { + "source.organizeImports": true + }, "debuggers": ["Delve"] }, "GraphQL": { @@ -1727,7 +1741,7 @@ } }, "Kotlin": { - "language_servers": ["kotlin-language-server", "!kotlin-lsp", "..."] + "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."] }, "LaTeX": { "formatter": "language_server", @@ -1758,12 +1772,16 @@ "allow_rewrap": "anywhere" }, "Python": { + "code_actions_on_format": { + "source.organizeImports.ruff": true + }, "formatter": { "language_server": { "name": "ruff" } }, - "debuggers": ["Debugpy"] + "debuggers": ["Debugpy"], + "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."] }, "Ruby": { "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."] @@ -1805,10 +1823,11 @@ }, "SystemVerilog": { "format_on_save": "off", + "language_servers": ["!slang", "..."], "use_on_type_format": false }, "Vue.js": { - "language_servers": ["vue-language-server", "..."], + "language_servers": ["vue-language-server", "vtsls", "..."], "prettier": { "allowed": true } @@ -1912,6 +1931,11 @@ // DAP Specific settings. "dap": { // Specify the DAP name as a key here. + "CodeLLDB": { + "env": { + "RUST_LOG": "info" + } + } }, // Common language server settings. 
"global_lsp_settings": { diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 4e6f8334b269e3e5090b0f91d995834906c09083..402d190b34bb3c730e01b9817d815da53cff288d 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -49,8 +49,9 @@ "panel.background": "#3a3735ff", "panel.focused_border": "#83a598ff", "pane.focused_border": null, - "scrollbar.thumb.background": "#fbf1c74c", - "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.active_background": "#83a598ac", + "scrollbar.thumb.hover_background": "#fbf1c74c", + "scrollbar.thumb.background": "#a899844c", "scrollbar.thumb.border": "#494340ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#373432ff", @@ -454,8 +455,9 @@ "panel.background": "#393634ff", "panel.focused_border": "#83a598ff", "pane.focused_border": null, - "scrollbar.thumb.background": "#fbf1c74c", - "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.active_background": "#83a598ac", + "scrollbar.thumb.hover_background": "#fbf1c74c", + "scrollbar.thumb.background": "#a899844c", "scrollbar.thumb.border": "#494340ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#343130ff", @@ -859,8 +861,9 @@ "panel.background": "#3b3735ff", "panel.focused_border": null, "pane.focused_border": null, - "scrollbar.thumb.background": "#fbf1c74c", - "scrollbar.thumb.hover_background": "#494340ff", + "scrollbar.thumb.active_background": "#83a598ac", + "scrollbar.thumb.hover_background": "#fbf1c74c", + "scrollbar.thumb.background": "#a899844c", "scrollbar.thumb.border": "#494340ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#393634ff", @@ -1264,8 +1267,9 @@ "panel.background": "#ecddb4ff", "panel.focused_border": null, "pane.focused_border": null, - "scrollbar.thumb.background": "#2828284c", - "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.active_background": "#458588ac", + "scrollbar.thumb.hover_background": "#2828284c", + "scrollbar.thumb.background": "#7c6f644c", "scrollbar.thumb.border": "#ddcca7ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#eee0b7ff", @@ -1669,8 +1673,9 @@ "panel.background": "#ecddb5ff", "panel.focused_border": null, "pane.focused_border": null, - "scrollbar.thumb.background": "#2828284c", - "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.active_background": "#458588ac", + "scrollbar.thumb.hover_background": "#2828284c", + "scrollbar.thumb.background": "#7c6f644c", "scrollbar.thumb.border": "#ddcca7ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#eee1bbff", @@ -2074,8 +2079,9 @@ "panel.background": "#ecdcb3ff", "panel.focused_border": null, "pane.focused_border": null, - "scrollbar.thumb.background": "#2828284c", - "scrollbar.thumb.hover_background": "#ddcca7ff", + "scrollbar.thumb.active_background": "#458588ac", + "scrollbar.thumb.hover_background": "#2828284c", + "scrollbar.thumb.background": "#7c6f644c", "scrollbar.thumb.border": "#ddcca7ff", "scrollbar.track.background": "#00000000", "scrollbar.track.border": "#eddeb5ff", diff --git a/clippy.toml b/clippy.toml index 57f6f59385a4885730015c3d09f040a5f340d379..4e9f2de8585e74afe76840c59306ad8ed87fd947 100644 --- a/clippy.toml +++ b/clippy.toml @@ -3,12 +3,14 @@ avoid-breaking-exported-api = false ignore-interior-mutability = [ # Suppresses clippy::mutable_key_type, which is a false positive as the Eq # and Hash impls do not use fields with interior 
mutability. - "agent::context::AgentContextKey" + "agent_ui::context::AgentContextKey" ] disallowed-methods = [ { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, + { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." }, + { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." }, ] disallowed-types = [ # { path = "std::collections::HashMap", replacement = "collections::HashMap" }, diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index ac24a6ed0f41c75d5c4dcd9b9b4122336022ddf3..09202dc57cb96f5f258e64063f5d61169fa7a045 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -45,7 +45,6 @@ url.workspace = true util.workspace = true uuid.workspace = true watch.workspace = true -workspace-hack.workspace = true [dev-dependencies] env_logger.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 61486a475c4601a9d9201d3a6920c63566a1ba36..4d8c57dd8f5a97aabc5cf3dc9e8d5aae9d6c8f2f 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -328,7 +328,7 @@ impl ToolCall { location: acp::ToolCallLocation, project: WeakEntity, cx: &mut AsyncApp, - ) -> Option { + ) -> Option { let buffer = project .update(cx, |project, cx| { project @@ -350,17 +350,14 @@ impl ToolCall { }) .ok()?; - Some(AgentLocation { - buffer: buffer.downgrade(), - position, - }) + Some(ResolvedLocation { buffer, position }) } fn resolve_locations( &self, project: Entity, cx: &mut App, - ) -> Task>> { + ) -> Task>> { let locations = self.locations.clone(); project.update(cx, |_, cx| { cx.spawn(async move |project, cx| { @@ -374,6 +371,23 @@ impl ToolCall { } } +// Separate so we can hold a strong reference to the buffer +// for saving on the thread +#[derive(Clone, Debug, PartialEq, Eq)] +struct ResolvedLocation { + buffer: Entity, + position: Anchor, +} + +impl From<&ResolvedLocation> for AgentLocation { + fn from(value: &ResolvedLocation) -> Self { + Self { + buffer: value.buffer.downgrade(), + position: value.position, + } + } +} + #[derive(Debug)] pub enum ToolCallStatus { /// The tool call hasn't started running yet, but we start showing it to @@ -1393,35 +1407,46 @@ impl AcpThread { let task = tool_call.resolve_locations(project, cx); cx.spawn(async move |this, cx| { let resolved_locations = task.await; + this.update(cx, |this, cx| { let project = this.project.clone(); + + for location in resolved_locations.iter().flatten() { + this.shared_buffers + .insert(location.buffer.clone(), location.buffer.read(cx).snapshot()); + } let Some((ix, tool_call)) = this.tool_call_mut(&id) else { return; }; + if let 
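The new `ResolvedLocation` type introduced above exists so the thread can keep the buffer alive (and snapshot it for saving) while still publishing the weak-handle `AgentLocation` to the rest of the app. A minimal stand-in sketch of that strong-vs-weak split, using `Rc`/`Weak` in place of gpui's `Entity`/`WeakEntity` and a `usize` offset in place of `Anchor` (names here are illustrative, not Zed APIs):

    use std::rc::{Rc, Weak};

    // Stand-in for the buffer entity; in the real code this is Entity<language::Buffer>.
    struct Buffer;

    // Strong handle: keeps the buffer alive while the thread still needs it.
    struct ResolvedLocation {
        buffer: Rc<Buffer>,
        position: usize,
    }

    // Weak handle: consumers must upgrade before use, so they never extend the
    // buffer's lifetime on their own.
    struct AgentLocation {
        buffer: Weak<Buffer>,
        position: usize,
    }

    impl From<&ResolvedLocation> for AgentLocation {
        fn from(value: &ResolvedLocation) -> Self {
            Self {
                buffer: Rc::downgrade(&value.buffer),
                position: value.position,
            }
        }
    }

    fn main() {
        let resolved = ResolvedLocation { buffer: Rc::new(Buffer), position: 42 };
        let agent: AgentLocation = (&resolved).into();
        // The weak handle upgrades as long as a strong handle is still held.
        assert!(agent.buffer.upgrade().is_some());
        assert_eq!(agent.position, 42);
    }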
Some(Some(location)) = resolved_locations.last() { project.update(cx, |project, cx| { - if let Some(agent_location) = project.agent_location() { - let should_ignore = agent_location.buffer == location.buffer - && location - .buffer - .update(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - let old_position = - agent_location.position.to_point(&snapshot); - let new_position = location.position.to_point(&snapshot); - // ignore this so that when we get updates from the edit tool - // the position doesn't reset to the startof line - old_position.row == new_position.row - && old_position.column > new_position.column - }) - .ok() - .unwrap_or_default(); - if !should_ignore { - project.set_agent_location(Some(location.clone()), cx); - } + let should_ignore = if let Some(agent_location) = project + .agent_location() + .filter(|agent_location| agent_location.buffer == location.buffer) + { + let snapshot = location.buffer.read(cx).snapshot(); + let old_position = agent_location.position.to_point(&snapshot); + let new_position = location.position.to_point(&snapshot); + + // ignore this so that when we get updates from the edit tool + // the position doesn't reset to the startof line + old_position.row == new_position.row + && old_position.column > new_position.column + } else { + false + }; + if !should_ignore { + project.set_agent_location(Some(location.into()), cx); } }); } + + let resolved_locations = resolved_locations + .iter() + .map(|l| l.as_ref().map(|l| AgentLocation::from(l))) + .collect::>(); + if tool_call.resolved_locations != resolved_locations { tool_call.resolved_locations = resolved_locations; cx.emit(AcpThreadEvent::EntryUpdated(ix)); @@ -2112,6 +2137,7 @@ impl AcpThread { let project = self.project.clone(); let language_registry = project.read(cx).languages().clone(); + let is_windows = project.read(cx).path_style(cx).is_windows(); let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into()); let terminal_task = cx.spawn({ @@ -2125,9 +2151,10 @@ impl AcpThread { .and_then(|r| r.read(cx).default_system_shell()) })? 
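The reworked `should_ignore` check above keeps the agent cursor from snapping back to the start of the line being edited: a location update is ignored only when it stays on the same row but moves the column backwards. A dependency-free sketch of just that rule, assuming positions have already been resolved to plain `(row, column)` pairs rather than `Anchor`s against a snapshot:

    /// True when the new agent location should be ignored: same row as the old
    /// one, but the column moves backwards (which is what the edit tool reports
    /// while it works through a line).
    fn should_ignore_location(old: (u32, u32), new: (u32, u32)) -> bool {
        let (old_row, old_col) = old;
        let (new_row, new_col) = new;
        old_row == new_row && old_col > new_col
    }

    fn main() {
        // Same row, column going backwards: keep the existing cursor position.
        assert!(should_ignore_location((3, 12), (3, 0)));
        // A new row or forward movement is accepted as the new location.
        assert!(!should_ignore_location((3, 12), (4, 0)));
        assert!(!should_ignore_location((3, 12), (3, 20)));
    }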
.unwrap_or_else(|| get_default_system_shell_preferring_bash()); - let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell)) - .redirect_stdin_to_dev_null() - .build(Some(command.clone()), &args); + let (task_command, task_args) = + ShellBuilder::new(&Shell::Program(shell), is_windows) + .redirect_stdin_to_dev_null() + .build(Some(command.clone()), &args); let terminal = project .update(cx, |project, cx| { project.create_terminal_task( diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 15de12af27fe233bad4ad8ebb2893ffa5fbdd598..055b2f7fb86ffe9d7f12459b6b16405ce77815a0 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -236,21 +236,21 @@ impl PendingDiff { fn finalize(&self, cx: &mut Context) -> FinalizedDiff { let ranges = self.excerpt_ranges(cx); let base_text = self.base_text.clone(); - let language_registry = self.new_buffer.read(cx).language_registry(); + let new_buffer = self.new_buffer.read(cx); + let language_registry = new_buffer.language_registry(); - let path = self - .new_buffer - .read(cx) + let path = new_buffer .file() .map(|file| file.path().display(file.path_style(cx))) .unwrap_or("untitled".into()) .into(); + let replica_id = new_buffer.replica_id(); // Replace the buffer in the multibuffer with the snapshot let buffer = cx.new(|cx| { let language = self.new_buffer.read(cx).language().cloned(); let buffer = TextBuffer::new_normalized( - 0, + replica_id, cx.entity_id().as_non_zero_u64().into(), self.new_buffer.read(cx).line_ending(), self.new_buffer.read(cx).as_rope().clone(), diff --git a/crates/acp_thread/src/terminal.rs b/crates/acp_thread/src/terminal.rs index 888c7698c3d2270769f3afbe712ecba7d08b055f..9ca6d4021b316231930ab7803957dab3a0139f1e 100644 --- a/crates/acp_thread/src/terminal.rs +++ b/crates/acp_thread/src/terminal.rs @@ -1,10 +1,15 @@ use agent_client_protocol as acp; - +use anyhow::Result; use futures::{FutureExt as _, future::Shared}; -use gpui::{App, AppContext, Context, Entity, Task}; +use gpui::{App, AppContext, AsyncApp, Context, Entity, Task}; use language::LanguageRegistry; use markdown::Markdown; +use project::Project; +use settings::{Settings as _, SettingsLocation}; use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant}; +use task::Shell; +use terminal::terminal_settings::TerminalSettings; +use util::get_default_system_shell_preferring_bash; pub struct Terminal { id: acp::TerminalId, @@ -170,3 +175,68 @@ impl Terminal { ) } } + +pub async fn create_terminal_entity( + command: String, + args: &[String], + env_vars: Vec<(String, String)>, + cwd: Option, + project: &Entity, + cx: &mut AsyncApp, +) -> Result> { + let mut env = if let Some(dir) = &cwd { + project + .update(cx, |project, cx| { + let worktree = project.find_worktree(dir.as_path(), cx); + let shell = TerminalSettings::get( + worktree.as_ref().map(|(worktree, path)| SettingsLocation { + worktree_id: worktree.read(cx).id(), + path: &path, + }), + cx, + ) + .shell + .clone(); + project.directory_environment(&shell, dir.clone().into(), cx) + })? + .await + .unwrap_or_default() + } else { + Default::default() + }; + + // Disables paging for `git` and hopefully other commands + env.insert("PAGER".into(), "".into()); + env.extend(env_vars); + + // Use remote shell or default system shell, as appropriate + let shell = project + .update(cx, |project, cx| { + project + .remote_client() + .and_then(|r| r.read(cx).default_system_shell()) + .map(Shell::Program) + })? 
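The environment assembled in `create_terminal_entity` above starts from the (possibly direnv-derived) directory environment, force-disables paging so `git` output doesn't block the terminal, and then lets caller-supplied variables override everything else. A reduced sketch over plain `HashMap`s, skipping the worktree and shell lookups (the extra variable in `main` is just an example):

    use std::collections::HashMap;

    // Base environment in, terminal-ready environment out: disable paging,
    // then apply caller-provided overrides last so they win.
    fn terminal_env(
        base: HashMap<String, String>,
        extra: Vec<(String, String)>,
    ) -> HashMap<String, String> {
        let mut env = base;
        // Disables paging for `git` and hopefully other commands.
        env.insert("PAGER".into(), "".into());
        env.extend(extra);
        env
    }

    fn main() {
        let env = terminal_env(HashMap::new(), vec![("GIT_EDITOR".into(), "true".into())]);
        assert_eq!(env.get("PAGER").map(String::as_str), Some(""));
        assert_eq!(env.get("GIT_EDITOR").map(String::as_str), Some("true"));
    }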
+ .unwrap_or_else(|| Shell::Program(get_default_system_shell_preferring_bash())); + let is_windows = project + .read_with(cx, |project, cx| project.path_style(cx).is_windows()) + .unwrap_or(cfg!(windows)); + let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows) + .redirect_stdin_to_dev_null() + .build(Some(command.clone()), &args); + + project + .update(cx, |project, cx| { + project.create_terminal_task( + task::SpawnInTerminal { + command: Some(task_command), + args: task_args, + cwd, + env, + ..Default::default() + }, + cx, + ) + })? + .await +} diff --git a/crates/acp_tools/Cargo.toml b/crates/acp_tools/Cargo.toml index 7a6d8c21a096364a8468671f4186048559ec8a61..0720c4b6685ecf7fa20d8cacd2b61baa765c961c 100644 --- a/crates/acp_tools/Cargo.toml +++ b/crates/acp_tools/Cargo.toml @@ -26,5 +26,4 @@ settings.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index e20a040e9da70a40066f3e5534171818de34a936..69722815306e412745a62832115d2f010b2b8607 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -4,22 +4,26 @@ use std::{ fmt::Display, rc::{Rc, Weak}, sync::Arc, + time::Duration, }; use agent_client_protocol as acp; use collections::HashMap; use gpui::{ - App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState, - StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*, + App, ClipboardItem, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, + ListState, StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, + prelude::*, }; use language::LanguageRegistry; use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; use project::Project; use settings::Settings; use theme::ThemeSettings; -use ui::prelude::*; +use ui::{Tooltip, prelude::*}; use util::ResultExt as _; -use workspace::{Item, Workspace}; +use workspace::{ + Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, +}; actions!(dev, [OpenAcpLogs]); @@ -89,8 +93,8 @@ struct WatchedConnection { messages: Vec, list_state: ListState, connection: Weak, - incoming_request_methods: HashMap>, - outgoing_request_methods: HashMap>, + incoming_request_methods: HashMap>, + outgoing_request_methods: HashMap>, _task: Task<()>, } @@ -171,7 +175,7 @@ impl AcpTools { } }; - method_map.insert(id, method.clone()); + method_map.insert(id.clone(), method.clone()); (Some(id), method.into(), MessageType::Request, Ok(params)) } acp::StreamMessageContent::Response { id, result } => { @@ -227,6 +231,34 @@ impl AcpTools { cx.notify(); } + fn serialize_observed_messages(&self) -> Option { + let connection = self.watched_connection.as_ref()?; + + let messages: Vec = connection + .messages + .iter() + .filter_map(|message| { + let params = match &message.params { + Ok(Some(params)) => params.clone(), + Ok(None) => serde_json::Value::Null, + Err(err) => serde_json::to_value(err).ok()?, + }; + Some(serde_json::json!({ + "_direction": match message.direction { + acp::StreamMessageDirection::Incoming => "incoming", + acp::StreamMessageDirection::Outgoing => "outgoing", + }, + "_type": message.message_type.to_string().to_lowercase(), + "id": message.request_id, + "method": message.name.to_string(), + "params": params, + })) + }) + .collect(); + + 
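For reference, the per-message JSON shape assembled in `serialize_observed_messages`: underscore-prefixed direction/type metadata alongside the raw id, method, and params. A standalone sketch assuming a `serde_json` dependency; the id type and the method/params values below are simplified placeholders, not taken from a real session:

    use serde_json::json;

    // Mirrors the json!({...}) shape used when exporting observed ACP messages.
    fn observed_message_json(
        direction: &str,    // "incoming" | "outgoing"
        message_type: &str, // e.g. "request", "response", "notification"
        id: Option<u64>,    // simplified; the real field carries the ACP request id
        method: &str,
        params: serde_json::Value,
    ) -> serde_json::Value {
        json!({
            "_direction": direction,
            "_type": message_type,
            "id": id,
            "method": method,
            "params": params,
        })
    }

    fn main() {
        let msg = observed_message_json("outgoing", "request", Some(1), "example/method", json!({}));
        println!("{}", serde_json::to_string_pretty(&msg).unwrap());
    }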
serde_json::to_string_pretty(&messages).ok() + } + fn render_message( &mut self, index: usize, @@ -306,6 +338,7 @@ impl AcpTools { .children( message .request_id + .as_ref() .map(|req_id| div().child(ui::Chip::new(req_id.to_string()))), ), ) @@ -357,7 +390,7 @@ impl AcpTools { struct WatchedConnectionMessage { name: SharedString, - request_id: Option, + request_id: Option, direction: acp::StreamMessageDirection, message_type: MessageType, params: Result, acp::Error>, @@ -492,3 +525,92 @@ impl Render for AcpTools { }) } } + +pub struct AcpToolsToolbarItemView { + acp_tools: Option>, + just_copied: bool, +} + +impl AcpToolsToolbarItemView { + pub fn new() -> Self { + Self { + acp_tools: None, + just_copied: false, + } + } +} + +impl Render for AcpToolsToolbarItemView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let Some(acp_tools) = self.acp_tools.as_ref() else { + return Empty.into_any_element(); + }; + + let acp_tools = acp_tools.clone(); + + h_flex() + .gap_2() + .child( + IconButton::new( + "copy_all_messages", + if self.just_copied { + IconName::Check + } else { + IconName::Copy + }, + ) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text(if self.just_copied { + "Copied!" + } else { + "Copy All Messages" + })) + .disabled( + acp_tools + .read(cx) + .watched_connection + .as_ref() + .is_none_or(|connection| connection.messages.is_empty()), + ) + .on_click(cx.listener(move |this, _, _window, cx| { + if let Some(content) = acp_tools.read(cx).serialize_observed_messages() { + cx.write_to_clipboard(ClipboardItem::new_string(content)); + + this.just_copied = true; + cx.spawn(async move |this, cx| { + cx.background_executor().timer(Duration::from_secs(2)).await; + this.update(cx, |this, cx| { + this.just_copied = false; + cx.notify(); + }) + }) + .detach(); + } + })), + ) + .into_any() + } +} + +impl EventEmitter for AcpToolsToolbarItemView {} + +impl ToolbarItemView for AcpToolsToolbarItemView { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _window: &mut Window, + cx: &mut Context, + ) -> ToolbarItemLocation { + if let Some(item) = active_pane_item + && let Some(acp_tools) = item.downcast::() + { + self.acp_tools = Some(acp_tools); + cx.notify(); + return ToolbarItemLocation::PrimaryRight; + } + if self.acp_tools.take().is_some() { + cx.notify(); + } + ToolbarItemLocation::Hidden + } +} diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml index 1a389e8859b24a320720ecfc3fa6cf2a13f274ad..a8395a943a2ce4e06d4971548c32bf765adb492d 100644 --- a/crates/action_log/Cargo.toml +++ b/crates/action_log/Cargo.toml @@ -23,7 +23,6 @@ project.workspace = true text.workspace = true util.workspace = true watch.workspace = true -workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 3a80f012f9fb0e5b056a7b2f8763a2019dfcdf2b..4e604b452122c5a8e38b2d02b54f4ee639817ab4 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -25,7 +25,6 @@ proto.workspace = true smallvec.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true [dev-dependencies] diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index f35b2ad17879c57b15ac8579e6b50a26110ff21d..84d1291dad6d235e8d90d21bfcaf78a7e2ec042d 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ 
b/crates/activity_indicator/src/activity_indicator.rs @@ -11,8 +11,7 @@ use language::{ LanguageServerStatusUpdate, ServerHealth, }; use project::{ - EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project, - ProjectEnvironmentEvent, + LanguageServerProgress, LspStoreEvent, Project, ProjectEnvironmentEvent, git_store::{GitStoreEvent, Repository}, }; use smallvec::SmallVec; @@ -20,7 +19,6 @@ use std::{ cmp::Reverse, collections::HashSet, fmt::Write, - path::Path, sync::Arc, time::{Duration, Instant}, }; @@ -328,27 +326,23 @@ impl ActivityIndicator { .flatten() } - fn pending_environment_errors<'a>( - &'a self, - cx: &'a App, - ) -> impl Iterator, &'a EnvironmentErrorMessage)> { - self.project.read(cx).shell_environment_errors(cx) + fn pending_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a String> { + self.project.read(cx).peek_environment_error(cx) } fn content_to_render(&mut self, cx: &mut Context) -> Option { // Show if any direnv calls failed - if let Some((abs_path, error)) = self.pending_environment_errors(cx).next() { - let abs_path = abs_path.clone(); + if let Some(message) = self.pending_environment_error(cx) { return Some(Content { icon: Some( Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), - message: error.0.clone(), + message: message.clone(), on_click: Some(Arc::new(move |this, window, cx| { this.project.update(cx, |project, cx| { - project.remove_environment_error(&abs_path, cx); + project.pop_environment_error(cx); }); window.dispatch_action(Box::new(workspace::OpenLog), cx); })), diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index 76f96647c7af5692ca9b4b146e27f9f7c19c7995..e0f2d9dcb97e298dd3c906e3f902974821efcdc0 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -5,75 +5,101 @@ edition.workspace = true publish.workspace = true license = "GPL-3.0-or-later" -[lints] -workspace = true - [lib] path = "src/agent.rs" -doctest = false [features] -test-support = [ - "gpui/test-support", - "language/test-support", -] +test-support = ["db/test-support"] +eval = [] +edit-agent-eval = [] +e2e = [] + +[lints] +workspace = true [dependencies] +acp_thread.workspace = true action_log.workspace = true +agent-client-protocol.workspace = true +agent_servers.workspace = true agent_settings.workspace = true anyhow.workspace = true -assistant_context.workspace = true -assistant_tool.workspace = true +assistant_text_thread.workspace = true chrono.workspace = true client.workspace = true cloud_llm_client.workspace = true collections.workspace = true -component.workspace = true context_server.workspace = true -convert_case.workspace = true +db.workspace = true +derive_more.workspace = true fs.workspace = true futures.workspace = true git.workspace = true gpui.workspace = true -heed.workspace = true +handlebars = { workspace = true, features = ["rust-embed"] } +html_to_markdown.workspace = true http_client.workspace = true -icons.workspace = true indoc.workspace = true itertools.workspace = true language.workspace = true language_model.workspace = true +language_models.workspace = true log.workspace = true +open.workspace = true +parking_lot.workspace = true paths.workspace = true -postage.workspace = true project.workspace = true prompt_store.workspace = true -ref-cast.workspace = true -rope.workspace = true +regex.workspace = true +rust-embed.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +smallvec.workspace = true smol.workspace = true 
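The activity-indicator change above swaps the per-path environment error map for a single peek/pop pair on the project: the indicator shows one pending error at a time, and clicking it pops that error and opens the log. A small stand-in sketch of that surface; the FIFO backing store is an assumption, only the `peek`/`pop` shape mirrors `peek_environment_error` / `pop_environment_error`:

    use std::collections::VecDeque;

    // Assumed project-side storage behind the new API: a queue of error
    // messages, surfaced one at a time in the activity indicator.
    struct EnvironmentErrors {
        queue: VecDeque<String>,
    }

    impl EnvironmentErrors {
        fn peek(&self) -> Option<&String> {
            self.queue.front()
        }

        fn pop(&mut self) -> Option<String> {
            self.queue.pop_front()
        }
    }

    fn main() {
        let mut errors = EnvironmentErrors {
            queue: VecDeque::from(vec!["direnv exited with an error".to_string()]),
        };
        assert!(errors.peek().is_some()); // the indicator shows the first pending error
        errors.pop();                     // clicking the indicator dismisses it
        assert!(errors.peek().is_none()); // nothing left to show
    }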
sqlez.workspace = true +streaming_diff.workspace = true +strsim.workspace = true +task.workspace = true telemetry.workspace = true +terminal.workspace = true text.workspace = true -theme.workspace = true thiserror.workspace = true -time.workspace = true +ui.workspace = true util.workspace = true uuid.workspace = true -workspace-hack.workspace = true +watch.workspace = true +web_search.workspace = true zed_env_vars.workspace = true zstd.workspace = true [dev-dependencies] -assistant_tools.workspace = true +agent_servers = { workspace = true, "features" = ["test-support"] } +assistant_text_thread = { workspace = true, "features" = ["test-support"] } +client = { workspace = true, "features" = ["test-support"] } +clock = { workspace = true, "features" = ["test-support"] } +context_server = { workspace = true, "features" = ["test-support"] } +ctor.workspace = true +db = { workspace = true, "features" = ["test-support"] } +editor = { workspace = true, "features" = ["test-support"] } +env_logger.workspace = true +fs = { workspace = true, "features" = ["test-support"] } +git = { workspace = true, "features" = ["test-support"] } gpui = { workspace = true, "features" = ["test-support"] } -indoc.workspace = true +gpui_tokio.workspace = true language = { workspace = true, "features" = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] } -parking_lot.workspace = true +lsp = { workspace = true, "features" = ["test-support"] } pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = ["test-support"] } +project = { workspace = true, "features" = ["test-support"] } rand.workspace = true +reqwest_client.workspace = true +settings = { workspace = true, "features" = ["test-support"] } +tempfile.workspace = true +terminal = { workspace = true, "features" = ["test-support"] } +theme = { workspace = true, "features" = ["test-support"] } +tree-sitter-rust.workspace = true +unindent = { workspace = true } +worktree = { workspace = true, "features" = ["test-support"] } +zlog.workspace = true diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 9cd2a93d9bfc9a8a1940fea150f651b60f1a1073..63ee0adf191cbe309229c57b950d11ca7a3680e3 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1,22 +1,1635 @@ -pub mod agent_profile; -pub mod context; -pub mod context_server_tool; -pub mod context_store; -pub mod history_store; -pub mod thread; -pub mod thread_store; -pub mod tool_use; - -pub use context::{AgentContext, ContextId, ContextLoadResult}; -pub use context_store::ContextStore; +mod db; +mod edit_agent; +mod history_store; +mod legacy_thread; +mod native_agent_server; +pub mod outline; +mod templates; +mod thread; +mod tool_schema; +mod tools; + +#[cfg(test)] +mod tests; + +pub use db::*; +pub use history_store::*; +pub use native_agent_server::NativeAgentServer; +pub use templates::*; +pub use thread::*; +pub use tools::*; + +use acp_thread::{AcpThread, AgentModelSelector}; +use agent_client_protocol as acp; +use anyhow::{Context as _, Result, anyhow}; +use chrono::{DateTime, Utc}; +use collections::{HashSet, IndexMap}; use fs::Fs; -use std::sync::Arc; -pub use thread::{ - LastRestoreCheckpoint, Message, MessageCrease, MessageId, MessageSegment, Thread, ThreadError, - ThreadEvent, ThreadFeedback, ThreadId, ThreadSummary, TokenUsageRatio, +use futures::channel::{mpsc, oneshot}; +use futures::future::Shared; +use futures::{StreamExt, future}; +use gpui::{ + App, 
AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, +}; +use language_model::{LanguageModel, LanguageModelProvider, LanguageModelRegistry}; +use project::{Project, ProjectItem, ProjectPath, Worktree}; +use prompt_store::{ + ProjectContext, PromptStore, RulesFileContext, UserRulesContext, WorktreeContext, }; -pub use thread_store::{SerializedThread, TextThreadStore, ThreadStore}; +use serde::{Deserialize, Serialize}; +use settings::{LanguageModelSelection, update_settings_file}; +use std::any::Any; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::sync::Arc; +use util::ResultExt; +use util::rel_path::RelPath; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ProjectSnapshot { + pub worktree_snapshots: Vec, + pub timestamp: DateTime, +} + +const RULES_FILE_NAMES: [&str; 9] = [ + ".rules", + ".cursorrules", + ".windsurfrules", + ".clinerules", + ".github/copilot-instructions.md", + "CLAUDE.md", + "AGENT.md", + "AGENTS.md", + "GEMINI.md", +]; + +pub struct RulesLoadingError { + pub message: SharedString, +} + +/// Holds both the internal Thread and the AcpThread for a session +struct Session { + /// The internal thread that processes messages + thread: Entity, + /// The ACP thread that handles protocol communication + acp_thread: WeakEntity, + pending_save: Task<()>, + _subscriptions: Vec, +} + +pub struct LanguageModels { + /// Access language model by ID + models: HashMap>, + /// Cached list for returning language model information + model_list: acp_thread::AgentModelList, + refresh_models_rx: watch::Receiver<()>, + refresh_models_tx: watch::Sender<()>, + _authenticate_all_providers_task: Task<()>, +} + +impl LanguageModels { + fn new(cx: &mut App) -> Self { + let (refresh_models_tx, refresh_models_rx) = watch::channel(()); + + let mut this = Self { + models: HashMap::default(), + model_list: acp_thread::AgentModelList::Grouped(IndexMap::default()), + refresh_models_rx, + refresh_models_tx, + _authenticate_all_providers_task: Self::authenticate_all_language_model_providers(cx), + }; + this.refresh_list(cx); + this + } + + fn refresh_list(&mut self, cx: &App) { + let providers = LanguageModelRegistry::global(cx) + .read(cx) + .providers() + .into_iter() + .filter(|provider| provider.is_authenticated(cx)) + .collect::>(); + + let mut language_model_list = IndexMap::default(); + let mut recommended_models = HashSet::default(); + + let mut recommended = Vec::new(); + for provider in &providers { + for model in provider.recommended_models(cx) { + recommended_models.insert((model.provider_id(), model.id())); + recommended.push(Self::map_language_model_to_info(&model, provider)); + } + } + if !recommended.is_empty() { + language_model_list.insert( + acp_thread::AgentModelGroupName("Recommended".into()), + recommended, + ); + } + + let mut models = HashMap::default(); + for provider in providers { + let mut provider_models = Vec::new(); + for model in provider.provided_models(cx) { + let model_info = Self::map_language_model_to_info(&model, &provider); + let model_id = model_info.id.clone(); + if !recommended_models.contains(&(model.provider_id(), model.id())) { + provider_models.push(model_info); + } + models.insert(model_id, model); + } + if !provider_models.is_empty() { + language_model_list.insert( + acp_thread::AgentModelGroupName(provider.name().0.clone()), + provider_models, + ); + } + } + + self.models = models; + self.model_list = acp_thread::AgentModelList::Grouped(language_model_list); + 
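`LanguageModels::refresh_list` above builds the grouped model list the ACP model picker consumes: a "Recommended" group first, then one group per authenticated provider, with recommended entries removed from their provider group so each model appears only once. A simplified, synchronous sketch of that grouping over `(provider, model, recommended)` triples; the model names are placeholders, and `BTreeMap` is used for determinism where the real code preserves provider order with an `IndexMap`:

    use std::collections::{BTreeMap, HashSet};

    // Returns (group name, composite "provider/model" ids) pairs, recommended first.
    fn group_models(models: &[(&str, &str, bool)]) -> Vec<(String, Vec<String>)> {
        let mut groups: Vec<(String, Vec<String>)> = Vec::new();

        let recommended: Vec<String> = models
            .iter()
            .filter(|(_, _, rec)| *rec)
            .map(|(p, m, _)| format!("{p}/{m}"))
            .collect();
        let recommended_set: HashSet<String> = recommended.iter().cloned().collect();
        if !recommended.is_empty() {
            groups.push(("Recommended".to_string(), recommended));
        }

        let mut by_provider: BTreeMap<&str, Vec<String>> = BTreeMap::new();
        for (provider, model, _) in models {
            let id = format!("{provider}/{model}");
            // Recommended models already appear in the first group; skip them here.
            if !recommended_set.contains(&id) {
                by_provider.entry(*provider).or_default().push(id);
            }
        }
        for (provider, ids) in by_provider {
            groups.push((provider.to_string(), ids));
        }
        groups
    }

    fn main() {
        let grouped = group_models(&[
            ("anthropic", "claude-sonnet-4", true),
            ("anthropic", "claude-haiku", false),
            ("ollama", "llama3", false),
        ]);
        assert_eq!(grouped[0].0, "Recommended");
        assert_eq!(grouped.len(), 3); // Recommended + anthropic + ollama
    }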
self.refresh_models_tx.send(()).ok(); + } + + fn watch(&self) -> watch::Receiver<()> { + self.refresh_models_rx.clone() + } + + pub fn model_from_id(&self, model_id: &acp::ModelId) -> Option> { + self.models.get(model_id).cloned() + } + + fn map_language_model_to_info( + model: &Arc, + provider: &Arc, + ) -> acp_thread::AgentModelInfo { + acp_thread::AgentModelInfo { + id: Self::model_id(model), + name: model.name().0, + description: None, + icon: Some(provider.icon()), + } + } + + fn model_id(model: &Arc) -> acp::ModelId { + acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) + } + + fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> { + let authenticate_all_providers = LanguageModelRegistry::global(cx) + .read(cx) + .providers() + .iter() + .map(|provider| (provider.id(), provider.name(), provider.authenticate(cx))) + .collect::>(); + + cx.background_spawn(async move { + for (provider_id, provider_name, authenticate_task) in authenticate_all_providers { + if let Err(err) = authenticate_task.await { + match err { + language_model::AuthenticateError::CredentialsNotFound => { + // Since we're authenticating these providers in the + // background for the purposes of populating the + // language selector, we don't care about providers + // where the credentials are not found. + } + language_model::AuthenticateError::ConnectionRefused => { + // Not logging connection refused errors as they are mostly from LM Studio's noisy auth failures. + // LM Studio only has one auth method (endpoint call) which fails for users who haven't enabled it. + // TODO: Better manage LM Studio auth logic to avoid these noisy failures. + } + _ => { + // Some providers have noisy failure states that we + // don't want to spam the logs with every time the + // language model selector is initialized. + // + // Ideally these should have more clear failure modes + // that we know are safe to ignore here, like what we do + // with `CredentialsNotFound` above. + match provider_id.0.as_ref() { + "lmstudio" | "ollama" => { + // LM Studio and Ollama both make fetch requests to the local APIs to determine if they are "authenticated". + // + // These fail noisily, so we don't log them. + } + "copilot_chat" => { + // Copilot Chat returns an error if Copilot is not enabled, so we don't log those errors. + } + _ => { + log::error!( + "Failed to authenticate provider: {}: {err}", + provider_name.0 + ); + } + } + } + } + } + } + }) + } +} + +pub struct NativeAgent { + /// Session ID -> Session mapping + sessions: HashMap, + history: Entity, + /// Shared project context for all threads + project_context: Entity, + project_context_needs_refresh: watch::Sender<()>, + _maintain_project_context: Task>, + context_server_registry: Entity, + /// Shared templates for all threads + templates: Arc, + /// Cached model information + models: LanguageModels, + project: Entity, + prompt_store: Option>, + fs: Arc, + _subscriptions: Vec, +} + +impl NativeAgent { + pub async fn new( + project: Entity, + history: Entity, + templates: Arc, + prompt_store: Option>, + fs: Arc, + cx: &mut AsyncApp, + ) -> Result> { + log::debug!("Creating new NativeAgent"); + + let project_context = cx + .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))? 
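Models are exposed to ACP clients under a composite "provider/model" id and resolved by a plain map lookup, which is what `model_id` and `model_from_id` above amount to. A tiny sketch with placeholder entries; an unknown id simply yields `None`, which `select_model` later turns into an "Invalid model ID" error:

    use std::collections::HashMap;

    // Composite id, matching format!("{}/{}", provider_id, model_id) above.
    fn model_id(provider: &str, model: &str) -> String {
        format!("{provider}/{model}")
    }

    fn main() {
        // Placeholder registry; the real map stores Arc<dyn LanguageModel> values.
        let mut models: HashMap<String, &str> = HashMap::new();
        models.insert(model_id("anthropic", "claude-sonnet-4"), "Claude Sonnet 4");
        models.insert(model_id("ollama", "llama3"), "Llama 3");

        assert_eq!(models.get("anthropic/claude-sonnet-4"), Some(&"Claude Sonnet 4"));
        assert!(models.get("does/not-exist").is_none()); // -> "Invalid model ID"
    }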
+ .await; + + cx.new(|cx| { + let mut subscriptions = vec![ + cx.subscribe(&project, Self::handle_project_event), + cx.subscribe( + &LanguageModelRegistry::global(cx), + Self::handle_models_updated_event, + ), + ]; + if let Some(prompt_store) = prompt_store.as_ref() { + subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) + } + + let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = + watch::channel(()); + Self { + sessions: HashMap::new(), + history, + project_context: cx.new(|_| project_context), + project_context_needs_refresh: project_context_needs_refresh_tx, + _maintain_project_context: cx.spawn(async move |this, cx| { + Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await + }), + context_server_registry: cx.new(|cx| { + ContextServerRegistry::new(project.read(cx).context_server_store(), cx) + }), + templates, + models: LanguageModels::new(cx), + project, + prompt_store, + fs, + _subscriptions: subscriptions, + } + }) + } + + fn register_session( + &mut self, + thread_handle: Entity, + cx: &mut Context, + ) -> Entity { + let connection = Rc::new(NativeAgentConnection(cx.entity())); + + let thread = thread_handle.read(cx); + let session_id = thread.id().clone(); + let title = thread.title(); + let project = thread.project.clone(); + let action_log = thread.action_log.clone(); + let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); + let acp_thread = cx.new(|cx| { + acp_thread::AcpThread::new( + title, + connection, + project.clone(), + action_log.clone(), + session_id.clone(), + prompt_capabilities_rx, + cx, + ) + }); + + let registry = LanguageModelRegistry::read_global(cx); + let summarization_model = registry.thread_summary_model().map(|c| c.model); + + thread_handle.update(cx, |thread, cx| { + thread.set_summarization_model(summarization_model, cx); + thread.add_default_tools( + Rc::new(AcpThreadEnvironment { + acp_thread: acp_thread.downgrade(), + }) as _, + cx, + ) + }); + + let subscriptions = vec![ + cx.observe_release(&acp_thread, |this, acp_thread, _cx| { + this.sessions.remove(acp_thread.session_id()); + }), + cx.subscribe(&thread_handle, Self::handle_thread_title_updated), + cx.subscribe(&thread_handle, Self::handle_thread_token_usage_updated), + cx.observe(&thread_handle, move |this, thread, cx| { + this.save_thread(thread, cx) + }), + ]; + + self.sessions.insert( + session_id, + Session { + thread: thread_handle, + acp_thread: acp_thread.downgrade(), + _subscriptions: subscriptions, + pending_save: Task::ready(()), + }, + ); + acp_thread + } + + pub fn models(&self) -> &LanguageModels { + &self.models + } + + async fn maintain_project_context( + this: WeakEntity, + mut needs_refresh: watch::Receiver<()>, + cx: &mut AsyncApp, + ) -> Result<()> { + while needs_refresh.changed().await.is_ok() { + let project_context = this + .update(cx, |this, cx| { + Self::build_project_context(&this.project, this.prompt_store.as_ref(), cx) + })? 
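The project context is rebuilt on demand rather than eagerly: every invalidation (worktree added or removed, a rules file edited, prompts updated) just signals a channel, and a single maintenance task loops on that signal and rebuilds. A sketch of the pattern using `std::sync::mpsc` and a thread as stand-ins for the `watch` channel and the spawned task in the diff; a real watch channel additionally coalesces bursts of signals into a single rebuild:

    use std::sync::mpsc;
    use std::thread;

    fn main() {
        let (needs_refresh_tx, needs_refresh_rx) = mpsc::channel::<()>();

        // Stand-in for _maintain_project_context: rebuild once per signal.
        let maintain = thread::spawn(move || {
            let mut rebuilds = 0;
            while needs_refresh_rx.recv().is_ok() {
                // In the real code this is where build_project_context runs.
                rebuilds += 1;
            }
            rebuilds
        });

        needs_refresh_tx.send(()).unwrap(); // e.g. a worktree was added
        needs_refresh_tx.send(()).unwrap(); // e.g. a .rules file changed
        drop(needs_refresh_tx); // dropping the sender ends the maintenance loop

        assert_eq!(maintain.join().unwrap(), 2);
    }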
+ .await; + this.update(cx, |this, cx| { + this.project_context = cx.new(|_| project_context); + })?; + } + + Ok(()) + } + + fn build_project_context( + project: &Entity, + prompt_store: Option<&Entity>, + cx: &mut App, + ) -> Task { + let worktrees = project.read(cx).visible_worktrees(cx).collect::>(); + let worktree_tasks = worktrees + .into_iter() + .map(|worktree| { + Self::load_worktree_info_for_system_prompt(worktree, project.clone(), cx) + }) + .collect::>(); + let default_user_rules_task = if let Some(prompt_store) = prompt_store.as_ref() { + prompt_store.read_with(cx, |prompt_store, cx| { + let prompts = prompt_store.default_prompt_metadata(); + let load_tasks = prompts.into_iter().map(|prompt_metadata| { + let contents = prompt_store.load(prompt_metadata.id, cx); + async move { (contents.await, prompt_metadata) } + }); + cx.background_spawn(future::join_all(load_tasks)) + }) + } else { + Task::ready(vec![]) + }; + + cx.spawn(async move |_cx| { + let (worktrees, default_user_rules) = + future::join(future::join_all(worktree_tasks), default_user_rules_task).await; + + let worktrees = worktrees + .into_iter() + .map(|(worktree, _rules_error)| { + // TODO: show error message + // if let Some(rules_error) = rules_error { + // this.update(cx, |_, cx| cx.emit(rules_error)).ok(); + // } + worktree + }) + .collect::>(); + + let default_user_rules = default_user_rules + .into_iter() + .flat_map(|(contents, prompt_metadata)| match contents { + Ok(contents) => Some(UserRulesContext { + uuid: match prompt_metadata.id { + prompt_store::PromptId::User { uuid } => uuid, + prompt_store::PromptId::EditWorkflow => return None, + }, + title: prompt_metadata.title.map(|title| title.to_string()), + contents, + }), + Err(_err) => { + // TODO: show error message + // this.update(cx, |_, cx| { + // cx.emit(RulesLoadingError { + // message: format!("{err:?}").into(), + // }); + // }) + // .ok(); + None + } + }) + .collect::>(); + + ProjectContext::new(worktrees, default_user_rules) + }) + } + + fn load_worktree_info_for_system_prompt( + worktree: Entity, + project: Entity, + cx: &mut App, + ) -> Task<(WorktreeContext, Option)> { + let tree = worktree.read(cx); + let root_name = tree.root_name_str().into(); + let abs_path = tree.abs_path(); + + let mut context = WorktreeContext { + root_name, + abs_path, + rules_file: None, + }; + + let rules_task = Self::load_worktree_rules_file(worktree, project, cx); + let Some(rules_task) = rules_task else { + return Task::ready((context, None)); + }; + + cx.spawn(async move |_| { + let (rules_file, rules_file_error) = match rules_task.await { + Ok(rules_file) => (Some(rules_file), None), + Err(err) => ( + None, + Some(RulesLoadingError { + message: format!("{err}").into(), + }), + ), + }; + context.rules_file = rules_file; + (context, rules_file_error) + }) + } + + fn load_worktree_rules_file( + worktree: Entity, + project: Entity, + cx: &mut App, + ) -> Option>> { + let worktree = worktree.read(cx); + let worktree_id = worktree.id(); + let selected_rules_file = RULES_FILE_NAMES + .into_iter() + .filter_map(|name| { + worktree + .entry_for_path(RelPath::unix(name).unwrap()) + .filter(|entry| entry.is_file()) + .map(|entry| entry.path.clone()) + }) + .next(); + + // Note that Cline supports `.clinerules` being a directory, but that is not currently + // supported. This doesn't seem to occur often in GitHub repositories. 
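Rules lookup above is strictly priority-ordered: the first entry of `RULES_FILE_NAMES` that exists as a file in the worktree wins, and the rest are ignored. A dependency-free sketch of that selection, replacing the worktree snapshot with a plain slice of existing paths:

    // Same priority order as the RULES_FILE_NAMES constant earlier in this file.
    const RULES_FILE_NAMES: [&str; 9] = [
        ".rules",
        ".cursorrules",
        ".windsurfrules",
        ".clinerules",
        ".github/copilot-instructions.md",
        "CLAUDE.md",
        "AGENT.md",
        "AGENTS.md",
        "GEMINI.md",
    ];

    fn select_rules_file<'a>(existing_files: &[&'a str]) -> Option<&'a str> {
        RULES_FILE_NAMES
            .into_iter()
            .find_map(|name| existing_files.iter().copied().find(|file| *file == name))
    }

    fn main() {
        // AGENTS.md exists too, but .cursorrules wins because it comes first.
        assert_eq!(
            select_rules_file(&["src/main.rs", "AGENTS.md", ".cursorrules"]),
            Some(".cursorrules")
        );
        assert_eq!(select_rules_file(&["src/main.rs"]), None);
    }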
+ selected_rules_file.map(|path_in_worktree| { + let project_path = ProjectPath { + worktree_id, + path: path_in_worktree.clone(), + }; + let buffer_task = + project.update(cx, |project, cx| project.open_buffer(project_path, cx)); + let rope_task = cx.spawn(async move |cx| { + buffer_task.await?.read_with(cx, |buffer, cx| { + let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?; + anyhow::Ok((project_entry_id, buffer.as_rope().clone())) + })? + }); + // Build a string from the rope on a background thread. + cx.background_spawn(async move { + let (project_entry_id, rope) = rope_task.await?; + anyhow::Ok(RulesFileContext { + path_in_worktree, + text: rope.to_string().trim().to_string(), + project_entry_id: project_entry_id.to_usize(), + }) + }) + }) + } + + fn handle_thread_title_updated( + &mut self, + thread: Entity, + _: &TitleUpdated, + cx: &mut Context, + ) { + let session_id = thread.read(cx).id(); + let Some(session) = self.sessions.get(session_id) else { + return; + }; + let thread = thread.downgrade(); + let acp_thread = session.acp_thread.clone(); + cx.spawn(async move |_, cx| { + let title = thread.read_with(cx, |thread, _| thread.title())?; + let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; + task.await + }) + .detach_and_log_err(cx); + } + + fn handle_thread_token_usage_updated( + &mut self, + thread: Entity, + usage: &TokenUsageUpdated, + cx: &mut Context, + ) { + let Some(session) = self.sessions.get(thread.read(cx).id()) else { + return; + }; + session + .acp_thread + .update(cx, |acp_thread, cx| { + acp_thread.update_token_usage(usage.0.clone(), cx); + }) + .ok(); + } + + fn handle_project_event( + &mut self, + _project: Entity, + event: &project::Event, + _cx: &mut Context, + ) { + match event { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + self.project_context_needs_refresh.send(()).ok(); + } + project::Event::WorktreeUpdatedEntries(_, items) => { + if items.iter().any(|(path, _, _)| { + RULES_FILE_NAMES + .iter() + .any(|name| path.as_ref() == RelPath::unix(name).unwrap()) + }) { + self.project_context_needs_refresh.send(()).ok(); + } + } + _ => {} + } + } + + fn handle_prompts_updated_event( + &mut self, + _prompt_store: Entity, + _event: &prompt_store::PromptsUpdatedEvent, + _cx: &mut Context, + ) { + self.project_context_needs_refresh.send(()).ok(); + } + + fn handle_models_updated_event( + &mut self, + _registry: Entity, + _event: &language_model::Event, + cx: &mut Context, + ) { + self.models.refresh_list(cx); + + let registry = LanguageModelRegistry::read_global(cx); + let default_model = registry.default_model().map(|m| m.model); + let summarization_model = registry.thread_summary_model().map(|m| m.model); + + for session in self.sessions.values_mut() { + session.thread.update(cx, |thread, cx| { + if thread.model().is_none() + && let Some(model) = default_model.clone() + { + thread.set_model(model, cx); + cx.notify(); + } + thread.set_summarization_model(summarization_model.clone(), cx); + }); + } + } + + pub fn load_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task>> { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let database = database_future.await.map_err(|err| anyhow!(err))?; + let db_thread = database + .load_thread(id.clone()) + .await? 
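When the language-model registry changes, `handle_models_updated_event` above walks all open sessions: a thread with no model yet adopts the new default, a thread with an explicit model keeps it, and the summarization model is always refreshed. A reduced sketch over plain `Option<String>` fields (model names are placeholders):

    struct SessionModels {
        model: Option<String>,
        summarization_model: Option<String>,
    }

    fn apply_registry_update(
        session: &mut SessionModels,
        default_model: Option<&str>,
        summarization_model: Option<&str>,
    ) {
        // Only fill in the default when the thread hasn't chosen a model itself.
        if session.model.is_none() {
            session.model = default_model.map(str::to_owned);
        }
        // The summarization model always tracks the registry.
        session.summarization_model = summarization_model.map(str::to_owned);
    }

    fn main() {
        let mut fresh = SessionModels { model: None, summarization_model: None };
        let mut pinned = SessionModels {
            model: Some("anthropic/claude-sonnet-4".into()),
            summarization_model: None,
        };

        apply_registry_update(&mut fresh, Some("zed/default"), Some("zed/summarizer"));
        apply_registry_update(&mut pinned, Some("zed/default"), Some("zed/summarizer"));

        assert_eq!(fresh.model.as_deref(), Some("zed/default"));
        assert_eq!(pinned.model.as_deref(), Some("anthropic/claude-sonnet-4"));
        assert_eq!(pinned.summarization_model.as_deref(), Some("zed/summarizer"));
    }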
+ .with_context(|| format!("no thread found with ID: {id:?}"))?; + + this.update(cx, |this, cx| { + let summarization_model = LanguageModelRegistry::read_global(cx) + .thread_summary_model() + .map(|c| c.model); + + cx.new(|cx| { + let mut thread = Thread::from_db( + id.clone(), + db_thread, + this.project.clone(), + this.project_context.clone(), + this.context_server_registry.clone(), + this.templates.clone(), + cx, + ); + thread.set_summarization_model(summarization_model, cx); + thread + }) + }) + }) + } + + pub fn open_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task>> { + let task = self.load_thread(id, cx); + cx.spawn(async move |this, cx| { + let thread = task.await?; + let acp_thread = + this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; + let events = thread.update(cx, |thread, cx| thread.replay(cx))?; + cx.update(|cx| { + NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) + })? + .await?; + Ok(acp_thread) + }) + } + + pub fn thread_summary( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task> { + let thread = self.open_thread(id.clone(), cx); + cx.spawn(async move |this, cx| { + let acp_thread = thread.await?; + let result = this + .update(cx, |this, cx| { + this.sessions + .get(&id) + .unwrap() + .thread + .update(cx, |thread, cx| thread.summary(cx)) + })? + .await + .context("Failed to generate summary")?; + drop(acp_thread); + Ok(result) + }) + } + + fn save_thread(&mut self, thread: Entity, cx: &mut Context) { + if thread.read(cx).is_empty() { + return; + } + + let database_future = ThreadsDatabase::connect(cx); + let (id, db_thread) = + thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); + let Some(session) = self.sessions.get_mut(&id) else { + return; + }; + let history = self.history.clone(); + session.pending_save = cx.spawn(async move |_, cx| { + let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { + return; + }; + let db_thread = db_thread.await; + database.save_thread(id, db_thread).await.log_err(); + history.update(cx, |history, cx| history.reload(cx)).ok(); + }); + } +} + +/// Wrapper struct that implements the AgentConnection trait +#[derive(Clone)] +pub struct NativeAgentConnection(pub Entity); + +impl NativeAgentConnection { + pub fn thread(&self, session_id: &acp::SessionId, cx: &App) -> Option> { + self.0 + .read(cx) + .sessions + .get(session_id) + .map(|session| session.thread.clone()) + } + + pub fn load_thread(&self, id: acp::SessionId, cx: &mut App) -> Task>> { + self.0.update(cx, |this, cx| this.load_thread(id, cx)) + } + + fn run_turn( + &self, + session_id: acp::SessionId, + cx: &mut App, + f: impl 'static + + FnOnce(Entity, &mut App) -> Result>>, + ) -> Task> { + let Some((thread, acp_thread)) = self.0.update(cx, |agent, _cx| { + agent + .sessions + .get_mut(&session_id) + .map(|s| (s.thread.clone(), s.acp_thread.clone())) + }) else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + log::debug!("Found session for: {}", session_id); + + let response_stream = match f(thread, cx) { + Ok(stream) => stream, + Err(err) => return Task::ready(Err(err)), + }; + Self::handle_thread_events(response_stream, acp_thread, cx) + } + + fn handle_thread_events( + mut events: mpsc::UnboundedReceiver>, + acp_thread: WeakEntity, + cx: &App, + ) -> Task> { + cx.spawn(async move |cx| { + // Handle response stream and forward to session.acp_thread + while let Some(result) = events.next().await { + match result { + 
+                    Ok(event) => {
+                        log::trace!("Received completion event: {:?}", event);
+
+                        match event {
+                            ThreadEvent::UserMessage(message) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    for content in message.content {
+                                        thread.push_user_content_block(
+                                            Some(message.id.clone()),
+                                            content.into(),
+                                            cx,
+                                        );
+                                    }
+                                })?;
+                            }
+                            ThreadEvent::AgentText(text) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    thread.push_assistant_content_block(
+                                        acp::ContentBlock::Text(acp::TextContent {
+                                            text,
+                                            annotations: None,
+                                            meta: None,
+                                        }),
+                                        false,
+                                        cx,
+                                    )
+                                })?;
+                            }
+                            ThreadEvent::AgentThinking(text) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    thread.push_assistant_content_block(
+                                        acp::ContentBlock::Text(acp::TextContent {
+                                            text,
+                                            annotations: None,
+                                            meta: None,
+                                        }),
+                                        true,
+                                        cx,
+                                    )
+                                })?;
+                            }
+                            ThreadEvent::ToolCallAuthorization(ToolCallAuthorization {
+                                tool_call,
+                                options,
+                                response,
+                            }) => {
+                                let outcome_task = acp_thread.update(cx, |thread, cx| {
+                                    thread.request_tool_call_authorization(
+                                        tool_call, options, true, cx,
+                                    )
+                                })??;
+                                cx.background_spawn(async move {
+                                    if let acp::RequestPermissionOutcome::Selected { option_id } =
+                                        outcome_task.await
+                                    {
+                                        response
+                                            .send(option_id)
+                                            .map_err(|_| anyhow!("authorization receiver was dropped"))
+                                            .log_err();
+                                    }
+                                })
+                                .detach();
+                            }
+                            ThreadEvent::ToolCall(tool_call) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    thread.upsert_tool_call(tool_call, cx)
+                                })??;
+                            }
+                            ThreadEvent::ToolCallUpdate(update) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    thread.update_tool_call(update, cx)
+                                })??;
+                            }
+                            ThreadEvent::Retry(status) => {
+                                acp_thread.update(cx, |thread, cx| {
+                                    thread.update_retry_status(status, cx)
+                                })?;
+                            }
+                            ThreadEvent::Stop(stop_reason) => {
+                                log::debug!("Assistant message complete: {:?}", stop_reason);
+                                return Ok(acp::PromptResponse {
+                                    stop_reason,
+                                    meta: None,
+                                });
+                            }
+                        }
+                    }
+                    Err(e) => {
+                        log::error!("Error in model response stream: {:?}", e);
+                        return Err(e);
+                    }
+                }
+            }
+
+            log::debug!("Response stream completed");
+            anyhow::Ok(acp::PromptResponse {
+                stop_reason: acp::StopReason::EndTurn,
+                meta: None,
+            })
+        })
+    }
+}
+
+struct NativeAgentModelSelector {
+    session_id: acp::SessionId,
+    connection: NativeAgentConnection,
+}
+
+impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
+    fn list_models(&self, cx: &mut App) -> Task<Result<acp_thread::AgentModelList>> {
+        log::debug!("NativeAgentConnection::list_models called");
+        let list = self.connection.0.read(cx).models.model_list.clone();
+        Task::ready(if list.is_empty() {
+            Err(anyhow::anyhow!("No models available"))
+        } else {
+            Ok(list)
+        })
+    }
+
+    fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task<Result<()>> {
+        log::debug!(
+            "Setting model for session {}: {}",
+            self.session_id,
+            model_id
+        );
+        let Some(thread) = self
+            .connection
+            .0
+            .read(cx)
+            .sessions
+            .get(&self.session_id)
+            .map(|session| session.thread.clone())
+        else {
+            return Task::ready(Err(anyhow!("Session not found")));
+        };
+
+        let Some(model) = self.connection.0.read(cx).models.model_from_id(&model_id) else {
+            return Task::ready(Err(anyhow!("Invalid model ID {}", model_id)));
+        };
+
+        thread.update(cx, |thread, cx| {
+            thread.set_model(model.clone(), cx);
+        });
+
+        update_settings_file(
+            self.connection.0.read(cx).fs.clone(),
+            cx,
+            move |settings, _cx| {
+                let provider = model.provider_id().0.to_string();
+                let model = model.id().0.to_string();
+                settings
+                    .agent
+                    .get_or_insert_default()
+                    .set_model(LanguageModelSelection {
+                        provider: provider.into(),
+                        model,
+                    });
+            },
+        );
+
+        Task::ready(Ok(()))
+
} + + fn selected_model(&self, cx: &mut App) -> Task> { + let Some(thread) = self + .connection + .0 + .read(cx) + .sessions + .get(&self.session_id) + .map(|session| session.thread.clone()) + else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + let Some(model) = thread.read(cx).model() else { + return Task::ready(Err(anyhow!("Model not found"))); + }; + let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&model.provider_id()) + else { + return Task::ready(Err(anyhow!("Provider not found"))); + }; + Task::ready(Ok(LanguageModels::map_language_model_to_info( + model, &provider, + ))) + } + + fn watch(&self, cx: &mut App) -> Option> { + Some(self.connection.0.read(cx).models.watch()) + } +} + +impl acp_thread::AgentConnection for NativeAgentConnection { + fn new_thread( + self: Rc, + project: Entity, + cwd: &Path, + cx: &mut App, + ) -> Task>> { + let agent = self.0.clone(); + log::debug!("Creating new thread for project at: {:?}", cwd); + + cx.spawn(async move |cx| { + log::debug!("Starting thread creation in async context"); + + // Create Thread + let thread = agent.update( + cx, + |agent, cx: &mut gpui::Context| -> Result<_> { + // Fetch default model from registry settings + let registry = LanguageModelRegistry::read_global(cx); + // Log available models for debugging + let available_count = registry.available_models(cx).count(); + log::debug!("Total available models: {}", available_count); + + let default_model = registry.default_model().and_then(|default_model| { + agent + .models + .model_from_id(&LanguageModels::model_id(&default_model.model)) + }); + Ok(cx.new(|cx| { + Thread::new( + project.clone(), + agent.project_context.clone(), + agent.context_server_registry.clone(), + agent.templates.clone(), + default_model, + cx, + ) + })) + }, + )??; + agent.update(cx, |agent, cx| agent.register_session(thread, cx)) + }) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] // No auth for in-process + } + + fn authenticate(&self, _method: acp::AuthMethodId, _cx: &mut App) -> Task> { + Task::ready(Ok(())) + } + + fn model_selector(&self, session_id: &acp::SessionId) -> Option> { + Some(Rc::new(NativeAgentModelSelector { + session_id: session_id.clone(), + connection: self.clone(), + }) as Rc) + } + + fn prompt( + &self, + id: Option, + params: acp::PromptRequest, + cx: &mut App, + ) -> Task> { + let id = id.expect("UserMessageId is required"); + let session_id = params.session_id.clone(); + log::info!("Received prompt request for session: {}", session_id); + log::debug!("Prompt blocks count: {}", params.prompt.len()); + + self.run_turn(session_id, cx, |thread, cx| { + let content: Vec = params + .prompt + .into_iter() + .map(Into::into) + .collect::>(); + log::debug!("Converted prompt to message: {} chars", content.len()); + log::debug!("Message id: {:?}", id); + log::debug!("Message content: {:?}", content); + + thread.update(cx, |thread, cx| thread.send(id, content, cx)) + }) + } + + fn resume( + &self, + session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { + Some(Rc::new(NativeAgentSessionResume { + connection: self.clone(), + session_id: session_id.clone(), + }) as _) + } + + fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { + log::info!("Cancelling on session: {}", session_id); + self.0.update(cx, |agent, cx| { + if let Some(agent) = agent.sessions.get(session_id) { + agent.thread.update(cx, |thread, cx| thread.cancel(cx)); + } + }); + } + + fn truncate( + &self, + session_id: &agent_client_protocol::SessionId, + cx: &App, + ) 
-> Option> { + self.0.read_with(cx, |agent, _cx| { + agent.sessions.get(session_id).map(|session| { + Rc::new(NativeAgentSessionTruncate { + thread: session.thread.clone(), + acp_thread: session.acp_thread.clone(), + }) as _ + }) + }) + } + + fn set_title( + &self, + session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { + Some(Rc::new(NativeAgentSessionSetTitle { + connection: self.clone(), + session_id: session_id.clone(), + }) as _) + } + + fn telemetry(&self) -> Option> { + Some(Rc::new(self.clone()) as Rc) + } + + fn into_any(self: Rc) -> Rc { + self + } +} + +impl acp_thread::AgentTelemetry for NativeAgentConnection { + fn agent_name(&self) -> String { + "Zed".into() + } + + fn thread_data( + &self, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + let Some(session) = self.0.read(cx).sessions.get(session_id) else { + return Task::ready(Err(anyhow!("Session not found"))); + }; + + let task = session.thread.read(cx).to_db(cx); + cx.background_spawn(async move { + serde_json::to_value(task.await).context("Failed to serialize thread") + }) + } +} + +struct NativeAgentSessionTruncate { + thread: Entity, + acp_thread: WeakEntity, +} + +impl acp_thread::AgentSessionTruncate for NativeAgentSessionTruncate { + fn run(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task> { + match self.thread.update(cx, |thread, cx| { + thread.truncate(message_id.clone(), cx)?; + Ok(thread.latest_token_usage()) + }) { + Ok(usage) => { + self.acp_thread + .update(cx, |thread, cx| { + thread.update_token_usage(usage, cx); + }) + .ok(); + Task::ready(Ok(())) + } + Err(error) => Task::ready(Err(error)), + } + } +} + +struct NativeAgentSessionResume { + connection: NativeAgentConnection, + session_id: acp::SessionId, +} + +impl acp_thread::AgentSessionResume for NativeAgentSessionResume { + fn run(&self, cx: &mut App) -> Task> { + self.connection + .run_turn(self.session_id.clone(), cx, |thread, cx| { + thread.update(cx, |thread, cx| thread.resume(cx)) + }) + } +} + +struct NativeAgentSessionSetTitle { + connection: NativeAgentConnection, + session_id: acp::SessionId, +} + +impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle { + fn run(&self, title: SharedString, cx: &mut App) -> Task> { + let Some(session) = self.connection.0.read(cx).sessions.get(&self.session_id) else { + return Task::ready(Err(anyhow!("session not found"))); + }; + let thread = session.thread.clone(); + thread.update(cx, |thread, cx| thread.set_title(title, cx)); + Task::ready(Ok(())) + } +} + +pub struct AcpThreadEnvironment { + acp_thread: WeakEntity, +} + +impl ThreadEnvironment for AcpThreadEnvironment { + fn create_terminal( + &self, + command: String, + cwd: Option, + output_byte_limit: Option, + cx: &mut AsyncApp, + ) -> Task>> { + let task = self.acp_thread.update(cx, |thread, cx| { + thread.create_terminal(command, vec![], vec![], cwd, output_byte_limit, cx) + }); + + let acp_thread = self.acp_thread.clone(); + cx.spawn(async move |cx| { + let terminal = task?.await?; + + let (drop_tx, drop_rx) = oneshot::channel(); + let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone())?; + + cx.spawn(async move |cx| { + drop_rx.await.ok(); + acp_thread.update(cx, |thread, cx| thread.release_terminal(terminal_id, cx)) + }) + .detach(); + + let handle = AcpTerminalHandle { + terminal, + _drop_tx: Some(drop_tx), + }; + + Ok(Rc::new(handle) as _) + }) + } +} + +pub struct AcpTerminalHandle { + terminal: Entity, + _drop_tx: Option>, +} + +impl TerminalHandle for AcpTerminalHandle 
{ + fn id(&self, cx: &AsyncApp) -> Result { + self.terminal.read_with(cx, |term, _cx| term.id().clone()) + } + + fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { + self.terminal + .read_with(cx, |term, _cx| term.wait_for_exit()) + } + + fn current_output(&self, cx: &AsyncApp) -> Result { + self.terminal + .read_with(cx, |term, cx| term.current_output(cx)) + } +} + +#[cfg(test)] +mod internal_tests { + use crate::HistoryEntryId; + + use super::*; + use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri}; + use fs::FakeFs; + use gpui::TestAppContext; + use indoc::formatdoc; + use language_model::fake_provider::FakeLanguageModel; + use serde_json::json; + use settings::SettingsStore; + use util::{path, rel_path::rel_path}; + + #[gpui::test] + async fn test_maintaining_project_context(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": {} + }), + ) + .await; + let project = Project::test(fs.clone(), [], cx).await; + let text_thread_store = + cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + let agent = NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + agent.read_with(cx, |agent, cx| { + assert_eq!(agent.project_context.read(cx).worktrees, vec![]) + }); + + let worktree = project + .update(cx, |project, cx| project.create_worktree("/a", true, cx)) + .await + .unwrap(); + cx.run_until_parked(); + agent.read_with(cx, |agent, cx| { + assert_eq!( + agent.project_context.read(cx).worktrees, + vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: None + }] + ) + }); + + // Creating `/a/.rules` updates the project context. 
+ fs.insert_file("/a/.rules", Vec::new()).await; + cx.run_until_parked(); + agent.read_with(cx, |agent, cx| { + let rules_entry = worktree + .read(cx) + .entry_for_path(rel_path(".rules")) + .unwrap(); + assert_eq!( + agent.project_context.read(cx).worktrees, + vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: Some(RulesFileContext { + path_in_worktree: rel_path(".rules").into(), + text: "".into(), + project_entry_id: rules_entry.id.to_usize() + }) + }] + ) + }); + } + + #[gpui::test] + async fn test_listing_models(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({ "a": {} })).await; + let project = Project::test(fs.clone(), [], cx).await; + let text_thread_store = + cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + let connection = NativeAgentConnection( + NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(), + ); + + // Create a thread/session + let acp_thread = cx + .update(|cx| { + Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) + }) + .await + .unwrap(); + + let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); + + let models = cx + .update(|cx| { + connection + .model_selector(&session_id) + .unwrap() + .list_models(cx) + }) + .await + .unwrap(); + + let acp_thread::AgentModelList::Grouped(models) = models else { + panic!("Unexpected model group"); + }; + assert_eq!( + models, + IndexMap::from_iter([( + AgentModelGroupName("Fake".into()), + vec![AgentModelInfo { + id: acp::ModelId("fake/fake".into()), + name: "Fake".into(), + description: None, + icon: Some(ui::IconName::ZedAssistant), + }] + )]) + ); + } + + #[gpui::test] + async fn test_model_selection_persists_to_settings(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.create_dir(paths::settings_file().parent().unwrap()) + .await + .unwrap(); + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "default_model": { + "provider": "foo", + "model": "bar" + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + let project = Project::test(fs.clone(), [], cx).await; + + let text_thread_store = + cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + + // Create the agent and connection + let agent = NativeAgent::new( + project.clone(), + history_store, + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = NativeAgentConnection(agent.clone()); + + // Create a thread/session + let acp_thread = cx + .update(|cx| { + Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) + }) + .await + .unwrap(); + + let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); + + // Select a model + let selector = connection.model_selector(&session_id).unwrap(); + let model_id = acp::ModelId("fake/fake".into()); + cx.update(|cx| selector.select_model(model_id.clone(), cx)) + .await + .unwrap(); + + // Verify the thread has the selected model + agent.read_with(cx, |agent, _| { + let session = agent.sessions.get(&session_id).unwrap(); + session.thread.read_with(cx, |thread, _| { + assert_eq!(thread.model().unwrap().id().0, "fake"); + }); + }); + + 
cx.run_until_parked(); + + // Verify settings file was updated + let settings_content = fs.load(paths::settings_file()).await.unwrap(); + let settings_json: serde_json::Value = serde_json::from_str(&settings_content).unwrap(); + + // Check that the agent settings contain the selected model + assert_eq!( + settings_json["agent"]["default_model"]["model"], + json!("fake") + ); + assert_eq!( + settings_json["agent"]["default_model"]["provider"], + json!("fake") + ); + } + + #[gpui::test] + async fn test_save_load_thread(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "b.md": "Lorem" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let text_thread_store = + cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + let agent = NativeAgent::new( + project.clone(), + history_store.clone(), + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_thread(project.clone(), Path::new(""), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + // Ensure empty threads are not saved, even if they get mutated. + let model = Arc::new(FakeLanguageModel::default()); + let summary_model = Arc::new(FakeLanguageModel::default()); + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + thread.set_summarization_model(Some(summary_model.clone()), cx); + }); + cx.run_until_parked(); + assert_eq!(history_entries(&history_store, cx), vec![]); + + let send = acp_thread.update(cx, |thread, cx| { + thread.send( + vec![ + "What does ".into(), + acp::ContentBlock::ResourceLink(acp::ResourceLink { + name: "b.md".into(), + uri: MentionUri::File { + abs_path: path!("/a/b.md").into(), + } + .to_uri() + .to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, + meta: None, + }), + " mean?".into(), + ], + cx, + ) + }); + let send = cx.foreground_executor().spawn(send); + cx.run_until_parked(); + + model.send_last_completion_stream_text_chunk("Lorem."); + model.end_last_completion_stream(); + cx.run_until_parked(); + summary_model + .send_last_completion_stream_text_chunk(&format!("Explaining {}", path!("/a/b.md"))); + summary_model.end_last_completion_stream(); + + send.await.unwrap(); + let uri = MentionUri::File { + abs_path: path!("/a/b.md").into(), + } + .to_uri(); + acp_thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + formatdoc! {" + ## User + + What does [@b.md]({uri}) mean? + + ## Assistant + + Lorem. + + "} + ) + }); + + cx.run_until_parked(); + + // Drop the ACP thread, which should cause the session to be dropped as well. + cx.update(|_| { + drop(thread); + drop(acp_thread); + }); + agent.read_with(cx, |agent, _| { + assert_eq!(agent.sessions.keys().cloned().collect::>(), []); + }); + + // Ensure the thread can be reloaded from disk. 
+ assert_eq!( + history_entries(&history_store, cx), + vec![( + HistoryEntryId::AcpThread(session_id.clone()), + format!("Explaining {}", path!("/a/b.md")) + )] + ); + let acp_thread = agent + .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .await + .unwrap(); + acp_thread.read_with(cx, |thread, cx| { + assert_eq!( + thread.to_markdown(cx), + formatdoc! {" + ## User + + What does [@b.md]({uri}) mean? + + ## Assistant + + Lorem. + + "} + ) + }); + } + + fn history_entries( + history: &Entity, + cx: &mut TestAppContext, + ) -> Vec<(HistoryEntryId, String)> { + history.read_with(cx, |history, _| { + history + .entries() + .map(|e| (e.id(), e.title().to_string())) + .collect::>() + }) + } -pub fn init(fs: Arc, cx: &mut gpui::App) { - thread_store::init(fs, cx); + fn init_test(cx: &mut TestAppContext) { + env_logger::try_init().ok(); + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + Project::init_settings(cx); + agent_settings::init(cx); + language::init(cx); + LanguageModelRegistry::test(cx); + }); + } } diff --git a/crates/agent/src/agent_profile.rs b/crates/agent/src/agent_profile.rs deleted file mode 100644 index 40ba2f07db7ad425a5d0e9befe91499eb746b74e..0000000000000000000000000000000000000000 --- a/crates/agent/src/agent_profile.rs +++ /dev/null @@ -1,341 +0,0 @@ -use std::sync::Arc; - -use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings}; -use assistant_tool::{Tool, ToolSource, ToolWorkingSet, UniqueToolName}; -use collections::IndexMap; -use convert_case::{Case, Casing}; -use fs::Fs; -use gpui::{App, Entity, SharedString}; -use settings::{Settings, update_settings_file}; -use util::ResultExt; - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct AgentProfile { - id: AgentProfileId, - tool_set: Entity, -} - -pub type AvailableProfiles = IndexMap; - -impl AgentProfile { - pub fn new(id: AgentProfileId, tool_set: Entity) -> Self { - Self { id, tool_set } - } - - /// Saves a new profile to the settings. 
- pub fn create( - name: String, - base_profile_id: Option, - fs: Arc, - cx: &App, - ) -> AgentProfileId { - let id = AgentProfileId(name.to_case(Case::Kebab).into()); - - let base_profile = - base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned()); - - let profile_settings = AgentProfileSettings { - name: name.into(), - tools: base_profile - .as_ref() - .map(|profile| profile.tools.clone()) - .unwrap_or_default(), - enable_all_context_servers: base_profile - .as_ref() - .map(|profile| profile.enable_all_context_servers) - .unwrap_or_default(), - context_servers: base_profile - .map(|profile| profile.context_servers) - .unwrap_or_default(), - }; - - update_settings_file(fs, cx, { - let id = id.clone(); - move |settings, _cx| { - profile_settings.save_to_settings(id, settings).log_err(); - } - }); - - id - } - - /// Returns a map of AgentProfileIds to their names - pub fn available_profiles(cx: &App) -> AvailableProfiles { - let mut profiles = AvailableProfiles::default(); - for (id, profile) in AgentSettings::get_global(cx).profiles.iter() { - profiles.insert(id.clone(), profile.name.clone()); - } - profiles - } - - pub fn id(&self) -> &AgentProfileId { - &self.id - } - - pub fn enabled_tools(&self, cx: &App) -> Vec<(UniqueToolName, Arc)> { - let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else { - return Vec::new(); - }; - - self.tool_set - .read(cx) - .tools(cx) - .into_iter() - .filter(|(_, tool)| Self::is_enabled(settings, tool.source(), tool.name())) - .collect() - } - - pub fn is_tool_enabled(&self, source: ToolSource, tool_name: String, cx: &App) -> bool { - let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else { - return false; - }; - - Self::is_enabled(settings, source, tool_name) - } - - fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool { - match source { - ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false), - ToolSource::ContextServer { id } => settings - .context_servers - .get(id.as_ref()) - .and_then(|preset| preset.tools.get(name.as_str()).copied()) - .unwrap_or(settings.enable_all_context_servers), - } - } -} - -#[cfg(test)] -mod tests { - use agent_settings::ContextServerPreset; - use assistant_tool::ToolRegistry; - use collections::IndexMap; - use gpui::SharedString; - use gpui::{AppContext, TestAppContext}; - use http_client::FakeHttpClient; - use project::Project; - use settings::{Settings, SettingsStore}; - - use super::*; - - #[gpui::test] - async fn test_enabled_built_in_tools_for_profile(cx: &mut TestAppContext) { - init_test_settings(cx); - - let id = AgentProfileId::default(); - let profile_settings = cx.read(|cx| { - AgentSettings::get_global(cx) - .profiles - .get(&id) - .unwrap() - .clone() - }); - let tool_set = default_tool_set(cx); - - let profile = AgentProfile::new(id, tool_set); - - let mut enabled_tools = cx - .read(|cx| profile.enabled_tools(cx)) - .into_iter() - .map(|(_, tool)| tool.name()) - .collect::>(); - enabled_tools.sort(); - - let mut expected_tools = profile_settings - .tools - .into_iter() - .filter_map(|(tool, enabled)| enabled.then_some(tool.to_string())) - // Provider dependent - .filter(|tool| tool != "web_search") - .collect::>(); - // Plus all registered MCP tools - expected_tools.extend(["enabled_mcp_tool".into(), "disabled_mcp_tool".into()]); - expected_tools.sort(); - - assert_eq!(enabled_tools, expected_tools); - } - - #[gpui::test] - async fn test_custom_mcp_settings(cx: &mut 
TestAppContext) { - init_test_settings(cx); - - let id = AgentProfileId("custom_mcp".into()); - let profile_settings = cx.read(|cx| { - AgentSettings::get_global(cx) - .profiles - .get(&id) - .unwrap() - .clone() - }); - let tool_set = default_tool_set(cx); - - let profile = AgentProfile::new(id, tool_set); - - let mut enabled_tools = cx - .read(|cx| profile.enabled_tools(cx)) - .into_iter() - .map(|(_, tool)| tool.name()) - .collect::>(); - enabled_tools.sort(); - - let mut expected_tools = profile_settings.context_servers["mcp"] - .tools - .iter() - .filter_map(|(key, enabled)| enabled.then(|| key.to_string())) - .collect::>(); - expected_tools.sort(); - - assert_eq!(enabled_tools, expected_tools); - } - - #[gpui::test] - async fn test_only_built_in(cx: &mut TestAppContext) { - init_test_settings(cx); - - let id = AgentProfileId("write_minus_mcp".into()); - let profile_settings = cx.read(|cx| { - AgentSettings::get_global(cx) - .profiles - .get(&id) - .unwrap() - .clone() - }); - let tool_set = default_tool_set(cx); - - let profile = AgentProfile::new(id, tool_set); - - let mut enabled_tools = cx - .read(|cx| profile.enabled_tools(cx)) - .into_iter() - .map(|(_, tool)| tool.name()) - .collect::>(); - enabled_tools.sort(); - - let mut expected_tools = profile_settings - .tools - .into_iter() - .filter_map(|(tool, enabled)| enabled.then_some(tool.to_string())) - // Provider dependent - .filter(|tool| tool != "web_search") - .collect::>(); - expected_tools.sort(); - - assert_eq!(enabled_tools, expected_tools); - } - - fn init_test_settings(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - Project::init_settings(cx); - AgentSettings::register(cx); - language_model::init_settings(cx); - ToolRegistry::default_global(cx); - assistant_tools::init(FakeHttpClient::with_404_response(), cx); - }); - - cx.update(|cx| { - let mut agent_settings = AgentSettings::get_global(cx).clone(); - agent_settings.profiles.insert( - AgentProfileId("write_minus_mcp".into()), - AgentProfileSettings { - name: "write_minus_mcp".into(), - enable_all_context_servers: false, - ..agent_settings.profiles[&AgentProfileId::default()].clone() - }, - ); - agent_settings.profiles.insert( - AgentProfileId("custom_mcp".into()), - AgentProfileSettings { - name: "mcp".into(), - tools: IndexMap::default(), - enable_all_context_servers: false, - context_servers: IndexMap::from_iter([("mcp".into(), context_server_preset())]), - }, - ); - AgentSettings::override_global(agent_settings, cx); - }) - } - - fn context_server_preset() -> ContextServerPreset { - ContextServerPreset { - tools: IndexMap::from_iter([ - ("enabled_mcp_tool".into(), true), - ("disabled_mcp_tool".into(), false), - ]), - } - } - - fn default_tool_set(cx: &mut TestAppContext) -> Entity { - cx.new(|cx| { - let mut tool_set = ToolWorkingSet::default(); - tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")), cx); - tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")), cx); - tool_set - }) - } - - struct FakeTool { - name: String, - source: SharedString, - } - - impl FakeTool { - fn new(name: impl Into, source: impl Into) -> Self { - Self { - name: name.into(), - source: source.into(), - } - } - } - - impl Tool for FakeTool { - fn name(&self) -> String { - self.name.clone() - } - - fn source(&self) -> ToolSource { - ToolSource::ContextServer { - id: self.source.clone(), - } - } - - fn description(&self) -> String { - unimplemented!() - } - - fn icon(&self) -> 
icons::IconName { - unimplemented!() - } - - fn needs_confirmation( - &self, - _input: &serde_json::Value, - _project: &Entity, - _cx: &App, - ) -> bool { - unimplemented!() - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - unimplemented!() - } - - fn run( - self: Arc, - _input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - _cx: &mut App, - ) -> assistant_tool::ToolResult { - unimplemented!() - } - - fn may_perform_edits(&self) -> bool { - unimplemented!() - } - } -} diff --git a/crates/agent/src/context_server_tool.rs b/crates/agent/src/context_server_tool.rs deleted file mode 100644 index 696c569356bca36adf54bc84ec52fa7295048b75..0000000000000000000000000000000000000000 --- a/crates/agent/src/context_server_tool.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::sync::Arc; - -use action_log::ActionLog; -use anyhow::{Result, anyhow, bail}; -use assistant_tool::{Tool, ToolResult, ToolSource}; -use context_server::{ContextServerId, types}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use icons::IconName; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::{Project, context_server_store::ContextServerStore}; - -pub struct ContextServerTool { - store: Entity, - server_id: ContextServerId, - tool: types::Tool, -} - -impl ContextServerTool { - pub fn new( - store: Entity, - server_id: ContextServerId, - tool: types::Tool, - ) -> Self { - Self { - store, - server_id, - tool, - } - } -} - -impl Tool for ContextServerTool { - fn name(&self) -> String { - self.tool.name.clone() - } - - fn description(&self) -> String { - self.tool.description.clone().unwrap_or_default() - } - - fn icon(&self) -> IconName { - IconName::ToolHammer - } - - fn source(&self) -> ToolSource { - ToolSource::ContextServer { - id: self.server_id.clone().0.into(), - } - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - true - } - - fn may_perform_edits(&self) -> bool { - true - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - let mut schema = self.tool.input_schema.clone(); - assistant_tool::adapt_schema_to_format(&mut schema, format)?; - Ok(match schema { - serde_json::Value::Null => { - serde_json::json!({ "type": "object", "properties": [] }) - } - serde_json::Value::Object(map) if map.is_empty() => { - serde_json::json!({ "type": "object", "properties": [] }) - } - _ => schema, - }) - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - format!("Run MCP tool `{}`", self.tool.name) - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - if let Some(server) = self.store.read(cx).get_running_server(&self.server_id) { - let tool_name = self.tool.name.clone(); - - cx.spawn(async move |_cx| { - let Some(protocol) = server.client() else { - bail!("Context server not initialized"); - }; - - let arguments = if let serde_json::Value::Object(map) = input { - Some(map.into_iter().collect()) - } else { - None - }; - - log::trace!( - "Running tool: {} with arguments: {:?}", - tool_name, - arguments - ); - let response = protocol - .request::( - context_server::types::CallToolParams { - name: tool_name, - arguments, - meta: None, - }, - ) - .await?; - - let mut result = String::new(); - for content in response.content { - match content { - types::ToolResponseContent::Text { text } => 
{ - result.push_str(&text); - } - types::ToolResponseContent::Image { .. } => { - log::warn!("Ignoring image content from tool response"); - } - types::ToolResponseContent::Audio { .. } => { - log::warn!("Ignoring audio content from tool response"); - } - types::ToolResponseContent::Resource { .. } => { - log::warn!("Ignoring resource content from tool response"); - } - } - } - Ok(result.into()) - }) - .into() - } else { - Task::ready(Err(anyhow!("Context server not found"))).into() - } - } -} diff --git a/crates/agent2/src/db.rs b/crates/agent/src/db.rs similarity index 78% rename from crates/agent2/src/db.rs rename to crates/agent/src/db.rs index 563ccdd7ca5b2c2cc63a8c7f30c59b9443f8a0bd..c72e20571e2761788157a5fd10df147c2b414e4a 100644 --- a/crates/agent2/src/db.rs +++ b/crates/agent/src/db.rs @@ -1,6 +1,5 @@ use crate::{AgentMessage, AgentMessageContent, UserMessage, UserMessageContent}; use acp_thread::UserMessageId; -use agent::{thread::DetailedSummaryState, thread_store}; use agent_client_protocol as acp; use agent_settings::{AgentProfileId, CompletionMode}; use anyhow::{Result, anyhow}; @@ -21,8 +20,8 @@ use ui::{App, SharedString}; use zed_env_vars::ZED_STATELESS; pub type DbMessage = crate::Message; -pub type DbSummary = DetailedSummaryState; -pub type DbLanguageModel = thread_store::SerializedLanguageModel; +pub type DbSummary = crate::legacy_thread::DetailedSummaryState; +pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DbThreadMetadata { @@ -40,7 +39,7 @@ pub struct DbThread { #[serde(default)] pub detailed_summary: Option, #[serde(default)] - pub initial_project_snapshot: Option>, + pub initial_project_snapshot: Option>, #[serde(default)] pub cumulative_token_usage: language_model::TokenUsage, #[serde(default)] @@ -61,13 +60,17 @@ impl DbThread { match saved_thread_json.get("version") { Some(serde_json::Value::String(version)) => match version.as_str() { Self::VERSION => Ok(serde_json::from_value(saved_thread_json)?), - _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?), + _ => Self::upgrade_from_agent_1(crate::legacy_thread::SerializedThread::from_json( + json, + )?), }, - _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?), + _ => { + Self::upgrade_from_agent_1(crate::legacy_thread::SerializedThread::from_json(json)?) + } } } - fn upgrade_from_agent_1(thread: agent::SerializedThread) -> Result { + fn upgrade_from_agent_1(thread: crate::legacy_thread::SerializedThread) -> Result { let mut messages = Vec::new(); let mut request_token_usage = HashMap::default(); @@ -80,14 +83,19 @@ impl DbThread { // Convert segments to content for segment in msg.segments { match segment { - thread_store::SerializedMessageSegment::Text { text } => { + crate::legacy_thread::SerializedMessageSegment::Text { text } => { content.push(UserMessageContent::Text(text)); } - thread_store::SerializedMessageSegment::Thinking { text, .. } => { + crate::legacy_thread::SerializedMessageSegment::Thinking { + text, + .. + } => { // User messages don't have thinking segments, but handle gracefully content.push(UserMessageContent::Text(text)); } - thread_store::SerializedMessageSegment::RedactedThinking { .. } => { + crate::legacy_thread::SerializedMessageSegment::RedactedThinking { + .. + } => { // User messages don't have redacted thinking, skip. 
} } @@ -113,16 +121,18 @@ impl DbThread { // Convert segments to content for segment in msg.segments { match segment { - thread_store::SerializedMessageSegment::Text { text } => { + crate::legacy_thread::SerializedMessageSegment::Text { text } => { content.push(AgentMessageContent::Text(text)); } - thread_store::SerializedMessageSegment::Thinking { + crate::legacy_thread::SerializedMessageSegment::Thinking { text, signature, } => { content.push(AgentMessageContent::Thinking { text, signature }); } - thread_store::SerializedMessageSegment::RedactedThinking { data } => { + crate::legacy_thread::SerializedMessageSegment::RedactedThinking { + data, + } => { content.push(AgentMessageContent::RedactedThinking(data)); } } @@ -187,10 +197,9 @@ impl DbThread { messages, updated_at: thread.updated_at, detailed_summary: match thread.detailed_summary_state { - DetailedSummaryState::NotGenerated | DetailedSummaryState::Generating { .. } => { - None - } - DetailedSummaryState::Generated { text, .. } => Some(text), + crate::legacy_thread::DetailedSummaryState::NotGenerated + | crate::legacy_thread::DetailedSummaryState::Generating => None, + crate::legacy_thread::DetailedSummaryState::Generated { text, .. } => Some(text), }, initial_project_snapshot: thread.initial_project_snapshot, cumulative_token_usage: thread.cumulative_token_usage, @@ -414,84 +423,3 @@ impl ThreadsDatabase { }) } } - -#[cfg(test)] -mod tests { - - use super::*; - use agent::MessageSegment; - use agent::context::LoadedContext; - use client::Client; - use fs::{FakeFs, Fs}; - use gpui::AppContext; - use gpui::TestAppContext; - use http_client::FakeHttpClient; - use language_model::Role; - use project::Project; - use settings::SettingsStore; - - fn init_test(fs: Arc, cx: &mut TestAppContext) { - env_logger::try_init().ok(); - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - Project::init_settings(cx); - language::init(cx); - - let http_client = FakeHttpClient::with_404_response(); - let clock = Arc::new(clock::FakeSystemClock::new()); - let client = Client::new(clock, http_client, cx); - agent::init(fs, cx); - agent_settings::init(cx); - language_model::init(client, cx); - }); - } - - #[gpui::test] - async fn test_retrieving_old_thread(cx: &mut TestAppContext) { - let fs = FakeFs::new(cx.executor()); - init_test(fs.clone(), cx); - let project = Project::test(fs, [], cx).await; - - // Save a thread using the old agent. - let thread_store = cx.new(|cx| agent::ThreadStore::fake(project, cx)); - let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx)); - thread.update(cx, |thread, cx| { - thread.insert_message( - Role::User, - vec![MessageSegment::Text("Hey!".into())], - LoadedContext::default(), - vec![], - false, - cx, - ); - thread.insert_message( - Role::Assistant, - vec![MessageSegment::Text("How're you doing?".into())], - LoadedContext::default(), - vec![], - false, - cx, - ) - }); - thread_store - .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx)) - .await - .unwrap(); - - // Open that same thread using the new agent. 
- let db = cx.update(ThreadsDatabase::connect).await.unwrap(); - let threads = db.list_threads().await.unwrap(); - assert_eq!(threads.len(), 1); - let thread = db - .load_thread(threads[0].id.clone()) - .await - .unwrap() - .unwrap(); - assert_eq!(thread.messages[0].to_markdown(), "## User\n\nHey!\n"); - assert_eq!( - thread.messages[1].to_markdown(), - "## Assistant\n\nHow're you doing?\n" - ); - } -} diff --git a/crates/assistant_tools/src/edit_agent.rs b/crates/agent/src/edit_agent.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent.rs rename to crates/agent/src/edit_agent.rs diff --git a/crates/assistant_tools/src/edit_agent/create_file_parser.rs b/crates/agent/src/edit_agent/create_file_parser.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/create_file_parser.rs rename to crates/agent/src/edit_agent/create_file_parser.rs diff --git a/crates/assistant_tools/src/edit_agent/edit_parser.rs b/crates/agent/src/edit_agent/edit_parser.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/edit_parser.rs rename to crates/agent/src/edit_agent/edit_parser.rs diff --git a/crates/assistant_tools/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs similarity index 96% rename from crates/assistant_tools/src/edit_agent/evals.rs rename to crates/agent/src/edit_agent/evals.rs index 515e22d5f8b184a875cd91038d7bfa0a7d8127a7..48977df1974cc104bc10fdf8975ed09172a1a938 100644 --- a/crates/assistant_tools/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1,12 +1,8 @@ use super::*; use crate::{ - ReadFileToolInput, - edit_file_tool::{EditFileMode, EditFileToolInput}, - grep_tool::GrepToolInput, - list_directory_tool::ListDirectoryToolInput, + EditFileMode, EditFileToolInput, GrepToolInput, ListDirectoryToolInput, ReadFileToolInput, }; use Role::*; -use assistant_tool::ToolRegistry; use client::{Client, UserStore}; use collections::HashMap; use fs::FakeFs; @@ -15,11 +11,11 @@ use gpui::{AppContext, TestAppContext, Timer}; use http_client::StatusCode; use indoc::{formatdoc, indoc}; use language_model::{ - LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult, - LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, SelectedModel, + LanguageModelRegistry, LanguageModelToolResult, LanguageModelToolResultContent, + LanguageModelToolUse, LanguageModelToolUseId, SelectedModel, }; use project::Project; -use prompt_store::{ModelContext, ProjectContext, PromptBuilder, WorktreeContext}; +use prompt_store::{ProjectContext, WorktreeContext}; use rand::prelude::*; use reqwest_client::ReqwestClient; use serde_json::json; @@ -35,7 +31,7 @@ use std::{ use util::path; #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_extract_handle_command_output() { // Test how well agent generates multiple edit hunks. 
// @@ -112,7 +108,7 @@ fn eval_extract_handle_command_output() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_delete_run_git_blame() { // Model | Pass rate // ----------------------------|---------- @@ -121,6 +117,7 @@ fn eval_delete_run_git_blame() { // gemini-2.5-pro-06-05 | 1.0 (2025-06-16) // gemini-2.5-flash | // gpt-4.1 | + let input_file_path = "root/blame.rs"; let input_file_content = include_str!("evals/fixtures/delete_run_git_blame/before.rs"); let output_file_content = include_str!("evals/fixtures/delete_run_git_blame/after.rs"); @@ -174,7 +171,7 @@ fn eval_delete_run_git_blame() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_translate_doc_comments() { // Model | Pass rate // ============================================ @@ -184,6 +181,7 @@ fn eval_translate_doc_comments() { // gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22) // gemini-2.5-flash-preview-04-17 | // gpt-4.1 | + let input_file_path = "root/canvas.rs"; let input_file_content = include_str!("evals/fixtures/translate_doc_comments/before.rs"); let edit_description = "Translate all doc comments to Italian"; @@ -236,7 +234,7 @@ fn eval_translate_doc_comments() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { // Model | Pass rate // ============================================ @@ -246,6 +244,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { // gemini-2.5-pro-preview-latest | 0.99 (2025-06-16) // gemini-2.5-flash-preview-04-17 | // gpt-4.1 | + let input_file_path = "root/lib.rs"; let input_file_content = include_str!("evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs"); @@ -361,7 +360,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_disable_cursor_blinking() { // Model | Pass rate // ============================================ @@ -371,6 +370,7 @@ fn eval_disable_cursor_blinking() { // gemini-2.5-pro | 0.95 (2025-07-14) // gemini-2.5-flash-preview-04-17 | 0.78 (2025-07-14) // gpt-4.1 | 0.00 (2025-07-14) (follows edit_description too literally) + let input_file_path = "root/editor.rs"; let input_file_content = include_str!("evals/fixtures/disable_cursor_blinking/before.rs"); let edit_description = "Comment out the call to `BlinkManager::enable`"; @@ -446,7 +446,7 @@ fn eval_disable_cursor_blinking() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_from_pixels_constructor() { // Results for 2025-06-13 // @@ -463,6 +463,7 @@ fn eval_from_pixels_constructor() { // claude-3.7-sonnet | 2025-06-14 | 0.88 // gemini-2.5-pro-preview-06-05 | 2025-06-16 | 0.98 // gpt-4.1 | + let input_file_path = "root/canvas.rs"; let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs"); let edit_description = "Implement from_pixels constructor and add tests."; @@ -655,7 +656,7 @@ fn eval_from_pixels_constructor() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_zode() { // Model | Pass rate // ============================================ @@ -665,6 +666,7 @@ fn eval_zode() { // gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22) // gemini-2.5-flash-preview-04-17 | 1.0 (2025-05-22) // gpt-4.1 | 
1.0 (2025-05-22) + let input_file_path = "root/zode.py"; let input_content = None; let edit_description = "Create the main Zode CLI script"; @@ -761,7 +763,7 @@ fn eval_zode() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_add_overwrite_test() { // Model | Pass rate // ============================================ @@ -771,6 +773,7 @@ fn eval_add_overwrite_test() { // gemini-2.5-pro-preview-03-25 | 0.35 (2025-05-22) // gemini-2.5-flash-preview-04-17 | // gpt-4.1 | + let input_file_path = "root/action_log.rs"; let input_file_content = include_str!("evals/fixtures/add_overwrite_test/before.rs"); let edit_description = "Add a new test for overwriting a file in action_log.rs"; @@ -992,7 +995,7 @@ fn eval_add_overwrite_test() { } #[test] -#[cfg_attr(not(feature = "eval"), ignore)] +#[cfg_attr(not(feature = "edit-agent-eval"), ignore)] fn eval_create_empty_file() { // Check that Edit Agent can create a file without writing its // thoughts into it. This issue is not specific to empty files, but @@ -1010,7 +1013,7 @@ fn eval_create_empty_file() { // // TODO: gpt-4.1-mini errored 38 times: // "data did not match any variant of untagged enum ResponseStreamResult" - // + let input_file_content = None; let expected_output_content = String::new(); eval( @@ -1475,24 +1478,32 @@ impl EditAgentTest { language::init(cx); language_model::init(client.clone(), cx); language_models::init(user_store, client.clone(), cx); - crate::init(client.http_client(), cx); }); fs.insert_tree("/root", json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let agent_model = SelectedModel::from_str( - &std::env::var("ZED_AGENT_MODEL") - .unwrap_or("anthropic/claude-3-7-sonnet-latest".into()), + &std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()), ) .unwrap(); let judge_model = SelectedModel::from_str( - &std::env::var("ZED_JUDGE_MODEL") - .unwrap_or("anthropic/claude-3-7-sonnet-latest".into()), + &std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()), ) .unwrap(); + + let authenticate_provider_tasks = cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .providers() + .iter() + .map(|p| p.authenticate(cx)) + .collect::>() + }) + }); let (agent_model, judge_model) = cx .update(|cx| { cx.spawn(async move |cx| { + futures::future::join_all(authenticate_provider_tasks).await; let agent_model = Self::load_model(&agent_model, cx).await; let judge_model = Self::load_model(&judge_model, cx).await; (agent_model.unwrap(), judge_model.unwrap()) @@ -1536,7 +1547,7 @@ impl EditAgentTest { model.provider_id() == selected_model.provider && model.id() == selected_model.model }) - .expect("Model not found"); + .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0)); model }) } @@ -1553,39 +1564,27 @@ impl EditAgentTest { .update(cx, |project, cx| project.open_buffer(path, cx)) .await .unwrap(); - let tools = cx.update(|cx| { - ToolRegistry::default_global(cx) - .tools() - .into_iter() - .filter_map(|tool| { - let input_schema = tool - .input_schema(self.agent.model.tool_input_format()) - .ok()?; - Some(LanguageModelRequestTool { - name: tool.name(), - description: tool.description(), - input_schema, - }) - }) - .collect::>() - }); - let tool_names = tools - .iter() - .map(|tool| tool.name.clone()) - .collect::>(); - let worktrees = vec![WorktreeContext { - root_name: "root".to_string(), - abs_path: 
Path::new("/path/to/root").into(), - rules_file: None, - }]; - let prompt_builder = PromptBuilder::new(None)?; - let project_context = ProjectContext::new(worktrees, Vec::default()); - let system_prompt = prompt_builder.generate_assistant_system_prompt( - &project_context, - &ModelContext { + + let tools = crate::built_in_tools().collect::>(); + + let system_prompt = { + let worktrees = vec![WorktreeContext { + root_name: "root".to_string(), + abs_path: Path::new("/path/to/root").into(), + rules_file: None, + }]; + let project_context = ProjectContext::new(worktrees, Vec::default()); + let tool_names = tools + .iter() + .map(|tool| tool.name.clone().into()) + .collect::>(); + let template = crate::SystemPromptTemplate { + project: &project_context, available_tools: tool_names, - }, - )?; + }; + let templates = Templates::new(); + template.render(&templates).unwrap() + }; let has_system_prompt = eval .conversation diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs b/crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/add_overwrite_test/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs b/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs rename to crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/after.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs b/crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/delete_run_git_blame/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff rename to crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-01.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff rename to crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-02.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff similarity index 100% rename from 
crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff rename to crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-03.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff b/crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff rename to crates/agent/src/edit_agent/evals/fixtures/disable_cursor_blinking/possible-04.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-01.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-02.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-03.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-04.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-05.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff rename 
to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-06.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-07.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff b/crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff rename to crates/agent/src/edit_agent/evals/fixtures/extract_handle_command_output/possible-08.diff diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs b/crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/from_pixels_constructor/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs b/crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/translate_doc_comments/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs b/crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs rename to crates/agent/src/edit_agent/evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/zode/prompt.md b/crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/zode/prompt.md rename to crates/agent/src/edit_agent/evals/fixtures/zode/prompt.md diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/zode/react.py b/crates/agent/src/edit_agent/evals/fixtures/zode/react.py similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/zode/react.py rename to crates/agent/src/edit_agent/evals/fixtures/zode/react.py diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/zode/react_test.py b/crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py similarity index 100% rename from crates/assistant_tools/src/edit_agent/evals/fixtures/zode/react_test.py rename to crates/agent/src/edit_agent/evals/fixtures/zode/react_test.py diff --git a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs similarity index 98% rename from crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs rename to crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs index 386b8204400a157b37b2f356829fa27df3abca92..904ec05a8c7565d5052cd546fc0bf6d723ffa375 100644 --- 
a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs @@ -308,12 +308,13 @@ mod tests { use indoc::indoc; use language::{BufferId, TextBuffer}; use rand::prelude::*; + use text::ReplicaId; use util::test::{generate_marked_text, marked_text_ranges}; #[test] fn test_empty_query() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", ); @@ -327,7 +328,7 @@ mod tests { #[test] fn test_streaming_exact_match() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", ); @@ -351,7 +352,7 @@ mod tests { #[test] fn test_streaming_fuzzy_match() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), indoc! {" function foo(a, b) { @@ -385,7 +386,7 @@ mod tests { #[test] fn test_incremental_improvement() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Line 1\nLine 2\nLine 3\nLine 4\nLine 5", ); @@ -410,7 +411,7 @@ mod tests { #[test] fn test_incomplete_lines_buffering() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), indoc! {" The quick brown fox @@ -437,7 +438,7 @@ mod tests { #[test] fn test_multiline_fuzzy_match() { let buffer = TextBuffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), indoc! {r#" impl Display for User { @@ -691,7 +692,11 @@ mod tests { } "#}; - let buffer = TextBuffer::new(0, BufferId::new(1).unwrap(), text.to_string()); + let buffer = TextBuffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text.to_string(), + ); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); @@ -724,7 +729,7 @@ mod tests { #[track_caller] fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) { let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false); - let buffer = TextBuffer::new(0, BufferId::new(1).unwrap(), text.clone()); + let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone()); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot); diff --git a/crates/agent/src/history_store.rs b/crates/agent/src/history_store.rs index 4b1795047b7444dc74f8a41097c0c66aa54ecfd9..3bfbd99677feed5db53d96d2fa96316ac49abce4 100644 --- a/crates/agent/src/history_store.rs +++ b/crates/agent/src/history_store.rs @@ -1,64 +1,128 @@ -use crate::{ThreadId, thread_store::SerializedThreadMetadata}; -use anyhow::{Context as _, Result}; -use assistant_context::SavedContextMetadata; +use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; +use acp_thread::MentionUri; +use agent_client_protocol as acp; +use anyhow::{Context as _, Result, anyhow}; +use assistant_text_thread::{SavedTextThreadMetadata, TextThread}; use chrono::{DateTime, Utc}; +use db::kvp::KEY_VALUE_STORE; use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*}; use itertools::Itertools; -use paths::contexts_dir; +use paths::text_threads_dir; +use project::Project; use serde::{Deserialize, Serialize}; -use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration}; +use std::{collections::VecDeque, path::Path, rc::Rc, sync::Arc, time::Duration}; +use ui::ElementId; use util::ResultExt as _; const MAX_RECENTLY_OPENED_ENTRIES: usize = 6; -const NAVIGATION_HISTORY_PATH: &str = "agent-navigation-history.json"; +const RECENTLY_OPENED_THREADS_KEY: 
&str = "recent-agent-threads"; const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50); +const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread"); + +//todo: We should remove this function once we support loading all acp thread +pub fn load_agent_thread( + session_id: acp::SessionId, + history_store: Entity, + project: Entity, + cx: &mut App, +) -> Task>> { + use agent_servers::{AgentServer, AgentServerDelegate}; + + let server = Rc::new(crate::NativeAgentServer::new( + project.read(cx).fs().clone(), + history_store, + )); + let delegate = AgentServerDelegate::new( + project.read(cx).agent_server_store().clone(), + project.clone(), + None, + None, + ); + let connection = server.connect(None, delegate, cx); + cx.spawn(async move |cx| { + let (agent, _) = connection.await?; + let agent = agent.downcast::().unwrap(); + cx.update(|cx| agent.load_thread(session_id, cx))?.await + }) +} + #[derive(Clone, Debug)] pub enum HistoryEntry { - Thread(SerializedThreadMetadata), - Context(SavedContextMetadata), + AcpThread(DbThreadMetadata), + TextThread(SavedTextThreadMetadata), } impl HistoryEntry { pub fn updated_at(&self) -> DateTime { match self { - HistoryEntry::Thread(thread) => thread.updated_at, - HistoryEntry::Context(context) => context.mtime.to_utc(), + HistoryEntry::AcpThread(thread) => thread.updated_at, + HistoryEntry::TextThread(text_thread) => text_thread.mtime.to_utc(), } } pub fn id(&self) -> HistoryEntryId { match self { - HistoryEntry::Thread(thread) => HistoryEntryId::Thread(thread.id.clone()), - HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()), + HistoryEntry::AcpThread(thread) => HistoryEntryId::AcpThread(thread.id.clone()), + HistoryEntry::TextThread(text_thread) => { + HistoryEntryId::TextThread(text_thread.path.clone()) + } + } + } + + pub fn mention_uri(&self) -> MentionUri { + match self { + HistoryEntry::AcpThread(thread) => MentionUri::Thread { + id: thread.id.clone(), + name: thread.title.to_string(), + }, + HistoryEntry::TextThread(text_thread) => MentionUri::TextThread { + path: text_thread.path.as_ref().to_owned(), + name: text_thread.title.to_string(), + }, } } pub fn title(&self) -> &SharedString { match self { - HistoryEntry::Thread(thread) => &thread.summary, - HistoryEntry::Context(context) => &context.title, + HistoryEntry::AcpThread(thread) => { + if thread.title.is_empty() { + DEFAULT_TITLE + } else { + &thread.title + } + } + HistoryEntry::TextThread(text_thread) => &text_thread.title, } } } /// Generic identifier for a history entry. 
-#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub enum HistoryEntryId { - Thread(ThreadId), - Context(Arc), + AcpThread(acp::SessionId), + TextThread(Arc), +} + +impl Into for HistoryEntryId { + fn into(self) -> ElementId { + match self { + HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()), + HistoryEntryId::TextThread(path) => ElementId::Path(path), + } + } } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] enum SerializedRecentOpen { - Thread(String), - ContextName(String), - /// Old format which stores the full path - Context(String), + AcpThread(String), + TextThread(String), } pub struct HistoryStore { - context_store: Entity, + threads: Vec, + entries: Vec, + text_thread_store: Entity, recently_opened_entries: VecDeque, _subscriptions: Vec, _save_recently_opened_entries_task: Task<()>, @@ -66,57 +130,133 @@ pub struct HistoryStore { impl HistoryStore { pub fn new( - context_store: Entity, - initial_recent_entries: impl IntoIterator, + text_thread_store: Entity, cx: &mut Context, ) -> Self { - let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())]; + let subscriptions = + vec![cx.observe(&text_thread_store, |this, _, cx| this.update_entries(cx))]; cx.spawn(async move |this, cx| { - let entries = Self::load_recently_opened_entries(cx).await.log_err()?; - this.update(cx, |this, _| { - this.recently_opened_entries - .extend( - entries.into_iter().take( - MAX_RECENTLY_OPENED_ENTRIES - .saturating_sub(this.recently_opened_entries.len()), - ), - ); + let entries = Self::load_recently_opened_entries(cx).await; + this.update(cx, |this, cx| { + if let Some(entries) = entries.log_err() { + this.recently_opened_entries = entries; + } + + this.reload(cx); }) - .ok() + .ok(); }) .detach(); Self { - context_store, - recently_opened_entries: initial_recent_entries.into_iter().collect(), + text_thread_store, + recently_opened_entries: VecDeque::default(), + threads: Vec::default(), + entries: Vec::default(), _subscriptions: subscriptions, _save_recently_opened_entries_task: Task::ready(()), } } - pub fn entries(&self, cx: &mut Context) -> Vec { - let mut history_entries = Vec::new(); + pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> { + self.threads.iter().find(|thread| &thread.id == session_id) + } + + pub fn load_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task>> { + let database_future = ThreadsDatabase::connect(cx); + cx.background_spawn(async move { + let database = database_future.await.map_err(|err| anyhow!(err))?; + database.load_thread(id).await + }) + } + + pub fn delete_thread( + &mut self, + id: acp::SessionId, + cx: &mut Context, + ) -> Task> { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let database = database_future.await.map_err(|err| anyhow!(err))?; + database.delete_thread(id.clone()).await?; + this.update(cx, |this, cx| this.reload(cx)) + }) + } + + pub fn delete_text_thread( + &mut self, + path: Arc, + cx: &mut Context, + ) -> Task> { + self.text_thread_store + .update(cx, |store, cx| store.delete_local(path, cx)) + } + + pub fn load_text_thread( + &self, + path: Arc, + cx: &mut Context, + ) -> Task>> { + self.text_thread_store + .update(cx, |store, cx| store.open_local(path, cx)) + } + pub fn reload(&self, cx: &mut Context) { + let database_future = ThreadsDatabase::connect(cx); + cx.spawn(async move |this, cx| { + let threads = database_future + 
.await + .map_err(|err| anyhow!(err))? + .list_threads() + .await?; + + this.update(cx, |this, cx| { + if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES { + for thread in threads + .iter() + .take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len()) + .rev() + { + this.push_recently_opened_entry( + HistoryEntryId::AcpThread(thread.id.clone()), + cx, + ) + } + } + this.threads = threads; + this.update_entries(cx); + }) + }) + .detach_and_log_err(cx); + } + + fn update_entries(&mut self, cx: &mut Context) { #[cfg(debug_assertions)] if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { - return history_entries; + return; } - + let mut history_entries = Vec::new(); + history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread)); history_entries.extend( - self.context_store + self.text_thread_store .read(cx) - .unordered_contexts() + .unordered_text_threads() .cloned() - .map(HistoryEntry::Context), + .map(HistoryEntry::TextThread), ); history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at())); - history_entries + self.entries = history_entries; + cx.notify() } - pub fn recent_entries(&self, limit: usize, cx: &mut Context) -> Vec { - self.entries(cx).into_iter().take(limit).collect() + pub fn is_empty(&self, _cx: &App) -> bool { + self.entries.is_empty() } pub fn recently_opened_entries(&self, cx: &App) -> Vec { @@ -125,23 +265,36 @@ impl HistoryStore { return Vec::new(); } - let context_entries = - self.context_store - .read(cx) - .unordered_contexts() - .flat_map(|context| { - self.recently_opened_entries - .iter() - .enumerate() - .flat_map(|(index, entry)| match entry { - HistoryEntryId::Context(path) if &context.path == path => { - Some((index, HistoryEntry::Context(context.clone()))) - } - _ => None, - }) - }); - - context_entries + let thread_entries = self.threads.iter().flat_map(|thread| { + self.recently_opened_entries + .iter() + .enumerate() + .flat_map(|(index, entry)| match entry { + HistoryEntryId::AcpThread(id) if &thread.id == id => { + Some((index, HistoryEntry::AcpThread(thread.clone()))) + } + _ => None, + }) + }); + + let context_entries = self + .text_thread_store + .read(cx) + .unordered_text_threads() + .flat_map(|text_thread| { + self.recently_opened_entries + .iter() + .enumerate() + .flat_map(|(index, entry)| match entry { + HistoryEntryId::TextThread(path) if &text_thread.path == path => { + Some((index, HistoryEntry::TextThread(text_thread.clone()))) + } + _ => None, + }) + }); + + thread_entries + .chain(context_entries) // optimization to halt iteration early .take(self.recently_opened_entries.len()) .sorted_unstable_by_key(|(index, _)| *index) @@ -154,59 +307,52 @@ impl HistoryStore { .recently_opened_entries .iter() .filter_map(|entry| match entry { - HistoryEntryId::Context(path) => path.file_name().map(|file| { - SerializedRecentOpen::ContextName(file.to_string_lossy().into_owned()) + HistoryEntryId::TextThread(path) => path.file_name().map(|file| { + SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned()) }), - HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())), + HistoryEntryId::AcpThread(id) => { + Some(SerializedRecentOpen::AcpThread(id.to_string())) + } }) .collect::>(); self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| { + let content = serde_json::to_string(&serialized_entries).unwrap(); cx.background_executor() .timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE) .await; - cx.background_spawn(async move { - let 
path = paths::data_dir().join(NAVIGATION_HISTORY_PATH); - let content = serde_json::to_string(&serialized_entries)?; - std::fs::write(path, content)?; - anyhow::Ok(()) - }) - .await - .log_err(); + + if cfg!(any(feature = "test-support", test)) { + return; + } + KEY_VALUE_STORE + .write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content) + .await + .log_err(); }); } - fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { + fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { cx.background_spawn(async move { - let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH); - let contents = match smol::fs::read_to_string(path).await { - Ok(it) => it, - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - return Ok(Vec::new()); - } - Err(e) => { - return Err(e) - .context("deserializing persisted agent panel navigation history"); - } - }; - let entries = serde_json::from_str::>(&contents) + if cfg!(any(feature = "test-support", test)) { + anyhow::bail!("history store does not persist in tests"); + } + let json = KEY_VALUE_STORE + .read_kvp(RECENTLY_OPENED_THREADS_KEY)? + .unwrap_or("[]".to_string()); + let entries = serde_json::from_str::>(&json) .context("deserializing persisted agent panel navigation history")? .into_iter() .take(MAX_RECENTLY_OPENED_ENTRIES) .flat_map(|entry| match entry { - SerializedRecentOpen::Thread(id) => { - Some(HistoryEntryId::Thread(id.as_str().into())) - } - SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context( - contexts_dir().join(file_name).into(), + SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread( + acp::SessionId(id.as_str().into()), )), - SerializedRecentOpen::Context(path) => { - Path::new(&path).file_name().map(|file_name| { - HistoryEntryId::Context(contexts_dir().join(file_name).into()) - }) - } + SerializedRecentOpen::TextThread(file_name) => Some( + HistoryEntryId::TextThread(text_threads_dir().join(file_name).into()), + ), }) - .collect::>(); + .collect(); Ok(entries) }) } @@ -220,9 +366,9 @@ impl HistoryStore { self.save_recently_opened_entries(cx); } - pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context) { + pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context) { self.recently_opened_entries.retain( - |entry| !matches!(entry, HistoryEntryId::Thread(thread_id) if thread_id == &id), + |entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id), ); self.save_recently_opened_entries(cx); } @@ -235,8 +381,8 @@ impl HistoryStore { ) { for entry in &mut self.recently_opened_entries { match entry { - HistoryEntryId::Context(path) if path.as_ref() == old_path => { - *entry = HistoryEntryId::Context(new_path.clone()); + HistoryEntryId::TextThread(path) if path.as_ref() == old_path => { + *entry = HistoryEntryId::TextThread(new_path.clone()); break; } _ => {} @@ -250,4 +396,8 @@ impl HistoryStore { .retain(|old_entry| old_entry != entry); self.save_recently_opened_entries(cx); } + + pub fn entries(&self) -> impl Iterator { + self.entries.iter().cloned() + } } diff --git a/crates/agent/src/legacy_thread.rs b/crates/agent/src/legacy_thread.rs new file mode 100644 index 0000000000000000000000000000000000000000..34babb800616e7a3d5390abdaccc0cafa24ff386 --- /dev/null +++ b/crates/agent/src/legacy_thread.rs @@ -0,0 +1,402 @@ +use crate::ProjectSnapshot; +use agent_settings::{AgentProfileId, CompletionMode}; +use anyhow::Result; +use chrono::{DateTime, Utc}; +use gpui::SharedString; +use 
language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] +pub enum DetailedSummaryState { + #[default] + NotGenerated, + Generating, + Generated { + text: SharedString, + }, +} + +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] +pub struct MessageId(pub usize); + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +pub struct SerializedThread { + pub version: String, + pub summary: SharedString, + pub updated_at: DateTime, + pub messages: Vec, + #[serde(default)] + pub initial_project_snapshot: Option>, + #[serde(default)] + pub cumulative_token_usage: TokenUsage, + #[serde(default)] + pub request_token_usage: Vec, + #[serde(default)] + pub detailed_summary_state: DetailedSummaryState, + #[serde(default)] + pub model: Option, + #[serde(default)] + pub completion_mode: Option, + #[serde(default)] + pub tool_use_limit_reached: bool, + #[serde(default)] + pub profile: Option, +} + +#[derive(Serialize, Deserialize, Debug, PartialEq)] +pub struct SerializedLanguageModel { + pub provider: String, + pub model: String, +} + +impl SerializedThread { + pub const VERSION: &'static str = "0.2.0"; + + pub fn from_json(json: &[u8]) -> Result { + let saved_thread_json = serde_json::from_slice::(json)?; + match saved_thread_json.get("version") { + Some(serde_json::Value::String(version)) => match version.as_str() { + SerializedThreadV0_1_0::VERSION => { + let saved_thread = + serde_json::from_value::(saved_thread_json)?; + Ok(saved_thread.upgrade()) + } + SerializedThread::VERSION => Ok(serde_json::from_value::( + saved_thread_json, + )?), + _ => anyhow::bail!("unrecognized serialized thread version: {version:?}"), + }, + None => { + let saved_thread = + serde_json::from_value::(saved_thread_json)?; + Ok(saved_thread.upgrade()) + } + version => anyhow::bail!("unrecognized serialized thread version: {version:?}"), + } + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct SerializedThreadV0_1_0( + // The structure did not change, so we are reusing the latest SerializedThread. 
+ // When making the next version, make sure this points to SerializedThreadV0_2_0 + SerializedThread, +); + +impl SerializedThreadV0_1_0 { + pub const VERSION: &'static str = "0.1.0"; + + pub fn upgrade(self) -> SerializedThread { + debug_assert_eq!(SerializedThread::VERSION, "0.2.0"); + + let mut messages: Vec = Vec::with_capacity(self.0.messages.len()); + + for message in self.0.messages { + if message.role == Role::User + && !message.tool_results.is_empty() + && let Some(last_message) = messages.last_mut() + { + debug_assert!(last_message.role == Role::Assistant); + + last_message.tool_results = message.tool_results; + continue; + } + + messages.push(message); + } + + SerializedThread { + messages, + version: SerializedThread::VERSION.to_string(), + ..self.0 + } + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct SerializedMessage { + pub id: MessageId, + pub role: Role, + #[serde(default)] + pub segments: Vec, + #[serde(default)] + pub tool_uses: Vec, + #[serde(default)] + pub tool_results: Vec, + #[serde(default)] + pub context: String, + #[serde(default)] + pub creases: Vec, + #[serde(default)] + pub is_hidden: bool, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(tag = "type")] +pub enum SerializedMessageSegment { + #[serde(rename = "text")] + Text { + text: String, + }, + #[serde(rename = "thinking")] + Thinking { + text: String, + #[serde(skip_serializing_if = "Option::is_none")] + signature: Option, + }, + RedactedThinking { + data: String, + }, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct SerializedToolUse { + pub id: LanguageModelToolUseId, + pub name: SharedString, + pub input: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct SerializedToolResult { + pub tool_use_id: LanguageModelToolUseId, + pub is_error: bool, + pub content: LanguageModelToolResultContent, + pub output: Option, +} + +#[derive(Serialize, Deserialize)] +struct LegacySerializedThread { + pub summary: SharedString, + pub updated_at: DateTime, + pub messages: Vec, + #[serde(default)] + pub initial_project_snapshot: Option>, +} + +impl LegacySerializedThread { + pub fn upgrade(self) -> SerializedThread { + SerializedThread { + version: SerializedThread::VERSION.to_string(), + summary: self.summary, + updated_at: self.updated_at, + messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(), + initial_project_snapshot: self.initial_project_snapshot, + cumulative_token_usage: TokenUsage::default(), + request_token_usage: Vec::new(), + detailed_summary_state: DetailedSummaryState::default(), + model: None, + completion_mode: None, + tool_use_limit_reached: false, + profile: None, + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +struct LegacySerializedMessage { + pub id: MessageId, + pub role: Role, + pub text: String, + #[serde(default)] + pub tool_uses: Vec, + #[serde(default)] + pub tool_results: Vec, +} + +impl LegacySerializedMessage { + fn upgrade(self) -> SerializedMessage { + SerializedMessage { + id: self.id, + role: self.role, + segments: vec![SerializedMessageSegment::Text { text: self.text }], + tool_uses: self.tool_uses, + tool_results: self.tool_results, + context: String::new(), + creases: Vec::new(), + is_hidden: false, + } + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +pub struct SerializedCrease { + pub start: usize, + pub end: usize, + pub icon_path: SharedString, + pub label: SharedString, +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::Utc; + 
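// Illustrative sketch of the version dispatch in `SerializedThread::from_json`
// defined above: a payload tagged "0.1.0" is parsed as `SerializedThreadV0_1_0`
// and upgraded, "0.2.0" is parsed directly, and a payload with no "version" key
// falls back to the legacy format. The JSON body below is minimal and hypothetical;
// all other fields rely on their `#[serde(default)]` values.
#[test]
fn from_json_dispatches_on_version() {
    let json = br#"{
        "version": "0.2.0",
        "summary": "Example thread",
        "updated_at": "2024-01-01T00:00:00Z",
        "messages": []
    }"#;
    let thread = SerializedThread::from_json(json).unwrap();
    assert_eq!(thread.version, SerializedThread::VERSION);
    assert!(thread.messages.is_empty());
}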
use language_model::{Role, TokenUsage}; + use pretty_assertions::assert_eq; + + #[test] + fn test_legacy_serialized_thread_upgrade() { + let updated_at = Utc::now(); + let legacy_thread = LegacySerializedThread { + summary: "Test conversation".into(), + updated_at, + messages: vec![LegacySerializedMessage { + id: MessageId(1), + role: Role::User, + text: "Hello, world!".to_string(), + tool_uses: vec![], + tool_results: vec![], + }], + initial_project_snapshot: None, + }; + + let upgraded = legacy_thread.upgrade(); + + assert_eq!( + upgraded, + SerializedThread { + summary: "Test conversation".into(), + updated_at, + messages: vec![SerializedMessage { + id: MessageId(1), + role: Role::User, + segments: vec![SerializedMessageSegment::Text { + text: "Hello, world!".to_string() + }], + tool_uses: vec![], + tool_results: vec![], + context: "".to_string(), + creases: vec![], + is_hidden: false + }], + version: SerializedThread::VERSION.to_string(), + initial_project_snapshot: None, + cumulative_token_usage: TokenUsage::default(), + request_token_usage: vec![], + detailed_summary_state: DetailedSummaryState::default(), + model: None, + completion_mode: None, + tool_use_limit_reached: false, + profile: None + } + ) + } + + #[test] + fn test_serialized_threadv0_1_0_upgrade() { + let updated_at = Utc::now(); + let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread { + summary: "Test conversation".into(), + updated_at, + messages: vec![ + SerializedMessage { + id: MessageId(1), + role: Role::User, + segments: vec![SerializedMessageSegment::Text { + text: "Use tool_1".to_string(), + }], + tool_uses: vec![], + tool_results: vec![], + context: "".to_string(), + creases: vec![], + is_hidden: false, + }, + SerializedMessage { + id: MessageId(2), + role: Role::Assistant, + segments: vec![SerializedMessageSegment::Text { + text: "I want to use a tool".to_string(), + }], + tool_uses: vec![SerializedToolUse { + id: "abc".into(), + name: "tool_1".into(), + input: serde_json::Value::Null, + }], + tool_results: vec![], + context: "".to_string(), + creases: vec![], + is_hidden: false, + }, + SerializedMessage { + id: MessageId(1), + role: Role::User, + segments: vec![SerializedMessageSegment::Text { + text: "Here is the tool result".to_string(), + }], + tool_uses: vec![], + tool_results: vec![SerializedToolResult { + tool_use_id: "abc".into(), + is_error: false, + content: LanguageModelToolResultContent::Text("abcdef".into()), + output: Some(serde_json::Value::Null), + }], + context: "".to_string(), + creases: vec![], + is_hidden: false, + }, + ], + version: SerializedThreadV0_1_0::VERSION.to_string(), + initial_project_snapshot: None, + cumulative_token_usage: TokenUsage::default(), + request_token_usage: vec![], + detailed_summary_state: DetailedSummaryState::default(), + model: None, + completion_mode: None, + tool_use_limit_reached: false, + profile: None, + }); + let upgraded = thread_v0_1_0.upgrade(); + + assert_eq!( + upgraded, + SerializedThread { + summary: "Test conversation".into(), + updated_at, + messages: vec![ + SerializedMessage { + id: MessageId(1), + role: Role::User, + segments: vec![SerializedMessageSegment::Text { + text: "Use tool_1".to_string() + }], + tool_uses: vec![], + tool_results: vec![], + context: "".to_string(), + creases: vec![], + is_hidden: false + }, + SerializedMessage { + id: MessageId(2), + role: Role::Assistant, + segments: vec![SerializedMessageSegment::Text { + text: "I want to use a tool".to_string(), + }], + tool_uses: vec![SerializedToolUse { + id: "abc".into(), + 
name: "tool_1".into(), + input: serde_json::Value::Null, + }], + tool_results: vec![SerializedToolResult { + tool_use_id: "abc".into(), + is_error: false, + content: LanguageModelToolResultContent::Text("abcdef".into()), + output: Some(serde_json::Value::Null), + }], + context: "".to_string(), + creases: vec![], + is_hidden: false, + }, + ], + version: SerializedThread::VERSION.to_string(), + initial_project_snapshot: None, + cumulative_token_usage: TokenUsage::default(), + request_token_usage: vec![], + detailed_summary_state: DetailedSummaryState::default(), + model: None, + completion_mode: None, + tool_use_limit_reached: false, + profile: None + } + ) + } +} diff --git a/crates/agent2/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs similarity index 93% rename from crates/agent2/src/native_agent_server.rs rename to crates/agent/src/native_agent_server.rs index 0dde0ff98552d4292a4391d2aec4f36419228a25..b28009223b7a7f2232b440282a0d6f61907f442c 100644 --- a/crates/agent2/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -81,7 +81,7 @@ impl AgentServer for NativeAgentServer { mod tests { use super::*; - use assistant_context::ContextStore; + use assistant_text_thread::TextThreadStore; use gpui::AppContext; agent_servers::e2e_tests::common_e2e_tests!( @@ -116,8 +116,9 @@ mod tests { }); let history = cx.update(|cx| { - let context_store = cx.new(move |cx| ContextStore::fake(project.clone(), cx)); - cx.new(move |cx| HistoryStore::new(context_store, cx)) + let text_thread_store = + cx.new(move |cx| TextThreadStore::fake(project.clone(), cx)); + cx.new(move |cx| HistoryStore::new(text_thread_store, cx)) }); NativeAgentServer::new(fs.clone(), history) diff --git a/crates/assistant_tool/src/outline.rs b/crates/agent/src/outline.rs similarity index 76% rename from crates/assistant_tool/src/outline.rs rename to crates/agent/src/outline.rs index 4c8e2efefd67e25c630d38e16bda8a8dff34fb16..bc78290fb52ae208742b9dea0e6dbbe560022419 100644 --- a/crates/assistant_tool/src/outline.rs +++ b/crates/agent/src/outline.rs @@ -1,8 +1,6 @@ -use action_log::ActionLog; -use anyhow::{Context as _, Result}; +use anyhow::Result; use gpui::{AsyncApp, Entity}; use language::{Buffer, OutlineItem, ParseStatus}; -use project::Project; use regex::Regex; use std::fmt::Write; use text::Point; @@ -11,51 +9,66 @@ use text::Point; /// we automatically provide the file's symbol outline instead, with line numbers. pub const AUTO_OUTLINE_SIZE: usize = 16384; -pub async fn file_outline( - project: Entity, - path: String, - action_log: Entity, - regex: Option, - cx: &mut AsyncApp, -) -> anyhow::Result { - let buffer = { - let project_path = project.read_with(cx, |project, cx| { - project - .find_project_path(&path, cx) - .with_context(|| format!("Path {path} not found in project")) - })??; - - project - .update(cx, |project, cx| project.open_buffer(project_path, cx))? - .await? - }; +/// Result of getting buffer content, which can be either full content or an outline. +pub struct BufferContent { + /// The actual content (either full text or outline) + pub text: String, + /// Whether this is an outline (true) or full content (false) + pub is_outline: bool, +} - action_log.update(cx, |action_log, cx| { - action_log.buffer_read(buffer.clone(), cx); - })?; +/// Returns either the full content of a buffer or its outline, depending on size. +/// For files larger than AUTO_OUTLINE_SIZE, returns an outline with a header. +/// For smaller files, returns the full content. 
+pub async fn get_buffer_content_or_outline( + buffer: Entity, + path: Option<&str>, + cx: &AsyncApp, +) -> Result { + let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?; - // Wait until the buffer has been fully parsed, so that we can read its outline. - let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?; - while *parse_status.borrow() != ParseStatus::Idle { - parse_status.changed().await?; - } + if file_size > AUTO_OUTLINE_SIZE { + // For large files, use outline instead of full content + // Wait until the buffer has been fully parsed, so we can read its outline + let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?; + while *parse_status.borrow() != ParseStatus::Idle { + parse_status.changed().await?; + } + + let outline_items = buffer.read_with(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + snapshot + .outline(None) + .items + .into_iter() + .map(|item| item.to_point(&snapshot)) + .collect::>() + })?; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let outline = snapshot.outline(None); - - render_outline( - outline - .items - .into_iter() - .map(|item| item.to_point(&snapshot)), - regex, - 0, - usize::MAX, - ) - .await + let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?; + + let text = if let Some(path) = path { + format!( + "# File outline for {path} (file too large to show full content)\n\n{outline_text}", + ) + } else { + format!("# File outline (file too large to show full content)\n\n{outline_text}",) + }; + Ok(BufferContent { + text, + is_outline: true, + }) + } else { + // File is small enough, return full content + let text = buffer.read_with(cx, |buffer, _| buffer.text())?; + Ok(BufferContent { + text, + is_outline: false, + }) + } } -pub async fn render_outline( +async fn render_outline( items: impl IntoIterator>, regex: Option, offset: usize, @@ -128,62 +141,3 @@ fn render_entries( entries_rendered } - -/// Result of getting buffer content, which can be either full content or an outline. -pub struct BufferContent { - /// The actual content (either full text or outline) - pub text: String, - /// Whether this is an outline (true) or full content (false) - pub is_outline: bool, -} - -/// Returns either the full content of a buffer or its outline, depending on size. -/// For files larger than AUTO_OUTLINE_SIZE, returns an outline with a header. -/// For smaller files, returns the full content. 
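// Illustrative usage sketch for `get_buffer_content_or_outline` defined above:
// callers get full text for small buffers and a symbol outline (with a
// "# File outline ..." header) once the buffer exceeds AUTO_OUTLINE_SIZE bytes,
// with `is_outline` telling them which case they hit. The path string is
// hypothetical and only affects the outline header.
async fn describe_buffer(
    buffer: gpui::Entity<language::Buffer>,
    cx: &gpui::AsyncApp,
) -> anyhow::Result<String> {
    let content = get_buffer_content_or_outline(buffer, Some("src/lib.rs"), cx).await?;
    let kind = if content.is_outline { "outline" } else { "full text" };
    Ok(format!("{kind}:\n{}", content.text))
}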
-pub async fn get_buffer_content_or_outline( - buffer: Entity, - path: Option<&str>, - cx: &AsyncApp, -) -> Result { - let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?; - - if file_size > AUTO_OUTLINE_SIZE { - // For large files, use outline instead of full content - // Wait until the buffer has been fully parsed, so we can read its outline - let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?; - while *parse_status.borrow() != ParseStatus::Idle { - parse_status.changed().await?; - } - - let outline_items = buffer.read_with(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - snapshot - .outline(None) - .items - .into_iter() - .map(|item| item.to_point(&snapshot)) - .collect::>() - })?; - - let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?; - - let text = if let Some(path) = path { - format!( - "# File outline for {path} (file too large to show full content)\n\n{outline_text}", - ) - } else { - format!("# File outline (file too large to show full content)\n\n{outline_text}",) - }; - Ok(BufferContent { - text, - is_outline: true, - }) - } else { - // File is small enough, return full content - let text = buffer.read_with(cx, |buffer, _| buffer.text())?; - Ok(BufferContent { - text, - is_outline: false, - }) - } -} diff --git a/crates/agent/src/prompts/stale_files_prompt_header.txt b/crates/agent/src/prompts/stale_files_prompt_header.txt deleted file mode 100644 index f743e239c883c7456f7bdc6e089185c6b994cb44..0000000000000000000000000000000000000000 --- a/crates/agent/src/prompts/stale_files_prompt_header.txt +++ /dev/null @@ -1,3 +0,0 @@ -[The following is an auto-generated notification; do not reply] - -These files have changed since the last read: diff --git a/crates/agent2/src/templates.rs b/crates/agent/src/templates.rs similarity index 100% rename from crates/agent2/src/templates.rs rename to crates/agent/src/templates.rs diff --git a/crates/assistant_tools/src/templates/create_file_prompt.hbs b/crates/agent/src/templates/create_file_prompt.hbs similarity index 100% rename from crates/assistant_tools/src/templates/create_file_prompt.hbs rename to crates/agent/src/templates/create_file_prompt.hbs diff --git a/crates/assistant_tools/src/templates/diff_judge.hbs b/crates/agent/src/templates/diff_judge.hbs similarity index 100% rename from crates/assistant_tools/src/templates/diff_judge.hbs rename to crates/agent/src/templates/diff_judge.hbs diff --git a/crates/assistant_tools/src/templates/edit_file_prompt_diff_fenced.hbs b/crates/agent/src/templates/edit_file_prompt_diff_fenced.hbs similarity index 100% rename from crates/assistant_tools/src/templates/edit_file_prompt_diff_fenced.hbs rename to crates/agent/src/templates/edit_file_prompt_diff_fenced.hbs diff --git a/crates/assistant_tools/src/templates/edit_file_prompt_xml.hbs b/crates/agent/src/templates/edit_file_prompt_xml.hbs similarity index 100% rename from crates/assistant_tools/src/templates/edit_file_prompt_xml.hbs rename to crates/agent/src/templates/edit_file_prompt_xml.hbs diff --git a/crates/agent2/src/templates/system_prompt.hbs b/crates/agent/src/templates/system_prompt.hbs similarity index 100% rename from crates/agent2/src/templates/system_prompt.hbs rename to crates/agent/src/templates/system_prompt.hbs diff --git a/crates/agent2/src/tests/mod.rs b/crates/agent/src/tests/mod.rs similarity index 99% rename from crates/agent2/src/tests/mod.rs rename to crates/agent/src/tests/mod.rs index 
2e63aa5856501f880fec94f7659b13be321b03b3..ddddbfc5279ca23fb95527892e929b23b8cefbf6 100644 --- a/crates/agent2/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -975,9 +975,9 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { vec![context_server::types::Tool { name: "echo".into(), description: None, - input_schema: serde_json::to_value( - EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), - ) + input_schema: serde_json::to_value(EchoTool::input_schema( + LanguageModelToolSchemaFormat::JsonSchema, + )) .unwrap(), output_schema: None, annotations: None, @@ -1149,9 +1149,9 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { context_server::types::Tool { name: "echo".into(), // Conflicts with native EchoTool description: None, - input_schema: serde_json::to_value( - EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), - ) + input_schema: serde_json::to_value(EchoTool::input_schema( + LanguageModelToolSchemaFormat::JsonSchema, + )) .unwrap(), output_schema: None, annotations: None, @@ -1174,9 +1174,9 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { context_server::types::Tool { name: "echo".into(), // Also conflicts with native EchoTool description: None, - input_schema: serde_json::to_value( - EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), - ) + input_schema: serde_json::to_value(EchoTool::input_schema( + LanguageModelToolSchemaFormat::JsonSchema, + )) .unwrap(), output_schema: None, annotations: None, @@ -1834,8 +1834,9 @@ async fn test_agent_connection(cx: &mut TestAppContext) { fake_fs.insert_tree(path!("/test"), json!({})).await; let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await; let cwd = Path::new("/test"); - let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = + cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); // Create agent and connection let agent = NativeAgent::new( @@ -1864,7 +1865,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let selector_opt = connection.model_selector(&session_id); assert!( selector_opt.is_some(), - "agent2 should always support ModelSelector" + "agent should always support ModelSelector" ); let selector = selector_opt.unwrap(); @@ -1995,7 +1996,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { locations: vec![], raw_input: Some(json!({})), raw_output: None, - meta: None, + meta: Some(json!({ "tool_name": "thinking" })), } ); let update = expect_tool_call_update_fields(&mut events).await; diff --git a/crates/agent2/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs similarity index 100% rename from crates/agent2/src/tests/test_tools.rs rename to crates/agent/src/tests/test_tools.rs diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index d189b7611209d2fbea5c882ea548318f73ddbfb3..d3414d84c8f5594a567e5b38b45ddf0739965365 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1,95 +1,60 @@ use crate::{ - agent_profile::AgentProfile, - context::{AgentContext, AgentContextHandle, ContextLoadResult, LoadedContext}, - thread_store::{ - SerializedCrease, SerializedLanguageModel, SerializedMessage, SerializedMessageSegment, - SerializedThread, SerializedToolResult, SerializedToolUse, SharedProjectContext, - ThreadStore, - }, - 
tool_use::{PendingToolUse, ToolUse, ToolUseMetadata, ToolUseState}, + ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, + DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, + ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, + SystemPromptTemplate, Template, Templates, TerminalTool, ThinkingTool, WebSearchTool, }; +use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; + +use agent_client_protocol as acp; use agent_settings::{ - AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_DETAILED_PROMPT, - SUMMARIZE_THREAD_PROMPT, + AgentProfileId, AgentProfileSettings, AgentSettings, CompletionMode, + SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT, }; -use anyhow::{Result, anyhow}; -use assistant_tool::{AnyToolCard, Tool, ToolWorkingSet}; +use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; -use client::{ModelRequestUsage, RequestUsage}; +use client::{ModelRequestUsage, RequestUsage, UserStore}; use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit}; -use collections::HashMap; -use futures::{FutureExt, StreamExt as _, future::Shared}; -use git::repository::DiffType; +use collections::{HashMap, HashSet, IndexMap}; +use fs::Fs; +use futures::stream; +use futures::{ + FutureExt, + channel::{mpsc, oneshot}, + future::Shared, + stream::FuturesUnordered, +}; use gpui::{ - AnyWindowHandle, App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, - WeakEntity, Window, + App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity, }; -use http_client::StatusCode; use language_model::{ - ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelExt as _, LanguageModelId, LanguageModelRegistry, LanguageModelRequest, + LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt, + LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, - LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, - ModelRequestLimitReachedError, PaymentRequiredError, Role, SelectedModel, StopReason, - TokenUsage, -}; -use postage::stream::Stream as _; -use project::{ - Project, - git_store::{GitStore, GitStoreCheckpoint, RepositoryState}, + LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, + LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID, }; -use prompt_store::{ModelContext, PromptBuilder}; -use schemars::JsonSchema; +use project::Project; +use prompt_store::ProjectContext; +use schemars::{JsonSchema, Schema}; use serde::{Deserialize, Serialize}; -use settings::Settings; +use settings::{Settings, update_settings_file}; +use smol::stream::StreamExt; use std::{ - io::Write, - ops::Range, + collections::BTreeMap, + ops::RangeInclusive, + path::Path, + rc::Rc, sync::Arc, time::{Duration, Instant}, }; -use thiserror::Error; -use util::{ResultExt as _, post_inc}; +use std::{fmt::Write, path::PathBuf}; +use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock}; use uuid::Uuid; -const MAX_RETRY_ATTEMPTS: u8 = 4; -const BASE_RETRY_DELAY: Duration = Duration::from_secs(5); - -#[derive(Debug, Clone)] -enum RetryStrategy { - ExponentialBackoff { - initial_delay: Duration, - max_attempts: u8, - 
}, - Fixed { - delay: Duration, - max_attempts: u8, - }, -} - -#[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, JsonSchema, -)] -pub struct ThreadId(Arc); - -impl ThreadId { - pub fn new() -> Self { - Self(Uuid::new_v4().to_string().into()) - } -} - -impl std::fmt::Display for ThreadId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl From<&str> for ThreadId { - fn from(value: &str) -> Self { - Self(value.into()) - } -} +const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; +pub const MAX_TOOL_NAME_LENGTH: usize = 64; /// The ID of the user prompt that initiated a request. /// @@ -109,2014 +74,1898 @@ impl std::fmt::Display for PromptId { } } -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] -pub struct MessageId(pub usize); - -impl MessageId { - fn post_inc(&mut self) -> Self { - Self(post_inc(&mut self.0)) - } - - pub fn as_usize(&self) -> usize { - self.0 - } -} +pub(crate) const MAX_RETRY_ATTEMPTS: u8 = 4; +pub(crate) const BASE_RETRY_DELAY: Duration = Duration::from_secs(5); -/// Stored information that can be used to resurrect a context crease when creating an editor for a past message. -#[derive(Clone, Debug)] -pub struct MessageCrease { - pub range: Range, - pub icon_path: SharedString, - pub label: SharedString, - /// None for a deserialized message, Some otherwise. - pub context: Option, +#[derive(Debug, Clone)] +enum RetryStrategy { + ExponentialBackoff { + initial_delay: Duration, + max_attempts: u8, + }, + Fixed { + delay: Duration, + max_attempts: u8, + }, } -/// A message in a [`Thread`]. -#[derive(Debug, Clone)] -pub struct Message { - pub id: MessageId, - pub role: Role, - pub segments: Vec, - pub loaded_context: LoadedContext, - pub creases: Vec, - pub is_hidden: bool, - pub ui_only: bool, +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum Message { + User(UserMessage), + Agent(AgentMessage), + Resume, } impl Message { - /// Returns whether the message contains any meaningful text that should be displayed - /// The model sometimes runs tool without producing any text or just a marker ([`USING_TOOL_MARKER`]) - pub fn should_display_content(&self) -> bool { - self.segments.iter().all(|segment| segment.should_display()) + pub fn as_agent_message(&self) -> Option<&AgentMessage> { + match self { + Message::Agent(agent_message) => Some(agent_message), + _ => None, + } } - pub fn push_thinking(&mut self, text: &str, signature: Option) { - if let Some(MessageSegment::Thinking { - text: segment, - signature: current_signature, - }) = self.segments.last_mut() - { - if let Some(signature) = signature { - *current_signature = Some(signature); - } - segment.push_str(text); - } else { - self.segments.push(MessageSegment::Thinking { - text: text.to_string(), - signature, - }); + pub fn to_request(&self) -> Vec { + match self { + Message::User(message) => vec![message.to_request()], + Message::Agent(message) => message.to_request(), + Message::Resume => vec![LanguageModelRequestMessage { + role: Role::User, + content: vec!["Continue where you left off".into()], + cache: false, + }], } } - pub fn push_redacted_thinking(&mut self, data: String) { - self.segments.push(MessageSegment::RedactedThinking(data)); + pub fn to_markdown(&self) -> String { + match self { + Message::User(message) => message.to_markdown(), + Message::Agent(message) => message.to_markdown(), + Message::Resume => "[resume]\n".into(), + } } - pub 
fn push_text(&mut self, text: &str) { - if let Some(MessageSegment::Text(segment)) = self.segments.last_mut() { - segment.push_str(text); - } else { - self.segments.push(MessageSegment::Text(text.to_string())); + pub fn role(&self) -> Role { + match self { + Message::User(_) | Message::Resume => Role::User, + Message::Agent(_) => Role::Assistant, } } +} - pub fn to_message_content(&self) -> String { - let mut result = String::new(); +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct UserMessage { + pub id: UserMessageId, + pub content: Vec, +} - if !self.loaded_context.text.is_empty() { - result.push_str(&self.loaded_context.text); - } +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum UserMessageContent { + Text(String), + Mention { uri: MentionUri, content: String }, + Image(LanguageModelImage), +} - for segment in &self.segments { - match segment { - MessageSegment::Text(text) => result.push_str(text), - MessageSegment::Thinking { text, .. } => { - result.push_str("\n"); - result.push_str(text); - result.push_str("\n"); +impl UserMessage { + pub fn to_markdown(&self) -> String { + let mut markdown = String::from("## User\n\n"); + + for content in &self.content { + match content { + UserMessageContent::Text(text) => { + markdown.push_str(text); + markdown.push('\n'); + } + UserMessageContent::Image(_) => { + markdown.push_str("\n"); + } + UserMessageContent::Mention { uri, content } => { + if !content.is_empty() { + let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content); + } else { + let _ = writeln!(&mut markdown, "{}", uri.as_link()); + } } - MessageSegment::RedactedThinking(_) => {} } } - result + markdown } -} -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum MessageSegment { - Text(String), - Thinking { - text: String, - signature: Option, - }, - RedactedThinking(String), -} + fn to_request(&self) -> LanguageModelRequestMessage { + let mut message = LanguageModelRequestMessage { + role: Role::User, + content: Vec::with_capacity(self.content.len()), + cache: false, + }; -impl MessageSegment { - pub fn should_display(&self) -> bool { - match self { - Self::Text(text) => text.is_empty(), - Self::Thinking { text, .. } => text.is_empty(), - Self::RedactedThinking(_) => false, + const OPEN_CONTEXT: &str = "\n\ + The following items were attached by the user. 
\ + They are up-to-date and don't need to be re-read.\n\n"; + + const OPEN_FILES_TAG: &str = ""; + const OPEN_DIRECTORIES_TAG: &str = ""; + const OPEN_SYMBOLS_TAG: &str = ""; + const OPEN_SELECTIONS_TAG: &str = ""; + const OPEN_THREADS_TAG: &str = ""; + const OPEN_FETCH_TAG: &str = ""; + const OPEN_RULES_TAG: &str = + "\nThe user has specified the following rules that should be applied:\n"; + + let mut file_context = OPEN_FILES_TAG.to_string(); + let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); + let mut symbol_context = OPEN_SYMBOLS_TAG.to_string(); + let mut selection_context = OPEN_SELECTIONS_TAG.to_string(); + let mut thread_context = OPEN_THREADS_TAG.to_string(); + let mut fetch_context = OPEN_FETCH_TAG.to_string(); + let mut rules_context = OPEN_RULES_TAG.to_string(); + + for chunk in &self.content { + let chunk = match chunk { + UserMessageContent::Text(text) => { + language_model::MessageContent::Text(text.clone()) + } + UserMessageContent::Image(value) => { + language_model::MessageContent::Image(value.clone()) + } + UserMessageContent::Mention { uri, content } => { + match uri { + MentionUri::File { abs_path } => { + write!( + &mut file_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag(abs_path, None), + text: &content.to_string(), + } + ) + .ok(); + } + MentionUri::PastedImage => { + debug_panic!("pasted image URI should not be used in mention content") + } + MentionUri::Directory { .. } => { + write!(&mut directory_context, "\n{}\n", content).ok(); + } + MentionUri::Symbol { + abs_path: path, + line_range, + .. + } => { + write!( + &mut symbol_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag(path, Some(line_range)), + text: content + } + ) + .ok(); + } + MentionUri::Selection { + abs_path: path, + line_range, + .. + } => { + write!( + &mut selection_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag( + path.as_deref().unwrap_or("Untitled".as_ref()), + Some(line_range) + ), + text: content + } + ) + .ok(); + } + MentionUri::Thread { .. } => { + write!(&mut thread_context, "\n{}\n", content).ok(); + } + MentionUri::TextThread { .. } => { + write!(&mut thread_context, "\n{}\n", content).ok(); + } + MentionUri::Rule { .. 
} => { + write!( + &mut rules_context, + "\n{}", + MarkdownCodeBlock { + tag: "", + text: content + } + ) + .ok(); + } + MentionUri::Fetch { url } => { + write!(&mut fetch_context, "\nFetch: {}\n\n{}", url, content).ok(); + } + } + + language_model::MessageContent::Text(uri.as_link().to_string()) + } + }; + + message.content.push(chunk); } - } - pub fn text(&self) -> Option<&str> { - match self { - MessageSegment::Text(text) => Some(text), - _ => None, + let len_before_context = message.content.len(); + + if file_context.len() > OPEN_FILES_TAG.len() { + file_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(file_context)); } - } -} -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct ProjectSnapshot { - pub worktree_snapshots: Vec, - pub timestamp: DateTime, -} + if directory_context.len() > OPEN_DIRECTORIES_TAG.len() { + directory_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(directory_context)); + } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct WorktreeSnapshot { - pub worktree_path: String, - pub git_state: Option, -} + if symbol_context.len() > OPEN_SYMBOLS_TAG.len() { + symbol_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(symbol_context)); + } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct GitState { - pub remote_url: Option, - pub head_sha: Option, - pub current_branch: Option, - pub diff: Option, -} + if selection_context.len() > OPEN_SELECTIONS_TAG.len() { + selection_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(selection_context)); + } -#[derive(Clone, Debug)] -pub struct ThreadCheckpoint { - message_id: MessageId, - git_checkpoint: GitStoreCheckpoint, -} + if thread_context.len() > OPEN_THREADS_TAG.len() { + thread_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(thread_context)); + } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum ThreadFeedback { - Positive, - Negative, -} + if fetch_context.len() > OPEN_FETCH_TAG.len() { + fetch_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(fetch_context)); + } -pub enum LastRestoreCheckpoint { - Pending { - message_id: MessageId, - }, - Error { - message_id: MessageId, - error: String, - }, -} + if rules_context.len() > OPEN_RULES_TAG.len() { + rules_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(rules_context)); + } -impl LastRestoreCheckpoint { - pub fn message_id(&self) -> MessageId { - match self { - LastRestoreCheckpoint::Pending { message_id } => *message_id, - LastRestoreCheckpoint::Error { message_id, .. 
} => *message_id, + if message.content.len() > len_before_context { + message.content.insert( + len_before_context, + language_model::MessageContent::Text(OPEN_CONTEXT.into()), + ); + message + .content + .push(language_model::MessageContent::Text("".into())); } + + message } } -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] -pub enum DetailedSummaryState { - #[default] - NotGenerated, - Generating { - message_id: MessageId, - }, - Generated { - text: SharedString, - message_id: MessageId, - }, -} +fn codeblock_tag(full_path: &Path, line_range: Option<&RangeInclusive>) -> String { + let mut result = String::new(); + + if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) { + let _ = write!(result, "{} ", extension); + } + + let _ = write!(result, "{}", full_path.display()); -impl DetailedSummaryState { - fn text(&self) -> Option { - if let Self::Generated { text, .. } = self { - Some(text.clone()) + if let Some(range) = line_range { + if range.start() == range.end() { + let _ = write!(result, ":{}", range.start() + 1); } else { - None + let _ = write!(result, ":{}-{}", range.start() + 1, range.end() + 1); } } -} -#[derive(Default, Debug)] -pub struct TotalTokenUsage { - pub total: u64, - pub max: u64, + result } -impl TotalTokenUsage { - pub fn ratio(&self) -> TokenUsageRatio { - #[cfg(debug_assertions)] - let warning_threshold: f32 = std::env::var("ZED_THREAD_WARNING_THRESHOLD") - .unwrap_or("0.8".to_string()) - .parse() - .unwrap(); - #[cfg(not(debug_assertions))] - let warning_threshold: f32 = 0.8; - - // When the maximum is unknown because there is no selected model, - // avoid showing the token limit warning. - if self.max == 0 { - TokenUsageRatio::Normal - } else if self.total >= self.max { - TokenUsageRatio::Exceeded - } else if self.total as f32 / self.max as f32 >= warning_threshold { - TokenUsageRatio::Warning - } else { - TokenUsageRatio::Normal +impl AgentMessage { + pub fn to_markdown(&self) -> String { + let mut markdown = String::from("## Assistant\n\n"); + + for content in &self.content { + match content { + AgentMessageContent::Text(text) => { + markdown.push_str(text); + markdown.push('\n'); + } + AgentMessageContent::Thinking { text, .. 
} => { + markdown.push_str(""); + markdown.push_str(text); + markdown.push_str("\n"); + } + AgentMessageContent::RedactedThinking(_) => { + markdown.push_str("\n") + } + AgentMessageContent::ToolUse(tool_use) => { + markdown.push_str(&format!( + "**Tool Use**: {} (ID: {})\n", + tool_use.name, tool_use.id + )); + markdown.push_str(&format!( + "{}\n", + MarkdownCodeBlock { + tag: "json", + text: &format!("{:#}", tool_use.input) + } + )); + } + } + } + + for tool_result in self.tool_results.values() { + markdown.push_str(&format!( + "**Tool Result**: {} (ID: {})\n\n", + tool_result.tool_name, tool_result.tool_use_id + )); + if tool_result.is_error { + markdown.push_str("**ERROR:**\n"); + } + + match &tool_result.content { + LanguageModelToolResultContent::Text(text) => { + writeln!(markdown, "{text}\n").ok(); + } + LanguageModelToolResultContent::Image(_) => { + writeln!(markdown, "\n").ok(); + } + } + + if let Some(output) = tool_result.output.as_ref() { + writeln!( + markdown, + "**Debug Output**:\n\n```json\n{}\n```\n", + serde_json::to_string_pretty(output).unwrap() + ) + .unwrap(); + } } + + markdown } - pub fn add(&self, tokens: u64) -> TotalTokenUsage { - TotalTokenUsage { - total: self.total + tokens, - max: self.max, + pub fn to_request(&self) -> Vec { + let mut assistant_message = LanguageModelRequestMessage { + role: Role::Assistant, + content: Vec::with_capacity(self.content.len()), + cache: false, + }; + for chunk in &self.content { + match chunk { + AgentMessageContent::Text(text) => { + assistant_message + .content + .push(language_model::MessageContent::Text(text.clone())); + } + AgentMessageContent::Thinking { text, signature } => { + assistant_message + .content + .push(language_model::MessageContent::Thinking { + text: text.clone(), + signature: signature.clone(), + }); + } + AgentMessageContent::RedactedThinking(value) => { + assistant_message.content.push( + language_model::MessageContent::RedactedThinking(value.clone()), + ); + } + AgentMessageContent::ToolUse(tool_use) => { + if self.tool_results.contains_key(&tool_use.id) { + assistant_message + .content + .push(language_model::MessageContent::ToolUse(tool_use.clone())); + } + } + }; + } + + let mut user_message = LanguageModelRequestMessage { + role: Role::User, + content: Vec::new(), + cache: false, + }; + + for tool_result in self.tool_results.values() { + let mut tool_result = tool_result.clone(); + // Surprisingly, the API fails if we return an empty string here. + // It thinks we are sending a tool use without a tool result. 
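// Illustrative check of the `codeblock_tag` helper above, which builds the fence
// tag for mention code blocks: an optional extension prefix, the display path,
// then a 1-based line suffix derived from the zero-based inclusive range. The
// paths and ranges here are hypothetical.
#[test]
fn codeblock_tag_examples() {
    use std::path::Path;
    assert_eq!(
        codeblock_tag(Path::new("src/main.rs"), Some(&(9..=19))),
        "rs src/main.rs:10-20"
    );
    // A single-line range collapses to one number; a file with no extension gets
    // no language prefix.
    assert_eq!(codeblock_tag(Path::new("Makefile"), Some(&(0..=0))), "Makefile:1");
    assert_eq!(codeblock_tag(Path::new("notes.md"), None), "md notes.md");
}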
+ if tool_result.content.is_empty() { + tool_result.content = "".into(); + } + user_message + .content + .push(language_model::MessageContent::ToolResult(tool_result)); + } + + let mut messages = Vec::new(); + if !assistant_message.content.is_empty() { + messages.push(assistant_message); } + if !user_message.content.is_empty() { + messages.push(user_message); + } + messages } } -#[derive(Debug, Default, PartialEq, Eq)] -pub enum TokenUsageRatio { - #[default] - Normal, - Warning, - Exceeded, +#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AgentMessage { + pub content: Vec, + pub tool_results: IndexMap, } -#[derive(Debug, Clone, Copy)] -pub enum QueueState { - Sending, - Queued { position: usize }, - Started, +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum AgentMessageContent { + Text(String), + Thinking { + text: String, + signature: Option, + }, + RedactedThinking(String), + ToolUse(LanguageModelToolUse), } -/// A thread of conversation with the LLM. -pub struct Thread { - id: ThreadId, - updated_at: DateTime, - summary: ThreadSummary, - pending_summary: Task>, - detailed_summary_task: Task>, - detailed_summary_tx: postage::watch::Sender, - detailed_summary_rx: postage::watch::Receiver, - completion_mode: agent_settings::CompletionMode, - messages: Vec, - next_message_id: MessageId, - last_prompt_id: PromptId, - project_context: SharedProjectContext, - checkpoints_by_message: HashMap, - completion_count: usize, - pending_completions: Vec, - project: Entity, - prompt_builder: Arc, - tools: Entity, - tool_use: ToolUseState, - action_log: Entity, - last_restore_checkpoint: Option, - pending_checkpoint: Option, - initial_project_snapshot: Shared>>>, - request_token_usage: Vec, - cumulative_token_usage: TokenUsage, - exceeded_window_error: Option, - tool_use_limit_reached: bool, - retry_state: Option, - message_feedback: HashMap, - last_received_chunk_at: Option, - request_callback: Option< - Box])>, - >, - remaining_turns: u32, - configured_model: Option, - profile: AgentProfile, - last_error_context: Option<(Arc, CompletionIntent)>, +pub trait TerminalHandle { + fn id(&self, cx: &AsyncApp) -> Result; + fn current_output(&self, cx: &AsyncApp) -> Result; + fn wait_for_exit(&self, cx: &AsyncApp) -> Result>>; } -#[derive(Clone, Debug)] -struct RetryState { - attempt: u8, - max_attempts: u8, - intent: CompletionIntent, +pub trait ThreadEnvironment { + fn create_terminal( + &self, + command: String, + cwd: Option, + output_byte_limit: Option, + cx: &mut AsyncApp, + ) -> Task>>; } -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum ThreadSummary { - Pending, - Generating, - Ready(SharedString), - Error, +#[derive(Debug)] +pub enum ThreadEvent { + UserMessage(UserMessage), + AgentText(String), + AgentThinking(String), + ToolCall(acp::ToolCall), + ToolCallUpdate(acp_thread::ToolCallUpdate), + ToolCallAuthorization(ToolCallAuthorization), + Retry(acp_thread::RetryStatus), + Stop(acp::StopReason), } -impl ThreadSummary { - pub const DEFAULT: SharedString = SharedString::new_static("New Thread"); - - pub fn or_default(&self) -> SharedString { - self.unwrap_or(Self::DEFAULT) - } +#[derive(Debug)] +pub struct NewTerminal { + pub command: String, + pub output_byte_limit: Option, + pub cwd: Option, + pub response: oneshot::Sender>>, +} - pub fn unwrap_or(&self, message: impl Into) -> SharedString { - self.ready().unwrap_or_else(|| message.into()) - } +#[derive(Debug)] +pub struct ToolCallAuthorization { + pub tool_call: acp::ToolCallUpdate, + pub 
options: Vec, + pub response: oneshot::Sender, +} - pub fn ready(&self) -> Option { - match self { - ThreadSummary::Ready(summary) => Some(summary.clone()), - ThreadSummary::Pending | ThreadSummary::Generating | ThreadSummary::Error => None, - } - } +#[derive(Debug, thiserror::Error)] +enum CompletionError { + #[error("max tokens")] + MaxTokens, + #[error("refusal")] + Refusal, + #[error(transparent)] + Other(#[from] anyhow::Error), } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct ExceededWindowError { - /// Model used when last message exceeded context window - model_id: LanguageModelId, - /// Token count including last message - token_count: u64, +pub struct Thread { + id: acp::SessionId, + prompt_id: PromptId, + updated_at: DateTime, + title: Option, + pending_title_generation: Option>, + pending_summary_generation: Option>>>, + summary: Option, + messages: Vec, + user_store: Entity, + completion_mode: CompletionMode, + /// Holds the task that handles agent interaction until the end of the turn. + /// Survives across multiple requests as the model performs tool calls and + /// we run tools, report their results. + running_turn: Option, + pending_message: Option, + tools: BTreeMap>, + tool_use_limit_reached: bool, + request_token_usage: HashMap, + #[allow(unused)] + cumulative_token_usage: TokenUsage, + #[allow(unused)] + initial_project_snapshot: Shared>>>, + context_server_registry: Entity, + profile_id: AgentProfileId, + project_context: Entity, + templates: Arc, + model: Option>, + summarization_model: Option>, + prompt_capabilities_tx: watch::Sender, + pub(crate) prompt_capabilities_rx: watch::Receiver, + pub(crate) project: Entity, + pub(crate) action_log: Entity, } impl Thread { + fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities { + let image = model.map_or(true, |model| model.supports_images()); + acp::PromptCapabilities { + meta: None, + image, + audio: false, + embedded_context: true, + } + } + pub fn new( project: Entity, - tools: Entity, - prompt_builder: Arc, - system_prompt: SharedProjectContext, + project_context: Entity, + context_server_registry: Entity, + templates: Arc, + model: Option>, cx: &mut Context, ) -> Self { - let (detailed_summary_tx, detailed_summary_rx) = postage::watch::channel(); - let configured_model = LanguageModelRegistry::read_global(cx).default_model(); let profile_id = AgentSettings::get_global(cx).default_profile.clone(); - + let action_log = cx.new(|_cx| ActionLog::new(project.clone())); + let (prompt_capabilities_tx, prompt_capabilities_rx) = + watch::channel(Self::prompt_capabilities(model.as_deref())); Self { - id: ThreadId::new(), + id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()), + prompt_id: PromptId::new(), updated_at: Utc::now(), - summary: ThreadSummary::Pending, - pending_summary: Task::ready(None), - detailed_summary_task: Task::ready(None), - detailed_summary_tx, - detailed_summary_rx, - completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, + title: None, + pending_title_generation: None, + pending_summary_generation: None, + summary: None, messages: Vec::new(), - next_message_id: MessageId(0), - last_prompt_id: PromptId::new(), - project_context: system_prompt, - checkpoints_by_message: HashMap::default(), - completion_count: 0, - pending_completions: Vec::new(), - project: project.clone(), - prompt_builder, - tools: tools.clone(), - last_restore_checkpoint: None, - pending_checkpoint: None, - tool_use: ToolUseState::new(tools.clone()), - 
action_log: cx.new(|_| ActionLog::new(project.clone())), + user_store: project.read(cx).user_store(), + completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, + running_turn: None, + pending_message: None, + tools: BTreeMap::default(), + tool_use_limit_reached: false, + request_token_usage: HashMap::default(), + cumulative_token_usage: TokenUsage::default(), initial_project_snapshot: { - let project_snapshot = Self::project_snapshot(project, cx); + let project_snapshot = Self::project_snapshot(project.clone(), cx); cx.foreground_executor() .spawn(async move { Some(project_snapshot.await) }) .shared() }, - request_token_usage: Vec::new(), - cumulative_token_usage: TokenUsage::default(), - exceeded_window_error: None, - tool_use_limit_reached: false, - retry_state: None, - message_feedback: HashMap::default(), - last_error_context: None, - last_received_chunk_at: None, - request_callback: None, - remaining_turns: u32::MAX, - configured_model, - profile: AgentProfile::new(profile_id, tools), + context_server_registry, + profile_id, + project_context, + templates, + model, + summarization_model: None, + prompt_capabilities_tx, + prompt_capabilities_rx, + project, + action_log, } } - pub fn deserialize( - id: ThreadId, - serialized: SerializedThread, - project: Entity, - tools: Entity, - prompt_builder: Arc, - project_context: SharedProjectContext, - window: Option<&mut Window>, // None in headless mode - cx: &mut Context, - ) -> Self { - let next_message_id = MessageId( - serialized - .messages - .last() - .map(|message| message.id.0 + 1) - .unwrap_or(0), - ); - let tool_use = ToolUseState::from_serialized_messages( - tools.clone(), - &serialized.messages, - project.clone(), - window, - cx, - ); - let (detailed_summary_tx, detailed_summary_rx) = - postage::watch::channel_with(serialized.detailed_summary_state); - - let configured_model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { - serialized - .model - .and_then(|model| { - let model = SelectedModel { - provider: model.provider.clone().into(), - model: model.model.into(), - }; - registry.select_model(&model, cx) - }) - .or_else(|| registry.default_model()) - }); - - let completion_mode = serialized - .completion_mode - .unwrap_or_else(|| AgentSettings::get_global(cx).preferred_completion_mode); - let profile_id = serialized - .profile - .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone()); + pub fn id(&self) -> &acp::SessionId { + &self.id + } - Self { - id, - updated_at: serialized.updated_at, - summary: ThreadSummary::Ready(serialized.summary), - pending_summary: Task::ready(None), - detailed_summary_task: Task::ready(None), - detailed_summary_tx, - detailed_summary_rx, - completion_mode, - retry_state: None, - messages: serialized - .messages - .into_iter() - .map(|message| Message { - id: message.id, - role: message.role, - segments: message - .segments - .into_iter() - .map(|segment| match segment { - SerializedMessageSegment::Text { text } => MessageSegment::Text(text), - SerializedMessageSegment::Thinking { text, signature } => { - MessageSegment::Thinking { text, signature } + pub fn replay( + &mut self, + cx: &mut Context, + ) -> mpsc::UnboundedReceiver> { + let (tx, rx) = mpsc::unbounded(); + let stream = ThreadEventStream(tx); + for message in &self.messages { + match message { + Message::User(user_message) => stream.send_user_message(user_message), + Message::Agent(assistant_message) => { + for content in &assistant_message.content { + match content { + 
AgentMessageContent::Text(text) => stream.send_text(text), + AgentMessageContent::Thinking { text, .. } => { + stream.send_thinking(text) } - SerializedMessageSegment::RedactedThinking { data } => { - MessageSegment::RedactedThinking(data) + AgentMessageContent::RedactedThinking(_) => {} + AgentMessageContent::ToolUse(tool_use) => { + self.replay_tool_call( + tool_use, + assistant_message.tool_results.get(&tool_use.id), + &stream, + cx, + ); } - }) - .collect(), - loaded_context: LoadedContext { - contexts: Vec::new(), - text: message.context, - images: Vec::new(), - }, - creases: message - .creases - .into_iter() - .map(|crease| MessageCrease { - range: crease.start..crease.end, - icon_path: crease.icon_path, - label: crease.label, - context: None, - }) - .collect(), - is_hidden: message.is_hidden, - ui_only: false, // UI-only messages are not persisted - }) - .collect(), - next_message_id, - last_prompt_id: PromptId::new(), - project_context, - checkpoints_by_message: HashMap::default(), - completion_count: 0, - pending_completions: Vec::new(), - last_restore_checkpoint: None, - pending_checkpoint: None, - project: project.clone(), - prompt_builder, - tools: tools.clone(), - tool_use, - action_log: cx.new(|_| ActionLog::new(project)), - initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(), - request_token_usage: serialized.request_token_usage, - cumulative_token_usage: serialized.cumulative_token_usage, - exceeded_window_error: None, - tool_use_limit_reached: serialized.tool_use_limit_reached, - message_feedback: HashMap::default(), - last_error_context: None, - last_received_chunk_at: None, - request_callback: None, - remaining_turns: u32::MAX, - configured_model, - profile: AgentProfile::new(profile_id, tools), + } + } + } + Message::Resume => {} + } } + rx } - pub fn set_request_callback( - &mut self, - callback: impl 'static - + FnMut(&LanguageModelRequest, &[Result]), + fn replay_tool_call( + &self, + tool_use: &LanguageModelToolUse, + tool_result: Option<&LanguageModelToolResult>, + stream: &ThreadEventStream, + cx: &mut Context, ) { - self.request_callback = Some(Box::new(callback)); - } + let tool = self.tools.get(tool_use.name.as_ref()).cloned().or_else(|| { + self.context_server_registry + .read(cx) + .servers() + .find_map(|(_, tools)| { + if let Some(tool) = tools.get(tool_use.name.as_ref()) { + Some(tool.clone()) + } else { + None + } + }) + }); - pub fn id(&self) -> &ThreadId { - &self.id - } + let Some(tool) = tool else { + stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall { + meta: None, + id: acp::ToolCallId(tool_use.id.to_string().into()), + title: tool_use.name.to_string(), + kind: acp::ToolKind::Other, + status: acp::ToolCallStatus::Failed, + content: Vec::new(), + locations: Vec::new(), + raw_input: Some(tool_use.input.clone()), + raw_output: None, + }))) + .ok(); + return; + }; + + let title = tool.initial_title(tool_use.input.clone(), cx); + let kind = tool.kind(); + stream.send_tool_call( + &tool_use.id, + &tool_use.name, + title, + kind, + tool_use.input.clone(), + ); - pub fn profile(&self) -> &AgentProfile { - &self.profile + let output = tool_result + .as_ref() + .and_then(|result| result.output.clone()); + if let Some(output) = output.clone() { + let tool_event_stream = ToolCallEventStream::new( + tool_use.id.clone(), + stream.clone(), + Some(self.project.read(cx).fs().clone()), + ); + tool.replay(tool_use.input.clone(), output, tool_event_stream, cx) + .log_err(); + } + + stream.update_tool_call_fields( + 
&tool_use.id, + acp::ToolCallUpdateFields { + status: Some( + tool_result + .as_ref() + .map_or(acp::ToolCallStatus::Failed, |result| { + if result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + } + }), + ), + raw_output: output, + ..Default::default() + }, + ); } - pub fn set_profile(&mut self, id: AgentProfileId, cx: &mut Context) { - if &id != self.profile.id() { - self.profile = AgentProfile::new(id, self.tools.clone()); - cx.emit(ThreadEvent::ProfileChanged); + pub fn from_db( + id: acp::SessionId, + db_thread: DbThread, + project: Entity, + project_context: Entity, + context_server_registry: Entity, + templates: Arc, + cx: &mut Context, + ) -> Self { + let profile_id = db_thread + .profile + .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone()); + let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + db_thread + .model + .and_then(|model| { + let model = SelectedModel { + provider: model.provider.clone().into(), + model: model.model.into(), + }; + registry.select_model(&model, cx) + }) + .or_else(|| registry.default_model()) + .map(|model| model.model) + }); + let (prompt_capabilities_tx, prompt_capabilities_rx) = + watch::channel(Self::prompt_capabilities(model.as_deref())); + + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + Self { + id, + prompt_id: PromptId::new(), + title: if db_thread.title.is_empty() { + None + } else { + Some(db_thread.title.clone()) + }, + pending_title_generation: None, + pending_summary_generation: None, + summary: db_thread.detailed_summary, + messages: db_thread.messages, + user_store: project.read(cx).user_store(), + completion_mode: db_thread.completion_mode.unwrap_or_default(), + running_turn: None, + pending_message: None, + tools: BTreeMap::default(), + tool_use_limit_reached: false, + request_token_usage: db_thread.request_token_usage.clone(), + cumulative_token_usage: db_thread.cumulative_token_usage, + initial_project_snapshot: Task::ready(db_thread.initial_project_snapshot).shared(), + context_server_registry, + profile_id, + project_context, + templates, + model, + summarization_model: None, + project, + action_log, + updated_at: db_thread.updated_at, + prompt_capabilities_tx, + prompt_capabilities_rx, } } - pub fn is_empty(&self) -> bool { - self.messages.is_empty() - } + pub fn to_db(&self, cx: &App) -> Task { + let initial_project_snapshot = self.initial_project_snapshot.clone(); + let mut thread = DbThread { + title: self.title(), + messages: self.messages.clone(), + updated_at: self.updated_at, + detailed_summary: self.summary.clone(), + initial_project_snapshot: None, + cumulative_token_usage: self.cumulative_token_usage, + request_token_usage: self.request_token_usage.clone(), + model: self.model.as_ref().map(|model| DbLanguageModel { + provider: model.provider_id().to_string(), + model: model.name().0.to_string(), + }), + completion_mode: Some(self.completion_mode), + profile: Some(self.profile_id.clone()), + }; - pub fn updated_at(&self) -> DateTime { - self.updated_at + cx.background_spawn(async move { + let initial_project_snapshot = initial_project_snapshot.await; + thread.initial_project_snapshot = initial_project_snapshot; + thread + }) } - pub fn touch_updated_at(&mut self) { - self.updated_at = Utc::now(); - } + /// Create a snapshot of the current project state including git information and unsaved buffers. 
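// A minimal sketch of the status mapping used when replaying a persisted tool
// call (see replay_tool_call above): a missing or errored result replays as
// Failed, everything else as Completed. Replayed calls are never re-run; only the
// recorded status and output are surfaced back to the UI.
fn replayed_status(result: Option<&LanguageModelToolResult>) -> acp::ToolCallStatus {
    match result {
        Some(result) if !result.is_error => acp::ToolCallStatus::Completed,
        _ => acp::ToolCallStatus::Failed,
    }
}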
+ fn project_snapshot( + project: Entity, + cx: &mut Context, + ) -> Task> { + let task = project::telemetry_snapshot::TelemetrySnapshot::new(&project, cx); + cx.spawn(async move |_, _| { + let snapshot = task.await; - pub fn advance_prompt_id(&mut self) { - self.last_prompt_id = PromptId::new(); + Arc::new(ProjectSnapshot { + worktree_snapshots: snapshot.worktree_snapshots, + timestamp: Utc::now(), + }) + }) } - pub fn project_context(&self) -> SharedProjectContext { - self.project_context.clone() + pub fn project_context(&self) -> &Entity { + &self.project_context } - pub fn get_or_init_configured_model(&mut self, cx: &App) -> Option { - if self.configured_model.is_none() { - self.configured_model = LanguageModelRegistry::read_global(cx).default_model(); - } - self.configured_model.clone() + pub fn project(&self) -> &Entity { + &self.project } - pub fn configured_model(&self) -> Option { - self.configured_model.clone() + pub fn action_log(&self) -> &Entity { + &self.action_log } - pub fn set_configured_model(&mut self, model: Option, cx: &mut Context) { - self.configured_model = model; - cx.notify(); + pub fn is_empty(&self) -> bool { + self.messages.is_empty() && self.title.is_none() } - pub fn summary(&self) -> &ThreadSummary { - &self.summary + pub fn model(&self) -> Option<&Arc> { + self.model.as_ref() } - pub fn set_summary(&mut self, new_summary: impl Into, cx: &mut Context) { - let current_summary = match &self.summary { - ThreadSummary::Pending | ThreadSummary::Generating => return, - ThreadSummary::Ready(summary) => summary, - ThreadSummary::Error => &ThreadSummary::DEFAULT, - }; - - let mut new_summary = new_summary.into(); - - if new_summary.is_empty() { - new_summary = ThreadSummary::DEFAULT; + pub fn set_model(&mut self, model: Arc, cx: &mut Context) { + let old_usage = self.latest_token_usage(); + self.model = Some(model); + let new_caps = Self::prompt_capabilities(self.model.as_deref()); + let new_usage = self.latest_token_usage(); + if old_usage != new_usage { + cx.emit(TokenUsageUpdated(new_usage)); } + self.prompt_capabilities_tx.send(new_caps).log_err(); + cx.notify() + } - if current_summary != &new_summary { - self.summary = ThreadSummary::Ready(new_summary); - cx.emit(ThreadEvent::SummaryChanged); - } + pub fn summarization_model(&self) -> Option<&Arc> { + self.summarization_model.as_ref() + } + + pub fn set_summarization_model( + &mut self, + model: Option>, + cx: &mut Context, + ) { + self.summarization_model = model; + cx.notify() } pub fn completion_mode(&self) -> CompletionMode { self.completion_mode } - pub fn set_completion_mode(&mut self, mode: CompletionMode) { + pub fn set_completion_mode(&mut self, mode: CompletionMode, cx: &mut Context) { + let old_usage = self.latest_token_usage(); self.completion_mode = mode; + let new_usage = self.latest_token_usage(); + if old_usage != new_usage { + cx.emit(TokenUsageUpdated(new_usage)); + } + cx.notify() } - pub fn message(&self, id: MessageId) -> Option<&Message> { - let index = self - .messages - .binary_search_by(|message| message.id.cmp(&id)) - .ok()?; - - self.messages.get(index) + #[cfg(any(test, feature = "test-support"))] + pub fn last_message(&self) -> Option { + if let Some(message) = self.pending_message.clone() { + Some(Message::Agent(message)) + } else { + self.messages.last().cloned() + } } - pub fn messages(&self) -> impl ExactSizeIterator { - self.messages.iter() + pub fn add_default_tools( + &mut self, + environment: Rc, + cx: &mut Context, + ) { + let language_registry = 
self.project.read(cx).languages().clone(); + self.add_tool(CopyPathTool::new(self.project.clone())); + self.add_tool(CreateDirectoryTool::new(self.project.clone())); + self.add_tool(DeletePathTool::new( + self.project.clone(), + self.action_log.clone(), + )); + self.add_tool(DiagnosticsTool::new(self.project.clone())); + self.add_tool(EditFileTool::new( + self.project.clone(), + cx.weak_entity(), + language_registry, + Templates::new(), + )); + self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); + self.add_tool(FindPathTool::new(self.project.clone())); + self.add_tool(GrepTool::new(self.project.clone())); + self.add_tool(ListDirectoryTool::new(self.project.clone())); + self.add_tool(MovePathTool::new(self.project.clone())); + self.add_tool(NowTool); + self.add_tool(OpenTool::new(self.project.clone())); + self.add_tool(ReadFileTool::new( + self.project.clone(), + self.action_log.clone(), + )); + self.add_tool(TerminalTool::new(self.project.clone(), environment)); + self.add_tool(ThinkingTool); + self.add_tool(WebSearchTool); } - pub fn is_generating(&self) -> bool { - !self.pending_completions.is_empty() || !self.all_tools_finished() + pub fn add_tool(&mut self, tool: T) { + self.tools.insert(T::name().into(), tool.erase()); } - /// Indicates whether streaming of language model events is stale. - /// When `is_generating()` is false, this method returns `None`. - pub fn is_generation_stale(&self) -> Option { - const STALE_THRESHOLD: u128 = 250; - - self.last_received_chunk_at - .map(|instant| instant.elapsed().as_millis() > STALE_THRESHOLD) + pub fn remove_tool(&mut self, name: &str) -> bool { + self.tools.remove(name).is_some() } - fn received_chunk(&mut self) { - self.last_received_chunk_at = Some(Instant::now()); + pub fn profile(&self) -> &AgentProfileId { + &self.profile_id } - pub fn queue_state(&self) -> Option { - self.pending_completions - .first() - .map(|pending_completion| pending_completion.queue_state) + pub fn set_profile(&mut self, profile_id: AgentProfileId) { + self.profile_id = profile_id; } - pub fn tools(&self) -> &Entity { - &self.tools + pub fn cancel(&mut self, cx: &mut Context) { + if let Some(running_turn) = self.running_turn.take() { + running_turn.cancel(); + } + self.flush_pending_message(cx); } - pub fn pending_tool(&self, id: &LanguageModelToolUseId) -> Option<&PendingToolUse> { - self.tool_use - .pending_tool_uses() - .into_iter() - .find(|tool_use| &tool_use.id == id) + fn update_token_usage(&mut self, update: language_model::TokenUsage, cx: &mut Context) { + let Some(last_user_message) = self.last_user_message() else { + return; + }; + + self.request_token_usage + .insert(last_user_message.id.clone(), update); + cx.emit(TokenUsageUpdated(self.latest_token_usage())); + cx.notify(); } - pub fn tools_needing_confirmation(&self) -> impl Iterator { - self.tool_use - .pending_tool_uses() - .into_iter() - .filter(|tool_use| tool_use.status.needs_confirmation()) + pub fn truncate(&mut self, message_id: UserMessageId, cx: &mut Context) -> Result<()> { + self.cancel(cx); + let Some(position) = self.messages.iter().position( + |msg| matches!(msg, Message::User(UserMessage { id, .. }) if id == &message_id), + ) else { + return Err(anyhow!("Message not found")); + }; + + for message in self.messages.drain(position..) 
{ + match message { + Message::User(message) => { + self.request_token_usage.remove(&message.id); + } + Message::Agent(_) | Message::Resume => {} + } + } + self.clear_summary(); + cx.notify(); + Ok(()) } - pub fn has_pending_tool_uses(&self) -> bool { - !self.tool_use.pending_tool_uses().is_empty() + pub fn latest_request_token_usage(&self) -> Option { + let last_user_message = self.last_user_message()?; + let tokens = self.request_token_usage.get(&last_user_message.id)?; + Some(*tokens) } - pub fn checkpoint_for_message(&self, id: MessageId) -> Option { - self.checkpoints_by_message.get(&id).cloned() + pub fn latest_token_usage(&self) -> Option { + let usage = self.latest_request_token_usage()?; + let model = self.model.clone()?; + Some(acp_thread::TokenUsage { + max_tokens: model.max_token_count_for_mode(self.completion_mode.into()), + used_tokens: usage.total_tokens(), + }) } - pub fn restore_checkpoint( + pub fn resume( &mut self, - checkpoint: ThreadCheckpoint, cx: &mut Context, - ) -> Task> { - self.last_restore_checkpoint = Some(LastRestoreCheckpoint::Pending { - message_id: checkpoint.message_id, - }); - cx.emit(ThreadEvent::CheckpointChanged); + ) -> Result>> { + self.messages.push(Message::Resume); cx.notify(); - let git_store = self.project().read(cx).git_store().clone(); - let restore = git_store.update(cx, |git_store, cx| { - git_store.restore_checkpoint(checkpoint.git_checkpoint.clone(), cx) - }); - - cx.spawn(async move |this, cx| { - let result = restore.await; - this.update(cx, |this, cx| { - if let Err(err) = result.as_ref() { - this.last_restore_checkpoint = Some(LastRestoreCheckpoint::Error { - message_id: checkpoint.message_id, - error: err.to_string(), - }); - } else { - this.truncate(checkpoint.message_id, cx); - this.last_restore_checkpoint = None; - } - this.pending_checkpoint = None; - cx.emit(ThreadEvent::CheckpointChanged); - cx.notify(); - })?; - result - }) + log::debug!("Total messages in thread: {}", self.messages.len()); + self.run_turn(cx) } - fn finalize_pending_checkpoint(&mut self, cx: &mut Context) { - let pending_checkpoint = if self.is_generating() { - return; - } else if let Some(checkpoint) = self.pending_checkpoint.take() { - checkpoint - } else { - return; - }; + /// Sending a message results in the model streaming a response, which could include tool calls. + /// After calling tools, the model will stops and waits for any outstanding tool calls to be completed and their results sent. + /// The returned channel will report all the occurrences in which the model stops before erroring or ending its turn. 
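// A usage sketch for the `send` method declared just below, assuming a caller
// that holds `thread: Entity<Thread>` inside an async gpui context. The
// `UserMessageId::new()` constructor and the `Into` conversion from `&str` into
// user message content are assumptions here and are not shown in this diff.
async fn send_and_drain_events_sketch(
    thread: Entity<Thread>,
    cx: &mut AsyncApp,
) -> anyhow::Result<()> {
    use futures::StreamExt as _;

    let mut events = thread.update(cx, |thread, cx| {
        thread.send(UserMessageId::new(), ["Summarize this repository"], cx)
    })??;

    while let Some(event) = events.next().await {
        match event? {
            // Stream assistant text as it arrives.
            ThreadEvent::AgentText(text) => print!("{text}"),
            // The turn is over: EndTurn, Refusal, or MaxTokens.
            ThreadEvent::Stop(reason) => {
                println!("\nstopped: {reason:?}");
                break;
            }
            // Tool calls, retries, etc. are ignored in this sketch.
            _ => {}
        }
    }
    Ok(())
}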
+ pub fn send( + &mut self, + id: UserMessageId, + content: impl IntoIterator, + cx: &mut Context, + ) -> Result>> + where + T: Into, + { + let model = self.model().context("No language model configured")?; + + log::info!("Thread::send called with model: {}", model.name().0); + self.advance_prompt_id(); + + let content = content.into_iter().map(Into::into).collect::>(); + log::debug!("Thread::send content: {:?}", content); - self.finalize_checkpoint(pending_checkpoint, cx); + self.messages + .push(Message::User(UserMessage { id, content })); + cx.notify(); + + log::debug!("Total messages in thread: {}", self.messages.len()); + self.run_turn(cx) } - fn finalize_checkpoint( + #[cfg(feature = "eval")] + pub fn proceed( &mut self, - pending_checkpoint: ThreadCheckpoint, cx: &mut Context, - ) { - let git_store = self.project.read(cx).git_store().clone(); - let final_checkpoint = git_store.update(cx, |git_store, cx| git_store.checkpoint(cx)); - cx.spawn(async move |this, cx| match final_checkpoint.await { - Ok(final_checkpoint) => { - let equal = git_store - .update(cx, |store, cx| { - store.compare_checkpoints( - pending_checkpoint.git_checkpoint.clone(), - final_checkpoint.clone(), - cx, - ) - })? - .await - .unwrap_or(false); + ) -> Result>> { + self.run_turn(cx) + } - this.update(cx, |this, cx| { - this.pending_checkpoint = if equal { - Some(pending_checkpoint) - } else { - this.insert_checkpoint(pending_checkpoint, cx); - Some(ThreadCheckpoint { - message_id: this.next_message_id, - git_checkpoint: final_checkpoint, - }) + fn run_turn( + &mut self, + cx: &mut Context, + ) -> Result>> { + self.cancel(cx); + + let model = self.model.clone().context("No language model configured")?; + let profile = AgentSettings::get_global(cx) + .profiles + .get(&self.profile_id) + .context("Profile not found")?; + let (events_tx, events_rx) = mpsc::unbounded::>(); + let event_stream = ThreadEventStream(events_tx); + let message_ix = self.messages.len().saturating_sub(1); + self.tool_use_limit_reached = false; + self.clear_summary(); + self.running_turn = Some(RunningTurn { + event_stream: event_stream.clone(), + tools: self.enabled_tools(profile, &model, cx), + _task: cx.spawn(async move |this, cx| { + log::debug!("Starting agent turn execution"); + + let turn_result = Self::run_turn_internal(&this, model, &event_stream, cx).await; + _ = this.update(cx, |this, cx| this.flush_pending_message(cx)); + + match turn_result { + Ok(()) => { + log::debug!("Turn execution completed"); + event_stream.send_stop(acp::StopReason::EndTurn); } - })?; + Err(error) => { + log::error!("Turn execution failed: {:?}", error); + match error.downcast::() { + Ok(CompletionError::Refusal) => { + event_stream.send_stop(acp::StopReason::Refusal); + _ = this.update(cx, |this, _| this.messages.truncate(message_ix)); + } + Ok(CompletionError::MaxTokens) => { + event_stream.send_stop(acp::StopReason::MaxTokens); + } + Ok(CompletionError::Other(error)) | Err(error) => { + event_stream.send_error(error); + } + } + } + } - Ok(()) - } - Err(_) => this.update(cx, |this, cx| { - this.insert_checkpoint(pending_checkpoint, cx) + _ = this.update(cx, |this, _| this.running_turn.take()); }), - }) - .detach(); + }); + Ok(events_rx) } - fn insert_checkpoint(&mut self, checkpoint: ThreadCheckpoint, cx: &mut Context) { - self.checkpoints_by_message - .insert(checkpoint.message_id, checkpoint); - cx.emit(ThreadEvent::CheckpointChanged); - cx.notify(); - } + async fn run_turn_internal( + this: &WeakEntity, + model: Arc, + event_stream: &ThreadEventStream, 
+ cx: &mut AsyncApp, + ) -> Result<()> { + let mut attempt = 0; + let mut intent = CompletionIntent::UserPrompt; + loop { + let request = + this.update(cx, |this, cx| this.build_completion_request(intent, cx))??; - pub fn last_restore_checkpoint(&self) -> Option<&LastRestoreCheckpoint> { - self.last_restore_checkpoint.as_ref() - } + telemetry::event!( + "Agent Thread Completion", + thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, + model = model.telemetry_id(), + model_provider = model.provider_id().to_string(), + attempt + ); - pub fn truncate(&mut self, message_id: MessageId, cx: &mut Context) { - let Some(message_ix) = self - .messages - .iter() - .rposition(|message| message.id == message_id) - else { - return; - }; - for deleted_message in self.messages.drain(message_ix..) { - self.checkpoints_by_message.remove(&deleted_message.id); - } - cx.notify(); - } + log::debug!("Calling model.stream_completion, attempt {}", attempt); - pub fn context_for_message(&self, id: MessageId) -> impl Iterator { - self.messages - .iter() - .find(|message| message.id == id) - .into_iter() - .flat_map(|message| message.loaded_context.contexts.iter()) - } + let (mut events, mut error) = match model.stream_completion(request, cx).await { + Ok(events) => (events, None), + Err(err) => (stream::empty().boxed(), Some(err)), + }; + let mut tool_results = FuturesUnordered::new(); + while let Some(event) = events.next().await { + log::trace!("Received completion event: {:?}", event); + match event { + Ok(event) => { + tool_results.extend(this.update(cx, |this, cx| { + this.handle_completion_event(event, event_stream, cx) + })??); + } + Err(err) => { + error = Some(err); + break; + } + } + } - pub fn is_turn_end(&self, ix: usize) -> bool { - if self.messages.is_empty() { - return false; - } + let end_turn = tool_results.is_empty(); + while let Some(tool_result) = tool_results.next().await { + log::debug!("Tool finished {:?}", tool_result); - if !self.is_generating() && ix == self.messages.len() - 1 { - return true; - } + event_stream.update_tool_call_fields( + &tool_result.tool_use_id, + acp::ToolCallUpdateFields { + status: Some(if tool_result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + }), + raw_output: tool_result.output.clone(), + ..Default::default() + }, + ); + this.update(cx, |this, _cx| { + this.pending_message() + .tool_results + .insert(tool_result.tool_use_id.clone(), tool_result); + })?; + } - let Some(message) = self.messages.get(ix) else { - return false; - }; + this.update(cx, |this, cx| { + this.flush_pending_message(cx); + if this.title.is_none() && this.pending_title_generation.is_none() { + this.generate_title(cx); + } + })?; - if message.role != Role::Assistant { - return false; + if let Some(error) = error { + attempt += 1; + let retry = this.update(cx, |this, cx| { + let user_store = this.user_store.read(cx); + this.handle_completion_error(error, attempt, user_store.plan()) + })??; + let timer = cx.background_executor().timer(retry.duration); + event_stream.send_retry(retry); + timer.await; + this.update(cx, |this, _cx| { + if let Some(Message::Agent(message)) = this.messages.last() { + if message.tool_results.is_empty() { + intent = CompletionIntent::UserPrompt; + this.messages.push(Message::Resume); + } + } + })?; + } else if this.read_with(cx, |this, _| this.tool_use_limit_reached)? 
{ + return Err(language_model::ToolUseLimitReachedError.into()); + } else if end_turn { + return Ok(()); + } else { + intent = CompletionIntent::ToolResults; + attempt = 0; + } } - - self.messages - .get(ix + 1) - .and_then(|message| { - self.message(message.id) - .map(|next_message| next_message.role == Role::User && !next_message.is_hidden) - }) - .unwrap_or(false) } - pub fn tool_use_limit_reached(&self) -> bool { - self.tool_use_limit_reached - } + fn handle_completion_error( + &mut self, + error: LanguageModelCompletionError, + attempt: u8, + plan: Option, + ) -> Result { + let Some(model) = self.model.as_ref() else { + return Err(anyhow!(error)); + }; - /// Returns whether all of the tool uses have finished running. - pub fn all_tools_finished(&self) -> bool { - // If the only pending tool uses left are the ones with errors, then - // that means that we've finished running all of the pending tools. - self.tool_use - .pending_tool_uses() - .iter() - .all(|pending_tool_use| pending_tool_use.status.is_error()) - } + let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID { + match plan { + Some(Plan::V2(_)) => true, + Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn, + None => false, + } + } else { + true + }; - /// Returns whether any pending tool uses may perform edits - pub fn has_pending_edit_tool_uses(&self) -> bool { - self.tool_use - .pending_tool_uses() - .iter() - .filter(|pending_tool_use| !pending_tool_use.status.is_error()) - .any(|pending_tool_use| pending_tool_use.may_perform_edits) - } + if !auto_retry { + return Err(anyhow!(error)); + } - pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec { - self.tool_use.tool_uses_for_message(id, &self.project, cx) - } + let Some(strategy) = Self::retry_strategy_for(&error) else { + return Err(anyhow!(error)); + }; - pub fn tool_results_for_message( - &self, - assistant_message_id: MessageId, - ) -> Vec<&LanguageModelToolResult> { - self.tool_use.tool_results_for_message(assistant_message_id) - } + let max_attempts = match &strategy { + RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, + RetryStrategy::Fixed { max_attempts, .. } => *max_attempts, + }; + + if attempt > max_attempts { + return Err(anyhow!(error)); + } - pub fn tool_result(&self, id: &LanguageModelToolUseId) -> Option<&LanguageModelToolResult> { - self.tool_use.tool_result(id) + let delay = match &strategy { + RetryStrategy::ExponentialBackoff { initial_delay, .. } => { + let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); + Duration::from_secs(delay_secs) + } + RetryStrategy::Fixed { delay, .. } => *delay, + }; + log::debug!("Retry attempt {attempt} with delay {delay:?}"); + + Ok(acp_thread::RetryStatus { + last_error: error.to_string().into(), + attempt: attempt as usize, + max_attempts: max_attempts as usize, + started_at: Instant::now(), + duration: delay, + }) } - pub fn output_for_tool(&self, id: &LanguageModelToolUseId) -> Option<&Arc> { - match &self.tool_use.tool_result(id)?.content { - LanguageModelToolResultContent::Text(text) => Some(text), - LanguageModelToolResultContent::Image(_) => { - // TODO: We should display image - None + /// A helper method that's called on every streamed completion event. + /// Returns an optional tool result task, which the main agentic loop will + /// send back to the model when it resolves. 
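// A minimal sketch of the backoff arithmetic used in handle_completion_error
// above: the delay doubles with each attempt, so an ExponentialBackoff strategy
// with an initial delay of 2s waits 2s, 4s, and 8s for attempts 1 through 3.
// (Attempts are 1-based by the time this is computed.)
fn backoff_delay(initial_delay: Duration, attempt: u8) -> Duration {
    Duration::from_secs(initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32))
}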
+ fn handle_completion_event( + &mut self, + event: LanguageModelCompletionEvent, + event_stream: &ThreadEventStream, + cx: &mut Context, + ) -> Result>> { + log::trace!("Handling streamed completion event: {:?}", event); + use LanguageModelCompletionEvent::*; + + match event { + StartMessage { .. } => { + self.flush_pending_message(cx); + self.pending_message = Some(AgentMessage::default()); + } + Text(new_text) => self.handle_text_event(new_text, event_stream, cx), + Thinking { text, signature } => { + self.handle_thinking_event(text, signature, event_stream, cx) + } + RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx), + ToolUse(tool_use) => { + return Ok(self.handle_tool_use_event(tool_use, event_stream, cx)); } + ToolUseJsonParseError { + id, + tool_name, + raw_input, + json_parse_error, + } => { + return Ok(Some(Task::ready( + self.handle_tool_use_json_parse_error_event( + id, + tool_name, + raw_input, + json_parse_error, + ), + ))); + } + UsageUpdate(usage) => { + telemetry::event!( + "Agent Thread Completion Usage Updated", + thread_id = self.id.to_string(), + prompt_id = self.prompt_id.to_string(), + model = self.model.as_ref().map(|m| m.telemetry_id()), + model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), + input_tokens = usage.input_tokens, + output_tokens = usage.output_tokens, + cache_creation_input_tokens = usage.cache_creation_input_tokens, + cache_read_input_tokens = usage.cache_read_input_tokens, + ); + self.update_token_usage(usage, cx); + } + StatusUpdate(CompletionRequestStatus::UsageUpdated { amount, limit }) => { + self.update_model_request_usage(amount, limit, cx); + } + StatusUpdate( + CompletionRequestStatus::Started + | CompletionRequestStatus::Queued { .. } + | CompletionRequestStatus::Failed { .. 
}, + ) => {} + StatusUpdate(CompletionRequestStatus::ToolUseLimitReached) => { + self.tool_use_limit_reached = true; + } + Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()), + Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()), + Stop(StopReason::ToolUse | StopReason::EndTurn) => {} } - } - pub fn card_for_tool(&self, id: &LanguageModelToolUseId) -> Option { - self.tool_use.tool_result_card(id).cloned() + Ok(None) } - /// Return tools that are both enabled and supported by the model - pub fn available_tools( - &self, - cx: &App, - model: Arc, - ) -> Vec { - if model.supports_tools() { - self.profile - .enabled_tools(cx) - .into_iter() - .filter_map(|(name, tool)| { - // Skip tools that cannot be supported - let input_schema = tool.input_schema(model.tool_input_format()).ok()?; - Some(LanguageModelRequestTool { - name: name.into(), - description: tool.description(), - input_schema, - }) - }) - .collect() + fn handle_text_event( + &mut self, + new_text: String, + event_stream: &ThreadEventStream, + cx: &mut Context, + ) { + event_stream.send_text(&new_text); + + let last_message = self.pending_message(); + if let Some(AgentMessageContent::Text(text)) = last_message.content.last_mut() { + text.push_str(&new_text); } else { - Vec::default() + last_message + .content + .push(AgentMessageContent::Text(new_text)); } + + cx.notify(); } - pub fn insert_user_message( + fn handle_thinking_event( &mut self, - text: impl Into, - loaded_context: ContextLoadResult, - git_checkpoint: Option, - creases: Vec, + new_text: String, + new_signature: Option, + event_stream: &ThreadEventStream, cx: &mut Context, - ) -> MessageId { - if !loaded_context.referenced_buffers.is_empty() { - self.action_log.update(cx, |log, cx| { - for buffer in loaded_context.referenced_buffers { - log.buffer_read(buffer, cx); - } + ) { + event_stream.send_thinking(&new_text); + + let last_message = self.pending_message(); + if let Some(AgentMessageContent::Thinking { text, signature }) = + last_message.content.last_mut() + { + text.push_str(&new_text); + *signature = new_signature.or(signature.take()); + } else { + last_message.content.push(AgentMessageContent::Thinking { + text: new_text, + signature: new_signature, }); } - let message_id = self.insert_message( - Role::User, - vec![MessageSegment::Text(text.into())], - loaded_context.loaded_context, - creases, - false, - cx, - ); - - if let Some(git_checkpoint) = git_checkpoint { - self.pending_checkpoint = Some(ThreadCheckpoint { - message_id, - git_checkpoint, - }); - } - - message_id + cx.notify(); } - pub fn insert_invisible_continue_message(&mut self, cx: &mut Context) -> MessageId { - let id = self.insert_message( - Role::User, - vec![MessageSegment::Text("Continue where you left off".into())], - LoadedContext::default(), - vec![], - true, - cx, - ); - self.pending_checkpoint = None; - - id + fn handle_redacted_thinking_event(&mut self, data: String, cx: &mut Context) { + let last_message = self.pending_message(); + last_message + .content + .push(AgentMessageContent::RedactedThinking(data)); + cx.notify(); } - pub fn insert_assistant_message( + fn handle_tool_use_event( &mut self, - segments: Vec, + tool_use: LanguageModelToolUse, + event_stream: &ThreadEventStream, cx: &mut Context, - ) -> MessageId { - self.insert_message( - Role::Assistant, - segments, - LoadedContext::default(), - Vec::new(), - false, - cx, - ) - } + ) -> Option> { + cx.notify(); - pub fn insert_message( - &mut self, - role: Role, - segments: Vec, - 
loaded_context: LoadedContext, - creases: Vec, - is_hidden: bool, - cx: &mut Context, - ) -> MessageId { - let id = self.next_message_id.post_inc(); - self.messages.push(Message { - id, - role, - segments, - loaded_context, - creases, - is_hidden, - ui_only: false, + let tool = self.tool(tool_use.name.as_ref()); + let mut title = SharedString::from(&tool_use.name); + let mut kind = acp::ToolKind::Other; + if let Some(tool) = tool.as_ref() { + title = tool.initial_title(tool_use.input.clone(), cx); + kind = tool.kind(); + } + + // Ensure the last message ends in the current tool use + let last_message = self.pending_message(); + let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { + if let AgentMessageContent::ToolUse(last_tool_use) = content { + if last_tool_use.id == tool_use.id { + *last_tool_use = tool_use.clone(); + false + } else { + true + } + } else { + true + } }); - self.touch_updated_at(); - cx.emit(ThreadEvent::MessageAdded(id)); - id - } - pub fn edit_message( - &mut self, - id: MessageId, - new_role: Role, - new_segments: Vec, - creases: Vec, - loaded_context: Option, - checkpoint: Option, - cx: &mut Context, - ) -> bool { - let Some(message) = self.messages.iter_mut().find(|message| message.id == id) else { - return false; - }; - message.role = new_role; - message.segments = new_segments; - message.creases = creases; - if let Some(context) = loaded_context { - message.loaded_context = context; - } - if let Some(git_checkpoint) = checkpoint { - self.checkpoints_by_message.insert( - id, - ThreadCheckpoint { - message_id: id, - git_checkpoint, + if push_new_tool_use { + event_stream.send_tool_call( + &tool_use.id, + &tool_use.name, + title, + kind, + tool_use.input.clone(), + ); + last_message + .content + .push(AgentMessageContent::ToolUse(tool_use.clone())); + } else { + event_stream.update_tool_call_fields( + &tool_use.id, + acp::ToolCallUpdateFields { + title: Some(title.into()), + kind: Some(kind), + raw_input: Some(tool_use.input.clone()), + ..Default::default() }, ); } - self.touch_updated_at(); - cx.emit(ThreadEvent::MessageEdited(id)); - true - } - pub fn delete_message(&mut self, id: MessageId, cx: &mut Context) -> bool { - let Some(index) = self.messages.iter().position(|message| message.id == id) else { - return false; - }; - self.messages.remove(index); - self.touch_updated_at(); - cx.emit(ThreadEvent::MessageDeleted(id)); - true - } + if !tool_use.is_input_complete { + return None; + } - /// Returns the representation of this [`Thread`] in a textual form. - /// - /// This is the representation we use when attaching a thread as context to another thread. 
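// A minimal sketch of the dedup rule applied in handle_tool_use_event above:
// streamed updates carrying the same tool-use ID overwrite the trailing ToolUse
// chunk in place (the UI receives a field update), while a new ID appends a fresh
// chunk (the UI receives a new ToolCall event).
fn upsert_tool_use(
    content: &mut Vec<AgentMessageContent>,
    tool_use: LanguageModelToolUse,
) -> bool {
    match content.last_mut() {
        Some(AgentMessageContent::ToolUse(last)) if last.id == tool_use.id => {
            *last = tool_use;
            false // updated in place
        }
        _ => {
            content.push(AgentMessageContent::ToolUse(tool_use));
            true // new tool use appended
        }
    }
}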
- pub fn text(&self) -> String { - let mut text = String::new(); + let Some(tool) = tool else { + let content = format!("No tool named {} exists", tool_use.name); + return Some(Task::ready(LanguageModelToolResult { + content: LanguageModelToolResultContent::Text(Arc::from(content)), + tool_use_id: tool_use.id, + tool_name: tool_use.name, + is_error: true, + output: None, + })); + }; - for message in &self.messages { - text.push_str(match message.role { - language_model::Role::User => "User:", - language_model::Role::Assistant => "Agent:", - language_model::Role::System => "System:", + let fs = self.project.read(cx).fs().clone(); + let tool_event_stream = + ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs)); + tool_event_stream.update_fields(acp::ToolCallUpdateFields { + status: Some(acp::ToolCallStatus::InProgress), + ..Default::default() + }); + let supports_images = self.model().is_some_and(|model| model.supports_images()); + let tool_result = tool.run(tool_use.input, tool_event_stream, cx); + log::debug!("Running tool {}", tool_use.name); + Some(cx.foreground_executor().spawn(async move { + let tool_result = tool_result.await.and_then(|output| { + if let LanguageModelToolResultContent::Image(_) = &output.llm_output + && !supports_images + { + return Err(anyhow!( + "Attempted to read an image, but this model doesn't support it.", + )); + } + Ok(output) }); - text.push('\n'); - for segment in &message.segments { - match segment { - MessageSegment::Text(content) => text.push_str(content), - MessageSegment::Thinking { text: content, .. } => { - text.push_str(&format!("{}", content)) - } - MessageSegment::RedactedThinking(_) => {} - } + match tool_result { + Ok(output) => LanguageModelToolResult { + tool_use_id: tool_use.id, + tool_name: tool_use.name, + is_error: false, + content: output.llm_output, + output: Some(output.raw_output), + }, + Err(error) => LanguageModelToolResult { + tool_use_id: tool_use.id, + tool_name: tool_use.name, + is_error: true, + content: LanguageModelToolResultContent::Text(Arc::from(error.to_string())), + output: Some(error.to_string().into()), + }, } - text.push('\n'); - } + })) + } - text + fn handle_tool_use_json_parse_error_event( + &mut self, + tool_use_id: LanguageModelToolUseId, + tool_name: Arc, + raw_input: Arc, + json_parse_error: String, + ) -> LanguageModelToolResult { + let tool_output = format!("Error parsing input JSON: {json_parse_error}"); + LanguageModelToolResult { + tool_use_id, + tool_name, + is_error: true, + content: LanguageModelToolResultContent::Text(tool_output.into()), + output: Some(serde_json::Value::String(raw_input.to_string())), + } } - /// Serializes this thread into a format for storage or telemetry. 
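// A minimal sketch of the guard applied to tool output above: an image result is
// rejected when the active model cannot accept images, so the failure reaches the
// model as an explicit error result rather than an attachment it cannot use.
fn check_image_support(
    output: LanguageModelToolResultContent,
    supports_images: bool,
) -> anyhow::Result<LanguageModelToolResultContent> {
    if matches!(output, LanguageModelToolResultContent::Image(_)) && !supports_images {
        anyhow::bail!("Attempted to read an image, but this model doesn't support it.");
    }
    Ok(output)
}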
- pub fn serialize(&self, cx: &mut Context) -> Task> { - let initial_project_snapshot = self.initial_project_snapshot.clone(); - cx.spawn(async move |this, cx| { - let initial_project_snapshot = initial_project_snapshot.await; - this.read_with(cx, |this, cx| SerializedThread { - version: SerializedThread::VERSION.to_string(), - summary: this.summary().or_default(), - updated_at: this.updated_at(), - messages: this - .messages() - .filter(|message| !message.ui_only) - .map(|message| SerializedMessage { - id: message.id, - role: message.role, - segments: message - .segments - .iter() - .map(|segment| match segment { - MessageSegment::Text(text) => { - SerializedMessageSegment::Text { text: text.clone() } - } - MessageSegment::Thinking { text, signature } => { - SerializedMessageSegment::Thinking { - text: text.clone(), - signature: signature.clone(), - } - } - MessageSegment::RedactedThinking(data) => { - SerializedMessageSegment::RedactedThinking { - data: data.clone(), - } - } - }) - .collect(), - tool_uses: this - .tool_uses_for_message(message.id, cx) - .into_iter() - .map(|tool_use| SerializedToolUse { - id: tool_use.id, - name: tool_use.name, - input: tool_use.input, - }) - .collect(), - tool_results: this - .tool_results_for_message(message.id) - .into_iter() - .map(|tool_result| SerializedToolResult { - tool_use_id: tool_result.tool_use_id.clone(), - is_error: tool_result.is_error, - content: tool_result.content.clone(), - output: tool_result.output.clone(), - }) - .collect(), - context: message.loaded_context.text.clone(), - creases: message - .creases - .iter() - .map(|crease| SerializedCrease { - start: crease.range.start, - end: crease.range.end, - icon_path: crease.icon_path.clone(), - label: crease.label.clone(), - }) - .collect(), - is_hidden: message.is_hidden, - }) - .collect(), - initial_project_snapshot, - cumulative_token_usage: this.cumulative_token_usage, - request_token_usage: this.request_token_usage.clone(), - detailed_summary_state: this.detailed_summary_rx.borrow().clone(), - exceeded_window_error: this.exceeded_window_error.clone(), - model: this - .configured_model - .as_ref() - .map(|model| SerializedLanguageModel { - provider: model.provider.id().0.to_string(), - model: model.model.id().0.to_string(), + fn update_model_request_usage(&self, amount: usize, limit: UsageLimit, cx: &mut Context) { + self.project + .read(cx) + .user_store() + .update(cx, |user_store, cx| { + user_store.update_model_request_usage( + ModelRequestUsage(RequestUsage { + amount: amount as i32, + limit, }), - completion_mode: Some(this.completion_mode), - tool_use_limit_reached: this.tool_use_limit_reached, - profile: Some(this.profile.id().clone()), - }) - }) + cx, + ) + }); } - pub fn remaining_turns(&self) -> u32 { - self.remaining_turns + pub fn title(&self) -> SharedString { + self.title.clone().unwrap_or("New Thread".into()) } - pub fn set_remaining_turns(&mut self, remaining_turns: u32) { - self.remaining_turns = remaining_turns; + pub fn is_generating_summary(&self) -> bool { + self.pending_summary_generation.is_some() } - pub fn send_to_model( - &mut self, - model: Arc, - intent: CompletionIntent, - window: Option, - cx: &mut Context, - ) { - if self.remaining_turns == 0 { - return; + pub fn summary(&mut self, cx: &mut Context) -> Shared>> { + if let Some(summary) = self.summary.as_ref() { + return Task::ready(Some(summary.clone())).shared(); } - - self.remaining_turns -= 1; - - self.flush_notifications(model.clone(), intent, cx); - - let _checkpoint = 
self.finalize_pending_checkpoint(cx); - self.stream_completion( - self.to_completion_request(model.clone(), intent, cx), - model, - intent, - window, - cx, - ); - } - - pub fn to_completion_request( - &self, - model: Arc, - intent: CompletionIntent, - cx: &mut Context, - ) -> LanguageModelRequest { + if let Some(task) = self.pending_summary_generation.clone() { + return task; + } + let Some(model) = self.summarization_model.clone() else { + log::error!("No summarization model available"); + return Task::ready(None).shared(); + }; let mut request = LanguageModelRequest { - thread_id: Some(self.id.to_string()), - prompt_id: Some(self.last_prompt_id.to_string()), - intent: Some(intent), - mode: None, - messages: vec![], - tools: Vec::new(), - tool_choice: None, - stop: Vec::new(), + intent: Some(CompletionIntent::ThreadContextSummarization), temperature: AgentSettings::temperature_for_model(&model, cx), - thinking_allowed: true, - }; - - let available_tools = self.available_tools(cx, model.clone()); - let available_tool_names = available_tools - .iter() - .map(|tool| tool.name.clone()) - .collect(); - - let model_context = &ModelContext { - available_tools: available_tool_names, + ..Default::default() }; - if let Some(project_context) = self.project_context.borrow().as_ref() { - match self - .prompt_builder - .generate_assistant_system_prompt(project_context, model_context) - { - Err(err) => { - let message = format!("{err:?}").into(); - log::error!("{message}"); - cx.emit(ThreadEvent::ShowError(ThreadError::Message { - header: "Error generating system prompt".into(), - message, - })); - } - Ok(system_prompt) => { - request.messages.push(LanguageModelRequestMessage { - role: Role::System, - content: vec![MessageContent::Text(system_prompt)], - cache: true, - }); - } - } - } else { - let message = "Context for system prompt unexpectedly not ready.".into(); - log::error!("{message}"); - cx.emit(ThreadEvent::ShowError(ThreadError::Message { - header: "Error generating system prompt".into(), - message, - })); - } - - let mut message_ix_to_cache = None; for message in &self.messages { - // ui_only messages are for the UI only, not for the model - if message.ui_only { - continue; - } + request.messages.extend(message.to_request()); + } - let mut request_message = LanguageModelRequestMessage { - role: message.role, - content: Vec::new(), - cache: false, - }; + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()], + cache: false, + }); - message - .loaded_context - .add_to_request_message(&mut request_message); - - for segment in &message.segments { - match segment { - MessageSegment::Text(text) => { - let text = text.trim_end(); - if !text.is_empty() { - request_message - .content - .push(MessageContent::Text(text.into())); - } - } - MessageSegment::Thinking { text, signature } => { - if !text.is_empty() { - request_message.content.push(MessageContent::Thinking { - text: text.into(), - signature: signature.clone(), - }); + let task = cx + .spawn(async move |this, cx| { + let mut summary = String::new(); + let mut messages = model.stream_completion(request, cx).await.log_err()?; + while let Some(event) = messages.next().await { + let event = event.log_err()?; + let text = match event { + LanguageModelCompletionEvent::Text(text) => text, + LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + ) => { + this.update(cx, |thread, cx| { + thread.update_model_request_usage(amount, 
limit, cx); + }) + .ok()?; + continue; } - } - MessageSegment::RedactedThinking(data) => { - request_message - .content - .push(MessageContent::RedactedThinking(data.clone())); - } - }; - } + _ => continue, + }; - let mut cache_message = true; - let mut tool_results_message = LanguageModelRequestMessage { - role: Role::User, - content: Vec::new(), - cache: false, - }; - for (tool_use, tool_result) in self.tool_use.tool_results(message.id) { - if let Some(tool_result) = tool_result { - request_message - .content - .push(MessageContent::ToolUse(tool_use.clone())); - tool_results_message - .content - .push(MessageContent::ToolResult(LanguageModelToolResult { - tool_use_id: tool_use.id.clone(), - tool_name: tool_result.tool_name.clone(), - is_error: tool_result.is_error, - content: if tool_result.content.is_empty() { - // Surprisingly, the API fails if we return an empty string here. - // It thinks we are sending a tool use without a tool result. - "".into() - } else { - tool_result.content.clone() - }, - output: None, - })); - } else { - cache_message = false; - log::debug!( - "skipped tool use {:?} because it is still pending", - tool_use - ); + let mut lines = text.lines(); + summary.extend(lines.next()); } - } - if cache_message { - message_ix_to_cache = Some(request.messages.len()); - } - request.messages.push(request_message); + log::debug!("Setting summary: {}", summary); + let summary = SharedString::from(summary); - if !tool_results_message.content.is_empty() { - if cache_message { - message_ix_to_cache = Some(request.messages.len()); - } - request.messages.push(tool_results_message); - } - } + this.update(cx, |this, cx| { + this.summary = Some(summary.clone()); + this.pending_summary_generation = None; + cx.notify() + }) + .ok()?; - // https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching - if let Some(message_ix_to_cache) = message_ix_to_cache { - request.messages[message_ix_to_cache].cache = true; - } + Some(summary) + }) + .shared(); + self.pending_summary_generation = Some(task.clone()); + task + } - request.tools = available_tools; - request.mode = if model.supports_burn_mode() { - Some(self.completion_mode.into()) - } else { - Some(CompletionMode::Normal.into()) + fn generate_title(&mut self, cx: &mut Context) { + let Some(model) = self.summarization_model.clone() else { + return; }; - request - } - - fn to_summarize_request( - &self, - model: &Arc, - intent: CompletionIntent, - added_user_message: String, - cx: &App, - ) -> LanguageModelRequest { + log::debug!( + "Generating title with model: {:?}", + self.summarization_model.as_ref().map(|model| model.name()) + ); let mut request = LanguageModelRequest { - thread_id: None, - prompt_id: None, - intent: Some(intent), - mode: None, - messages: vec![], - tools: Vec::new(), - tool_choice: None, - stop: Vec::new(), - temperature: AgentSettings::temperature_for_model(model, cx), - thinking_allowed: false, + intent: Some(CompletionIntent::ThreadSummarization), + temperature: AgentSettings::temperature_for_model(&model, cx), + ..Default::default() }; for message in &self.messages { - let mut request_message = LanguageModelRequestMessage { - role: message.role, - content: Vec::new(), - cache: false, - }; - - for segment in &message.segments { - match segment { - MessageSegment::Text(text) => request_message - .content - .push(MessageContent::Text(text.clone())), - MessageSegment::Thinking { .. 
} => {} - MessageSegment::RedactedThinking(_) => {} - } - } - - if request_message.content.is_empty() { - continue; - } - - request.messages.push(request_message); + request.messages.extend(message.to_request()); } request.messages.push(LanguageModelRequestMessage { role: Role::User, - content: vec![MessageContent::Text(added_user_message)], + content: vec![SUMMARIZE_THREAD_PROMPT.into()], cache: false, }); + self.pending_title_generation = Some(cx.spawn(async move |this, cx| { + let mut title = String::new(); - request - } + let generate = async { + let mut messages = model.stream_completion(request, cx).await?; + while let Some(event) = messages.next().await { + let event = event?; + let text = match event { + LanguageModelCompletionEvent::Text(text) => text, + LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + ) => { + this.update(cx, |thread, cx| { + thread.update_model_request_usage(amount, limit, cx); + })?; + continue; + } + _ => continue, + }; - /// Insert auto-generated notifications (if any) to the thread - fn flush_notifications( - &mut self, - model: Arc, - intent: CompletionIntent, - cx: &mut Context, - ) { - match intent { - CompletionIntent::UserPrompt | CompletionIntent::ToolResults => { - if let Some(pending_tool_use) = self.attach_tracked_files_state(model, cx) { - cx.emit(ThreadEvent::ToolFinished { - tool_use_id: pending_tool_use.id.clone(), - pending_tool_use: Some(pending_tool_use), - }); + let mut lines = text.lines(); + title.extend(lines.next()); + + // Stop if the LLM generated multiple lines. + if lines.next().is_some() { + break; + } } + anyhow::Ok(()) + }; + + if generate.await.context("failed to generate title").is_ok() { + _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); } - CompletionIntent::ThreadSummarization - | CompletionIntent::ThreadContextSummarization - | CompletionIntent::CreateFile - | CompletionIntent::EditFile - | CompletionIntent::InlineAssist - | CompletionIntent::TerminalInlineAssist - | CompletionIntent::GenerateGitCommitMessage => {} - }; + _ = this.update(cx, |this, _| this.pending_title_generation = None); + })); } - fn attach_tracked_files_state( - &mut self, - model: Arc, - cx: &mut App, - ) -> Option { - // Represent notification as a simulated `project_notifications` tool call - let tool_name = Arc::from("project_notifications"); - let tool = self.tools.read(cx).tool(&tool_name, cx)?; - - if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) { - return None; + pub fn set_title(&mut self, title: SharedString, cx: &mut Context) { + self.pending_title_generation = None; + if Some(&title) != self.title.as_ref() { + self.title = Some(title); + cx.emit(TitleUpdated); + cx.notify(); } + } - if self - .action_log - .update(cx, |log, cx| log.unnotified_user_edits(cx).is_none()) - { - return None; - } + fn clear_summary(&mut self) { + self.summary = None; + self.pending_summary_generation = None; + } - let input = serde_json::json!({}); - let request = Arc::new(LanguageModelRequest::default()); // unused - let window = None; - let tool_result = tool.run( - input, - request, - self.project.clone(), - self.action_log.clone(), - model.clone(), - window, - cx, - ); + fn last_user_message(&self) -> Option<&UserMessage> { + self.messages + .iter() + .rev() + .find_map(|message| match message { + Message::User(user_message) => Some(user_message), + Message::Agent(_) => None, + Message::Resume => None, + }) + } - let tool_use_id = - 
LanguageModelToolUseId::from(format!("project_notifications_{}", self.messages.len())); + fn pending_message(&mut self) -> &mut AgentMessage { + self.pending_message.get_or_insert_default() + } - let tool_use = LanguageModelToolUse { - id: tool_use_id.clone(), - name: tool_name.clone(), - raw_input: "{}".to_string(), - input: serde_json::json!({}), - is_input_complete: true, + fn flush_pending_message(&mut self, cx: &mut Context) { + let Some(mut message) = self.pending_message.take() else { + return; }; - let tool_output = cx.background_executor().block(tool_result.output); + if message.content.is_empty() { + return; + } - // Attach a project_notification tool call to the latest existing - // Assistant message. We cannot create a new Assistant message - // because thinking models require a `thinking` block that we - // cannot mock. We cannot send a notification as a normal - // (non-tool-use) User message because this distracts Agent - // too much. - let tool_message_id = self - .messages - .iter() - .enumerate() - .rfind(|(_, message)| message.role == Role::Assistant) - .map(|(_, message)| message.id)?; - - let tool_use_metadata = ToolUseMetadata { - model: model.clone(), - thread_id: self.id.clone(), - prompt_id: self.last_prompt_id.clone(), - }; + for content in &message.content { + let AgentMessageContent::ToolUse(tool_use) = content else { + continue; + }; - self.tool_use - .request_tool_use(tool_message_id, tool_use, tool_use_metadata, cx); + if !message.tool_results.contains_key(&tool_use.id) { + message.tool_results.insert( + tool_use.id.clone(), + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name.clone(), + is_error: true, + content: LanguageModelToolResultContent::Text(TOOL_CANCELED_MESSAGE.into()), + output: None, + }, + ); + } + } - self.tool_use.insert_tool_output( - tool_use_id, - tool_name, - tool_output, - self.configured_model.as_ref(), - self.completion_mode, - ) + self.messages.push(Message::Agent(message)); + self.updated_at = Utc::now(); + self.clear_summary(); + cx.notify() } - pub fn stream_completion( - &mut self, - request: LanguageModelRequest, - model: Arc, - intent: CompletionIntent, - window: Option, - cx: &mut Context, - ) { - self.tool_use_limit_reached = false; - - let pending_completion_id = post_inc(&mut self.completion_count); - let mut request_callback_parameters = if self.request_callback.is_some() { - Some((request.clone(), Vec::new())) + pub(crate) fn build_completion_request( + &self, + completion_intent: CompletionIntent, + cx: &App, + ) -> Result { + let model = self.model().context("No language model configured")?; + let tools = if let Some(turn) = self.running_turn.as_ref() { + turn.tools + .iter() + .filter_map(|(tool_name, tool)| { + log::trace!("Including tool: {}", tool_name); + Some(LanguageModelRequestTool { + name: tool_name.to_string(), + description: tool.description().to_string(), + input_schema: tool.input_schema(model.tool_input_format()).log_err()?, + }) + }) + .collect::>() } else { - None - }; - let prompt_id = self.last_prompt_id.clone(); - let tool_use_metadata = ToolUseMetadata { - model: model.clone(), - thread_id: self.id.clone(), - prompt_id: prompt_id.clone(), + Vec::new() }; - let completion_mode = request - .mode - .unwrap_or(cloud_llm_client::CompletionMode::Normal); - - self.last_received_chunk_at = Some(Instant::now()); - - let task = cx.spawn(async move |thread, cx| { - let stream_completion_future = model.stream_completion(request, cx); - let initial_token_usage = - 
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage); - let stream_completion = async { - let mut events = stream_completion_future.await?; + log::debug!("Building completion request"); + log::debug!("Completion intent: {:?}", completion_intent); + log::debug!("Completion mode: {:?}", self.completion_mode); - let mut stop_reason = StopReason::EndTurn; - let mut current_token_usage = TokenUsage::default(); + let messages = self.build_request_messages(cx); + log::debug!("Request will include {} messages", messages.len()); + log::debug!("Request includes {} tools", tools.len()); - thread - .update(cx, |_thread, cx| { - cx.emit(ThreadEvent::NewRequest); - }) - .ok(); + let request = LanguageModelRequest { + thread_id: Some(self.id.to_string()), + prompt_id: Some(self.prompt_id.to_string()), + intent: Some(completion_intent), + mode: Some(self.completion_mode.into()), + messages, + tools, + tool_choice: None, + stop: Vec::new(), + temperature: AgentSettings::temperature_for_model(model, cx), + thinking_allowed: true, + }; - let mut request_assistant_message_id = None; + log::debug!("Completion request built successfully"); + Ok(request) + } - while let Some(event) = events.next().await { - if let Some((_, response_events)) = request_callback_parameters.as_mut() { - response_events - .push(event.as_ref().map_err(|error| error.to_string()).cloned()); - } - - thread.update(cx, |thread, cx| { - match event? { - LanguageModelCompletionEvent::StartMessage { .. } => { - request_assistant_message_id = - Some(thread.insert_assistant_message( - vec![MessageSegment::Text(String::new())], - cx, - )); - } - LanguageModelCompletionEvent::Stop(reason) => { - stop_reason = reason; - } - LanguageModelCompletionEvent::UsageUpdate(token_usage) => { - thread.update_token_usage_at_last_message(token_usage); - thread.cumulative_token_usage = thread.cumulative_token_usage - + token_usage - - current_token_usage; - current_token_usage = token_usage; - } - LanguageModelCompletionEvent::Text(chunk) => { - thread.received_chunk(); - - cx.emit(ThreadEvent::ReceivedTextChunk); - if let Some(last_message) = thread.messages.last_mut() { - if last_message.role == Role::Assistant - && !thread.tool_use.has_tool_results(last_message.id) - { - last_message.push_text(&chunk); - cx.emit(ThreadEvent::StreamedAssistantText( - last_message.id, - chunk, - )); - } else { - // If we won't have an Assistant message yet, assume this chunk marks the beginning - // of a new Assistant response. - // - // Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it - // will result in duplicating the text of the chunk in the rendered Markdown. - request_assistant_message_id = - Some(thread.insert_assistant_message( - vec![MessageSegment::Text(chunk.to_string())], - cx, - )); - }; - } - } - LanguageModelCompletionEvent::Thinking { - text: chunk, - signature, - } => { - thread.received_chunk(); - - if let Some(last_message) = thread.messages.last_mut() { - if last_message.role == Role::Assistant - && !thread.tool_use.has_tool_results(last_message.id) - { - last_message.push_thinking(&chunk, signature); - cx.emit(ThreadEvent::StreamedAssistantThinking( - last_message.id, - chunk, - )); - } else { - // If we won't have an Assistant message yet, assume this chunk marks the beginning - // of a new Assistant response. - // - // Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it - // will result in duplicating the text of the chunk in the rendered Markdown. 
- request_assistant_message_id = - Some(thread.insert_assistant_message( - vec![MessageSegment::Thinking { - text: chunk.to_string(), - signature, - }], - cx, - )); - }; - } - } - LanguageModelCompletionEvent::RedactedThinking { data } => { - thread.received_chunk(); - - if let Some(last_message) = thread.messages.last_mut() { - if last_message.role == Role::Assistant - && !thread.tool_use.has_tool_results(last_message.id) - { - last_message.push_redacted_thinking(data); - } else { - request_assistant_message_id = - Some(thread.insert_assistant_message( - vec![MessageSegment::RedactedThinking(data)], - cx, - )); - }; - } - } - LanguageModelCompletionEvent::ToolUse(tool_use) => { - let last_assistant_message_id = request_assistant_message_id - .unwrap_or_else(|| { - let new_assistant_message_id = - thread.insert_assistant_message(vec![], cx); - request_assistant_message_id = - Some(new_assistant_message_id); - new_assistant_message_id - }); - - let tool_use_id = tool_use.id.clone(); - let streamed_input = if tool_use.is_input_complete { - None - } else { - Some(tool_use.input.clone()) - }; - - let ui_text = thread.tool_use.request_tool_use( - last_assistant_message_id, - tool_use, - tool_use_metadata.clone(), - cx, - ); - - if let Some(input) = streamed_input { - cx.emit(ThreadEvent::StreamedToolUse { - tool_use_id, - ui_text, - input, - }); - } - } - LanguageModelCompletionEvent::ToolUseJsonParseError { - id, - tool_name, - raw_input: invalid_input_json, - json_parse_error, - } => { - thread.receive_invalid_tool_json( - id, - tool_name, - invalid_input_json, - json_parse_error, - window, - cx, - ); - } - LanguageModelCompletionEvent::StatusUpdate(status_update) => { - if let Some(completion) = thread - .pending_completions - .iter_mut() - .find(|completion| completion.id == pending_completion_id) - { - match status_update { - CompletionRequestStatus::Queued { position } => { - completion.queue_state = - QueueState::Queued { position }; - } - CompletionRequestStatus::Started => { - completion.queue_state = QueueState::Started; - } - CompletionRequestStatus::Failed { - code, - message, - request_id: _, - retry_after, - } => { - return Err( - LanguageModelCompletionError::from_cloud_failure( - model.upstream_provider_name(), - code, - message, - retry_after.map(Duration::from_secs_f64), - ), - ); - } - CompletionRequestStatus::UsageUpdated { amount, limit } => { - thread.update_model_request_usage( - amount as u32, - limit, - cx, - ); - } - CompletionRequestStatus::ToolUseLimitReached => { - thread.tool_use_limit_reached = true; - cx.emit(ThreadEvent::ToolUseLimitReached); - } - } - } - } - } - - thread.touch_updated_at(); - cx.emit(ThreadEvent::StreamedCompletion); - cx.notify(); - - Ok(()) - })??; + fn enabled_tools( + &self, + profile: &AgentProfileSettings, + model: &Arc, + cx: &App, + ) -> BTreeMap> { + fn truncate(tool_name: &SharedString) -> SharedString { + if tool_name.len() > MAX_TOOL_NAME_LENGTH { + let mut truncated = tool_name.to_string(); + truncated.truncate(MAX_TOOL_NAME_LENGTH); + truncated.into() + } else { + tool_name.clone() + } + } - smol::future::yield_now().await; + let mut tools = self + .tools + .iter() + .filter_map(|(tool_name, tool)| { + if tool.supports_provider(&model.provider_id()) + && profile.is_tool_enabled(tool_name) + { + Some((truncate(tool_name), tool.clone())) + } else { + None } - - thread.update(cx, |thread, cx| { - thread.last_received_chunk_at = None; - thread - .pending_completions - .retain(|completion| completion.id != pending_completion_id); - - 
// If there is a response without tool use, summarize the message. Otherwise, - // allow two tool uses before summarizing. - if matches!(thread.summary, ThreadSummary::Pending) - && thread.messages.len() >= 2 - && (!thread.has_pending_tool_uses() || thread.messages.len() >= 6) - { - thread.summarize(cx); - } - })?; - - anyhow::Ok(stop_reason) - }; - - let result = stream_completion.await; - let mut retry_scheduled = false; - - thread - .update(cx, |thread, cx| { - thread.finalize_pending_checkpoint(cx); - match result.as_ref() { - Ok(stop_reason) => { - match stop_reason { - StopReason::ToolUse => { - let tool_uses = - thread.use_pending_tools(window, model.clone(), cx); - cx.emit(ThreadEvent::UsePendingTools { tool_uses }); - } - StopReason::EndTurn | StopReason::MaxTokens => { - thread.project.update(cx, |project, cx| { - project.set_agent_location(None, cx); - }); - } - StopReason::Refusal => { - thread.project.update(cx, |project, cx| { - project.set_agent_location(None, cx); - }); - - // Remove the turn that was refused. - // - // https://docs.anthropic.com/en/docs/test-and-evaluate/strengthen-guardrails/handle-streaming-refusals#reset-context-after-refusal - { - let mut messages_to_remove = Vec::new(); - - for (ix, message) in - thread.messages.iter().enumerate().rev() - { - messages_to_remove.push(message.id); - - if message.role == Role::User { - if ix == 0 { - break; - } - - if let Some(prev_message) = - thread.messages.get(ix - 1) - && prev_message.role == Role::Assistant { - break; - } - } - } - - for message_id in messages_to_remove { - thread.delete_message(message_id, cx); - } - } - - cx.emit(ThreadEvent::ShowError(ThreadError::Message { - header: "Language model refusal".into(), - message: - "Model refused to generate content for safety reasons." - .into(), - })); - } - } - - // We successfully completed, so cancel any remaining retries. - thread.retry_state = None; - } - Err(error) => { - thread.project.update(cx, |project, cx| { - project.set_agent_location(None, cx); - }); - - if error.is::() { - cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired)); - } else if let Some(error) = - error.downcast_ref::() - { - cx.emit(ThreadEvent::ShowError( - ThreadError::ModelRequestLimitReached { plan: error.plan }, - )); - } else if let Some(completion_error) = - error.downcast_ref::() - { - match &completion_error { - LanguageModelCompletionError::PromptTooLarge { - tokens, .. - } => { - let tokens = tokens.unwrap_or_else(|| { - // We didn't get an exact token count from the API, so fall back on our estimate. - thread - .total_token_usage() - .map(|usage| usage.total) - .unwrap_or(0) - // We know the context window was exceeded in practice, so if our estimate was - // lower than max tokens, the estimate was wrong; return that we exceeded by 1. 
- .max( - model - .max_token_count_for_mode(completion_mode) - .saturating_add(1), - ) - }); - thread.exceeded_window_error = Some(ExceededWindowError { - model_id: model.id(), - token_count: tokens, - }); - cx.notify(); - } - _ => { - if let Some(retry_strategy) = - Thread::get_retry_strategy(completion_error) - { - log::info!( - "Retrying with {:?} for language model completion error {:?}", - retry_strategy, - completion_error - ); - - retry_scheduled = thread - .handle_retryable_error_with_delay( - completion_error, - Some(retry_strategy), - model.clone(), - intent, - window, - cx, - ); - } - } - } - } - - if !retry_scheduled { - thread.cancel_last_completion(window, cx); - } - } - } - - if !retry_scheduled { - cx.emit(ThreadEvent::Stopped(result.map_err(Arc::new))); - } - - if let Some((request_callback, (request, response_events))) = thread - .request_callback - .as_mut() - .zip(request_callback_parameters.as_ref()) - { - request_callback(request, response_events); + }) + .collect::>(); + + let mut context_server_tools = Vec::new(); + let mut seen_tools = tools.keys().cloned().collect::>(); + let mut duplicate_tool_names = HashSet::default(); + for (server_id, server_tools) in self.context_server_registry.read(cx).servers() { + for (tool_name, tool) in server_tools { + if profile.is_context_server_tool_enabled(&server_id.0, &tool_name) { + let tool_name = truncate(tool_name); + if !seen_tools.insert(tool_name.clone()) { + duplicate_tool_names.insert(tool_name.clone()); } + context_server_tools.push((server_id.clone(), tool_name, tool.clone())); + } + } + } - if let Ok(initial_usage) = initial_token_usage { - let usage = thread.cumulative_token_usage - initial_usage; - - telemetry::event!( - "Assistant Thread Completion", - thread_id = thread.id().to_string(), - prompt_id = prompt_id, - model = model.telemetry_id(), - model_provider = model.provider_id().to_string(), - input_tokens = usage.input_tokens, - output_tokens = usage.output_tokens, - cache_creation_input_tokens = usage.cache_creation_input_tokens, - cache_read_input_tokens = usage.cache_read_input_tokens, - ); - } - }) - .ok(); - }); + // When there are duplicate tool names, disambiguate by prefixing them + // with the server ID. In the rare case there isn't enough space for the + // disambiguated tool name, keep only the last tool with this name. 
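+ // For example (hypothetical names): if both a "github" and a "linear" context server
+ // expose a tool called "search", they are registered as "github_search" and
+ // "linear_search", with the server ID truncated as needed to stay within
+ // MAX_TOOL_NAME_LENGTH.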
+ for (server_id, tool_name, tool) in context_server_tools { + if duplicate_tool_names.contains(&tool_name) { + let available = MAX_TOOL_NAME_LENGTH.saturating_sub(tool_name.len()); + if available >= 2 { + let mut disambiguated = server_id.0.to_string(); + disambiguated.truncate(available - 1); + disambiguated.push('_'); + disambiguated.push_str(&tool_name); + tools.insert(disambiguated.into(), tool.clone()); + } else { + tools.insert(tool_name, tool.clone()); + } + } else { + tools.insert(tool_name, tool.clone()); + } + } - self.pending_completions.push(PendingCompletion { - id: pending_completion_id, - queue_state: QueueState::Sending, - _task: task, - }); + tools } - pub fn summarize(&mut self, cx: &mut Context) { - let Some(model) = LanguageModelRegistry::read_global(cx).thread_summary_model() else { - println!("No thread summary model"); - return; - }; - - if !model.provider.is_authenticated(cx) { - return; - } + fn tool(&self, name: &str) -> Option> { + self.running_turn.as_ref()?.tools.get(name).cloned() + } - let request = self.to_summarize_request( - &model.model, - CompletionIntent::ThreadSummarization, - SUMMARIZE_THREAD_PROMPT.into(), - cx, + fn build_request_messages(&self, cx: &App) -> Vec { + log::trace!( + "Building request messages from {} thread messages", + self.messages.len() ); - self.summary = ThreadSummary::Generating; - - self.pending_summary = cx.spawn(async move |this, cx| { - let result = async { - let mut messages = model.model.stream_completion(request, cx).await?; + let system_prompt = SystemPromptTemplate { + project: self.project_context.read(cx), + available_tools: self.tools.keys().cloned().collect(), + } + .render(&self.templates) + .context("failed to build system prompt") + .expect("Invalid template"); + let mut messages = vec![LanguageModelRequestMessage { + role: Role::System, + content: vec![system_prompt.into()], + cache: false, + }]; + for message in &self.messages { + messages.extend(message.to_request()); + } - let mut new_summary = String::new(); - while let Some(event) = messages.next().await { - let Ok(event) = event else { - continue; - }; - let text = match event { - LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { - this.update(cx, |thread, cx| { - thread.update_model_request_usage(amount as u32, limit, cx); - })?; - continue; - } - _ => continue, - }; + if let Some(last_message) = messages.last_mut() { + last_message.cache = true; + } - let mut lines = text.lines(); - new_summary.extend(lines.next()); + if let Some(message) = self.pending_message.as_ref() { + messages.extend(message.to_request()); + } - // Stop if the LLM generated multiple lines. 
- if lines.next().is_some() { - break; - } - } + messages + } - anyhow::Ok(new_summary) + pub fn to_markdown(&self) -> String { + let mut markdown = String::new(); + for (ix, message) in self.messages.iter().enumerate() { + if ix > 0 { + markdown.push('\n'); } - .await; + markdown.push_str(&message.to_markdown()); + } - this.update(cx, |this, cx| { - match result { - Ok(new_summary) => { - if new_summary.is_empty() { - this.summary = ThreadSummary::Error; - } else { - this.summary = ThreadSummary::Ready(new_summary.into()); - } - } - Err(err) => { - this.summary = ThreadSummary::Error; - log::error!("Failed to generate thread summary: {}", err); - } - } - cx.emit(ThreadEvent::SummaryGenerated); - }) - .log_err()?; + if let Some(message) = self.pending_message.as_ref() { + markdown.push('\n'); + markdown.push_str(&message.to_markdown()); + } - Some(()) - }); + markdown + } + + fn advance_prompt_id(&mut self) { + self.prompt_id = PromptId::new(); } - fn get_retry_strategy(error: &LanguageModelCompletionError) -> Option { + fn retry_strategy_for(error: &LanguageModelCompletionError) -> Option { use LanguageModelCompletionError::*; + use http_client::StatusCode; // General strategy here: // - If retrying won't help (e.g. invalid API key or payload too large), return None so we don't retry at all. @@ -2205,8 +2054,8 @@ impl Thread { }) } Other(err) - if err.is::() - || err.is::() => + if err.is::() + || err.is::() => { // Retrying won't help for Payment Required or Model Request Limit errors (where // the user must upgrade to usage-based billing to get more requests, or else wait @@ -2220,3166 +2069,561 @@ impl Thread { }), } } +} - fn handle_retryable_error_with_delay( - &mut self, - error: &LanguageModelCompletionError, - strategy: Option, - model: Arc, - intent: CompletionIntent, - window: Option, - cx: &mut Context, - ) -> bool { - // Store context for the Retry button - self.last_error_context = Some((model.clone(), intent)); - - // Only auto-retry if Burn Mode is enabled - if self.completion_mode != CompletionMode::Burn { - // Show error with retry options - cx.emit(ThreadEvent::ShowError(ThreadError::RetryableError { - message: format!( - "{}\n\nTo automatically retry when similar errors happen, enable Burn Mode.", - error - ) - .into(), - can_enable_burn_mode: true, - })); - return false; - } +struct RunningTurn { + /// Holds the task that handles agent interaction until the end of the turn. + /// Survives across multiple requests as the model performs tool calls and + /// we run tools, report their results. + _task: Task<()>, + /// The current event stream for the running turn. Used to report a final + /// cancellation event if we cancel the turn. + event_stream: ThreadEventStream, + /// The tools that were enabled for this turn. + tools: BTreeMap>, +} - let Some(strategy) = strategy.or_else(|| Self::get_retry_strategy(error)) else { - return false; - }; +impl RunningTurn { + fn cancel(self) { + log::debug!("Cancelling in progress turn"); + self.event_stream.send_canceled(); + } +} - let max_attempts = match &strategy { - RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, - RetryStrategy::Fixed { max_attempts, .. 
} => *max_attempts, - }; +pub struct TokenUsageUpdated(pub Option); - let retry_state = self.retry_state.get_or_insert(RetryState { - attempt: 0, - max_attempts, - intent, - }); +impl EventEmitter for Thread {} - retry_state.attempt += 1; - let attempt = retry_state.attempt; - let max_attempts = retry_state.max_attempts; - let intent = retry_state.intent; +pub struct TitleUpdated; - if attempt <= max_attempts { - let delay = match &strategy { - RetryStrategy::ExponentialBackoff { initial_delay, .. } => { - let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); - Duration::from_secs(delay_secs) - } - RetryStrategy::Fixed { delay, .. } => *delay, - }; +impl EventEmitter for Thread {} - // Add a transient message to inform the user - let delay_secs = delay.as_secs(); - let retry_message = if max_attempts == 1 { - format!("{error}. Retrying in {delay_secs} seconds...") - } else { - format!( - "{error}. Retrying (attempt {attempt} of {max_attempts}) \ - in {delay_secs} seconds..." - ) - }; - log::warn!( - "Retrying completion request (attempt {attempt} of {max_attempts}) \ - in {delay_secs} seconds: {error:?}", - ); +pub trait AgentTool +where + Self: 'static + Sized, +{ + type Input: for<'de> Deserialize<'de> + Serialize + JsonSchema; + type Output: for<'de> Deserialize<'de> + Serialize + Into; - // Add a UI-only message instead of a regular message - let id = self.next_message_id.post_inc(); - self.messages.push(Message { - id, - role: Role::System, - segments: vec![MessageSegment::Text(retry_message)], - loaded_context: LoadedContext::default(), - creases: Vec::new(), - is_hidden: false, - ui_only: true, - }); - cx.emit(ThreadEvent::MessageAdded(id)); + fn name() -> &'static str; - // Schedule the retry - let thread_handle = cx.entity().downgrade(); + fn description() -> SharedString { + let schema = schemars::schema_for!(Self::Input); + SharedString::new( + schema + .get("description") + .and_then(|description| description.as_str()) + .unwrap_or_default(), + ) + } - cx.spawn(async move |_thread, cx| { - cx.background_executor().timer(delay).await; + fn kind() -> acp::ToolKind; - thread_handle - .update(cx, |thread, cx| { - // Retry the completion - thread.send_to_model(model, intent, window, cx); - }) - .log_err(); - }) - .detach(); + /// The initial tool title to display. Can be updated during the tool run. + fn initial_title( + &self, + input: Result, + cx: &mut App, + ) -> SharedString; - true - } else { - // Max retries exceeded - self.retry_state = None; + /// Returns the JSON schema that describes the tool's input. + fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema { + crate::tool_schema::root_schema_for::(format) + } - // Stop generating since we're giving up on retrying. - self.pending_completions.clear(); + /// Some tools rely on a provider for the underlying billing or other reasons. + /// Allow the tool to check if they are compatible, or should be filtered out. + fn supports_provider(_provider: &LanguageModelProviderId) -> bool { + true + } - // Show error alongside a Retry button, but no - // Enable Burn Mode button (since it's already enabled) - cx.emit(ThreadEvent::ShowError(ThreadError::RetryableError { - message: format!("Failed after retrying: {}", error).into(), - can_enable_burn_mode: false, - })); + /// Runs the tool with the provided input. + fn run( + self: Arc, + input: Self::Input, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task>; - false - } + /// Emits events for a previous execution of the tool. 
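+ /// The default implementation emits nothing and simply returns `Ok(())`; implementations
+ /// can override it to re-emit tool call updates for the given input and output.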
+ fn replay( + &self, + _input: Self::Input, + _output: Self::Output, + _event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> Result<()> { + Ok(()) } - pub fn start_generating_detailed_summary_if_needed( - &mut self, - thread_store: WeakEntity, - cx: &mut Context, - ) { - let Some(last_message_id) = self.messages.last().map(|message| message.id) else { - return; - }; + fn erase(self) -> Arc { + Arc::new(Erased(Arc::new(self))) + } +} - match &*self.detailed_summary_rx.borrow() { - DetailedSummaryState::Generating { message_id, .. } - | DetailedSummaryState::Generated { message_id, .. } - if *message_id == last_message_id => - { - // Already up-to-date - return; - } - _ => {} - } +pub struct Erased(T); - let Some(ConfiguredModel { model, provider }) = - LanguageModelRegistry::read_global(cx).thread_summary_model() - else { - return; - }; +pub struct AgentToolOutput { + pub llm_output: LanguageModelToolResultContent, + pub raw_output: serde_json::Value, +} - if !provider.is_authenticated(cx) { - return; - } +pub trait AnyAgentTool { + fn name(&self) -> SharedString; + fn description(&self) -> SharedString; + fn kind(&self) -> acp::ToolKind; + fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString; + fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result; + fn supports_provider(&self, _provider: &LanguageModelProviderId) -> bool { + true + } + fn run( + self: Arc, + input: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task>; + fn replay( + &self, + input: serde_json::Value, + output: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()>; +} - let request = self.to_summarize_request( - &model, - CompletionIntent::ThreadContextSummarization, - SUMMARIZE_THREAD_DETAILED_PROMPT.into(), - cx, - ); +impl AnyAgentTool for Erased> +where + T: AgentTool, +{ + fn name(&self) -> SharedString { + T::name().into() + } - *self.detailed_summary_tx.borrow_mut() = DetailedSummaryState::Generating { - message_id: last_message_id, - }; + fn description(&self) -> SharedString { + T::description() + } - // Replace the detailed summarization task if there is one, cancelling it. It would probably - // be better to allow the old task to complete, but this would require logic for choosing - // which result to prefer (the old task could complete after the new one, resulting in a - // stale summary). 
- self.detailed_summary_task = cx.spawn(async move |thread, cx| { - let stream = model.stream_completion_text(request, cx); - let Some(mut messages) = stream.await.log_err() else { - thread - .update(cx, |thread, _cx| { - *thread.detailed_summary_tx.borrow_mut() = - DetailedSummaryState::NotGenerated; - }) - .ok()?; - return None; - }; + fn kind(&self) -> agent_client_protocol::ToolKind { + T::kind() + } - let mut new_detailed_summary = String::new(); + fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString { + let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input); + self.0.initial_title(parsed_input, _cx) + } - while let Some(chunk) = messages.stream.next().await { - if let Some(chunk) = chunk.log_err() { - new_detailed_summary.push_str(&chunk); - } - } + fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { + let mut json = serde_json::to_value(T::input_schema(format))?; + crate::tool_schema::adapt_schema_to_format(&mut json, format)?; + Ok(json) + } - thread - .update(cx, |thread, _cx| { - *thread.detailed_summary_tx.borrow_mut() = DetailedSummaryState::Generated { - text: new_detailed_summary.into(), - message_id: last_message_id, - }; - }) - .ok()?; + fn supports_provider(&self, provider: &LanguageModelProviderId) -> bool { + T::supports_provider(provider) + } - // Save thread so its summary can be reused later - if let Some(thread) = thread.upgrade() - && let Ok(Ok(save_task)) = cx.update(|cx| { - thread_store - .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx)) - }) - { - save_task.await.log_err(); - } + fn run( + self: Arc, + input: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |cx| { + let input = serde_json::from_value(input)?; + let output = cx + .update(|cx| self.0.clone().run(input, event_stream, cx))? + .await?; + let raw_output = serde_json::to_value(&output)?; + Ok(AgentToolOutput { + llm_output: output.into(), + raw_output, + }) + }) + } - Some(()) - }); + fn replay( + &self, + input: serde_json::Value, + output: serde_json::Value, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()> { + let input = serde_json::from_value(input)?; + let output = serde_json::from_value(output)?; + self.0.replay(input, output, event_stream, cx) } +} - pub async fn wait_for_detailed_summary_or_text( - this: &Entity, - cx: &mut AsyncApp, - ) -> Option { - let mut detailed_summary_rx = this - .read_with(cx, |this, _cx| this.detailed_summary_rx.clone()) - .ok()?; - loop { - match detailed_summary_rx.recv().await? { - DetailedSummaryState::Generating { .. } => {} - DetailedSummaryState::NotGenerated => { - return this.read_with(cx, |this, _cx| this.text().into()).ok(); - } - DetailedSummaryState::Generated { text, .. } => return Some(text), - } - } +#[derive(Clone)] +struct ThreadEventStream(mpsc::UnboundedSender>); + +impl ThreadEventStream { + fn send_user_message(&self, message: &UserMessage) { + self.0 + .unbounded_send(Ok(ThreadEvent::UserMessage(message.clone()))) + .ok(); } - pub fn latest_detailed_summary_or_text(&self) -> SharedString { - self.detailed_summary_rx - .borrow() - .text() - .unwrap_or_else(|| self.text().into()) + fn send_text(&self, text: &str) { + self.0 + .unbounded_send(Ok(ThreadEvent::AgentText(text.to_string()))) + .ok(); } - pub fn is_generating_detailed_summary(&self) -> bool { - matches!( - &*self.detailed_summary_rx.borrow(), - DetailedSummaryState::Generating { .. 
} - ) + fn send_thinking(&self, text: &str) { + self.0 + .unbounded_send(Ok(ThreadEvent::AgentThinking(text.to_string()))) + .ok(); } - pub fn use_pending_tools( - &mut self, - window: Option, - model: Arc, - cx: &mut Context, - ) -> Vec { - let request = - Arc::new(self.to_completion_request(model.clone(), CompletionIntent::ToolResults, cx)); - let pending_tool_uses = self - .tool_use - .pending_tool_uses() - .into_iter() - .filter(|tool_use| tool_use.status.is_idle()) - .cloned() - .collect::>(); - - for tool_use in pending_tool_uses.iter() { - self.use_pending_tool(tool_use.clone(), request.clone(), model.clone(), window, cx); - } + fn send_tool_call( + &self, + id: &LanguageModelToolUseId, + tool_name: &str, + title: SharedString, + kind: acp::ToolKind, + input: serde_json::Value, + ) { + self.0 + .unbounded_send(Ok(ThreadEvent::ToolCall(Self::initial_tool_call( + id, + tool_name, + title.to_string(), + kind, + input, + )))) + .ok(); + } + + fn initial_tool_call( + id: &LanguageModelToolUseId, + tool_name: &str, + title: String, + kind: acp::ToolKind, + input: serde_json::Value, + ) -> acp::ToolCall { + acp::ToolCall { + meta: Some(serde_json::json!({ + "tool_name": tool_name + })), + id: acp::ToolCallId(id.to_string().into()), + title, + kind, + status: acp::ToolCallStatus::Pending, + content: vec![], + locations: vec![], + raw_input: Some(input), + raw_output: None, + } + } + + fn update_tool_call_fields( + &self, + tool_use_id: &LanguageModelToolUseId, + fields: acp::ToolCallUpdateFields, + ) { + self.0 + .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( + acp::ToolCallUpdate { + meta: None, + id: acp::ToolCallId(tool_use_id.to_string().into()), + fields, + } + .into(), + ))) + .ok(); + } - pending_tool_uses + fn send_retry(&self, status: acp_thread::RetryStatus) { + self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok(); } - fn use_pending_tool( - &mut self, - tool_use: PendingToolUse, - request: Arc, - model: Arc, - window: Option, - cx: &mut Context, - ) { - let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) else { - return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx); - }; + fn send_stop(&self, reason: acp::StopReason) { + self.0.unbounded_send(Ok(ThreadEvent::Stop(reason))).ok(); + } - if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) { - return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx); - } + fn send_canceled(&self) { + self.0 + .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled))) + .ok(); + } - if tool.needs_confirmation(&tool_use.input, &self.project, cx) - && !AgentSettings::get_global(cx).always_allow_tool_actions - { - self.tool_use.confirm_tool_use( - tool_use.id, - tool_use.ui_text, - tool_use.input, - request, - tool, - ); - cx.emit(ThreadEvent::ToolConfirmationNeeded); - } else { - self.run_tool( - tool_use.id, - tool_use.ui_text, - tool_use.input, - request, - tool, - model, - window, - cx, - ); - } + fn send_error(&self, error: impl Into) { + self.0.unbounded_send(Err(error.into())).ok(); } +} - pub fn handle_hallucinated_tool_use( - &mut self, - tool_use_id: LanguageModelToolUseId, - hallucinated_tool_name: Arc, - window: Option, - cx: &mut Context, - ) { - let available_tools = self.profile.enabled_tools(cx); +#[derive(Clone)] +pub struct ToolCallEventStream { + tool_use_id: LanguageModelToolUseId, + stream: ThreadEventStream, + fs: Option>, +} - let tool_list = available_tools - .iter() - .map(|(name, tool)| format!("- {}: {}", name, tool.description())) - 
.collect::>() - .join("\n"); +impl ToolCallEventStream { + #[cfg(any(test, feature = "test-support"))] + pub fn test() -> (Self, ToolCallEventStreamReceiver) { + let (events_tx, events_rx) = mpsc::unbounded::>(); - let error_message = format!( - "The tool '{}' doesn't exist or is not enabled. Available tools:\n{}", - hallucinated_tool_name, tool_list - ); - - let pending_tool_use = self.tool_use.insert_tool_output( - tool_use_id.clone(), - hallucinated_tool_name, - Err(anyhow!("Missing tool call: {error_message}")), - self.configured_model.as_ref(), - self.completion_mode, - ); - - cx.emit(ThreadEvent::MissingToolUse { - tool_use_id: tool_use_id.clone(), - ui_text: error_message.into(), - }); - - self.tool_finished(tool_use_id, pending_tool_use, false, window, cx); - } - - pub fn receive_invalid_tool_json( - &mut self, - tool_use_id: LanguageModelToolUseId, - tool_name: Arc, - invalid_json: Arc, - error: String, - window: Option, - cx: &mut Context, - ) { - log::error!("The model returned invalid input JSON: {invalid_json}"); - - let pending_tool_use = self.tool_use.insert_tool_output( - tool_use_id.clone(), - tool_name, - Err(anyhow!("Error parsing input JSON: {error}")), - self.configured_model.as_ref(), - self.completion_mode, - ); - let ui_text = if let Some(pending_tool_use) = &pending_tool_use { - pending_tool_use.ui_text.clone() - } else { - log::error!( - "There was no pending tool use for tool use {tool_use_id}, even though it finished (with invalid input JSON)." - ); - format!("Unknown tool {}", tool_use_id).into() - }; - - cx.emit(ThreadEvent::InvalidToolInput { - tool_use_id: tool_use_id.clone(), - ui_text, - invalid_input_json: invalid_json, - }); - - self.tool_finished(tool_use_id, pending_tool_use, false, window, cx); - } - - pub fn run_tool( - &mut self, - tool_use_id: LanguageModelToolUseId, - ui_text: impl Into, - input: serde_json::Value, - request: Arc, - tool: Arc, - model: Arc, - window: Option, - cx: &mut Context, - ) { - let task = - self.spawn_tool_use(tool_use_id.clone(), request, input, tool, model, window, cx); - self.tool_use - .run_pending_tool(tool_use_id, ui_text.into(), task); - } - - fn spawn_tool_use( - &mut self, - tool_use_id: LanguageModelToolUseId, - request: Arc, - input: serde_json::Value, - tool: Arc, - model: Arc, - window: Option, - cx: &mut Context, - ) -> Task<()> { - let tool_name: Arc = tool.name().into(); - - let tool_result = tool.run( - input, - request, - self.project.clone(), - self.action_log.clone(), - model, - window, - cx, - ); - - // Store the card separately if it exists - if let Some(card) = tool_result.card.clone() { - self.tool_use - .insert_tool_result_card(tool_use_id.clone(), card); - } + let stream = ToolCallEventStream::new("test_id".into(), ThreadEventStream(events_tx), None); - cx.spawn({ - async move |thread: WeakEntity, cx| { - let output = tool_result.output.await; - - thread - .update(cx, |thread, cx| { - let pending_tool_use = thread.tool_use.insert_tool_output( - tool_use_id.clone(), - tool_name, - output, - thread.configured_model.as_ref(), - thread.completion_mode, - ); - thread.tool_finished(tool_use_id, pending_tool_use, false, window, cx); - }) - .ok(); - } - }) + (stream, ToolCallEventStreamReceiver(events_rx)) } - fn tool_finished( - &mut self, + fn new( tool_use_id: LanguageModelToolUseId, - pending_tool_use: Option, - canceled: bool, - window: Option, - cx: &mut Context, - ) { - if self.all_tools_finished() - && let Some(ConfiguredModel { model, .. 
}) = self.configured_model.as_ref() - && !canceled - { - self.send_to_model(model.clone(), CompletionIntent::ToolResults, window, cx); - } - - cx.emit(ThreadEvent::ToolFinished { + stream: ThreadEventStream, + fs: Option>, + ) -> Self { + Self { tool_use_id, - pending_tool_use, - }); - } - - /// Cancels the last pending completion, if there are any pending. - /// - /// Returns whether a completion was canceled. - pub fn cancel_last_completion( - &mut self, - window: Option, - cx: &mut Context, - ) -> bool { - let mut canceled = self.pending_completions.pop().is_some() || self.retry_state.is_some(); - - self.retry_state = None; - - for pending_tool_use in self.tool_use.cancel_pending() { - canceled = true; - self.tool_finished( - pending_tool_use.id.clone(), - Some(pending_tool_use), - true, - window, - cx, - ); - } - - if canceled { - cx.emit(ThreadEvent::CompletionCanceled); - - // When canceled, we always want to insert the checkpoint. - // (We skip over finalize_pending_checkpoint, because it - // would conclude we didn't have anything to insert here.) - if let Some(checkpoint) = self.pending_checkpoint.take() { - self.insert_checkpoint(checkpoint, cx); - } - } else { - self.finalize_pending_checkpoint(cx); + stream, + fs, } - - canceled } - /// Signals that any in-progress editing should be canceled. - /// - /// This method is used to notify listeners (like ActiveThread) that - /// they should cancel any editing operations. - pub fn cancel_editing(&mut self, cx: &mut Context) { - cx.emit(ThreadEvent::CancelEditing); + pub fn update_fields(&self, fields: acp::ToolCallUpdateFields) { + self.stream + .update_tool_call_fields(&self.tool_use_id, fields); } - pub fn message_feedback(&self, message_id: MessageId) -> Option { - self.message_feedback.get(&message_id).copied() + pub fn update_diff(&self, diff: Entity) { + self.stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( + acp_thread::ToolCallUpdateDiff { + id: acp::ToolCallId(self.tool_use_id.to_string().into()), + diff, + } + .into(), + ))) + .ok(); } - pub fn report_message_feedback( - &mut self, - message_id: MessageId, - feedback: ThreadFeedback, - cx: &mut Context, - ) -> Task> { - if self.message_feedback.get(&message_id) == Some(&feedback) { + pub fn authorize(&self, title: impl Into, cx: &mut App) -> Task> { + if agent_settings::AgentSettings::get_global(cx).always_allow_tool_actions { return Task::ready(Ok(())); } - let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx); - let serialized_thread = self.serialize(cx); - let thread_id = self.id().clone(); - let client = self.project.read(cx).client(); - - let enabled_tool_names: Vec = self - .profile - .enabled_tools(cx) - .iter() - .map(|(name, _)| name.clone().into()) - .collect(); - - self.message_feedback.insert(message_id, feedback); - - cx.notify(); - - let message_content = self - .message(message_id) - .map(|msg| msg.to_message_content()) - .unwrap_or_default(); - - cx.background_spawn(async move { - let final_project_snapshot = final_project_snapshot.await; - let serialized_thread = serialized_thread.await?; - let thread_data = - serde_json::to_value(serialized_thread).unwrap_or_else(|_| serde_json::Value::Null); - - let rating = match feedback { - ThreadFeedback::Positive => "positive", - ThreadFeedback::Negative => "negative", - }; - telemetry::event!( - "Assistant Thread Rated", - rating, - thread_id, - enabled_tool_names, - message_id = message_id.0, - message_content, - thread_data, - final_project_snapshot - ); - 
client.telemetry().flush_events().await; - - Ok(()) - }) - } - - /// Create a snapshot of the current project state including git information and unsaved buffers. - fn project_snapshot( - project: Entity, - cx: &mut Context, - ) -> Task> { - let git_store = project.read(cx).git_store().clone(); - let worktree_snapshots: Vec<_> = project - .read(cx) - .visible_worktrees(cx) - .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx)) - .collect(); - - cx.spawn(async move |_, _| { - let worktree_snapshots = futures::future::join_all(worktree_snapshots).await; - - Arc::new(ProjectSnapshot { - worktree_snapshots, - timestamp: Utc::now(), - }) - }) - } - - fn worktree_snapshot( - worktree: Entity, - git_store: Entity, - cx: &App, - ) -> Task { - cx.spawn(async move |cx| { - // Get worktree path and snapshot - let worktree_info = cx.update(|app_cx| { - let worktree = worktree.read(app_cx); - let path = worktree.abs_path().to_string_lossy().into_owned(); - let snapshot = worktree.snapshot(); - (path, snapshot) - }); - - let Ok((worktree_path, _snapshot)) = worktree_info else { - return WorktreeSnapshot { - worktree_path: String::new(), - git_state: None, - }; - }; - - let git_state = git_store - .update(cx, |git_store, cx| { - git_store - .repositories() - .values() - .find(|repo| { - repo.read(cx) - .abs_path_to_repo_path(&worktree.read(cx).abs_path()) - .is_some() - }) - .cloned() - }) - .ok() - .flatten() - .map(|repo| { - repo.update(cx, |repo, _| { - let current_branch = - repo.branch.as_ref().map(|branch| branch.name().to_owned()); - repo.send_job(None, |state, _| async move { - let RepositoryState::Local { backend, .. } = state else { - return GitState { - remote_url: None, - head_sha: None, - current_branch, - diff: None, - }; - }; - - let remote_url = backend.remote_url("origin"); - let head_sha = backend.head_sha().await; - let diff = backend.diff(DiffType::HeadToWorktree).await.ok(); - - GitState { - remote_url, - head_sha, - current_branch, - diff, - } - }) - }) - }); - - let git_state = match git_state { - Some(git_state) => match git_state.ok() { - Some(git_state) => git_state.await.ok(), - None => None, + let (response_tx, response_rx) = oneshot::channel(); + self.stream + .0 + .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization( + ToolCallAuthorization { + tool_call: acp::ToolCallUpdate { + meta: None, + id: acp::ToolCallId(self.tool_use_id.to_string().into()), + fields: acp::ToolCallUpdateFields { + title: Some(title.into()), + ..Default::default() + }, + }, + options: vec![ + acp::PermissionOption { + id: acp::PermissionOptionId("always_allow".into()), + name: "Always Allow".into(), + kind: acp::PermissionOptionKind::AllowAlways, + meta: None, + }, + acp::PermissionOption { + id: acp::PermissionOptionId("allow".into()), + name: "Allow".into(), + kind: acp::PermissionOptionKind::AllowOnce, + meta: None, + }, + acp::PermissionOption { + id: acp::PermissionOptionId("deny".into()), + name: "Deny".into(), + kind: acp::PermissionOptionKind::RejectOnce, + meta: None, + }, + ], + response: response_tx, }, - None => None, - }; - - WorktreeSnapshot { - worktree_path, - git_state, - } - }) - } - - pub fn to_markdown(&self, cx: &App) -> Result { - let mut markdown = Vec::new(); - - let summary = self.summary().or_default(); - writeln!(markdown, "# {summary}\n")?; - - for message in self.messages() { - writeln!( - markdown, - "## {role}\n", - role = match message.role { - Role::User => "User", - Role::Assistant => "Agent", - Role::System => "System", - } - )?; - - if 
!message.loaded_context.text.is_empty() { - writeln!(markdown, "{}", message.loaded_context.text)?; - } - - if !message.loaded_context.images.is_empty() { - writeln!( - markdown, - "\n{} images attached as context.\n", - message.loaded_context.images.len() - )?; - } - - for segment in &message.segments { - match segment { - MessageSegment::Text(text) => writeln!(markdown, "{}\n", text)?, - MessageSegment::Thinking { text, .. } => { - writeln!(markdown, "\n{}\n\n", text)? - } - MessageSegment::RedactedThinking(_) => {} - } - } - - for tool_use in self.tool_uses_for_message(message.id, cx) { - writeln!( - markdown, - "**Use Tool: {} ({})**", - tool_use.name, tool_use.id - )?; - writeln!(markdown, "```json")?; - writeln!( - markdown, - "{}", - serde_json::to_string_pretty(&tool_use.input)? - )?; - writeln!(markdown, "```")?; - } - - for tool_result in self.tool_results_for_message(message.id) { - write!(markdown, "\n**Tool Results: {}", tool_result.tool_use_id)?; - if tool_result.is_error { - write!(markdown, " (Error)")?; - } - - writeln!(markdown, "**\n")?; - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - writeln!(markdown, "{text}")?; - } - LanguageModelToolResultContent::Image(image) => { - writeln!(markdown, "![Image](data:base64,{})", image.source)?; - } + ))) + .ok(); + let fs = self.fs.clone(); + cx.spawn(async move |cx| match response_rx.await?.0.as_ref() { + "always_allow" => { + if let Some(fs) = fs.clone() { + cx.update(|cx| { + update_settings_file(fs, cx, |settings, _| { + settings + .agent + .get_or_insert_default() + .set_always_allow_tool_actions(true); + }); + })?; } - if let Some(output) = tool_result.output.as_ref() { - writeln!( - markdown, - "\n\nDebug Output:\n\n```json\n{}\n```\n", - serde_json::to_string_pretty(output)? 
- )?; - } + Ok(()) } - } - - Ok(String::from_utf8_lossy(&markdown).to_string()) - } - - pub fn keep_edits_in_range( - &mut self, - buffer: Entity, - buffer_range: Range, - cx: &mut Context, - ) { - self.action_log.update(cx, |action_log, cx| { - action_log.keep_edits_in_range(buffer, buffer_range, cx) - }); - } - - pub fn keep_all_edits(&mut self, cx: &mut Context) { - self.action_log - .update(cx, |action_log, cx| action_log.keep_all_edits(cx)); - } - - pub fn reject_edits_in_ranges( - &mut self, - buffer: Entity, - buffer_ranges: Vec>, - cx: &mut Context, - ) -> Task> { - self.action_log.update(cx, |action_log, cx| { - action_log.reject_edits_in_ranges(buffer, buffer_ranges, cx) + "allow" => Ok(()), + _ => Err(anyhow!("Permission to run tool denied by user")), }) } +} - pub fn action_log(&self) -> &Entity { - &self.action_log - } - - pub fn project(&self) -> &Entity { - &self.project - } - - pub fn cumulative_token_usage(&self) -> TokenUsage { - self.cumulative_token_usage - } - - pub fn token_usage_up_to_message(&self, message_id: MessageId) -> TotalTokenUsage { - let Some(model) = self.configured_model.as_ref() else { - return TotalTokenUsage::default(); - }; - - let max = model - .model - .max_token_count_for_mode(self.completion_mode().into()); - - let index = self - .messages - .iter() - .position(|msg| msg.id == message_id) - .unwrap_or(0); - - if index == 0 { - return TotalTokenUsage { total: 0, max }; - } - - let token_usage = &self - .request_token_usage - .get(index - 1) - .cloned() - .unwrap_or_default(); +#[cfg(any(test, feature = "test-support"))] +pub struct ToolCallEventStreamReceiver(mpsc::UnboundedReceiver>); - TotalTokenUsage { - total: token_usage.total_tokens(), - max, +#[cfg(any(test, feature = "test-support"))] +impl ToolCallEventStreamReceiver { + pub async fn expect_authorization(&mut self) -> ToolCallAuthorization { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::ToolCallAuthorization(auth))) = event { + auth + } else { + panic!("Expected ToolCallAuthorization but got: {:?}", event); } } - pub fn total_token_usage(&self) -> Option { - let model = self.configured_model.as_ref()?; - - let max = model - .model - .max_token_count_for_mode(self.completion_mode().into()); - - if let Some(exceeded_error) = &self.exceeded_window_error - && model.model.id() == exceeded_error.model_id + pub async fn expect_update_fields(&mut self) -> acp::ToolCallUpdateFields { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( + update, + )))) = event { - return Some(TotalTokenUsage { - total: exceeded_error.token_count, - max, - }); + update.fields + } else { + panic!("Expected update fields but got: {:?}", event); } - - let total = self - .token_usage_at_last_message() - .unwrap_or_default() - .total_tokens(); - - Some(TotalTokenUsage { total, max }) - } - - fn token_usage_at_last_message(&self) -> Option { - self.request_token_usage - .get(self.messages.len().saturating_sub(1)) - .or_else(|| self.request_token_usage.last()) - .cloned() } - fn update_token_usage_at_last_message(&mut self, token_usage: TokenUsage) { - let placeholder = self.token_usage_at_last_message().unwrap_or_default(); - self.request_token_usage - .resize(self.messages.len(), placeholder); - - if let Some(last) = self.request_token_usage.last_mut() { - *last = token_usage; + pub async fn expect_diff(&mut self) -> Entity { + let event = self.0.next().await; + if let 
Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateDiff( + update, + )))) = event + { + update.diff + } else { + panic!("Expected diff but got: {:?}", event); } } - fn update_model_request_usage(&self, amount: u32, limit: UsageLimit, cx: &mut Context) { - self.project - .read(cx) - .user_store() - .update(cx, |user_store, cx| { - user_store.update_model_request_usage( - ModelRequestUsage(RequestUsage { - amount: amount as i32, - limit, - }), - cx, - ) - }); + pub async fn expect_terminal(&mut self) -> Entity { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateTerminal( + update, + )))) = event + { + update.terminal + } else { + panic!("Expected terminal but got: {:?}", event); + } } +} - pub fn deny_tool_use( - &mut self, - tool_use_id: LanguageModelToolUseId, - tool_name: Arc, - window: Option, - cx: &mut Context, - ) { - let err = Err(anyhow::anyhow!( - "Permission to run tool action denied by user" - )); +#[cfg(any(test, feature = "test-support"))] +impl std::ops::Deref for ToolCallEventStreamReceiver { + type Target = mpsc::UnboundedReceiver>; - self.tool_use.insert_tool_output( - tool_use_id.clone(), - tool_name, - err, - self.configured_model.as_ref(), - self.completion_mode, - ); - self.tool_finished(tool_use_id, None, true, window, cx); + fn deref(&self) -> &Self::Target { + &self.0 } } -#[derive(Debug, Clone, Error)] -pub enum ThreadError { - #[error("Payment required")] - PaymentRequired, - #[error("Model request limit reached")] - ModelRequestLimitReached { plan: Plan }, - #[error("Message {header}: {message}")] - Message { - header: SharedString, - message: SharedString, - }, - #[error("Retryable error: {message}")] - RetryableError { - message: SharedString, - can_enable_burn_mode: bool, - }, -} - -#[derive(Debug, Clone)] -pub enum ThreadEvent { - ShowError(ThreadError), - StreamedCompletion, - ReceivedTextChunk, - NewRequest, - StreamedAssistantText(MessageId, String), - StreamedAssistantThinking(MessageId, String), - StreamedToolUse { - tool_use_id: LanguageModelToolUseId, - ui_text: Arc, - input: serde_json::Value, - }, - MissingToolUse { - tool_use_id: LanguageModelToolUseId, - ui_text: Arc, - }, - InvalidToolInput { - tool_use_id: LanguageModelToolUseId, - ui_text: Arc, - invalid_input_json: Arc, - }, - Stopped(Result>), - MessageAdded(MessageId), - MessageEdited(MessageId), - MessageDeleted(MessageId), - SummaryGenerated, - SummaryChanged, - UsePendingTools { - tool_uses: Vec, - }, - ToolFinished { - #[allow(unused)] - tool_use_id: LanguageModelToolUseId, - /// The pending tool use that corresponds to this tool. 
- pending_tool_use: Option, - }, - CheckpointChanged, - ToolConfirmationNeeded, - ToolUseLimitReached, - CancelEditing, - CompletionCanceled, - ProfileChanged, +#[cfg(any(test, feature = "test-support"))] +impl std::ops::DerefMut for ToolCallEventStreamReceiver { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } } -impl EventEmitter for Thread {} - -struct PendingCompletion { - id: usize, - queue_state: QueueState, - _task: Task<()>, +impl From<&str> for UserMessageContent { + fn from(text: &str) -> Self { + Self::Text(text.into()) + } } -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - context::load_context, context_store::ContextStore, thread_store, thread_store::ThreadStore, - }; - - // Test-specific constants - const TEST_RATE_LIMIT_RETRY_SECS: u64 = 30; - use agent_settings::{AgentProfileId, AgentSettings}; - use assistant_tool::ToolRegistry; - use assistant_tools; - use fs::Fs; - use futures::StreamExt; - use futures::future::BoxFuture; - use futures::stream::BoxStream; - use gpui::TestAppContext; - use http_client; - use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider}; - use language_model::{ - LanguageModelCompletionError, LanguageModelName, LanguageModelProviderId, - LanguageModelProviderName, LanguageModelToolChoice, - }; - use parking_lot::Mutex; - use project::{FakeFs, Project}; - use prompt_store::PromptBuilder; - use serde_json::json; - use settings::{LanguageModelParameters, Settings, SettingsStore}; - use std::sync::Arc; - use std::time::Duration; - use util::path; - use workspace::Workspace; - - #[gpui::test] - async fn test_message_with_context(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_workspace, _thread_store, thread, context_store, model) = - setup_test_environment(cx, project.clone()).await; - - add_file_to_context(&project, &context_store, "test/code.rs", cx) - .await - .unwrap(); - - let context = - context_store.read_with(cx, |store, _| store.context().next().cloned().unwrap()); - let loaded_context = cx - .update(|cx| load_context(vec![context], &project, &None, cx)) - .await; - - // Insert user message with context - let message_id = thread.update(cx, |thread, cx| { - thread.insert_user_message( - "Please explain this code", - loaded_context, - None, - Vec::new(), - cx, - ) - }); - - // Check content and context in message object - let message = thread.read_with(cx, |thread, _| thread.message(message_id).unwrap().clone()); - - // Use different path format strings based on platform for the test - #[cfg(windows)] - let path_part = r"test\code.rs"; - #[cfg(not(windows))] - let path_part = "test/code.rs"; - - let expected_context = format!( - r#" - -The following items were attached by the user. They are up-to-date and don't need to be re-read. 
- - -```rs {path_part} -fn main() {{ - println!("Hello, world!"); -}} -``` - - -"# - ); - - assert_eq!(message.role, Role::User); - assert_eq!(message.segments.len(), 1); - assert_eq!( - message.segments[0], - MessageSegment::Text("Please explain this code".to_string()) - ); - assert_eq!(message.loaded_context.text, expected_context); - - // Check message in request - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - - assert_eq!(request.messages.len(), 2); - let expected_full_message = format!("{}Please explain this code", expected_context); - assert_eq!(request.messages[1].string_contents(), expected_full_message); +impl From for UserMessageContent { + fn from(value: acp::ContentBlock) -> Self { + match value { + acp::ContentBlock::Text(text_content) => Self::Text(text_content.text), + acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)), + acp::ContentBlock::Audio(_) => { + // TODO + Self::Text("[audio]".to_string()) + } + acp::ContentBlock::ResourceLink(resource_link) => { + match MentionUri::parse(&resource_link.uri) { + Ok(uri) => Self::Mention { + uri, + content: String::new(), + }, + Err(err) => { + log::error!("Failed to parse mention link: {}", err); + Self::Text(format!("[{}]({})", resource_link.name, resource_link.uri)) + } + } + } + acp::ContentBlock::Resource(resource) => match resource.resource { + acp::EmbeddedResourceResource::TextResourceContents(resource) => { + match MentionUri::parse(&resource.uri) { + Ok(uri) => Self::Mention { + uri, + content: resource.text, + }, + Err(err) => { + log::error!("Failed to parse mention link: {}", err); + Self::Text( + MarkdownCodeBlock { + tag: &resource.uri, + text: &resource.text, + } + .to_string(), + ) + } + } + } + acp::EmbeddedResourceResource::BlobResourceContents(_) => { + // TODO + Self::Text("[blob]".to_string()) + } + }, + } } +} - #[gpui::test] - async fn test_only_include_new_contexts(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({ - "file1.rs": "fn function1() {}\n", - "file2.rs": "fn function2() {}\n", - "file3.rs": "fn function3() {}\n", - "file4.rs": "fn function4() {}\n", +impl From for acp::ContentBlock { + fn from(content: UserMessageContent) -> Self { + match content { + UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent { + text, + annotations: None, + meta: None, }), - ) - .await; - - let (_, _thread_store, thread, context_store, model) = - setup_test_environment(cx, project.clone()).await; - - // First message with context 1 - add_file_to_context(&project, &context_store, "test/file1.rs", cx) - .await - .unwrap(); - let new_contexts = context_store.update(cx, |store, cx| { - store.new_context_for_thread(thread.read(cx), None) - }); - assert_eq!(new_contexts.len(), 1); - let loaded_context = cx - .update(|cx| load_context(new_contexts, &project, &None, cx)) - .await; - let message1_id = thread.update(cx, |thread, cx| { - thread.insert_user_message("Message 1", loaded_context, None, Vec::new(), cx) - }); - - // Second message with contexts 1 and 2 (context 1 should be skipped as it's already included) - add_file_to_context(&project, &context_store, "test/file2.rs", cx) - .await - .unwrap(); - let new_contexts = context_store.update(cx, |store, cx| { - store.new_context_for_thread(thread.read(cx), None) - }); - assert_eq!(new_contexts.len(), 1); - let loaded_context = cx - .update(|cx| 
load_context(new_contexts, &project, &None, cx)) - .await; - let message2_id = thread.update(cx, |thread, cx| { - thread.insert_user_message("Message 2", loaded_context, None, Vec::new(), cx) - }); - - // Third message with all three contexts (contexts 1 and 2 should be skipped) - // - add_file_to_context(&project, &context_store, "test/file3.rs", cx) - .await - .unwrap(); - let new_contexts = context_store.update(cx, |store, cx| { - store.new_context_for_thread(thread.read(cx), None) - }); - assert_eq!(new_contexts.len(), 1); - let loaded_context = cx - .update(|cx| load_context(new_contexts, &project, &None, cx)) - .await; - let message3_id = thread.update(cx, |thread, cx| { - thread.insert_user_message("Message 3", loaded_context, None, Vec::new(), cx) - }); - - // Check what contexts are included in each message - let (message1, message2, message3) = thread.read_with(cx, |thread, _| { - ( - thread.message(message1_id).unwrap().clone(), - thread.message(message2_id).unwrap().clone(), - thread.message(message3_id).unwrap().clone(), - ) - }); - - // First message should include context 1 - assert!(message1.loaded_context.text.contains("file1.rs")); - - // Second message should include only context 2 (not 1) - assert!(!message2.loaded_context.text.contains("file1.rs")); - assert!(message2.loaded_context.text.contains("file2.rs")); - - // Third message should include only context 3 (not 1 or 2) - assert!(!message3.loaded_context.text.contains("file1.rs")); - assert!(!message3.loaded_context.text.contains("file2.rs")); - assert!(message3.loaded_context.text.contains("file3.rs")); - - // Check entire request to make sure all contexts are properly included - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - - // The request should contain all 3 messages - assert_eq!(request.messages.len(), 4); - - // Check that the contexts are properly formatted in each message - assert!(request.messages[1].string_contents().contains("file1.rs")); - assert!(!request.messages[1].string_contents().contains("file2.rs")); - assert!(!request.messages[1].string_contents().contains("file3.rs")); - - assert!(!request.messages[2].string_contents().contains("file1.rs")); - assert!(request.messages[2].string_contents().contains("file2.rs")); - assert!(!request.messages[2].string_contents().contains("file3.rs")); - - assert!(!request.messages[3].string_contents().contains("file1.rs")); - assert!(!request.messages[3].string_contents().contains("file2.rs")); - assert!(request.messages[3].string_contents().contains("file3.rs")); - - add_file_to_context(&project, &context_store, "test/file4.rs", cx) - .await - .unwrap(); - let new_contexts = context_store.update(cx, |store, cx| { - store.new_context_for_thread(thread.read(cx), Some(message2_id)) - }); - assert_eq!(new_contexts.len(), 3); - let loaded_context = cx - .update(|cx| load_context(new_contexts, &project, &None, cx)) - .await - .loaded_context; - - assert!(!loaded_context.text.contains("file1.rs")); - assert!(loaded_context.text.contains("file2.rs")); - assert!(loaded_context.text.contains("file3.rs")); - assert!(loaded_context.text.contains("file4.rs")); - - let new_contexts = context_store.update(cx, |store, cx| { - // Remove file4.rs - store.remove_context(&loaded_context.contexts[2].handle(), cx); - store.new_context_for_thread(thread.read(cx), Some(message2_id)) - }); - assert_eq!(new_contexts.len(), 2); - let loaded_context = cx - .update(|cx| load_context(new_contexts, 
&project, &None, cx)) - .await - .loaded_context; - - assert!(!loaded_context.text.contains("file1.rs")); - assert!(loaded_context.text.contains("file2.rs")); - assert!(loaded_context.text.contains("file3.rs")); - assert!(!loaded_context.text.contains("file4.rs")); - - let new_contexts = context_store.update(cx, |store, cx| { - // Remove file3.rs - store.remove_context(&loaded_context.contexts[1].handle(), cx); - store.new_context_for_thread(thread.read(cx), Some(message2_id)) - }); - assert_eq!(new_contexts.len(), 1); - let loaded_context = cx - .update(|cx| load_context(new_contexts, &project, &None, cx)) - .await - .loaded_context; - - assert!(!loaded_context.text.contains("file1.rs")); - assert!(loaded_context.text.contains("file2.rs")); - assert!(!loaded_context.text.contains("file3.rs")); - assert!(!loaded_context.text.contains("file4.rs")); - } - - #[gpui::test] - async fn test_message_without_files(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_, _thread_store, thread, _context_store, model) = - setup_test_environment(cx, project.clone()).await; - - // Insert user message without any context (empty context vector) - let message_id = thread.update(cx, |thread, cx| { - thread.insert_user_message( - "What is the best way to learn Rust?", - ContextLoadResult::default(), - None, - Vec::new(), - cx, - ) - }); - - // Check content and context in message object - let message = thread.read_with(cx, |thread, _| thread.message(message_id).unwrap().clone()); - - // Context should be empty when no files are included - assert_eq!(message.role, Role::User); - assert_eq!(message.segments.len(), 1); - assert_eq!( - message.segments[0], - MessageSegment::Text("What is the best way to learn Rust?".to_string()) - ); - assert_eq!(message.loaded_context.text, ""); - - // Check message in request - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - - assert_eq!(request.messages.len(), 2); - assert_eq!( - request.messages[1].string_contents(), - "What is the best way to learn Rust?" - ); - - // Add second message, also without context - let message2_id = thread.update(cx, |thread, cx| { - thread.insert_user_message( - "Are there any good books?", - ContextLoadResult::default(), - None, - Vec::new(), - cx, - ) - }); - - let message2 = - thread.read_with(cx, |thread, _| thread.message(message2_id).unwrap().clone()); - assert_eq!(message2.loaded_context.text, ""); - - // Check that both messages appear in the request - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - - assert_eq!(request.messages.len(), 3); - assert_eq!( - request.messages[1].string_contents(), - "What is the best way to learn Rust?" - ); - assert_eq!( - request.messages[2].string_contents(), - "Are there any good books?" - ); - } - - #[gpui::test] - #[ignore] // turn this test on when project_notifications tool is re-enabled - async fn test_stale_buffer_notification(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_workspace, _thread_store, thread, context_store, model) = - setup_test_environment(cx, project.clone()).await; - - // Add a buffer to the context. 
This will be a tracked buffer - let buffer = add_file_to_context(&project, &context_store, "test/code.rs", cx) - .await - .unwrap(); - - let context = context_store - .read_with(cx, |store, _| store.context().next().cloned()) - .unwrap(); - let loaded_context = cx - .update(|cx| load_context(vec![context], &project, &None, cx)) - .await; - - // Insert user message and assistant response - thread.update(cx, |thread, cx| { - thread.insert_user_message("Explain this code", loaded_context, None, Vec::new(), cx); - thread.insert_assistant_message( - vec![MessageSegment::Text("This code prints 42.".into())], - cx, - ); - }); - cx.run_until_parked(); - - // We shouldn't have a stale buffer notification yet - let notifications = thread.read_with(cx, |thread, _| { - find_tool_uses(thread, "project_notifications") - }); - assert!( - notifications.is_empty(), - "Should not have stale buffer notification before buffer is modified" - ); - - // Modify the buffer - buffer.update(cx, |buffer, cx| { - buffer.edit( - [(1..1, "\n println!(\"Added a new line\");\n")], - None, - cx, - ); - }); - - // Insert another user message - thread.update(cx, |thread, cx| { - thread.insert_user_message( - "What does the code do now?", - ContextLoadResult::default(), - None, - Vec::new(), - cx, - ) - }); - cx.run_until_parked(); - - // Check for the stale buffer warning - thread.update(cx, |thread, cx| { - thread.flush_notifications(model.clone(), CompletionIntent::UserPrompt, cx) - }); - cx.run_until_parked(); - - let notifications = thread.read_with(cx, |thread, _cx| { - find_tool_uses(thread, "project_notifications") - }); - - let [notification] = notifications.as_slice() else { - panic!("Should have a `project_notifications` tool use"); - }; - - let Some(notification_content) = notification.content.to_str() else { - panic!("`project_notifications` should return text"); - }; - - assert!(notification_content.contains("These files have changed since the last read:")); - assert!(notification_content.contains("code.rs")); - - // Insert another user message and flush notifications again - thread.update(cx, |thread, cx| { - thread.insert_user_message( - "Can you tell me more?", - ContextLoadResult::default(), - None, - Vec::new(), - cx, - ) - }); - - thread.update(cx, |thread, cx| { - thread.flush_notifications(model.clone(), CompletionIntent::UserPrompt, cx) - }); - cx.run_until_parked(); - - // There should be no new notifications (we already flushed one) - let notifications = thread.read_with(cx, |thread, _cx| { - find_tool_uses(thread, "project_notifications") - }); - - assert_eq!( - notifications.len(), - 1, - "Should still have only one notification after second flush - no duplicates" - ); - } - - fn find_tool_uses(thread: &Thread, tool_name: &str) -> Vec { - thread - .messages() - .flat_map(|message| { - thread - .tool_results_for_message(message.id) - .into_iter() - .filter(|result| result.tool_name == tool_name.into()) - .cloned() - .collect::>() - }) - .collect() - } - - #[gpui::test] - async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_workspace, thread_store, thread, _context_store, _model) = - setup_test_environment(cx, project.clone()).await; - - // Check that we are starting with the default profile - let profile = cx.read(|cx| thread.read(cx).profile.clone()); - let tool_set = cx.read(|cx| 
thread_store.read(cx).tools()); - assert_eq!( - profile, - AgentProfile::new(AgentProfileId::default(), tool_set) - ); - } - - #[gpui::test] - async fn test_serializing_thread_profile(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_workspace, thread_store, thread, _context_store, _model) = - setup_test_environment(cx, project.clone()).await; - - // Profile gets serialized with default values - let serialized = thread - .update(cx, |thread, cx| thread.serialize(cx)) - .await - .unwrap(); - - assert_eq!(serialized.profile, Some(AgentProfileId::default())); - - let deserialized = cx.update(|cx| { - thread.update(cx, |thread, cx| { - Thread::deserialize( - thread.id.clone(), - serialized, - thread.project.clone(), - thread.tools.clone(), - thread.prompt_builder.clone(), - thread.project_context.clone(), - None, - cx, - ) - }) - }); - let tool_set = cx.read(|cx| thread_store.read(cx).tools()); - - assert_eq!( - deserialized.profile, - AgentProfile::new(AgentProfileId::default(), tool_set) - ); - } - - #[gpui::test] - async fn test_temperature_setting(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project( - &fs, - cx, - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let (_workspace, _thread_store, thread, _context_store, model) = - setup_test_environment(cx, project.clone()).await; - - // Both model and provider - cx.update(|cx| { - AgentSettings::override_global( - AgentSettings { - model_parameters: vec![LanguageModelParameters { - provider: Some(model.provider_id().0.to_string().into()), - model: Some(model.id().0), - temperature: Some(0.66), - }], - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - assert_eq!(request.temperature, Some(0.66)); - - // Only model - cx.update(|cx| { - AgentSettings::override_global( - AgentSettings { - model_parameters: vec![LanguageModelParameters { - provider: None, - model: Some(model.id().0), - temperature: Some(0.66), - }], - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - assert_eq!(request.temperature, Some(0.66)); - - // Only provider - cx.update(|cx| { - AgentSettings::override_global( - AgentSettings { - model_parameters: vec![LanguageModelParameters { - provider: Some(model.provider_id().0.to_string().into()), - model: None, - temperature: Some(0.66), - }], - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - assert_eq!(request.temperature, Some(0.66)); - - // Same model name, different provider - cx.update(|cx| { - AgentSettings::override_global( - AgentSettings { - model_parameters: vec![LanguageModelParameters { - provider: Some("anthropic".into()), - model: Some(model.id().0), - temperature: Some(0.66), - }], - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - assert_eq!(request.temperature, None); - } - - 
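Reviewer note on the removed `test_temperature_setting` above: as I read its assertions, a `model_parameters` override applies when its optional `provider` and `model` fields are each either unset or equal to the request's provider/model. A minimal standalone sketch of that matching rule, using hypothetical names rather than the actual `AgentSettings` code:

    // Hypothetical stand-in for `LanguageModelParameters`; field names are assumptions.
    struct ModelParameters {
        provider: Option<String>,
        model: Option<String>,
        temperature: Option<f32>,
    }

    // Returns the first matching override's temperature, if any.
    fn temperature_for(overrides: &[ModelParameters], provider: &str, model: &str) -> Option<f32> {
        overrides
            .iter()
            .find(|p| {
                p.provider.as_deref().map_or(true, |prov| prov == provider)
                    && p.model.as_deref().map_or(true, |m| m == model)
            })
            .and_then(|p| p.temperature)
    }

Under that rule the first three removed cases (both fields set, model only, provider only) yield Some(0.66), while the mismatched-provider case yields None, which is consistent with the assertions in the deleted test.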
#[gpui::test] - async fn test_thread_summary(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - - let (_, _thread_store, thread, _context_store, model) = - setup_test_environment(cx, project.clone()).await; - - // Initial state should be pending - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Pending)); - assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT); - }); - - // Manually setting the summary should not be allowed in this state - thread.update(cx, |thread, cx| { - thread.set_summary("This should not work", cx); - }); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Pending)); - }); - - // Send a message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hi!", ContextLoadResult::default(), None, vec![], cx); - thread.send_to_model( - model.clone(), - CompletionIntent::ThreadSummarization, - None, - cx, - ); - }); - - let fake_model = model.as_fake(); - simulate_successful_response(fake_model, cx); - - // Should start generating summary when there are >= 2 messages - thread.read_with(cx, |thread, _| { - assert_eq!(*thread.summary(), ThreadSummary::Generating); - }); - - // Should not be able to set the summary while generating - thread.update(cx, |thread, cx| { - thread.set_summary("This should not work either", cx); - }); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Generating)); - assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT); - }); - - cx.run_until_parked(); - fake_model.send_last_completion_stream_text_chunk("Brief"); - fake_model.send_last_completion_stream_text_chunk(" Introduction"); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // Summary should be set - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Ready(_))); - assert_eq!(thread.summary().or_default(), "Brief Introduction"); - }); - - // Now we should be able to set a summary - thread.update(cx, |thread, cx| { - thread.set_summary("Brief Intro", cx); - }); - - thread.read_with(cx, |thread, _| { - assert_eq!(thread.summary().or_default(), "Brief Intro"); - }); - - // Test setting an empty summary (should default to DEFAULT) - thread.update(cx, |thread, cx| { - thread.set_summary("", cx); - }); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Ready(_))); - assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT); - }); - } - - #[gpui::test] - async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - - let (_, _thread_store, thread, _context_store, model) = - setup_test_environment(cx, project.clone()).await; - - test_summarize_error(&model, &thread, cx); - - // Now we should be able to set a summary - thread.update(cx, |thread, cx| { - thread.set_summary("Brief Intro", cx); - }); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Ready(_))); - assert_eq!(thread.summary().or_default(), "Brief Intro"); - }); - } - - #[gpui::test] - async fn test_thread_summary_error_retry(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - - let (_, _thread_store, thread, _context_store, model) = - setup_test_environment(cx, project.clone()).await; 
- - test_summarize_error(&model, &thread, cx); - - // Sending another message should not trigger another summarize request - thread.update(cx, |thread, cx| { - thread.insert_user_message( - "How are you?", - ContextLoadResult::default(), - None, - vec![], - cx, - ); - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - let fake_model = model.as_fake(); - simulate_successful_response(fake_model, cx); - - thread.read_with(cx, |thread, _| { - // State is still Error, not Generating - assert!(matches!(thread.summary(), ThreadSummary::Error)); - }); - - // But the summarize request can be invoked manually - thread.update(cx, |thread, cx| { - thread.summarize(cx); - }); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Generating)); - }); - - cx.run_until_parked(); - fake_model.send_last_completion_stream_text_chunk("A successful summary"); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Ready(_))); - assert_eq!(thread.summary().or_default(), "A successful summary"); - }); - } - - // Helper to create a model that returns errors - enum TestError { - Overloaded, - InternalServerError, - } - - struct ErrorInjector { - inner: Arc, - error_type: TestError, - } - - impl ErrorInjector { - fn new(error_type: TestError) -> Self { - Self { - inner: Arc::new(FakeLanguageModel::default()), - error_type, - } - } - } - - impl LanguageModel for ErrorInjector { - fn id(&self) -> LanguageModelId { - self.inner.id() - } - - fn name(&self) -> LanguageModelName { - self.inner.name() - } - - fn provider_id(&self) -> LanguageModelProviderId { - self.inner.provider_id() - } - - fn provider_name(&self) -> LanguageModelProviderName { - self.inner.provider_name() - } - - fn supports_tools(&self) -> bool { - self.inner.supports_tools() - } - - fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { - self.inner.supports_tool_choice(choice) - } - - fn supports_images(&self) -> bool { - self.inner.supports_images() - } - - fn telemetry_id(&self) -> String { - self.inner.telemetry_id() - } - - fn max_token_count(&self) -> u64 { - self.inner.max_token_count() - } - - fn count_tokens( - &self, - request: LanguageModelRequest, - cx: &App, - ) -> BoxFuture<'static, Result> { - self.inner.count_tokens(request, cx) - } - - fn stream_completion( - &self, - _request: LanguageModelRequest, - _cx: &AsyncApp, - ) -> BoxFuture< - 'static, - Result< - BoxStream< - 'static, - Result, - >, - LanguageModelCompletionError, - >, - > { - let error = match self.error_type { - TestError::Overloaded => LanguageModelCompletionError::ServerOverloaded { - provider: self.provider_name(), - retry_after: None, - }, - TestError::InternalServerError => { - LanguageModelCompletionError::ApiInternalServerError { - provider: self.provider_name(), - message: "I'm a teapot orbiting the sun".to_string(), - } - } - }; - async move { - let stream = futures::stream::once(async move { Err(error) }); - Ok(stream.boxed()) - } - .boxed() - } - - fn as_fake(&self) -> &FakeLanguageModel { - &self.inner - } - } - - #[gpui::test] - async fn test_retry_on_overloaded_error(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - 
thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create model that returns overloaded error - let model = Arc::new(ErrorInjector::new(TestError::Overloaded)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - cx.run_until_parked(); - - thread.read_with(cx, |thread, _| { - assert!(thread.retry_state.is_some(), "Should have retry state"); - let retry_state = thread.retry_state.as_ref().unwrap(); - assert_eq!(retry_state.attempt, 1, "Should be first retry attempt"); - assert_eq!( - retry_state.max_attempts, MAX_RETRY_ATTEMPTS, - "Should retry MAX_RETRY_ATTEMPTS times for overloaded errors" - ); - }); - - // Check that a retry message was added - thread.read_with(cx, |thread, _| { - let mut messages = thread.messages(); - assert!( - messages.any(|msg| { - msg.role == Role::System - && msg.ui_only - && msg.segments.iter().any(|seg| { - if let MessageSegment::Text(text) = seg { - text.contains("overloaded") - && text - .contains(&format!("attempt 1 of {}", MAX_RETRY_ATTEMPTS)) - } else { - false - } - }) - }), - "Should have added a system retry message" - ); - }); - - let retry_count = thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - .count() - }); - - assert_eq!(retry_count, 1, "Should have one retry message"); - } - - #[gpui::test] - async fn test_retry_on_internal_server_error(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create model that returns internal server error - let model = Arc::new(ErrorInjector::new(TestError::InternalServerError)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - cx.run_until_parked(); - - // Check retry state on thread - thread.read_with(cx, |thread, _| { - assert!(thread.retry_state.is_some(), "Should have retry state"); - let retry_state = thread.retry_state.as_ref().unwrap(); - assert_eq!(retry_state.attempt, 1, "Should be first retry attempt"); - assert_eq!( - retry_state.max_attempts, 3, - "Should have correct max attempts" - ); - }); - - // Check that a retry message was added with provider name - thread.read_with(cx, |thread, _| { - let mut messages = thread.messages(); - assert!( - messages.any(|msg| { - msg.role == Role::System - && msg.ui_only - && msg.segments.iter().any(|seg| { - if let MessageSegment::Text(text) = seg { - text.contains("internal") - && text.contains("Fake") - && text.contains("Retrying") - && text.contains("attempt 1 of 3") - && text.contains("seconds") - } else { - false - } - }) - }), - "Should have added a system retry message with provider name" - ); - }); - - // Count retry messages - let retry_count = 
thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - .count() - }); - - assert_eq!(retry_count, 1, "Should have one retry message"); - } - - #[gpui::test] - async fn test_exponential_backoff_on_retries(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create model that returns internal server error - let model = Arc::new(ErrorInjector::new(TestError::InternalServerError)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Track retry events and completion count - // Track completion events - let completion_count = Arc::new(Mutex::new(0)); - let completion_count_clone = completion_count.clone(); - - let _subscription = thread.update(cx, |_, cx| { - cx.subscribe(&thread, move |_, _, event: &ThreadEvent, _| { - if let ThreadEvent::NewRequest = event { - *completion_count_clone.lock() += 1; - } - }) - }); - - // First attempt - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - cx.run_until_parked(); - - // Should have scheduled first retry - count retry messages - let retry_count = thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - .count() - }); - assert_eq!(retry_count, 1, "Should have scheduled first retry"); - - // Check retry state - thread.read_with(cx, |thread, _| { - assert!(thread.retry_state.is_some(), "Should have retry state"); - let retry_state = thread.retry_state.as_ref().unwrap(); - assert_eq!(retry_state.attempt, 1, "Should be first retry attempt"); - assert_eq!( - retry_state.max_attempts, 3, - "Internal server errors should retry up to 3 times" - ); - }); - - // Advance clock for first retry - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - - // Advance clock for second retry - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - - // Advance clock for third retry - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - - // Should have completed all retries - count retry messages - let retry_count = thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - .count() - }); - assert_eq!( - retry_count, 3, - "Should have 3 retries for internal server errors" - ); - - // For internal server errors, we retry 3 times and then give up - // Check that retry_state is cleared after all retries - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_none(), - "Retry state should be cleared after all retries" - ); - }); - - // Verify total attempts (1 initial + 3 retries) - assert_eq!( - *completion_count.lock(), - 4, - 
"Should have attempted once plus 3 retries" - ); - } - - #[gpui::test] - async fn test_max_retries_exceeded(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create model that returns overloaded error - let model = Arc::new(ErrorInjector::new(TestError::Overloaded)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Track events - let stopped_with_error = Arc::new(Mutex::new(false)); - let stopped_with_error_clone = stopped_with_error.clone(); - - let _subscription = thread.update(cx, |_, cx| { - cx.subscribe(&thread, move |_, _, event: &ThreadEvent, _| { - if let ThreadEvent::Stopped(Err(_)) = event { - *stopped_with_error_clone.lock() = true; - } - }) - }); - - // Start initial completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - cx.run_until_parked(); - - // Advance through all retries - for _ in 0..MAX_RETRY_ATTEMPTS { - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - } - - let retry_count = thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - .count() - }); - - // After max retries, should emit Stopped(Err(...)) event - assert_eq!( - retry_count, MAX_RETRY_ATTEMPTS as usize, - "Should have attempted MAX_RETRY_ATTEMPTS retries for overloaded errors" - ); - assert!( - *stopped_with_error.lock(), - "Should emit Stopped(Err(...)) event after max retries exceeded" - ); - - // Retry state should be cleared - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_none(), - "Retry state should be cleared after max retries" - ); - - // Verify we have the expected number of retry messages - let retry_messages = thread - .messages - .iter() - .filter(|msg| msg.ui_only && msg.role == Role::System) - .count(); - assert_eq!( - retry_messages, MAX_RETRY_ATTEMPTS as usize, - "Should have MAX_RETRY_ATTEMPTS retry messages for overloaded errors" - ); - }); - } - - #[gpui::test] - async fn test_retry_message_removed_on_retry(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // We'll use a wrapper to switch behavior after first failure - struct RetryTestModel { - inner: Arc, - failed_once: Arc>, - } - - impl LanguageModel for RetryTestModel { - fn id(&self) -> LanguageModelId { - self.inner.id() - } - - fn name(&self) -> LanguageModelName { - self.inner.name() - } - - fn provider_id(&self) -> LanguageModelProviderId { - self.inner.provider_id() - } - - fn provider_name(&self) -> LanguageModelProviderName { - self.inner.provider_name() - } - - fn supports_tools(&self) -> bool { - self.inner.supports_tools() - } - - fn supports_tool_choice(&self, choice: 
LanguageModelToolChoice) -> bool { - self.inner.supports_tool_choice(choice) - } - - fn supports_images(&self) -> bool { - self.inner.supports_images() - } - - fn telemetry_id(&self) -> String { - self.inner.telemetry_id() - } - - fn max_token_count(&self) -> u64 { - self.inner.max_token_count() - } - - fn count_tokens( - &self, - request: LanguageModelRequest, - cx: &App, - ) -> BoxFuture<'static, Result> { - self.inner.count_tokens(request, cx) - } - - fn stream_completion( - &self, - request: LanguageModelRequest, - cx: &AsyncApp, - ) -> BoxFuture< - 'static, - Result< - BoxStream< - 'static, - Result, - >, - LanguageModelCompletionError, - >, - > { - if !*self.failed_once.lock() { - *self.failed_once.lock() = true; - let provider = self.provider_name(); - // Return error on first attempt - let stream = futures::stream::once(async move { - Err(LanguageModelCompletionError::ServerOverloaded { - provider, - retry_after: None, - }) - }); - async move { Ok(stream.boxed()) }.boxed() - } else { - // Succeed on retry - self.inner.stream_completion(request, cx) - } - } - - fn as_fake(&self) -> &FakeLanguageModel { - &self.inner - } - } - - let model = Arc::new(RetryTestModel { - inner: Arc::new(FakeLanguageModel::default()), - failed_once: Arc::new(Mutex::new(false)), - }); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Track message deletions - // Track when retry completes successfully - let retry_completed = Arc::new(Mutex::new(false)); - let retry_completed_clone = retry_completed.clone(); - - let _subscription = thread.update(cx, |_, cx| { - cx.subscribe(&thread, move |_, _, event: &ThreadEvent, _| { - if let ThreadEvent::StreamedCompletion = event { - *retry_completed_clone.lock() = true; - } - }) - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - cx.run_until_parked(); - - // Get the retry message ID - let retry_message_id = thread.read_with(cx, |thread, _| { - thread - .messages() - .find(|msg| msg.role == Role::System && msg.ui_only) - .map(|msg| msg.id) - .expect("Should have a retry message") - }); - - // Wait for retry - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - - // Stream some successful content - let fake_model = model.as_fake(); - // After the retry, there should be a new pending completion - let pending = fake_model.pending_completions(); - assert!( - !pending.is_empty(), - "Should have a pending completion after retry" - ); - fake_model.send_completion_stream_text_chunk(&pending[0], "Success!"); - fake_model.end_completion_stream(&pending[0]); - cx.run_until_parked(); - - // Check that the retry completed successfully - assert!( - *retry_completed.lock(), - "Retry should have completed successfully" - ); - - // Retry message should still exist but be marked as ui_only - thread.read_with(cx, |thread, _| { - let retry_msg = thread - .message(retry_message_id) - .expect("Retry message should still exist"); - assert!(retry_msg.ui_only, "Retry message should be ui_only"); - assert_eq!( - retry_msg.role, - Role::System, - "Retry message should have System role" - ); - }); - } - - #[gpui::test] - async fn test_successful_completion_clears_retry_state(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = 
setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create a model that fails once then succeeds - struct FailOnceModel { - inner: Arc, - failed_once: Arc>, - } - - impl LanguageModel for FailOnceModel { - fn id(&self) -> LanguageModelId { - self.inner.id() - } - - fn name(&self) -> LanguageModelName { - self.inner.name() - } - - fn provider_id(&self) -> LanguageModelProviderId { - self.inner.provider_id() - } - - fn provider_name(&self) -> LanguageModelProviderName { - self.inner.provider_name() - } - - fn supports_tools(&self) -> bool { - self.inner.supports_tools() - } - - fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { - self.inner.supports_tool_choice(choice) - } - - fn supports_images(&self) -> bool { - self.inner.supports_images() - } - - fn telemetry_id(&self) -> String { - self.inner.telemetry_id() - } - - fn max_token_count(&self) -> u64 { - self.inner.max_token_count() - } - - fn count_tokens( - &self, - request: LanguageModelRequest, - cx: &App, - ) -> BoxFuture<'static, Result> { - self.inner.count_tokens(request, cx) - } - - fn stream_completion( - &self, - request: LanguageModelRequest, - cx: &AsyncApp, - ) -> BoxFuture< - 'static, - Result< - BoxStream< - 'static, - Result, - >, - LanguageModelCompletionError, - >, - > { - if !*self.failed_once.lock() { - *self.failed_once.lock() = true; - let provider = self.provider_name(); - // Return error on first attempt - let stream = futures::stream::once(async move { - Err(LanguageModelCompletionError::ServerOverloaded { - provider, - retry_after: None, - }) - }); - async move { Ok(stream.boxed()) }.boxed() - } else { - // Succeed on retry - self.inner.stream_completion(request, cx) - } - } - } - - let fail_once_model = Arc::new(FailOnceModel { - inner: Arc::new(FakeLanguageModel::default()), - failed_once: Arc::new(Mutex::new(false)), - }); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message( - "Test message", - ContextLoadResult::default(), - None, - vec![], - cx, - ); - }); - - // Start completion with fail-once model - thread.update(cx, |thread, cx| { - thread.send_to_model( - fail_once_model.clone(), - CompletionIntent::UserPrompt, - None, - cx, - ); - }); - - cx.run_until_parked(); - - // Verify retry state exists after first failure - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_some(), - "Should have retry state after failure" - ); - }); - - // Wait for retry delay - cx.executor().advance_clock(BASE_RETRY_DELAY); - cx.run_until_parked(); - - // The retry should now use our FailOnceModel which should succeed - // We need to help the FakeLanguageModel complete the stream - let inner_fake = fail_once_model.inner.clone(); - - // Wait a bit for the retry to start - cx.run_until_parked(); - - // Check for pending completions and complete them - if let Some(pending) = inner_fake.pending_completions().first() { - inner_fake.send_completion_stream_text_chunk(pending, "Success!"); - inner_fake.end_completion_stream(pending); - } - cx.run_until_parked(); - - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_none(), - "Retry state should be cleared after successful completion" - ); - - let has_assistant_message = thread - .messages - .iter() - .any(|msg| msg.role == Role::Assistant && !msg.ui_only); - assert!( - has_assistant_message, - "Should have an assistant message after successful 
retry" - ); - }); - } - - #[gpui::test] - async fn test_rate_limit_retry_single_attempt(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create a model that returns rate limit error with retry_after - struct RateLimitModel { - inner: Arc, - } - - impl LanguageModel for RateLimitModel { - fn id(&self) -> LanguageModelId { - self.inner.id() - } - - fn name(&self) -> LanguageModelName { - self.inner.name() - } - - fn provider_id(&self) -> LanguageModelProviderId { - self.inner.provider_id() - } - - fn provider_name(&self) -> LanguageModelProviderName { - self.inner.provider_name() - } - - fn supports_tools(&self) -> bool { - self.inner.supports_tools() - } - - fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { - self.inner.supports_tool_choice(choice) - } - - fn supports_images(&self) -> bool { - self.inner.supports_images() - } - - fn telemetry_id(&self) -> String { - self.inner.telemetry_id() - } - - fn max_token_count(&self) -> u64 { - self.inner.max_token_count() - } - - fn count_tokens( - &self, - request: LanguageModelRequest, - cx: &App, - ) -> BoxFuture<'static, Result> { - self.inner.count_tokens(request, cx) - } - - fn stream_completion( - &self, - _request: LanguageModelRequest, - _cx: &AsyncApp, - ) -> BoxFuture< - 'static, - Result< - BoxStream< - 'static, - Result, - >, - LanguageModelCompletionError, - >, - > { - let provider = self.provider_name(); - async move { - let stream = futures::stream::once(async move { - Err(LanguageModelCompletionError::RateLimitExceeded { - provider, - retry_after: Some(Duration::from_secs(TEST_RATE_LIMIT_RETRY_SECS)), - }) - }); - Ok(stream.boxed()) - } - .boxed() - } - - fn as_fake(&self) -> &FakeLanguageModel { - &self.inner - } - } - - let model = Arc::new(RateLimitModel { - inner: Arc::new(FakeLanguageModel::default()), - }); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - cx.run_until_parked(); - - let retry_count = thread.update(cx, |thread, _| { - thread - .messages - .iter() - .filter(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("rate limit exceeded") - } else { - false - } - }) - }) - .count() - }); - assert_eq!(retry_count, 1, "Should have scheduled one retry"); - - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_some(), - "Rate limit errors should set retry_state" - ); - if let Some(retry_state) = &thread.retry_state { - assert_eq!( - retry_state.max_attempts, MAX_RETRY_ATTEMPTS, - "Rate limit errors should use MAX_RETRY_ATTEMPTS" - ); - } - }); - - // Verify we have one retry message - thread.read_with(cx, |thread, _| { - let retry_messages = thread - .messages - .iter() - .filter(|msg| { - msg.ui_only - && msg.segments.iter().any(|seg| { - if let MessageSegment::Text(text) = seg { - text.contains("rate limit exceeded") - } else { - false - } - }) + UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent { + data: image.source.to_string(), + 
mime_type: "image/png".to_string(), + meta: None, + annotations: None, + uri: None, + }), + UserMessageContent::Mention { uri, content } => { + acp::ContentBlock::Resource(acp::EmbeddedResource { + meta: None, + resource: acp::EmbeddedResourceResource::TextResourceContents( + acp::TextResourceContents { + meta: None, + mime_type: None, + text: content, + uri: uri.to_uri().to_string(), + }, + ), + annotations: None, }) - .count(); - assert_eq!( - retry_messages, 1, - "Should have one rate limit retry message" - ); - }); - - // Check that retry message doesn't include attempt count - thread.read_with(cx, |thread, _| { - let retry_message = thread - .messages - .iter() - .find(|msg| msg.role == Role::System && msg.ui_only) - .expect("Should have a retry message"); - - // Check that the message contains attempt count since we use retry_state - if let Some(MessageSegment::Text(text)) = retry_message.segments.first() { - assert!( - text.contains(&format!("attempt 1 of {}", MAX_RETRY_ATTEMPTS)), - "Rate limit retry message should contain attempt count with MAX_RETRY_ATTEMPTS" - ); - assert!( - text.contains("Retrying"), - "Rate limit retry message should contain retry text" - ); } - }); - } - - #[gpui::test] - async fn test_ui_only_messages_not_sent_to_model(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, model) = setup_test_environment(cx, project.clone()).await; - - // Insert a regular user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Insert a UI-only message (like our retry notifications) - thread.update(cx, |thread, cx| { - let id = thread.next_message_id.post_inc(); - thread.messages.push(Message { - id, - role: Role::System, - segments: vec![MessageSegment::Text( - "This is a UI-only message that should not be sent to the model".to_string(), - )], - loaded_context: LoadedContext::default(), - creases: Vec::new(), - is_hidden: true, - ui_only: true, - }); - cx.emit(ThreadEvent::MessageAdded(id)); - }); - - // Insert another regular message - thread.update(cx, |thread, cx| { - thread.insert_user_message( - "How are you?", - ContextLoadResult::default(), - None, - vec![], - cx, - ); - }); - - // Generate the completion request - let request = thread.update(cx, |thread, cx| { - thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx) - }); - - // Verify that the request only contains non-UI-only messages - // Should have system prompt + 2 user messages, but not the UI-only message - let user_messages: Vec<_> = request - .messages - .iter() - .filter(|msg| msg.role == Role::User) - .collect(); - assert_eq!( - user_messages.len(), - 2, - "Should have exactly 2 user messages" - ); - - // Verify the UI-only content is not present anywhere in the request - let request_text = request - .messages - .iter() - .flat_map(|msg| &msg.content) - .filter_map(|content| match content { - MessageContent::Text(text) => Some(text.as_str()), - _ => None, - }) - .collect::(); - - assert!( - !request_text.contains("UI-only message"), - "UI-only message content should not be in the request" - ); - - // Verify the thread still has all 3 messages (including UI-only) - thread.read_with(cx, |thread, _| { - assert_eq!( - thread.messages().count(), - 3, - "Thread should have 3 messages" - ); - assert_eq!( - thread.messages().filter(|m| m.ui_only).count(), - 1, - "Thread should have 1 UI-only message" - 
); - }); - - // Verify that UI-only messages are not serialized - let serialized = thread - .update(cx, |thread, cx| thread.serialize(cx)) - .await - .unwrap(); - assert_eq!( - serialized.messages.len(), - 2, - "Serialized thread should only have 2 messages (no UI-only)" - ); - } - - #[gpui::test] - async fn test_no_retry_without_burn_mode(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Ensure we're in Normal mode (not Burn mode) - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Normal); - }); - - // Track error events - let error_events = Arc::new(Mutex::new(Vec::new())); - let error_events_clone = error_events.clone(); - - let _subscription = thread.update(cx, |_, cx| { - cx.subscribe(&thread, move |_, _, event: &ThreadEvent, _| { - if let ThreadEvent::ShowError(error) = event { - error_events_clone.lock().push(error.clone()); - } - }) - }); - - // Create model that returns overloaded error - let model = Arc::new(ErrorInjector::new(TestError::Overloaded)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - cx.run_until_parked(); - - // Verify no retry state was created - thread.read_with(cx, |thread, _| { - assert!( - thread.retry_state.is_none(), - "Should not have retry state in Normal mode" - ); - }); - - // Check that a retryable error was reported - let errors = error_events.lock(); - assert!(!errors.is_empty(), "Should have received an error event"); - - if let ThreadError::RetryableError { - message: _, - can_enable_burn_mode, - } = &errors[0] - { - assert!( - *can_enable_burn_mode, - "Error should indicate burn mode can be enabled" - ); - } else { - panic!("Expected RetryableError, got {:?}", errors[0]); } - - // Verify the thread is no longer generating - thread.read_with(cx, |thread, _| { - assert!( - !thread.is_generating(), - "Should not be generating after error without retry" - ); - }); - } - - #[gpui::test] - async fn test_retry_canceled_on_stop(cx: &mut TestAppContext) { - let fs = init_test_settings(cx); - - let project = create_test_project(&fs, cx, json!({})).await; - let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await; - - // Enable Burn Mode to allow retries - thread.update(cx, |thread, _| { - thread.set_completion_mode(CompletionMode::Burn); - }); - - // Create model that returns overloaded error - let model = Arc::new(ErrorInjector::new(TestError::Overloaded)); - - // Insert a user message - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hello!", ContextLoadResult::default(), None, vec![], cx); - }); - - // Start completion - thread.update(cx, |thread, cx| { - thread.send_to_model(model.clone(), CompletionIntent::UserPrompt, None, cx); - }); - - cx.run_until_parked(); - - // Verify retry was scheduled by checking for retry message - let has_retry_message = thread.read_with(cx, |thread, _| { - thread.messages.iter().any(|m| { - m.ui_only - && m.segments.iter().any(|s| { - if let MessageSegment::Text(text) = s { - text.contains("Retrying") && text.contains("seconds") - } else { - false - } - }) - }) - }); - assert!(has_retry_message, "Should have scheduled a 
retry"); - - // Cancel the completion before the retry happens - thread.update(cx, |thread, cx| { - thread.cancel_last_completion(None, cx); - }); - - cx.run_until_parked(); - - // The retry should not have happened - no pending completions - let fake_model = model.as_fake(); - assert_eq!( - fake_model.pending_completions().len(), - 0, - "Should have no pending completions after cancellation" - ); - - // Verify the retry was canceled by checking retry state - thread.read_with(cx, |thread, _| { - if let Some(retry_state) = &thread.retry_state { - panic!( - "retry_state should be cleared after cancellation, but found: attempt={}, max_attempts={}, intent={:?}", - retry_state.attempt, retry_state.max_attempts, retry_state.intent - ); - } - }); - } - - fn test_summarize_error( - model: &Arc, - thread: &Entity, - cx: &mut TestAppContext, - ) { - thread.update(cx, |thread, cx| { - thread.insert_user_message("Hi!", ContextLoadResult::default(), None, vec![], cx); - thread.send_to_model( - model.clone(), - CompletionIntent::ThreadSummarization, - None, - cx, - ); - }); - - let fake_model = model.as_fake(); - simulate_successful_response(fake_model, cx); - - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Generating)); - assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT); - }); - - // Simulate summary request ending - cx.run_until_parked(); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - - // State is set to Error and default message - thread.read_with(cx, |thread, _| { - assert!(matches!(thread.summary(), ThreadSummary::Error)); - assert_eq!(thread.summary().or_default(), ThreadSummary::DEFAULT); - }); - } - - fn simulate_successful_response(fake_model: &FakeLanguageModel, cx: &mut TestAppContext) { - cx.run_until_parked(); - fake_model.send_last_completion_stream_text_chunk("Assistant response"); - fake_model.end_last_completion_stream(); - cx.run_until_parked(); - } - - fn init_test_settings(cx: &mut TestAppContext) -> Arc { - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - AgentSettings::register(cx); - prompt_store::init(cx); - thread_store::init(fs.clone(), cx); - workspace::init_settings(cx); - language_model::init_settings(cx); - theme::init(theme::LoadThemes::JustBase, cx); - ToolRegistry::default_global(cx); - assistant_tool::init(cx); - - let http_client = Arc::new(http_client::HttpClientWithUrl::new( - http_client::FakeHttpClient::with_200_response(), - "http://localhost".to_string(), - None, - )); - assistant_tools::init(http_client, cx); - }); - fs - } - - // Helper to create a test project with test files - async fn create_test_project( - fs: &Arc, - cx: &mut TestAppContext, - files: serde_json::Value, - ) -> Entity { - fs.as_fake().insert_tree(path!("/test"), files).await; - Project::test(fs.clone(), [path!("/test").as_ref()], cx).await - } - - async fn setup_test_environment( - cx: &mut TestAppContext, - project: Entity, - ) -> ( - Entity, - Entity, - Entity, - Entity, - Arc, - ) { - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); - - let thread_store = cx - .update(|_, cx| { - ThreadStore::load( - project.clone(), - cx.new(|_| ToolWorkingSet::default()), - None, - Arc::new(PromptBuilder::new(None).unwrap()), - cx, - ) - }) - .await - .unwrap(); - - let thread = thread_store.update(cx, |store, cx| 
store.create_thread(cx)); - let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None)); - - let provider = Arc::new(FakeLanguageModelProvider::default()); - let model = provider.test_model(); - let model: Arc = Arc::new(model); - - cx.update(|_, cx| { - LanguageModelRegistry::global(cx).update(cx, |registry, cx| { - registry.set_default_model( - Some(ConfiguredModel { - provider: provider.clone(), - model: model.clone(), - }), - cx, - ); - registry.set_thread_summary_model( - Some(ConfiguredModel { - provider, - model: model.clone(), - }), - cx, - ); - }) - }); - - (workspace, thread_store, thread, context_store, model) } +} - async fn add_file_to_context( - project: &Entity, - context_store: &Entity, - path: &str, - cx: &mut TestAppContext, - ) -> Result> { - let buffer_path = project - .read_with(cx, |project, cx| project.find_project_path(path, cx)) - .unwrap(); - - let buffer = project - .update(cx, |project, cx| { - project.open_buffer(buffer_path.clone(), cx) - }) - .await - .unwrap(); - - context_store.update(cx, |context_store, cx| { - context_store.add_file_from_buffer(&buffer_path, buffer.clone(), false, cx); - }); - - Ok(buffer) +fn convert_image(image_content: acp::ImageContent) -> LanguageModelImage { + LanguageModelImage { + source: image_content.data.into(), + // TODO: make this optional? + size: gpui::Size::new(0.into(), 0.into()), } } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs deleted file mode 100644 index 2139f232e3e99b1affb78928dec70e1aaef2a03a..0000000000000000000000000000000000000000 --- a/crates/agent/src/thread_store.rs +++ /dev/null @@ -1,1287 +0,0 @@ -use crate::{ - context_server_tool::ContextServerTool, - thread::{ - DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId, - }, -}; -use agent_settings::{AgentProfileId, CompletionMode}; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolId, ToolWorkingSet}; -use chrono::{DateTime, Utc}; -use collections::HashMap; -use context_server::ContextServerId; -use fs::{Fs, RemoveOptions}; -use futures::{ - FutureExt as _, StreamExt as _, - channel::{mpsc, oneshot}, - future::{self, BoxFuture, Shared}, -}; -use gpui::{ - App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString, - Subscription, Task, Window, prelude::*, -}; -use indoc::indoc; -use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage}; -use project::context_server_store::{ContextServerStatus, ContextServerStore}; -use project::{Project, ProjectItem, ProjectPath, Worktree}; -use prompt_store::{ - ProjectContext, PromptBuilder, PromptId, PromptStore, PromptsUpdatedEvent, RulesFileContext, - UserRulesContext, WorktreeContext, -}; -use serde::{Deserialize, Serialize}; -use sqlez::{ - bindable::{Bind, Column}, - connection::Connection, - statement::Statement, -}; -use std::{ - cell::{Ref, RefCell}, - path::{Path, PathBuf}, - rc::Rc, - sync::{Arc, LazyLock, Mutex}, -}; -use util::{ResultExt as _, rel_path::RelPath}; - -use zed_env_vars::ZED_STATELESS; - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum DataType { - #[serde(rename = "json")] - Json, - #[serde(rename = "zstd")] - Zstd, -} - -impl Bind for DataType { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - let value = match self { - DataType::Json => "json", - DataType::Zstd => "zstd", - }; - value.bind(statement, start_index) - } -} - -impl Column for DataType { - fn 
column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let (value, next_index) = String::column(statement, start_index)?; - let data_type = match value.as_str() { - "json" => DataType::Json, - "zstd" => DataType::Zstd, - _ => anyhow::bail!("Unknown data type: {}", value), - }; - Ok((data_type, next_index)) - } -} - -static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| { - [ - RelPath::unix(".rules").unwrap(), - RelPath::unix(".cursorrules").unwrap(), - RelPath::unix(".windsurfrules").unwrap(), - RelPath::unix(".clinerules").unwrap(), - RelPath::unix(".github/copilot-instructions.md").unwrap(), - RelPath::unix("CLAUDE.md").unwrap(), - RelPath::unix("AGENT.md").unwrap(), - RelPath::unix("AGENTS.md").unwrap(), - RelPath::unix("GEMINI.md").unwrap(), - ] -}); - -pub fn init(fs: Arc, cx: &mut App) { - ThreadsDatabase::init(fs, cx); -} - -/// A system prompt shared by all threads created by this ThreadStore -#[derive(Clone, Default)] -pub struct SharedProjectContext(Rc>>); - -impl SharedProjectContext { - pub fn borrow(&self) -> Ref<'_, Option> { - self.0.borrow() - } -} - -pub type TextThreadStore = assistant_context::ContextStore; - -pub struct ThreadStore { - project: Entity, - tools: Entity, - prompt_builder: Arc, - prompt_store: Option>, - context_server_tool_ids: HashMap>, - threads: Vec, - project_context: SharedProjectContext, - reload_system_prompt_tx: mpsc::Sender<()>, - _reload_system_prompt_task: Task<()>, - _subscriptions: Vec, -} - -pub struct RulesLoadingError { - pub message: SharedString, -} - -impl EventEmitter for ThreadStore {} - -impl ThreadStore { - pub fn load( - project: Entity, - tools: Entity, - prompt_store: Option>, - prompt_builder: Arc, - cx: &mut App, - ) -> Task>> { - cx.spawn(async move |cx| { - let (thread_store, ready_rx) = cx.update(|cx| { - let mut option_ready_rx = None; - let thread_store = cx.new(|cx| { - let (thread_store, ready_rx) = - Self::new(project, tools, prompt_builder, prompt_store, cx); - option_ready_rx = Some(ready_rx); - thread_store - }); - (thread_store, option_ready_rx.take().unwrap()) - })?; - ready_rx.await?; - Ok(thread_store) - }) - } - - fn new( - project: Entity, - tools: Entity, - prompt_builder: Arc, - prompt_store: Option>, - cx: &mut Context, - ) -> (Self, oneshot::Receiver<()>) { - let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)]; - - if let Some(prompt_store) = prompt_store.as_ref() { - subscriptions.push(cx.subscribe( - prompt_store, - |this, _prompt_store, PromptsUpdatedEvent, _cx| { - this.enqueue_system_prompt_reload(); - }, - )) - } - - // This channel and task prevent concurrent and redundant loading of the system prompt. 
- let (reload_system_prompt_tx, mut reload_system_prompt_rx) = mpsc::channel(1); - let (ready_tx, ready_rx) = oneshot::channel(); - let mut ready_tx = Some(ready_tx); - let reload_system_prompt_task = cx.spawn({ - let prompt_store = prompt_store.clone(); - async move |thread_store, cx| { - loop { - let Some(reload_task) = thread_store - .update(cx, |thread_store, cx| { - thread_store.reload_system_prompt(prompt_store.clone(), cx) - }) - .ok() - else { - return; - }; - reload_task.await; - if let Some(ready_tx) = ready_tx.take() { - ready_tx.send(()).ok(); - } - reload_system_prompt_rx.next().await; - } - } - }); - - let this = Self { - project, - tools, - prompt_builder, - prompt_store, - context_server_tool_ids: HashMap::default(), - threads: Vec::new(), - project_context: SharedProjectContext::default(), - reload_system_prompt_tx, - _reload_system_prompt_task: reload_system_prompt_task, - _subscriptions: subscriptions, - }; - this.register_context_server_handlers(cx); - this.reload(cx).detach_and_log_err(cx); - (this, ready_rx) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn fake(project: Entity, cx: &mut App) -> Self { - Self { - project, - tools: cx.new(|_| ToolWorkingSet::default()), - prompt_builder: Arc::new(PromptBuilder::new(None).unwrap()), - prompt_store: None, - context_server_tool_ids: HashMap::default(), - threads: Vec::new(), - project_context: SharedProjectContext::default(), - reload_system_prompt_tx: mpsc::channel(0).0, - _reload_system_prompt_task: Task::ready(()), - _subscriptions: vec![], - } - } - - fn handle_project_event( - &mut self, - _project: Entity, - event: &project::Event, - _cx: &mut Context, - ) { - match event { - project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { - self.enqueue_system_prompt_reload(); - } - project::Event::WorktreeUpdatedEntries(_, items) => { - if items - .iter() - .any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name)) - { - self.enqueue_system_prompt_reload(); - } - } - _ => {} - } - } - - fn enqueue_system_prompt_reload(&mut self) { - self.reload_system_prompt_tx.try_send(()).ok(); - } - - // Note that this should only be called from `reload_system_prompt_task`. 
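- // A small sketch of the event filtering in `handle_project_event` above: a reload is only
- // enqueued when one of the changed entries matches a well-known rules file name. Plain `&str`
- // paths and a shortened, hypothetical name list stand in here for `RelPath` and the full
- // `RULES_FILE_NAMES` table.
- //
- // const KNOWN_RULES_FILES: [&str; 4] = [".rules", ".cursorrules", "CLAUDE.md", "AGENTS.md"];
- //
- // fn touches_rules_file<'a>(changed_paths: impl IntoIterator<Item = &'a str>) -> bool {
- //     changed_paths
- //         .into_iter()
- //         .any(|path| KNOWN_RULES_FILES.iter().any(|name| path == *name))
- // }
- //
- // fn main() {
- //     // Only changes that touch a rules file should enqueue a system prompt reload.
- //     assert!(touches_rules_file(["src/main.rs", "CLAUDE.md"]));
- //     assert!(!touches_rules_file(["src/main.rs", "README.md"]));
- // }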
- fn reload_system_prompt( - &self, - prompt_store: Option>, - cx: &mut Context, - ) -> Task<()> { - let worktrees = self - .project - .read(cx) - .visible_worktrees(cx) - .collect::>(); - let worktree_tasks = worktrees - .into_iter() - .map(|worktree| { - Self::load_worktree_info_for_system_prompt(worktree, self.project.clone(), cx) - }) - .collect::>(); - let default_user_rules_task = match prompt_store { - None => Task::ready(vec![]), - Some(prompt_store) => prompt_store.read_with(cx, |prompt_store, cx| { - let prompts = prompt_store.default_prompt_metadata(); - let load_tasks = prompts.into_iter().map(|prompt_metadata| { - let contents = prompt_store.load(prompt_metadata.id, cx); - async move { (contents.await, prompt_metadata) } - }); - cx.background_spawn(future::join_all(load_tasks)) - }), - }; - - cx.spawn(async move |this, cx| { - let (worktrees, default_user_rules) = - future::join(future::join_all(worktree_tasks), default_user_rules_task).await; - - let worktrees = worktrees - .into_iter() - .map(|(worktree, rules_error)| { - if let Some(rules_error) = rules_error { - this.update(cx, |_, cx| cx.emit(rules_error)).ok(); - } - worktree - }) - .collect::>(); - - let default_user_rules = default_user_rules - .into_iter() - .flat_map(|(contents, prompt_metadata)| match contents { - Ok(contents) => Some(UserRulesContext { - uuid: match prompt_metadata.id { - PromptId::User { uuid } => uuid, - PromptId::EditWorkflow => return None, - }, - title: prompt_metadata.title.map(|title| title.to_string()), - contents, - }), - Err(err) => { - this.update(cx, |_, cx| { - cx.emit(RulesLoadingError { - message: format!("{err:?}").into(), - }); - }) - .ok(); - None - } - }) - .collect::>(); - - this.update(cx, |this, _cx| { - *this.project_context.0.borrow_mut() = - Some(ProjectContext::new(worktrees, default_user_rules)); - }) - .ok(); - }) - } - - fn load_worktree_info_for_system_prompt( - worktree: Entity, - project: Entity, - cx: &mut App, - ) -> Task<(WorktreeContext, Option)> { - let tree = worktree.read(cx); - let root_name = tree.root_name_str().into(); - let abs_path = tree.abs_path(); - - let mut context = WorktreeContext { - root_name, - abs_path, - rules_file: None, - }; - - let rules_task = Self::load_worktree_rules_file(worktree, project, cx); - let Some(rules_task) = rules_task else { - return Task::ready((context, None)); - }; - - cx.spawn(async move |_| { - let (rules_file, rules_file_error) = match rules_task.await { - Ok(rules_file) => (Some(rules_file), None), - Err(err) => ( - None, - Some(RulesLoadingError { - message: format!("{err}").into(), - }), - ), - }; - context.rules_file = rules_file; - (context, rules_file_error) - }) - } - - fn load_worktree_rules_file( - worktree: Entity, - project: Entity, - cx: &mut App, - ) -> Option>> { - let worktree = worktree.read(cx); - let worktree_id = worktree.id(); - let selected_rules_file = RULES_FILE_NAMES - .into_iter() - .filter_map(|name| { - worktree - .entry_for_path(name) - .filter(|entry| entry.is_file()) - .map(|entry| entry.path.clone()) - }) - .next(); - - // Note that Cline supports `.clinerules` being a directory, but that is not currently - // supported. This doesn't seem to occur often in GitHub repositories. 
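- // A standalone sketch of the first-match-wins selection just above: candidate rules file names
- // are probed in priority order and the first one present in the worktree is used. The in-memory
- // list of existing files is a hypothetical stand-in for the real worktree entry lookup.
- //
- // fn select_rules_file<'a>(candidates: &[&'a str], existing_files: &[&str]) -> Option<&'a str> {
- //     candidates
- //         .iter()
- //         .copied()
- //         .find(|candidate| existing_files.iter().any(|existing| existing == candidate))
- // }
- //
- // fn main() {
- //     let existing_files = ["CLAUDE.md", "AGENTS.md", "src/lib.rs"];
- //     let candidates = [".rules", ".cursorrules", "CLAUDE.md", "AGENTS.md"];
- //     // `.rules` and `.cursorrules` are absent, so `CLAUDE.md` wins.
- //     assert_eq!(
- //         select_rules_file(&candidates, &existing_files),
- //         Some("CLAUDE.md")
- //     );
- // }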
- selected_rules_file.map(|path_in_worktree| { - let project_path = ProjectPath { - worktree_id, - path: path_in_worktree.clone(), - }; - let buffer_task = - project.update(cx, |project, cx| project.open_buffer(project_path, cx)); - let rope_task = cx.spawn(async move |cx| { - buffer_task.await?.read_with(cx, |buffer, cx| { - let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?; - anyhow::Ok((project_entry_id, buffer.as_rope().clone())) - })? - }); - // Build a string from the rope on a background thread. - cx.background_spawn(async move { - let (project_entry_id, rope) = rope_task.await?; - anyhow::Ok(RulesFileContext { - path_in_worktree, - text: rope.to_string().trim().to_string(), - project_entry_id: project_entry_id.to_usize(), - }) - }) - }) - } - - pub fn prompt_store(&self) -> &Option> { - &self.prompt_store - } - - pub fn tools(&self) -> Entity { - self.tools.clone() - } - - /// Returns the number of threads. - pub fn thread_count(&self) -> usize { - self.threads.len() - } - - pub fn reverse_chronological_threads(&self) -> impl Iterator { - // ordering is from "ORDER BY" in `list_threads` - self.threads.iter() - } - - pub fn create_thread(&mut self, cx: &mut Context) -> Entity { - cx.new(|cx| { - Thread::new( - self.project.clone(), - self.tools.clone(), - self.prompt_builder.clone(), - self.project_context.clone(), - cx, - ) - }) - } - - pub fn create_thread_from_serialized( - &mut self, - serialized: SerializedThread, - cx: &mut Context, - ) -> Entity { - cx.new(|cx| { - Thread::deserialize( - ThreadId::new(), - serialized, - self.project.clone(), - self.tools.clone(), - self.prompt_builder.clone(), - self.project_context.clone(), - None, - cx, - ) - }) - } - - pub fn open_thread( - &self, - id: &ThreadId, - window: &mut Window, - cx: &mut Context, - ) -> Task>> { - let id = id.clone(); - let database_future = ThreadsDatabase::global_future(cx); - let this = cx.weak_entity(); - window.spawn(cx, async move |cx| { - let database = database_future.await.map_err(|err| anyhow!(err))?; - let thread = database - .try_find_thread(id.clone()) - .await? 
- .with_context(|| format!("no thread found with ID: {id:?}"))?; - - let thread = this.update_in(cx, |this, window, cx| { - cx.new(|cx| { - Thread::deserialize( - id.clone(), - thread, - this.project.clone(), - this.tools.clone(), - this.prompt_builder.clone(), - this.project_context.clone(), - Some(window), - cx, - ) - }) - })?; - - Ok(thread) - }) - } - - pub fn save_thread(&self, thread: &Entity, cx: &mut Context) -> Task> { - let (metadata, serialized_thread) = - thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx))); - - let database_future = ThreadsDatabase::global_future(cx); - cx.spawn(async move |this, cx| { - let serialized_thread = serialized_thread.await?; - let database = database_future.await.map_err(|err| anyhow!(err))?; - database.save_thread(metadata, serialized_thread).await?; - - this.update(cx, |this, cx| this.reload(cx))?.await - }) - } - - pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context) -> Task> { - let id = id.clone(); - let database_future = ThreadsDatabase::global_future(cx); - cx.spawn(async move |this, cx| { - let database = database_future.await.map_err(|err| anyhow!(err))?; - database.delete_thread(id.clone()).await?; - - this.update(cx, |this, cx| { - this.threads.retain(|thread| thread.id != id); - cx.notify(); - }) - }) - } - - pub fn reload(&self, cx: &mut Context) -> Task> { - let database_future = ThreadsDatabase::global_future(cx); - cx.spawn(async move |this, cx| { - let threads = database_future - .await - .map_err(|err| anyhow!(err))? - .list_threads() - .await?; - - this.update(cx, |this, cx| { - this.threads = threads; - cx.notify(); - }) - }) - } - - fn register_context_server_handlers(&self, cx: &mut Context) { - let context_server_store = self.project.read(cx).context_server_store(); - cx.subscribe(&context_server_store, Self::handle_context_server_event) - .detach(); - - // Check for any servers that were already running before the handler was registered - for server in context_server_store.read(cx).running_servers() { - self.load_context_server_tools(server.id(), context_server_store.clone(), cx); - } - } - - fn handle_context_server_event( - &mut self, - context_server_store: Entity, - event: &project::context_server_store::Event, - cx: &mut Context, - ) { - let tool_working_set = self.tools.clone(); - match event { - project::context_server_store::Event::ServerStatusChanged { server_id, status } => { - match status { - ContextServerStatus::Starting => {} - ContextServerStatus::Running => { - self.load_context_server_tools(server_id.clone(), context_server_store, cx); - } - ContextServerStatus::Stopped | ContextServerStatus::Error(_) => { - if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) { - tool_working_set.update(cx, |tool_working_set, cx| { - tool_working_set.remove(&tool_ids, cx); - }); - } - } - } - } - } - } - - fn load_context_server_tools( - &self, - server_id: ContextServerId, - context_server_store: Entity, - cx: &mut Context, - ) { - let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else { - return; - }; - let tool_working_set = self.tools.clone(); - cx.spawn(async move |this, cx| { - let Some(protocol) = server.client() else { - return; - }; - - if protocol.capable(context_server::protocol::ServerCapability::Tools) - && let Some(response) = protocol - .request::(()) - .await - .log_err() - { - let tool_ids = tool_working_set - .update(cx, |tool_working_set, cx| { - tool_working_set.extend( - response.tools.into_iter().map(|tool| { - 
Arc::new(ContextServerTool::new( - context_server_store.clone(), - server.id(), - tool, - )) as Arc - }), - cx, - ) - }) - .log_err(); - - if let Some(tool_ids) = tool_ids { - this.update(cx, |this, _| { - this.context_server_tool_ids.insert(server_id, tool_ids); - }) - .log_err(); - } - } - }) - .detach(); - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SerializedThreadMetadata { - pub id: ThreadId, - pub summary: SharedString, - pub updated_at: DateTime, -} - -#[derive(Serialize, Deserialize, Debug, PartialEq)] -pub struct SerializedThread { - pub version: String, - pub summary: SharedString, - pub updated_at: DateTime, - pub messages: Vec, - #[serde(default)] - pub initial_project_snapshot: Option>, - #[serde(default)] - pub cumulative_token_usage: TokenUsage, - #[serde(default)] - pub request_token_usage: Vec, - #[serde(default)] - pub detailed_summary_state: DetailedSummaryState, - #[serde(default)] - pub exceeded_window_error: Option, - #[serde(default)] - pub model: Option, - #[serde(default)] - pub completion_mode: Option, - #[serde(default)] - pub tool_use_limit_reached: bool, - #[serde(default)] - pub profile: Option, -} - -#[derive(Serialize, Deserialize, Debug, PartialEq)] -pub struct SerializedLanguageModel { - pub provider: String, - pub model: String, -} - -impl SerializedThread { - pub const VERSION: &'static str = "0.2.0"; - - pub fn from_json(json: &[u8]) -> Result { - let saved_thread_json = serde_json::from_slice::(json)?; - match saved_thread_json.get("version") { - Some(serde_json::Value::String(version)) => match version.as_str() { - SerializedThreadV0_1_0::VERSION => { - let saved_thread = - serde_json::from_value::(saved_thread_json)?; - Ok(saved_thread.upgrade()) - } - SerializedThread::VERSION => Ok(serde_json::from_value::( - saved_thread_json, - )?), - _ => anyhow::bail!("unrecognized serialized thread version: {version:?}"), - }, - None => { - let saved_thread = - serde_json::from_value::(saved_thread_json)?; - Ok(saved_thread.upgrade()) - } - version => anyhow::bail!("unrecognized serialized thread version: {version:?}"), - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct SerializedThreadV0_1_0( - // The structure did not change, so we are reusing the latest SerializedThread. 
- // When making the next version, make sure this points to SerializedThreadV0_2_0 - SerializedThread, -); - -impl SerializedThreadV0_1_0 { - pub const VERSION: &'static str = "0.1.0"; - - pub fn upgrade(self) -> SerializedThread { - debug_assert_eq!(SerializedThread::VERSION, "0.2.0"); - - let mut messages: Vec = Vec::with_capacity(self.0.messages.len()); - - for message in self.0.messages { - if message.role == Role::User - && !message.tool_results.is_empty() - && let Some(last_message) = messages.last_mut() - { - debug_assert!(last_message.role == Role::Assistant); - - last_message.tool_results = message.tool_results; - continue; - } - - messages.push(message); - } - - SerializedThread { - messages, - version: SerializedThread::VERSION.to_string(), - ..self.0 - } - } -} - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -pub struct SerializedMessage { - pub id: MessageId, - pub role: Role, - #[serde(default)] - pub segments: Vec, - #[serde(default)] - pub tool_uses: Vec, - #[serde(default)] - pub tool_results: Vec, - #[serde(default)] - pub context: String, - #[serde(default)] - pub creases: Vec, - #[serde(default)] - pub is_hidden: bool, -} - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -#[serde(tag = "type")] -pub enum SerializedMessageSegment { - #[serde(rename = "text")] - Text { - text: String, - }, - #[serde(rename = "thinking")] - Thinking { - text: String, - #[serde(skip_serializing_if = "Option::is_none")] - signature: Option, - }, - RedactedThinking { - data: String, - }, -} - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -pub struct SerializedToolUse { - pub id: LanguageModelToolUseId, - pub name: SharedString, - pub input: serde_json::Value, -} - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -pub struct SerializedToolResult { - pub tool_use_id: LanguageModelToolUseId, - pub is_error: bool, - pub content: LanguageModelToolResultContent, - pub output: Option, -} - -#[derive(Serialize, Deserialize)] -struct LegacySerializedThread { - pub summary: SharedString, - pub updated_at: DateTime, - pub messages: Vec, - #[serde(default)] - pub initial_project_snapshot: Option>, -} - -impl LegacySerializedThread { - pub fn upgrade(self) -> SerializedThread { - SerializedThread { - version: SerializedThread::VERSION.to_string(), - summary: self.summary, - updated_at: self.updated_at, - messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(), - initial_project_snapshot: self.initial_project_snapshot, - cumulative_token_usage: TokenUsage::default(), - request_token_usage: Vec::new(), - detailed_summary_state: DetailedSummaryState::default(), - exceeded_window_error: None, - model: None, - completion_mode: None, - tool_use_limit_reached: false, - profile: None, - } - } -} - -#[derive(Debug, Serialize, Deserialize)] -struct LegacySerializedMessage { - pub id: MessageId, - pub role: Role, - pub text: String, - #[serde(default)] - pub tool_uses: Vec, - #[serde(default)] - pub tool_results: Vec, -} - -impl LegacySerializedMessage { - fn upgrade(self) -> SerializedMessage { - SerializedMessage { - id: self.id, - role: self.role, - segments: vec![SerializedMessageSegment::Text { text: self.text }], - tool_uses: self.tool_uses, - tool_results: self.tool_results, - context: String::new(), - creases: Vec::new(), - is_hidden: false, - } - } -} - -#[derive(Debug, Serialize, Deserialize, PartialEq)] -pub struct SerializedCrease { - pub start: usize, - pub end: usize, - pub icon_path: SharedString, - pub label: SharedString, -} - -struct GlobalThreadsDatabase( - 
Shared, Arc>>>, -); - -impl Global for GlobalThreadsDatabase {} - -pub(crate) struct ThreadsDatabase { - executor: BackgroundExecutor, - connection: Arc>, -} - -impl ThreadsDatabase { - fn connection(&self) -> Arc> { - self.connection.clone() - } - - const COMPRESSION_LEVEL: i32 = 3; -} - -impl Bind for ThreadId { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - self.to_string().bind(statement, start_index) - } -} - -impl Column for ThreadId { - fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let (id_str, next_index) = String::column(statement, start_index)?; - Ok((ThreadId::from(id_str.as_str()), next_index)) - } -} - -impl ThreadsDatabase { - fn global_future( - cx: &mut App, - ) -> Shared, Arc>>> { - GlobalThreadsDatabase::global(cx).0.clone() - } - - fn init(fs: Arc, cx: &mut App) { - let executor = cx.background_executor().clone(); - let database_future = executor - .spawn({ - let executor = executor.clone(); - let threads_dir = paths::data_dir().join("threads"); - async move { ThreadsDatabase::new(fs, threads_dir, executor).await } - }) - .then(|result| future::ready(result.map(Arc::new).map_err(Arc::new))) - .boxed() - .shared(); - - cx.set_global(GlobalThreadsDatabase(database_future)); - } - - pub async fn new( - fs: Arc, - threads_dir: PathBuf, - executor: BackgroundExecutor, - ) -> Result { - fs.create_dir(&threads_dir).await?; - - let sqlite_path = threads_dir.join("threads.db"); - let mdb_path = threads_dir.join("threads-db.1.mdb"); - - let needs_migration_from_heed = fs.is_file(&mdb_path).await; - - let connection = if *ZED_STATELESS { - Connection::open_memory(Some("THREAD_FALLBACK_DB")) - } else if cfg!(any(feature = "test-support", test)) { - // rust stores the name of the test on the current thread. - // We use this to automatically create a database that will - // be shared within the test (for the test_retrieve_old_thread) - // but not with concurrent tests. - let thread = std::thread::current(); - let test_name = thread.name(); - Connection::open_memory(Some(&format!( - "THREAD_FALLBACK_{}", - test_name.unwrap_or_default() - ))) - } else { - Connection::open_file(&sqlite_path.to_string_lossy()) - }; - - connection.exec(indoc! 
{" - CREATE TABLE IF NOT EXISTS threads ( - id TEXT PRIMARY KEY, - summary TEXT NOT NULL, - updated_at TEXT NOT NULL, - data_type TEXT NOT NULL, - data BLOB NOT NULL - ) - "})?() - .map_err(|e| anyhow!("Failed to create threads table: {}", e))?; - - let db = Self { - executor: executor.clone(), - connection: Arc::new(Mutex::new(connection)), - }; - - if needs_migration_from_heed { - let db_connection = db.connection(); - let executor_clone = executor.clone(); - executor - .spawn(async move { - log::info!("Starting threads.db migration"); - Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?; - fs.remove_dir( - &mdb_path, - RemoveOptions { - recursive: true, - ignore_if_not_exists: true, - }, - ) - .await?; - log::info!("threads.db migrated to sqlite"); - Ok::<(), anyhow::Error>(()) - }) - .detach(); - } - - Ok(db) - } - - // Remove this migration after 2025-09-01 - fn migrate_from_heed( - mdb_path: &Path, - connection: Arc>, - _executor: BackgroundExecutor, - ) -> Result<()> { - use heed::types::SerdeBincode; - struct SerializedThreadHeed(SerializedThread); - - impl heed::BytesEncode<'_> for SerializedThreadHeed { - type EItem = SerializedThreadHeed; - - fn bytes_encode( - item: &Self::EItem, - ) -> Result, heed::BoxedError> { - serde_json::to_vec(&item.0) - .map(std::borrow::Cow::Owned) - .map_err(Into::into) - } - } - - impl<'a> heed::BytesDecode<'a> for SerializedThreadHeed { - type DItem = SerializedThreadHeed; - - fn bytes_decode(bytes: &'a [u8]) -> Result { - SerializedThread::from_json(bytes) - .map(SerializedThreadHeed) - .map_err(Into::into) - } - } - - const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024; - - let env = unsafe { - heed::EnvOpenOptions::new() - .map_size(ONE_GB_IN_BYTES) - .max_dbs(1) - .open(mdb_path)? - }; - - let txn = env.write_txn()?; - let threads: heed::Database, SerializedThreadHeed> = env - .open_database(&txn, Some("threads"))? - .ok_or_else(|| anyhow!("threads database not found"))?; - - for result in threads.iter(&txn)? { - let (thread_id, thread_heed) = result?; - Self::save_thread_sync(&connection, thread_id, thread_heed.0)?; - } - - Ok(()) - } - - fn save_thread_sync( - connection: &Arc>, - id: ThreadId, - thread: SerializedThread, - ) -> Result<()> { - let json_data = serde_json::to_string(&thread)?; - let summary = thread.summary.to_string(); - let updated_at = thread.updated_at.to_rfc3339(); - - let connection = connection.lock().unwrap(); - - let compressed = zstd::encode_all(json_data.as_bytes(), Self::COMPRESSION_LEVEL)?; - let data_type = DataType::Zstd; - let data = compressed; - - let mut insert = connection.exec_bound::<(ThreadId, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?) - "})?; - - insert((id, summary, updated_at, data_type, data))?; - - Ok(()) - } - - pub fn list_threads(&self) -> Task>> { - let connection = self.connection.clone(); - - self.executor.spawn(async move { - let connection = connection.lock().unwrap(); - let mut select = - connection.select_bound::<(), (ThreadId, String, String)>(indoc! 
{" - SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC - "})?; - - let rows = select(())?; - let mut threads = Vec::new(); - - for (id, summary, updated_at) in rows { - threads.push(SerializedThreadMetadata { - id, - summary: summary.into(), - updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), - }); - } - - Ok(threads) - }) - } - - pub fn try_find_thread(&self, id: ThreadId) -> Task>> { - let connection = self.connection.clone(); - - self.executor.spawn(async move { - let connection = connection.lock().unwrap(); - let mut select = connection.select_bound::)>(indoc! {" - SELECT data_type, data FROM threads WHERE id = ? LIMIT 1 - "})?; - - let rows = select(id)?; - if let Some((data_type, data)) = rows.into_iter().next() { - let json_data = match data_type { - DataType::Zstd => { - let decompressed = zstd::decode_all(&data[..])?; - String::from_utf8(decompressed)? - } - DataType::Json => String::from_utf8(data)?, - }; - - let thread = SerializedThread::from_json(json_data.as_bytes())?; - Ok(Some(thread)) - } else { - Ok(None) - } - }) - } - - pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task> { - let connection = self.connection.clone(); - - self.executor - .spawn(async move { Self::save_thread_sync(&connection, id, thread) }) - } - - pub fn delete_thread(&self, id: ThreadId) -> Task> { - let connection = self.connection.clone(); - - self.executor.spawn(async move { - let connection = connection.lock().unwrap(); - - let mut delete = connection.exec_bound::(indoc! {" - DELETE FROM threads WHERE id = ? - "})?; - - delete(id)?; - - Ok(()) - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::thread::{DetailedSummaryState, MessageId}; - use chrono::Utc; - use language_model::{Role, TokenUsage}; - use pretty_assertions::assert_eq; - - #[test] - fn test_legacy_serialized_thread_upgrade() { - let updated_at = Utc::now(); - let legacy_thread = LegacySerializedThread { - summary: "Test conversation".into(), - updated_at, - messages: vec![LegacySerializedMessage { - id: MessageId(1), - role: Role::User, - text: "Hello, world!".to_string(), - tool_uses: vec![], - tool_results: vec![], - }], - initial_project_snapshot: None, - }; - - let upgraded = legacy_thread.upgrade(); - - assert_eq!( - upgraded, - SerializedThread { - summary: "Test conversation".into(), - updated_at, - messages: vec![SerializedMessage { - id: MessageId(1), - role: Role::User, - segments: vec![SerializedMessageSegment::Text { - text: "Hello, world!".to_string() - }], - tool_uses: vec![], - tool_results: vec![], - context: "".to_string(), - creases: vec![], - is_hidden: false - }], - version: SerializedThread::VERSION.to_string(), - initial_project_snapshot: None, - cumulative_token_usage: TokenUsage::default(), - request_token_usage: vec![], - detailed_summary_state: DetailedSummaryState::default(), - exceeded_window_error: None, - model: None, - completion_mode: None, - tool_use_limit_reached: false, - profile: None - } - ) - } - - #[test] - fn test_serialized_threadv0_1_0_upgrade() { - let updated_at = Utc::now(); - let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread { - summary: "Test conversation".into(), - updated_at, - messages: vec![ - SerializedMessage { - id: MessageId(1), - role: Role::User, - segments: vec![SerializedMessageSegment::Text { - text: "Use tool_1".to_string(), - }], - tool_uses: vec![], - tool_results: vec![], - context: "".to_string(), - creases: vec![], - is_hidden: false, - }, - SerializedMessage { - id: 
MessageId(2), - role: Role::Assistant, - segments: vec![SerializedMessageSegment::Text { - text: "I want to use a tool".to_string(), - }], - tool_uses: vec![SerializedToolUse { - id: "abc".into(), - name: "tool_1".into(), - input: serde_json::Value::Null, - }], - tool_results: vec![], - context: "".to_string(), - creases: vec![], - is_hidden: false, - }, - SerializedMessage { - id: MessageId(1), - role: Role::User, - segments: vec![SerializedMessageSegment::Text { - text: "Here is the tool result".to_string(), - }], - tool_uses: vec![], - tool_results: vec![SerializedToolResult { - tool_use_id: "abc".into(), - is_error: false, - content: LanguageModelToolResultContent::Text("abcdef".into()), - output: Some(serde_json::Value::Null), - }], - context: "".to_string(), - creases: vec![], - is_hidden: false, - }, - ], - version: SerializedThreadV0_1_0::VERSION.to_string(), - initial_project_snapshot: None, - cumulative_token_usage: TokenUsage::default(), - request_token_usage: vec![], - detailed_summary_state: DetailedSummaryState::default(), - exceeded_window_error: None, - model: None, - completion_mode: None, - tool_use_limit_reached: false, - profile: None, - }); - let upgraded = thread_v0_1_0.upgrade(); - - assert_eq!( - upgraded, - SerializedThread { - summary: "Test conversation".into(), - updated_at, - messages: vec![ - SerializedMessage { - id: MessageId(1), - role: Role::User, - segments: vec![SerializedMessageSegment::Text { - text: "Use tool_1".to_string() - }], - tool_uses: vec![], - tool_results: vec![], - context: "".to_string(), - creases: vec![], - is_hidden: false - }, - SerializedMessage { - id: MessageId(2), - role: Role::Assistant, - segments: vec![SerializedMessageSegment::Text { - text: "I want to use a tool".to_string(), - }], - tool_uses: vec![SerializedToolUse { - id: "abc".into(), - name: "tool_1".into(), - input: serde_json::Value::Null, - }], - tool_results: vec![SerializedToolResult { - tool_use_id: "abc".into(), - is_error: false, - content: LanguageModelToolResultContent::Text("abcdef".into()), - output: Some(serde_json::Value::Null), - }], - context: "".to_string(), - creases: vec![], - is_hidden: false, - }, - ], - version: SerializedThread::VERSION.to_string(), - initial_project_snapshot: None, - cumulative_token_usage: TokenUsage::default(), - request_token_usage: vec![], - detailed_summary_state: DetailedSummaryState::default(), - exceeded_window_error: None, - model: None, - completion_mode: None, - tool_use_limit_reached: false, - profile: None - } - ) - } -} diff --git a/crates/assistant_tool/src/tool_schema.rs b/crates/agent/src/tool_schema.rs similarity index 85% rename from crates/assistant_tool/src/tool_schema.rs rename to crates/agent/src/tool_schema.rs index 192f7c8a2bb565ece01a3472a9e46dad316377f4..4b0de3e5c63fb0c5ccafbb89a22dad8a33072b35 100644 --- a/crates/assistant_tool/src/tool_schema.rs +++ b/crates/agent/src/tool_schema.rs @@ -1,7 +1,48 @@ use anyhow::Result; +use language_model::LanguageModelToolSchemaFormat; +use schemars::{ + JsonSchema, Schema, + generate::SchemaSettings, + transform::{Transform, transform_subschemas}, +}; use serde_json::Value; -use crate::LanguageModelToolSchemaFormat; +pub(crate) fn root_schema_for(format: LanguageModelToolSchemaFormat) -> Schema { + let mut generator = match format { + LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(), + LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3() + .with(|settings| { + settings.meta_schema = None; + 
settings.inline_subschemas = true; + }) + .with_transform(ToJsonSchemaSubsetTransform) + .into_generator(), + }; + generator.root_schema_for::() +} + +#[derive(Debug, Clone)] +struct ToJsonSchemaSubsetTransform; + +impl Transform for ToJsonSchemaSubsetTransform { + fn transform(&mut self, schema: &mut Schema) { + // Ensure that the type field is not an array, this happens when we use + // Option, the type will be [T, "null"]. + if let Some(type_field) = schema.get_mut("type") + && let Some(types) = type_field.as_array() + && let Some(first_type) = types.first() + { + *type_field = first_type.clone(); + } + + // oneOf is not supported, use anyOf instead + if let Some(one_of) = schema.remove("oneOf") { + schema.insert("anyOf".to_string(), one_of); + } + + transform_subschemas(self, schema); + } +} /// Tries to adapt a JSON schema representation to be compatible with the specified format. /// diff --git a/crates/agent/src/tool_use.rs b/crates/agent/src/tool_use.rs deleted file mode 100644 index 962dca591fb66f4679d44b8e8a4733c879bc2e0c..0000000000000000000000000000000000000000 --- a/crates/agent/src/tool_use.rs +++ /dev/null @@ -1,575 +0,0 @@ -use crate::{ - thread::{MessageId, PromptId, ThreadId}, - thread_store::SerializedMessage, -}; -use agent_settings::CompletionMode; -use anyhow::Result; -use assistant_tool::{ - AnyToolCard, Tool, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet, -}; -use collections::HashMap; -use futures::{FutureExt as _, future::Shared}; -use gpui::{App, Entity, SharedString, Task, Window}; -use icons::IconName; -use language_model::{ - ConfiguredModel, LanguageModel, LanguageModelExt, LanguageModelRequest, - LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolUse, - LanguageModelToolUseId, Role, -}; -use project::Project; -use std::sync::Arc; -use util::truncate_lines_to_byte_limit; - -#[derive(Debug)] -pub struct ToolUse { - pub id: LanguageModelToolUseId, - pub name: SharedString, - pub ui_text: SharedString, - pub status: ToolUseStatus, - pub input: serde_json::Value, - pub icon: icons::IconName, - pub needs_confirmation: bool, -} - -pub struct ToolUseState { - tools: Entity, - tool_uses_by_assistant_message: HashMap>, - tool_results: HashMap, - pending_tool_uses_by_id: HashMap, - tool_result_cards: HashMap, - tool_use_metadata_by_id: HashMap, -} - -impl ToolUseState { - pub fn new(tools: Entity) -> Self { - Self { - tools, - tool_uses_by_assistant_message: HashMap::default(), - tool_results: HashMap::default(), - pending_tool_uses_by_id: HashMap::default(), - tool_result_cards: HashMap::default(), - tool_use_metadata_by_id: HashMap::default(), - } - } - - /// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s. - /// - /// Accepts a function to filter the tools that should be used to populate the state. 
- /// - /// If `window` is `None` (e.g., when in headless mode or when running evals), - /// tool cards won't be deserialized - pub fn from_serialized_messages( - tools: Entity, - messages: &[SerializedMessage], - project: Entity, - window: Option<&mut Window>, // None in headless mode - cx: &mut App, - ) -> Self { - let mut this = Self::new(tools); - let mut tool_names_by_id = HashMap::default(); - let mut window = window; - - for message in messages { - match message.role { - Role::Assistant => { - if !message.tool_uses.is_empty() { - let tool_uses = message - .tool_uses - .iter() - .map(|tool_use| LanguageModelToolUse { - id: tool_use.id.clone(), - name: tool_use.name.clone().into(), - raw_input: tool_use.input.to_string(), - input: tool_use.input.clone(), - is_input_complete: true, - }) - .collect::>(); - - tool_names_by_id.extend( - tool_uses - .iter() - .map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())), - ); - - this.tool_uses_by_assistant_message - .insert(message.id, tool_uses); - - for tool_result in &message.tool_results { - let tool_use_id = tool_result.tool_use_id.clone(); - let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else { - log::warn!("no tool name found for tool use: {tool_use_id:?}"); - continue; - }; - - this.tool_results.insert( - tool_use_id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use_id.clone(), - tool_name: tool_use.clone(), - is_error: tool_result.is_error, - content: tool_result.content.clone(), - output: tool_result.output.clone(), - }, - ); - - if let Some(window) = &mut window - && let Some(tool) = this.tools.read(cx).tool(tool_use, cx) - && let Some(output) = tool_result.output.clone() - && let Some(card) = - tool.deserialize_card(output, project.clone(), window, cx) - { - this.tool_result_cards.insert(tool_use_id, card); - } - } - } - } - Role::System | Role::User => {} - } - } - - this - } - - pub fn cancel_pending(&mut self) -> Vec { - let mut canceled_tool_uses = Vec::new(); - self.pending_tool_uses_by_id - .retain(|tool_use_id, tool_use| { - if matches!(tool_use.status, PendingToolUseStatus::Error { .. }) { - return true; - } - - let content = "Tool canceled by user".into(); - self.tool_results.insert( - tool_use_id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use_id.clone(), - tool_name: tool_use.name.clone(), - content, - output: None, - is_error: true, - }, - ); - canceled_tool_uses.push(tool_use.clone()); - false - }); - canceled_tool_uses - } - - pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> { - self.pending_tool_uses_by_id.values().collect() - } - - pub fn tool_uses_for_message( - &self, - id: MessageId, - project: &Entity, - cx: &App, - ) -> Vec { - let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else { - return Vec::new(); - }; - - let mut tool_uses = Vec::new(); - - for tool_use in tool_uses_for_message.iter() { - let tool_result = self.tool_results.get(&tool_use.id); - - let status = (|| { - if let Some(tool_result) = tool_result { - let content = tool_result - .content - .to_str() - .map(|str| str.to_owned().into()) - .unwrap_or_default(); - - return if tool_result.is_error { - ToolUseStatus::Error(content) - } else { - ToolUseStatus::Finished(content) - }; - } - - if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) { - match pending_tool_use.status { - PendingToolUseStatus::Idle => ToolUseStatus::Pending, - PendingToolUseStatus::NeedsConfirmation { .. 
} => { - ToolUseStatus::NeedsConfirmation - } - PendingToolUseStatus::Running { .. } => ToolUseStatus::Running, - PendingToolUseStatus::Error(ref err) => { - ToolUseStatus::Error(err.clone().into()) - } - PendingToolUseStatus::InputStillStreaming => { - ToolUseStatus::InputStillStreaming - } - } - } else { - ToolUseStatus::Pending - } - })(); - - let (icon, needs_confirmation) = - if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) { - ( - tool.icon(), - tool.needs_confirmation(&tool_use.input, project, cx), - ) - } else { - (IconName::Cog, false) - }; - - tool_uses.push(ToolUse { - id: tool_use.id.clone(), - name: tool_use.name.clone().into(), - ui_text: self.tool_ui_label( - &tool_use.name, - &tool_use.input, - tool_use.is_input_complete, - cx, - ), - input: tool_use.input.clone(), - status, - icon, - needs_confirmation, - }) - } - - tool_uses - } - - pub fn tool_ui_label( - &self, - tool_name: &str, - input: &serde_json::Value, - is_input_complete: bool, - cx: &App, - ) -> SharedString { - if let Some(tool) = self.tools.read(cx).tool(tool_name, cx) { - if is_input_complete { - tool.ui_text(input).into() - } else { - tool.still_streaming_ui_text(input).into() - } - } else { - format!("Unknown tool {tool_name:?}").into() - } - } - - pub fn tool_results_for_message( - &self, - assistant_message_id: MessageId, - ) -> Vec<&LanguageModelToolResult> { - let Some(tool_uses) = self - .tool_uses_by_assistant_message - .get(&assistant_message_id) - else { - return Vec::new(); - }; - - tool_uses - .iter() - .filter_map(|tool_use| self.tool_results.get(&tool_use.id)) - .collect() - } - - pub fn message_has_tool_results(&self, assistant_message_id: MessageId) -> bool { - self.tool_uses_by_assistant_message - .get(&assistant_message_id) - .is_some_and(|results| !results.is_empty()) - } - - pub fn tool_result( - &self, - tool_use_id: &LanguageModelToolUseId, - ) -> Option<&LanguageModelToolResult> { - self.tool_results.get(tool_use_id) - } - - pub fn tool_result_card(&self, tool_use_id: &LanguageModelToolUseId) -> Option<&AnyToolCard> { - self.tool_result_cards.get(tool_use_id) - } - - pub fn insert_tool_result_card( - &mut self, - tool_use_id: LanguageModelToolUseId, - card: AnyToolCard, - ) { - self.tool_result_cards.insert(tool_use_id, card); - } - - pub fn request_tool_use( - &mut self, - assistant_message_id: MessageId, - tool_use: LanguageModelToolUse, - metadata: ToolUseMetadata, - cx: &App, - ) -> Arc { - let tool_uses = self - .tool_uses_by_assistant_message - .entry(assistant_message_id) - .or_default(); - - let mut existing_tool_use_found = false; - - for existing_tool_use in tool_uses.iter_mut() { - if existing_tool_use.id == tool_use.id { - *existing_tool_use = tool_use.clone(); - existing_tool_use_found = true; - } - } - - if !existing_tool_use_found { - tool_uses.push(tool_use.clone()); - } - - let status = if tool_use.is_input_complete { - self.tool_use_metadata_by_id - .insert(tool_use.id.clone(), metadata); - - PendingToolUseStatus::Idle - } else { - PendingToolUseStatus::InputStillStreaming - }; - - let ui_text: Arc = self - .tool_ui_label( - &tool_use.name, - &tool_use.input, - tool_use.is_input_complete, - cx, - ) - .into(); - - let may_perform_edits = self - .tools - .read(cx) - .tool(&tool_use.name, cx) - .is_some_and(|tool| tool.may_perform_edits()); - - self.pending_tool_uses_by_id.insert( - tool_use.id.clone(), - PendingToolUse { - assistant_message_id, - id: tool_use.id, - name: tool_use.name.clone(), - ui_text: ui_text.clone(), - input: tool_use.input, - 
may_perform_edits, - status, - }, - ); - - ui_text - } - - pub fn run_pending_tool( - &mut self, - tool_use_id: LanguageModelToolUseId, - ui_text: SharedString, - task: Task<()>, - ) { - if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) { - tool_use.ui_text = ui_text.into(); - tool_use.status = PendingToolUseStatus::Running { - _task: task.shared(), - }; - } - } - - pub fn confirm_tool_use( - &mut self, - tool_use_id: LanguageModelToolUseId, - ui_text: impl Into>, - input: serde_json::Value, - request: Arc, - tool: Arc, - ) { - if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) { - let ui_text = ui_text.into(); - tool_use.ui_text = ui_text.clone(); - let confirmation = Confirmation { - tool_use_id, - input, - request, - tool, - ui_text, - }; - tool_use.status = PendingToolUseStatus::NeedsConfirmation(Arc::new(confirmation)); - } - } - - pub fn insert_tool_output( - &mut self, - tool_use_id: LanguageModelToolUseId, - tool_name: Arc, - output: Result, - configured_model: Option<&ConfiguredModel>, - completion_mode: CompletionMode, - ) -> Option { - let metadata = self.tool_use_metadata_by_id.remove(&tool_use_id); - - telemetry::event!( - "Agent Tool Finished", - model = metadata - .as_ref() - .map(|metadata| metadata.model.telemetry_id()), - model_provider = metadata - .as_ref() - .map(|metadata| metadata.model.provider_id().to_string()), - thread_id = metadata.as_ref().map(|metadata| metadata.thread_id.clone()), - prompt_id = metadata.as_ref().map(|metadata| metadata.prompt_id.clone()), - tool_name, - success = output.is_ok() - ); - - match output { - Ok(output) => { - let tool_result = output.content; - const BYTES_PER_TOKEN_ESTIMATE: usize = 3; - - let old_use = self.pending_tool_uses_by_id.remove(&tool_use_id); - - // Protect from overly large output - let tool_output_limit = configured_model - .map(|model| { - model.model.max_token_count_for_mode(completion_mode.into()) as usize - * BYTES_PER_TOKEN_ESTIMATE - }) - .unwrap_or(usize::MAX); - - let content = match tool_result { - ToolResultContent::Text(text) => { - let text = if text.len() < tool_output_limit { - text - } else { - let truncated = truncate_lines_to_byte_limit(&text, tool_output_limit); - format!( - "Tool result too long. 
The first {} bytes:\n\n{}", - truncated.len(), - truncated - ) - }; - LanguageModelToolResultContent::Text(text.into()) - } - ToolResultContent::Image(language_model_image) => { - if language_model_image.estimate_tokens() < tool_output_limit { - LanguageModelToolResultContent::Image(language_model_image) - } else { - self.tool_results.insert( - tool_use_id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use_id.clone(), - tool_name, - content: "Tool responded with an image that would exceeded the remaining tokens".into(), - is_error: true, - output: None, - }, - ); - - return old_use; - } - } - }; - - self.tool_results.insert( - tool_use_id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use_id.clone(), - tool_name, - content, - is_error: false, - output: output.output, - }, - ); - - old_use - } - Err(err) => { - self.tool_results.insert( - tool_use_id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use_id.clone(), - tool_name, - content: LanguageModelToolResultContent::Text(err.to_string().into()), - is_error: true, - output: None, - }, - ); - - if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) { - tool_use.status = PendingToolUseStatus::Error(err.to_string().into()); - } - - self.pending_tool_uses_by_id.get(&tool_use_id).cloned() - } - } - } - - pub fn has_tool_results(&self, assistant_message_id: MessageId) -> bool { - self.tool_uses_by_assistant_message - .contains_key(&assistant_message_id) - } - - pub fn tool_results( - &self, - assistant_message_id: MessageId, - ) -> impl Iterator)> { - self.tool_uses_by_assistant_message - .get(&assistant_message_id) - .into_iter() - .flatten() - .map(|tool_use| (tool_use, self.tool_results.get(&tool_use.id))) - } -} - -#[derive(Debug, Clone)] -pub struct PendingToolUse { - pub id: LanguageModelToolUseId, - /// The ID of the Assistant message in which the tool use was requested. - #[allow(unused)] - pub assistant_message_id: MessageId, - pub name: Arc, - pub ui_text: Arc, - pub input: serde_json::Value, - pub status: PendingToolUseStatus, - pub may_perform_edits: bool, -} - -#[derive(Debug, Clone)] -pub struct Confirmation { - pub tool_use_id: LanguageModelToolUseId, - pub input: serde_json::Value, - pub ui_text: Arc, - pub request: Arc, - pub tool: Arc, -} - -#[derive(Debug, Clone)] -pub enum PendingToolUseStatus { - InputStillStreaming, - Idle, - NeedsConfirmation(Arc), - Running { _task: Shared> }, - Error(#[allow(unused)] Arc), -} - -impl PendingToolUseStatus { - pub fn is_idle(&self) -> bool { - matches!(self, PendingToolUseStatus::Idle) - } - - pub fn is_error(&self) -> bool { - matches!(self, PendingToolUseStatus::Error(_)) - } - - pub fn needs_confirmation(&self) -> bool { - matches!(self, PendingToolUseStatus::NeedsConfirmation { .. 
}) - } -} - -#[derive(Clone)] -pub struct ToolUseMetadata { - pub model: Arc, - pub thread_id: ThreadId, - pub prompt_id: PromptId, -} diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs new file mode 100644 index 0000000000000000000000000000000000000000..1d3c0d557716ec3a52f910971547df4ee764cab0 --- /dev/null +++ b/crates/agent/src/tools.rs @@ -0,0 +1,94 @@ +mod context_server_registry; +mod copy_path_tool; +mod create_directory_tool; +mod delete_path_tool; +mod diagnostics_tool; +mod edit_file_tool; +mod fetch_tool; +mod find_path_tool; +mod grep_tool; +mod list_directory_tool; +mod move_path_tool; +mod now_tool; +mod open_tool; +mod read_file_tool; +mod terminal_tool; +mod thinking_tool; +mod web_search_tool; + +use crate::AgentTool; +use language_model::{LanguageModelRequestTool, LanguageModelToolSchemaFormat}; + +pub use context_server_registry::*; +pub use copy_path_tool::*; +pub use create_directory_tool::*; +pub use delete_path_tool::*; +pub use diagnostics_tool::*; +pub use edit_file_tool::*; +pub use fetch_tool::*; +pub use find_path_tool::*; +pub use grep_tool::*; +pub use list_directory_tool::*; +pub use move_path_tool::*; +pub use now_tool::*; +pub use open_tool::*; +pub use read_file_tool::*; +pub use terminal_tool::*; +pub use thinking_tool::*; +pub use web_search_tool::*; + +macro_rules! tools { + ($($tool:ty),* $(,)?) => { + /// A list of all built-in tool names + pub fn supported_built_in_tool_names(provider: Option) -> impl Iterator { + [ + $( + (if let Some(provider) = provider.as_ref() { + <$tool>::supports_provider(provider) + } else { + true + }) + .then(|| <$tool>::name().to_string()), + )* + ] + .into_iter() + .flatten() + } + + /// A list of all built-in tools + pub fn built_in_tools() -> impl Iterator { + fn language_model_tool() -> LanguageModelRequestTool { + LanguageModelRequestTool { + name: T::name().to_string(), + description: T::description().to_string(), + input_schema: T::input_schema(LanguageModelToolSchemaFormat::JsonSchema).to_value(), + } + } + [ + $( + language_model_tool::<$tool>(), + )* + ] + .into_iter() + } + }; +} + +tools! 
{ + CopyPathTool, + CreateDirectoryTool, + DeletePathTool, + DiagnosticsTool, + EditFileTool, + FetchTool, + FindPathTool, + GrepTool, + ListDirectoryTool, + MovePathTool, + NowTool, + OpenTool, + ReadFileTool, + TerminalTool, + ThinkingTool, + WebSearchTool, +} diff --git a/crates/agent2/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs similarity index 95% rename from crates/agent2/src/tools/context_server_registry.rs rename to crates/agent/src/tools/context_server_registry.rs index 46fa0298044de017464dc1a2e5bd21bf57c1bfcf..382d2ba9be74b4518de853037c858fd054366d5d 100644 --- a/crates/agent2/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -32,6 +32,17 @@ impl ContextServerRegistry { this } + pub fn tools_for_server( + &self, + server_id: &ContextServerId, + ) -> impl Iterator> { + self.registered_servers + .get(server_id) + .map(|server| server.tools.values()) + .into_iter() + .flatten() + } + pub fn servers( &self, ) -> impl Iterator< @@ -154,7 +165,7 @@ impl AnyAgentTool for ContextServerTool { format: language_model::LanguageModelToolSchemaFormat, ) -> Result { let mut schema = self.tool.input_schema.clone(); - assistant_tool::adapt_schema_to_format(&mut schema, format)?; + crate::tool_schema::adapt_schema_to_format(&mut schema, format)?; Ok(match schema { serde_json::Value::Null => { serde_json::json!({ "type": "object", "properties": [] }) diff --git a/crates/agent2/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs similarity index 100% rename from crates/agent2/src/tools/copy_path_tool.rs rename to crates/agent/src/tools/copy_path_tool.rs diff --git a/crates/agent2/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs similarity index 100% rename from crates/agent2/src/tools/create_directory_tool.rs rename to crates/agent/src/tools/create_directory_tool.rs diff --git a/crates/agent2/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs similarity index 100% rename from crates/agent2/src/tools/delete_path_tool.rs rename to crates/agent/src/tools/delete_path_tool.rs diff --git a/crates/agent2/src/tools/diagnostics_tool.rs b/crates/agent/src/tools/diagnostics_tool.rs similarity index 100% rename from crates/agent2/src/tools/diagnostics_tool.rs rename to crates/agent/src/tools/diagnostics_tool.rs diff --git a/crates/agent2/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs similarity index 98% rename from crates/agent2/src/tools/edit_file_tool.rs rename to crates/agent/src/tools/edit_file_tool.rs index 7c51df0fae274e2d5906aa73e70c30105b1a2353..0adff2dee3571f09b40ee69896c05e50c56b51b9 100644 --- a/crates/agent2/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -1,8 +1,10 @@ -use crate::{AgentTool, Thread, ToolCallEventStream}; +use crate::{ + AgentTool, Templates, Thread, ToolCallEventStream, + edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, +}; use acp_thread::Diff; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; -use assistant_tools::edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}; use cloud_llm_client::CompletionIntent; use collections::HashSet; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; @@ -34,7 +36,7 @@ const DEFAULT_UI_TEXT: &str = "Editing file"; /// /// 2. 
Verify the directory path is correct (only applicable when creating new files): /// - Use the `list_directory` tool to verify the parent directory exists and is the correct location -#[derive(Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct EditFileToolInput { /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit. /// @@ -75,7 +77,7 @@ pub struct EditFileToolInput { pub mode: EditFileMode, } -#[derive(Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] struct EditFileToolPartialInput { #[serde(default)] path: String, @@ -123,6 +125,7 @@ pub struct EditFileTool { thread: WeakEntity, language_registry: Arc, project: Entity, + templates: Arc, } impl EditFileTool { @@ -130,11 +133,13 @@ impl EditFileTool { project: Entity, thread: WeakEntity, language_registry: Arc, + templates: Arc, ) -> Self { Self { project, thread, language_registry, + templates, } } @@ -294,8 +299,7 @@ impl AgentTool for EditFileTool { model, project.clone(), action_log.clone(), - // TODO: move edit agent to this crate so we can use our templates - assistant_tools::templates::Templates::new(), + self.templates.clone(), edit_format, ); @@ -599,6 +603,7 @@ mod tests { project, thread.downgrade(), language_registry, + Templates::new(), )) .run(input, ToolCallEventStream::test().0, cx) }) @@ -790,7 +795,7 @@ mod tests { store.update_user_settings(cx, |settings| { settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On); settings.project.all_languages.defaults.formatter = - Some(language::language_settings::SelectedFormatter::Auto); + Some(language::language_settings::FormatterList::default()); }); }); }); @@ -807,6 +812,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry.clone(), + Templates::new(), )) .run(input, ToolCallEventStream::test().0, cx) }); @@ -865,6 +871,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )) .run(input, ToolCallEventStream::test().0, cx) }); @@ -951,6 +958,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry.clone(), + Templates::new(), )) .run(input, ToolCallEventStream::test().0, cx) }); @@ -1005,6 +1013,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )) .run(input, ToolCallEventStream::test().0, cx) }); @@ -1057,6 +1066,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )); fs.insert_tree("/root", json!({})).await; @@ -1197,6 +1207,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )); // Test global config paths - these should require confirmation if they exist and are outside the project @@ -1309,6 +1320,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )); // Test files in different worktrees @@ -1393,6 +1405,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )); // Test edge cases @@ -1482,6 +1495,7 @@ mod tests { project.clone(), thread.downgrade(), language_registry, + Templates::new(), )); // Test different EditFileMode values @@ -1566,6 +1580,7 @@ mod tests { project, thread.downgrade(), language_registry, + Templates::new(), )); cx.update(|cx| { @@ -1653,6 +1668,7 @@ mod tests { project.clone(), thread.downgrade(), languages.clone(), + Templates::new(), )); let 
(stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { @@ -1682,6 +1698,7 @@ mod tests { project.clone(), thread.downgrade(), languages.clone(), + Templates::new(), )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { @@ -1709,6 +1726,7 @@ mod tests { project.clone(), thread.downgrade(), languages.clone(), + Templates::new(), )); let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { diff --git a/crates/agent2/src/tools/fetch_tool.rs b/crates/agent/src/tools/fetch_tool.rs similarity index 100% rename from crates/agent2/src/tools/fetch_tool.rs rename to crates/agent/src/tools/fetch_tool.rs diff --git a/crates/agent2/src/tools/find_path_tool.rs b/crates/agent/src/tools/find_path_tool.rs similarity index 100% rename from crates/agent2/src/tools/find_path_tool.rs rename to crates/agent/src/tools/find_path_tool.rs diff --git a/crates/agent2/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs similarity index 100% rename from crates/agent2/src/tools/grep_tool.rs rename to crates/agent/src/tools/grep_tool.rs diff --git a/crates/agent2/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs similarity index 100% rename from crates/agent2/src/tools/list_directory_tool.rs rename to crates/agent/src/tools/list_directory_tool.rs diff --git a/crates/agent2/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs similarity index 100% rename from crates/agent2/src/tools/move_path_tool.rs rename to crates/agent/src/tools/move_path_tool.rs diff --git a/crates/agent2/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs similarity index 100% rename from crates/agent2/src/tools/now_tool.rs rename to crates/agent/src/tools/now_tool.rs diff --git a/crates/agent2/src/tools/open_tool.rs b/crates/agent/src/tools/open_tool.rs similarity index 100% rename from crates/agent2/src/tools/open_tool.rs rename to crates/agent/src/tools/open_tool.rs diff --git a/crates/agent2/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs similarity index 99% rename from crates/agent2/src/tools/read_file_tool.rs rename to crates/agent/src/tools/read_file_tool.rs index ce8dcba10236aa194e8b30d3fe6855d8c5fa5148..f3ce8e35f2856a3dd53770eef48ec1091fe9b116 100644 --- a/crates/agent2/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -1,7 +1,6 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::outline; use gpui::{App, Entity, SharedString, Task}; use indoc::formatdoc; use language::Point; @@ -13,7 +12,7 @@ use settings::Settings; use std::sync::Arc; use util::markdown::MarkdownCodeBlock; -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, outline}; /// Reads the content of the given file in the project. 
/// diff --git a/crates/agent2/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs similarity index 100% rename from crates/agent2/src/tools/terminal_tool.rs rename to crates/agent/src/tools/terminal_tool.rs diff --git a/crates/agent2/src/tools/thinking_tool.rs b/crates/agent/src/tools/thinking_tool.rs similarity index 100% rename from crates/agent2/src/tools/thinking_tool.rs rename to crates/agent/src/tools/thinking_tool.rs diff --git a/crates/agent2/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs similarity index 98% rename from crates/agent2/src/tools/web_search_tool.rs rename to crates/agent/src/tools/web_search_tool.rs index b65c89167d6f5ed026bb4ebb5e1990fa4e1c17ce..03e9db6601579e082e4d83de50f1999209d9f197 100644 --- a/crates/agent2/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -57,7 +57,7 @@ impl AgentTool for WebSearchTool { } /// We currently only support Zed Cloud as a provider. - fn supported_provider(&self, provider: &LanguageModelProviderId) -> bool { + fn supports_provider(provider: &LanguageModelProviderId) -> bool { provider == &ZED_CLOUD_PROVIDER_ID } diff --git a/crates/agent2/Cargo.toml b/crates/agent2/Cargo.toml deleted file mode 100644 index b712bed258dfb69ddf81a1ba431ec7a3566b9baf..0000000000000000000000000000000000000000 --- a/crates/agent2/Cargo.toml +++ /dev/null @@ -1,102 +0,0 @@ -[package] -name = "agent2" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lib] -path = "src/agent2.rs" - -[features] -test-support = ["db/test-support"] -e2e = [] - -[lints] -workspace = true - -[dependencies] -acp_thread.workspace = true -action_log.workspace = true -agent.workspace = true -agent-client-protocol.workspace = true -agent_servers.workspace = true -agent_settings.workspace = true -anyhow.workspace = true -assistant_context.workspace = true -assistant_tool.workspace = true -assistant_tools.workspace = true -chrono.workspace = true -client.workspace = true -cloud_llm_client.workspace = true -collections.workspace = true -context_server.workspace = true -db.workspace = true -fs.workspace = true -futures.workspace = true -git.workspace = true -gpui.workspace = true -handlebars = { workspace = true, features = ["rust-embed"] } -html_to_markdown.workspace = true -http_client.workspace = true -indoc.workspace = true -itertools.workspace = true -language.workspace = true -language_model.workspace = true -language_models.workspace = true -log.workspace = true -open.workspace = true -parking_lot.workspace = true -paths.workspace = true -project.workspace = true -prompt_store.workspace = true -rust-embed.workspace = true -schemars.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -smol.workspace = true -sqlez.workspace = true -task.workspace = true -telemetry.workspace = true -terminal.workspace = true -thiserror.workspace = true -text.workspace = true -ui.workspace = true -util.workspace = true -uuid.workspace = true -watch.workspace = true -web_search.workspace = true -workspace-hack.workspace = true -zed_env_vars.workspace = true -zstd.workspace = true - -[dev-dependencies] -agent = { workspace = true, "features" = ["test-support"] } -agent_servers = { workspace = true, "features" = ["test-support"] } -assistant_context = { workspace = true, "features" = ["test-support"] } -ctor.workspace = true -client = { workspace = true, "features" = ["test-support"] } -clock = { workspace = true, "features" = 
["test-support"] } -context_server = { workspace = true, "features" = ["test-support"] } -db = { workspace = true, "features" = ["test-support"] } -editor = { workspace = true, "features" = ["test-support"] } -env_logger.workspace = true -fs = { workspace = true, "features" = ["test-support"] } -git = { workspace = true, "features" = ["test-support"] } -gpui = { workspace = true, "features" = ["test-support"] } -gpui_tokio.workspace = true -language = { workspace = true, "features" = ["test-support"] } -language_model = { workspace = true, "features" = ["test-support"] } -lsp = { workspace = true, "features" = ["test-support"] } -pretty_assertions.workspace = true -project = { workspace = true, "features" = ["test-support"] } -reqwest_client.workspace = true -settings = { workspace = true, "features" = ["test-support"] } -tempfile.workspace = true -terminal = { workspace = true, "features" = ["test-support"] } -theme = { workspace = true, "features" = ["test-support"] } -tree-sitter-rust.workspace = true -unindent = { workspace = true } -worktree = { workspace = true, "features" = ["test-support"] } -zlog.workspace = true diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs deleted file mode 100644 index bf1fe8b5bb72038e197eafc842ca02e417b9e7c3..0000000000000000000000000000000000000000 --- a/crates/agent2/src/agent.rs +++ /dev/null @@ -1,1588 +0,0 @@ -use crate::{ - ContextServerRegistry, Thread, ThreadEvent, ThreadsDatabase, ToolCallAuthorization, - UserMessageContent, templates::Templates, -}; -use crate::{HistoryStore, TerminalHandle, ThreadEnvironment, TitleUpdated, TokenUsageUpdated}; -use acp_thread::{AcpThread, AgentModelSelector}; -use action_log::ActionLog; -use agent_client_protocol as acp; -use anyhow::{Context as _, Result, anyhow}; -use collections::{HashSet, IndexMap}; -use fs::Fs; -use futures::channel::{mpsc, oneshot}; -use futures::future::Shared; -use futures::{StreamExt, future}; -use gpui::{ - App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, -}; -use language_model::{LanguageModel, LanguageModelProvider, LanguageModelRegistry}; -use project::{Project, ProjectItem, ProjectPath, Worktree}; -use prompt_store::{ - ProjectContext, PromptId, PromptStore, RulesFileContext, UserRulesContext, WorktreeContext, -}; -use settings::{LanguageModelSelection, update_settings_file}; -use std::any::Any; -use std::collections::HashMap; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::sync::Arc; -use util::ResultExt; -use util::rel_path::RelPath; - -const RULES_FILE_NAMES: [&str; 9] = [ - ".rules", - ".cursorrules", - ".windsurfrules", - ".clinerules", - ".github/copilot-instructions.md", - "CLAUDE.md", - "AGENT.md", - "AGENTS.md", - "GEMINI.md", -]; - -pub struct RulesLoadingError { - pub message: SharedString, -} - -/// Holds both the internal Thread and the AcpThread for a session -struct Session { - /// The internal thread that processes messages - thread: Entity, - /// The ACP thread that handles protocol communication - acp_thread: WeakEntity, - pending_save: Task<()>, - _subscriptions: Vec, -} - -pub struct LanguageModels { - /// Access language model by ID - models: HashMap>, - /// Cached list for returning language model information - model_list: acp_thread::AgentModelList, - refresh_models_rx: watch::Receiver<()>, - refresh_models_tx: watch::Sender<()>, - _authenticate_all_providers_task: Task<()>, -} - -impl LanguageModels { - fn new(cx: &mut App) -> Self { - let (refresh_models_tx, refresh_models_rx) = 
watch::channel(()); - - let mut this = Self { - models: HashMap::default(), - model_list: acp_thread::AgentModelList::Grouped(IndexMap::default()), - refresh_models_rx, - refresh_models_tx, - _authenticate_all_providers_task: Self::authenticate_all_language_model_providers(cx), - }; - this.refresh_list(cx); - this - } - - fn refresh_list(&mut self, cx: &App) { - let providers = LanguageModelRegistry::global(cx) - .read(cx) - .providers() - .into_iter() - .filter(|provider| provider.is_authenticated(cx)) - .collect::>(); - - let mut language_model_list = IndexMap::default(); - let mut recommended_models = HashSet::default(); - - let mut recommended = Vec::new(); - for provider in &providers { - for model in provider.recommended_models(cx) { - recommended_models.insert((model.provider_id(), model.id())); - recommended.push(Self::map_language_model_to_info(&model, provider)); - } - } - if !recommended.is_empty() { - language_model_list.insert( - acp_thread::AgentModelGroupName("Recommended".into()), - recommended, - ); - } - - let mut models = HashMap::default(); - for provider in providers { - let mut provider_models = Vec::new(); - for model in provider.provided_models(cx) { - let model_info = Self::map_language_model_to_info(&model, &provider); - let model_id = model_info.id.clone(); - if !recommended_models.contains(&(model.provider_id(), model.id())) { - provider_models.push(model_info); - } - models.insert(model_id, model); - } - if !provider_models.is_empty() { - language_model_list.insert( - acp_thread::AgentModelGroupName(provider.name().0.clone()), - provider_models, - ); - } - } - - self.models = models; - self.model_list = acp_thread::AgentModelList::Grouped(language_model_list); - self.refresh_models_tx.send(()).ok(); - } - - fn watch(&self) -> watch::Receiver<()> { - self.refresh_models_rx.clone() - } - - pub fn model_from_id(&self, model_id: &acp::ModelId) -> Option> { - self.models.get(model_id).cloned() - } - - fn map_language_model_to_info( - model: &Arc, - provider: &Arc, - ) -> acp_thread::AgentModelInfo { - acp_thread::AgentModelInfo { - id: Self::model_id(model), - name: model.name().0, - description: None, - icon: Some(provider.icon()), - } - } - - fn model_id(model: &Arc) -> acp::ModelId { - acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) - } - - fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> { - let authenticate_all_providers = LanguageModelRegistry::global(cx) - .read(cx) - .providers() - .iter() - .map(|provider| (provider.id(), provider.name(), provider.authenticate(cx))) - .collect::>(); - - cx.background_spawn(async move { - for (provider_id, provider_name, authenticate_task) in authenticate_all_providers { - if let Err(err) = authenticate_task.await { - match err { - language_model::AuthenticateError::CredentialsNotFound => { - // Since we're authenticating these providers in the - // background for the purposes of populating the - // language selector, we don't care about providers - // where the credentials are not found. - } - language_model::AuthenticateError::ConnectionRefused => { - // Not logging connection refused errors as they are mostly from LM Studio's noisy auth failures. - // LM Studio only has one auth method (endpoint call) which fails for users who haven't enabled it. - // TODO: Better manage LM Studio auth logic to avoid these noisy failures. 
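// Sketch of the model-id scheme in `LanguageModels` above: each model gets a flat id of the
// form "<provider>/<model>", which then doubles as the key in the models map consulted by
// `model_from_id`. The struct and map below are simplified stand-ins for the real types.
use std::collections::HashMap;

#[derive(Clone)]
struct Model {
    provider_id: &'static str,
    id: &'static str,
}

fn flat_model_id(model: &Model) -> String {
    format!("{}/{}", model.provider_id, model.id)
}

fn main() {
    let models = [
        Model { provider_id: "fake", id: "fake" },
        Model { provider_id: "zed-cloud", id: "example-model" },
    ];

    // Index models by their flat id, mirroring `refresh_list`.
    let by_id: HashMap<String, Model> = models
        .iter()
        .map(|model| (flat_model_id(model), model.clone()))
        .collect();

    // `model_from_id` is then a plain map lookup.
    let selected = by_id.get("fake/fake").expect("model registered");
    println!("{}/{}", selected.provider_id, selected.id);
}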
- } - _ => { - // Some providers have noisy failure states that we - // don't want to spam the logs with every time the - // language model selector is initialized. - // - // Ideally these should have more clear failure modes - // that we know are safe to ignore here, like what we do - // with `CredentialsNotFound` above. - match provider_id.0.as_ref() { - "lmstudio" | "ollama" => { - // LM Studio and Ollama both make fetch requests to the local APIs to determine if they are "authenticated". - // - // These fail noisily, so we don't log them. - } - "copilot_chat" => { - // Copilot Chat returns an error if Copilot is not enabled, so we don't log those errors. - } - _ => { - log::error!( - "Failed to authenticate provider: {}: {err}", - provider_name.0 - ); - } - } - } - } - } - } - }) - } -} - -pub struct NativeAgent { - /// Session ID -> Session mapping - sessions: HashMap, - history: Entity, - /// Shared project context for all threads - project_context: Entity, - project_context_needs_refresh: watch::Sender<()>, - _maintain_project_context: Task>, - context_server_registry: Entity, - /// Shared templates for all threads - templates: Arc, - /// Cached model information - models: LanguageModels, - project: Entity, - prompt_store: Option>, - fs: Arc, - _subscriptions: Vec, -} - -impl NativeAgent { - pub async fn new( - project: Entity, - history: Entity, - templates: Arc, - prompt_store: Option>, - fs: Arc, - cx: &mut AsyncApp, - ) -> Result> { - log::debug!("Creating new NativeAgent"); - - let project_context = cx - .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))? - .await; - - cx.new(|cx| { - let mut subscriptions = vec![ - cx.subscribe(&project, Self::handle_project_event), - cx.subscribe( - &LanguageModelRegistry::global(cx), - Self::handle_models_updated_event, - ), - ]; - if let Some(prompt_store) = prompt_store.as_ref() { - subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) - } - - let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = - watch::channel(()); - Self { - sessions: HashMap::new(), - history, - project_context: cx.new(|_| project_context), - project_context_needs_refresh: project_context_needs_refresh_tx, - _maintain_project_context: cx.spawn(async move |this, cx| { - Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await - }), - context_server_registry: cx.new(|cx| { - ContextServerRegistry::new(project.read(cx).context_server_store(), cx) - }), - templates, - models: LanguageModels::new(cx), - project, - prompt_store, - fs, - _subscriptions: subscriptions, - } - }) - } - - fn register_session( - &mut self, - thread_handle: Entity, - cx: &mut Context, - ) -> Entity { - let connection = Rc::new(NativeAgentConnection(cx.entity())); - - let thread = thread_handle.read(cx); - let session_id = thread.id().clone(); - let title = thread.title(); - let project = thread.project.clone(); - let action_log = thread.action_log.clone(); - let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); - let acp_thread = cx.new(|cx| { - acp_thread::AcpThread::new( - title, - connection, - project.clone(), - action_log.clone(), - session_id.clone(), - prompt_capabilities_rx, - cx, - ) - }); - - let registry = LanguageModelRegistry::read_global(cx); - let summarization_model = registry.thread_summary_model().map(|c| c.model); - - thread_handle.update(cx, |thread, cx| { - thread.set_summarization_model(summarization_model, cx); - thread.add_default_tools( - 
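// Condensed sketch of the "authenticate everything in the background, but only log surprising
// failures" logic above. The error enum and provider list are illustrative; the real code
// awaits gpui tasks and reports through the `log` crate.
#[derive(Debug)]
enum AuthenticateError {
    CredentialsNotFound,
    ConnectionRefused,
    Other(String),
}

fn should_log(provider_id: &str, err: &AuthenticateError) -> bool {
    match err {
        // Expected when the user simply hasn't configured the provider.
        AuthenticateError::CredentialsNotFound => false,
        // Local servers (e.g. LM Studio) refuse connections when not running.
        AuthenticateError::ConnectionRefused => false,
        AuthenticateError::Other(_) => {
            // Some providers fail noisily by design; skip those too.
            !matches!(provider_id, "lmstudio" | "ollama" | "copilot_chat")
        }
    }
}

fn main() {
    let results = [
        ("openai", Err(AuthenticateError::CredentialsNotFound)),
        ("ollama", Err(AuthenticateError::ConnectionRefused)),
        ("lmstudio", Err(AuthenticateError::Other("401".into()))),
        ("anthropic", Ok(())),
    ];
    for (provider, result) in results {
        if let Err(err) = result {
            if should_log(provider, &err) {
                eprintln!("Failed to authenticate provider {provider}: {err:?}");
            }
        }
    }
}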
Rc::new(AcpThreadEnvironment { - acp_thread: acp_thread.downgrade(), - }) as _, - cx, - ) - }); - - let subscriptions = vec![ - cx.observe_release(&acp_thread, |this, acp_thread, _cx| { - this.sessions.remove(acp_thread.session_id()); - }), - cx.subscribe(&thread_handle, Self::handle_thread_title_updated), - cx.subscribe(&thread_handle, Self::handle_thread_token_usage_updated), - cx.observe(&thread_handle, move |this, thread, cx| { - this.save_thread(thread, cx) - }), - ]; - - self.sessions.insert( - session_id, - Session { - thread: thread_handle, - acp_thread: acp_thread.downgrade(), - _subscriptions: subscriptions, - pending_save: Task::ready(()), - }, - ); - acp_thread - } - - pub fn models(&self) -> &LanguageModels { - &self.models - } - - async fn maintain_project_context( - this: WeakEntity, - mut needs_refresh: watch::Receiver<()>, - cx: &mut AsyncApp, - ) -> Result<()> { - while needs_refresh.changed().await.is_ok() { - let project_context = this - .update(cx, |this, cx| { - Self::build_project_context(&this.project, this.prompt_store.as_ref(), cx) - })? - .await; - this.update(cx, |this, cx| { - this.project_context = cx.new(|_| project_context); - })?; - } - - Ok(()) - } - - fn build_project_context( - project: &Entity, - prompt_store: Option<&Entity>, - cx: &mut App, - ) -> Task { - let worktrees = project.read(cx).visible_worktrees(cx).collect::>(); - let worktree_tasks = worktrees - .into_iter() - .map(|worktree| { - Self::load_worktree_info_for_system_prompt(worktree, project.clone(), cx) - }) - .collect::>(); - let default_user_rules_task = if let Some(prompt_store) = prompt_store.as_ref() { - prompt_store.read_with(cx, |prompt_store, cx| { - let prompts = prompt_store.default_prompt_metadata(); - let load_tasks = prompts.into_iter().map(|prompt_metadata| { - let contents = prompt_store.load(prompt_metadata.id, cx); - async move { (contents.await, prompt_metadata) } - }); - cx.background_spawn(future::join_all(load_tasks)) - }) - } else { - Task::ready(vec![]) - }; - - cx.spawn(async move |_cx| { - let (worktrees, default_user_rules) = - future::join(future::join_all(worktree_tasks), default_user_rules_task).await; - - let worktrees = worktrees - .into_iter() - .map(|(worktree, _rules_error)| { - // TODO: show error message - // if let Some(rules_error) = rules_error { - // this.update(cx, |_, cx| cx.emit(rules_error)).ok(); - // } - worktree - }) - .collect::>(); - - let default_user_rules = default_user_rules - .into_iter() - .flat_map(|(contents, prompt_metadata)| match contents { - Ok(contents) => Some(UserRulesContext { - uuid: match prompt_metadata.id { - PromptId::User { uuid } => uuid, - PromptId::EditWorkflow => return None, - }, - title: prompt_metadata.title.map(|title| title.to_string()), - contents, - }), - Err(_err) => { - // TODO: show error message - // this.update(cx, |_, cx| { - // cx.emit(RulesLoadingError { - // message: format!("{err:?}").into(), - // }); - // }) - // .ok(); - None - } - }) - .collect::>(); - - ProjectContext::new(worktrees, default_user_rules) - }) - } - - fn load_worktree_info_for_system_prompt( - worktree: Entity, - project: Entity, - cx: &mut App, - ) -> Task<(WorktreeContext, Option)> { - let tree = worktree.read(cx); - let root_name = tree.root_name_str().into(); - let abs_path = tree.abs_path(); - - let mut context = WorktreeContext { - root_name, - abs_path, - rules_file: None, - }; - - let rules_task = Self::load_worktree_rules_file(worktree, project, cx); - let Some(rules_task) = rules_task else { - return 
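// Sketch of the refresh loop in `maintain_project_context` above: event handlers fire a unit
// value whenever worktrees, rules files, or prompts change, and a single task rebuilds the
// project context each time it observes a change. The original uses Zed's internal `watch`
// channel and gpui tasks; tokio's watch channel is used here as a stand-in.
use tokio::sync::watch;

#[tokio::main]
async fn main() {
    let (needs_refresh_tx, mut needs_refresh_rx) = watch::channel(());

    let refresher = tokio::spawn(async move {
        let mut rebuilds = 0u32;
        // `changed()` resolves once per notification (coalescing bursts) and errors out when
        // every sender is dropped, which ends the loop.
        while needs_refresh_rx.changed().await.is_ok() {
            rebuilds += 1;
            println!("rebuilding project context (#{rebuilds})");
        }
        rebuilds
    });

    // Simulate a few project events that invalidate the context.
    needs_refresh_tx.send(()).ok();
    needs_refresh_tx.send(()).ok();
    drop(needs_refresh_tx); // no more senders: the loop terminates

    let rebuilds = refresher.await.unwrap();
    println!("done after {rebuilds} rebuild(s)");
}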
Task::ready((context, None)); - }; - - cx.spawn(async move |_| { - let (rules_file, rules_file_error) = match rules_task.await { - Ok(rules_file) => (Some(rules_file), None), - Err(err) => ( - None, - Some(RulesLoadingError { - message: format!("{err}").into(), - }), - ), - }; - context.rules_file = rules_file; - (context, rules_file_error) - }) - } - - fn load_worktree_rules_file( - worktree: Entity, - project: Entity, - cx: &mut App, - ) -> Option>> { - let worktree = worktree.read(cx); - let worktree_id = worktree.id(); - let selected_rules_file = RULES_FILE_NAMES - .into_iter() - .filter_map(|name| { - worktree - .entry_for_path(RelPath::unix(name).unwrap()) - .filter(|entry| entry.is_file()) - .map(|entry| entry.path.clone()) - }) - .next(); - - // Note that Cline supports `.clinerules` being a directory, but that is not currently - // supported. This doesn't seem to occur often in GitHub repositories. - selected_rules_file.map(|path_in_worktree| { - let project_path = ProjectPath { - worktree_id, - path: path_in_worktree.clone(), - }; - let buffer_task = - project.update(cx, |project, cx| project.open_buffer(project_path, cx)); - let rope_task = cx.spawn(async move |cx| { - buffer_task.await?.read_with(cx, |buffer, cx| { - let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?; - anyhow::Ok((project_entry_id, buffer.as_rope().clone())) - })? - }); - // Build a string from the rope on a background thread. - cx.background_spawn(async move { - let (project_entry_id, rope) = rope_task.await?; - anyhow::Ok(RulesFileContext { - path_in_worktree, - text: rope.to_string().trim().to_string(), - project_entry_id: project_entry_id.to_usize(), - }) - }) - }) - } - - fn handle_thread_title_updated( - &mut self, - thread: Entity, - _: &TitleUpdated, - cx: &mut Context, - ) { - let session_id = thread.read(cx).id(); - let Some(session) = self.sessions.get(session_id) else { - return; - }; - let thread = thread.downgrade(); - let acp_thread = session.acp_thread.clone(); - cx.spawn(async move |_, cx| { - let title = thread.read_with(cx, |thread, _| thread.title())?; - let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; - task.await - }) - .detach_and_log_err(cx); - } - - fn handle_thread_token_usage_updated( - &mut self, - thread: Entity, - usage: &TokenUsageUpdated, - cx: &mut Context, - ) { - let Some(session) = self.sessions.get(thread.read(cx).id()) else { - return; - }; - session - .acp_thread - .update(cx, |acp_thread, cx| { - acp_thread.update_token_usage(usage.0.clone(), cx); - }) - .ok(); - } - - fn handle_project_event( - &mut self, - _project: Entity, - event: &project::Event, - _cx: &mut Context, - ) { - match event { - project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { - self.project_context_needs_refresh.send(()).ok(); - } - project::Event::WorktreeUpdatedEntries(_, items) => { - if items.iter().any(|(path, _, _)| { - RULES_FILE_NAMES - .iter() - .any(|name| path.as_ref() == RelPath::unix(name).unwrap()) - }) { - self.project_context_needs_refresh.send(()).ok(); - } - } - _ => {} - } - } - - fn handle_prompts_updated_event( - &mut self, - _prompt_store: Entity, - _event: &prompt_store::PromptsUpdatedEvent, - _cx: &mut Context, - ) { - self.project_context_needs_refresh.send(()).ok(); - } - - fn handle_models_updated_event( - &mut self, - _registry: Entity, - _event: &language_model::Event, - cx: &mut Context, - ) { - self.models.refresh_list(cx); - - let registry = LanguageModelRegistry::read_global(cx); - let 
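// Sketch of the rules-file selection in `load_worktree_rules_file` above: the worktree is
// checked against a fixed precedence list and the first name that exists as a file wins.
// The real code walks worktree entries; this stand-in simply probes the filesystem.
use std::path::{Path, PathBuf};

const RULES_FILE_NAMES: [&str; 9] = [
    ".rules",
    ".cursorrules",
    ".windsurfrules",
    ".clinerules",
    ".github/copilot-instructions.md",
    "CLAUDE.md",
    "AGENT.md",
    "AGENTS.md",
    "GEMINI.md",
];

fn select_rules_file(worktree_root: &Path) -> Option<PathBuf> {
    RULES_FILE_NAMES
        .into_iter()
        .map(|name| worktree_root.join(name))
        .find(|candidate| candidate.is_file())
}

fn main() {
    match select_rules_file(Path::new(".")) {
        Some(path) => println!("using rules file {}", path.display()),
        None => println!("no rules file found"),
    }
}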
default_model = registry.default_model().map(|m| m.model); - let summarization_model = registry.thread_summary_model().map(|m| m.model); - - for session in self.sessions.values_mut() { - session.thread.update(cx, |thread, cx| { - if thread.model().is_none() - && let Some(model) = default_model.clone() - { - thread.set_model(model, cx); - cx.notify(); - } - thread.set_summarization_model(summarization_model.clone(), cx); - }); - } - } - - pub fn open_thread( - &mut self, - id: acp::SessionId, - cx: &mut Context, - ) -> Task>> { - let database_future = ThreadsDatabase::connect(cx); - cx.spawn(async move |this, cx| { - let database = database_future.await.map_err(|err| anyhow!(err))?; - let db_thread = database - .load_thread(id.clone()) - .await? - .with_context(|| format!("no thread found with ID: {id:?}"))?; - - let thread = this.update(cx, |this, cx| { - let action_log = cx.new(|_cx| ActionLog::new(this.project.clone())); - cx.new(|cx| { - Thread::from_db( - id.clone(), - db_thread, - this.project.clone(), - this.project_context.clone(), - this.context_server_registry.clone(), - action_log.clone(), - this.templates.clone(), - cx, - ) - }) - })?; - let acp_thread = - this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; - let events = thread.update(cx, |thread, cx| thread.replay(cx))?; - cx.update(|cx| { - NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) - })? - .await?; - Ok(acp_thread) - }) - } - - pub fn thread_summary( - &mut self, - id: acp::SessionId, - cx: &mut Context, - ) -> Task> { - let thread = self.open_thread(id.clone(), cx); - cx.spawn(async move |this, cx| { - let acp_thread = thread.await?; - let result = this - .update(cx, |this, cx| { - this.sessions - .get(&id) - .unwrap() - .thread - .update(cx, |thread, cx| thread.summary(cx)) - })? 
- .await?; - drop(acp_thread); - Ok(result) - }) - } - - fn save_thread(&mut self, thread: Entity, cx: &mut Context) { - if thread.read(cx).is_empty() { - return; - } - - let database_future = ThreadsDatabase::connect(cx); - let (id, db_thread) = - thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); - let Some(session) = self.sessions.get_mut(&id) else { - return; - }; - let history = self.history.clone(); - session.pending_save = cx.spawn(async move |_, cx| { - let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { - return; - }; - let db_thread = db_thread.await; - database.save_thread(id, db_thread).await.log_err(); - history.update(cx, |history, cx| history.reload(cx)).ok(); - }); - } -} - -/// Wrapper struct that implements the AgentConnection trait -#[derive(Clone)] -pub struct NativeAgentConnection(pub Entity); - -impl NativeAgentConnection { - pub fn thread(&self, session_id: &acp::SessionId, cx: &App) -> Option> { - self.0 - .read(cx) - .sessions - .get(session_id) - .map(|session| session.thread.clone()) - } - - fn run_turn( - &self, - session_id: acp::SessionId, - cx: &mut App, - f: impl 'static - + FnOnce(Entity, &mut App) -> Result>>, - ) -> Task> { - let Some((thread, acp_thread)) = self.0.update(cx, |agent, _cx| { - agent - .sessions - .get_mut(&session_id) - .map(|s| (s.thread.clone(), s.acp_thread.clone())) - }) else { - return Task::ready(Err(anyhow!("Session not found"))); - }; - log::debug!("Found session for: {}", session_id); - - let response_stream = match f(thread, cx) { - Ok(stream) => stream, - Err(err) => return Task::ready(Err(err)), - }; - Self::handle_thread_events(response_stream, acp_thread, cx) - } - - fn handle_thread_events( - mut events: mpsc::UnboundedReceiver>, - acp_thread: WeakEntity, - cx: &App, - ) -> Task> { - cx.spawn(async move |cx| { - // Handle response stream and forward to session.acp_thread - while let Some(result) = events.next().await { - match result { - Ok(event) => { - log::trace!("Received completion event: {:?}", event); - - match event { - ThreadEvent::UserMessage(message) => { - acp_thread.update(cx, |thread, cx| { - for content in message.content { - thread.push_user_content_block( - Some(message.id.clone()), - content.into(), - cx, - ); - } - })?; - } - ThreadEvent::AgentText(text) => { - acp_thread.update(cx, |thread, cx| { - thread.push_assistant_content_block( - acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - false, - cx, - ) - })?; - } - ThreadEvent::AgentThinking(text) => { - acp_thread.update(cx, |thread, cx| { - thread.push_assistant_content_block( - acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - true, - cx, - ) - })?; - } - ThreadEvent::ToolCallAuthorization(ToolCallAuthorization { - tool_call, - options, - response, - }) => { - let outcome_task = acp_thread.update(cx, |thread, cx| { - thread.request_tool_call_authorization( - tool_call, options, true, cx, - ) - })??; - cx.background_spawn(async move { - if let acp::RequestPermissionOutcome::Selected { option_id } = - outcome_task.await - { - response - .send(option_id) - .map(|_| anyhow!("authorization receiver was dropped")) - .log_err(); - } - }) - .detach(); - } - ThreadEvent::ToolCall(tool_call) => { - acp_thread.update(cx, |thread, cx| { - thread.upsert_tool_call(tool_call, cx) - })??; - } - ThreadEvent::ToolCallUpdate(update) => { - acp_thread.update(cx, |thread, cx| { - thread.update_tool_call(update, cx) - })??; - } - 
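// Condensed, synchronous sketch of `handle_thread_events` above: internal thread events arrive
// on a channel and are translated one by one into protocol-level updates, and the loop ends
// either at an explicit Stop event or when the sender hangs up (treated as an ordinary end of
// turn). The event and response types are simplified stand-ins for the real ones.
use std::sync::mpsc;

enum ThreadEvent {
    AgentText(String),
    AgentThinking(String),
    Stop(&'static str),
}

fn forward_events(events: mpsc::Receiver<ThreadEvent>) -> &'static str {
    for event in events {
        match event {
            ThreadEvent::AgentText(text) => println!("assistant: {text}"),
            ThreadEvent::AgentThinking(text) => println!("thinking: {text}"),
            // An explicit stop reason ends the turn immediately.
            ThreadEvent::Stop(reason) => return reason,
        }
    }
    // Stream ended without an explicit stop: report a normal end of turn.
    "end_turn"
}

fn main() {
    let (tx, rx) = mpsc::channel();
    tx.send(ThreadEvent::AgentThinking("planning".into())).unwrap();
    tx.send(ThreadEvent::AgentText("Here is the edit.".into())).unwrap();
    tx.send(ThreadEvent::Stop("end_turn")).unwrap();
    drop(tx);
    println!("stop reason: {}", forward_events(rx));
}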
ThreadEvent::Retry(status) => { - acp_thread.update(cx, |thread, cx| { - thread.update_retry_status(status, cx) - })?; - } - ThreadEvent::Stop(stop_reason) => { - log::debug!("Assistant message complete: {:?}", stop_reason); - return Ok(acp::PromptResponse { - stop_reason, - meta: None, - }); - } - } - } - Err(e) => { - log::error!("Error in model response stream: {:?}", e); - return Err(e); - } - } - } - - log::debug!("Response stream completed"); - anyhow::Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - meta: None, - }) - }) - } -} - -struct NativeAgentModelSelector { - session_id: acp::SessionId, - connection: NativeAgentConnection, -} - -impl acp_thread::AgentModelSelector for NativeAgentModelSelector { - fn list_models(&self, cx: &mut App) -> Task> { - log::debug!("NativeAgentConnection::list_models called"); - let list = self.connection.0.read(cx).models.model_list.clone(); - Task::ready(if list.is_empty() { - Err(anyhow::anyhow!("No models available")) - } else { - Ok(list) - }) - } - - fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task> { - log::debug!( - "Setting model for session {}: {}", - self.session_id, - model_id - ); - let Some(thread) = self - .connection - .0 - .read(cx) - .sessions - .get(&self.session_id) - .map(|session| session.thread.clone()) - else { - return Task::ready(Err(anyhow!("Session not found"))); - }; - - let Some(model) = self.connection.0.read(cx).models.model_from_id(&model_id) else { - return Task::ready(Err(anyhow!("Invalid model ID {}", model_id))); - }; - - thread.update(cx, |thread, cx| { - thread.set_model(model.clone(), cx); - }); - - update_settings_file( - self.connection.0.read(cx).fs.clone(), - cx, - move |settings, _cx| { - let provider = model.provider_id().0.to_string(); - let model = model.id().0.to_string(); - settings - .agent - .get_or_insert_default() - .set_model(LanguageModelSelection { - provider: provider.into(), - model, - }); - }, - ); - - Task::ready(Ok(())) - } - - fn selected_model(&self, cx: &mut App) -> Task> { - let Some(thread) = self - .connection - .0 - .read(cx) - .sessions - .get(&self.session_id) - .map(|session| session.thread.clone()) - else { - return Task::ready(Err(anyhow!("Session not found"))); - }; - let Some(model) = thread.read(cx).model() else { - return Task::ready(Err(anyhow!("Model not found"))); - }; - let Some(provider) = LanguageModelRegistry::read_global(cx).provider(&model.provider_id()) - else { - return Task::ready(Err(anyhow!("Provider not found"))); - }; - Task::ready(Ok(LanguageModels::map_language_model_to_info( - model, &provider, - ))) - } - - fn watch(&self, cx: &mut App) -> Option> { - Some(self.connection.0.read(cx).models.watch()) - } -} - -impl acp_thread::AgentConnection for NativeAgentConnection { - fn new_thread( - self: Rc, - project: Entity, - cwd: &Path, - cx: &mut App, - ) -> Task>> { - let agent = self.0.clone(); - log::debug!("Creating new thread for project at: {:?}", cwd); - - cx.spawn(async move |cx| { - log::debug!("Starting thread creation in async context"); - - // Create Thread - let thread = agent.update( - cx, - |agent, cx: &mut gpui::Context| -> Result<_> { - // Fetch default model from registry settings - let registry = LanguageModelRegistry::read_global(cx); - // Log available models for debugging - let available_count = registry.available_models(cx).count(); - log::debug!("Total available models: {}", available_count); - - let default_model = registry.default_model().and_then(|default_model| { - agent - .models - 
.model_from_id(&LanguageModels::model_id(&default_model.model)) - }); - Ok(cx.new(|cx| { - Thread::new( - project.clone(), - agent.project_context.clone(), - agent.context_server_registry.clone(), - agent.templates.clone(), - default_model, - cx, - ) - })) - }, - )??; - agent.update(cx, |agent, cx| agent.register_session(thread, cx)) - }) - } - - fn auth_methods(&self) -> &[acp::AuthMethod] { - &[] // No auth for in-process - } - - fn authenticate(&self, _method: acp::AuthMethodId, _cx: &mut App) -> Task> { - Task::ready(Ok(())) - } - - fn model_selector(&self, session_id: &acp::SessionId) -> Option> { - Some(Rc::new(NativeAgentModelSelector { - session_id: session_id.clone(), - connection: self.clone(), - }) as Rc) - } - - fn prompt( - &self, - id: Option, - params: acp::PromptRequest, - cx: &mut App, - ) -> Task> { - let id = id.expect("UserMessageId is required"); - let session_id = params.session_id.clone(); - log::info!("Received prompt request for session: {}", session_id); - log::debug!("Prompt blocks count: {}", params.prompt.len()); - - self.run_turn(session_id, cx, |thread, cx| { - let content: Vec = params - .prompt - .into_iter() - .map(Into::into) - .collect::>(); - log::debug!("Converted prompt to message: {} chars", content.len()); - log::debug!("Message id: {:?}", id); - log::debug!("Message content: {:?}", content); - - thread.update(cx, |thread, cx| thread.send(id, content, cx)) - }) - } - - fn resume( - &self, - session_id: &acp::SessionId, - _cx: &App, - ) -> Option> { - Some(Rc::new(NativeAgentSessionResume { - connection: self.clone(), - session_id: session_id.clone(), - }) as _) - } - - fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { - log::info!("Cancelling on session: {}", session_id); - self.0.update(cx, |agent, cx| { - if let Some(agent) = agent.sessions.get(session_id) { - agent.thread.update(cx, |thread, cx| thread.cancel(cx)); - } - }); - } - - fn truncate( - &self, - session_id: &agent_client_protocol::SessionId, - cx: &App, - ) -> Option> { - self.0.read_with(cx, |agent, _cx| { - agent.sessions.get(session_id).map(|session| { - Rc::new(NativeAgentSessionTruncate { - thread: session.thread.clone(), - acp_thread: session.acp_thread.clone(), - }) as _ - }) - }) - } - - fn set_title( - &self, - session_id: &acp::SessionId, - _cx: &App, - ) -> Option> { - Some(Rc::new(NativeAgentSessionSetTitle { - connection: self.clone(), - session_id: session_id.clone(), - }) as _) - } - - fn telemetry(&self) -> Option> { - Some(Rc::new(self.clone()) as Rc) - } - - fn into_any(self: Rc) -> Rc { - self - } -} - -impl acp_thread::AgentTelemetry for NativeAgentConnection { - fn agent_name(&self) -> String { - "Zed".into() - } - - fn thread_data( - &self, - session_id: &acp::SessionId, - cx: &mut App, - ) -> Task> { - let Some(session) = self.0.read(cx).sessions.get(session_id) else { - return Task::ready(Err(anyhow!("Session not found"))); - }; - - let task = session.thread.read(cx).to_db(cx); - cx.background_spawn(async move { - serde_json::to_value(task.await).context("Failed to serialize thread") - }) - } -} - -struct NativeAgentSessionTruncate { - thread: Entity, - acp_thread: WeakEntity, -} - -impl acp_thread::AgentSessionTruncate for NativeAgentSessionTruncate { - fn run(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task> { - match self.thread.update(cx, |thread, cx| { - thread.truncate(message_id.clone(), cx)?; - Ok(thread.latest_token_usage()) - }) { - Ok(usage) => { - self.acp_thread - .update(cx, |thread, cx| { - 
thread.update_token_usage(usage, cx); - }) - .ok(); - Task::ready(Ok(())) - } - Err(error) => Task::ready(Err(error)), - } - } -} - -struct NativeAgentSessionResume { - connection: NativeAgentConnection, - session_id: acp::SessionId, -} - -impl acp_thread::AgentSessionResume for NativeAgentSessionResume { - fn run(&self, cx: &mut App) -> Task> { - self.connection - .run_turn(self.session_id.clone(), cx, |thread, cx| { - thread.update(cx, |thread, cx| thread.resume(cx)) - }) - } -} - -struct NativeAgentSessionSetTitle { - connection: NativeAgentConnection, - session_id: acp::SessionId, -} - -impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle { - fn run(&self, title: SharedString, cx: &mut App) -> Task> { - let Some(session) = self.connection.0.read(cx).sessions.get(&self.session_id) else { - return Task::ready(Err(anyhow!("session not found"))); - }; - let thread = session.thread.clone(); - thread.update(cx, |thread, cx| thread.set_title(title, cx)); - Task::ready(Ok(())) - } -} - -pub struct AcpThreadEnvironment { - acp_thread: WeakEntity, -} - -impl ThreadEnvironment for AcpThreadEnvironment { - fn create_terminal( - &self, - command: String, - cwd: Option, - output_byte_limit: Option, - cx: &mut AsyncApp, - ) -> Task>> { - let task = self.acp_thread.update(cx, |thread, cx| { - thread.create_terminal(command, vec![], vec![], cwd, output_byte_limit, cx) - }); - - let acp_thread = self.acp_thread.clone(); - cx.spawn(async move |cx| { - let terminal = task?.await?; - - let (drop_tx, drop_rx) = oneshot::channel(); - let terminal_id = terminal.read_with(cx, |terminal, _cx| terminal.id().clone())?; - - cx.spawn(async move |cx| { - drop_rx.await.ok(); - acp_thread.update(cx, |thread, cx| thread.release_terminal(terminal_id, cx)) - }) - .detach(); - - let handle = AcpTerminalHandle { - terminal, - _drop_tx: Some(drop_tx), - }; - - Ok(Rc::new(handle) as _) - }) - } -} - -pub struct AcpTerminalHandle { - terminal: Entity, - _drop_tx: Option>, -} - -impl TerminalHandle for AcpTerminalHandle { - fn id(&self, cx: &AsyncApp) -> Result { - self.terminal.read_with(cx, |term, _cx| term.id().clone()) - } - - fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { - self.terminal - .read_with(cx, |term, _cx| term.wait_for_exit()) - } - - fn current_output(&self, cx: &AsyncApp) -> Result { - self.terminal - .read_with(cx, |term, cx| term.current_output(cx)) - } -} - -#[cfg(test)] -mod tests { - use crate::HistoryEntryId; - - use super::*; - use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri}; - use fs::FakeFs; - use gpui::TestAppContext; - use indoc::formatdoc; - use language_model::fake_provider::FakeLanguageModel; - use serde_json::json; - use settings::SettingsStore; - use util::{path, rel_path::rel_path}; - - #[gpui::test] - async fn test_maintaining_project_context(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/", - json!({ - "a": {} - }), - ) - .await; - let project = Project::test(fs.clone(), [], cx).await; - let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); - let agent = NativeAgent::new( - project.clone(), - history_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); - agent.read_with(cx, |agent, cx| { - assert_eq!(agent.project_context.read(cx).worktrees, vec![]) - }); - - let worktree = project - .update(cx, |project, cx| 
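// Sketch of the terminal-release trick in `AcpThreadEnvironment::create_terminal` above: the
// handle keeps the sending half of a oneshot channel while a background task waits on the
// receiving half. Dropping the handle drops the sender, the await completes, and the cleanup
// runs. tokio's oneshot stands in for the futures oneshot used in the real code.
use tokio::sync::oneshot;

struct TerminalHandle {
    id: u32,
    _drop_tx: Option<oneshot::Sender<()>>,
}

#[tokio::main]
async fn main() {
    let (drop_tx, drop_rx) = oneshot::channel::<()>();
    let terminal_id = 7;

    let cleanup = tokio::spawn(async move {
        // Resolves (with an error) as soon as the sender is dropped.
        let _ = drop_rx.await;
        println!("releasing terminal {terminal_id}");
    });

    let handle = TerminalHandle { id: terminal_id, _drop_tx: Some(drop_tx) };
    println!("using terminal {}", handle.id);
    drop(handle); // dropping the handle triggers the release task

    cleanup.await.unwrap();
}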
project.create_worktree("/a", true, cx)) - .await - .unwrap(); - cx.run_until_parked(); - agent.read_with(cx, |agent, cx| { - assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: None - }] - ) - }); - - // Creating `/a/.rules` updates the project context. - fs.insert_file("/a/.rules", Vec::new()).await; - cx.run_until_parked(); - agent.read_with(cx, |agent, cx| { - let rules_entry = worktree - .read(cx) - .entry_for_path(rel_path(".rules")) - .unwrap(); - assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: Some(RulesFileContext { - path_in_worktree: rel_path(".rules").into(), - text: "".into(), - project_entry_id: rules_entry.id.to_usize() - }) - }] - ) - }); - } - - #[gpui::test] - async fn test_listing_models(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/", json!({ "a": {} })).await; - let project = Project::test(fs.clone(), [], cx).await; - let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); - let connection = NativeAgentConnection( - NativeAgent::new( - project.clone(), - history_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(), - ); - - // Create a thread/session - let acp_thread = cx - .update(|cx| { - Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) - }) - .await - .unwrap(); - - let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); - - let models = cx - .update(|cx| { - connection - .model_selector(&session_id) - .unwrap() - .list_models(cx) - }) - .await - .unwrap(); - - let acp_thread::AgentModelList::Grouped(models) = models else { - panic!("Unexpected model group"); - }; - assert_eq!( - models, - IndexMap::from_iter([( - AgentModelGroupName("Fake".into()), - vec![AgentModelInfo { - id: acp::ModelId("fake/fake".into()), - name: "Fake".into(), - description: None, - icon: Some(ui::IconName::ZedAssistant), - }] - )]) - ); - } - - #[gpui::test] - async fn test_model_selection_persists_to_settings(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.create_dir(paths::settings_file().parent().unwrap()) - .await - .unwrap(); - fs.insert_file( - paths::settings_file(), - json!({ - "agent": { - "default_model": { - "provider": "foo", - "model": "bar" - } - } - }) - .to_string() - .into_bytes(), - ) - .await; - let project = Project::test(fs.clone(), [], cx).await; - - let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); - - // Create the agent and connection - let agent = NativeAgent::new( - project.clone(), - history_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); - let connection = NativeAgentConnection(agent.clone()); - - // Create a thread/session - let acp_thread = cx - .update(|cx| { - Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) - }) - .await - .unwrap(); - - let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); - - // Select a model - let selector = connection.model_selector(&session_id).unwrap(); - let model_id = acp::ModelId("fake/fake".into()); - cx.update(|cx| 
selector.select_model(model_id.clone(), cx)) - .await - .unwrap(); - - // Verify the thread has the selected model - agent.read_with(cx, |agent, _| { - let session = agent.sessions.get(&session_id).unwrap(); - session.thread.read_with(cx, |thread, _| { - assert_eq!(thread.model().unwrap().id().0, "fake"); - }); - }); - - cx.run_until_parked(); - - // Verify settings file was updated - let settings_content = fs.load(paths::settings_file()).await.unwrap(); - let settings_json: serde_json::Value = serde_json::from_str(&settings_content).unwrap(); - - // Check that the agent settings contain the selected model - assert_eq!( - settings_json["agent"]["default_model"]["model"], - json!("fake") - ); - assert_eq!( - settings_json["agent"]["default_model"]["provider"], - json!("fake") - ); - } - - #[gpui::test] - async fn test_save_load_thread(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/", - json!({ - "a": { - "b.md": "Lorem" - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; - let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); - let agent = NativeAgent::new( - project.clone(), - history_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); - let connection = Rc::new(NativeAgentConnection(agent.clone())); - - let acp_thread = cx - .update(|cx| { - connection - .clone() - .new_thread(project.clone(), Path::new(""), cx) - }) - .await - .unwrap(); - let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); - let thread = agent.read_with(cx, |agent, _| { - agent.sessions.get(&session_id).unwrap().thread.clone() - }); - - // Ensure empty threads are not saved, even if they get mutated. - let model = Arc::new(FakeLanguageModel::default()); - let summary_model = Arc::new(FakeLanguageModel::default()); - thread.update(cx, |thread, cx| { - thread.set_model(model.clone(), cx); - thread.set_summarization_model(Some(summary_model.clone()), cx); - }); - cx.run_until_parked(); - assert_eq!(history_entries(&history_store, cx), vec![]); - - let send = acp_thread.update(cx, |thread, cx| { - thread.send( - vec![ - "What does ".into(), - acp::ContentBlock::ResourceLink(acp::ResourceLink { - name: "b.md".into(), - uri: MentionUri::File { - abs_path: path!("/a/b.md").into(), - } - .to_uri() - .to_string(), - annotations: None, - description: None, - mime_type: None, - size: None, - title: None, - meta: None, - }), - " mean?".into(), - ], - cx, - ) - }); - let send = cx.foreground_executor().spawn(send); - cx.run_until_parked(); - - model.send_last_completion_stream_text_chunk("Lorem."); - model.end_last_completion_stream(); - cx.run_until_parked(); - summary_model - .send_last_completion_stream_text_chunk(&format!("Explaining {}", path!("/a/b.md"))); - summary_model.end_last_completion_stream(); - - send.await.unwrap(); - let uri = MentionUri::File { - abs_path: path!("/a/b.md").into(), - } - .to_uri(); - acp_thread.read_with(cx, |thread, cx| { - assert_eq!( - thread.to_markdown(cx), - formatdoc! {" - ## User - - What does [@b.md]({uri}) mean? - - ## Assistant - - Lorem. - - "} - ) - }); - - cx.run_until_parked(); - - // Drop the ACP thread, which should cause the session to be dropped as well. 
- cx.update(|_| { - drop(thread); - drop(acp_thread); - }); - agent.read_with(cx, |agent, _| { - assert_eq!(agent.sessions.keys().cloned().collect::>(), []); - }); - - // Ensure the thread can be reloaded from disk. - assert_eq!( - history_entries(&history_store, cx), - vec![( - HistoryEntryId::AcpThread(session_id.clone()), - format!("Explaining {}", path!("/a/b.md")) - )] - ); - let acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) - .await - .unwrap(); - acp_thread.read_with(cx, |thread, cx| { - assert_eq!( - thread.to_markdown(cx), - formatdoc! {" - ## User - - What does [@b.md]({uri}) mean? - - ## Assistant - - Lorem. - - "} - ) - }); - } - - fn history_entries( - history: &Entity, - cx: &mut TestAppContext, - ) -> Vec<(HistoryEntryId, String)> { - history.read_with(cx, |history, _| { - history - .entries() - .map(|e| (e.id(), e.title().to_string())) - .collect::>() - }) - } - - fn init_test(cx: &mut TestAppContext) { - env_logger::try_init().ok(); - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - Project::init_settings(cx); - agent_settings::init(cx); - language::init(cx); - LanguageModelRegistry::test(cx); - }); - } -} diff --git a/crates/agent2/src/agent2.rs b/crates/agent2/src/agent2.rs deleted file mode 100644 index 1fc9c1cb956d1676c42713b5d9bb2a0b51e8ac90..0000000000000000000000000000000000000000 --- a/crates/agent2/src/agent2.rs +++ /dev/null @@ -1,19 +0,0 @@ -mod agent; -mod db; -mod history_store; -mod native_agent_server; -mod templates; -mod thread; -mod tool_schema; -mod tools; - -#[cfg(test)] -mod tests; - -pub use agent::*; -pub use db::*; -pub use history_store::*; -pub use native_agent_server::NativeAgentServer; -pub use templates::*; -pub use thread::*; -pub use tools::*; diff --git a/crates/agent2/src/history_store.rs b/crates/agent2/src/history_store.rs deleted file mode 100644 index ff6caacc78e5dba4ee38f160fa6ded7fcb45a845..0000000000000000000000000000000000000000 --- a/crates/agent2/src/history_store.rs +++ /dev/null @@ -1,357 +0,0 @@ -use crate::{DbThreadMetadata, ThreadsDatabase}; -use acp_thread::MentionUri; -use agent_client_protocol as acp; -use anyhow::{Context as _, Result, anyhow}; -use assistant_context::{AssistantContext, SavedContextMetadata}; -use chrono::{DateTime, Utc}; -use db::kvp::KEY_VALUE_STORE; -use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*}; -use itertools::Itertools; -use paths::contexts_dir; -use serde::{Deserialize, Serialize}; -use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration}; -use ui::ElementId; -use util::ResultExt as _; - -const MAX_RECENTLY_OPENED_ENTRIES: usize = 6; -const RECENTLY_OPENED_THREADS_KEY: &str = "recent-agent-threads"; -const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50); - -const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread"); - -#[derive(Clone, Debug)] -pub enum HistoryEntry { - AcpThread(DbThreadMetadata), - TextThread(SavedContextMetadata), -} - -impl HistoryEntry { - pub fn updated_at(&self) -> DateTime { - match self { - HistoryEntry::AcpThread(thread) => thread.updated_at, - HistoryEntry::TextThread(context) => context.mtime.to_utc(), - } - } - - pub fn id(&self) -> HistoryEntryId { - match self { - HistoryEntry::AcpThread(thread) => HistoryEntryId::AcpThread(thread.id.clone()), - HistoryEntry::TextThread(context) => HistoryEntryId::TextThread(context.path.clone()), - } - } - - pub fn mention_uri(&self) -> MentionUri { - match self 
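// Sketch of the unified history list above: agent threads and text threads are wrapped in one
// enum, given a common `updated_at`, and sorted newest-first. Timestamps are plain integers
// here instead of chrono DateTimes, and the metadata structs are simplified stand-ins.
use std::cmp::Reverse;

struct ThreadMeta { title: String, updated_at: u64 }
struct TextThreadMeta { title: String, mtime: u64 }

enum HistoryEntry {
    AcpThread(ThreadMeta),
    TextThread(TextThreadMeta),
}

impl HistoryEntry {
    fn updated_at(&self) -> u64 {
        match self {
            HistoryEntry::AcpThread(thread) => thread.updated_at,
            HistoryEntry::TextThread(context) => context.mtime,
        }
    }

    fn title(&self) -> &str {
        match self {
            // Untitled agent threads fall back to a default label.
            HistoryEntry::AcpThread(thread) if thread.title.is_empty() => "New Thread",
            HistoryEntry::AcpThread(thread) => &thread.title,
            HistoryEntry::TextThread(context) => &context.title,
        }
    }
}

fn main() {
    let mut entries = vec![
        HistoryEntry::TextThread(TextThreadMeta { title: "notes".into(), mtime: 10 }),
        HistoryEntry::AcpThread(ThreadMeta { title: String::new(), updated_at: 30 }),
        HistoryEntry::AcpThread(ThreadMeta { title: "refactor".into(), updated_at: 20 }),
    ];
    // Newest entries first, mirroring `update_entries`.
    entries.sort_unstable_by_key(|entry| Reverse(entry.updated_at()));
    for entry in &entries {
        println!("{} ({})", entry.title(), entry.updated_at());
    }
}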
{ - HistoryEntry::AcpThread(thread) => MentionUri::Thread { - id: thread.id.clone(), - name: thread.title.to_string(), - }, - HistoryEntry::TextThread(context) => MentionUri::TextThread { - path: context.path.as_ref().to_owned(), - name: context.title.to_string(), - }, - } - } - - pub fn title(&self) -> &SharedString { - match self { - HistoryEntry::AcpThread(thread) if thread.title.is_empty() => DEFAULT_TITLE, - HistoryEntry::AcpThread(thread) => &thread.title, - HistoryEntry::TextThread(context) => &context.title, - } - } -} - -/// Generic identifier for a history entry. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum HistoryEntryId { - AcpThread(acp::SessionId), - TextThread(Arc), -} - -impl Into for HistoryEntryId { - fn into(self) -> ElementId { - match self { - HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()), - HistoryEntryId::TextThread(path) => ElementId::Path(path), - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -enum SerializedRecentOpen { - AcpThread(String), - TextThread(String), -} - -pub struct HistoryStore { - threads: Vec, - entries: Vec, - context_store: Entity, - recently_opened_entries: VecDeque, - _subscriptions: Vec, - _save_recently_opened_entries_task: Task<()>, -} - -impl HistoryStore { - pub fn new( - context_store: Entity, - cx: &mut Context, - ) -> Self { - let subscriptions = vec![cx.observe(&context_store, |this, _, cx| this.update_entries(cx))]; - - cx.spawn(async move |this, cx| { - let entries = Self::load_recently_opened_entries(cx).await; - this.update(cx, |this, cx| { - if let Some(entries) = entries.log_err() { - this.recently_opened_entries = entries; - } - - this.reload(cx); - }) - .ok(); - }) - .detach(); - - Self { - context_store, - recently_opened_entries: VecDeque::default(), - threads: Vec::default(), - entries: Vec::default(), - _subscriptions: subscriptions, - _save_recently_opened_entries_task: Task::ready(()), - } - } - - pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> { - self.threads.iter().find(|thread| &thread.id == session_id) - } - - pub fn delete_thread( - &mut self, - id: acp::SessionId, - cx: &mut Context, - ) -> Task> { - let database_future = ThreadsDatabase::connect(cx); - cx.spawn(async move |this, cx| { - let database = database_future.await.map_err(|err| anyhow!(err))?; - database.delete_thread(id.clone()).await?; - this.update(cx, |this, cx| this.reload(cx)) - }) - } - - pub fn delete_text_thread( - &mut self, - path: Arc, - cx: &mut Context, - ) -> Task> { - self.context_store.update(cx, |context_store, cx| { - context_store.delete_local_context(path, cx) - }) - } - - pub fn load_text_thread( - &self, - path: Arc, - cx: &mut Context, - ) -> Task>> { - self.context_store.update(cx, |context_store, cx| { - context_store.open_local_context(path, cx) - }) - } - - pub fn reload(&self, cx: &mut Context) { - let database_future = ThreadsDatabase::connect(cx); - cx.spawn(async move |this, cx| { - let threads = database_future - .await - .map_err(|err| anyhow!(err))? 
- .list_threads() - .await?; - - this.update(cx, |this, cx| { - if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES { - for thread in threads - .iter() - .take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len()) - .rev() - { - this.push_recently_opened_entry( - HistoryEntryId::AcpThread(thread.id.clone()), - cx, - ) - } - } - this.threads = threads; - this.update_entries(cx); - }) - }) - .detach_and_log_err(cx); - } - - fn update_entries(&mut self, cx: &mut Context) { - #[cfg(debug_assertions)] - if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { - return; - } - let mut history_entries = Vec::new(); - history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread)); - history_entries.extend( - self.context_store - .read(cx) - .unordered_contexts() - .cloned() - .map(HistoryEntry::TextThread), - ); - - history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at())); - self.entries = history_entries; - cx.notify() - } - - pub fn is_empty(&self, _cx: &App) -> bool { - self.entries.is_empty() - } - - pub fn recently_opened_entries(&self, cx: &App) -> Vec { - #[cfg(debug_assertions)] - if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { - return Vec::new(); - } - - let thread_entries = self.threads.iter().flat_map(|thread| { - self.recently_opened_entries - .iter() - .enumerate() - .flat_map(|(index, entry)| match entry { - HistoryEntryId::AcpThread(id) if &thread.id == id => { - Some((index, HistoryEntry::AcpThread(thread.clone()))) - } - _ => None, - }) - }); - - let context_entries = - self.context_store - .read(cx) - .unordered_contexts() - .flat_map(|context| { - self.recently_opened_entries - .iter() - .enumerate() - .flat_map(|(index, entry)| match entry { - HistoryEntryId::TextThread(path) if &context.path == path => { - Some((index, HistoryEntry::TextThread(context.clone()))) - } - _ => None, - }) - }); - - thread_entries - .chain(context_entries) - // optimization to halt iteration early - .take(self.recently_opened_entries.len()) - .sorted_unstable_by_key(|(index, _)| *index) - .map(|(_, entry)| entry) - .collect() - } - - fn save_recently_opened_entries(&mut self, cx: &mut Context) { - let serialized_entries = self - .recently_opened_entries - .iter() - .filter_map(|entry| match entry { - HistoryEntryId::TextThread(path) => path.file_name().map(|file| { - SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned()) - }), - HistoryEntryId::AcpThread(id) => { - Some(SerializedRecentOpen::AcpThread(id.to_string())) - } - }) - .collect::>(); - - self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| { - let content = serde_json::to_string(&serialized_entries).unwrap(); - cx.background_executor() - .timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE) - .await; - - if cfg!(any(feature = "test-support", test)) { - return; - } - KEY_VALUE_STORE - .write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content) - .await - .log_err(); - }); - } - - fn load_recently_opened_entries(cx: &AsyncApp) -> Task>> { - cx.background_spawn(async move { - if cfg!(any(feature = "test-support", test)) { - anyhow::bail!("history store does not persist in tests"); - } - let json = KEY_VALUE_STORE - .read_kvp(RECENTLY_OPENED_THREADS_KEY)? - .unwrap_or("[]".to_string()); - let entries = serde_json::from_str::>(&json) - .context("deserializing persisted agent panel navigation history")? 
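// Sketch of the debounced persistence in `save_recently_opened_entries` above: each call
// replaces the pending save task, so rapid changes collapse into one write after a short
// delay. In gpui the previous Task is dropped (and with it cancelled) when overwritten; with
// tokio, used here as a stand-in, the previous JoinHandle is aborted explicitly. The
// key-value store is faked with a println!.
use std::time::Duration;
use tokio::{task::JoinHandle, time::sleep};

struct HistoryStore {
    pending_save: Option<JoinHandle<()>>,
}

impl HistoryStore {
    fn save_recently_opened_entries(&mut self, serialized: String) {
        // Cancel any save that is still waiting out its debounce window.
        if let Some(previous) = self.pending_save.take() {
            previous.abort();
        }
        self.pending_save = Some(tokio::spawn(async move {
            sleep(Duration::from_millis(50)).await;
            println!("persisting: {serialized}");
        }));
    }
}

#[tokio::main]
async fn main() {
    let mut store = HistoryStore { pending_save: None };
    store.save_recently_opened_entries("[\"thread-1\"]".into());
    store.save_recently_opened_entries("[\"thread-2\",\"thread-1\"]".into());
    // Only the latest snapshot is written once the debounce elapses.
    if let Some(task) = store.pending_save.take() {
        let _ = task.await;
    }
}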
- .into_iter() - .take(MAX_RECENTLY_OPENED_ENTRIES) - .flat_map(|entry| match entry { - SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread( - acp::SessionId(id.as_str().into()), - )), - SerializedRecentOpen::TextThread(file_name) => Some( - HistoryEntryId::TextThread(contexts_dir().join(file_name).into()), - ), - }) - .collect(); - Ok(entries) - }) - } - - pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context) { - self.recently_opened_entries - .retain(|old_entry| old_entry != &entry); - self.recently_opened_entries.push_front(entry); - self.recently_opened_entries - .truncate(MAX_RECENTLY_OPENED_ENTRIES); - self.save_recently_opened_entries(cx); - } - - pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context) { - self.recently_opened_entries.retain( - |entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id), - ); - self.save_recently_opened_entries(cx); - } - - pub fn replace_recently_opened_text_thread( - &mut self, - old_path: &Path, - new_path: &Arc, - cx: &mut Context, - ) { - for entry in &mut self.recently_opened_entries { - match entry { - HistoryEntryId::TextThread(path) if path.as_ref() == old_path => { - *entry = HistoryEntryId::TextThread(new_path.clone()); - break; - } - _ => {} - } - } - self.save_recently_opened_entries(cx); - } - - pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context) { - self.recently_opened_entries - .retain(|old_entry| old_entry != entry); - self.save_recently_opened_entries(cx); - } - - pub fn entries(&self) -> impl Iterator { - self.entries.iter().cloned() - } -} diff --git a/crates/agent2/src/thread.rs b/crates/agent2/src/thread.rs deleted file mode 100644 index 756b868dcfc26239911d6e5c0cd8ad984cd7dc4e..0000000000000000000000000000000000000000 --- a/crates/agent2/src/thread.rs +++ /dev/null @@ -1,2663 +0,0 @@ -use crate::{ - ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, - DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, - ListDirectoryTool, MovePathTool, NowTool, OpenTool, ReadFileTool, SystemPromptTemplate, - Template, Templates, TerminalTool, ThinkingTool, WebSearchTool, -}; -use acp_thread::{MentionUri, UserMessageId}; -use action_log::ActionLog; -use agent::thread::{GitState, ProjectSnapshot, WorktreeSnapshot}; -use agent_client_protocol as acp; -use agent_settings::{ - AgentProfileId, AgentProfileSettings, AgentSettings, CompletionMode, - SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT, -}; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::adapt_schema_to_format; -use chrono::{DateTime, Utc}; -use client::{ModelRequestUsage, RequestUsage, UserStore}; -use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit}; -use collections::{HashMap, HashSet, IndexMap}; -use fs::Fs; -use futures::stream; -use futures::{ - FutureExt, - channel::{mpsc, oneshot}, - future::Shared, - stream::FuturesUnordered, -}; -use git::repository::DiffType; -use gpui::{ - App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity, -}; -use language_model::{ - LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt, - LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, - LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, - LanguageModelToolResultContent, LanguageModelToolSchemaFormat, 
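// Sketch of the recently-opened bookkeeping in `push_recently_opened_entry` above: an entry is
// de-duplicated, moved to the front, and the list is capped at a small fixed size. String ids
// stand in for the real HistoryEntryId enum.
use std::collections::VecDeque;

const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;

fn push_recently_opened(entries: &mut VecDeque<String>, entry: String) {
    entries.retain(|existing| existing != &entry); // drop any older occurrence
    entries.push_front(entry);                     // newest goes first
    entries.truncate(MAX_RECENTLY_OPENED_ENTRIES); // keep the list bounded
}

fn main() {
    let mut entries = VecDeque::new();
    for id in ["a", "b", "c", "a", "d", "e", "f", "g"] {
        push_recently_opened(&mut entries, id.to_string());
    }
    // "a" was re-opened, so it appears only once, and the oldest entry has been evicted.
    println!("{entries:?}");
}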
LanguageModelToolUse, - LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID, -}; -use project::{ - Project, - git_store::{GitStore, RepositoryState}, -}; -use prompt_store::ProjectContext; -use schemars::{JsonSchema, Schema}; -use serde::{Deserialize, Serialize}; -use settings::{Settings, update_settings_file}; -use smol::stream::StreamExt; -use std::{ - collections::BTreeMap, - ops::RangeInclusive, - path::Path, - rc::Rc, - sync::Arc, - time::{Duration, Instant}, -}; -use std::{fmt::Write, path::PathBuf}; -use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock}; -use uuid::Uuid; - -const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; -pub const MAX_TOOL_NAME_LENGTH: usize = 64; - -/// The ID of the user prompt that initiated a request. -/// -/// This equates to the user physically submitting a message to the model (e.g., by pressing the Enter key). -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)] -pub struct PromptId(Arc); - -impl PromptId { - pub fn new() -> Self { - Self(Uuid::new_v4().to_string().into()) - } -} - -impl std::fmt::Display for PromptId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -pub(crate) const MAX_RETRY_ATTEMPTS: u8 = 4; -pub(crate) const BASE_RETRY_DELAY: Duration = Duration::from_secs(5); - -#[derive(Debug, Clone)] -enum RetryStrategy { - ExponentialBackoff { - initial_delay: Duration, - max_attempts: u8, - }, - Fixed { - delay: Duration, - max_attempts: u8, - }, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum Message { - User(UserMessage), - Agent(AgentMessage), - Resume, -} - -impl Message { - pub fn as_agent_message(&self) -> Option<&AgentMessage> { - match self { - Message::Agent(agent_message) => Some(agent_message), - _ => None, - } - } - - pub fn to_request(&self) -> Vec { - match self { - Message::User(message) => vec![message.to_request()], - Message::Agent(message) => message.to_request(), - Message::Resume => vec![LanguageModelRequestMessage { - role: Role::User, - content: vec!["Continue where you left off".into()], - cache: false, - }], - } - } - - pub fn to_markdown(&self) -> String { - match self { - Message::User(message) => message.to_markdown(), - Message::Agent(message) => message.to_markdown(), - Message::Resume => "[resume]\n".into(), - } - } - - pub fn role(&self) -> Role { - match self { - Message::User(_) | Message::Resume => Role::User, - Message::Agent(_) => Role::Assistant, - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct UserMessage { - pub id: UserMessageId, - pub content: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum UserMessageContent { - Text(String), - Mention { uri: MentionUri, content: String }, - Image(LanguageModelImage), -} - -impl UserMessage { - pub fn to_markdown(&self) -> String { - let mut markdown = String::from("## User\n\n"); - - for content in &self.content { - match content { - UserMessageContent::Text(text) => { - markdown.push_str(text); - markdown.push('\n'); - } - UserMessageContent::Image(_) => { - markdown.push_str("\n"); - } - UserMessageContent::Mention { uri, content } => { - if !content.is_empty() { - let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content); - } else { - let _ = writeln!(&mut markdown, "{}", uri.as_link()); - } - } - } - } - - markdown - } - - fn to_request(&self) -> LanguageModelRequestMessage { - let mut message = 
LanguageModelRequestMessage { - role: Role::User, - content: Vec::with_capacity(self.content.len()), - cache: false, - }; - - const OPEN_CONTEXT: &str = "\n\ - The following items were attached by the user. \ - They are up-to-date and don't need to be re-read.\n\n"; - - const OPEN_FILES_TAG: &str = ""; - const OPEN_DIRECTORIES_TAG: &str = ""; - const OPEN_SYMBOLS_TAG: &str = ""; - const OPEN_SELECTIONS_TAG: &str = ""; - const OPEN_THREADS_TAG: &str = ""; - const OPEN_FETCH_TAG: &str = ""; - const OPEN_RULES_TAG: &str = - "\nThe user has specified the following rules that should be applied:\n"; - - let mut file_context = OPEN_FILES_TAG.to_string(); - let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); - let mut symbol_context = OPEN_SYMBOLS_TAG.to_string(); - let mut selection_context = OPEN_SELECTIONS_TAG.to_string(); - let mut thread_context = OPEN_THREADS_TAG.to_string(); - let mut fetch_context = OPEN_FETCH_TAG.to_string(); - let mut rules_context = OPEN_RULES_TAG.to_string(); - - for chunk in &self.content { - let chunk = match chunk { - UserMessageContent::Text(text) => { - language_model::MessageContent::Text(text.clone()) - } - UserMessageContent::Image(value) => { - language_model::MessageContent::Image(value.clone()) - } - UserMessageContent::Mention { uri, content } => { - match uri { - MentionUri::File { abs_path } => { - write!( - &mut file_context, - "\n{}", - MarkdownCodeBlock { - tag: &codeblock_tag(abs_path, None), - text: &content.to_string(), - } - ) - .ok(); - } - MentionUri::PastedImage => { - debug_panic!("pasted image URI should not be used in mention content") - } - MentionUri::Directory { .. } => { - write!(&mut directory_context, "\n{}\n", content).ok(); - } - MentionUri::Symbol { - abs_path: path, - line_range, - .. - } => { - write!( - &mut symbol_context, - "\n{}", - MarkdownCodeBlock { - tag: &codeblock_tag(path, Some(line_range)), - text: content - } - ) - .ok(); - } - MentionUri::Selection { - abs_path: path, - line_range, - .. - } => { - write!( - &mut selection_context, - "\n{}", - MarkdownCodeBlock { - tag: &codeblock_tag( - path.as_deref().unwrap_or("Untitled".as_ref()), - Some(line_range) - ), - text: content - } - ) - .ok(); - } - MentionUri::Thread { .. } => { - write!(&mut thread_context, "\n{}\n", content).ok(); - } - MentionUri::TextThread { .. } => { - write!(&mut thread_context, "\n{}\n", content).ok(); - } - MentionUri::Rule { .. 
} => { - write!( - &mut rules_context, - "\n{}", - MarkdownCodeBlock { - tag: "", - text: content - } - ) - .ok(); - } - MentionUri::Fetch { url } => { - write!(&mut fetch_context, "\nFetch: {}\n\n{}", url, content).ok(); - } - } - - language_model::MessageContent::Text(uri.as_link().to_string()) - } - }; - - message.content.push(chunk); - } - - let len_before_context = message.content.len(); - - if file_context.len() > OPEN_FILES_TAG.len() { - file_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(file_context)); - } - - if directory_context.len() > OPEN_DIRECTORIES_TAG.len() { - directory_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(directory_context)); - } - - if symbol_context.len() > OPEN_SYMBOLS_TAG.len() { - symbol_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(symbol_context)); - } - - if selection_context.len() > OPEN_SELECTIONS_TAG.len() { - selection_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(selection_context)); - } - - if thread_context.len() > OPEN_THREADS_TAG.len() { - thread_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(thread_context)); - } - - if fetch_context.len() > OPEN_FETCH_TAG.len() { - fetch_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(fetch_context)); - } - - if rules_context.len() > OPEN_RULES_TAG.len() { - rules_context.push_str("\n"); - message - .content - .push(language_model::MessageContent::Text(rules_context)); - } - - if message.content.len() > len_before_context { - message.content.insert( - len_before_context, - language_model::MessageContent::Text(OPEN_CONTEXT.into()), - ); - message - .content - .push(language_model::MessageContent::Text("".into())); - } - - message - } -} - -fn codeblock_tag(full_path: &Path, line_range: Option<&RangeInclusive>) -> String { - let mut result = String::new(); - - if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) { - let _ = write!(result, "{} ", extension); - } - - let _ = write!(result, "{}", full_path.display()); - - if let Some(range) = line_range { - if range.start() == range.end() { - let _ = write!(result, ":{}", range.start() + 1); - } else { - let _ = write!(result, ":{}-{}", range.start() + 1, range.end() + 1); - } - } - - result -} - -impl AgentMessage { - pub fn to_markdown(&self) -> String { - let mut markdown = String::from("## Assistant\n\n"); - - for content in &self.content { - match content { - AgentMessageContent::Text(text) => { - markdown.push_str(text); - markdown.push('\n'); - } - AgentMessageContent::Thinking { text, .. 
} => { - markdown.push_str(""); - markdown.push_str(text); - markdown.push_str("\n"); - } - AgentMessageContent::RedactedThinking(_) => { - markdown.push_str("\n") - } - AgentMessageContent::ToolUse(tool_use) => { - markdown.push_str(&format!( - "**Tool Use**: {} (ID: {})\n", - tool_use.name, tool_use.id - )); - markdown.push_str(&format!( - "{}\n", - MarkdownCodeBlock { - tag: "json", - text: &format!("{:#}", tool_use.input) - } - )); - } - } - } - - for tool_result in self.tool_results.values() { - markdown.push_str(&format!( - "**Tool Result**: {} (ID: {})\n\n", - tool_result.tool_name, tool_result.tool_use_id - )); - if tool_result.is_error { - markdown.push_str("**ERROR:**\n"); - } - - match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - writeln!(markdown, "{text}\n").ok(); - } - LanguageModelToolResultContent::Image(_) => { - writeln!(markdown, "\n").ok(); - } - } - - if let Some(output) = tool_result.output.as_ref() { - writeln!( - markdown, - "**Debug Output**:\n\n```json\n{}\n```\n", - serde_json::to_string_pretty(output).unwrap() - ) - .unwrap(); - } - } - - markdown - } - - pub fn to_request(&self) -> Vec { - let mut assistant_message = LanguageModelRequestMessage { - role: Role::Assistant, - content: Vec::with_capacity(self.content.len()), - cache: false, - }; - for chunk in &self.content { - match chunk { - AgentMessageContent::Text(text) => { - assistant_message - .content - .push(language_model::MessageContent::Text(text.clone())); - } - AgentMessageContent::Thinking { text, signature } => { - assistant_message - .content - .push(language_model::MessageContent::Thinking { - text: text.clone(), - signature: signature.clone(), - }); - } - AgentMessageContent::RedactedThinking(value) => { - assistant_message.content.push( - language_model::MessageContent::RedactedThinking(value.clone()), - ); - } - AgentMessageContent::ToolUse(tool_use) => { - if self.tool_results.contains_key(&tool_use.id) { - assistant_message - .content - .push(language_model::MessageContent::ToolUse(tool_use.clone())); - } - } - }; - } - - let mut user_message = LanguageModelRequestMessage { - role: Role::User, - content: Vec::new(), - cache: false, - }; - - for tool_result in self.tool_results.values() { - let mut tool_result = tool_result.clone(); - // Surprisingly, the API fails if we return an empty string here. - // It thinks we are sending a tool use without a tool result. 
- if tool_result.content.is_empty() { - tool_result.content = "".into(); - } - user_message - .content - .push(language_model::MessageContent::ToolResult(tool_result)); - } - - let mut messages = Vec::new(); - if !assistant_message.content.is_empty() { - messages.push(assistant_message); - } - if !user_message.content.is_empty() { - messages.push(user_message); - } - messages - } -} - -#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct AgentMessage { - pub content: Vec, - pub tool_results: IndexMap, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum AgentMessageContent { - Text(String), - Thinking { - text: String, - signature: Option, - }, - RedactedThinking(String), - ToolUse(LanguageModelToolUse), -} - -pub trait TerminalHandle { - fn id(&self, cx: &AsyncApp) -> Result; - fn current_output(&self, cx: &AsyncApp) -> Result; - fn wait_for_exit(&self, cx: &AsyncApp) -> Result>>; -} - -pub trait ThreadEnvironment { - fn create_terminal( - &self, - command: String, - cwd: Option, - output_byte_limit: Option, - cx: &mut AsyncApp, - ) -> Task>>; -} - -#[derive(Debug)] -pub enum ThreadEvent { - UserMessage(UserMessage), - AgentText(String), - AgentThinking(String), - ToolCall(acp::ToolCall), - ToolCallUpdate(acp_thread::ToolCallUpdate), - ToolCallAuthorization(ToolCallAuthorization), - Retry(acp_thread::RetryStatus), - Stop(acp::StopReason), -} - -#[derive(Debug)] -pub struct NewTerminal { - pub command: String, - pub output_byte_limit: Option, - pub cwd: Option, - pub response: oneshot::Sender>>, -} - -#[derive(Debug)] -pub struct ToolCallAuthorization { - pub tool_call: acp::ToolCallUpdate, - pub options: Vec, - pub response: oneshot::Sender, -} - -#[derive(Debug, thiserror::Error)] -enum CompletionError { - #[error("max tokens")] - MaxTokens, - #[error("refusal")] - Refusal, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -pub struct Thread { - id: acp::SessionId, - prompt_id: PromptId, - updated_at: DateTime, - title: Option, - pending_title_generation: Option>, - summary: Option, - messages: Vec, - user_store: Entity, - completion_mode: CompletionMode, - /// Holds the task that handles agent interaction until the end of the turn. - /// Survives across multiple requests as the model performs tool calls and - /// we run tools, report their results. 
- running_turn: Option, - pending_message: Option, - tools: BTreeMap>, - tool_use_limit_reached: bool, - request_token_usage: HashMap, - #[allow(unused)] - cumulative_token_usage: TokenUsage, - #[allow(unused)] - initial_project_snapshot: Shared>>>, - context_server_registry: Entity, - profile_id: AgentProfileId, - project_context: Entity, - templates: Arc, - model: Option>, - summarization_model: Option>, - prompt_capabilities_tx: watch::Sender, - pub(crate) prompt_capabilities_rx: watch::Receiver, - pub(crate) project: Entity, - pub(crate) action_log: Entity, -} - -impl Thread { - fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities { - let image = model.map_or(true, |model| model.supports_images()); - acp::PromptCapabilities { - meta: None, - image, - audio: false, - embedded_context: true, - } - } - - pub fn new( - project: Entity, - project_context: Entity, - context_server_registry: Entity, - templates: Arc, - model: Option>, - cx: &mut Context, - ) -> Self { - let profile_id = AgentSettings::get_global(cx).default_profile.clone(); - let action_log = cx.new(|_cx| ActionLog::new(project.clone())); - let (prompt_capabilities_tx, prompt_capabilities_rx) = - watch::channel(Self::prompt_capabilities(model.as_deref())); - Self { - id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()), - prompt_id: PromptId::new(), - updated_at: Utc::now(), - title: None, - pending_title_generation: None, - summary: None, - messages: Vec::new(), - user_store: project.read(cx).user_store(), - completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, - running_turn: None, - pending_message: None, - tools: BTreeMap::default(), - tool_use_limit_reached: false, - request_token_usage: HashMap::default(), - cumulative_token_usage: TokenUsage::default(), - initial_project_snapshot: { - let project_snapshot = Self::project_snapshot(project.clone(), cx); - cx.foreground_executor() - .spawn(async move { Some(project_snapshot.await) }) - .shared() - }, - context_server_registry, - profile_id, - project_context, - templates, - model, - summarization_model: None, - prompt_capabilities_tx, - prompt_capabilities_rx, - project, - action_log, - } - } - - pub fn id(&self) -> &acp::SessionId { - &self.id - } - - pub fn replay( - &mut self, - cx: &mut Context, - ) -> mpsc::UnboundedReceiver> { - let (tx, rx) = mpsc::unbounded(); - let stream = ThreadEventStream(tx); - for message in &self.messages { - match message { - Message::User(user_message) => stream.send_user_message(user_message), - Message::Agent(assistant_message) => { - for content in &assistant_message.content { - match content { - AgentMessageContent::Text(text) => stream.send_text(text), - AgentMessageContent::Thinking { text, .. 
} => { - stream.send_thinking(text) - } - AgentMessageContent::RedactedThinking(_) => {} - AgentMessageContent::ToolUse(tool_use) => { - self.replay_tool_call( - tool_use, - assistant_message.tool_results.get(&tool_use.id), - &stream, - cx, - ); - } - } - } - } - Message::Resume => {} - } - } - rx - } - - fn replay_tool_call( - &self, - tool_use: &LanguageModelToolUse, - tool_result: Option<&LanguageModelToolResult>, - stream: &ThreadEventStream, - cx: &mut Context, - ) { - let tool = self.tools.get(tool_use.name.as_ref()).cloned().or_else(|| { - self.context_server_registry - .read(cx) - .servers() - .find_map(|(_, tools)| { - if let Some(tool) = tools.get(tool_use.name.as_ref()) { - Some(tool.clone()) - } else { - None - } - }) - }); - - let Some(tool) = tool else { - stream - .0 - .unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall { - meta: None, - id: acp::ToolCallId(tool_use.id.to_string().into()), - title: tool_use.name.to_string(), - kind: acp::ToolKind::Other, - status: acp::ToolCallStatus::Failed, - content: Vec::new(), - locations: Vec::new(), - raw_input: Some(tool_use.input.clone()), - raw_output: None, - }))) - .ok(); - return; - }; - - let title = tool.initial_title(tool_use.input.clone(), cx); - let kind = tool.kind(); - stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone()); - - let output = tool_result - .as_ref() - .and_then(|result| result.output.clone()); - if let Some(output) = output.clone() { - let tool_event_stream = ToolCallEventStream::new( - tool_use.id.clone(), - stream.clone(), - Some(self.project.read(cx).fs().clone()), - ); - tool.replay(tool_use.input.clone(), output, tool_event_stream, cx) - .log_err(); - } - - stream.update_tool_call_fields( - &tool_use.id, - acp::ToolCallUpdateFields { - status: Some( - tool_result - .as_ref() - .map_or(acp::ToolCallStatus::Failed, |result| { - if result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - } - }), - ), - raw_output: output, - ..Default::default() - }, - ); - } - - pub fn from_db( - id: acp::SessionId, - db_thread: DbThread, - project: Entity, - project_context: Entity, - context_server_registry: Entity, - action_log: Entity, - templates: Arc, - cx: &mut Context, - ) -> Self { - let profile_id = db_thread - .profile - .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone()); - let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { - db_thread - .model - .and_then(|model| { - let model = SelectedModel { - provider: model.provider.clone().into(), - model: model.model.into(), - }; - registry.select_model(&model, cx) - }) - .or_else(|| registry.default_model()) - .map(|model| model.model) - }); - let (prompt_capabilities_tx, prompt_capabilities_rx) = - watch::channel(Self::prompt_capabilities(model.as_deref())); - - Self { - id, - prompt_id: PromptId::new(), - title: if db_thread.title.is_empty() { - None - } else { - Some(db_thread.title.clone()) - }, - pending_title_generation: None, - summary: db_thread.detailed_summary, - messages: db_thread.messages, - user_store: project.read(cx).user_store(), - completion_mode: db_thread.completion_mode.unwrap_or_default(), - running_turn: None, - pending_message: None, - tools: BTreeMap::default(), - tool_use_limit_reached: false, - request_token_usage: db_thread.request_token_usage.clone(), - cumulative_token_usage: db_thread.cumulative_token_usage, - initial_project_snapshot: Task::ready(db_thread.initial_project_snapshot).shared(), - context_server_registry, - profile_id, - 
project_context, - templates, - model, - summarization_model: None, - project, - action_log, - updated_at: db_thread.updated_at, - prompt_capabilities_tx, - prompt_capabilities_rx, - } - } - - pub fn to_db(&self, cx: &App) -> Task { - let initial_project_snapshot = self.initial_project_snapshot.clone(); - let mut thread = DbThread { - title: self.title(), - messages: self.messages.clone(), - updated_at: self.updated_at, - detailed_summary: self.summary.clone(), - initial_project_snapshot: None, - cumulative_token_usage: self.cumulative_token_usage, - request_token_usage: self.request_token_usage.clone(), - model: self.model.as_ref().map(|model| DbLanguageModel { - provider: model.provider_id().to_string(), - model: model.name().0.to_string(), - }), - completion_mode: Some(self.completion_mode), - profile: Some(self.profile_id.clone()), - }; - - cx.background_spawn(async move { - let initial_project_snapshot = initial_project_snapshot.await; - thread.initial_project_snapshot = initial_project_snapshot; - thread - }) - } - - /// Create a snapshot of the current project state including git information and unsaved buffers. - fn project_snapshot( - project: Entity, - cx: &mut Context, - ) -> Task> { - let git_store = project.read(cx).git_store().clone(); - let worktree_snapshots: Vec<_> = project - .read(cx) - .visible_worktrees(cx) - .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx)) - .collect(); - - cx.spawn(async move |_, _| { - let worktree_snapshots = futures::future::join_all(worktree_snapshots).await; - - Arc::new(ProjectSnapshot { - worktree_snapshots, - timestamp: Utc::now(), - }) - }) - } - - fn worktree_snapshot( - worktree: Entity, - git_store: Entity, - cx: &App, - ) -> Task { - cx.spawn(async move |cx| { - // Get worktree path and snapshot - let worktree_info = cx.update(|app_cx| { - let worktree = worktree.read(app_cx); - let path = worktree.abs_path().to_string_lossy().into_owned(); - let snapshot = worktree.snapshot(); - (path, snapshot) - }); - - let Ok((worktree_path, _snapshot)) = worktree_info else { - return WorktreeSnapshot { - worktree_path: String::new(), - git_state: None, - }; - }; - - let git_state = git_store - .update(cx, |git_store, cx| { - git_store - .repositories() - .values() - .find(|repo| { - repo.read(cx) - .abs_path_to_repo_path(&worktree.read(cx).abs_path()) - .is_some() - }) - .cloned() - }) - .ok() - .flatten() - .map(|repo| { - repo.update(cx, |repo, _| { - let current_branch = - repo.branch.as_ref().map(|branch| branch.name().to_owned()); - repo.send_job(None, |state, _| async move { - let RepositoryState::Local { backend, .. 
} = state else { - return GitState { - remote_url: None, - head_sha: None, - current_branch, - diff: None, - }; - }; - - let remote_url = backend.remote_url("origin"); - let head_sha = backend.head_sha().await; - let diff = backend.diff(DiffType::HeadToWorktree).await.ok(); - - GitState { - remote_url, - head_sha, - current_branch, - diff, - } - }) - }) - }); - - let git_state = match git_state { - Some(git_state) => match git_state.ok() { - Some(git_state) => git_state.await.ok(), - None => None, - }, - None => None, - }; - - WorktreeSnapshot { - worktree_path, - git_state, - } - }) - } - - pub fn project_context(&self) -> &Entity { - &self.project_context - } - - pub fn project(&self) -> &Entity { - &self.project - } - - pub fn action_log(&self) -> &Entity { - &self.action_log - } - - pub fn is_empty(&self) -> bool { - self.messages.is_empty() && self.title.is_none() - } - - pub fn model(&self) -> Option<&Arc> { - self.model.as_ref() - } - - pub fn set_model(&mut self, model: Arc, cx: &mut Context) { - let old_usage = self.latest_token_usage(); - self.model = Some(model); - let new_caps = Self::prompt_capabilities(self.model.as_deref()); - let new_usage = self.latest_token_usage(); - if old_usage != new_usage { - cx.emit(TokenUsageUpdated(new_usage)); - } - self.prompt_capabilities_tx.send(new_caps).log_err(); - cx.notify() - } - - pub fn summarization_model(&self) -> Option<&Arc> { - self.summarization_model.as_ref() - } - - pub fn set_summarization_model( - &mut self, - model: Option>, - cx: &mut Context, - ) { - self.summarization_model = model; - cx.notify() - } - - pub fn completion_mode(&self) -> CompletionMode { - self.completion_mode - } - - pub fn set_completion_mode(&mut self, mode: CompletionMode, cx: &mut Context) { - let old_usage = self.latest_token_usage(); - self.completion_mode = mode; - let new_usage = self.latest_token_usage(); - if old_usage != new_usage { - cx.emit(TokenUsageUpdated(new_usage)); - } - cx.notify() - } - - #[cfg(any(test, feature = "test-support"))] - pub fn last_message(&self) -> Option { - if let Some(message) = self.pending_message.clone() { - Some(Message::Agent(message)) - } else { - self.messages.last().cloned() - } - } - - pub fn add_default_tools( - &mut self, - environment: Rc, - cx: &mut Context, - ) { - let language_registry = self.project.read(cx).languages().clone(); - self.add_tool(CopyPathTool::new(self.project.clone())); - self.add_tool(CreateDirectoryTool::new(self.project.clone())); - self.add_tool(DeletePathTool::new( - self.project.clone(), - self.action_log.clone(), - )); - self.add_tool(DiagnosticsTool::new(self.project.clone())); - self.add_tool(EditFileTool::new( - self.project.clone(), - cx.weak_entity(), - language_registry, - )); - self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); - self.add_tool(FindPathTool::new(self.project.clone())); - self.add_tool(GrepTool::new(self.project.clone())); - self.add_tool(ListDirectoryTool::new(self.project.clone())); - self.add_tool(MovePathTool::new(self.project.clone())); - self.add_tool(NowTool); - self.add_tool(OpenTool::new(self.project.clone())); - self.add_tool(ReadFileTool::new( - self.project.clone(), - self.action_log.clone(), - )); - self.add_tool(TerminalTool::new(self.project.clone(), environment)); - self.add_tool(ThinkingTool); - self.add_tool(WebSearchTool); - } - - pub fn add_tool(&mut self, tool: T) { - self.tools.insert(T::name().into(), tool.erase()); - } - - pub fn remove_tool(&mut self, name: &str) -> bool { - 
self.tools.remove(name).is_some() - } - - pub fn profile(&self) -> &AgentProfileId { - &self.profile_id - } - - pub fn set_profile(&mut self, profile_id: AgentProfileId) { - self.profile_id = profile_id; - } - - pub fn cancel(&mut self, cx: &mut Context) { - if let Some(running_turn) = self.running_turn.take() { - running_turn.cancel(); - } - self.flush_pending_message(cx); - } - - fn update_token_usage(&mut self, update: language_model::TokenUsage, cx: &mut Context) { - let Some(last_user_message) = self.last_user_message() else { - return; - }; - - self.request_token_usage - .insert(last_user_message.id.clone(), update); - cx.emit(TokenUsageUpdated(self.latest_token_usage())); - cx.notify(); - } - - pub fn truncate(&mut self, message_id: UserMessageId, cx: &mut Context) -> Result<()> { - self.cancel(cx); - let Some(position) = self.messages.iter().position( - |msg| matches!(msg, Message::User(UserMessage { id, .. }) if id == &message_id), - ) else { - return Err(anyhow!("Message not found")); - }; - - for message in self.messages.drain(position..) { - match message { - Message::User(message) => { - self.request_token_usage.remove(&message.id); - } - Message::Agent(_) | Message::Resume => {} - } - } - self.summary = None; - cx.notify(); - Ok(()) - } - - pub fn latest_token_usage(&self) -> Option { - let last_user_message = self.last_user_message()?; - let tokens = self.request_token_usage.get(&last_user_message.id)?; - let model = self.model.clone()?; - - Some(acp_thread::TokenUsage { - max_tokens: model.max_token_count_for_mode(self.completion_mode.into()), - used_tokens: tokens.total_tokens(), - }) - } - - pub fn resume( - &mut self, - cx: &mut Context, - ) -> Result>> { - self.messages.push(Message::Resume); - cx.notify(); - - log::debug!("Total messages in thread: {}", self.messages.len()); - self.run_turn(cx) - } - - /// Sending a message results in the model streaming a response, which could include tool calls. - /// After calling tools, the model will stops and waits for any outstanding tool calls to be completed and their results sent. - /// The returned channel will report all the occurrences in which the model stops before erroring or ending its turn. 
- pub fn send( - &mut self, - id: UserMessageId, - content: impl IntoIterator, - cx: &mut Context, - ) -> Result>> - where - T: Into, - { - let model = self.model().context("No language model configured")?; - - log::info!("Thread::send called with model: {}", model.name().0); - self.advance_prompt_id(); - - let content = content.into_iter().map(Into::into).collect::>(); - log::debug!("Thread::send content: {:?}", content); - - self.messages - .push(Message::User(UserMessage { id, content })); - cx.notify(); - - log::debug!("Total messages in thread: {}", self.messages.len()); - self.run_turn(cx) - } - - fn run_turn( - &mut self, - cx: &mut Context, - ) -> Result>> { - self.cancel(cx); - - let model = self.model.clone().context("No language model configured")?; - let profile = AgentSettings::get_global(cx) - .profiles - .get(&self.profile_id) - .context("Profile not found")?; - let (events_tx, events_rx) = mpsc::unbounded::>(); - let event_stream = ThreadEventStream(events_tx); - let message_ix = self.messages.len().saturating_sub(1); - self.tool_use_limit_reached = false; - self.summary = None; - self.running_turn = Some(RunningTurn { - event_stream: event_stream.clone(), - tools: self.enabled_tools(profile, &model, cx), - _task: cx.spawn(async move |this, cx| { - log::debug!("Starting agent turn execution"); - - let turn_result = Self::run_turn_internal(&this, model, &event_stream, cx).await; - _ = this.update(cx, |this, cx| this.flush_pending_message(cx)); - - match turn_result { - Ok(()) => { - log::debug!("Turn execution completed"); - event_stream.send_stop(acp::StopReason::EndTurn); - } - Err(error) => { - log::error!("Turn execution failed: {:?}", error); - match error.downcast::() { - Ok(CompletionError::Refusal) => { - event_stream.send_stop(acp::StopReason::Refusal); - _ = this.update(cx, |this, _| this.messages.truncate(message_ix)); - } - Ok(CompletionError::MaxTokens) => { - event_stream.send_stop(acp::StopReason::MaxTokens); - } - Ok(CompletionError::Other(error)) | Err(error) => { - event_stream.send_error(error); - } - } - } - } - - _ = this.update(cx, |this, _| this.running_turn.take()); - }), - }); - Ok(events_rx) - } - - async fn run_turn_internal( - this: &WeakEntity, - model: Arc, - event_stream: &ThreadEventStream, - cx: &mut AsyncApp, - ) -> Result<()> { - let mut attempt = 0; - let mut intent = CompletionIntent::UserPrompt; - loop { - let request = - this.update(cx, |this, cx| this.build_completion_request(intent, cx))??; - - telemetry::event!( - "Agent Thread Completion", - thread_id = this.read_with(cx, |this, _| this.id.to_string())?, - prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, - model = model.telemetry_id(), - model_provider = model.provider_id().to_string(), - attempt - ); - - log::debug!("Calling model.stream_completion, attempt {}", attempt); - - let (mut events, mut error) = match model.stream_completion(request, cx).await { - Ok(events) => (events, None), - Err(err) => (stream::empty().boxed(), Some(err)), - }; - let mut tool_results = FuturesUnordered::new(); - while let Some(event) = events.next().await { - log::trace!("Received completion event: {:?}", event); - match event { - Ok(event) => { - tool_results.extend(this.update(cx, |this, cx| { - this.handle_completion_event(event, event_stream, cx) - })??); - } - Err(err) => { - error = Some(err); - break; - } - } - } - - let end_turn = tool_results.is_empty(); - while let Some(tool_result) = tool_results.next().await { - log::debug!("Tool finished {:?}", tool_result); - - 
event_stream.update_tool_call_fields( - &tool_result.tool_use_id, - acp::ToolCallUpdateFields { - status: Some(if tool_result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - }), - raw_output: tool_result.output.clone(), - ..Default::default() - }, - ); - this.update(cx, |this, _cx| { - this.pending_message() - .tool_results - .insert(tool_result.tool_use_id.clone(), tool_result); - })?; - } - - this.update(cx, |this, cx| { - this.flush_pending_message(cx); - if this.title.is_none() && this.pending_title_generation.is_none() { - this.generate_title(cx); - } - })?; - - if let Some(error) = error { - attempt += 1; - let retry = this.update(cx, |this, cx| { - let user_store = this.user_store.read(cx); - this.handle_completion_error(error, attempt, user_store.plan()) - })??; - let timer = cx.background_executor().timer(retry.duration); - event_stream.send_retry(retry); - timer.await; - this.update(cx, |this, _cx| { - if let Some(Message::Agent(message)) = this.messages.last() { - if message.tool_results.is_empty() { - intent = CompletionIntent::UserPrompt; - this.messages.push(Message::Resume); - } - } - })?; - } else if this.read_with(cx, |this, _| this.tool_use_limit_reached)? { - return Err(language_model::ToolUseLimitReachedError.into()); - } else if end_turn { - return Ok(()); - } else { - intent = CompletionIntent::ToolResults; - attempt = 0; - } - } - } - - fn handle_completion_error( - &mut self, - error: LanguageModelCompletionError, - attempt: u8, - plan: Option, - ) -> Result { - let Some(model) = self.model.as_ref() else { - return Err(anyhow!(error)); - }; - - let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID { - match plan { - Some(Plan::V2(_)) => true, - Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn, - None => false, - } - } else { - true - }; - - if !auto_retry { - return Err(anyhow!(error)); - } - - let Some(strategy) = Self::retry_strategy_for(&error) else { - return Err(anyhow!(error)); - }; - - let max_attempts = match &strategy { - RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, - RetryStrategy::Fixed { max_attempts, .. } => *max_attempts, - }; - - if attempt > max_attempts { - return Err(anyhow!(error)); - } - - let delay = match &strategy { - RetryStrategy::ExponentialBackoff { initial_delay, .. } => { - let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); - Duration::from_secs(delay_secs) - } - RetryStrategy::Fixed { delay, .. } => *delay, - }; - log::debug!("Retry attempt {attempt} with delay {delay:?}"); - - Ok(acp_thread::RetryStatus { - last_error: error.to_string().into(), - attempt: attempt as usize, - max_attempts: max_attempts as usize, - started_at: Instant::now(), - duration: delay, - }) - } - - /// A helper method that's called on every streamed completion event. - /// Returns an optional tool result task, which the main agentic loop will - /// send back to the model when it resolves. - fn handle_completion_event( - &mut self, - event: LanguageModelCompletionEvent, - event_stream: &ThreadEventStream, - cx: &mut Context, - ) -> Result>> { - log::trace!("Handling streamed completion event: {:?}", event); - use LanguageModelCompletionEvent::*; - - match event { - StartMessage { .. 
} => { - self.flush_pending_message(cx); - self.pending_message = Some(AgentMessage::default()); - } - Text(new_text) => self.handle_text_event(new_text, event_stream, cx), - Thinking { text, signature } => { - self.handle_thinking_event(text, signature, event_stream, cx) - } - RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx), - ToolUse(tool_use) => { - return Ok(self.handle_tool_use_event(tool_use, event_stream, cx)); - } - ToolUseJsonParseError { - id, - tool_name, - raw_input, - json_parse_error, - } => { - return Ok(Some(Task::ready( - self.handle_tool_use_json_parse_error_event( - id, - tool_name, - raw_input, - json_parse_error, - ), - ))); - } - UsageUpdate(usage) => { - telemetry::event!( - "Agent Thread Completion Usage Updated", - thread_id = self.id.to_string(), - prompt_id = self.prompt_id.to_string(), - model = self.model.as_ref().map(|m| m.telemetry_id()), - model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), - input_tokens = usage.input_tokens, - output_tokens = usage.output_tokens, - cache_creation_input_tokens = usage.cache_creation_input_tokens, - cache_read_input_tokens = usage.cache_read_input_tokens, - ); - self.update_token_usage(usage, cx); - } - StatusUpdate(CompletionRequestStatus::UsageUpdated { amount, limit }) => { - self.update_model_request_usage(amount, limit, cx); - } - StatusUpdate( - CompletionRequestStatus::Started - | CompletionRequestStatus::Queued { .. } - | CompletionRequestStatus::Failed { .. }, - ) => {} - StatusUpdate(CompletionRequestStatus::ToolUseLimitReached) => { - self.tool_use_limit_reached = true; - } - Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()), - Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()), - Stop(StopReason::ToolUse | StopReason::EndTurn) => {} - } - - Ok(None) - } - - fn handle_text_event( - &mut self, - new_text: String, - event_stream: &ThreadEventStream, - cx: &mut Context, - ) { - event_stream.send_text(&new_text); - - let last_message = self.pending_message(); - if let Some(AgentMessageContent::Text(text)) = last_message.content.last_mut() { - text.push_str(&new_text); - } else { - last_message - .content - .push(AgentMessageContent::Text(new_text)); - } - - cx.notify(); - } - - fn handle_thinking_event( - &mut self, - new_text: String, - new_signature: Option, - event_stream: &ThreadEventStream, - cx: &mut Context, - ) { - event_stream.send_thinking(&new_text); - - let last_message = self.pending_message(); - if let Some(AgentMessageContent::Thinking { text, signature }) = - last_message.content.last_mut() - { - text.push_str(&new_text); - *signature = new_signature.or(signature.take()); - } else { - last_message.content.push(AgentMessageContent::Thinking { - text: new_text, - signature: new_signature, - }); - } - - cx.notify(); - } - - fn handle_redacted_thinking_event(&mut self, data: String, cx: &mut Context) { - let last_message = self.pending_message(); - last_message - .content - .push(AgentMessageContent::RedactedThinking(data)); - cx.notify(); - } - - fn handle_tool_use_event( - &mut self, - tool_use: LanguageModelToolUse, - event_stream: &ThreadEventStream, - cx: &mut Context, - ) -> Option> { - cx.notify(); - - let tool = self.tool(tool_use.name.as_ref()); - let mut title = SharedString::from(&tool_use.name); - let mut kind = acp::ToolKind::Other; - if let Some(tool) = tool.as_ref() { - title = tool.initial_title(tool_use.input.clone(), cx); - kind = tool.kind(); - } - - // Ensure the last message ends in the 
current tool use - let last_message = self.pending_message(); - let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { - if let AgentMessageContent::ToolUse(last_tool_use) = content { - if last_tool_use.id == tool_use.id { - *last_tool_use = tool_use.clone(); - false - } else { - true - } - } else { - true - } - }); - - if push_new_tool_use { - event_stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone()); - last_message - .content - .push(AgentMessageContent::ToolUse(tool_use.clone())); - } else { - event_stream.update_tool_call_fields( - &tool_use.id, - acp::ToolCallUpdateFields { - title: Some(title.into()), - kind: Some(kind), - raw_input: Some(tool_use.input.clone()), - ..Default::default() - }, - ); - } - - if !tool_use.is_input_complete { - return None; - } - - let Some(tool) = tool else { - let content = format!("No tool named {} exists", tool_use.name); - return Some(Task::ready(LanguageModelToolResult { - content: LanguageModelToolResultContent::Text(Arc::from(content)), - tool_use_id: tool_use.id, - tool_name: tool_use.name, - is_error: true, - output: None, - })); - }; - - let fs = self.project.read(cx).fs().clone(); - let tool_event_stream = - ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs)); - tool_event_stream.update_fields(acp::ToolCallUpdateFields { - status: Some(acp::ToolCallStatus::InProgress), - ..Default::default() - }); - let supports_images = self.model().is_some_and(|model| model.supports_images()); - let tool_result = tool.run(tool_use.input, tool_event_stream, cx); - log::debug!("Running tool {}", tool_use.name); - Some(cx.foreground_executor().spawn(async move { - let tool_result = tool_result.await.and_then(|output| { - if let LanguageModelToolResultContent::Image(_) = &output.llm_output - && !supports_images - { - return Err(anyhow!( - "Attempted to read an image, but this model doesn't support it.", - )); - } - Ok(output) - }); - - match tool_result { - Ok(output) => LanguageModelToolResult { - tool_use_id: tool_use.id, - tool_name: tool_use.name, - is_error: false, - content: output.llm_output, - output: Some(output.raw_output), - }, - Err(error) => LanguageModelToolResult { - tool_use_id: tool_use.id, - tool_name: tool_use.name, - is_error: true, - content: LanguageModelToolResultContent::Text(Arc::from(error.to_string())), - output: Some(error.to_string().into()), - }, - } - })) - } - - fn handle_tool_use_json_parse_error_event( - &mut self, - tool_use_id: LanguageModelToolUseId, - tool_name: Arc, - raw_input: Arc, - json_parse_error: String, - ) -> LanguageModelToolResult { - let tool_output = format!("Error parsing input JSON: {json_parse_error}"); - LanguageModelToolResult { - tool_use_id, - tool_name, - is_error: true, - content: LanguageModelToolResultContent::Text(tool_output.into()), - output: Some(serde_json::Value::String(raw_input.to_string())), - } - } - - fn update_model_request_usage(&self, amount: usize, limit: UsageLimit, cx: &mut Context) { - self.project - .read(cx) - .user_store() - .update(cx, |user_store, cx| { - user_store.update_model_request_usage( - ModelRequestUsage(RequestUsage { - amount: amount as i32, - limit, - }), - cx, - ) - }); - } - - pub fn title(&self) -> SharedString { - self.title.clone().unwrap_or("New Thread".into()) - } - - pub fn summary(&mut self, cx: &mut Context) -> Task> { - if let Some(summary) = self.summary.as_ref() { - return Task::ready(Ok(summary.clone())); - } - let Some(model) = self.summarization_model.clone() else { - return 
Task::ready(Err(anyhow!("No summarization model available"))); - }; - let mut request = LanguageModelRequest { - intent: Some(CompletionIntent::ThreadContextSummarization), - temperature: AgentSettings::temperature_for_model(&model, cx), - ..Default::default() - }; - - for message in &self.messages { - request.messages.extend(message.to_request()); - } - - request.messages.push(LanguageModelRequestMessage { - role: Role::User, - content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()], - cache: false, - }); - cx.spawn(async move |this, cx| { - let mut summary = String::new(); - let mut messages = model.stream_completion(request, cx).await?; - while let Some(event) = messages.next().await { - let event = event?; - let text = match event { - LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { - this.update(cx, |thread, cx| { - thread.update_model_request_usage(amount, limit, cx); - })?; - continue; - } - _ => continue, - }; - - let mut lines = text.lines(); - summary.extend(lines.next()); - } - - log::debug!("Setting summary: {}", summary); - let summary = SharedString::from(summary); - - this.update(cx, |this, cx| { - this.summary = Some(summary.clone()); - cx.notify() - })?; - - Ok(summary) - }) - } - - fn generate_title(&mut self, cx: &mut Context) { - let Some(model) = self.summarization_model.clone() else { - return; - }; - - log::debug!( - "Generating title with model: {:?}", - self.summarization_model.as_ref().map(|model| model.name()) - ); - let mut request = LanguageModelRequest { - intent: Some(CompletionIntent::ThreadSummarization), - temperature: AgentSettings::temperature_for_model(&model, cx), - ..Default::default() - }; - - for message in &self.messages { - request.messages.extend(message.to_request()); - } - - request.messages.push(LanguageModelRequestMessage { - role: Role::User, - content: vec![SUMMARIZE_THREAD_PROMPT.into()], - cache: false, - }); - self.pending_title_generation = Some(cx.spawn(async move |this, cx| { - let mut title = String::new(); - - let generate = async { - let mut messages = model.stream_completion(request, cx).await?; - while let Some(event) = messages.next().await { - let event = event?; - let text = match event { - LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { - this.update(cx, |thread, cx| { - thread.update_model_request_usage(amount, limit, cx); - })?; - continue; - } - _ => continue, - }; - - let mut lines = text.lines(); - title.extend(lines.next()); - - // Stop if the LLM generated multiple lines. 
- if lines.next().is_some() { - break; - } - } - anyhow::Ok(()) - }; - - if generate.await.context("failed to generate title").is_ok() { - _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); - } - _ = this.update(cx, |this, _| this.pending_title_generation = None); - })); - } - - pub fn set_title(&mut self, title: SharedString, cx: &mut Context) { - self.pending_title_generation = None; - if Some(&title) != self.title.as_ref() { - self.title = Some(title); - cx.emit(TitleUpdated); - cx.notify(); - } - } - - fn last_user_message(&self) -> Option<&UserMessage> { - self.messages - .iter() - .rev() - .find_map(|message| match message { - Message::User(user_message) => Some(user_message), - Message::Agent(_) => None, - Message::Resume => None, - }) - } - - fn pending_message(&mut self) -> &mut AgentMessage { - self.pending_message.get_or_insert_default() - } - - fn flush_pending_message(&mut self, cx: &mut Context) { - let Some(mut message) = self.pending_message.take() else { - return; - }; - - if message.content.is_empty() { - return; - } - - for content in &message.content { - let AgentMessageContent::ToolUse(tool_use) = content else { - continue; - }; - - if !message.tool_results.contains_key(&tool_use.id) { - message.tool_results.insert( - tool_use.id.clone(), - LanguageModelToolResult { - tool_use_id: tool_use.id.clone(), - tool_name: tool_use.name.clone(), - is_error: true, - content: LanguageModelToolResultContent::Text(TOOL_CANCELED_MESSAGE.into()), - output: None, - }, - ); - } - } - - self.messages.push(Message::Agent(message)); - self.updated_at = Utc::now(); - self.summary = None; - cx.notify() - } - - pub(crate) fn build_completion_request( - &self, - completion_intent: CompletionIntent, - cx: &App, - ) -> Result { - let model = self.model().context("No language model configured")?; - let tools = if let Some(turn) = self.running_turn.as_ref() { - turn.tools - .iter() - .filter_map(|(tool_name, tool)| { - log::trace!("Including tool: {}", tool_name); - Some(LanguageModelRequestTool { - name: tool_name.to_string(), - description: tool.description().to_string(), - input_schema: tool.input_schema(model.tool_input_format()).log_err()?, - }) - }) - .collect::>() - } else { - Vec::new() - }; - - log::debug!("Building completion request"); - log::debug!("Completion intent: {:?}", completion_intent); - log::debug!("Completion mode: {:?}", self.completion_mode); - - let messages = self.build_request_messages(cx); - log::debug!("Request will include {} messages", messages.len()); - log::debug!("Request includes {} tools", tools.len()); - - let request = LanguageModelRequest { - thread_id: Some(self.id.to_string()), - prompt_id: Some(self.prompt_id.to_string()), - intent: Some(completion_intent), - mode: Some(self.completion_mode.into()), - messages, - tools, - tool_choice: None, - stop: Vec::new(), - temperature: AgentSettings::temperature_for_model(model, cx), - thinking_allowed: true, - }; - - log::debug!("Completion request built successfully"); - Ok(request) - } - - fn enabled_tools( - &self, - profile: &AgentProfileSettings, - model: &Arc, - cx: &App, - ) -> BTreeMap> { - fn truncate(tool_name: &SharedString) -> SharedString { - if tool_name.len() > MAX_TOOL_NAME_LENGTH { - let mut truncated = tool_name.to_string(); - truncated.truncate(MAX_TOOL_NAME_LENGTH); - truncated.into() - } else { - tool_name.clone() - } - } - - let mut tools = self - .tools - .iter() - .filter_map(|(tool_name, tool)| { - if tool.supported_provider(&model.provider_id()) - && 
profile.is_tool_enabled(tool_name) - { - Some((truncate(tool_name), tool.clone())) - } else { - None - } - }) - .collect::>(); - - let mut context_server_tools = Vec::new(); - let mut seen_tools = tools.keys().cloned().collect::>(); - let mut duplicate_tool_names = HashSet::default(); - for (server_id, server_tools) in self.context_server_registry.read(cx).servers() { - for (tool_name, tool) in server_tools { - if profile.is_context_server_tool_enabled(&server_id.0, &tool_name) { - let tool_name = truncate(tool_name); - if !seen_tools.insert(tool_name.clone()) { - duplicate_tool_names.insert(tool_name.clone()); - } - context_server_tools.push((server_id.clone(), tool_name, tool.clone())); - } - } - } - - // When there are duplicate tool names, disambiguate by prefixing them - // with the server ID. In the rare case there isn't enough space for the - // disambiguated tool name, keep only the last tool with this name. - for (server_id, tool_name, tool) in context_server_tools { - if duplicate_tool_names.contains(&tool_name) { - let available = MAX_TOOL_NAME_LENGTH.saturating_sub(tool_name.len()); - if available >= 2 { - let mut disambiguated = server_id.0.to_string(); - disambiguated.truncate(available - 1); - disambiguated.push('_'); - disambiguated.push_str(&tool_name); - tools.insert(disambiguated.into(), tool.clone()); - } else { - tools.insert(tool_name, tool.clone()); - } - } else { - tools.insert(tool_name, tool.clone()); - } - } - - tools - } - - fn tool(&self, name: &str) -> Option> { - self.running_turn.as_ref()?.tools.get(name).cloned() - } - - fn build_request_messages(&self, cx: &App) -> Vec { - log::trace!( - "Building request messages from {} thread messages", - self.messages.len() - ); - - let system_prompt = SystemPromptTemplate { - project: self.project_context.read(cx), - available_tools: self.tools.keys().cloned().collect(), - } - .render(&self.templates) - .context("failed to build system prompt") - .expect("Invalid template"); - let mut messages = vec![LanguageModelRequestMessage { - role: Role::System, - content: vec![system_prompt.into()], - cache: false, - }]; - for message in &self.messages { - messages.extend(message.to_request()); - } - - if let Some(last_message) = messages.last_mut() { - last_message.cache = true; - } - - if let Some(message) = self.pending_message.as_ref() { - messages.extend(message.to_request()); - } - - messages - } - - pub fn to_markdown(&self) -> String { - let mut markdown = String::new(); - for (ix, message) in self.messages.iter().enumerate() { - if ix > 0 { - markdown.push('\n'); - } - markdown.push_str(&message.to_markdown()); - } - - if let Some(message) = self.pending_message.as_ref() { - markdown.push('\n'); - markdown.push_str(&message.to_markdown()); - } - - markdown - } - - fn advance_prompt_id(&mut self) { - self.prompt_id = PromptId::new(); - } - - fn retry_strategy_for(error: &LanguageModelCompletionError) -> Option { - use LanguageModelCompletionError::*; - use http_client::StatusCode; - - // General strategy here: - // - If retrying won't help (e.g. invalid API key or payload too large), return None so we don't retry at all. - // - If it's a time-based issue (e.g. server overloaded, rate limit exceeded), retry up to 4 times with exponential backoff. - // - If it's an issue that *might* be fixed by retrying (e.g. internal server error), retry up to 3 times. - match error { - HttpResponseError { - status_code: StatusCode::TOO_MANY_REQUESTS, - .. 
- } => Some(RetryStrategy::ExponentialBackoff { - initial_delay: BASE_RETRY_DELAY, - max_attempts: MAX_RETRY_ATTEMPTS, - }), - ServerOverloaded { retry_after, .. } | RateLimitExceeded { retry_after, .. } => { - Some(RetryStrategy::Fixed { - delay: retry_after.unwrap_or(BASE_RETRY_DELAY), - max_attempts: MAX_RETRY_ATTEMPTS, - }) - } - UpstreamProviderError { - status, - retry_after, - .. - } => match *status { - StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE => { - Some(RetryStrategy::Fixed { - delay: retry_after.unwrap_or(BASE_RETRY_DELAY), - max_attempts: MAX_RETRY_ATTEMPTS, - }) - } - StatusCode::INTERNAL_SERVER_ERROR => Some(RetryStrategy::Fixed { - delay: retry_after.unwrap_or(BASE_RETRY_DELAY), - // Internal Server Error could be anything, retry up to 3 times. - max_attempts: 3, - }), - status => { - // There is no StatusCode variant for the unofficial HTTP 529 ("The service is overloaded"), - // but we frequently get them in practice. See https://http.dev/529 - if status.as_u16() == 529 { - Some(RetryStrategy::Fixed { - delay: retry_after.unwrap_or(BASE_RETRY_DELAY), - max_attempts: MAX_RETRY_ATTEMPTS, - }) - } else { - Some(RetryStrategy::Fixed { - delay: retry_after.unwrap_or(BASE_RETRY_DELAY), - max_attempts: 2, - }) - } - } - }, - ApiInternalServerError { .. } => Some(RetryStrategy::Fixed { - delay: BASE_RETRY_DELAY, - max_attempts: 3, - }), - ApiReadResponseError { .. } - | HttpSend { .. } - | DeserializeResponse { .. } - | BadRequestFormat { .. } => Some(RetryStrategy::Fixed { - delay: BASE_RETRY_DELAY, - max_attempts: 3, - }), - // Retrying these errors definitely shouldn't help. - HttpResponseError { - status_code: - StatusCode::PAYLOAD_TOO_LARGE | StatusCode::FORBIDDEN | StatusCode::UNAUTHORIZED, - .. - } - | AuthenticationError { .. } - | PermissionError { .. } - | NoApiKey { .. } - | ApiEndpointNotFound { .. } - | PromptTooLarge { .. } => None, - // These errors might be transient, so retry them - SerializeRequest { .. } | BuildRequestBody { .. } => Some(RetryStrategy::Fixed { - delay: BASE_RETRY_DELAY, - max_attempts: 1, - }), - // Retry all other 4xx and 5xx errors once. - HttpResponseError { status_code, .. } - if status_code.is_client_error() || status_code.is_server_error() => - { - Some(RetryStrategy::Fixed { - delay: BASE_RETRY_DELAY, - max_attempts: 3, - }) - } - Other(err) - if err.is::() - || err.is::() => - { - // Retrying won't help for Payment Required or Model Request Limit errors (where - // the user must upgrade to usage-based billing to get more requests, or else wait - // for a significant amount of time for the request limit to reset). - None - } - // Conservatively assume that any other errors are non-retryable - HttpResponseError { .. } | Other(..) => Some(RetryStrategy::Fixed { - delay: BASE_RETRY_DELAY, - max_attempts: 2, - }), - } - } -} - -struct RunningTurn { - /// Holds the task that handles agent interaction until the end of the turn. - /// Survives across multiple requests as the model performs tool calls and - /// we run tools, report their results. - _task: Task<()>, - /// The current event stream for the running turn. Used to report a final - /// cancellation event if we cancel the turn. - event_stream: ThreadEventStream, - /// The tools that were enabled for this turn. 
- tools: BTreeMap>, -} - -impl RunningTurn { - fn cancel(self) { - log::debug!("Cancelling in progress turn"); - self.event_stream.send_canceled(); - } -} - -pub struct TokenUsageUpdated(pub Option); - -impl EventEmitter for Thread {} - -pub struct TitleUpdated; - -impl EventEmitter for Thread {} - -pub trait AgentTool -where - Self: 'static + Sized, -{ - type Input: for<'de> Deserialize<'de> + Serialize + JsonSchema; - type Output: for<'de> Deserialize<'de> + Serialize + Into; - - fn name() -> &'static str; - - fn description(&self) -> SharedString { - let schema = schemars::schema_for!(Self::Input); - SharedString::new( - schema - .get("description") - .and_then(|description| description.as_str()) - .unwrap_or_default(), - ) - } - - fn kind() -> acp::ToolKind; - - /// The initial tool title to display. Can be updated during the tool run. - fn initial_title( - &self, - input: Result, - cx: &mut App, - ) -> SharedString; - - /// Returns the JSON schema that describes the tool's input. - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Schema { - crate::tool_schema::root_schema_for::(format) - } - - /// Some tools rely on a provider for the underlying billing or other reasons. - /// Allow the tool to check if they are compatible, or should be filtered out. - fn supported_provider(&self, _provider: &LanguageModelProviderId) -> bool { - true - } - - /// Runs the tool with the provided input. - fn run( - self: Arc, - input: Self::Input, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Task>; - - /// Emits events for a previous execution of the tool. - fn replay( - &self, - _input: Self::Input, - _output: Self::Output, - _event_stream: ToolCallEventStream, - _cx: &mut App, - ) -> Result<()> { - Ok(()) - } - - fn erase(self) -> Arc { - Arc::new(Erased(Arc::new(self))) - } -} - -pub struct Erased(T); - -pub struct AgentToolOutput { - pub llm_output: LanguageModelToolResultContent, - pub raw_output: serde_json::Value, -} - -pub trait AnyAgentTool { - fn name(&self) -> SharedString; - fn description(&self) -> SharedString; - fn kind(&self) -> acp::ToolKind; - fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString; - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result; - fn supported_provider(&self, _provider: &LanguageModelProviderId) -> bool { - true - } - fn run( - self: Arc, - input: serde_json::Value, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Task>; - fn replay( - &self, - input: serde_json::Value, - output: serde_json::Value, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Result<()>; -} - -impl AnyAgentTool for Erased> -where - T: AgentTool, -{ - fn name(&self) -> SharedString { - T::name().into() - } - - fn description(&self) -> SharedString { - self.0.description() - } - - fn kind(&self) -> agent_client_protocol::ToolKind { - T::kind() - } - - fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString { - let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input); - self.0.initial_title(parsed_input, _cx) - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - let mut json = serde_json::to_value(self.0.input_schema(format))?; - adapt_schema_to_format(&mut json, format)?; - Ok(json) - } - - fn supported_provider(&self, provider: &LanguageModelProviderId) -> bool { - self.0.supported_provider(provider) - } - - fn run( - self: Arc, - input: serde_json::Value, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Task> { 
- cx.spawn(async move |cx| { - let input = serde_json::from_value(input)?; - let output = cx - .update(|cx| self.0.clone().run(input, event_stream, cx))? - .await?; - let raw_output = serde_json::to_value(&output)?; - Ok(AgentToolOutput { - llm_output: output.into(), - raw_output, - }) - }) - } - - fn replay( - &self, - input: serde_json::Value, - output: serde_json::Value, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Result<()> { - let input = serde_json::from_value(input)?; - let output = serde_json::from_value(output)?; - self.0.replay(input, output, event_stream, cx) - } -} - -#[derive(Clone)] -struct ThreadEventStream(mpsc::UnboundedSender>); - -impl ThreadEventStream { - fn send_user_message(&self, message: &UserMessage) { - self.0 - .unbounded_send(Ok(ThreadEvent::UserMessage(message.clone()))) - .ok(); - } - - fn send_text(&self, text: &str) { - self.0 - .unbounded_send(Ok(ThreadEvent::AgentText(text.to_string()))) - .ok(); - } - - fn send_thinking(&self, text: &str) { - self.0 - .unbounded_send(Ok(ThreadEvent::AgentThinking(text.to_string()))) - .ok(); - } - - fn send_tool_call( - &self, - id: &LanguageModelToolUseId, - title: SharedString, - kind: acp::ToolKind, - input: serde_json::Value, - ) { - self.0 - .unbounded_send(Ok(ThreadEvent::ToolCall(Self::initial_tool_call( - id, - title.to_string(), - kind, - input, - )))) - .ok(); - } - - fn initial_tool_call( - id: &LanguageModelToolUseId, - title: String, - kind: acp::ToolKind, - input: serde_json::Value, - ) -> acp::ToolCall { - acp::ToolCall { - meta: None, - id: acp::ToolCallId(id.to_string().into()), - title, - kind, - status: acp::ToolCallStatus::Pending, - content: vec![], - locations: vec![], - raw_input: Some(input), - raw_output: None, - } - } - - fn update_tool_call_fields( - &self, - tool_use_id: &LanguageModelToolUseId, - fields: acp::ToolCallUpdateFields, - ) { - self.0 - .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( - acp::ToolCallUpdate { - meta: None, - id: acp::ToolCallId(tool_use_id.to_string().into()), - fields, - } - .into(), - ))) - .ok(); - } - - fn send_retry(&self, status: acp_thread::RetryStatus) { - self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok(); - } - - fn send_stop(&self, reason: acp::StopReason) { - self.0.unbounded_send(Ok(ThreadEvent::Stop(reason))).ok(); - } - - fn send_canceled(&self) { - self.0 - .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled))) - .ok(); - } - - fn send_error(&self, error: impl Into) { - self.0.unbounded_send(Err(error.into())).ok(); - } -} - -#[derive(Clone)] -pub struct ToolCallEventStream { - tool_use_id: LanguageModelToolUseId, - stream: ThreadEventStream, - fs: Option>, -} - -impl ToolCallEventStream { - #[cfg(test)] - pub fn test() -> (Self, ToolCallEventStreamReceiver) { - let (events_tx, events_rx) = mpsc::unbounded::>(); - - let stream = ToolCallEventStream::new("test_id".into(), ThreadEventStream(events_tx), None); - - (stream, ToolCallEventStreamReceiver(events_rx)) - } - - fn new( - tool_use_id: LanguageModelToolUseId, - stream: ThreadEventStream, - fs: Option>, - ) -> Self { - Self { - tool_use_id, - stream, - fs, - } - } - - pub fn update_fields(&self, fields: acp::ToolCallUpdateFields) { - self.stream - .update_tool_call_fields(&self.tool_use_id, fields); - } - - pub fn update_diff(&self, diff: Entity) { - self.stream - .0 - .unbounded_send(Ok(ThreadEvent::ToolCallUpdate( - acp_thread::ToolCallUpdateDiff { - id: acp::ToolCallId(self.tool_use_id.to_string().into()), - diff, - } - .into(), - ))) - .ok(); - } - - pub fn 
authorize(&self, title: impl Into, cx: &mut App) -> Task> { - if agent_settings::AgentSettings::get_global(cx).always_allow_tool_actions { - return Task::ready(Ok(())); - } - - let (response_tx, response_rx) = oneshot::channel(); - self.stream - .0 - .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization( - ToolCallAuthorization { - tool_call: acp::ToolCallUpdate { - meta: None, - id: acp::ToolCallId(self.tool_use_id.to_string().into()), - fields: acp::ToolCallUpdateFields { - title: Some(title.into()), - ..Default::default() - }, - }, - options: vec![ - acp::PermissionOption { - id: acp::PermissionOptionId("always_allow".into()), - name: "Always Allow".into(), - kind: acp::PermissionOptionKind::AllowAlways, - meta: None, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("allow".into()), - name: "Allow".into(), - kind: acp::PermissionOptionKind::AllowOnce, - meta: None, - }, - acp::PermissionOption { - id: acp::PermissionOptionId("deny".into()), - name: "Deny".into(), - kind: acp::PermissionOptionKind::RejectOnce, - meta: None, - }, - ], - response: response_tx, - }, - ))) - .ok(); - let fs = self.fs.clone(); - cx.spawn(async move |cx| match response_rx.await?.0.as_ref() { - "always_allow" => { - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, |settings, _| { - settings - .agent - .get_or_insert_default() - .set_always_allow_tool_actions(true); - }); - })?; - } - - Ok(()) - } - "allow" => Ok(()), - _ => Err(anyhow!("Permission to run tool denied by user")), - }) - } -} - -#[cfg(test)] -pub struct ToolCallEventStreamReceiver(mpsc::UnboundedReceiver>); - -#[cfg(test)] -impl ToolCallEventStreamReceiver { - pub async fn expect_authorization(&mut self) -> ToolCallAuthorization { - let event = self.0.next().await; - if let Some(Ok(ThreadEvent::ToolCallAuthorization(auth))) = event { - auth - } else { - panic!("Expected ToolCallAuthorization but got: {:?}", event); - } - } - - pub async fn expect_update_fields(&mut self) -> acp::ToolCallUpdateFields { - let event = self.0.next().await; - if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( - update, - )))) = event - { - update.fields - } else { - panic!("Expected update fields but got: {:?}", event); - } - } - - pub async fn expect_diff(&mut self) -> Entity { - let event = self.0.next().await; - if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateDiff( - update, - )))) = event - { - update.diff - } else { - panic!("Expected diff but got: {:?}", event); - } - } - - pub async fn expect_terminal(&mut self) -> Entity { - let event = self.0.next().await; - if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateTerminal( - update, - )))) = event - { - update.terminal - } else { - panic!("Expected terminal but got: {:?}", event); - } - } -} - -#[cfg(test)] -impl std::ops::Deref for ToolCallEventStreamReceiver { - type Target = mpsc::UnboundedReceiver>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[cfg(test)] -impl std::ops::DerefMut for ToolCallEventStreamReceiver { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl From<&str> for UserMessageContent { - fn from(text: &str) -> Self { - Self::Text(text.into()) - } -} - -impl From for UserMessageContent { - fn from(value: acp::ContentBlock) -> Self { - match value { - acp::ContentBlock::Text(text_content) => Self::Text(text_content.text), - acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)), - 
acp::ContentBlock::Audio(_) => { - // TODO - Self::Text("[audio]".to_string()) - } - acp::ContentBlock::ResourceLink(resource_link) => { - match MentionUri::parse(&resource_link.uri) { - Ok(uri) => Self::Mention { - uri, - content: String::new(), - }, - Err(err) => { - log::error!("Failed to parse mention link: {}", err); - Self::Text(format!("[{}]({})", resource_link.name, resource_link.uri)) - } - } - } - acp::ContentBlock::Resource(resource) => match resource.resource { - acp::EmbeddedResourceResource::TextResourceContents(resource) => { - match MentionUri::parse(&resource.uri) { - Ok(uri) => Self::Mention { - uri, - content: resource.text, - }, - Err(err) => { - log::error!("Failed to parse mention link: {}", err); - Self::Text( - MarkdownCodeBlock { - tag: &resource.uri, - text: &resource.text, - } - .to_string(), - ) - } - } - } - acp::EmbeddedResourceResource::BlobResourceContents(_) => { - // TODO - Self::Text("[blob]".to_string()) - } - }, - } - } -} - -impl From for acp::ContentBlock { - fn from(content: UserMessageContent) -> Self { - match content { - UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent { - text, - annotations: None, - meta: None, - }), - UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent { - data: image.source.to_string(), - mime_type: "image/png".to_string(), - meta: None, - annotations: None, - uri: None, - }), - UserMessageContent::Mention { uri, content } => { - acp::ContentBlock::Resource(acp::EmbeddedResource { - meta: None, - resource: acp::EmbeddedResourceResource::TextResourceContents( - acp::TextResourceContents { - meta: None, - mime_type: None, - text: content, - uri: uri.to_uri().to_string(), - }, - ), - annotations: None, - }) - } - } - } -} - -fn convert_image(image_content: acp::ImageContent) -> LanguageModelImage { - LanguageModelImage { - source: image_content.data.into(), - // TODO: make this optional? - size: gpui::Size::new(0.into(), 0.into()), - } -} diff --git a/crates/agent2/src/tool_schema.rs b/crates/agent2/src/tool_schema.rs deleted file mode 100644 index f608336b416a72885e52abba58ef472029421e4f..0000000000000000000000000000000000000000 --- a/crates/agent2/src/tool_schema.rs +++ /dev/null @@ -1,43 +0,0 @@ -use language_model::LanguageModelToolSchemaFormat; -use schemars::{ - JsonSchema, Schema, - generate::SchemaSettings, - transform::{Transform, transform_subschemas}, -}; - -pub(crate) fn root_schema_for(format: LanguageModelToolSchemaFormat) -> Schema { - let mut generator = match format { - LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(), - LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3() - .with(|settings| { - settings.meta_schema = None; - settings.inline_subschemas = true; - }) - .with_transform(ToJsonSchemaSubsetTransform) - .into_generator(), - }; - generator.root_schema_for::() -} - -#[derive(Debug, Clone)] -struct ToJsonSchemaSubsetTransform; - -impl Transform for ToJsonSchemaSubsetTransform { - fn transform(&mut self, schema: &mut Schema) { - // Ensure that the type field is not an array, this happens when we use - // Option, the type will be [T, "null"]. 
- if let Some(type_field) = schema.get_mut("type") - && let Some(types) = type_field.as_array() - && let Some(first_type) = types.first() - { - *type_field = first_type.clone(); - } - - // oneOf is not supported, use anyOf instead - if let Some(one_of) = schema.remove("oneOf") { - schema.insert("anyOf".to_string(), one_of); - } - - transform_subschemas(self, schema); - } -} diff --git a/crates/agent2/src/tools.rs b/crates/agent2/src/tools.rs deleted file mode 100644 index bcca7eecd185b9381afded26fb573d14f50bc5be..0000000000000000000000000000000000000000 --- a/crates/agent2/src/tools.rs +++ /dev/null @@ -1,60 +0,0 @@ -mod context_server_registry; -mod copy_path_tool; -mod create_directory_tool; -mod delete_path_tool; -mod diagnostics_tool; -mod edit_file_tool; -mod fetch_tool; -mod find_path_tool; -mod grep_tool; -mod list_directory_tool; -mod move_path_tool; -mod now_tool; -mod open_tool; -mod read_file_tool; -mod terminal_tool; -mod thinking_tool; -mod web_search_tool; - -/// A list of all built in tool names, for use in deduplicating MCP tool names -pub fn default_tool_names() -> impl Iterator { - [ - CopyPathTool::name(), - CreateDirectoryTool::name(), - DeletePathTool::name(), - DiagnosticsTool::name(), - EditFileTool::name(), - FetchTool::name(), - FindPathTool::name(), - GrepTool::name(), - ListDirectoryTool::name(), - MovePathTool::name(), - NowTool::name(), - OpenTool::name(), - ReadFileTool::name(), - TerminalTool::name(), - ThinkingTool::name(), - WebSearchTool::name(), - ] - .into_iter() -} - -pub use context_server_registry::*; -pub use copy_path_tool::*; -pub use create_directory_tool::*; -pub use delete_path_tool::*; -pub use diagnostics_tool::*; -pub use edit_file_tool::*; -pub use fetch_tool::*; -pub use find_path_tool::*; -pub use grep_tool::*; -pub use list_directory_tool::*; -pub use move_path_tool::*; -pub use now_tool::*; -pub use open_tool::*; -pub use read_file_tool::*; -pub use terminal_tool::*; -pub use thinking_tool::*; -pub use web_search_tool::*; - -use crate::AgentTool; diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index bdf1b72fdc0c2c71d5e445633d1d4a8ce32a6ba4..fcdba2301ee21254832a81899cff0bc9753e92f2 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -51,7 +51,6 @@ terminal.workspace = true uuid.workspace = true util.workspace = true watch.workspace = true -workspace-hack.workspace = true [target.'cfg(unix)'.dependencies] libc.workspace = true diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 57ddfcf9dc9d635e33f252e6b6f35f015581cfc4..6f92b958b2d94e48539e34b6a58b4789ea376fb5 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -9,7 +9,6 @@ use futures::io::BufReader; use project::Project; use project::agent_server_store::AgentServerCommand; use serde::Deserialize; -use task::Shell; use util::ResultExt as _; use std::path::PathBuf; @@ -39,7 +38,7 @@ pub struct AcpConnection { // NB: Don't move this into the wait_task, since we need to ensure the process is // killed on drop (setting kill_on_drop on the command seems to not always work). 
child: smol::process::Child, - _io_task: Task>, + _io_task: Task>, _wait_task: Task>, _stderr_task: Task>, } @@ -97,7 +96,7 @@ impl AcpConnection { let stdout = child.stdout.take().context("Failed to take stdout")?; let stdin = child.stdin.take().context("Failed to take stdin")?; let stderr = child.stderr.take().context("Failed to take stderr")?; - log::info!( + log::debug!( "Spawning external agent server: {:?}, {:?}", command.path, command.args @@ -168,7 +167,10 @@ impl AcpConnection { meta: None, }, terminal: true, - meta: None, + meta: Some(serde_json::json!({ + // Experimental: Allow for rendering terminal output from the agents + "terminal_output": true, + })), }, meta: None, }) @@ -812,47 +814,18 @@ impl acp::Client for ClientDelegate { let thread = self.session_thread(&args.session_id)?; let project = thread.read_with(&self.cx, |thread, _cx| thread.project().clone())?; - let mut env = if let Some(dir) = &args.cwd { - project - .update(&mut self.cx.clone(), |project, cx| { - project.directory_environment(&task::Shell::System, dir.clone().into(), cx) - })? - .await - .unwrap_or_default() - } else { - Default::default() - }; - for var in args.env { - env.insert(var.name, var.value); - } - - // Use remote shell or default system shell, as appropriate - let shell = project - .update(&mut self.cx.clone(), |project, cx| { - project - .remote_client() - .and_then(|r| r.read(cx).default_system_shell()) - .map(Shell::Program) - })? - .unwrap_or(task::Shell::System); - let (task_command, task_args) = task::ShellBuilder::new(&shell) - .redirect_stdin_to_dev_null() - .build(Some(args.command.clone()), &args.args); - - let terminal_entity = project - .update(&mut self.cx.clone(), |project, cx| { - project.create_terminal_task( - task::SpawnInTerminal { - command: Some(task_command), - args: task_args, - cwd: args.cwd.clone(), - env, - ..Default::default() - }, - cx, - ) - })? 
- .await?; + let terminal_entity = acp_thread::create_terminal_entity( + args.command.clone(), + &args.args, + args.env + .into_iter() + .map(|env| (env.name, env.value)) + .collect(), + args.cwd.clone(), + &project, + &mut self.cx.clone(), + ) + .await?; // Register with renderer let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| { diff --git a/crates/agent_servers/src/codex.rs b/crates/agent_servers/src/codex.rs index 0a19cfd03214972e9c7cd62aee713f3689d525df..3b2b4171de8c0fa17e076761ab36ab03b0f2ac5f 100644 --- a/crates/agent_servers/src/codex.rs +++ b/crates/agent_servers/src/codex.rs @@ -1,11 +1,16 @@ use std::rc::Rc; +use std::sync::Arc; use std::{any::Any, path::Path}; -use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; use acp_thread::AgentConnection; +use agent_client_protocol as acp; use anyhow::{Context as _, Result}; -use gpui::{App, SharedString, Task}; -use project::agent_server_store::CODEX_NAME; +use fs::Fs; +use gpui::{App, AppContext as _, SharedString, Task}; +use project::agent_server_store::{AllAgentServersSettings, CODEX_NAME}; +use settings::{SettingsStore, update_settings_file}; + +use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; #[derive(Clone)] pub struct Codex; @@ -30,6 +35,27 @@ impl AgentServer for Codex { ui::IconName::AiOpenAi } + fn default_mode(&self, cx: &mut App) -> Option { + let settings = cx.read_global(|settings: &SettingsStore, _| { + settings.get::(None).codex.clone() + }); + + settings + .as_ref() + .and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into()))) + } + + fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { + update_settings_file(fs, cx, |settings, _| { + settings + .agent_servers + .get_or_insert_default() + .codex + .get_or_insert_default() + .default_mode = mode_id.map(|m| m.to_string()) + }); + } + fn connect( &self, root_dir: Option<&Path>, diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index a8b457a9dddb1f8932d015f895e6d2064944bfe9..8ddcac24fe054d1226f2bbac49498fd35d6ed1c3 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -24,7 +24,6 @@ schemars.workspace = true serde.workspace = true settings.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] fs.workspace = true diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index ec05c95672fa29b6e4813207e3e592fff9d3be15..c573f2688159619474051e1f7cfefb957f7154a8 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -10,15 +10,14 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection, - NotifyWhenAgentWaiting, Settings, SettingsContent, + NotifyWhenAgentWaiting, Settings, }; pub use crate::agent_profile::*; -pub const SUMMARIZE_THREAD_PROMPT: &str = - include_str!("../../agent/src/prompts/summarize_thread_prompt.txt"); +pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread_prompt.txt"); pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str = - include_str!("../../agent/src/prompts/summarize_thread_detailed_prompt.txt"); + include_str!("prompts/summarize_thread_detailed_prompt.txt"); pub fn init(cx: &mut App) { AgentSettings::register(cx); @@ -42,7 +41,6 @@ pub struct AgentSettings { pub always_allow_tool_actions: bool, pub notify_when_agent_waiting: NotifyWhenAgentWaiting, pub 
play_sound_when_agent_done: bool, - pub stream_edits: bool, pub single_file_review: bool, pub model_parameters: Vec, pub preferred_completion_mode: CompletionMode, @@ -175,7 +173,6 @@ impl Settings for AgentSettings { always_allow_tool_actions: agent.always_allow_tool_actions.unwrap(), notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(), play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(), - stream_edits: agent.stream_edits.unwrap(), single_file_review: agent.single_file_review.unwrap(), model_parameters: agent.model_parameters, preferred_completion_mode: agent.preferred_completion_mode.unwrap().into(), @@ -186,14 +183,4 @@ impl Settings for AgentSettings { message_editor_min_lines: agent.message_editor_min_lines.unwrap(), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - if let Some(b) = vscode - .read_value("chat.agent.enabled") - .and_then(|b| b.as_bool()) - { - current.agent.get_or_insert_default().enabled = Some(b); - current.agent.get_or_insert_default().button = Some(b); - } - } } diff --git a/crates/agent/src/prompts/summarize_thread_detailed_prompt.txt b/crates/agent_settings/src/prompts/summarize_thread_detailed_prompt.txt similarity index 100% rename from crates/agent/src/prompts/summarize_thread_detailed_prompt.txt rename to crates/agent_settings/src/prompts/summarize_thread_detailed_prompt.txt diff --git a/crates/agent/src/prompts/summarize_thread_prompt.txt b/crates/agent_settings/src/prompts/summarize_thread_prompt.txt similarity index 100% rename from crates/agent/src/prompts/summarize_thread_prompt.txt rename to crates/agent_settings/src/prompts/summarize_thread_prompt.txt diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 47d9f6d6a27a2ad5102e831094912208e66a9b43..724b53a017911edbd6e9dd88c410daf794889d4e 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -20,16 +20,14 @@ acp_thread.workspace = true action_log.workspace = true agent-client-protocol.workspace = true agent.workspace = true -agent2.workspace = true agent_servers.workspace = true agent_settings.workspace = true ai_onboarding.workspace = true anyhow.workspace = true arrayvec.workspace = true -assistant_context.workspace = true +assistant_text_thread.workspace = true assistant_slash_command.workspace = true assistant_slash_commands.workspace = true -assistant_tool.workspace = true audio.workspace = true buffer_diff.workspace = true chrono.workspace = true @@ -71,6 +69,7 @@ postage.workspace = true project.workspace = true prompt_store.workspace = true proto.workspace = true +ref-cast.workspace = true release_channel.workspace = true rope.workspace = true rules_library.workspace = true @@ -97,16 +96,13 @@ url.workspace = true urlencoding.workspace = true util.workspace = true watch.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true [dev-dependencies] acp_thread = { workspace = true, features = ["test-support"] } agent = { workspace = true, features = ["test-support"] } -agent2 = { workspace = true, features = ["test-support"] } -assistant_context = { workspace = true, features = ["test-support"] } -assistant_tools.workspace = true +assistant_text_thread = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } diff --git 
a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs index 8cbae5a5420a2f89d0f7ca478ae77a5d5d350411..c5ab47fe18970791c047ef157f6664188c95e346 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/acp/completion_provider.rs @@ -6,13 +6,13 @@ use std::sync::Arc; use std::sync::atomic::AtomicBool; use acp_thread::MentionUri; +use agent::{HistoryEntry, HistoryStore}; use agent_client_protocol as acp; -use agent2::{HistoryEntry, HistoryStore}; use anyhow::Result; use editor::{CompletionProvider, Editor, ExcerptId}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{App, Entity, Task, WeakEntity}; -use language::{Buffer, CodeLabel, HighlightId}; +use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId}; use lsp::CompletionContext; use project::lsp_store::{CompletionDocumentation, SymbolLocation}; use project::{ @@ -27,11 +27,12 @@ use util::rel_path::RelPath; use workspace::Workspace; use crate::AgentPanel; -use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; +use crate::acp::message_editor::MessageEditor; use crate::context_picker::file_context_picker::{FileMatch, search_files}; use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules}; use crate::context_picker::symbol_context_picker::SymbolMatch; use crate::context_picker::symbol_context_picker::search_symbols; +use crate::context_picker::thread_context_picker::search_threads; use crate::context_picker::{ ContextPickerAction, ContextPickerEntry, ContextPickerMode, selection_ranges, }; @@ -651,7 +652,9 @@ impl ContextPickerCompletionProvider { .active_item(cx) .and_then(|item| item.downcast::()) .is_some_and(|editor| { - editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx)) + editor.update(cx, |editor, cx| { + editor.has_non_empty_selection(&editor.display_snapshot(cx)) + }) }); if has_selection { entries.push(ContextPickerEntry::Action( @@ -673,7 +676,7 @@ impl ContextPickerCompletionProvider { fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabel::default(); + let mut label = CodeLabelBuilder::default(); label.push_str(file_name, None); label.push_str(" ", None); @@ -682,9 +685,7 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: label.push_str(directory, comment_id); } - label.filter_range = 0..label.text().len(); - - label + label.build() } impl CompletionProvider for ContextPickerCompletionProvider { @@ -759,13 +760,13 @@ impl CompletionProvider for ContextPickerCompletionProvider { let editor = editor.clone(); move |cx| { editor - .update(cx, |_editor, cx| { + .update(cx, |editor, cx| { match intent { CompletionIntent::Complete | CompletionIntent::CompleteWithInsert | CompletionIntent::CompleteWithReplace => { if !is_missing_argument { - cx.emit(MessageEditorEvent::Send); + editor.send(cx); } } CompletionIntent::Compose => {} @@ -775,7 +776,7 @@ impl CompletionProvider for ContextPickerCompletionProvider { } }); } - is_missing_argument + false } })), } @@ -910,6 +911,17 @@ impl CompletionProvider for ContextPickerCompletionProvider { offset_to_line, self.prompt_capabilities.borrow().embedded_context, ) + .filter(|completion| { + // Right now we don't support completing arguments of slash commands + let is_slash_command_with_argument = matches!( + completion, + 
ContextCompletion::SlashCommand(SlashCommandCompletion { + argument: Some(_), + .. + }) + ); + !is_slash_command_with_argument + }) .map(|completion| { completion.source_range().start <= offset_to_line + position.column as usize && completion.source_range().end >= offset_to_line + position.column as usize @@ -929,42 +941,6 @@ impl CompletionProvider for ContextPickerCompletionProvider { } } -pub(crate) fn search_threads( - query: String, - cancellation_flag: Arc, - history_store: &Entity, - cx: &mut App, -) -> Task> { - let threads = history_store.read(cx).entries().collect(); - if query.is_empty() { - return Task::ready(threads); - } - - let executor = cx.background_executor().clone(); - cx.background_spawn(async move { - let candidates = threads - .iter() - .enumerate() - .map(|(id, thread)| StringMatchCandidate::new(id, thread.title())) - .collect::>(); - let matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - 100, - &cancellation_flag, - executor, - ) - .await; - - matches - .into_iter() - .map(|mat| threads[mat.candidate_id].clone()) - .collect() - }) -} - fn confirm_completion_callback( crease_text: SharedString, start: Anchor, diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs index ee506b98810ba51d0fb933a2ca21e650d0cacc0b..4c058b984f4fa24074ea9e9d81e43c1d73d87d1f 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -1,8 +1,8 @@ use std::{cell::RefCell, ops::Range, rc::Rc}; use acp_thread::{AcpThread, AgentThreadEntry}; +use agent::HistoryStore; use agent_client_protocol::{self as acp, ToolCallId}; -use agent2::HistoryStore; use collections::HashMap; use editor::{Editor, EditorMode, MinimapVisibility}; use gpui::{ @@ -399,10 +399,10 @@ mod tests { use std::{path::Path, rc::Rc}; use acp_thread::{AgentConnection, StubAgentConnection}; + use agent::HistoryStore; use agent_client_protocol as acp; use agent_settings::AgentSettings; - use agent2::HistoryStore; - use assistant_context::ContextStore; + use assistant_text_thread::TextThreadStore; use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; use editor::{EditorSettings, RowInfo}; use fs::FakeFs; @@ -466,8 +466,8 @@ mod tests { connection.send_update(session_id, acp::SessionUpdate::ToolCall(tool_call), cx) }); - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let view_state = cx.new(|_cx| { EntryViewState::new( diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 626c9f3bb7bc3d9c146f584bb9acbc999c25c879..c24cefcf2d5fc04baffeb9f3d1a1ecaf9dd05268 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -3,19 +3,18 @@ use crate::{ context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content}, }; use acp_thread::{MentionUri, selection_name}; +use agent::{HistoryStore, outline}; use agent_client_protocol as acp; use agent_servers::{AgentServer, AgentServerDelegate}; -use agent2::HistoryStore; use anyhow::{Result, anyhow}; use assistant_slash_commands::codeblock_fence_for_path; -use assistant_tool::outline; use collections::{HashMap, HashSet}; use editor::{ Addon, Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, 
EditorElement, - EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, InlayId, + EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay, MultiBuffer, ToOffset, actions::Paste, - display_map::{Crease, CreaseId, FoldId, Inlay}, + display_map::{Crease, CreaseId, FoldId}, }; use futures::{ FutureExt as _, @@ -30,7 +29,8 @@ use language::{Buffer, Language, language_settings::InlayHintKind}; use language_model::LanguageModelImage; use postage::stream::Stream as _; use project::{ - CompletionIntent, InlayHint, InlayHintLabel, Project, ProjectItem, ProjectPath, Worktree, + CompletionIntent, InlayHint, InlayHintLabel, InlayId, Project, ProjectItem, ProjectPath, + Worktree, }; use prompt_store::{PromptId, PromptStore}; use rope::Point; @@ -76,7 +76,7 @@ pub enum MessageEditorEvent { impl EventEmitter for MessageEditor {} -const COMMAND_HINT_INLAY_ID: u32 = 0; +const COMMAND_HINT_INLAY_ID: InlayId = InlayId::Hint(0); impl MessageEditor { pub fn new( @@ -141,7 +141,9 @@ impl MessageEditor { subscriptions.push(cx.subscribe_in(&editor, window, { move |this, editor, event, window, cx| { - if let EditorEvent::Edited { .. } = event { + if let EditorEvent::Edited { .. } = event + && !editor.read(cx).read_only(cx) + { let snapshot = editor.update(cx, |editor, cx| { let new_hints = this .command_hint(editor.buffer(), cx) @@ -150,7 +152,7 @@ impl MessageEditor { let has_new_hint = !new_hints.is_empty(); editor.splice_inlays( if has_hint { - &[InlayId::Hint(COMMAND_HINT_INLAY_ID)] + &[COMMAND_HINT_INLAY_ID] } else { &[] }, @@ -228,7 +230,7 @@ impl MessageEditor { pub fn insert_thread_summary( &mut self, - thread: agent2::DbThreadMetadata, + thread: agent::DbThreadMetadata, window: &mut Window, cx: &mut Context, ) { @@ -290,15 +292,10 @@ impl MessageEditor { let snapshot = self .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); - let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot().as_singleton() else { - return Task::ready(()); - }; - let Some(start_anchor) = snapshot - .buffer_snapshot() - .anchor_in_excerpt(*excerpt_id, start) - else { + let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else { return Task::ready(()); }; + let excerpt_id = start_anchor.excerpt_id; let end_anchor = snapshot .buffer_snapshot() .anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1); @@ -330,7 +327,7 @@ impl MessageEditor { }) .shared(); insert_crease_for_mention( - *excerpt_id, + excerpt_id, start, content_len, mention_uri.name().into(), @@ -342,7 +339,7 @@ impl MessageEditor { ) } else { insert_crease_for_mention( - *excerpt_id, + excerpt_id, start, content_len, crease_text, @@ -544,10 +541,7 @@ impl MessageEditor { cx: &mut Context, ) { let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); - let Some((&excerpt_id, _, _)) = snapshot.as_singleton() else { - return; - }; - let Some(start) = snapshot.anchor_in_excerpt(excerpt_id, source_range.start) else { + let Some(start) = snapshot.as_singleton_anchor(source_range.start) else { return; }; @@ -605,7 +599,7 @@ impl MessageEditor { id: acp::SessionId, cx: &mut Context, ) -> Task> { - let server = Rc::new(agent2::NativeAgentServer::new( + let server = Rc::new(agent::NativeAgentServer::new( self.project.read(cx).fs().clone(), self.history_store.clone(), )); @@ -618,7 +612,7 @@ impl MessageEditor { let connection = server.connect(None, delegate, cx); cx.spawn(async move |_, cx| { let (agent, _) = connection.await?; - let agent = 
agent.downcast::().unwrap(); + let agent = agent.downcast::().unwrap(); let summary = agent .0 .update(cx, |agent, cx| agent.thread_summary(id, cx))? @@ -635,12 +629,12 @@ impl MessageEditor { path: PathBuf, cx: &mut Context, ) -> Task> { - let context = self.history_store.update(cx, |text_thread_store, cx| { - text_thread_store.load_text_thread(path.as_path().into(), cx) + let text_thread_task = self.history_store.update(cx, |store, cx| { + store.load_text_thread(path.as_path().into(), cx) }); cx.spawn(async move |_, cx| { - let context = context.await?; - let xml = context.update(cx, |context, cx| context.to_xml(cx))?; + let text_thread = text_thread_task.await?; + let xml = text_thread.update(cx, |text_thread, cx| text_thread.to_xml(cx))?; Ok(Mention::Text { content: xml, tracked_buffers: Vec::new(), @@ -823,13 +817,20 @@ impl MessageEditor { }); } - fn send(&mut self, _: &Chat, _: &mut Window, cx: &mut Context) { + pub fn send(&mut self, cx: &mut Context) { if self.is_empty(cx) { return; } + self.editor.update(cx, |editor, cx| { + editor.clear_inlay_hints(cx); + }); cx.emit(MessageEditorEvent::Send) } + fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context) { + self.send(cx); + } + fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context) { cx.emit(MessageEditorEvent::Cancel) } @@ -1288,7 +1289,7 @@ impl Render for MessageEditor { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { div() .key_context("MessageEditor") - .on_action(cx.listener(Self::send)) + .on_action(cx.listener(Self::chat)) .on_action(cx.listener(Self::cancel)) .capture_action(cx.listener(Self::paste)) .flex_1() @@ -1588,10 +1589,9 @@ mod tests { use std::{cell::RefCell, ops::Range, path::Path, rc::Rc, sync::Arc}; use acp_thread::MentionUri; + use agent::{HistoryStore, outline}; use agent_client_protocol as acp; - use agent2::HistoryStore; - use assistant_context::ContextStore; - use assistant_tool::outline; + use assistant_text_thread::TextThreadStore; use editor::{AnchorRangeExt as _, Editor, EditorMode}; use fs::FakeFs; use futures::StreamExt as _; @@ -1622,8 +1622,8 @@ mod tests { let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -1685,13 +1685,10 @@ mod tests { editor.update_in(cx, |editor, window, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - let start = snapshot - .anchor_in_excerpt(excerpt_id, completion.replace_range.start) - .unwrap(); - let end = snapshot - .anchor_in_excerpt(excerpt_id, completion.replace_range.end) + let range = snapshot + .anchor_range_in_excerpt(excerpt_id, completion.replace_range) .unwrap(); - editor.edit([(start..end, completion.new_text)], cx); + editor.edit([(range, completion.new_text)], cx); (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx); }); @@ -1730,8 +1727,8 @@ mod tests { .await; let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| 
TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); // Start with no available commands - simulating Claude which doesn't support slash commands let available_commands = Rc::new(RefCell::new(vec![])); @@ -1894,8 +1891,8 @@ mod tests { let mut cx = VisualTestContext::from_window(*window, cx); - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![ acp::AvailableCommand { @@ -2012,21 +2009,11 @@ mod tests { editor.update_in(&mut cx, |editor, _window, cx| { assert_eq!(editor.text(cx), "/say-hello "); assert_eq!(editor.display_text(cx), "/say-hello "); - assert!(editor.has_visible_completions_menu()); - - assert_eq!( - current_completion_labels_with_documentation(editor), - &[("say-hello".into(), "Say hello to whoever you want".into())] - ); + assert!(!editor.has_visible_completions_menu()); }); cx.simulate_input("GPT5"); - editor.update_in(&mut cx, |editor, window, cx| { - assert!(editor.has_visible_completions_menu()); - editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); - }); - cx.run_until_parked(); editor.update_in(&mut cx, |editor, window, cx| { @@ -2035,7 +2022,7 @@ mod tests { assert!(!editor.has_visible_completions_menu()); // Delete argument - for _ in 0..4 { + for _ in 0..5 { editor.backspace(&editor::actions::Backspace, window, cx); } }); @@ -2043,13 +2030,12 @@ mod tests { cx.run_until_parked(); editor.update_in(&mut cx, |editor, window, cx| { - assert_eq!(editor.text(cx), "/say-hello "); + assert_eq!(editor.text(cx), "/say-hello"); // Hint is visible because argument was deleted assert_eq!(editor.display_text(cx), "/say-hello "); // Delete last command letter editor.backspace(&editor::actions::Backspace, window, cx); - editor.backspace(&editor::actions::Backspace, window, cx); }); cx.run_until_parked(); @@ -2145,8 +2131,8 @@ mod tests { opened_editors.push(buffer); } - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { @@ -2672,8 +2658,8 @@ mod tests { let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); - let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); let message_editor = cx.update(|window, cx| { cx.new(|cx| { diff --git a/crates/agent_ui/src/acp/mode_selector.rs b/crates/agent_ui/src/acp/mode_selector.rs index 
410874126665b7d622c7cf45e81596dce7f96823..36970a29ab7fd30f175d8128f8bbd3c55b71b605 100644 --- a/crates/agent_ui/src/acp/mode_selector.rs +++ b/crates/agent_ui/src/acp/mode_selector.rs @@ -174,11 +174,16 @@ impl Render for ModeSelector { let this = cx.entity(); + let icon = if self.menu_handle.is_deployed() { + IconName::ChevronUp + } else { + IconName::ChevronDown + }; + let trigger_button = Button::new("mode-selector-trigger", current_mode_name) .label_size(LabelSize::Small) - .style(ButtonStyle::Subtle) .color(Color::Muted) - .icon(IconName::ChevronDown) + .icon(icon) .icon_size(IconSize::XSmall) .icon_position(IconPosition::End) .icon_color(Color::Muted) @@ -189,7 +194,7 @@ impl Render for ModeSelector { trigger_button, Tooltip::element({ let focus_handle = self.focus_handle.clone(); - move |window, cx| { + move |_window, cx| { v_flex() .gap_1() .child( @@ -200,10 +205,9 @@ impl Render for ModeSelector { .border_b_1() .border_color(cx.theme().colors().border_variant) .child(Label::new("Cycle Through Modes")) - .children(KeyBinding::for_action_in( + .child(KeyBinding::for_action_in( &CycleModeSelector, &focus_handle, - window, cx, )), ) @@ -212,10 +216,9 @@ impl Render for ModeSelector { .gap_2() .justify_between() .child(Label::new("Toggle Mode Menu")) - .children(KeyBinding::for_action_in( + .child(KeyBinding::for_action_in( &ToggleProfileSelector, &focus_handle, - window, cx, )), ) diff --git a/crates/agent_ui/src/acp/model_selector.rs b/crates/agent_ui/src/acp/model_selector.rs index 381bdb01edec49e222c9bd9b3a97ce9ba21a9789..45fec558720fc5e88548f6dd7bc24fe624a908f5 100644 --- a/crates/agent_ui/src/acp/model_selector.rs +++ b/crates/agent_ui/src/acp/model_selector.rs @@ -5,12 +5,12 @@ use anyhow::Result; use collections::IndexMap; use futures::FutureExt; use fuzzy::{StringMatchCandidate, match_strings}; -use gpui::{Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, WeakEntity}; +use gpui::{AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, WeakEntity}; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use ui::{ - AnyElement, App, Context, DocumentationAside, DocumentationEdge, DocumentationSide, - IntoElement, ListItem, ListItemSpacing, SharedString, Window, prelude::*, rems, + DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, ListItem, + ListItemSpacing, prelude::*, }; use util::ResultExt; @@ -278,36 +278,6 @@ impl PickerDelegate for AcpModelPickerDelegate { } } - fn render_footer( - &self, - _: &mut Window, - cx: &mut Context>, - ) -> Option { - Some( - h_flex() - .w_full() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .p_1() - .gap_4() - .justify_between() - .child( - Button::new("configure", "Configure") - .icon(IconName::Settings) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) - .on_click(|_, window, cx| { - window.dispatch_action( - zed_actions::agent::OpenSettings.boxed_clone(), - cx, - ); - }), - ) - .into_any(), - ) - } - fn documentation_aside( &self, _window: &mut Window, @@ -317,7 +287,7 @@ impl PickerDelegate for AcpModelPickerDelegate { let description = description.clone(); DocumentationAside::new( DocumentationSide::Left, - DocumentationEdge::Bottom, + DocumentationEdge::Top, Rc::new(move |_| Label::new(description.clone()).into_any_element()), ) }) diff --git a/crates/agent_ui/src/acp/model_selector_popover.rs b/crates/agent_ui/src/acp/model_selector_popover.rs index 
55f530c81b1cead74fd4ec4f6cc29ececcf2bf7e..bd64756483032bee00ba8f56794bcb228bf91246 100644 --- a/crates/agent_ui/src/acp/model_selector_popover.rs +++ b/crates/agent_ui/src/acp/model_selector_popover.rs @@ -57,38 +57,28 @@ impl Render for AcpModelSelectorPopover { let focus_handle = self.focus_handle.clone(); - let color = if self.menu_handle.is_deployed() { - Color::Accent + let (color, icon) = if self.menu_handle.is_deployed() { + (Color::Accent, IconName::ChevronUp) } else { - Color::Muted + (Color::Muted, IconName::ChevronDown) }; PickerPopoverMenu::new( self.selector.clone(), ButtonLike::new("active-model") + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .when_some(model_icon, |this, icon| { this.child(Icon::new(icon).color(color).size(IconSize::XSmall)) }) - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .child( Label::new(model_name) .color(color) .size(LabelSize::Small) .ml_0p5(), ) - .child( - Icon::new(IconName::ChevronDown) - .color(Color::Muted) - .size(IconSize::XSmall), - ), - move |window, cx| { - Tooltip::for_action_in( - "Change Model", - &ToggleModelSelector, - &focus_handle, - window, - cx, - ) + .child(Icon::new(icon).color(Color::Muted).size(IconSize::XSmall)), + move |_window, cx| { + Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) }, gpui::Corner::BottomRight, cx, diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index cd696f33fa44976e0784c79d1945b548feb20a50..d96c3b3219717b3ffa7310d207a323bc5fb222b0 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -1,6 +1,6 @@ use crate::acp::AcpThreadView; use crate::{AgentPanel, RemoveSelectedThread}; -use agent2::{HistoryEntry, HistoryStore}; +use agent::{HistoryEntry, HistoryStore}; use chrono::{Datelike as _, Local, NaiveDate, TimeDelta}; use editor::{Editor, EditorEvent}; use fuzzy::StringMatchCandidate; @@ -23,11 +23,8 @@ pub struct AcpThreadHistory { hovered_index: Option, search_editor: Entity, search_query: SharedString, - visible_items: Vec, - local_timezone: UtcOffset, - _update_task: Task<()>, _subscriptions: Vec, } @@ -62,7 +59,7 @@ impl EventEmitter for AcpThreadHistory {} impl AcpThreadHistory { pub(crate) fn new( - history_store: Entity, + history_store: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -327,8 +324,8 @@ impl AcpThreadHistory { HistoryEntry::AcpThread(thread) => self .history_store .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)), - HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| { - this.delete_text_thread(context.path.clone(), cx) + HistoryEntry::TextThread(text_thread) => self.history_store.update(cx, |this, cx| { + this.delete_text_thread(text_thread.path.clone(), cx) }), }; task.detach_and_log_err(cx); @@ -426,8 +423,8 @@ impl AcpThreadHistory { .shape(IconButtonShape::Square) .icon_size(IconSize::XSmall) .icon_color(Color::Muted) - .tooltip(move |window, cx| { - Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) + .tooltip(move |_window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, cx) }) .on_click( cx.listener(move |this, _, _, cx| this.remove_thread(ix, cx)), @@ -598,8 +595,8 @@ impl RenderOnce for AcpHistoryEntryElement { .shape(IconButtonShape::Square) .icon_size(IconSize::XSmall) .icon_color(Color::Muted) - .tooltip(move |window, cx| { - Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) + .tooltip(move |_window, cx| { + 
Tooltip::for_action("Delete", &RemoveSelectedThread, cx) }) .on_click({ let thread_view = self.thread_view.clone(); @@ -638,12 +635,12 @@ impl RenderOnce for AcpHistoryEntryElement { }); } } - HistoryEntry::TextThread(context) => { + HistoryEntry::TextThread(text_thread) => { if let Some(panel) = workspace.read(cx).panel::(cx) { panel.update(cx, |panel, cx| { panel - .open_saved_prompt_editor( - context.path.clone(), + .open_saved_text_thread( + text_thread.path.clone(), window, cx, ) diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 8b8f127764ad36a9fb9afa029b22b2a113991d34..8e5396590fe0170b536075bff210c859435a4b3c 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -5,10 +5,10 @@ use acp_thread::{ }; use acp_thread::{AgentConnection, Plan}; use action_log::ActionLog; +use agent::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer}; use agent_client_protocol::{self as acp, PromptCapabilities}; use agent_servers::{AgentServer, AgentServerDelegate}; use agent_settings::{AgentProfileId, AgentSettings, CompletionMode}; -use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer}; use anyhow::{Result, anyhow, bail}; use arrayvec::ArrayVec; use audio::{Audio, Sound}; @@ -117,7 +117,7 @@ impl ThreadError { } } -impl ProfileProvider for Entity { +impl ProfileProvider for Entity { fn profile_id(&self, cx: &App) -> AgentProfileId { self.read(cx).profile().clone() } @@ -292,6 +292,8 @@ pub struct AcpThreadView { resume_thread_metadata: Option, _cancel_task: Option>, _subscriptions: [Subscription; 5], + #[cfg(target_os = "windows")] + show_codex_windows_warning: bool, } enum ThreadState { @@ -335,7 +337,10 @@ impl AcpThreadView { let placeholder = if agent.name() == "Zed Agent" { format!("Message the {} — @ to include context", agent.name()) - } else if agent.name() == "Claude Code" || !available_commands.borrow().is_empty() { + } else if agent.name() == "Claude Code" + || agent.name() == "Codex" + || !available_commands.borrow().is_empty() + { format!( "Message {} — @ to include context, / for commands", agent.name() @@ -394,6 +399,10 @@ impl AcpThreadView { ), ]; + #[cfg(target_os = "windows")] + let show_codex_windows_warning = crate::ExternalAgent::parse_built_in(agent.as_ref()) + == Some(crate::ExternalAgent::Codex); + Self { agent: agent.clone(), workspace: workspace.clone(), @@ -436,6 +445,8 @@ impl AcpThreadView { focus_handle: cx.focus_handle(), new_server_version_available: None, resume_thread_metadata: resume_thread, + #[cfg(target_os = "windows")] + show_codex_windows_warning, } } @@ -518,7 +529,7 @@ impl AcpThreadView { let result = if let Some(native_agent) = connection .clone() - .downcast::() + .downcast::() && let Some(resume) = resume_thread.clone() { cx.update(|_, cx| { @@ -1055,6 +1066,9 @@ impl AcpThreadView { .iter() .any(|command| command.name == "logout"); if can_login && !logout_supported { + self.message_editor + .update(cx, |editor, cx| editor.clear(window, cx)); + let this = cx.weak_entity(); let agent = self.agent.clone(); window.defer(cx, |window, cx| { @@ -1245,18 +1259,13 @@ impl AcpThreadView { .await?; this.update_in(cx, |this, window, cx| { this.send_impl(message_editor, window, cx); + this.focus_handle(cx).focus(window); })?; anyhow::Ok(()) }) .detach(); } - fn open_agent_diff(&mut self, _: &OpenAgentDiff, window: &mut Window, cx: &mut Context) { - if let Some(thread) = self.thread() { - 
AgentDiffPane::deploy(thread.clone(), self.workspace.clone(), window, cx).log_err();
-        }
-    }
-
     fn open_edited_buffer(
         &mut self,
         buffer: &Entity,
@@ -2149,7 +2158,6 @@ impl AcpThreadView {
                             options,
                             entry_ix,
                             tool_call.id.clone(),
-                            window,
                             cx,
                         ))
                         .into_any(),
@@ -2550,7 +2558,6 @@ impl AcpThreadView {
         options: &[acp::PermissionOption],
         entry_ix: usize,
         tool_call_id: acp::ToolCallId,
-        window: &Window,
         cx: &Context,
     ) -> Div {
         let is_first = self.thread().is_some_and(|thread| {
@@ -2607,7 +2614,7 @@ impl AcpThreadView {
                     seen_kinds.push(option.kind);
                     this.key_binding(
-                        KeyBinding::for_action_in(action, &self.focus_handle, window, cx)
+                        KeyBinding::for_action_in(action, &self.focus_handle, cx)
                             .map(|kb| kb.size(rems_from_px(10.))),
                     )
                 })
@@ -2788,12 +2795,11 @@ impl AcpThreadView {
                 .icon_size(IconSize::Small)
                 .icon_color(Color::Error)
                 .label_size(LabelSize::Small)
-                .tooltip(move |window, cx| {
+                .tooltip(move |_window, cx| {
                     Tooltip::with_meta(
                         "Stop This Command",
                         None,
                         "Also possible by placing your cursor inside the terminal and using regular terminal bindings.",
-                        window,
                         cx,
                     )
                 })
@@ -3094,11 +3100,11 @@ impl AcpThreadView {
         )
     }

-    fn render_recent_history(&self, window: &mut Window, cx: &mut Context) -> AnyElement {
+    fn render_recent_history(&self, cx: &mut Context) -> AnyElement {
         let render_history = self
             .agent
             .clone()
-            .downcast::()
+            .downcast::()
             .is_some()
             && self
                 .history_store
@@ -3123,7 +3129,6 @@ impl AcpThreadView {
                                         KeyBinding::for_action_in(
                                             &OpenHistory,
                                             &self.focus_handle(cx),
-                                            window,
                                             cx,
                                         )
                                         .map(|kb| kb.size(rems_from_px(12.))),
@@ -3283,6 +3288,12 @@ impl AcpThreadView {
                                 this.style(ButtonStyle::Outlined)
                             }
                         })
+                        .when_some(
+                            method.description.clone(),
+                            |this, description| {
+                                this.tooltip(Tooltip::text(description))
+                            },
+                        )
                         .on_click({
                             cx.listener(move |this, _, window, cx| {
                                 telemetry::event!(
@@ -3445,7 +3456,6 @@ impl AcpThreadView {
                     &changed_buffers,
                     self.edits_expanded,
                     pending_edits,
-                    window,
                     cx,
                 ))
                 .when(self.edits_expanded, |parent| {
@@ -3605,7 +3615,6 @@ impl AcpThreadView {
         changed_buffers: &BTreeMap, Entity>,
         expanded: bool,
         pending_edits: bool,
-        window: &mut Window,
         cx: &Context,
     ) -> Div {
         const EDIT_NOT_READY_TOOLTIP_LABEL: &str = "Wait until file edits are complete.";
@@ -3681,12 +3690,11 @@ impl AcpThreadView {
                         .icon_size(IconSize::Small)
                         .tooltip({
                             let focus_handle = focus_handle.clone();
-                            move |window, cx| {
+                            move |_window, cx| {
                                 Tooltip::for_action_in(
                                     "Review Changes",
                                     &OpenAgentDiff,
                                     &focus_handle,
-                                    window,
                                     cx,
                                 )
                             }
@@ -3704,13 +3712,8 @@ impl AcpThreadView {
                                 this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
                             })
                             .key_binding(
-                                KeyBinding::for_action_in(
-                                    &RejectAll,
-                                    &focus_handle.clone(),
-                                    window,
-                                    cx,
-                                )
-                                .map(|kb| kb.size(rems_from_px(10.))),
+                                KeyBinding::for_action_in(&RejectAll, &focus_handle.clone(), cx)
+                                    .map(|kb| kb.size(rems_from_px(10.))),
                             )
                             .on_click(cx.listener(move |this, _, window, cx| {
                                 this.reject_all(&RejectAll, window, cx);
@@ -3724,7 +3727,7 @@ impl AcpThreadView {
                                 this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
                             })
                             .key_binding(
-                                KeyBinding::for_action_in(&KeepAll, &focus_handle, window, cx)
+                                KeyBinding::for_action_in(&KeepAll, &focus_handle, cx)
                                     .map(|kb| kb.size(rems_from_px(10.))),
                             )
                             .on_click(cx.listener(move |this, _, window, cx| {
@@ -3954,12 +3957,11 @@ impl AcpThreadView {
                 .icon_size(IconSize::Small)
                 .icon_color(Color::Muted)
                 .tooltip({
-                    move |window, cx| {
+                    move |_window, cx| {
                         Tooltip::for_action_in(
                             expand_tooltip,
                             &ExpandMessageEditor,
                             &focus_handle,
-                            window,
                             cx,
                         )
                     }
@@ -3997,12 +3999,12 @@ impl AcpThreadView {
     pub(crate) fn as_native_connection(
         &self,
         cx: &App,
-    ) -> Option> {
+    ) -> Option> {
         let acp_thread = self.thread()?.read(cx);
         acp_thread.connection().clone().downcast()
     }

-    pub(crate) fn as_native_thread(&self, cx: &App) -> Option> {
+    pub(crate) fn as_native_thread(&self, cx: &App) -> Option> {
         let acp_thread = self.thread()?.read(cx);
         self.as_native_connection(cx)?
             .thread(acp_thread.session_id(), cx)
@@ -4184,8 +4186,8 @@ impl AcpThreadView {
             IconButton::new("stop-generation", IconName::Stop)
                 .icon_color(Color::Error)
                 .style(ButtonStyle::Tinted(ui::TintColor::Error))
-                .tooltip(move |window, cx| {
-                    Tooltip::for_action("Stop Generation", &editor::actions::Cancel, window, cx)
+                .tooltip(move |_window, cx| {
+                    Tooltip::for_action("Stop Generation", &editor::actions::Cancel, cx)
                 })
                 .on_click(cx.listener(|this, _event, _, cx| this.cancel_generation(cx)))
                 .into_any_element()
@@ -4207,7 +4209,7 @@ impl AcpThreadView {
                     this.icon_color(Color::Accent)
                 }
             })
-            .tooltip(move |window, cx| Tooltip::for_action(send_btn_tooltip, &Chat, window, cx))
+            .tooltip(move |_window, cx| Tooltip::for_action(send_btn_tooltip, &Chat, cx))
             .on_click(cx.listener(|this, _, window, cx| {
                 this.send(window, cx);
             }))
@@ -4268,15 +4270,14 @@ impl AcpThreadView {
             .icon_color(Color::Muted)
             .toggle_state(following)
             .selected_icon_color(Some(Color::Custom(cx.theme().players().agent().cursor)))
-            .tooltip(move |window, cx| {
+            .tooltip(move |_window, cx| {
                 if following {
-                    Tooltip::for_action(tooltip_label.clone(), &Follow, window, cx)
+                    Tooltip::for_action(tooltip_label.clone(), &Follow, cx)
                 } else {
                     Tooltip::with_meta(
                         tooltip_label.clone(),
                         Some(&Follow),
                         "Track the agent's location as it reads and edits files.",
-                        window,
                         cx,
                     )
                 }
@@ -4390,7 +4391,7 @@ impl AcpThreadView {
                 if let Some(panel) = workspace.panel::(cx) {
                     panel.update(cx, |panel, cx| {
                         panel
-                            .open_saved_prompt_editor(path.as_path().into(), window, cx)
+                            .open_saved_text_thread(path.as_path().into(), window, cx)
                             .detach_and_log_err(cx);
                     });
                 }
@@ -5022,7 +5023,50 @@ impl AcpThreadView {
         )
     }

-    fn render_thread_error(&self, window: &mut Window, cx: &mut Context) -> Option {
+    #[cfg(target_os = "windows")]
+    fn render_codex_windows_warning(&self, cx: &mut Context) -> Option {
+        if self.show_codex_windows_warning {
+            Some(
+                Callout::new()
+                    .icon(IconName::Warning)
+                    .severity(Severity::Warning)
+                    .title("Codex on Windows")
+                    .description(
+                        "For best performance, run Codex in Windows Subsystem for Linux (WSL2)",
+                    )
+                    .actions_slot(
+                        Button::new("open-wsl-modal", "Open in WSL")
+                            .icon_size(IconSize::Small)
+                            .icon_color(Color::Muted)
+                            .on_click(cx.listener({
+                                move |_, _, window, cx| {
+                                    window.dispatch_action(
+                                        zed_actions::wsl_actions::OpenWsl::default().boxed_clone(),
+                                        cx,
+                                    );
+                                    cx.notify();
+                                }
+                            })),
+                    )
+                    .dismiss_action(
+                        IconButton::new("dismiss", IconName::Close)
+                            .icon_size(IconSize::Small)
+                            .icon_color(Color::Muted)
+                            .tooltip(Tooltip::text("Dismiss Warning"))
+                            .on_click(cx.listener({
+                                move |this, _, _, cx| {
+                                    this.show_codex_windows_warning = false;
+                                    cx.notify();
+                                }
+                            })),
+                    ),
+            )
+        } else {
+            None
+        }
+    }
+
+    fn render_thread_error(&self, cx: &mut Context) -> Option {
         let content = match self.thread_error.as_ref()? {
             ThreadError::Other(error) => self.render_any_thread_error(error.clone(), cx),
             ThreadError::Refusal => self.render_refusal_error(cx),
@@ -5033,9 +5077,7 @@ impl AcpThreadView {
             ThreadError::ModelRequestLimitReached(plan) => {
                 self.render_model_request_limit_reached_error(*plan, cx)
             }
-            ThreadError::ToolUseLimitReached => {
-                self.render_tool_use_limit_reached_error(window, cx)?
-            }
+            ThreadError::ToolUseLimitReached => self.render_tool_use_limit_reached_error(cx)?,
         };

         Some(div().child(content))
@@ -5080,7 +5122,7 @@ impl AcpThreadView {
         if self
             .agent
             .clone()
-            .downcast::()
+            .downcast::()
             .is_some()
         {
             // Native agent - use the model name
@@ -5226,11 +5268,7 @@ impl AcpThreadView {
             .dismiss_action(self.dismiss_error_button(cx))
     }

-    fn render_tool_use_limit_reached_error(
-        &self,
-        window: &mut Window,
-        cx: &mut Context,
-    ) -> Option {
+    fn render_tool_use_limit_reached_error(&self, cx: &mut Context) -> Option {
         let thread = self.as_native_thread(cx)?;
         let supports_burn_mode = thread
             .read(cx)
@@ -5257,7 +5295,6 @@ impl AcpThreadView {
                                 KeyBinding::for_action_in(
                                     &ContinueWithBurnMode,
                                     &focus_handle,
-                                    window,
                                     cx,
                                 )
                                 .map(|kb| kb.size(rems_from_px(10.))),
@@ -5281,13 +5318,8 @@ impl AcpThreadView {
                             .layer(ElevationIndex::ModalSurface)
                             .label_size(LabelSize::Small)
                             .key_binding(
-                                KeyBinding::for_action_in(
-                                    &ContinueThread,
-                                    &focus_handle,
-                                    window,
-                                    cx,
-                                )
-                                .map(|kb| kb.size(rems_from_px(10.))),
+                                KeyBinding::for_action_in(&ContinueThread, &focus_handle, cx)
+                                    .map(|kb| kb.size(rems_from_px(10.))),
                             )
                             .on_click(cx.listener(|this, _, _window, cx| {
                                 this.resume_chat(cx);
@@ -5382,9 +5414,11 @@ impl AcpThreadView {
             HistoryEntry::AcpThread(thread) => self.history_store.update(cx, |history, cx| {
                 history.delete_thread(thread.id.clone(), cx)
             }),
-            HistoryEntry::TextThread(context) => self.history_store.update(cx, |history, cx| {
-                history.delete_text_thread(context.path.clone(), cx)
-            }),
+            HistoryEntry::TextThread(text_thread) => {
+                self.history_store.update(cx, |history, cx| {
+                    history.delete_text_thread(text_thread.path.clone(), cx)
+                })
+            }
         };
         task.detach_and_log_err(cx);
     }
@@ -5436,7 +5470,6 @@ impl Render for AcpThreadView {
         v_flex()
             .size_full()
             .key_context("AcpThread")
-            .on_action(cx.listener(Self::open_agent_diff))
             .on_action(cx.listener(Self::toggle_burn_mode))
             .on_action(cx.listener(Self::keep_all))
             .on_action(cx.listener(Self::reject_all))
@@ -5464,7 +5497,7 @@ impl Render for AcpThreadView {
                 .into_any(),
             ThreadState::Loading { .. } => v_flex()
                 .flex_1()
-                .child(self.render_recent_history(window, cx))
+                .child(self.render_recent_history(cx))
                 .into_any(),
             ThreadState::LoadError(e) => v_flex()
                 .flex_1()
@@ -5495,8 +5528,7 @@ impl Render for AcpThreadView {
                         .vertical_scrollbar_for(self.list_state.clone(), window, cx)
                         .into_any()
                 } else {
-                    this.child(self.render_recent_history(window, cx))
-                        .into_any()
+                    this.child(self.render_recent_history(cx)).into_any()
                 }
             }),
         })
@@ -5510,7 +5542,17 @@ impl Render for AcpThreadView {
             _ => this,
         })
         .children(self.render_thread_retry_status_callout(window, cx))
-        .children(self.render_thread_error(window, cx))
+        .children({
+            #[cfg(target_os = "windows")]
+            {
+                self.render_codex_windows_warning(cx)
+            }
+            #[cfg(not(target_os = "windows"))]
+            {
+                Vec::::new()
+            }
+        })
+        .children(self.render_thread_error(cx))
         .when_some(
             self.new_server_version_available.as_ref().filter(|_| {
                 !has_messages || !matches!(self.thread_state, ThreadState::Ready { .. })
@@ -5695,7 +5737,7 @@ fn terminal_command_markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
 pub(crate) mod tests {
     use acp_thread::StubAgentConnection;
     use agent_client_protocol::SessionId;
-    use assistant_context::ContextStore;
+    use assistant_text_thread::TextThreadStore;
     use editor::EditorSettings;
     use fs::FakeFs;
     use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext};
@@ -5858,10 +5900,10 @@ pub(crate) mod tests {
         let (workspace, cx) =
             cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));

-        let context_store =
-            cx.update(|_window, cx| cx.new(|cx| ContextStore::fake(project.clone(), cx)));
+        let text_thread_store =
+            cx.update(|_window, cx| cx.new(|cx| TextThreadStore::fake(project.clone(), cx)));
         let history_store =
-            cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(context_store, cx)));
+            cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(text_thread_store, cx)));

         let thread_view = cx.update(|window, cx| {
             cx.new(|cx| {
@@ -6130,10 +6172,10 @@ pub(crate) mod tests {
         let (workspace, cx) =
             cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));

-        let context_store =
-            cx.update(|_window, cx| cx.new(|cx| ContextStore::fake(project.clone(), cx)));
+        let text_thread_store =
+            cx.update(|_window, cx| cx.new(|cx| TextThreadStore::fake(project.clone(), cx)));
         let history_store =
-            cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(context_store, cx)));
+            cx.update(|_window, cx| cx.new(|cx| HistoryStore::new(text_thread_store, cx)));

         let connection = Rc::new(StubAgentConnection::new());
         let thread_view = cx.update(|window, cx| {
diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs
index b447617c340dedb6795c0c950f08eee8f0c6d59e..ef0d4735d2d7690111ee2549cdee8ab31e32196e 100644
--- a/crates/agent_ui/src/agent_configuration.rs
+++ b/crates/agent_ui/src/agent_configuration.rs
@@ -6,9 +6,8 @@ mod tool_picker;

 use std::{ops::Range, sync::Arc};

-use agent_settings::AgentSettings;
+use agent::ContextServerRegistry;
 use anyhow::Result;
-use assistant_tool::{ToolSource, ToolWorkingSet};
 use cloud_llm_client::{Plan, PlanV1, PlanV2};
 use collections::HashMap;
 use context_server::ContextServerId;
@@ -18,7 +17,7 @@ use extension_host::ExtensionStore;
 use fs::Fs;
 use gpui::{
     Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
-    Hsla, ScrollHandle, Subscription, Task, WeakEntity,
+    ScrollHandle, Subscription, Task, WeakEntity,
 };
 use language::LanguageRegistry;
 use language_model::{
@@ -29,10 +28,10 @@ use project::{
     agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
     context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
 };
-use settings::{Settings, SettingsStore, update_settings_file};
+use settings::{SettingsStore, update_settings_file};
 use ui::{
     Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
-    Indicator, PopoverMenu, Switch, SwitchColor, SwitchField, Tooltip, WithScrollbar, prelude::*,
+    Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
 };
 use util::ResultExt as _;
 use workspace::{Workspace, create_and_open_local_file};
@@ -55,9 +54,8 @@ pub struct AgentConfiguration {
     focus_handle: FocusHandle,
     configuration_views_by_provider: HashMap,
     context_server_store: Entity,
-    expanded_context_server_tools: HashMap,
     expanded_provider_configurations: HashMap,
-    tools: Entity,
+
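Note: the `#[cfg(target_os = "windows")]` gating above defines `render_codex_windows_warning` only on Windows builds and wraps the call site in matching `#[cfg]` blocks, so each platform compiles exactly one branch. A minimal standalone sketch of that pattern follows; the names (`codex_windows_warning`, `warnings`) and the plain `String`s standing in for UI elements are invented for illustration and are not Zed's actual API.

    // Hypothetical stand-ins for the gated helper and its call site.
    #[cfg(target_os = "windows")]
    fn codex_windows_warning() -> Option<String> {
        // Only Windows builds ever construct this warning.
        Some("For best performance, run Codex in WSL2".to_string())
    }

    fn warnings() -> Vec<String> {
        // Exactly one of these blocks survives cfg stripping, so the
        // non-Windows build never references the Windows-only helper.
        #[cfg(target_os = "windows")]
        {
            codex_windows_warning().into_iter().collect()
        }
        #[cfg(not(target_os = "windows"))]
        {
            Vec::new()
        }
    }

    fn main() {
        for warning in warnings() {
            println!("{warning}");
        }
    }

Both branches produce the same iterable shape, which mirrors how the view hands either the warning or an empty collection to `.children(...)`.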
context_server_registry: Entity, _registry_subscription: Subscription, scroll_handle: ScrollHandle, _check_for_gemini: Task<()>, @@ -68,7 +66,7 @@ impl AgentConfiguration { fs: Arc, agent_server_store: Entity, context_server_store: Entity, - tools: Entity, + context_server_registry: Entity, language_registry: Arc, workspace: WeakEntity, window: &mut Window, @@ -104,9 +102,8 @@ impl AgentConfiguration { configuration_views_by_provider: HashMap::default(), agent_server_store, context_server_store, - expanded_context_server_tools: HashMap::default(), expanded_provider_configurations: HashMap::default(), - tools, + context_server_registry, _registry_subscription: registry_subscription, scroll_handle: ScrollHandle::new(), _check_for_gemini: Task::ready(()), @@ -402,101 +399,6 @@ impl AgentConfiguration { ) } - fn render_command_permission(&mut self, cx: &mut Context) -> impl IntoElement { - let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions; - let fs = self.fs.clone(); - - SwitchField::new( - "always-allow-tool-actions-switch", - Some("Allow running commands without asking for confirmation"), - Some( - "The agent can perform potentially destructive actions without asking for your confirmation.".into(), - ), - always_allow_tool_actions, - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file(fs.clone(), cx, move |settings, _| { - settings.agent.get_or_insert_default().set_always_allow_tool_actions(allow); - }); - }, - ) - } - - fn render_single_file_review(&mut self, cx: &mut Context) -> impl IntoElement { - let single_file_review = AgentSettings::get_global(cx).single_file_review; - let fs = self.fs.clone(); - - SwitchField::new( - "single-file-review", - Some("Enable single-file agent reviews"), - Some("Agent edits are also displayed in single-file editors for review.".into()), - single_file_review, - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file(fs.clone(), cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_single_file_review(allow); - }); - }, - ) - } - - fn render_sound_notification(&mut self, cx: &mut Context) -> impl IntoElement { - let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done; - let fs = self.fs.clone(); - - SwitchField::new( - "sound-notification", - Some("Play sound when finished generating"), - Some( - "Hear a notification sound when the agent is done generating changes or needs your input.".into(), - ), - play_sound_when_agent_done, - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file(fs.clone(), cx, move |settings, _| { - settings.agent.get_or_insert_default().set_play_sound_when_agent_done(allow); - }); - }, - ) - } - - fn render_modifier_to_send(&mut self, cx: &mut Context) -> impl IntoElement { - let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send; - let fs = self.fs.clone(); - - SwitchField::new( - "modifier-send", - Some("Use modifier to submit a message"), - Some( - "Make a modifier (cmd-enter on macOS, ctrl-enter on Linux or Windows) required to send messages.".into(), - ), - use_modifier_to_send, - move |state, _window, cx| { - let allow = state == &ToggleState::Selected; - update_settings_file(fs.clone(), cx, move |settings, _| { - settings.agent.get_or_insert_default().set_use_modifier_to_send(allow); - }); - }, - ) - } - - fn render_general_settings_section(&mut self, cx: &mut Context) -> impl 
IntoElement { - v_flex() - .p(DynamicSpacing::Base16.rems(cx)) - .pr(DynamicSpacing::Base20.rems(cx)) - .gap_2p5() - .border_b_1() - .border_color(cx.theme().colors().border) - .child(Headline::new("General Settings")) - .child(self.render_command_permission(cx)) - .child(self.render_single_file_review(cx)) - .child(self.render_sound_notification(cx)) - .child(self.render_modifier_to_send(cx)) - } - fn render_zed_plan_info(&self, plan: Option, cx: &mut Context) -> impl IntoElement { if let Some(plan) = plan { let free_chip_bg = cx @@ -534,10 +436,6 @@ impl AgentConfiguration { } } - fn card_item_border_color(&self, cx: &mut Context) -> Hsla { - cx.theme().colors().border.opacity(0.6) - } - fn render_context_servers_section( &mut self, window: &mut Window, @@ -663,7 +561,6 @@ impl AgentConfiguration { window: &mut Window, cx: &mut Context, ) -> impl use<> + IntoElement { - let tools_by_source = self.tools.read(cx).tools_by_source(cx); let server_status = self .context_server_store .read(cx) @@ -692,17 +589,11 @@ impl AgentConfiguration { None }; - let are_tools_expanded = self - .expanded_context_server_tools - .get(&context_server_id) - .copied() - .unwrap_or_default(); - let tools = tools_by_source - .get(&ToolSource::ContextServer { - id: context_server_id.0.clone().into(), - }) - .map_or([].as_slice(), |tools| tools.as_slice()); - let tool_count = tools.len(); + let tool_count = self + .context_server_registry + .read(cx) + .tools_for_server(&context_server_id) + .count(); let (source_icon, source_tooltip) = if is_from_extension { ( @@ -756,7 +647,7 @@ impl AgentConfiguration { let language_registry = self.language_registry.clone(); let context_server_store = self.context_server_store.clone(); let workspace = self.workspace.clone(); - let tools = self.tools.clone(); + let context_server_registry = self.context_server_registry.clone(); move |window, cx| { Some(ContextMenu::build(window, cx, |menu, _window, _cx| { @@ -774,20 +665,16 @@ impl AgentConfiguration { ) .detach_and_log_err(cx); } - }).when(tool_count >= 1, |this| this.entry("View Tools", None, { + }).when(tool_count > 0, |this| this.entry("View Tools", None, { let context_server_id = context_server_id.clone(); - let tools = tools.clone(); + let context_server_registry = context_server_registry.clone(); let workspace = workspace.clone(); - move |window, cx| { let context_server_id = context_server_id.clone(); - let tools = tools.clone(); - let workspace = workspace.clone(); - workspace.update(cx, |workspace, cx| { ConfigureContextServerToolsModal::toggle( context_server_id, - tools, + context_server_registry.clone(), workspace, window, cx, @@ -869,14 +756,6 @@ impl AgentConfiguration { .child( h_flex() .justify_between() - .when( - error.is_none() && are_tools_expanded && tool_count >= 1, - |element| { - element - .border_b_1() - .border_color(self.card_item_border_color(cx)) - }, - ) .child( h_flex() .flex_1() @@ -1000,11 +879,6 @@ impl AgentConfiguration { ), ); } - - if !are_tools_expanded || tools.is_empty() { - return parent; - } - parent }) } @@ -1141,7 +1015,6 @@ impl Render for AgentConfiguration { .track_scroll(&self.scroll_handle) .size_full() .overflow_y_scroll() - .child(self.render_general_settings_section(cx)) .child(self.render_agent_servers_section(cx)) .child(self.render_context_servers_section(window, cx)) .child(self.render_provider_configuration_section(cx)), diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index 
5e1712e626da98c60834da28906afa3eb30b92e6..8f4fdeacf303c9869e903bde95326c80fba10126 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -10,7 +10,7 @@ use settings::{OpenAiCompatibleSettingsContent, update_settings_file}; use ui::{ Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*, }; -use ui_input::SingleLineInput; +use ui_input::InputField; use workspace::{ModalView, Workspace}; #[derive(Clone, Copy)] @@ -33,9 +33,9 @@ impl LlmCompatibleProvider { } struct AddLlmProviderInput { - provider_name: Entity, - api_url: Entity, - api_key: Entity, + provider_name: Entity, + api_url: Entity, + api_key: Entity, models: Vec, } @@ -76,10 +76,10 @@ struct ModelCapabilityToggles { } struct ModelInput { - name: Entity, - max_completion_tokens: Entity, - max_output_tokens: Entity, - max_tokens: Entity, + name: Entity, + max_completion_tokens: Entity, + max_output_tokens: Entity, + max_tokens: Entity, capabilities: ModelCapabilityToggles, } @@ -171,9 +171,9 @@ fn single_line_input( text: Option<&str>, window: &mut Window, cx: &mut App, -) -> Entity { +) -> Entity { cx.new(|cx| { - let input = SingleLineInput::new(window, cx, placeholder).label(label); + let input = InputField::new(window, cx, placeholder).label(label); if let Some(text) = text { input .editor() @@ -431,7 +431,7 @@ impl Focusable for AddLlmProviderModal { impl ModalView for AddLlmProviderModal {} impl Render for AddLlmProviderModal { - fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context) -> impl IntoElement { + fn render(&mut self, _window: &mut ui::Window, cx: &mut ui::Context) -> impl IntoElement { let focus_handle = self.focus_handle(cx); div() @@ -484,7 +484,6 @@ impl Render for AddLlmProviderModal { KeyBinding::for_action_in( &menu::Cancel, &focus_handle, - window, cx, ) .map(|kb| kb.size(rems_from_px(12.))), @@ -499,7 +498,6 @@ impl Render for AddLlmProviderModal { KeyBinding::for_action_in( &menu::Confirm, &focus_handle, - window, cx, ) .map(|kb| kb.size(rems_from_px(12.))), @@ -757,12 +755,7 @@ mod tests { models: Vec<(&str, &str, &str, &str)>, cx: &mut VisualTestContext, ) -> Option { - fn set_text( - input: &Entity, - text: &str, - window: &mut Window, - cx: &mut App, - ) { + fn set_text(input: &Entity, text: &str, window: &mut Window, cx: &mut App) { input.update(cx, |input, cx| { input.editor().update(cx, |editor, cx| { editor.set_text(text, window, cx); diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index ce8e167dab3ed2e4d84c4afd747cb266740f1d42..88896f51086dc5f7d3eddb2fffef2fa3a7039c79 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -566,7 +566,7 @@ impl ConfigureContextServerModal { .into_any_element() } - fn render_modal_footer(&self, window: &mut Window, cx: &mut Context) -> ModalFooter { + fn render_modal_footer(&self, cx: &mut Context) -> ModalFooter { let focus_handle = self.focus_handle(cx); let is_connecting = matches!(self.state, State::Waiting); @@ -584,12 +584,11 @@ impl ConfigureContextServerModal { .icon_size(IconSize::Small) .tooltip({ let repository_url = repository_url.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::with_meta( "Open Repository", None, repository_url.clone(), - window, cx, ) } @@ -616,7 
+615,7 @@ impl ConfigureContextServerModal { }, ) .key_binding( - KeyBinding::for_action_in(&menu::Cancel, &focus_handle, window, cx) + KeyBinding::for_action_in(&menu::Cancel, &focus_handle, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click( @@ -634,7 +633,7 @@ impl ConfigureContextServerModal { ) .disabled(is_connecting) .key_binding( - KeyBinding::for_action_in(&menu::Confirm, &focus_handle, window, cx) + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click( @@ -709,7 +708,7 @@ impl Render for ConfigureContextServerModal { State::Error(error) => Self::render_modal_error(error.clone()), }), ) - .footer(self.render_modal_footer(window, cx)), + .footer(self.render_modal_footer(cx)), ) } } diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs index 5a59806972ecf1b6cbc0702809c98acf1a86b387..3fe0b8d1b1400b4362192261995ed5b6bd1cb662 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_tools_modal.rs @@ -1,4 +1,5 @@ -use assistant_tool::{ToolSource, ToolWorkingSet}; +use agent::ContextServerRegistry; +use collections::HashMap; use context_server::ContextServerId; use gpui::{ DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, Window, prelude::*, @@ -8,37 +9,37 @@ use workspace::{ModalView, Workspace}; pub struct ConfigureContextServerToolsModal { context_server_id: ContextServerId, - tools: Entity, + context_server_registry: Entity, focus_handle: FocusHandle, - expanded_tools: std::collections::HashMap, + expanded_tools: HashMap, scroll_handle: ScrollHandle, } impl ConfigureContextServerToolsModal { fn new( context_server_id: ContextServerId, - tools: Entity, + context_server_registry: Entity, _window: &mut Window, cx: &mut Context, ) -> Self { Self { context_server_id, - tools, + context_server_registry, focus_handle: cx.focus_handle(), - expanded_tools: std::collections::HashMap::new(), + expanded_tools: HashMap::default(), scroll_handle: ScrollHandle::new(), } } pub fn toggle( context_server_id: ContextServerId, - tools: Entity, + context_server_registry: Entity, workspace: &mut Workspace, window: &mut Window, cx: &mut Context, ) { workspace.toggle_modal(window, cx, |window, cx| { - Self::new(context_server_id, tools, window, cx) + Self::new(context_server_id, context_server_registry, window, cx) }); } @@ -51,13 +52,11 @@ impl ConfigureContextServerToolsModal { window: &mut Window, cx: &mut Context, ) -> impl IntoElement { - let tools_by_source = self.tools.read(cx).tools_by_source(cx); - let server_tools = tools_by_source - .get(&ToolSource::ContextServer { - id: self.context_server_id.0.clone().into(), - }) - .map(|tools| tools.as_slice()) - .unwrap_or(&[]); + let tools = self + .context_server_registry + .read(cx) + .tools_for_server(&self.context_server_id) + .collect::>(); div() .size_full() @@ -70,11 +69,11 @@ impl ConfigureContextServerToolsModal { .max_h_128() .overflow_y_scroll() .track_scroll(&self.scroll_handle) - .children(server_tools.iter().enumerate().flat_map(|(index, tool)| { + .children(tools.iter().enumerate().flat_map(|(index, tool)| { let tool_name = tool.name(); let is_expanded = self .expanded_tools - .get(&tool_name) + .get(tool_name.as_ref()) .copied() .unwrap_or(false); @@ -110,7 +109,7 @@ impl ConfigureContextServerToolsModal { move |this, _event, 
_window, _cx| { let current = this .expanded_tools - .get(&tool_name) + .get(tool_name.as_ref()) .copied() .unwrap_or(false); this.expanded_tools @@ -127,7 +126,7 @@ impl ConfigureContextServerToolsModal { .into_any_element(), ]; - if index < server_tools.len() - 1 { + if index < tools.len() - 1 { items.push( h_flex() .w_full() diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index 9a7f0ed602a52d3b27dde565383453f2c5c325fb..e583bb7d5425ec4c6f233ac0eed67c358ccac98d 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -2,11 +2,12 @@ mod profile_modal_header; use std::sync::Arc; +use agent::ContextServerRegistry; use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles}; -use assistant_tool::ToolWorkingSet; use editor::Editor; use fs::Fs; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*}; +use language_model::LanguageModel; use settings::Settings as _; use ui::{ KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*, @@ -17,8 +18,6 @@ use crate::agent_configuration::manage_profiles_modal::profile_modal_header::Pro use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate}; use crate::{AgentPanel, ManageProfiles}; -use super::tool_picker::ToolPickerMode; - enum Mode { ChooseProfile(ChooseProfileMode), NewProfile(NewProfileMode), @@ -97,7 +96,8 @@ pub struct NewProfileMode { pub struct ManageProfilesModal { fs: Arc, - tools: Entity, + context_server_registry: Entity, + active_model: Option>, focus_handle: FocusHandle, mode: Mode, } @@ -111,10 +111,14 @@ impl ManageProfilesModal { workspace.register_action(|workspace, action: &ManageProfiles, window, cx| { if let Some(panel) = workspace.panel::(cx) { let fs = workspace.app_state().fs.clone(); - let thread_store = panel.read(cx).thread_store(); - let tools = thread_store.read(cx).tools(); + let active_model = panel + .read(cx) + .active_native_agent_thread(cx) + .and_then(|thread| thread.read(cx).model().cloned()); + + let context_server_registry = panel.read(cx).context_server_registry().clone(); workspace.toggle_modal(window, cx, |window, cx| { - let mut this = Self::new(fs, tools, window, cx); + let mut this = Self::new(fs, active_model, context_server_registry, window, cx); if let Some(profile_id) = action.customize_tools.clone() { this.configure_builtin_tools(profile_id, window, cx); @@ -128,7 +132,8 @@ impl ManageProfilesModal { pub fn new( fs: Arc, - tools: Entity, + active_model: Option>, + context_server_registry: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -136,7 +141,8 @@ impl ManageProfilesModal { Self { fs, - tools, + active_model, + context_server_registry, focus_handle, mode: Mode::choose_profile(window, cx), } @@ -193,10 +199,9 @@ impl ManageProfilesModal { }; let tool_picker = cx.new(|cx| { - let delegate = ToolPickerDelegate::new( - ToolPickerMode::McpTools, + let delegate = ToolPickerDelegate::mcp_tools( + &self.context_server_registry, self.fs.clone(), - self.tools.clone(), profile_id.clone(), profile, cx, @@ -230,10 +235,14 @@ impl ManageProfilesModal { }; let tool_picker = cx.new(|cx| { - let delegate = ToolPickerDelegate::new( - ToolPickerMode::BuiltinTools, + let delegate = ToolPickerDelegate::builtin_tools( + //todo: This causes the web search tool to show up even it only works when 
using zed hosted models + agent::supported_built_in_tool_names( + self.active_model.as_ref().map(|model| model.provider_id()), + ) + .map(|s| s.into()) + .collect::>(), self.fs.clone(), - self.tools.clone(), profile_id.clone(), profile, cx, @@ -343,10 +352,9 @@ impl ManageProfilesModal { .size(LabelSize::Small) .color(Color::Muted), ) - .children(KeyBinding::for_action_in( + .child(KeyBinding::for_action_in( &menu::Confirm, &self.focus_handle, - window, cx, )), ) @@ -640,14 +648,13 @@ impl ManageProfilesModal { ) .child(Label::new("Go Back")) .end_slot( - div().children( + div().child( KeyBinding::for_action_in( &menu::Cancel, &self.focus_handle, - window, cx, ) - .map(|kb| kb.size(rems_from_px(12.))), + .size(rems_from_px(12.)), ), ) .on_click({ @@ -691,14 +698,9 @@ impl Render for ManageProfilesModal { ) .child(Label::new("Go Back")) .end_slot( - div().children( - KeyBinding::for_action_in( - &menu::Cancel, - &self.focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + div().child( + KeyBinding::for_action_in(&menu::Cancel, &self.focus_handle, cx) + .size(rems_from_px(12.)), ), ) .on_click({ diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index c624948944c0624e75e385d1b4b15aa77fea9bcd..6b84205e1bd6336d70751090d8f0451b1b1925b0 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -1,7 +1,7 @@ use std::{collections::BTreeMap, sync::Arc}; +use agent::ContextServerRegistry; use agent_settings::{AgentProfileId, AgentProfileSettings}; -use assistant_tool::{ToolSource, ToolWorkingSet}; use fs::Fs; use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window}; use picker::{Picker, PickerDelegate}; @@ -14,7 +14,7 @@ pub struct ToolPicker { } #[derive(Clone, Copy, Debug, PartialEq)] -pub enum ToolPickerMode { +enum ToolPickerMode { BuiltinTools, McpTools, } @@ -76,59 +76,79 @@ pub struct ToolPickerDelegate { } impl ToolPickerDelegate { - pub fn new( - mode: ToolPickerMode, + pub fn builtin_tools( + tool_names: Vec>, fs: Arc, - tool_set: Entity, profile_id: AgentProfileId, profile_settings: AgentProfileSettings, cx: &mut Context, ) -> Self { - let items = Arc::new(Self::resolve_items(mode, &tool_set, cx)); + Self::new( + Arc::new( + tool_names + .into_iter() + .map(|name| PickerItem::Tool { + name, + server_id: None, + }) + .collect(), + ), + ToolPickerMode::BuiltinTools, + fs, + profile_id, + profile_settings, + cx, + ) + } + pub fn mcp_tools( + registry: &Entity, + fs: Arc, + profile_id: AgentProfileId, + profile_settings: AgentProfileSettings, + cx: &mut Context, + ) -> Self { + let mut items = Vec::new(); + + for (id, tools) in registry.read(cx).servers() { + let server_id = id.clone().0; + items.push(PickerItem::ContextServer { + server_id: server_id.clone(), + }); + items.extend(tools.keys().map(|tool_name| PickerItem::Tool { + name: tool_name.clone().into(), + server_id: Some(server_id.clone()), + })); + } + + Self::new( + Arc::new(items), + ToolPickerMode::McpTools, + fs, + profile_id, + profile_settings, + cx, + ) + } + + fn new( + items: Arc>, + mode: ToolPickerMode, + fs: Arc, + profile_id: AgentProfileId, + profile_settings: AgentProfileSettings, + cx: &mut Context, + ) -> Self { Self { tool_picker: cx.entity().downgrade(), + mode, fs, items, profile_id, profile_settings, filtered_items: Vec::new(), selected_index: 0, - mode, - } - } - - fn resolve_items( - mode: 
ToolPickerMode, - tool_set: &Entity, - cx: &mut App, - ) -> Vec { - let mut items = Vec::new(); - for (source, tools) in tool_set.read(cx).tools_by_source(cx) { - match source { - ToolSource::Native => { - if mode == ToolPickerMode::BuiltinTools { - items.extend(tools.into_iter().map(|tool| PickerItem::Tool { - name: tool.name().into(), - server_id: None, - })); - } - } - ToolSource::ContextServer { id } => { - if mode == ToolPickerMode::McpTools && !tools.is_empty() { - let server_id: Arc = id.clone().into(); - items.push(PickerItem::ContextServer { - server_id: server_id.clone(), - }); - items.extend(tools.into_iter().map(|tool| PickerItem::Tool { - name: tool.name().into(), - server_id: Some(server_id.clone()), - })); - } - } - } } - items } } diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 67014e3c3a4c8bd9b43f34d9cad3c23832efdc13..146a1fc71d2d11023dec49e42fdcfee2b081bfd3 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -452,7 +452,10 @@ fn update_editor_selection( window: &mut Window, cx: &mut Context, ) { - let newest_cursor = editor.selections.newest::(cx).head(); + let newest_cursor = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); if !diff_hunks.iter().any(|hunk| { hunk.row_range @@ -666,7 +669,7 @@ impl Item for AgentDiffPane { } impl Render for AgentDiffPane { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_empty = self.multibuffer.read(cx).is_empty(); let focus_handle = &self.focus_handle; @@ -699,7 +702,6 @@ impl Render for AgentDiffPane { .key_binding(KeyBinding::for_action_in( &ToggleFocus, &focus_handle.clone(), - window, cx, )) .on_click(|_event, window, cx| { @@ -716,14 +718,7 @@ fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControl let thread = thread.clone(); Arc::new( - move |row, - status: &DiffHunkStatus, - hunk_range, - is_created_file, - line_height, - editor: &Entity, - window: &mut Window, - cx: &mut App| { + move |row, status, hunk_range, is_created_file, line_height, editor, _, cx| { { render_diff_hunk_controls( row, @@ -733,7 +728,6 @@ fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControl line_height, &thread, editor, - window, cx, ) } @@ -749,7 +743,6 @@ fn render_diff_hunk_controls( line_height: Pixels, thread: &AgentDiffThread, editor: &Entity, - window: &mut Window, cx: &mut App, ) -> AnyElement { let editor = editor.clone(); @@ -772,13 +765,8 @@ fn render_diff_hunk_controls( Button::new(("reject", row as u64), "Reject") .disabled(is_created_file) .key_binding( - KeyBinding::for_action_in( - &Reject, - &editor.read(cx).focus_handle(cx), - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + KeyBinding::for_action_in(&Reject, &editor.read(cx).focus_handle(cx), cx) + .map(|kb| kb.size(rems_from_px(12.))), ) .on_click({ let editor = editor.clone(); @@ -799,7 +787,7 @@ fn render_diff_hunk_controls( }), Button::new(("keep", row as u64), "Keep") .key_binding( - KeyBinding::for_action_in(&Keep, &editor.read(cx).focus_handle(cx), window, cx) + KeyBinding::for_action_in(&Keep, &editor.read(cx).focus_handle(cx), cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click({ @@ -830,14 +818,8 @@ fn render_diff_hunk_controls( // .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Next Hunk", - 
&GoToHunk, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("Next Hunk", &GoToHunk, &focus_handle, cx) } }) .on_click({ @@ -866,12 +848,11 @@ fn render_diff_hunk_controls( // .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Previous Hunk", &GoToPreviousHunk, &focus_handle, - window, cx, ) } @@ -1036,7 +1017,7 @@ impl ToolbarItemView for AgentDiffToolbar { } impl Render for AgentDiffToolbar { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let spinner_icon = div() .px_0p5() .id("generating") @@ -1111,7 +1092,6 @@ impl Render for AgentDiffToolbar { KeyBinding::for_action_in( &RejectAll, &editor_focus_handle, - window, cx, ) .map(|kb| kb.size(rems_from_px(12.))) @@ -1126,7 +1106,6 @@ impl Render for AgentDiffToolbar { KeyBinding::for_action_in( &KeepAll, &editor_focus_handle, - window, cx, ) .map(|kb| kb.size(rems_from_px(12.))) @@ -1203,13 +1182,8 @@ impl Render for AgentDiffToolbar { .child( Button::new("reject-all", "Reject All") .key_binding({ - KeyBinding::for_action_in( - &RejectAll, - &focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))) + KeyBinding::for_action_in(&RejectAll, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))) }) .on_click(cx.listener(|this, _, window, cx| { this.dispatch_action(&RejectAll, window, cx) @@ -1218,13 +1192,8 @@ impl Render for AgentDiffToolbar { .child( Button::new("keep-all", "Keep All") .key_binding({ - KeyBinding::for_action_in( - &KeepAll, - &focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))) + KeyBinding::for_action_in(&KeepAll, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))) }) .on_click(cx.listener(|this, _, window, cx| { this.dispatch_action(&KeepAll, window, cx) @@ -1895,7 +1864,9 @@ mod tests { ); assert_eq!( editor - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(1, 0)..Point::new(1, 0) ); @@ -1909,7 +1880,9 @@ mod tests { ); assert_eq!( editor - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -1930,7 +1903,9 @@ mod tests { ); assert_eq!( editor - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -1962,7 +1937,9 @@ mod tests { ); assert_eq!( editor - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -2119,7 +2096,9 @@ mod tests { ); assert_eq!( editor1 - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(1, 0)..Point::new(1, 0) ); @@ -2160,7 +2139,9 @@ mod tests { ); assert_eq!( editor1 - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -2181,7 +2162,9 @@ mod tests { ); assert_eq!( editor1 - 
.update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -2207,7 +2190,9 @@ mod tests { ); assert_eq!( editor1 - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(3, 0)..Point::new(3, 0) ); @@ -2240,7 +2225,9 @@ mod tests { ); assert_eq!( editor2 - .update(cx, |editor, cx| editor.selections.newest::(cx)) + .update(cx, |editor, cx| editor + .selections + .newest::(&editor.display_snapshot(cx))) .range(), Point::new(0, 0)..Point::new(0, 0) ); diff --git a/crates/agent_ui/src/agent_model_selector.rs b/crates/agent_ui/src/agent_model_selector.rs index fe25cadc3c1df785c89318882a246e2209cb42e6..df7d166064da20aa4bc958ebd6a9df806164eb7a 100644 --- a/crates/agent_ui/src/agent_model_selector.rs +++ b/crates/agent_ui/src/agent_model_selector.rs @@ -7,7 +7,7 @@ use gpui::{Entity, FocusHandle, SharedString}; use picker::popover_menu::PickerPopoverMenu; use settings::update_settings_file; use std::sync::Arc; -use ui::{ButtonLike, PopoverMenuHandle, Tooltip, prelude::*}; +use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*}; use zed_actions::agent::ToggleModelSelector; pub struct AgentModelSelector { @@ -70,6 +70,11 @@ impl Render for AgentModelSelector { .unwrap_or_else(|| SharedString::from("Select a Model")); let provider_icon = model.as_ref().map(|model| model.provider.icon()); + let color = if self.menu_handle.is_deployed() { + Color::Accent + } else { + Color::Muted + }; let focus_handle = self.focus_handle.clone(); @@ -77,32 +82,31 @@ impl Render for AgentModelSelector { self.selector.clone(), ButtonLike::new("active-model") .when_some(provider_icon, |this, icon| { - this.child(Icon::new(icon).color(Color::Muted).size(IconSize::XSmall)) + this.child(Icon::new(icon).color(color).size(IconSize::XSmall)) }) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .child( Label::new(model_name) - .color(Color::Muted) + .color(color) .size(LabelSize::Small) .ml_0p5(), ) .child( Icon::new(IconName::ChevronDown) - .color(Color::Muted) + .color(color) .size(IconSize::XSmall), ), - move |window, cx| { - Tooltip::for_action_in( - "Change Model", - &ToggleModelSelector, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) }, - gpui::Corner::BottomRight, + gpui::Corner::TopRight, cx, ) .with_handle(self.menu_handle.clone()) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) .render(window, cx) } } diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 480f3a2cd8b37b862ee67c96142ef4d5f370faad..997a2bec09aa2a0ae39909c909c7de80771c5055 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -4,10 +4,13 @@ use std::rc::Rc; use std::sync::Arc; use acp_thread::AcpThread; -use agent2::{DbThreadMetadata, HistoryEntry}; +use agent::{ContextServerRegistry, DbThreadMetadata, HistoryEntry, HistoryStore}; use db::kvp::{Dismissable, KEY_VALUE_STORE}; -use project::agent_server_store::{ - AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME, +use project::{ + ExternalAgentServerName, + agent_server_store::{ + AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME, + }, }; use serde::{Deserialize, Serialize}; use 
settings::{ @@ -17,11 +20,13 @@ use zed_actions::OpenBrowser; use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent}; use crate::acp::{AcpThreadHistory, ThreadHistoryEvent}; +use crate::context_store::ContextStore; use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; use crate::{ - AddContextServer, DeleteRecentlyOpenThread, Follow, InlineAssistant, NewTextThread, NewThread, - OpenActiveThreadAsMarkdown, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, - ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, + AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant, + NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, + ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, ToggleNewThreadMenu, + ToggleOptionsMenu, acp::AcpThreadView, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, slash_command::SlashCommandCompletionProvider, @@ -31,20 +36,16 @@ use crate::{ use crate::{ ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command, }; -use agent::{ - context_store::ContextStore, - history_store::{HistoryEntryId, HistoryStore}, - thread_store::{TextThreadStore, ThreadStore}, -}; use agent_settings::AgentSettings; use ai_onboarding::AgentPanelOnboarding; use anyhow::{Result, anyhow}; -use assistant_context::{AssistantContext, ContextEvent, ContextSummary}; use assistant_slash_command::SlashCommandWorkingSet; -use assistant_tool::ToolWorkingSet; +use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary}; use client::{UserStore, zed_urls}; use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit}; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; +use extension::ExtensionEvents; +use extension_host::ExtensionStore; use fs::Fs; use gpui::{ Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter, @@ -71,7 +72,9 @@ use workspace::{ }; use zed_actions::{ DecreaseBufferFontSize, IncreaseBufferFontSize, ResetBufferFontSize, - agent::{OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetOnboarding}, + agent::{ + OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetAgentZoom, ResetOnboarding, + }, assistant::{OpenRulesLibrary, ToggleFocus}, }; @@ -118,7 +121,7 @@ pub fn init(cx: &mut App) { .register_action(|workspace, _: &NewTextThread, window, cx| { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); - panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx)); + panel.update(cx, |panel, cx| panel.new_text_thread(window, cx)); } }) .register_action(|workspace, action: &NewExternalAgentThread, window, cx| { @@ -140,6 +143,16 @@ pub fn init(cx: &mut App) { .register_action(|workspace, _: &Follow, window, cx| { workspace.follow(CollaboratorId::Agent, window, cx); }) + .register_action(|workspace, _: &OpenAgentDiff, window, cx| { + let thread = workspace + .panel::(cx) + .and_then(|panel| panel.read(cx).active_thread_view().cloned()) + .and_then(|thread_view| thread_view.read(cx).thread().cloned()); + + if let Some(thread) = thread { + AgentDiffPane::deploy_in_workspace(thread, workspace, window, cx); + } + }) .register_action(|workspace, _: &ToggleNavigationMenu, window, cx| { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); @@ -182,6 +195,13 @@ pub fn init(cx: &mut App) { }) .register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| { 
TrialEndUpsell::set_dismissed(false, cx); + }) + .register_action(|workspace, _: &ResetAgentZoom, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.reset_agent_zoom(window, cx); + }); + } }); }, ) @@ -193,7 +213,7 @@ enum ActiveView { thread_view: Entity, }, TextThread { - context_editor: Entity, + text_thread_editor: Entity, title_editor: Entity, buffer_search_bar: Entity, _subscriptions: Vec, @@ -212,12 +232,11 @@ enum WhichFontSize { #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub enum AgentType { #[default] - Zed, + NativeAgent, TextThread, Gemini, ClaudeCode, Codex, - NativeAgent, Custom { name: SharedString, command: AgentServerCommand, @@ -227,8 +246,7 @@ pub enum AgentType { impl AgentType { fn label(&self) -> SharedString { match self { - Self::Zed | Self::TextThread => "Zed Agent".into(), - Self::NativeAgent => "Agent 2".into(), + Self::NativeAgent | Self::TextThread => "Zed Agent".into(), Self::Gemini => "Gemini CLI".into(), Self::ClaudeCode => "Claude Code".into(), Self::Codex => "Codex".into(), @@ -238,7 +256,7 @@ impl AgentType { fn icon(&self) -> Option { match self { - Self::Zed | Self::NativeAgent | Self::TextThread => None, + Self::NativeAgent | Self::TextThread => None, Self::Gemini => Some(IconName::AiGemini), Self::ClaudeCode => Some(IconName::AiClaude), Self::Codex => Some(IconName::AiOpenAi), @@ -273,7 +291,7 @@ impl ActiveView { pub fn native_agent( fs: Arc, prompt_store: Option>, - acp_history_store: Entity, + history_store: Entity, project: Entity, workspace: WeakEntity, window: &mut Window, @@ -281,12 +299,12 @@ impl ActiveView { ) -> Self { let thread_view = cx.new(|cx| { crate::acp::AcpThreadView::new( - ExternalAgent::NativeAgent.server(fs, acp_history_store.clone()), + ExternalAgent::NativeAgent.server(fs, history_store.clone()), None, None, workspace, project, - acp_history_store, + history_store, prompt_store, window, cx, @@ -296,15 +314,14 @@ impl ActiveView { Self::ExternalAgentThread { thread_view } } - pub fn prompt_editor( - context_editor: Entity, - history_store: Entity, - acp_history_store: Entity, + pub fn text_thread( + text_thread_editor: Entity, + acp_history_store: Entity, language_registry: Arc, window: &mut Window, cx: &mut App, ) -> Self { - let title = context_editor.read(cx).title(cx).to_string(); + let title = text_thread_editor.read(cx).title(cx).to_string(); let editor = cx.new(|cx| { let mut editor = Editor::single_line(window, cx); @@ -320,7 +337,7 @@ impl ActiveView { let subscriptions = vec![ window.subscribe(&editor, cx, { { - let context_editor = context_editor.clone(); + let text_thread_editor = text_thread_editor.clone(); move |editor, event, window, cx| match event { EditorEvent::BufferEdited => { if suppress_first_edit { @@ -329,19 +346,19 @@ impl ActiveView { } let new_summary = editor.read(cx).text(cx); - context_editor.update(cx, |context_editor, cx| { - context_editor - .context() - .update(cx, |assistant_context, cx| { - assistant_context.set_custom_summary(new_summary, cx); + text_thread_editor.update(cx, |text_thread_editor, cx| { + text_thread_editor + .text_thread() + .update(cx, |text_thread, cx| { + text_thread.set_custom_summary(new_summary, cx); }) }) } EditorEvent::Blurred => { if editor.read(cx).text(cx).is_empty() { - let summary = context_editor + let summary = text_thread_editor .read(cx) - .context() + .text_thread() .read(cx) .summary() .or_default(); @@ -355,36 +372,24 @@ impl ActiveView { } } }), - 
window.subscribe(&context_editor.read(cx).context().clone(), cx, { + window.subscribe(&text_thread_editor.read(cx).text_thread().clone(), cx, { let editor = editor.clone(); - move |assistant_context, event, window, cx| match event { - ContextEvent::SummaryGenerated => { - let summary = assistant_context.read(cx).summary().or_default(); + move |text_thread, event, window, cx| match event { + TextThreadEvent::SummaryGenerated => { + let summary = text_thread.read(cx).summary().or_default(); editor.update(cx, |editor, cx| { editor.set_text(summary, window, cx); }) } - ContextEvent::PathChanged { old_path, new_path } => { - history_store.update(cx, |history_store, cx| { - if let Some(old_path) = old_path { - history_store - .replace_recently_opened_text_thread(old_path, new_path, cx); - } else { - history_store.push_recently_opened_entry( - HistoryEntryId::Context(new_path.clone()), - cx, - ); - } - }); - + TextThreadEvent::PathChanged { old_path, new_path } => { acp_history_store.update(cx, |history_store, cx| { if let Some(old_path) = old_path { history_store .replace_recently_opened_text_thread(old_path, new_path, cx); } else { history_store.push_recently_opened_entry( - agent2::HistoryEntryId::TextThread(new_path.clone()), + agent::HistoryEntryId::TextThread(new_path.clone()), cx, ); } @@ -398,11 +403,11 @@ impl ActiveView { let buffer_search_bar = cx.new(|cx| BufferSearchBar::new(Some(language_registry), window, cx)); buffer_search_bar.update(cx, |buffer_search_bar, cx| { - buffer_search_bar.set_active_pane_item(Some(&context_editor), window, cx) + buffer_search_bar.set_active_pane_item(Some(&text_thread_editor), window, cx) }); Self::TextThread { - context_editor, + text_thread_editor, title_editor: editor, buffer_search_bar, _subscriptions: subscriptions, @@ -417,21 +422,21 @@ pub struct AgentPanel { project: Entity, fs: Arc, language_registry: Arc, - thread_store: Entity, acp_history: Entity, - acp_history_store: Entity, - context_store: Entity, + history_store: Entity, + text_thread_store: Entity, prompt_store: Option>, + context_server_registry: Entity, inline_assist_context_store: Entity, configuration: Option>, configuration_subscription: Option, active_view: ActiveView, previous_view: Option, - history_store: Entity, new_thread_menu_handle: PopoverMenuHandle, agent_panel_menu_handle: PopoverMenuHandle, - assistant_navigation_menu_handle: PopoverMenuHandle, - assistant_navigation_menu: Option>, + agent_navigation_menu_handle: PopoverMenuHandle, + agent_navigation_menu: Option>, + _extension_subscription: Option, width: Option, height: Option, zoomed: bool, @@ -469,33 +474,6 @@ impl AgentPanel { Ok(prompt_store) => prompt_store.await.ok(), Err(_) => None, }; - let tools = cx.new(|_| ToolWorkingSet::default())?; - let thread_store = workspace - .update(cx, |workspace, cx| { - let project = workspace.project().clone(); - ThreadStore::load( - project, - tools.clone(), - prompt_store.clone(), - prompt_builder.clone(), - cx, - ) - })? - .await?; - - let slash_commands = Arc::new(SlashCommandWorkingSet::default()); - let context_store = workspace - .update(cx, |workspace, cx| { - let project = workspace.project().clone(); - assistant_context::ContextStore::new( - project, - prompt_builder.clone(), - slash_commands, - cx, - ) - })? 
- .await?; - let serialized_panel = if let Some(panel) = cx .background_spawn(async move { KEY_VALUE_STORE.read_kvp(AGENT_PANEL_KEY) }) .await @@ -507,17 +485,22 @@ impl AgentPanel { None }; - let panel = workspace.update_in(cx, |workspace, window, cx| { - let panel = cx.new(|cx| { - Self::new( - workspace, - thread_store, - context_store, - prompt_store, - window, + let slash_commands = Arc::new(SlashCommandWorkingSet::default()); + let text_thread_store = workspace + .update(cx, |workspace, cx| { + let project = workspace.project().clone(); + assistant_text_thread::TextThreadStore::new( + project, + prompt_builder, + slash_commands, cx, ) - }); + })? + .await?; + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let panel = + cx.new(|cx| Self::new(workspace, text_thread_store, prompt_store, window, cx)); panel.as_mut(cx).loading = true; if let Some(serialized_panel) = serialized_panel { @@ -544,8 +527,7 @@ impl AgentPanel { fn new( workspace: &Workspace, - thread_store: Entity, - context_store: Entity, + text_thread_store: Entity, prompt_store: Option>, window: &mut Window, cx: &mut Context, @@ -557,13 +539,12 @@ impl AgentPanel { let client = workspace.client().clone(); let workspace = workspace.weak_handle(); - let inline_assist_context_store = - cx.new(|_cx| ContextStore::new(project.downgrade(), Some(thread_store.downgrade()))); + let inline_assist_context_store = cx.new(|_cx| ContextStore::new(project.downgrade())); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let history_store = cx.new(|cx| HistoryStore::new(context_store.clone(), [], cx)); - - let acp_history_store = cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), cx)); - let acp_history = cx.new(|cx| AcpThreadHistory::new(acp_history_store.clone(), window, cx)); + let history_store = cx.new(|cx| agent::HistoryStore::new(text_thread_store.clone(), cx)); + let acp_history = cx.new(|cx| AcpThreadHistory::new(history_store.clone(), window, cx)); cx.subscribe_in( &acp_history, window, @@ -578,32 +559,29 @@ impl AgentPanel { ); } ThreadHistoryEvent::Open(HistoryEntry::TextThread(thread)) => { - this.open_saved_prompt_editor(thread.path.clone(), window, cx) + this.open_saved_text_thread(thread.path.clone(), window, cx) .detach_and_log_err(cx); } }, ) .detach(); - cx.observe(&history_store, |_, _, cx| cx.notify()).detach(); - let panel_type = AgentSettings::get_global(cx).default_view; let active_view = match panel_type { DefaultView::Thread => ActiveView::native_agent( fs.clone(), prompt_store.clone(), - acp_history_store.clone(), + history_store.clone(), project.clone(), workspace.clone(), window, cx, ), DefaultView::TextThread => { - let context = - context_store.update(cx, |context_store, cx| context_store.create(cx)); + let context = text_thread_store.update(cx, |store, cx| store.create(cx)); let lsp_adapter_delegate = make_lsp_adapter_delegate(&project.clone(), cx).unwrap(); - let context_editor = cx.new(|cx| { - let mut editor = TextThreadEditor::for_context( + let text_thread_editor = cx.new(|cx| { + let mut editor = TextThreadEditor::for_text_thread( context, fs.clone(), workspace.clone(), @@ -615,10 +593,9 @@ impl AgentPanel { editor.insert_default_prompt(window, cx); editor }); - ActiveView::prompt_editor( - context_editor, + ActiveView::text_thread( + text_thread_editor, history_store.clone(), - acp_history_store.clone(), language_registry.clone(), window, cx, @@ -630,7 +607,7 @@ impl AgentPanel { window.defer(cx, move 
|window, cx| { let panel = weak_panel.clone(); - let assistant_navigation_menu = + let agent_navigation_menu = ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| { if let Some(panel) = panel.upgrade() { menu = Self::populate_recently_opened_menu_section(menu, panel, cx); @@ -644,7 +621,7 @@ impl AgentPanel { weak_panel .update(cx, |panel, cx| { cx.subscribe_in( - &assistant_navigation_menu, + &agent_navigation_menu, window, |_, menu, _: &DismissEvent, window, cx| { menu.update(cx, |menu, _| { @@ -654,7 +631,7 @@ impl AgentPanel { }, ) .detach(); - panel.assistant_navigation_menu = Some(assistant_navigation_menu); + panel.agent_navigation_menu = Some(agent_navigation_menu); }) .ok(); }); @@ -670,35 +647,56 @@ impl AgentPanel { ) }); - Self { + // Subscribe to extension events to sync agent servers when extensions change + let extension_subscription = if let Some(extension_events) = ExtensionEvents::try_global(cx) + { + Some( + cx.subscribe(&extension_events, |this, _source, event, cx| match event { + extension::Event::ExtensionInstalled(_) + | extension::Event::ExtensionUninstalled(_) + | extension::Event::ExtensionsInstalledChanged => { + this.sync_agent_servers_from_extensions(cx); + } + _ => {} + }), + ) + } else { + None + }; + + let mut panel = Self { active_view, workspace, user_store, project: project.clone(), fs: fs.clone(), language_registry, - thread_store: thread_store.clone(), - context_store, + text_thread_store, prompt_store, configuration: None, configuration_subscription: None, + context_server_registry, inline_assist_context_store, previous_view: None, - history_store: history_store.clone(), new_thread_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: PopoverMenuHandle::default(), - assistant_navigation_menu_handle: PopoverMenuHandle::default(), - assistant_navigation_menu: None, + agent_navigation_menu_handle: PopoverMenuHandle::default(), + agent_navigation_menu: None, + _extension_subscription: extension_subscription, width: None, height: None, zoomed: false, pending_serialization: None, onboarding, acp_history, - acp_history_store, + history_store, selected_agent: AgentType::default(), loading: false, - } + }; + + // Initial sync of agent servers from extensions + panel.sync_agent_servers_from_extensions(cx); + panel } pub fn toggle_focus( @@ -723,12 +721,12 @@ impl AgentPanel { &self.inline_assist_context_store } - pub(crate) fn thread_store(&self) -> &Entity { - &self.thread_store + pub(crate) fn thread_store(&self) -> &Entity { + &self.history_store } - pub(crate) fn text_thread_store(&self) -> &Entity { - &self.context_store + pub(crate) fn context_server_registry(&self) -> &Entity { + &self.context_server_registry } fn active_thread_view(&self) -> Option<&Entity> { @@ -749,7 +747,7 @@ impl AgentPanel { cx: &mut Context, ) { let Some(thread) = self - .acp_history_store + .history_store .read(cx) .thread_from_session_id(&action.from_session_id) else { @@ -765,18 +763,18 @@ impl AgentPanel { ); } - fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context) { + fn new_text_thread(&mut self, window: &mut Window, cx: &mut Context) { telemetry::event!("Agent Thread Started", agent = "zed-text"); let context = self - .context_store + .text_thread_store .update(cx, |context_store, cx| context_store.create(cx)); let lsp_adapter_delegate = make_lsp_adapter_delegate(&self.project, cx) .log_err() .flatten(); - let context_editor = cx.new(|cx| { - let mut editor = TextThreadEditor::for_context( + let text_thread_editor = 
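Note on the `LastUsedExternalAgent` change above: the `Default` derive is dropped and `.unwrap_or_default().agent` becomes `.map(|agent| agent.agent).unwrap_or(ExternalAgent::NativeAgent)`, so the fallback agent is spelled out where the persisted value is read. A simplified, self-contained sketch of that pattern, assuming `serde` (with the `derive` feature) and `serde_json` as dependencies; the enum and struct here are trimmed stand-ins rather than the real definitions.

    use serde::{Deserialize, Serialize};

    #[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
    enum ExternalAgent {
        NativeAgent,
        Gemini,
    }

    #[derive(Serialize, Deserialize)]
    struct LastUsedExternalAgent {
        agent: ExternalAgent,
    }

    /// Reads the persisted value, falling back to an explicit variant
    /// instead of whatever `Default` would silently pick.
    fn last_used_agent(stored: Option<&str>) -> ExternalAgent {
        stored
            .and_then(|value| serde_json::from_str::<LastUsedExternalAgent>(value).ok())
            .map(|last_used| last_used.agent)
            .unwrap_or(ExternalAgent::NativeAgent)
    }

    fn main() {
        assert_eq!(last_used_agent(None), ExternalAgent::NativeAgent);
        assert_eq!(
            last_used_agent(Some(r#"{"agent":"Gemini"}"#)),
            ExternalAgent::Gemini
        );
    }

Missing values and parse failures both land on the named variant, which keeps the behavior obvious at the call site.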
cx.new(|cx| { + let mut editor = TextThreadEditor::for_text_thread( context, self.fs.clone(), self.workspace.clone(), @@ -795,10 +793,9 @@ impl AgentPanel { } self.set_active_view( - ActiveView::prompt_editor( - context_editor.clone(), + ActiveView::text_thread( + text_thread_editor.clone(), self.history_store.clone(), - self.acp_history_store.clone(), self.language_registry.clone(), window, cx, @@ -806,7 +803,7 @@ impl AgentPanel { window, cx, ); - context_editor.focus_handle(cx).focus(window); + text_thread_editor.focus_handle(cx).focus(window); } fn external_thread( @@ -824,13 +821,13 @@ impl AgentPanel { const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent"; - #[derive(Default, Serialize, Deserialize)] + #[derive(Serialize, Deserialize)] struct LastUsedExternalAgent { agent: crate::ExternalAgent, } let loading = self.loading; - let history = self.acp_history_store.clone(); + let history = self.history_store.clone(); cx.spawn_in(window, async move |this, cx| { let ext_agent = match agent_choice { @@ -865,18 +862,18 @@ impl AgentPanel { .and_then(|value| { serde_json::from_str::(&value).log_err() }) - .unwrap_or_default() - .agent + .map(|agent| agent.agent) + .unwrap_or(ExternalAgent::NativeAgent) } } }; + let server = ext_agent.server(fs, history); + if !loading { - telemetry::event!("Agent Thread Started", agent = ext_agent.name()); + telemetry::event!("Agent Thread Started", agent = server.telemetry_id()); } - let server = ext_agent.server(fs, history); - this.update_in(cx, |this, window, cx| { let selected_agent = ext_agent.into(); if this.selected_agent != selected_agent { @@ -891,7 +888,7 @@ impl AgentPanel { summarize_thread, workspace.clone(), project, - this.acp_history_store.clone(), + this.history_store.clone(), this.prompt_store.clone(), window, cx, @@ -934,34 +931,31 @@ impl AgentPanel { self.set_active_view(previous_view, window, cx); } } else { - self.thread_store - .update(cx, |thread_store, cx| thread_store.reload(cx)) - .detach_and_log_err(cx); self.set_active_view(ActiveView::History, window, cx); } cx.notify(); } - pub(crate) fn open_saved_prompt_editor( + pub(crate) fn open_saved_text_thread( &mut self, path: Arc, window: &mut Window, cx: &mut Context, ) -> Task> { - let context = self - .context_store - .update(cx, |store, cx| store.open_local_context(path, cx)); + let text_thread_task = self + .history_store + .update(cx, |store, cx| store.load_text_thread(path, cx)); cx.spawn_in(window, async move |this, cx| { - let context = context.await?; + let text_thread = text_thread_task.await?; this.update_in(cx, |this, window, cx| { - this.open_prompt_editor(context, window, cx); + this.open_text_thread(text_thread, window, cx); }) }) } - pub(crate) fn open_prompt_editor( + pub(crate) fn open_text_thread( &mut self, - context: Entity, + text_thread: Entity, window: &mut Window, cx: &mut Context, ) { @@ -969,8 +963,8 @@ impl AgentPanel { .log_err() .flatten(); let editor = cx.new(|cx| { - TextThreadEditor::for_context( - context, + TextThreadEditor::for_text_thread( + text_thread, self.fs.clone(), self.workspace.clone(), self.project.clone(), @@ -986,10 +980,9 @@ impl AgentPanel { } self.set_active_view( - ActiveView::prompt_editor( + ActiveView::text_thread( editor, self.history_store.clone(), - self.acp_history_store.clone(), self.language_registry.clone(), window, cx, @@ -1009,8 +1002,10 @@ impl AgentPanel { ActiveView::ExternalAgentThread { thread_view } => { thread_view.focus_handle(cx).focus(window); } - ActiveView::TextThread { 
context_editor, .. } => { - context_editor.focus_handle(cx).focus(window); + ActiveView::TextThread { + text_thread_editor, .. + } => { + text_thread_editor.focus_handle(cx).focus(window); } ActiveView::History | ActiveView::Configuration => {} } @@ -1027,7 +1022,7 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - self.assistant_navigation_menu_handle.toggle(window, cx); + self.agent_navigation_menu_handle.toggle(window, cx); } pub fn toggle_options_menu( @@ -1073,13 +1068,21 @@ impl AgentPanel { update_settings_file(self.fs.clone(), cx, move |settings, cx| { let agent_ui_font_size = ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta; + let agent_buffer_font_size = + ThemeSettings::get_global(cx).agent_buffer_font_size(cx) + delta; + let _ = settings .theme .agent_ui_font_size .insert(theme::clamp_font_size(agent_ui_font_size).into()); + let _ = settings + .theme + .agent_buffer_font_size + .insert(theme::clamp_font_size(agent_buffer_font_size).into()); }); } else { theme::adjust_agent_ui_font_size(cx, |size| size + delta); + theme::adjust_agent_buffer_font_size(cx, |size| size + delta); } } WhichFontSize::BufferFont => { @@ -1100,12 +1103,19 @@ impl AgentPanel { if action.persist { update_settings_file(self.fs.clone(), cx, move |settings, _| { settings.theme.agent_ui_font_size = None; + settings.theme.agent_buffer_font_size = None; }); } else { theme::reset_agent_ui_font_size(cx); + theme::reset_agent_buffer_font_size(cx); } } + pub fn reset_agent_zoom(&mut self, _window: &mut Window, cx: &mut Context) { + theme::reset_agent_ui_font_size(cx); + theme::reset_agent_buffer_font_size(cx); + } + pub fn toggle_zoom(&mut self, _: &ToggleZoom, window: &mut Window, cx: &mut Context) { if self.zoomed { cx.emit(PanelEvent::ZoomOut); @@ -1120,7 +1130,6 @@ impl AgentPanel { pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context) { let agent_server_store = self.project.read(cx).agent_server_store().clone(); let context_server_store = self.project.read(cx).context_server_store(); - let tools = self.thread_store.read(cx).tools(); let fs = self.fs.clone(); self.set_active_view(ActiveView::Configuration, window, cx); @@ -1129,7 +1138,7 @@ impl AgentPanel { fs, agent_server_store, context_server_store, - tools, + self.context_server_registry.clone(), self.language_registry.clone(), self.workspace.clone(), window, @@ -1197,7 +1206,7 @@ impl AgentPanel { }); } - self.new_thread(&NewThread::default(), window, cx); + self.new_thread(&NewThread, window, cx); if let Some((thread, model)) = self .active_native_agent_thread(cx) .zip(provider.default_model(cx)) @@ -1219,7 +1228,7 @@ impl AgentPanel { } } - pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option> { + pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option> { match &self.active_view { ActiveView::ExternalAgentThread { thread_view, .. } => { thread_view.read(cx).as_native_thread(cx) @@ -1228,9 +1237,11 @@ impl AgentPanel { } } - pub(crate) fn active_context_editor(&self) -> Option> { + pub(crate) fn active_text_thread_editor(&self) -> Option> { match &self.active_view { - ActiveView::TextThread { context_editor, .. } => Some(context_editor.clone()), + ActiveView::TextThread { + text_thread_editor, .. + } => Some(text_thread_editor.clone()), _ => None, } } @@ -1251,21 +1262,16 @@ impl AgentPanel { let new_is_special = new_is_history || new_is_config; match &new_view { - ActiveView::TextThread { context_editor, .. 
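// Illustrative sketch (editor's addition, not from the patch): the agent font-size
// handling above now moves the UI and buffer sizes together. With the action's
// `persist` flag both values are clamped and written to the settings file; otherwise
// both are adjusted in memory only. The free function and the `Arc<dyn Fs>`, `Pixels`,
// and `&mut App` parameter types are assumptions; the helper calls mirror the
// surrounding code.
fn bump_agent_font_sizes(fs: Arc<dyn Fs>, delta: Pixels, persist: bool, cx: &mut App) {
    if persist {
        update_settings_file(fs, cx, move |settings, cx| {
            let ui = ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta;
            let buffer = ThemeSettings::get_global(cx).agent_buffer_font_size(cx) + delta;
            // Clamp before persisting so repeated zoom actions cannot push either
            // size out of range.
            let _ = settings
                .theme
                .agent_ui_font_size
                .insert(theme::clamp_font_size(ui).into());
            let _ = settings
                .theme
                .agent_buffer_font_size
                .insert(theme::clamp_font_size(buffer).into());
        });
    } else {
        // Transient adjustment only; nothing is written to the settings file.
        theme::adjust_agent_ui_font_size(cx, |size| size + delta);
        theme::adjust_agent_buffer_font_size(cx, |size| size + delta);
    }
}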
} => { - self.history_store.update(cx, |store, cx| { - if let Some(path) = context_editor.read(cx).context().read(cx).path() { - store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx) - } - }); - self.acp_history_store.update(cx, |store, cx| { - if let Some(path) = context_editor.read(cx).context().read(cx).path() { - store.push_recently_opened_entry( - agent2::HistoryEntryId::TextThread(path.clone()), - cx, - ) - } - }) - } + ActiveView::TextThread { + text_thread_editor, .. + } => self.history_store.update(cx, |store, cx| { + if let Some(path) = text_thread_editor.read(cx).text_thread().read(cx).path() { + store.push_recently_opened_entry( + agent::HistoryEntryId::TextThread(path.clone()), + cx, + ) + } + }), ActiveView::ExternalAgentThread { .. } => {} ActiveView::History | ActiveView::Configuration => {} } @@ -1291,7 +1297,7 @@ impl AgentPanel { ) -> ContextMenu { let entries = panel .read(cx) - .acp_history_store + .history_store .read(cx) .recently_opened_entries(cx); @@ -1314,15 +1320,15 @@ impl AgentPanel { let entry = entry.clone(); panel .update(cx, move |this, cx| match &entry { - agent2::HistoryEntry::AcpThread(entry) => this.external_thread( + agent::HistoryEntry::AcpThread(entry) => this.external_thread( Some(ExternalAgent::NativeAgent), Some(entry.clone()), None, window, cx, ), - agent2::HistoryEntry::TextThread(entry) => this - .open_saved_prompt_editor(entry.path.clone(), window, cx) + agent::HistoryEntry::TextThread(entry) => this + .open_saved_text_thread(entry.path.clone(), window, cx) .detach_and_log_err(cx), }) .ok(); @@ -1336,7 +1342,7 @@ impl AgentPanel { move |_window, cx| { panel .update(cx, |this, cx| { - this.acp_history_store.update(cx, |history_store, cx| { + this.history_store.update(cx, |history_store, cx| { history_store.remove_recently_opened_entry(&id, cx); }); }) @@ -1355,6 +1361,31 @@ impl AgentPanel { self.selected_agent.clone() } + fn sync_agent_servers_from_extensions(&mut self, cx: &mut Context) { + if let Some(extension_store) = ExtensionStore::try_global(cx) { + let (manifests, extensions_dir) = { + let store = extension_store.read(cx); + let installed = store.installed_extensions(); + let manifests: Vec<_> = installed + .iter() + .map(|(id, entry)| (id.clone(), entry.manifest.clone())) + .collect(); + let extensions_dir = paths::extensions_dir().join("installed"); + (manifests, extensions_dir) + }; + + self.project.update(cx, |project, cx| { + project.agent_server_store().update(cx, |store, cx| { + let manifest_refs: Vec<_> = manifests + .iter() + .map(|(id, manifest)| (id.as_ref(), manifest.as_ref())) + .collect(); + store.sync_extension_agents(manifest_refs, extensions_dir, cx); + }); + }); + } + } + pub fn new_agent_thread( &mut self, agent: AgentType, @@ -1362,15 +1393,6 @@ impl AgentPanel { cx: &mut Context, ) { match agent { - AgentType::Zed => { - window.dispatch_action( - NewThread { - from_thread_id: None, - } - .boxed_clone(), - cx, - ); - } AgentType::TextThread => { window.dispatch_action(NewTextThread.boxed_clone(), cx); } @@ -1431,7 +1453,9 @@ impl Focusable for AgentPanel { match &self.active_view { ActiveView::ExternalAgentThread { thread_view, .. } => thread_view.focus_handle(cx), ActiveView::History => self.acp_history.focus_handle(cx), - ActiveView::TextThread { context_editor, .. } => context_editor.focus_handle(cx), + ActiveView::TextThread { + text_thread_editor, .. 
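// Illustrative sketch (editor's addition, not from the patch): the collect-then-update
// shape used by `sync_agent_servers_from_extensions` above. The ExtensionStore is read
// and the installed manifests copied out first, so that read borrow ends before the
// nested `update` calls on the project and its AgentServerStore begin. All calls
// mirror the surrounding code; only the free function itself is invented for
// illustration.
fn resync_extension_agents(project: &Entity<Project>, cx: &mut App) {
    let Some(extension_store) = ExtensionStore::try_global(cx) else {
        return; // extension system not initialized (e.g. in some tests)
    };

    // Read phase: copy out ids and manifests while holding only a read borrow.
    let manifests: Vec<_> = extension_store
        .read(cx)
        .installed_extensions()
        .iter()
        .map(|(id, entry)| (id.clone(), entry.manifest.clone()))
        .collect();
    let extensions_dir = paths::extensions_dir().join("installed");

    // Write phase: the read borrow is gone, so the nested updates are safe.
    project.update(cx, |project, cx| {
        project.agent_server_store().update(cx, |store, cx| {
            let manifest_refs: Vec<_> = manifests
                .iter()
                .map(|(id, manifest)| (id.as_ref(), manifest.as_ref()))
                .collect();
            store.sync_extension_agents(manifest_refs, extensions_dir, cx);
        });
    });
}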
+ } => text_thread_editor.focus_handle(cx), ActiveView::Configuration => { if let Some(configuration) = self.configuration.as_ref() { configuration.focus_handle(cx) @@ -1454,6 +1478,10 @@ impl Panel for AgentPanel { "AgentPanel" } + fn panel_key() -> &'static str { + AGENT_PANEL_KEY + } + fn position(&self, _window: &Window, cx: &App) -> DockPosition { agent_panel_dock_position(cx) } @@ -1562,17 +1590,17 @@ impl AgentPanel { } ActiveView::TextThread { title_editor, - context_editor, + text_thread_editor, .. } => { - let summary = context_editor.read(cx).context().read(cx).summary(); + let summary = text_thread_editor.read(cx).text_thread().read(cx).summary(); match summary { - ContextSummary::Pending => Label::new(ContextSummary::DEFAULT) + TextThreadSummary::Pending => Label::new(TextThreadSummary::DEFAULT) .color(Color::Muted) .truncate() .into_any_element(), - ContextSummary::Content(summary) => { + TextThreadSummary::Content(summary) => { if summary.done { div() .w_full() @@ -1585,17 +1613,17 @@ impl AgentPanel { .into_any_element() } } - ContextSummary::Error => h_flex() + TextThreadSummary::Error => h_flex() .w_full() .child(title_editor.clone()) .child( IconButton::new("retry-summary-generation", IconName::RotateCcw) .icon_size(IconSize::Small) .on_click({ - let context_editor = context_editor.clone(); + let text_thread_editor = text_thread_editor.clone(); move |_, _window, cx| { - context_editor.update(cx, |context_editor, cx| { - context_editor.regenerate_summary(cx); + text_thread_editor.update(cx, |text_thread_editor, cx| { + text_thread_editor.regenerate_summary(cx); }); } }) @@ -1650,12 +1678,11 @@ impl AgentPanel { .icon_size(IconSize::Small), { let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Agent Menu", &ToggleOptionsMenu, &focus_handle, - window, cx, ) } @@ -1719,7 +1746,7 @@ impl AgentPanel { .separator(); menu = menu - .action("Rules…", Box::new(OpenRulesLibrary::default())) + .action("Rules", Box::new(OpenRulesLibrary::default())) .action("Settings", Box::new(OpenSettings)) .separator() .action(full_screen_label, Box::new(ToggleZoom)); @@ -1746,21 +1773,20 @@ impl AgentPanel { .trigger_with_tooltip( IconButton::new("agent-nav-menu", icon).icon_size(IconSize::Small), { - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Recent Threads", &ToggleNavigationMenu, &focus_handle, - window, cx, ) } }, ) .anchor(corner) - .with_handle(self.assistant_navigation_menu_handle.clone()) + .with_handle(self.agent_navigation_menu_handle.clone()) .menu({ - let menu = self.assistant_navigation_menu.clone(); + let menu = self.agent_navigation_menu.clone(); move |window, cx| { telemetry::event!("View Thread History Clicked"); @@ -1785,8 +1811,8 @@ impl AgentPanel { this.go_back(&workspace::GoBack, window, cx); })) .tooltip({ - move |window, cx| { - Tooltip::for_action_in("Go Back", &workspace::GoBack, &focus_handle, window, cx) + move |_window, cx| { + Tooltip::for_action_in("Go Back", &workspace::GoBack, &focus_handle, cx) } }) } @@ -1795,6 +1821,16 @@ impl AgentPanel { let agent_server_store = self.project.read(cx).agent_server_store().clone(); let focus_handle = self.focus_handle(cx); + // Get custom icon path for selected agent before building menu (to avoid borrow issues) + let selected_agent_custom_icon = + if let AgentType::Custom { name, .. 
} = &self.selected_agent { + agent_server_store + .read(cx) + .agent_icon(&ExternalAgentServerName(name.clone())) + } else { + None + }; + let active_thread = match &self.active_view { ActiveView::ExternalAgentThread { thread_view } => { thread_view.read(cx).as_native_thread(cx) @@ -1807,14 +1843,8 @@ impl AgentPanel { IconButton::new("new_thread_menu_btn", IconName::Plus).icon_size(IconSize::Small), { let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in( - "New…", - &ToggleNewThreadMenu, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("New…", &ToggleNewThreadMenu, &focus_handle, cx) } }, ) @@ -1833,8 +1863,7 @@ impl AgentPanel { let active_thread = active_thread.clone(); Some(ContextMenu::build(window, cx, |menu, _window, cx| { - menu - .context(focus_handle.clone()) + menu.context(focus_handle.clone()) .header("Zed Agent") .when_some(active_thread, |this, active_thread| { let thread = active_thread.read(cx); @@ -1860,7 +1889,7 @@ impl AgentPanel { }) .item( ContextMenuEntry::new("New Thread") - .action(NewThread::default().boxed_clone()) + .action(NewThread.boxed_clone()) .icon(IconName::Thread) .icon_color(Color::Muted) .handler({ @@ -1991,83 +2020,110 @@ impl AgentPanel { }), ) .map(|mut menu| { - let agent_names = agent_server_store - .read(cx) + let agent_server_store_read = agent_server_store.read(cx); + let agent_names = agent_server_store_read .external_agents() .filter(|name| { - name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME + name.0 != GEMINI_NAME + && name.0 != CLAUDE_CODE_NAME + && name.0 != CODEX_NAME }) .cloned() .collect::>(); - let custom_settings = cx.global::().get::(None).custom.clone(); + let custom_settings = cx + .global::() + .get::(None) + .custom + .clone(); for agent_name in agent_names { - menu = menu.item( - ContextMenuEntry::new(format!("New {} Thread", agent_name)) - .icon(IconName::Terminal) - .icon_color(Color::Muted) - .disabled(is_via_collab) - .handler({ - let workspace = workspace.clone(); - let agent_name = agent_name.clone(); - let custom_settings = custom_settings.clone(); - move |window, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::Custom { - name: agent_name.clone().into(), - command: custom_settings - .get(&agent_name.0) - .map(|settings| { - settings.command.clone() - }) - .unwrap_or(placeholder_command()), - }, - window, - cx, - ); - }); - } - }); - } + let icon_path = agent_server_store_read.agent_icon(&agent_name); + let mut entry = + ContextMenuEntry::new(format!("New {} Thread", agent_name)); + if let Some(icon_path) = icon_path { + entry = entry.custom_icon_path(icon_path); + } else { + entry = entry.icon(IconName::Terminal); + } + entry = entry + .icon_color(Color::Muted) + .disabled(is_via_collab) + .handler({ + let workspace = workspace.clone(); + let agent_name = agent_name.clone(); + let custom_settings = custom_settings.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.new_agent_thread( + AgentType::Custom { + name: agent_name + .clone() + .into(), + command: custom_settings + .get(&agent_name.0) + .map(|settings| { + settings + .command + .clone() + }) + .unwrap_or( + placeholder_command( + ), + ), + }, 
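// Illustrative sketch (editor's addition, not from the patch): the per-agent entry
// construction above, isolated. Extension-provided agents may register their own icon,
// so the menu entry prefers `agent_icon` from the AgentServerStore and only falls back
// to the generic terminal icon when none is registered; the launch command likewise
// falls back to `placeholder_command()` when the user has no custom settings for that
// agent. The helper function and the `Option<SharedString>` icon-path type are
// assumptions.
fn menu_entry_for_agent(
    label: String,
    icon_path: Option<SharedString>, // from agent_server_store.read(cx).agent_icon(&name)
    is_via_collab: bool,
) -> ContextMenuEntry {
    let entry = ContextMenuEntry::new(label);
    let entry = match icon_path {
        Some(path) => entry.custom_icon_path(path),
        None => entry.icon(IconName::Terminal),
    };
    entry.icon_color(Color::Muted).disabled(is_via_collab)
}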
+ window, + cx, + ); + }); + } + }); } - }), - ); + } + }); + menu = menu.item(entry); } menu }) - .separator().link( - "Add Other Agents", - OpenBrowser { - url: zed_urls::external_agents_docs(cx), - } - .boxed_clone(), - ) + .separator() + .link( + "Add Other Agents", + OpenBrowser { + url: zed_urls::external_agents_docs(cx), + } + .boxed_clone(), + ) })) } }); let selected_agent_label = self.selected_agent.label(); + + let has_custom_icon = selected_agent_custom_icon.is_some(); let selected_agent = div() .id("selected_agent_icon") - .when_some(self.selected_agent.icon(), |this, icon| { + .when_some(selected_agent_custom_icon, |this, icon_path| { + let label = selected_agent_label.clone(); this.px(DynamicSpacing::Base02.rems(cx)) - .child(Icon::new(icon).color(Color::Muted)) - .tooltip(move |window, cx| { - Tooltip::with_meta( - selected_agent_label.clone(), - None, - "Selected Agent", - window, - cx, - ) + .child(Icon::from_path(icon_path).color(Color::Muted)) + .tooltip(move |_window, cx| { + Tooltip::with_meta(label.clone(), None, "Selected Agent", cx) }) }) + .when(!has_custom_icon, |this| { + this.when_some(self.selected_agent.icon(), |this, icon| { + let label = selected_agent_label.clone(); + this.px(DynamicSpacing::Base02.rems(cx)) + .child(Icon::new(icon).color(Color::Muted)) + .tooltip(move |_window, cx| { + Tooltip::with_meta(label.clone(), None, "Selected Agent", cx) + }) + }) + }) .into_any_element(); h_flex() @@ -2167,10 +2223,7 @@ impl AgentPanel { false } _ => { - let history_is_empty = self.acp_history_store.read(cx).is_empty(cx) - && self - .history_store - .update(cx, |store, cx| store.recent_entries(1, cx).is_empty()); + let history_is_empty = self.history_store.read(cx).is_empty(cx); let has_configured_non_zed_providers = LanguageModelRegistry::read_global(cx) .providers() @@ -2244,7 +2297,6 @@ impl AgentPanel { border_bottom: bool, configuration_error: &ConfigurationError, focus_handle: &FocusHandle, - window: &mut Window, cx: &mut App, ) -> impl IntoElement { let zed_provider_configured = AgentSettings::get_global(cx) @@ -2293,7 +2345,7 @@ impl AgentPanel { .style(ButtonStyle::Tinted(ui::TintColor::Warning)) .label_size(LabelSize::Small) .key_binding( - KeyBinding::for_action_in(&OpenSettings, focus_handle, window, cx) + KeyBinding::for_action_in(&OpenSettings, focus_handle, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_event, window, cx| { @@ -2309,9 +2361,9 @@ impl AgentPanel { } } - fn render_prompt_editor( + fn render_text_thread( &self, - context_editor: &Entity, + text_thread_editor: &Entity, buffer_search_bar: &Entity, window: &mut Window, cx: &mut Context, @@ -2345,7 +2397,7 @@ impl AgentPanel { ) }) }) - .child(context_editor.clone()) + .child(text_thread_editor.clone()) .child(self.render_drag_target(cx)) } @@ -2421,10 +2473,12 @@ impl AgentPanel { thread_view.insert_dragged_files(paths, added_worktrees, window, cx); }); } - ActiveView::TextThread { context_editor, .. } => { - context_editor.update(cx, |context_editor, cx| { + ActiveView::TextThread { + text_thread_editor, .. + } => { + text_thread_editor.update(cx, |text_thread_editor, cx| { TextThreadEditor::insert_dragged_files( - context_editor, + text_thread_editor, paths, added_worktrees, window, @@ -2440,8 +2494,8 @@ impl AgentPanel { let mut key_context = KeyContext::new_with_defaults(); key_context.add("AgentPanel"); match &self.active_view { - ActiveView::ExternalAgentThread { .. } => key_context.add("external_agent_thread"), - ActiveView::TextThread { .. 
} => key_context.add("prompt_editor"), + ActiveView::ExternalAgentThread { .. } => key_context.add("acp_thread"), + ActiveView::TextThread { .. } => key_context.add("text_thread"), ActiveView::History | ActiveView::Configuration => {} } key_context @@ -2495,7 +2549,7 @@ impl Render for AgentPanel { .child(self.render_drag_target(cx)), ActiveView::History => parent.child(self.acp_history.clone()), ActiveView::TextThread { - context_editor, + text_thread_editor, buffer_search_bar, .. } => { @@ -2511,15 +2565,14 @@ impl Render for AgentPanel { true, err, &self.focus_handle(cx), - window, cx, )) } else { this } }) - .child(self.render_prompt_editor( - context_editor, + .child(self.render_text_thread( + text_thread_editor, buffer_search_bar, window, cx, @@ -2569,8 +2622,7 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { }; let prompt_store = None; let thread_store = None; - let text_thread_store = None; - let context_store = cx.new(|_| ContextStore::new(project.clone(), None)); + let context_store = cx.new(|_| ContextStore::new(project.clone())); assistant.assist( prompt_editor, self.workspace.clone(), @@ -2578,7 +2630,6 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { project, prompt_store, thread_store, - text_thread_store, initial_prompt, window, cx, @@ -2599,17 +2650,17 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { pub struct ConcreteAssistantPanelDelegate; impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { - fn active_context_editor( + fn active_text_thread_editor( &self, workspace: &mut Workspace, _window: &mut Window, cx: &mut Context, ) -> Option> { let panel = workspace.panel::(cx)?; - panel.read(cx).active_context_editor() + panel.read(cx).active_text_thread_editor() } - fn open_saved_context( + fn open_local_text_thread( &self, workspace: &mut Workspace, path: Arc, @@ -2621,14 +2672,14 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { }; panel.update(cx, |panel, cx| { - panel.open_saved_prompt_editor(path, window, cx) + panel.open_saved_text_thread(path, window, cx) }) } - fn open_remote_context( + fn open_remote_text_thread( &self, _workspace: &mut Workspace, - _context_id: assistant_context::ContextId, + _text_thread_id: assistant_text_thread::TextThreadId, _window: &mut Window, _cx: &mut Context, ) -> Task>> { @@ -2659,15 +2710,15 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { thread_view.update(cx, |thread_view, cx| { thread_view.insert_selections(window, cx); }); - } else if let Some(context_editor) = panel.active_context_editor() { + } else if let Some(text_thread_editor) = panel.active_text_thread_editor() { let snapshot = buffer.read(cx).snapshot(cx); let selection_ranges = selection_ranges .into_iter() .map(|range| range.to_point(&snapshot)) .collect::>(); - context_editor.update(cx, |context_editor, cx| { - context_editor.quote_ranges(selection_ranges, snapshot, window, cx) + text_thread_editor.update(cx, |text_thread_editor, cx| { + text_thread_editor.quote_ranges(selection_ranges, snapshot, window, cx) }); } }); diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 2c439a725456976f090ddc4cb754664c4953d626..7869aa4e0191f393a05ff1b2c0307bccaef41dc8 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -4,8 +4,10 @@ mod agent_diff; mod agent_model_selector; mod agent_panel; mod buffer_codegen; +mod context; mod context_picker; mod context_server_configuration; +mod context_store; mod 
context_strip; mod inline_assistant; mod inline_prompt_editor; @@ -22,7 +24,6 @@ mod ui; use std::rc::Rc; use std::sync::Arc; -use agent::ThreadId; use agent_settings::{AgentProfileId, AgentSettings}; use assistant_slash_command::SlashCommandRegistry; use client::Client; @@ -129,20 +130,11 @@ actions!( ] ); -#[derive(Clone, Copy, Debug, PartialEq, Eq, Action)] -#[action(namespace = agent)] -#[action(deprecated_aliases = ["assistant::QuoteSelection"])] -/// Quotes the current selection in the agent panel's message editor. -pub struct QuoteSelection; - /// Creates a new conversation thread, optionally based on an existing thread. #[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = agent)] #[serde(deny_unknown_fields)] -pub struct NewThread { - #[serde(default)] - from_thread_id: Option, -} +pub struct NewThread; /// Creates a new external agent conversation thread. #[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] @@ -161,10 +153,9 @@ pub struct NewNativeAgentThreadFromSummary { } // TODO unify this with AgentType -#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] -enum ExternalAgent { - #[default] +pub enum ExternalAgent { Gemini, ClaudeCode, Codex, @@ -184,26 +175,26 @@ fn placeholder_command() -> AgentServerCommand { } impl ExternalAgent { - fn name(&self) -> &'static str { - match self { - Self::NativeAgent => "zed", - Self::Gemini => "gemini-cli", - Self::ClaudeCode => "claude-code", - Self::Codex => "codex", - Self::Custom { .. } => "custom", + pub fn parse_built_in(server: &dyn agent_servers::AgentServer) -> Option { + match server.telemetry_id() { + "gemini-cli" => Some(Self::Gemini), + "claude-code" => Some(Self::ClaudeCode), + "codex" => Some(Self::Codex), + "zed" => Some(Self::NativeAgent), + _ => None, } } pub fn server( &self, fs: Arc, - history: Entity, + history: Entity, ) -> Rc { match self { Self::Gemini => Rc::new(agent_servers::Gemini), Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode), Self::Codex => Rc::new(agent_servers::Codex), - Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)), + Self::NativeAgent => Rc::new(agent::NativeAgentServer::new(fs, history)), Self::Custom { name, command: _ } => { Rc::new(agent_servers::CustomAgentServer::new(name.clone())) } @@ -259,7 +250,7 @@ pub fn init( ) { AgentSettings::register(cx); - assistant_context::init(client.clone(), cx); + assistant_text_thread::init(client.clone(), cx); rules_library::init(cx); if !is_eval { // Initializing the language model from the user settings messes with the eval, so we only initialize them when @@ -267,7 +258,6 @@ pub fn init( init_language_model_settings(cx); } assistant_slash_command::init(cx); - agent::init(fs.clone(), cx); agent_panel::init(cx); context_server_configuration::init(language_registry.clone(), fs.clone(), cx); TextThreadEditor::init(cx); diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 2309aad754aee55af5ad040c39d22304486446a4..215e2a74d7be9cbcb18442dcefa1581d08eec7b2 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -1,7 +1,5 @@ -use crate::inline_prompt_editor::CodegenStatus; -use agent::{ - ContextStore, - context::{ContextLoadResult, load_context}, +use crate::{ + context::load_context, context_store::ContextStore, inline_prompt_editor::CodegenStatus, }; use 
agent_settings::AgentSettings; use anyhow::{Context as _, Result}; @@ -434,16 +432,16 @@ impl CodegenAlternative { .generate_inline_transformation_prompt(user_prompt, language_name, buffer, range) .context("generating content prompt")?; - let context_task = self.context_store.as_ref().map(|context_store| { + let context_task = self.context_store.as_ref().and_then(|context_store| { if let Some(project) = self.project.upgrade() { let context = context_store .read(cx) .context() .cloned() .collect::>(); - load_context(context, &project, &self.prompt_store, cx) + Some(load_context(context, &project, &self.prompt_store, cx)) } else { - Task::ready(ContextLoadResult::default()) + None } }); @@ -459,7 +457,6 @@ impl CodegenAlternative { if let Some(context_task) = context_task { context_task .await - .loaded_context .add_to_request_message(&mut request_message); } diff --git a/crates/agent/src/context.rs b/crates/agent_ui/src/context.rs similarity index 89% rename from crates/agent/src/context.rs rename to crates/agent_ui/src/context.rs index 3b2922087a94c497c07f1df67a8d4d9adf759909..2a1ff4a1d9d3e0bb6c8b128cf7f944e9ed3ff657 100644 --- a/crates/agent/src/context.rs +++ b/crates/agent_ui/src/context.rs @@ -1,11 +1,8 @@ -use crate::thread::Thread; -use assistant_context::AssistantContext; -use assistant_tool::outline; -use collections::HashSet; +use agent::outline; +use assistant_text_thread::TextThread; use futures::future; use futures::{FutureExt, future::Shared}; use gpui::{App, AppContext as _, ElementId, Entity, SharedString, Task}; -use icons::IconName; use language::Buffer; use language_model::{LanguageModelImage, LanguageModelRequestMessage, MessageContent}; use project::{Project, ProjectEntryId, ProjectPath, Worktree}; @@ -17,6 +14,7 @@ use std::hash::{Hash, Hasher}; use std::path::PathBuf; use std::{ops::Range, path::Path, sync::Arc}; use text::{Anchor, OffsetRangeExt as _}; +use ui::IconName; use util::markdown::MarkdownCodeBlock; use util::rel_path::RelPath; use util::{ResultExt as _, post_inc}; @@ -181,7 +179,7 @@ impl FileContextHandle { }) } - fn load(self, cx: &App) -> Task>)>> { + fn load(self, cx: &App) -> Task> { let buffer_ref = self.buffer.read(cx); let Some(file) = buffer_ref.file() else { log::error!("file context missing path"); @@ -206,7 +204,7 @@ impl FileContextHandle { text: buffer_content.text.into(), is_outline: buffer_content.is_outline, }); - Some((context, vec![buffer])) + Some(context) }) } } @@ -256,11 +254,7 @@ impl DirectoryContextHandle { self.entry_id.hash(state) } - fn load( - self, - project: Entity, - cx: &mut App, - ) -> Task>)>> { + fn load(self, project: Entity, cx: &mut App) -> Task> { let Some(worktree) = project.read(cx).worktree_for_entry(self.entry_id, cx) else { return Task::ready(None); }; @@ -307,7 +301,7 @@ impl DirectoryContextHandle { }); cx.background_spawn(async move { - let (rope, buffer) = rope_task.await?; + let (rope, _buffer) = rope_task.await?; let fenced_codeblock = MarkdownCodeBlock { tag: &codeblock_tag(&full_path, None), text: &rope.to_string(), @@ -318,18 +312,22 @@ impl DirectoryContextHandle { rel_path, fenced_codeblock, }; - Some((descendant, buffer)) + Some(descendant) }) })); cx.background_spawn(async move { - let (descendants, buffers) = descendants_future.await.into_iter().flatten().unzip(); + let descendants = descendants_future + .await + .into_iter() + .flatten() + .collect::>(); let context = AgentContext::Directory(DirectoryContext { handle: self, full_path: directory_full_path, descendants, }); - Some((context, 
buffers)) + Some(context) }) } } @@ -397,7 +395,7 @@ impl SymbolContextHandle { .into() } - fn load(self, cx: &App) -> Task>)>> { + fn load(self, cx: &App) -> Task> { let buffer_ref = self.buffer.read(cx); let Some(file) = buffer_ref.file() else { log::error!("symbol context's file has no path"); @@ -406,14 +404,13 @@ impl SymbolContextHandle { let full_path = file.full_path(cx).to_string_lossy().into_owned(); let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot()); let text = self.text(cx); - let buffer = self.buffer.clone(); let context = AgentContext::Symbol(SymbolContext { handle: self, full_path, line_range, text, }); - Task::ready(Some((context, vec![buffer]))) + Task::ready(Some(context)) } } @@ -468,13 +465,12 @@ impl SelectionContextHandle { .into() } - fn load(self, cx: &App) -> Task>)>> { + fn load(self, cx: &App) -> Task> { let Some(full_path) = self.full_path(cx) else { log::error!("selection context's file has no path"); return Task::ready(None); }; let text = self.text(cx); - let buffer = self.buffer.clone(); let context = AgentContext::Selection(SelectionContext { full_path: full_path.to_string_lossy().into_owned(), line_range: self.line_range(cx), @@ -482,7 +478,7 @@ impl SelectionContextHandle { handle: self, }); - Task::ready(Some((context, vec![buffer]))) + Task::ready(Some(context)) } } @@ -523,8 +519,8 @@ impl FetchedUrlContext { })) } - pub fn load(self) -> Task>)>> { - Task::ready(Some((AgentContext::FetchedUrl(self), vec![]))) + pub fn load(self) -> Task> { + Task::ready(Some(AgentContext::FetchedUrl(self))) } } @@ -537,7 +533,7 @@ impl Display for FetchedUrlContext { #[derive(Debug, Clone)] pub struct ThreadContextHandle { - pub thread: Entity, + pub thread: Entity, pub context_id: ContextId, } @@ -558,22 +554,20 @@ impl ThreadContextHandle { } pub fn title(&self, cx: &App) -> SharedString { - self.thread.read(cx).summary().or_default() + self.thread.read(cx).title() } - fn load(self, cx: &App) -> Task>)>> { - cx.spawn(async move |cx| { - let text = Thread::wait_for_detailed_summary_or_text(&self.thread, cx).await?; - let title = self - .thread - .read_with(cx, |thread, _cx| thread.summary().or_default()) - .ok()?; + fn load(self, cx: &mut App) -> Task> { + let task = self.thread.update(cx, |thread, cx| thread.summary(cx)); + let title = self.title(cx); + cx.background_spawn(async move { + let text = task.await?; let context = AgentContext::Thread(ThreadContext { title, text, handle: self, }); - Some((context, vec![])) + Some(context) }) } } @@ -587,7 +581,7 @@ impl Display for ThreadContext { #[derive(Debug, Clone)] pub struct TextThreadContextHandle { - pub context: Entity, + pub text_thread: Entity, pub context_id: ContextId, } @@ -601,26 +595,26 @@ pub struct TextThreadContext { impl TextThreadContextHandle { // pub fn lookup_key() -> pub fn eq_for_key(&self, other: &Self) -> bool { - self.context == other.context + self.text_thread == other.text_thread } pub fn hash_for_key(&self, state: &mut H) { - self.context.hash(state) + self.text_thread.hash(state) } pub fn title(&self, cx: &App) -> SharedString { - self.context.read(cx).summary().or_default() + self.text_thread.read(cx).summary().or_default() } - fn load(self, cx: &App) -> Task>)>> { + fn load(self, cx: &App) -> Task> { let title = self.title(cx); - let text = self.context.read(cx).to_xml(cx); + let text = self.text_thread.read(cx).to_xml(cx); let context = AgentContext::TextThread(TextThreadContext { title, text: text.into(), handle: self, }); - Task::ready(Some((context, vec![]))) + 
Task::ready(Some(context)) } } @@ -666,7 +660,7 @@ impl RulesContextHandle { self, prompt_store: &Option>, cx: &App, - ) -> Task>)>> { + ) -> Task> { let Some(prompt_store) = prompt_store.as_ref() else { return Task::ready(None); }; @@ -685,7 +679,7 @@ impl RulesContextHandle { title, text, }); - Some((context, vec![])) + Some(context) }) } } @@ -748,32 +742,21 @@ impl ImageContext { } } - pub fn load(self, cx: &App) -> Task>)>> { + pub fn load(self, cx: &App) -> Task> { cx.background_spawn(async move { self.image_task.clone().await; - Some((AgentContext::Image(self), vec![])) + Some(AgentContext::Image(self)) }) } } -#[derive(Debug, Clone, Default)] -pub struct ContextLoadResult { - pub loaded_context: LoadedContext, - pub referenced_buffers: HashSet>, -} - #[derive(Debug, Clone, Default)] pub struct LoadedContext { - pub contexts: Vec, pub text: String, pub images: Vec, } impl LoadedContext { - pub fn is_empty(&self) -> bool { - self.text.is_empty() && self.images.is_empty() - } - pub fn add_to_request_message(&self, request_message: &mut LanguageModelRequestMessage) { if !self.text.is_empty() { request_message @@ -804,7 +787,7 @@ pub fn load_context( project: &Entity, prompt_store: &Option>, cx: &mut App, -) -> Task { +) -> Task { let load_tasks: Vec<_> = contexts .into_iter() .map(|context| match context { @@ -823,16 +806,7 @@ pub fn load_context( cx.background_spawn(async move { let load_results = future::join_all(load_tasks).await; - let mut contexts = Vec::new(); let mut text = String::new(); - let mut referenced_buffers = HashSet::default(); - for context in load_results { - let Some((context, buffers)) = context else { - continue; - }; - contexts.push(context); - referenced_buffers.extend(buffers); - } let mut file_context = Vec::new(); let mut directory_context = Vec::new(); @@ -843,7 +817,7 @@ pub fn load_context( let mut text_thread_context = Vec::new(); let mut rules_context = Vec::new(); let mut images = Vec::new(); - for context in &contexts { + for context in load_results.into_iter().flatten() { match context { AgentContext::File(context) => file_context.push(context), AgentContext::Directory(context) => directory_context.push(context), @@ -868,14 +842,7 @@ pub fn load_context( && text_thread_context.is_empty() && rules_context.is_empty() { - return ContextLoadResult { - loaded_context: LoadedContext { - contexts, - text, - images, - }, - referenced_buffers, - }; + return LoadedContext { text, images }; } text.push_str( @@ -961,14 +928,7 @@ pub fn load_context( text.push_str("\n"); - ContextLoadResult { - loaded_context: LoadedContext { - contexts, - text, - images, - }, - referenced_buffers, - } + LoadedContext { text, images } }) } @@ -1131,11 +1091,13 @@ mod tests { assert!(content_len > outline::AUTO_OUTLINE_SIZE); - let file_context = file_context_for(large_content, cx).await; + let file_context = load_context_for("file.txt", large_content, cx).await; assert!( - file_context.is_outline, - "Large file should use outline format" + file_context + .text + .contains(&format!("# File outline for {}", path!("test/file.txt"))), + "Large files should not get an outline" ); assert!( @@ -1153,29 +1115,38 @@ mod tests { assert!(content_len < outline::AUTO_OUTLINE_SIZE); - let file_context = file_context_for(small_content.to_string(), cx).await; + let file_context = load_context_for("file.txt", small_content.to_string(), cx).await; assert!( - !file_context.is_outline, + !file_context + .text + .contains(&format!("# File outline for {}", path!("test/file.txt"))), "Small files should 
not get an outline" ); - assert_eq!(file_context.text, small_content); + assert!( + file_context.text.contains(small_content), + "Small files should use full content" + ); } - async fn file_context_for(content: String, cx: &mut TestAppContext) -> FileContext { + async fn load_context_for( + filename: &str, + content: String, + cx: &mut TestAppContext, + ) -> LoadedContext { // Create a test project with the file let project = create_test_project( cx, json!({ - "file.txt": content, + filename: content, }), ) .await; // Open the buffer let buffer_path = project - .read_with(cx, |project, cx| project.find_project_path("file.txt", cx)) + .read_with(cx, |project, cx| project.find_project_path(filename, cx)) .unwrap(); let buffer = project @@ -1190,16 +1161,5 @@ mod tests { cx.update(|cx| load_context(vec![context_handle], &project, &None, cx)) .await - .loaded_context - .contexts - .into_iter() - .find_map(|ctx| { - if let AgentContext::File(file_ctx) = ctx { - Some(file_ctx) - } else { - None - } - }) - .expect("Should have found a file context") } } diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs index 58edecdf3da6b16bca82a7d4c0e73dcac3969e03..caffb31521e397ca7cd6b1fa0c8f4ae73d5ab9ff 100644 --- a/crates/agent_ui/src/context_picker.rs +++ b/crates/agent_ui/src/context_picker.rs @@ -9,6 +9,8 @@ use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; +use agent::{HistoryEntry, HistoryEntryId, HistoryStore}; +use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use collections::HashSet; pub use completion_provider::ContextPickerCompletionProvider; @@ -27,9 +29,7 @@ use project::ProjectPath; use prompt_store::PromptStore; use rules_context_picker::{RulesContextEntry, RulesContextPicker}; use symbol_context_picker::SymbolContextPicker; -use thread_context_picker::{ - ThreadContextEntry, ThreadContextPicker, render_thread_context_entry, unordered_thread_entries, -}; +use thread_context_picker::render_thread_context_entry; use ui::{ ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor, prelude::*, }; @@ -37,12 +37,8 @@ use util::paths::PathStyle; use util::rel_path::RelPath; use workspace::{Workspace, notifications::NotifyResultExt}; -use agent::{ - ThreadId, - context::RULES_ICON, - context_store::ContextStore, - thread_store::{TextThreadStore, ThreadStore}, -}; +use crate::context_picker::thread_context_picker::ThreadContextPicker; +use crate::{context::RULES_ICON, context_store::ContextStore}; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum ContextPickerEntry { @@ -168,17 +164,16 @@ pub(super) struct ContextPicker { mode: ContextPickerState, workspace: WeakEntity, context_store: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, - prompt_store: Option>, + thread_store: Option>, + prompt_store: Option>, _subscriptions: Vec, } impl ContextPicker { pub fn new( workspace: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, context_store: WeakEntity, window: &mut Window, cx: &mut Context, @@ -199,13 +194,6 @@ impl ContextPicker { ) .collect::>(); - let prompt_store = thread_store.as_ref().and_then(|thread_store| { - thread_store - .read_with(cx, |thread_store, _cx| thread_store.prompt_store().clone()) - .ok() - .flatten() - }); - ContextPicker { mode: ContextPickerState::Default(ContextMenu::build( window, @@ -215,7 +203,6 @@ impl ContextPicker { workspace, context_store, thread_store, - text_thread_store, prompt_store, 
_subscriptions: subscriptions, } @@ -355,17 +342,13 @@ impl ContextPicker { })); } ContextPickerMode::Thread => { - if let Some((thread_store, text_thread_store)) = self - .thread_store - .as_ref() - .zip(self.text_thread_store.as_ref()) - { + if let Some(thread_store) = self.thread_store.clone() { self.mode = ContextPickerState::Thread(cx.new(|cx| { ThreadContextPicker::new( - thread_store.clone(), - text_thread_store.clone(), + thread_store, context_picker.clone(), self.context_store.clone(), + self.workspace.clone(), window, cx, ) @@ -480,16 +463,23 @@ impl ContextPicker { fn add_recent_thread( &self, - entry: ThreadContextEntry, - window: &mut Window, + entry: HistoryEntry, + _window: &mut Window, cx: &mut Context, ) -> Task> { let Some(context_store) = self.context_store.upgrade() else { return Task::ready(Err(anyhow!("context store not available"))); }; + let Some(project) = self + .workspace + .upgrade() + .map(|workspace| workspace.read(cx).project().clone()) + else { + return Task::ready(Err(anyhow!("project not available"))); + }; match entry { - ThreadContextEntry::Thread { id, .. } => { + HistoryEntry::AcpThread(thread) => { let Some(thread_store) = self .thread_store .as_ref() @@ -497,28 +487,28 @@ impl ContextPicker { else { return Task::ready(Err(anyhow!("thread store not available"))); }; - - let open_thread_task = - thread_store.update(cx, |this, cx| this.open_thread(&id, window, cx)); + let load_thread_task = + agent::load_agent_thread(thread.id, thread_store, project, cx); cx.spawn(async move |this, cx| { - let thread = open_thread_task.await?; + let thread = load_thread_task.await?; context_store.update(cx, |context_store, cx| { context_store.add_thread(thread, true, cx); })?; this.update(cx, |_this, cx| cx.notify()) }) } - ThreadContextEntry::Context { path, .. 
} => { - let Some(text_thread_store) = self - .text_thread_store + HistoryEntry::TextThread(thread) => { + let Some(thread_store) = self + .thread_store .as_ref() .and_then(|thread_store| thread_store.upgrade()) else { return Task::ready(Err(anyhow!("text thread store not available"))); }; - let task = text_thread_store - .update(cx, |this, cx| this.open_local_context(path.clone(), cx)); + let task = thread_store.update(cx, |this, cx| { + this.load_text_thread(thread.path.clone(), cx) + }); cx.spawn(async move |this, cx| { let thread = task.await?; context_store.update(cx, |context_store, cx| { @@ -542,7 +532,6 @@ impl ContextPicker { recent_context_picker_entries_with_store( context_store, self.thread_store.clone(), - self.text_thread_store.clone(), workspace, None, cx, @@ -599,12 +588,12 @@ pub(crate) enum RecentEntry { project_path: ProjectPath, path_prefix: Arc, }, - Thread(ThreadContextEntry), + Thread(HistoryEntry), } pub(crate) fn available_context_picker_entries( - prompt_store: &Option>, - thread_store: &Option>, + prompt_store: &Option>, + thread_store: &Option>, workspace: &Entity, cx: &mut App, ) -> Vec { @@ -617,7 +606,11 @@ pub(crate) fn available_context_picker_entries( .read(cx) .active_item(cx) .and_then(|item| item.downcast::()) - .is_some_and(|editor| editor.update(cx, |editor, cx| editor.has_non_empty_selection(cx))); + .is_some_and(|editor| { + editor.update(cx, |editor, cx| { + editor.has_non_empty_selection(&editor.display_snapshot(cx)) + }) + }); if has_selection { entries.push(ContextPickerEntry::Action( ContextPickerAction::AddSelections, @@ -639,8 +632,7 @@ pub(crate) fn available_context_picker_entries( fn recent_context_picker_entries_with_store( context_store: Entity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, workspace: Entity, exclude_path: Option, cx: &App, @@ -657,22 +649,14 @@ fn recent_context_picker_entries_with_store( let exclude_threads = context_store.read(cx).thread_ids(); - recent_context_picker_entries( - thread_store, - text_thread_store, - workspace, - &exclude_paths, - exclude_threads, - cx, - ) + recent_context_picker_entries(thread_store, workspace, &exclude_paths, exclude_threads, cx) } pub(crate) fn recent_context_picker_entries( - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, workspace: Entity, exclude_paths: &HashSet, - _exclude_threads: &HashSet, + exclude_threads: &HashSet, cx: &App, ) -> Vec { let mut recent = Vec::with_capacity(6); @@ -698,30 +682,21 @@ pub(crate) fn recent_context_picker_entries( }), ); - if let Some((thread_store, text_thread_store)) = thread_store - .and_then(|store| store.upgrade()) - .zip(text_thread_store.and_then(|store| store.upgrade())) - { - let mut threads = unordered_thread_entries(thread_store, text_thread_store, cx) - .filter(|(_, thread)| match thread { - ThreadContextEntry::Thread { .. } => false, - ThreadContextEntry::Context { .. 
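// Illustrative sketch (editor's addition, not from the patch): the AcpThread branch of
// `add_recent_thread` above, flattened into one function. The entry's id is resolved
// through `agent::load_agent_thread` and the loaded thread is attached to the
// ContextStore; the TextThread branch is analogous but loads through
// `HistoryStore::load_text_thread`. The free function and its use of `&mut App` are
// assumptions; the real code runs inside the picker's own context.
fn attach_recent_acp_thread(
    entry: HistoryEntry,
    thread_store: Entity<HistoryStore>,
    project: Entity<Project>,
    context_store: Entity<ContextStore>,
    cx: &mut App,
) -> Task<Result<()>> {
    let HistoryEntry::AcpThread(thread) = entry else {
        return Task::ready(Err(anyhow!("not an ACP thread entry")));
    };
    let load_thread_task = agent::load_agent_thread(thread.id, thread_store, project, cx);
    cx.spawn(async move |cx| {
        let thread = load_thread_task.await?;
        context_store.update(cx, |context_store, cx| {
            context_store.add_thread(thread, true, cx);
        })
    })
}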
} => true, - }) - .collect::>(); - - const RECENT_COUNT: usize = 2; - if threads.len() > RECENT_COUNT { - threads.select_nth_unstable_by_key(RECENT_COUNT - 1, |(updated_at, _)| { - std::cmp::Reverse(*updated_at) - }); - threads.truncate(RECENT_COUNT); - } - threads.sort_unstable_by_key(|(updated_at, _)| std::cmp::Reverse(*updated_at)); - + if let Some(thread_store) = thread_store.and_then(|store| store.upgrade()) { + const RECENT_THREADS_COUNT: usize = 2; recent.extend( - threads - .into_iter() - .map(|(_, thread)| RecentEntry::Thread(thread)), + thread_store + .read(cx) + .recently_opened_entries(cx) + .iter() + .filter(|e| match e.id() { + HistoryEntryId::AcpThread(session_id) => !exclude_threads.contains(&session_id), + HistoryEntryId::TextThread(path) => { + !exclude_paths.contains(&path.to_path_buf()) + } + }) + .take(RECENT_THREADS_COUNT) + .map(|thread| RecentEntry::Thread(thread.clone())), ); } @@ -754,7 +729,7 @@ pub(crate) fn selection_ranges( }; editor.update(cx, |editor, cx| { - let selections = editor.selections.all_adjusted(cx); + let selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); let buffer = editor.buffer().clone().read(cx); let snapshot = buffer.snapshot(cx); @@ -915,17 +890,21 @@ impl MentionLink { ) } - pub fn for_thread(thread: &ThreadContextEntry) -> String { + pub fn for_thread(thread: &HistoryEntry) -> String { match thread { - ThreadContextEntry::Thread { id, title } => { - format!("[@{}]({}:{})", title, Self::THREAD, id) + HistoryEntry::AcpThread(thread) => { + format!("[@{}]({}:{})", thread.title, Self::THREAD, thread.id) } - ThreadContextEntry::Context { path, title } => { - let filename = path.file_name().unwrap_or_default().to_string_lossy(); + HistoryEntry::TextThread(thread) => { + let filename = thread + .path + .file_name() + .unwrap_or_default() + .to_string_lossy(); let escaped_filename = urlencoding::encode(&filename); format!( "[@{}]({}:{}{})", - title, + thread.title, Self::THREAD, Self::TEXT_THREAD_URL_PREFIX, escaped_filename diff --git a/crates/agent_ui/src/context_picker/completion_provider.rs b/crates/agent_ui/src/context_picker/completion_provider.rs index 33a5a621a1d1ea23ccdb49fd97010fea1856ce80..56444141f12903db4868f9e154cccdb872b48514 100644 --- a/crates/agent_ui/src/context_picker/completion_provider.rs +++ b/crates/agent_ui/src/context_picker/completion_provider.rs @@ -3,7 +3,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use agent::context_store::ContextStore; +use agent::{HistoryEntry, HistoryStore}; use anyhow::Result; use editor::{CompletionProvider, Editor, ExcerptId, ToOffset as _}; use file_icons::FileIcons; @@ -11,12 +11,12 @@ use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{App, Entity, Task, WeakEntity}; use http_client::HttpClientWithUrl; use itertools::Itertools; -use language::{Buffer, CodeLabel, HighlightId}; +use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId}; use lsp::CompletionContext; use project::lsp_store::SymbolLocation; use project::{ - Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, ProjectPath, - Symbol, WorktreeId, + Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project, + ProjectPath, Symbol, WorktreeId, }; use prompt_store::PromptStore; use rope::Point; @@ -27,10 +27,9 @@ use util::paths::PathStyle; use util::rel_path::RelPath; use workspace::Workspace; -use agent::{ - Thread, +use crate::{ context::{AgentContextHandle, AgentContextKey, RULES_ICON}, - 
thread_store::{TextThreadStore, ThreadStore}, + context_store::ContextStore, }; use super::fetch_context_picker::fetch_url_content; @@ -38,7 +37,7 @@ use super::file_context_picker::{FileMatch, search_files}; use super::rules_context_picker::{RulesContextEntry, search_rules}; use super::symbol_context_picker::SymbolMatch; use super::symbol_context_picker::search_symbols; -use super::thread_context_picker::{ThreadContextEntry, ThreadMatch, search_threads}; +use super::thread_context_picker::search_threads; use super::{ ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry, available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges, @@ -48,7 +47,8 @@ use crate::message_editor::ContextCreasesAddon; pub(crate) enum Match { File(FileMatch), Symbol(SymbolMatch), - Thread(ThreadMatch), + Thread(HistoryEntry), + RecentThread(HistoryEntry), Fetch(SharedString), Rules(RulesContextEntry), Entry(EntryMatch), @@ -65,6 +65,7 @@ impl Match { Match::File(file) => file.mat.score, Match::Entry(mode) => mode.mat.as_ref().map(|mat| mat.score).unwrap_or(1.), Match::Thread(_) => 1., + Match::RecentThread(_) => 1., Match::Symbol(_) => 1., Match::Fetch(_) => 1., Match::Rules(_) => 1., @@ -77,9 +78,8 @@ fn search( query: String, cancellation_flag: Arc, recent_entries: Vec, - prompt_store: Option>, - thread_store: Option>, - text_thread_context_store: Option>, + prompt_store: Option>, + thread_store: Option>, workspace: Entity, cx: &mut App, ) -> Task> { @@ -107,13 +107,9 @@ fn search( } Some(ContextPickerMode::Thread) => { - if let Some((thread_store, context_store)) = thread_store - .as_ref() - .and_then(|t| t.upgrade()) - .zip(text_thread_context_store.as_ref().and_then(|t| t.upgrade())) - { + if let Some(thread_store) = thread_store.as_ref().and_then(|t| t.upgrade()) { let search_threads_task = - search_threads(query, cancellation_flag, thread_store, context_store, cx); + search_threads(query, cancellation_flag, &thread_store, cx); cx.background_spawn(async move { search_threads_task .await @@ -135,8 +131,8 @@ fn search( } Some(ContextPickerMode::Rules) => { - if let Some(prompt_store) = prompt_store.as_ref() { - let search_rules_task = search_rules(query, cancellation_flag, prompt_store, cx); + if let Some(prompt_store) = prompt_store.as_ref().and_then(|p| p.upgrade()) { + let search_rules_task = search_rules(query, cancellation_flag, &prompt_store, cx); cx.background_spawn(async move { search_rules_task .await @@ -169,12 +165,7 @@ fn search( }, is_recent: true, }), - super::RecentEntry::Thread(thread_context_entry) => { - Match::Thread(ThreadMatch { - thread: thread_context_entry, - is_recent: true, - }) - } + super::RecentEntry::Thread(entry) => Match::RecentThread(entry), }) .collect::>(); @@ -245,8 +236,8 @@ fn search( pub struct ContextPickerCompletionProvider { workspace: WeakEntity, context_store: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, editor: WeakEntity, excluded_buffer: Option>, } @@ -255,8 +246,8 @@ impl ContextPickerCompletionProvider { pub fn new( workspace: WeakEntity, context_store: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, editor: WeakEntity, exclude_buffer: Option>, ) -> Self { @@ -264,7 +255,7 @@ impl ContextPickerCompletionProvider { workspace, context_store, thread_store, - text_thread_store, + prompt_store, editor, excluded_buffer: exclude_buffer, } @@ -406,14 +397,14 @@ impl 
ContextPickerCompletionProvider { } fn completion_for_thread( - thread_entry: ThreadContextEntry, + thread_entry: HistoryEntry, excerpt_id: ExcerptId, source_range: Range, recent: bool, editor: Entity, context_store: Entity, - thread_store: Entity, - text_thread_store: Entity, + thread_store: Entity, + project: Entity, ) -> Completion { let icon_for_completion = if recent { IconName::HistoryRerun @@ -439,18 +430,16 @@ impl ContextPickerCompletionProvider { editor, context_store.clone(), move |window, cx| match &thread_entry { - ThreadContextEntry::Thread { id, .. } => { - let thread_id = id.clone(); + HistoryEntry::AcpThread(thread) => { let context_store = context_store.clone(); - let thread_store = thread_store.clone(); + let load_thread_task = agent::load_agent_thread( + thread.id.clone(), + thread_store.clone(), + project.clone(), + cx, + ); window.spawn::<_, Option<_>>(cx, async move |cx| { - let thread: Entity = thread_store - .update_in(cx, |thread_store, window, cx| { - thread_store.open_thread(&thread_id, window, cx) - }) - .ok()? - .await - .log_err()?; + let thread = load_thread_task.await.log_err()?; let context = context_store .update(cx, |context_store, cx| { context_store.add_thread(thread, false, cx) @@ -459,13 +448,13 @@ impl ContextPickerCompletionProvider { Some(context) }) } - ThreadContextEntry::Context { path, .. } => { - let path = path.clone(); + HistoryEntry::TextThread(thread) => { + let path = thread.path.clone(); let context_store = context_store.clone(); - let text_thread_store = text_thread_store.clone(); + let thread_store = thread_store.clone(); cx.spawn::<_, Option<_>>(async move |cx| { - let thread = text_thread_store - .update(cx, |store, cx| store.open_local_context(path, cx)) + let thread = thread_store + .update(cx, |store, cx| store.load_text_thread(path, cx)) .ok()? 
.await .log_err()?; @@ -686,7 +675,8 @@ impl ContextPickerCompletionProvider { }; let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabel::plain(symbol.name.clone(), None); + let mut label = CodeLabelBuilder::default(); + label.push_str(&symbol.name, None); label.push_str(" ", None); label.push_str(&file_name, comment_id); label.push_str(&format!(" L{}", symbol.range.start.0.row + 1), comment_id); @@ -696,7 +686,7 @@ impl ContextPickerCompletionProvider { Some(Completion { replace_range: source_range.clone(), new_text, - label, + label: label.build(), documentation: None, source: project::CompletionSource::Custom, icon_path: Some(IconName::Code.path().into()), @@ -729,7 +719,7 @@ impl ContextPickerCompletionProvider { fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); - let mut label = CodeLabel::default(); + let mut label = CodeLabelBuilder::default(); label.push_str(file_name, None); label.push_str(" ", None); @@ -738,9 +728,7 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: label.push_str(directory, comment_id); } - label.filter_range = 0..label.text().len(); - - label + label.build() } impl CompletionProvider for ContextPickerCompletionProvider { @@ -775,7 +763,7 @@ impl CompletionProvider for ContextPickerCompletionProvider { ..snapshot.anchor_after(state.source_range.end); let thread_store = self.thread_store.clone(); - let text_thread_store = self.text_thread_store.clone(); + let prompt_store = self.prompt_store.clone(); let editor = self.editor.clone(); let http_client = workspace.read(cx).client().http_client(); let path_style = workspace.read(cx).path_style(cx); @@ -793,19 +781,11 @@ impl CompletionProvider for ContextPickerCompletionProvider { let recent_entries = recent_context_picker_entries_with_store( context_store.clone(), thread_store.clone(), - text_thread_store.clone(), workspace.clone(), excluded_path.clone(), cx, ); - let prompt_store = thread_store.as_ref().and_then(|thread_store| { - thread_store - .read_with(cx, |thread_store, _cx| thread_store.prompt_store().clone()) - .ok() - .flatten() - }); - let search_task = search( mode, query, @@ -813,14 +793,14 @@ impl CompletionProvider for ContextPickerCompletionProvider { recent_entries, prompt_store, thread_store.clone(), - text_thread_store.clone(), workspace.clone(), cx, ); + let project = workspace.read(cx).project().downgrade(); cx.spawn(async move |_, cx| { let matches = search_task.await; - let Some(editor) = editor.upgrade() else { + let Some((editor, project)) = editor.upgrade().zip(project.upgrade()) else { return Ok(Vec::new()); }; @@ -861,25 +841,32 @@ impl CompletionProvider for ContextPickerCompletionProvider { workspace.clone(), cx, ), - - Match::Thread(ThreadMatch { - thread, is_recent, .. 
- }) => { + Match::Thread(thread) => { let thread_store = thread_store.as_ref().and_then(|t| t.upgrade())?; - let text_thread_store = - text_thread_store.as_ref().and_then(|t| t.upgrade())?; Some(Self::completion_for_thread( thread, excerpt_id, source_range.clone(), - is_recent, + false, editor.clone(), context_store.clone(), thread_store, - text_thread_store, + project.clone(), + )) + } + Match::RecentThread(thread) => { + let thread_store = thread_store.as_ref().and_then(|t| t.upgrade())?; + Some(Self::completion_for_thread( + thread, + excerpt_id, + source_range.clone(), + true, + editor.clone(), + context_store.clone(), + thread_store, + project.clone(), )) } - Match::Rules(user_rules) => Some(Self::completion_for_rules( user_rules, excerpt_id, @@ -1282,7 +1269,7 @@ mod tests { editor }); - let context_store = cx.new(|_| ContextStore::new(project.downgrade(), None)); + let context_store = cx.new(|_| ContextStore::new(project.downgrade())); let editor_entity = editor.downgrade(); editor.update_in(&mut cx, |editor, window, cx| { diff --git a/crates/agent_ui/src/context_picker/fetch_context_picker.rs b/crates/agent_ui/src/context_picker/fetch_context_picker.rs index dd558b2a1c88f60e68313b208b076a0974b30f85..31fc45aca3ccbf561793769939169d214aaa2d99 100644 --- a/crates/agent_ui/src/context_picker/fetch_context_picker.rs +++ b/crates/agent_ui/src/context_picker/fetch_context_picker.rs @@ -2,7 +2,6 @@ use std::cell::RefCell; use std::rc::Rc; use std::sync::Arc; -use agent::context_store::ContextStore; use anyhow::{Context as _, Result, bail}; use futures::AsyncReadExt as _; use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity}; @@ -12,7 +11,7 @@ use picker::{Picker, PickerDelegate}; use ui::{Context, ListItem, Window, prelude::*}; use workspace::Workspace; -use crate::context_picker::ContextPicker; +use crate::{context_picker::ContextPicker, context_store::ContextStore}; pub struct FetchContextPicker { picker: Entity>, diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs index 4f7a4308406f9d9fbdfa42cc86adc1ffe7593396..8d1e5cb46dfba7bc89770356334fb08a7bf7a0c5 100644 --- a/crates/agent_ui/src/context_picker/file_context_picker.rs +++ b/crates/agent_ui/src/context_picker/file_context_picker.rs @@ -12,8 +12,10 @@ use ui::{ListItem, Tooltip, prelude::*}; use util::{ResultExt as _, paths::PathStyle, rel_path::RelPath}; use workspace::Workspace; -use crate::context_picker::ContextPicker; -use agent::context_store::{ContextStore, FileInclusion}; +use crate::{ + context_picker::ContextPicker, + context_store::{ContextStore, FileInclusion}, +}; pub struct FileContextPicker { picker: Entity>, diff --git a/crates/agent_ui/src/context_picker/rules_context_picker.rs b/crates/agent_ui/src/context_picker/rules_context_picker.rs index 677011577aef23296a34203acdb10e5228ca7cd7..68f4917a4fd5689aab1a418dd78d2c8a322cd717 100644 --- a/crates/agent_ui/src/context_picker/rules_context_picker.rs +++ b/crates/agent_ui/src/context_picker/rules_context_picker.rs @@ -7,9 +7,11 @@ use prompt_store::{PromptId, PromptStore, UserPromptId}; use ui::{ListItem, prelude::*}; use util::ResultExt as _; -use crate::context_picker::ContextPicker; -use agent::context::RULES_ICON; -use agent::context_store::{self, ContextStore}; +use crate::{ + context::RULES_ICON, + context_picker::ContextPicker, + context_store::{self, ContextStore}, +}; pub struct RulesContextPicker { picker: Entity>, @@ -17,7 +19,7 @@ pub struct 
RulesContextPicker { impl RulesContextPicker { pub fn new( - prompt_store: Entity, + prompt_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, window: &mut Window, @@ -49,7 +51,7 @@ pub struct RulesContextEntry { } pub struct RulesContextPickerDelegate { - prompt_store: Entity, + prompt_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, matches: Vec, @@ -58,7 +60,7 @@ pub struct RulesContextPickerDelegate { impl RulesContextPickerDelegate { pub fn new( - prompt_store: Entity, + prompt_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, ) -> Self { @@ -102,12 +104,10 @@ impl PickerDelegate for RulesContextPickerDelegate { window: &mut Window, cx: &mut Context>, ) -> Task<()> { - let search_task = search_rules( - query, - Arc::new(AtomicBool::default()), - &self.prompt_store, - cx, - ); + let Some(prompt_store) = self.prompt_store.upgrade() else { + return Task::ready(()); + }; + let search_task = search_rules(query, Arc::new(AtomicBool::default()), &prompt_store, cx); cx.spawn_in(window, async move |this, cx| { let matches = search_task.await; this.update(cx, |this, cx| { diff --git a/crates/agent_ui/src/context_picker/symbol_context_picker.rs b/crates/agent_ui/src/context_picker/symbol_context_picker.rs index 5b89f09de884067a94832c7bf474a2949e78c420..fbce71d94efd84b1acc6e0b5d4ea11cb2b9243d5 100644 --- a/crates/agent_ui/src/context_picker/symbol_context_picker.rs +++ b/crates/agent_ui/src/context_picker/symbol_context_picker.rs @@ -15,9 +15,9 @@ use ui::{ListItem, prelude::*}; use util::ResultExt as _; use workspace::Workspace; -use crate::context_picker::ContextPicker; -use agent::context::AgentContextHandle; -use agent::context_store::ContextStore; +use crate::{ + context::AgentContextHandle, context_picker::ContextPicker, context_store::ContextStore, +}; pub struct SymbolContextPicker { picker: Entity>, diff --git a/crates/agent_ui/src/context_picker/thread_context_picker.rs b/crates/agent_ui/src/context_picker/thread_context_picker.rs index 9e843779c2216a89fe23dce514553e50043b8187..d6a3a270742fe28c483d2d7d39894eb9e3c021ea 100644 --- a/crates/agent_ui/src/context_picker/thread_context_picker.rs +++ b/crates/agent_ui/src/context_picker/thread_context_picker.rs @@ -1,19 +1,16 @@ -use std::path::Path; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use chrono::{DateTime, Utc}; +use crate::{ + context_picker::ContextPicker, + context_store::{self, ContextStore}, +}; +use agent::{HistoryEntry, HistoryStore}; use fuzzy::StringMatchCandidate; use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity}; use picker::{Picker, PickerDelegate}; use ui::{ListItem, prelude::*}; - -use crate::context_picker::ContextPicker; -use agent::{ - ThreadId, - context_store::{self, ContextStore}, - thread_store::{TextThreadStore, ThreadStore}, -}; +use workspace::Workspace; pub struct ThreadContextPicker { picker: Entity>, @@ -21,18 +18,18 @@ pub struct ThreadContextPicker { impl ThreadContextPicker { pub fn new( - thread_store: WeakEntity, - text_thread_context_store: WeakEntity, + thread_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, + workspace: WeakEntity, window: &mut Window, cx: &mut Context, ) -> Self { let delegate = ThreadContextPickerDelegate::new( thread_store, - text_thread_context_store, context_picker, context_store, + workspace, ); let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); @@ -52,48 +49,27 @@ impl Render for ThreadContextPicker { } } 
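The rules-picker hunks above change the delegate to hold the prompt store weakly (the elided generic is presumably `WeakEntity<PromptStore>`, given the field name). Every use then has to upgrade the handle first and bail gracefully if the store has been dropped, as in this sketch taken from the pattern above:

    // Upgrade-or-bail before searching; the delegate no longer owns the store.
    let Some(prompt_store) = self.prompt_store.upgrade() else {
        // Store was dropped (e.g. the prompt library went away); finish with no matches.
        return Task::ready(());
    };
    let search_task = search_rules(query, Arc::new(AtomicBool::default()), &prompt_store, cx);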
-#[derive(Debug, Clone)] -pub enum ThreadContextEntry { - Thread { - id: ThreadId, - title: SharedString, - }, - Context { - path: Arc, - title: SharedString, - }, -} - -impl ThreadContextEntry { - pub fn title(&self) -> &SharedString { - match self { - Self::Thread { title, .. } => title, - Self::Context { title, .. } => title, - } - } -} - pub struct ThreadContextPickerDelegate { - thread_store: WeakEntity, - text_thread_store: WeakEntity, + thread_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, - matches: Vec, + workspace: WeakEntity, + matches: Vec, selected_index: usize, } impl ThreadContextPickerDelegate { pub fn new( - thread_store: WeakEntity, - text_thread_store: WeakEntity, + thread_store: WeakEntity, context_picker: WeakEntity, context_store: WeakEntity, + workspace: WeakEntity, ) -> Self { ThreadContextPickerDelegate { thread_store, context_picker, context_store, - text_thread_store, + workspace, matches: Vec::new(), selected_index: 0, } @@ -130,25 +106,15 @@ impl PickerDelegate for ThreadContextPickerDelegate { window: &mut Window, cx: &mut Context>, ) -> Task<()> { - let Some((thread_store, text_thread_context_store)) = self - .thread_store - .upgrade() - .zip(self.text_thread_store.upgrade()) - else { + let Some(thread_store) = self.thread_store.upgrade() else { return Task::ready(()); }; - let search_task = search_threads( - query, - Arc::new(AtomicBool::default()), - thread_store, - text_thread_context_store, - cx, - ); + let search_task = search_threads(query, Arc::new(AtomicBool::default()), &thread_store, cx); cx.spawn_in(window, async move |this, cx| { let matches = search_task.await; this.update(cx, |this, cx| { - this.delegate.matches = matches.into_iter().map(|mat| mat.thread).collect(); + this.delegate.matches = matches; this.delegate.selected_index = 0; cx.notify(); }) @@ -156,21 +122,29 @@ impl PickerDelegate for ThreadContextPickerDelegate { }) } - fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - let Some(entry) = self.matches.get(self.selected_index) else { + fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context>) { + let Some(project) = self + .workspace + .upgrade() + .map(|w| w.read(cx).project().clone()) + else { + return; + }; + let Some((entry, thread_store)) = self + .matches + .get(self.selected_index) + .zip(self.thread_store.upgrade()) + else { return; }; match entry { - ThreadContextEntry::Thread { id, .. } => { - let Some(thread_store) = self.thread_store.upgrade() else { - return; - }; - let open_thread_task = - thread_store.update(cx, |this, cx| this.open_thread(id, window, cx)); + HistoryEntry::AcpThread(thread) => { + let load_thread_task = + agent::load_agent_thread(thread.id.clone(), thread_store, project, cx); cx.spawn(async move |this, cx| { - let thread = open_thread_task.await?; + let thread = load_thread_task.await?; this.update(cx, |this, cx| { this.delegate .context_store @@ -182,12 +156,10 @@ impl PickerDelegate for ThreadContextPickerDelegate { }) .detach_and_log_err(cx); } - ThreadContextEntry::Context { path, .. 
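The hunks below drop the picker-local ThreadContextEntry/ThreadMatch types and operate on `agent::HistoryEntry` values from the shared HistoryStore, fuzzy-matched by title. A compact sketch of that matching step, under the assumptions that `HistoryEntry` is `Clone` and `title()` yields string-like text (both implied by the hunks), with imports as in the file below:

    // Filter history entries by fuzzy title match; empty query returns everything.
    async fn filter_entries(
        entries: Vec<HistoryEntry>,
        query: String,
        cancel: Arc<AtomicBool>,
        executor: BackgroundExecutor,
    ) -> Vec<HistoryEntry> {
        if query.is_empty() {
            return entries; // already in reverse-chronological order
        }
        let candidates: Vec<_> = entries
            .iter()
            .enumerate()
            .map(|(id, entry)| StringMatchCandidate::new(id, entry.title()))
            .collect();
        let matches =
            fuzzy::match_strings(&candidates, &query, false, true, 100, &cancel, executor).await;
        matches
            .into_iter()
            .map(|m| entries[m.candidate_id].clone())
            .collect()
    }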
} => { - let Some(text_thread_store) = self.text_thread_store.upgrade() else { - return; - }; - let task = text_thread_store - .update(cx, |this, cx| this.open_local_context(path.clone(), cx)); + HistoryEntry::TextThread(thread) => { + let task = thread_store.update(cx, |this, cx| { + this.load_text_thread(thread.path.clone(), cx) + }); cx.spawn(async move |this, cx| { let thread = task.await?; @@ -229,17 +201,17 @@ impl PickerDelegate for ThreadContextPickerDelegate { } pub fn render_thread_context_entry( - entry: &ThreadContextEntry, + entry: &HistoryEntry, context_store: WeakEntity, cx: &mut App, ) -> Div { let is_added = match entry { - ThreadContextEntry::Thread { id, .. } => context_store + HistoryEntry::AcpThread(thread) => context_store .upgrade() - .is_some_and(|ctx_store| ctx_store.read(cx).includes_thread(id)), - ThreadContextEntry::Context { path, .. } => context_store + .is_some_and(|ctx_store| ctx_store.read(cx).includes_thread(&thread.id)), + HistoryEntry::TextThread(thread) => context_store .upgrade() - .is_some_and(|ctx_store| ctx_store.read(cx).includes_text_thread(path)), + .is_some_and(|ctx_store| ctx_store.read(cx).includes_text_thread(&thread.path)), }; h_flex() @@ -271,91 +243,38 @@ pub fn render_thread_context_entry( }) } -#[derive(Clone)] -pub struct ThreadMatch { - pub thread: ThreadContextEntry, - pub is_recent: bool, -} - -pub fn unordered_thread_entries( - thread_store: Entity, - text_thread_store: Entity, - cx: &App, -) -> impl Iterator, ThreadContextEntry)> { - let threads = thread_store - .read(cx) - .reverse_chronological_threads() - .map(|thread| { - ( - thread.updated_at, - ThreadContextEntry::Thread { - id: thread.id.clone(), - title: thread.summary.clone(), - }, - ) - }); - - let text_threads = text_thread_store - .read(cx) - .unordered_contexts() - .map(|context| { - ( - context.mtime.to_utc(), - ThreadContextEntry::Context { - path: context.path.clone(), - title: context.title.clone(), - }, - ) - }); - - threads.chain(text_threads) -} - pub(crate) fn search_threads( query: String, cancellation_flag: Arc, - thread_store: Entity, - text_thread_store: Entity, + thread_store: &Entity, cx: &mut App, -) -> Task> { - let mut threads = - unordered_thread_entries(thread_store, text_thread_store, cx).collect::>(); - threads.sort_unstable_by_key(|(updated_at, _)| std::cmp::Reverse(*updated_at)); +) -> Task> { + let threads = thread_store.read(cx).entries().collect(); + if query.is_empty() { + return Task::ready(threads); + } let executor = cx.background_executor().clone(); cx.background_spawn(async move { - if query.is_empty() { - threads - .into_iter() - .map(|(_, thread)| ThreadMatch { - thread, - is_recent: false, - }) - .collect() - } else { - let candidates = threads - .iter() - .enumerate() - .map(|(id, (_, thread))| StringMatchCandidate::new(id, thread.title())) - .collect::>(); - let matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - 100, - &cancellation_flag, - executor, - ) - .await; + let candidates = threads + .iter() + .enumerate() + .map(|(id, thread)| StringMatchCandidate::new(id, thread.title())) + .collect::>(); + let matches = fuzzy::match_strings( + &candidates, + &query, + false, + true, + 100, + &cancellation_flag, + executor, + ) + .await; - matches - .into_iter() - .map(|mat| ThreadMatch { - thread: threads[mat.candidate_id].1.clone(), - is_recent: false, - }) - .collect() - } + matches + .into_iter() + .map(|mat| threads[mat.candidate_id].clone()) + .collect() }) } diff --git a/crates/agent/src/context_store.rs 
b/crates/agent_ui/src/context_store.rs similarity index 85% rename from crates/agent/src/context_store.rs rename to crates/agent_ui/src/context_store.rs index cf35840cc4215695a966931701257c838c00af18..18aa59c8f716d59e4a0d717904b09472494c4dbc 100644 --- a/crates/agent/src/context_store.rs +++ b/crates/agent_ui/src/context_store.rs @@ -1,14 +1,11 @@ -use crate::{ - context::{ - AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle, - FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle, - SelectionContextHandle, SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, - }, - thread::{MessageId, Thread, ThreadId}, - thread_store::ThreadStore, +use crate::context::{ + AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle, + FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle, SelectionContextHandle, + SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, }; +use agent_client_protocol as acp; use anyhow::{Context as _, Result, anyhow}; -use assistant_context::AssistantContext; +use assistant_text_thread::TextThread; use collections::{HashSet, IndexSet}; use futures::{self, FutureExt}; use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity}; @@ -29,10 +26,9 @@ use text::{Anchor, OffsetRangeExt}; pub struct ContextStore { project: WeakEntity, - thread_store: Option>, next_context_id: ContextId, context_set: IndexSet, - context_thread_ids: HashSet, + context_thread_ids: HashSet, context_text_thread_paths: HashSet>, } @@ -43,13 +39,9 @@ pub enum ContextStoreEvent { impl EventEmitter for ContextStore {} impl ContextStore { - pub fn new( - project: WeakEntity, - thread_store: Option>, - ) -> Self { + pub fn new(project: WeakEntity) -> Self { Self { project, - thread_store, next_context_id: ContextId::zero(), context_set: IndexSet::default(), context_thread_ids: HashSet::default(), @@ -67,29 +59,6 @@ impl ContextStore { cx.notify(); } - pub fn new_context_for_thread( - &self, - thread: &Thread, - exclude_messages_from_id: Option, - ) -> Vec { - let existing_context = thread - .messages() - .take_while(|message| exclude_messages_from_id.is_none_or(|id| message.id != id)) - .flat_map(|message| { - message - .loaded_context - .contexts - .iter() - .map(|context| AgentContextKey(context.handle())) - }) - .collect::>(); - self.context_set - .iter() - .filter(|context| !existing_context.contains(context)) - .map(|entry| entry.0.clone()) - .collect::>() - } - pub fn add_file_from_path( &mut self, project_path: ProjectPath, @@ -209,7 +178,7 @@ impl ContextStore { pub fn add_thread( &mut self, - thread: Entity, + thread: Entity, remove_if_exists: bool, cx: &mut Context, ) -> Option { @@ -231,13 +200,13 @@ impl ContextStore { pub fn add_text_thread( &mut self, - context: Entity, + text_thread: Entity, remove_if_exists: bool, cx: &mut Context, ) -> Option { let context_id = self.next_context_id.post_inc(); let context = AgentContextHandle::TextThread(TextThreadContextHandle { - context, + text_thread, context_id, }); @@ -384,21 +353,15 @@ impl ContextStore { ); }; } - SuggestedContext::Thread { thread, name: _ } => { - if let Some(thread) = thread.upgrade() { - let context_id = self.next_context_id.post_inc(); - self.insert_context( - AgentContextHandle::Thread(ThreadContextHandle { thread, context_id }), - cx, - ); - } - } - SuggestedContext::TextThread { context, name: _ } => { - if let Some(context) = context.upgrade() { + SuggestedContext::TextThread { + 
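With the move into agent_ui, ContextStore is constructed from just a project handle and keys referenced threads by `acp::SessionId` rather than the old agent ThreadId. A short sketch of the new call sites, mirroring the hunks in this file; `session_id` here is a hypothetical id taken from the thread being added:

    // The store only keeps a weak handle to the project.
    let context_store = cx.new(|_cx| ContextStore::new(project.downgrade()));
    // Thread membership is now tracked by agent_client_protocol session id.
    let already_added = context_store.read(cx).includes_thread(&session_id);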
text_thread, + name: _, + } => { + if let Some(text_thread) = text_thread.upgrade() { let context_id = self.next_context_id.post_inc(); self.insert_context( AgentContextHandle::TextThread(TextThreadContextHandle { - context, + text_thread, context_id, }), cx, @@ -410,20 +373,20 @@ impl ContextStore { fn insert_context(&mut self, context: AgentContextHandle, cx: &mut Context) -> bool { match &context { - AgentContextHandle::Thread(thread_context) => { - if let Some(thread_store) = self.thread_store.clone() { - thread_context.thread.update(cx, |thread, cx| { - thread.start_generating_detailed_summary_if_needed(thread_store, cx); - }); - self.context_thread_ids - .insert(thread_context.thread.read(cx).id().clone()); - } else { - return false; - } - } + // AgentContextHandle::Thread(thread_context) => { + // if let Some(thread_store) = self.thread_store.clone() { + // thread_context.thread.update(cx, |thread, cx| { + // thread.start_generating_detailed_summary_if_needed(thread_store, cx); + // }); + // self.context_thread_ids + // .insert(thread_context.thread.read(cx).id().clone()); + // } else { + // return false; + // } + // } AgentContextHandle::TextThread(text_thread_context) => { self.context_text_thread_paths - .extend(text_thread_context.context.read(cx).path().cloned()); + .extend(text_thread_context.text_thread.read(cx).path().cloned()); } _ => {} } @@ -445,7 +408,7 @@ impl ContextStore { .remove(thread_context.thread.read(cx).id()); } AgentContextHandle::TextThread(text_thread_context) => { - if let Some(path) = text_thread_context.context.read(cx).path() { + if let Some(path) = text_thread_context.text_thread.read(cx).path() { self.context_text_thread_paths.remove(path); } } @@ -514,7 +477,7 @@ impl ContextStore { }) } - pub fn includes_thread(&self, thread_id: &ThreadId) -> bool { + pub fn includes_thread(&self, thread_id: &acp::SessionId) -> bool { self.context_thread_ids.contains(thread_id) } @@ -547,9 +510,9 @@ impl ContextStore { } AgentContextHandle::Directory(_) | AgentContextHandle::Symbol(_) + | AgentContextHandle::Thread(_) | AgentContextHandle::Selection(_) | AgentContextHandle::FetchedUrl(_) - | AgentContextHandle::Thread(_) | AgentContextHandle::TextThread(_) | AgentContextHandle::Rules(_) | AgentContextHandle::Image(_) => None, @@ -557,7 +520,7 @@ impl ContextStore { .collect() } - pub fn thread_ids(&self) -> &HashSet { + pub fn thread_ids(&self) -> &HashSet { &self.context_thread_ids } } @@ -569,13 +532,9 @@ pub enum SuggestedContext { icon_path: Option, buffer: WeakEntity, }, - Thread { - name: SharedString, - thread: WeakEntity, - }, TextThread { name: SharedString, - context: WeakEntity, + text_thread: WeakEntity, }, } @@ -583,7 +542,6 @@ impl SuggestedContext { pub fn name(&self) -> &SharedString { match self { Self::File { name, .. } => name, - Self::Thread { name, .. } => name, Self::TextThread { name, .. } => name, } } @@ -591,7 +549,6 @@ impl SuggestedContext { pub fn icon_path(&self) -> Option { match self { Self::File { icon_path, .. } => icon_path.clone(), - Self::Thread { .. } => None, Self::TextThread { .. } => None, } } @@ -599,7 +556,6 @@ impl SuggestedContext { pub fn kind(&self) -> ContextKind { match self { Self::File { .. } => ContextKind::File, - Self::Thread { .. } => ContextKind::Thread, Self::TextThread { .. 
} => ContextKind::TextThread, } } diff --git a/crates/agent_ui/src/context_strip.rs b/crates/agent_ui/src/context_strip.rs index b75b933de40f19557d9dfa83c874c3427773445b..d2393ac4f612cebc6cf97d10a38894e7022e53b9 100644 --- a/crates/agent_ui/src/context_strip.rs +++ b/crates/agent_ui/src/context_strip.rs @@ -4,12 +4,11 @@ use crate::{ context_picker::ContextPicker, ui::{AddedContext, ContextPill}, }; -use agent::context_store::SuggestedContext; -use agent::{ +use crate::{ context::AgentContextHandle, - context_store::ContextStore, - thread_store::{TextThreadStore, ThreadStore}, + context_store::{ContextStore, SuggestedContext}, }; +use agent::HistoryStore; use collections::HashSet; use editor::Editor; use gpui::{ @@ -18,6 +17,7 @@ use gpui::{ }; use itertools::Itertools; use project::ProjectItem; +use prompt_store::PromptStore; use rope::Point; use std::rc::Rc; use text::ToPoint as _; @@ -33,7 +33,7 @@ pub struct ContextStrip { focus_handle: FocusHandle, suggest_context_kind: SuggestContextKind, workspace: WeakEntity, - thread_store: Option>, + prompt_store: Option>, _subscriptions: Vec, focused_index: Option, children_bounds: Option>>, @@ -44,8 +44,8 @@ impl ContextStrip { pub fn new( context_store: Entity, workspace: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, context_picker_menu_handle: PopoverMenuHandle, suggest_context_kind: SuggestContextKind, model_usage_context: ModelUsageContext, @@ -56,7 +56,7 @@ impl ContextStrip { ContextPicker::new( workspace.clone(), thread_store.clone(), - text_thread_store, + prompt_store.clone(), context_store.downgrade(), window, cx, @@ -79,7 +79,7 @@ impl ContextStrip { focus_handle, suggest_context_kind, workspace, - thread_store, + prompt_store, _subscriptions: subscriptions, focused_index: None, children_bounds: None, @@ -96,11 +96,7 @@ impl ContextStrip { fn added_contexts(&self, cx: &App) -> Vec { if let Some(workspace) = self.workspace.upgrade() { let project = workspace.read(cx).project().read(cx); - let prompt_store = self - .thread_store - .as_ref() - .and_then(|thread_store| thread_store.upgrade()) - .and_then(|thread_store| thread_store.read(cx).prompt_store().as_ref()); + let prompt_store = self.prompt_store.as_ref().and_then(|p| p.upgrade()); let current_model = self.model_usage_context.language_model(cx); @@ -110,7 +106,7 @@ impl ContextStrip { .flat_map(|context| { AddedContext::new_pending( context.clone(), - prompt_store, + prompt_store.as_ref(), project, current_model.as_ref(), cx, @@ -136,19 +132,19 @@ impl ContextStrip { let workspace = self.workspace.upgrade()?; let panel = workspace.read(cx).panel::(cx)?.read(cx); - if let Some(active_context_editor) = panel.active_context_editor() { - let context = active_context_editor.read(cx).context(); - let weak_context = context.downgrade(); - let context = context.read(cx); - let path = context.path()?; + if let Some(active_text_thread_editor) = panel.active_text_thread_editor() { + let text_thread = active_text_thread_editor.read(cx).text_thread(); + let weak_text_thread = text_thread.downgrade(); + let text_thread = text_thread.read(cx); + let path = text_thread.path()?; if self.context_store.read(cx).includes_text_thread(path) { return None; } Some(SuggestedContext::TextThread { - name: context.summary().or_default(), - context: weak_context, + name: text_thread.summary().or_default(), + text_thread: weak_text_thread, }) } else { None @@ -336,10 +332,10 @@ impl ContextStrip { 
AgentContextHandle::TextThread(text_thread_context) => { workspace.update(cx, |workspace, cx| { if let Some(panel) = workspace.panel::(cx) { - let context = text_thread_context.context.clone(); + let context = text_thread_context.text_thread.clone(); window.defer(cx, move |window, cx| { panel.update(cx, |panel, cx| { - panel.open_prompt_editor(context, window, cx) + panel.open_text_thread(context, window, cx) }); }); } @@ -487,12 +483,11 @@ impl Render for ContextStrip { .style(ui::ButtonStyle::Filled), { let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Add Context", &ToggleContextPicker, &focus_handle, - window, cx, ) } @@ -562,12 +557,11 @@ impl Render for ContextStrip { .icon_size(IconSize::Small) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Remove All Context", &RemoveAllContext, &focus_handle, - window, cx, ) } diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index d24dc4ab781585e9ebabc7f19016e4da2457a873..b05dba59e6b19fa5091903882748de853cd9cb93 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -7,13 +7,11 @@ use std::sync::Arc; use crate::{ AgentPanel, buffer_codegen::{BufferCodegen, CodegenAlternative, CodegenEvent}, + context_store::ContextStore, inline_prompt_editor::{CodegenStatus, InlineAssistId, PromptEditor, PromptEditorEvent}, terminal_inline_assistant::TerminalInlineAssistant, }; -use agent::{ - context_store::ContextStore, - thread_store::{TextThreadStore, ThreadStore}, -}; +use agent::HistoryStore; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; use client::telemetry::Telemetry; @@ -209,24 +207,21 @@ impl InlineAssistant { window: &mut Window, cx: &mut App, ) { - let is_assistant2_enabled = !DisableAiSettings::get_global(cx).disable_ai; + let is_ai_enabled = !DisableAiSettings::get_global(cx).disable_ai; if let Some(editor) = item.act_as::(cx) { editor.update(cx, |editor, cx| { - if is_assistant2_enabled { + if is_ai_enabled { let panel = workspace.read(cx).panel::(cx); let thread_store = panel .as_ref() .map(|agent_panel| agent_panel.read(cx).thread_store().downgrade()); - let text_thread_store = panel - .map(|agent_panel| agent_panel.read(cx).text_thread_store().downgrade()); editor.add_code_action_provider( Rc::new(AssistantCodeActionProvider { editor: cx.entity().downgrade(), workspace: workspace.downgrade(), thread_store, - text_thread_store, }), window, cx, @@ -283,7 +278,6 @@ impl InlineAssistant { let prompt_store = agent_panel.prompt_store().as_ref().cloned(); let thread_store = Some(agent_panel.thread_store().downgrade()); - let text_thread_store = Some(agent_panel.text_thread_store().downgrade()); let context_store = agent_panel.inline_assist_context_store().clone(); let handle_assist = @@ -297,7 +291,6 @@ impl InlineAssistant { workspace.project().downgrade(), prompt_store, thread_store, - text_thread_store, action.prompt.clone(), window, cx, @@ -312,7 +305,6 @@ impl InlineAssistant { workspace.project().downgrade(), prompt_store, thread_store, - text_thread_store, action.prompt.clone(), window, cx, @@ -365,16 +357,18 @@ impl InlineAssistant { context_store: Entity, project: WeakEntity, prompt_store: Option>, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, initial_prompt: Option, window: &mut Window, cx: &mut App, ) { let (snapshot, initial_selections, newest_selection) = 
editor.update(cx, |editor, cx| { - let selections = editor.selections.all::(cx); - let newest_selection = editor.selections.newest::(cx); - (editor.snapshot(window, cx), selections, newest_selection) + let snapshot = editor.snapshot(window, cx); + let selections = editor.selections.all::(&snapshot.display_snapshot); + let newest_selection = editor + .selections + .newest::(&snapshot.display_snapshot); + (snapshot, selections, newest_selection) }); // Check if there is already an inline assistant that contains the @@ -517,7 +511,7 @@ impl InlineAssistant { context_store.clone(), workspace.clone(), thread_store.clone(), - text_thread_store.clone(), + prompt_store.as_ref().map(|s| s.downgrade()), window, cx, ) @@ -589,8 +583,7 @@ impl InlineAssistant { focus: bool, workspace: Entity, prompt_store: Option>, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, window: &mut Window, cx: &mut App, ) -> InlineAssistId { @@ -608,7 +601,7 @@ impl InlineAssistant { } let project = workspace.read(cx).project().downgrade(); - let context_store = cx.new(|_cx| ContextStore::new(project.clone(), thread_store.clone())); + let context_store = cx.new(|_cx| ContextStore::new(project.clone())); let codegen = cx.new(|cx| { BufferCodegen::new( @@ -617,7 +610,7 @@ impl InlineAssistant { initial_transaction_id, context_store.clone(), project, - prompt_store, + prompt_store.clone(), self.telemetry.clone(), self.prompt_builder.clone(), cx, @@ -636,7 +629,7 @@ impl InlineAssistant { context_store, workspace.downgrade(), thread_store, - text_thread_store, + prompt_store.map(|s| s.downgrade()), window, cx, ) @@ -808,7 +801,9 @@ impl InlineAssistant { if editor.read(cx).selections.count() == 1 { let (selection, buffer) = editor.update(cx, |editor, cx| { ( - editor.selections.newest::(cx), + editor + .selections + .newest::(&editor.display_snapshot(cx)), editor.buffer().read(cx).snapshot(cx), ) }); @@ -839,7 +834,9 @@ impl InlineAssistant { if editor.read(cx).selections.count() == 1 { let (selection, buffer) = editor.update(cx, |editor, cx| { ( - editor.selections.newest::(cx), + editor + .selections + .newest::(&editor.display_snapshot(cx)), editor.buffer().read(cx).snapshot(cx), ) }); @@ -1511,8 +1508,8 @@ impl InlineAssistant { return Some(InlineAssistTarget::Terminal(terminal_view)); } - let context_editor = agent_panel - .and_then(|panel| panel.read(cx).active_context_editor()) + let text_thread_editor = agent_panel + .and_then(|panel| panel.read(cx).active_text_thread_editor()) .and_then(|editor| { let editor = &editor.read(cx).editor().clone(); if editor.read(cx).is_focused(window) { @@ -1522,8 +1519,8 @@ impl InlineAssistant { } }); - if let Some(context_editor) = context_editor { - Some(InlineAssistTarget::Editor(context_editor)) + if let Some(text_thread_editor) = text_thread_editor { + Some(InlineAssistTarget::Editor(text_thread_editor)) } else if let Some(workspace_editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) @@ -1773,8 +1770,7 @@ struct InlineAssistDecorations { struct AssistantCodeActionProvider { editor: WeakEntity, workspace: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, } const ASSISTANT_CODE_ACTION_PROVIDER_ID: &str = "assistant2"; @@ -1846,7 +1842,6 @@ impl CodeActionProvider for AssistantCodeActionProvider { let editor = self.editor.clone(); let workspace = self.workspace.clone(); let thread_store = self.thread_store.clone(); - let text_thread_store = self.text_thread_store.clone(); let prompt_store = 
PromptStore::global(cx); window.spawn(cx, async move |cx| { let workspace = workspace.upgrade().context("workspace was released")?; @@ -1878,12 +1873,7 @@ impl CodeActionProvider for AssistantCodeActionProvider { } let multibuffer_snapshot = multibuffer.read(cx); - Some( - multibuffer_snapshot - .anchor_in_excerpt(excerpt_id, action.range.start)? - ..multibuffer_snapshot - .anchor_in_excerpt(excerpt_id, action.range.end)?, - ) + multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range) }) })? .context("invalid range")?; @@ -1899,7 +1889,6 @@ impl CodeActionProvider for AssistantCodeActionProvider { workspace, prompt_store, thread_store, - text_thread_store, window, cx, ); diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index f6347dcb6b80c1b5c939a5c4cd650b9fadf92c62..89bfd50e37e8ea681e70fadd78cbbd047f7258cb 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -1,7 +1,5 @@ -use agent::{ - context_store::ContextStore, - thread_store::{TextThreadStore, ThreadStore}, -}; +use crate::context_store::ContextStore; +use agent::HistoryStore; use collections::VecDeque; use editor::actions::Paste; use editor::display_map::EditorMargins; @@ -16,6 +14,7 @@ use gpui::{ }; use language_model::{LanguageModel, LanguageModelRegistry}; use parking_lot::Mutex; +use prompt_store::PromptStore; use settings::Settings; use std::cmp; use std::rc::Rc; @@ -469,12 +468,11 @@ impl PromptEditor { IconButton::new("stop", IconName::Stop) .icon_color(Color::Error) .shape(IconButtonShape::Square) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( mode.tooltip_interrupt(), Some(&menu::Cancel), "Changes won't be discarded", - window, cx, ) }) @@ -488,12 +486,11 @@ impl PromptEditor { IconButton::new("restart", IconName::RotateCw) .icon_color(Color::Info) .shape(IconButtonShape::Square) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( mode.tooltip_restart(), Some(&menu::Confirm), "Changes will be discarded", - window, cx, ) }) @@ -506,8 +503,8 @@ impl PromptEditor { let accept = IconButton::new("accept", IconName::Check) .icon_color(Color::Info) .shape(IconButtonShape::Square) - .tooltip(move |window, cx| { - Tooltip::for_action(mode.tooltip_accept(), &menu::Confirm, window, cx) + .tooltip(move |_window, cx| { + Tooltip::for_action(mode.tooltip_accept(), &menu::Confirm, cx) }) .on_click(cx.listener(|_, _, _, cx| { cx.emit(PromptEditorEvent::ConfirmRequested { execute: false }); @@ -520,11 +517,10 @@ impl PromptEditor { IconButton::new("confirm", IconName::PlayFilled) .icon_color(Color::Info) .shape(IconButtonShape::Square) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::for_action( "Execute Generated Command", &menu::SecondaryConfirm, - window, cx, ) }) @@ -616,13 +612,12 @@ impl PromptEditor { .shape(IconButtonShape::Square) .tooltip({ let focus_handle = self.editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { cx.new(|cx| { let mut tooltip = Tooltip::new("Previous Alternative").key_binding( KeyBinding::for_action_in( &CyclePreviousInlineAssist, &focus_handle, - window, cx, ), ); @@ -658,13 +653,12 @@ impl PromptEditor { .shape(IconButtonShape::Square) .tooltip({ let focus_handle = self.editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { cx.new(|cx| { let mut tooltip = Tooltip::new("Next Alternative").key_binding( KeyBinding::for_action_in( &CycleNextInlineAssist, &focus_handle, - window, 
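The tooltip hunks in this file and the surrounding ones track a ui::Tooltip API change: the helper constructors no longer take the Window, so the closures ignore their first argument. A sketch of the resulting shape, using the "Add Context" tooltip that appears verbatim in the hunks above:

    .tooltip({
        let focus_handle = focus_handle.clone();
        move |_window, cx| {
            // The window parameter is unused now that for_action_in only needs cx.
            Tooltip::for_action_in("Add Context", &ToggleContextPicker, &focus_handle, cx)
        }
    })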
cx, ), ); @@ -777,8 +771,8 @@ impl PromptEditor { fs: Arc, context_store: Entity, workspace: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, window: &mut Window, cx: &mut Context>, ) -> PromptEditor { @@ -823,7 +817,7 @@ impl PromptEditor { workspace.clone(), context_store.downgrade(), thread_store.clone(), - text_thread_store.clone(), + prompt_store.clone(), prompt_editor_entity, codegen_buffer.as_ref().map(Entity::downgrade), )))); @@ -837,7 +831,7 @@ impl PromptEditor { context_store.clone(), workspace.clone(), thread_store.clone(), - text_thread_store.clone(), + prompt_store, context_picker_menu_handle.clone(), SuggestContextKind::Thread, ModelUsageContext::InlineAssistant, @@ -949,8 +943,8 @@ impl PromptEditor { fs: Arc, context_store: Entity, workspace: WeakEntity, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, + prompt_store: Option>, window: &mut Window, cx: &mut Context, ) -> Self { @@ -988,7 +982,7 @@ impl PromptEditor { workspace.clone(), context_store.downgrade(), thread_store.clone(), - text_thread_store.clone(), + prompt_store.clone(), prompt_editor_entity, None, )))); @@ -1002,7 +996,7 @@ impl PromptEditor { context_store.clone(), workspace.clone(), thread_store.clone(), - text_thread_store.clone(), + prompt_store.clone(), context_picker_menu_handle.clone(), SuggestContextKind::Thread, ModelUsageContext::InlineAssistant, diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index a1311f39233c7eaaf0b416401676fb2e43e51a26..42607833e4b5734424988d1edaa32d10bec06506 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -1,31 +1,25 @@ -use agent::{context::AgentContextKey, context_store::ContextStoreEvent}; -use agent_settings::AgentProfileId; +use std::ops::Range; + use collections::HashMap; use editor::display_map::CreaseId; use editor::{Addon, AnchorRangeExt, Editor}; -use gpui::{App, Entity, Subscription}; +use gpui::{Entity, Subscription}; use ui::prelude::*; -use crate::context_picker::crease_for_mention; -use crate::profile_selector::ProfileProvider; -use agent::{MessageCrease, Thread, context_store::ContextStore}; - -impl ProfileProvider for Entity { - fn profiles_supported(&self, cx: &App) -> bool { - self.read(cx) - .configured_model() - .is_some_and(|model| model.model.supports_tools()) - } - - fn profile_id(&self, cx: &App) -> AgentProfileId { - self.read(cx).profile().id().clone() - } +use crate::{ + context::{AgentContextHandle, AgentContextKey}, + context_picker::crease_for_mention, + context_store::{ContextStore, ContextStoreEvent}, +}; - fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) { - self.update(cx, |this, cx| { - this.set_profile(profile_id, cx); - }); - } +/// Stored information that can be used to resurrect a context crease when creating an editor for a past message. +#[derive(Clone, Debug)] +pub struct MessageCrease { + pub range: Range, + pub icon_path: SharedString, + pub label: SharedString, + /// None for a deserialized message, Some otherwise. 
+ pub context: Option, } #[derive(Default)] diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index d52b2436d68c4b43cd7e7ec73b27007e34a43153..2f9fe19eb33667d6ca6bb2f5502fbd1c9f094e9c 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -144,10 +144,16 @@ impl Render for ProfileSelector { .unwrap_or_else(|| "Unknown".into()); let focus_handle = self.focus_handle.clone(); + let icon = if self.picker_handle.is_deployed() { + IconName::ChevronUp + } else { + IconName::ChevronDown + }; + let trigger_button = Button::new("profile-selector", selected_profile) .label_size(LabelSize::Small) .color(Color::Muted) - .icon(IconName::ChevronDown) + .icon(icon) .icon_size(IconSize::XSmall) .icon_position(IconPosition::End) .icon_color(Color::Muted) @@ -156,12 +162,11 @@ impl Render for ProfileSelector { PickerPopoverMenu::new( picker, trigger_button, - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Profile Menu", &ToggleProfileSelector, &focus_handle, - window, cx, ) }, diff --git a/crates/agent_ui/src/slash_command_picker.rs b/crates/agent_ui/src/slash_command_picker.rs index a6bb61510cbeb557e22018c73082bba17d177d7e..0c3cf37599887fe8e97dcdc67bb0bd7e28a744a7 100644 --- a/crates/agent_ui/src/slash_command_picker.rs +++ b/crates/agent_ui/src/slash_command_picker.rs @@ -155,8 +155,8 @@ impl PickerDelegate for SlashCommandDelegate { match command { SlashCommandEntry::Info(info) => { self.active_context_editor - .update(cx, |context_editor, cx| { - context_editor.insert_command(&info.name, window, cx) + .update(cx, |text_thread_editor, cx| { + text_thread_editor.insert_command(&info.name, window, cx) }) .ok(); } diff --git a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index 4385d2420511c8a148b2a7a58fa8845bd2c19a07..9e653dcce1dcf1487af9998662b57ea4f998c7de 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -1,12 +1,12 @@ -use crate::inline_prompt_editor::{ - CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId, -}; -use crate::terminal_codegen::{CLEAR_INPUT, CodegenEvent, TerminalCodegen}; -use agent::{ +use crate::{ context::load_context, context_store::ContextStore, - thread_store::{TextThreadStore, ThreadStore}, + inline_prompt_editor::{ + CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId, + }, + terminal_codegen::{CLEAR_INPUT, CodegenEvent, TerminalCodegen}, }; +use agent::HistoryStore; use agent_settings::AgentSettings; use anyhow::{Context as _, Result}; use client::telemetry::Telemetry; @@ -74,8 +74,7 @@ impl TerminalInlineAssistant { workspace: WeakEntity, project: WeakEntity, prompt_store: Option>, - thread_store: Option>, - text_thread_store: Option>, + thread_store: Option>, initial_prompt: Option, window: &mut Window, cx: &mut App, @@ -88,7 +87,7 @@ impl TerminalInlineAssistant { cx, ) }); - let context_store = cx.new(|_cx| ContextStore::new(project, thread_store.clone())); + let context_store = cx.new(|_cx| ContextStore::new(project)); let codegen = cx.new(|_| TerminalCodegen::new(terminal, self.telemetry.clone())); let prompt_editor = cx.new(|cx| { @@ -101,7 +100,7 @@ impl TerminalInlineAssistant { context_store.clone(), workspace.clone(), thread_store.clone(), - text_thread_store.clone(), + prompt_store.as_ref().map(|s| s.downgrade()), window, cx, ) @@ -282,7 +281,6 @@ impl TerminalInlineAssistant { 
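MessageCrease now lives in agent_ui's message_editor. A hedged sketch of constructing one: the field generics are elided in the diff, so byte offsets for `range` and an `AgentContextHandle` for `context` are assumptions based on the doc comment above, and `mention_text` / `context_handle` are hypothetical placeholders:

    // Field types are assumptions; see the lead-in.
    let crease = MessageCrease {
        range: 0..mention_text.len(),            // offset range of the mention in the message
        icon_path: IconName::Code.path().into(),
        label: "main.rs".into(),
        context: Some(context_handle),           // None when restored from a serialized message
    };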
context_load_task .await - .loaded_context .add_to_request_message(&mut request_message); request_message.content.push(prompt.into()); diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index e1265e923ef06b0bad945d3751dfbf24e0f1ee00..667ccb8938b892dcf59232d5cd7ea8dda04bc4b2 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -1,5 +1,4 @@ use crate::{ - QuoteSelection, language_model_selector::{LanguageModelSelector, language_model_selector}, ui::BurnModeTooltip, }; @@ -72,13 +71,13 @@ use workspace::{ pane, searchable::{SearchEvent, SearchableItem}, }; -use zed_actions::agent::ToggleModelSelector; +use zed_actions::agent::{AddSelectionToThread, ToggleModelSelector}; use crate::{slash_command::SlashCommandCompletionProvider, slash_command_picker}; -use assistant_context::{ - AssistantContext, CacheStatus, Content, ContextEvent, ContextId, InvokedSlashCommandId, - InvokedSlashCommandStatus, Message, MessageId, MessageMetadata, MessageStatus, - PendingSlashCommandStatus, ThoughtProcessOutputSection, +use assistant_text_thread::{ + CacheStatus, Content, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId, + MessageMetadata, MessageStatus, PendingSlashCommandStatus, TextThread, TextThreadEvent, + TextThreadId, ThoughtProcessOutputSection, }; actions!( @@ -127,14 +126,14 @@ pub enum ThoughtProcessStatus { } pub trait AgentPanelDelegate { - fn active_context_editor( + fn active_text_thread_editor( &self, workspace: &mut Workspace, window: &mut Window, cx: &mut Context, ) -> Option>; - fn open_saved_context( + fn open_local_text_thread( &self, workspace: &mut Workspace, path: Arc, @@ -142,10 +141,10 @@ pub trait AgentPanelDelegate { cx: &mut Context, ) -> Task>; - fn open_remote_context( + fn open_remote_text_thread( &self, workspace: &mut Workspace, - context_id: ContextId, + text_thread_id: TextThreadId, window: &mut Window, cx: &mut Context, ) -> Task>>; @@ -178,7 +177,7 @@ struct GlobalAssistantPanelDelegate(Arc); impl Global for GlobalAssistantPanelDelegate {} pub struct TextThreadEditor { - context: Entity, + text_thread: Entity, fs: Arc, slash_commands: Arc, workspace: WeakEntity, @@ -224,8 +223,8 @@ impl TextThreadEditor { .detach(); } - pub fn for_context( - context: Entity, + pub fn for_text_thread( + text_thread: Entity, fs: Arc, workspace: WeakEntity, project: Entity, @@ -234,14 +233,14 @@ impl TextThreadEditor { cx: &mut Context, ) -> Self { let completion_provider = SlashCommandCompletionProvider::new( - context.read(cx).slash_commands().clone(), + text_thread.read(cx).slash_commands().clone(), Some(cx.entity().downgrade()), Some(workspace.clone()), ); let editor = cx.new(|cx| { let mut editor = - Editor::for_buffer(context.read(cx).buffer().clone(), None, window, cx); + Editor::for_buffer(text_thread.read(cx).buffer().clone(), None, window, cx); editor.disable_scrollbars_and_minimap(window, cx); editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); editor.set_show_line_numbers(false, cx); @@ -265,18 +264,24 @@ impl TextThreadEditor { }); let _subscriptions = vec![ - cx.observe(&context, |_, _, cx| cx.notify()), - cx.subscribe_in(&context, window, Self::handle_context_event), + cx.observe(&text_thread, |_, _, cx| cx.notify()), + cx.subscribe_in(&text_thread, window, Self::handle_text_thread_event), cx.subscribe_in(&editor, window, Self::handle_editor_event), cx.subscribe_in(&editor, window, Self::handle_editor_search_event), cx.observe_global_in::(window, 
Self::settings_changed), ]; - let slash_command_sections = context.read(cx).slash_command_output_sections().to_vec(); - let thought_process_sections = context.read(cx).thought_process_output_sections().to_vec(); - let slash_commands = context.read(cx).slash_commands().clone(); + let slash_command_sections = text_thread + .read(cx) + .slash_command_output_sections() + .to_vec(); + let thought_process_sections = text_thread + .read(cx) + .thought_process_output_sections() + .to_vec(); + let slash_commands = text_thread.read(cx).slash_commands().clone(); let mut this = Self { - context, + text_thread, slash_commands, editor, lsp_adapter_delegate, @@ -338,8 +343,8 @@ impl TextThreadEditor { }); } - pub fn context(&self) -> &Entity { - &self.context + pub fn text_thread(&self) -> &Entity { + &self.text_thread } pub fn editor(&self) -> &Entity { @@ -351,9 +356,9 @@ impl TextThreadEditor { self.editor.update(cx, |editor, cx| { editor.insert(&format!("/{command_name}\n\n"), window, cx) }); - let command = self.context.update(cx, |context, cx| { - context.reparse(cx); - context.parsed_slash_commands()[0].clone() + let command = self.text_thread.update(cx, |text_thread, cx| { + text_thread.reparse(cx); + text_thread.parsed_slash_commands()[0].clone() }); self.run_command( command.source_range, @@ -376,11 +381,14 @@ impl TextThreadEditor { fn send_to_model(&mut self, window: &mut Window, cx: &mut Context) { self.last_error = None; - if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) { + if let Some(user_message) = self + .text_thread + .update(cx, |text_thread, cx| text_thread.assist(cx)) + { let new_selection = { let cursor = user_message .start - .to_offset(self.context.read(cx).buffer().read(cx)); + .to_offset(self.text_thread.read(cx).buffer().read(cx)); cursor..cursor }; self.editor.update(cx, |editor, cx| { @@ -404,8 +412,8 @@ impl TextThreadEditor { self.last_error = None; if self - .context - .update(cx, |context, cx| context.cancel_last_assist(cx)) + .text_thread + .update(cx, |text_thread, cx| text_thread.cancel_last_assist(cx)) { return; } @@ -420,20 +428,20 @@ impl TextThreadEditor { cx: &mut Context, ) { let cursors = self.cursors(cx); - self.context.update(cx, |context, cx| { - let messages = context + self.text_thread.update(cx, |text_thread, cx| { + let messages = text_thread .messages_for_offsets(cursors, cx) .into_iter() .map(|message| message.id) .collect(); - context.cycle_message_roles(messages, cx) + text_thread.cycle_message_roles(messages, cx) }); } fn cursors(&self, cx: &mut App) -> Vec { - let selections = self - .editor - .update(cx, |editor, cx| editor.selections.all::(cx)); + let selections = self.editor.update(cx, |editor, cx| { + editor.selections.all::(&editor.display_snapshot(cx)) + }); selections .into_iter() .map(|selection| selection.head()) @@ -446,7 +454,10 @@ impl TextThreadEditor { editor.transact(window, cx, |editor, window, cx| { editor.change_selections(Default::default(), window, cx, |s| s.try_cancel()); let snapshot = editor.buffer().read(cx).snapshot(cx); - let newest_cursor = editor.selections.newest::(cx).head(); + let newest_cursor = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); if newest_cursor.column > 0 || snapshot .chars_at(newest_cursor) @@ -489,11 +500,11 @@ impl TextThreadEditor { let selections = self.editor.read(cx).selections.disjoint_anchors_arc(); let mut commands_by_range = HashMap::default(); let workspace = self.workspace.clone(); - self.context.update(cx, |context, cx| { - 
context.reparse(cx); + self.text_thread.update(cx, |text_thread, cx| { + text_thread.reparse(cx); for selection in selections.iter() { if let Some(command) = - context.pending_command_for_position(selection.head().text_anchor, cx) + text_thread.pending_command_for_position(selection.head().text_anchor, cx) { commands_by_range .entry(command.source_range.clone()) @@ -531,14 +542,14 @@ impl TextThreadEditor { cx: &mut Context, ) { if let Some(command) = self.slash_commands.command(name, cx) { - let context = self.context.read(cx); - let sections = context + let text_thread = self.text_thread.read(cx); + let sections = text_thread .slash_command_output_sections() .iter() - .filter(|section| section.is_valid(context.buffer().read(cx))) + .filter(|section| section.is_valid(text_thread.buffer().read(cx))) .cloned() .collect::>(); - let snapshot = context.buffer().read(cx).snapshot(); + let snapshot = text_thread.buffer().read(cx).snapshot(); let output = command.run( arguments, §ions, @@ -548,8 +559,8 @@ impl TextThreadEditor { window, cx, ); - self.context.update(cx, |context, cx| { - context.insert_command_output( + self.text_thread.update(cx, |text_thread, cx| { + text_thread.insert_command_output( command_range, name, output, @@ -560,32 +571,32 @@ impl TextThreadEditor { } } - fn handle_context_event( + fn handle_text_thread_event( &mut self, - _: &Entity, - event: &ContextEvent, + _: &Entity, + event: &TextThreadEvent, window: &mut Window, cx: &mut Context, ) { - let context_editor = cx.entity().downgrade(); + let text_thread_editor = cx.entity().downgrade(); match event { - ContextEvent::MessagesEdited => { + TextThreadEvent::MessagesEdited => { self.update_message_headers(cx); self.update_image_blocks(cx); - self.context.update(cx, |context, cx| { - context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); + self.text_thread.update(cx, |text_thread, cx| { + text_thread.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); }); } - ContextEvent::SummaryChanged => { + TextThreadEvent::SummaryChanged => { cx.emit(EditorEvent::TitleChanged); - self.context.update(cx, |context, cx| { - context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); + self.text_thread.update(cx, |text_thread, cx| { + text_thread.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); }); } - ContextEvent::SummaryGenerated => {} - ContextEvent::PathChanged { .. } => {} - ContextEvent::StartedThoughtProcess(range) => { + TextThreadEvent::SummaryGenerated => {} + TextThreadEvent::PathChanged { .. 
} => {} + TextThreadEvent::StartedThoughtProcess(range) => { let creases = self.insert_thought_process_output_sections( [( ThoughtProcessOutputSection { @@ -598,14 +609,12 @@ impl TextThreadEditor { ); self.pending_thought_process = Some((creases[0], range.start)); } - ContextEvent::EndedThoughtProcess(end) => { + TextThreadEvent::EndedThoughtProcess(end) => { if let Some((crease_id, start)) = self.pending_thought_process.take() { self.editor.update(cx, |editor, cx| { let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let (excerpt_id, _, _) = multi_buffer_snapshot.as_singleton().unwrap(); - let start_anchor = multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, start) - .unwrap(); + let start_anchor = + multi_buffer_snapshot.as_singleton_anchor(start).unwrap(); editor.display_map.update(cx, |display_map, cx| { display_map.unfold_intersecting( @@ -626,7 +635,7 @@ impl TextThreadEditor { ); } } - ContextEvent::StreamedCompletion => { + TextThreadEvent::StreamedCompletion => { self.editor.update(cx, |editor, cx| { if let Some(scroll_position) = self.scroll_position { let snapshot = editor.snapshot(window, cx); @@ -641,7 +650,7 @@ impl TextThreadEditor { } }); } - ContextEvent::ParsedSlashCommandsUpdated { removed, updated } => { + TextThreadEvent::ParsedSlashCommandsUpdated { removed, updated } => { self.editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx).snapshot(cx); let (&excerpt_id, _, _) = buffer.as_singleton().unwrap(); @@ -657,12 +666,12 @@ impl TextThreadEditor { updated.iter().map(|command| { let workspace = self.workspace.clone(); let confirm_command = Arc::new({ - let context_editor = context_editor.clone(); + let text_thread_editor = text_thread_editor.clone(); let command = command.clone(); move |window: &mut Window, cx: &mut App| { - context_editor - .update(cx, |context_editor, cx| { - context_editor.run_command( + text_thread_editor + .update(cx, |text_thread_editor, cx| { + text_thread_editor.run_command( command.source_range.clone(), &command.name, &command.arguments, @@ -696,13 +705,10 @@ impl TextThreadEditor { } }; - let start = buffer - .anchor_in_excerpt(excerpt_id, command.source_range.start) - .unwrap(); - let end = buffer - .anchor_in_excerpt(excerpt_id, command.source_range.end) + let range = buffer + .anchor_range_in_excerpt(excerpt_id, command.source_range.clone()) .unwrap(); - Crease::inline(start..end, placeholder, render_toggle, render_trailer) + Crease::inline(range, placeholder, render_toggle, render_trailer) }), cx, ); @@ -715,17 +721,17 @@ impl TextThreadEditor { ); }) } - ContextEvent::InvokedSlashCommandChanged { command_id } => { + TextThreadEvent::InvokedSlashCommandChanged { command_id } => { self.update_invoked_slash_command(*command_id, window, cx); } - ContextEvent::SlashCommandOutputSectionAdded { section } => { + TextThreadEvent::SlashCommandOutputSectionAdded { section } => { self.insert_slash_command_output_sections([section.clone()], false, window, cx); } - ContextEvent::Operation(_) => {} - ContextEvent::ShowAssistError(error_message) => { + TextThreadEvent::Operation(_) => {} + TextThreadEvent::ShowAssistError(error_message) => { self.last_error = Some(AssistError::Message(error_message.clone())); } - ContextEvent::ShowPaymentRequiredError => { + TextThreadEvent::ShowPaymentRequiredError => { self.last_error = Some(AssistError::PaymentRequired); } } @@ -738,14 +744,14 @@ impl TextThreadEditor { cx: &mut Context, ) { if let Some(invoked_slash_command) = - 
self.context.read(cx).invoked_slash_command(&command_id) + self.text_thread.read(cx).invoked_slash_command(&command_id) && let InvokedSlashCommandStatus::Finished = invoked_slash_command.status { let run_commands_in_ranges = invoked_slash_command.run_commands_in_ranges.clone(); for range in run_commands_in_ranges { - let commands = self.context.update(cx, |context, cx| { - context.reparse(cx); - context + let commands = self.text_thread.update(cx, |text_thread, cx| { + text_thread.reparse(cx); + text_thread .pending_commands_for_range(range.clone(), cx) .to_vec() }); @@ -766,21 +772,18 @@ impl TextThreadEditor { self.editor.update(cx, |editor, cx| { if let Some(invoked_slash_command) = - self.context.read(cx).invoked_slash_command(&command_id) + self.text_thread.read(cx).invoked_slash_command(&command_id) { if let InvokedSlashCommandStatus::Finished = invoked_slash_command.status { let buffer = editor.buffer().read(cx).snapshot(cx); let (&excerpt_id, _buffer_id, _buffer_snapshot) = buffer.as_singleton().unwrap(); - let start = buffer - .anchor_in_excerpt(excerpt_id, invoked_slash_command.range.start) - .unwrap(); - let end = buffer - .anchor_in_excerpt(excerpt_id, invoked_slash_command.range.end) + let range = buffer + .anchor_range_in_excerpt(excerpt_id, invoked_slash_command.range.clone()) .unwrap(); editor.remove_folds_with_type( - &[start..end], + &[range], TypeId::of::(), false, cx, @@ -796,15 +799,12 @@ impl TextThreadEditor { let buffer = editor.buffer().read(cx).snapshot(cx); let (&excerpt_id, _buffer_id, _buffer_snapshot) = buffer.as_singleton().unwrap(); - let context = self.context.downgrade(); - let crease_start = buffer - .anchor_in_excerpt(excerpt_id, invoked_slash_command.range.start) - .unwrap(); - let crease_end = buffer - .anchor_in_excerpt(excerpt_id, invoked_slash_command.range.end) + let context = self.text_thread.downgrade(); + let range = buffer + .anchor_range_in_excerpt(excerpt_id, invoked_slash_command.range.clone()) .unwrap(); let crease = Crease::inline( - crease_start..crease_end, + range, invoked_slash_command_fold_placeholder(command_id, context), fold_toggle("invoked-slash-command"), |_row, _folded, _window, _cx| Empty.into_any(), @@ -842,17 +842,14 @@ impl TextThreadEditor { let mut buffer_rows_to_fold = BTreeSet::new(); let mut creases = Vec::new(); for (section, status) in sections { - let start = buffer - .anchor_in_excerpt(excerpt_id, section.range.start) - .unwrap(); - let end = buffer - .anchor_in_excerpt(excerpt_id, section.range.end) + let range = buffer + .anchor_range_in_excerpt(excerpt_id, section.range) .unwrap(); - let buffer_row = MultiBufferRow(start.to_point(&buffer).row); + let buffer_row = MultiBufferRow(range.start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); creases.push( Crease::inline( - start..end, + range, FoldPlaceholder { render: render_thought_process_fold_icon_button( cx.entity().downgrade(), @@ -894,17 +891,14 @@ impl TextThreadEditor { let mut buffer_rows_to_fold = BTreeSet::new(); let mut creases = Vec::new(); for section in sections { - let start = buffer - .anchor_in_excerpt(excerpt_id, section.range.start) + let range = buffer + .anchor_range_in_excerpt(excerpt_id, section.range) .unwrap(); - let end = buffer - .anchor_in_excerpt(excerpt_id, section.range.end) - .unwrap(); - let buffer_row = MultiBufferRow(start.to_point(&buffer).row); + let buffer_row = MultiBufferRow(range.start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); creases.push( Crease::inline( - start..end, + range, 
FoldPlaceholder { render: render_fold_icon_button( cx.entity().downgrade(), @@ -1035,7 +1029,7 @@ impl TextThreadEditor { let render_block = |message: MessageMetadata| -> RenderBlock { Arc::new({ - let context = self.context.clone(); + let text_thread = self.text_thread.clone(); move |cx| { let message_id = MessageId(message.timestamp); @@ -1099,20 +1093,19 @@ impl TextThreadEditor { .child(label) .children(spinner), ) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Toggle message role", None, "Available roles: You (User), Agent, System", - window, cx, ) }) .on_click({ - let context = context.clone(); + let text_thread = text_thread.clone(); move |_, _window, cx| { - context.update(cx, |context, cx| { - context.cycle_message_roles( + text_thread.update(cx, |text_thread, cx| { + text_thread.cycle_message_roles( HashSet::from_iter(Some(message_id)), cx, ) @@ -1140,12 +1133,11 @@ impl TextThreadEditor { .size(IconSize::XSmall) .color(Color::Hint), ) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Context Cached", None, "Large messages cached to optimize performance", - window, cx, ) }) @@ -1175,11 +1167,11 @@ impl TextThreadEditor { .icon_position(IconPosition::Start) .tooltip(Tooltip::text("View Details")) .on_click({ - let context = context.clone(); + let text_thread = text_thread.clone(); let error = error.clone(); move |_, _window, cx| { - context.update(cx, |_, cx| { - cx.emit(ContextEvent::ShowAssistError( + text_thread.update(cx, |_, cx| { + cx.emit(TextThreadEvent::ShowAssistError( error.clone(), )); }); @@ -1222,7 +1214,7 @@ impl TextThreadEditor { }; let mut new_blocks = vec![]; let mut block_index_to_message = vec![]; - for message in self.context.read(cx).messages(cx) { + for message in self.text_thread.read(cx).messages(cx) { if blocks_to_remove.remove(&message.id).is_some() { // This is an old message that we might modify. 
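The selection hunks in this file, like the earlier ones in inline_assistant.rs, follow one mechanical migration: selection queries now take a display snapshot instead of the context. A sketch of the two forms used in the hunks, assuming `Editor::display_snapshot(cx)` is the accessor they call and with `Point`/`usize` as example resolution types:

    // Before: let newest = editor.selections.newest::<Point>(cx);
    let snapshot = editor.display_snapshot(cx);
    let newest = editor.selections.newest::<Point>(&snapshot);
    let all = editor.selections.all::<usize>(&snapshot);
    // Reusing one snapshot for several queries presumably avoids recomputing the display map.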
let Some((meta, block_id)) = old_blocks.get_mut(&message.id) else { @@ -1263,13 +1255,21 @@ impl TextThreadEditor { ) -> Option<(String, bool)> { const CODE_FENCE_DELIMITER: &str = "```"; - let context_editor = context_editor_view.read(cx).editor.clone(); - context_editor.update(cx, |context_editor, cx| { - if context_editor.selections.newest::(cx).is_empty() { - let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let text_thread_editor = context_editor_view.read(cx).editor.clone(); + text_thread_editor.update(cx, |text_thread_editor, cx| { + let display_map = text_thread_editor.display_snapshot(cx); + if text_thread_editor + .selections + .newest::(&display_map) + .is_empty() + { + let snapshot = text_thread_editor.buffer().read(cx).snapshot(cx); let (_, _, snapshot) = snapshot.as_singleton()?; - let head = context_editor.selections.newest::(cx).head(); + let head = text_thread_editor + .selections + .newest::(&display_map) + .head(); let offset = snapshot.point_to_offset(head); let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; @@ -1286,8 +1286,8 @@ impl TextThreadEditor { (!text.is_empty()).then_some((text, true)) } else { - let selection = context_editor.selections.newest_adjusted(cx); - let buffer = context_editor.buffer().read(cx).snapshot(cx); + let selection = text_thread_editor.selections.newest_adjusted(&display_map); + let buffer = text_thread_editor.buffer().read(cx).snapshot(cx); let selected_text = buffer.text_for_range(selection.range()).collect::(); (!selected_text.is_empty()).then_some((selected_text, false)) @@ -1305,7 +1305,7 @@ impl TextThreadEditor { return; }; let Some(context_editor_view) = - agent_panel_delegate.active_context_editor(workspace, window, cx) + agent_panel_delegate.active_text_thread_editor(workspace, window, cx) else { return; }; @@ -1333,7 +1333,7 @@ impl TextThreadEditor { let result = maybe!({ let agent_panel_delegate = ::try_global(cx)?; let context_editor_view = - agent_panel_delegate.active_context_editor(workspace, window, cx)?; + agent_panel_delegate.active_text_thread_editor(workspace, window, cx)?; Self::get_selection_or_code_block(&context_editor_view, cx) }); let Some((text, is_code_block)) = result else { @@ -1370,7 +1370,7 @@ impl TextThreadEditor { return; }; let Some(context_editor_view) = - agent_panel_delegate.active_context_editor(workspace, window, cx) + agent_panel_delegate.active_text_thread_editor(workspace, window, cx) else { return; }; @@ -1456,7 +1456,7 @@ impl TextThreadEditor { pub fn quote_selection( workspace: &mut Workspace, - _: &QuoteSelection, + _: &AddSelectionToThread, window: &mut Window, cx: &mut Context, ) { @@ -1474,7 +1474,7 @@ impl TextThreadEditor { let selections = editor.update(cx, |editor, cx| { editor .selections - .all_adjusted(cx) + .all_adjusted(&editor.display_snapshot(cx)) .into_iter() .filter_map(|s| { (!s.is_empty()) @@ -1506,7 +1506,10 @@ impl TextThreadEditor { self.editor.update(cx, |editor, cx| { editor.insert("\n", window, cx); for (text, crease_title) in creases { - let point = editor.selections.newest::(cx).head(); + let point = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); let start_row = MultiBufferRow(point.row); editor.insert(&text, window, cx); @@ -1578,7 +1581,9 @@ impl TextThreadEditor { cx: &mut Context, ) -> (String, CopyMetadata, Vec>) { let (mut selection, creases) = self.editor.update(cx, |editor, cx| { - let mut selection = editor.selections.newest_adjusted(cx); + let mut selection = editor + .selections + 
.newest_adjusted(&editor.display_snapshot(cx)); let snapshot = editor.buffer().read(cx).snapshot(cx); selection.goal = SelectionGoal::None; @@ -1626,29 +1631,33 @@ impl TextThreadEditor { ) }); - let context = self.context.read(cx); + let text_thread = self.text_thread.read(cx); let mut text = String::new(); // If selection is empty, we want to copy the entire line if selection.range().is_empty() { - let snapshot = context.buffer().read(cx).snapshot(); + let snapshot = text_thread.buffer().read(cx).snapshot(); let point = snapshot.offset_to_point(selection.range().start); selection.start = snapshot.point_to_offset(Point::new(point.row, 0)); selection.end = snapshot .point_to_offset(cmp::min(Point::new(point.row + 1, 0), snapshot.max_point())); - for chunk in context.buffer().read(cx).text_for_range(selection.range()) { + for chunk in text_thread + .buffer() + .read(cx) + .text_for_range(selection.range()) + { text.push_str(chunk); } } else { - for message in context.messages(cx) { + for message in text_thread.messages(cx) { if message.offset_range.start >= selection.range().end { break; } else if message.offset_range.end >= selection.range().start { let range = cmp::max(message.offset_range.start, selection.range().start) ..cmp::min(message.offset_range.end, selection.range().end); if !range.is_empty() { - for chunk in context.buffer().read(cx).text_for_range(range) { + for chunk in text_thread.buffer().read(cx).text_for_range(range) { text.push_str(chunk); } if message.offset_range.end < selection.range().end { @@ -1697,7 +1706,10 @@ impl TextThreadEditor { if images.is_empty() { self.editor.update(cx, |editor, cx| { - let paste_position = editor.selections.newest::(cx).head(); + let paste_position = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); editor.paste(action, window, cx); if let Some(metadata) = metadata { @@ -1744,19 +1756,19 @@ impl TextThreadEditor { editor.transact(window, cx, |editor, _window, cx| { let edits = editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|selection| (selection.start..selection.end, "\n")); editor.edit(edits, cx); let snapshot = editor.buffer().read(cx).snapshot(cx); - for selection in editor.selections.all::(cx) { + for selection in editor.selections.all::(&editor.display_snapshot(cx)) { image_positions.push(snapshot.anchor_before(selection.end)); } }); }); - self.context.update(cx, |context, cx| { + self.text_thread.update(cx, |text_thread, cx| { for image in images { let Some(render_image) = image.to_image_data(cx.svg_renderer()).log_err() else { @@ -1766,7 +1778,7 @@ impl TextThreadEditor { let image_task = LanguageModelImage::from_image(Arc::new(image), cx).shared(); for image_position in image_positions.iter() { - context.insert_content( + text_thread.insert_content( Content::Image { anchor: image_position.text_anchor, image_id, @@ -1787,7 +1799,7 @@ impl TextThreadEditor { let excerpt_id = *buffer.as_singleton().unwrap().0; let old_blocks = std::mem::take(&mut self.image_blocks); let new_blocks = self - .context + .text_thread .read(cx) .contents(cx) .map( @@ -1835,36 +1847,36 @@ impl TextThreadEditor { } fn split(&mut self, _: &Split, _window: &mut Window, cx: &mut Context) { - self.context.update(cx, |context, cx| { + self.text_thread.update(cx, |text_thread, cx| { let selections = self.editor.read(cx).selections.disjoint_anchors_arc(); for selection in selections.as_ref() { let buffer = self.editor.read(cx).buffer().read(cx).snapshot(cx); let range = selection .map(|endpoint| 
endpoint.to_offset(&buffer)) .range(); - context.split_message(range, cx); + text_thread.split_message(range, cx); } }); } fn save(&mut self, _: &Save, _window: &mut Window, cx: &mut Context) { - self.context.update(cx, |context, cx| { - context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx) + self.text_thread.update(cx, |text_thread, cx| { + text_thread.save(Some(Duration::from_millis(500)), self.fs.clone(), cx) }); } pub fn title(&self, cx: &App) -> SharedString { - self.context.read(cx).summary().or_default() + self.text_thread.read(cx).summary().or_default() } pub fn regenerate_summary(&mut self, cx: &mut Context) { - self.context - .update(cx, |context, cx| context.summarize(true, cx)); + self.text_thread + .update(cx, |text_thread, cx| text_thread.summarize(true, cx)); } fn render_remaining_tokens(&self, cx: &App) -> Option> { let (token_count_color, token_count, max_token_count, tooltip) = - match token_state(&self.context, cx)? { + match token_state(&self.text_thread, cx)? { TokenState::NoTokensLeft { max_token_count, token_count, @@ -1912,7 +1924,7 @@ impl TextThreadEditor { fn render_send_button(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let focus_handle = self.focus_handle(cx); - let (style, tooltip) = match token_state(&self.context, cx) { + let (style, tooltip) = match token_state(&self.text_thread, cx) { Some(TokenState::NoTokensLeft { .. }) => ( ButtonStyle::Tinted(TintColor::Error), Some(Tooltip::text("Token limit reached")(window, cx)), @@ -1945,7 +1957,7 @@ impl TextThreadEditor { }) .layer(ElevationIndex::ModalSurface) .key_binding( - KeyBinding::for_action_in(&Assist, &focus_handle, window, cx) + KeyBinding::for_action_in(&Assist, &focus_handle, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(move |_event, window, cx| { @@ -1977,21 +1989,17 @@ impl TextThreadEditor { cx.entity().downgrade(), IconButton::new("trigger", IconName::Plus) .icon_size(IconSize::Small) - .icon_color(Color::Muted), - move |window, cx| { - Tooltip::with_meta( - "Add Context", - None, - "Type / to insert via keyboard", - window, - cx, - ) + .icon_color(Color::Muted) + .selected_icon_color(Color::Accent) + .selected_style(ButtonStyle::Filled), + move |_window, cx| { + Tooltip::with_meta("Add Context", None, "Type / to insert via keyboard", cx) }, ) } fn render_burn_mode_toggle(&self, cx: &mut Context) -> Option { - let context = self.context().read(cx); + let text_thread = self.text_thread().read(cx); let active_model = LanguageModelRegistry::read_global(cx) .default_model() .map(|default| default.model)?; @@ -1999,7 +2007,7 @@ impl TextThreadEditor { return None; } - let active_completion_mode = context.completion_mode(); + let active_completion_mode = text_thread.completion_mode(); let burn_mode_enabled = active_completion_mode == CompletionMode::Burn; let icon = if burn_mode_enabled { IconName::ZedBurnModeOn @@ -2014,8 +2022,8 @@ impl TextThreadEditor { .toggle_state(burn_mode_enabled) .selected_icon_color(Color::Error) .on_click(cx.listener(move |this, _event, _window, cx| { - this.context().update(cx, |context, _cx| { - context.set_completion_mode(match active_completion_mode { + this.text_thread().update(cx, |text_thread, _cx| { + text_thread.set_completion_mode(match active_completion_mode { CompletionMode::Burn => CompletionMode::Normal, CompletionMode::Normal => CompletionMode::Burn, }); @@ -2052,41 +2060,32 @@ impl TextThreadEditor { }; let focus_handle = self.editor().focus_handle(cx); + let (color, icon) = if 
self.language_model_selector_menu_handle.is_deployed() { + (Color::Accent, IconName::ChevronUp) + } else { + (Color::Muted, IconName::ChevronDown) + }; PickerPopoverMenu::new( self.language_model_selector.clone(), ButtonLike::new("active-model") - .style(ButtonStyle::Subtle) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .child( h_flex() .gap_0p5() - .child( - Icon::new(provider_icon) - .color(Color::Muted) - .size(IconSize::XSmall), - ) + .child(Icon::new(provider_icon).color(color).size(IconSize::XSmall)) .child( Label::new(model_name) - .color(Color::Muted) + .color(color) .size(LabelSize::Small) .ml_0p5(), ) - .child( - Icon::new(IconName::ChevronDown) - .color(Color::Muted) - .size(IconSize::XSmall), - ), + .child(Icon::new(icon).color(color).size(IconSize::XSmall)), ), - move |window, cx| { - Tooltip::for_action_in( - "Change Model", - &ToggleModelSelector, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx) }, - gpui::Corner::BottomLeft, + gpui::Corner::BottomRight, cx, ) .with_handle(self.language_model_selector_menu_handle.clone()) @@ -2651,10 +2650,10 @@ impl FollowableItem for TextThreadEditor { } fn to_state_proto(&self, window: &Window, cx: &App) -> Option { - let context = self.context.read(cx); + let text_thread = self.text_thread.read(cx); Some(proto::view::Variant::ContextEditor( proto::view::ContextEditor { - context_id: context.id().to_proto(), + context_id: text_thread.id().to_proto(), editor: if let Some(proto::view::Variant::Editor(proto)) = self.editor.read(cx).to_state_proto(window, cx) { @@ -2680,22 +2679,22 @@ impl FollowableItem for TextThreadEditor { unreachable!() }; - let context_id = ContextId::from_proto(state.context_id); + let text_thread_id = TextThreadId::from_proto(state.context_id); let editor_state = state.editor?; let project = workspace.read(cx).project().clone(); let agent_panel_delegate = ::try_global(cx)?; - let context_editor_task = workspace.update(cx, |workspace, cx| { - agent_panel_delegate.open_remote_context(workspace, context_id, window, cx) + let text_thread_editor_task = workspace.update(cx, |workspace, cx| { + agent_panel_delegate.open_remote_text_thread(workspace, text_thread_id, window, cx) }); Some(window.spawn(cx, async move |cx| { - let context_editor = context_editor_task.await?; - context_editor - .update_in(cx, |context_editor, window, cx| { - context_editor.remote_id = Some(id); - context_editor.editor.update(cx, |editor, cx| { + let text_thread_editor = text_thread_editor_task.await?; + text_thread_editor + .update_in(cx, |text_thread_editor, window, cx| { + text_thread_editor.remote_id = Some(id); + text_thread_editor.editor.update(cx, |editor, cx| { editor.apply_update_proto( &project, proto::update_view::Variant::Editor(proto::update_view::Editor { @@ -2712,7 +2711,7 @@ impl FollowableItem for TextThreadEditor { }) })? 
.await?; - Ok(context_editor) + Ok(text_thread_editor) })) } @@ -2759,7 +2758,7 @@ impl FollowableItem for TextThreadEditor { } fn dedup(&self, existing: &Self, _window: &Window, cx: &App) -> Option { - if existing.context.read(cx).id() == self.context.read(cx).id() { + if existing.text_thread.read(cx).id() == self.text_thread.read(cx).id() { Some(item::Dedup::KeepExisting) } else { None @@ -2771,17 +2770,17 @@ enum PendingSlashCommand {} fn invoked_slash_command_fold_placeholder( command_id: InvokedSlashCommandId, - context: WeakEntity, + text_thread: WeakEntity, ) -> FoldPlaceholder { FoldPlaceholder { constrain_width: false, merge_adjacent: false, render: Arc::new(move |fold_id, _, cx| { - let Some(context) = context.upgrade() else { + let Some(text_thread) = text_thread.upgrade() else { return Empty.into_any(); }; - let Some(command) = context.read(cx).invoked_slash_command(&command_id) else { + let Some(command) = text_thread.read(cx).invoked_slash_command(&command_id) else { return Empty.into_any(); }; @@ -2822,14 +2821,15 @@ enum TokenState { }, } -fn token_state(context: &Entity, cx: &App) -> Option { +fn token_state(text_thread: &Entity, cx: &App) -> Option { const WARNING_TOKEN_THRESHOLD: f32 = 0.8; let model = LanguageModelRegistry::read_global(cx) .default_model()? .model; - let token_count = context.read(cx).token_count()?; - let max_token_count = model.max_token_count_for_mode(context.read(cx).completion_mode().into()); + let token_count = text_thread.read(cx).token_count()?; + let max_token_count = + model.max_token_count_for_mode(text_thread.read(cx).completion_mode().into()); let token_state = if max_token_count.saturating_sub(token_count) == 0 { TokenState::NoTokensLeft { max_token_count, @@ -2941,7 +2941,7 @@ mod tests { #[gpui::test] async fn test_copy_paste_whole_message(cx: &mut TestAppContext) { - let (context, context_editor, mut cx) = setup_context_editor_text(vec![ + let (context, text_thread_editor, mut cx) = setup_text_thread_editor_text(vec![ (Role::User, "What is the Zed editor?"), ( Role::Assistant, @@ -2951,8 +2951,8 @@ mod tests { ],cx).await; // Select & Copy whole user message - assert_copy_paste_context_editor( - &context_editor, + assert_copy_paste_text_thread_editor( + &text_thread_editor, message_range(&context, 0, &mut cx), indoc! {" What is the Zed editor? @@ -2963,8 +2963,8 @@ mod tests { ); // Select & Copy whole assistant message - assert_copy_paste_context_editor( - &context_editor, + assert_copy_paste_text_thread_editor( + &text_thread_editor, message_range(&context, 1, &mut cx), indoc! {" What is the Zed editor? @@ -2978,7 +2978,7 @@ mod tests { #[gpui::test] async fn test_copy_paste_no_selection(cx: &mut TestAppContext) { - let (context, context_editor, mut cx) = setup_context_editor_text( + let (context, text_thread_editor, mut cx) = setup_text_thread_editor_text( vec![ (Role::User, "user1"), (Role::Assistant, "assistant1"), @@ -2991,8 +2991,8 @@ mod tests { // Copy and paste first assistant message let message_2_range = message_range(&context, 1, &mut cx); - assert_copy_paste_context_editor( - &context_editor, + assert_copy_paste_text_thread_editor( + &text_thread_editor, message_2_range.start..message_2_range.start, indoc! {" user1 @@ -3005,8 +3005,8 @@ mod tests { // Copy and cut second assistant message let message_3_range = message_range(&context, 2, &mut cx); - assert_copy_paste_context_editor( - &context_editor, + assert_copy_paste_text_thread_editor( + &text_thread_editor, message_3_range.start..message_3_range.start, indoc! 
{" user1 @@ -3093,29 +3093,29 @@ mod tests { } } - async fn setup_context_editor_text( + async fn setup_text_thread_editor_text( messages: Vec<(Role, &str)>, cx: &mut TestAppContext, ) -> ( - Entity, + Entity, Entity, VisualTestContext, ) { cx.update(init_test); let fs = FakeFs::new(cx.executor()); - let context = create_context_with_messages(messages, cx); + let text_thread = create_text_thread_with_messages(messages, cx); let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); let workspace = window.root(cx).unwrap(); let mut cx = VisualTestContext::from_window(*window, cx); - let context_editor = window + let text_thread_editor = window .update(&mut cx, |_, window, cx| { cx.new(|cx| { - TextThreadEditor::for_context( - context.clone(), + TextThreadEditor::for_text_thread( + text_thread.clone(), fs, workspace.downgrade(), project, @@ -3127,59 +3127,59 @@ mod tests { }) .unwrap(); - (context, context_editor, cx) + (text_thread, text_thread_editor, cx) } fn message_range( - context: &Entity, + text_thread: &Entity, message_ix: usize, cx: &mut TestAppContext, ) -> Range { - context.update(cx, |context, cx| { - context + text_thread.update(cx, |text_thread, cx| { + text_thread .messages(cx) .nth(message_ix) .unwrap() .anchor_range - .to_offset(&context.buffer().read(cx).snapshot()) + .to_offset(&text_thread.buffer().read(cx).snapshot()) }) } - fn assert_copy_paste_context_editor( - context_editor: &Entity, + fn assert_copy_paste_text_thread_editor( + text_thread_editor: &Entity, range: Range, expected_text: &str, cx: &mut VisualTestContext, ) { - context_editor.update_in(cx, |context_editor, window, cx| { - context_editor.editor.update(cx, |editor, cx| { + text_thread_editor.update_in(cx, |text_thread_editor, window, cx| { + text_thread_editor.editor.update(cx, |editor, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([range]) }); }); - context_editor.copy(&Default::default(), window, cx); + text_thread_editor.copy(&Default::default(), window, cx); - context_editor.editor.update(cx, |editor, cx| { + text_thread_editor.editor.update(cx, |editor, cx| { editor.move_to_end(&Default::default(), window, cx); }); - context_editor.paste(&Default::default(), window, cx); + text_thread_editor.paste(&Default::default(), window, cx); - context_editor.editor.update(cx, |editor, cx| { + text_thread_editor.editor.update(cx, |editor, cx| { assert_eq!(editor.text(cx), expected_text); }); }); } - fn create_context_with_messages( + fn create_text_thread_with_messages( mut messages: Vec<(Role, &str)>, cx: &mut TestAppContext, - ) -> Entity { + ) -> Entity { let registry = Arc::new(LanguageRegistry::test(cx.executor())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); cx.new(|cx| { - let mut context = AssistantContext::local( + let mut text_thread = TextThread::local( registry, None, None, @@ -3187,33 +3187,33 @@ mod tests { Arc::new(SlashCommandWorkingSet::default()), cx, ); - let mut message_1 = context.messages(cx).next().unwrap(); + let mut message_1 = text_thread.messages(cx).next().unwrap(); let (role, text) = messages.remove(0); loop { if role == message_1.role { - context.buffer().update(cx, |buffer, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(message_1.offset_range, text)], None, cx); }); break; } let mut ids = HashSet::default(); ids.insert(message_1.id); - context.cycle_message_roles(ids, cx); - 
message_1 = context.messages(cx).next().unwrap(); + text_thread.cycle_message_roles(ids, cx); + message_1 = text_thread.messages(cx).next().unwrap(); } let mut last_message_id = message_1.id; for (role, text) in messages { - context.insert_message_after(last_message_id, role, MessageStatus::Done, cx); - let message = context.messages(cx).last().unwrap(); + text_thread.insert_message_after(last_message_id, role, MessageStatus::Done, cx); + let message = text_thread.messages(cx).last().unwrap(); last_message_id = message.id; - context.buffer().update(cx, |buffer, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(message.offset_range, text)], None, cx); }) } - context + text_thread }) } diff --git a/crates/agent_ui/src/ui/burn_mode_tooltip.rs b/crates/agent_ui/src/ui/burn_mode_tooltip.rs index f95dc1250e36bba388452ce11e6ec783e44248e1..ccd7d4bf3190c0d879327dc0ea152994c4a33163 100644 --- a/crates/agent_ui/src/ui/burn_mode_tooltip.rs +++ b/crates/agent_ui/src/ui/burn_mode_tooltip.rs @@ -18,7 +18,7 @@ impl BurnModeTooltip { } impl Render for BurnModeTooltip { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let (icon, color) = if self.selected { (IconName::ZedBurnModeOn, Color::Error) } else { @@ -45,8 +45,7 @@ impl Render for BurnModeTooltip { .child(Label::new("Burn Mode")) .when(self.selected, |title| title.child(turned_on)); - let keybinding = KeyBinding::for_action(&ToggleBurnMode, window, cx) - .map(|kb| kb.size(rems_from_px(12.))); + let keybinding = KeyBinding::for_action(&ToggleBurnMode, cx).size(rems_from_px(12.)); tooltip_container(cx, |this, _| { this @@ -54,7 +53,7 @@ impl Render for BurnModeTooltip { h_flex() .justify_between() .child(title) - .children(keybinding) + .child(keybinding) ) .child( div() diff --git a/crates/agent_ui/src/ui/context_pill.rs b/crates/agent_ui/src/ui/context_pill.rs index f85a06455439d8e52a7b4272bc7f8069f36548ac..89bf618a16d3fb8e7abc5afaf34ee6e8bb43ab67 100644 --- a/crates/agent_ui/src/ui/context_pill.rs +++ b/crates/agent_ui/src/ui/context_pill.rs @@ -11,13 +11,13 @@ use project::Project; use prompt_store::PromptStore; use rope::Point; use ui::{IconButtonShape, Tooltip, prelude::*, tooltip_container}; +use util::paths::PathStyle; -use agent::context::{ +use crate::context::{ AgentContextHandle, ContextId, ContextKind, DirectoryContextHandle, FetchedUrlContext, FileContextHandle, ImageContext, ImageStatus, RulesContextHandle, SelectionContextHandle, SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, }; -use util::paths::PathStyle; #[derive(IntoElement)] pub enum ContextPill { @@ -244,8 +244,8 @@ impl RenderOnce for ContextPill { .truncate(), ), ) - .tooltip(|window, cx| { - Tooltip::with_meta("Suggested Context", None, "Click to add it", window, cx) + .tooltip(|_window, cx| { + Tooltip::with_meta("Suggested Context", None, "Click to add it", cx) }) .when_some(on_click.as_ref(), |element, on_click| { let on_click = on_click.clone(); @@ -466,7 +466,7 @@ impl AddedContext { parent: None, tooltip: None, icon_path: None, - status: if handle.thread.read(cx).is_generating_detailed_summary() { + status: if handle.thread.read(cx).is_generating_summary() { ContextStatus::Loading { message: "Summarizing…".into(), } @@ -476,7 +476,11 @@ impl AddedContext { render_hover: { let thread = handle.thread.clone(); Some(Rc::new(move |_, cx| { - let text = thread.read(cx).latest_detailed_summary_or_text(); + let text = 
thread + .update(cx, |thread, cx| thread.summary(cx)) + .now_or_never() + .flatten() + .unwrap_or_else(|| SharedString::from(thread.read(cx).to_markdown())); ContextPillHover::new_text(text, cx).into() })) }, @@ -493,9 +497,9 @@ impl AddedContext { icon_path: None, status: ContextStatus::Ready, render_hover: { - let context = handle.context.clone(); + let text_thread = handle.text_thread.clone(); Some(Rc::new(move |_, cx| { - let text = context.read(cx).to_xml(cx); + let text = text_thread.read(cx).to_xml(cx); ContextPillHover::new_text(text.into(), cx).into() })) }, diff --git a/crates/ai_onboarding/Cargo.toml b/crates/ai_onboarding/Cargo.toml index 95a45b1a6fbe103f02532d33c21af707f2f51d45..8fb0570e5cf3da5f5f3d6249f76b42f15b8eed7d 100644 --- a/crates/ai_onboarding/Cargo.toml +++ b/crates/ai_onboarding/Cargo.toml @@ -24,5 +24,4 @@ serde.workspace = true smallvec.workspace = true telemetry.workspace = true ui.workspace = true -workspace-hack.workspace = true zed_actions.workspace = true diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index d953ae612199cea15f6718bf3d5cd7dd55ef856e..20bb0a5f6895ea225cad59ad8fef6cc6ef168b39 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -84,10 +84,32 @@ impl ZedAiOnboarding { self } + fn render_dismiss_button(&self) -> Option { + self.dismiss_onboarding.as_ref().map(|dismiss_callback| { + let callback = dismiss_callback.clone(); + + h_flex() + .absolute() + .top_0() + .right_0() + .child( + IconButton::new("dismiss_onboarding", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Dismiss")) + .on_click(move |_, window, cx| { + telemetry::event!("Banner Dismissed", source = "AI Onboarding",); + callback(window, cx) + }), + ) + .into_any_element() + }) + } + fn render_sign_in_disclaimer(&self, _cx: &mut App) -> AnyElement { let signing_in = matches!(self.sign_in_status, SignInStatus::SigningIn); v_flex() + .relative() .gap_1() .child(Headline::new("Welcome to Zed AI")) .child( @@ -109,6 +131,7 @@ impl ZedAiOnboarding { } }), ) + .children(self.render_dismiss_button()) .into_any_element() } @@ -180,27 +203,7 @@ impl ZedAiOnboarding { ) .child(PlanDefinitions.free_plan(is_v2)), ) - .when_some( - self.dismiss_onboarding.as_ref(), - |this, dismiss_callback| { - let callback = dismiss_callback.clone(); - - this.child( - h_flex().absolute().top_0().right_0().child( - IconButton::new("dismiss_onboarding", IconName::Close) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Dismiss")) - .on_click(move |_, window, cx| { - telemetry::event!( - "Banner Dismissed", - source = "AI Onboarding", - ); - callback(window, cx) - }), - ), - ) - }, - ) + .children(self.render_dismiss_button()) .child( v_flex() .mt_2() @@ -245,26 +248,7 @@ impl ZedAiOnboarding { .mb_2(), ) .child(PlanDefinitions.pro_trial(is_v2, false)) - .when_some( - self.dismiss_onboarding.as_ref(), - |this, dismiss_callback| { - let callback = dismiss_callback.clone(); - this.child( - h_flex().absolute().top_0().right_0().child( - IconButton::new("dismiss_onboarding", IconName::Close) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Dismiss")) - .on_click(move |_, window, cx| { - telemetry::event!( - "Banner Dismissed", - source = "AI Onboarding", - ); - callback(window, cx) - }), - ), - ) - }, - ) + .children(self.render_dismiss_button()) .into_any_element() } @@ -278,26 +262,7 @@ impl ZedAiOnboarding { .mb_2(), ) .child(PlanDefinitions.pro_plan(is_v2, false)) - .when_some( 
- self.dismiss_onboarding.as_ref(), - |this, dismiss_callback| { - let callback = dismiss_callback.clone(); - this.child( - h_flex().absolute().top_0().right_0().child( - IconButton::new("dismiss_onboarding", IconName::Close) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Dismiss")) - .on_click(move |_, window, cx| { - telemetry::event!( - "Banner Dismissed", - source = "AI Onboarding", - ); - callback(window, cx) - }), - ), - ) - }, - ) + .children(self.render_dismiss_button()) .into_any_element() } } diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml index c8103e5bfb533a0f7f8e88995ac0927073a9793f..a9c7208b0caa9a2660aa723c903554205e672fe6 100644 --- a/crates/anthropic/Cargo.toml +++ b/crates/anthropic/Cargo.toml @@ -26,4 +26,3 @@ serde_json.workspace = true settings.workspace = true strum.workspace = true thiserror.workspace = true -workspace-hack.workspace = true diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 93334fd950f27309000ff175aeafb18767bd2867..cd2077cdeb1370a9753df83f9b239ef776bab149 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -91,6 +91,13 @@ pub enum Model { Claude3_7SonnetThinking, #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")] Claude3_5Sonnet, + #[serde(rename = "claude-haiku-4-5", alias = "claude-haiku-4-5-latest")] + ClaudeHaiku4_5, + #[serde( + rename = "claude-haiku-4-5-thinking", + alias = "claude-haiku-4-5-thinking-latest" + )] + ClaudeHaiku4_5Thinking, #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")] Claude3_5Haiku, #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")] @@ -168,6 +175,14 @@ impl Model { return Ok(Self::Claude3_5Sonnet); } + if id.starts_with("claude-haiku-4-5-thinking") { + return Ok(Self::ClaudeHaiku4_5Thinking); + } + + if id.starts_with("claude-haiku-4-5") { + return Ok(Self::ClaudeHaiku4_5); + } + if id.starts_with("claude-3-5-haiku") { return Ok(Self::Claude3_5Haiku); } @@ -200,6 +215,8 @@ impl Model { Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest", Self::Claude3_7Sonnet => "claude-3-7-sonnet-latest", Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest", + Self::ClaudeHaiku4_5 => "claude-haiku-4-5-latest", + Self::ClaudeHaiku4_5Thinking => "claude-haiku-4-5-thinking-latest", Self::Claude3_5Haiku => "claude-3-5-haiku-latest", Self::Claude3Opus => "claude-3-opus-latest", Self::Claude3Sonnet => "claude-3-sonnet-20240229", @@ -217,6 +234,7 @@ impl Model { Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929", Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest", Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest", + Self::ClaudeHaiku4_5 | Self::ClaudeHaiku4_5Thinking => "claude-haiku-4-5-20251001", Self::Claude3_5Haiku => "claude-3-5-haiku-latest", Self::Claude3Opus => "claude-3-opus-latest", Self::Claude3Sonnet => "claude-3-sonnet-20240229", @@ -238,6 +256,8 @@ impl Model { Self::Claude3_7Sonnet => "Claude 3.7 Sonnet", Self::Claude3_5Sonnet => "Claude 3.5 Sonnet", Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking", + Self::ClaudeHaiku4_5 => "Claude Haiku 4.5", + Self::ClaudeHaiku4_5Thinking => "Claude Haiku 4.5 Thinking", Self::Claude3_5Haiku => "Claude 3.5 Haiku", Self::Claude3Opus => "Claude 3 Opus", Self::Claude3Sonnet => "Claude 3 Sonnet", @@ -259,6 +279,8 @@ impl Model { | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::Claude3_5Sonnet + | Self::ClaudeHaiku4_5 + 
| Self::ClaudeHaiku4_5Thinking | Self::Claude3_5Haiku | Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking @@ -286,6 +308,8 @@ impl Model { | Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::Claude3_5Sonnet + | Self::ClaudeHaiku4_5 + | Self::ClaudeHaiku4_5Thinking | Self::Claude3_5Haiku | Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking @@ -310,6 +334,7 @@ impl Model { | Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking | Self::Claude3_5Haiku => 8_192, + Self::ClaudeHaiku4_5 | Self::ClaudeHaiku4_5Thinking => 64_000, Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096, Self::Custom { max_output_tokens, .. @@ -330,6 +355,8 @@ impl Model { | Self::Claude3_5Sonnet | Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking + | Self::ClaudeHaiku4_5 + | Self::ClaudeHaiku4_5Thinking | Self::Claude3_5Haiku | Self::Claude3Opus | Self::Claude3Sonnet @@ -349,6 +376,7 @@ impl Model { | Self::ClaudeSonnet4_5 | Self::Claude3_5Sonnet | Self::Claude3_7Sonnet + | Self::ClaudeHaiku4_5 | Self::Claude3_5Haiku | Self::Claude3Opus | Self::Claude3Sonnet @@ -357,6 +385,7 @@ impl Model { | Self::ClaudeOpus4_1Thinking | Self::ClaudeSonnet4Thinking | Self::ClaudeSonnet4_5Thinking + | Self::ClaudeHaiku4_5Thinking | Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking { budget_tokens: Some(4_096), }, diff --git a/crates/askpass/Cargo.toml b/crates/askpass/Cargo.toml index 6aec7e6d7e011c626a57c478fa65e161f43b2bdd..298d1a736959d1021da49a2c4f4356e12cf014be 100644 --- a/crates/askpass/Cargo.toml +++ b/crates/askpass/Cargo.toml @@ -20,7 +20,6 @@ smol.workspace = true log.workspace = true tempfile.workspace = true util.workspace = true -workspace-hack.workspace = true zeroize.workspace = true [target.'cfg(target_os = "windows")'.dependencies] diff --git a/crates/askpass/src/askpass.rs b/crates/askpass/src/askpass.rs index dfe8a96ee6f19510df06948f94af48d621515747..81cdd355bf7173b3954a8c2731a0728d354253ba 100644 --- a/crates/askpass/src/askpass.rs +++ b/crates/askpass/src/askpass.rs @@ -20,7 +20,7 @@ use futures::{ }; use gpui::{AsyncApp, BackgroundExecutor, Task}; use smol::fs; -use util::{ResultExt as _, debug_panic, maybe, paths::PathExt}; +use util::{ResultExt as _, debug_panic, maybe, paths::PathExt, shell::ShellKind}; /// Path to the program used for askpass /// @@ -199,9 +199,15 @@ impl PasswordProxy { let current_exec = std::env::current_exe().context("Failed to determine current zed executable path.")?; + // TODO: inferred from the use of powershell.exe in askpass_helper_script + let shell_kind = if cfg!(windows) { + ShellKind::PowerShell + } else { + ShellKind::Posix + }; let askpass_program = ASKPASS_PROGRAM .get_or_init(|| current_exec) - .try_shell_safe() + .try_shell_safe(shell_kind) .context("Failed to shell-escape Askpass program path.")? .to_string(); // Create an askpass script that communicates back to this process. 
@@ -343,7 +349,7 @@ fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Pa format!( r#" $ErrorActionPreference = 'Stop'; - ($args -join [char]0) | & "{askpass_program}" --askpass={askpass_socket} 2> $null + ($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null "#, askpass_socket = askpass_socket.display(), ) diff --git a/crates/assets/Cargo.toml b/crates/assets/Cargo.toml index 130394a30b7faf909e40922dd833dfcf9598d848..a56cd109f1be0eaa003d831ba31f4e288c94fd85 100644 --- a/crates/assets/Cargo.toml +++ b/crates/assets/Cargo.toml @@ -15,4 +15,3 @@ workspace = true anyhow.workspace = true gpui.workspace = true rust-embed.workspace = true -workspace-hack.workspace = true diff --git a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index 0908cd61653d35dbb54ae325118a6091cd345a4e..1fc3e8448c5e2d0c278254b369ac49fd2e9ce33a 100644 --- a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -27,7 +27,6 @@ serde_json.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index 4b85fa2edf2afd6b3ea7df154b5e14ab492a8013..2e6bb7325e14ac109d77854e1d848c541a685458 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -9,6 +9,7 @@ use anyhow::Result; use futures::StreamExt; use futures::stream::{self, BoxStream}; use gpui::{App, SharedString, Task, WeakEntity, Window}; +use language::CodeLabelBuilder; use language::HighlightId; use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt}; pub use language_model::Role; @@ -328,15 +329,15 @@ impl SlashCommandLine { } pub fn create_label_for_command(command_name: &str, arguments: &[&str], cx: &App) -> CodeLabel { - let mut label = CodeLabel::default(); + let mut label = CodeLabelBuilder::default(); label.push_str(command_name, None); + label.respan_filter_range(None); label.push_str(" ", None); label.push_str( &arguments.join(" "), cx.theme().syntax().highlight_id("comment").map(HighlightId), ); - label.filter_range = 0..command_name.len(); - label + label.build() } #[cfg(test)] diff --git a/crates/assistant_slash_commands/Cargo.toml b/crates/assistant_slash_commands/Cargo.toml index 5844d21a51b0642a89fd13f29f53a074331ee10e..85dd92501f93fb79ba1d3f70b3a06f1077356cfa 100644 --- a/crates/assistant_slash_commands/Cargo.toml +++ b/crates/assistant_slash_commands/Cargo.toml @@ -38,7 +38,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true worktree.workspace = true -workspace-hack.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index 0968a297b82bb0da783ec18fb1cd0301acf50f4c..6fe1a410d3551fe72737500ad8b143a392645d1b 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -7,7 +7,7 @@ use futures::Stream; use futures::channel::mpsc; use fuzzy::PathMatch; use gpui::{App, Entity, Task, WeakEntity}; -use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; +use language::{BufferSnapshot, 
CodeLabelBuilder, HighlightId, LineEnding, LspAdapterDelegate}; use project::{PathMatchCandidateSet, Project}; use serde::{Deserialize, Serialize}; use smol::stream::StreamExt; @@ -168,7 +168,7 @@ impl SlashCommand for FileSlashCommand { .display(path_style) .to_string(); - let mut label = CodeLabel::default(); + let mut label = CodeLabelBuilder::default(); let file_name = path_match.path.file_name()?; let label_text = if path_match.is_dir { format!("{}/ ", file_name) @@ -178,10 +178,10 @@ impl SlashCommand for FileSlashCommand { label.push_str(label_text.as_str(), None); label.push_str(&text, comment_id); - label.filter_range = 0..file_name.len(); + label.respan_filter_range(Some(file_name)); Some(ArgumentCompletion { - label, + label: label.build(), new_text: text, after_completion: AfterCompletion::Compose, replace_previous_arguments: false, diff --git a/crates/assistant_slash_commands/src/selection_command.rs b/crates/assistant_slash_commands/src/selection_command.rs index c8692dec718a03af777753f35ae646f245878ed9..ce6c0b931411d8073ffd6c97b648bb044ad857e7 100644 --- a/crates/assistant_slash_commands/src/selection_command.rs +++ b/crates/assistant_slash_commands/src/selection_command.rs @@ -79,7 +79,7 @@ impl SlashCommand for SelectionCommand { editor.update(cx, |editor, cx| { let selection_ranges = editor .selections - .all_adjusted(cx) + .all_adjusted(&editor.display_snapshot(cx)) .iter() .map(|selection| selection.range()) .collect::>(); diff --git a/crates/assistant_slash_commands/src/tab_command.rs b/crates/assistant_slash_commands/src/tab_command.rs index 9fd38128cacd51db5bd48fc801d1238ae3f674c4..a4c0ad412cca3eaf7d03d684cc3fb828be60a93d 100644 --- a/crates/assistant_slash_commands/src/tab_command.rs +++ b/crates/assistant_slash_commands/src/tab_command.rs @@ -7,7 +7,7 @@ use collections::{HashMap, HashSet}; use editor::Editor; use futures::future::join_all; use gpui::{Task, WeakEntity}; -use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, CodeLabelBuilder, HighlightId, LspAdapterDelegate}; use std::sync::{Arc, atomic::AtomicBool}; use ui::{ActiveTheme, App, Window, prelude::*}; use util::{ResultExt, paths::PathStyle}; @@ -308,10 +308,10 @@ fn create_tab_completion_label( comment_id: Option, ) -> CodeLabel { let (parent_path, file_name) = path_style.split(path); - let mut label = CodeLabel::default(); + let mut label = CodeLabelBuilder::default(); label.push_str(file_name, None); label.push_str(" ", None); label.push_str(parent_path.unwrap_or_default(), comment_id); - label.filter_range = 0..file_name.len(); - label + label.respan_filter_range(Some(file_name)); + label.build() } diff --git a/crates/assistant_context/Cargo.toml b/crates/assistant_text_thread/Cargo.toml similarity index 93% rename from crates/assistant_context/Cargo.toml rename to crates/assistant_text_thread/Cargo.toml index 3e2761a84674c6c4201165edf856b675843315d9..8dfdfa3828340217456088a246eee5b1568a7a77 100644 --- a/crates/assistant_context/Cargo.toml +++ b/crates/assistant_text_thread/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "assistant_context" +name = "assistant_text_thread" version = "0.1.0" edition.workspace = true publish.workspace = true @@ -9,7 +9,7 @@ license = "GPL-3.0-or-later" workspace = true [lib] -path = "src/assistant_context.rs" +path = "src/assistant_text_thread.rs" [features] test-support = [] @@ -51,7 +51,6 @@ ui.workspace = true util.workspace = true uuid.workspace = true workspace.workspace = true -workspace-hack.workspace 
= true zed_env_vars.workspace = true [dev-dependencies] diff --git a/crates/agent2/LICENSE-GPL b/crates/assistant_text_thread/LICENSE-GPL similarity index 100% rename from crates/agent2/LICENSE-GPL rename to crates/assistant_text_thread/LICENSE-GPL diff --git a/crates/assistant_text_thread/src/assistant_text_thread.rs b/crates/assistant_text_thread/src/assistant_text_thread.rs new file mode 100644 index 0000000000000000000000000000000000000000..7eab9800d5d6f43ba8eabec0682961e073781ace --- /dev/null +++ b/crates/assistant_text_thread/src/assistant_text_thread.rs @@ -0,0 +1,15 @@ +#[cfg(test)] +mod assistant_text_thread_tests; +mod text_thread; +mod text_thread_store; + +pub use crate::text_thread::*; +pub use crate::text_thread_store::*; + +use client::Client; +use gpui::App; +use std::sync::Arc; + +pub fn init(client: Arc, _: &mut App) { + text_thread_store::init(&client.into()); +} diff --git a/crates/assistant_context/src/assistant_context_tests.rs b/crates/assistant_text_thread/src/assistant_text_thread_tests.rs similarity index 73% rename from crates/assistant_context/src/assistant_context_tests.rs rename to crates/assistant_text_thread/src/assistant_text_thread_tests.rs index 413e32dfcb14273920e9ae4110e5905bdbae5956..fbd5dcafa6e142538f1f5821bc9e0a89ccbfd881 100644 --- a/crates/assistant_context/src/assistant_context_tests.rs +++ b/crates/assistant_text_thread/src/assistant_text_thread_tests.rs @@ -1,6 +1,6 @@ use crate::{ - AssistantContext, CacheStatus, ContextEvent, ContextId, ContextOperation, ContextSummary, - InvokedSlashCommandId, MessageCacheMetadata, MessageId, MessageStatus, + CacheStatus, InvokedSlashCommandId, MessageCacheMetadata, MessageId, MessageStatus, TextThread, + TextThreadEvent, TextThreadId, TextThreadOperation, TextThreadSummary, }; use anyhow::Result; use assistant_slash_command::{ @@ -47,8 +47,8 @@ fn test_inserting_and_removing_messages(cx: &mut App) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread = cx.new(|cx| { + TextThread::local( registry, None, None, @@ -57,21 +57,21 @@ fn test_inserting_and_removing_messages(cx: &mut App) { cx, ) }); - let buffer = context.read(cx).buffer.clone(); + let buffer = text_thread.read(cx).buffer().clone(); - let message_1 = context.read(cx).message_anchors[0].clone(); + let message_1 = text_thread.read(cx).message_anchors[0].clone(); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![(message_1.id, Role::User, 0..0)] ); - let message_2 = context.update(cx, |context, cx| { + let message_2 = text_thread.update(cx, |context, cx| { context .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx) .unwrap() }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..1), (message_2.id, Role::Assistant, 1..1) @@ -82,20 +82,20 @@ fn test_inserting_and_removing_messages(cx: &mut App) { buffer.edit([(0..0, "1"), (1..1, "2")], None, cx) }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..2), (message_2.id, Role::Assistant, 2..3) ] ); - let message_3 = context.update(cx, |context, cx| { + let message_3 = text_thread.update(cx, |context, cx| { context .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) .unwrap() }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ 
(message_1.id, Role::User, 0..2), (message_2.id, Role::Assistant, 2..4), @@ -103,13 +103,13 @@ fn test_inserting_and_removing_messages(cx: &mut App) { ] ); - let message_4 = context.update(cx, |context, cx| { + let message_4 = text_thread.update(cx, |context, cx| { context .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) .unwrap() }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..2), (message_2.id, Role::Assistant, 2..4), @@ -122,7 +122,7 @@ fn test_inserting_and_removing_messages(cx: &mut App) { buffer.edit([(4..4, "C"), (5..5, "D")], None, cx) }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..2), (message_2.id, Role::Assistant, 2..4), @@ -134,7 +134,7 @@ fn test_inserting_and_removing_messages(cx: &mut App) { // Deleting across message boundaries merges the messages. buffer.update(cx, |buffer, cx| buffer.edit([(1..4, "")], None, cx)); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..3), (message_3.id, Role::User, 3..4), @@ -144,7 +144,7 @@ fn test_inserting_and_removing_messages(cx: &mut App) { // Undoing the deletion should also undo the merge. buffer.update(cx, |buffer, cx| buffer.undo(cx)); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..2), (message_2.id, Role::Assistant, 2..4), @@ -156,7 +156,7 @@ fn test_inserting_and_removing_messages(cx: &mut App) { // Redoing the deletion should also redo the merge. buffer.update(cx, |buffer, cx| buffer.redo(cx)); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..3), (message_3.id, Role::User, 3..4), @@ -164,13 +164,13 @@ fn test_inserting_and_removing_messages(cx: &mut App) { ); // Ensure we can still insert after a merged message. 
- let message_5 = context.update(cx, |context, cx| { + let message_5 = text_thread.update(cx, |context, cx| { context .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx) .unwrap() }); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..3), (message_5.id, Role::System, 3..4), @@ -186,8 +186,8 @@ fn test_message_splitting(cx: &mut App) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread = cx.new(|cx| { + TextThread::local( registry.clone(), None, None, @@ -196,11 +196,11 @@ fn test_message_splitting(cx: &mut App) { cx, ) }); - let buffer = context.read(cx).buffer.clone(); + let buffer = text_thread.read(cx).buffer().clone(); - let message_1 = context.read(cx).message_anchors[0].clone(); + let message_1 = text_thread.read(cx).message_anchors[0].clone(); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![(message_1.id, Role::User, 0..0)] ); @@ -208,26 +208,28 @@ fn test_message_splitting(cx: &mut App) { buffer.edit([(0..0, "aaa\nbbb\nccc\nddd\n")], None, cx) }); - let (_, message_2) = context.update(cx, |context, cx| context.split_message(3..3, cx)); + let (_, message_2) = + text_thread.update(cx, |text_thread, cx| text_thread.split_message(3..3, cx)); let message_2 = message_2.unwrap(); // We recycle newlines in the middle of a split message assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_2.id, Role::User, 4..16), ] ); - let (_, message_3) = context.update(cx, |context, cx| context.split_message(3..3, cx)); + let (_, message_3) = + text_thread.update(cx, |text_thread, cx| text_thread.split_message(3..3, cx)); let message_3 = message_3.unwrap(); // We don't recycle newlines at the end of a split message assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_3.id, Role::User, 4..5), @@ -235,11 +237,12 @@ fn test_message_splitting(cx: &mut App) { ] ); - let (_, message_4) = context.update(cx, |context, cx| context.split_message(9..9, cx)); + let (_, message_4) = + text_thread.update(cx, |text_thread, cx| text_thread.split_message(9..9, cx)); let message_4 = message_4.unwrap(); assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_3.id, Role::User, 4..5), @@ -248,11 +251,12 @@ fn test_message_splitting(cx: &mut App) { ] ); - let (_, message_5) = context.update(cx, |context, cx| context.split_message(9..9, cx)); + let (_, message_5) = + text_thread.update(cx, |text_thread, cx| text_thread.split_message(9..9, cx)); let message_5 = message_5.unwrap(); assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\nddd\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_3.id, Role::User, 4..5), @@ -263,12 +267,12 @@ fn test_message_splitting(cx: &mut App) { ); let (message_6, message_7) = - context.update(cx, |context, cx| context.split_message(14..16, cx)); + text_thread.update(cx, |text_thread, cx| text_thread.split_message(14..16, cx)); let message_6 = message_6.unwrap(); let message_7 = 
message_7.unwrap(); assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\ndd\nd\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_3.id, Role::User, 4..5), @@ -287,8 +291,8 @@ fn test_messages_for_offsets(cx: &mut App) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread = cx.new(|cx| { + TextThread::local( registry, None, None, @@ -297,32 +301,32 @@ fn test_messages_for_offsets(cx: &mut App) { cx, ) }); - let buffer = context.read(cx).buffer.clone(); + let buffer = text_thread.read(cx).buffer().clone(); - let message_1 = context.read(cx).message_anchors[0].clone(); + let message_1 = text_thread.read(cx).message_anchors[0].clone(); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![(message_1.id, Role::User, 0..0)] ); buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx)); - let message_2 = context - .update(cx, |context, cx| { - context.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx) + let message_2 = text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx) }) .unwrap(); buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx)); - let message_3 = context - .update(cx, |context, cx| { - context.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) + let message_3 = text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) }) .unwrap(); buffer.update(cx, |buffer, cx| buffer.edit([(8..8, "ccc")], None, cx)); assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_2.id, Role::User, 4..8), @@ -331,22 +335,22 @@ fn test_messages_for_offsets(cx: &mut App) { ); assert_eq!( - message_ids_for_offsets(&context, &[0, 4, 9], cx), + message_ids_for_offsets(&text_thread, &[0, 4, 9], cx), [message_1.id, message_2.id, message_3.id] ); assert_eq!( - message_ids_for_offsets(&context, &[0, 1, 11], cx), + message_ids_for_offsets(&text_thread, &[0, 1, 11], cx), [message_1.id, message_3.id] ); - let message_4 = context - .update(cx, |context, cx| { - context.insert_message_after(message_3.id, Role::User, MessageStatus::Done, cx) + let message_4 = text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after(message_3.id, Role::User, MessageStatus::Done, cx) }) .unwrap(); assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\n"); assert_eq!( - messages(&context, cx), + messages(&text_thread, cx), vec![ (message_1.id, Role::User, 0..4), (message_2.id, Role::User, 4..8), @@ -355,12 +359,12 @@ fn test_messages_for_offsets(cx: &mut App) { ] ); assert_eq!( - message_ids_for_offsets(&context, &[0, 4, 8, 12], cx), + message_ids_for_offsets(&text_thread, &[0, 4, 8, 12], cx), [message_1.id, message_2.id, message_3.id, message_4.id] ); fn message_ids_for_offsets( - context: &Entity, + context: &Entity, offsets: &[usize], cx: &App, ) -> Vec { @@ -398,8 +402,8 @@ async fn test_slash_commands(cx: &mut TestAppContext) { let registry = Arc::new(LanguageRegistry::test(cx.executor())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread 
= cx.new(|cx| { + TextThread::local( registry.clone(), None, None, @@ -417,19 +421,19 @@ async fn test_slash_commands(cx: &mut TestAppContext) { } let context_ranges = Rc::new(RefCell::new(ContextRanges::default())); - context.update(cx, |_, cx| { - cx.subscribe(&context, { + text_thread.update(cx, |_, cx| { + cx.subscribe(&text_thread, { let context_ranges = context_ranges.clone(); - move |context, _, event, _| { + move |text_thread, _, event, _| { let mut context_ranges = context_ranges.borrow_mut(); match event { - ContextEvent::InvokedSlashCommandChanged { command_id } => { - let command = context.invoked_slash_command(command_id).unwrap(); + TextThreadEvent::InvokedSlashCommandChanged { command_id } => { + let command = text_thread.invoked_slash_command(command_id).unwrap(); context_ranges .command_outputs .insert(*command_id, command.range.clone()); } - ContextEvent::ParsedSlashCommandsUpdated { removed, updated } => { + TextThreadEvent::ParsedSlashCommandsUpdated { removed, updated } => { for range in removed { context_ranges.parsed_commands.remove(range); } @@ -439,7 +443,7 @@ async fn test_slash_commands(cx: &mut TestAppContext) { .insert(command.source_range.clone()); } } - ContextEvent::SlashCommandOutputSectionAdded { section } => { + TextThreadEvent::SlashCommandOutputSectionAdded { section } => { context_ranges.output_sections.insert(section.range.clone()); } _ => {} @@ -449,7 +453,7 @@ async fn test_slash_commands(cx: &mut TestAppContext) { .detach(); }); - let buffer = context.read_with(cx, |context, _| context.buffer.clone()); + let buffer = text_thread.read_with(cx, |text_thread, _| text_thread.buffer().clone()); // Insert a slash command buffer.update(cx, |buffer, cx| { @@ -508,9 +512,9 @@ async fn test_slash_commands(cx: &mut TestAppContext) { ); let (command_output_tx, command_output_rx) = mpsc::unbounded(); - context.update(cx, |context, cx| { - let command_source_range = context.parsed_slash_commands[0].source_range.clone(); - context.insert_command_output( + text_thread.update(cx, |text_thread, cx| { + let command_source_range = text_thread.parsed_slash_commands[0].source_range.clone(); + text_thread.insert_command_output( command_source_range, "file", Task::ready(Ok(command_output_rx.boxed())), @@ -670,8 +674,8 @@ async fn test_serialization(cx: &mut TestAppContext) { let registry = Arc::new(LanguageRegistry::test(cx.executor())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread = cx.new(|cx| { + TextThread::local( registry.clone(), None, None, @@ -680,15 +684,15 @@ async fn test_serialization(cx: &mut TestAppContext) { cx, ) }); - let buffer = context.read_with(cx, |context, _| context.buffer.clone()); - let message_0 = context.read_with(cx, |context, _| context.message_anchors[0].id); - let message_1 = context.update(cx, |context, cx| { - context + let buffer = text_thread.read_with(cx, |text_thread, _| text_thread.buffer().clone()); + let message_0 = text_thread.read_with(cx, |text_thread, _| text_thread.message_anchors[0].id); + let message_1 = text_thread.update(cx, |text_thread, cx| { + text_thread .insert_message_after(message_0, Role::Assistant, MessageStatus::Done, cx) .unwrap() }); - let message_2 = context.update(cx, |context, cx| { - context + let message_2 = text_thread.update(cx, |text_thread, cx| { + text_thread .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx) .unwrap() }); @@ -696,15 +700,15 @@ async fn test_serialization(cx: &mut 
TestAppContext) { buffer.edit([(0..0, "a"), (1..1, "b\nc")], None, cx); buffer.finalize_last_transaction(); }); - let _message_3 = context.update(cx, |context, cx| { - context + let _message_3 = text_thread.update(cx, |text_thread, cx| { + text_thread .insert_message_after(message_2.id, Role::System, MessageStatus::Done, cx) .unwrap() }); buffer.update(cx, |buffer, cx| buffer.undo(cx)); assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "a\nb\nc\n"); assert_eq!( - cx.read(|cx| messages(&context, cx)), + cx.read(|cx| messages(&text_thread, cx)), [ (message_0, Role::User, 0..2), (message_1.id, Role::Assistant, 2..6), @@ -712,9 +716,9 @@ async fn test_serialization(cx: &mut TestAppContext) { ] ); - let serialized_context = context.read_with(cx, |context, cx| context.serialize(cx)); + let serialized_context = text_thread.read_with(cx, |text_thread, cx| text_thread.serialize(cx)); let deserialized_context = cx.new(|cx| { - AssistantContext::deserialize( + TextThread::deserialize( serialized_context, Path::new("").into(), registry.clone(), @@ -726,7 +730,7 @@ async fn test_serialization(cx: &mut TestAppContext) { ) }); let deserialized_buffer = - deserialized_context.read_with(cx, |context, _| context.buffer.clone()); + deserialized_context.read_with(cx, |text_thread, _| text_thread.buffer().clone()); assert_eq!( deserialized_buffer.read_with(cx, |buffer, _| buffer.text()), "a\nb\nc\n" @@ -741,7 +745,7 @@ async fn test_serialization(cx: &mut TestAppContext) { ); } -#[gpui::test(iterations = 100)] +#[gpui::test(iterations = 25)] async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: StdRng) { cx.update(init_test); @@ -762,16 +766,16 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std let registry = Arc::new(LanguageRegistry::test(cx.background_executor.clone())); let network = Arc::new(Mutex::new(Network::new(rng.clone()))); - let mut contexts = Vec::new(); + let mut text_threads = Vec::new(); let num_peers = rng.random_range(min_peers..=max_peers); - let context_id = ContextId::new(); + let context_id = TextThreadId::new(); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); for i in 0..num_peers { let context = cx.new(|cx| { - AssistantContext::new( + TextThread::new( context_id.clone(), - i as ReplicaId, + ReplicaId::new(i as u16), language::Capability::ReadWrite, registry.clone(), prompt_builder.clone(), @@ -786,18 +790,18 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std cx.subscribe(&context, { let network = network.clone(); move |_, event, _| { - if let ContextEvent::Operation(op) = event { + if let TextThreadEvent::Operation(op) = event { network .lock() - .broadcast(i as ReplicaId, vec![op.to_proto()]); + .broadcast(ReplicaId::new(i as u16), vec![op.to_proto()]); } } }) .detach(); }); - contexts.push(context); - network.lock().add_peer(i as ReplicaId); + text_threads.push(context); + network.lock().add_peer(ReplicaId::new(i as u16)); } let mut mutation_count = operations; @@ -806,30 +810,30 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std || !network.lock().is_idle() || network.lock().contains_disconnected_peers() { - let context_index = rng.random_range(0..contexts.len()); - let context = &contexts[context_index]; + let context_index = rng.random_range(0..text_threads.len()); + let text_thread = &text_threads[context_index]; match rng.random_range(0..100) { 0..=29 if mutation_count > 0 => { log::info!("Context {}: edit buffer", context_index); 
- context.update(cx, |context, cx| { - context - .buffer + text_thread.update(cx, |text_thread, cx| { + text_thread + .buffer() .update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx)); }); mutation_count -= 1; } 30..=44 if mutation_count > 0 => { - context.update(cx, |context, cx| { - let range = context.buffer.read(cx).random_byte_range(0, &mut rng); + text_thread.update(cx, |text_thread, cx| { + let range = text_thread.buffer().read(cx).random_byte_range(0, &mut rng); log::info!("Context {}: split message at {:?}", context_index, range); - context.split_message(range, cx); + text_thread.split_message(range, cx); }); mutation_count -= 1; } 45..=59 if mutation_count > 0 => { - context.update(cx, |context, cx| { - if let Some(message) = context.messages(cx).choose(&mut rng) { + text_thread.update(cx, |text_thread, cx| { + if let Some(message) = text_thread.messages(cx).choose(&mut rng) { let role = *[Role::User, Role::Assistant, Role::System] .choose(&mut rng) .unwrap(); @@ -839,13 +843,13 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std message.id, role ); - context.insert_message_after(message.id, role, MessageStatus::Done, cx); + text_thread.insert_message_after(message.id, role, MessageStatus::Done, cx); } }); mutation_count -= 1; } 60..=74 if mutation_count > 0 => { - context.update(cx, |context, cx| { + text_thread.update(cx, |text_thread, cx| { let command_text = "/".to_string() + slash_commands .command_names() @@ -854,7 +858,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std .clone() .as_ref(); - let command_range = context.buffer.update(cx, |buffer, cx| { + let command_range = text_thread.buffer().update(cx, |buffer, cx| { let offset = buffer.random_byte_range(0, &mut rng).start; buffer.edit( [(offset..offset, format!("\n{}\n", command_text))], @@ -908,9 +912,15 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std events.len() ); - let command_range = context.buffer.read(cx).anchor_after(command_range.start) - ..context.buffer.read(cx).anchor_after(command_range.end); - context.insert_command_output( + let command_range = text_thread + .buffer() + .read(cx) + .anchor_after(command_range.start) + ..text_thread + .buffer() + .read(cx) + .anchor_after(command_range.end); + text_thread.insert_command_output( command_range, "/command", Task::ready(Ok(stream::iter(events).boxed())), @@ -922,8 +932,8 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std mutation_count -= 1; } 75..=84 if mutation_count > 0 => { - context.update(cx, |context, cx| { - if let Some(message) = context.messages(cx).choose(&mut rng) { + text_thread.update(cx, |text_thread, cx| { + if let Some(message) = text_thread.messages(cx).choose(&mut rng) { let new_status = match rng.random_range(0..3) { 0 => MessageStatus::Done, 1 => MessageStatus::Pending, @@ -935,7 +945,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std message.id, new_status ); - context.update_metadata(message.id, cx, |metadata| { + text_thread.update_metadata(message.id, cx, |metadata| { metadata.status = new_status; }); } @@ -943,13 +953,13 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std mutation_count -= 1; } _ => { - let replica_id = context_index as ReplicaId; + let replica_id = ReplicaId::new(context_index as u16); if network.lock().is_disconnected(replica_id) { - network.lock().reconnect_peer(replica_id, 0); + 
network.lock().reconnect_peer(replica_id, ReplicaId::new(0)); let (ops_to_send, ops_to_receive) = cx.read(|cx| { - let host_context = &contexts[0].read(cx); - let guest_context = context.read(cx); + let host_context = &text_threads[0].read(cx); + let guest_context = text_thread.read(cx); ( guest_context.serialize_ops(&host_context.version(cx), cx), host_context.serialize_ops(&guest_context.version(cx), cx), @@ -959,7 +969,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std let ops_to_receive = ops_to_receive .await .into_iter() - .map(ContextOperation::from_proto) + .map(TextThreadOperation::from_proto) .collect::>>() .unwrap(); log::info!( @@ -970,8 +980,10 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std ); network.lock().broadcast(replica_id, ops_to_send); - context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); - } else if rng.random_bool(0.1) && replica_id != 0 { + text_thread.update(cx, |text_thread, cx| { + text_thread.apply_ops(ops_to_receive, cx) + }); + } else if rng.random_bool(0.1) && replica_id != ReplicaId::new(0) { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); } else if network.lock().has_unreceived(replica_id) { @@ -979,43 +991,43 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std let ops = network.lock().receive(replica_id); let ops = ops .into_iter() - .map(ContextOperation::from_proto) + .map(TextThreadOperation::from_proto) .collect::>>() .unwrap(); - context.update(cx, |context, cx| context.apply_ops(ops, cx)); + text_thread.update(cx, |text_thread, cx| text_thread.apply_ops(ops, cx)); } } } } cx.read(|cx| { - let first_context = contexts[0].read(cx); - for context in &contexts[1..] { - let context = context.read(cx); - assert!(context.pending_ops.is_empty(), "pending ops: {:?}", context.pending_ops); + let first_context = text_threads[0].read(cx); + for text_thread in &text_threads[1..] 
{ + let text_thread = text_thread.read(cx); + assert!(text_thread.pending_ops.is_empty(), "pending ops: {:?}", text_thread.pending_ops); assert_eq!( - context.buffer.read(cx).text(), - first_context.buffer.read(cx).text(), - "Context {} text != Context 0 text", - context.buffer.read(cx).replica_id() + text_thread.buffer().read(cx).text(), + first_context.buffer().read(cx).text(), + "Context {:?} text != Context 0 text", + text_thread.buffer().read(cx).replica_id() ); assert_eq!( - context.message_anchors, + text_thread.message_anchors, first_context.message_anchors, - "Context {} messages != Context 0 messages", - context.buffer.read(cx).replica_id() + "Context {:?} messages != Context 0 messages", + text_thread.buffer().read(cx).replica_id() ); assert_eq!( - context.messages_metadata, + text_thread.messages_metadata, first_context.messages_metadata, - "Context {} message metadata != Context 0 message metadata", - context.buffer.read(cx).replica_id() + "Context {:?} message metadata != Context 0 message metadata", + text_thread.buffer().read(cx).replica_id() ); assert_eq!( - context.slash_command_output_sections, + text_thread.slash_command_output_sections, first_context.slash_command_output_sections, - "Context {} slash command output sections != Context 0 slash command output sections", - context.buffer.read(cx).replica_id() + "Context {:?} slash command output sections != Context 0 slash command output sections", + text_thread.buffer().read(cx).replica_id() ); } }); @@ -1027,8 +1039,8 @@ fn test_mark_cache_anchors(cx: &mut App) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context = cx.new(|cx| { - AssistantContext::local( + let text_thread = cx.new(|cx| { + TextThread::local( registry, None, None, @@ -1037,7 +1049,7 @@ fn test_mark_cache_anchors(cx: &mut App) { cx, ) }); - let buffer = context.read(cx).buffer.clone(); + let buffer = text_thread.read(cx).buffer().clone(); // Create a test cache configuration let cache_configuration = &Some(LanguageModelCacheConfiguration { @@ -1046,14 +1058,14 @@ fn test_mark_cache_anchors(cx: &mut App) { min_total_token: 10, }); - let message_1 = context.read(cx).message_anchors[0].clone(); + let message_1 = text_thread.read(cx).message_anchors[0].clone(); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, false, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, false, cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .filter(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .count(), @@ -1062,41 +1074,41 @@ fn test_mark_cache_anchors(cx: &mut App) { ); buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx)); - let message_2 = context - .update(cx, |context, cx| { - context.insert_message_after(message_1.id, Role::User, MessageStatus::Pending, cx) + let message_2 = text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after(message_1.id, Role::User, MessageStatus::Pending, cx) }) .unwrap(); buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbbbbbb")], None, cx)); - let message_3 = context - .update(cx, |context, cx| { - context.insert_message_after(message_2.id, Role::User, MessageStatus::Pending, cx) + let message_3 = text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after(message_2.id, Role::User, MessageStatus::Pending, cx) }) 
.unwrap(); buffer.update(cx, |buffer, cx| buffer.edit([(12..12, "cccccc")], None, cx)); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, false, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, false, cx) }); assert_eq!(buffer.read(cx).text(), "aaa\nbbbbbbb\ncccccc"); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .filter(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .count(), 0, "Messages should not be marked for cache before going over the token minimum." ); - context.update(cx, |context, _| { - context.token_count = Some(20); + text_thread.update(cx, |text_thread, _| { + text_thread.token_count = Some(20); }); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, true, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, true, cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .map(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .collect::>(), @@ -1104,28 +1116,33 @@ fn test_mark_cache_anchors(cx: &mut App) { "Last message should not be an anchor on speculative request." ); - context - .update(cx, |context, cx| { - context.insert_message_after(message_3.id, Role::Assistant, MessageStatus::Pending, cx) + text_thread + .update(cx, |text_thread, cx| { + text_thread.insert_message_after( + message_3.id, + Role::Assistant, + MessageStatus::Pending, + cx, + ) }) .unwrap(); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, false, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, false, cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .map(|(_, cache)| cache.as_ref().is_some_and(|cache| cache.is_anchor)) .collect::>(), vec![false, true, true, false], "Most recent message should also be cached if not a speculative request." ); - context.update(cx, |context, cx| { - context.update_cache_status_for_completion(cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.update_cache_status_for_completion(cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .map(|(_, cache)| cache .as_ref() @@ -1141,11 +1158,11 @@ fn test_mark_cache_anchors(cx: &mut App) { ); buffer.update(cx, |buffer, cx| buffer.edit([(14..14, "d")], None, cx)); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, false, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, false, cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .map(|(_, cache)| cache .as_ref() @@ -1160,11 +1177,11 @@ fn test_mark_cache_anchors(cx: &mut App) { "Modifying a message should invalidate it's cache but leave previous messages." 
); buffer.update(cx, |buffer, cx| buffer.edit([(2..2, "e")], None, cx)); - context.update(cx, |context, cx| { - context.mark_cache_anchors(cache_configuration, false, cx) + text_thread.update(cx, |text_thread, cx| { + text_thread.mark_cache_anchors(cache_configuration, false, cx) }); assert_eq!( - messages_cache(&context, cx) + messages_cache(&text_thread, cx) .iter() .map(|(_, cache)| cache .as_ref() @@ -1182,31 +1199,36 @@ fn test_mark_cache_anchors(cx: &mut App) { #[gpui::test] async fn test_summarization(cx: &mut TestAppContext) { - let (context, fake_model) = setup_context_editor_with_fake_model(cx); + let (text_thread, fake_model) = setup_context_editor_with_fake_model(cx); // Initial state should be pending - context.read_with(cx, |context, _| { - assert!(matches!(context.summary(), ContextSummary::Pending)); - assert_eq!(context.summary().or_default(), ContextSummary::DEFAULT); + text_thread.read_with(cx, |text_thread, _| { + assert!(matches!(text_thread.summary(), TextThreadSummary::Pending)); + assert_eq!( + text_thread.summary().or_default(), + TextThreadSummary::DEFAULT + ); }); - let message_1 = context.read_with(cx, |context, _cx| context.message_anchors[0].clone()); - context.update(cx, |context, cx| { + let message_1 = text_thread.read_with(cx, |text_thread, _cx| { + text_thread.message_anchors[0].clone() + }); + text_thread.update(cx, |context, cx| { context .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx) .unwrap(); }); // Send a message - context.update(cx, |context, cx| { - context.assist(cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.assist(cx); }); simulate_successful_response(&fake_model, cx); // Should start generating summary when there are >= 2 messages - context.read_with(cx, |context, _| { - assert!(!context.summary().content().unwrap().done); + text_thread.read_with(cx, |text_thread, _| { + assert!(!text_thread.summary().content().unwrap().done); }); cx.run_until_parked(); @@ -1216,61 +1238,61 @@ async fn test_summarization(cx: &mut TestAppContext) { cx.run_until_parked(); // Summary should be set - context.read_with(cx, |context, _| { - assert_eq!(context.summary().or_default(), "Brief Introduction"); + text_thread.read_with(cx, |text_thread, _| { + assert_eq!(text_thread.summary().or_default(), "Brief Introduction"); }); // We should be able to manually set a summary - context.update(cx, |context, cx| { - context.set_custom_summary("Brief Intro".into(), cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.set_custom_summary("Brief Intro".into(), cx); }); - context.read_with(cx, |context, _| { - assert_eq!(context.summary().or_default(), "Brief Intro"); + text_thread.read_with(cx, |text_thread, _| { + assert_eq!(text_thread.summary().or_default(), "Brief Intro"); }); } #[gpui::test] async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) { - let (context, fake_model) = setup_context_editor_with_fake_model(cx); + let (text_thread, fake_model) = setup_context_editor_with_fake_model(cx); - test_summarize_error(&fake_model, &context, cx); + test_summarize_error(&fake_model, &text_thread, cx); // Now we should be able to set a summary - context.update(cx, |context, cx| { - context.set_custom_summary("Brief Intro".into(), cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.set_custom_summary("Brief Intro".into(), cx); }); - context.read_with(cx, |context, _| { - assert_eq!(context.summary().or_default(), "Brief Intro"); + text_thread.read_with(cx, |text_thread, _| { + 
assert_eq!(text_thread.summary().or_default(), "Brief Intro"); }); } #[gpui::test] async fn test_thread_summary_error_retry(cx: &mut TestAppContext) { - let (context, fake_model) = setup_context_editor_with_fake_model(cx); + let (text_thread, fake_model) = setup_context_editor_with_fake_model(cx); - test_summarize_error(&fake_model, &context, cx); + test_summarize_error(&fake_model, &text_thread, cx); // Sending another message should not trigger another summarize request - context.update(cx, |context, cx| { - context.assist(cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.assist(cx); }); simulate_successful_response(&fake_model, cx); - context.read_with(cx, |context, _| { + text_thread.read_with(cx, |text_thread, _| { // State is still Error, not Generating - assert!(matches!(context.summary(), ContextSummary::Error)); + assert!(matches!(text_thread.summary(), TextThreadSummary::Error)); }); // But the summarize request can be invoked manually - context.update(cx, |context, cx| { - context.summarize(true, cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.summarize(true, cx); }); - context.read_with(cx, |context, _| { - assert!(!context.summary().content().unwrap().done); + text_thread.read_with(cx, |text_thread, _| { + assert!(!text_thread.summary().content().unwrap().done); }); cx.run_until_parked(); @@ -1278,32 +1300,34 @@ async fn test_thread_summary_error_retry(cx: &mut TestAppContext) { fake_model.end_last_completion_stream(); cx.run_until_parked(); - context.read_with(cx, |context, _| { - assert_eq!(context.summary().or_default(), "A successful summary"); + text_thread.read_with(cx, |text_thread, _| { + assert_eq!(text_thread.summary().or_default(), "A successful summary"); }); } fn test_summarize_error( model: &Arc, - context: &Entity, + text_thread: &Entity, cx: &mut TestAppContext, ) { - let message_1 = context.read_with(cx, |context, _cx| context.message_anchors[0].clone()); - context.update(cx, |context, cx| { - context + let message_1 = text_thread.read_with(cx, |text_thread, _cx| { + text_thread.message_anchors[0].clone() + }); + text_thread.update(cx, |text_thread, cx| { + text_thread .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx) .unwrap(); }); // Send a message - context.update(cx, |context, cx| { - context.assist(cx); + text_thread.update(cx, |text_thread, cx| { + text_thread.assist(cx); }); simulate_successful_response(model, cx); - context.read_with(cx, |context, _| { - assert!(!context.summary().content().unwrap().done); + text_thread.read_with(cx, |text_thread, _| { + assert!(!text_thread.summary().content().unwrap().done); }); // Simulate summary request ending @@ -1312,15 +1336,18 @@ fn test_summarize_error( cx.run_until_parked(); // State is set to Error and default message - context.read_with(cx, |context, _| { - assert_eq!(*context.summary(), ContextSummary::Error); - assert_eq!(context.summary().or_default(), ContextSummary::DEFAULT); + text_thread.read_with(cx, |text_thread, _| { + assert_eq!(*text_thread.summary(), TextThreadSummary::Error); + assert_eq!( + text_thread.summary().or_default(), + TextThreadSummary::DEFAULT + ); }); } fn setup_context_editor_with_fake_model( cx: &mut TestAppContext, -) -> (Entity, Arc) { +) -> (Entity, Arc) { let registry = Arc::new(LanguageRegistry::test(cx.executor())); let fake_provider = Arc::new(FakeLanguageModelProvider::default()); @@ -1340,7 +1367,7 @@ fn setup_context_editor_with_fake_model( let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let 
context = cx.new(|cx| { - AssistantContext::local( + TextThread::local( registry, None, None, @@ -1360,7 +1387,7 @@ fn simulate_successful_response(fake_model: &FakeLanguageModel, cx: &mut TestApp cx.run_until_parked(); } -fn messages(context: &Entity, cx: &App) -> Vec<(MessageId, Role, Range)> { +fn messages(context: &Entity, cx: &App) -> Vec<(MessageId, Role, Range)> { context .read(cx) .messages(cx) @@ -1369,7 +1396,7 @@ fn messages(context: &Entity, cx: &App) -> Vec<(MessageId, Rol } fn messages_cache( - context: &Entity, + context: &Entity, cx: &App, ) -> Vec<(MessageId, Option)> { context diff --git a/crates/assistant_context/src/assistant_context.rs b/crates/assistant_text_thread/src/text_thread.rs similarity index 92% rename from crates/assistant_context/src/assistant_context.rs rename to crates/assistant_text_thread/src/text_thread.rs index 6c06cc2c8ec7f845b1e6d49631a1bea6755a62d0..9ad383cdfd43eed236268349e2ff97c34a0178c0 100644 --- a/crates/assistant_context/src/assistant_context.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -1,7 +1,3 @@ -#[cfg(test)] -mod assistant_context_tests; -mod context_store; - use agent_settings::{AgentSettings, SUMMARIZE_THREAD_PROMPT}; use anyhow::{Context as _, Result, bail}; use assistant_slash_command::{ @@ -9,7 +5,7 @@ use assistant_slash_command::{ SlashCommandResult, SlashCommandWorkingSet, }; use assistant_slash_commands::FileCommandMetadata; -use client::{self, Client, ModelRequestUsage, RequestUsage, proto, telemetry::Telemetry}; +use client::{self, ModelRequestUsage, RequestUsage, proto, telemetry::Telemetry}; use clock::ReplicaId; use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit}; use collections::{HashMap, HashSet}; @@ -27,7 +23,7 @@ use language_model::{ report_assistant_event, }; use open_ai::Model as OpenAiModel; -use paths::contexts_dir; +use paths::text_threads_dir; use project::Project; use prompt_store::PromptBuilder; use serde::{Deserialize, Serialize}; @@ -48,16 +44,10 @@ use ui::IconName; use util::{ResultExt, TryFutureExt, post_inc}; use uuid::Uuid; -pub use crate::context_store::*; - -pub fn init(client: Arc, _: &mut App) { - context_store::init(&client.into()); -} - #[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)] -pub struct ContextId(String); +pub struct TextThreadId(String); -impl ContextId { +impl TextThreadId { pub fn new() -> Self { Self(Uuid::new_v4().to_string()) } @@ -130,7 +120,7 @@ impl MessageStatus { } #[derive(Clone, Debug)] -pub enum ContextOperation { +pub enum TextThreadOperation { InsertMessage { anchor: MessageAnchor, metadata: MessageMetadata, @@ -142,7 +132,7 @@ pub enum ContextOperation { version: clock::Global, }, UpdateSummary { - summary: ContextSummaryContent, + summary: TextThreadSummaryContent, version: clock::Global, }, SlashCommandStarted { @@ -170,7 +160,7 @@ pub enum ContextOperation { BufferOperation(language::Operation), } -impl ContextOperation { +impl TextThreadOperation { pub fn from_proto(op: proto::ContextOperation) -> Result { match op.variant.context("invalid variant")? 
{ proto::context_operation::Variant::InsertMessage(insert) => { @@ -212,7 +202,7 @@ impl ContextOperation { version: language::proto::deserialize_version(&update.version), }), proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary { - summary: ContextSummaryContent { + summary: TextThreadSummaryContent { text: update.summary, done: update.done, timestamp: language::proto::deserialize_timestamp( @@ -453,7 +443,7 @@ impl ContextOperation { } #[derive(Debug, Clone)] -pub enum ContextEvent { +pub enum TextThreadEvent { ShowAssistError(SharedString), ShowPaymentRequiredError, MessagesEdited, @@ -476,24 +466,24 @@ pub enum ContextEvent { SlashCommandOutputSectionAdded { section: SlashCommandOutputSection, }, - Operation(ContextOperation), + Operation(TextThreadOperation), } #[derive(Clone, Debug, Eq, PartialEq)] -pub enum ContextSummary { +pub enum TextThreadSummary { Pending, - Content(ContextSummaryContent), + Content(TextThreadSummaryContent), Error, } -#[derive(Default, Clone, Debug, Eq, PartialEq)] -pub struct ContextSummaryContent { +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct TextThreadSummaryContent { pub text: String, pub done: bool, pub timestamp: clock::Lamport, } -impl ContextSummary { +impl TextThreadSummary { pub const DEFAULT: &str = "New Text Thread"; pub fn or_default(&self) -> SharedString { @@ -505,44 +495,48 @@ impl ContextSummary { .map_or_else(|| message.into(), |content| content.text.clone().into()) } - pub fn content(&self) -> Option<&ContextSummaryContent> { + pub fn content(&self) -> Option<&TextThreadSummaryContent> { match self { - ContextSummary::Content(content) => Some(content), - ContextSummary::Pending | ContextSummary::Error => None, + TextThreadSummary::Content(content) => Some(content), + TextThreadSummary::Pending | TextThreadSummary::Error => None, } } - fn content_as_mut(&mut self) -> Option<&mut ContextSummaryContent> { + fn content_as_mut(&mut self) -> Option<&mut TextThreadSummaryContent> { match self { - ContextSummary::Content(content) => Some(content), - ContextSummary::Pending | ContextSummary::Error => None, + TextThreadSummary::Content(content) => Some(content), + TextThreadSummary::Pending | TextThreadSummary::Error => None, } } - fn content_or_set_empty(&mut self) -> &mut ContextSummaryContent { + fn content_or_set_empty(&mut self) -> &mut TextThreadSummaryContent { match self { - ContextSummary::Content(content) => content, - ContextSummary::Pending | ContextSummary::Error => { - let content = ContextSummaryContent::default(); - *self = ContextSummary::Content(content); + TextThreadSummary::Content(content) => content, + TextThreadSummary::Pending | TextThreadSummary::Error => { + let content = TextThreadSummaryContent { + text: "".to_string(), + done: false, + timestamp: clock::Lamport::MIN, + }; + *self = TextThreadSummary::Content(content); self.content_as_mut().unwrap() } } } pub fn is_pending(&self) -> bool { - matches!(self, ContextSummary::Pending) + matches!(self, TextThreadSummary::Pending) } fn timestamp(&self) -> Option { match self { - ContextSummary::Content(content) => Some(content.timestamp), - ContextSummary::Pending | ContextSummary::Error => None, + TextThreadSummary::Content(content) => Some(content.timestamp), + TextThreadSummary::Pending | TextThreadSummary::Error => None, } } } -impl PartialOrd for ContextSummary { +impl PartialOrd for TextThreadSummary { fn partial_cmp(&self, other: &Self) -> Option { self.timestamp().partial_cmp(&other.timestamp()) } @@ -664,27 +658,27 @@ struct 
PendingCompletion { #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)] pub struct InvokedSlashCommandId(clock::Lamport); -pub struct AssistantContext { - id: ContextId, +pub struct TextThread { + id: TextThreadId, timestamp: clock::Lamport, version: clock::Global, - pending_ops: Vec, - operations: Vec, + pub(crate) pending_ops: Vec, + operations: Vec, buffer: Entity, - parsed_slash_commands: Vec, + pub(crate) parsed_slash_commands: Vec, invoked_slash_commands: HashMap, edits_since_last_parse: language::Subscription, slash_commands: Arc, - slash_command_output_sections: Vec>, + pub(crate) slash_command_output_sections: Vec>, thought_process_output_sections: Vec>, - message_anchors: Vec, + pub(crate) message_anchors: Vec, contents: Vec, - messages_metadata: HashMap, - summary: ContextSummary, + pub(crate) messages_metadata: HashMap, + summary: TextThreadSummary, summary_task: Task>, completion_count: usize, pending_completions: Vec, - token_count: Option, + pub(crate) token_count: Option, pending_token_count: Task>, pending_save: Task>, pending_cache_warming_task: Task>, @@ -707,9 +701,9 @@ impl ContextAnnotation for ParsedSlashCommand { } } -impl EventEmitter for AssistantContext {} +impl EventEmitter for TextThread {} -impl AssistantContext { +impl TextThread { pub fn local( language_registry: Arc, project: Option>, @@ -719,7 +713,7 @@ impl AssistantContext { cx: &mut Context, ) -> Self { Self::new( - ContextId::new(), + TextThreadId::new(), ReplicaId::default(), language::Capability::ReadWrite, language_registry, @@ -740,7 +734,7 @@ impl AssistantContext { } pub fn new( - id: ContextId, + id: TextThreadId, replica_id: ReplicaId, capability: language::Capability, language_registry: Arc, @@ -776,7 +770,7 @@ impl AssistantContext { slash_command_output_sections: Vec::new(), thought_process_output_sections: Vec::new(), edits_since_last_parse: edits_since_last_slash_command_parse, - summary: ContextSummary::Pending, + summary: TextThreadSummary::Pending, summary_task: Task::ready(None), completion_count: Default::default(), pending_completions: Default::default(), @@ -796,7 +790,7 @@ impl AssistantContext { }; let first_message_id = MessageId(clock::Lamport { - replica_id: 0, + replica_id: ReplicaId::LOCAL, value: 0, }); let message = MessageAnchor { @@ -819,12 +813,12 @@ impl AssistantContext { this } - pub(crate) fn serialize(&self, cx: &App) -> SavedContext { + pub(crate) fn serialize(&self, cx: &App) -> SavedTextThread { let buffer = self.buffer.read(cx); - SavedContext { + SavedTextThread { id: Some(self.id.clone()), zed: "context".into(), - version: SavedContext::VERSION.into(), + version: SavedTextThread::VERSION.into(), text: buffer.text(), messages: self .messages(cx) @@ -872,7 +866,7 @@ impl AssistantContext { } pub fn deserialize( - saved_context: SavedContext, + saved_context: SavedTextThread, path: Arc, language_registry: Arc, prompt_builder: Arc, @@ -881,7 +875,7 @@ impl AssistantContext { telemetry: Option>, cx: &mut Context, ) -> Self { - let id = saved_context.id.clone().unwrap_or_else(ContextId::new); + let id = saved_context.id.clone().unwrap_or_else(TextThreadId::new); let mut this = Self::new( id, ReplicaId::default(), @@ -902,7 +896,7 @@ impl AssistantContext { this } - pub fn id(&self) -> &ContextId { + pub fn id(&self) -> &TextThreadId { &self.id } @@ -910,9 +904,9 @@ impl AssistantContext { self.timestamp.replica_id } - pub fn version(&self, cx: &App) -> ContextVersion { - ContextVersion { - context: self.version.clone(), + pub fn version(&self, cx: &App) -> 
TextThreadVersion { + TextThreadVersion { + text_thread: self.version.clone(), buffer: self.buffer.read(cx).version(), } } @@ -934,7 +928,7 @@ impl AssistantContext { pub fn serialize_ops( &self, - since: &ContextVersion, + since: &TextThreadVersion, cx: &App, ) -> Task> { let buffer_ops = self @@ -945,7 +939,7 @@ impl AssistantContext { let mut context_ops = self .operations .iter() - .filter(|op| !since.context.observed(op.timestamp())) + .filter(|op| !since.text_thread.observed(op.timestamp())) .cloned() .collect::>(); context_ops.extend(self.pending_ops.iter().cloned()); @@ -969,13 +963,13 @@ impl AssistantContext { pub fn apply_ops( &mut self, - ops: impl IntoIterator, + ops: impl IntoIterator, cx: &mut Context, ) { let mut buffer_ops = Vec::new(); for op in ops { match op { - ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op), + TextThreadOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op), op @ _ => self.pending_ops.push(op), } } @@ -984,7 +978,7 @@ impl AssistantContext { self.flush_ops(cx); } - fn flush_ops(&mut self, cx: &mut Context) { + fn flush_ops(&mut self, cx: &mut Context) { let mut changed_messages = HashSet::default(); let mut summary_generated = false; @@ -997,7 +991,7 @@ impl AssistantContext { let timestamp = op.timestamp(); match op.clone() { - ContextOperation::InsertMessage { + TextThreadOperation::InsertMessage { anchor, metadata, .. } => { if self.messages_metadata.contains_key(&anchor.id) { @@ -1007,7 +1001,7 @@ impl AssistantContext { self.insert_message(anchor, metadata, cx); } } - ContextOperation::UpdateMessage { + TextThreadOperation::UpdateMessage { message_id, metadata: new_metadata, .. @@ -1018,7 +1012,7 @@ impl AssistantContext { changed_messages.insert(message_id); } } - ContextOperation::UpdateSummary { + TextThreadOperation::UpdateSummary { summary: new_summary, .. } => { @@ -1027,11 +1021,11 @@ impl AssistantContext { .timestamp() .is_none_or(|current_timestamp| new_summary.timestamp > current_timestamp) { - self.summary = ContextSummary::Content(new_summary); + self.summary = TextThreadSummary::Content(new_summary); summary_generated = true; } } - ContextOperation::SlashCommandStarted { + TextThreadOperation::SlashCommandStarted { id, output_range, name, @@ -1048,9 +1042,9 @@ impl AssistantContext { timestamp: id.0, }, ); - cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id: id }); + cx.emit(TextThreadEvent::InvokedSlashCommandChanged { command_id: id }); } - ContextOperation::SlashCommandOutputSectionAdded { section, .. } => { + TextThreadOperation::SlashCommandOutputSectionAdded { section, .. } => { let buffer = self.buffer.read(cx); if let Err(ix) = self .slash_command_output_sections @@ -1058,10 +1052,10 @@ impl AssistantContext { { self.slash_command_output_sections .insert(ix, section.clone()); - cx.emit(ContextEvent::SlashCommandOutputSectionAdded { section }); + cx.emit(TextThreadEvent::SlashCommandOutputSectionAdded { section }); } } - ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => { + TextThreadOperation::ThoughtProcessOutputSectionAdded { section, .. 
} => { let buffer = self.buffer.read(cx); if let Err(ix) = self .thought_process_output_sections @@ -1071,7 +1065,7 @@ impl AssistantContext { .insert(ix, section.clone()); } } - ContextOperation::SlashCommandFinished { + TextThreadOperation::SlashCommandFinished { id, error_message, timestamp, @@ -1090,10 +1084,10 @@ impl AssistantContext { slash_command.status = InvokedSlashCommandStatus::Finished; } } - cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id: id }); + cx.emit(TextThreadEvent::InvokedSlashCommandChanged { command_id: id }); } } - ContextOperation::BufferOperation(_) => unreachable!(), + TextThreadOperation::BufferOperation(_) => unreachable!(), } self.version.observe(timestamp); @@ -1103,43 +1097,43 @@ impl AssistantContext { if !changed_messages.is_empty() { self.message_roles_updated(changed_messages, cx); - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); cx.notify(); } if summary_generated { - cx.emit(ContextEvent::SummaryChanged); - cx.emit(ContextEvent::SummaryGenerated); + cx.emit(TextThreadEvent::SummaryChanged); + cx.emit(TextThreadEvent::SummaryGenerated); cx.notify(); } } - fn can_apply_op(&self, op: &ContextOperation, cx: &App) -> bool { + fn can_apply_op(&self, op: &TextThreadOperation, cx: &App) -> bool { if !self.version.observed_all(op.version()) { return false; } match op { - ContextOperation::InsertMessage { anchor, .. } => self + TextThreadOperation::InsertMessage { anchor, .. } => self .buffer .read(cx) .version .observed(anchor.start.timestamp), - ContextOperation::UpdateMessage { message_id, .. } => { + TextThreadOperation::UpdateMessage { message_id, .. } => { self.messages_metadata.contains_key(message_id) } - ContextOperation::UpdateSummary { .. } => true, - ContextOperation::SlashCommandStarted { output_range, .. } => { + TextThreadOperation::UpdateSummary { .. } => true, + TextThreadOperation::SlashCommandStarted { output_range, .. } => { self.has_received_operations_for_anchor_range(output_range.clone(), cx) } - ContextOperation::SlashCommandOutputSectionAdded { section, .. } => { + TextThreadOperation::SlashCommandOutputSectionAdded { section, .. } => { self.has_received_operations_for_anchor_range(section.range.clone(), cx) } - ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => { + TextThreadOperation::ThoughtProcessOutputSectionAdded { section, .. } => { self.has_received_operations_for_anchor_range(section.range.clone(), cx) } - ContextOperation::SlashCommandFinished { .. } => true, - ContextOperation::BufferOperation(_) => { + TextThreadOperation::SlashCommandFinished { .. 
} => true, + TextThreadOperation::BufferOperation(_) => { panic!("buffer operations should always be applied") } } @@ -1160,9 +1154,9 @@ impl AssistantContext { observed_start && observed_end } - fn push_op(&mut self, op: ContextOperation, cx: &mut Context) { + fn push_op(&mut self, op: TextThreadOperation, cx: &mut Context) { self.operations.push(op.clone()); - cx.emit(ContextEvent::Operation(op)); + cx.emit(TextThreadEvent::Operation(op)); } pub fn buffer(&self) -> &Entity { @@ -1185,7 +1179,7 @@ impl AssistantContext { self.path.as_ref() } - pub fn summary(&self) -> &ContextSummary { + pub fn summary(&self) -> &TextThreadSummary { &self.summary } @@ -1246,13 +1240,13 @@ impl AssistantContext { language::BufferEvent::Operation { operation, is_local: true, - } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation( - operation.clone(), - ))), + } => cx.emit(TextThreadEvent::Operation( + TextThreadOperation::BufferOperation(operation.clone()), + )), language::BufferEvent::Edited => { self.count_remaining_tokens(cx); self.reparse(cx); - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); } _ => {} } @@ -1518,7 +1512,7 @@ impl AssistantContext { if !updated_parsed_slash_commands.is_empty() || !removed_parsed_slash_command_ranges.is_empty() { - cx.emit(ContextEvent::ParsedSlashCommandsUpdated { + cx.emit(TextThreadEvent::ParsedSlashCommandsUpdated { removed: removed_parsed_slash_command_ranges, updated: updated_parsed_slash_commands, }); @@ -1592,7 +1586,7 @@ impl AssistantContext { && (!command.range.start.is_valid(buffer) || !command.range.end.is_valid(buffer)) { command.status = InvokedSlashCommandStatus::Finished; - cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id }); + cx.emit(TextThreadEvent::InvokedSlashCommandChanged { command_id }); invalidated_command_ids.push(command_id); } } @@ -1601,7 +1595,7 @@ impl AssistantContext { let version = self.version.clone(); let timestamp = self.next_timestamp(); self.push_op( - ContextOperation::SlashCommandFinished { + TextThreadOperation::SlashCommandFinished { id: command_id, timestamp, error_message: None, @@ -1906,9 +1900,9 @@ impl AssistantContext { } } - cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id }); + cx.emit(TextThreadEvent::InvokedSlashCommandChanged { command_id }); this.push_op( - ContextOperation::SlashCommandFinished { + TextThreadOperation::SlashCommandFinished { id: command_id, timestamp, error_message, @@ -1931,9 +1925,9 @@ impl AssistantContext { timestamp: command_id.0, }, ); - cx.emit(ContextEvent::InvokedSlashCommandChanged { command_id }); + cx.emit(TextThreadEvent::InvokedSlashCommandChanged { command_id }); self.push_op( - ContextOperation::SlashCommandStarted { + TextThreadOperation::SlashCommandStarted { id: command_id, output_range: command_range, name: name.to_string(), @@ -1957,13 +1951,13 @@ impl AssistantContext { }; self.slash_command_output_sections .insert(insertion_ix, section.clone()); - cx.emit(ContextEvent::SlashCommandOutputSectionAdded { + cx.emit(TextThreadEvent::SlashCommandOutputSectionAdded { section: section.clone(), }); let version = self.version.clone(); let timestamp = self.next_timestamp(); self.push_op( - ContextOperation::SlashCommandOutputSectionAdded { + TextThreadOperation::SlashCommandOutputSectionAdded { timestamp, section, version, @@ -1992,7 +1986,7 @@ impl AssistantContext { let version = self.version.clone(); let timestamp = self.next_timestamp(); self.push_op( - ContextOperation::ThoughtProcessOutputSectionAdded { 
+ TextThreadOperation::ThoughtProcessOutputSectionAdded { timestamp, section, version, @@ -2111,7 +2105,7 @@ impl AssistantContext { let end = buffer .anchor_before(message_old_end_offset + chunk_len); context_event = Some( - ContextEvent::StartedThoughtProcess(start..end), + TextThreadEvent::StartedThoughtProcess(start..end), ); } else { // This ensures that all the thinking chunks are inserted inside the thinking tag @@ -2129,7 +2123,7 @@ impl AssistantContext { if let Some(start) = thought_process_stack.pop() { let end = buffer.anchor_before(message_old_end_offset); context_event = - Some(ContextEvent::EndedThoughtProcess(end)); + Some(TextThreadEvent::EndedThoughtProcess(end)); thought_process_output_section = Some(ThoughtProcessOutputSection { range: start..end, @@ -2159,7 +2153,7 @@ impl AssistantContext { cx.emit(context_event); } - cx.emit(ContextEvent::StreamedCompletion); + cx.emit(TextThreadEvent::StreamedCompletion); Some(()) })?; @@ -2180,7 +2174,7 @@ impl AssistantContext { this.update(cx, |this, cx| { let error_message = if let Some(error) = result.as_ref().err() { if error.is::() { - cx.emit(ContextEvent::ShowPaymentRequiredError); + cx.emit(TextThreadEvent::ShowPaymentRequiredError); this.update_metadata(assistant_message_id, cx, |metadata| { metadata.status = MessageStatus::Canceled; }); @@ -2191,7 +2185,7 @@ impl AssistantContext { .map(|err| err.to_string()) .collect::>() .join("\n"); - cx.emit(ContextEvent::ShowAssistError(SharedString::from( + cx.emit(TextThreadEvent::ShowAssistError(SharedString::from( error_message.clone(), ))); this.update_metadata(assistant_message_id, cx, |metadata| { @@ -2408,13 +2402,13 @@ impl AssistantContext { if let Some(metadata) = self.messages_metadata.get_mut(&id) { f(metadata); metadata.timestamp = timestamp; - let operation = ContextOperation::UpdateMessage { + let operation = TextThreadOperation::UpdateMessage { message_id: id, metadata: metadata.clone(), version, }; self.push_op(operation, cx); - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); cx.notify(); } } @@ -2478,7 +2472,7 @@ impl AssistantContext { }; self.insert_message(anchor.clone(), metadata.clone(), cx); self.push_op( - ContextOperation::InsertMessage { + TextThreadOperation::InsertMessage { anchor: anchor.clone(), metadata, version, @@ -2501,7 +2495,7 @@ impl AssistantContext { Err(ix) => ix, }; self.contents.insert(insertion_ix, content); - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); } pub fn contents<'a>(&'a self, cx: &'a App) -> impl 'a + Iterator { @@ -2576,7 +2570,7 @@ impl AssistantContext { }; self.insert_message(suffix.clone(), suffix_metadata.clone(), cx); self.push_op( - ContextOperation::InsertMessage { + TextThreadOperation::InsertMessage { anchor: suffix.clone(), metadata: suffix_metadata, version, @@ -2626,7 +2620,7 @@ impl AssistantContext { }; self.insert_message(selection.clone(), selection_metadata.clone(), cx); self.push_op( - ContextOperation::InsertMessage { + TextThreadOperation::InsertMessage { anchor: selection.clone(), metadata: selection_metadata, version, @@ -2638,7 +2632,7 @@ impl AssistantContext { }; if !edited_buffer { - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); } new_messages } else { @@ -2652,7 +2646,7 @@ impl AssistantContext { new_metadata: MessageMetadata, cx: &mut Context, ) { - cx.emit(ContextEvent::MessagesEdited); + cx.emit(TextThreadEvent::MessagesEdited); self.messages_metadata.insert(new_anchor.id, new_metadata); 
@@ -2688,15 +2682,15 @@ impl AssistantContext { // If there is no summary, it is set with `done: false` so that "Loading Summary…" can // be displayed. match self.summary { - ContextSummary::Pending | ContextSummary::Error => { - self.summary = ContextSummary::Content(ContextSummaryContent { + TextThreadSummary::Pending | TextThreadSummary::Error => { + self.summary = TextThreadSummary::Content(TextThreadSummaryContent { text: "".to_string(), done: false, - timestamp: clock::Lamport::default(), + timestamp: clock::Lamport::MIN, }); replace_old = true; } - ContextSummary::Content(_) => {} + TextThreadSummary::Content(_) => {} } self.summary_task = cx.spawn(async move |this, cx| { @@ -2718,13 +2712,13 @@ impl AssistantContext { } summary.text.extend(lines.next()); summary.timestamp = timestamp; - let operation = ContextOperation::UpdateSummary { + let operation = TextThreadOperation::UpdateSummary { summary: summary.clone(), version, }; this.push_op(operation, cx); - cx.emit(ContextEvent::SummaryChanged); - cx.emit(ContextEvent::SummaryGenerated); + cx.emit(TextThreadEvent::SummaryChanged); + cx.emit(TextThreadEvent::SummaryGenerated); })?; // Stop if the LLM generated multiple lines. @@ -2748,13 +2742,13 @@ impl AssistantContext { if let Some(summary) = this.summary.content_as_mut() { summary.done = true; summary.timestamp = timestamp; - let operation = ContextOperation::UpdateSummary { + let operation = TextThreadOperation::UpdateSummary { summary: summary.clone(), version, }; this.push_op(operation, cx); - cx.emit(ContextEvent::SummaryChanged); - cx.emit(ContextEvent::SummaryGenerated); + cx.emit(TextThreadEvent::SummaryChanged); + cx.emit(TextThreadEvent::SummaryGenerated); } })?; @@ -2764,8 +2758,8 @@ impl AssistantContext { if let Err(err) = result { this.update(cx, |this, cx| { - this.summary = ContextSummary::Error; - cx.emit(ContextEvent::SummaryChanged); + this.summary = TextThreadSummary::Error; + cx.emit(TextThreadEvent::SummaryChanged); }) .log_err(); log::error!("Error generating context summary: {}", err); @@ -2871,7 +2865,7 @@ impl AssistantContext { &mut self, debounce: Option, fs: Arc, - cx: &mut Context, + cx: &mut Context, ) { if self.replica_id() != ReplicaId::default() { // Prevent saving a remote context for now. 
@@ -2902,7 +2896,7 @@ impl AssistantContext { let mut discriminant = 1; let mut new_path; loop { - new_path = contexts_dir().join(&format!( + new_path = text_threads_dir().join(&format!( "{} - {}.zed.json", summary.trim(), discriminant @@ -2914,7 +2908,7 @@ impl AssistantContext { } } - fs.create_dir(contexts_dir().as_ref()).await?; + fs.create_dir(text_threads_dir().as_ref()).await?; // rename before write ensures that only one file exists if let Some(old_path) = old_path.as_ref() @@ -2936,7 +2930,7 @@ impl AssistantContext { let new_path: Arc = new_path.clone().into(); move |this, cx| { this.path = Some(new_path.clone()); - cx.emit(ContextEvent::PathChanged { old_path, new_path }); + cx.emit(TextThreadEvent::PathChanged { old_path, new_path }); } }) .ok(); @@ -2955,7 +2949,7 @@ impl AssistantContext { summary.timestamp = timestamp; summary.done = true; summary.text = custom_summary; - cx.emit(ContextEvent::SummaryChanged); + cx.emit(TextThreadEvent::SummaryChanged); } fn update_model_request_usage(&self, amount: u32, limit: UsageLimit, cx: &mut App) { @@ -2975,23 +2969,23 @@ impl AssistantContext { } #[derive(Debug, Default)] -pub struct ContextVersion { - context: clock::Global, +pub struct TextThreadVersion { + text_thread: clock::Global, buffer: clock::Global, } -impl ContextVersion { +impl TextThreadVersion { pub fn from_proto(proto: &proto::ContextVersion) -> Self { Self { - context: language::proto::deserialize_version(&proto.context_version), + text_thread: language::proto::deserialize_version(&proto.context_version), buffer: language::proto::deserialize_version(&proto.buffer_version), } } - pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion { + pub fn to_proto(&self, context_id: TextThreadId) -> proto::ContextVersion { proto::ContextVersion { context_id: context_id.to_proto(), - context_version: language::proto::serialize_version(&self.context), + context_version: language::proto::serialize_version(&self.text_thread), buffer_version: language::proto::serialize_version(&self.buffer), } } @@ -3059,8 +3053,8 @@ pub struct SavedMessage { } #[derive(Serialize, Deserialize)] -pub struct SavedContext { - pub id: Option, +pub struct SavedTextThread { + pub id: Option, pub zed: String, pub version: String, pub text: String, @@ -3072,7 +3066,7 @@ pub struct SavedContext { pub thought_process_output_sections: Vec>, } -impl SavedContext { +impl SavedTextThread { pub const VERSION: &'static str = "0.4.0"; pub fn from_json(json: &str) -> Result { @@ -3082,9 +3076,9 @@ impl SavedContext { .context("version not found")? { serde_json::Value::String(version) => match version.as_str() { - SavedContext::VERSION => { - Ok(serde_json::from_value::(saved_context_json)?) 
- } + SavedTextThread::VERSION => Ok(serde_json::from_value::( + saved_context_json, + )?), SavedContextV0_3_0::VERSION => { let saved_context = serde_json::from_value::(saved_context_json)?; @@ -3109,18 +3103,18 @@ impl SavedContext { fn into_ops( self, buffer: &Entity, - cx: &mut Context, - ) -> Vec { + cx: &mut Context, + ) -> Vec { let mut operations = Vec::new(); let mut version = clock::Global::new(); let mut next_timestamp = clock::Lamport::new(ReplicaId::default()); let mut first_message_metadata = None; for message in self.messages { - if message.id == MessageId(clock::Lamport::default()) { + if message.id == MessageId(clock::Lamport::MIN) { first_message_metadata = Some(message.metadata); } else { - operations.push(ContextOperation::InsertMessage { + operations.push(TextThreadOperation::InsertMessage { anchor: MessageAnchor { id: message.id, start: buffer.read(cx).anchor_before(message.start), @@ -3140,8 +3134,8 @@ impl SavedContext { if let Some(metadata) = first_message_metadata { let timestamp = next_timestamp.tick(); - operations.push(ContextOperation::UpdateMessage { - message_id: MessageId(clock::Lamport::default()), + operations.push(TextThreadOperation::UpdateMessage { + message_id: MessageId(clock::Lamport::MIN), metadata: MessageMetadata { role: metadata.role, status: metadata.status, @@ -3156,7 +3150,7 @@ impl SavedContext { let buffer = buffer.read(cx); for section in self.slash_command_output_sections { let timestamp = next_timestamp.tick(); - operations.push(ContextOperation::SlashCommandOutputSectionAdded { + operations.push(TextThreadOperation::SlashCommandOutputSectionAdded { timestamp, section: SlashCommandOutputSection { range: buffer.anchor_after(section.range.start) @@ -3173,7 +3167,7 @@ impl SavedContext { for section in self.thought_process_output_sections { let timestamp = next_timestamp.tick(); - operations.push(ContextOperation::ThoughtProcessOutputSectionAdded { + operations.push(TextThreadOperation::ThoughtProcessOutputSectionAdded { timestamp, section: ThoughtProcessOutputSection { range: buffer.anchor_after(section.range.start) @@ -3186,8 +3180,8 @@ impl SavedContext { } let timestamp = next_timestamp.tick(); - operations.push(ContextOperation::UpdateSummary { - summary: ContextSummaryContent { + operations.push(TextThreadOperation::UpdateSummary { + summary: TextThreadSummaryContent { text: self.summary, done: true, timestamp, @@ -3217,7 +3211,7 @@ struct SavedMessageMetadataPreV0_4_0 { #[derive(Serialize, Deserialize)] struct SavedContextV0_3_0 { - id: Option, + id: Option, zed: String, version: String, text: String, @@ -3230,11 +3224,11 @@ struct SavedContextV0_3_0 { impl SavedContextV0_3_0 { const VERSION: &'static str = "0.3.0"; - fn upgrade(self) -> SavedContext { - SavedContext { + fn upgrade(self) -> SavedTextThread { + SavedTextThread { id: self.id, zed: self.zed, - version: SavedContext::VERSION.into(), + version: SavedTextThread::VERSION.into(), text: self.text, messages: self .messages @@ -3266,7 +3260,7 @@ impl SavedContextV0_3_0 { #[derive(Serialize, Deserialize)] struct SavedContextV0_2_0 { - id: Option, + id: Option, zed: String, version: String, text: String, @@ -3278,7 +3272,7 @@ struct SavedContextV0_2_0 { impl SavedContextV0_2_0 { const VERSION: &'static str = "0.2.0"; - fn upgrade(self) -> SavedContext { + fn upgrade(self) -> SavedTextThread { SavedContextV0_3_0 { id: self.id, zed: self.zed, @@ -3295,7 +3289,7 @@ impl SavedContextV0_2_0 { #[derive(Serialize, Deserialize)] struct SavedContextV0_1_0 { - id: Option, + id: Option, 
zed: String, version: String, text: String, @@ -3309,7 +3303,7 @@ struct SavedContextV0_1_0 { impl SavedContextV0_1_0 { const VERSION: &'static str = "0.1.0"; - fn upgrade(self) -> SavedContext { + fn upgrade(self) -> SavedTextThread { SavedContextV0_2_0 { id: self.id, zed: self.zed, @@ -3324,7 +3318,7 @@ impl SavedContextV0_1_0 { } #[derive(Debug, Clone)] -pub struct SavedContextMetadata { +pub struct SavedTextThreadMetadata { pub title: SharedString, pub path: Arc, pub mtime: chrono::DateTime, diff --git a/crates/assistant_context/src/context_store.rs b/crates/assistant_text_thread/src/text_thread_store.rs similarity index 71% rename from crates/assistant_context/src/context_store.rs rename to crates/assistant_text_thread/src/text_thread_store.rs index 5fac44e31f4cc073af8fe6bbb57f75fc03b27f45..19c317baf0fa728c77faebc388b5e36008aa39b3 100644 --- a/crates/assistant_context/src/context_store.rs +++ b/crates/assistant_text_thread/src/text_thread_store.rs @@ -1,6 +1,6 @@ use crate::{ - AssistantContext, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext, - SavedContextMetadata, + SavedTextThread, SavedTextThreadMetadata, TextThread, TextThreadEvent, TextThreadId, + TextThreadOperation, TextThreadVersion, }; use anyhow::{Context as _, Result}; use assistant_slash_command::{SlashCommandId, SlashCommandWorkingSet}; @@ -11,9 +11,9 @@ use context_server::ContextServerId; use fs::{Fs, RemoveOptions}; use futures::StreamExt; use fuzzy::StringMatchCandidate; -use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; +use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity}; use language::LanguageRegistry; -use paths::contexts_dir; +use paths::text_threads_dir; use project::{ Project, context_server_store::{ContextServerStatus, ContextServerStore}, @@ -27,24 +27,24 @@ use util::{ResultExt, TryFutureExt}; use zed_env_vars::ZED_STATELESS; pub(crate) fn init(client: &AnyProtoClient) { - client.add_entity_message_handler(ContextStore::handle_advertise_contexts); - client.add_entity_request_handler(ContextStore::handle_open_context); - client.add_entity_request_handler(ContextStore::handle_create_context); - client.add_entity_message_handler(ContextStore::handle_update_context); - client.add_entity_request_handler(ContextStore::handle_synchronize_contexts); + client.add_entity_message_handler(TextThreadStore::handle_advertise_contexts); + client.add_entity_request_handler(TextThreadStore::handle_open_context); + client.add_entity_request_handler(TextThreadStore::handle_create_context); + client.add_entity_message_handler(TextThreadStore::handle_update_context); + client.add_entity_request_handler(TextThreadStore::handle_synchronize_contexts); } #[derive(Clone)] -pub struct RemoteContextMetadata { - pub id: ContextId, +pub struct RemoteTextThreadMetadata { + pub id: TextThreadId, pub summary: Option, } -pub struct ContextStore { - contexts: Vec, - contexts_metadata: Vec, +pub struct TextThreadStore { + text_threads: Vec, + text_threads_metadata: Vec, context_server_slash_command_ids: HashMap>, - host_contexts: Vec, + host_text_threads: Vec, fs: Arc, languages: Arc, slash_commands: Arc, @@ -58,34 +58,28 @@ pub struct ContextStore { prompt_builder: Arc, } -pub enum ContextStoreEvent { - ContextCreated(ContextId), +enum TextThreadHandle { + Weak(WeakEntity), + Strong(Entity), } -impl EventEmitter for ContextStore {} - -enum ContextHandle { - Weak(WeakEntity), - Strong(Entity), -} - -impl ContextHandle { - fn upgrade(&self) -> Option> 
{ +impl TextThreadHandle { + fn upgrade(&self) -> Option> { match self { - ContextHandle::Weak(weak) => weak.upgrade(), - ContextHandle::Strong(strong) => Some(strong.clone()), + TextThreadHandle::Weak(weak) => weak.upgrade(), + TextThreadHandle::Strong(strong) => Some(strong.clone()), } } - fn downgrade(&self) -> WeakEntity { + fn downgrade(&self) -> WeakEntity { match self { - ContextHandle::Weak(weak) => weak.clone(), - ContextHandle::Strong(strong) => strong.downgrade(), + TextThreadHandle::Weak(weak) => weak.clone(), + TextThreadHandle::Strong(strong) => strong.downgrade(), } } } -impl ContextStore { +impl TextThreadStore { pub fn new( project: Entity, prompt_builder: Arc, @@ -97,14 +91,14 @@ impl ContextStore { let telemetry = project.read(cx).client().telemetry().clone(); cx.spawn(async move |cx| { const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100); - let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await; + let (mut events, _) = fs.watch(text_threads_dir(), CONTEXT_WATCH_DURATION).await; let this = cx.new(|cx: &mut Context| { let mut this = Self { - contexts: Vec::new(), - contexts_metadata: Vec::new(), + text_threads: Vec::new(), + text_threads_metadata: Vec::new(), context_server_slash_command_ids: HashMap::default(), - host_contexts: Vec::new(), + host_text_threads: Vec::new(), fs, languages, slash_commands, @@ -142,10 +136,10 @@ impl ContextStore { #[cfg(any(test, feature = "test-support"))] pub fn fake(project: Entity, cx: &mut Context) -> Self { Self { - contexts: Default::default(), - contexts_metadata: Default::default(), + text_threads: Default::default(), + text_threads_metadata: Default::default(), context_server_slash_command_ids: Default::default(), - host_contexts: Default::default(), + host_text_threads: Default::default(), fs: project.read(cx).fs().clone(), languages: project.read(cx).languages().clone(), slash_commands: Arc::default(), @@ -166,13 +160,13 @@ impl ContextStore { mut cx: AsyncApp, ) -> Result<()> { this.update(&mut cx, |this, cx| { - this.host_contexts = envelope + this.host_text_threads = envelope .payload .contexts .into_iter() - .map(|context| RemoteContextMetadata { - id: ContextId::from_proto(context.context_id), - summary: context.summary, + .map(|text_thread| RemoteTextThreadMetadata { + id: TextThreadId::from_proto(text_thread.context_id), + summary: text_thread.summary, }) .collect(); cx.notify(); @@ -184,25 +178,25 @@ impl ContextStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let context_id = ContextId::from_proto(envelope.payload.context_id); + let context_id = TextThreadId::from_proto(envelope.payload.context_id); let operations = this.update(&mut cx, |this, cx| { anyhow::ensure!( !this.project.read(cx).is_via_collab(), "only the host contexts can be opened" ); - let context = this - .loaded_context_for_id(&context_id, cx) + let text_thread = this + .loaded_text_thread_for_id(&context_id, cx) .context("context not found")?; anyhow::ensure!( - context.read(cx).replica_id() == ReplicaId::default(), + text_thread.read(cx).replica_id() == ReplicaId::default(), "context must be opened via the host" ); anyhow::Ok( - context + text_thread .read(cx) - .serialize_ops(&ContextVersion::default(), cx), + .serialize_ops(&TextThreadVersion::default(), cx), ) })??; let operations = operations.await; @@ -222,15 +216,14 @@ impl ContextStore { "can only create contexts as the host" ); - let context = this.create(cx); - let context_id = context.read(cx).id().clone(); - 
cx.emit(ContextStoreEvent::ContextCreated(context_id.clone())); + let text_thread = this.create(cx); + let context_id = text_thread.read(cx).id().clone(); anyhow::Ok(( context_id, - context + text_thread .read(cx) - .serialize_ops(&ContextVersion::default(), cx), + .serialize_ops(&TextThreadVersion::default(), cx), )) })??; let operations = operations.await; @@ -246,11 +239,11 @@ impl ContextStore { mut cx: AsyncApp, ) -> Result<()> { this.update(&mut cx, |this, cx| { - let context_id = ContextId::from_proto(envelope.payload.context_id); - if let Some(context) = this.loaded_context_for_id(&context_id, cx) { + let context_id = TextThreadId::from_proto(envelope.payload.context_id); + if let Some(text_thread) = this.loaded_text_thread_for_id(&context_id, cx) { let operation_proto = envelope.payload.operation.context("invalid operation")?; - let operation = ContextOperation::from_proto(operation_proto)?; - context.update(cx, |context, cx| context.apply_ops([operation], cx)); + let operation = TextThreadOperation::from_proto(operation_proto)?; + text_thread.update(cx, |text_thread, cx| text_thread.apply_ops([operation], cx)); } Ok(()) })? @@ -269,12 +262,12 @@ impl ContextStore { let mut local_versions = Vec::new(); for remote_version_proto in envelope.payload.contexts { - let remote_version = ContextVersion::from_proto(&remote_version_proto); - let context_id = ContextId::from_proto(remote_version_proto.context_id); - if let Some(context) = this.loaded_context_for_id(&context_id, cx) { - let context = context.read(cx); - let operations = context.serialize_ops(&remote_version, cx); - local_versions.push(context.version(cx).to_proto(context_id.clone())); + let remote_version = TextThreadVersion::from_proto(&remote_version_proto); + let context_id = TextThreadId::from_proto(remote_version_proto.context_id); + if let Some(text_thread) = this.loaded_text_thread_for_id(&context_id, cx) { + let text_thread = text_thread.read(cx); + let operations = text_thread.serialize_ops(&remote_version, cx); + local_versions.push(text_thread.version(cx).to_proto(context_id.clone())); let client = this.client.clone(); let project_id = envelope.payload.project_id; cx.background_spawn(async move { @@ -308,9 +301,9 @@ impl ContextStore { } if is_shared { - self.contexts.retain_mut(|context| { - if let Some(strong_context) = context.upgrade() { - *context = ContextHandle::Strong(strong_context); + self.text_threads.retain_mut(|text_thread| { + if let Some(strong_context) = text_thread.upgrade() { + *text_thread = TextThreadHandle::Strong(strong_context); true } else { false @@ -345,12 +338,12 @@ impl ContextStore { self.synchronize_contexts(cx); } project::Event::DisconnectedFromHost => { - self.contexts.retain_mut(|context| { - if let Some(strong_context) = context.upgrade() { - *context = ContextHandle::Weak(context.downgrade()); - strong_context.update(cx, |context, cx| { - if context.replica_id() != ReplicaId::default() { - context.set_capability(language::Capability::ReadOnly, cx); + self.text_threads.retain_mut(|text_thread| { + if let Some(strong_context) = text_thread.upgrade() { + *text_thread = TextThreadHandle::Weak(text_thread.downgrade()); + strong_context.update(cx, |text_thread, cx| { + if text_thread.replica_id() != ReplicaId::default() { + text_thread.set_capability(language::Capability::ReadOnly, cx); } }); true @@ -358,20 +351,24 @@ impl ContextStore { false } }); - self.host_contexts.clear(); + self.host_text_threads.clear(); cx.notify(); } _ => {} } } - pub fn unordered_contexts(&self) -> impl 
Iterator { - self.contexts_metadata.iter() + pub fn unordered_text_threads(&self) -> impl Iterator { + self.text_threads_metadata.iter() } - pub fn create(&mut self, cx: &mut Context) -> Entity { + pub fn host_text_threads(&self) -> impl Iterator { + self.host_text_threads.iter() + } + + pub fn create(&mut self, cx: &mut Context) -> Entity { let context = cx.new(|cx| { - AssistantContext::local( + TextThread::local( self.languages.clone(), Some(self.project.clone()), Some(self.telemetry.clone()), @@ -380,14 +377,11 @@ impl ContextStore { cx, ) }); - self.register_context(&context, cx); + self.register_text_thread(&context, cx); context } - pub fn create_remote_context( - &mut self, - cx: &mut Context, - ) -> Task>> { + pub fn create_remote(&mut self, cx: &mut Context) -> Task>> { let project = self.project.read(cx); let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow::anyhow!("project was not remote"))); @@ -403,10 +397,10 @@ impl ContextStore { let request = self.client.request(proto::CreateContext { project_id }); cx.spawn(async move |this, cx| { let response = request.await?; - let context_id = ContextId::from_proto(response.context_id); + let context_id = TextThreadId::from_proto(response.context_id); let context_proto = response.context.context("invalid context")?; - let context = cx.new(|cx| { - AssistantContext::new( + let text_thread = cx.new(|cx| { + TextThread::new( context_id.clone(), replica_id, capability, @@ -423,29 +417,29 @@ impl ContextStore { context_proto .operations .into_iter() - .map(ContextOperation::from_proto) + .map(TextThreadOperation::from_proto) .collect::>>() }) .await?; - context.update(cx, |context, cx| context.apply_ops(operations, cx))?; + text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?; this.update(cx, |this, cx| { - if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { + if let Some(existing_context) = this.loaded_text_thread_for_id(&context_id, cx) { existing_context } else { - this.register_context(&context, cx); + this.register_text_thread(&text_thread, cx); this.synchronize_contexts(cx); - context + text_thread } }) }) } - pub fn open_local_context( + pub fn open_local( &mut self, path: Arc, cx: &Context, - ) -> Task>> { - if let Some(existing_context) = self.loaded_context_for_path(&path, cx) { + ) -> Task>> { + if let Some(existing_context) = self.loaded_text_thread_for_path(&path, cx) { return Task::ready(Ok(existing_context)); } @@ -457,7 +451,7 @@ impl ContextStore { let path = path.clone(); async move { let saved_context = fs.load(&path).await?; - SavedContext::from_json(&saved_context) + SavedTextThread::from_json(&saved_context) } }); let prompt_builder = self.prompt_builder.clone(); @@ -466,7 +460,7 @@ impl ContextStore { cx.spawn(async move |this, cx| { let saved_context = load.await?; let context = cx.new(|cx| { - AssistantContext::deserialize( + TextThread::deserialize( saved_context, path.clone(), languages, @@ -478,21 +472,17 @@ impl ContextStore { ) })?; this.update(cx, |this, cx| { - if let Some(existing_context) = this.loaded_context_for_path(&path, cx) { + if let Some(existing_context) = this.loaded_text_thread_for_path(&path, cx) { existing_context } else { - this.register_context(&context, cx); + this.register_text_thread(&context, cx); context } }) }) } - pub fn delete_local_context( - &mut self, - path: Arc, - cx: &mut Context, - ) -> Task> { + pub fn delete_local(&mut self, path: Arc, cx: &mut Context) -> Task> { let fs = self.fs.clone(); 
cx.spawn(async move |this, cx| { @@ -506,57 +496,57 @@ impl ContextStore { .await?; this.update(cx, |this, cx| { - this.contexts.retain(|context| { - context + this.text_threads.retain(|text_thread| { + text_thread .upgrade() - .and_then(|context| context.read(cx).path()) + .and_then(|text_thread| text_thread.read(cx).path()) != Some(&path) }); - this.contexts_metadata - .retain(|context| context.path.as_ref() != path.as_ref()); + this.text_threads_metadata + .retain(|text_thread| text_thread.path.as_ref() != path.as_ref()); })?; Ok(()) }) } - fn loaded_context_for_path(&self, path: &Path, cx: &App) -> Option> { - self.contexts.iter().find_map(|context| { - let context = context.upgrade()?; - if context.read(cx).path().map(Arc::as_ref) == Some(path) { - Some(context) + fn loaded_text_thread_for_path(&self, path: &Path, cx: &App) -> Option> { + self.text_threads.iter().find_map(|text_thread| { + let text_thread = text_thread.upgrade()?; + if text_thread.read(cx).path().map(Arc::as_ref) == Some(path) { + Some(text_thread) } else { None } }) } - pub fn loaded_context_for_id( + pub fn loaded_text_thread_for_id( &self, - id: &ContextId, + id: &TextThreadId, cx: &App, - ) -> Option> { - self.contexts.iter().find_map(|context| { - let context = context.upgrade()?; - if context.read(cx).id() == id { - Some(context) + ) -> Option> { + self.text_threads.iter().find_map(|text_thread| { + let text_thread = text_thread.upgrade()?; + if text_thread.read(cx).id() == id { + Some(text_thread) } else { None } }) } - pub fn open_remote_context( + pub fn open_remote( &mut self, - context_id: ContextId, + text_thread_id: TextThreadId, cx: &mut Context, - ) -> Task>> { + ) -> Task>> { let project = self.project.read(cx); let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow::anyhow!("project was not remote"))); }; - if let Some(context) = self.loaded_context_for_id(&context_id, cx) { + if let Some(context) = self.loaded_text_thread_for_id(&text_thread_id, cx) { return Task::ready(Ok(context)); } @@ -567,16 +557,16 @@ impl ContextStore { let telemetry = self.telemetry.clone(); let request = self.client.request(proto::OpenContext { project_id, - context_id: context_id.to_proto(), + context_id: text_thread_id.to_proto(), }); let prompt_builder = self.prompt_builder.clone(); let slash_commands = self.slash_commands.clone(); cx.spawn(async move |this, cx| { let response = request.await?; let context_proto = response.context.context("invalid context")?; - let context = cx.new(|cx| { - AssistantContext::new( - context_id.clone(), + let text_thread = cx.new(|cx| { + TextThread::new( + text_thread_id.clone(), replica_id, capability, language_registry, @@ -592,38 +582,40 @@ impl ContextStore { context_proto .operations .into_iter() - .map(ContextOperation::from_proto) + .map(TextThreadOperation::from_proto) .collect::>>() }) .await?; - context.update(cx, |context, cx| context.apply_ops(operations, cx))?; + text_thread.update(cx, |context, cx| context.apply_ops(operations, cx))?; this.update(cx, |this, cx| { - if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { + if let Some(existing_context) = this.loaded_text_thread_for_id(&text_thread_id, cx) + { existing_context } else { - this.register_context(&context, cx); + this.register_text_thread(&text_thread, cx); this.synchronize_contexts(cx); - context + text_thread } }) }) } - fn register_context(&mut self, context: &Entity, cx: &mut Context) { + fn register_text_thread(&mut self, text_thread: &Entity, cx: &mut Context) { 
let handle = if self.project_is_shared { - ContextHandle::Strong(context.clone()) + TextThreadHandle::Strong(text_thread.clone()) } else { - ContextHandle::Weak(context.downgrade()) + TextThreadHandle::Weak(text_thread.downgrade()) }; - self.contexts.push(handle); + self.text_threads.push(handle); self.advertise_contexts(cx); - cx.subscribe(context, Self::handle_context_event).detach(); + cx.subscribe(text_thread, Self::handle_context_event) + .detach(); } fn handle_context_event( &mut self, - context: Entity, - event: &ContextEvent, + text_thread: Entity, + event: &TextThreadEvent, cx: &mut Context, ) { let Some(project_id) = self.project.read(cx).remote_id() else { @@ -631,12 +623,12 @@ impl ContextStore { }; match event { - ContextEvent::SummaryChanged => { + TextThreadEvent::SummaryChanged => { self.advertise_contexts(cx); } - ContextEvent::PathChanged { old_path, new_path } => { + TextThreadEvent::PathChanged { old_path, new_path } => { if let Some(old_path) = old_path.as_ref() { - for metadata in &mut self.contexts_metadata { + for metadata in &mut self.text_threads_metadata { if &metadata.path == old_path { metadata.path = new_path.clone(); break; @@ -644,8 +636,8 @@ impl ContextStore { } } } - ContextEvent::Operation(operation) => { - let context_id = context.read(cx).id().to_proto(); + TextThreadEvent::Operation(operation) => { + let context_id = text_thread.read(cx).id().to_proto(); let operation = operation.to_proto(); self.client .send(proto::UpdateContext { @@ -670,15 +662,15 @@ impl ContextStore { } let contexts = self - .contexts + .text_threads .iter() .rev() - .filter_map(|context| { - let context = context.upgrade()?.read(cx); - if context.replica_id() == ReplicaId::default() { + .filter_map(|text_thread| { + let text_thread = text_thread.upgrade()?.read(cx); + if text_thread.replica_id() == ReplicaId::default() { Some(proto::ContextMetadata { - context_id: context.id().to_proto(), - summary: context + context_id: text_thread.id().to_proto(), + summary: text_thread .summary() .content() .map(|summary| summary.text.clone()), @@ -701,13 +693,13 @@ impl ContextStore { return; }; - let contexts = self - .contexts + let text_threads = self + .text_threads .iter() - .filter_map(|context| { - let context = context.upgrade()?.read(cx); - if context.replica_id() != ReplicaId::default() { - Some(context.version(cx).to_proto(context.id().clone())) + .filter_map(|text_thread| { + let text_thread = text_thread.upgrade()?.read(cx); + if text_thread.replica_id() != ReplicaId::default() { + Some(text_thread.version(cx).to_proto(text_thread.id().clone())) } else { None } @@ -717,26 +709,27 @@ impl ContextStore { let client = self.client.clone(); let request = self.client.request(proto::SynchronizeContexts { project_id, - contexts, + contexts: text_threads, }); cx.spawn(async move |this, cx| { let response = request.await?; - let mut context_ids = Vec::new(); + let mut text_thread_ids = Vec::new(); let mut operations = Vec::new(); this.read_with(cx, |this, cx| { for context_version_proto in response.contexts { - let context_version = ContextVersion::from_proto(&context_version_proto); - let context_id = ContextId::from_proto(context_version_proto.context_id); - if let Some(context) = this.loaded_context_for_id(&context_id, cx) { - context_ids.push(context_id); - operations.push(context.read(cx).serialize_ops(&context_version, cx)); + let text_thread_version = TextThreadVersion::from_proto(&context_version_proto); + let text_thread_id = 
TextThreadId::from_proto(context_version_proto.context_id); + if let Some(text_thread) = this.loaded_text_thread_for_id(&text_thread_id, cx) { + text_thread_ids.push(text_thread_id); + operations + .push(text_thread.read(cx).serialize_ops(&text_thread_version, cx)); } } })?; let operations = futures::future::join_all(operations).await; - for (context_id, operations) in context_ids.into_iter().zip(operations) { + for (context_id, operations) in text_thread_ids.into_iter().zip(operations) { for operation in operations { client.send(proto::UpdateContext { project_id, @@ -751,8 +744,8 @@ impl ContextStore { .detach_and_log_err(cx); } - pub fn search(&self, query: String, cx: &App) -> Task> { - let metadata = self.contexts_metadata.clone(); + pub fn search(&self, query: String, cx: &App) -> Task> { + let metadata = self.text_threads_metadata.clone(); let executor = cx.background_executor().clone(); cx.background_spawn(async move { if query.is_empty() { @@ -782,20 +775,16 @@ impl ContextStore { }) } - pub fn host_contexts(&self) -> &[RemoteContextMetadata] { - &self.host_contexts - } - fn reload(&mut self, cx: &mut Context) -> Task> { let fs = self.fs.clone(); cx.spawn(async move |this, cx| { if *ZED_STATELESS { return Ok(()); } - fs.create_dir(contexts_dir()).await?; + fs.create_dir(text_threads_dir()).await?; - let mut paths = fs.read_dir(contexts_dir()).await?; - let mut contexts = Vec::::new(); + let mut paths = fs.read_dir(text_threads_dir()).await?; + let mut contexts = Vec::::new(); while let Some(path) = paths.next().await { let path = path?; if path.extension() != Some(OsStr::new("json")) { @@ -821,7 +810,7 @@ impl ContextStore { .lines() .next() { - contexts.push(SavedContextMetadata { + contexts.push(SavedTextThreadMetadata { title: title.to_string().into(), path: path.into(), mtime: metadata.mtime.timestamp_for_user().into(), @@ -829,10 +818,10 @@ impl ContextStore { } } } - contexts.sort_unstable_by_key(|context| Reverse(context.mtime)); + contexts.sort_unstable_by_key(|text_thread| Reverse(text_thread.mtime)); this.update(cx, |this, cx| { - this.contexts_metadata = contexts; + this.text_threads_metadata = contexts; cx.notify(); }) }) diff --git a/crates/assistant_tool/Cargo.toml b/crates/assistant_tool/Cargo.toml deleted file mode 100644 index c95695052a4778209010b2f9e7a4a57be4cb6cf7..0000000000000000000000000000000000000000 --- a/crates/assistant_tool/Cargo.toml +++ /dev/null @@ -1,50 +0,0 @@ -[package] -name = "assistant_tool" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/assistant_tool.rs" - -[dependencies] -action_log.workspace = true -anyhow.workspace = true -collections.workspace = true -derive_more.workspace = true -gpui.workspace = true -icons.workspace = true -language.workspace = true -language_model.workspace = true -log.workspace = true -parking_lot.workspace = true -project.workspace = true -regex.workspace = true -serde.workspace = true -serde_json.workspace = true -text.workspace = true -util.workspace = true -workspace.workspace = true -workspace-hack.workspace = true - -[dev-dependencies] -buffer_diff = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = ["test-support"] } -clock = { workspace = true, features = ["test-support"] } -ctor.workspace = true -gpui = { workspace = true, features = ["test-support"] } -indoc.workspace = true -language = { workspace = true, features = ["test-support"] } -language_model = { 
workspace = true, features = ["test-support"] } -log.workspace = true -pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } -rand.workspace = true -settings = { workspace = true, features = ["test-support"] } -text = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/assistant_tool/src/assistant_tool.rs b/crates/assistant_tool/src/assistant_tool.rs deleted file mode 100644 index 9c5825d0f0ecc9c31277bfff5123d3d80501511b..0000000000000000000000000000000000000000 --- a/crates/assistant_tool/src/assistant_tool.rs +++ /dev/null @@ -1,269 +0,0 @@ -pub mod outline; -mod tool_registry; -mod tool_schema; -mod tool_working_set; - -use std::fmt; -use std::fmt::Debug; -use std::fmt::Formatter; -use std::ops::Deref; -use std::sync::Arc; - -use action_log::ActionLog; -use anyhow::Result; -use gpui::AnyElement; -use gpui::AnyWindowHandle; -use gpui::Context; -use gpui::IntoElement; -use gpui::Window; -use gpui::{App, Entity, SharedString, Task, WeakEntity}; -use icons::IconName; -use language_model::LanguageModel; -use language_model::LanguageModelImage; -use language_model::LanguageModelRequest; -use language_model::LanguageModelToolSchemaFormat; -use project::Project; -use workspace::Workspace; - -pub use crate::tool_registry::*; -pub use crate::tool_schema::*; -pub use crate::tool_working_set::*; - -pub fn init(cx: &mut App) { - ToolRegistry::default_global(cx); -} - -#[derive(Debug, Clone)] -pub enum ToolUseStatus { - InputStillStreaming, - NeedsConfirmation, - Pending, - Running, - Finished(SharedString), - Error(SharedString), -} - -impl ToolUseStatus { - pub fn text(&self) -> SharedString { - match self { - ToolUseStatus::NeedsConfirmation => "".into(), - ToolUseStatus::InputStillStreaming => "".into(), - ToolUseStatus::Pending => "".into(), - ToolUseStatus::Running => "".into(), - ToolUseStatus::Finished(out) => out.clone(), - ToolUseStatus::Error(out) => out.clone(), - } - } - - pub fn error(&self) -> Option { - match self { - ToolUseStatus::Error(out) => Some(out.clone()), - _ => None, - } - } -} - -#[derive(Debug)] -pub struct ToolResultOutput { - pub content: ToolResultContent, - pub output: Option, -} - -#[derive(Debug, PartialEq, Eq)] -pub enum ToolResultContent { - Text(String), - Image(LanguageModelImage), -} - -impl ToolResultContent { - pub fn len(&self) -> usize { - match self { - ToolResultContent::Text(str) => str.len(), - ToolResultContent::Image(image) => image.len(), - } - } - - pub fn is_empty(&self) -> bool { - match self { - ToolResultContent::Text(str) => str.is_empty(), - ToolResultContent::Image(image) => image.is_empty(), - } - } - - pub fn as_str(&self) -> Option<&str> { - match self { - ToolResultContent::Text(str) => Some(str), - ToolResultContent::Image(_) => None, - } - } -} - -impl From for ToolResultOutput { - fn from(value: String) -> Self { - ToolResultOutput { - content: ToolResultContent::Text(value), - output: None, - } - } -} - -impl Deref for ToolResultOutput { - type Target = ToolResultContent; - - fn deref(&self) -> &Self::Target { - &self.content - } -} - -/// The result of running a tool, containing both the asynchronous output -/// and an optional card view that can be rendered immediately. -pub struct ToolResult { - /// The asynchronous task that will eventually resolve to the tool's output - pub output: Task>, - /// An optional view to present the output of the tool. 
- pub card: Option, -} - -pub trait ToolCard: 'static + Sized { - fn render( - &mut self, - status: &ToolUseStatus, - window: &mut Window, - workspace: WeakEntity, - cx: &mut Context, - ) -> impl IntoElement; -} - -#[derive(Clone)] -pub struct AnyToolCard { - entity: gpui::AnyEntity, - render: fn( - entity: gpui::AnyEntity, - status: &ToolUseStatus, - window: &mut Window, - workspace: WeakEntity, - cx: &mut App, - ) -> AnyElement, -} - -impl From> for AnyToolCard { - fn from(entity: Entity) -> Self { - fn downcast_render( - entity: gpui::AnyEntity, - status: &ToolUseStatus, - window: &mut Window, - workspace: WeakEntity, - cx: &mut App, - ) -> AnyElement { - let entity = entity.downcast::().unwrap(); - entity.update(cx, |entity, cx| { - entity - .render(status, window, workspace, cx) - .into_any_element() - }) - } - - Self { - entity: entity.into(), - render: downcast_render::, - } - } -} - -impl AnyToolCard { - pub fn render( - &self, - status: &ToolUseStatus, - window: &mut Window, - workspace: WeakEntity, - cx: &mut App, - ) -> AnyElement { - (self.render)(self.entity.clone(), status, window, workspace, cx) - } -} - -impl From>> for ToolResult { - /// Convert from a task to a ToolResult with no card - fn from(output: Task>) -> Self { - Self { output, card: None } - } -} - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)] -pub enum ToolSource { - /// A native tool built-in to Zed. - Native, - /// A tool provided by a context server. - ContextServer { id: SharedString }, -} - -/// A tool that can be used by a language model. -pub trait Tool: 'static + Send + Sync { - /// Returns the name of the tool. - fn name(&self) -> String; - - /// Returns the description of the tool. - fn description(&self) -> String; - - /// Returns the icon for the tool. - fn icon(&self) -> IconName; - - /// Returns the source of the tool. - fn source(&self) -> ToolSource { - ToolSource::Native - } - - /// Returns true if the tool needs the users's confirmation - /// before having permission to run. - fn needs_confirmation( - &self, - input: &serde_json::Value, - project: &Entity, - cx: &App, - ) -> bool; - - /// Returns true if the tool may perform edits. - fn may_perform_edits(&self) -> bool; - - /// Returns the JSON schema that describes the tool's input. - fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> Result { - Ok(serde_json::Value::Object(serde_json::Map::default())) - } - - /// Returns markdown to be displayed in the UI for this tool. - fn ui_text(&self, input: &serde_json::Value) -> String; - - /// Returns markdown to be displayed in the UI for this tool, while the input JSON is still streaming - /// (so information may be missing). - fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String { - self.ui_text(input) - } - - /// Runs the tool with the provided input. 
- fn run( - self: Arc, - input: serde_json::Value, - request: Arc, - project: Entity, - action_log: Entity, - model: Arc, - window: Option, - cx: &mut App, - ) -> ToolResult; - - fn deserialize_card( - self: Arc, - _output: serde_json::Value, - _project: Entity, - _window: &mut Window, - _cx: &mut App, - ) -> Option { - None - } -} - -impl Debug for dyn Tool { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("Tool").field("name", &self.name()).finish() - } -} diff --git a/crates/assistant_tool/src/tool_registry.rs b/crates/assistant_tool/src/tool_registry.rs deleted file mode 100644 index 26b4821a6d1af05a5e42d639f465486b9311d427..0000000000000000000000000000000000000000 --- a/crates/assistant_tool/src/tool_registry.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::sync::Arc; - -use collections::HashMap; -use derive_more::{Deref, DerefMut}; -use gpui::Global; -use gpui::{App, ReadGlobal}; -use parking_lot::RwLock; - -use crate::Tool; - -#[derive(Default, Deref, DerefMut)] -struct GlobalToolRegistry(Arc); - -impl Global for GlobalToolRegistry {} - -#[derive(Default)] -struct ToolRegistryState { - tools: HashMap, Arc>, -} - -#[derive(Default)] -pub struct ToolRegistry { - state: RwLock, -} - -impl ToolRegistry { - /// Returns the global [`ToolRegistry`]. - pub fn global(cx: &App) -> Arc { - GlobalToolRegistry::global(cx).0.clone() - } - - /// Returns the global [`ToolRegistry`]. - /// - /// Inserts a default [`ToolRegistry`] if one does not yet exist. - pub fn default_global(cx: &mut App) -> Arc { - cx.default_global::().0.clone() - } - - pub fn new() -> Arc { - Arc::new(Self { - state: RwLock::new(ToolRegistryState { - tools: HashMap::default(), - }), - }) - } - - /// Registers the provided [`Tool`]. - pub fn register_tool(&self, tool: impl Tool) { - let mut state = self.state.write(); - let tool_name: Arc = tool.name().into(); - state.tools.insert(tool_name, Arc::new(tool)); - } - - /// Unregisters the provided [`Tool`]. - pub fn unregister_tool(&self, tool: impl Tool) { - self.unregister_tool_by_name(tool.name().as_str()) - } - - /// Unregisters the tool with the given name. - pub fn unregister_tool_by_name(&self, tool_name: &str) { - let mut state = self.state.write(); - state.tools.remove(tool_name); - } - - /// Returns the list of tools in the registry. - pub fn tools(&self) -> Vec> { - self.state.read().tools.values().cloned().collect() - } - - /// Returns the [`Tool`] with the given name. - pub fn tool(&self, name: &str) -> Option> { - self.state.read().tools.get(name).cloned() - } -} diff --git a/crates/assistant_tool/src/tool_working_set.rs b/crates/assistant_tool/src/tool_working_set.rs deleted file mode 100644 index 61f57affc76aad9e4d2185665b539f9092e3491c..0000000000000000000000000000000000000000 --- a/crates/assistant_tool/src/tool_working_set.rs +++ /dev/null @@ -1,415 +0,0 @@ -use std::{borrow::Borrow, sync::Arc}; - -use crate::{Tool, ToolRegistry, ToolSource}; -use collections::{HashMap, HashSet, IndexMap}; -use gpui::{App, SharedString}; -use util::debug_panic; - -#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)] -pub struct ToolId(usize); - -/// A unique identifier for a tool within a working set. 
-#[derive(Clone, PartialEq, Eq, Hash, Default)] -pub struct UniqueToolName(SharedString); - -impl Borrow for UniqueToolName { - fn borrow(&self) -> &str { - &self.0 - } -} - -impl From for UniqueToolName { - fn from(value: String) -> Self { - UniqueToolName(SharedString::new(value)) - } -} - -impl Into for UniqueToolName { - fn into(self) -> String { - self.0.into() - } -} - -impl std::fmt::Debug for UniqueToolName { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} - -impl std::fmt::Display for UniqueToolName { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0.as_ref()) - } -} - -/// A working set of tools for use in one instance of the Assistant Panel. -#[derive(Default)] -pub struct ToolWorkingSet { - context_server_tools_by_id: HashMap>, - context_server_tools_by_name: HashMap>, - next_tool_id: ToolId, -} - -impl ToolWorkingSet { - pub fn tool(&self, name: &str, cx: &App) -> Option> { - self.context_server_tools_by_name - .get(name) - .cloned() - .or_else(|| ToolRegistry::global(cx).tool(name)) - } - - pub fn tools(&self, cx: &App) -> Vec<(UniqueToolName, Arc)> { - let mut tools = ToolRegistry::global(cx) - .tools() - .into_iter() - .map(|tool| (UniqueToolName(tool.name().into()), tool)) - .collect::>(); - tools.extend(self.context_server_tools_by_name.clone()); - tools - } - - pub fn tools_by_source(&self, cx: &App) -> IndexMap>> { - let mut tools_by_source = IndexMap::default(); - - for (_, tool) in self.tools(cx) { - tools_by_source - .entry(tool.source()) - .or_insert_with(Vec::new) - .push(tool); - } - - for tools in tools_by_source.values_mut() { - tools.sort_by_key(|tool| tool.name()); - } - - tools_by_source.sort_unstable_keys(); - - tools_by_source - } - - pub fn insert(&mut self, tool: Arc, cx: &App) -> ToolId { - let tool_id = self.register_tool(tool); - self.tools_changed(cx); - tool_id - } - - pub fn extend(&mut self, tools: impl Iterator>, cx: &App) -> Vec { - let ids = tools.map(|tool| self.register_tool(tool)).collect(); - self.tools_changed(cx); - ids - } - - pub fn remove(&mut self, tool_ids_to_remove: &[ToolId], cx: &App) { - self.context_server_tools_by_id - .retain(|id, _| !tool_ids_to_remove.contains(id)); - self.tools_changed(cx); - } - - fn register_tool(&mut self, tool: Arc) -> ToolId { - let tool_id = self.next_tool_id; - self.next_tool_id.0 += 1; - self.context_server_tools_by_id - .insert(tool_id, tool.clone()); - tool_id - } - - fn tools_changed(&mut self, cx: &App) { - self.context_server_tools_by_name = resolve_context_server_tool_name_conflicts( - &self - .context_server_tools_by_id - .values() - .cloned() - .collect::>(), - &ToolRegistry::global(cx).tools(), - ); - } -} - -fn resolve_context_server_tool_name_conflicts( - context_server_tools: &[Arc], - native_tools: &[Arc], -) -> HashMap> { - fn resolve_tool_name(tool: &Arc) -> String { - let mut tool_name = tool.name(); - tool_name.truncate(MAX_TOOL_NAME_LENGTH); - tool_name - } - - const MAX_TOOL_NAME_LENGTH: usize = 64; - - let mut duplicated_tool_names = HashSet::default(); - let mut seen_tool_names = HashSet::default(); - seen_tool_names.extend(native_tools.iter().map(|tool| tool.name())); - for tool in context_server_tools { - let tool_name = resolve_tool_name(tool); - if seen_tool_names.contains(&tool_name) { - debug_assert!( - tool.source() != ToolSource::Native, - "Expected MCP tool but got a native tool: {}", - tool_name - ); - duplicated_tool_names.insert(tool_name); - } else { - 
seen_tool_names.insert(tool_name); - } - } - - if duplicated_tool_names.is_empty() { - return context_server_tools - .iter() - .map(|tool| (resolve_tool_name(tool).into(), tool.clone())) - .collect(); - } - - context_server_tools - .iter() - .filter_map(|tool| { - let mut tool_name = resolve_tool_name(tool); - if !duplicated_tool_names.contains(&tool_name) { - return Some((tool_name.into(), tool.clone())); - } - match tool.source() { - ToolSource::Native => { - debug_panic!("Expected MCP tool but got a native tool: {}", tool_name); - // Built-in tools always keep their original name - Some((tool_name.into(), tool.clone())) - } - ToolSource::ContextServer { id } => { - // Context server tools are prefixed with the context server ID, and truncated if necessary - tool_name.insert(0, '_'); - if tool_name.len() + id.len() > MAX_TOOL_NAME_LENGTH { - let len = MAX_TOOL_NAME_LENGTH - tool_name.len(); - let mut id = id.to_string(); - id.truncate(len); - tool_name.insert_str(0, &id); - } else { - tool_name.insert_str(0, &id); - } - - tool_name.truncate(MAX_TOOL_NAME_LENGTH); - - if seen_tool_names.contains(&tool_name) { - log::error!("Cannot resolve tool name conflict for tool {}", tool.name()); - None - } else { - Some((tool_name.into(), tool.clone())) - } - } - } - }) - .collect() -} -#[cfg(test)] -mod tests { - use gpui::{AnyWindowHandle, Entity, Task, TestAppContext}; - use language_model::{LanguageModel, LanguageModelRequest}; - use project::Project; - - use crate::{ActionLog, ToolResult}; - - use super::*; - - #[gpui::test] - fn test_unique_tool_names(cx: &mut TestAppContext) { - fn assert_tool( - tool_working_set: &ToolWorkingSet, - unique_name: &str, - expected_name: &str, - expected_source: ToolSource, - cx: &App, - ) { - let tool = tool_working_set.tool(unique_name, cx).unwrap(); - assert_eq!(tool.name(), expected_name); - assert_eq!(tool.source(), expected_source); - } - - let tool_registry = cx.update(ToolRegistry::default_global); - tool_registry.register_tool(TestTool::new("tool1", ToolSource::Native)); - tool_registry.register_tool(TestTool::new("tool2", ToolSource::Native)); - - let mut tool_working_set = ToolWorkingSet::default(); - cx.update(|cx| { - tool_working_set.extend( - vec![ - Arc::new(TestTool::new( - "tool2", - ToolSource::ContextServer { id: "mcp-1".into() }, - )) as Arc, - Arc::new(TestTool::new( - "tool2", - ToolSource::ContextServer { id: "mcp-2".into() }, - )) as Arc, - ] - .into_iter(), - cx, - ); - }); - - cx.update(|cx| { - assert_tool(&tool_working_set, "tool1", "tool1", ToolSource::Native, cx); - assert_tool(&tool_working_set, "tool2", "tool2", ToolSource::Native, cx); - assert_tool( - &tool_working_set, - "mcp-1_tool2", - "tool2", - ToolSource::ContextServer { id: "mcp-1".into() }, - cx, - ); - assert_tool( - &tool_working_set, - "mcp-2_tool2", - "tool2", - ToolSource::ContextServer { id: "mcp-2".into() }, - cx, - ); - }) - } - - #[gpui::test] - fn test_resolve_context_server_tool_name_conflicts() { - assert_resolve_context_server_tool_name_conflicts( - vec![ - TestTool::new("tool1", ToolSource::Native), - TestTool::new("tool2", ToolSource::Native), - ], - vec![TestTool::new( - "tool3", - ToolSource::ContextServer { id: "mcp-1".into() }, - )], - vec!["tool3"], - ); - - assert_resolve_context_server_tool_name_conflicts( - vec![ - TestTool::new("tool1", ToolSource::Native), - TestTool::new("tool2", ToolSource::Native), - ], - vec![ - TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }), - TestTool::new("tool3", ToolSource::ContextServer { id: 
"mcp-2".into() }), - ], - vec!["mcp-1_tool3", "mcp-2_tool3"], - ); - - assert_resolve_context_server_tool_name_conflicts( - vec![ - TestTool::new("tool1", ToolSource::Native), - TestTool::new("tool2", ToolSource::Native), - TestTool::new("tool3", ToolSource::Native), - ], - vec![ - TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }), - TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }), - ], - vec!["mcp-1_tool3", "mcp-2_tool3"], - ); - - // Test deduplication of tools with very long names, in this case the mcp server name should be truncated - assert_resolve_context_server_tool_name_conflicts( - vec![TestTool::new( - "tool-with-very-very-very-long-name", - ToolSource::Native, - )], - vec![TestTool::new( - "tool-with-very-very-very-long-name", - ToolSource::ContextServer { - id: "mcp-with-very-very-very-long-name".into(), - }, - )], - vec!["mcp-with-very-very-very-long-_tool-with-very-very-very-long-name"], - ); - - fn assert_resolve_context_server_tool_name_conflicts( - builtin_tools: Vec, - context_server_tools: Vec, - expected: Vec<&'static str>, - ) { - let context_server_tools: Vec> = context_server_tools - .into_iter() - .map(|t| Arc::new(t) as Arc) - .collect(); - let builtin_tools: Vec> = builtin_tools - .into_iter() - .map(|t| Arc::new(t) as Arc) - .collect(); - let tools = - resolve_context_server_tool_name_conflicts(&context_server_tools, &builtin_tools); - assert_eq!(tools.len(), expected.len()); - for (i, (name, _)) in tools.into_iter().enumerate() { - assert_eq!( - name.0.as_ref(), - expected[i], - "Expected '{}' got '{}' at index {}", - expected[i], - name, - i - ); - } - } - } - - struct TestTool { - name: String, - source: ToolSource, - } - - impl TestTool { - fn new(name: impl Into, source: ToolSource) -> Self { - Self { - name: name.into(), - source, - } - } - } - - impl Tool for TestTool { - fn name(&self) -> String { - self.name.clone() - } - - fn icon(&self) -> icons::IconName { - icons::IconName::Ai - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn needs_confirmation( - &self, - _input: &serde_json::Value, - _project: &Entity, - _cx: &App, - ) -> bool { - true - } - - fn source(&self) -> ToolSource { - self.source.clone() - } - - fn description(&self) -> String { - "Test tool".to_string() - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - "Test tool".to_string() - } - - fn run( - self: Arc, - _input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - _cx: &mut App, - ) -> ToolResult { - ToolResult { - output: Task::ready(Err(anyhow::anyhow!("No content"))), - card: None, - } - } - } -} diff --git a/crates/assistant_tools/Cargo.toml b/crates/assistant_tools/Cargo.toml deleted file mode 100644 index 9b9b8196d1c342c536d605306a1a062e73768c56..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/Cargo.toml +++ /dev/null @@ -1,92 +0,0 @@ -[package] -name = "assistant_tools" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/assistant_tools.rs" - -[features] -eval = [] - -[dependencies] -action_log.workspace = true -agent_settings.workspace = true -anyhow.workspace = true -assistant_tool.workspace = true -buffer_diff.workspace = true -chrono.workspace = true -client.workspace = true -cloud_llm_client.workspace = true -collections.workspace = true -component.workspace = true -derive_more.workspace = true -diffy = "0.4.2" 
-editor.workspace = true -feature_flags.workspace = true -futures.workspace = true -gpui.workspace = true -handlebars = { workspace = true, features = ["rust-embed"] } -html_to_markdown.workspace = true -http_client.workspace = true -indoc.workspace = true -itertools.workspace = true -language.workspace = true -language_model.workspace = true -log.workspace = true -lsp.workspace = true -markdown.workspace = true -open.workspace = true -paths.workspace = true -portable-pty.workspace = true -project.workspace = true -prompt_store.workspace = true -regex.workspace = true -rust-embed.workspace = true -schemars.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -smallvec.workspace = true -streaming_diff.workspace = true -strsim.workspace = true -task.workspace = true -terminal.workspace = true -terminal_view.workspace = true -theme.workspace = true -ui.workspace = true -util.workspace = true -watch.workspace = true -web_search.workspace = true -workspace-hack.workspace = true -workspace.workspace = true - -[dev-dependencies] -lsp = { workspace = true, features = ["test-support"] } -client = { workspace = true, features = ["test-support"] } -clock = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = ["test-support"] } -gpui = { workspace = true, features = ["test-support"] } -gpui_tokio.workspace = true -fs = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } -language_model = { workspace = true, features = ["test-support"] } -language_models.workspace = true -project = { workspace = true, features = ["test-support"] } -rand.workspace = true -pretty_assertions.workspace = true -reqwest_client.workspace = true -settings = { workspace = true, features = ["test-support"] } -smol.workspace = true -task = { workspace = true, features = ["test-support"]} -tempfile.workspace = true -theme.workspace = true -tree-sitter-rust.workspace = true -workspace = { workspace = true, features = ["test-support"] } -unindent.workspace = true -zlog.workspace = true diff --git a/crates/assistant_tools/LICENSE-GPL b/crates/assistant_tools/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/assistant_tools/src/assistant_tools.rs b/crates/assistant_tools/src/assistant_tools.rs deleted file mode 100644 index 17e2ba12f706387859ca3393aa44f5c05570e50a..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/assistant_tools.rs +++ /dev/null @@ -1,167 +0,0 @@ -mod copy_path_tool; -mod create_directory_tool; -mod delete_path_tool; -mod diagnostics_tool; -pub mod edit_agent; -mod edit_file_tool; -mod fetch_tool; -mod find_path_tool; -mod grep_tool; -mod list_directory_tool; -mod move_path_tool; -mod now_tool; -mod open_tool; -mod project_notifications_tool; -mod read_file_tool; -mod schema; -pub mod templates; -mod terminal_tool; -mod thinking_tool; -mod ui; -mod web_search_tool; - -use assistant_tool::ToolRegistry; -use copy_path_tool::CopyPathTool; -use gpui::{App, Entity}; -use http_client::HttpClientWithUrl; -use language_model::LanguageModelRegistry; -use move_path_tool::MovePathTool; -use std::sync::Arc; -use web_search_tool::WebSearchTool; - -pub(crate) use templates::*; - -use crate::create_directory_tool::CreateDirectoryTool; -use 
crate::delete_path_tool::DeletePathTool; -use crate::diagnostics_tool::DiagnosticsTool; -use crate::edit_file_tool::EditFileTool; -use crate::fetch_tool::FetchTool; -use crate::list_directory_tool::ListDirectoryTool; -use crate::now_tool::NowTool; -use crate::thinking_tool::ThinkingTool; - -pub use edit_file_tool::{EditFileMode, EditFileToolInput}; -pub use find_path_tool::*; -pub use grep_tool::{GrepTool, GrepToolInput}; -pub use open_tool::OpenTool; -pub use project_notifications_tool::ProjectNotificationsTool; -pub use read_file_tool::{ReadFileTool, ReadFileToolInput}; -pub use terminal_tool::TerminalTool; - -pub fn init(http_client: Arc, cx: &mut App) { - assistant_tool::init(cx); - - let registry = ToolRegistry::global(cx); - registry.register_tool(TerminalTool); - registry.register_tool(CreateDirectoryTool); - registry.register_tool(CopyPathTool); - registry.register_tool(DeletePathTool); - registry.register_tool(MovePathTool); - registry.register_tool(DiagnosticsTool); - registry.register_tool(ListDirectoryTool); - registry.register_tool(NowTool); - registry.register_tool(OpenTool); - registry.register_tool(ProjectNotificationsTool); - registry.register_tool(FindPathTool); - registry.register_tool(ReadFileTool); - registry.register_tool(GrepTool); - registry.register_tool(ThinkingTool); - registry.register_tool(FetchTool::new(http_client)); - registry.register_tool(EditFileTool); - - register_web_search_tool(&LanguageModelRegistry::global(cx), cx); - cx.subscribe( - &LanguageModelRegistry::global(cx), - move |registry, event, cx| { - if let language_model::Event::DefaultModelChanged = event { - register_web_search_tool(®istry, cx); - } - }, - ) - .detach(); -} - -fn register_web_search_tool(registry: &Entity, cx: &mut App) { - let using_zed_provider = registry - .read(cx) - .default_model() - .is_some_and(|default| default.is_provided_by_zed()); - if using_zed_provider { - ToolRegistry::global(cx).register_tool(WebSearchTool); - } else { - ToolRegistry::global(cx).unregister_tool(WebSearchTool); - } -} - -#[cfg(test)] -mod tests { - use super::*; - use agent_settings::AgentSettings; - use client::Client; - use clock::FakeSystemClock; - use http_client::FakeHttpClient; - use schemars::JsonSchema; - use serde::Serialize; - use settings::Settings; - - #[test] - fn test_json_schema() { - #[derive(Serialize, JsonSchema)] - struct GetWeatherTool { - location: String, - } - - let schema = schema::json_schema_for::( - language_model::LanguageModelToolSchemaFormat::JsonSchema, - ) - .unwrap(); - - assert_eq!( - schema, - serde_json::json!({ - "type": "object", - "properties": { - "location": { - "type": "string" - } - }, - "required": ["location"], - "additionalProperties": false - }) - ); - } - - #[gpui::test] - fn test_builtin_tool_schema_compatibility(cx: &mut App) { - settings::init(cx); - AgentSettings::register(cx); - - let client = Client::new( - Arc::new(FakeSystemClock::new()), - FakeHttpClient::with_200_response(), - cx, - ); - language_model::init(client.clone(), cx); - crate::init(client.http_client(), cx); - - for tool in ToolRegistry::global(cx).tools() { - let actual_schema = tool - .input_schema(language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset) - .unwrap(); - let mut expected_schema = actual_schema.clone(); - assistant_tool::adapt_schema_to_format( - &mut expected_schema, - language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset, - ) - .unwrap(); - - let error_message = format!( - "Tool schema for `{}` is not compatible with 
`language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset` (Gemini Models).\n\ - Are you using `schema::json_schema_for(format)` to generate the schema?", - tool.name(), - ); - - assert_eq!(actual_schema, expected_schema, "{}", error_message) - } - } -} diff --git a/crates/assistant_tools/src/copy_path_tool.rs b/crates/assistant_tools/src/copy_path_tool.rs deleted file mode 100644 index 572eddcb1079557b464ba29d125aa44929409cc5..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/copy_path_tool.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::AnyWindowHandle; -use gpui::{App, AppContext, Entity, Task}; -use language_model::LanguageModel; -use language_model::{LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::sync::Arc; -use ui::IconName; -use util::markdown::MarkdownInlineCode; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct CopyPathToolInput { - /// The source path of the file or directory to copy. - /// If a directory is specified, its contents will be copied recursively (like `cp -r`). - /// - /// - /// If the project has the following files: - /// - /// - directory1/a/something.txt - /// - directory2/a/things.txt - /// - directory3/a/other.txt - /// - /// You can copy the first file by providing a source_path of "directory1/a/something.txt" - /// - pub source_path: String, - - /// The destination path where the file or directory should be copied to. - /// - /// - /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt", - /// provide a destination_path of "directory2/b/copy.txt" - /// - pub destination_path: String, -} - -pub struct CopyPathTool; - -impl Tool for CopyPathTool { - fn name(&self) -> String { - "copy_path".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - true - } - - fn description(&self) -> String { - include_str!("./copy_path_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolCopy - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let src = MarkdownInlineCode(&input.source_path); - let dest = MarkdownInlineCode(&input.destination_path); - format!("Copy {src} to {dest}") - } - Err(_) => "Copy path".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - let copy_task = project.update(cx, |project, cx| { - match project - .find_project_path(&input.source_path, cx) - .and_then(|project_path| project.entry_for_path(&project_path, cx)) - { - Some(entity) => match project.find_project_path(&input.destination_path, cx) { - Some(project_path) => project.copy_entry(entity.id, project_path, cx), - None => Task::ready(Err(anyhow!( - "Destination path {} was outside the project.", - input.destination_path - ))), - }, - None => Task::ready(Err(anyhow!( - "Source path {} 
was not found in the project.", - input.source_path - ))), - } - }); - - cx.background_spawn(async move { - let _ = copy_task.await.with_context(|| { - format!( - "Copying {} to {}", - input.source_path, input.destination_path - ) - })?; - Ok(format!("Copied {} to {}", input.source_path, input.destination_path).into()) - }) - .into() - } -} diff --git a/crates/assistant_tools/src/copy_path_tool/description.md b/crates/assistant_tools/src/copy_path_tool/description.md deleted file mode 100644 index a5105e6f18c705e93aa9c30b9588f84dd8db542a..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/copy_path_tool/description.md +++ /dev/null @@ -1,6 +0,0 @@ -Copies a file or directory in the project, and returns confirmation that the copy succeeded. -Directory contents will be copied recursively (like `cp -r`). - -This tool should be used when it's desirable to create a copy of a file or directory without modifying the original. -It's much more efficient than doing this by separately reading and then writing the file or directory's contents, -so this tool should be preferred over that approach whenever copying is the goal. diff --git a/crates/assistant_tools/src/create_directory_tool.rs b/crates/assistant_tools/src/create_directory_tool.rs deleted file mode 100644 index 85eea463dc1dfd429dd70ded8c18faf6ee8421c5..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/create_directory_tool.rs +++ /dev/null @@ -1,100 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::AnyWindowHandle; -use gpui::{App, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::sync::Arc; -use ui::IconName; -use util::markdown::MarkdownInlineCode; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct CreateDirectoryToolInput { - /// The path of the new directory. 
- /// - /// - /// If the project has the following structure: - /// - /// - directory1/ - /// - directory2/ - /// - /// You can create a new directory by providing a path of "directory1/new_directory" - /// - pub path: String, -} - -pub struct CreateDirectoryTool; - -impl Tool for CreateDirectoryTool { - fn name(&self) -> String { - "create_directory".into() - } - - fn description(&self) -> String { - include_str!("./create_directory_tool/description.md").into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn icon(&self) -> IconName { - IconName::ToolFolder - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - format!("Create directory {}", MarkdownInlineCode(&input.path)) - } - Err(_) => "Create directory".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - let project_path = match project.read(cx).find_project_path(&input.path, cx) { - Some(project_path) => project_path, - None => { - return Task::ready(Err(anyhow!("Path to create was outside the project"))).into(); - } - }; - let destination_path: Arc = input.path.as_str().into(); - - cx.spawn(async move |cx| { - project - .update(cx, |project, cx| { - project.create_entry(project_path.clone(), true, cx) - })? - .await - .with_context(|| format!("Creating directory {destination_path}"))?; - - Ok(format!("Created directory {destination_path}").into()) - }) - .into() - } -} diff --git a/crates/assistant_tools/src/create_directory_tool/description.md b/crates/assistant_tools/src/create_directory_tool/description.md deleted file mode 100644 index 52056518c23517bf9fd36bf7d41d7e46947b15b6..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/create_directory_tool/description.md +++ /dev/null @@ -1,3 +0,0 @@ -Creates a new directory at the specified path within the project. Returns confirmation that the directory was created. - -This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project. diff --git a/crates/assistant_tools/src/delete_path_tool.rs b/crates/assistant_tools/src/delete_path_tool.rs deleted file mode 100644 index 7c85f1ed7552931822500f76bb9f3b1b1f47fd0c..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/delete_path_tool.rs +++ /dev/null @@ -1,144 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use futures::{SinkExt, StreamExt, channel::mpsc}; -use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::{Project, ProjectPath}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::sync::Arc; -use ui::IconName; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct DeletePathToolInput { - /// The path of the file or directory to delete. 
- /// - /// - /// If the project has the following files: - /// - /// - directory1/a/something.txt - /// - directory2/a/things.txt - /// - directory3/a/other.txt - /// - /// You can delete the first file by providing a path of "directory1/a/something.txt" - /// - pub path: String, -} - -pub struct DeletePathTool; - -impl Tool for DeletePathTool { - fn name(&self) -> String { - "delete_path".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - true - } - - fn may_perform_edits(&self) -> bool { - true - } - - fn description(&self) -> String { - include_str!("./delete_path_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolDeleteFile - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => format!("Delete “`{}`”", input.path), - Err(_) => "Delete path".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let path_str = match serde_json::from_value::(input) { - Ok(input) => input.path, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - let Some(project_path) = project.read(cx).find_project_path(&path_str, cx) else { - return Task::ready(Err(anyhow!( - "Couldn't delete {path_str} because that path isn't in this project." - ))) - .into(); - }; - - let Some(worktree) = project - .read(cx) - .worktree_for_id(project_path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!( - "Couldn't delete {path_str} because that path isn't in this project." - ))) - .into(); - }; - - let worktree_snapshot = worktree.read(cx).snapshot(); - let (mut paths_tx, mut paths_rx) = mpsc::channel(256); - cx.background_spawn({ - let project_path = project_path.clone(); - async move { - for entry in - worktree_snapshot.traverse_from_path(true, false, false, &project_path.path) - { - if !entry.path.starts_with(&project_path.path) { - break; - } - paths_tx - .send(ProjectPath { - worktree_id: project_path.worktree_id, - path: entry.path.clone(), - }) - .await?; - } - anyhow::Ok(()) - } - }) - .detach(); - - cx.spawn(async move |cx| { - while let Some(path) = paths_rx.next().await { - if let Ok(buffer) = project - .update(cx, |project, cx| project.open_buffer(path, cx))? - .await - { - action_log.update(cx, |action_log, cx| { - action_log.will_delete_buffer(buffer.clone(), cx) - })?; - } - } - - let deletion_task = project - .update(cx, |project, cx| { - project.delete_file(project_path, false, cx) - })? - .with_context(|| { - format!("Couldn't delete {path_str} because that path isn't in this project.") - })?; - deletion_task - .await - .with_context(|| format!("Deleting {path_str}"))?; - Ok(format!("Deleted {path_str}").into()) - }) - .into() - } -} diff --git a/crates/assistant_tools/src/delete_path_tool/description.md b/crates/assistant_tools/src/delete_path_tool/description.md deleted file mode 100644 index dfd4388bf04cf32038d04cacf169e9ea4bf05c56..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/delete_path_tool/description.md +++ /dev/null @@ -1 +0,0 @@ -Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion. 
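For reviewers skimming the removed path tools above, a minimal sketch of the input pattern they all shared: a small struct deriving Deserialize and JsonSchema, from which both the parsed tool-call arguments and the schema advertised to the model are produced. This is illustrative only, not part of the patch; `DeletePathToolInput` is re-declared here just for the example, and it assumes only the serde, serde_json, and schemars dependencies listed in the deleted Cargo.toml.

```rust
// Sketch only: mirrors the shape of the deleted DeletePathToolInput.
use schemars::JsonSchema;
use serde::Deserialize;

#[derive(Debug, Deserialize, JsonSchema)]
struct DeletePathToolInput {
    /// The path of the file or directory to delete, e.g. "directory1/a/something.txt".
    path: String,
}

fn main() {
    // What arrives as the tool-call arguments from the model:
    let input: DeletePathToolInput =
        serde_json::from_str(r#"{ "path": "directory1/a/something.txt" }"#).unwrap();
    assert_eq!(input.path, "directory1/a/something.txt");

    // Roughly the schema the tool exposed to the model, before any per-provider adaptation:
    let schema = schemars::schema_for!(DeletePathToolInput);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```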
diff --git a/crates/assistant_tools/src/diagnostics_tool.rs b/crates/assistant_tools/src/diagnostics_tool.rs deleted file mode 100644 index 75bd683512b58d2fdb6c43fc319d266f6609f926..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/diagnostics_tool.rs +++ /dev/null @@ -1,171 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use language::{DiagnosticSeverity, OffsetRangeExt}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{fmt::Write, sync::Arc}; -use ui::IconName; -use util::markdown::MarkdownInlineCode; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct DiagnosticsToolInput { - /// The path to get diagnostics for. If not provided, returns a project-wide summary. - /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. - /// - /// - /// If the project has the following root directories: - /// - /// - lorem - /// - ipsum - /// - /// If you wanna access diagnostics for `dolor.txt` in `ipsum`, you should use the path `ipsum/dolor.txt`. - /// - #[serde(deserialize_with = "deserialize_path")] - pub path: Option, -} - -fn deserialize_path<'de, D>(deserializer: D) -> Result, D::Error> -where - D: serde::Deserializer<'de>, -{ - let opt = Option::::deserialize(deserializer)?; - // The model passes an empty string sometimes - Ok(opt.filter(|s| !s.is_empty())) -} - -pub struct DiagnosticsTool; - -impl Tool for DiagnosticsTool { - fn name(&self) -> String { - "diagnostics".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./diagnostics_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolDiagnostics - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - if let Some(path) = serde_json::from_value::(input.clone()) - .ok() - .and_then(|input| match input.path { - Some(path) if !path.is_empty() => Some(path), - _ => None, - }) - { - format!("Check diagnostics for {}", MarkdownInlineCode(&path)) - } else { - "Check project diagnostics".to_string() - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - match serde_json::from_value::(input) - .ok() - .and_then(|input| input.path) - { - Some(path) if !path.is_empty() => { - let Some(project_path) = project.read(cx).find_project_path(&path, cx) else { - return Task::ready(Err(anyhow!("Could not find path {path} in project",))) - .into(); - }; - - let buffer = - project.update(cx, |project, cx| project.open_buffer(project_path, cx)); - - cx.spawn(async move |cx| { - let mut output = String::new(); - let buffer = buffer.await?; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - - for (_, group) in snapshot.diagnostic_groups(None) { - let entry = &group.entries[group.primary_ix]; - let range = entry.range.to_point(&snapshot); - let severity = match entry.diagnostic.severity { - 
DiagnosticSeverity::ERROR => "error", - DiagnosticSeverity::WARNING => "warning", - _ => continue, - }; - - writeln!( - output, - "{} at line {}: {}", - severity, - range.start.row + 1, - entry.diagnostic.message - )?; - } - - if output.is_empty() { - Ok("File doesn't have errors or warnings!".to_string().into()) - } else { - Ok(output.into()) - } - }) - .into() - } - _ => { - let project = project.read(cx); - let mut output = String::new(); - let mut has_diagnostics = false; - - for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { - if summary.error_count > 0 || summary.warning_count > 0 { - let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) - else { - continue; - }; - - has_diagnostics = true; - output.push_str(&format!( - "{}: {} error(s), {} warning(s)\n", - worktree.read(cx).absolutize(&project_path.path).display(), - summary.error_count, - summary.warning_count - )); - } - } - - if has_diagnostics { - Task::ready(Ok(output.into())).into() - } else { - Task::ready(Ok("No errors or warnings found in the project." - .to_string() - .into())) - .into() - } - } - } - } -} diff --git a/crates/assistant_tools/src/diagnostics_tool/description.md b/crates/assistant_tools/src/diagnostics_tool/description.md deleted file mode 100644 index 90dc00f1e408c0bd4d79de68833db9d4bafc0d2c..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/diagnostics_tool/description.md +++ /dev/null @@ -1,21 +0,0 @@ -Get errors and warnings for the project or a specific file. - -This tool can be invoked after a series of edits to determine if further edits are necessary, or if the user asks to fix errors or warnings in their codebase. - -When a path is provided, shows all diagnostics for that specific file. -When no path is provided, shows a summary of error and warning counts for all files in the project. - - -To get diagnostics for a specific file: -{ - "path": "src/main.rs" -} - -To get a project-wide diagnostic summary: -{} - - - -- If you think you can fix a diagnostic, make 1-2 attempts and then give up. -- Don't remove code you've generated just because you can't fix an error. The user can help you fix it. 
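For a single file, the deleted diagnostics tool reported one line per primary diagnostic in the form `severity at line N: message`, skipped anything below warning severity, and fell back to a friendly message when nothing qualified. A standalone sketch of that formatting, with hand-rolled `Severity` and `Entry` types standing in for Zed's `DiagnosticSeverity` and diagnostic groups:

```rust
use std::fmt::Write;

// Stand-ins for Zed's DiagnosticSeverity and diagnostic-group entries.
enum Severity { Hint, Warning, Error }

struct Entry { severity: Severity, row: u32, message: &'static str }

fn format_report(entries: &[Entry]) -> String {
    let mut output = String::new();
    for entry in entries {
        let severity = match entry.severity {
            Severity::Error => "error",
            Severity::Warning => "warning",
            // Hints (and anything else below warning) are skipped.
            _ => continue,
        };
        // Rows are zero-based internally; report one-based line numbers.
        writeln!(output, "{severity} at line {}: {}", entry.row + 1, entry.message).unwrap();
    }
    if output.is_empty() {
        "File doesn't have errors or warnings!".to_string()
    } else {
        output
    }
}

fn main() {
    let entries = [
        Entry { severity: Severity::Error, row: 4, message: "cannot find value `foo`" },
        Entry { severity: Severity::Warning, row: 11, message: "unused variable: `bar`" },
        Entry { severity: Severity::Hint, row: 20, message: "consider adding a type" },
    ];
    print!("{}", format_report(&entries));
}
```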
- diff --git a/crates/assistant_tools/src/edit_file_tool.rs b/crates/assistant_tools/src/edit_file_tool.rs deleted file mode 100644 index f88978650a32cdc6922a1ff864b0ee898721df80..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/edit_file_tool.rs +++ /dev/null @@ -1,2423 +0,0 @@ -use crate::{ - Templates, - edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, - schema::json_schema_for, - ui::{COLLAPSED_LINES, ToolOutputPreview}, -}; -use action_log::ActionLog; -use agent_settings; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ - AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, -}; -use buffer_diff::{BufferDiff, BufferDiffSnapshot}; -use editor::{ - Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey, multibuffer_context_lines, -}; -use futures::StreamExt; -use gpui::{ - Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task, - TextStyleRefinement, WeakEntity, pulsating_between, -}; -use indoc::formatdoc; -use language::{ - Anchor, Buffer, Capability, LanguageRegistry, LineEnding, OffsetRangeExt, Point, Rope, - TextBuffer, - language_settings::{self, FormatOnSave, SoftWrap}, -}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use markdown::{Markdown, MarkdownElement, MarkdownStyle}; -use paths; -use project::{ - Project, ProjectPath, - lsp_store::{FormatTrigger, LspFormatTarget}, -}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::Settings; -use std::{ - cmp::Reverse, - collections::HashSet, - ffi::OsStr, - ops::Range, - path::{Path, PathBuf}, - sync::Arc, - time::Duration, -}; -use theme::ThemeSettings; -use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*}; -use util::{ResultExt, rel_path::RelPath}; -use workspace::Workspace; - -pub struct EditFileTool; - -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditFileToolInput { - /// A one-line, user-friendly markdown description of the edit. This will be - /// shown in the UI and also passed to another model to perform the edit. - /// - /// Be terse, but also descriptive in what you want to achieve with this - /// edit. Avoid generic instructions. - /// - /// NEVER mention the file path in this description. - /// - /// Fix API endpoint URLs - /// Update copyright year in `page_footer` - /// - /// Make sure to include this field before all the others in the input object - /// so that we can display it immediately. - pub display_description: String, - - /// The full path of the file to create or modify in the project. - /// - /// WARNING: When specifying which file path need changing, you MUST - /// start each path with one of the project's root directories. - /// - /// The following examples assume we have two root directories in the project: - /// - /a/b/backend - /// - /c/d/frontend - /// - /// - /// `backend/src/main.rs` - /// - /// Notice how the file path starts with `backend`. Without that, the path - /// would be ambiguous and the call would fail! - /// - /// - /// - /// `frontend/db.js` - /// - pub path: PathBuf, - - /// The mode of operation on the file. Possible values: - /// - 'edit': Make granular edits to an existing file. - /// - 'create': Create a new file if it doesn't exist. - /// - 'overwrite': Replace the entire contents of an existing file. - /// - /// When a file already exists or you just created it, prefer editing - /// it as opposed to recreating it from scratch. 
- pub mode: EditFileMode, -} - -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "lowercase")] -pub enum EditFileMode { - Edit, - Create, - Overwrite, -} - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditFileToolOutput { - pub original_path: PathBuf, - pub new_text: String, - pub old_text: Arc, - pub raw_output: Option, -} - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -struct PartialInput { - #[serde(default)] - path: String, - #[serde(default)] - display_description: String, -} - -const DEFAULT_UI_TEXT: &str = "Editing file"; - -impl Tool for EditFileTool { - fn name(&self) -> String { - "edit_file".into() - } - - fn needs_confirmation( - &self, - input: &serde_json::Value, - project: &Entity, - cx: &App, - ) -> bool { - if agent_settings::AgentSettings::get_global(cx).always_allow_tool_actions { - return false; - } - - let Ok(input) = serde_json::from_value::(input.clone()) else { - // If it's not valid JSON, it's going to error and confirming won't do anything. - return false; - }; - - // If any path component matches the local settings folder, then this could affect - // the editor in ways beyond the project source, so prompt. - let local_settings_folder = paths::local_settings_folder_name(); - let path = Path::new(&input.path); - if path - .components() - .any(|c| c.as_os_str() == >::as_ref(local_settings_folder)) - { - return true; - } - - // It's also possible that the global config dir is configured to be inside the project, - // so check for that edge case too. - if let Ok(canonical_path) = std::fs::canonicalize(&input.path) - && canonical_path.starts_with(paths::config_dir()) - { - return true; - } - - // Check if path is inside the global config directory - // First check if it's already inside project - if not, try to canonicalize - let project_path = project.read(cx).find_project_path(&input.path, cx); - - // If the path is inside the project, and it's not one of the above edge cases, - // then no confirmation is necessary. Otherwise, confirmation is necessary. 
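The confirmation rule spelled out in the comments above reduces to: always prompt when the path touches the local settings folder (or, after canonicalization, the global config directory), and otherwise prompt only when the path does not resolve inside the project. A std-only sketch of that decision, where `".zed"` and the `in_project` predicate are hypothetical stand-ins for `paths::local_settings_folder_name()` and `Project::find_project_path`, and the canonicalization check is omitted:

```rust
use std::path::Path;

fn needs_confirmation(path: &str, in_project: impl Fn(&str) -> bool) -> bool {
    // ".zed" stands in for paths::local_settings_folder_name(); the deleted
    // code additionally canonicalizes the path and compares it against the
    // global config dir, which this sketch leaves out.
    let local_settings_folder = ".zed";
    let touches_local_settings = Path::new(path)
        .components()
        .any(|c| c.as_os_str() == local_settings_folder);

    // Prompt for local-settings edits; otherwise only for paths that don't
    // resolve inside the project.
    touches_local_settings || !in_project(path)
}

fn main() {
    // Hypothetical predicate standing in for Project::find_project_path.
    let in_project = |p: &str| p.starts_with("root/");
    assert!(needs_confirmation("root/.zed/tasks.json", in_project));
    assert!(needs_confirmation("/etc/hosts", in_project));
    assert!(!needs_confirmation("root/src/main.rs", in_project));
    println!("confirmation rules behave as described");
}
```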
- project_path.is_none() - } - - fn may_perform_edits(&self) -> bool { - true - } - - fn description(&self) -> String { - include_str!("edit_file_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ToolPencil - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let path = Path::new(&input.path); - let mut description = input.display_description.clone(); - - // Add context about why confirmation may be needed - let local_settings_folder = paths::local_settings_folder_name(); - if path - .components() - .any(|c| c.as_os_str() == >::as_ref(local_settings_folder)) - { - description.push_str(" (local settings)"); - } else if let Ok(canonical_path) = std::fs::canonicalize(&input.path) - && canonical_path.starts_with(paths::config_dir()) - { - description.push_str(" (global settings)"); - } - - description - } - Err(_) => "Editing file".to_string(), - } - } - - fn still_streaming_ui_text(&self, input: &serde_json::Value) -> String { - if let Some(input) = serde_json::from_value::(input.clone()).ok() { - let description = input.display_description.trim(); - if !description.is_empty() { - return description.to_string(); - } - - let path = input.path.trim(); - if !path.is_empty() { - return path.to_string(); - } - } - - DEFAULT_UI_TEXT.to_string() - } - - fn run( - self: Arc, - input: serde_json::Value, - request: Arc, - project: Entity, - action_log: Entity, - model: Arc, - window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let project_path = match resolve_path(&input, project.clone(), cx) { - Ok(path) => path, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let card = window.and_then(|window| { - window - .update(cx, |_, window, cx| { - cx.new(|cx| { - EditFileToolCard::new(input.path.clone(), project.clone(), window, cx) - }) - }) - .ok() - }); - - let card_clone = card.clone(); - let action_log_clone = action_log.clone(); - let task = cx.spawn(async move |cx: &mut AsyncApp| { - let edit_format = EditFormat::from_model(model.clone())?; - let edit_agent = EditAgent::new( - model, - project.clone(), - action_log_clone, - Templates::new(), - edit_format, - ); - - let buffer = project - .update(cx, |project, cx| { - project.open_buffer(project_path.clone(), cx) - })? - .await?; - - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - let old_text = cx - .background_spawn({ - let old_snapshot = old_snapshot.clone(); - async move { Arc::new(old_snapshot.text()) } - }) - .await; - - if let Some(card) = card_clone.as_ref() { - card.update(cx, |card, cx| card.initialize(buffer.clone(), cx))?; - } - - let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) { - edit_agent.edit( - buffer.clone(), - input.display_description.clone(), - &request, - cx, - ) - } else { - edit_agent.overwrite( - buffer.clone(), - input.display_description.clone(), - &request, - cx, - ) - }; - - let mut hallucinated_old_text = false; - let mut ambiguous_ranges = Vec::new(); - while let Some(event) = events.next().await { - match event { - EditAgentOutputEvent::Edited { .. 
} => { - if let Some(card) = card_clone.as_ref() { - card.update(cx, |card, cx| card.update_diff(cx))?; - } - } - EditAgentOutputEvent::UnresolvedEditRange => hallucinated_old_text = true, - EditAgentOutputEvent::AmbiguousEditRange(ranges) => ambiguous_ranges = ranges, - EditAgentOutputEvent::ResolvingEditRange(range) => { - if let Some(card) = card_clone.as_ref() { - card.update(cx, |card, cx| card.reveal_range(range, cx))?; - } - } - } - } - let agent_output = output.await?; - - // If format_on_save is enabled, format the buffer - let format_on_save_enabled = buffer - .read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); - !matches!(settings.format_on_save, FormatOnSave::Off) - }) - .unwrap_or(false); - - if format_on_save_enabled { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - })?; - let format_task = project.update(cx, |project, cx| { - project.format( - HashSet::from_iter([buffer.clone()]), - LspFormatTarget::Buffers, - false, // Don't push to history since the tool did it. - FormatTrigger::Save, - cx, - ) - })?; - format_task.await.log_err(); - } - - project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? - .await?; - - // Notify the action log that we've edited the buffer (*after* formatting has completed). - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - })?; - - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - let (new_text, diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - - (new_text, diff) - } - }) - .await; - - let output = EditFileToolOutput { - original_path: project_path.path.as_std_path().to_owned(), - new_text, - old_text, - raw_output: Some(agent_output), - }; - - if let Some(card) = card_clone { - card.update(cx, |card, cx| { - card.update_diff(cx); - card.finalize(cx) - }) - .log_err(); - } - - let input_path = input.path.display(); - if diff.is_empty() { - anyhow::ensure!( - !hallucinated_old_text, - formatdoc! {" - Some edits were produced but none of them could be applied. - Read the relevant sections of {input_path} again so that - I can perform the requested edits. - "} - ); - anyhow::ensure!( - ambiguous_ranges.is_empty(), - { - let line_numbers = ambiguous_ranges - .iter() - .map(|range| range.start.to_string()) - .collect::>() - .join(", "); - formatdoc! {" - matches more than one position in the file (lines: {line_numbers}). Read the - relevant sections of {input_path} again and extend so - that I can perform the requested edits. 
- "} - } - ); - Ok(ToolResultOutput { - content: ToolResultContent::Text("No edits were made.".into()), - output: serde_json::to_value(output).ok(), - }) - } else { - Ok(ToolResultOutput { - content: ToolResultContent::Text(format!( - "Edited {}:\n\n```diff\n{}\n```", - input_path, diff - )), - output: serde_json::to_value(output).ok(), - }) - } - }); - - ToolResult { - output: task, - card: card.map(AnyToolCard::from), - } - } - - fn deserialize_card( - self: Arc, - output: serde_json::Value, - project: Entity, - window: &mut Window, - cx: &mut App, - ) -> Option { - let output = match serde_json::from_value::(output) { - Ok(output) => output, - Err(_) => return None, - }; - - let card = cx.new(|cx| { - EditFileToolCard::new(output.original_path.clone(), project.clone(), window, cx) - }); - - cx.spawn({ - let path: Arc = output.original_path.into(); - let language_registry = project.read(cx).languages().clone(); - let card = card.clone(); - async move |cx| { - let buffer = - build_buffer(output.new_text, path.clone(), &language_registry, cx).await?; - let buffer_diff = - build_buffer_diff(output.old_text.clone(), &buffer, &language_registry, cx) - .await?; - card.update(cx, |card, cx| { - card.multibuffer.update(cx, |multibuffer, cx| { - let snapshot = buffer.read(cx).snapshot(); - let diff = buffer_diff.read(cx); - let diff_hunk_ranges = diff - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(&snapshot)) - .collect::>(); - - multibuffer.set_excerpts_for_path( - PathKey::for_buffer(&buffer, cx), - buffer, - diff_hunk_ranges, - multibuffer_context_lines(cx), - cx, - ); - multibuffer.add_diff(buffer_diff, cx); - let end = multibuffer.len(cx); - card.total_lines = - Some(multibuffer.snapshot(cx).offset_to_point(end).row + 1); - }); - - cx.notify(); - })?; - anyhow::Ok(()) - } - }) - .detach_and_log_err(cx); - - Some(card.into()) - } -} - -/// Validate that the file path is valid, meaning: -/// -/// - For `edit` and `overwrite`, the path must point to an existing file. -/// - For `create`, the file must not already exist, but it's parent dir must exist. 
-fn resolve_path( - input: &EditFileToolInput, - project: Entity, - cx: &mut App, -) -> Result { - let project = project.read(cx); - - match input.mode { - EditFileMode::Edit | EditFileMode::Overwrite => { - let path = project - .find_project_path(&input.path, cx) - .context("Can't edit file: path not found")?; - - let entry = project - .entry_for_path(&path, cx) - .context("Can't edit file: path not found")?; - - anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); - Ok(path) - } - - EditFileMode::Create => { - if let Some(path) = project.find_project_path(&input.path, cx) { - anyhow::ensure!( - project.entry_for_path(&path, cx).is_none(), - "Can't create file: file already exists" - ); - } - - let parent_path = input - .path - .parent() - .context("Can't create file: incorrect path")?; - - let parent_project_path = project.find_project_path(&parent_path, cx); - - let parent_entry = parent_project_path - .as_ref() - .and_then(|path| project.entry_for_path(path, cx)) - .context("Can't create file: parent directory doesn't exist")?; - - anyhow::ensure!( - parent_entry.is_dir(), - "Can't create file: parent is not a directory" - ); - - let file_name = input - .path - .file_name() - .and_then(|file_name| file_name.to_str()) - .context("Can't create file: invalid filename")?; - - let new_file_path = parent_project_path.map(|parent| ProjectPath { - path: parent.path.join(RelPath::unix(file_name).unwrap()), - ..parent - }); - - new_file_path.context("Can't create file") - } - } -} - -pub struct EditFileToolCard { - path: PathBuf, - editor: Entity, - multibuffer: Entity, - project: Entity, - buffer: Option>, - base_text: Option>, - buffer_diff: Option>, - revealed_ranges: Vec>, - diff_task: Option>>, - preview_expanded: bool, - error_expanded: Option>, - full_height_expanded: bool, - total_lines: Option, -} - -impl EditFileToolCard { - pub fn new(path: PathBuf, project: Entity, window: &mut Window, cx: &mut App) -> Self { - let expand_edit_card = agent_settings::AgentSettings::get_global(cx).expand_edit_card; - let multibuffer = cx.new(|_| MultiBuffer::without_headers(Capability::ReadOnly)); - - let editor = cx.new(|cx| { - let mut editor = Editor::new( - EditorMode::Full { - scale_ui_elements_with_buffer_font_size: false, - show_active_line_background: false, - sized_by_content: true, - }, - multibuffer.clone(), - Some(project.clone()), - window, - cx, - ); - editor.set_show_gutter(false, cx); - editor.disable_inline_diagnostics(); - editor.disable_expand_excerpt_buttons(cx); - // Keep horizontal scrollbar so user can scroll horizontally if needed - editor.set_show_vertical_scrollbar(false, cx); - editor.set_minimap_visibility(MinimapVisibility::Disabled, window, cx); - editor.set_soft_wrap_mode(SoftWrap::None, cx); - editor.scroll_manager.set_forbid_vertical_scroll(true); - editor.set_show_indent_guides(false, cx); - editor.set_read_only(true); - editor.set_show_breakpoints(false, cx); - editor.set_show_code_actions(false, cx); - editor.set_show_git_diff_gutter(false, cx); - editor.set_expand_all_diff_hunks(cx); - editor - }); - Self { - path, - project, - editor, - multibuffer, - buffer: None, - base_text: None, - buffer_diff: None, - revealed_ranges: Vec::new(), - diff_task: None, - preview_expanded: true, - error_expanded: None, - full_height_expanded: expand_edit_card, - total_lines: None, - } - } - - pub fn initialize(&mut self, buffer: Entity, cx: &mut App) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let base_text = buffer_snapshot.text(); - let 
language_registry = buffer.read(cx).language_registry(); - let text_snapshot = buffer.read(cx).text_snapshot(); - - // Create a buffer diff with the current text as the base - let buffer_diff = cx.new(|cx| { - let mut diff = BufferDiff::new(&text_snapshot, cx); - let _ = diff.set_base_text( - buffer_snapshot.clone(), - language_registry, - text_snapshot, - cx, - ); - diff - }); - - self.buffer = Some(buffer); - self.base_text = Some(base_text.into()); - self.buffer_diff = Some(buffer_diff.clone()); - - // Add the diff to the multibuffer - self.multibuffer - .update(cx, |multibuffer, cx| multibuffer.add_diff(buffer_diff, cx)); - } - - pub fn is_loading(&self) -> bool { - self.total_lines.is_none() - } - - pub fn update_diff(&mut self, cx: &mut Context) { - let Some(buffer) = self.buffer.as_ref() else { - return; - }; - let Some(buffer_diff) = self.buffer_diff.as_ref() else { - return; - }; - - let buffer = buffer.clone(); - let buffer_diff = buffer_diff.clone(); - let base_text = self.base_text.clone(); - self.diff_task = Some(cx.spawn(async move |this, cx| { - let text_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot())?; - let diff_snapshot = BufferDiff::update_diff( - buffer_diff.clone(), - text_snapshot.clone(), - base_text, - false, - false, - None, - None, - cx, - ) - .await?; - buffer_diff.update(cx, |diff, cx| { - diff.set_snapshot(diff_snapshot, &text_snapshot, cx) - })?; - this.update(cx, |this, cx| this.update_visible_ranges(cx)) - })); - } - - pub fn reveal_range(&mut self, range: Range, cx: &mut Context) { - self.revealed_ranges.push(range); - self.update_visible_ranges(cx); - } - - fn update_visible_ranges(&mut self, cx: &mut Context) { - let Some(buffer) = self.buffer.as_ref() else { - return; - }; - - let ranges = self.excerpt_ranges(cx); - self.total_lines = self.multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::for_buffer(buffer, cx), - buffer.clone(), - ranges, - multibuffer_context_lines(cx), - cx, - ); - let end = multibuffer.len(cx); - Some(multibuffer.snapshot(cx).offset_to_point(end).row + 1) - }); - cx.notify(); - } - - fn excerpt_ranges(&self, cx: &App) -> Vec> { - let Some(buffer) = self.buffer.as_ref() else { - return Vec::new(); - }; - let Some(diff) = self.buffer_diff.as_ref() else { - return Vec::new(); - }; - - let buffer = buffer.read(cx); - let diff = diff.read(cx); - let mut ranges = diff - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) - .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) - .collect::>(); - ranges.extend( - self.revealed_ranges - .iter() - .map(|range| range.to_point(buffer)), - ); - ranges.sort_unstable_by_key(|range| (range.start, Reverse(range.end))); - - // Merge adjacent ranges - let mut ranges = ranges.into_iter().peekable(); - let mut merged_ranges = Vec::new(); - while let Some(mut range) = ranges.next() { - while let Some(next_range) = ranges.peek() { - if range.end >= next_range.start { - range.end = range.end.max(next_range.end); - ranges.next(); - } else { - break; - } - } - - merged_ranges.push(range); - } - merged_ranges - } - - pub fn finalize(&mut self, cx: &mut Context) -> Result<()> { - let ranges = self.excerpt_ranges(cx); - let buffer = self.buffer.take().context("card was already finalized")?; - let base_text = self - .base_text - .take() - .context("card was already finalized")?; - let language_registry = self.project.read(cx).languages().clone(); - - // Replace the buffer in the multibuffer with the snapshot - let buffer = cx.new(|cx| { 
- let language = buffer.read(cx).language().cloned(); - let buffer = TextBuffer::new_normalized( - 0, - cx.entity_id().as_non_zero_u64().into(), - buffer.read(cx).line_ending(), - buffer.read(cx).as_rope().clone(), - ); - let mut buffer = Buffer::build(buffer, None, Capability::ReadWrite); - buffer.set_language(language, cx); - buffer - }); - - let buffer_diff = cx.spawn({ - let buffer = buffer.clone(); - async move |_this, cx| { - build_buffer_diff(base_text, &buffer, &language_registry, cx).await - } - }); - - cx.spawn(async move |this, cx| { - let buffer_diff = buffer_diff.await?; - this.update(cx, |this, cx| { - this.multibuffer.update(cx, |multibuffer, cx| { - let path_key = PathKey::for_buffer(&buffer, cx); - multibuffer.clear(cx); - multibuffer.set_excerpts_for_path( - path_key, - buffer, - ranges, - multibuffer_context_lines(cx), - cx, - ); - multibuffer.add_diff(buffer_diff.clone(), cx); - }); - - cx.notify(); - }) - }) - .detach_and_log_err(cx); - Ok(()) - } -} - -impl ToolCard for EditFileToolCard { - fn render( - &mut self, - status: &ToolUseStatus, - window: &mut Window, - workspace: WeakEntity, - cx: &mut Context, - ) -> impl IntoElement { - let error_message = match status { - ToolUseStatus::Error(err) => Some(err), - _ => None, - }; - - let running_or_pending = match status { - ToolUseStatus::Running | ToolUseStatus::Pending => Some(()), - _ => None, - }; - - let should_show_loading = running_or_pending.is_some() && !self.full_height_expanded; - - let path_label_button = h_flex() - .id(("edit-tool-path-label-button", self.editor.entity_id())) - .w_full() - .max_w_full() - .px_1() - .gap_0p5() - .cursor_pointer() - .rounded_sm() - .opacity(0.8) - .hover(|label| { - label - .opacity(1.) - .bg(cx.theme().colors().element_hover.opacity(0.5)) - }) - .tooltip(Tooltip::text("Jump to File")) - .child( - h_flex() - .child( - Icon::new(IconName::ToolPencil) - .size(IconSize::Small) - .color(Color::Muted), - ) - .child( - div() - .text_size(rems(0.8125)) - .child(self.path.display().to_string()) - .ml_1p5() - .mr_0p5(), - ) - .child( - Icon::new(IconName::ArrowUpRight) - .size(IconSize::Small) - .color(Color::Ignored), - ), - ) - .on_click({ - let path = self.path.clone(); - move |_, window, cx| { - workspace - .update(cx, { - |workspace, cx| { - let Some(project_path) = - workspace.project().read(cx).find_project_path(&path, cx) - else { - return; - }; - let open_task = - workspace.open_path(project_path, None, true, window, cx); - window - .spawn(cx, async move |cx| { - let item = open_task.await?; - if let Some(active_editor) = item.downcast::() { - active_editor - .update_in(cx, |editor, window, cx| { - let snapshot = - editor.buffer().read(cx).snapshot(cx); - let first_hunk = editor - .diff_hunks_in_ranges( - &[editor::Anchor::min() - ..editor::Anchor::max()], - &snapshot, - ) - .next(); - if let Some(first_hunk) = first_hunk { - let first_hunk_start = - first_hunk.multi_buffer_range().start; - editor.change_selections( - Default::default(), - window, - cx, - |selections| { - selections.select_anchor_ranges([ - first_hunk_start - ..first_hunk_start, - ]); - }, - ) - } - }) - .log_err(); - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - }) - .ok(); - } - }) - .into_any_element(); - - let codeblock_header_bg = cx - .theme() - .colors() - .element_background - .blend(cx.theme().colors().editor_foreground.opacity(0.025)); - - let codeblock_header = h_flex() - .flex_none() - .p_1() - .gap_1() - .justify_between() - .rounded_t_md() - .when(error_message.is_none(), |header| { - 
header.bg(codeblock_header_bg) - }) - .child(path_label_button) - .when(should_show_loading, |header| { - header.pr_1p5().child( - Icon::new(IconName::ArrowCircle) - .size(IconSize::XSmall) - .color(Color::Info) - .with_rotate_animation(2), - ) - }) - .when_some(error_message, |header, error_message| { - header.child( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Close) - .size(IconSize::Small) - .color(Color::Error), - ) - .child( - Disclosure::new( - ("edit-file-error-disclosure", self.editor.entity_id()), - self.error_expanded.is_some(), - ) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .on_click(cx.listener({ - let error_message = error_message.clone(); - - move |this, _event, _window, cx| { - if this.error_expanded.is_some() { - this.error_expanded.take(); - } else { - this.error_expanded = Some(cx.new(|cx| { - Markdown::new(error_message.clone(), None, None, cx) - })) - } - cx.notify(); - } - })), - ), - ) - }) - .when(error_message.is_none() && !self.is_loading(), |header| { - header.child( - Disclosure::new( - ("edit-file-disclosure", self.editor.entity_id()), - self.preview_expanded, - ) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .on_click(cx.listener( - move |this, _event, _window, _cx| { - this.preview_expanded = !this.preview_expanded; - }, - )), - ) - }); - - let (editor, editor_line_height) = self.editor.update(cx, |editor, cx| { - let line_height = editor - .style() - .map(|style| style.text.line_height_in_pixels(window.rem_size())) - .unwrap_or_default(); - - editor.set_text_style_refinement(TextStyleRefinement { - font_size: Some( - TextSize::Small - .rems(cx) - .to_pixels(ThemeSettings::get_global(cx).agent_ui_font_size(cx)) - .into(), - ), - ..TextStyleRefinement::default() - }); - let element = editor.render(window, cx); - (element.into_any_element(), line_height) - }); - - let border_color = cx.theme().colors().border.opacity(0.6); - - let waiting_for_diff = { - let styles = [ - ("w_4_5", (0.1, 0.85), 2000), - ("w_1_4", (0.2, 0.75), 2200), - ("w_2_4", (0.15, 0.64), 1900), - ("w_3_5", (0.25, 0.72), 2300), - ("w_2_5", (0.3, 0.56), 1800), - ]; - - let mut container = v_flex() - .p_3() - .gap_1() - .border_t_1() - .rounded_b_md() - .border_color(border_color) - .bg(cx.theme().colors().editor_background); - - for (width_method, pulse_range, duration_ms) in styles.iter() { - let (min_opacity, max_opacity) = *pulse_range; - let placeholder = match *width_method { - "w_4_5" => div().w_3_4(), - "w_1_4" => div().w_1_4(), - "w_2_4" => div().w_2_4(), - "w_3_5" => div().w_3_5(), - "w_2_5" => div().w_2_5(), - _ => div().w_1_2(), - } - .id("loading_div") - .h_1() - .rounded_full() - .bg(cx.theme().colors().element_active) - .with_animation( - "loading_pulsate", - Animation::new(Duration::from_millis(*duration_ms)) - .repeat() - .with_easing(pulsating_between(min_opacity, max_opacity)), - |label, delta| label.opacity(delta), - ); - - container = container.child(placeholder); - } - - container - }; - - v_flex() - .mb_2() - .border_1() - .when(error_message.is_some(), |card| card.border_dashed()) - .border_color(border_color) - .rounded_md() - .overflow_hidden() - .child(codeblock_header) - .when_some(self.error_expanded.as_ref(), |card, error_markdown| { - card.child( - v_flex() - .p_2() - .gap_1() - .border_t_1() - .border_dashed() - .border_color(border_color) - .bg(cx.theme().colors().editor_background) - .rounded_b_md() - .child( - Label::new("Error") - .size(LabelSize::XSmall) - .color(Color::Error), - ) - 
.child( - div() - .rounded_md() - .text_ui_sm(cx) - .bg(cx.theme().colors().editor_background) - .child(MarkdownElement::new( - error_markdown.clone(), - markdown_style(window, cx), - )), - ), - ) - }) - .when(self.is_loading() && error_message.is_none(), |card| { - card.child(waiting_for_diff) - }) - .when(self.preview_expanded && !self.is_loading(), |card| { - let editor_view = v_flex() - .relative() - .h_full() - .when(!self.full_height_expanded, |editor_container| { - editor_container.max_h(COLLAPSED_LINES as f32 * editor_line_height) - }) - .overflow_hidden() - .border_t_1() - .border_color(border_color) - .bg(cx.theme().colors().editor_background) - .child(editor); - - card.child( - ToolOutputPreview::new(editor_view.into_any_element(), self.editor.entity_id()) - .with_total_lines(self.total_lines.unwrap_or(0) as usize) - .toggle_state(self.full_height_expanded) - .with_collapsed_fade() - .on_toggle({ - let this = cx.entity().downgrade(); - move |is_expanded, _window, cx| { - if let Some(this) = this.upgrade() { - this.update(cx, |this, _cx| { - this.full_height_expanded = is_expanded; - }); - } - } - }), - ) - }) - } -} - -fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle { - let theme_settings = ThemeSettings::get_global(cx); - let ui_font_size = TextSize::Default.rems(cx); - let mut text_style = window.text_style(); - - text_style.refine(&TextStyleRefinement { - font_family: Some(theme_settings.ui_font.family.clone()), - font_fallbacks: theme_settings.ui_font.fallbacks.clone(), - font_features: Some(theme_settings.ui_font.features.clone()), - font_size: Some(ui_font_size.into()), - color: Some(cx.theme().colors().text), - ..Default::default() - }); - - MarkdownStyle { - base_text_style: text_style.clone(), - selection_background_color: cx.theme().colors().element_selection_background, - ..Default::default() - } -} - -async fn build_buffer( - mut text: String, - path: Arc, - language_registry: &Arc, - cx: &mut AsyncApp, -) -> Result> { - let line_ending = LineEnding::detect(&text); - LineEnding::normalize(&mut text); - let text = Rope::from(text); - let language = cx - .update(|_cx| language_registry.load_language_for_file_path(&path))? - .await - .ok(); - let buffer = cx.new(|cx| { - let buffer = TextBuffer::new_normalized( - 0, - cx.entity_id().as_non_zero_u64().into(), - line_ending, - text, - ); - let mut buffer = Buffer::build(buffer, None, Capability::ReadWrite); - buffer.set_language(language, cx); - buffer - })?; - Ok(buffer) -} - -async fn build_buffer_diff( - old_text: Arc, - buffer: &Entity, - language_registry: &Arc, - cx: &mut AsyncApp, -) -> Result> { - let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; - - let old_text_rope = cx - .background_spawn({ - let old_text = old_text.clone(); - async move { Rope::from(old_text.as_str()) } - }) - .await; - let base_buffer = cx - .update(|cx| { - Buffer::build_snapshot( - old_text_rope, - buffer.language().cloned(), - Some(language_registry.clone()), - cx, - ) - })? - .await; - - let diff_snapshot = cx - .update(|cx| { - BufferDiffSnapshot::new_with_base_buffer( - buffer.text.clone(), - Some(old_text), - base_buffer, - cx, - ) - })? 
- .await; - - let secondary_diff = cx.new(|cx| { - let mut diff = BufferDiff::new(&buffer, cx); - diff.set_snapshot(diff_snapshot.clone(), &buffer, cx); - diff - })?; - - cx.new(|cx| { - let mut diff = BufferDiff::new(&buffer.text, cx); - diff.set_snapshot(diff_snapshot, &buffer, cx); - diff.set_secondary_diff(secondary_diff); - diff - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use ::fs::Fs; - use client::TelemetrySettings; - use gpui::{TestAppContext, UpdateGlobal}; - use language_model::fake_provider::FakeLanguageModel; - use serde_json::json; - use settings::SettingsStore; - use std::fs; - use util::{path, rel_path::rel_path}; - - #[gpui::test] - async fn test_edit_nonexistent_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let result = cx - .update(|cx| { - let input = serde_json::to_value(EditFileToolInput { - display_description: "Some edit".into(), - path: "root/nonexistent_file.txt".into(), - mode: EditFileMode::Edit, - }) - .unwrap(); - Arc::new(EditFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - assert_eq!( - result.unwrap_err().to_string(), - "Can't edit file: path not found" - ); - } - - #[gpui::test] - async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) { - let mode = &EditFileMode::Create; - - let result = test_resolve_path(mode, "root/new.txt", cx); - assert_resolved_path_eq(result.await, "new.txt"); - - let result = test_resolve_path(mode, "new.txt", cx); - assert_resolved_path_eq(result.await, "new.txt"); - - let result = test_resolve_path(mode, "dir/new.txt", cx); - assert_resolved_path_eq(result.await, "dir/new.txt"); - - let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't create file: file already exists" - ); - - let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't create file: parent directory doesn't exist" - ); - } - - #[gpui::test] - async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) { - let mode = &EditFileMode::Edit; - - let path_with_root = "root/dir/subdir/existing.txt"; - let path_without_root = "dir/subdir/existing.txt"; - let result = test_resolve_path(mode, path_with_root, cx); - assert_resolved_path_eq(result.await, path_without_root); - - let result = test_resolve_path(mode, path_without_root, cx); - assert_resolved_path_eq(result.await, path_without_root); - - let result = test_resolve_path(mode, "root/nonexistent.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't edit file: path not found" - ); - - let result = test_resolve_path(mode, "root/dir", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't edit file: path is a directory" - ); - } - - async fn test_resolve_path( - mode: &EditFileMode, - path: &str, - cx: &mut TestAppContext, - ) -> anyhow::Result { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "dir": { - "subdir": { - "existing.txt": "hello" - } - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - let 
input = EditFileToolInput { - display_description: "Some edit".into(), - path: path.into(), - mode: mode.clone(), - }; - - cx.update(|cx| resolve_path(&input, project, cx)) - } - - #[track_caller] - fn assert_resolved_path_eq(path: anyhow::Result, expected: &str) { - let actual = path.expect("Should return valid path").path; - assert_eq!(actual.as_ref(), rel_path(expected)); - } - - #[test] - fn still_streaming_ui_text_with_path() { - let input = json!({ - "path": "src/main.rs", - "display_description": "", - "old_string": "old code", - "new_string": "new code" - }); - - assert_eq!(EditFileTool.still_streaming_ui_text(&input), "src/main.rs"); - } - - #[test] - fn still_streaming_ui_text_with_description() { - let input = json!({ - "path": "", - "display_description": "Fix error handling", - "old_string": "old code", - "new_string": "new code" - }); - - assert_eq!( - EditFileTool.still_streaming_ui_text(&input), - "Fix error handling", - ); - } - - #[test] - fn still_streaming_ui_text_with_path_and_description() { - let input = json!({ - "path": "src/main.rs", - "display_description": "Fix error handling", - "old_string": "old code", - "new_string": "new code" - }); - - assert_eq!( - EditFileTool.still_streaming_ui_text(&input), - "Fix error handling", - ); - } - - #[test] - fn still_streaming_ui_text_no_path_or_description() { - let input = json!({ - "path": "", - "display_description": "", - "old_string": "old code", - "new_string": "new code" - }); - - assert_eq!( - EditFileTool.still_streaming_ui_text(&input), - DEFAULT_UI_TEXT, - ); - } - - #[test] - fn still_streaming_ui_text_with_null() { - let input = serde_json::Value::Null; - - assert_eq!( - EditFileTool.still_streaming_ui_text(&input), - DEFAULT_UI_TEXT, - ); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - TelemetrySettings::register(cx); - agent_settings::AgentSettings::register(cx); - Project::init_settings(cx); - }); - } - - fn init_test_with_config(cx: &mut TestAppContext, data_dir: &Path) { - cx.update(|cx| { - paths::set_custom_data_dir(data_dir.to_str().unwrap()); - // Set custom data directory (config will be under data_dir/config) - - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - TelemetrySettings::register(cx); - agent_settings::AgentSettings::register(cx); - Project::init_settings(cx); - }); - } - - #[gpui::test] - async fn test_format_on_save(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"src": {}})).await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - // Set up a Rust language with LSP formatting support - let rust_language = Arc::new(language::Language::new( - language::LanguageConfig { - name: "Rust".into(), - matcher: language::LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - None, - )); - - // Register the language and fake LSP - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(rust_language); - - let mut fake_language_servers = language_registry.register_fake_lsp( - "Rust", - language::FakeLspAdapter { - capabilities: lsp::ServerCapabilities { - document_formatting_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() - }, - ..Default::default() - }, - ); - - // Create the file 
- fs.save( - path!("/root/src/main.rs").as_ref(), - &"initial content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - - // Open the buffer to trigger LSP initialization - let buffer = project - .update(cx, |project, cx| { - project.open_local_buffer(path!("/root/src/main.rs"), cx) - }) - .await - .unwrap(); - - // Register the buffer with language servers - let _handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - - const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n"; - const FORMATTED_CONTENT: &str = - "This file was formatted by the fake formatter in the test.\n"; - - // Get the fake language server and set up formatting handler - let fake_language_server = fake_language_servers.next().await.unwrap(); - fake_language_server.set_request_handler::({ - |_, _| async move { - Ok(Some(vec![lsp::TextEdit { - range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(1, 0)), - new_text: FORMATTED_CONTENT.to_string(), - }])) - } - }); - - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // First, test with format_on_save enabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On); - settings.project.all_languages.defaults.formatter = - Some(language::language_settings::SelectedFormatter::Auto); - }); - }); - }); - - // Have the model stream unformatted content - let edit_result = { - let edit_task = cx.update(|cx| { - let input = serde_json::to_value(EditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }) - .unwrap(); - Arc::new(EditFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }); - - // Stream the unformatted content - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); - - // Wait for any async operations (e.g. formatting) to complete - cx.executor().run_until_parked(); - - // Read the file to verify it was formatted automatically - let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - // Ignore carriage returns on Windows - new_content.replace("\r\n", "\n"), - FORMATTED_CONTENT, - "Code should be formatted when format_on_save is enabled" - ); - - let stale_buffer_count = action_log.read_with(cx, |log, cx| log.stale_buffers(cx).count()); - - assert_eq!( - stale_buffer_count, 0, - "BUG: Buffer is incorrectly marked as stale after format-on-save. Found {} stale buffers. 
\ - This causes the agent to think the file was modified externally when it was just formatted.", - stale_buffer_count - ); - - // Next, test with format_on_save disabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.all_languages.defaults.format_on_save = - Some(FormatOnSave::Off); - }); - }); - }); - - // Stream unformatted edits again - let edit_result = { - let edit_task = cx.update(|cx| { - let input = serde_json::to_value(EditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }) - .unwrap(); - Arc::new(EditFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }); - - // Stream the unformatted content - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk(UNFORMATTED_CONTENT.to_string()); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); - - // Wait for any async operations (e.g. formatting) to complete - cx.executor().run_until_parked(); - - // Verify the file was not formatted - let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - // Ignore carriage returns on Windows - new_content.replace("\r\n", "\n"), - UNFORMATTED_CONTENT, - "Code should not be formatted when format_on_save is disabled" - ); - } - - #[gpui::test] - async fn test_remove_trailing_whitespace(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"src": {}})).await; - - // Create a simple file with trailing whitespace - fs.save( - path!("/root/src/main.rs").as_ref(), - &"initial content".into(), - language::LineEnding::Unix, - ) - .await - .unwrap(); - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // First, test with remove_trailing_whitespace_on_save enabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings - .project - .all_languages - .defaults - .remove_trailing_whitespace_on_save = Some(true); - }); - }); - }); - - const CONTENT_WITH_TRAILING_WHITESPACE: &str = - "fn main() { \n println!(\"Hello!\"); \n}\n"; - - // Have the model stream content that contains trailing whitespace - let edit_result = { - let edit_task = cx.update(|cx| { - let input = serde_json::to_value(EditFileToolInput { - display_description: "Create main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }) - .unwrap(); - Arc::new(EditFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }); - - // Stream the content with trailing whitespace - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - CONTENT_WITH_TRAILING_WHITESPACE.to_string(), - ); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); - - // Wait for any async operations (e.g. 
formatting) to complete - cx.executor().run_until_parked(); - - // Read the file to verify trailing whitespace was removed automatically - assert_eq!( - // Ignore carriage returns on Windows - fs.load(path!("/root/src/main.rs").as_ref()) - .await - .unwrap() - .replace("\r\n", "\n"), - "fn main() {\n println!(\"Hello!\");\n}\n", - "Trailing whitespace should be removed when remove_trailing_whitespace_on_save is enabled" - ); - - // Next, test with remove_trailing_whitespace_on_save disabled - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings - .project - .all_languages - .defaults - .remove_trailing_whitespace_on_save = Some(false); - }); - }); - }); - - // Stream edits again with trailing whitespace - let edit_result = { - let edit_task = cx.update(|cx| { - let input = serde_json::to_value(EditFileToolInput { - display_description: "Update main function".into(), - path: "root/src/main.rs".into(), - mode: EditFileMode::Overwrite, - }) - .unwrap(); - Arc::new(EditFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }); - - // Stream the content with trailing whitespace - cx.executor().run_until_parked(); - model.send_last_completion_stream_text_chunk( - CONTENT_WITH_TRAILING_WHITESPACE.to_string(), - ); - model.end_last_completion_stream(); - - edit_task.await - }; - assert!(edit_result.is_ok()); - - // Wait for any async operations (e.g. formatting) to complete - cx.executor().run_until_parked(); - - // Verify the file still has trailing whitespace - // Read the file again - it should still have trailing whitespace - let final_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); - assert_eq!( - // Ignore carriage returns on Windows - final_content.replace("\r\n", "\n"), - CONTENT_WITH_TRAILING_WHITESPACE, - "Trailing whitespace should remain when remove_trailing_whitespace_on_save is disabled" - ); - } - - #[gpui::test] - async fn test_needs_confirmation(cx: &mut TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - - // Test 1: Path with .zed component should require confirmation - let input_with_zed = json!({ - "display_description": "Edit settings", - "path": ".zed/settings.json", - "mode": "edit" - }); - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_with_zed, &project, cx), - "Path with .zed component should require confirmation" - ); - }); - - // Test 2: Absolute path should require confirmation - let input_absolute = json!({ - "display_description": "Edit file", - "path": "/etc/hosts", - "mode": "edit" - }); - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_absolute, &project, cx), - "Absolute path should require confirmation" - ); - }); - - // Test 3: Relative path without .zed should not require confirmation - let input_relative = json!({ - "display_description": "Edit file", - "path": "root/src/main.rs", - "mode": "edit" - }); - cx.update(|cx| { - assert!( - !tool.needs_confirmation(&input_relative, &project, cx), - "Relative path without .zed should not require confirmation" - ); - }); - - // Test 4: Path with .zed in the middle should require confirmation - let input_zed_middle = json!({ - "display_description": "Edit settings", - "path": "root/.zed/tasks.json", - "mode": "edit" - }); - 
cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_zed_middle, &project, cx), - "Path with .zed in any component should require confirmation" - ); - }); - - // Test 5: When always_allow_tool_actions is enabled, no confirmation needed - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.always_allow_tool_actions = true; - agent_settings::AgentSettings::override_global(settings, cx); - - assert!( - !tool.needs_confirmation(&input_with_zed, &project, cx), - "When always_allow_tool_actions is true, no confirmation should be needed" - ); - assert!( - !tool.needs_confirmation(&input_absolute, &project, cx), - "When always_allow_tool_actions is true, no confirmation should be needed for absolute paths" - ); - }); - } - - #[gpui::test] - async fn test_ui_text_shows_correct_context(cx: &mut TestAppContext) { - // Set up a custom config directory for testing - let temp_dir = tempfile::tempdir().unwrap(); - init_test_with_config(cx, temp_dir.path()); - - let tool = Arc::new(EditFileTool); - - // Test ui_text shows context for various paths - let test_cases = vec![ - ( - json!({ - "display_description": "Update config", - "path": ".zed/settings.json", - "mode": "edit" - }), - "Update config (local settings)", - ".zed path should show local settings context", - ), - ( - json!({ - "display_description": "Fix bug", - "path": "src/.zed/local.json", - "mode": "edit" - }), - "Fix bug (local settings)", - "Nested .zed path should show local settings context", - ), - ( - json!({ - "display_description": "Update readme", - "path": "README.md", - "mode": "edit" - }), - "Update readme", - "Normal path should not show additional context", - ), - ( - json!({ - "display_description": "Edit config", - "path": "config.zed", - "mode": "edit" - }), - "Edit config", - ".zed as extension should not show context", - ), - ]; - - for (input, expected_text, description) in test_cases { - cx.update(|_cx| { - let ui_text = tool.ui_text(&input); - assert_eq!(ui_text, expected_text, "Failed for case: {}", description); - }); - } - } - - #[gpui::test] - async fn test_needs_confirmation_outside_project(cx: &mut TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - - // Create a project in /project directory - fs.insert_tree("/project", json!({})).await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - - // Test file outside project requires confirmation - let input_outside = json!({ - "display_description": "Edit file", - "path": "/outside/file.txt", - "mode": "edit" - }); - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_outside, &project, cx), - "File outside project should require confirmation" - ); - }); - - // Test file inside project doesn't require confirmation - let input_inside = json!({ - "display_description": "Edit file", - "path": "project/file.txt", - "mode": "edit" - }); - cx.update(|cx| { - assert!( - !tool.needs_confirmation(&input_inside, &project, cx), - "File inside project should not require confirmation" - ); - }); - } - - #[gpui::test] - async fn test_needs_confirmation_config_paths(cx: &mut TestAppContext) { - // Set up a custom data directory for testing - let temp_dir = tempfile::tempdir().unwrap(); - init_test_with_config(cx, temp_dir.path()); - - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/home/user/myproject", json!({})).await; - let project = Project::test(fs.clone(), 
[path!("/home/user/myproject").as_ref()], cx).await; - - // Get the actual local settings folder name - let local_settings_folder = paths::local_settings_folder_name(); - - // Test various config path patterns - let test_cases = vec![ - ( - format!("{local_settings_folder}/settings.json"), - true, - "Top-level local settings file".to_string(), - ), - ( - format!("myproject/{local_settings_folder}/settings.json"), - true, - "Local settings in project path".to_string(), - ), - ( - format!("src/{local_settings_folder}/config.toml"), - true, - "Local settings in subdirectory".to_string(), - ), - ( - ".zed.backup/file.txt".to_string(), - true, - ".zed.backup is outside project".to_string(), - ), - ( - "my.zed/file.txt".to_string(), - true, - "my.zed is outside project".to_string(), - ), - ( - "myproject/src/file.zed".to_string(), - false, - ".zed as file extension".to_string(), - ), - ( - "myproject/normal/path/file.rs".to_string(), - false, - "Normal file without config paths".to_string(), - ), - ]; - - for (path, should_confirm, description) in test_cases { - let input = json!({ - "display_description": "Edit file", - "path": path, - "mode": "edit" - }); - cx.update(|cx| { - assert_eq!( - tool.needs_confirmation(&input, &project, cx), - should_confirm, - "Failed for case: {} - path: {}", - description, - path - ); - }); - } - } - - #[gpui::test] - async fn test_needs_confirmation_global_config(cx: &mut TestAppContext) { - // Set up a custom data directory for testing - let temp_dir = tempfile::tempdir().unwrap(); - init_test_with_config(cx, temp_dir.path()); - - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - - // Create test files in the global config directory - let global_config_dir = paths::config_dir(); - fs::create_dir_all(&global_config_dir).unwrap(); - let global_settings_path = global_config_dir.join("settings.json"); - fs::write(&global_settings_path, "{}").unwrap(); - - fs.insert_tree("/project", json!({})).await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - - // Test global config paths - let test_cases = vec![ - ( - global_settings_path.to_str().unwrap().to_string(), - true, - "Global settings file should require confirmation", - ), - ( - global_config_dir - .join("keymap.json") - .to_str() - .unwrap() - .to_string(), - true, - "Global keymap file should require confirmation", - ), - ( - "project/normal_file.rs".to_string(), - false, - "Normal project file should not require confirmation", - ), - ]; - - for (path, should_confirm, description) in test_cases { - let input = json!({ - "display_description": "Edit file", - "path": path, - "mode": "edit" - }); - cx.update(|cx| { - assert_eq!( - tool.needs_confirmation(&input, &project, cx), - should_confirm, - "Failed for case: {}", - description - ); - }); - } - } - - #[gpui::test] - async fn test_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - - // Create multiple worktree directories - fs.insert_tree( - "/workspace/frontend", - json!({ - "src": { - "main.js": "console.log('frontend');" - } - }), - ) - .await; - fs.insert_tree( - "/workspace/backend", - json!({ - "src": { - "main.rs": "fn main() {}" - } - }), - ) - .await; - fs.insert_tree( - "/workspace/shared", - json!({ - ".zed": { - "settings.json": "{}" - } - }), - ) - .await; - - // Create project with multiple worktrees - let project = Project::test( - fs.clone(), - [ - 
path!("/workspace/frontend").as_ref(), - path!("/workspace/backend").as_ref(), - path!("/workspace/shared").as_ref(), - ], - cx, - ) - .await; - - // Test files in different worktrees - let test_cases = vec![ - ("frontend/src/main.js", false, "File in first worktree"), - ("backend/src/main.rs", false, "File in second worktree"), - ( - "shared/.zed/settings.json", - true, - ".zed file in third worktree", - ), - ("/etc/hosts", true, "Absolute path outside all worktrees"), - ( - "../outside/file.txt", - true, - "Relative path outside worktrees", - ), - ]; - - for (path, should_confirm, description) in test_cases { - let input = json!({ - "display_description": "Edit file", - "path": path, - "mode": "edit" - }); - cx.update(|cx| { - assert_eq!( - tool.needs_confirmation(&input, &project, cx), - should_confirm, - "Failed for case: {} - path: {}", - description, - path - ); - }); - } - } - - #[gpui::test] - async fn test_needs_confirmation_edge_cases(cx: &mut TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - json!({ - ".zed": { - "settings.json": "{}" - }, - "src": { - ".zed": { - "local.json": "{}" - } - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - - // Test edge cases - let test_cases = vec![ - // Empty path - find_project_path returns Some for empty paths - ("", false, "Empty path is treated as project root"), - // Root directory - ("/", true, "Root directory should be outside project"), - ("project/../other", true, "Path with .. is outside project"), - ( - "project/./src/file.rs", - false, - "Path with . should work normally", - ), - // Windows-style paths (if on Windows) - #[cfg(target_os = "windows")] - ("C:\\Windows\\System32\\hosts", true, "Windows system path"), - #[cfg(target_os = "windows")] - ("project\\src\\main.rs", false, "Windows-style project path"), - ]; - - for (path, should_confirm, description) in test_cases { - let input = json!({ - "display_description": "Edit file", - "path": path, - "mode": "edit" - }); - cx.update(|cx| { - assert_eq!( - tool.needs_confirmation(&input, &project, cx), - should_confirm, - "Failed for case: {} - path: {}", - description, - path - ); - }); - } - } - - #[gpui::test] - async fn test_ui_text_with_all_path_types(cx: &mut TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - - // Test UI text for various scenarios - let test_cases = vec![ - ( - json!({ - "display_description": "Update config", - "path": ".zed/settings.json", - "mode": "edit" - }), - "Update config (local settings)", - ".zed path should show local settings context", - ), - ( - json!({ - "display_description": "Fix bug", - "path": "src/.zed/local.json", - "mode": "edit" - }), - "Fix bug (local settings)", - "Nested .zed path should show local settings context", - ), - ( - json!({ - "display_description": "Update readme", - "path": "README.md", - "mode": "edit" - }), - "Update readme", - "Normal path should not show additional context", - ), - ( - json!({ - "display_description": "Edit config", - "path": "config.zed", - "mode": "edit" - }), - "Edit config", - ".zed as extension should not show context", - ), - ]; - - for (input, expected_text, description) in test_cases { - cx.update(|_cx| { - let ui_text = tool.ui_text(&input); - assert_eq!(ui_text, expected_text, "Failed for case: {}", description); - }); - } - } - - #[gpui::test] - async fn test_needs_confirmation_with_different_modes(cx: &mut 
TestAppContext) { - init_test(cx); - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - json!({ - "existing.txt": "content", - ".zed": { - "settings.json": "{}" - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - - // Test different EditFileMode values - let modes = vec![ - EditFileMode::Edit, - EditFileMode::Create, - EditFileMode::Overwrite, - ]; - - for mode in modes { - // Test .zed path with different modes - let input_zed = json!({ - "display_description": "Edit settings", - "path": "project/.zed/settings.json", - "mode": mode - }); - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_zed, &project, cx), - ".zed path should require confirmation regardless of mode: {:?}", - mode - ); - }); - - // Test outside path with different modes - let input_outside = json!({ - "display_description": "Edit file", - "path": "/outside/file.txt", - "mode": mode - }); - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input_outside, &project, cx), - "Outside path should require confirmation regardless of mode: {:?}", - mode - ); - }); - - // Test normal path with different modes - let input_normal = json!({ - "display_description": "Edit file", - "path": "project/normal.txt", - "mode": mode - }); - cx.update(|cx| { - assert!( - !tool.needs_confirmation(&input_normal, &project, cx), - "Normal path should not require confirmation regardless of mode: {:?}", - mode - ); - }); - } - } - - #[gpui::test] - async fn test_always_allow_tool_actions_bypasses_all_checks(cx: &mut TestAppContext) { - // Set up with custom directories for deterministic testing - let temp_dir = tempfile::tempdir().unwrap(); - init_test_with_config(cx, temp_dir.path()); - - let tool = Arc::new(EditFileTool); - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({})).await; - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - - // Enable always_allow_tool_actions - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.always_allow_tool_actions = true; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - // Test that all paths that normally require confirmation are bypassed - let global_settings_path = paths::config_dir().join("settings.json"); - fs::create_dir_all(paths::config_dir()).unwrap(); - fs::write(&global_settings_path, "{}").unwrap(); - - let test_cases = vec![ - ".zed/settings.json", - "project/.zed/config.toml", - global_settings_path.to_str().unwrap(), - "/etc/hosts", - "/absolute/path/file.txt", - "../outside/project.txt", - ]; - - for path in test_cases { - let input = json!({ - "display_description": "Edit file", - "path": path, - "mode": "edit" - }); - cx.update(|cx| { - assert!( - !tool.needs_confirmation(&input, &project, cx), - "Path {} should not require confirmation when always_allow_tool_actions is true", - path - ); - }); - } - - // Disable always_allow_tool_actions and verify confirmation is required again - cx.update(|cx| { - let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); - settings.always_allow_tool_actions = false; - agent_settings::AgentSettings::override_global(settings, cx); - }); - - // Verify .zed path requires confirmation again - let input = json!({ - "display_description": "Edit file", - "path": ".zed/settings.json", - "mode": "edit" - }); - cx.update(|cx| { - assert!( - tool.needs_confirmation(&input, 
&project, cx), - ".zed path should require confirmation when always_allow_tool_actions is false" - ); - }); - } -} diff --git a/crates/assistant_tools/src/edit_file_tool/description.md b/crates/assistant_tools/src/edit_file_tool/description.md deleted file mode 100644 index 27f8e49dd626a2d1a5266b90413a3a5f8e02e6d8..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/edit_file_tool/description.md +++ /dev/null @@ -1,8 +0,0 @@ -This is a tool for creating a new file or editing an existing file. For moving or renaming files, you should generally use the `terminal` tool with the 'mv' command instead. - -Before using this tool: - -1. Use the `read_file` tool to understand the file's contents and context - -2. Verify the directory path is correct (only applicable when creating new files): - - Use the `list_directory` tool to verify the parent directory exists and is the correct location diff --git a/crates/assistant_tools/src/fetch_tool.rs b/crates/assistant_tools/src/fetch_tool.rs deleted file mode 100644 index cc22c9fc09f73914720c4b639f8d273207d7ca53..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/fetch_tool.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::rc::Rc; -use std::sync::Arc; -use std::{borrow::Cow, cell::RefCell}; - -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow, bail}; -use assistant_tool::{Tool, ToolResult}; -use futures::AsyncReadExt as _; -use gpui::{AnyWindowHandle, App, AppContext as _, Entity, Task}; -use html_to_markdown::{TagHandler, convert_html_to_markdown, markdown}; -use http_client::{AsyncBody, HttpClientWithUrl}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use ui::IconName; -use util::markdown::MarkdownEscaped; - -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -enum ContentType { - Html, - Plaintext, - Json, -} - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct FetchToolInput { - /// The URL to fetch. 
- url: String, -} - -pub struct FetchTool { - http_client: Arc, -} - -impl FetchTool { - pub fn new(http_client: Arc) -> Self { - Self { http_client } - } - - async fn build_message(http_client: Arc, url: &str) -> Result { - let url = if !url.starts_with("https://") && !url.starts_with("http://") { - Cow::Owned(format!("https://{url}")) - } else { - Cow::Borrowed(url) - }; - - let mut response = http_client.get(&url, AsyncBody::default(), true).await?; - - let mut body = Vec::new(); - response - .body_mut() - .read_to_end(&mut body) - .await - .context("error reading response body")?; - - if response.status().is_client_error() { - let text = String::from_utf8_lossy(body.as_slice()); - bail!( - "status error {}, response: {text:?}", - response.status().as_u16() - ); - } - - let Some(content_type) = response.headers().get("content-type") else { - bail!("missing Content-Type header"); - }; - let content_type = content_type - .to_str() - .context("invalid Content-Type header")?; - let content_type = match content_type { - "text/html" | "application/xhtml+xml" => ContentType::Html, - "application/json" => ContentType::Json, - _ => ContentType::Plaintext, - }; - - match content_type { - ContentType::Html => { - let mut handlers: Vec = vec![ - Rc::new(RefCell::new(markdown::WebpageChromeRemover)), - Rc::new(RefCell::new(markdown::ParagraphHandler)), - Rc::new(RefCell::new(markdown::HeadingHandler)), - Rc::new(RefCell::new(markdown::ListHandler)), - Rc::new(RefCell::new(markdown::TableHandler::new())), - Rc::new(RefCell::new(markdown::StyledTextHandler)), - ]; - if url.contains("wikipedia.org") { - use html_to_markdown::structure::wikipedia; - - handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover))); - handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler))); - handlers.push(Rc::new( - RefCell::new(wikipedia::WikipediaCodeHandler::new()), - )); - } else { - handlers.push(Rc::new(RefCell::new(markdown::CodeHandler))); - } - - convert_html_to_markdown(&body[..], &mut handlers) - } - ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()), - ContentType::Json => { - let json: serde_json::Value = serde_json::from_slice(&body)?; - - Ok(format!( - "```json\n{}\n```", - serde_json::to_string_pretty(&json)? 
- )) - } - } - } -} - -impl Tool for FetchTool { - fn name(&self) -> String { - "fetch".to_string() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - true - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./fetch_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ToolWeb - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => format!("Fetch {}", MarkdownEscaped(&input.url)), - Err(_) => "Fetch URL".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let text = cx.background_spawn({ - let http_client = self.http_client.clone(); - async move { Self::build_message(http_client, &input.url).await } - }); - - cx.foreground_executor() - .spawn(async move { - let text = text.await?; - if text.trim().is_empty() { - bail!("no textual content found"); - } - - Ok(text.into()) - }) - .into() - } -} diff --git a/crates/assistant_tools/src/fetch_tool/description.md b/crates/assistant_tools/src/fetch_tool/description.md deleted file mode 100644 index 007ba6c60864c2185740b40222a32b05d2819bf0..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/fetch_tool/description.md +++ /dev/null @@ -1 +0,0 @@ -Fetches a URL and returns the content as Markdown. diff --git a/crates/assistant_tools/src/find_path_tool.rs b/crates/assistant_tools/src/find_path_tool.rs deleted file mode 100644 index 0bc478251cb5d3d558dda4fb41df02e85eaafde2..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/find_path_tool.rs +++ /dev/null @@ -1,472 +0,0 @@ -use crate::{schema::json_schema_for, ui::ToolCallCardHeader}; -use action_log::ActionLog; -use anyhow::{Result, anyhow}; -use assistant_tool::{ - Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, -}; -use editor::Editor; -use futures::channel::oneshot::{self, Receiver}; -use gpui::{ - AnyWindowHandle, App, AppContext, Context, Entity, IntoElement, Task, WeakEntity, Window, -}; -use language; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::fmt::Write; -use std::{cmp, path::PathBuf, sync::Arc}; -use ui::{Disclosure, Tooltip, prelude::*}; -use util::{ResultExt, paths::PathMatcher}; -use workspace::Workspace; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct FindPathToolInput { - /// The glob to match against every path in the project. - /// - /// - /// If the project has the following root directories: - /// - /// - directory1/a/something.txt - /// - directory2/a/things.txt - /// - directory3/a/other.txt - /// - /// You can get back the first two paths by providing a glob of "*thing*.txt" - /// - pub glob: String, - - /// Optional starting position for paginated results (0-based). - /// When not provided, starts from the beginning. 
- #[serde(default)] - pub offset: usize, -} - -#[derive(Debug, Serialize, Deserialize)] -struct FindPathToolOutput { - glob: String, - paths: Vec, -} - -const RESULTS_PER_PAGE: usize = 50; - -pub struct FindPathTool; - -impl Tool for FindPathTool { - fn name(&self) -> String { - "find_path".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./find_path_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolSearch - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => format!("Find paths matching “`{}`”", input.glob), - Err(_) => "Search paths".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let (offset, glob) = match serde_json::from_value::(input) { - Ok(input) => (input.offset, input.glob), - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let (sender, receiver) = oneshot::channel(); - - let card = cx.new(|cx| FindPathToolCard::new(glob.clone(), receiver, cx)); - - let search_paths_task = search_paths(&glob, project, cx); - - let task = cx.background_spawn(async move { - let matches = search_paths_task.await?; - let paginated_matches: &[PathBuf] = &matches[cmp::min(offset, matches.len()) - ..cmp::min(offset + RESULTS_PER_PAGE, matches.len())]; - - sender.send(paginated_matches.to_vec()).log_err(); - - if matches.is_empty() { - Ok("No matches found".to_string().into()) - } else { - let mut message = format!("Found {} total matches.", matches.len()); - if matches.len() > RESULTS_PER_PAGE { - write!( - &mut message, - "\nShowing results {}-{} (provide 'offset' parameter for more results):", - offset + 1, - offset + paginated_matches.len() - ) - .unwrap(); - } - - for mat in matches.iter().skip(offset).take(RESULTS_PER_PAGE) { - write!(&mut message, "\n{}", mat.display()).unwrap(); - } - - let output = FindPathToolOutput { - glob, - paths: matches, - }; - - Ok(ToolResultOutput { - content: ToolResultContent::Text(message), - output: Some(serde_json::to_value(output)?), - }) - } - }); - - ToolResult { - output: task, - card: Some(card.into()), - } - } - - fn deserialize_card( - self: Arc, - output: serde_json::Value, - _project: Entity, - _window: &mut Window, - cx: &mut App, - ) -> Option { - let output = serde_json::from_value::(output).ok()?; - let card = cx.new(|_| FindPathToolCard::from_output(output)); - Some(card.into()) - } -} - -fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task>> { - let path_matcher = match PathMatcher::new( - [ - // Sometimes models try to search for "". In this case, return all paths in the project. 
- if glob.is_empty() { "*" } else { glob }, - ], - project.read(cx).path_style(cx), - ) { - Ok(matcher) => matcher, - Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))), - }; - let snapshots: Vec<_> = project - .read(cx) - .worktrees(cx) - .map(|worktree| worktree.read(cx).snapshot()) - .collect(); - - cx.background_spawn(async move { - Ok(snapshots - .iter() - .flat_map(|snapshot| { - snapshot - .entries(false, 0) - .map(move |entry| { - snapshot - .root_name() - .join(&entry.path) - .as_std_path() - .to_path_buf() - }) - .filter(|path| path_matcher.is_match(&path)) - }) - .collect()) - }) -} - -struct FindPathToolCard { - paths: Vec, - expanded: bool, - glob: String, - _receiver_task: Option>>, -} - -impl FindPathToolCard { - fn new(glob: String, receiver: Receiver>, cx: &mut Context) -> Self { - let _receiver_task = cx.spawn(async move |this, cx| { - let paths = receiver.await?; - - this.update(cx, |this, _cx| { - this.paths = paths; - }) - .log_err(); - - Ok(()) - }); - - Self { - paths: Vec::new(), - expanded: false, - glob, - _receiver_task: Some(_receiver_task), - } - } - - fn from_output(output: FindPathToolOutput) -> Self { - Self { - glob: output.glob, - paths: output.paths, - expanded: false, - _receiver_task: None, - } - } -} - -impl ToolCard for FindPathToolCard { - fn render( - &mut self, - _status: &ToolUseStatus, - _window: &mut Window, - workspace: WeakEntity, - cx: &mut Context, - ) -> impl IntoElement { - let matches_label: SharedString = if self.paths.is_empty() { - "No matches".into() - } else if self.paths.len() == 1 { - "1 match".into() - } else { - format!("{} matches", self.paths.len()).into() - }; - - let content = if !self.paths.is_empty() && self.expanded { - Some( - v_flex() - .relative() - .ml_1p5() - .px_1p5() - .gap_0p5() - .border_l_1() - .border_color(cx.theme().colors().border_variant) - .children(self.paths.iter().enumerate().map(|(index, path)| { - let path_clone = path.clone(); - let workspace_clone = workspace.clone(); - let button_label = path.to_string_lossy().into_owned(); - - Button::new(("path", index), button_label) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) - .label_size(LabelSize::Small) - .color(Color::Muted) - .tooltip(Tooltip::text("Jump to File")) - .on_click(move |_, window, cx| { - workspace_clone - .update(cx, |workspace, cx| { - let path = PathBuf::from(&path_clone); - let Some(project_path) = workspace - .project() - .read(cx) - .find_project_path(&path, cx) - else { - return; - }; - let open_task = workspace.open_path( - project_path, - None, - true, - window, - cx, - ); - window - .spawn(cx, async move |cx| { - let item = open_task.await?; - if let Some(active_editor) = - item.downcast::() - { - active_editor - .update_in(cx, |editor, window, cx| { - editor.go_to_singleton_buffer_point( - language::Point::new(0, 0), - window, - cx, - ); - }) - .log_err(); - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }) - .ok(); - }) - })) - .into_any(), - ) - } else { - None - }; - - v_flex() - .mb_2() - .gap_1() - .child( - ToolCallCardHeader::new(IconName::ToolSearch, matches_label) - .with_code_path(&self.glob) - .disclosure_slot( - Disclosure::new("path-search-disclosure", self.expanded) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .disabled(self.paths.is_empty()) - .on_click(cx.listener(move |this, _, _, _cx| { - this.expanded = !this.expanded; - })), - ), - ) - .children(content) - } -} - -impl Component for FindPathTool { - fn 
scope() -> ComponentScope { - ComponentScope::Agent - } - - fn sort_name() -> &'static str { - "FindPathTool" - } - - fn preview(window: &mut Window, cx: &mut App) -> Option { - let successful_card = cx.new(|_| FindPathToolCard { - paths: vec![ - PathBuf::from("src/main.rs"), - PathBuf::from("src/lib.rs"), - PathBuf::from("tests/test.rs"), - ], - expanded: true, - glob: "*.rs".to_string(), - _receiver_task: None, - }); - - let empty_card = cx.new(|_| FindPathToolCard { - paths: Vec::new(), - expanded: false, - glob: "*.nonexistent".to_string(), - _receiver_task: None, - }); - - Some( - v_flex() - .gap_6() - .children(vec![example_group(vec![ - single_example( - "With Paths", - div() - .size_full() - .child(successful_card.update(cx, |tool, cx| { - tool.render( - &ToolUseStatus::Finished("".into()), - window, - WeakEntity::new_invalid(), - cx, - ) - .into_any_element() - })) - .into_any_element(), - ), - single_example( - "No Paths", - div() - .size_full() - .child(empty_card.update(cx, |tool, cx| { - tool.render( - &ToolUseStatus::Finished("".into()), - window, - WeakEntity::new_invalid(), - cx, - ) - .into_any_element() - })) - .into_any_element(), - ), - ])]) - .into_any_element(), - ) - } -} - -#[cfg(test)] -mod test { - use super::*; - use gpui::TestAppContext; - use project::{FakeFs, Project}; - use settings::SettingsStore; - use util::path; - - #[gpui::test] - async fn test_find_path_tool(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - serde_json::json!({ - "apple": { - "banana": { - "carrot": "1", - }, - "bandana": { - "carbonara": "2", - }, - "endive": "3" - } - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - let matches = cx - .update(|cx| search_paths("root/**/car*", project.clone(), cx)) - .await - .unwrap(); - assert_eq!( - matches, - &[ - PathBuf::from(path!("root/apple/banana/carrot")), - PathBuf::from(path!("root/apple/bandana/carbonara")) - ] - ); - - let matches = cx - .update(|cx| search_paths("**/car*", project.clone(), cx)) - .await - .unwrap(); - assert_eq!( - matches, - &[ - PathBuf::from(path!("root/apple/banana/carrot")), - PathBuf::from(path!("root/apple/bandana/carbonara")) - ] - ); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } -} diff --git a/crates/assistant_tools/src/find_path_tool/description.md b/crates/assistant_tools/src/find_path_tool/description.md deleted file mode 100644 index f7a697c467b2807c1f4cf1706ef660a77b9ee727..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/find_path_tool/description.md +++ /dev/null @@ -1,7 +0,0 @@ -Fast file path pattern matching tool that works with any codebase size - -- Supports glob patterns like "**/*.js" or "src/**/*.ts" -- Returns matching file paths sorted alphabetically -- Prefer the `grep` tool to this tool when searching for symbols unless you have specific information about paths. -- Use this tool when you need to find files by name patterns -- Results are paginated with 50 matches per page. Use the optional 'offset' parameter to request subsequent pages. 
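For reference, the glob matching and 50-results-per-page pagination that the find_path description above documents correspond to the `glob` and `offset` fields of the `FindPathToolInput` struct deleted in this patch. A minimal sketch of what such an input could look like when serialized with `serde_json`, assuming the struct as it appears in the removed code (the example glob is taken from its doc comment):

    use serde_json::json;

    fn main() {
        // First page: `offset` defaults to 0 via #[serde(default)] on the removed struct.
        let first_page = json!({ "glob": "*thing*.txt" });
        // A follow-up page: the removed code paginated with RESULTS_PER_PAGE = 50.
        let next_page = json!({ "glob": "*thing*.txt", "offset": 50 });
        println!("{first_page}\n{next_page}");
    }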
diff --git a/crates/assistant_tools/src/grep_tool.rs b/crates/assistant_tools/src/grep_tool.rs
deleted file mode 100644
index 609e25f338d11995ea6f587ba476e4f95274e4e9..0000000000000000000000000000000000000000
--- a/crates/assistant_tools/src/grep_tool.rs
+++ /dev/null
@@ -1,1308 +0,0 @@
-use crate::schema::json_schema_for;
-use action_log::ActionLog;
-use anyhow::{Result, anyhow};
-use assistant_tool::{Tool, ToolResult};
-use futures::StreamExt;
-use gpui::{AnyWindowHandle, App, Entity, Task};
-use language::{OffsetRangeExt, ParseStatus, Point};
-use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
-use project::{
-    Project, WorktreeSettings,
-    search::{SearchQuery, SearchResult},
-};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use settings::Settings;
-use std::{cmp, fmt::Write, sync::Arc};
-use ui::IconName;
-use util::RangeExt;
-use util::markdown::MarkdownInlineCode;
-use util::paths::PathMatcher;
-
-#[derive(Debug, Serialize, Deserialize, JsonSchema)]
-pub struct GrepToolInput {
-    /// A regex pattern to search for in the entire project. Note that the regex
-    /// will be parsed by the Rust `regex` crate.
-    ///
-    /// Do NOT specify a path here! This will only be matched against the code **content**.
-    pub regex: String,
-
-    /// A glob pattern for the paths of files to include in the search.
-    /// Supports standard glob patterns like "**/*.rs" or "src/**/*.ts".
-    /// If omitted, all files in the project will be searched.
-    pub include_pattern: Option<String>,
-
-    /// Optional starting position for paginated results (0-based).
-    /// When not provided, starts from the beginning.
-    #[serde(default)]
-    pub offset: u32,
-
-    /// Whether the regex is case-sensitive. Defaults to false (case-insensitive).
-    #[serde(default)]
-    pub case_sensitive: bool,
-}
-
-impl GrepToolInput {
-    /// Which page of search results this is.
- pub fn page(&self) -> u32 { - 1 + (self.offset / RESULTS_PER_PAGE) - } -} - -const RESULTS_PER_PAGE: u32 = 20; - -pub struct GrepTool; - -impl Tool for GrepTool { - fn name(&self) -> String { - "grep".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./grep_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolRegex - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let page = input.page(); - let regex_str = MarkdownInlineCode(&input.regex); - let case_info = if input.case_sensitive { - " (case-sensitive)" - } else { - "" - }; - - if page > 1 { - format!("Get page {page} of search results for regex {regex_str}{case_info}") - } else { - format!("Search files for regex {regex_str}{case_info}") - } - } - Err(_) => "Search with regex".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - const CONTEXT_LINES: u32 = 2; - const MAX_ANCESTOR_LINES: u32 = 10; - - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(error) => { - return Task::ready(Err(anyhow!("Failed to parse input: {error}"))).into(); - } - }; - - let include_matcher = match PathMatcher::new( - input - .include_pattern - .as_ref() - .into_iter() - .collect::>(), - project.read(cx).path_style(cx), - ) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(anyhow!("invalid include glob pattern: {error}"))).into(); - } - }; - - // Exclude global file_scan_exclusions and private_files settings - let exclude_matcher = { - let global_settings = WorktreeSettings::get_global(cx); - let exclude_patterns = global_settings - .file_scan_exclusions - .sources() - .iter() - .chain(global_settings.private_files.sources().iter()); - - match PathMatcher::new(exclude_patterns, project.read(cx).path_style(cx)) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(anyhow!("invalid exclude pattern: {error}"))).into(); - } - } - }; - - let query = match SearchQuery::regex( - &input.regex, - false, - input.case_sensitive, - false, - false, - include_matcher, - exclude_matcher, - true, // Always match file include pattern against *full project paths* that start with a project root. 
- None, - ) { - Ok(query) => query, - Err(error) => return Task::ready(Err(error)).into(), - }; - - let results = project.update(cx, |project, cx| project.search(query, cx)); - - cx.spawn(async move |cx| { - futures::pin_mut!(results); - - let mut output = String::new(); - let mut skips_remaining = input.offset; - let mut matches_found = 0; - let mut has_more_matches = false; - - 'outer: while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await { - if ranges.is_empty() { - continue; - } - - let Ok((Some(path), mut parse_status)) = buffer.read_with(cx, |buffer, cx| { - (buffer.file().map(|file| file.full_path(cx)), buffer.parse_status()) - }) else { - continue; - }; - - // Check if this file should be excluded based on its worktree settings - if let Ok(Some(project_path)) = project.read_with(cx, |project, cx| { - project.find_project_path(&path, cx) - }) - && cx.update(|cx| { - let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); - worktree_settings.is_path_excluded(&project_path.path) - || worktree_settings.is_path_private(&project_path.path) - }).unwrap_or(false) { - continue; - } - - while *parse_status.borrow() != ParseStatus::Idle { - parse_status.changed().await?; - } - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; - - let mut ranges = ranges - .into_iter() - .map(|range| { - let matched = range.to_point(&snapshot); - let matched_end_line_len = snapshot.line_len(matched.end.row); - let full_lines = Point::new(matched.start.row, 0)..Point::new(matched.end.row, matched_end_line_len); - let symbols = snapshot.symbols_containing(matched.start, None); - - if let Some(ancestor_node) = snapshot.syntax_ancestor(full_lines.clone()) { - let full_ancestor_range = ancestor_node.byte_range().to_point(&snapshot); - let end_row = full_ancestor_range.end.row.min(full_ancestor_range.start.row + MAX_ANCESTOR_LINES); - let end_col = snapshot.line_len(end_row); - let capped_ancestor_range = Point::new(full_ancestor_range.start.row, 0)..Point::new(end_row, end_col); - - if capped_ancestor_range.contains_inclusive(&full_lines) { - return (capped_ancestor_range, Some(full_ancestor_range), symbols) - } - } - - let mut matched = matched; - matched.start.column = 0; - matched.start.row = - matched.start.row.saturating_sub(CONTEXT_LINES); - matched.end.row = cmp::min( - snapshot.max_point().row, - matched.end.row + CONTEXT_LINES, - ); - matched.end.column = snapshot.line_len(matched.end.row); - - (matched, None, symbols) - }) - .peekable(); - - let mut file_header_written = false; - - while let Some((mut range, ancestor_range, parent_symbols)) = ranges.next(){ - if skips_remaining > 0 { - skips_remaining -= 1; - continue; - } - - // We'd already found a full page of matches, and we just found one more. 
- if matches_found >= RESULTS_PER_PAGE { - has_more_matches = true; - break 'outer; - } - - while let Some((next_range, _, _)) = ranges.peek() { - if range.end.row >= next_range.start.row { - range.end = next_range.end; - ranges.next(); - } else { - break; - } - } - - if !file_header_written { - writeln!(output, "\n## Matches in {}", path.display())?; - file_header_written = true; - } - - let end_row = range.end.row; - output.push_str("\n### "); - - for symbol in parent_symbols { - write!(output, "{} › ", symbol.text)?; - } - - if range.start.row == end_row { - writeln!(output, "L{}", range.start.row + 1)?; - } else { - writeln!(output, "L{}-{}", range.start.row + 1, end_row + 1)?; - } - - output.push_str("```\n"); - output.extend(snapshot.text_for_range(range)); - output.push_str("\n```\n"); - - if let Some(ancestor_range) = ancestor_range - && end_row < ancestor_range.end.row { - let remaining_lines = ancestor_range.end.row - end_row; - writeln!(output, "\n{} lines remaining in ancestor node. Read the file to see all.", remaining_lines)?; - } - - matches_found += 1; - } - } - - if matches_found == 0 { - Ok("No matches found".to_string().into()) - } else if has_more_matches { - Ok(format!( - "Showing matches {}-{} (there were more matches found; use offset: {} to see next page):\n{output}", - input.offset + 1, - input.offset + matches_found, - input.offset + RESULTS_PER_PAGE, - ).into()) - } else { - Ok(format!("Found {matches_found} matches:\n{output}").into()) - } - }).into() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use assistant_tool::Tool; - use gpui::{AppContext, TestAppContext, UpdateGlobal}; - use language::{Language, LanguageConfig, LanguageMatcher}; - use language_model::fake_provider::FakeLanguageModel; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use unindent::Unindent; - use util::path; - - #[gpui::test] - async fn test_grep_tool_with_include_pattern(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - serde_json::json!({ - "src": { - "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}", - "utils": { - "helper.rs": "fn helper() {\n println!(\"I'm a helper!\");\n}", - }, - }, - "tests": { - "test_main.rs": "fn test_main() {\n assert!(true);\n}", - } - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - // Test with include pattern for Rust files inside the root of the project - let input = serde_json::to_value(GrepToolInput { - regex: "println".to_string(), - include_pattern: Some("root/**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!(result.contains("main.rs"), "Should find matches in main.rs"); - assert!( - result.contains("helper.rs"), - "Should find matches in helper.rs" - ); - assert!( - !result.contains("test_main.rs"), - "Should not include test_main.rs even though it's a .rs file (because it doesn't have the pattern)" - ); - - // Test with include pattern for src directory only - let input = serde_json::to_value(GrepToolInput { - regex: "fn".to_string(), - include_pattern: Some("root/**/src/**".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!( - result.contains("main.rs"), - "Should find matches in src/main.rs" - ); - assert!( - 
result.contains("helper.rs"), - "Should find matches in src/utils/helper.rs" - ); - assert!( - !result.contains("test_main.rs"), - "Should not include test_main.rs as it's not in src directory" - ); - - // Test with empty include pattern (should default to all files) - let input = serde_json::to_value(GrepToolInput { - regex: "fn".to_string(), - include_pattern: None, - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!(result.contains("main.rs"), "Should find matches in main.rs"); - assert!( - result.contains("helper.rs"), - "Should find matches in helper.rs" - ); - assert!( - result.contains("test_main.rs"), - "Should include test_main.rs" - ); - } - - #[gpui::test] - async fn test_grep_tool_with_case_sensitivity(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - serde_json::json!({ - "case_test.txt": "This file has UPPERCASE and lowercase text.\nUPPERCASE patterns should match only with case_sensitive: true", - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - // Test case-insensitive search (default) - let input = serde_json::to_value(GrepToolInput { - regex: "uppercase".to_string(), - include_pattern: Some("**/*.txt".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!( - result.contains("UPPERCASE"), - "Case-insensitive search should match uppercase" - ); - - // Test case-sensitive search - let input = serde_json::to_value(GrepToolInput { - regex: "uppercase".to_string(), - include_pattern: Some("**/*.txt".to_string()), - offset: 0, - case_sensitive: true, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!( - !result.contains("UPPERCASE"), - "Case-sensitive search should not match uppercase" - ); - - // Test case-sensitive search - let input = serde_json::to_value(GrepToolInput { - regex: "LOWERCASE".to_string(), - include_pattern: Some("**/*.txt".to_string()), - offset: 0, - case_sensitive: true, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - - assert!( - !result.contains("lowercase"), - "Case-sensitive search should match lowercase" - ); - - // Test case-sensitive search for lowercase pattern - let input = serde_json::to_value(GrepToolInput { - regex: "lowercase".to_string(), - include_pattern: Some("**/*.txt".to_string()), - offset: 0, - case_sensitive: true, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - assert!( - result.contains("lowercase"), - "Case-sensitive search should match lowercase text" - ); - } - - /// Helper function to set up a syntax test environment - async fn setup_syntax_test(cx: &mut TestAppContext) -> Entity { - use unindent::Unindent; - init_test(cx); - cx.executor().allow_parking(); - - let fs = FakeFs::new(cx.executor()); - - // Create test file with syntax structures - fs.insert_tree( - path!("/root"), - serde_json::json!({ - "test_syntax.rs": r#" - fn top_level_function() { - println!("This is at the top level"); - } - - mod feature_module { - pub mod nested_module { - pub fn nested_function( - first_arg: String, - second_arg: i32, - ) { - println!("Function in nested module"); - println!("{first_arg}"); - println!("{second_arg}"); - } - } - } - - struct MyStruct { - field1: String, - field2: i32, - } - - impl 
MyStruct { - fn method_with_block() { - let condition = true; - if condition { - println!("Inside if block"); - } - } - - fn long_function() { - println!("Line 1"); - println!("Line 2"); - println!("Line 3"); - println!("Line 4"); - println!("Line 5"); - println!("Line 6"); - println!("Line 7"); - println!("Line 8"); - println!("Line 9"); - println!("Line 10"); - println!("Line 11"); - println!("Line 12"); - } - } - - trait Processor { - fn process(&self, input: &str) -> String; - } - - impl Processor for MyStruct { - fn process(&self, input: &str) -> String { - format!("Processed: {}", input) - } - } - "#.unindent().trim(), - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - - project.update(cx, |project, _cx| { - project.languages().add(rust_lang().into()) - }); - - project - } - - #[gpui::test] - async fn test_grep_top_level_function(cx: &mut TestAppContext) { - let project = setup_syntax_test(cx).await; - - // Test: Line at the top level of the file - let input = serde_json::to_value(GrepToolInput { - regex: "This is at the top level".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### fn top_level_function › L1-3 - ``` - fn top_level_function() { - println!("This is at the top level"); - } - ``` - "# - .unindent(); - assert_eq!(result, expected); - } - - #[gpui::test] - async fn test_grep_function_body(cx: &mut TestAppContext) { - let project = setup_syntax_test(cx).await; - - // Test: Line inside a function body - let input = serde_json::to_value(GrepToolInput { - regex: "Function in nested module".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### mod feature_module › pub mod nested_module › pub fn nested_function › L10-14 - ``` - ) { - println!("Function in nested module"); - println!("{first_arg}"); - println!("{second_arg}"); - } - ``` - "# - .unindent(); - assert_eq!(result, expected); - } - - #[gpui::test] - async fn test_grep_function_args_and_body(cx: &mut TestAppContext) { - let project = setup_syntax_test(cx).await; - - // Test: Line with a function argument - let input = serde_json::to_value(GrepToolInput { - regex: "second_arg".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### mod feature_module › pub mod nested_module › pub fn nested_function › L7-14 - ``` - pub fn nested_function( - first_arg: String, - second_arg: i32, - ) { - println!("Function in nested module"); - println!("{first_arg}"); - println!("{second_arg}"); - } - ``` - "# - .unindent(); - assert_eq!(result, expected); - } - - #[gpui::test] - async fn test_grep_if_block(cx: &mut TestAppContext) { - use unindent::Unindent; - let project = setup_syntax_test(cx).await; - - // Test: Line inside an if block - let input = serde_json::to_value(GrepToolInput { - regex: "Inside if block".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = 
run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### impl MyStruct › fn method_with_block › L26-28 - ``` - if condition { - println!("Inside if block"); - } - ``` - "# - .unindent(); - assert_eq!(result, expected); - } - - #[gpui::test] - async fn test_grep_long_function_top(cx: &mut TestAppContext) { - use unindent::Unindent; - let project = setup_syntax_test(cx).await; - - // Test: Line in the middle of a long function - should show message about remaining lines - let input = serde_json::to_value(GrepToolInput { - regex: "Line 5".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### impl MyStruct › fn long_function › L31-41 - ``` - fn long_function() { - println!("Line 1"); - println!("Line 2"); - println!("Line 3"); - println!("Line 4"); - println!("Line 5"); - println!("Line 6"); - println!("Line 7"); - println!("Line 8"); - println!("Line 9"); - println!("Line 10"); - ``` - - 3 lines remaining in ancestor node. Read the file to see all. - "# - .unindent(); - assert_eq!(result, expected); - } - - #[gpui::test] - async fn test_grep_long_function_bottom(cx: &mut TestAppContext) { - use unindent::Unindent; - let project = setup_syntax_test(cx).await; - - // Test: Line in the long function - let input = serde_json::to_value(GrepToolInput { - regex: "Line 12".to_string(), - include_pattern: Some("**/*.rs".to_string()), - offset: 0, - case_sensitive: false, - }) - .unwrap(); - - let result = run_grep_tool(input, project.clone(), cx).await; - let expected = r#" - Found 1 matches: - - ## Matches in root/test_syntax.rs - - ### impl MyStruct › fn long_function › L41-45 - ``` - println!("Line 10"); - println!("Line 11"); - println!("Line 12"); - } - } - ``` - "# - .unindent(); - assert_eq!(result, expected); - } - - async fn run_grep_tool( - input: serde_json::Value, - project: Entity, - cx: &mut TestAppContext, - ) -> String { - let tool = Arc::new(GrepTool); - let action_log = cx.new(|_cx| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let task = - cx.update(|cx| tool.run(input, Arc::default(), project, action_log, model, None, cx)); - - match task.output.await { - Ok(result) => { - if cfg!(windows) { - result.content.as_str().unwrap().replace("root\\", "root/") - } else { - result.content.as_str().unwrap().to_string() - } - } - Err(e) => panic!("Failed to run grep tool: {}", e), - } - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query(include_str!("../../languages/src/rust/outline.scm")) - .unwrap() - } - - #[gpui::test] - async fn test_grep_security_boundaries(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - - fs.insert_tree( - path!("/"), - json!({ - "project_root": { - "allowed_file.rs": "fn main() { println!(\"This file is in the project\"); }", - ".mysecrets": "SECRET_KEY=abc123\nfn 
secret() { /* private */ }", - ".secretdir": { - "config": "fn special_configuration() { /* excluded */ }" - }, - ".mymetadata": "fn custom_metadata() { /* excluded */ }", - "subdir": { - "normal_file.rs": "fn normal_file_content() { /* Normal */ }", - "special.privatekey": "fn private_key_content() { /* private */ }", - "data.mysensitive": "fn sensitive_data() { /* private */ }" - } - }, - "outside_project": { - "sensitive_file.rs": "fn outside_function() { /* This file is outside the project */ }" - } - }), - ) - .await; - - cx.update(|cx| { - use gpui::UpdateGlobal; - use settings::SettingsStore; - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = Some(vec![ - "**/.secretdir".to_string(), - "**/.mymetadata".to_string(), - ]); - settings.project.worktree.private_files = Some( - vec![ - "**/.mysecrets".to_string(), - "**/*.privatekey".to_string(), - "**/*.mysensitive".to_string(), - ] - .into(), - ); - }); - }); - }); - - let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // Searching for files outside the project worktree should return no results - let result = cx - .update(|cx| { - let input = json!({ - "regex": "outside_function" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not find files outside the project worktree" - ); - - // Searching within the project should succeed - let result = cx - .update(|cx| { - let input = json!({ - "regex": "main" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.iter().any(|p| p.contains("allowed_file.rs")), - "grep_tool should be able to search files inside worktrees" - ); - - // Searching files that match file_scan_exclusions should return no results - let result = cx - .update(|cx| { - let input = json!({ - "regex": "special_configuration" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not search files in .secretdir (file_scan_exclusions)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "regex": "custom_metadata" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not search .mymetadata files (file_scan_exclusions)" - ); - - // Searching private files should return no results - let result = cx - .update(|cx| { - let input = json!({ - "regex": "SECRET_KEY" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - 
action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not search .mysecrets (private_files)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "regex": "private_key_content" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not search .privatekey files (private_files)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "regex": "sensitive_data" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not search .mysensitive files (private_files)" - ); - - // Searching a normal file should still work, even with private_files configured - let result = cx - .update(|cx| { - let input = json!({ - "regex": "normal_file_content" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.iter().any(|p| p.contains("normal_file.rs")), - "Should be able to search normal files" - ); - - // Path traversal attempts with .. 
in include_pattern should not escape project - let result = cx - .update(|cx| { - let input = json!({ - "regex": "outside_function", - "include_pattern": "../outside_project/**/*.rs" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let results = result.unwrap(); - let paths = extract_paths_from_results(results.content.as_str().unwrap()); - assert!( - paths.is_empty(), - "grep_tool should not allow escaping project boundaries with relative paths" - ); - } - - #[gpui::test] - async fn test_grep_with_multiple_worktree_settings(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - - // Create first worktree with its own private files - fs.insert_tree( - path!("/worktree1"), - json!({ - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/fixture.*"], - "private_files": ["**/secret.rs"] - }"# - }, - "src": { - "main.rs": "fn main() { let secret_key = \"hidden\"; }", - "secret.rs": "const API_KEY: &str = \"secret_value\";", - "utils.rs": "pub fn get_config() -> String { \"config\".to_string() }" - }, - "tests": { - "test.rs": "fn test_secret() { assert!(true); }", - "fixture.sql": "SELECT * FROM secret_table;" - } - }), - ) - .await; - - // Create second worktree with different private files - fs.insert_tree( - path!("/worktree2"), - json!({ - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/internal.*"], - "private_files": ["**/private.js", "**/data.json"] - }"# - }, - "lib": { - "public.js": "export function getSecret() { return 'public'; }", - "private.js": "const SECRET_KEY = \"private_value\";", - "data.json": "{\"secret_data\": \"hidden\"}" - }, - "docs": { - "README.md": "# Documentation with secret info", - "internal.md": "Internal secret documentation" - } - }), - ) - .await; - - // Set global settings - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = - Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]); - settings.project.worktree.private_files = - Some(vec!["**/.env".to_string()].into()); - }); - }); - }); - - let project = Project::test( - fs.clone(), - [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], - cx, - ) - .await; - - // Wait for worktrees to be fully scanned - cx.executor().run_until_parked(); - - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // Search for "secret" - should exclude files based on worktree-specific settings - let result = cx - .update(|cx| { - let input = json!({ - "regex": "secret", - "case_sensitive": false - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - let paths = extract_paths_from_results(content); - - // Should find matches in non-private files - assert!( - paths.iter().any(|p| p.contains("main.rs")), - "Should find 'secret' in worktree1/src/main.rs" - ); - assert!( - paths.iter().any(|p| p.contains("test.rs")), - "Should find 'secret' in worktree1/tests/test.rs" - ); - assert!( - paths.iter().any(|p| p.contains("public.js")), - "Should find 'secret' in worktree2/lib/public.js" - ); - assert!( - paths.iter().any(|p| p.contains("README.md")), - "Should find 'secret' in 
worktree2/docs/README.md" - ); - - // Should NOT find matches in private/excluded files based on worktree settings - assert!( - !paths.iter().any(|p| p.contains("secret.rs")), - "Should not search in worktree1/src/secret.rs (local private_files)" - ); - assert!( - !paths.iter().any(|p| p.contains("fixture.sql")), - "Should not search in worktree1/tests/fixture.sql (local file_scan_exclusions)" - ); - assert!( - !paths.iter().any(|p| p.contains("private.js")), - "Should not search in worktree2/lib/private.js (local private_files)" - ); - assert!( - !paths.iter().any(|p| p.contains("data.json")), - "Should not search in worktree2/lib/data.json (local private_files)" - ); - assert!( - !paths.iter().any(|p| p.contains("internal.md")), - "Should not search in worktree2/docs/internal.md (local file_scan_exclusions)" - ); - - // Test with `include_pattern` specific to one worktree - let result = cx - .update(|cx| { - let input = json!({ - "regex": "secret", - "include_pattern": "worktree1/**/*.rs" - }); - Arc::new(GrepTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - let paths = extract_paths_from_results(content); - - // Should only find matches in worktree1 *.rs files (excluding private ones) - assert!( - paths.iter().any(|p| p.contains("main.rs")), - "Should find match in worktree1/src/main.rs" - ); - assert!( - paths.iter().any(|p| p.contains("test.rs")), - "Should find match in worktree1/tests/test.rs" - ); - assert!( - !paths.iter().any(|p| p.contains("secret.rs")), - "Should not find match in excluded worktree1/src/secret.rs" - ); - assert!( - paths.iter().all(|p| !p.contains("worktree2")), - "Should not find any matches in worktree2" - ); - } - - // Helper function to extract file paths from grep results - fn extract_paths_from_results(results: &str) -> Vec { - results - .lines() - .filter(|line| line.starts_with("## Matches in ")) - .map(|line| { - line.strip_prefix("## Matches in ") - .unwrap() - .trim() - .to_string() - }) - .collect() - } -} diff --git a/crates/assistant_tools/src/grep_tool/description.md b/crates/assistant_tools/src/grep_tool/description.md deleted file mode 100644 index e3c0b43f31da53df49ce905e764dedcc5ea530de..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/grep_tool/description.md +++ /dev/null @@ -1,9 +0,0 @@ -Searches the contents of files in the project with a regular expression - -- Prefer this tool to path search when searching for symbols in the project, because you won't need to guess what path it's in. -- Supports full regex syntax (eg. "log.*Error", "function\\s+\\w+", etc.) -- Pass an `include_pattern` if you know how to narrow your search on the files system -- Never use this tool to search for paths. Only search file contents with this tool. -- Use this tool when you need to find files containing specific patterns -- Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages. -- DO NOT use HTML entities solely to escape characters in the tool parameters. 
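The grep_tool description removed above documents a small search API: a required `regex`, an optional `include_pattern` glob, optional `case_sensitive`, and pagination via an optional `offset` (20 matches per page). As a minimal sketch grounded only in that description and the deleted tests — the exact input schema lives in the removed crate and is not shown here — an invocation payload could be assembled like this:

```rust
use serde_json::json;

fn main() {
    // Minimal sketch of an input payload for the removed grep tool, mirroring
    // the fields exercised by the deleted tests: `regex` is required, while
    // `include_pattern`, `case_sensitive`, and `offset` are optional.
    let input = json!({
        "regex": "function\\s+\\w+",       // full regex syntax is supported
        "include_pattern": "src/**/*.rs",  // optional glob to narrow the search
        "case_sensitive": false,           // optional, as in the multi-worktree test
        "offset": 0                        // optional; results are paginated, 20 per page
    });
    println!("{input}");
}
```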
diff --git a/crates/assistant_tools/src/list_directory_tool.rs b/crates/assistant_tools/src/list_directory_tool.rs deleted file mode 100644 index 7d70f41a8c5000b433d47e8caa2a60d3a8024b99..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/list_directory_tool.rs +++ /dev/null @@ -1,869 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::{Project, ProjectPath, WorktreeSettings}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::Settings; -use std::{fmt::Write, sync::Arc}; -use ui::IconName; -use util::markdown::MarkdownInlineCode; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct ListDirectoryToolInput { - /// The fully-qualified path of the directory to list in the project. - /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. - /// - /// - /// If the project has the following root directories: - /// - /// - directory1 - /// - directory2 - /// - /// You can list the contents of `directory1` by using the path `directory1`. - /// - /// - /// - /// If the project has the following root directories: - /// - /// - foo - /// - bar - /// - /// If you wanna list contents in the directory `foo/baz`, you should use the path `foo/baz`. - /// - pub path: String, -} - -pub struct ListDirectoryTool; - -impl Tool for ListDirectoryTool { - fn name(&self) -> String { - "list_directory".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./list_directory_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolFolder - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let path = MarkdownInlineCode(&input.path); - format!("List the {path} directory's contents") - } - Err(_) => "List directory".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let path_style = project.read(cx).path_style(cx); - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - // Sometimes models will return these even though we tell it to give a path and not a glob. - // When this happens, just list the root worktree directories. - if matches!(input.path.as_str(), "." 
| "" | "./" | "*") { - let output = project - .read(cx) - .worktrees(cx) - .filter_map(|worktree| { - worktree.read(cx).root_entry().and_then(|entry| { - if entry.is_dir() { - Some(entry.path.display(path_style)) - } else { - None - } - }) - }) - .collect::>() - .join("\n"); - - return Task::ready(Ok(output.into())).into(); - } - - let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else { - return Task::ready(Err(anyhow!("Path {} not found in project", input.path))).into(); - }; - let Some(worktree) = project - .read(cx) - .worktree_for_id(project_path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!("Worktree not found"))).into(); - }; - - // Check if the directory whose contents we're listing is itself excluded or private - let global_settings = WorktreeSettings::get_global(cx); - if global_settings.is_path_excluded(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot list directory because its path matches the user's global `file_scan_exclusions` setting: {}", - &input.path - ))) - .into(); - } - - if global_settings.is_path_private(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot list directory because its path matches the user's global `private_files` setting: {}", - &input.path - ))) - .into(); - } - - let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); - if worktree_settings.is_path_excluded(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot list directory because its path matches the user's worktree`file_scan_exclusions` setting: {}", - &input.path - ))) - .into(); - } - - if worktree_settings.is_path_private(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot list directory because its path matches the user's worktree `private_paths` setting: {}", - &input.path - ))) - .into(); - } - - let worktree_snapshot = worktree.read(cx).snapshot(); - - let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else { - return Task::ready(Err(anyhow!("Path not found: {}", input.path))).into(); - }; - - if !entry.is_dir() { - return Task::ready(Err(anyhow!("{} is not a directory.", input.path))).into(); - } - let worktree_snapshot = worktree.read(cx).snapshot(); - - let mut folders = Vec::new(); - let mut files = Vec::new(); - - for entry in worktree_snapshot.child_entries(&project_path.path) { - // Skip private and excluded files and directories - if global_settings.is_path_private(&entry.path) - || global_settings.is_path_excluded(&entry.path) - { - continue; - } - - let project_path = ProjectPath { - worktree_id: worktree_snapshot.id(), - path: entry.path.clone(), - }; - let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); - - if worktree_settings.is_path_excluded(&project_path.path) - || worktree_settings.is_path_private(&project_path.path) - { - continue; - } - - let full_path = worktree_snapshot - .root_name() - .join(&entry.path) - .display(worktree_snapshot.path_style()) - .to_string(); - if entry.is_dir() { - folders.push(full_path); - } else { - files.push(full_path); - } - } - - let mut output = String::new(); - - if !folders.is_empty() { - writeln!(output, "# Folders:\n{}", folders.join("\n")).unwrap(); - } - - if !files.is_empty() { - writeln!(output, "\n# Files:\n{}", files.join("\n")).unwrap(); - } - - if output.is_empty() { - writeln!(output, "{} is empty.", input.path).unwrap(); - } - - Task::ready(Ok(output.into())).into() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use assistant_tool::Tool; - use 
gpui::{AppContext, TestAppContext, UpdateGlobal}; - use indoc::indoc; - use language_model::fake_provider::FakeLanguageModel; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - - fn platform_paths(path_str: &str) -> String { - if cfg!(target_os = "windows") { - path_str.replace("/", "\\") - } else { - path_str.to_string() - } - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } - - #[gpui::test] - async fn test_list_directory_separates_files_and_dirs(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "src": { - "main.rs": "fn main() {}", - "lib.rs": "pub fn hello() {}", - "models": { - "user.rs": "struct User {}", - "post.rs": "struct Post {}" - }, - "utils": { - "helper.rs": "pub fn help() {}" - } - }, - "tests": { - "integration_test.rs": "#[test] fn test() {}" - }, - "README.md": "# Project", - "Cargo.toml": "[package]" - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ListDirectoryTool); - - // Test listing root directory - let input = json!({ - "path": "project" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert_eq!( - content, - platform_paths(indoc! {" - # Folders: - project/src - project/tests - - # Files: - project/Cargo.toml - project/README.md - "}) - ); - - // Test listing src directory - let input = json!({ - "path": "project/src" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert_eq!( - content, - platform_paths(indoc! 
{" - # Folders: - project/src/models - project/src/utils - - # Files: - project/src/lib.rs - project/src/main.rs - "}) - ); - - // Test listing directory with only files - let input = json!({ - "path": "project/tests" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert!(!content.contains("# Folders:")); - assert!(content.contains("# Files:")); - assert!(content.contains(&platform_paths("project/tests/integration_test.rs"))); - } - - #[gpui::test] - async fn test_list_directory_empty_directory(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "empty_dir": {} - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ListDirectoryTool); - - let input = json!({ - "path": "project/empty_dir" - }); - - let result = cx - .update(|cx| tool.run(input, Arc::default(), project, action_log, model, None, cx)) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert_eq!(content, "project/empty_dir is empty.\n"); - } - - #[gpui::test] - async fn test_list_directory_error_cases(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "file.txt": "content" - }), - ) - .await; - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ListDirectoryTool); - - // Test non-existent path - let input = json!({ - "path": "project/nonexistent" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Path not found")); - - // Test trying to list a file instead of directory - let input = json!({ - "path": "project/file.txt" - }); - - let result = cx - .update(|cx| tool.run(input, Arc::default(), project, action_log, model, None, cx)) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("is not a directory") - ); - } - - #[gpui::test] - async fn test_list_directory_security(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/project"), - json!({ - "normal_dir": { - "file1.txt": "content", - "file2.txt": "content" - }, - ".mysecrets": "SECRET_KEY=abc123", - ".secretdir": { - "config": "special configuration", - "secret.txt": "secret content" - }, - ".mymetadata": "custom metadata", - "visible_dir": { - "normal.txt": "normal content", - "special.privatekey": "private key content", - "data.mysensitive": "sensitive data", - ".hidden_subdir": { - "hidden_file.txt": "hidden content" - } - } - }), - ) - .await; - - // Configure settings explicitly - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = Some(vec![ - 
"**/.secretdir".to_string(), - "**/.mymetadata".to_string(), - "**/.hidden_subdir".to_string(), - ]); - settings.project.worktree.private_files = Some( - vec![ - "**/.mysecrets".to_string(), - "**/*.privatekey".to_string(), - "**/*.mysensitive".to_string(), - ] - .into(), - ); - }); - }); - }); - - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ListDirectoryTool); - - // Listing root directory should exclude private and excluded files - let input = json!({ - "path": "project" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - - // Should include normal directories - assert!(content.contains("normal_dir"), "Should list normal_dir"); - assert!(content.contains("visible_dir"), "Should list visible_dir"); - - // Should NOT include excluded or private files - assert!( - !content.contains(".secretdir"), - "Should not list .secretdir (file_scan_exclusions)" - ); - assert!( - !content.contains(".mymetadata"), - "Should not list .mymetadata (file_scan_exclusions)" - ); - assert!( - !content.contains(".mysecrets"), - "Should not list .mysecrets (private_files)" - ); - - // Trying to list an excluded directory should fail - let input = json!({ - "path": "project/.secretdir" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!( - result.is_err(), - "Should not be able to list excluded directory" - ); - assert!( - result - .unwrap_err() - .to_string() - .contains("file_scan_exclusions"), - "Error should mention file_scan_exclusions" - ); - - // Listing a directory should exclude private files within it - let input = json!({ - "path": "project/visible_dir" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - - // Should include normal files - assert!(content.contains("normal.txt"), "Should list normal.txt"); - - // Should NOT include private files - assert!( - !content.contains("privatekey"), - "Should not list .privatekey files (private_files)" - ); - assert!( - !content.contains("mysensitive"), - "Should not list .mysensitive files (private_files)" - ); - - // Should NOT include subdirectories that match exclusions - assert!( - !content.contains(".hidden_subdir"), - "Should not list .hidden_subdir (file_scan_exclusions)" - ); - } - - #[gpui::test] - async fn test_list_directory_with_multiple_worktree_settings(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - - // Create first worktree with its own private files - fs.insert_tree( - path!("/worktree1"), - json!({ - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/fixture.*"], - "private_files": ["**/secret.rs", "**/config.toml"] - }"# - }, - "src": { - "main.rs": "fn main() { println!(\"Hello from worktree1\"); }", - "secret.rs": "const API_KEY: &str = \"secret_key_1\";", - "config.toml": "[database]\nurl = \"postgres://localhost/db1\"" - }, - "tests": { - "test.rs": "mod tests { 
fn test_it() {} }", - "fixture.sql": "CREATE TABLE users (id INT, name VARCHAR(255));" - } - }), - ) - .await; - - // Create second worktree with different private files - fs.insert_tree( - path!("/worktree2"), - json!({ - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/internal.*"], - "private_files": ["**/private.js", "**/data.json"] - }"# - }, - "lib": { - "public.js": "export function greet() { return 'Hello from worktree2'; }", - "private.js": "const SECRET_TOKEN = \"private_token_2\";", - "data.json": "{\"api_key\": \"json_secret_key\"}" - }, - "docs": { - "README.md": "# Public Documentation", - "internal.md": "# Internal Secrets and Configuration" - } - }), - ) - .await; - - // Set global settings - cx.update(|cx| { - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = - Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]); - settings.project.worktree.private_files = - Some(vec!["**/.env".to_string()].into()); - }); - }); - }); - - let project = Project::test( - fs.clone(), - [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], - cx, - ) - .await; - - // Wait for worktrees to be fully scanned - cx.executor().run_until_parked(); - - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ListDirectoryTool); - - // Test listing worktree1/src - should exclude secret.rs and config.toml based on local settings - let input = json!({ - "path": "worktree1/src" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert!(content.contains("main.rs"), "Should list main.rs"); - assert!( - !content.contains("secret.rs"), - "Should not list secret.rs (local private_files)" - ); - assert!( - !content.contains("config.toml"), - "Should not list config.toml (local private_files)" - ); - - // Test listing worktree1/tests - should exclude fixture.sql based on local settings - let input = json!({ - "path": "worktree1/tests" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert!(content.contains("test.rs"), "Should list test.rs"); - assert!( - !content.contains("fixture.sql"), - "Should not list fixture.sql (local file_scan_exclusions)" - ); - - // Test listing worktree2/lib - should exclude private.js and data.json based on local settings - let input = json!({ - "path": "worktree2/lib" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert!(content.contains("public.js"), "Should list public.js"); - assert!( - !content.contains("private.js"), - "Should not list private.js (local private_files)" - ); - assert!( - !content.contains("data.json"), - "Should not list data.json (local private_files)" - ); - - // Test listing worktree2/docs - should exclude internal.md based on local settings - let input = json!({ - "path": "worktree2/docs" - }); - - let result = cx - .update(|cx| { - 
tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - let content = result.content.as_str().unwrap(); - assert!(content.contains("README.md"), "Should list README.md"); - assert!( - !content.contains("internal.md"), - "Should not list internal.md (local file_scan_exclusions)" - ); - - // Test trying to list an excluded directory directly - let input = json!({ - "path": "worktree1/src/secret.rs" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - // This should fail because we're trying to list a file, not a directory - assert!(result.is_err(), "Should fail when trying to list a file"); - } -} diff --git a/crates/assistant_tools/src/list_directory_tool/description.md b/crates/assistant_tools/src/list_directory_tool/description.md deleted file mode 100644 index 30dcc012ff316c944a7495dc14457cfd9df93bb7..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/list_directory_tool/description.md +++ /dev/null @@ -1 +0,0 @@ -Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase. diff --git a/crates/assistant_tools/src/move_path_tool.rs b/crates/assistant_tools/src/move_path_tool.rs deleted file mode 100644 index 22dbe9e625468d8c2688b60bdcd94a7da594730e..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/move_path_tool.rs +++ /dev/null @@ -1,132 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{path::Path, sync::Arc}; -use ui::IconName; -use util::markdown::MarkdownInlineCode; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct MovePathToolInput { - /// The source path of the file or directory to move/rename. - /// - /// - /// If the project has the following files: - /// - /// - directory1/a/something.txt - /// - directory2/a/things.txt - /// - directory3/a/other.txt - /// - /// You can move the first file by providing a source_path of "directory1/a/something.txt" - /// - pub source_path: String, - - /// The destination path where the file or directory should be moved/renamed to. - /// If the paths are the same except for the filename, then this will be a rename. 
- /// - /// - /// To move "directory1/a/something.txt" to "directory2/b/renamed.txt", - /// provide a destination_path of "directory2/b/renamed.txt" - /// - pub destination_path: String, -} - -pub struct MovePathTool; - -impl Tool for MovePathTool { - fn name(&self) -> String { - "move_path".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - true - } - - fn description(&self) -> String { - include_str!("./move_path_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ArrowRightLeft - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let src = MarkdownInlineCode(&input.source_path); - let dest = MarkdownInlineCode(&input.destination_path); - let src_path = Path::new(&input.source_path); - let dest_path = Path::new(&input.destination_path); - - match dest_path - .file_name() - .and_then(|os_str| os_str.to_os_string().into_string().ok()) - { - Some(filename) if src_path.parent() == dest_path.parent() => { - let filename = MarkdownInlineCode(&filename); - format!("Rename {src} to {filename}") - } - _ => { - format!("Move {src} to {dest}") - } - } - } - Err(_) => "Move path".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - let rename_task = project.update(cx, |project, cx| { - match project - .find_project_path(&input.source_path, cx) - .and_then(|project_path| project.entry_for_path(&project_path, cx)) - { - Some(entity) => match project.find_project_path(&input.destination_path, cx) { - Some(project_path) => project.rename_entry(entity.id, project_path, cx), - None => Task::ready(Err(anyhow!( - "Destination path {} was outside the project.", - input.destination_path - ))), - }, - None => Task::ready(Err(anyhow!( - "Source path {} was not found in the project.", - input.source_path - ))), - } - }); - - cx.background_spawn(async move { - let _ = rename_task.await.with_context(|| { - format!("Moving {} to {}", input.source_path, input.destination_path) - })?; - Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into()) - }) - .into() - } -} diff --git a/crates/assistant_tools/src/move_path_tool/description.md b/crates/assistant_tools/src/move_path_tool/description.md deleted file mode 100644 index 76bc3003d003c44afdd9036cb6691d5fc432291d..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/move_path_tool/description.md +++ /dev/null @@ -1,5 +0,0 @@ -Moves or rename a file or directory in the project, and returns confirmation that the move succeeded. -If the source and destination directories are the same, but the filename is different, this performs -a rename. Otherwise, it performs a move. - -This tool should be used when it's desirable to move or rename a file or directory without changing its contents at all. 
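The move_path description removed above distinguishes a rename (same parent directory, different filename) from a move. A minimal sketch of that check, assuming the same parent-directory comparison used by the deleted `ui_text` implementation; the paths are illustrative, taken from the removed doc-comment examples:

```rust
use std::path::Path;

fn main() {
    // Same parent directory with a different filename is reported as a rename;
    // anything else is reported as a move (mirrors the deleted MovePathTool::ui_text).
    let src = Path::new("directory1/a/something.txt");
    let dest = Path::new("directory1/a/renamed.txt");
    let label = if src.parent() == dest.parent() { "Rename" } else { "Move" };
    println!("{label} {} to {}", src.display(), dest.display());
}
```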
diff --git a/crates/assistant_tools/src/now_tool.rs b/crates/assistant_tools/src/now_tool.rs deleted file mode 100644 index f50ad065d1cd320aa1a82e4ce17f744d6b04be2c..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/now_tool.rs +++ /dev/null @@ -1,84 +0,0 @@ -use std::sync::Arc; - -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use chrono::{Local, Utc}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use ui::IconName; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub enum Timezone { - /// Use UTC for the datetime. - Utc, - /// Use local time for the datetime. - Local, -} - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct NowToolInput { - /// The timezone to use for the datetime. - timezone: Timezone, -} - -pub struct NowTool; - -impl Tool for NowTool { - fn name(&self) -> String { - "now".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - "Returns the current datetime in RFC 3339 format. Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime.".into() - } - - fn icon(&self) -> IconName { - IconName::Info - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - "Get current time".to_string() - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - _cx: &mut App, - ) -> ToolResult { - let input: NowToolInput = match serde_json::from_value(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let now = match input.timezone { - Timezone::Utc => Utc::now().to_rfc3339(), - Timezone::Local => Local::now().to_rfc3339(), - }; - let text = format!("The current datetime is {now}."); - - Task::ready(Ok(text.into())).into() - } -} diff --git a/crates/assistant_tools/src/open_tool.rs b/crates/assistant_tools/src/open_tool.rs deleted file mode 100644 index a1aafad041364b0ffca01cc1890c2cc10b3d7b01..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/open_tool.rs +++ /dev/null @@ -1,170 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, AppContext, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{path::PathBuf, sync::Arc}; -use ui::IconName; -use util::markdown::MarkdownEscaped; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct OpenToolInput { - /// The path or URL to open with the default application. 
- path_or_url: String, -} - -pub struct OpenTool; - -impl Tool for OpenTool { - fn name(&self) -> String { - "open".to_string() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - true - } - fn may_perform_edits(&self) -> bool { - false - } - fn description(&self) -> String { - include_str!("./open_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ArrowUpRight - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => format!("Open `{}`", MarkdownEscaped(&input.path_or_url)), - Err(_) => "Open file or URL".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input: OpenToolInput = match serde_json::from_value(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - // If path_or_url turns out to be a path in the project, make it absolute. - let abs_path = to_absolute_path(&input.path_or_url, project, cx); - - cx.background_spawn(async move { - match abs_path { - Some(path) => open::that(path), - None => open::that(&input.path_or_url), - } - .context("Failed to open URL or file path")?; - - Ok(format!("Successfully opened {}", input.path_or_url).into()) - }) - .into() - } -} - -fn to_absolute_path( - potential_path: &str, - project: Entity, - cx: &mut App, -) -> Option { - let project = project.read(cx); - project - .find_project_path(PathBuf::from(potential_path), cx) - .and_then(|project_path| project.absolute_path(&project_path, cx)) -} - -#[cfg(test)] -mod tests { - use super::*; - use gpui::TestAppContext; - use project::{FakeFs, Project}; - use settings::SettingsStore; - use std::path::Path; - use tempfile::TempDir; - - #[gpui::test] - async fn test_to_absolute_path(cx: &mut TestAppContext) { - init_test(cx); - let temp_dir = TempDir::new().expect("Failed to create temp directory"); - let temp_path = temp_dir.path().to_string_lossy().into_owned(); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - &temp_path, - serde_json::json!({ - "src": { - "main.rs": "fn main() {}", - "lib.rs": "pub fn lib_fn() {}" - }, - "docs": { - "readme.md": "# Project Documentation" - } - }), - ) - .await; - - // Use the temp_path as the root directory, not just its filename - let project = Project::test(fs.clone(), [temp_dir.path()], cx).await; - - // Test cases where the function should return Some - cx.update(|cx| { - // Project-relative paths should return Some - // Create paths using the last segment of the temp path to simulate a project-relative path - let root_dir_name = Path::new(&temp_path) - .file_name() - .unwrap_or_else(|| std::ffi::OsStr::new("temp")) - .to_string_lossy(); - - assert!( - to_absolute_path(&format!("{root_dir_name}/src/main.rs"), project.clone(), cx) - .is_some(), - "Failed to resolve main.rs path" - ); - - assert!( - to_absolute_path( - &format!("{root_dir_name}/docs/readme.md",), - project.clone(), - cx, - ) - .is_some(), - "Failed to resolve readme.md path" - ); - - // External URL should return None - let result = to_absolute_path("https://example.com", project.clone(), cx); - assert_eq!(result, None, "External URLs should return None"); - - // Path outside project - let result = to_absolute_path("../invalid/path", 
project.clone(), cx); - assert_eq!(result, None, "Paths outside the project should return None"); - }); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } -} diff --git a/crates/assistant_tools/src/open_tool/description.md b/crates/assistant_tools/src/open_tool/description.md deleted file mode 100644 index 99ccbb0524473b8c740d6ecd2d9ca9555e1e7028..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/open_tool/description.md +++ /dev/null @@ -1,9 +0,0 @@ -This tool opens a file or URL with the default application associated with it on the user's operating system: -- On macOS, it's equivalent to the `open` command -- On Windows, it's equivalent to `start` -- On Linux, it uses something like `xdg-open`, `gio open`, `gnome-open`, `kde-open`, `wslview` as appropriate - -For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc. - -You MUST ONLY use this tool when the user has explicitly requested opening something. You MUST NEVER assume that -the user would like for you to use this tool. diff --git a/crates/assistant_tools/src/project_notifications_tool.rs b/crates/assistant_tools/src/project_notifications_tool.rs deleted file mode 100644 index e30d80207dae4de1e69efe99724a2a5343b57664..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/project_notifications_tool.rs +++ /dev/null @@ -1,360 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::Result; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::{fmt::Write, sync::Arc}; -use ui::IconName; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct ProjectUpdatesToolInput {} - -pub struct ProjectNotificationsTool; - -impl Tool for ProjectNotificationsTool { - fn name(&self) -> String { - "project_notifications".to_string() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - fn may_perform_edits(&self) -> bool { - false - } - fn description(&self) -> String { - include_str!("./project_notifications_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ToolNotification - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - "Check project notifications".into() - } - - fn run( - self: Arc, - _input: serde_json::Value, - _request: Arc, - _project: Entity, - action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let Some(user_edits_diff) = - action_log.update(cx, |log, cx| log.flush_unnotified_user_edits(cx)) - else { - return result("No new notifications"); - }; - - // NOTE: Changes to this prompt require a symmetric update in the LLM Worker - const HEADER: &str = include_str!("./project_notifications_tool/prompt_header.txt"); - const MAX_BYTES: usize = 8000; - let diff = fit_patch_to_size(&user_edits_diff, MAX_BYTES); - result(&format!("{HEADER}\n\n```diff\n{diff}\n```\n").replace("\r\n", "\n")) - } -} - -fn result(response: &str) -> ToolResult { - Task::ready(Ok(response.to_string().into())).into() -} - 
-/// Make sure that the patch fits into the size limit (in bytes). -/// Compress the patch by omitting some parts if needed. -/// Unified diff format is assumed. -fn fit_patch_to_size(patch: &str, max_size: usize) -> String { - if patch.len() <= max_size { - return patch.to_string(); - } - - // Compression level 1: remove context lines in diff bodies, but - // leave the counts and positions of inserted/deleted lines - let mut current_size = patch.len(); - let mut file_patches = split_patch(patch); - file_patches.sort_by_key(|patch| patch.len()); - let compressed_patches = file_patches - .iter() - .rev() - .map(|patch| { - if current_size > max_size { - let compressed = compress_patch(patch).unwrap_or_else(|_| patch.to_string()); - current_size -= patch.len() - compressed.len(); - compressed - } else { - patch.to_string() - } - }) - .collect::>(); - - if current_size <= max_size { - return compressed_patches.join("\n\n"); - } - - // Compression level 2: list paths of the changed files only - let filenames = file_patches - .iter() - .map(|patch| { - let patch = diffy::Patch::from_str(patch).unwrap(); - let path = patch - .modified() - .and_then(|path| path.strip_prefix("b/")) - .unwrap_or_default(); - format!("- {path}\n") - }) - .collect::>(); - - filenames.join("") -} - -/// Split a potentially multi-file patch into multiple single-file patches -fn split_patch(patch: &str) -> Vec { - let mut result = Vec::new(); - let mut current_patch = String::new(); - - for line in patch.lines() { - if line.starts_with("---") && !current_patch.is_empty() { - result.push(current_patch.trim_end_matches('\n').into()); - current_patch = String::new(); - } - current_patch.push_str(line); - current_patch.push('\n'); - } - - if !current_patch.is_empty() { - result.push(current_patch.trim_end_matches('\n').into()); - } - - result -} - -fn compress_patch(patch: &str) -> anyhow::Result { - let patch = diffy::Patch::from_str(patch)?; - let mut out = String::new(); - - writeln!(out, "--- {}", patch.original().unwrap_or("a"))?; - writeln!(out, "+++ {}", patch.modified().unwrap_or("b"))?; - - for hunk in patch.hunks() { - writeln!(out, "@@ -{} +{} @@", hunk.old_range(), hunk.new_range())?; - writeln!(out, "[...skipped...]")?; - } - - Ok(out) -} - -#[cfg(test)] -mod tests { - use super::*; - use assistant_tool::ToolResultContent; - use gpui::{AppContext, TestAppContext}; - use indoc::indoc; - use language_model::{LanguageModelRequest, fake_provider::FakeLanguageModelProvider}; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use std::sync::Arc; - use util::path; - - #[gpui::test] - async fn test_stale_buffer_notification(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/test"), - json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}), - ) - .await; - - let project = Project::test(fs, [path!("/test").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - - let buffer_path = project - .read_with(cx, |project, cx| { - project.find_project_path("test/code.rs", cx) - }) - .unwrap(); - - let buffer = project - .update(cx, |project, cx| { - project.open_buffer(buffer_path.clone(), cx) - }) - .await - .unwrap(); - - // Start tracking the buffer - action_log.update(cx, |log, cx| { - log.buffer_read(buffer.clone(), cx); - }); - cx.run_until_parked(); - - // Run the tool before any changes - let tool = Arc::new(ProjectNotificationsTool); - let provider = 
Arc::new(FakeLanguageModelProvider::default()); - let model: Arc = Arc::new(provider.test_model()); - let request = Arc::new(LanguageModelRequest::default()); - let tool_input = json!({}); - - let result = cx.update(|cx| { - tool.clone().run( - tool_input.clone(), - request.clone(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }); - cx.run_until_parked(); - - let response = result.output.await.unwrap(); - let response_text = match &response.content { - ToolResultContent::Text(text) => text.clone(), - _ => panic!("Expected text response"), - }; - assert_eq!( - response_text.as_str(), - "No new notifications", - "Tool should return 'No new notifications' when no stale buffers" - ); - - // Modify the buffer (makes it stale) - buffer.update(cx, |buffer, cx| { - buffer.edit([(1..1, "\nChange!\n")], None, cx); - }); - cx.run_until_parked(); - - // Run the tool again - let result = cx.update(|cx| { - tool.clone().run( - tool_input.clone(), - request.clone(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }); - cx.run_until_parked(); - - // This time the buffer is stale, so the tool should return a notification - let response = result.output.await.unwrap(); - let response_text = match &response.content { - ToolResultContent::Text(text) => text.clone(), - _ => panic!("Expected text response"), - }; - - assert!( - response_text.contains("These files have changed"), - "Tool should return the stale buffer notification" - ); - assert!( - response_text.contains("test/code.rs"), - "Tool should return the stale buffer notification" - ); - - // Run the tool once more without any changes - should get no new notifications - let result = cx.update(|cx| { - tool.run( - tool_input.clone(), - request.clone(), - project.clone(), - action_log, - model.clone(), - None, - cx, - ) - }); - cx.run_until_parked(); - - let response = result.output.await.unwrap(); - let response_text = match &response.content { - ToolResultContent::Text(text) => text.clone(), - _ => panic!("Expected text response"), - }; - - assert_eq!( - response_text.as_str(), - "No new notifications", - "Tool should return 'No new notifications' when running again without changes" - ); - } - - #[test] - fn test_patch_compression() { - // Given a patch that doesn't fit into the size budget - let patch = indoc! {" - --- a/dir/test.txt - +++ b/dir/test.txt - @@ -1,3 +1,3 @@ - line 1 - -line 2 - +CHANGED - line 3 - @@ -10,2 +10,2 @@ - line 10 - -line 11 - +line eleven - - - --- a/dir/another.txt - +++ b/dir/another.txt - @@ -100,1 +1,1 @@ - -before - +after - "}; - - // When the size deficit can be compensated by dropping the body, - // then the body should be trimmed for larger files first - let limit = patch.len() - 10; - let compressed = fit_patch_to_size(patch, limit); - let expected = indoc! {" - --- a/dir/test.txt - +++ b/dir/test.txt - @@ -1,3 +1,3 @@ - [...skipped...] - @@ -10,2 +10,2 @@ - [...skipped...] - - - --- a/dir/another.txt - +++ b/dir/another.txt - @@ -100,1 +1,1 @@ - -before - +after"}; - assert_eq!(compressed, expected); - - // When the size deficit is too large, then only file paths - // should be returned - let limit = 10; - let compressed = fit_patch_to_size(patch, limit); - let expected = indoc! 
{" - - dir/another.txt - - dir/test.txt - "}; - assert_eq!(compressed, expected); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - assistant_tool::init(cx); - }); - } -} diff --git a/crates/assistant_tools/src/project_notifications_tool/description.md b/crates/assistant_tools/src/project_notifications_tool/description.md deleted file mode 100644 index 24ff678f5e7fd728b94ad4ebce06f2a1dcc6a658..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/project_notifications_tool/description.md +++ /dev/null @@ -1,3 +0,0 @@ -This tool reports which files have been modified by the user since the agent last accessed them. - -It serves as a notification mechanism to inform the agent of recent changes. No immediate action is required in response to these updates. diff --git a/crates/assistant_tools/src/project_notifications_tool/prompt_header.txt b/crates/assistant_tools/src/project_notifications_tool/prompt_header.txt deleted file mode 100644 index f743e239c883c7456f7bdc6e089185c6b994cb44..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/project_notifications_tool/prompt_header.txt +++ /dev/null @@ -1,3 +0,0 @@ -[The following is an auto-generated notification; do not reply] - -These files have changed since the last read: diff --git a/crates/assistant_tools/src/read_file_tool.rs b/crates/assistant_tools/src/read_file_tool.rs deleted file mode 100644 index f9f68491e5846fa1ead09d6976d1f9a9bc99b501..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/read_file_tool.rs +++ /dev/null @@ -1,1190 +0,0 @@ -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use assistant_tool::{ToolResultContent, outline}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use project::{ImageItem, image_store}; - -use assistant_tool::ToolResultOutput; -use indoc::formatdoc; -use itertools::Itertools; -use language::{Anchor, Point}; -use language_model::{ - LanguageModel, LanguageModelImage, LanguageModelRequest, LanguageModelToolSchemaFormat, -}; -use project::{AgentLocation, Project, WorktreeSettings}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::Settings; -use std::sync::Arc; -use ui::IconName; - -/// If the model requests to read a file whose size exceeds this, then -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct ReadFileToolInput { - /// The relative path of the file to read. - /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. - /// - /// - /// If the project has the following root directories: - /// - /// - /a/b/directory1 - /// - /c/d/directory2 - /// - /// If you want to access `file.txt` in `directory1`, you should use the path `directory1/file.txt`. - /// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`. 
- /// - pub path: String, - - /// Optional line number to start reading on (1-based index) - #[serde(default)] - pub start_line: Option, - - /// Optional line number to end reading on (1-based index, inclusive) - #[serde(default)] - pub end_line: Option, -} - -pub struct ReadFileTool; - -impl Tool for ReadFileTool { - fn name(&self) -> String { - "read_file".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./read_file_tool/description.md").into() - } - - fn icon(&self) -> IconName { - IconName::ToolSearch - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let path = &input.path; - match (input.start_line, input.end_line) { - (Some(start), Some(end)) => { - format!( - "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))", - path, start, end, path, start, end - ) - } - (Some(start), None) => { - format!( - "[Read file `{}` (from line {})](@selection:{}:({}-{}))", - path, start, path, start, start - ) - } - _ => format!("[Read file `{}`](@file:{})", path, path), - } - } - Err(_) => "Read file".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - action_log: Entity, - model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else { - return Task::ready(Err(anyhow!("Path {} not found in project", &input.path))).into(); - }; - - // Error out if this path is either excluded or private in global settings - let global_settings = WorktreeSettings::get_global(cx); - if global_settings.is_path_excluded(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot read file because its path matches the global `file_scan_exclusions` setting: {}", - &input.path - ))) - .into(); - } - - if global_settings.is_path_private(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot read file because its path matches the global `private_files` setting: {}", - &input.path - ))) - .into(); - } - - // Error out if this path is either excluded or private in worktree settings - let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); - if worktree_settings.is_path_excluded(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot read file because its path matches the worktree `file_scan_exclusions` setting: {}", - &input.path - ))) - .into(); - } - - if worktree_settings.is_path_private(&project_path.path) { - return Task::ready(Err(anyhow!( - "Cannot read file because its path matches the worktree `private_files` setting: {}", - &input.path - ))) - .into(); - } - - let file_path = input.path.clone(); - - if image_store::is_image_file(&project, &project_path, cx) { - if !model.supports_images() { - return Task::ready(Err(anyhow!( - "Attempted to read an image, but Zed doesn't currently support sending images to {}.", - model.name().0 - ))) - .into(); - } - - let task = cx.spawn(async move |cx| -> Result { - let image_entity: Entity = cx - .update(|cx| { - project.update(cx, |project, cx| { - 
project.open_image(project_path.clone(), cx) - }) - })? - .await?; - - let image = - image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?; - - let language_model_image = cx - .update(|cx| LanguageModelImage::from_image(image, cx))? - .await - .context("processing image")?; - - Ok(ToolResultOutput { - content: ToolResultContent::Image(language_model_image), - output: None, - }) - }); - - return task.into(); - } - - cx.spawn(async move |cx| { - let buffer = cx - .update(|cx| { - project.update(cx, |project, cx| project.open_buffer(project_path, cx)) - })? - .await?; - if buffer.read_with(cx, |buffer, _| { - buffer - .file() - .as_ref() - .is_none_or(|file| !file.disk_state().exists()) - })? { - anyhow::bail!("{file_path} not found"); - } - - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: Anchor::MIN, - }), - cx, - ); - })?; - - // Check if specific line ranges are provided - if input.start_line.is_some() || input.end_line.is_some() { - let mut anchor = None; - let result = buffer.read_with(cx, |buffer, _cx| { - let text = buffer.text(); - // .max(1) because despite instructions to be 1-indexed, sometimes the model passes 0. - let start = input.start_line.unwrap_or(1).max(1); - let start_row = start - 1; - if start_row <= buffer.max_point().row { - let column = buffer.line_indent_for_row(start_row).raw_len(); - anchor = Some(buffer.anchor_before(Point::new(start_row, column))); - } - - let lines = text.split('\n').skip(start_row as usize); - if let Some(end) = input.end_line { - let count = end.saturating_sub(start).saturating_add(1); // Ensure at least 1 line - Itertools::intersperse(lines.take(count as usize), "\n") - .collect::() - .into() - } else { - Itertools::intersperse(lines, "\n") - .collect::() - .into() - } - })?; - - action_log.update(cx, |log, cx| { - log.buffer_read(buffer.clone(), cx); - })?; - - if let Some(anchor) = anchor { - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: anchor, - }), - cx, - ); - })?; - } - - Ok(result) - } else { - // No line ranges specified, so check file size to see if it's too big. - let buffer_content = - outline::get_buffer_content_or_outline(buffer.clone(), Some(&file_path), cx) - .await?; - - action_log.update(cx, |log, cx| { - log.buffer_read(buffer, cx); - })?; - - if buffer_content.is_outline { - Ok(formatdoc! {" - This file was too big to read all at once. - - {} - - Using the line numbers in this outline, you can call this tool again - while specifying the start_line and end_line fields to see the - implementations of symbols in the outline. 
- - Alternatively, you can fall back to the `grep` tool (if available) - to search the file for specific content.", buffer_content.text - } - .into()) - } else { - Ok(buffer_content.text.into()) - } - } - }) - .into() - } -} - -#[cfg(test)] -mod test { - use super::*; - use gpui::{AppContext, TestAppContext, UpdateGlobal}; - use language::{Language, LanguageConfig, LanguageMatcher}; - use language_model::fake_provider::FakeLanguageModel; - use project::{FakeFs, Project}; - use serde_json::json; - use settings::SettingsStore; - use util::path; - - #[gpui::test] - async fn test_read_nonexistent_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/root"), json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/nonexistent_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - assert_eq!( - result.unwrap_err().to_string(), - "root/nonexistent_file.txt not found" - ); - } - - #[gpui::test] - async fn test_read_small_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "small_file.txt": "This is a small file content" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/small_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - assert_eq!( - result.unwrap().content.as_str(), - Some("This is a small file content") - ); - } - - #[gpui::test] - async fn test_read_large_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "large_file.rs": (0..1000).map(|i| format!("struct Test{} {{\n a: u32,\n b: usize,\n}}", i)).collect::>().join("\n") - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - language_registry.add(Arc::new(rust_lang())); - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/large_file.rs" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - let content = result.unwrap(); - let content = content.as_str().unwrap(); - assert_eq!( - content.lines().skip(4).take(6).collect::>(), - vec![ - "struct Test0 [L1-4]", - " a [L2]", - " b [L3]", - "struct Test1 [L5-8]", - " a [L6]", - " b [L7]", - ] - ); - - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/large_file.rs", - "offset": 1 - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - let content = result.unwrap(); - let expected_content 
= (0..1000) - .flat_map(|i| { - vec![ - format!("struct Test{} [L{}-{}]", i, i * 4 + 1, i * 4 + 4), - format!(" a [L{}]", i * 4 + 2), - format!(" b [L{}]", i * 4 + 3), - ] - }) - .collect::>(); - pretty_assertions::assert_eq!( - content - .as_str() - .unwrap() - .lines() - .skip(4) - .take(expected_content.len()) - .collect::>(), - expected_content - ); - } - - #[gpui::test] - async fn test_read_file_with_line_range(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "multiline.txt": "Line 1\nLine 2\nLine 3\nLine 4\nLine 5" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/multiline.txt", - "start_line": 2, - "end_line": 4 - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - assert_eq!( - result.unwrap().content.as_str(), - Some("Line 2\nLine 3\nLine 4") - ); - } - - #[gpui::test] - async fn test_read_file_line_range_edge_cases(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - path!("/root"), - json!({ - "multiline.txt": "Line 1\nLine 2\nLine 3\nLine 4\nLine 5" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // start_line of 0 should be treated as 1 - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/multiline.txt", - "start_line": 0, - "end_line": 2 - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert_eq!(result.unwrap().content.as_str(), Some("Line 1\nLine 2")); - - // end_line of 0 should result in at least 1 line - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/multiline.txt", - "start_line": 1, - "end_line": 0 - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert_eq!(result.unwrap().content.as_str(), Some("Line 1")); - - // when start_line > end_line, should still return at least 1 line - let result = cx - .update(|cx| { - let input = json!({ - "path": "root/multiline.txt", - "start_line": 3, - "end_line": 2 - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log, - model, - None, - cx, - ) - .output - }) - .await; - assert_eq!(result.unwrap().content.as_str(), Some("Line 3")); - } - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - }); - } - - fn rust_lang() -> Language { - Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_outline_query( - r#" - (line_comment) @annotation - - (struct_item - "struct" @context - name: (_) @name) @item - (enum_item - "enum" @context - name: (_) @name) @item - 
(enum_variant - name: (_) @name) @item - (field_declaration - name: (_) @name) @item - (impl_item - "impl" @context - trait: (_)? @name - "for"? @context - type: (_) @name - body: (_ "{" (_)* "}")) @item - (function_item - "fn" @context - name: (_) @name) @item - (mod_item - "mod" @context - name: (_) @name) @item - "#, - ) - .unwrap() - } - - #[gpui::test] - async fn test_read_file_security(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - - fs.insert_tree( - path!("/"), - json!({ - "project_root": { - "allowed_file.txt": "This file is in the project", - ".mysecrets": "SECRET_KEY=abc123", - ".secretdir": { - "config": "special configuration" - }, - ".mymetadata": "custom metadata", - "subdir": { - "normal_file.txt": "Normal file content", - "special.privatekey": "private key content", - "data.mysensitive": "sensitive data" - } - }, - "outside_project": { - "sensitive_file.txt": "This file is outside the project" - } - }), - ) - .await; - - cx.update(|cx| { - use gpui::UpdateGlobal; - use settings::SettingsStore; - SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = Some(vec![ - "**/.secretdir".to_string(), - "**/.mymetadata".to_string(), - ]); - settings.project.worktree.private_files = Some( - vec![ - "**/.mysecrets".to_string(), - "**/*.privatekey".to_string(), - "**/*.mysensitive".to_string(), - ] - .into(), - ); - }); - }); - }); - - let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - - // Reading a file outside the project worktree should fail - let result = cx - .update(|cx| { - let input = json!({ - "path": "/outside_project/sensitive_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read an absolute path outside a worktree" - ); - - // Reading a file within the project should succeed - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/allowed_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_ok(), - "read_file_tool should be able to read files inside worktrees" - ); - - // Reading files that match file_scan_exclusions should fail - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/.secretdir/config" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read files in .secretdir (file_scan_exclusions)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/.mymetadata" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read .mymetadata files (file_scan_exclusions)" - ); - - // Reading private files should fail - let result = cx - .update(|cx| { - let input = json!({ - "path": 
"project_root/.mysecrets" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read .mysecrets (private_files)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/subdir/special.privatekey" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read .privatekey files (private_files)" - ); - - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/subdir/data.mysensitive" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read .mysensitive files (private_files)" - ); - - // Reading a normal file should still work, even with private_files configured - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/subdir/normal_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!(result.is_ok(), "Should be able to read normal files"); - assert_eq!( - result.unwrap().content.as_str().unwrap(), - "Normal file content" - ); - - // Path traversal attempts with .. should fail - let result = cx - .update(|cx| { - let input = json!({ - "path": "project_root/../outside_project/sensitive_file.txt" - }); - Arc::new(ReadFileTool) - .run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - .output - }) - .await; - assert!( - result.is_err(), - "read_file_tool should error when attempting to read a relative path that resolves to outside a worktree" - ); - } - - #[gpui::test] - async fn test_read_file_with_multiple_worktree_settings(cx: &mut TestAppContext) { - init_test(cx); - - let fs = FakeFs::new(cx.executor()); - - // Create first worktree with its own private_files setting - fs.insert_tree( - path!("/worktree1"), - json!({ - "src": { - "main.rs": "fn main() { println!(\"Hello from worktree1\"); }", - "secret.rs": "const API_KEY: &str = \"secret_key_1\";", - "config.toml": "[database]\nurl = \"postgres://localhost/db1\"" - }, - "tests": { - "test.rs": "mod tests { fn test_it() {} }", - "fixture.sql": "CREATE TABLE users (id INT, name VARCHAR(255));" - }, - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/fixture.*"], - "private_files": ["**/secret.rs", "**/config.toml"] - }"# - } - }), - ) - .await; - - // Create second worktree with different private_files setting - fs.insert_tree( - path!("/worktree2"), - json!({ - "lib": { - "public.js": "export function greet() { return 'Hello from worktree2'; }", - "private.js": "const SECRET_TOKEN = \"private_token_2\";", - "data.json": "{\"api_key\": \"json_secret_key\"}" - }, - "docs": { - "README.md": "# Public Documentation", - "internal.md": "# Internal Secrets and Configuration" - }, - ".zed": { - "settings.json": r#"{ - "file_scan_exclusions": ["**/internal.*"], - "private_files": ["**/private.js", "**/data.json"] - }"# - } - }), - ) - .await; - - // Set global settings - cx.update(|cx| { - 
SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings(cx, |settings| { - settings.project.worktree.file_scan_exclusions = - Some(vec!["**/.git".to_string(), "**/node_modules".to_string()]); - settings.project.worktree.private_files = - Some(vec!["**/.env".to_string()].into()); - }); - }); - }); - - let project = Project::test( - fs.clone(), - [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], - cx, - ) - .await; - - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let tool = Arc::new(ReadFileTool); - - // Test reading allowed files in worktree1 - let input = json!({ - "path": "worktree1/src/main.rs" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - assert_eq!( - result.content.as_str().unwrap(), - "fn main() { println!(\"Hello from worktree1\"); }" - ); - - // Test reading private file in worktree1 should fail - let input = json!({ - "path": "worktree1/src/secret.rs" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("worktree `private_files` setting"), - "Error should mention worktree private_files setting" - ); - - // Test reading excluded file in worktree1 should fail - let input = json!({ - "path": "worktree1/tests/fixture.sql" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("worktree `file_scan_exclusions` setting"), - "Error should mention worktree file_scan_exclusions setting" - ); - - // Test reading allowed files in worktree2 - let input = json!({ - "path": "worktree2/lib/public.js" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await - .unwrap(); - - assert_eq!( - result.content.as_str().unwrap(), - "export function greet() { return 'Hello from worktree2'; }" - ); - - // Test reading private file in worktree2 should fail - let input = json!({ - "path": "worktree2/lib/private.js" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("worktree `private_files` setting"), - "Error should mention worktree private_files setting" - ); - - // Test reading excluded file in worktree2 should fail - let input = json!({ - "path": "worktree2/docs/internal.md" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("worktree `file_scan_exclusions` setting"), - "Error should mention worktree file_scan_exclusions setting" - ); - - // Test that files allowed in one worktree but not in another are 
handled correctly - // (e.g., config.toml is private in worktree1 but doesn't exist in worktree2) - let input = json!({ - "path": "worktree1/src/config.toml" - }); - - let result = cx - .update(|cx| { - tool.clone().run( - input, - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }) - .output - .await; - - assert!(result.is_err()); - assert!( - result - .unwrap_err() - .to_string() - .contains("worktree `private_files` setting"), - "Config.toml should be blocked by worktree1's private_files setting" - ); - } -} diff --git a/crates/assistant_tools/src/read_file_tool/description.md b/crates/assistant_tools/src/read_file_tool/description.md deleted file mode 100644 index 7bcebc03341541496ab090090ab7ef8beb3f2ebe..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/read_file_tool/description.md +++ /dev/null @@ -1,3 +0,0 @@ -Reads the content of the given file in the project. - -- Never attempt to read a path that hasn't been previously mentioned. diff --git a/crates/assistant_tools/src/schema.rs b/crates/assistant_tools/src/schema.rs deleted file mode 100644 index dab7384efd8ba23669db645c87dcf79e95538d3a..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/schema.rs +++ /dev/null @@ -1,60 +0,0 @@ -use anyhow::Result; -use language_model::LanguageModelToolSchemaFormat; -use schemars::{ - JsonSchema, Schema, - generate::SchemaSettings, - transform::{Transform, transform_subschemas}, -}; - -pub fn json_schema_for( - format: LanguageModelToolSchemaFormat, -) -> Result { - let schema = root_schema_for::(format); - schema_to_json(&schema, format) -} - -fn schema_to_json( - schema: &Schema, - format: LanguageModelToolSchemaFormat, -) -> Result { - let mut value = serde_json::to_value(schema)?; - assistant_tool::adapt_schema_to_format(&mut value, format)?; - Ok(value) -} - -fn root_schema_for(format: LanguageModelToolSchemaFormat) -> Schema { - let mut generator = match format { - LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(), - LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3() - .with(|settings| { - settings.meta_schema = None; - settings.inline_subschemas = true; - }) - .with_transform(ToJsonSchemaSubsetTransform) - .into_generator(), - }; - generator.root_schema_for::() -} - -#[derive(Debug, Clone)] -struct ToJsonSchemaSubsetTransform; - -impl Transform for ToJsonSchemaSubsetTransform { - fn transform(&mut self, schema: &mut Schema) { - // Ensure that the type field is not an array, this happens when we use - // Option, the type will be [T, "null"]. 
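-        // Illustration (derived from the transform below; the field name is just
-        // an example): a field declared as `Option<String>` is generated as
-        //     "some_field": { "type": ["string", "null"] }
-        // and this transform keeps only the first entry, yielding
-        //     "some_field": { "type": "string" }
-        // The `oneOf` -> `anyOf` rewrite further down exists for the same reason:
-        // the JsonSchemaSubset format does not accept `oneOf`.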
- if let Some(type_field) = schema.get_mut("type") - && let Some(types) = type_field.as_array() - && let Some(first_type) = types.first() - { - *type_field = first_type.clone(); - } - - // oneOf is not supported, use anyOf instead - if let Some(one_of) = schema.remove("oneOf") { - schema.insert("anyOf".to_string(), one_of); - } - - transform_subschemas(self, schema); - } -} diff --git a/crates/assistant_tools/src/templates.rs b/crates/assistant_tools/src/templates.rs deleted file mode 100644 index c83601199cca11e7a92f07e4159ac6241378d725..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/templates.rs +++ /dev/null @@ -1,32 +0,0 @@ -use anyhow::Result; -use handlebars::Handlebars; -use rust_embed::RustEmbed; -use serde::Serialize; -use std::sync::Arc; - -#[derive(RustEmbed)] -#[folder = "src/templates"] -#[include = "*.hbs"] -struct Assets; - -pub struct Templates(Handlebars<'static>); - -impl Templates { - pub fn new() -> Arc { - let mut handlebars = Handlebars::new(); - handlebars.register_embed_templates::().unwrap(); - handlebars.register_escape_fn(|text| text.into()); - Arc::new(Self(handlebars)) - } -} - -pub trait Template: Sized { - const TEMPLATE_NAME: &'static str; - - fn render(&self, templates: &Templates) -> Result - where - Self: Serialize + Sized, - { - Ok(templates.0.render(Self::TEMPLATE_NAME, self)?) - } -} diff --git a/crates/assistant_tools/src/terminal_tool.rs b/crates/assistant_tools/src/terminal_tool.rs deleted file mode 100644 index bc6f5f2a612bf17468577624e34d49119f3813c8..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/terminal_tool.rs +++ /dev/null @@ -1,882 +0,0 @@ -use crate::{ - schema::json_schema_for, - ui::{COLLAPSED_LINES, ToolOutputPreview}, -}; -use action_log::ActionLog; -use agent_settings; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{Tool, ToolCard, ToolResult, ToolUseStatus}; -use futures::FutureExt as _; -use gpui::{ - AnyWindowHandle, App, AppContext, Empty, Entity, EntityId, Task, TextStyleRefinement, - WeakEntity, Window, -}; -use language::LineEnding; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use markdown::{Markdown, MarkdownElement, MarkdownStyle}; -use portable_pty::{CommandBuilder, PtySize, native_pty_system}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsLocation}; -use std::{ - env, - path::{Path, PathBuf}, - process::ExitStatus, - sync::Arc, - time::{Duration, Instant}, -}; -use task::{Shell, ShellBuilder}; -use terminal::terminal_settings::TerminalSettings; -use terminal_view::TerminalView; -use theme::ThemeSettings; -use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*}; -use util::{ - ResultExt, get_default_system_shell_preferring_bash, markdown::MarkdownInlineCode, - size::format_file_size, time::duration_alt_display, -}; -use workspace::Workspace; - -const COMMAND_OUTPUT_LIMIT: usize = 16 * 1024; - -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct TerminalToolInput { - /// The one-liner command to execute. - command: String, - /// Working directory for the command. This must be one of the root directories of the project. 
- cd: String, -} - -pub struct TerminalTool; - -impl TerminalTool { - pub const NAME: &str = "terminal"; -} - -impl Tool for TerminalTool { - fn name(&self) -> String { - Self::NAME.to_string() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - true - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./terminal_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ToolTerminal - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, input: &serde_json::Value) -> String { - match serde_json::from_value::(input.clone()) { - Ok(input) => { - let mut lines = input.command.lines(); - let first_line = lines.next().unwrap_or_default(); - let remaining_line_count = lines.count(); - match remaining_line_count { - 0 => MarkdownInlineCode(first_line).to_string(), - 1 => MarkdownInlineCode(&format!( - "{} - {} more line", - first_line, remaining_line_count - )) - .to_string(), - n => MarkdownInlineCode(&format!("{} - {} more lines", first_line, n)) - .to_string(), - } - } - Err(_) => "Run terminal command".to_string(), - } - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - project: Entity, - _action_log: Entity, - _model: Arc, - window: Option, - cx: &mut App, - ) -> ToolResult { - let input: TerminalToolInput = match serde_json::from_value(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - - let working_dir = match working_dir(&input, &project, cx) { - Ok(dir) => dir, - Err(err) => return Task::ready(Err(err)).into(), - }; - - let cwd = working_dir.clone(); - let env = match &cwd { - Some(dir) => project.update(cx, |project, cx| { - let worktree = project.find_worktree(dir.as_path(), cx); - let shell = TerminalSettings::get( - worktree.as_ref().map(|(worktree, path)| SettingsLocation { - worktree_id: worktree.read(cx).id(), - path: &path, - }), - cx, - ) - .shell - .clone(); - project.directory_environment(&shell, dir.as_path().into(), cx) - }), - None => Task::ready(None).shared(), - }; - let shell = project - .update(cx, |project, cx| { - project - .remote_client() - .and_then(|r| r.read(cx).default_system_shell()) - }) - .unwrap_or_else(|| get_default_system_shell_preferring_bash()); - - let env = cx.spawn(async move |_| { - let mut env = env.await.unwrap_or_default(); - if cfg!(unix) { - env.insert("PAGER".into(), "cat".into()); - } - env - }); - - let build_cmd = { - let input_command = input.command.clone(); - move || { - ShellBuilder::new(&Shell::Program(shell)) - .redirect_stdin_to_dev_null() - .build(Some(input_command), &[]) - } - }; - - let Some(window) = window else { - // Headless setup, a test or eval. Our terminal subsystem requires a workspace, - // so bypass it and provide a convincing imitation using a pty. 
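-            // The steps below approximate what the terminal code path produces:
-            //   1. await the shell environment and build the command line;
-            //   2. spawn the command on a newly opened pty (24x80);
-            //   3. read the combined output until the child exits;
-            //   4. normalize line endings, drop control characters, and feed the
-            //      result through the same `process_content` formatting used by
-            //      the interactive path.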
- let task = cx.background_spawn(async move { - let env = env.await; - let pty_system = native_pty_system(); - let (command, args) = build_cmd(); - let mut cmd = CommandBuilder::new(command); - cmd.args(args); - for (k, v) in env { - cmd.env(k, v); - } - if let Some(cwd) = cwd { - cmd.cwd(cwd); - } - let pair = pty_system.openpty(PtySize { - rows: 24, - cols: 80, - ..Default::default() - })?; - let mut child = pair.slave.spawn_command(cmd)?; - let mut reader = pair.master.try_clone_reader()?; - drop(pair); - let mut content = String::new(); - reader.read_to_string(&mut content)?; - // Massage the pty output a bit to try to match what the terminal codepath gives us - LineEnding::normalize(&mut content); - content = content - .chars() - .filter(|c| c.is_ascii_whitespace() || !c.is_ascii_control()) - .collect(); - let content = content.trim_start().trim_start_matches("^D"); - let exit_status = child.wait()?; - let (processed_content, _) = - process_content(content, &input.command, Some(exit_status)); - Ok(processed_content.into()) - }); - return ToolResult { - output: task, - card: None, - }; - }; - - let terminal = cx.spawn({ - let project = project.downgrade(); - async move |cx| { - let (command, args) = build_cmd(); - let env = env.await; - project - .update(cx, |project, cx| { - project.create_terminal_task( - task::SpawnInTerminal { - command: Some(command), - args, - cwd, - env, - ..Default::default() - }, - cx, - ) - })? - .await - } - }); - - let command_markdown = cx.new(|cx| { - Markdown::new( - format!("```bash\n{}\n```", input.command).into(), - None, - None, - cx, - ) - }); - - let card = - cx.new(|cx| TerminalToolCard::new(command_markdown, working_dir, cx.entity_id(), cx)); - - let output = cx.spawn({ - let card = card.clone(); - async move |cx| { - let terminal = terminal.await?; - let workspace = window - .downcast::() - .and_then(|handle| handle.entity(cx).ok()) - .context("no workspace entity in root of window")?; - - let terminal_view = window.update(cx, |_, window, cx| { - cx.new(|cx| { - let mut view = TerminalView::new( - terminal.clone(), - workspace.downgrade(), - None, - project.downgrade(), - window, - cx, - ); - view.set_embedded_mode(None, cx); - view - }) - })?; - - card.update(cx, |card, _| { - card.terminal = Some(terminal_view.clone()); - card.start_instant = Instant::now(); - }) - .log_err(); - - let exit_status = terminal - .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))? 
- .await; - let (content, content_line_count) = terminal.read_with(cx, |terminal, _| { - (terminal.get_content(), terminal.total_lines()) - })?; - - let previous_len = content.len(); - let (processed_content, finished_with_empty_output) = process_content( - &content, - &input.command, - exit_status.map(portable_pty::ExitStatus::from), - ); - - card.update(cx, |card, _| { - card.command_finished = true; - card.exit_status = exit_status; - card.was_content_truncated = processed_content.len() < previous_len; - card.original_content_len = previous_len; - card.content_line_count = content_line_count; - card.finished_with_empty_output = finished_with_empty_output; - card.elapsed_time = Some(card.start_instant.elapsed()); - }) - .log_err(); - - Ok(processed_content.into()) - } - }); - - ToolResult { - output, - card: Some(card.into()), - } - } -} - -fn process_content( - content: &str, - command: &str, - exit_status: Option, -) -> (String, bool) { - let should_truncate = content.len() > COMMAND_OUTPUT_LIMIT; - - let content = if should_truncate { - let mut end_ix = COMMAND_OUTPUT_LIMIT.min(content.len()); - while !content.is_char_boundary(end_ix) { - end_ix -= 1; - } - // Don't truncate mid-line, clear the remainder of the last line - end_ix = content[..end_ix].rfind('\n').unwrap_or(end_ix); - &content[..end_ix] - } else { - content - }; - let content = content.trim(); - let is_empty = content.is_empty(); - let content = format!("```\n{content}\n```"); - let content = if should_truncate { - format!( - "Command output too long. The first {} bytes:\n\n{content}", - content.len(), - ) - } else { - content - }; - - let content = match exit_status { - Some(exit_status) if exit_status.success() => { - if is_empty { - "Command executed successfully.".to_string() - } else { - content - } - } - Some(exit_status) => { - if is_empty { - format!( - "Command \"{command}\" failed with exit code {}.", - exit_status.exit_code() - ) - } else { - format!( - "Command \"{command}\" failed with exit code {}.\n\n{content}", - exit_status.exit_code() - ) - } - } - None => { - format!( - "Command failed or was interrupted.\nPartial output captured:\n\n{}", - content, - ) - } - }; - (content, is_empty) -} - -fn working_dir( - input: &TerminalToolInput, - project: &Entity, - cx: &mut App, -) -> Result> { - let project = project.read(cx); - let cd = &input.cd; - - if cd == "." || cd.is_empty() { - // Accept "." or "" as meaning "the one worktree" if we only have one worktree. - let mut worktrees = project.worktrees(cx); - - match worktrees.next() { - Some(worktree) => { - anyhow::ensure!( - worktrees.next().is_none(), - "'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.", - ); - Ok(Some(worktree.read(cx).abs_path().to_path_buf())) - } - None => Ok(None), - } - } else { - let input_path = Path::new(cd); - - if input_path.is_absolute() { - // Absolute paths are allowed, but only if they're in one of the project's worktrees. 
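-            // For example (hypothetical paths): with a worktree rooted at
-            // `/home/me/project`, a `cd` of `/home/me/project/src` is accepted,
-            // while `/tmp` is not under any worktree and falls through to the
-            // `anyhow::bail!` below.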
- if project - .worktrees(cx) - .any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path())) - { - return Ok(Some(input_path.into())); - } - } else if let Some(worktree) = project.worktree_for_root_name(cd, cx) { - return Ok(Some(worktree.read(cx).abs_path().to_path_buf())); - } - - anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees."); - } -} - -struct TerminalToolCard { - input_command: Entity, - working_dir: Option, - entity_id: EntityId, - exit_status: Option, - terminal: Option>, - command_finished: bool, - was_content_truncated: bool, - finished_with_empty_output: bool, - content_line_count: usize, - original_content_len: usize, - preview_expanded: bool, - start_instant: Instant, - elapsed_time: Option, -} - -impl TerminalToolCard { - pub fn new( - input_command: Entity, - working_dir: Option, - entity_id: EntityId, - cx: &mut Context, - ) -> Self { - let expand_terminal_card = - agent_settings::AgentSettings::get_global(cx).expand_terminal_card; - Self { - input_command, - working_dir, - entity_id, - exit_status: None, - terminal: None, - command_finished: false, - was_content_truncated: false, - finished_with_empty_output: false, - original_content_len: 0, - content_line_count: 0, - preview_expanded: expand_terminal_card, - start_instant: Instant::now(), - elapsed_time: None, - } - } -} - -impl ToolCard for TerminalToolCard { - fn render( - &mut self, - status: &ToolUseStatus, - window: &mut Window, - _workspace: WeakEntity, - cx: &mut Context, - ) -> impl IntoElement { - let Some(terminal) = self.terminal.as_ref() else { - return Empty.into_any(); - }; - - let tool_failed = matches!(status, ToolUseStatus::Error(_)); - - let command_failed = - self.command_finished && self.exit_status.is_none_or(|code| !code.success()); - - if (tool_failed || command_failed) && self.elapsed_time.is_none() { - self.elapsed_time = Some(self.start_instant.elapsed()); - } - let time_elapsed = self - .elapsed_time - .unwrap_or_else(|| self.start_instant.elapsed()); - - let header_bg = cx - .theme() - .colors() - .element_background - .blend(cx.theme().colors().editor_foreground.opacity(0.025)); - - let border_color = cx.theme().colors().border.opacity(0.6); - - let path = self - .working_dir - .as_ref() - .cloned() - .or_else(|| env::current_dir().ok()) - .map(|path| path.display().to_string()) - .unwrap_or_else(|| "current directory".to_string()); - - let header = h_flex() - .flex_none() - .gap_1() - .justify_between() - .rounded_t_md() - .child( - div() - .id(("command-target-path", self.entity_id)) - .w_full() - .max_w_full() - .overflow_x_scroll() - .child( - Label::new(path) - .buffer_font(cx) - .size(LabelSize::XSmall) - .color(Color::Muted), - ), - ) - .when(!self.command_finished, |header| { - header.child( - Icon::new(IconName::ArrowCircle) - .size(IconSize::XSmall) - .color(Color::Info) - .with_rotate_animation(2), - ) - }) - .when(tool_failed || command_failed, |header| { - header.child( - div() - .id(("terminal-tool-error-code-indicator", self.entity_id)) - .child( - Icon::new(IconName::Close) - .size(IconSize::Small) - .color(Color::Error), - ) - .when(command_failed && self.exit_status.is_some(), |this| { - this.tooltip(Tooltip::text(format!( - "Exited with code {}", - self.exit_status - .and_then(|status| status.code()) - .unwrap_or(-1), - ))) - }) - .when( - !command_failed && tool_failed && status.error().is_some(), - |this| { - this.tooltip(Tooltip::text(format!( - "Error: {}", - status.error().unwrap(), - ))) - }, - ), - ) - }) - 
.when(self.was_content_truncated, |header| { - let tooltip = if self.content_line_count + 10 > terminal::MAX_SCROLL_HISTORY_LINES { - "Output exceeded terminal max lines and was \ - truncated, the model received the first 16 KB." - .to_string() - } else { - format!( - "Output is {} long, to avoid unexpected token usage, \ - only 16 KB was sent back to the model.", - format_file_size(self.original_content_len as u64, true), - ) - }; - header.child( - h_flex() - .id(("terminal-tool-truncated-label", self.entity_id)) - .tooltip(Tooltip::text(tooltip)) - .gap_1() - .child( - Icon::new(IconName::Info) - .size(IconSize::XSmall) - .color(Color::Ignored), - ) - .child( - Label::new("Truncated") - .color(Color::Muted) - .size(LabelSize::Small), - ), - ) - }) - .when(time_elapsed > Duration::from_secs(10), |header| { - header.child( - Label::new(format!("({})", duration_alt_display(time_elapsed))) - .buffer_font(cx) - .color(Color::Muted) - .size(LabelSize::Small), - ) - }) - .when(!self.finished_with_empty_output, |header| { - header.child( - Disclosure::new( - ("terminal-tool-disclosure", self.entity_id), - self.preview_expanded, - ) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .on_click(cx.listener( - move |this, _event, _window, _cx| { - this.preview_expanded = !this.preview_expanded; - }, - )), - ) - }); - - v_flex() - .mb_2() - .border_1() - .when(tool_failed || command_failed, |card| card.border_dashed()) - .border_color(border_color) - .rounded_lg() - .overflow_hidden() - .child( - v_flex() - .p_2() - .gap_0p5() - .bg(header_bg) - .text_xs() - .child(header) - .child( - MarkdownElement::new( - self.input_command.clone(), - markdown_style(window, cx), - ) - .code_block_renderer( - markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: true, - border: false, - }, - ), - ), - ) - .when( - self.preview_expanded && !self.finished_with_empty_output, - |this| { - this.child( - div() - .pt_2() - .border_t_1() - .when(tool_failed || command_failed, |card| card.border_dashed()) - .border_color(border_color) - .bg(cx.theme().colors().editor_background) - .rounded_b_md() - .text_ui_sm(cx) - .child({ - let content_mode = terminal.read(cx).content_mode(window, cx); - - if content_mode.is_scrollable() { - div().h_72().child(terminal.clone()).into_any_element() - } else { - ToolOutputPreview::new( - terminal.clone().into_any_element(), - terminal.entity_id(), - ) - .with_total_lines(self.content_line_count) - .toggle_state(!content_mode.is_limited()) - .on_toggle({ - let terminal = terminal.clone(); - move |is_expanded, _, cx| { - terminal.update(cx, |terminal, cx| { - terminal.set_embedded_mode( - if is_expanded { - None - } else { - Some(COLLAPSED_LINES) - }, - cx, - ); - }); - } - }) - .into_any_element() - } - }), - ) - }, - ) - .into_any() - } -} - -fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle { - let theme_settings = ThemeSettings::get_global(cx); - let buffer_font_size = TextSize::Default.rems(cx); - let mut text_style = window.text_style(); - - text_style.refine(&TextStyleRefinement { - font_family: Some(theme_settings.buffer_font.family.clone()), - font_fallbacks: theme_settings.buffer_font.fallbacks.clone(), - font_features: Some(theme_settings.buffer_font.features.clone()), - font_size: Some(buffer_font_size.into()), - color: Some(cx.theme().colors().text), - ..Default::default() - }); - - MarkdownStyle { - base_text_style: text_style.clone(), - selection_background_color: 
cx.theme().colors().element_selection_background, - ..Default::default() - } -} - -#[cfg(test)] -mod tests { - use editor::EditorSettings; - use fs::RealFs; - use gpui::{BackgroundExecutor, TestAppContext}; - use language_model::fake_provider::FakeLanguageModel; - use pretty_assertions::assert_eq; - use serde_json::json; - use settings::{Settings, SettingsStore}; - use terminal::terminal_settings::TerminalSettings; - use util::{ResultExt as _, test::TempTree}; - - use super::*; - - fn init_test(executor: &BackgroundExecutor, cx: &mut TestAppContext) { - zlog::init_test(); - - executor.allow_parking(); - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - Project::init_settings(cx); - workspace::init_settings(cx); - theme::init(theme::LoadThemes::JustBase, cx); - TerminalSettings::register(cx); - EditorSettings::register(cx); - }); - } - - #[gpui::test] - async fn test_interactive_command(executor: BackgroundExecutor, cx: &mut TestAppContext) { - if cfg!(windows) { - return; - } - init_test(&executor, cx); - - let fs = Arc::new(RealFs::new(None, executor)); - let tree = TempTree::new(json!({ - "project": {}, - })); - let project: Entity = - Project::test(fs, [tree.path().join("project").as_path()], cx).await; - let action_log = cx.update(|cx| cx.new(|_| ActionLog::new(project.clone()))); - let model = Arc::new(FakeLanguageModel::default()); - - let input = TerminalToolInput { - command: "cat".to_owned(), - cd: tree - .path() - .join("project") - .as_path() - .to_string_lossy() - .to_string(), - }; - let result = cx.update(|cx| { - TerminalTool::run( - Arc::new(TerminalTool), - serde_json::to_value(input).unwrap(), - Arc::default(), - project.clone(), - action_log.clone(), - model, - None, - cx, - ) - }); - - let output = result.output.await.log_err().unwrap().content; - assert_eq!(output.as_str().unwrap(), "Command executed successfully."); - } - - #[gpui::test] - async fn test_working_directory(executor: BackgroundExecutor, cx: &mut TestAppContext) { - if cfg!(windows) { - return; - } - init_test(&executor, cx); - - let fs = Arc::new(RealFs::new(None, executor)); - let tree = TempTree::new(json!({ - "project": {}, - "other-project": {}, - })); - let project: Entity = - Project::test(fs, [tree.path().join("project").as_path()], cx).await; - let action_log = cx.update(|cx| cx.new(|_| ActionLog::new(project.clone()))); - let model = Arc::new(FakeLanguageModel::default()); - - let check = |input, expected, cx: &mut App| { - let headless_result = TerminalTool::run( - Arc::new(TerminalTool), - serde_json::to_value(input).unwrap(), - Arc::default(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ); - cx.spawn(async move |_| { - let output = headless_result.output.await.map(|output| output.content); - assert_eq!( - output - .ok() - .and_then(|content| content.as_str().map(ToString::to_string)), - expected - ); - }) - }; - - cx.update(|cx| { - check( - TerminalToolInput { - command: "pwd".into(), - cd: ".".into(), - }, - Some(format!( - "```\n{}\n```", - tree.path().join("project").display() - )), - cx, - ) - }) - .await; - - cx.update(|cx| { - check( - TerminalToolInput { - command: "pwd".into(), - cd: "other-project".into(), - }, - None, // other-project is a dir, but *not* a worktree (yet) - cx, - ) - }) - .await; - - // Absolute path above the worktree root - cx.update(|cx| { - check( - TerminalToolInput { - command: "pwd".into(), - cd: tree.path().to_string_lossy().into(), - }, - None, - cx, - ) - 
}) - .await; - - project - .update(cx, |project, cx| { - project.create_worktree(tree.path().join("other-project"), true, cx) - }) - .await - .unwrap(); - - cx.update(|cx| { - check( - TerminalToolInput { - command: "pwd".into(), - cd: "other-project".into(), - }, - Some(format!( - "```\n{}\n```", - tree.path().join("other-project").display() - )), - cx, - ) - }) - .await; - - cx.update(|cx| { - check( - TerminalToolInput { - command: "pwd".into(), - cd: ".".into(), - }, - None, - cx, - ) - }) - .await; - } -} diff --git a/crates/assistant_tools/src/terminal_tool/description.md b/crates/assistant_tools/src/terminal_tool/description.md deleted file mode 100644 index 3cb5d87d163b3919abafa899ed2fbdba67500773..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/terminal_tool/description.md +++ /dev/null @@ -1,11 +0,0 @@ -Executes a shell one-liner and returns the combined output. - -This tool spawns a process using the user's shell, reads from stdout and stderr (preserving the order of writes), and returns a string with the combined output result. - -The output results will be shown to the user already, only list it again if necessary, avoid being redundant. - -Make sure you use the `cd` parameter to navigate to one of the root directories of the project. NEVER do it as part of the `command` itself, otherwise it will error. - -Do not use this tool for commands that run indefinitely, such as servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers that don't terminate on their own. - -Remember that each invocation of this tool will spawn a new shell process, so you can't rely on any state from previous invocations. diff --git a/crates/assistant_tools/src/thinking_tool.rs b/crates/assistant_tools/src/thinking_tool.rs deleted file mode 100644 index 17ce4afc2eeeff8c6f37834cd9e8c4ff71e7cd70..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/thinking_tool.rs +++ /dev/null @@ -1,69 +0,0 @@ -use std::sync::Arc; - -use crate::schema::json_schema_for; -use action_log::ActionLog; -use anyhow::{Result, anyhow}; -use assistant_tool::{Tool, ToolResult}; -use gpui::{AnyWindowHandle, App, Entity, Task}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use ui::IconName; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct ThinkingToolInput { - /// Content to think about. This should be a description of what to think about or - /// a problem to solve. - content: String, -} - -pub struct ThinkingTool; - -impl Tool for ThinkingTool { - fn name(&self) -> String { - "thinking".to_string() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - include_str!("./thinking_tool/description.md").to_string() - } - - fn icon(&self) -> IconName { - IconName::ToolThink - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - "Thinking".to_string() - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - _cx: &mut App, - ) -> ToolResult { - // This tool just "thinks out loud" and doesn't perform any actions. 
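-        // Parsing is the only real work here: surface an error for malformed
-        // input, otherwise acknowledge with a fixed "Finished thinking." result.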
- Task::ready(match serde_json::from_value::(input) { - Ok(_input) => Ok("Finished thinking.".to_string().into()), - Err(err) => Err(anyhow!(err)), - }) - .into() - } -} diff --git a/crates/assistant_tools/src/thinking_tool/description.md b/crates/assistant_tools/src/thinking_tool/description.md deleted file mode 100644 index b625d22f321fa427945fdb9c42aaaed9ab86f6be..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/thinking_tool/description.md +++ /dev/null @@ -1 +0,0 @@ -A tool for thinking through problems, brainstorming ideas, or planning without executing any actions. Use this tool when you need to work through complex problems, develop strategies, or outline approaches before taking action. diff --git a/crates/assistant_tools/src/ui.rs b/crates/assistant_tools/src/ui.rs deleted file mode 100644 index 793427385456939eb1a7070fff5bba928a6c2643..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/ui.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod tool_call_card_header; -mod tool_output_preview; - -pub use tool_call_card_header::*; -pub use tool_output_preview::*; diff --git a/crates/assistant_tools/src/ui/tool_call_card_header.rs b/crates/assistant_tools/src/ui/tool_call_card_header.rs deleted file mode 100644 index b41f19432f99685cf745f684228169b53939fffb..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/ui/tool_call_card_header.rs +++ /dev/null @@ -1,131 +0,0 @@ -use gpui::{Animation, AnimationExt, AnyElement, App, IntoElement, pulsating_between}; -use std::time::Duration; -use ui::{Tooltip, prelude::*}; - -/// A reusable header component for tool call cards. -#[derive(IntoElement)] -pub struct ToolCallCardHeader { - icon: IconName, - primary_text: SharedString, - secondary_text: Option, - code_path: Option, - disclosure_slot: Option, - is_loading: bool, - error: Option, -} - -impl ToolCallCardHeader { - pub fn new(icon: IconName, primary_text: impl Into) -> Self { - Self { - icon, - primary_text: primary_text.into(), - secondary_text: None, - code_path: None, - disclosure_slot: None, - is_loading: false, - error: None, - } - } - - pub fn with_secondary_text(mut self, text: impl Into) -> Self { - self.secondary_text = Some(text.into()); - self - } - - pub fn with_code_path(mut self, text: impl Into) -> Self { - self.code_path = Some(text.into()); - self - } - - pub fn disclosure_slot(mut self, element: impl IntoElement) -> Self { - self.disclosure_slot = Some(element.into_any_element()); - self - } - - pub fn loading(mut self) -> Self { - self.is_loading = true; - self - } - - pub fn with_error(mut self, error: impl Into) -> Self { - self.error = Some(error.into()); - self - } -} - -impl RenderOnce for ToolCallCardHeader { - fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { - let font_size = rems(0.8125); - let line_height = window.line_height(); - - let secondary_text = self.secondary_text; - let code_path = self.code_path; - - let bullet_divider = || { - div() - .size(px(3.)) - .rounded_full() - .bg(cx.theme().colors().text) - }; - - h_flex() - .id("tool-label-container") - .gap_2() - .max_w_full() - .overflow_x_scroll() - .opacity(0.8) - .child( - h_flex() - .h(line_height) - .gap_1p5() - .text_size(font_size) - .child( - h_flex().h(line_height).justify_center().child( - Icon::new(self.icon) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - .map(|this| { - if let Some(error) = &self.error { - this.child(format!("{} failed", self.primary_text)).child( - IconButton::new("error_info", 
IconName::Warning) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Warning) - .tooltip(Tooltip::text(error.clone())), - ) - } else { - this.child(self.primary_text.clone()) - } - }) - .when_some(secondary_text, |this, secondary_text| { - this.child(bullet_divider()) - .child(div().text_size(font_size).child(secondary_text)) - }) - .when_some(code_path, |this, code_path| { - this.child(bullet_divider()) - .child(Label::new(code_path).size(LabelSize::Small).inline_code(cx)) - }) - .with_animation( - "loading-label", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.6, 1.)), - move |this, delta| { - if self.is_loading { - this.opacity(delta) - } else { - this - } - }, - ), - ) - .when_some(self.disclosure_slot, |container, disclosure_slot| { - container - .group("disclosure") - .justify_between() - .child(div().visible_on_hover("disclosure").child(disclosure_slot)) - }) - } -} diff --git a/crates/assistant_tools/src/ui/tool_output_preview.rs b/crates/assistant_tools/src/ui/tool_output_preview.rs deleted file mode 100644 index a672bb8b99daa1fd776f59c4e8be789b8e25240c..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/ui/tool_output_preview.rs +++ /dev/null @@ -1,115 +0,0 @@ -use gpui::{AnyElement, EntityId, prelude::*}; -use ui::{Tooltip, prelude::*}; - -#[derive(IntoElement)] -pub struct ToolOutputPreview -where - F: Fn(bool, &mut Window, &mut App) + 'static, -{ - content: AnyElement, - entity_id: EntityId, - full_height: bool, - total_lines: usize, - collapsed_fade: bool, - on_toggle: Option, -} - -pub const COLLAPSED_LINES: usize = 10; - -impl ToolOutputPreview -where - F: Fn(bool, &mut Window, &mut App) + 'static, -{ - pub fn new(content: AnyElement, entity_id: EntityId) -> Self { - Self { - content, - entity_id, - full_height: true, - total_lines: 0, - collapsed_fade: false, - on_toggle: None, - } - } - - pub fn with_total_lines(mut self, total_lines: usize) -> Self { - self.total_lines = total_lines; - self - } - - pub fn toggle_state(mut self, full_height: bool) -> Self { - self.full_height = full_height; - self - } - - pub fn with_collapsed_fade(mut self) -> Self { - self.collapsed_fade = true; - self - } - - pub fn on_toggle(mut self, listener: F) -> Self { - self.on_toggle = Some(listener); - self - } -} - -impl RenderOnce for ToolOutputPreview -where - F: Fn(bool, &mut Window, &mut App) + 'static, -{ - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - if self.total_lines <= COLLAPSED_LINES { - return self.content; - } - let border_color = cx.theme().colors().border.opacity(0.6); - - let (icon, tooltip_label) = if self.full_height { - (IconName::ChevronUp, "Collapse") - } else { - (IconName::ChevronDown, "Expand") - }; - - let gradient_overlay = - if self.collapsed_fade && !self.full_height { - Some(div().absolute().bottom_5().left_0().w_full().h_2_5().bg( - gpui::linear_gradient( - 0., - gpui::linear_color_stop(cx.theme().colors().editor_background, 0.), - gpui::linear_color_stop( - cx.theme().colors().editor_background.opacity(0.), - 1., - ), - ), - )) - } else { - None - }; - - v_flex() - .relative() - .child(self.content) - .children(gradient_overlay) - .child( - h_flex() - .id(("expand-button", self.entity_id)) - .flex_none() - .cursor_pointer() - .h_5() - .justify_center() - .border_t_1() - .rounded_b_md() - .border_color(border_color) - .bg(cx.theme().colors().editor_background) - .hover(|style| 
style.bg(cx.theme().colors().element_hover.opacity(0.1))) - .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) - .tooltip(Tooltip::text(tooltip_label)) - .when_some(self.on_toggle, |this, on_toggle| { - this.on_click({ - move |_, window, cx| { - on_toggle(!self.full_height, window, cx); - } - }) - }), - ) - .into_any() - } -} diff --git a/crates/assistant_tools/src/web_search_tool.rs b/crates/assistant_tools/src/web_search_tool.rs deleted file mode 100644 index dbcca0a1f6f2d5f679fd240a5bfe64c6c9705256..0000000000000000000000000000000000000000 --- a/crates/assistant_tools/src/web_search_tool.rs +++ /dev/null @@ -1,327 +0,0 @@ -use std::{sync::Arc, time::Duration}; - -use crate::schema::json_schema_for; -use crate::ui::ToolCallCardHeader; -use action_log::ActionLog; -use anyhow::{Context as _, Result, anyhow}; -use assistant_tool::{ - Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus, -}; -use cloud_llm_client::{WebSearchResponse, WebSearchResult}; -use futures::{Future, FutureExt, TryFutureExt}; -use gpui::{ - AnyWindowHandle, App, AppContext, Context, Entity, IntoElement, Task, WeakEntity, Window, -}; -use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::Project; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use ui::{IconName, Tooltip, prelude::*}; -use web_search::WebSearchRegistry; -use workspace::Workspace; - -#[derive(Debug, Serialize, Deserialize, JsonSchema)] -pub struct WebSearchToolInput { - /// The search term or question to query on the web. - query: String, -} - -pub struct WebSearchTool; - -impl Tool for WebSearchTool { - fn name(&self) -> String { - "web_search".into() - } - - fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false - } - - fn may_perform_edits(&self) -> bool { - false - } - - fn description(&self) -> String { - "Search the web for information using your query. Use this when you need real-time information, facts, or data that might not be in your training. 
Results will include snippets and links from relevant web pages.".into() - } - - fn icon(&self) -> IconName { - IconName::ToolWeb - } - - fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { - json_schema_for::(format) - } - - fn ui_text(&self, _input: &serde_json::Value) -> String { - "Searching the Web".to_string() - } - - fn run( - self: Arc, - input: serde_json::Value, - _request: Arc, - _project: Entity, - _action_log: Entity, - _model: Arc, - _window: Option, - cx: &mut App, - ) -> ToolResult { - let input = match serde_json::from_value::(input) { - Ok(input) => input, - Err(err) => return Task::ready(Err(anyhow!(err))).into(), - }; - let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else { - return Task::ready(Err(anyhow!("Web search is not available."))).into(); - }; - - let search_task = provider.search(input.query, cx).map_err(Arc::new).shared(); - let output = cx.background_spawn({ - let search_task = search_task.clone(); - async move { - let response = search_task.await.map_err(|err| anyhow!(err))?; - Ok(ToolResultOutput { - content: ToolResultContent::Text( - serde_json::to_string(&response) - .context("Failed to serialize search results")?, - ), - output: Some(serde_json::to_value(response)?), - }) - } - }); - - ToolResult { - output, - card: Some(cx.new(|cx| WebSearchToolCard::new(search_task, cx)).into()), - } - } - - fn deserialize_card( - self: Arc, - output: serde_json::Value, - _project: Entity, - _window: &mut Window, - cx: &mut App, - ) -> Option { - let output = serde_json::from_value::(output).ok()?; - let card = cx.new(|cx| WebSearchToolCard::new(Task::ready(Ok(output)), cx)); - Some(card.into()) - } -} - -#[derive(RegisterComponent)] -struct WebSearchToolCard { - response: Option>, - _task: Task<()>, -} - -impl WebSearchToolCard { - fn new( - search_task: impl 'static + Future>>, - cx: &mut Context, - ) -> Self { - let _task = cx.spawn(async move |this, cx| { - let response = search_task.await.map_err(|err| anyhow!(err)); - this.update(cx, |this, cx| { - this.response = Some(response); - cx.notify(); - }) - .ok(); - }); - - Self { - response: None, - _task, - } - } -} - -impl ToolCard for WebSearchToolCard { - fn render( - &mut self, - _status: &ToolUseStatus, - _window: &mut Window, - _workspace: WeakEntity, - cx: &mut Context, - ) -> impl IntoElement { - let icon = IconName::ToolWeb; - - let header = match self.response.as_ref() { - Some(Ok(response)) => { - let text: SharedString = if response.results.len() == 1 { - "1 result".into() - } else { - format!("{} results", response.results.len()).into() - }; - ToolCallCardHeader::new(icon, "Searched the Web").with_secondary_text(text) - } - Some(Err(error)) => { - ToolCallCardHeader::new(icon, "Web Search").with_error(error.to_string()) - } - None => ToolCallCardHeader::new(icon, "Searching the Web").loading(), - }; - - let content = self.response.as_ref().and_then(|response| match response { - Ok(response) => Some( - v_flex() - .overflow_hidden() - .ml_1p5() - .pl(px(5.)) - .border_l_1() - .border_color(cx.theme().colors().border_variant) - .gap_1() - .children(response.results.iter().enumerate().map(|(index, result)| { - let title = result.title.clone(); - let url = SharedString::from(result.url.clone()); - - Button::new(("result", index), title) - .label_size(LabelSize::Small) - .color(Color::Muted) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) - .truncate(true) - .tooltip({ - let url = url.clone(); - move |window, 
cx| { - Tooltip::with_meta( - "Web Search Result", - None, - url.clone(), - window, - cx, - ) - } - }) - .on_click(move |_, _, cx| cx.open_url(&url)) - })) - .into_any(), - ), - Err(_) => None, - }); - - v_flex().mb_3().gap_1().child(header).children(content) - } -} - -impl Component for WebSearchToolCard { - fn scope() -> ComponentScope { - ComponentScope::Agent - } - - fn preview(window: &mut Window, cx: &mut App) -> Option { - let in_progress_search = cx.new(|cx| WebSearchToolCard { - response: None, - _task: cx.spawn(async move |_this, cx| { - loop { - cx.background_executor() - .timer(Duration::from_secs(60)) - .await - } - }), - }); - - let successful_search = cx.new(|_cx| WebSearchToolCard { - response: Some(Ok(example_search_response())), - _task: Task::ready(()), - }); - - let error_search = cx.new(|_cx| WebSearchToolCard { - response: Some(Err(anyhow!("Failed to resolve https://google.com"))), - _task: Task::ready(()), - }); - - Some( - v_flex() - .gap_6() - .children(vec![example_group(vec![ - single_example( - "In Progress", - div() - .size_full() - .child(in_progress_search.update(cx, |tool, cx| { - tool.render( - &ToolUseStatus::Pending, - window, - WeakEntity::new_invalid(), - cx, - ) - .into_any_element() - })) - .into_any_element(), - ), - single_example( - "Successful", - div() - .size_full() - .child(successful_search.update(cx, |tool, cx| { - tool.render( - &ToolUseStatus::Finished("".into()), - window, - WeakEntity::new_invalid(), - cx, - ) - .into_any_element() - })) - .into_any_element(), - ), - single_example( - "Error", - div() - .size_full() - .child(error_search.update(cx, |tool, cx| { - tool.render( - &ToolUseStatus::Error("".into()), - window, - WeakEntity::new_invalid(), - cx, - ) - .into_any_element() - })) - .into_any_element(), - ), - ])]) - .into_any_element(), - ) - } -} - -fn example_search_response() -> WebSearchResponse { - WebSearchResponse { - results: vec![ - WebSearchResult { - title: "Alo".to_string(), - url: "https://www.google.com/maps/search/Alo%2C+Toronto%2C+Canada".to_string(), - text: "Alo is a popular restaurant in Toronto.".to_string(), - }, - WebSearchResult { - title: "Alo".to_string(), - url: "https://www.google.com/maps/search/Alo%2C+Toronto%2C+Canada".to_string(), - text: "Information about Alo restaurant in Toronto.".to_string(), - }, - WebSearchResult { - title: "Edulis".to_string(), - url: "https://www.google.com/maps/search/Edulis%2C+Toronto%2C+Canada".to_string(), - text: "Details about Edulis restaurant in Toronto.".to_string(), - }, - WebSearchResult { - title: "Sushi Masaki Saito".to_string(), - url: "https://www.google.com/maps/search/Sushi+Masaki+Saito%2C+Toronto%2C+Canada" - .to_string(), - text: "Information about Sushi Masaki Saito in Toronto.".to_string(), - }, - WebSearchResult { - title: "Shoushin".to_string(), - url: "https://www.google.com/maps/search/Shoushin%2C+Toronto%2C+Canada".to_string(), - text: "Details about Shoushin restaurant in Toronto.".to_string(), - }, - WebSearchResult { - title: "Restaurant 20 Victoria".to_string(), - url: - "https://www.google.com/maps/search/Restaurant+20+Victoria%2C+Toronto%2C+Canada" - .to_string(), - text: "Information about Restaurant 20 Victoria in Toronto.".to_string(), - }, - ], - } -} diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index 7f2fed80e2315e51fca7d8477b04885998336632..2aee764007a791176c6e41cb77f6efaf19aa3dc4 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -21,13 +21,12 @@ gpui.workspace = true denoise = { path = "../denoise" } 
log.workspace = true parking_lot.workspace = true -rodio = { workspace = true, features = [ "wav", "playback", "wav_output" ] } +rodio.workspace = true serde.workspace = true settings.workspace = true smol.workspace = true thiserror.workspace = true util.workspace = true -workspace-hack.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } diff --git a/crates/audio/src/rodio_ext.rs b/crates/audio/src/rodio_ext.rs index af4cc89252dfdc1498471ec7ac09b56d59b62eca..ab74c59fe6661cecab7ec9611dd0b9aa9e7f5aa7 100644 --- a/crates/audio/src/rodio_ext.rs +++ b/crates/audio/src/rodio_ext.rs @@ -433,7 +433,7 @@ where /// Stores already emitted samples, once its full we call the callback. buffer: [Sample; N], /// Next free element in buffer. If this is equal to the buffer length - /// we have no more free lements. + /// we have no more free elements. free: usize, } diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 21df028a88f027b1ce3796ef3e04998ca205ce51..08db9f8a97bb0783da987f84991ad1aaa62c2141 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -27,7 +27,6 @@ settings.workspace = true smol.workspace = true tempfile.workspace = true workspace.workspace = true -workspace-hack.workspace = true [target.'cfg(not(target_os = "windows"))'.dependencies] which.workspace = true diff --git a/crates/auto_update_helper/Cargo.toml b/crates/auto_update_helper/Cargo.toml index 6581de48d27f9975db6ddfe7ad63f49a55e4c22e..4b4a0126a4c178a76fd2a03b7596d42e19aa9d23 100644 --- a/crates/auto_update_helper/Cargo.toml +++ b/crates/auto_update_helper/Cargo.toml @@ -17,7 +17,6 @@ doctest = false anyhow.workspace = true log.workspace = true simplelog.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true diff --git a/crates/auto_update_ui/Cargo.toml b/crates/auto_update_ui/Cargo.toml index 6a8ba02b82683406e1f9bb3a2c5430fe614820df..0e31f94f5ee268cdc3274dea747bd0b05d9c80eb 100644 --- a/crates/auto_update_ui/Cargo.toml +++ b/crates/auto_update_ui/Cargo.toml @@ -25,4 +25,3 @@ serde_json.workspace = true smol.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/aws_http_client/Cargo.toml b/crates/aws_http_client/Cargo.toml index 2749286d4c1361d9dbdb50d6566e3b4043f97b2e..24569a764dc4ab466c62ed3543df484327d1506d 100644 --- a/crates/aws_http_client/Cargo.toml +++ b/crates/aws_http_client/Cargo.toml @@ -18,4 +18,3 @@ default = [] aws-smithy-runtime-api.workspace = true aws-smithy-types.workspace = true http_client.workspace = true -workspace-hack.workspace = true diff --git a/crates/bedrock/Cargo.toml b/crates/bedrock/Cargo.toml index 3000af50bb71be18784a8e6a8f6da0ca8a66d7f9..f8f6fa46017309f3861ef2ec42f98d740cae7200 100644 --- a/crates/bedrock/Cargo.toml +++ b/crates/bedrock/Cargo.toml @@ -25,4 +25,3 @@ serde.workspace = true serde_json.workspace = true strum.workspace = true thiserror.workspace = true -workspace-hack.workspace = true diff --git a/crates/breadcrumbs/Cargo.toml b/crates/breadcrumbs/Cargo.toml index c25cfc3c86f26a72b3af37246ab30a175a68969a..16d0ff10e1cfef058422ed79934bec53f74c4804 100644 --- a/crates/breadcrumbs/Cargo.toml +++ b/crates/breadcrumbs/Cargo.toml @@ -21,7 +21,6 @@ theme.workspace = true ui.workspace = true workspace.workspace 
= true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index a6b27476fe36b1143103e1acd035bda6cda15132..08c0915c58ae50741238574cec5b6f2474d06eb8 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -119,21 +119,19 @@ impl Render for Breadcrumbs { } } }) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if let Some(editor) = editor.upgrade() { let focus_handle = editor.read(cx).focus_handle(cx); Tooltip::for_action_in( "Show Symbol Outline", &zed_actions::outline::ToggleOutline, &focus_handle, - window, cx, ) } else { Tooltip::for_action( "Show Symbol Outline", &zed_actions::outline::ToggleOutline, - window, cx, ) } diff --git a/crates/buffer_diff/Cargo.toml b/crates/buffer_diff/Cargo.toml index 3d6c2a24e9de8dfb6e5fab7cff250fb3f26ec24d..1be21f3a0f1ef7aafa222a611d858f8adb097454 100644 --- a/crates/buffer_diff/Cargo.toml +++ b/crates/buffer_diff/Cargo.toml @@ -27,7 +27,6 @@ rope.workspace = true sum_tree.workspace = true text.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 1787f616ad365175de352e3eeeede3e1749dede4..d6ae5545200bb47976554814e346be3039fa276e 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -85,7 +85,7 @@ struct PendingHunk { new_status: DiffHunkSecondaryStatus, } -#[derive(Debug, Default, Clone)] +#[derive(Debug, Clone)] pub struct DiffHunkSummary { buffer_range: Range, } @@ -114,15 +114,17 @@ impl sum_tree::Summary for DiffHunkSummary { type Context<'a> = &'a text::BufferSnapshot; fn zero(_cx: Self::Context<'_>) -> Self { - Default::default() + DiffHunkSummary { + buffer_range: Anchor::MIN..Anchor::MIN, + } } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { - self.buffer_range.start = self + self.buffer_range.start = *self .buffer_range .start .min(&other.buffer_range.start, buffer); - self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer); + self.buffer_range.end = *self.buffer_range.end.max(&other.buffer_range.end, buffer); } } @@ -937,7 +939,9 @@ impl BufferDiff { pub fn clear_pending_hunks(&mut self, cx: &mut Context) { if self.secondary_diff.is_some() { - self.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary::default()); + self.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary { + buffer_range: Anchor::MIN..Anchor::MIN, + }); cx.emit(BufferDiffEvent::DiffChanged { changed_range: Some(Anchor::MIN..Anchor::MAX), }); @@ -1068,8 +1072,8 @@ impl BufferDiff { self.range_to_hunk_range(secondary_changed_range, buffer, cx) { if let Some(range) = &mut changed_range { - range.start = secondary_hunk_range.start.min(&range.start, buffer); - range.end = secondary_hunk_range.end.max(&range.end, buffer); + range.start = *secondary_hunk_range.start.min(&range.start, buffer); + range.end = *secondary_hunk_range.end.max(&range.end, buffer); } else { changed_range = Some(secondary_hunk_range); } @@ -1083,8 +1087,8 @@ impl BufferDiff { if let Some((first, last)) = state.pending_hunks.first().zip(state.pending_hunks.last()) { if let Some(range) = &mut changed_range { - range.start = range.start.min(&first.buffer_range.start, buffer); - range.end = range.end.max(&last.buffer_range.end, 
buffer); + range.start = *range.start.min(&first.buffer_range.start, buffer); + range.end = *range.end.max(&last.buffer_range.end, buffer); } else { changed_range = Some(first.buffer_range.start..last.buffer_range.end); } @@ -1158,34 +1162,22 @@ impl BufferDiff { self.hunks_intersecting_range(start..end, buffer, cx) } - pub fn set_base_text_buffer( - &mut self, - base_buffer: Entity, - buffer: text::BufferSnapshot, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - let base_buffer = base_buffer.read(cx); - let language_registry = base_buffer.language_registry(); - let base_buffer = base_buffer.snapshot(); - self.set_base_text(base_buffer, language_registry, buffer, cx) - } - /// Used in cases where the change set isn't derived from git. pub fn set_base_text( &mut self, - base_buffer: language::BufferSnapshot, + base_text: Option>, + language: Option>, language_registry: Option>, buffer: text::BufferSnapshot, cx: &mut Context, ) -> oneshot::Receiver<()> { let (tx, rx) = oneshot::channel(); let this = cx.weak_entity(); - let base_text = Arc::new(base_buffer.text()); let snapshot = BufferDiffSnapshot::new_with_base_text( buffer.clone(), - Some(base_text), - base_buffer.language().cloned(), + base_text, + language, language_registry, cx, ); @@ -1368,7 +1360,7 @@ mod tests { use gpui::TestAppContext; use pretty_assertions::{assert_eq, assert_ne}; use rand::{Rng as _, rngs::StdRng}; - use text::{Buffer, BufferId, Rope}; + use text::{Buffer, BufferId, ReplicaId, Rope}; use unindent::Unindent as _; use util::test::marked_text_ranges; @@ -1393,7 +1385,7 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), @@ -1467,7 +1459,7 @@ mod tests { " .unindent(); - let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let mut uncommitted_diff = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); @@ -1536,7 +1528,7 @@ mod tests { " .unindent(); - let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let diff = cx .update(|cx| { BufferDiffSnapshot::new_with_base_text( @@ -1799,7 +1791,7 @@ mod tests { for example in table { let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false); - let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); let hunk_range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end); @@ -1872,7 +1864,11 @@ mod tests { " .unindent(); - let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text.clone()); + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text.clone(), + ); let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); let unstaged_diff = cx.new(|cx| { @@ -1945,7 +1941,7 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1); + let mut 
buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1); let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx)); let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml index 1d5fbccb4644d9f168a2afd321a205f01c8f9cdc..ff034f914b0be44e6ec9f6475881ed79c368cd8a 100644 --- a/crates/call/Cargo.toml +++ b/crates/call/Cargo.toml @@ -41,7 +41,6 @@ telemetry.workspace = true util.workspace = true gpui_tokio.workspace = true livekit_client.workspace = true -workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/channel/Cargo.toml b/crates/channel/Cargo.toml index ab6e1dfc2b8dd0f89c4e6cd03e5ee66840003d6a..43af27ac8b6f21d4e1e16c9102da3de9c0585db4 100644 --- a/crates/channel/Cargo.toml +++ b/crates/channel/Cargo.toml @@ -31,7 +31,6 @@ settings.workspace = true text.workspace = true time.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 828248b330b6ef6cfe0e13eab426de2900d364b2..efa0850753887c2116ee7916727a870a3528b627 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -9,7 +9,7 @@ use rpc::{ proto::{self, PeerId}, }; use std::{sync::Arc, time::Duration}; -use text::BufferId; +use text::{BufferId, ReplicaId}; use util::ResultExt; pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250); @@ -65,7 +65,12 @@ impl ChannelBuffer { let buffer = cx.new(|cx| { let capability = channel_store.read(cx).channel_capability(channel.id); - language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) + language::Buffer::remote( + buffer_id, + ReplicaId::new(response.replica_id as u16), + capability, + base_text, + ) })?; buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; @@ -272,7 +277,7 @@ impl ChannelBuffer { self.connected } - pub fn replica_id(&self, cx: &App) -> u16 { + pub fn replica_id(&self, cx: &App) -> ReplicaId { self.buffer.read(cx).replica_id() } } diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 812e56d1730b8e2ada34e98aa85a5767eed77997..ea4a8de290921e1c7d4d4eb70a271799a1761dc2 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -32,7 +32,6 @@ release_channel.workspace = true serde.workspace = true util.workspace = true tempfile.workspace = true -workspace-hack.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] exec.workspace = true diff --git a/crates/cli/README.md b/crates/cli/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ac4384e2566685cb57601a833e25ed783197fb5f --- /dev/null +++ b/crates/cli/README.md @@ -0,0 +1,15 @@ +# Cli + +## Testing + +You can test your changes to the `cli` crate by first building the main zed binary: + +``` +cargo build -p zed +``` + +And then building and running the `cli` crate with the following parameters: + +``` + cargo run -p cli -- --zed ./target/debug/zed.exe +``` diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 79a10fa2b0936b44d9500fd9990ffa4c6ac62e85..fbd7e2693a74598f3840afa5f4a99c86e96f2357 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -17,6 +17,7 @@ pub enum CliRequest { wsl: Option, wait: bool, open_new_workspace: Option, + 
reuse: bool, env: Option>, user_data_dir: Option, }, diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 3044172d8974ab05d32c6618ec09dbad618a218f..64a342a332f2c1b896afe58dda0e7156304e8116 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -62,16 +62,22 @@ struct Args { #[arg(short, long)] wait: bool, /// Add files to the currently open workspace - #[arg(short, long, overrides_with = "new")] + #[arg(short, long, overrides_with_all = ["new", "reuse"])] add: bool, /// Create a new workspace - #[arg(short, long, overrides_with = "add")] + #[arg(short, long, overrides_with_all = ["add", "reuse"])] new: bool, + /// Reuse an existing window, replacing its workspace + #[arg(short, long, overrides_with_all = ["add", "new"])] + reuse: bool, /// Sets a custom directory for all user data (e.g., database, extensions, logs). - /// This overrides the default platform-specific data directory location. - /// On macOS, the default is `~/Library/Application Support/Zed`. - /// On Linux/FreeBSD, the default is `$XDG_DATA_HOME/zed`. - /// On Windows, the default is `%LOCALAPPDATA%\Zed`. + /// This overrides the default platform-specific data directory location: + #[cfg_attr(target_os = "macos", doc = "`~/Library/Application Support/Zed`.")] + #[cfg_attr(target_os = "windows", doc = "`%LOCALAPPDATA%\\Zed`.")] + #[cfg_attr( + not(any(target_os = "windows", target_os = "macos")), + doc = "`$XDG_DATA_HOME/zed`." + )] #[arg(long, value_name = "DIR")] user_data_dir: Option, /// The paths to open in Zed (space-separated). @@ -152,6 +158,7 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result { } fn parse_path_in_wsl(source: &str, wsl: &str) -> Result { + let mut source = PathWithPosition::parse_str(source); let mut command = util::command::new_std_command("wsl.exe"); let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { @@ -170,19 +177,17 @@ fn parse_path_in_wsl(source: &str, wsl: &str) -> Result { let output = command .arg("--distribution") .arg(distro_name) + .arg("--exec") .arg("wslpath") .arg("-m") - .arg(source) + .arg(&source.path) .output()?; let result = String::from_utf8_lossy(&output.stdout); let prefix = format!("//wsl.localhost/{}", distro_name); + source.path = Path::new(result.trim().strip_prefix(&prefix).unwrap_or(&result)).to_owned(); - Ok(result - .trim() - .strip_prefix(&prefix) - .unwrap_or(&result) - .to_string()) + Ok(source.to_string(|path| path.to_string_lossy().into_owned())) } fn main() -> Result<()> { @@ -372,6 +377,7 @@ fn main() -> Result<()> { wsl, wait: args.wait, open_new_workspace, + reuse: args.reuse, env, user_data_dir: user_data_dir_for_thread, })?; diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 86ecb1b34e323289b542d3bd6f48520c50867ad6..513a73be4581f3b0c8069dde831cc6811f5e045b 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -57,7 +57,6 @@ tokio-socks = { version = "0.5.2", default-features = false, features = ["future tokio.workspace = true url.workspace = true util.workspace = true -workspace-hack.workspace = true worktree.workspace = true [dev-dependencies] diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 911cada78f14ee587a1b4570c9a35181a2e6fdec..5aff87155f3a0328aa017060604b5fc79604731e 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -138,10 +138,6 @@ impl Settings for ProxySettings { proxy: content.proxy.clone(), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut 
SettingsContent) { - vscode.string_setting("http.proxy", &mut current.proxy); - } } pub fn init_settings(cx: &mut App) { @@ -525,27 +521,6 @@ impl settings::Settings for TelemetrySettings { metrics: content.telemetry.as_ref().unwrap().metrics.unwrap(), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - let mut telemetry = settings::TelemetrySettingsContent::default(); - vscode.enum_setting("telemetry.telemetryLevel", &mut telemetry.metrics, |s| { - Some(s == "all") - }); - vscode.enum_setting( - "telemetry.telemetryLevel", - &mut telemetry.diagnostics, - |s| Some(matches!(s, "all" | "error" | "crash")), - ); - // we could translate telemetry.telemetryLevel, but just because users didn't want - // to send microsoft telemetry doesn't mean they don't want to send it to zed. their - // all/error/crash/off correspond to combinations of our "diagnostics" and "metrics". - if let Some(diagnostics) = telemetry.diagnostics { - current.telemetry.get_or_insert_default().diagnostics = Some(diagnostics) - } - if let Some(metrics) = telemetry.metrics { - current.telemetry.get_or_insert_default().metrics = Some(metrics) - } - } } impl Client { diff --git a/crates/client/src/proxy/socks_proxy.rs b/crates/client/src/proxy/socks_proxy.rs index 9ccf4906d8efb4d88b6167ed2a46a44df22906a2..bf2a5eab627cbae0f0ac965b3e379e6f388aaa70 100644 --- a/crates/client/src/proxy/socks_proxy.rs +++ b/crates/client/src/proxy/socks_proxy.rs @@ -23,7 +23,7 @@ pub(super) struct Socks5Authorization<'a> { /// Socks Proxy Protocol Version /// -/// V4 allows idenfication using a user_id +/// V4 allows identification using a user_id /// V5 allows authorization using a username and password pub(super) enum SocksVersion<'a> { V4 { diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index de0668b406c512eabfc70f4702466f013eb8c515..525a3e960ce8bc2aede4b0665af23ab3c33cac15 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -943,7 +943,7 @@ impl Collaborator { pub fn from_proto(message: proto::Collaborator) -> Result { Ok(Self { peer_id: message.peer_id.context("invalid peer id")?, - replica_id: message.replica_id as ReplicaId, + replica_id: ReplicaId::new(message.replica_id as u16), user_id: message.user_id as UserId, is_host: message.is_host, committer_name: message.committer_name, diff --git a/crates/clock/Cargo.toml b/crates/clock/Cargo.toml index c2fa1e003a00a52a315ce5b6179bb07db40ce414..486cf0ba8bebc032481fc6bcbe908be05b7fd353 100644 --- a/crates/clock/Cargo.toml +++ b/crates/clock/Cargo.toml @@ -19,4 +19,3 @@ test-support = ["dep:parking_lot"] parking_lot = { workspace = true, optional = true } serde.workspace = true smallvec.workspace = true -workspace-hack.workspace = true diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index b4f57116d273733d6c43a0a09c8a2a33ccb89b38..bec98d9bfbc19b7e8ca72b97c5748d3efce4dcf6 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -4,33 +4,73 @@ use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, - fmt, iter, + fmt, }; pub use system_clock::*; -pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX; -pub const AGENT_REPLICA_ID: u16 = u16::MAX - 1; - /// A unique identifier for each distributed node. 
-pub type ReplicaId = u16; +#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] +pub struct ReplicaId(u16); + +impl ReplicaId { + /// The local replica + pub const LOCAL: ReplicaId = ReplicaId(0); + /// The remote replica of the connected remote server. + pub const REMOTE_SERVER: ReplicaId = ReplicaId(1); + /// The agent's unique identifier. + pub const AGENT: ReplicaId = ReplicaId(2); + /// A local branch. + pub const LOCAL_BRANCH: ReplicaId = ReplicaId(3); + /// The first collaborative replica ID, any replica equal or greater than this is a collaborative replica. + pub const FIRST_COLLAB_ID: ReplicaId = ReplicaId(8); + + pub fn new(id: u16) -> Self { + ReplicaId(id) + } + + pub fn as_u16(&self) -> u16 { + self.0 + } + + pub fn is_remote(self) -> bool { + self == ReplicaId::REMOTE_SERVER || self >= ReplicaId::FIRST_COLLAB_ID + } +} + +impl fmt::Debug for ReplicaId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if *self == ReplicaId::LOCAL { + write!(f, "") + } else if *self == ReplicaId::REMOTE_SERVER { + write!(f, "") + } else if *self == ReplicaId::AGENT { + write!(f, "") + } else if *self == ReplicaId::LOCAL_BRANCH { + write!(f, "") + } else { + write!(f, "{}", self.0) + } + } +} /// A [Lamport sequence number](https://en.wikipedia.org/wiki/Lamport_timestamp). pub type Seq = u32; /// A [Lamport timestamp](https://en.wikipedia.org/wiki/Lamport_timestamp), /// used to determine the ordering of events in the editor. -#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Serialize, Deserialize)] +#[derive(Clone, Copy, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct Lamport { pub replica_id: ReplicaId, pub value: Seq, } -/// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock). +/// A [version vector](https://en.wikipedia.org/wiki/Version_vector). #[derive(Clone, Default, Hash, Eq, PartialEq)] pub struct Global { - values: SmallVec<[u32; 8]>, - local_branch_value: u32, + // 4 is chosen as it is the biggest count that does not increase the size of the field itself. + // Coincidentally, it also covers all the important non-collab replica ids. + values: SmallVec<[u32; 4]>, } impl Global { @@ -38,30 +78,31 @@ impl Global { Self::default() } + /// Fetches the sequence number for the given replica ID. pub fn get(&self, replica_id: ReplicaId) -> Seq { - if replica_id == LOCAL_BRANCH_REPLICA_ID { - self.local_branch_value - } else { - self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq - } + self.values.get(replica_id.0 as usize).copied().unwrap_or(0) as Seq } + /// Observe the lamport timestamp. + /// + /// This sets the current sequence number of the observed replica ID to the maximum of this global's observed sequence and the observed timestamp. 
pub fn observe(&mut self, timestamp: Lamport) { + debug_assert_ne!(timestamp.replica_id, Lamport::MAX.replica_id); if timestamp.value > 0 { - if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { - self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value); - } else { - let new_len = timestamp.replica_id as usize + 1; - if new_len > self.values.len() { - self.values.resize(new_len, 0); - } - - let entry = &mut self.values[timestamp.replica_id as usize]; - *entry = cmp::max(*entry, timestamp.value); + let new_len = timestamp.replica_id.0 as usize + 1; + if new_len > self.values.len() { + self.values.resize(new_len, 0); } + + let entry = &mut self.values[timestamp.replica_id.0 as usize]; + *entry = cmp::max(*entry, timestamp.value); } } + /// Join another global. + /// + /// This observes all timestamps from the other global. + #[doc(alias = "synchronize")] pub fn join(&mut self, other: &Self) { if other.values.len() > self.values.len() { self.values.resize(other.values.len(), 0); @@ -70,34 +111,36 @@ impl Global { for (left, right) in self.values.iter_mut().zip(&other.values) { *left = cmp::max(*left, *right); } - - self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value); } + /// Meet another global. + /// + /// Sets all unobserved timestamps of this global to the sequences of other and sets all observed timestamps of this global to the minimum observed of both globals. pub fn meet(&mut self, other: &Self) { if other.values.len() > self.values.len() { self.values.resize(other.values.len(), 0); } let mut new_len = 0; - for (ix, (left, right)) in self - .values - .iter_mut() - .zip(other.values.iter().chain(iter::repeat(&0))) - .enumerate() - { - if *left == 0 { - *left = *right; - } else if *right > 0 { - *left = cmp::min(*left, *right); + for (ix, (left, &right)) in self.values.iter_mut().zip(&other.values).enumerate() { + match (*left, right) { + // left has not observed the replica + (0, _) => *left = right, + // right has not observed the replica + (_, 0) => (), + (_, _) => *left = cmp::min(*left, right), } - if *left != 0 { new_len = ix + 1; } } - self.values.resize(new_len, 0); - self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value); + if other.values.len() == self.values.len() { + // only truncate if other was equal or shorter (which at this point + // cant be due to the resize above) to `self` as otherwise we would + // truncate the unprocessed tail that is guaranteed to contain + // non-null timestamps + self.values.truncate(new_len); + } } pub fn observed(&self, timestamp: Lamport) -> bool { @@ -105,20 +148,18 @@ impl Global { } pub fn observed_any(&self, other: &Self) -> bool { - self.values - .iter() - .zip(other.values.iter()) - .any(|(left, right)| *right > 0 && left >= right) - || (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value) + self.iter() + .zip(other.iter()) + .any(|(left, right)| right.value > 0 && left.value >= right.value) } pub fn observed_all(&self, other: &Self) -> bool { - let mut rhs = other.values.iter(); - self.values.iter().all(|left| match rhs.next() { - Some(right) => left >= right, - None => true, - }) && rhs.next().is_none() - && self.local_branch_value >= other.local_branch_value + if self.values.len() < other.values.len() { + return false; + } + self.iter() + .zip(other.iter()) + .all(|(left, right)| left.value >= right.value) } pub fn changed_since(&self, other: &Self) -> bool { @@ -128,21 +169,21 @@ impl Global { .iter() 
.zip(other.values.iter()) .any(|(left, right)| left > right) - || self.local_branch_value > other.local_branch_value } + pub fn most_recent(&self) -> Option { + self.iter().max_by_key(|timestamp| timestamp.value) + } + + /// Iterates all replicas observed by this global as well as any unobserved replicas whose ID is lower than the highest observed replica. pub fn iter(&self) -> impl Iterator + '_ { self.values .iter() .enumerate() .map(|(replica_id, seq)| Lamport { - replica_id: replica_id as ReplicaId, + replica_id: ReplicaId(replica_id as u16), value: *seq, }) - .chain((self.local_branch_value > 0).then_some(Lamport { - replica_id: LOCAL_BRANCH_REPLICA_ID, - value: self.local_branch_value, - })) } } @@ -173,12 +214,12 @@ impl PartialOrd for Lamport { impl Lamport { pub const MIN: Self = Self { - replica_id: ReplicaId::MIN, + replica_id: ReplicaId(u16::MIN), value: Seq::MIN, }; pub const MAX: Self = Self { - replica_id: ReplicaId::MAX, + replica_id: ReplicaId(u16::MAX), value: Seq::MAX, }; @@ -190,7 +231,7 @@ impl Lamport { } pub fn as_u64(self) -> u64 { - ((self.value as u64) << 32) | (self.replica_id as u64) + ((self.value as u64) << 32) | (self.replica_id.0 as u64) } pub fn tick(&mut self) -> Self { @@ -206,7 +247,13 @@ impl Lamport { impl fmt::Debug for Lamport { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Lamport {{{}: {}}}", self.replica_id, self.value) + if *self == Self::MAX { + write!(f, "Lamport {{MAX}}") + } else if *self == Self::MIN { + write!(f, "Lamport {{MIN}}") + } else { + write!(f, "Lamport {{{:?}: {}}}", self.replica_id, self.value) + } } } @@ -214,14 +261,10 @@ impl fmt::Debug for Global { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Global {{")?; for timestamp in self.iter() { - if timestamp.replica_id > 0 { + if timestamp.replica_id.0 > 0 { write!(f, ", ")?; } - if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { - write!(f, ": {}", timestamp.value)?; - } else { - write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; - } + write!(f, "{:?}: {}", timestamp.replica_id, timestamp.value)?; } write!(f, "}}") } diff --git a/crates/cloud_api_client/Cargo.toml b/crates/cloud_api_client/Cargo.toml index 8e50ccb191373fe2cfadce2e4fd12cc3e397357f..9dc009bf2e59ba848c93a6ebc65be566a2aabd55 100644 --- a/crates/cloud_api_client/Cargo.toml +++ b/crates/cloud_api_client/Cargo.toml @@ -20,5 +20,4 @@ gpui_tokio.workspace = true http_client.workspace = true parking_lot.workspace = true serde_json.workspace = true -workspace-hack.workspace = true yawc.workspace = true diff --git a/crates/cloud_api_types/Cargo.toml b/crates/cloud_api_types/Cargo.toml index 28e0a36a44f023e883bea98e4facacd9085e0efb..46d5d109b1bd5328c9c4d8b7cb1fbb8325e27656 100644 --- a/crates/cloud_api_types/Cargo.toml +++ b/crates/cloud_api_types/Cargo.toml @@ -17,7 +17,6 @@ chrono.workspace = true ciborium.workspace = true cloud_llm_client.workspace = true serde.workspace = true -workspace-hack.workspace = true [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/cloud_llm_client/Cargo.toml b/crates/cloud_llm_client/Cargo.toml index 1ef978f0a7d112f4239215d43b2306631bafa64b..c6a551a1fbd8a83e50f68fbcf47f26a6e96a1d24 100644 --- a/crates/cloud_llm_client/Cargo.toml +++ b/crates/cloud_llm_client/Cargo.toml @@ -21,7 +21,7 @@ serde = { workspace = true, features = ["derive", "rc"] } serde_json.workspace = true strum = { workspace = true, features = ["derive"] } uuid = { workspace = true, features = ["serde"] } -workspace-hack.workspace = true 
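Aside on the `clock` changes above: `ReplicaId` is now a newtype with reserved constants (`LOCAL`, `REMOTE_SERVER`, `AGENT`, `LOCAL_BRANCH`, `FIRST_COLLAB_ID`), and `Global` is a plain version vector with no special local-branch slot. The following is a minimal sketch of how the patched API behaves; the function name and the concrete sequence values are illustrative only and not part of the patch.

```rust
use clock::{Global, Lamport, ReplicaId};

// Illustrative only: exercises `observe`, `join`, and `meet` on the patched `Global`.
fn version_vector_sketch() {
    let a = ReplicaId::FIRST_COLLAB_ID; // first collaborator (id 8)
    let b = ReplicaId::new(a.as_u16() + 1); // next collaborator (id 9)

    let mut left = Global::new();
    left.observe(Lamport { replica_id: a, value: 3 });

    let mut right = Global::new();
    right.observe(Lamport { replica_id: b, value: 5 });

    // `join` takes the element-wise maximum, so `left` now covers both edits.
    left.join(&right);
    assert_eq!(left.get(a), 3);
    assert_eq!(left.get(b), 5);

    // `meet` keeps the minimum of entries observed on both sides and adopts the
    // other side's entry where this side has observed nothing.
    let mut lower = left.clone();
    lower.meet(&right);
    assert_eq!(lower.get(a), 3); // only `left` observed `a`, so its value is kept
    assert_eq!(lower.get(b), 5); // both observed `b` at 5
}
```

In other words, `join` means "seen by either side" while `meet` gives a conservative lower bound, matching the doc comments added in the hunk above.
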
[dev-dependencies] pretty_assertions.workspace = true +indoc.workspace = true diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index 4ae72ce0a4c41c9279dd18ca215c0dc0a7839f33..bb77c3a5b7f8009093cbf7bc427160ed535e6c62 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -322,6 +322,9 @@ pub struct LanguageModel { pub supports_images: bool, pub supports_thinking: bool, pub supports_max_mode: bool, + // only used by OpenAI and xAI + #[serde(default)] + pub supports_parallel_tool_calls: bool, } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 6a6090afab569d77ccfcbcb25d48bab0158ea335..e03541e0f7d66bd54d6fbd918debbdc3d6c8d9e7 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -1,7 +1,8 @@ use chrono::Duration; use serde::{Deserialize, Serialize}; use std::{ - ops::Range, + fmt::Display, + ops::{Add, Range, Sub}, path::{Path, PathBuf}, sync::Arc, }; @@ -18,8 +19,8 @@ pub struct PredictEditsRequest { pub excerpt_path: Arc, /// Within file pub excerpt_range: Range, - /// Within `excerpt` - pub cursor_offset: usize, + pub excerpt_line_range: Range, + pub cursor_point: Point, /// Within `signatures` pub excerpt_parent: Option, pub signatures: Vec, @@ -47,12 +48,13 @@ pub struct PredictEditsRequest { pub enum PromptFormat { MarkedExcerpt, LabeledSections, + NumLinesUniDiff, /// Prompt format intended for use via zeta_cli OnlySnippets, } impl PromptFormat { - pub const DEFAULT: PromptFormat = PromptFormat::LabeledSections; + pub const DEFAULT: PromptFormat = PromptFormat::NumLinesUniDiff; } impl Default for PromptFormat { @@ -73,6 +75,7 @@ impl std::fmt::Display for PromptFormat { PromptFormat::MarkedExcerpt => write!(f, "Marked Excerpt"), PromptFormat::LabeledSections => write!(f, "Labeled Sections"), PromptFormat::OnlySnippets => write!(f, "Only Snippets"), + PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"), } } } @@ -89,6 +92,38 @@ pub enum Event { }, } +impl Display for Event { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Event::BufferChange { + path, + old_path, + diff, + predicted, + } => { + let new_path = path.as_deref().unwrap_or(Path::new("untitled")); + let old_path = old_path.as_deref().unwrap_or(new_path); + + if *predicted { + write!( + f, + "// User accepted prediction:\n--- a/{}\n+++ b/{}\n{diff}", + old_path.display(), + new_path.display() + ) + } else { + write!( + f, + "--- a/{}\n+++ b/{}\n{diff}", + old_path.display(), + new_path.display() + ) + } + } + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Signature { pub text: String, @@ -97,7 +132,7 @@ pub struct Signature { pub parent_index: Option, /// Range of `text` within the file, possibly truncated according to `text_is_truncated`. The /// file is implicitly the file that contains the descendant declaration or excerpt. - pub range: Range, + pub range: Range, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -106,7 +141,7 @@ pub struct ReferencedDeclaration { pub text: String, pub text_is_truncated: bool, /// Range of `text` within file, possibly truncated according to `text_is_truncated` - pub range: Range, + pub range: Range, /// Range within `text` pub signature_range: Range, /// Index within `signatures`. 
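For reference, the `Line`/`Point` coordinates introduced in the next hunk replace the byte-offset fields removed above (`cursor_offset` and the `Range<usize>` ranges). A small sketch with made-up values, assuming, as the `+ 1` in the prompt-rendering code later in this patch suggests, that `Line` is zero-based:

```rust
use cloud_llm_client::predict_edits_v3::{Line, Point};

// Illustrative values only: the request now addresses the excerpt by line/column
// rather than by byte offset within the excerpt text.
fn line_point_sketch() {
    let excerpt_line_range = Line(10)..Line(42); // zero-based lines within the file
    let cursor_point = Point { line: Line(17), column: 4 };

    assert!(excerpt_line_range.contains(&cursor_point.line));

    // `Line` implements `Add`/`Sub`, which the prompt planner uses to convert a
    // signature's offset within a declaration into an absolute line number.
    assert_eq!(Line(17) - Line(10), Line(7));
}
```
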
@@ -169,10 +204,115 @@ pub struct DebugInfo { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Edit { pub path: Arc, - pub range: Range, + pub range: Range, pub content: String, } fn is_default(value: &T) -> bool { *value == T::default() } + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)] +pub struct Point { + pub line: Line, + pub column: u32, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)] +#[serde(transparent)] +pub struct Line(pub u32); + +impl Add for Line { + type Output = Self; + + fn add(self, rhs: Self) -> Self::Output { + Self(self.0 + rhs.0) + } +} + +impl Sub for Line { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + use pretty_assertions::assert_eq; + + #[test] + fn test_event_display() { + let ev = Event::BufferChange { + path: None, + old_path: None, + diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), + predicted: false, + }; + assert_eq!( + ev.to_string(), + indoc! {" + --- a/untitled + +++ b/untitled + @@ -1,2 +1,2 @@ + -a + -b + "} + ); + + let ev = Event::BufferChange { + path: Some(PathBuf::from("foo/bar.txt")), + old_path: Some(PathBuf::from("foo/bar.txt")), + diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), + predicted: false, + }; + assert_eq!( + ev.to_string(), + indoc! {" + --- a/foo/bar.txt + +++ b/foo/bar.txt + @@ -1,2 +1,2 @@ + -a + -b + "} + ); + + let ev = Event::BufferChange { + path: Some(PathBuf::from("abc.txt")), + old_path: Some(PathBuf::from("123.txt")), + diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), + predicted: false, + }; + assert_eq!( + ev.to_string(), + indoc! {" + --- a/123.txt + +++ b/abc.txt + @@ -1,2 +1,2 @@ + -a + -b + "} + ); + + let ev = Event::BufferChange { + path: Some(PathBuf::from("abc.txt")), + old_path: Some(PathBuf::from("123.txt")), + diff: "@@ -1,2 +1,2 @@\n-a\n-b\n".into(), + predicted: true, + }; + assert_eq!( + ev.to_string(), + indoc! {" + // User accepted prediction: + --- a/123.txt + +++ b/abc.txt + @@ -1,2 +1,2 @@ + -a + -b + "} + ); + } +} diff --git a/crates/cloud_zeta2_prompt/Cargo.toml b/crates/cloud_zeta2_prompt/Cargo.toml index f5b23d653bd84faed6ce1fca02f6c7436a0badc8..43446f460c872afcdfe1d4bc47d14f894f0c9c09 100644 --- a/crates/cloud_zeta2_prompt/Cargo.toml +++ b/crates/cloud_zeta2_prompt/Cargo.toml @@ -19,4 +19,3 @@ ordered-float.workspace = true rustc-hash.workspace = true serde.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index df70119b7fc91cc570e605fd5cebb9164d54f215..284b245acf2305350e6a6a5e7c38dfaa9b16c5d4 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -1,7 +1,7 @@ //! Zeta2 prompt planning and generation code shared with cloud. 
use anyhow::{Context as _, Result, anyhow}; -use cloud_llm_client::predict_edits_v3::{self, Event, PromptFormat, ReferencedDeclaration}; +use cloud_llm_client::predict_edits_v3::{self, Line, Point, PromptFormat, ReferencedDeclaration}; use indoc::indoc; use ordered_float::OrderedFloat; use rustc_hash::{FxHashMap, FxHashSet}; @@ -13,27 +13,30 @@ use strum::{EnumIter, IntoEnumIterator}; pub const DEFAULT_MAX_PROMPT_BYTES: usize = 10 * 1024; -pub const CURSOR_MARKER: &str = "<|cursor_position|>"; +pub const CURSOR_MARKER: &str = "<|user_cursor|>"; /// NOTE: Differs from zed version of constant - includes a newline pub const EDITABLE_REGION_START_MARKER_WITH_NEWLINE: &str = "<|editable_region_start|>\n"; /// NOTE: Differs from zed version of constant - includes a newline pub const EDITABLE_REGION_END_MARKER_WITH_NEWLINE: &str = "<|editable_region_end|>\n"; // TODO: use constants for markers? -const MARKED_EXCERPT_SYSTEM_PROMPT: &str = indoc! {" +const MARKED_EXCERPT_INSTRUCTIONS: &str = indoc! {" You are a code completion assistant and your task is to analyze user edits and then rewrite an excerpt that the user provides, suggesting the appropriate edits within the excerpt, taking into account the cursor location. - The excerpt to edit will be wrapped in markers <|editable_region_start|> and <|editable_region_end|>. The cursor position is marked with <|cursor_position|>. Please respond with edited code for that region. + The excerpt to edit will be wrapped in markers <|editable_region_start|> and <|editable_region_end|>. The cursor position is marked with <|user_cursor|>. Please respond with edited code for that region. Other code is provided for context, and `…` indicates when code has been skipped. + + # Edit History: + "}; -const LABELED_SECTIONS_SYSTEM_PROMPT: &str = indoc! {r#" +const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#" You are a code completion assistant and your task is to analyze user edits, and suggest an edit to one of the provided sections of code. Sections of code are grouped by file and then labeled by `<|section_N|>` (e.g `<|section_8|>`). - The cursor position is marked with `<|cursor_position|>` and it will appear within a special section labeled `<|current_section|>`. Prefer editing the current section until no more changes are needed within it. + The cursor position is marked with `<|user_cursor|>` and it will appear within a special section labeled `<|current_section|>`. Prefer editing the current section until no more changes are needed within it. Respond ONLY with the name of the section to edit on a single line, followed by all of the code that should replace that section. For example: @@ -41,8 +44,58 @@ const LABELED_SECTIONS_SYSTEM_PROMPT: &str = indoc! {r#" for i in 0..16 { println!("{i}"); } + + # Edit History: + "#}; +const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#" + # Instructions + + You are a code completion assistant helping a programmer finish their work. Your task is to: + + 1. Analyze the edit history to understand what the programmer is trying to achieve + 2. Identify any incomplete refactoring or changes that need to be finished + 3. Make the remaining edits that a human programmer would logically make next + 4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere. 
+ + Focus on: + - Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs) + - Completing any partially-applied changes across the codebase + - Ensuring consistency with the programming style and patterns already established + - Making edits that maintain or improve code quality + - If the programmer started refactoring one instance of a pattern, find and update ALL similar instances + - Don't write a lot of code if you're not sure what to do + + Rules: + - Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals. + - Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code. + - Write the edits in the unified diff format as shown in the example. + + # Example output: + + ``` + --- a/src/myapp/cli.py + +++ b/src/myapp/cli.py + @@ -1,3 +1,3 @@ + - + - + -import sys + +import json + ``` + + # Edit History: + +"#}; + +const UNIFIED_DIFF_REMINDER: &str = indoc! {" + --- + + Please analyze the edit history and the files, then provide the unified diff for your predicted edits. + Do not include the cursor marker in your output. + If you're editing multiple files, be sure to reflect filename in the hunk's header. +"}; + pub struct PlannedPrompt<'a> { request: &'a predict_edits_v3::PredictEditsRequest, /// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in @@ -51,19 +104,10 @@ pub struct PlannedPrompt<'a> { budget_used: usize, } -pub fn system_prompt(format: PromptFormat) -> &'static str { - match format { - PromptFormat::MarkedExcerpt => MARKED_EXCERPT_SYSTEM_PROMPT, - PromptFormat::LabeledSections => LABELED_SECTIONS_SYSTEM_PROMPT, - // only intended for use via zeta_cli - PromptFormat::OnlySnippets => "", - } -} - #[derive(Clone, Debug)] pub struct PlannedSnippet<'a> { path: Arc, - range: Range, + range: Range, text: &'a str, // TODO: Indicate this in the output #[allow(dead_code)] @@ -79,7 +123,7 @@ pub enum DeclarationStyle { #[derive(Clone, Debug, Serialize)] pub struct SectionLabels { pub excerpt_index: usize, - pub section_ranges: Vec<(Arc, Range)>, + pub section_ranges: Vec<(Arc, Range)>, } impl<'a> PlannedPrompt<'a> { @@ -196,10 +240,24 @@ impl<'a> PlannedPrompt<'a> { declaration.text.len() )); }; + let signature_start_line = declaration.range.start + + Line( + declaration.text[..declaration.signature_range.start] + .lines() + .count() as u32, + ); + let signature_end_line = signature_start_line + + Line( + declaration.text + [declaration.signature_range.start..declaration.signature_range.end] + .lines() + .count() as u32, + ); + let range = signature_start_line..signature_end_line; + PlannedSnippet { path: declaration.path.clone(), - range: (declaration.signature_range.start + declaration.range.start) - ..(declaration.signature_range.end + declaration.range.start), + range, text, text_is_truncated: declaration.text_is_truncated, } @@ -318,7 +376,7 @@ impl<'a> PlannedPrompt<'a> { } let excerpt_snippet = PlannedSnippet { path: self.request.excerpt_path.clone(), - range: self.request.excerpt_range.clone(), + range: self.request.excerpt_line_range.clone(), text: &self.request.excerpt, text_is_truncated: false, }; @@ -328,86 +386,98 @@ impl<'a> PlannedPrompt<'a> { let mut excerpt_file_insertions = match self.request.prompt_format { PromptFormat::MarkedExcerpt => vec![ ( - self.request.excerpt_range.start, + Point { + line: self.request.excerpt_line_range.start, + 
column: 0, + }, EDITABLE_REGION_START_MARKER_WITH_NEWLINE, ), + (self.request.cursor_point, CURSOR_MARKER), ( - self.request.excerpt_range.start + self.request.cursor_offset, - CURSOR_MARKER, - ), - ( - self.request - .excerpt_range - .end - .saturating_sub(0) - .max(self.request.excerpt_range.start), + Point { + line: self.request.excerpt_line_range.end, + column: 0, + }, EDITABLE_REGION_END_MARKER_WITH_NEWLINE, ), ], - PromptFormat::LabeledSections => vec![( - self.request.excerpt_range.start + self.request.cursor_offset, - CURSOR_MARKER, - )], + PromptFormat::LabeledSections => vec![(self.request.cursor_point, CURSOR_MARKER)], + PromptFormat::NumLinesUniDiff => { + vec![(self.request.cursor_point, CURSOR_MARKER)] + } PromptFormat::OnlySnippets => vec![], }; - let mut prompt = String::new(); - prompt.push_str("## User Edits\n\n"); - Self::push_events(&mut prompt, &self.request.events); + let mut prompt = match self.request.prompt_format { + PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), + PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), + PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), + // only intended for use via zeta_cli + PromptFormat::OnlySnippets => String::new(), + }; + + if self.request.events.is_empty() { + prompt.push_str("(No edit history)\n\n"); + } else { + prompt.push_str( + "The following are the latest edits made by the user, from earlier to later.\n\n", + ); + Self::push_events(&mut prompt, &self.request.events); + } + + if self.request.prompt_format == PromptFormat::NumLinesUniDiff { + if self.request.referenced_declarations.is_empty() { + prompt.push_str(indoc! {" + # File under the cursor: + + The cursor marker <|user_cursor|> indicates the current user cursor position. + The file is in current state, edits from edit history have been applied. + We prepend line numbers (e.g., `123|`); they are not part of the file. + + "}); + } else { + // Note: This hasn't been trained on yet + prompt.push_str(indoc! {" + # Code Excerpts: + + The cursor marker <|user_cursor|> indicates the current user cursor position. + Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor. + Context excerpts are not guaranteed to be relevant, so use your own judgement. + Files are in their current state, edits from edit history have been applied. + We prepend line numbers (e.g., `123|`); they are not part of the file. 
+ + "}); + } + } else { + prompt.push_str("\n## Code\n\n"); + } - prompt.push_str("\n## Code\n\n"); let section_labels = self.push_file_snippets(&mut prompt, &mut excerpt_file_insertions, file_snippets)?; + + if self.request.prompt_format == PromptFormat::NumLinesUniDiff { + prompt.push_str(UNIFIED_DIFF_REMINDER); + } + Ok((prompt, section_labels)) } fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) { - for event in events { - match event { - Event::BufferChange { - path, - old_path, - diff, - predicted, - } => { - if let Some(old_path) = &old_path - && let Some(new_path) = &path - { - if old_path != new_path { - writeln!( - output, - "User renamed {} to {}\n\n", - old_path.display(), - new_path.display() - ) - .unwrap(); - } - } + if events.is_empty() { + return; + }; - let path = path - .as_ref() - .map_or_else(|| "untitled".to_string(), |path| path.display().to_string()); - - if *predicted { - writeln!( - output, - "User accepted prediction {:?}:\n```diff\n{}\n```\n", - path, diff - ) - .unwrap(); - } else { - writeln!(output, "User edited {:?}:\n```diff\n{}\n```\n", path, diff) - .unwrap(); - } - } - } + writeln!(output, "`````diff").unwrap(); + for event in events { + writeln!(output, "{}", event).unwrap(); } + writeln!(output, "`````\n").unwrap(); } fn push_file_snippets( &self, output: &mut String, - excerpt_file_insertions: &mut Vec<(usize, &'static str)>, + excerpt_file_insertions: &mut Vec<(Point, &'static str)>, file_snippets: Vec<(&'a Path, Vec<&'a PlannedSnippet>, bool)>, ) -> Result { let mut section_ranges = Vec::new(); @@ -417,15 +487,13 @@ impl<'a> PlannedPrompt<'a> { snippets.sort_by_key(|s| (s.range.start, Reverse(s.range.end))); // TODO: What if the snippets get expanded too large to be editable? - let mut current_snippet: Option<(&PlannedSnippet, Range)> = None; - let mut disjoint_snippets: Vec<(&PlannedSnippet, Range)> = Vec::new(); + let mut current_snippet: Option<(&PlannedSnippet, Range)> = None; + let mut disjoint_snippets: Vec<(&PlannedSnippet, Range)> = Vec::new(); for snippet in snippets { if let Some((_, current_snippet_range)) = current_snippet.as_mut() - && snippet.range.start < current_snippet_range.end + && snippet.range.start <= current_snippet_range.end { - if snippet.range.end > current_snippet_range.end { - current_snippet_range.end = snippet.range.end; - } + current_snippet_range.end = current_snippet_range.end.max(snippet.range.end); continue; } if let Some(current_snippet) = current_snippet.take() { @@ -437,21 +505,24 @@ impl<'a> PlannedPrompt<'a> { disjoint_snippets.push(current_snippet); } - writeln!(output, "```{}", file_path.display()).ok(); + // TODO: remove filename=? 
+ writeln!(output, "`````filename={}", file_path.display()).ok(); let mut skipped_last_snippet = false; for (snippet, range) in disjoint_snippets { let section_index = section_ranges.len(); match self.request.prompt_format { - PromptFormat::MarkedExcerpt | PromptFormat::OnlySnippets => { - if range.start > 0 && !skipped_last_snippet { + PromptFormat::MarkedExcerpt + | PromptFormat::OnlySnippets + | PromptFormat::NumLinesUniDiff => { + if range.start.0 > 0 && !skipped_last_snippet { output.push_str("…\n"); } } PromptFormat::LabeledSections => { if is_excerpt_file - && range.start <= self.request.excerpt_range.start - && range.end >= self.request.excerpt_range.end + && range.start <= self.request.excerpt_line_range.start + && range.end >= self.request.excerpt_line_range.end { writeln!(output, "<|current_section|>").ok(); } else { @@ -460,46 +531,83 @@ impl<'a> PlannedPrompt<'a> { } } + let push_full_snippet = |output: &mut String| { + if self.request.prompt_format == PromptFormat::NumLinesUniDiff { + for (i, line) in snippet.text.lines().enumerate() { + writeln!(output, "{}|{}", i as u32 + range.start.0 + 1, line)?; + } + } else { + output.push_str(&snippet.text); + } + anyhow::Ok(()) + }; + if is_excerpt_file { if self.request.prompt_format == PromptFormat::OnlySnippets { - if range.start >= self.request.excerpt_range.start - && range.end <= self.request.excerpt_range.end + if range.start >= self.request.excerpt_line_range.start + && range.end <= self.request.excerpt_line_range.end { skipped_last_snippet = true; } else { skipped_last_snippet = false; output.push_str(snippet.text); } - } else { - let mut last_offset = range.start; - let mut i = 0; - while i < excerpt_file_insertions.len() { - let (offset, insertion) = &excerpt_file_insertions[i]; - let found = *offset >= range.start && *offset <= range.end; + } else if !excerpt_file_insertions.is_empty() { + let lines = snippet.text.lines().collect::>(); + let push_line = |output: &mut String, line_ix: usize| { + if self.request.prompt_format == PromptFormat::NumLinesUniDiff { + write!(output, "{}|", line_ix as u32 + range.start.0 + 1)?; + } + anyhow::Ok(writeln!(output, "{}", lines[line_ix])?) + }; + let mut last_line_ix = 0; + let mut insertion_ix = 0; + while insertion_ix < excerpt_file_insertions.len() { + let (point, insertion) = &excerpt_file_insertions[insertion_ix]; + let found = point.line >= range.start && point.line <= range.end; if found { excerpt_index = Some(section_index); - output.push_str( - &snippet.text[last_offset - range.start..offset - range.start], - ); - output.push_str(insertion); - last_offset = *offset; - excerpt_file_insertions.remove(i); + let insertion_line_ix = (point.line.0 - range.start.0) as usize; + for line_ix in last_line_ix..insertion_line_ix { + push_line(output, line_ix)?; + } + if let Some(next_line) = lines.get(insertion_line_ix) { + if self.request.prompt_format == PromptFormat::NumLinesUniDiff { + write!( + output, + "{}|", + insertion_line_ix as u32 + range.start.0 + 1 + )? 
+ } + output.push_str(&next_line[..point.column as usize]); + output.push_str(insertion); + writeln!(output, "{}", &next_line[point.column as usize..])?; + } else { + writeln!(output, "{}", insertion)?; + } + last_line_ix = insertion_line_ix + 1; + excerpt_file_insertions.remove(insertion_ix); continue; } - i += 1; + insertion_ix += 1; } skipped_last_snippet = false; - output.push_str(&snippet.text[last_offset - range.start..]); + for line_ix in last_line_ix..lines.len() { + push_line(output, line_ix)?; + } + } else { + skipped_last_snippet = false; + push_full_snippet(output)?; } } else { skipped_last_snippet = false; - output.push_str(snippet.text); + push_full_snippet(output)?; } section_ranges.push((snippet.path.clone(), range)); } - output.push_str("```\n\n"); + output.push_str("`````\n\n"); } Ok(SectionLabels { diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml index 932834827f3516f48fed06ccf6c430935c725fee..b402274a33530424349081da764a4b6766e419e9 100644 --- a/crates/codestral/Cargo.toml +++ b/crates/codestral/Cargo.toml @@ -23,6 +23,5 @@ serde.workspace = true serde_json.workspace = true smol.workspace = true text.workspace = true -workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index d95b318b0e791b532a340bda94d945fb7c9485c1..c8467da7954b195c0eef09ce1bed8361d7fa2c7b 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -20,7 +20,7 @@ test-support = ["sqlite"] [dependencies] anyhow.workspace = true async-trait.workspace = true -async-tungstenite.workspace = true +async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots" ] } aws-config = { version = "1.1.5" } aws-sdk-kinesis = "1.51.0" aws-sdk-s3 = { version = "1.15.0" } @@ -47,7 +47,9 @@ reqwest = { version = "0.11", features = ["json"] } reqwest_client.workspace = true rpc.workspace = true scrypt = "0.11" -sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } +# sea-orm and sea-orm-macros versions must match exactly. 
+sea-orm = { version = "=1.1.10", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } +sea-orm-macros = "=1.1.10" semantic_version.workspace = true semver.workspace = true serde.workspace = true @@ -68,11 +70,10 @@ tracing = "0.1.40" tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927 util.workspace = true uuid.workspace = true -workspace-hack.workspace = true [dev-dependencies] agent_settings.workspace = true -assistant_context.workspace = true +assistant_text_thread.workspace = true assistant_slash_command.workspace = true async-trait.workspace = true audio.workspace = true @@ -116,7 +117,7 @@ release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } remote_server.workspace = true rpc = { workspace = true, features = ["test-support"] } -sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] } +sea-orm = { version = "=1.1.10", features = ["sqlx-sqlite"] } serde_json.workspace = true session = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index d498ecd50a0b88a3a83c7e35a962136e7da74aa5..f2cbf419f0a64004a2210af216faba2baffca8b4 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -97,6 +97,7 @@ CREATE TABLE "worktree_entries" ( "is_external" BOOL NOT NULL, "is_ignored" BOOL NOT NULL, "is_deleted" BOOL NOT NULL, + "is_hidden" BOOL NOT NULL, "git_status" INTEGER, "is_fifo" BOOL NOT NULL, PRIMARY KEY (project_id, worktree_id, id), @@ -466,6 +467,7 @@ CREATE TABLE extension_versions ( provides_grammars BOOLEAN NOT NULL DEFAULT FALSE, provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE, provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE, + provides_agent_servers BOOLEAN NOT NULL DEFAULT FALSE, provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE, provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE, provides_snippets BOOLEAN NOT NULL DEFAULT FALSE, diff --git a/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql b/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql new file mode 100644 index 0000000000000000000000000000000000000000..3c399924b96891d490792fb36b61a034f8dce97f --- /dev/null +++ b/crates/collab/migrations/20250618090000_add_agent_servers_provides_field_to_extensions.sql @@ -0,0 +1,2 @@ +alter table extension_versions +add column provides_agent_servers bool not null default false diff --git a/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql b/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql new file mode 100644 index 0000000000000000000000000000000000000000..5b4207aeea500595c66508fa88a20662bc5693c1 --- /dev/null +++ b/crates/collab/migrations/20251008120000_add_is_hidden_to_worktree_entries.sql @@ -0,0 +1,2 @@ +ALTER TABLE "worktree_entries" +ADD "is_hidden" BOOL NOT NULL DEFAULT FALSE; diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 2e6b4719d1c126230849ac81bc1f215092bc0b5e..6c4cd58d132bdeaaa791f4da8406e0e6d9052981 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ 
b/crates/collab/src/db/queries/buffers.rs @@ -62,9 +62,9 @@ impl Database { .iter() .map(|c| c.replica_id) .collect::>(); - let mut replica_id = ReplicaId(0); + let mut replica_id = ReplicaId(clock::ReplicaId::FIRST_COLLAB_ID.as_u16() as i32); while replica_ids.contains(&replica_id) { - replica_id.0 += 1; + replica_id = ReplicaId(replica_id.0 + 1); } let collaborator = channel_buffer_collaborator::ActiveModel { channel_id: ActiveValue::Set(channel_id), @@ -203,7 +203,7 @@ impl Database { while let Some(row) = rows.next().await { let row = row?; let timestamp = clock::Lamport { - replica_id: row.replica_id as u16, + replica_id: clock::ReplicaId::new(row.replica_id as u16), value: row.lamport_timestamp as u32, }; server_version.observe(timestamp); @@ -701,7 +701,11 @@ impl Database { return Ok(()); } - let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); + let mut text_buffer = text::Buffer::new( + clock::ReplicaId::LOCAL, + text::BufferId::new(1).unwrap(), + base_text, + ); text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); @@ -934,7 +938,7 @@ pub fn operation_from_wire(operation: proto::Operation) -> Option Some(text::Operation::Edit(EditOperation { timestamp: clock::Lamport { - replica_id: edit.replica_id as text::ReplicaId, + replica_id: clock::ReplicaId::new(edit.replica_id as u16), value: edit.lamport_timestamp, }, version: version_from_wire(&edit.version), @@ -949,7 +953,7 @@ pub fn operation_from_wire(operation: proto::Operation) -> Option Some(text::Operation::Undo(UndoOperation { timestamp: clock::Lamport { - replica_id: undo.replica_id as text::ReplicaId, + replica_id: clock::ReplicaId::new(undo.replica_id as u16), value: undo.lamport_timestamp, }, version: version_from_wire(&undo.version), @@ -959,7 +963,7 @@ pub fn operation_from_wire(operation: proto::Operation) -> Option clock::Global { let mut version = clock::Global::new(); for entry in message { version.observe(clock::Lamport { - replica_id: entry.replica_id as text::ReplicaId, + replica_id: clock::ReplicaId::new(entry.replica_id as u16), value: entry.timestamp, }); } @@ -986,7 +990,7 @@ fn version_to_wire(version: &clock::Global) -> Vec { let mut message = Vec::new(); for entry in version.iter() { message.push(proto::VectorClockEntry { - replica_id: entry.replica_id as u32, + replica_id: entry.replica_id.as_u16() as u32, timestamp: entry.value, }); } diff --git a/crates/collab/src/db/queries/extensions.rs b/crates/collab/src/db/queries/extensions.rs index f218ff28507cf51a72cd0aa00a044ad75f64f839..b4dc4dd89d15fa1b80b561408f2bdc9a233094c0 100644 --- a/crates/collab/src/db/queries/extensions.rs +++ b/crates/collab/src/db/queries/extensions.rs @@ -255,7 +255,7 @@ impl Database { let insert = extension::Entity::insert(extension::ActiveModel { name: ActiveValue::Set(latest_version.name.clone()), - external_id: ActiveValue::Set(external_id.to_string()), + external_id: ActiveValue::Set((*external_id).to_owned()), id: ActiveValue::NotSet, latest_version: ActiveValue::Set(latest_version.version.to_string()), total_download_count: ActiveValue::NotSet, @@ -310,6 +310,9 @@ impl Database { .provides .contains(&ExtensionProvides::ContextServers), ), + provides_agent_servers: ActiveValue::Set( + version.provides.contains(&ExtensionProvides::AgentServers), + ), provides_slash_commands: ActiveValue::Set( version.provides.contains(&ExtensionProvides::SlashCommands), ), @@ -422,6 +425,10 @@ fn apply_provides_filter( condition = 
condition.add(extension_version::Column::ProvidesContextServers.eq(true)); } + if provides_filter.contains(&ExtensionProvides::AgentServers) { + condition = condition.add(extension_version::Column::ProvidesAgentServers.eq(true)); + } + if provides_filter.contains(&ExtensionProvides::SlashCommands) { condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true)); } diff --git a/crates/collab/src/db/queries/notifications.rs b/crates/collab/src/db/queries/notifications.rs index cc22ee99b53b8590ff5e95e2c7bf46b1cb8ba71e..e92c269b7e8324cce5a042a56fc9cb395127b959 100644 --- a/crates/collab/src/db/queries/notifications.rs +++ b/crates/collab/src/db/queries/notifications.rs @@ -17,7 +17,7 @@ impl Database { .any(|existing| existing.name == **kind) }) .map(|kind| notification_kind::ActiveModel { - name: ActiveValue::Set(kind.to_string()), + name: ActiveValue::Set((*kind).to_owned()), ..Default::default() }) .collect(); @@ -260,7 +260,7 @@ pub fn model_to_proto(this: &Database, row: notification::Model) -> Result>(); - let mut replica_id = ReplicaId(1); + let mut replica_id = ReplicaId(clock::ReplicaId::FIRST_COLLAB_ID.as_u16() as i32); while replica_ids.contains(&replica_id) { replica_id.0 += 1; } @@ -905,6 +911,7 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, + is_hidden: db_entry.is_hidden, // This is only used in the summarization backlog, so if it's None, // that just means we won't be able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 175361af351b1529d04f6a5d30b512bbcf7d7568..f020b99b5f1030cfe9391498512258e6db249bac 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -671,6 +671,7 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, + is_hidden: db_entry.is_hidden, // This is only used in the summarization backlog, so if it's None, // that just means we won't be able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go diff --git a/crates/collab/src/db/tables/extension_version.rs b/crates/collab/src/db/tables/extension_version.rs index 80726248713c66f0cd8cbdec0fa374f3e60d9868..5e71914ddb0dd60c75fa3a6b1b5ee86fe1b662b6 100644 --- a/crates/collab/src/db/tables/extension_version.rs +++ b/crates/collab/src/db/tables/extension_version.rs @@ -24,6 +24,7 @@ pub struct Model { pub provides_grammars: bool, pub provides_language_servers: bool, pub provides_context_servers: bool, + pub provides_agent_servers: bool, pub provides_slash_commands: bool, pub provides_indexed_docs_providers: bool, pub provides_snippets: bool, @@ -57,6 +58,10 @@ impl Model { provides.insert(ExtensionProvides::ContextServers); } + if self.provides_agent_servers { + provides.insert(ExtensionProvides::AgentServers); + } + if self.provides_slash_commands { provides.insert(ExtensionProvides::SlashCommands); } diff --git a/crates/collab/src/db/tables/worktree_entry.rs b/crates/collab/src/db/tables/worktree_entry.rs index d148c63a7f6d22ff9109b1316e7e1507d828492a..1a28203977d67502d5ca2449235b2d71b4bb21f6 100644 --- a/crates/collab/src/db/tables/worktree_entry.rs +++ b/crates/collab/src/db/tables/worktree_entry.rs @@ -19,6 +19,7 @@ pub struct Model { pub is_ignored: bool, pub is_external: bool, pub is_deleted: bool, + pub is_hidden: 
bool, pub scan_id: i64, pub is_fifo: bool, pub canonical_path: Option, diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 141262d5e94a4bf1d4d897e78f6281ab9ee3ccfc..318fbc4e76d907f4798c069400ba92574c06e789 100644 --- a/crates/collab/src/db/tests.rs +++ b/crates/collab/src/db/tests.rs @@ -196,7 +196,7 @@ fn channel_tree(channels: &[(ChannelId, &[ChannelId], &'static str)]) -> Vec) { .await .unwrap(); - let mut buffer_a = Buffer::new(0, text::BufferId::new(1).unwrap(), "".to_string()); + let mut buffer_a = Buffer::new( + ReplicaId::new(0), + text::BufferId::new(1).unwrap(), + "".to_string(), + ); let operations = vec![ buffer_a.edit([(0..0, "hello world")]), buffer_a.edit([(5..5, ", cruel")]), @@ -95,7 +99,7 @@ async fn test_channel_buffers(db: &Arc) { .unwrap(); let mut buffer_b = Buffer::new( - 0, + ReplicaId::new(0), text::BufferId::new(1).unwrap(), buffer_response_b.base_text, ); @@ -124,7 +128,7 @@ async fn test_channel_buffers(db: &Arc) { rpc::proto::Collaborator { user_id: a_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }), - replica_id: 0, + replica_id: ReplicaId::FIRST_COLLAB_ID.as_u16() as u32, is_host: false, committer_name: None, committer_email: None, @@ -132,7 +136,7 @@ async fn test_channel_buffers(db: &Arc) { rpc::proto::Collaborator { user_id: b_id.to_proto(), peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }), - replica_id: 1, + replica_id: ReplicaId::FIRST_COLLAB_ID.as_u16() as u32 + 1, is_host: false, committer_name: None, committer_email: None, @@ -228,7 +232,8 @@ async fn test_channel_buffers_last_operations(db: &Database) { .await .unwrap(); - db.join_channel_buffer(channel, user_id, connection_id) + let res = db + .join_channel_buffer(channel, user_id, connection_id) .await .unwrap(); @@ -239,7 +244,7 @@ async fn test_channel_buffers_last_operations(db: &Database) { ); text_buffers.push(Buffer::new( - 0, + ReplicaId::new(res.replica_id as u16), text::BufferId::new(1).unwrap(), "".to_string(), )); @@ -276,7 +281,12 @@ async fn test_channel_buffers_last_operations(db: &Database) { db.join_channel_buffer(buffers[1].channel_id, user_id, connection_id) .await .unwrap(); - text_buffers[1] = Buffer::new(1, text::BufferId::new(1).unwrap(), "def".to_string()); + let replica_id = text_buffers[1].replica_id(); + text_buffers[1] = Buffer::new( + replica_id, + text::BufferId::new(1).unwrap(), + "def".to_string(), + ); update_buffer( buffers[1].channel_id, user_id, @@ -304,20 +314,32 @@ async fn test_channel_buffers_last_operations(db: &Database) { rpc::proto::ChannelBufferVersion { channel_id: buffers[0].channel_id.to_proto(), epoch: 0, - version: serialize_version(&text_buffers[0].version()), + version: serialize_version(&text_buffers[0].version()) + .into_iter() + .filter( + |vector| vector.replica_id == text_buffers[0].replica_id().as_u16() as u32 + ) + .collect::>(), }, rpc::proto::ChannelBufferVersion { channel_id: buffers[1].channel_id.to_proto(), epoch: 1, version: serialize_version(&text_buffers[1].version()) .into_iter() - .filter(|vector| vector.replica_id == text_buffers[1].replica_id() as u32) + .filter( + |vector| vector.replica_id == text_buffers[1].replica_id().as_u16() as u32 + ) .collect::>(), }, rpc::proto::ChannelBufferVersion { channel_id: buffers[2].channel_id.to_proto(), epoch: 0, - version: serialize_version(&text_buffers[2].version()), + version: serialize_version(&text_buffers[2].version()) + .into_iter() + .filter( + |vector| vector.replica_id == text_buffers[2].replica_id().as_u16() as u32 + ) + 
.collect::>(), }, ] ); diff --git a/crates/collab/src/db/tests/extension_tests.rs b/crates/collab/src/db/tests/extension_tests.rs index 9396b405fd52c19255159453afccaff5447b4544..cb58f6af2a6559b8ca3bb4c19c694a263e73d878 100644 --- a/crates/collab/src/db/tests/extension_tests.rs +++ b/crates/collab/src/db/tests/extension_tests.rs @@ -16,6 +16,72 @@ test_both_dbs!( test_extensions_sqlite ); +test_both_dbs!( + test_agent_servers_filter, + test_agent_servers_filter_postgres, + test_agent_servers_filter_sqlite +); + +async fn test_agent_servers_filter(db: &Arc) { + // No extensions initially + let versions = db.get_known_extension_versions().await.unwrap(); + assert!(versions.is_empty()); + + // Shared timestamp + let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap(); + let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time()); + + // Insert two extensions, only one provides AgentServers + db.insert_extension_versions( + &[ + ( + "ext_agent_servers", + vec![NewExtensionVersion { + name: "Agent Servers Provider".into(), + version: semver::Version::parse("1.0.0").unwrap(), + description: "has agent servers".into(), + authors: vec!["author".into()], + repository: "org/agent-servers".into(), + schema_version: 1, + wasm_api_version: None, + provides: BTreeSet::from_iter([ExtensionProvides::AgentServers]), + published_at: t0, + }], + ), + ( + "ext_plain", + vec![NewExtensionVersion { + name: "Plain Extension".into(), + version: semver::Version::parse("0.1.0").unwrap(), + description: "no agent servers".into(), + authors: vec!["author2".into()], + repository: "org/plain".into(), + schema_version: 1, + wasm_api_version: None, + provides: BTreeSet::default(), + published_at: t0, + }], + ), + ] + .into_iter() + .collect(), + ) + .await + .unwrap(); + + // Filter by AgentServers provides + let provides_filter = BTreeSet::from_iter([ExtensionProvides::AgentServers]); + + let filtered = db + .get_extensions(None, Some(&provides_filter), 1, 10) + .await + .unwrap(); + + // Expect only the extension that declared AgentServers + assert_eq!(filtered.len(), 1); + assert_eq!(filtered[0].id.as_ref(), "ext_agent_servers"); +} + async fn test_extensions(db: &Arc) { let versions = db.get_known_extension_versions().await.unwrap(); assert!(versions.is_empty()); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index fa2ca6a890af93979eed759265286d99a5a98bb2..bfcab578f4b30357594cb460dfff53fd94d0ec05 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -343,11 +343,11 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) - .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) @@ -462,6 +462,8 @@ impl Server { .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + 
.add_request_handler(forward_mutating_project_request::)
             .add_request_handler(forward_mutating_project_request::)
             .add_request_handler(forward_mutating_project_request::)
             .add_request_handler(forward_mutating_project_request::)
diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs
index 729e7c8533460c0789d74040e883d48c8b94af92..417edd66d66d7479f42fb09b01c7a5d9f05a6223 100644
--- a/crates/collab/src/rpc/connection_pool.rs
+++ b/crates/collab/src/rpc/connection_pool.rs
@@ -30,9 +30,9 @@ impl fmt::Display for ZedVersion {
 impl ZedVersion {
     pub fn can_collaborate(&self) -> bool {
-        // v0.198.4 is the first version where we no longer connect to Collab automatically.
-        // We reject any clients older than that to prevent them from connecting to Collab just for authentication.
-        if self.0 < SemanticVersion::new(0, 198, 4) {
+        // v0.204.1 was the first version after the auto-update bug.
+        // We reject any clients older than that in the hope of persuading them to upgrade.
+        if self.0 < SemanticVersion::new(0, 204, 1) {
             return false;
         }
diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs
index 0614d66928710aeeda4a4a492508b92c5b4d35e0..f675cd3522b0f0e273db7528d62f31e37ceda794 100644
--- a/crates/collab/src/tests/editor_tests.rs
+++ b/crates/collab/src/tests/editor_tests.rs
@@ -4,7 +4,7 @@ use crate::{
 };
 use call::ActiveCall;
 use editor::{
-    DocumentColorsRenderMode, Editor, RowInfo, SelectionEffects,
+    DocumentColorsRenderMode, Editor, FETCH_COLORS_DEBOUNCE_TIMEOUT, RowInfo, SelectionEffects,
     actions::{
         ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
         ExpandMacroRecursively, MoveToEnd, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
@@ -505,7 +505,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
                 label: "third_method(…)".into(),
                 detail: Some("fn(&mut self, B, C, D) -> E".into()),
                 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
-                    // no snippet placehodlers
+                    // no snippet placeholders
                     new_text: "third_method".to_string(),
                     range: lsp::Range::new(
                         lsp::Position::new(1, 32),
@@ -877,7 +877,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
         6..9
     );
     rename.editor.update(cx, |rename_editor, cx| {
-        let rename_selection = rename_editor.selections.newest::(cx);
+        let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx));
         assert_eq!(
             rename_selection.range(),
             0..3,
@@ -924,7 +924,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
     let lsp_rename_end = rename.range.end.to_offset(&buffer);
     assert_eq!(lsp_rename_start..lsp_rename_end, 6..9);
     rename.editor.update(cx, |rename_editor, cx| {
-        let rename_selection = rename_editor.selections.newest::(cx);
+        let rename_selection = rename_editor.selections.newest::(&rename_editor.display_snapshot(cx));
         assert_eq!(
             rename_selection.range(),
             1..2,
@@ -1849,10 +1849,40 @@ async fn test_mutual_editor_inlay_hint_cache_update(
         ..lsp::ServerCapabilities::default()
     };
     client_a.language_registry().add(rust_lang());
+
+    // Set up the language server to return an additional inlay hint on each request.
+ let edits_made = Arc::new(AtomicUsize::new(0)); + let closure_edits_made = Arc::clone(&edits_made); let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: capabilities.clone(), + initializer: Some(Box::new(move |fake_language_server| { + let closure_edits_made = closure_edits_made.clone(); + fake_language_server.set_request_handler::( + move |params, _| { + let edits_made_2 = Arc::clone(&closure_edits_made); + async move { + assert_eq!( + params.text_document.uri, + lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), + ); + let edits_made = + AtomicUsize::load(&edits_made_2, atomic::Ordering::Acquire); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, edits_made as u32), + label: lsp::InlayHintLabel::String(edits_made.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + })), ..FakeLspAdapter::default() }, ); @@ -1894,61 +1924,20 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); - executor.start_waiting(); // The host opens a rust file. - let _buffer_a = project_a - .update(cx_a, |project, cx| { - project.open_local_buffer(path!("/a/main.rs"), cx) - }) - .await - .unwrap(); - let editor_a = workspace_a - .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - + let file_a = workspace_a.update_in(cx_a, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) + }); let fake_language_server = fake_language_servers.next().await.unwrap(); - - // Set up the language server to return an additional inlay hint on each request. 
- let edits_made = Arc::new(AtomicUsize::new(0)); - let closure_edits_made = Arc::clone(&edits_made); - fake_language_server - .set_request_handler::(move |params, _| { - let edits_made_2 = Arc::clone(&closure_edits_made); - async move { - assert_eq!( - params.text_document.uri, - lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), - ); - let edits_made = AtomicUsize::load(&edits_made_2, atomic::Ordering::Acquire); - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, edits_made as u32), - label: lsp::InlayHintLabel::String(edits_made.to_string()), - kind: None, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - } - }) - .next() - .await - .unwrap(); - + let editor_a = file_a.await.unwrap().downcast::().unwrap(); executor.run_until_parked(); let initial_edit = edits_made.load(atomic::Ordering::Acquire); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert_eq!( vec![initial_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Host should get its first hints when opens an editor" ); }); @@ -1963,10 +1952,10 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); executor.run_until_parked(); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec![initial_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Client should get its first hints when opens an editor" ); }); @@ -1981,16 +1970,16 @@ async fn test_mutual_editor_inlay_hint_cache_update( cx_b.focus(&editor_b); executor.run_until_parked(); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert_eq!( vec![after_client_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), ); }); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec![after_client_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), ); }); @@ -2004,16 +1993,16 @@ async fn test_mutual_editor_inlay_hint_cache_update( cx_a.focus(&editor_a); executor.run_until_parked(); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert_eq!( vec![after_host_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), ); }); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec![after_host_edit.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), ); }); @@ -2025,26 +2014,22 @@ async fn test_mutual_editor_inlay_hint_cache_update( .expect("inlay refresh request failed"); executor.run_until_parked(); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert_eq!( vec![after_special_edit_for_refresh.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Host should react to /refresh LSP request" ); }); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec![after_special_edit_for_refresh.to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Guest should get a /refresh LSP request propagated by host" ); }); } -// This test started hanging on seed 2 after the theme settings -// PR. The hypothesis is that it's been buggy for a while, but got lucky -// on seeds. 
-#[ignore] #[gpui::test(iterations = 10)] async fn test_inlay_hint_refresh_is_forwarded( cx_a: &mut TestAppContext, @@ -2206,18 +2191,18 @@ async fn test_inlay_hint_refresh_is_forwarded( executor.finish_waiting(); executor.run_until_parked(); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert!( - extract_hint_labels(editor).is_empty(), + extract_hint_labels(editor, cx).is_empty(), "Host should get no hints due to them turned off" ); }); executor.run_until_parked(); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec!["initial hint".to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Client should get its first hints when opens an editor" ); }); @@ -2229,18 +2214,18 @@ async fn test_inlay_hint_refresh_is_forwarded( .into_response() .expect("inlay refresh request failed"); executor.run_until_parked(); - editor_a.update(cx_a, |editor, _| { + editor_a.update(cx_a, |editor, cx| { assert!( - extract_hint_labels(editor).is_empty(), + extract_hint_labels(editor, cx).is_empty(), "Host should get no hints due to them turned off, even after the /refresh" ); }); executor.run_until_parked(); - editor_b.update(cx_b, |editor, _| { + editor_b.update(cx_b, |editor, cx| { assert_eq!( vec!["other hint".to_string()], - extract_hint_labels(editor), + extract_hint_labels(editor, cx), "Guest should get a /refresh LSP request propagated by host despite host hints are off" ); }); @@ -2409,6 +2394,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo .unwrap(); color_request_handle.next().await.unwrap(); + executor.advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT); executor.run_until_parked(); assert_eq!( @@ -2552,6 +2538,27 @@ async fn test_lsp_pull_diagnostics( cx_a.update(editor::init); cx_b.update(editor::init); + let expected_push_diagnostic_main_message = "pushed main diagnostic"; + let expected_push_diagnostic_lib_message = "pushed lib diagnostic"; + let expected_pull_diagnostic_main_message = "pulled main diagnostic"; + let expected_pull_diagnostic_lib_message = "pulled lib diagnostic"; + let expected_workspace_pull_diagnostics_main_message = "pulled workspace main diagnostic"; + let expected_workspace_pull_diagnostics_lib_message = "pulled workspace lib diagnostic"; + + let diagnostics_pulls_result_ids = Arc::new(Mutex::new(BTreeSet::>::new())); + let workspace_diagnostics_pulls_result_ids = Arc::new(Mutex::new(BTreeSet::::new())); + let diagnostics_pulls_made = Arc::new(AtomicUsize::new(0)); + let closure_diagnostics_pulls_made = diagnostics_pulls_made.clone(); + let closure_diagnostics_pulls_result_ids = diagnostics_pulls_result_ids.clone(); + let workspace_diagnostics_pulls_made = Arc::new(AtomicUsize::new(0)); + let closure_workspace_diagnostics_pulls_made = workspace_diagnostics_pulls_made.clone(); + let closure_workspace_diagnostics_pulls_result_ids = + workspace_diagnostics_pulls_result_ids.clone(); + let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) = + smol::channel::bounded::<()>(1); + let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) = + smol::channel::bounded::<()>(1); + let capabilities = lsp::ServerCapabilities { diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options( lsp::DiagnosticOptions { @@ -2566,13 +2573,195 @@ async fn test_lsp_pull_diagnostics( ..lsp::ServerCapabilities::default() }; client_a.language_registry().add(rust_lang()); + + let pull_diagnostics_handle = 
Arc::new(parking_lot::Mutex::new(None)); + let workspace_diagnostics_pulls_handle = Arc::new(parking_lot::Mutex::new(None)); + + let closure_pull_diagnostics_handle = pull_diagnostics_handle.clone(); + let closure_workspace_diagnostics_pulls_handle = workspace_diagnostics_pulls_handle.clone(); let mut fake_language_servers = client_a.language_registry().register_fake_lsp( "Rust", FakeLspAdapter { capabilities: capabilities.clone(), + initializer: Some(Box::new(move |fake_language_server| { + let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!( + "workspace/diagnostic-{}-1", + fake_language_server.server.server_id() + )); + let closure_workspace_diagnostics_pulls_result_ids = closure_workspace_diagnostics_pulls_result_ids.clone(); + let diagnostics_pulls_made = closure_diagnostics_pulls_made.clone(); + let diagnostics_pulls_result_ids = closure_diagnostics_pulls_result_ids.clone(); + let closure_pull_diagnostics_handle = closure_pull_diagnostics_handle.clone(); + let closure_workspace_diagnostics_pulls_handle = closure_workspace_diagnostics_pulls_handle.clone(); + let closure_workspace_diagnostic_cancel_rx = closure_workspace_diagnostic_cancel_rx.clone(); + let closure_workspace_diagnostic_received_tx = closure_workspace_diagnostic_received_tx.clone(); + let pull_diagnostics_handle = fake_language_server + .set_request_handler::( + move |params, _| { + let requests_made = diagnostics_pulls_made.clone(); + let diagnostics_pulls_result_ids = + diagnostics_pulls_result_ids.clone(); + async move { + let message = if lsp::Uri::from_file_path(path!("/a/main.rs")) + .unwrap() + == params.text_document.uri + { + expected_pull_diagnostic_main_message.to_string() + } else if lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap() + == params.text_document.uri + { + expected_pull_diagnostic_lib_message.to_string() + } else { + panic!("Unexpected document: {}", params.text_document.uri) + }; + { + diagnostics_pulls_result_ids + .lock() + .await + .insert(params.previous_result_id); + } + let new_requests_count = + requests_made.fetch_add(1, atomic::Ordering::Release) + 1; + Ok(lsp::DocumentDiagnosticReportResult::Report( + lsp::DocumentDiagnosticReport::Full( + lsp::RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: + lsp::FullDocumentDiagnosticReport { + result_id: Some(format!( + "pull-{new_requests_count}" + )), + items: vec![lsp::Diagnostic { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 0, + }, + end: lsp::Position { + line: 0, + character: 2, + }, + }, + severity: Some( + lsp::DiagnosticSeverity::ERROR, + ), + message, + ..lsp::Diagnostic::default() + }], + }, + }, + ), + )) + } + }, + ); + let _ = closure_pull_diagnostics_handle.lock().insert(pull_diagnostics_handle); + + let closure_workspace_diagnostics_pulls_made = closure_workspace_diagnostics_pulls_made.clone(); + let workspace_diagnostics_pulls_handle = fake_language_server.set_request_handler::( + move |params, _| { + let workspace_requests_made = closure_workspace_diagnostics_pulls_made.clone(); + let workspace_diagnostics_pulls_result_ids = + closure_workspace_diagnostics_pulls_result_ids.clone(); + let workspace_diagnostic_cancel_rx = closure_workspace_diagnostic_cancel_rx.clone(); + let workspace_diagnostic_received_tx = closure_workspace_diagnostic_received_tx.clone(); + let expected_workspace_diagnostic_token = expected_workspace_diagnostic_token.clone(); + async move { + let workspace_request_count = + 
workspace_requests_made.fetch_add(1, atomic::Ordering::Release) + 1; + { + workspace_diagnostics_pulls_result_ids + .lock() + .await + .extend(params.previous_result_ids.into_iter().map(|id| id.value)); + } + if should_stream_workspace_diagnostic && !workspace_diagnostic_cancel_rx.is_closed() + { + assert_eq!( + params.partial_result_params.partial_result_token, + Some(expected_workspace_diagnostic_token) + ); + workspace_diagnostic_received_tx.send(()).await.unwrap(); + workspace_diagnostic_cancel_rx.recv().await.unwrap(); + workspace_diagnostic_cancel_rx.close(); + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#partialResults + // > The final response has to be empty in terms of result values. + return Ok(lsp::WorkspaceDiagnosticReportResult::Report( + lsp::WorkspaceDiagnosticReport { items: Vec::new() }, + )); + } + Ok(lsp::WorkspaceDiagnosticReportResult::Report( + lsp::WorkspaceDiagnosticReport { + items: vec![ + lsp::WorkspaceDocumentDiagnosticReport::Full( + lsp::WorkspaceFullDocumentDiagnosticReport { + uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), + version: None, + full_document_diagnostic_report: + lsp::FullDocumentDiagnosticReport { + result_id: Some(format!( + "workspace_{workspace_request_count}" + )), + items: vec![lsp::Diagnostic { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 1, + }, + end: lsp::Position { + line: 0, + character: 3, + }, + }, + severity: Some(lsp::DiagnosticSeverity::WARNING), + message: + expected_workspace_pull_diagnostics_main_message + .to_string(), + ..lsp::Diagnostic::default() + }], + }, + }, + ), + lsp::WorkspaceDocumentDiagnosticReport::Full( + lsp::WorkspaceFullDocumentDiagnosticReport { + uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), + version: None, + full_document_diagnostic_report: + lsp::FullDocumentDiagnosticReport { + result_id: Some(format!( + "workspace_{workspace_request_count}" + )), + items: vec![lsp::Diagnostic { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 1, + }, + end: lsp::Position { + line: 0, + character: 3, + }, + }, + severity: Some(lsp::DiagnosticSeverity::WARNING), + message: + expected_workspace_pull_diagnostics_lib_message + .to_string(), + ..lsp::Diagnostic::default() + }], + }, + }, + ), + ], + }, + )) + } + }); + let _ = closure_workspace_diagnostics_pulls_handle.lock().insert(workspace_diagnostics_pulls_handle); + })), ..FakeLspAdapter::default() }, ); + client_b.language_registry().add(rust_lang()); client_b.language_registry().register_fake_lsp_adapter( "Rust", @@ -2630,183 +2819,15 @@ async fn test_lsp_pull_diagnostics( .unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap(); - cx_a.run_until_parked(); - cx_b.run_until_parked(); - let expected_push_diagnostic_main_message = "pushed main diagnostic"; - let expected_push_diagnostic_lib_message = "pushed lib diagnostic"; - let expected_pull_diagnostic_main_message = "pulled main diagnostic"; - let expected_pull_diagnostic_lib_message = "pulled lib diagnostic"; - let expected_workspace_pull_diagnostics_main_message = "pulled workspace main diagnostic"; - let expected_workspace_pull_diagnostics_lib_message = "pulled workspace lib diagnostic"; - - let diagnostics_pulls_result_ids = Arc::new(Mutex::new(BTreeSet::>::new())); - let workspace_diagnostics_pulls_result_ids = Arc::new(Mutex::new(BTreeSet::::new())); - let diagnostics_pulls_made = Arc::new(AtomicUsize::new(0)); - let closure_diagnostics_pulls_made = 
diagnostics_pulls_made.clone(); - let closure_diagnostics_pulls_result_ids = diagnostics_pulls_result_ids.clone(); - let mut pull_diagnostics_handle = fake_language_server - .set_request_handler::(move |params, _| { - let requests_made = closure_diagnostics_pulls_made.clone(); - let diagnostics_pulls_result_ids = closure_diagnostics_pulls_result_ids.clone(); - async move { - let message = if lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() - == params.text_document.uri - { - expected_pull_diagnostic_main_message.to_string() - } else if lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap() - == params.text_document.uri - { - expected_pull_diagnostic_lib_message.to_string() - } else { - panic!("Unexpected document: {}", params.text_document.uri) - }; - { - diagnostics_pulls_result_ids - .lock() - .await - .insert(params.previous_result_id); - } - let new_requests_count = requests_made.fetch_add(1, atomic::Ordering::Release) + 1; - Ok(lsp::DocumentDiagnosticReportResult::Report( - lsp::DocumentDiagnosticReport::Full(lsp::RelatedFullDocumentDiagnosticReport { - related_documents: None, - full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport { - result_id: Some(format!("pull-{new_requests_count}")), - items: vec![lsp::Diagnostic { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 0, - }, - end: lsp::Position { - line: 0, - character: 2, - }, - }, - severity: Some(lsp::DiagnosticSeverity::ERROR), - message, - ..lsp::Diagnostic::default() - }], - }, - }), - )) - } - }); - - let workspace_diagnostics_pulls_made = Arc::new(AtomicUsize::new(0)); - let closure_workspace_diagnostics_pulls_made = workspace_diagnostics_pulls_made.clone(); - let closure_workspace_diagnostics_pulls_result_ids = - workspace_diagnostics_pulls_result_ids.clone(); - let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) = - smol::channel::bounded::<()>(1); - let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) = - smol::channel::bounded::<()>(1); let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!( "workspace/diagnostic-{}-1", fake_language_server.server.server_id() )); - let closure_expected_workspace_diagnostic_token = expected_workspace_diagnostic_token.clone(); - let mut workspace_diagnostics_pulls_handle = fake_language_server - .set_request_handler::( - move |params, _| { - let workspace_requests_made = closure_workspace_diagnostics_pulls_made.clone(); - let workspace_diagnostics_pulls_result_ids = - closure_workspace_diagnostics_pulls_result_ids.clone(); - let workspace_diagnostic_cancel_rx = closure_workspace_diagnostic_cancel_rx.clone(); - let workspace_diagnostic_received_tx = closure_workspace_diagnostic_received_tx.clone(); - let expected_workspace_diagnostic_token = - closure_expected_workspace_diagnostic_token.clone(); - async move { - let workspace_request_count = - workspace_requests_made.fetch_add(1, atomic::Ordering::Release) + 1; - { - workspace_diagnostics_pulls_result_ids - .lock() - .await - .extend(params.previous_result_ids.into_iter().map(|id| id.value)); - } - if should_stream_workspace_diagnostic && !workspace_diagnostic_cancel_rx.is_closed() - { - assert_eq!( - params.partial_result_params.partial_result_token, - Some(expected_workspace_diagnostic_token) - ); - workspace_diagnostic_received_tx.send(()).await.unwrap(); - workspace_diagnostic_cancel_rx.recv().await.unwrap(); - workspace_diagnostic_cancel_rx.close(); - // 
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#partialResults - // > The final response has to be empty in terms of result values. - return Ok(lsp::WorkspaceDiagnosticReportResult::Report( - lsp::WorkspaceDiagnosticReport { items: Vec::new() }, - )); - } - Ok(lsp::WorkspaceDiagnosticReportResult::Report( - lsp::WorkspaceDiagnosticReport { - items: vec![ - lsp::WorkspaceDocumentDiagnosticReport::Full( - lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(), - version: None, - full_document_diagnostic_report: - lsp::FullDocumentDiagnosticReport { - result_id: Some(format!( - "workspace_{workspace_request_count}" - )), - items: vec![lsp::Diagnostic { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 1, - }, - end: lsp::Position { - line: 0, - character: 3, - }, - }, - severity: Some(lsp::DiagnosticSeverity::WARNING), - message: - expected_workspace_pull_diagnostics_main_message - .to_string(), - ..lsp::Diagnostic::default() - }], - }, - }, - ), - lsp::WorkspaceDocumentDiagnosticReport::Full( - lsp::WorkspaceFullDocumentDiagnosticReport { - uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(), - version: None, - full_document_diagnostic_report: - lsp::FullDocumentDiagnosticReport { - result_id: Some(format!( - "workspace_{workspace_request_count}" - )), - items: vec![lsp::Diagnostic { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 1, - }, - end: lsp::Position { - line: 0, - character: 3, - }, - }, - severity: Some(lsp::DiagnosticSeverity::WARNING), - message: - expected_workspace_pull_diagnostics_lib_message - .to_string(), - ..lsp::Diagnostic::default() - }], - }, - }, - ), - ], - }, - )) - } - }, - ); + cx_a.run_until_parked(); + cx_b.run_until_parked(); + let mut pull_diagnostics_handle = pull_diagnostics_handle.lock().take().unwrap(); + let mut workspace_diagnostics_pulls_handle = + workspace_diagnostics_pulls_handle.lock().take().unwrap(); if should_stream_workspace_diagnostic { workspace_diagnostic_received_rx.recv().await.unwrap(); @@ -4181,15 +4202,35 @@ fn tab_undo_assert( cx_b.assert_editor_state(expected_initial); } -fn extract_hint_labels(editor: &Editor) -> Vec { - let mut labels = Vec::new(); - for hint in editor.inlay_hint_cache().hints() { - match hint.label { - project::InlayHintLabel::String(s) => labels.push(s), - _ => unreachable!(), - } +fn extract_hint_labels(editor: &Editor, cx: &mut App) -> Vec { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + + let mut all_cached_labels = Vec::new(); + let mut all_fetched_hints = Vec::new(); + for buffer in editor.buffer().read(cx).all_buffers() { + lsp_store.update(cx, |lsp_store, cx| { + let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); + all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| { + let mut label = hint.text().to_string(); + if hint.padding_left { + label.insert(0, ' '); + } + if hint.padding_right { + label.push_str(" "); + } + label + })); + all_fetched_hints.extend(hints.all_fetched_hints()); + }); } - labels + + assert!( + all_fetched_hints.is_empty(), + "Did not expect background hints fetch tasks, but got {} of them", + all_fetched_hints.len() + ); + + all_cached_labels } #[track_caller] diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 6f4a819f440929a9cc004cc018169420f758d264..ab72ce3605b7c93bac05dc6321b44b7abb964d93 100644 --- 
a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -122,13 +122,19 @@ async fn test_basic_following( editor.handle_input("b", window, cx); editor.handle_input("c", window, cx); editor.select_left(&Default::default(), window, cx); - assert_eq!(editor.selections.ranges(cx), vec![3..2]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![3..2] + ); }); editor_a2.update_in(cx_a, |editor, window, cx| { editor.handle_input("d", window, cx); editor.handle_input("e", window, cx); editor.select_left(&Default::default(), window, cx); - assert_eq!(editor.selections.ranges(cx), vec![2..1]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![2..1] + ); }); // When client B starts following client A, only the active view state is replicated to client B. @@ -149,11 +155,15 @@ async fn test_basic_following( Some((worktree_id, rel_path("2.txt")).into()) ); assert_eq!( - editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + editor_b2.update(cx_b, |editor, cx| editor + .selections + .ranges(&editor.display_snapshot(cx))), vec![2..1] ); assert_eq!( - editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + editor_b1.update(cx_b, |editor, cx| editor + .selections + .ranges(&editor.display_snapshot(cx))), vec![3..3] ); @@ -384,7 +394,10 @@ async fn test_basic_following( cx_b.background_executor.run_until_parked(); editor_b1.update(cx_b, |editor, cx| { - assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + &[1..1, 2..2] + ); }); editor_a1.update_in(cx_a, |editor, window, cx| { @@ -402,7 +415,10 @@ async fn test_basic_following( executor.advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); executor.run_until_parked(); editor_b1.update(cx_b, |editor, cx| { - assert_eq!(editor.selections.ranges(cx), &[3..3]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + &[3..3] + ); }); // After unfollowing, client B stops receiving updates from client A. 
@@ -1679,7 +1695,10 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T .advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); cx_a.run_until_parked(); editor_b.update(cx_b, |editor, cx| { - assert_eq!(editor.selections.ranges(cx), vec![1..1]) + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![1..1] + ) }); // a unshares the project @@ -1701,7 +1720,10 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T .advance_clock(workspace::item::LEADER_UPDATE_THROTTLE); cx_a.run_until_parked(); editor_b.update(cx_b, |editor, cx| { - assert_eq!(editor.selections.ranges(cx), vec![1..1]) + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![1..1] + ) }); cx_b.update(|_, cx| { let room = ActiveCall::global(cx).read(cx).room().unwrap().read(cx); @@ -1799,13 +1821,19 @@ async fn test_following_into_excluded_file( editor.handle_input("b", window, cx); editor.handle_input("c", window, cx); editor.select_left(&Default::default(), window, cx); - assert_eq!(editor.selections.ranges(cx), vec![3..2]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![3..2] + ); }); editor_for_excluded_a.update_in(cx_a, |editor, window, cx| { editor.select_all(&Default::default(), window, cx); editor.handle_input("new commit message", window, cx); editor.select_left(&Default::default(), window, cx); - assert_eq!(editor.selections.ranges(cx), vec![18..17]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![18..17] + ); }); // When client B starts following client A, currently visible file is replicated @@ -1827,7 +1855,9 @@ async fn test_following_into_excluded_file( Some((worktree_id, rel_path(".git/COMMIT_EDITMSG")).into()) ); assert_eq!( - editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)), + editor_for_excluded_b.update(cx_b, |editor, cx| editor + .selections + .ranges(&editor.display_snapshot(cx))), vec![18..17] ); @@ -2037,7 +2067,12 @@ async fn test_following_to_channel_notes_without_a_shared_project( assert_eq!(notes.channel(cx).unwrap().name, "channel-1"); notes.editor.update(cx, |editor, cx| { assert_eq!(editor.text(cx), "Hello from A."); - assert_eq!(editor.selections.ranges::(cx), &[3..4]); + assert_eq!( + editor + .selections + .ranges::(&editor.display_snapshot(cx)), + &[3..4] + ); }) }); diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index f6a106b7db5c77ba8e98b307cc3e562766fb4dd4..30396ab90290b692537b974fde8308287079e50f 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -6,8 +6,8 @@ use crate::{ }, }; use anyhow::{Result, anyhow}; -use assistant_context::ContextStore; use assistant_slash_command::SlashCommandWorkingSet; +use assistant_text_thread::TextThreadStore; use buffer_diff::{DiffHunkSecondaryStatus, DiffHunkStatus, assert_hunks}; use call::{ActiveCall, ParticipantLocation, Room, room}; use client::{RECEIVE_TIMEOUT, User}; @@ -25,7 +25,7 @@ use gpui::{ use language::{ Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, - language_settings::{Formatter, FormatterList, SelectedFormatter}, + language_settings::{Formatter, FormatterList}, tree_sitter_rust, tree_sitter_typescript, }; use lsp::{LanguageServerId, OneOf}; @@ -39,7 +39,7 @@ use project::{ use prompt_store::PromptBuilder; use 
rand::prelude::*; use serde_json::json; -use settings::{PrettierSettingsContent, SettingsStore}; +use settings::{LanguageServerFormatterSpecifier, PrettierSettingsContent, SettingsStore}; use std::{ cell::{Cell, RefCell}, env, future, mem, @@ -4610,14 +4610,13 @@ async fn test_formatting_buffer( cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List( - FormatterList::Single(Formatter::External { + file.project.all_languages.defaults.formatter = + Some(FormatterList::Single(Formatter::External { command: "awk".into(), arguments: Some( vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(), ), - }), - )); + })); }); }); }); @@ -4708,7 +4707,7 @@ async fn test_prettier_formatting_buffer( cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto); + file.project.all_languages.defaults.formatter = Some(FormatterList::default()); file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent { allowed: Some(true), ..Default::default() @@ -4719,8 +4718,8 @@ async fn test_prettier_formatting_buffer( cx_b.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List( - FormatterList::Single(Formatter::LanguageServer { name: None }), + file.project.all_languages.defaults.formatter = Some(FormatterList::Single( + Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current), )); file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent { allowed: Some(true), @@ -6878,9 +6877,9 @@ async fn test_context_collaboration_with_reconnect( }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); - let context_store_a = cx_a + let text_thread_store_a = cx_a .update(|cx| { - ContextStore::new( + TextThreadStore::new( project_a.clone(), prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), @@ -6889,9 +6888,9 @@ async fn test_context_collaboration_with_reconnect( }) .await .unwrap(); - let context_store_b = cx_b + let text_thread_store_b = cx_b .update(|cx| { - ContextStore::new( + TextThreadStore::new( project_b.clone(), prompt_builder.clone(), Arc::new(SlashCommandWorkingSet::default()), @@ -6902,60 +6901,60 @@ async fn test_context_collaboration_with_reconnect( .unwrap(); // Client A creates a new chats. - let context_a = context_store_a.update(cx_a, |store, cx| store.create(cx)); + let text_thread_a = text_thread_store_a.update(cx_a, |store, cx| store.create(cx)); executor.run_until_parked(); // Client B retrieves host's contexts and joins one. 
- let context_b = context_store_b + let text_thread_b = text_thread_store_b .update(cx_b, |store, cx| { - let host_contexts = store.host_contexts().to_vec(); - assert_eq!(host_contexts.len(), 1); - store.open_remote_context(host_contexts[0].id.clone(), cx) + let host_text_threads = store.host_text_threads().collect::>(); + assert_eq!(host_text_threads.len(), 1); + store.open_remote(host_text_threads[0].id.clone(), cx) }) .await .unwrap(); // Host and guest make changes - context_a.update(cx_a, |context, cx| { - context.buffer().update(cx, |buffer, cx| { + text_thread_a.update(cx_a, |text_thread, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(0..0, "Host change\n")], None, cx) }) }); - context_b.update(cx_b, |context, cx| { - context.buffer().update(cx, |buffer, cx| { + text_thread_b.update(cx_b, |text_thread, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(0..0, "Guest change\n")], None, cx) }) }); executor.run_until_parked(); assert_eq!( - context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()), + text_thread_a.read_with(cx_a, |text_thread, cx| text_thread.buffer().read(cx).text()), "Guest change\nHost change\n" ); assert_eq!( - context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()), + text_thread_b.read_with(cx_b, |text_thread, cx| text_thread.buffer().read(cx).text()), "Guest change\nHost change\n" ); // Disconnect client A and make some changes while disconnected. server.disconnect_client(client_a.peer_id().unwrap()); server.forbid_connections(); - context_a.update(cx_a, |context, cx| { - context.buffer().update(cx, |buffer, cx| { + text_thread_a.update(cx_a, |text_thread, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(0..0, "Host offline change\n")], None, cx) }) }); - context_b.update(cx_b, |context, cx| { - context.buffer().update(cx, |buffer, cx| { + text_thread_b.update(cx_b, |text_thread, cx| { + text_thread.buffer().update(cx, |buffer, cx| { buffer.edit([(0..0, "Guest offline change\n")], None, cx) }) }); executor.run_until_parked(); assert_eq!( - context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()), + text_thread_a.read_with(cx_a, |text_thread, cx| text_thread.buffer().read(cx).text()), "Host offline change\nGuest change\nHost change\n" ); assert_eq!( - context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()), + text_thread_b.read_with(cx_b, |text_thread, cx| text_thread.buffer().read(cx).text()), "Guest offline change\nGuest change\nHost change\n" ); @@ -6963,11 +6962,11 @@ async fn test_context_collaboration_with_reconnect( server.allow_connections(); executor.advance_clock(RECEIVE_TIMEOUT); assert_eq!( - context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()), + text_thread_a.read_with(cx_a, |text_thread, cx| text_thread.buffer().read(cx).text()), "Guest offline change\nHost offline change\nGuest change\nHost change\n" ); assert_eq!( - context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()), + text_thread_b.read_with(cx_b, |text_thread, cx| text_thread.buffer().read(cx).text()), "Guest offline change\nHost offline change\nGuest change\nHost change\n" ); @@ -6975,8 +6974,8 @@ async fn test_context_collaboration_with_reconnect( server.forbid_connections(); server.disconnect_client(client_a.peer_id().unwrap()); executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); - context_b.read_with(cx_b, |context, cx| { - assert!(context.buffer().read(cx).read_only()); + text_thread_b.read_with(cx_b, 
|text_thread, cx| { + assert!(text_thread.buffer().read(cx).read_only()); }); } diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 84ee9a33906b976a68da5da7b81c1e89c96190b1..7fb76ffd16dda9f556a22eeefd78c72f80dc1457 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -14,7 +14,7 @@ use gpui::{ use http_client::BlockedHttpClient; use language::{ FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, - language_settings::{Formatter, FormatterList, SelectedFormatter, language_settings}, + language_settings::{Formatter, FormatterList, language_settings}, tree_sitter_typescript, }; use node_runtime::NodeRuntime; @@ -27,7 +27,7 @@ use remote::RemoteClient; use remote_server::{HeadlessAppState, HeadlessProject}; use rpc::proto; use serde_json::json; -use settings::{PrettierSettingsContent, SettingsStore}; +use settings::{LanguageServerFormatterSpecifier, PrettierSettingsContent, SettingsStore}; use std::{ path::Path, sync::{Arc, atomic::AtomicUsize}, @@ -491,7 +491,7 @@ async fn test_ssh_collaboration_formatting_with_prettier( cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto); + file.project.all_languages.defaults.formatter = Some(FormatterList::default()); file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent { allowed: Some(true), ..Default::default() @@ -502,8 +502,8 @@ async fn test_ssh_collaboration_formatting_with_prettier( cx_b.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List( - FormatterList::Single(Formatter::LanguageServer { name: None }), + file.project.all_languages.defaults.formatter = Some(FormatterList::Single( + Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current), )); file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent { allowed: Some(true), @@ -550,7 +550,7 @@ async fn test_ssh_collaboration_formatting_with_prettier( cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings(cx, |file| { - file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto); + file.project.all_languages.defaults.formatter = Some(FormatterList::default()); file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent { allowed: Some(true), ..Default::default() diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 528253f0dc2e9d4dc8b88a7d8d8c2926be2b2652..fbff269494f3f1ae5fb48d124ad090e61a558f31 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -358,7 +358,7 @@ impl TestServer { settings::KeymapFile::load_asset_allow_partial_failure(os_keymap, cx).unwrap(), ); language_model::LanguageModelRegistry::test(cx); - assistant_context::init(client.clone(), cx); + assistant_text_thread::init(client.clone(), cx); agent_settings::init(cx); }); diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml index 24202445a79b5c906d4f6fe1f1a633422f24772a..4abeb1324c28f73585dea4c60fe185ca7b2317ad 100644 --- a/crates/collab_ui/Cargo.toml +++ b/crates/collab_ui/Cargo.toml @@ -60,7 +60,6 @@ title_bar.workspace = true ui.workspace 
= true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] call = { workspace = true, features = ["test-support"] } diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index e37abbbccdbcdb7335b45b3fbe01d8797541e336..4e4bd2ca958d20225a7188b1f7f601e879e22835 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -287,9 +287,12 @@ impl ChannelView { } fn copy_link(&mut self, _: &CopyLink, window: &mut Window, cx: &mut Context) { - let position = self - .editor - .update(cx, |editor, cx| editor.selections.newest_display(cx).start); + let position = self.editor.update(cx, |editor, cx| { + editor + .selections + .newest_display(&editor.display_snapshot(cx)) + .start + }); self.copy_link_for_position(position, window, cx) } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index f42c12ac57826625ceff82158da3ebd90b9e2452..bfbf9721fab6df79ddd97810fa5b1d70ee701866 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2250,7 +2250,7 @@ impl CollabPanel { })), ) .child( - div().flex().w_full().items_center().child( + v_flex().w_full().items_center().child( Label::new("Sign in to enable collaboration.") .color(Color::Muted) .size(LabelSize::Small), @@ -3037,6 +3037,10 @@ impl Panel for CollabPanel { "CollabPanel" } + fn panel_key() -> &'static str { + COLLABORATION_PANEL_KEY + } + fn activation_priority(&self) -> u32 { 6 } diff --git a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs index 3d988c4634ded9bd2c94d8a75886cf452e64eacb..99203bc867ff7da9e140bc4a886e291252a5153d 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -612,6 +612,10 @@ impl Panel for NotificationPanel { "NotificationPanel" } + fn panel_key() -> &'static str { + NOTIFICATION_PANEL_KEY + } + fn position(&self, _: &Window, cx: &App) -> DockPosition { NotificationPanelSettings::get_global(cx).dock } @@ -734,19 +738,17 @@ impl Render for NotificationToast { .on_modifiers_changed(cx.listener(|_, _, _, cx| cx.notify())) .child( IconButton::new(close_id, close_icon) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if suppress { Tooltip::for_action( "Suppress.\nClose with click.", &workspace::SuppressNotification, - window, cx, ) } else { Tooltip::for_action( "Close.\nSuppress with shift-click", &menu::Cancel, - window, cx, ) } diff --git a/crates/collections/Cargo.toml b/crates/collections/Cargo.toml index c0a6dd833851c290e276ce108106ca8ce1a793e5..8675504347f171397ea7372841cb00b7959eafe3 100644 --- a/crates/collections/Cargo.toml +++ b/crates/collections/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-collections" +name = "collections" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "Standard collection type re-exports used by Zed and GPUI" @@ -19,4 +19,3 @@ test-support = [] [dependencies] indexmap.workspace = true rustc-hash.workspace = true -workspace-hack.workspace = true diff --git a/crates/command_palette/Cargo.toml b/crates/command_palette/Cargo.toml index c97d1421528325b107186a9158e57da277c97bb3..6e8d2bb5ae7ce079296b061a0c00616191b4382a 100644 --- a/crates/command_palette/Cargo.toml +++ b/crates/command_palette/Cargo.toml @@ -32,7 +32,6 @@ util.workspace = true telemetry.workspace = true workspace.workspace = true zed_actions.workspace = true 
-workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index 227d246f04cecf8a5c58f2361d0b543ff678eac6..4b883d890b3ca5b54459bd0ead3322acfe5b6f41 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -9,7 +9,8 @@ use std::{ use client::parse_zed_link; use command_palette_hooks::{ - CommandInterceptResult, CommandPaletteFilter, CommandPaletteInterceptor, + CommandInterceptItem, CommandInterceptResult, CommandPaletteFilter, + GlobalCommandPaletteInterceptor, }; use fuzzy::{StringMatch, StringMatchCandidate}; @@ -81,14 +82,17 @@ impl CommandPalette { let Some(previous_focus_handle) = window.focused(cx) else { return; }; + + let entity = cx.weak_entity(); workspace.toggle_modal(window, cx, move |window, cx| { - CommandPalette::new(previous_focus_handle, query, window, cx) + CommandPalette::new(previous_focus_handle, query, entity, window, cx) }); } fn new( previous_focus_handle: FocusHandle, query: &str, + entity: WeakEntity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -109,8 +113,12 @@ impl CommandPalette { }) .collect(); - let delegate = - CommandPaletteDelegate::new(cx.entity().downgrade(), commands, previous_focus_handle); + let delegate = CommandPaletteDelegate::new( + cx.entity().downgrade(), + entity, + commands, + previous_focus_handle, + ); let picker = cx.new(|cx| { let picker = Picker::uniform_list(delegate, window, cx); @@ -146,6 +154,7 @@ impl Render for CommandPalette { pub struct CommandPaletteDelegate { latest_query: String, command_palette: WeakEntity, + workspace: WeakEntity, all_commands: Vec, commands: Vec, matches: Vec, @@ -153,7 +162,7 @@ pub struct CommandPaletteDelegate { previous_focus_handle: FocusHandle, updating_matches: Option<( Task<()>, - postage::dispatch::Receiver<(Vec, Vec)>, + postage::dispatch::Receiver<(Vec, Vec, CommandInterceptResult)>, )>, } @@ -174,11 +183,13 @@ impl Clone for Command { impl CommandPaletteDelegate { fn new( command_palette: WeakEntity, + workspace: WeakEntity, commands: Vec, previous_focus_handle: FocusHandle, ) -> Self { Self { command_palette, + workspace, all_commands: commands.clone(), matches: vec![], commands, @@ -194,30 +205,19 @@ impl CommandPaletteDelegate { query: String, mut commands: Vec, mut matches: Vec, - cx: &mut Context>, + intercept_result: CommandInterceptResult, + _: &mut Context>, ) { self.updating_matches.take(); - self.latest_query = query.clone(); - - let mut intercept_results = CommandPaletteInterceptor::try_global(cx) - .map(|interceptor| interceptor.intercept(&query, cx)) - .unwrap_or_default(); - - if parse_zed_link(&query, cx).is_some() { - intercept_results = vec![CommandInterceptResult { - action: OpenZedUrl { url: query.clone() }.boxed_clone(), - string: query, - positions: vec![], - }] - } + self.latest_query = query; let mut new_matches = Vec::new(); - for CommandInterceptResult { + for CommandInterceptItem { action, string, positions, - } in intercept_results + } in intercept_result.results { if let Some(idx) = matches .iter() @@ -236,7 +236,9 @@ impl CommandPaletteDelegate { score: 0.0, }) } - new_matches.append(&mut matches); + if !intercept_result.exclusive { + new_matches.append(&mut matches); + } self.commands = commands; self.matches = new_matches; if self.matches.is_empty() { @@ -295,12 +297,22 @@ impl PickerDelegate for CommandPaletteDelegate { if let Some(alias) = settings.command_aliases.get(&query) { 
query = alias.to_string(); } + + let workspace = self.workspace.clone(); + + let intercept_task = GlobalCommandPaletteInterceptor::intercept(&query, workspace, cx); + let (mut tx, mut rx) = postage::dispatch::channel(1); + + let query_str = query.as_str(); + let is_zed_link = parse_zed_link(query_str, cx).is_some(); + let task = cx.background_spawn({ let mut commands = self.all_commands.clone(); let hit_counts = self.hit_counts(); let executor = cx.background_executor().clone(); - let query = normalize_action_query(query.as_str()); + let query = normalize_action_query(query_str); + let query_for_link = query_str.to_string(); async move { commands.sort_by_key(|action| { ( @@ -326,13 +338,34 @@ impl PickerDelegate for CommandPaletteDelegate { ) .await; - tx.send((commands, matches)).await.log_err(); + let intercept_result = if is_zed_link { + CommandInterceptResult { + results: vec![CommandInterceptItem { + action: OpenZedUrl { + url: query_for_link.clone(), + } + .boxed_clone(), + string: query_for_link, + positions: vec![], + }], + exclusive: false, + } + } else if let Some(task) = intercept_task { + task.await + } else { + CommandInterceptResult::default() + }; + + tx.send((commands, matches, intercept_result)) + .await + .log_err(); } }); + self.updating_matches = Some((task, rx.clone())); cx.spawn_in(window, async move |picker, cx| { - let Some((commands, matches)) = rx.recv().await else { + let Some((commands, matches, intercept_result)) = rx.recv().await else { return; }; @@ -340,7 +373,7 @@ impl PickerDelegate for CommandPaletteDelegate { .update(cx, |picker, cx| { picker .delegate - .matches_updated(query, commands, matches, cx) + .matches_updated(query, commands, matches, intercept_result, cx) }) .log_err(); }) @@ -361,8 +394,8 @@ impl PickerDelegate for CommandPaletteDelegate { .background_executor() .block_with_timeout(duration, rx.clone().recv()) { - Ok(Some((commands, matches))) => { - self.matches_updated(query, commands, matches, cx); + Ok(Some((commands, matches, interceptor_result))) => { + self.matches_updated(query, commands, matches, interceptor_result, cx); true } _ => { @@ -410,7 +443,7 @@ impl PickerDelegate for CommandPaletteDelegate { &self, ix: usize, selected: bool, - window: &mut Window, + _: &mut Window, cx: &mut Context>, ) -> Option { let matching_command = self.matches.get(ix)?; @@ -429,10 +462,9 @@ impl PickerDelegate for CommandPaletteDelegate { command.name.clone(), matching_command.positions.clone(), )) - .children(KeyBinding::for_action_in( + .child(KeyBinding::for_action_in( &*command.action, &self.previous_focus_handle, - window, cx, )), ), @@ -665,7 +697,11 @@ mod tests { editor.update_in(cx, |editor, window, cx| { assert!(editor.focus_handle(cx).is_focused(window)); assert_eq!( - editor.selections.last::(cx).range().start, + editor + .selections + .last::(&editor.display_snapshot(cx)) + .range() + .start, Point::new(2, 0) ); }); diff --git a/crates/command_palette_hooks/Cargo.toml b/crates/command_palette_hooks/Cargo.toml index dd0b44c57dafe0266737e6c589f8cc6f763f2f4d..6ba771562d374a1c5f2499a9759cbbe3bb0229a4 100644 --- a/crates/command_palette_hooks/Cargo.toml +++ b/crates/command_palette_hooks/Cargo.toml @@ -16,4 +16,4 @@ doctest = false collections.workspace = true derive_more.workspace = true gpui.workspace = true -workspace-hack.workspace = true +workspace.workspace = true diff --git a/crates/command_palette_hooks/src/command_palette_hooks.rs b/crates/command_palette_hooks/src/command_palette_hooks.rs index 
4923b811c570ea9413ff1e9c94aba4fbf1205a2b..bd8f9375b77ec9372a1657724a41dcb851537ece 100644 --- a/crates/command_palette_hooks/src/command_palette_hooks.rs +++ b/crates/command_palette_hooks/src/command_palette_hooks.rs @@ -2,16 +2,16 @@ #![deny(missing_docs)] -use std::any::TypeId; +use std::{any::TypeId, rc::Rc}; use collections::HashSet; use derive_more::{Deref, DerefMut}; -use gpui::{Action, App, BorrowAppContext, Global}; +use gpui::{Action, App, BorrowAppContext, Global, Task, WeakEntity}; +use workspace::Workspace; /// Initializes the command palette hooks. pub fn init(cx: &mut App) { cx.set_global(GlobalCommandPaletteFilter::default()); - cx.set_global(GlobalCommandPaletteInterceptor::default()); } /// A filter for the command palette. @@ -94,7 +94,7 @@ impl CommandPaletteFilter { /// The result of intercepting a command palette command. #[derive(Debug)] -pub struct CommandInterceptResult { +pub struct CommandInterceptItem { /// The action produced as a result of the interception. pub action: Box, /// The display string to show in the command palette for this result. @@ -104,50 +104,50 @@ pub struct CommandInterceptResult { pub positions: Vec, } +/// The result of intercepting a command palette command. +#[derive(Default, Debug)] +pub struct CommandInterceptResult { + /// The items + pub results: Vec, + /// Whether or not to continue to show the normal matches + pub exclusive: bool, +} + /// An interceptor for the command palette. -#[derive(Default)] -pub struct CommandPaletteInterceptor( - Option Vec>>, +#[derive(Clone)] +pub struct GlobalCommandPaletteInterceptor( + Rc, &mut App) -> Task>, ); -#[derive(Default)] -struct GlobalCommandPaletteInterceptor(CommandPaletteInterceptor); - impl Global for GlobalCommandPaletteInterceptor {} -impl CommandPaletteInterceptor { - /// Returns the global [`CommandPaletteInterceptor`], if one is set. - pub fn try_global(cx: &App) -> Option<&CommandPaletteInterceptor> { - cx.try_global::() - .map(|interceptor| &interceptor.0) - } - - /// Updates the global [`CommandPaletteInterceptor`] using the given closure. - pub fn update_global(cx: &mut App, update: F) -> R - where - F: FnOnce(&mut Self, &mut App) -> R, - { - cx.update_global(|this: &mut GlobalCommandPaletteInterceptor, cx| update(&mut this.0, cx)) - } - - /// Intercepts the given query from the command palette. - pub fn intercept(&self, query: &str, cx: &App) -> Vec { - if let Some(handler) = self.0.as_ref() { - (handler)(query, cx) - } else { - Vec::new() - } +impl GlobalCommandPaletteInterceptor { + /// Sets the global interceptor. + /// + /// This will override the previous interceptor, if it exists. + pub fn set( + cx: &mut App, + interceptor: impl Fn(&str, WeakEntity, &mut App) -> Task + + 'static, + ) { + cx.set_global(Self(Rc::new(interceptor))); } /// Clears the global interceptor. - pub fn clear(&mut self) { - self.0 = None; + pub fn clear(cx: &mut App) { + if cx.has_global::() { + cx.remove_global::(); + } } - /// Sets the global interceptor. - /// - /// This will override the previous interceptor, if it exists. - pub fn set(&mut self, handler: Box Vec>) { - self.0 = Some(handler); + /// Intercepts the given query from the command palette. 
+ pub fn intercept( + query: &str, + workspace: WeakEntity, + cx: &mut App, + ) -> Option> { + let interceptor = cx.try_global::()?; + let handler = interceptor.0.clone(); + Some(handler(query, workspace, cx)) } } diff --git a/crates/component/Cargo.toml b/crates/component/Cargo.toml index 74481834f1cab5047dec3cd32121eb002fabbbbd..4ca95cbbbdf1f1c8d6c49a966849d8971842ffe1 100644 --- a/crates/component/Cargo.toml +++ b/crates/component/Cargo.toml @@ -18,7 +18,6 @@ inventory.workspace = true parking_lot.workspace = true strum.workspace = true theme.workspace = true -workspace-hack.workspace = true [dev-dependencies] documented.workspace = true diff --git a/crates/context_server/Cargo.toml b/crates/context_server/Cargo.toml index 1c5745408041ae2f67e91ba0d9365188ab957d4e..846a53fde4b6f87493ec2b75da6c08d2b081df47 100644 --- a/crates/context_server/Cargo.toml +++ b/crates/context_server/Cargo.toml @@ -32,4 +32,3 @@ smol.workspace = true tempfile.workspace = true url = { workspace = true, features = ["serde"] } util.workspace = true -workspace-hack.workspace = true diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index a0a49d6f25d42e6a578db1f1ec886e50f437e414..d9ea4709eadcfab2f6a91c793ac63933dbae545a 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -52,7 +52,6 @@ task.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true itertools.workspace = true [target.'cfg(windows)'.dependencies] diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index d8fa8967a862053ccf2a820878f450c38ea18fad..41c8a17c2d251e23f7c2d6b27fbd2ff488c1c0e4 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1,5 +1,6 @@ pub mod copilot_chat; mod copilot_completion_provider; +pub mod copilot_responses; pub mod request; mod sign_in; diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index a6758ce53c0aa18d04dcd376c2e0afb93add6ab5..5d22760942dbbcfd72f1dacb83c249a08f2fe72a 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -15,6 +15,8 @@ use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; use itertools::Itertools; use paths::home_dir; use serde::{Deserialize, Serialize}; + +use crate::copilot_responses as responses; use settings::watch_config_dir; pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN"; @@ -42,10 +44,14 @@ impl CopilotChatConfiguration { } } - pub fn api_url_from_endpoint(&self, endpoint: &str) -> String { + pub fn chat_completions_url_from_endpoint(&self, endpoint: &str) -> String { format!("{}/chat/completions", endpoint) } + pub fn responses_url_from_endpoint(&self, endpoint: &str) -> String { + format!("{}/responses", endpoint) + } + pub fn models_url_from_endpoint(&self, endpoint: &str) -> String { format!("{}/models", endpoint) } @@ -71,6 +77,14 @@ pub enum Role { System, } +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)] +pub enum ModelSupportedEndpoint { + #[serde(rename = "/chat/completions")] + ChatCompletions, + #[serde(rename = "/responses")] + Responses, +} + #[derive(Deserialize)] struct ModelSchema { #[serde(deserialize_with = "deserialize_models_skip_errors")] @@ -109,6 +123,8 @@ pub struct Model { // reached. 
Zed does not currently implement this behaviour is_chat_fallback: bool, model_picker_enabled: bool, + #[serde(default)] + supported_endpoints: Vec, } #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)] @@ -224,6 +240,16 @@ impl Model { pub fn tokenizer(&self) -> Option<&str> { self.capabilities.tokenizer.as_deref() } + + pub fn supports_response(&self) -> bool { + self.supported_endpoints.len() > 0 + && !self + .supported_endpoints + .contains(&ModelSupportedEndpoint::ChatCompletions) + && self + .supported_endpoints + .contains(&ModelSupportedEndpoint::Responses) + } } #[derive(Serialize, Deserialize)] @@ -253,7 +279,7 @@ pub enum Tool { Function { function: Function }, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "lowercase")] pub enum ToolChoice { Auto, @@ -346,7 +372,7 @@ pub struct Usage { #[derive(Debug, Deserialize)] pub struct ResponseChoice { - pub index: usize, + pub index: Option, pub finish_reason: Option, pub delta: Option, pub message: Option, @@ -359,10 +385,9 @@ pub struct ResponseDelta { #[serde(default)] pub tool_calls: Vec, } - #[derive(Deserialize, Debug, Eq, PartialEq)] pub struct ToolCallChunk { - pub index: usize, + pub index: Option, pub id: Option, pub function: Option, } @@ -554,13 +579,47 @@ impl CopilotChat { is_user_initiated: bool, mut cx: AsyncApp, ) -> Result>> { + let (client, token, configuration) = Self::get_auth_details(&mut cx).await?; + + let api_url = configuration.chat_completions_url_from_endpoint(&token.api_endpoint); + stream_completion( + client.clone(), + token.api_key, + api_url.into(), + request, + is_user_initiated, + ) + .await + } + + pub async fn stream_response( + request: responses::Request, + is_user_initiated: bool, + mut cx: AsyncApp, + ) -> Result>> { + let (client, token, configuration) = Self::get_auth_details(&mut cx).await?; + + let api_url = configuration.responses_url_from_endpoint(&token.api_endpoint); + responses::stream_response( + client.clone(), + token.api_key, + api_url, + request, + is_user_initiated, + ) + .await + } + + async fn get_auth_details( + cx: &mut AsyncApp, + ) -> Result<(Arc, ApiToken, CopilotChatConfiguration)> { let this = cx .update(|cx| Self::global(cx)) .ok() .flatten() .context("Copilot chat is not enabled")?; - let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| { + let (oauth_token, api_token, client, configuration) = this.read_with(cx, |this, _| { ( this.oauth_token.clone(), this.api_token.clone(), @@ -572,12 +631,12 @@ impl CopilotChat { let oauth_token = oauth_token.context("No OAuth token available")?; let token = match api_token { - Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(), + Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token, _ => { let token_url = configuration.token_url(); let token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?; - this.update(&mut cx, |this, cx| { + this.update(cx, |this, cx| { this.api_token = Some(token.clone()); cx.notify(); })?; @@ -585,15 +644,7 @@ impl CopilotChat { } }; - let api_url = configuration.api_url_from_endpoint(&token.api_endpoint); - stream_completion( - client.clone(), - token.api_key, - api_url.into(), - request, - is_user_initiated, - ) - .await + Ok((client, token, configuration)) } pub fn set_configuration( diff --git a/crates/copilot/src/copilot_responses.rs b/crates/copilot/src/copilot_responses.rs new file mode 100644 index 
0000000000000000000000000000000000000000..c1e066208823dcab34a32096cfa447dd0ec9592f --- /dev/null +++ b/crates/copilot/src/copilot_responses.rs @@ -0,0 +1,414 @@ +use super::*; +use anyhow::{Result, anyhow}; +use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +pub use settings::OpenAiReasoningEffort as ReasoningEffort; + +#[derive(Serialize, Debug)] +pub struct Request { + pub model: String, + pub input: Vec, + #[serde(default)] + pub stream: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub tools: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_choice: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reasoning: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +pub enum ResponseIncludable { + #[serde(rename = "reasoning.encrypted_content")] + ReasoningEncryptedContent, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ToolDefinition { + Function { + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + parameters: Option, + #[serde(skip_serializing_if = "Option::is_none")] + strict: Option, + }, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "lowercase")] +pub enum ToolChoice { + Auto, + Any, + None, + #[serde(untagged)] + Other(ToolDefinition), +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningSummary { + Auto, + Concise, + Detailed, +} + +#[derive(Serialize, Debug)] +pub struct ReasoningConfig { + pub effort: ReasoningEffort, + #[serde(skip_serializing_if = "Option::is_none")] + pub summary: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone, Default)] +#[serde(rename_all = "snake_case")] +pub enum ResponseImageDetail { + Low, + High, + #[default] + Auto, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseInputContent { + InputText { + text: String, + }, + OutputText { + text: String, + }, + InputImage { + #[serde(skip_serializing_if = "Option::is_none")] + image_url: Option, + #[serde(default)] + detail: ResponseImageDetail, + }, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +pub enum ItemStatus { + InProgress, + Completed, + Incomplete, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(untagged)] +pub enum ResponseFunctionOutput { + Text(String), + Content(Vec), +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseInputItem { + Message { + role: String, + #[serde(skip_serializing_if = "Option::is_none")] + content: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + status: Option, + }, + FunctionCall { + call_id: String, + name: String, + arguments: String, + #[serde(skip_serializing_if = "Option::is_none")] + status: Option, + }, + FunctionCallOutput { + call_id: String, + output: ResponseFunctionOutput, + #[serde(skip_serializing_if = "Option::is_none")] + status: Option, + }, + Reasoning { + #[serde(skip_serializing_if = 
"Option::is_none")] + id: Option, + summary: Vec, + encrypted_content: String, + }, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +pub enum IncompleteReason { + #[serde(rename = "max_output_tokens")] + MaxOutputTokens, + #[serde(rename = "content_filter")] + ContentFilter, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct IncompleteDetails { + #[serde(skip_serializing_if = "Option::is_none")] + pub reason: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ResponseReasoningItem { + #[serde(rename = "type")] + pub kind: String, + pub text: String, +} + +#[derive(Deserialize, Debug)] +#[serde(tag = "type")] +pub enum StreamEvent { + #[serde(rename = "error")] + GenericError { error: ResponseError }, + + #[serde(rename = "response.created")] + Created { response: Response }, + + #[serde(rename = "response.output_item.added")] + OutputItemAdded { + output_index: usize, + #[serde(default)] + sequence_number: Option, + item: ResponseOutputItem, + }, + + #[serde(rename = "response.output_text.delta")] + OutputTextDelta { + item_id: String, + output_index: usize, + delta: String, + }, + + #[serde(rename = "response.output_item.done")] + OutputItemDone { + output_index: usize, + #[serde(default)] + sequence_number: Option, + item: ResponseOutputItem, + }, + + #[serde(rename = "response.incomplete")] + Incomplete { response: Response }, + + #[serde(rename = "response.completed")] + Completed { response: Response }, + + #[serde(rename = "response.failed")] + Failed { response: Response }, + + #[serde(other)] + Unknown, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct ResponseError { + pub code: String, + pub message: String, +} + +#[derive(Deserialize, Debug, Default, Clone)] +pub struct Response { + pub id: Option, + pub status: Option, + pub usage: Option, + pub output: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub incomplete_details: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +#[derive(Deserialize, Debug, Default, Clone)] +pub struct ResponseUsage { + pub input_tokens: Option, + pub output_tokens: Option, + pub total_tokens: Option, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseOutputItem { + Message { + id: String, + role: String, + #[serde(skip_serializing_if = "Option::is_none")] + content: Option>, + }, + FunctionCall { + #[serde(skip_serializing_if = "Option::is_none")] + id: Option, + call_id: String, + name: String, + arguments: String, + #[serde(skip_serializing_if = "Option::is_none")] + status: Option, + }, + Reasoning { + id: String, + #[serde(skip_serializing_if = "Option::is_none")] + summary: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + encrypted_content: Option, + }, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ResponseOutputContent { + OutputText { text: String }, + Refusal { refusal: String }, +} + +pub async fn stream_response( + client: Arc, + api_key: String, + api_url: String, + request: Request, + is_user_initiated: bool, +) -> Result>> { + let is_vision_request = request.input.iter().any(|item| match item { + ResponseInputItem::Message { + content: Some(parts), + .. + } => parts + .iter() + .any(|p| matches!(p, ResponseInputContent::InputImage { .. 
})),
+        _ => false,
+    });
+
+    let request_initiator = if is_user_initiated { "user" } else { "agent" };
+
+    let request_builder = HttpRequest::builder()
+        .method(Method::POST)
+        .uri(&api_url)
+        .header(
+            "Editor-Version",
+            format!(
+                "Zed/{}",
+                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
+            ),
+        )
+        .header("Authorization", format!("Bearer {}", api_key))
+        .header("Content-Type", "application/json")
+        .header("Copilot-Integration-Id", "vscode-chat")
+        .header("X-Initiator", request_initiator);
+
+    let request_builder = if is_vision_request {
+        request_builder.header("Copilot-Vision-Request", "true")
+    } else {
+        request_builder
+    };
+
+    let is_streaming = request.stream;
+    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
+    let mut response = client.send(request).await?;
+
+    if !response.status().is_success() {
+        let mut body = String::new();
+        response.body_mut().read_to_string(&mut body).await?;
+        anyhow::bail!("Failed to connect to API: {} {}", response.status(), body);
+    }
+
+    if is_streaming {
+        let reader = BufReader::new(response.into_body());
+        Ok(reader
+            .lines()
+            .filter_map(|line| async move {
+                match line {
+                    Ok(line) => {
+                        let line = line.strip_prefix("data: ")?;
+                        if line.starts_with("[DONE]") || line.is_empty() {
+                            return None;
+                        }
+
+                        match serde_json::from_str::<StreamEvent>(line) {
+                            Ok(event) => Some(Ok(event)),
+                            Err(error) => {
+                                log::error!(
+                                    "Failed to parse Copilot responses stream event: `{}`\nResponse: `{}`",
+                                    error,
+                                    line,
+                                );
+                                Some(Err(anyhow!(error)))
+                            }
+                        }
+                    }
+                    Err(error) => Some(Err(anyhow!(error))),
+                }
+            })
+            .boxed())
+    } else {
+        // Simulate streaming: this makes mapping this function's return value more straightforward, since all callers can assume it streams.
+        // It removes the need for one method to map StreamEvent and another to map Response to a LanguageCompletionEvent.
+        let mut body = String::new();
+        response.body_mut().read_to_string(&mut body).await?;
+
+        match serde_json::from_str::<Response>(&body) {
+            Ok(response) => {
+                let events = vec![StreamEvent::Created {
+                    response: response.clone(),
+                }];
+
+                let mut all_events = events;
+                for (output_index, item) in response.output.iter().enumerate() {
+                    all_events.push(StreamEvent::OutputItemAdded {
+                        output_index,
+                        sequence_number: None,
+                        item: item.clone(),
+                    });
+
+                    if let ResponseOutputItem::Message {
+                        id,
+                        content: Some(content),
+                        ..
+ } = item + { + for part in content { + if let ResponseOutputContent::OutputText { text } = part { + all_events.push(StreamEvent::OutputTextDelta { + item_id: id.clone(), + output_index, + delta: text.clone(), + }); + } + } + } + + all_events.push(StreamEvent::OutputItemDone { + output_index, + sequence_number: None, + item: item.clone(), + }); + } + + let final_event = if response.error.is_some() { + StreamEvent::Failed { response } + } else if response.incomplete_details.is_some() { + StreamEvent::Incomplete { response } + } else { + StreamEvent::Completed { response } + }; + all_events.push(final_event); + + Ok(futures::stream::iter(all_events.into_iter().map(Ok)).boxed()) + } + Err(error) => { + log::error!( + "Failed to parse Copilot non-streaming response: `{}`\nResponse: `{}`", + error, + body, + ); + Err(anyhow!(error)) + } + } + } +} diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 0f8147038dd8c4dafbfd4f32b97340f44fc292cd..3f85039e9ea3bce8e702991461adec4a931d3e4a 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -9,6 +9,7 @@ license = "GPL-3.0-or-later" bincode.workspace = true cfg-if.workspace = true crash-handler.workspace = true +extension_host.workspace = true log.workspace = true minidumper.workspace = true paths.workspace = true @@ -17,7 +18,6 @@ smol.workspace = true serde.workspace = true serde_json.workspace = true system_specs.workspace = true -workspace-hack.workspace = true zstd.workspace = true [target.'cfg(target_os = "macos")'.dependencies] diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 4c8c6ec45e621862115ef6441857ada4e2b634ad..3a2c9378535dd1ac5dead68b3191ba1699b4d920 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -33,17 +33,31 @@ const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); pub async fn init(crash_init: InitCrashHandler) { - if *RELEASE_CHANNEL == ReleaseChannel::Dev && env::var("ZED_GENERATE_MINIDUMPS").is_err() { - let old_hook = panic::take_hook(); - panic::set_hook(Box::new(move |info| { - unsafe { env::set_var("RUST_BACKTRACE", "1") }; - old_hook(info); - // prevent the macOS crash dialog from popping up - std::process::exit(1); - })); - return; - } else { - panic::set_hook(Box::new(panic_hook)); + let gen_var = match env::var("ZED_GENERATE_MINIDUMPS") { + Ok(v) => { + if v == "false" || v == "0" { + Some(false) + } else { + Some(true) + } + } + Err(_) => None, + }; + + match (gen_var, *RELEASE_CHANNEL) { + (Some(false), _) | (None, ReleaseChannel::Dev) => { + let old_hook = panic::take_hook(); + panic::set_hook(Box::new(move |info| { + unsafe { env::set_var("RUST_BACKTRACE", "1") }; + old_hook(info); + // prevent the macOS crash dialog from popping up + std::process::exit(1); + })); + return; + } + (Some(true), _) | (None, _) => { + panic::set_hook(Box::new(panic_hook)); + } } let exe = env::current_exe().expect("unable to find ourselves"); @@ -92,7 +106,10 @@ pub async fn init(crash_init: InitCrashHandler) { #[cfg(target_os = "macos")] suspend_all_other_threads(); - client.ping().unwrap(); + // on macos this "ping" is needed to ensure that all our + // `client.send_message` calls have been processed before we trigger the + // minidump request. 
+ client.ping().ok(); client.request_dump(crash_context).is_ok() } else { true @@ -269,6 +286,11 @@ impl minidumper::ServerHandler for CrashServer { } pub fn panic_hook(info: &PanicHookInfo) { + // Don't handle a panic on threads that are not relevant to the main execution. + if extension_host::wasm_host::IS_WASM_THREAD.with(|v| v.load(Ordering::Acquire)) { + return; + } + let message = info .payload() .downcast_ref::<&str>() diff --git a/crates/credentials_provider/Cargo.toml b/crates/credentials_provider/Cargo.toml index 3233b68c605e5273254366c62413172be3375ad5..bf47bb24b12b90d54bc04f766efe06489c730b43 100644 --- a/crates/credentials_provider/Cargo.toml +++ b/crates/credentials_provider/Cargo.toml @@ -19,4 +19,3 @@ paths.workspace = true release_channel.workspace = true serde.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/crates/dap/Cargo.toml b/crates/dap/Cargo.toml index ee963a4f83a70775bcf103094cb04e09f1791998..d856ae0164ff35236f7a133361cdf28908f8b044 100644 --- a/crates/dap/Cargo.toml +++ b/crates/dap/Cargo.toml @@ -49,7 +49,6 @@ smol.workspace = true task.workspace = true telemetry.workspace = true util.workspace = true -workspace-hack.workspace = true [target.'cfg(not(windows))'.dependencies] libc.workspace = true diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index 6d1b89ef99920ecdd7bffedc643ade878294a6a3..b303a0c0268c7e7812e49d1ff3fbe827f6eac2aa 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -306,7 +306,7 @@ pub async fn download_adapter_from_github( anyhow::ensure!( response.status().is_success(), "download failed with status {}", - response.status().to_string() + response.status() ); delegate.output_to_console("Download complete".to_owned()); @@ -356,6 +356,7 @@ pub trait DebugAdapter: 'static + Send + Sync { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, cx: &mut AsyncApp, ) -> Result; @@ -455,6 +456,7 @@ impl DebugAdapter for FakeAdapter { task_definition: &DebugTaskDefinition, _: Option, _: Option>, + _: Option>, _: &mut AsyncApp, ) -> Result { let connection = task_definition diff --git a/crates/dap/src/transport.rs b/crates/dap/src/transport.rs index 50ffb4b7820517f380909ae2ecad160a31afdd54..e6f8d0bce1c28c9f1dfc8b7ad0c1ba4ffceeca36 100644 --- a/crates/dap/src/transport.rs +++ b/crates/dap/src/transport.rs @@ -262,11 +262,15 @@ impl TransportDelegate { break; } } + + // Clean up logs by trimming unnecessary whitespace/newlines before inserting into log. 
+ let line = line.trim(); + log::debug!("stderr: {line}"); for (kind, handler) in log_handlers.lock().iter_mut() { if matches!(kind, LogKind::Adapter) { - handler(iokind, None, line.as_str()); + handler(iokind, None, line); } } } @@ -649,7 +653,7 @@ impl Drop for TcpTransport { } pub struct StdioTransport { - process: Mutex>, + process: Mutex, _stderr_task: Option>, } @@ -676,7 +680,7 @@ impl StdioTransport { let mut process = Child::spawn(command, Stdio::piped())?; - let err_task = process.stderr.take().map(|stderr| { + let _stderr_task = process.stderr.take().map(|stderr| { cx.background_spawn(TransportDelegate::handle_adapter_log( stderr, IoKind::StdErr, @@ -684,24 +688,22 @@ impl StdioTransport { )) }); - let process = Mutex::new(Some(process)); + let process = Mutex::new(process); Ok(Self { process, - _stderr_task: err_task, + _stderr_task, }) } } impl Transport for StdioTransport { fn has_adapter_logs(&self) -> bool { - false + true } fn kill(&mut self) { - if let Some(process) = &mut *self.process.lock() { - process.kill(); - } + self.process.lock().kill(); } fn connect( @@ -713,8 +715,7 @@ impl Transport for StdioTransport { )>, > { let result = util::maybe!({ - let mut guard = self.process.lock(); - let process = guard.as_mut().context("oops")?; + let mut process = self.process.lock(); Ok(( Box::new(process.stdin.take().context("Cannot reconnect")?) as _, Box::new(process.stdout.take().context("Cannot reconnect")?) as _, @@ -730,9 +731,7 @@ impl Transport for StdioTransport { impl Drop for StdioTransport { fn drop(&mut self) { - if let Some(process) = &mut *self.process.lock() { - process.kill(); - } + self.process.lock().kill(); } } diff --git a/crates/dap_adapters/Cargo.toml b/crates/dap_adapters/Cargo.toml index e7366785c810077ef2bdc3669dd5b340859c97a6..253674c0f3da16574b4303faf679abeb310756d8 100644 --- a/crates/dap_adapters/Cargo.toml +++ b/crates/dap_adapters/Cargo.toml @@ -35,11 +35,9 @@ log.workspace = true paths.workspace = true serde.workspace = true serde_json.workspace = true -shlex.workspace = true smol.workspace = true task.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] dap = { workspace = true, features = ["test-support"] } diff --git a/crates/dap_adapters/src/codelldb.rs b/crates/dap_adapters/src/codelldb.rs index 64c32b387d5c8d764632774930d11f1db884970c..05aca2225aa9f0fd2a7fb4c5c1f213372f6ce899 100644 --- a/crates/dap_adapters/src/codelldb.rs +++ b/crates/dap_adapters/src/codelldb.rs @@ -1,7 +1,8 @@ -use std::{collections::HashMap, path::PathBuf, sync::OnceLock}; +use std::{path::PathBuf, sync::OnceLock}; use anyhow::{Context as _, Result}; use async_trait::async_trait; +use collections::HashMap; use dap::adapters::{DebugTaskDefinition, latest_github_release}; use futures::StreamExt; use gpui::AsyncApp; @@ -329,6 +330,7 @@ impl DebugAdapter for CodeLldbDebugAdapter { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, _: &mut AsyncApp, ) -> Result { let mut command = user_installed_path @@ -377,6 +379,7 @@ impl DebugAdapter for CodeLldbDebugAdapter { command = Some(path); }; let mut json_config = config.config.clone(); + Ok(DebugAdapterBinary { command: Some(command.unwrap()), cwd: Some(delegate.worktree_root_path().to_path_buf()), @@ -401,7 +404,7 @@ impl DebugAdapter for CodeLldbDebugAdapter { request_args: self .request_args(delegate, json_config, &config.label) .await?, - envs: HashMap::default(), + envs: user_env.unwrap_or_default(), connection: None, }) } diff --git 
a/crates/dap_adapters/src/gdb.rs b/crates/dap_adapters/src/gdb.rs index 17b7a659111532b5fa04f2b3424e50e7867df6d6..12489247c53322612ea7d7cd33fedce51bb68b26 100644 --- a/crates/dap_adapters/src/gdb.rs +++ b/crates/dap_adapters/src/gdb.rs @@ -1,7 +1,8 @@ -use std::{collections::HashMap, ffi::OsStr}; +use std::ffi::OsStr; use anyhow::{Context as _, Result, bail}; use async_trait::async_trait; +use collections::HashMap; use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition}; use gpui::AsyncApp; use task::{DebugScenario, ZedDebugConfig}; @@ -160,6 +161,7 @@ impl DebugAdapter for GdbDebugAdapter { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, _: &mut AsyncApp, ) -> Result { let user_setting_path = user_installed_path @@ -188,7 +190,7 @@ impl DebugAdapter for GdbDebugAdapter { Ok(DebugAdapterBinary { command: Some(gdb_path), arguments: user_args.unwrap_or_else(|| vec!["-i=dap".into()]), - envs: HashMap::default(), + envs: user_env.unwrap_or_default(), cwd: Some(delegate.worktree_root_path().to_path_buf()), connection: None, request_args: StartDebuggingRequestArguments { diff --git a/crates/dap_adapters/src/go.rs b/crates/dap_adapters/src/go.rs index 999909ad44f313d413ecaa3990f9816872bae588..323ca094934fc93466451246f4bc69f34ded4891 100644 --- a/crates/dap_adapters/src/go.rs +++ b/crates/dap_adapters/src/go.rs @@ -409,6 +409,7 @@ impl DebugAdapter for GoDebugAdapter { task_definition: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, _cx: &mut AsyncApp, ) -> Result { let adapter_path = paths::debug_adapters_dir().join(&Self::ADAPTER_NAME); @@ -460,7 +461,7 @@ impl DebugAdapter for GoDebugAdapter { let connection; let mut configuration = task_definition.config.clone(); - let mut envs = HashMap::default(); + let mut envs = user_env.unwrap_or_default(); if let Some(configuration) = configuration.as_object_mut() { configuration diff --git a/crates/dap_adapters/src/javascript.rs b/crates/dap_adapters/src/javascript.rs index 4e3dc30a7929683cc030558bed5034fe8ed69349..68f5ca7e7976640c5b3e44ec5e2e2b880a6c2407 100644 --- a/crates/dap_adapters/src/javascript.rs +++ b/crates/dap_adapters/src/javascript.rs @@ -6,7 +6,7 @@ use gpui::AsyncApp; use serde_json::Value; use std::{path::PathBuf, sync::OnceLock}; use task::DebugRequest; -use util::{ResultExt, maybe}; +use util::{ResultExt, maybe, shell::ShellKind}; use crate::*; @@ -52,12 +52,13 @@ impl JsDebugAdapter { task_definition: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, _: &mut AsyncApp, ) -> Result { let tcp_connection = task_definition.tcp_connection.clone().unwrap_or_default(); let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?; - let mut envs = HashMap::default(); + let mut envs = user_env.unwrap_or_default(); let mut configuration = task_definition.config.clone(); if let Some(configuration) = configuration.as_object_mut() { @@ -66,7 +67,7 @@ impl JsDebugAdapter { .get("type") .filter(|value| value == &"node-terminal")?; let command = configuration.get("command")?.as_str()?.to_owned(); - let mut args = shlex::split(&command)?.into_iter(); + let mut args = ShellKind::Posix.split(&command)?.into_iter(); let program = args.next()?; configuration.insert("runtimeExecutable".to_owned(), program.into()); configuration.insert( @@ -100,9 +101,9 @@ impl JsDebugAdapter { } if let Some(env) = configuration.get("env").cloned() - && let Ok(env) = serde_json::from_value(env) + && 
let Ok(env) = serde_json::from_value::>(env) { - envs = env; + envs.extend(env.into_iter()); } configuration @@ -504,6 +505,7 @@ impl DebugAdapter for JsDebugAdapter { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, cx: &mut AsyncApp, ) -> Result { if self.checked.set(()).is_ok() { @@ -521,8 +523,15 @@ impl DebugAdapter for JsDebugAdapter { } } - self.get_installed_binary(delegate, config, user_installed_path, user_args, cx) - .await + self.get_installed_binary( + delegate, + config, + user_installed_path, + user_args, + user_env, + cx, + ) + .await } fn label_for_child_session(&self, args: &StartDebuggingRequestArguments) -> Option { diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 47aec4aa5b1a39a517b7887828200ebf4bd065d4..66005db77029bd28c66f458bef7f1d2a1ad7a685 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -1,12 +1,13 @@ use crate::*; -use anyhow::Context as _; +use anyhow::{Context as _, bail}; +use collections::HashMap; use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition}; use fs::RemoveOptions; use futures::{StreamExt, TryStreamExt}; use gpui::http_client::AsyncBody; use gpui::{AsyncApp, SharedString}; use json_dotpath::DotPaths; -use language::LanguageName; +use language::{LanguageName, Toolchain}; use paths::debug_adapters_dir; use serde_json::Value; use smol::fs::File; @@ -16,11 +17,11 @@ use std::ffi::OsString; use std::net::Ipv4Addr; use std::str::FromStr; use std::{ - collections::HashMap, ffi::OsStr, path::{Path, PathBuf}, }; -use util::{ResultExt, maybe, paths::PathStyle, rel_path::RelPath}; +use util::command::new_smol_command; +use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; #[derive(Default)] pub(crate) struct PythonDebugAdapter { @@ -92,12 +93,16 @@ impl PythonDebugAdapter { }) } - async fn fetch_wheel(&self, delegate: &Arc) -> Result, String> { + async fn fetch_wheel( + &self, + toolchain: Option, + delegate: &Arc, + ) -> Result> { let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME).join("wheels"); - std::fs::create_dir_all(&download_dir).map_err(|e| e.to_string())?; - let system_python = self.base_venv_path(delegate).await?; + std::fs::create_dir_all(&download_dir)?; + let venv_python = self.base_venv_path(toolchain, delegate).await?; - let installation_succeeded = util::command::new_smol_command(system_python.as_ref()) + let installation_succeeded = util::command::new_smol_command(venv_python.as_ref()) .args([ "-m", "pip", @@ -109,36 +114,36 @@ impl PythonDebugAdapter { ]) .output() .await - .map_err(|e| format!("{e}"))? + .context("spawn system python")? .status .success(); if !installation_succeeded { - return Err("debugpy installation failed (could not fetch Debugpy's wheel)".into()); + bail!("debugpy installation failed (could not fetch Debugpy's wheel)"); } - let wheel_path = std::fs::read_dir(&download_dir) - .map_err(|e| e.to_string())? + let wheel_path = std::fs::read_dir(&download_dir)? 
.find_map(|entry| { entry.ok().filter(|e| { e.file_type().is_ok_and(|typ| typ.is_file()) && Path::new(&e.file_name()).extension() == Some("whl".as_ref()) }) }) - .ok_or_else(|| String::from("Did not find a .whl in {download_dir}"))?; + .with_context(|| format!("Did not find a .whl in {download_dir:?}"))?; util::archive::extract_zip( &debug_adapters_dir().join(Self::ADAPTER_NAME), - File::open(&wheel_path.path()) - .await - .map_err(|e| e.to_string())?, + File::open(&wheel_path.path()).await?, ) - .await - .map_err(|e| e.to_string())?; + .await?; Ok(Arc::from(wheel_path.path())) } - async fn maybe_fetch_new_wheel(&self, delegate: &Arc) { + async fn maybe_fetch_new_wheel( + &self, + toolchain: Option, + delegate: &Arc, + ) -> Result<()> { let latest_release = delegate .http_client() .get( @@ -148,62 +153,61 @@ impl PythonDebugAdapter { ) .await .log_err(); - maybe!(async move { - let response = latest_release.filter(|response| response.status().is_success())?; - - let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME); - std::fs::create_dir_all(&download_dir).ok()?; - - let mut output = String::new(); - response - .into_body() - .read_to_string(&mut output) - .await - .ok()?; - let as_json = serde_json::Value::from_str(&output).ok()?; - let latest_version = as_json.get("info").and_then(|info| { + let response = latest_release + .filter(|response| response.status().is_success()) + .context("getting latest release")?; + + let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME); + std::fs::create_dir_all(&download_dir)?; + + let mut output = String::new(); + response.into_body().read_to_string(&mut output).await?; + let as_json = serde_json::Value::from_str(&output)?; + let latest_version = as_json + .get("info") + .and_then(|info| { info.get("version") .and_then(|version| version.as_str()) .map(ToOwned::to_owned) - })?; - let dist_info_dirname: OsString = format!("debugpy-{latest_version}.dist-info").into(); - let is_up_to_date = delegate - .fs() - .read_dir(&debug_adapters_dir().join(Self::ADAPTER_NAME)) - .await - .ok()? - .into_stream() - .any(async |entry| { - entry.is_ok_and(|e| e.file_name().is_some_and(|name| name == dist_info_dirname)) - }) - .await; + }) + .context("parsing latest release information")?; + let dist_info_dirname: OsString = format!("debugpy-{latest_version}.dist-info").into(); + let is_up_to_date = delegate + .fs() + .read_dir(&debug_adapters_dir().join(Self::ADAPTER_NAME)) + .await? 
+ .into_stream() + .any(async |entry| { + entry.is_ok_and(|e| e.file_name().is_some_and(|name| name == dist_info_dirname)) + }) + .await; - if !is_up_to_date { - delegate - .fs() - .remove_dir( - &debug_adapters_dir().join(Self::ADAPTER_NAME), - RemoveOptions { - recursive: true, - ignore_if_not_exists: true, - }, - ) - .await - .ok()?; - self.fetch_wheel(delegate).await.ok()?; - } - Some(()) - }) - .await; + if !is_up_to_date { + delegate + .fs() + .remove_dir( + &debug_adapters_dir().join(Self::ADAPTER_NAME), + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await?; + self.fetch_wheel(toolchain, delegate).await?; + } + anyhow::Ok(()) } async fn fetch_debugpy_whl( &self, + toolchain: Option, delegate: &Arc, ) -> Result, String> { self.debugpy_whl_base_path .get_or_init(|| async move { - self.maybe_fetch_new_wheel(delegate).await; + self.maybe_fetch_new_wheel(toolchain, delegate) + .await + .map_err(|e| format!("{e}"))?; Ok(Arc::from( debug_adapters_dir() .join(Self::ADAPTER_NAME) @@ -216,58 +220,88 @@ impl PythonDebugAdapter { .clone() } - async fn base_venv_path(&self, delegate: &Arc) -> Result, String> { - self.base_venv_path + async fn base_venv_path( + &self, + toolchain: Option, + delegate: &Arc, + ) -> Result> { + let result = self.base_venv_path .get_or_init(|| async { - let base_python = Self::system_python_name(delegate) - .await - .ok_or_else(|| String::from("Could not find a Python installation"))?; + let base_python = if let Some(toolchain) = toolchain { + toolchain.path.to_string() + } else { + Self::system_python_name(delegate).await.ok_or_else(|| { + let mut message = "Could not find a Python installation".to_owned(); + if cfg!(windows){ + message.push_str(". Install Python from the Microsoft Store, or manually from https://www.python.org/downloads/windows.") + } + message + })? + }; - let did_succeed = util::command::new_smol_command(base_python) + let debug_adapter_path = paths::debug_adapters_dir().join(Self::DEBUG_ADAPTER_NAME.as_ref()); + let output = util::command::new_smol_command(&base_python) .args(["-m", "venv", "zed_base_venv"]) .current_dir( - paths::debug_adapters_dir().join(Self::DEBUG_ADAPTER_NAME.as_ref()), + &debug_adapter_path, ) .spawn() .map_err(|e| format!("{e:#?}"))? - .status() + .output() .await - .map_err(|e| format!("{e:#?}"))? 
- .success(); + .map_err(|e| format!("{e:#?}"))?; - if !did_succeed { - return Err("Failed to create base virtual environment".into()); + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let stdout = String::from_utf8_lossy(&output.stdout); + let debug_adapter_path = debug_adapter_path.display(); + return Err(format!("Failed to create base virtual environment with {base_python} in:\n{debug_adapter_path}\nstderr:\n{stderr}\nstdout:\n{stdout}\n")); } - const DIR: &str = if cfg!(target_os = "windows") { - "Scripts" + const PYTHON_PATH: &str = if cfg!(target_os = "windows") { + "Scripts/python.exe" } else { - "bin" + "bin/python3" }; Ok(Arc::from( paths::debug_adapters_dir() .join(Self::DEBUG_ADAPTER_NAME.as_ref()) .join("zed_base_venv") - .join(DIR) - .join("python3") + .join(PYTHON_PATH) .as_ref(), )) }) .await - .clone() + .clone(); + match result { + Ok(path) => Ok(path), + Err(e) => Err(anyhow::anyhow!("{e}")), + } } async fn system_python_name(delegate: &Arc) -> Option { const BINARY_NAMES: [&str; 3] = ["python3", "python", "py"]; let mut name = None; for cmd in BINARY_NAMES { - name = delegate - .which(OsStr::new(cmd)) + let Some(path) = delegate.which(OsStr::new(cmd)).await else { + continue; + }; + // Try to detect situations where `python3` exists but is not a real Python interpreter. + // Notably, on fresh Windows installs, `python3` is a shim that opens the Microsoft Store app + // when run with no arguments, and just fails otherwise. + let Some(output) = new_smol_command(&path) + .args(["-c", "print(1 + 2)"]) + .output() .await - .map(|path| path.to_string_lossy().into_owned()); - if name.is_some() { - break; + .ok() + else { + continue; + }; + if output.stdout.trim_ascii() != b"3" { + continue; } + name = Some(path.to_string_lossy().into_owned()); + break; } name } @@ -278,6 +312,7 @@ impl PythonDebugAdapter { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, python_from_toolchain: Option, ) -> Result { let tcp_connection = config.tcp_connection.clone().unwrap_or_default(); @@ -315,7 +350,7 @@ impl PythonDebugAdapter { timeout, }), cwd: Some(delegate.worktree_root_path().to_path_buf()), - envs: HashMap::default(), + envs: user_env.unwrap_or_default(), request_args: self.request_args(delegate, config).await?, }) } @@ -710,6 +745,7 @@ impl DebugAdapter for PythonDebugAdapter { config: &DebugTaskDefinition, user_installed_path: Option, user_args: Option>, + user_env: Option>, cx: &mut AsyncApp, ) -> Result { if let Some(local_path) = &user_installed_path { @@ -718,7 +754,14 @@ impl DebugAdapter for PythonDebugAdapter { local_path.display() ); return self - .get_installed_binary(delegate, config, Some(local_path.clone()), user_args, None) + .get_installed_binary( + delegate, + config, + Some(local_path.clone()), + user_args, + user_env, + None, + ) .await; } @@ -746,27 +789,23 @@ impl DebugAdapter for PythonDebugAdapter { ) .await; - let debugpy_path = self - .fetch_debugpy_whl(delegate) + self.fetch_debugpy_whl(toolchain.clone(), delegate) .await .map_err(|e| anyhow::anyhow!("{e}"))?; if let Some(toolchain) = &toolchain { - log::debug!( - "Found debugpy in toolchain environment: {}", - debugpy_path.display() - ); return self .get_installed_binary( delegate, config, None, user_args, + user_env, Some(toolchain.path.to_string()), ) .await; } - self.get_installed_binary(delegate, config, None, user_args, None) + self.get_installed_binary(delegate, config, None, user_args, user_env, None) .await } 
diff --git a/crates/db/Cargo.toml b/crates/db/Cargo.toml index de449cd38f77d062eda906cced3e3b697a370d15..3bcfefec0315ad2d94f44946c754501f43999264 100644 --- a/crates/db/Cargo.toml +++ b/crates/db/Cargo.toml @@ -26,7 +26,6 @@ smol.workspace = true sqlez.workspace = true sqlez_macros.workspace = true util.workspace = true -workspace-hack.workspace = true zed_env_vars.workspace = true [dev-dependencies] diff --git a/crates/debug_adapter_extension/Cargo.toml b/crates/debug_adapter_extension/Cargo.toml index 78d7cbaba3fbf92f4863228c532524cd0f0577ba..08f916eb9e7c2a26f598f75e46018b5fc76e37db 100644 --- a/crates/debug_adapter_extension/Cargo.toml +++ b/crates/debug_adapter_extension/Cargo.toml @@ -8,13 +8,13 @@ edition.workspace = true [dependencies] anyhow.workspace = true async-trait.workspace = true +collections.workspace = true dap.workspace = true extension.workspace = true gpui.workspace = true serde_json.workspace = true util.workspace = true task.workspace = true -workspace-hack = { version = "0.1", path = "../../tooling/workspace-hack" } [lints] workspace = true diff --git a/crates/debug_adapter_extension/src/extension_dap_adapter.rs b/crates/debug_adapter_extension/src/extension_dap_adapter.rs index 3a39027b62963aa99b53b09ab621f91a1b3f95c5..abc0fbac19faa2be0f6c1ff8c93cadd2b6b96af9 100644 --- a/crates/debug_adapter_extension/src/extension_dap_adapter.rs +++ b/crates/debug_adapter_extension/src/extension_dap_adapter.rs @@ -6,6 +6,7 @@ use std::{ use anyhow::{Context, Result}; use async_trait::async_trait; +use collections::HashMap; use dap::{ StartDebuggingRequestArgumentsRequest, adapters::{ @@ -91,6 +92,8 @@ impl DebugAdapter for ExtensionDapAdapter { user_installed_path: Option, // TODO support user args in the extension API _user_args: Option>, + // TODO support user env in the extension API + _user_env: Option>, _cx: &mut AsyncApp, ) -> Result { self.extension diff --git a/crates/debugger_tools/Cargo.toml b/crates/debugger_tools/Cargo.toml index d91f43182d1b1bb72dea02c612c60ee90e93ff84..c3f6dd9338ae87687680900380c96df53a5e9a6a 100644 --- a/crates/debugger_tools/Cargo.toml +++ b/crates/debugger_tools/Cargo.toml @@ -27,4 +27,3 @@ settings.workspace = true smol.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index c4338c6d0017a215c721c772871647c89227775e..4c994ad7eb749dcb5828daa83bad34a579f9f14c 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -963,26 +963,21 @@ pub fn init(cx: &mut App) { }; let project = workspace.project(); - if project.read(cx).is_local() { - log_store.update(cx, |store, cx| { - store.add_project(project, cx); - }); - } + log_store.update(cx, |store, cx| { + store.add_project(project, cx); + }); let log_store = log_store.clone(); workspace.register_action(move |workspace, _: &OpenDebugAdapterLogs, window, cx| { - let project = workspace.project().read(cx); - if project.is_local() { - workspace.add_item_to_active_pane( - Box::new(cx.new(|cx| { - DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx) - })), - None, - true, - window, - cx, - ); - } + workspace.add_item_to_active_pane( + Box::new(cx.new(|cx| { + DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx) + })), + None, + true, + window, + cx, + ); }); }) .detach(); diff --git a/crates/debugger_ui/Cargo.toml b/crates/debugger_ui/Cargo.toml index 
df4125860f4ab79ce3a55d6b5b4fbb8f8fc64e5e..c1a0657c0ed93508acb330a98dc6d1c1ee91c570 100644 --- a/crates/debugger_ui/Cargo.toml +++ b/crates/debugger_ui/Cargo.toml @@ -60,7 +60,6 @@ serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true settings.workspace = true -shlex.workspace = true sysinfo.workspace = true task.workspace = true tasks_ui.workspace = true @@ -73,7 +72,6 @@ tree-sitter.workspace = true ui.workspace = true unindent = { workspace = true, optional = true } util.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/debugger_ui/src/attach_modal.rs b/crates/debugger_ui/src/attach_modal.rs index daa83f71b1c4148398e12f491caf46b5bf556919..e39a842f63590375898c9870c345574e1932a788 100644 --- a/crates/debugger_ui/src/attach_modal.rs +++ b/crates/debugger_ui/src/attach_modal.rs @@ -9,7 +9,7 @@ use task::ZedDebugConfig; use util::debug_panic; use std::sync::Arc; -use sysinfo::System; +use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind}; use ui::{Context, Tooltip, prelude::*}; use ui::{ListItem, ListItemSpacing}; use workspace::{ModalView, Workspace}; @@ -362,7 +362,12 @@ fn get_processes_for_project(project: &Entity, cx: &mut App) -> Task = System::new_all() + let refresh_kind = RefreshKind::nothing().with_processes( + ProcessRefreshKind::nothing() + .without_tasks() + .with_cmd(UpdateKind::Always), + ); + let mut processes: Box<[_]> = System::new_with_specifics(refresh_kind) .processes() .values() .map(|process| { diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 9154047aa54b43a726834e62a3a4a397ae91d74b..12c303675aed7fe6c8d7f7dc52d1f9e7d1af1966 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -43,6 +43,8 @@ use workspace::{ }; use zed_actions::ToggleFocus; +const DEBUG_PANEL_KEY: &str = "DebugPanel"; + pub struct DebugPanel { size: Pixels, active_session: Option>, @@ -268,12 +270,12 @@ impl DebugPanel { async move |_, cx| { if let Err(error) = task.await { - log::error!("{error}"); + log::error!("{error:#}"); session .update(cx, |session, cx| { session .console_output(cx) - .unbounded_send(format!("error: {}", error)) + .unbounded_send(format!("error: {:#}", error)) .ok(); session.shutdown(cx) })? 
@@ -614,12 +616,11 @@ impl DebugPanel { }) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Start Debug Session", &crate::Start, &focus_handle, - window, cx, ) } @@ -692,12 +693,11 @@ impl DebugPanel { )) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Pause Program", &Pause, &focus_handle, - window, cx, ) } @@ -717,12 +717,11 @@ impl DebugPanel { .disabled(thread_status != ThreadStatus::Stopped) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Continue Program", &Continue, &focus_handle, - window, cx, ) } @@ -742,12 +741,11 @@ impl DebugPanel { .disabled(thread_status != ThreadStatus::Stopped) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Step Over", &StepOver, &focus_handle, - window, cx, ) } @@ -768,12 +766,11 @@ impl DebugPanel { .disabled(thread_status != ThreadStatus::Stopped) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Step In", &StepInto, &focus_handle, - window, cx, ) } @@ -791,12 +788,11 @@ impl DebugPanel { .disabled(thread_status != ThreadStatus::Stopped) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Step Out", &StepOut, &focus_handle, - window, cx, ) } @@ -814,12 +810,11 @@ impl DebugPanel { )) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Rerun Session", &RerunSession, &focus_handle, - window, cx, ) } @@ -859,12 +854,11 @@ impl DebugPanel { } else { "Terminate All Threads" }; - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( label, &Stop, &focus_handle, - window, cx, ) } @@ -891,12 +885,11 @@ impl DebugPanel { )) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Detach", &Detach, &focus_handle, - window, cx, ) } @@ -1414,6 +1407,10 @@ impl Panel for DebugPanel { "DebugPanel" } + fn panel_key() -> &'static str { + DEBUG_PANEL_KEY + } + fn position(&self, _window: &Window, cx: &App) -> DockPosition { DebuggerSettings::get_global(cx).dock.into() } diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 689e3cd878b574d31963231df9bcff317ea6d64c..78cc9e9bd28beb31474c12662d7e118eae6f066e 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -341,8 +341,10 @@ pub fn init(cx: &mut App) { maybe!({ let (buffer, position, _) = editor .update(cx, |editor, cx| { - let cursor_point: language::Point = - editor.selections.newest(cx).head(); + let cursor_point: language::Point = editor + .selections + .newest(&editor.display_snapshot(cx)) + .head(); editor .buffer() @@ -392,7 +394,10 @@ pub fn init(cx: &mut App) { let text = editor .update(cx, |editor, cx| { editor.text_for_range( - editor.selections.newest(cx).range(), + editor + .selections + .newest(&editor.display_snapshot(cx)) + .range(), &mut None, window, cx, diff --git a/crates/debugger_ui/src/dropdown_menus.rs b/crates/debugger_ui/src/dropdown_menus.rs index 376a4a41ce7b03cd07f578d85f641a6ddfc4ebe8..e0c3628f4fc0a927857adbe93549087f930145d6 100644 --- a/crates/debugger_ui/src/dropdown_menus.rs +++ b/crates/debugger_ui/src/dropdown_menus.rs @@ -1,7 
+1,7 @@ use std::rc::Rc; use collections::HashMap; -use gpui::{Entity, WeakEntity}; +use gpui::{Corner, Entity, WeakEntity}; use project::debugger::session::{ThreadId, ThreadStatus}; use ui::{CommonAnimationExt, ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*}; use util::{maybe, truncate_and_trailoff}; @@ -211,6 +211,7 @@ impl DebugPanel { this }), ) + .attach(Corner::BottomLeft) .style(DropdownStyle::Ghost) .handle(self.session_picker_menu_handle.clone()); @@ -322,6 +323,7 @@ impl DebugPanel { this }), ) + .attach(Corner::BottomLeft) .disabled(session_terminated) .style(DropdownStyle::Ghost) .handle(self.thread_picker_menu_handle.clone()), diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index 56c4a690325a0f5d8387fa76c1121206ff8f05fb..e12c768e12b1e098e150027c89d05695c59c51f6 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -32,7 +32,7 @@ use ui::{ SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Tooltip, Window, div, h_flex, relative, rems, v_flex, }; -use util::{ResultExt, rel_path::RelPath}; +use util::{ResultExt, rel_path::RelPath, shell::ShellKind}; use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr, pane}; use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel}; @@ -96,7 +96,9 @@ impl NewProcessModal { let debug_picker = cx.new(|cx| { let delegate = DebugDelegate::new(debug_panel.downgrade(), task_store.clone()); - Picker::uniform_list(delegate, window, cx).modal(false) + Picker::list(delegate, window, cx) + .modal(false) + .list_measure_all() }); let configure_mode = ConfigureMode::new(window, cx); @@ -745,22 +747,15 @@ impl Render for NewProcessModal { == 0; let secondary_action = menu::SecondaryConfirm.boxed_clone(); container - .child(div().children( - KeyBinding::for_action(&*secondary_action, window, cx).map( - |keybind| { - Button::new("edit-attach-task", "Edit in debug.json") - .label_size(LabelSize::Small) - .key_binding(keybind) - .on_click(move |_, window, cx| { - window.dispatch_action( - secondary_action.boxed_clone(), - cx, - ) - }) - .disabled(disabled) - }, - ), - )) + .child(div().child({ + Button::new("edit-attach-task", "Edit in debug.json") + .label_size(LabelSize::Small) + .key_binding(KeyBinding::for_action(&*secondary_action, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(secondary_action.boxed_clone(), cx) + }) + .disabled(disabled) + })) .child( h_flex() .child(div().child(self.adapter_drop_down_menu(window, cx))), @@ -844,7 +839,11 @@ impl ConfigureMode { }; } let command = self.program.read(cx).text(cx); - let mut args = shlex::split(&command).into_iter().flatten().peekable(); + let mut args = ShellKind::Posix + .split(&command) + .into_iter() + .flatten() + .peekable(); let mut env = FxHashMap::default(); while args.peek().is_some_and(|arg| arg.contains('=')) { let arg = args.next().unwrap(); @@ -1053,7 +1052,7 @@ impl DebugDelegate { Some(TaskSourceKind::Lsp { language_name, .. }) => { Some(format!("LSP: {language_name}")) } - Some(TaskSourceKind::Language { .. 
}) => None, + Some(TaskSourceKind::Language { name }) => Some(format!("Lang: {name}")), _ => context.clone().and_then(|ctx| { ctx.task_context .task_variables @@ -1270,7 +1269,11 @@ impl PickerDelegate for DebugDelegate { }) .unwrap_or_default(); - let mut args = shlex::split(&text).into_iter().flatten().peekable(); + let mut args = ShellKind::Posix + .split(&text) + .into_iter() + .flatten() + .peekable(); let mut env = HashMap::default(); while args.peek().is_some_and(|arg| arg.contains('=')) { let arg = args.next().unwrap(); @@ -1447,56 +1450,48 @@ impl PickerDelegate for DebugDelegate { .justify_between() .border_t_1() .border_color(cx.theme().colors().border_variant) - .children({ + .child({ let action = menu::SecondaryConfirm.boxed_clone(); if self.matches.is_empty() { - Some( - Button::new("edit-debug-json", "Edit debug.json") - .label_size(LabelSize::Small) - .on_click(cx.listener(|_picker, _, window, cx| { - window.dispatch_action( - zed_actions::OpenProjectDebugTasks.boxed_clone(), - cx, - ); - cx.emit(DismissEvent); - })), - ) + Button::new("edit-debug-json", "Edit debug.json") + .label_size(LabelSize::Small) + .on_click(cx.listener(|_picker, _, window, cx| { + window.dispatch_action( + zed_actions::OpenProjectDebugTasks.boxed_clone(), + cx, + ); + cx.emit(DismissEvent); + })) } else { - KeyBinding::for_action(&*action, window, cx).map(|keybind| { - Button::new("edit-debug-task", "Edit in debug.json") - .label_size(LabelSize::Small) - .key_binding(keybind) - .on_click(move |_, window, cx| { - window.dispatch_action(action.boxed_clone(), cx) - }) - }) + Button::new("edit-debug-task", "Edit in debug.json") + .label_size(LabelSize::Small) + .key_binding(KeyBinding::for_action(&*action, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(action.boxed_clone(), cx) + }) } }) .map(|this| { if (current_modifiers.alt || self.matches.is_empty()) && !self.prompt.is_empty() { let action = picker::ConfirmInput { secondary: false }.boxed_clone(); - this.children(KeyBinding::for_action(&*action, window, cx).map(|keybind| { + this.child({ Button::new("launch-custom", "Launch Custom") - .key_binding(keybind) + .key_binding(KeyBinding::for_action(&*action, cx)) .on_click(move |_, window, cx| { window.dispatch_action(action.boxed_clone(), cx) }) - })) + }) } else { - this.children(KeyBinding::for_action(&menu::Confirm, window, cx).map( - |keybind| { - let is_recent_selected = - self.divider_index >= Some(self.selected_index); - let run_entry_label = - if is_recent_selected { "Rerun" } else { "Spawn" }; - - Button::new("spawn", run_entry_label) - .key_binding(keybind) - .on_click(|_, window, cx| { - window.dispatch_action(menu::Confirm.boxed_clone(), cx); - }) - }, - )) + this.child({ + let is_recent_selected = self.divider_index >= Some(self.selected_index); + let run_entry_label = if is_recent_selected { "Rerun" } else { "Spawn" }; + + Button::new("spawn", run_entry_label) + .key_binding(KeyBinding::for_action(&menu::Confirm, cx)) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Confirm.boxed_clone(), cx); + }) + }) } }); Some(footer.into_any_element()) diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index fe8cf083fa885246197707dc0783b7f327b57fa8..0e21ef1268412418c381fc14617a917f9529834d 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -386,6 +386,7 @@ pub(crate) fn new_debugger_pane( Default::default(), None, NoAction.boxed_clone(), + true, window, cx, ); @@ 
-565,14 +566,13 @@ pub(crate) fn new_debugger_pane( })) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { let zoomed_text = if zoomed { "Minimize" } else { "Expand" }; Tooltip::for_action_in( zoomed_text, &ToggleExpandItem, &focus_handle, - window, cx, ) } @@ -937,6 +937,7 @@ impl RunningState { let task_store = project.read(cx).task_store().downgrade(); let weak_project = project.downgrade(); let weak_workspace = workspace.downgrade(); + let is_windows = project.read(cx).path_style(cx).is_windows(); let remote_shell = project .read(cx) .remote_client() @@ -1029,7 +1030,7 @@ impl RunningState { task.resolved.shell = Shell::Program(remote_shell); } - let builder = ShellBuilder::new(&task.resolved.shell); + let builder = ShellBuilder::new(&task.resolved.shell, is_windows); let command_label = builder.command_label(task.resolved.command.as_deref().unwrap_or("")); let (command, args) = builder.build(task.resolved.command.clone(), &task.resolved.args); diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index cec906e293485f3ab7b3685f65834d2b143ef8e2..c9f2a58dae28c2e41e49aecc847857ca6191c0eb 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -607,13 +607,12 @@ impl BreakpointList { .when_some(toggle_label, |this, (label, meta)| { this.tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::with_meta_in( label, Some(&ToggleEnableBreakpoint), meta, &focus_handle, - window, cx, ) } @@ -634,13 +633,12 @@ impl BreakpointList { .when_some(remove_breakpoint_tooltip, |this, tooltip| { this.tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::with_meta_in( "Remove Breakpoint", Some(&UnsetBreakpoint), tooltip, &focus_handle, - window, cx, ) } @@ -819,7 +817,7 @@ impl LineBreakpoint { ) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( if is_enabled { "Disable Breakpoint" @@ -828,7 +826,6 @@ impl LineBreakpoint { }, &ToggleEnableBreakpoint, &focus_handle, - window, cx, ) } @@ -980,7 +977,7 @@ impl DataBreakpoint { ) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( if is_enabled { "Disable Data Breakpoint" @@ -989,7 +986,6 @@ impl DataBreakpoint { }, &ToggleEnableBreakpoint, &focus_handle, - window, cx, ) } @@ -1085,7 +1081,7 @@ impl ExceptionBreakpoint { ) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( if is_enabled { "Disable Exception Breakpoint" @@ -1094,7 +1090,6 @@ impl ExceptionBreakpoint { }, &ToggleEnableBreakpoint, &focus_handle, - window, cx, ) } @@ -1402,12 +1397,11 @@ impl RenderOnce for BreakpointOptionsStrip { .disabled(!supports_logs) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Log)) .on_click(self.on_click_callback(ActiveBreakpointStripMode::Log)) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Set Log Message", None, "Set log message to display (instead of stopping) when a breakpoint is hit.", - window, cx, ) }), @@ -1438,12 +1432,11 @@ impl RenderOnce for BreakpointOptionsStrip { .disabled(!supports_condition) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition)) 
.on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition)) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Set Condition", None, "Set condition to evaluate when a breakpoint is hit. Program execution will stop only when the condition is met.", - window, cx, ) }), @@ -1474,12 +1467,11 @@ impl RenderOnce for BreakpointOptionsStrip { .disabled(!supports_hit_condition) .toggle_state(self.is_toggled(ActiveBreakpointStripMode::HitCondition)) .on_click(self.on_click_callback(ActiveBreakpointStripMode::HitCondition)) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Set Hit Condition", None, "Set expression that controls how many hits of the breakpoint are ignored.", - window, cx, ) }), diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index cf7b59f2fe96bb031fc1ed1a5d7ae4005dd37eb9..2d01a325a2b0056bfbf42e519a79a4ec199c4a9d 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -484,12 +484,11 @@ impl Render for Console { .tooltip({ let query_focus_handle = query_focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Evaluate", &Confirm, &query_focus_handle, - window, cx, ) } @@ -669,11 +668,7 @@ impl ConsoleQueryBarCompletionProvider { &snapshot, ), new_text: string_match.string.clone(), - label: CodeLabel { - filter_range: 0..string_match.string.len(), - text: string_match.string.clone(), - runs: Vec::new(), - }, + label: CodeLabel::plain(string_match.string.clone(), None), icon_path: None, documentation: Some(CompletionDocumentation::MultiLineMarkdown( variable_value.into(), @@ -782,11 +777,7 @@ impl ConsoleQueryBarCompletionProvider { &snapshot, ), new_text, - label: CodeLabel { - filter_range: 0..completion.label.len(), - text: completion.label, - runs: Vec::new(), - }, + label: CodeLabel::plain(completion.label, None), icon_path: None, documentation: completion.detail.map(|detail| { CompletionDocumentation::MultiLineMarkdown(detail.into()) @@ -971,8 +962,12 @@ mod tests { ) { cx.set_state(input); - let buffer_position = - cx.editor(|editor, _, cx| editor.selections.newest::(cx).start); + let buffer_position = cx.editor(|editor, _, cx| { + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .start + }); let snapshot = &cx.buffer_snapshot(); diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 309b58e7de40f527e4ab96f8aacd668810aede64..3fc7e8ce392b5ea3982a168fcc8f6dcfad1f7313 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -872,8 +872,8 @@ impl StackFrameList { "filter-by-visible-worktree-stack-frame-list", IconName::ListFilter, ) - .tooltip(move |window, cx| { - Tooltip::for_action(tooltip_title, &ToggleUserFrames, window, cx) + .tooltip(move |_window, cx| { + Tooltip::for_action(tooltip_title, &ToggleUserFrames, cx) }) .toggle_state(self.list_filter == StackFrameFilter::OnlyUserFrames) .icon_size(IconSize::Small) diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index aa8cb143ac71328920bb1a41933b456491647a03..f2b79523fe3d7329073ad618a9d5c5d219a32f3c 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -1306,14 +1306,8 @@ 
impl VariableList { .ok(); } }) - .tooltip(move |window, cx| { - Tooltip::for_action_in( - "Remove Watch", - &RemoveWatch, - &focus_handle, - window, - cx, - ) + .tooltip(move |_window, cx| { + Tooltip::for_action_in("Remove Watch", &RemoveWatch, &focus_handle, cx) }) .icon_size(ui::IconSize::Indicator), ), diff --git a/crates/debugger_ui/src/stack_trace_view.rs b/crates/debugger_ui/src/stack_trace_view.rs index 3806e77b6e932b90f4dec143ddacd40a02e6e421..07caabaacaf00d2752a04c5ba68be07a5678c40a 100644 --- a/crates/debugger_ui/src/stack_trace_view.rs +++ b/crates/debugger_ui/src/stack_trace_view.rs @@ -55,7 +55,10 @@ impl StackTraceView { cx.subscribe_in(&editor, window, |this, editor, event, window, cx| { if let EditorEvent::SelectionsChanged { local: true } = event { let excerpt_id = editor.update(cx, |editor, cx| { - let position: Point = editor.selections.newest(cx).head(); + let position: Point = editor + .selections + .newest(&editor.display_snapshot(cx)) + .head(); editor .snapshot(window, cx) diff --git a/crates/debugger_ui/src/tests/new_process_modal.rs b/crates/debugger_ui/src/tests/new_process_modal.rs index 80e27ee6bdeb1d1a2627ad7aa46bf68c38464510..2f470560d5a58a1ed9e56ebe89257572d195689e 100644 --- a/crates/debugger_ui/src/tests/new_process_modal.rs +++ b/crates/debugger_ui/src/tests/new_process_modal.rs @@ -231,7 +231,10 @@ async fn test_save_debug_scenario_to_file(executor: BackgroundExecutor, cx: &mut editor.update(cx, |editor, cx| { assert_eq!( - editor.selections.newest::(cx).head(), + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(), Point::new(5, 2) ) }); diff --git a/crates/deepseek/Cargo.toml b/crates/deepseek/Cargo.toml index f294e946d805245649c4dedf07df36bfae4972e1..25e8f2f25c8f6cb8505f7975a93f02f12937f3b5 100644 --- a/crates/deepseek/Cargo.toml +++ b/crates/deepseek/Cargo.toml @@ -22,4 +22,3 @@ http_client.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/crates/denoise/Cargo.toml b/crates/denoise/Cargo.toml index a2f43cdfee72a64fbd7e6e60b9414c691c3adfcd..7d4644a610c854c63a11a8d92e8ac89eace0a6dc 100644 --- a/crates/denoise/Cargo.toml +++ b/crates/denoise/Cargo.toml @@ -18,4 +18,3 @@ rodio = { workspace = true, features = ["wav_output"] } rustfft = { version = "6.2.0", features = ["avx"] } realfft = "3.4.0" thiserror.workspace = true -workspace-hack.workspace = true diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index fd678078e8668b8a569c2d0f1627c786987a3cb4..5bb6892f0cea9500fd66671f8e8e86ab9a6d901a 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -34,7 +34,6 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 5fbd958141f658ee74ad91d51a5c5081227b436f..47e2a9539b7e362bb9b968d8e39cda30d3f17e78 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -965,10 +965,11 @@ async fn heuristic_syntactic_expand( let row_count = node_end.row - node_start.row + 1; let mut ancestor_range = None; let reached_outline_node = cx.background_executor().scoped({ - let node_range = node_range.clone(); - let outline_range = outline_range.clone(); - let ancestor_range = &mut ancestor_range; - |scope| 
{scope.spawn(async move { + let node_range = node_range.clone(); + let outline_range = outline_range.clone(); + let ancestor_range = &mut ancestor_range; + |scope| { + scope.spawn(async move { // Stop if we've exceeded the row count or reached an outline node. Then, find the interval // of node children which contains the query range. For example, this allows just returning // the header of a declaration rather than the entire declaration. @@ -980,8 +981,11 @@ async fn heuristic_syntactic_expand( if cursor.goto_first_child() { loop { let child_node = cursor.node(); - let child_range = previous_end..Point::from_ts_point(child_node.end_position()); - if included_child_start.is_none() && child_range.contains(&input_range.start) { + let child_range = + previous_end..Point::from_ts_point(child_node.end_position()); + if included_child_start.is_none() + && child_range.contains(&input_range.start) + { included_child_start = Some(child_range.start); } if child_range.contains(&input_range.end) { @@ -997,19 +1001,22 @@ async fn heuristic_syntactic_expand( if let Some(start) = included_child_start { let row_count = end.row - start.row; if row_count < max_row_count { - *ancestor_range = Some(Some(RangeInclusive::new(start.row, end.row))); + *ancestor_range = + Some(Some(RangeInclusive::new(start.row, end.row))); return; } } log::info!( - "Expanding to ancestor started on {} node exceeding row limit of {max_row_count}.", + "Expanding to ancestor started on {} node\ + exceeding row limit of {max_row_count}.", node.grammar_name() ); *ancestor_range = Some(None); } }) - }}); + } + }); reached_outline_node.await; if let Some(node) = ancestor_range { return node; diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index 2d86361df003ceab114d1e4cd3adabbbfbf9b497..d97a5ab65aab4bb238182040821ecf9fdf828bc3 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -1,9 +1,9 @@ use super::*; use collections::{HashMap, HashSet}; use editor::{ - DisplayPoint, EditorSettings, + DisplayPoint, EditorSettings, Inlay, actions::{GoToDiagnostic, GoToPreviousDiagnostic, Hover, MoveToBeginning}, - display_map::{DisplayRow, Inlay}, + display_map::DisplayRow, test::{ editor_content_with_blocks, editor_lsp_test_context::EditorLspTestContext, editor_test_context::EditorTestContext, @@ -1341,7 +1341,7 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext) range: Some(range), })) }); - let delay = cx.update(|_, cx| EditorSettings::get_global(cx).hover_popover_delay + 1); + let delay = cx.update(|_, cx| EditorSettings::get_global(cx).hover_popover_delay.0 + 1); cx.background_executor .advance_clock(Duration::from_millis(delay)); diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index afbef4427ffe0c7f5712a94e7c1bc01fbcbf5f54..413bad5c0d696bfcba92a1127789c9e7c31edc30 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -30,7 +30,7 @@ impl Render for DiagnosticIndicator { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let indicator = h_flex().gap_2(); if !ProjectSettings::get_global(cx).diagnostics.button { - return indicator; + return indicator.hidden(); } let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) { @@ -67,11 +67,10 @@ impl Render for DiagnosticIndicator { Some( Button::new("diagnostic_message", SharedString::new(message)) .label_size(LabelSize::Small) - 
.tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::for_action( "Next Diagnostic", &editor::actions::GoToDiagnostic::default(), - window, cx, ) }) @@ -87,8 +86,8 @@ impl Render for DiagnosticIndicator { .child( ButtonLike::new("diagnostic-indicator") .child(diagnostic_indicator) - .tooltip(|window, cx| { - Tooltip::for_action("Project Diagnostics", &Deploy, window, cx) + .tooltip(move |_window, cx| { + Tooltip::for_action("Project Diagnostics", &Deploy, cx) }) .on_click(cx.listener(|this, _, window, cx| { if let Some(workspace) = this.workspace.upgrade() { @@ -170,7 +169,10 @@ impl DiagnosticIndicator { fn update(&mut self, editor: Entity, window: &mut Window, cx: &mut Context) { let (buffer, cursor_position) = editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx).snapshot(cx); - let cursor_position = editor.selections.newest::(cx).head(); + let cursor_position = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); (buffer, cursor_position) }); let new_diagnostic = buffer diff --git a/crates/docs_preprocessor/Cargo.toml b/crates/docs_preprocessor/Cargo.toml index e46ceb18db7e75f0f946da1d112509a18a68d4aa..e71f9ae3f3f6fcff790db27fb1e377f0d1c20e40 100644 --- a/crates/docs_preprocessor/Cargo.toml +++ b/crates/docs_preprocessor/Cargo.toml @@ -17,9 +17,10 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true util.workspace = true -workspace-hack.workspace = true zed.workspace = true zlog.workspace = true +task.workspace = true +theme.workspace = true [lints] workspace = true diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index e8f81812ccdf2e8ad10e18f06c10509a1ed5b65d..b614a8251139413f4b316937db1d4e3c0d551df6 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -53,9 +53,20 @@ fn main() -> Result<()> { #[derive(Debug, Clone, PartialEq, Eq, Hash)] enum PreprocessorError { - ActionNotFound { action_name: String }, - DeprecatedActionUsed { used: String, should_be: String }, + ActionNotFound { + action_name: String, + }, + DeprecatedActionUsed { + used: String, + should_be: String, + }, InvalidFrontmatterLine(String), + InvalidSettingsJson { + file: std::path::PathBuf, + line: usize, + snippet: String, + error: String, + }, } impl PreprocessorError { @@ -72,6 +83,20 @@ impl PreprocessorError { } PreprocessorError::ActionNotFound { action_name } } + + fn new_for_invalid_settings_json( + chapter: &Chapter, + location: usize, + snippet: String, + error: String, + ) -> Self { + PreprocessorError::InvalidSettingsJson { + file: chapter.path.clone().expect("chapter has path"), + line: chapter.content[..location].lines().count() + 1, + snippet, + error, + } + } } impl std::fmt::Display for PreprocessorError { @@ -88,6 +113,21 @@ impl std::fmt::Display for PreprocessorError { "Deprecated action used: {} should be {}", used, should_be ), + PreprocessorError::InvalidSettingsJson { + file, + line, + snippet, + error, + } => { + write!( + f, + "Invalid settings JSON at {}:{}\nError: {}\n\n{}", + file.display(), + line, + error, + snippet + ) + } } } } @@ -100,11 +140,11 @@ fn handle_preprocessing() -> Result<()> { let (_ctx, mut book) = CmdPreprocessor::parse_input(input.as_bytes())?; let mut errors = HashSet::::new(); - handle_frontmatter(&mut book, &mut errors); template_big_table_of_actions(&mut book); template_and_validate_keybindings(&mut book, &mut errors); template_and_validate_actions(&mut book, &mut errors); + template_and_validate_json_snippets(&mut 
book, &mut errors); if !errors.is_empty() { const ANSI_RED: &str = "\x1b[31m"; @@ -163,6 +203,10 @@ fn template_big_table_of_actions(book: &mut Book) { }); } +fn format_binding(binding: String) -> String { + binding.replace("\\", "\\\\") +} + fn template_and_validate_keybindings(book: &mut Book, errors: &mut HashSet) { let regex = Regex::new(r"\{#kb (.*?)\}").unwrap(); @@ -183,7 +227,10 @@ fn template_and_validate_keybindings(book: &mut Book, errors: &mut HashSetNo default binding
".to_string(); } - format!("{macos_binding}|{linux_binding}") + let formatted_macos_binding = format_binding(macos_binding); + let formatted_linux_binding = format_binding(linux_binding); + + format!("{formatted_macos_binding}|{formatted_linux_binding}") }) .into_owned() }); @@ -235,6 +282,161 @@ fn find_binding(os: &str, action: &str) -> Option { }) } +fn template_and_validate_json_snippets(book: &mut Book, errors: &mut HashSet) { + fn for_each_labeled_code_block_mut( + book: &mut Book, + errors: &mut HashSet, + f: impl Fn(&str, &str) -> anyhow::Result<()>, + ) { + const TAGGED_JSON_BLOCK_START: &'static str = "```json ["; + const JSON_BLOCK_END: &'static str = "```"; + + for_each_chapter_mut(book, |chapter| { + let mut offset = 0; + while let Some(loc) = chapter.content[offset..].find(TAGGED_JSON_BLOCK_START) { + let loc = loc + offset; + let tag_start = loc + TAGGED_JSON_BLOCK_START.len(); + offset = tag_start; + let Some(tag_end) = chapter.content[tag_start..].find(']') else { + errors.insert(PreprocessorError::new_for_invalid_settings_json( + chapter, + loc, + chapter.content[loc..tag_start].to_string(), + "Unclosed JSON block tag".to_string(), + )); + continue; + }; + let tag_end = tag_end + tag_start; + + let tag = &chapter.content[tag_start..tag_end]; + + if tag.contains('\n') { + errors.insert(PreprocessorError::new_for_invalid_settings_json( + chapter, + loc, + chapter.content[loc..tag_start].to_string(), + "Unclosed JSON block tag".to_string(), + )); + continue; + } + + let snippet_start = tag_end + 1; + offset = snippet_start; + + let Some(snippet_end) = chapter.content[snippet_start..].find(JSON_BLOCK_END) + else { + errors.insert(PreprocessorError::new_for_invalid_settings_json( + chapter, + loc, + chapter.content[loc..tag_end + 1].to_string(), + "Missing closing code block".to_string(), + )); + continue; + }; + let snippet_end = snippet_start + snippet_end; + let snippet_json = &chapter.content[snippet_start..snippet_end]; + offset = snippet_end + 3; + + if let Err(err) = f(tag, snippet_json) { + errors.insert(PreprocessorError::new_for_invalid_settings_json( + chapter, + loc, + chapter.content[loc..snippet_end + 3].to_string(), + err.to_string(), + )); + continue; + }; + let tag_range_complete = tag_start - 1..tag_end + 1; + offset -= tag_range_complete.len(); + chapter.content.replace_range(tag_range_complete, ""); + } + }); + } + + for_each_labeled_code_block_mut(book, errors, |label, snippet_json| { + let mut snippet_json_fixed = snippet_json + .to_string() + .replace("\n>", "\n") + .trim() + .to_string(); + while snippet_json_fixed.starts_with("//") { + if let Some(line_end) = snippet_json_fixed.find('\n') { + snippet_json_fixed.replace_range(0..line_end, ""); + snippet_json_fixed = snippet_json_fixed.trim().to_string(); + } + } + match label { + "settings" => { + if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') { + snippet_json_fixed.insert(0, '{'); + snippet_json_fixed.push_str("\n}"); + } + settings::parse_json_with_comments::( + &snippet_json_fixed, + )?; + } + "keymap" => { + if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') { + snippet_json_fixed.insert(0, '['); + snippet_json_fixed.push_str("\n]"); + } + + let keymap = settings::KeymapFile::parse(&snippet_json_fixed) + .context("Failed to parse keymap JSON")?; + for section in keymap.sections() { + for (keystrokes, action) in section.bindings() { + keystrokes + .split_whitespace() + .map(|source| gpui::Keystroke::parse(source)) + .collect::, _>>() + 
.context("Failed to parse keystroke")?; + if let Some((action_name, _)) = settings::KeymapFile::parse_action(action) + .map_err(|err| anyhow::format_err!(err)) + .context("Failed to parse action")? + { + anyhow::ensure!( + find_action_by_name(action_name).is_some(), + "Action not found: {}", + action_name + ); + } + } + } + } + "debug" => { + if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') { + snippet_json_fixed.insert(0, '['); + snippet_json_fixed.push_str("\n]"); + } + + settings::parse_json_with_comments::(&snippet_json_fixed)?; + } + "tasks" => { + if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') { + snippet_json_fixed.insert(0, '['); + snippet_json_fixed.push_str("\n]"); + } + + settings::parse_json_with_comments::(&snippet_json_fixed)?; + } + "icon-theme" => { + if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') { + snippet_json_fixed.insert(0, '{'); + snippet_json_fixed.push_str("\n}"); + } + + settings::parse_json_with_comments::( + &snippet_json_fixed, + )?; + } + label => { + anyhow::bail!("Unexpected JSON code block tag: {}", label) + } + }; + Ok(()) + }); +} + /// Removes any configurable options from the stringified action if existing, /// ensuring that only the actual action name is returned. If the action consists /// only of a string and nothing else, the string is returned as-is. @@ -334,6 +536,7 @@ fn handle_postprocessing() -> Result<()> { .as_str() .expect("Default title not a string") .to_string(); + let amplitude_key = std::env::var("DOCS_AMPLITUDE_API_KEY").unwrap_or_default(); output.insert("html".to_string(), zed_html); mdbook::Renderer::render(&mdbook::renderer::HtmlHandlebars::new(), &ctx)?; @@ -402,6 +605,7 @@ fn handle_postprocessing() -> Result<()> { let meta_title = format!("{} | {}", page_title, meta_title); zlog::trace!(logger => "Updating {:?}", pretty_path(&file, &root_dir)); let contents = contents.replace("#description#", meta_description); + let contents = contents.replace("#amplitude_key#", &amplitude_key); let contents = title_regex() .replace(&contents, |_: &regex::Captures| { format!("<title>{}</title>", meta_title) diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index 0195bdb06d67297569ef14175148fcab71effd6a..2c6888d14be49c857e7805fb63f9f9335ac32c8e 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -15,4 +15,3 @@ path = "src/edit_prediction.rs" client.workspace = true gpui.workspace = true language.workspace = true -workspace-hack.workspace = true diff --git a/crates/edit_prediction_button/Cargo.toml b/crates/edit_prediction_button/Cargo.toml index 597a83da33cf49cd8170630a53675bdd6da92af4..189db7f7bac3eaea36a154424c4e7702f1387d24 100644 --- a/crates/edit_prediction_button/Cargo.toml +++ b/crates/edit_prediction_button/Cargo.toml @@ -32,7 +32,6 @@ settings.workspace = true supermaven.workspace = true telemetry.workspace = true ui.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true zeta.workspace = true diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 6f050fc86c708e2c97f9b34f2fa786516ba0aca9..8b9bfc1c50092b65892cfcee9f4da1aeb2a0993e 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -72,17 +72,17 @@ impl Render for EditPredictionButton { fn render(&mut self, _: &mut Window, cx: 
&mut Context) -> impl IntoElement { // Return empty div if AI is disabled if DisableAiSettings::get_global(cx).disable_ai { - return div(); + return div().hidden(); } let all_language_settings = all_language_settings(None, cx); match all_language_settings.edit_predictions.provider { - EditPredictionProvider::None => div(), + EditPredictionProvider::None => div().hidden(), EditPredictionProvider::Copilot => { let Some(copilot) = Copilot::global(cx) else { - return div(); + return div().hidden(); }; let status = copilot.read(cx).status(); @@ -123,8 +123,8 @@ impl Render for EditPredictionButton { }); } })) - .tooltip(|window, cx| { - Tooltip::for_action("GitHub Copilot", &ToggleMenu, window, cx) + .tooltip(|_window, cx| { + Tooltip::for_action("GitHub Copilot", &ToggleMenu, cx) }), ); } @@ -146,9 +146,7 @@ impl Render for EditPredictionButton { .anchor(Corner::BottomRight) .trigger_with_tooltip( IconButton::new("copilot-icon", icon), - |window, cx| { - Tooltip::for_action("GitHub Copilot", &ToggleMenu, window, cx) - }, + |_window, cx| Tooltip::for_action("GitHub Copilot", &ToggleMenu, cx), ) .with_handle(self.popover_menu_handle.clone()), ) @@ -220,12 +218,7 @@ impl Render for EditPredictionButton { IconButton::new("supermaven-icon", icon), move |window, cx| { if has_menu { - Tooltip::for_action( - tooltip_text.clone(), - &ToggleMenu, - window, - cx, - ) + Tooltip::for_action(tooltip_text.clone(), &ToggleMenu, cx) } else { Tooltip::text(tooltip_text.clone())(window, cx) } @@ -288,9 +281,7 @@ impl Render for EditPredictionButton { cx.theme().colors().status_bar_background, )) }), - move |window, cx| { - Tooltip::for_action("Codestral", &ToggleMenu, window, cx) - }, + move |_window, cx| Tooltip::for_action("Codestral", &ToggleMenu, cx), ) .with_handle(self.popover_menu_handle.clone()), ) @@ -317,14 +308,8 @@ impl Render for EditPredictionButton { .shape(IconButtonShape::Square) .indicator(Indicator::dot().color(Color::Muted)) .indicator_border_color(Some(cx.theme().colors().status_bar_background)) - .tooltip(move |window, cx| { - Tooltip::with_meta( - "Edit Predictions", - None, - tooltip_meta, - window, - cx, - ) + .tooltip(move |_window, cx| { + Tooltip::with_meta("Edit Predictions", None, tooltip_meta, cx) }) .on_click(cx.listener(move |_, _, window, cx| { telemetry::event!( @@ -365,16 +350,15 @@ impl Render for EditPredictionButton { }, ) .when(!self.popover_menu_handle.is_deployed(), |element| { - element.tooltip(move |window, cx| { + element.tooltip(move |_window, cx| { if enabled { if show_editor_predictions { - Tooltip::for_action("Edit Prediction", &ToggleMenu, window, cx) + Tooltip::for_action("Edit Prediction", &ToggleMenu, cx) } else { Tooltip::with_meta( "Edit Prediction", Some(&ToggleMenu), "Hidden For This File", - window, cx, ) } @@ -383,7 +367,6 @@ impl Render for EditPredictionButton { "Edit Prediction", Some(&ToggleMenu), "Disabled For This File", - window, cx, ) } diff --git a/crates/edit_prediction_context/Cargo.toml b/crates/edit_prediction_context/Cargo.toml index 754d7e03f7bfd6839a038dc6acebfb8a570a239d..771ba4f932ee3f6b1d7fb7ab2c23ea38930ef2cc 100644 --- a/crates/edit_prediction_context/Cargo.toml +++ b/crates/edit_prediction_context/Cargo.toml @@ -35,7 +35,6 @@ strum.workspace = true text.workspace = true tree-sitter.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] clap.workspace = true diff --git a/crates/edit_prediction_context/src/declaration.rs b/crates/edit_prediction_context/src/declaration.rs index 
b57054cb537655184d4a52b511213dcfa570cd87..cc32640425ecc563b1f24a6c695be1c13199cd73 100644 --- a/crates/edit_prediction_context/src/declaration.rs +++ b/crates/edit_prediction_context/src/declaration.rs @@ -1,3 +1,4 @@ +use cloud_llm_client::predict_edits_v3::{self, Line}; use language::{Language, LanguageId}; use project::ProjectEntryId; use std::ops::Range; @@ -91,6 +92,18 @@ impl Declaration { } } + pub fn item_line_range(&self) -> Range { + match self { + Declaration::File { declaration, .. } => declaration.item_line_range.clone(), + Declaration::Buffer { + declaration, rope, .. + } => { + Line(rope.offset_to_point(declaration.item_range.start).row) + ..Line(rope.offset_to_point(declaration.item_range.end).row) + } + } + } + pub fn item_text(&self) -> (Cow<'_, str>, bool) { match self { Declaration::File { declaration, .. } => ( @@ -130,6 +143,18 @@ impl Declaration { } } + pub fn signature_line_range(&self) -> Range { + match self { + Declaration::File { declaration, .. } => declaration.signature_line_range.clone(), + Declaration::Buffer { + declaration, rope, .. + } => { + Line(rope.offset_to_point(declaration.signature_range.start).row) + ..Line(rope.offset_to_point(declaration.signature_range.end).row) + } + } + } + pub fn signature_range_in_item_text(&self) -> Range { let signature_range = self.signature_range(); let item_range = self.item_range(); @@ -142,7 +167,7 @@ fn expand_range_to_line_boundaries_and_truncate( range: &Range, limit: usize, rope: &Rope, -) -> (Range, bool) { +) -> (Range, Range, bool) { let mut point_range = rope.offset_to_point(range.start)..rope.offset_to_point(range.end); point_range.start.column = 0; point_range.end.row += 1; @@ -155,7 +180,10 @@ fn expand_range_to_line_boundaries_and_truncate( item_range.end = item_range.start + limit; } item_range.end = rope.clip_offset(item_range.end, Bias::Left); - (item_range, is_truncated) + + let line_range = + predict_edits_v3::Line(point_range.start.row)..predict_edits_v3::Line(point_range.end.row); + (item_range, line_range, is_truncated) } #[derive(Debug, Clone)] @@ -164,25 +192,30 @@ pub struct FileDeclaration { pub identifier: Identifier, /// offset range of the declaration in the file, expanded to line boundaries and truncated pub item_range: Range, + /// line range of the declaration in the file, potentially truncated + pub item_line_range: Range, /// text of `item_range` pub text: Arc, /// whether `text` was truncated pub text_is_truncated: bool, /// offset range of the signature in the file, expanded to line boundaries and truncated pub signature_range: Range, + /// line range of the signature in the file, truncated + pub signature_line_range: Range, /// whether `signature` was truncated pub signature_is_truncated: bool, } impl FileDeclaration { pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> FileDeclaration { - let (item_range_in_file, text_is_truncated) = expand_range_to_line_boundaries_and_truncate( - &declaration.item_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); + let (item_range_in_file, item_line_range_in_file, text_is_truncated) = + expand_range_to_line_boundaries_and_truncate( + &declaration.item_range, + ITEM_TEXT_TRUNCATION_LENGTH, + rope, + ); - let (mut signature_range_in_file, mut signature_is_truncated) = + let (mut signature_range_in_file, signature_line_range, mut signature_is_truncated) = expand_range_to_line_boundaries_and_truncate( &declaration.signature_range, ITEM_TEXT_TRUNCATION_LENGTH, @@ -202,6 +235,7 @@ impl FileDeclaration { parent: None, identifier: 
declaration.identifier, signature_range: signature_range_in_file, + signature_line_range, signature_is_truncated, text: rope .chunks_in_range(item_range_in_file.clone()) @@ -209,6 +243,7 @@ impl FileDeclaration { .into(), text_is_truncated, item_range: item_range_in_file, + item_line_range: item_line_range_in_file, } } } @@ -225,12 +260,13 @@ pub struct BufferDeclaration { impl BufferDeclaration { pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> Self { - let (item_range, item_range_is_truncated) = expand_range_to_line_boundaries_and_truncate( - &declaration.item_range, - ITEM_TEXT_TRUNCATION_LENGTH, - rope, - ); - let (signature_range, signature_range_is_truncated) = + let (item_range, _item_line_range, item_range_is_truncated) = + expand_range_to_line_boundaries_and_truncate( + &declaration.item_range, + ITEM_TEXT_TRUNCATION_LENGTH, + rope, + ); + let (signature_range, _signature_line_range, signature_range_is_truncated) = expand_range_to_line_boundaries_and_truncate( &declaration.signature_range, ITEM_TEXT_TRUNCATION_LENGTH, diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 70ada71991b003e68d6c9d7de45d85cfba67781a..a2bf59475d07da287490da6bd43e9dafec8bf1d2 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -8,6 +8,7 @@ mod similar_snippets; mod syntax_index; pub mod text_similarity; +use cloud_llm_client::predict_edits_v3; use collections::HashMap; use gpui::{App, AppContext as _, Entity, Task}; use language::BufferSnapshot; @@ -23,19 +24,22 @@ pub use similar_snippets::*; pub use syntax_index::*; pub use text_similarity::*; +pub use predict_edits_v3::Line; + #[derive(Clone, Debug, PartialEq)] pub struct EditPredictionContextOptions { pub use_imports: bool, pub excerpt: EditPredictionExcerptOptions, pub score: EditPredictionScoreOptions, pub similar_snippets: SimilarSnippetOptions, + pub max_retrieved_declarations: u8, } #[derive(Clone, Debug)] pub struct EditPredictionContext { pub excerpt: EditPredictionExcerpt, pub excerpt_text: EditPredictionExcerptText, - pub cursor_offset_in_excerpt: usize, + pub cursor_point: Point, pub declarations: Vec, pub similar_snippets: Vec, } @@ -117,27 +121,25 @@ impl EditPredictionContext { index_state, )?; let excerpt_text = excerpt.text(buffer); - let excerpt_occurrences = - Occurrences::new(IdentifierParts::occurrences_in_str(&excerpt_text.body)); - let excerpt_trigram_occurrences: Occurrences> = - Occurrences::new(NGram::occurrences_in_str(&excerpt_text.body)); - - let adjacent_start = Point::new(cursor_point.row.saturating_sub(2), 0); - let adjacent_end = Point::new(cursor_point.row + 1, 0); - let adjacent_occurrences = Occurrences::new(IdentifierParts::occurrences_in_str( - &buffer - .text_for_range(adjacent_start..adjacent_end) - .collect::(), - )); - - let cursor_offset_in_file = cursor_point.to_offset(buffer); - // TODO fix this to not need saturating_sub - let cursor_offset_in_excerpt = cursor_offset_in_file.saturating_sub(excerpt.range.start); - - let declarations = if let Some(index_state) = index_state { + + let declarations = if options.max_retrieved_declarations > 0 + && let Some(index_state) = index_state + { + let excerpt_occurrences = + Occurrences::new(IdentifierParts::occurrences_in_str(&excerpt_text.body)); + let adjacent_start = Point::new(cursor_point.row.saturating_sub(2), 0); + let adjacent_end = Point::new(cursor_point.row 
+ 1, 0); + let adjacent_occurrences = Occurrences::new(IdentifierParts::occurrences_in_str( + &buffer + .text_for_range(adjacent_start..adjacent_end) + .collect::(), + )); + + let cursor_offset_in_file = cursor_point.to_offset(buffer); + let references = get_references(&excerpt, &excerpt_text, buffer); - scored_declarations( + let mut declarations = scored_declarations( &options.score, &index_state, &excerpt, @@ -147,19 +149,29 @@ impl EditPredictionContext { references, cursor_offset_in_file, buffer, - ) + ); + // TODO [zeta2] if we need this when we ship, we should probably do it in a smarter way + declarations.truncate(options.max_retrieved_declarations as usize); + declarations } else { vec![] }; - let before = Instant::now(); - let similar_snippets = similar_snippets( - &excerpt_trigram_occurrences, - excerpt.range.clone(), - buffer, - &options.similar_snippets, - ); - dbg!(before.elapsed()); + let similar_snippets = if options.similar_snippets.max_result_count > 0 { + let before = Instant::now(); + let excerpt_trigram_occurrences: Occurrences> = + Occurrences::new(NGram::occurrences_in_str(&excerpt_text.body)); + let similar_snippets = similar_snippets( + &excerpt_trigram_occurrences, + excerpt.range.clone(), + buffer, + &options.similar_snippets, + ); + dbg!(before.elapsed()); + similar_snippets + } else { + Vec::new() + }; // buffer.debug(&excerpt.range, "excerpt"); @@ -174,7 +186,7 @@ impl EditPredictionContext { Some(Self { excerpt, excerpt_text, - cursor_offset_in_excerpt, + cursor_point, declarations, similar_snippets, }) @@ -230,6 +242,7 @@ mod tests { omit_excerpt_overlaps: true, }, similar_snippets: SimilarSnippetOptions::default(), + max_retrieved_declarations: u8::MAX, }, Some(index.clone()), cx, diff --git a/crates/edit_prediction_context/src/excerpt.rs b/crates/edit_prediction_context/src/excerpt.rs index 9b858c5a2b5d6ef59848f98b1d66ae43a5b64304..5bf5d31cde16d614f4cbea405b5706c9e21531f4 100644 --- a/crates/edit_prediction_context/src/excerpt.rs +++ b/crates/edit_prediction_context/src/excerpt.rs @@ -4,7 +4,7 @@ use text::{Point, ToOffset, ToPoint as _}; use tree_sitter::{Node, TreeCursor}; use util::RangeExt; -use crate::{BufferDeclaration, declaration::DeclarationId, syntax_index::SyntaxIndexState}; +use crate::{BufferDeclaration, Line, declaration::DeclarationId, syntax_index::SyntaxIndexState}; // TODO: // @@ -35,6 +35,7 @@ pub struct EditPredictionExcerptOptions { #[derive(Debug, Clone)] pub struct EditPredictionExcerpt { pub range: Range, + pub line_range: Range, pub parent_declarations: Vec<(DeclarationId, Range)>, pub size: usize, } @@ -86,12 +87,19 @@ impl EditPredictionExcerpt { buffer.len(), options.max_bytes ); - return Some(EditPredictionExcerpt::new(0..buffer.len(), Vec::new())); + let offset_range = 0..buffer.len(); + let line_range = Line(0)..Line(buffer.max_point().row); + return Some(EditPredictionExcerpt::new( + offset_range, + line_range, + Vec::new(), + )); } let query_offset = query_point.to_offset(buffer); - let query_range = Point::new(query_point.row, 0).to_offset(buffer) - ..Point::new(query_point.row + 1, 0).to_offset(buffer); + let query_line_range = query_point.row..query_point.row + 1; + let query_range = Point::new(query_line_range.start, 0).to_offset(buffer) + ..Point::new(query_line_range.end, 0).to_offset(buffer); if query_range.len() >= options.max_bytes { return None; } @@ -107,6 +115,7 @@ impl EditPredictionExcerpt { let excerpt_selector = ExcerptSelector { query_offset, query_range, + query_line_range: 
Line(query_line_range.start)..Line(query_line_range.end), parent_declarations: &parent_declarations, buffer, options, @@ -130,7 +139,11 @@ impl EditPredictionExcerpt { excerpt_selector.select_lines() } - fn new(range: Range, parent_declarations: Vec<(DeclarationId, Range)>) -> Self { + fn new( + range: Range, + line_range: Range, + parent_declarations: Vec<(DeclarationId, Range)>, + ) -> Self { let size = range.len() + parent_declarations .iter() @@ -140,10 +153,11 @@ impl EditPredictionExcerpt { range, parent_declarations, size, + line_range, } } - fn with_expanded_range(&self, new_range: Range) -> Self { + fn with_expanded_range(&self, new_range: Range, new_line_range: Range) -> Self { if !new_range.contains_inclusive(&self.range) { // this is an issue because parent_signature_ranges may be incorrect log::error!("bug: with_expanded_range called with disjoint range"); @@ -155,7 +169,7 @@ impl EditPredictionExcerpt { } parent_declarations.push((*declaration_id, range.clone())); } - Self::new(new_range, parent_declarations) + Self::new(new_range, new_line_range, parent_declarations) } fn parent_signatures_size(&self) -> usize { @@ -166,6 +180,7 @@ impl EditPredictionExcerpt { struct ExcerptSelector<'a> { query_offset: usize, query_range: Range, + query_line_range: Range, parent_declarations: &'a [(DeclarationId, &'a BufferDeclaration)], buffer: &'a BufferSnapshot, options: &'a EditPredictionExcerptOptions, @@ -178,10 +193,13 @@ impl<'a> ExcerptSelector<'a> { let mut cursor = selected_layer_root.walk(); loop { - let excerpt_range = node_line_start(cursor.node()).to_offset(&self.buffer) - ..node_line_end(cursor.node()).to_offset(&self.buffer); + let line_start = node_line_start(cursor.node()); + let line_end = node_line_end(cursor.node()); + let line_range = Line(line_start.row)..Line(line_end.row); + let excerpt_range = + line_start.to_offset(&self.buffer)..line_end.to_offset(&self.buffer); if excerpt_range.contains_inclusive(&self.query_range) { - let excerpt = self.make_excerpt(excerpt_range); + let excerpt = self.make_excerpt(excerpt_range, line_range); if excerpt.size <= self.options.max_bytes { return Some(self.expand_to_siblings(&mut cursor, excerpt)); } @@ -272,9 +290,13 @@ impl<'a> ExcerptSelector<'a> { let mut forward = None; while !forward_done { - let new_end = node_line_end(forward_cursor.node()).to_offset(&self.buffer); + let new_end_point = node_line_end(forward_cursor.node()); + let new_end = new_end_point.to_offset(&self.buffer); if new_end > excerpt.range.end { - let new_excerpt = excerpt.with_expanded_range(excerpt.range.start..new_end); + let new_excerpt = excerpt.with_expanded_range( + excerpt.range.start..new_end, + excerpt.line_range.start..Line(new_end_point.row), + ); if new_excerpt.size <= self.options.max_bytes { forward = Some(new_excerpt); break; @@ -289,9 +311,13 @@ impl<'a> ExcerptSelector<'a> { let mut backward = None; while !backward_done { - let new_start = node_line_start(backward_cursor.node()).to_offset(&self.buffer); + let new_start_point = node_line_start(backward_cursor.node()); + let new_start = new_start_point.to_offset(&self.buffer); if new_start < excerpt.range.start { - let new_excerpt = excerpt.with_expanded_range(new_start..excerpt.range.end); + let new_excerpt = excerpt.with_expanded_range( + new_start..excerpt.range.end, + Line(new_start_point.row)..excerpt.line_range.end, + ); if new_excerpt.size <= self.options.max_bytes { backward = Some(new_excerpt); break; @@ -339,7 +365,7 @@ impl<'a> ExcerptSelector<'a> { fn select_lines(&self) -> Option 
{ // early return if line containing query_offset is already too large - let excerpt = self.make_excerpt(self.query_range.clone()); + let excerpt = self.make_excerpt(self.query_range.clone(), self.query_line_range.clone()); if excerpt.size > self.options.max_bytes { log::debug!( "excerpt for cursor line is {} bytes, which exceeds the window", @@ -353,17 +379,24 @@ impl<'a> ExcerptSelector<'a> { let before_bytes = (self.options.target_before_cursor_over_total_bytes * bytes_remaining as f32) as usize; - let start_offset = - next_line_start(self.query_offset.saturating_sub(before_bytes), &self.buffer) - .to_offset(&self.buffer); - let end_offset = previous_line_start(start_offset + bytes_remaining, &self.buffer) - .to_offset(&self.buffer); + let start_line = { + let offset = self.query_offset.saturating_sub(before_bytes); + let point = offset.to_point(self.buffer); + Line(point.row + 1) + }; + let start_offset = Point::new(start_line.0, 0).to_offset(&self.buffer); + let end_line = { + let offset = start_offset + bytes_remaining; + let point = offset.to_point(self.buffer); + Line(point.row) + }; + let end_offset = Point::new(end_line.0, 0).to_offset(&self.buffer); // this could be expanded further since recalculated `signature_size` may be smaller, but // skipping that for now for simplicity // // TODO: could also consider checking if lines immediately before / after fit. - let excerpt = self.make_excerpt(start_offset..end_offset); + let excerpt = self.make_excerpt(start_offset..end_offset, start_line..end_line); if excerpt.size > self.options.max_bytes { log::error!( "bug: line-based excerpt selection has size {}, \ @@ -375,14 +408,14 @@ impl<'a> ExcerptSelector<'a> { return Some(excerpt); } - fn make_excerpt(&self, range: Range) -> EditPredictionExcerpt { + fn make_excerpt(&self, range: Range, line_range: Range) -> EditPredictionExcerpt { let parent_declarations = self .parent_declarations .iter() .filter(|(_, declaration)| declaration.item_range.contains_inclusive(&range)) .map(|(id, declaration)| (*id, declaration.signature_range.clone())) .collect(); - EditPredictionExcerpt::new(range, parent_declarations) + EditPredictionExcerpt::new(range, line_range, parent_declarations) } /// Returns `true` if the `forward` excerpt is a better choice than the `backward` excerpt. 
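The excerpt changes above thread a `Range<Line>` alongside the existing byte range, built by converting offsets to row numbers and wrapping them as `Line(start_row)..Line(end_row)`. As a rough illustration of that convention only, here is a minimal sketch over a plain string; the `Line` newtype and helpers are stand-ins, not the real `cloud_llm_client` or `rope` types.

```rust
// Illustrative sketch only: a minimal stand-in for the offset -> line-range
// conversion convention used above. `Line` is a hypothetical newtype here.
use std::ops::Range;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Line(u32);

/// Counts newlines before `offset` to get a zero-based row number.
fn offset_to_row(text: &str, offset: usize) -> u32 {
    text[..offset].bytes().filter(|&b| b == b'\n').count() as u32
}

/// Maps a byte range to `Line(row of start)..Line(row of end)`,
/// mirroring how the patch derives `line_range` from an offset range.
fn line_range(text: &str, range: Range<usize>) -> Range<Line> {
    Line(offset_to_row(text, range.start))..Line(offset_to_row(text, range.end))
}

fn main() {
    let text = "fn main() {\n    println!(\"hi\");\n}\n";
    // A byte range covering the second line.
    let range = 12..32;
    println!("{:?}", line_range(text, range)); // Line(1)..Line(2)
}
```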
diff --git a/crates/edit_prediction_context/src/syntax_index.rs b/crates/edit_prediction_context/src/syntax_index.rs index e2728ebfc029c7c1b74a35f2e6f5a79003a9a77e..76aa10c076d95aa10bd830bace23ad7b410d8102 100644 --- a/crates/edit_prediction_context/src/syntax_index.rs +++ b/crates/edit_prediction_context/src/syntax_index.rs @@ -854,7 +854,7 @@ mod tests { } #[gpui::test] - async fn test_declarations_limt(cx: &mut TestAppContext) { + async fn test_declarations_limit(cx: &mut TestAppContext) { let (_, index, rust_lang_id) = init_test(cx).await; let index_state = index.read_with(cx, |index, _cx| index.state().clone()); diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 52b3fa2affeca1ceb87485fb1242fe40b34f8f57..62226f5dec2aa88f0ccdb6ad59935f6bdfe6536e 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -64,6 +64,7 @@ project.workspace = true rand.workspace = true regex.workspace = true rpc.workspace = true +rope.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true @@ -92,7 +93,6 @@ uuid.workspace = true vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] criterion.workspace = true diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 99fe7557b8f0abe12a093d4dd540ead30b600e78..810b84efcd40de6e507dfe12b1a1a7f89d2ec4cf 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -318,6 +318,24 @@ pub struct GoToPreviousDiagnostic { pub severity: GoToDiagnosticSeverityFilter, } +/// Adds a cursor above the current selection. +#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = editor)] +#[serde(deny_unknown_fields)] +pub struct AddSelectionAbove { + #[serde(default = "default_true")] + pub skip_soft_wrap: bool, +} + +/// Adds a cursor below the current selection. +#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = editor)] +#[serde(deny_unknown_fields)] +pub struct AddSelectionBelow { + #[serde(default = "default_true")] + pub skip_soft_wrap: bool, +} + actions!( debugger, [ @@ -345,10 +363,6 @@ actions!( /// Accepts a partial edit prediction. #[action(deprecated_aliases = ["editor::AcceptPartialCopilotSuggestion"])] AcceptPartialEditPrediction, - /// Adds a cursor above the current selection. - AddSelectionAbove, - /// Adds a cursor below the current selection. - AddSelectionBelow, /// Applies all diff hunks in the editor. ApplyAllDiffHunks, /// Applies the diff hunk at the current position. @@ -444,6 +458,8 @@ actions!( /// Expands all diff hunks in the editor. #[action(deprecated_aliases = ["editor::ExpandAllHunkDiffs"])] ExpandAllDiffHunks, + /// Collapses all diff hunks in the editor. + CollapseAllDiffHunks, /// Expands macros recursively at cursor position. ExpandMacroRecursively, /// Finds all references to the symbol at cursor. 
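The `AddSelectionAbove`/`AddSelectionBelow` actions above gain a `skip_soft_wrap` field that defaults to `true` via serde's `default = "..."` attribute, so bindings that pass no arguments keep the old behavior. A self-contained sketch of that deserialization pattern follows; the struct and helper are stand-ins, not the editor's actual action registration, and serde/serde_json (with the derive feature) are assumed as dependencies.

```rust
// Sketch of the `#[serde(default = "default_true")]` pattern used by the new
// AddSelectionAbove/AddSelectionBelow structs. Stand-in types only.
use serde::Deserialize;

fn default_true() -> bool {
    true
}

#[derive(Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
struct AddSelectionAbove {
    #[serde(default = "default_true")]
    skip_soft_wrap: bool,
}

fn main() {
    // Omitting the field falls back to `default_true`, preserving old behavior.
    let defaulted: AddSelectionAbove = serde_json::from_str("{}").unwrap();
    assert_eq!(defaulted, AddSelectionAbove { skip_soft_wrap: true });

    // Passing the field explicitly opts into the alternate behavior.
    let explicit: AddSelectionAbove =
        serde_json::from_str(r#"{"skip_soft_wrap": false}"#).unwrap();
    assert!(!explicit.skip_soft_wrap);
}
```

This appears to be why the two actions were converted from unit actions to structs: the field can be added without breaking existing keymaps.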
diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index a89125a3aa6aebe23665469f34962dbacddc52d6..359c985ee9208a1a83e3458635df883c2cf991a8 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -328,11 +328,7 @@ impl CompletionsMenu { .map(|choice| Completion { replace_range: selection.start.text_anchor..selection.end.text_anchor, new_text: choice.to_string(), - label: CodeLabel { - text: choice.to_string(), - runs: Default::default(), - filter_range: Default::default(), - }, + label: CodeLabel::plain(choice.to_string(), None), icon_path: None, documentation: None, confirm: None, @@ -1518,6 +1514,7 @@ impl CodeActionsMenu { this.child( h_flex() .overflow_hidden() + .when(is_quick_action_bar, |this| this.text_ui(cx)) .child(task.resolved_label.replace("\n", "")) .when(selected, |this| { this.text_color(colors.text_accent) @@ -1528,6 +1525,7 @@ impl CodeActionsMenu { this.child( h_flex() .overflow_hidden() + .when(is_quick_action_bar, |this| this.text_ui(cx)) .child("debug: ") .child(scenario.label.clone()) .when(selected, |this| { diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 87841a8f7e135663df14b4bb82e18b61cf36907e..a6b3d904be94fdcab1b347f68c6c0b03ae091a04 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -27,7 +27,7 @@ mod tab_map; mod wrap_map; use crate::{ - EditorStyle, InlayId, RowExt, hover_links::InlayHighlight, movement::TextLayoutDetails, + EditorStyle, RowExt, hover_links::InlayHighlight, inlays::Inlay, movement::TextLayoutDetails, }; pub use block_map::{ Block, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap, BlockPlacement, @@ -42,7 +42,6 @@ pub use fold_map::{ ChunkRenderer, ChunkRendererContext, ChunkRendererId, Fold, FoldId, FoldPlaceholder, FoldPoint, }; use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle}; -pub use inlay_map::Inlay; use inlay_map::InlaySnapshot; pub use inlay_map::{InlayOffset, InlayPoint}; pub use invisibles::{is_invisible, replacement}; @@ -50,9 +49,10 @@ use language::{ OffsetUtf16, Point, Subscription as BufferSubscription, language_settings::language_settings, }; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferPoint, MultiBufferRow, - MultiBufferSnapshot, RowInfo, ToOffset, ToPoint, + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, + RowInfo, ToOffset, ToPoint, }; +use project::InlayId; use project::project_settings::DiagnosticSeverity; use serde::Deserialize; @@ -594,21 +594,6 @@ impl DisplayMap { self.block_map.read(snapshot, edits); } - pub fn remove_inlays_for_excerpts(&mut self, excerpts_removed: &[ExcerptId]) { - let to_remove = self - .inlay_map - .current_inlays() - .filter_map(|inlay| { - if excerpts_removed.contains(&inlay.position.excerpt_id) { - Some(inlay.id) - } else { - None - } - }) - .collect::>(); - self.inlay_map.splice(&to_remove, Vec::new()); - } - fn tab_size(buffer: &Entity, cx: &App) -> NonZeroU32 { let buffer = buffer.read(cx).as_singleton().map(|buffer| buffer.read(cx)); let language = buffer @@ -1401,6 +1386,26 @@ impl DisplaySnapshot { pub fn excerpt_header_height(&self) -> u32 { self.block_snapshot.excerpt_header_height } + + /// Given a `DisplayPoint`, returns another `DisplayPoint` corresponding to + /// the start of the buffer row that is a given number of buffer rows away + /// from the provided point. 
+ /// + /// This moves by buffer rows instead of display rows, a distinction that is + /// important when soft wrapping is enabled. + pub fn start_of_relative_buffer_row(&self, point: DisplayPoint, times: isize) -> DisplayPoint { + let start = self.display_point_to_fold_point(point, Bias::Left); + let target = start.row() as isize + times; + let new_row = (target.max(0) as u32).min(self.fold_snapshot().max_point().row()); + + self.clip_point( + self.fold_point_to_display_point( + self.fold_snapshot() + .clip_point(FoldPoint::new(new_row, 0), Bias::Right), + ), + Bias::Right, + ) + } } #[derive(Copy, Clone, Default, Eq, Ord, PartialOrd, PartialEq)] diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index c954e1ba1b487c1c33187e895b7897f8ed67f94e..4535e161392fd53e80ceb80fc736799ffafc84a7 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -26,8 +26,8 @@ use sum_tree::{Bias, ContextLessSummary, Dimensions, SumTree, TreeMap}; use text::{BufferId, Edit}; use ui::ElementId; -const NEWLINES: &[u8; u128::BITS as usize] = &[b'\n'; _]; -const BULLETS: &[u8; u128::BITS as usize] = &[b'*'; _]; +const NEWLINES: &[u8; rope::Chunk::MASK_BITS] = &[b'\n'; _]; +const BULLETS: &[u8; rope::Chunk::MASK_BITS] = &[b'*'; _]; /// Tracks custom blocks such as diagnostics that should be displayed within buffer. /// @@ -1186,18 +1186,14 @@ impl BlockMapWriter<'_> { self.0.sync(wrap_snapshot, edits); } - pub fn remove_intersecting_replace_blocks( + pub fn remove_intersecting_replace_blocks( &mut self, - ranges: impl IntoIterator>, + ranges: impl IntoIterator>, inclusive: bool, - ) where - T: ToOffset, - { + ) { let wrap_snapshot = self.0.wrap_snapshot.borrow(); let mut blocks_to_remove = HashSet::default(); for range in ranges { - let range = range.start.to_offset(wrap_snapshot.buffer_snapshot()) - ..range.end.to_offset(wrap_snapshot.buffer_snapshot()); for block in self.blocks_intersecting_buffer_range(range, inclusive) { if matches!(block.placement, BlockPlacement::Replace(_)) { blocks_to_remove.insert(block.id); @@ -1521,10 +1517,11 @@ impl BlockSnapshot { } pub(super) fn line_len(&self, row: BlockRow) -> u32 { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&BlockRow(row.0), Bias::Right); - if let Some(transform) = cursor.item() { - let Dimensions(output_start, input_start, _) = cursor.start(); + let (start, _, item) = + self.transforms + .find::, _>((), &row, Bias::Right); + if let Some(transform) = item { + let Dimensions(output_start, input_start, _) = start; let overshoot = row.0 - output_start.0; if transform.block.is_some() { 0 @@ -1539,15 +1536,13 @@ impl BlockSnapshot { } pub(super) fn is_block_line(&self, row: BlockRow) -> bool { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&row, Bias::Right); - cursor.item().is_some_and(|t| t.block.is_some()) + let (_, _, item) = self.transforms.find::((), &row, Bias::Right); + item.is_some_and(|t| t.block.is_some()) } pub(super) fn is_folded_buffer_header(&self, row: BlockRow) -> bool { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&row, Bias::Right); - let Some(transform) = cursor.item() else { + let (_, _, item) = self.transforms.find::((), &row, Bias::Right); + let Some(transform) = item else { return false; }; matches!(transform.block, Some(Block::FoldedBuffer { .. 
})) @@ -1557,9 +1552,10 @@ impl BlockSnapshot { let wrap_point = self .wrap_snapshot .make_wrap_point(Point::new(row.0, 0), Bias::Left); - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&WrapRow(wrap_point.row()), Bias::Right); - cursor.item().is_some_and(|transform| { + let (_, _, item) = + self.transforms + .find::((), &WrapRow(wrap_point.row()), Bias::Right); + item.is_some_and(|transform| { transform .block .as_ref() @@ -1627,13 +1623,16 @@ impl BlockSnapshot { } pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&WrapRow(wrap_point.row()), Bias::Right); - if let Some(transform) = cursor.item() { + let (start, _, item) = self.transforms.find::, _>( + (), + &WrapRow(wrap_point.row()), + Bias::Right, + ); + if let Some(transform) = item { if transform.block.is_some() { - BlockPoint::new(cursor.start().1.0, 0) + BlockPoint::new(start.1.0, 0) } else { - let Dimensions(input_start_row, output_start_row, _) = cursor.start(); + let Dimensions(input_start_row, output_start_row, _) = start; let input_start = Point::new(input_start_row.0, 0); let output_start = Point::new(output_start_row.0, 0); let input_overshoot = wrap_point.0 - input_start; @@ -1645,26 +1644,29 @@ impl BlockSnapshot { } pub fn to_wrap_point(&self, block_point: BlockPoint, bias: Bias) -> WrapPoint { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&BlockRow(block_point.row), Bias::Right); - if let Some(transform) = cursor.item() { + let (start, end, item) = self.transforms.find::, _>( + (), + &BlockRow(block_point.row), + Bias::Right, + ); + if let Some(transform) = item { match transform.block.as_ref() { Some(block) => { if block.place_below() { - let wrap_row = cursor.start().1.0 - 1; + let wrap_row = start.1.0 - 1; WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) } else if block.place_above() { - WrapPoint::new(cursor.start().1.0, 0) + WrapPoint::new(start.1.0, 0) } else if bias == Bias::Left { - WrapPoint::new(cursor.start().1.0, 0) + WrapPoint::new(start.1.0, 0) } else { - let wrap_row = cursor.end().1.0 - 1; + let wrap_row = end.1.0 - 1; WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) } } None => { - let overshoot = block_point.row - cursor.start().0.0; - let wrap_row = cursor.start().1.0 + overshoot; + let overshoot = block_point.row - start.0.0; + let wrap_row = start.1.0 + overshoot; WrapPoint::new(wrap_row, block_point.column) } } @@ -1777,11 +1779,11 @@ impl<'a> Iterator for BlockChunks<'a> { if self.masked { // Not great for multibyte text because to keep cursor math correct we - // need to have the same number of bytes in the input as output. + // need to have the same number of chars in the input as output. 
let chars_count = prefix.chars().count(); let bullet_len = chars_count; prefix = unsafe { std::str::from_utf8_unchecked(&BULLETS[..bullet_len]) }; - chars = 1u128.unbounded_shl(bullet_len as u32) - 1; + chars = 1u128.unbounded_shl(bullet_len as u32).wrapping_sub(1); tabs = 0; } @@ -3564,8 +3566,12 @@ mod tests { let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); writer.remove_intersecting_replace_blocks( - [buffer_snapshot.anchor_after(Point::new(1, 0)) - ..buffer_snapshot.anchor_after(Point::new(1, 0))], + [buffer_snapshot + .anchor_after(Point::new(1, 0)) + .to_offset(&buffer_snapshot) + ..buffer_snapshot + .anchor_after(Point::new(1, 0)) + .to_offset(&buffer_snapshot)], false, ); let blocks_snapshot = block_map.read(wraps_snapshot, Default::default()); diff --git a/crates/editor/src/display_map/custom_highlights.rs b/crates/editor/src/display_map/custom_highlights.rs index b7518af59c28dbc95a36d24b36a7eae2862916b6..c6b22bb0b8247420200c2bb8d9e22f55d638386d 100644 --- a/crates/editor/src/display_map/custom_highlights.rs +++ b/crates/editor/src/display_map/custom_highlights.rs @@ -132,37 +132,31 @@ impl<'a> Iterator for CustomHighlightsChunks<'a> { } } - let chunk = self - .buffer_chunk - .get_or_insert_with(|| self.buffer_chunks.next().unwrap_or_default()); - if chunk.text.is_empty() { + let chunk = match &mut self.buffer_chunk { + Some(it) => it, + slot => slot.insert(self.buffer_chunks.next()?), + }; + while chunk.text.is_empty() { *chunk = self.buffer_chunks.next()?; } let split_idx = chunk.text.len().min(next_highlight_endpoint - self.offset); let (prefix, suffix) = chunk.text.split_at(split_idx); - - let (chars, tabs) = if split_idx == 128 { - let output = (chunk.chars, chunk.tabs); - chunk.chars = 0; - chunk.tabs = 0; - output - } else { - let mask = (1 << split_idx) - 1; - let output = (chunk.chars & mask, chunk.tabs & mask); - chunk.chars = chunk.chars >> split_idx; - chunk.tabs = chunk.tabs >> split_idx; - output - }; - - chunk.text = suffix; self.offset += prefix.len(); + + let mask = 1u128.unbounded_shl(split_idx as u32).wrapping_sub(1); + let chars = chunk.chars & mask; + let tabs = chunk.tabs & mask; let mut prefix = Chunk { text: prefix, chars, tabs, ..chunk.clone() }; + + chunk.chars = chunk.chars.unbounded_shr(split_idx as u32); + chunk.tabs = chunk.tabs.unbounded_shr(split_idx as u32); + chunk.text = suffix; if !self.active_highlights.is_empty() { prefix.highlight_style = self .active_highlights diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index e5d82f8f70a9b5e29622b1302c1eaaf2070b0387..a31599ef9b276246226c12640fa8ffbec57eb9e3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,4 +1,4 @@ -use crate::{InlayId, display_map::inlay_map::InlayChunk}; +use crate::display_map::inlay_map::InlayChunk; use super::{ Highlights, @@ -9,6 +9,7 @@ use language::{Edit, HighlightId, Point, TextSummary}; use multi_buffer::{ Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, }; +use project::InlayId; use std::{ any::TypeId, cmp::{self, Ordering}, @@ -98,28 +99,26 @@ impl FoldPoint { } pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { - let mut cursor = snapshot + let (start, _, _) = snapshot .transforms - .cursor::>(()); - cursor.seek(&self, Bias::Right); - let overshoot = self.0 - cursor.start().0.0; - InlayPoint(cursor.start().1.0 + overshoot) + .find::, _>((), &self, Bias::Right); + let overshoot = self.0 
- start.0.0; + InlayPoint(start.1.0 + overshoot) } pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { - let mut cursor = snapshot + let (start, _, item) = snapshot .transforms - .cursor::>(()); - cursor.seek(&self, Bias::Right); - let overshoot = self.0 - cursor.start().1.output.lines; - let mut offset = cursor.start().1.output.len; + .find::, _>((), &self, Bias::Right); + let overshoot = self.0 - start.1.output.lines; + let mut offset = start.1.output.len; if !overshoot.is_zero() { - let transform = cursor.item().expect("display point out of range"); + let transform = item.expect("display point out of range"); assert!(transform.placeholder.is_none()); let end_inlay_offset = snapshot .inlay_snapshot - .to_offset(InlayPoint(cursor.start().1.input.lines + overshoot)); - offset += end_inlay_offset.0 - cursor.start().1.input.len; + .to_offset(InlayPoint(start.1.input.lines + overshoot)); + offset += end_inlay_offset.0 - start.1.input.len; } FoldOffset(offset) } @@ -706,19 +705,18 @@ impl FoldSnapshot { } pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { - let mut cursor = self + let (start, end, item) = self .transforms - .cursor::>(()); - cursor.seek(&point, Bias::Right); - if cursor.item().is_some_and(|t| t.is_fold()) { - if bias == Bias::Left || point == cursor.start().0 { - cursor.start().1 + .find::, _>((), &point, Bias::Right); + if item.is_some_and(|t| t.is_fold()) { + if bias == Bias::Left || point == start.0 { + start.1 } else { - cursor.end().1 + end.1 } } else { - let overshoot = point.0 - cursor.start().0.0; - FoldPoint(cmp::min(cursor.start().1.0 + overshoot, cursor.end().1.0)) + let overshoot = point.0 - start.0.0; + FoldPoint(cmp::min(start.1.0 + overshoot, end.1.0)) } } @@ -787,9 +785,10 @@ impl FoldSnapshot { { let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); - let mut cursor = self.transforms.cursor::(()); - cursor.seek(&inlay_offset, Bias::Right); - cursor.item().is_some_and(|t| t.placeholder.is_some()) + let (_, _, item) = self + .transforms + .find::((), &inlay_offset, Bias::Right); + item.is_some_and(|t| t.placeholder.is_some()) } pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool { @@ -891,23 +890,22 @@ impl FoldSnapshot { } pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { - let mut cursor = self + let (start, end, item) = self .transforms - .cursor::>(()); - cursor.seek(&point, Bias::Right); - if let Some(transform) = cursor.item() { - let transform_start = cursor.start().0.0; + .find::, _>((), &point, Bias::Right); + if let Some(transform) = item { + let transform_start = start.0.0; if transform.placeholder.is_some() { if point.0 == transform_start || matches!(bias, Bias::Left) { FoldPoint(transform_start) } else { - FoldPoint(cursor.end().0.0) + FoldPoint(end.0.0) } } else { let overshoot = InlayPoint(point.0 - transform_start); - let inlay_point = cursor.start().1 + overshoot; + let inlay_point = start.1 + overshoot; let clipped_inlay_point = self.inlay_snapshot.clip_point(inlay_point, bias); - FoldPoint(cursor.start().0.0 + (clipped_inlay_point - cursor.start().1).0) + FoldPoint(start.0.0 + (clipped_inlay_point - start.1).0) } } else { FoldPoint(self.transforms.summary().output.lines) @@ -1439,14 +1437,15 @@ impl<'a> Iterator for FoldChunks<'a> { let transform_end = self.transform_cursor.end().1; let chunk_end = buffer_chunk_end.min(transform_end); - chunk.text = &chunk.text - [(self.inlay_offset - 
buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0]; + let bit_start = (self.inlay_offset - buffer_chunk_start).0; + let bit_end = (chunk_end - buffer_chunk_start).0; + chunk.text = &chunk.text[bit_start..bit_end]; let bit_end = (chunk_end - buffer_chunk_start).0; let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1); - chunk.tabs = (chunk.tabs >> (self.inlay_offset - buffer_chunk_start).0) & mask; - chunk.chars = (chunk.chars >> (self.inlay_offset - buffer_chunk_start).0) & mask; + chunk.tabs = (chunk.tabs >> bit_start) & mask; + chunk.chars = (chunk.chars >> bit_start) & mask; if chunk_end == transform_end { self.transform_cursor.next(); @@ -1480,28 +1479,26 @@ pub struct FoldOffset(pub usize); impl FoldOffset { pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { - let mut cursor = snapshot + let (start, _, item) = snapshot .transforms - .cursor::>(()); - cursor.seek(&self, Bias::Right); - let overshoot = if cursor.item().is_none_or(|t| t.is_fold()) { - Point::new(0, (self.0 - cursor.start().0.0) as u32) + .find::, _>((), &self, Bias::Right); + let overshoot = if item.is_none_or(|t| t.is_fold()) { + Point::new(0, (self.0 - start.0.0) as u32) } else { - let inlay_offset = cursor.start().1.input.len + self.0 - cursor.start().0.0; + let inlay_offset = start.1.input.len + self.0 - start.0.0; let inlay_point = snapshot.inlay_snapshot.to_point(InlayOffset(inlay_offset)); - inlay_point.0 - cursor.start().1.input.lines + inlay_point.0 - start.1.input.lines }; - FoldPoint(cursor.start().1.output.lines + overshoot) + FoldPoint(start.1.output.lines + overshoot) } #[cfg(test)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { - let mut cursor = snapshot + let (start, _, _) = snapshot .transforms - .cursor::>(()); - cursor.seek(&self, Bias::Right); - let overshoot = self.0 - cursor.start().0.0; - InlayOffset(cursor.start().1.0 + overshoot) + .find::, _>((), &self, Bias::Right); + let overshoot = self.0 - start.0.0; + InlayOffset(start.1.0 + overshoot) } } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index c4532a93f1d50e91dbd4791b4621b74ee0813cbe..486676f1120bc2e9d85effd4c328a2b7a547e06b 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -1,17 +1,18 @@ -use crate::{ChunkRenderer, HighlightStyles, InlayId}; +use crate::{ + ChunkRenderer, HighlightStyles, + inlays::{Inlay, InlayContent}, +}; use collections::BTreeSet; -use gpui::{Hsla, Rgba}; use language::{Chunk, Edit, Point, TextSummary}; -use multi_buffer::{ - Anchor, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, RowInfo, ToOffset, -}; +use multi_buffer::{MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, RowInfo, ToOffset}; +use project::InlayId; use std::{ cmp, ops::{Add, AddAssign, Range, Sub, SubAssign}, - sync::{Arc, OnceLock}, + sync::Arc, }; use sum_tree::{Bias, Cursor, Dimensions, SumTree}; -use text::{ChunkBitmaps, Patch, Rope}; +use text::{ChunkBitmaps, Patch}; use ui::{ActiveTheme, IntoElement as _, ParentElement as _, Styled as _, div}; use super::{Highlights, custom_highlights::CustomHighlightsChunks, fold_map::ChunkRendererId}; @@ -37,85 +38,6 @@ enum Transform { Inlay(Inlay), } -#[derive(Debug, Clone)] -pub struct Inlay { - pub id: InlayId, - pub position: Anchor, - pub content: InlayContent, -} - -#[derive(Debug, Clone)] -pub enum InlayContent { - Text(text::Rope), - Color(Hsla), -} - -impl Inlay { - pub fn hint(id: u32, position: Anchor, hint: &project::InlayHint) 
-> Self { - let mut text = hint.text(); - if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { - text.push(" "); - } - if hint.padding_left && text.chars_at(0).next() != Some(' ') { - text.push_front(" "); - } - Self { - id: InlayId::Hint(id), - position, - content: InlayContent::Text(text), - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn mock_hint(id: u32, position: Anchor, text: impl Into) -> Self { - Self { - id: InlayId::Hint(id), - position, - content: InlayContent::Text(text.into()), - } - } - - pub fn color(id: u32, position: Anchor, color: Rgba) -> Self { - Self { - id: InlayId::Color(id), - position, - content: InlayContent::Color(color.into()), - } - } - - pub fn edit_prediction>(id: u32, position: Anchor, text: T) -> Self { - Self { - id: InlayId::EditPrediction(id), - position, - content: InlayContent::Text(text.into()), - } - } - - pub fn debugger>(id: u32, position: Anchor, text: T) -> Self { - Self { - id: InlayId::DebuggerValue(id), - position, - content: InlayContent::Text(text.into()), - } - } - - pub fn text(&self) -> &Rope { - static COLOR_TEXT: OnceLock = OnceLock::new(); - match &self.content { - InlayContent::Text(text) => text, - InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("◼")), - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn get_color(&self) -> Option { - match self.content { - InlayContent::Color(color) => Some(color), - _ => None, - } - } -} - impl sum_tree::Item for Transform { type Summary = TransformSummary; @@ -325,21 +247,16 @@ impl<'a> Iterator for InlayChunks<'a> { }; let (prefix, suffix) = chunk.text.split_at(split_index); + self.output_offset.0 += prefix.len(); - let (chars, tabs) = if split_index == 128 { - let output = (chunk.chars, chunk.tabs); - chunk.chars = 0; - chunk.tabs = 0; - output - } else { - let mask = (1 << split_index) - 1; - let output = (chunk.chars & mask, chunk.tabs & mask); - chunk.chars = chunk.chars >> split_index; - chunk.tabs = chunk.tabs >> split_index; - output - }; + let mask = 1u128.unbounded_shl(split_index as u32).wrapping_sub(1); + let chars = chunk.chars & mask; + let tabs = chunk.tabs & mask; + + chunk.chars = chunk.chars.unbounded_shr(split_index as u32); + chunk.tabs = chunk.tabs.unbounded_shr(split_index as u32); chunk.text = suffix; - self.output_offset.0 += prefix.len(); + InlayChunk { chunk: Chunk { text: prefix, @@ -457,18 +374,12 @@ impl<'a> Iterator for InlayChunks<'a> { let (chunk, remainder) = inlay_chunk.split_at(split_index); *inlay_chunk = remainder; - let (chars, tabs) = if split_index == 128 { - let output = (*chars, *tabs); - *chars = 0; - *tabs = 0; - output - } else { - let mask = (1 << split_index as u32) - 1; - let output = (*chars & mask, *tabs & mask); - *chars = *chars >> split_index; - *tabs = *tabs >> split_index; - output - }; + let mask = 1u128.unbounded_shl(split_index as u32).wrapping_sub(1); + let new_chars = *chars & mask; + let new_tabs = *tabs & mask; + + *chars = chars.unbounded_shr(split_index as u32); + *tabs = tabs.unbounded_shr(split_index as u32); if inlay_chunk.is_empty() { self.inlay_chunk = None; @@ -479,8 +390,8 @@ impl<'a> Iterator for InlayChunks<'a> { InlayChunk { chunk: Chunk { text: chunk, - chars, - tabs, + chars: new_chars, + tabs: new_tabs, highlight_style, is_inlay: true, ..Chunk::default() @@ -761,7 +672,7 @@ impl InlayMap { #[cfg(test)] pub(crate) fn randomly_mutate( &mut self, - next_inlay_id: &mut u32, + next_inlay_id: &mut usize, rng: &mut rand::rngs::StdRng, ) -> (InlaySnapshot, 
Vec) { use rand::prelude::*; @@ -825,22 +736,21 @@ impl InlayMap { impl InlaySnapshot { pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { - let mut cursor = self + let (start, _, item) = self .transforms - .cursor::>(()); - cursor.seek(&offset, Bias::Right); - let overshoot = offset.0 - cursor.start().0.0; - match cursor.item() { + .find::, _>((), &offset, Bias::Right); + let overshoot = offset.0 - start.0.0; + match item { Some(Transform::Isomorphic(_)) => { - let buffer_offset_start = cursor.start().2; + let buffer_offset_start = start.2; let buffer_offset_end = buffer_offset_start + overshoot; let buffer_start = self.buffer.offset_to_point(buffer_offset_start); let buffer_end = self.buffer.offset_to_point(buffer_offset_end); - InlayPoint(cursor.start().1.0 + (buffer_end - buffer_start)) + InlayPoint(start.1.0 + (buffer_end - buffer_start)) } Some(Transform::Inlay(inlay)) => { let overshoot = inlay.text().offset_to_point(overshoot); - InlayPoint(cursor.start().1.0 + overshoot) + InlayPoint(start.1.0 + overshoot) } None => self.max_point(), } @@ -855,47 +765,48 @@ impl InlaySnapshot { } pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { - let mut cursor = self + let (start, _, item) = self .transforms - .cursor::>(()); - cursor.seek(&point, Bias::Right); - let overshoot = point.0 - cursor.start().0.0; - match cursor.item() { + .find::, _>((), &point, Bias::Right); + let overshoot = point.0 - start.0.0; + match item { Some(Transform::Isomorphic(_)) => { - let buffer_point_start = cursor.start().2; + let buffer_point_start = start.2; let buffer_point_end = buffer_point_start + overshoot; let buffer_offset_start = self.buffer.point_to_offset(buffer_point_start); let buffer_offset_end = self.buffer.point_to_offset(buffer_point_end); - InlayOffset(cursor.start().1.0 + (buffer_offset_end - buffer_offset_start)) + InlayOffset(start.1.0 + (buffer_offset_end - buffer_offset_start)) } Some(Transform::Inlay(inlay)) => { let overshoot = inlay.text().point_to_offset(overshoot); - InlayOffset(cursor.start().1.0 + overshoot) + InlayOffset(start.1.0 + overshoot) } None => self.len(), } } pub fn to_buffer_point(&self, point: InlayPoint) -> Point { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&point, Bias::Right); - match cursor.item() { + let (start, _, item) = + self.transforms + .find::, _>((), &point, Bias::Right); + match item { Some(Transform::Isomorphic(_)) => { - let overshoot = point.0 - cursor.start().0.0; - cursor.start().1 + overshoot + let overshoot = point.0 - start.0.0; + start.1 + overshoot } - Some(Transform::Inlay(_)) => cursor.start().1, + Some(Transform::Inlay(_)) => start.1, None => self.buffer.max_point(), } } pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { - let mut cursor = self.transforms.cursor::>(()); - cursor.seek(&offset, Bias::Right); - match cursor.item() { + let (start, _, item) = + self.transforms + .find::, _>((), &offset, Bias::Right); + match item { Some(Transform::Isomorphic(_)) => { - let overshoot = offset - cursor.start().0; - cursor.start().1 + overshoot.0 + let overshoot = offset - start.0; + start.1 + overshoot.0 } - Some(Transform::Inlay(_)) => cursor.start().1, + Some(Transform::Inlay(_)) => start.1, None => self.buffer.len(), } } @@ -1256,17 +1167,18 @@ const fn is_utf8_char_boundary(byte: u8) -> bool { mod tests { use super::*; use crate::{ - InlayId, MultiBuffer, + MultiBuffer, display_map::{HighlightKey, InlayHighlights, TextHighlights}, hover_links::InlayHighlight, }; use gpui::{App, HighlightStyle}; + 
use multi_buffer::Anchor; use project::{InlayHint, InlayHintLabel, ResolveState}; use rand::prelude::*; use settings::SettingsStore; use std::{any::TypeId, cmp::Reverse, env, sync::Arc}; use sum_tree::TreeMap; - use text::Patch; + use text::{Patch, Rope}; use util::RandomCharIter; use util::post_inc; @@ -1274,11 +1186,11 @@ mod tests { fn test_inlay_properties_label_padding() { assert_eq!( Inlay::hint( - 0, + InlayId::Hint(0), Anchor::min(), &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::default(), + position: text::Anchor::MIN, padding_left: false, padding_right: false, tooltip: None, @@ -1294,11 +1206,11 @@ mod tests { assert_eq!( Inlay::hint( - 0, + InlayId::Hint(0), Anchor::min(), &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::default(), + position: text::Anchor::MIN, padding_left: true, padding_right: true, tooltip: None, @@ -1314,11 +1226,11 @@ mod tests { assert_eq!( Inlay::hint( - 0, + InlayId::Hint(0), Anchor::min(), &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::default(), + position: text::Anchor::MIN, padding_left: false, padding_right: false, tooltip: None, @@ -1334,11 +1246,11 @@ mod tests { assert_eq!( Inlay::hint( - 0, + InlayId::Hint(0), Anchor::min(), &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::default(), + position: text::Anchor::MIN, padding_left: true, padding_right: true, tooltip: None, @@ -1357,11 +1269,11 @@ mod tests { fn test_inlay_hint_padding_with_multibyte_chars() { assert_eq!( Inlay::hint( - 0, + InlayId::Hint(0), Anchor::min(), &InlayHint { label: InlayHintLabel::String("🎨".to_string()), - position: text::Anchor::default(), + position: text::Anchor::MIN, padding_left: true, padding_right: true, tooltip: None, diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index b37d81c66614030ab574244f3de0277d3fd8bee9..567533aef556c10a966bc2574a0056c3a115f916 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -11,7 +11,7 @@ use sum_tree::Bias; const MAX_EXPANSION_COLUMN: u32 = 256; // Handles a tab width <= 128 -const SPACES: &[u8; u128::BITS as usize] = &[b' '; _]; +const SPACES: &[u8; rope::Chunk::MASK_BITS] = &[b' '; _]; const MAX_TABS: NonZeroU32 = NonZeroU32::new(SPACES.len() as u32).unwrap(); /// Keeps track of hard tabs in a text buffer. 
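Several hunks above (block_map, custom_highlights, fold_map, inlay_map, and the wrap_map and tab_map changes around this point) replace the old `if split == 128 { … } else { (1 << split) - 1 … }` branching with `unbounded_shl` / `unbounded_shr`, which saturate to zero once the shift reaches the bit width. The self-contained sketch below shows just that mask trick, assuming the same layout as the chunks in the diff: a chunk spans at most 128 bytes, `chars` has one bit per byte that starts a character, and `tabs` has one bit per tab byte. It needs a recent stable Rust (1.87+), where the unbounded shift methods are available.

```rust
// Split the per-byte bitmaps of a chunk at `split_idx`, returning
// ((prefix_chars, prefix_tabs), (suffix_chars, suffix_tabs)).
fn split_bitmaps(chars: u128, tabs: u128, split_idx: u32) -> ((u128, u128), (u128, u128)) {
    // `unbounded_shl` returns 0 for shifts >= 128, so a split at index 128
    // produces an all-ones mask via the wrapping subtraction instead of the
    // overflow that `1u128 << 128` would cause.
    let mask = 1u128.unbounded_shl(split_idx).wrapping_sub(1);
    let prefix = (chars & mask, tabs & mask);
    let suffix = (chars.unbounded_shr(split_idx), tabs.unbounded_shr(split_idx));
    (prefix, suffix)
}

fn main() {
    // "a\tb": char boundaries at bytes 0, 1, 2; a tab at byte 1.
    let (chars, tabs) = (0b111u128, 0b010u128);
    let ((pc, pt), (sc, st)) = split_bitmaps(chars, tabs, 2);
    assert_eq!((pc, pt), (0b11, 0b10)); // prefix "a\t"
    assert_eq!((sc, st), (0b1, 0b0)); // suffix "b"

    // The edge case the old code had to special-case explicitly:
    let ((pc, _), (sc, _)) = split_bitmaps(u128::MAX, 0, 128);
    assert_eq!(pc, u128::MAX);
    assert_eq!(sc, 0);
}
```

The refactor removes a branch at every split site without changing behavior at the 128-byte boundary.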
@@ -569,56 +569,47 @@ impl<'a> Iterator for TabChunks<'a> { //todo(improve performance by using tab cursor) for (ix, c) in self.chunk.text.char_indices() { match c { + '\t' if ix > 0 => { + let (prefix, suffix) = self.chunk.text.split_at(ix); + + let mask = 1u128.unbounded_shl(ix as u32).wrapping_sub(1); + let chars = self.chunk.chars & mask; + let tabs = self.chunk.tabs & mask; + self.chunk.tabs = self.chunk.tabs.unbounded_shr(ix as u32); + self.chunk.chars = self.chunk.chars.unbounded_shr(ix as u32); + self.chunk.text = suffix; + return Some(Chunk { + text: prefix, + chars, + tabs, + ..self.chunk.clone() + }); + } '\t' => { - if ix > 0 { - let (prefix, suffix) = self.chunk.text.split_at(ix); - - let (chars, tabs) = if ix == 128 { - let output = (self.chunk.chars, self.chunk.tabs); - self.chunk.chars = 0; - self.chunk.tabs = 0; - output - } else { - let mask = (1 << ix) - 1; - let output = (self.chunk.chars & mask, self.chunk.tabs & mask); - self.chunk.chars = self.chunk.chars >> ix; - self.chunk.tabs = self.chunk.tabs >> ix; - output - }; - - self.chunk.text = suffix; - return Some(Chunk { - text: prefix, - chars, - tabs, - ..self.chunk.clone() - }); + self.chunk.text = &self.chunk.text[1..]; + self.chunk.tabs >>= 1; + self.chunk.chars >>= 1; + let tab_size = if self.input_column < self.max_expansion_column { + self.tab_size.get() } else { - self.chunk.text = &self.chunk.text[1..]; - self.chunk.tabs >>= 1; - self.chunk.chars >>= 1; - let tab_size = if self.input_column < self.max_expansion_column { - self.tab_size.get() - } else { - 1 - }; - let mut len = tab_size - self.column % tab_size; - let next_output_position = cmp::min( - self.output_position + Point::new(0, len), - self.max_output_position, - ); - len = next_output_position.column - self.output_position.column; - self.column += len; - self.input_column += 1; - self.output_position = next_output_position; - return Some(Chunk { - text: unsafe { std::str::from_utf8_unchecked(&SPACES[..len as usize]) }, - is_tab: true, - chars: 1u128.unbounded_shl(len) - 1, - tabs: 0, - ..self.chunk.clone() - }); - } + 1 + }; + let mut len = tab_size - self.column % tab_size; + let next_output_position = cmp::min( + self.output_position + Point::new(0, len), + self.max_output_position, + ); + len = next_output_position.column - self.output_position.column; + self.column += len; + self.input_column += 1; + self.output_position = next_output_position; + return Some(Chunk { + text: unsafe { std::str::from_utf8_unchecked(&SPACES[..len as usize]) }, + is_tab: true, + chars: 1u128.unbounded_shl(len) - 1, + tabs: 0, + ..self.chunk.clone() + }); } '\n' => { self.column = 0; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 39c247cb4e105155e77c8fd5c84e5f185d726af1..e79e5555a61d0ddb8a93a1708c676554f191c3f6 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -568,14 +568,17 @@ impl WrapSnapshot { let mut old_start = old_cursor.start().output.lines; old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; + // todo(lw): Should these be seek_forward? old_cursor.seek(&tab_edit.old.end, Bias::Right); let mut old_end = old_cursor.start().output.lines; old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; + // todo(lw): Should these be seek_forward? 
new_cursor.seek(&tab_edit.new.start, Bias::Right); let mut new_start = new_cursor.start().output.lines; new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; + // todo(lw): Should these be seek_forward? new_cursor.seek(&tab_edit.new.end, Bias::Right); let mut new_end = new_cursor.start().output.lines; new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; @@ -628,24 +631,22 @@ impl WrapSnapshot { } pub fn line_len(&self, row: u32) -> u32 { - let mut cursor = self - .transforms - .cursor::>(()); - cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left); - if cursor - .item() - .is_some_and(|transform| transform.is_isomorphic()) - { - let overshoot = row - cursor.start().0.row(); - let tab_row = cursor.start().1.row() + overshoot; + let (start, _, item) = self.transforms.find::, _>( + (), + &WrapPoint::new(row + 1, 0), + Bias::Left, + ); + if item.is_some_and(|transform| transform.is_isomorphic()) { + let overshoot = row - start.0.row(); + let tab_row = start.1.row() + overshoot; let tab_line_len = self.tab_snapshot.line_len(tab_row); if overshoot == 0 { - cursor.start().0.column() + (tab_line_len - cursor.start().1.column()) + start.0.column() + (tab_line_len - start.1.column()) } else { tab_line_len } } else { - cursor.start().0.column() + start.0.column() } } @@ -711,9 +712,10 @@ impl WrapSnapshot { } pub fn soft_wrap_indent(&self, row: u32) -> Option { - let mut cursor = self.transforms.cursor::(()); - cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right); - cursor.item().and_then(|transform| { + let (.., item) = + self.transforms + .find::((), &WrapPoint::new(row + 1, 0), Bias::Right); + item.and_then(|transform| { if transform.is_isomorphic() { None } else { @@ -749,13 +751,12 @@ impl WrapSnapshot { } pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { - let mut cursor = self - .transforms - .cursor::>(()); - cursor.seek(&point, Bias::Right); - let mut tab_point = cursor.start().1.0; - if cursor.item().is_some_and(|t| t.is_isomorphic()) { - tab_point += point.0 - cursor.start().0.0; + let (start, _, item) = + self.transforms + .find::, _>((), &point, Bias::Right); + let mut tab_point = start.1.0; + if item.is_some_and(|t| t.is_isomorphic()) { + tab_point += point.0 - start.0.0; } TabPoint(tab_point) } @@ -769,19 +770,19 @@ impl WrapSnapshot { } pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { - let mut cursor = self - .transforms - .cursor::>(()); - cursor.seek(&point, Bias::Right); - WrapPoint(cursor.start().1.0 + (point.0 - cursor.start().0.0)) + let (start, ..) 
= + self.transforms + .find::, _>((), &point, Bias::Right); + WrapPoint(start.1.0 + (point.0 - start.0.0)) } pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { - let mut cursor = self.transforms.cursor::(()); - cursor.seek(&point, Bias::Right); - if cursor.item().is_some_and(|t| !t.is_isomorphic()) { - point = *cursor.start(); + let (start, _, item) = self + .transforms + .find::((), &point, Bias::Right); + if item.is_some_and(|t| !t.is_isomorphic()) { + point = start; *point.column_mut() -= 1; } } @@ -971,18 +972,11 @@ impl<'a> Iterator for WrapChunks<'a> { let (prefix, suffix) = self.input_chunk.text.split_at(input_len); - let (chars, tabs) = if input_len == 128 { - let output = (self.input_chunk.chars, self.input_chunk.tabs); - self.input_chunk.chars = 0; - self.input_chunk.tabs = 0; - output - } else { - let mask = (1 << input_len) - 1; - let output = (self.input_chunk.chars & mask, self.input_chunk.tabs & mask); - self.input_chunk.chars = self.input_chunk.chars >> input_len; - self.input_chunk.tabs = self.input_chunk.tabs >> input_len; - output - }; + let mask = 1u128.unbounded_shl(input_len as u32).wrapping_sub(1); + let chars = self.input_chunk.chars & mask; + let tabs = self.input_chunk.tabs & mask; + self.input_chunk.tabs = self.input_chunk.tabs.unbounded_shr(input_len as u32); + self.input_chunk.chars = self.input_chunk.chars.unbounded_shr(input_len as u32); self.input_chunk.text = suffix; Some(Chunk { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 06256c35971e72b283fb99cb1fc40799e410b2d5..c8b0cd37aeed0601c6402d2ad0f41cfa3a9ba56c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -7,7 +7,6 @@ //! * [`element`] — the place where all rendering happens //! * [`display_map`] - chunks up text in the editor into the logical blocks, establishes coordinates and mapping between each of them. //! Contains all metadata related to text transformations (folds, fake inlay text insertions, soft wraps, tab markup, etc.). -//! * [`inlay_hint_cache`] - is a storage of inlay hints out of LSP requests, responsible for querying LSP and updating `display_map`'s state accordingly. //! //! All other submodules and structs are mostly concerned with holding editor data about the way it displays current buffer region(s). //! 
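A recurring change in the map hunks above (block_map, fold_map, inlay_map, wrap_map) is collapsing the two-step `cursor::<…>(()); cursor.seek(&target, Bias::Right)` sequence into a single `transforms.find(…)` call that yields the accumulated start and end dimensions together with the matching item. The toy below is not Zed's `sum_tree` API; it is a sketch over a plain slice that shows the shape of such a lookup under that assumption: seek once, return `(start, end, item)` so the overshoot arithmetic (`target - start`) reads the same at every call site.

```rust
/// A stand-in "transform"; `len` plays the role of the summary dimension.
#[derive(Debug, PartialEq)]
struct Transform {
    len: usize,
}

/// Toy equivalent of "seek with right bias, then read start/end/item":
/// returns the summed length before the matching transform, the summed
/// length through it, and the transform containing `target`, if any.
fn find(transforms: &[Transform], target: usize) -> (usize, usize, Option<&Transform>) {
    let mut start = 0;
    for transform in transforms {
        let end = start + transform.len;
        // Right bias: a target sitting exactly on a boundary belongs to the
        // transform that starts there, not to the one that just ended.
        if target < end {
            return (start, end, Some(transform));
        }
        start = end;
    }
    (start, start, None)
}

fn main() {
    let transforms = [Transform { len: 3 }, Transform { len: 5 }, Transform { len: 2 }];

    let (start, end, item) = find(&transforms, 4);
    assert_eq!((start, end), (3, 8));
    assert_eq!(item, Some(&Transform { len: 5 }));

    // Past the end: no item, and start == end == total length, mirroring the
    // `None => …` fallbacks in the hunks above.
    assert_eq!(find(&transforms, 10), (10, 10, None));
}
```

The win in the diff is largely ergonomic: there is no mutable cursor to thread through each helper, and every call site destructures the same tuple instead of interleaving `cursor.start()`, `cursor.end()`, and `cursor.item()` reads.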
@@ -24,7 +23,7 @@ mod highlight_matching_bracket; mod hover_links; pub mod hover_popover; mod indent_guides; -mod inlay_hint_cache; +mod inlays; pub mod items; mod jsx_tag_auto_close; mod linked_editing_ranges; @@ -33,7 +32,6 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; -mod proposed_changes_editor; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; @@ -61,6 +59,7 @@ pub use element::{ }; pub use git::blame::BlameRenderer; pub use hover_popover::hover_markdown_style; +pub use inlays::Inlay; pub use items::MAX_TAB_TITLE_LEN; pub use lsp::CompletionContext; pub use lsp_ext::lsp_tasks; @@ -68,21 +67,19 @@ pub use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, PathKey, RowInfo, ToOffset, ToPoint, }; -pub use proposed_changes_editor::{ - ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar, -}; pub use text::Bias; use ::git::{ Restore, blame::{BlameEntry, ParsedCommitMessage}, + status::FileStatus, }; use aho_corasick::AhoCorasick; use anyhow::{Context as _, Result, anyhow}; use blink_manager::BlinkManager; use buffer_diff::DiffHunkStatus; use client::{Collaborator, ParticipantIndex, parse_zed_link}; -use clock::{AGENT_REPLICA_ID, ReplicaId}; +use clock::ReplicaId; use code_context_menus::{ AvailableCodeAction, CodeActionContents, CodeActionsItem, CodeActionsMenu, CodeContextMenu, CompletionsMenu, ContextMenuOrigin, @@ -112,10 +109,10 @@ use gpui::{ div, point, prelude::*, pulsating_between, px, relative, size, }; use highlight_matching_bracket::refresh_matching_bracket_highlights; -use hover_links::{HoverLink, HoveredLinkState, InlayHighlight, find_file}; +use hover_links::{HoverLink, HoveredLinkState, find_file}; use hover_popover::{HoverState, hide_hover}; use indent_guides::ActiveIndentGuidesState; -use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; +use inlays::{InlaySplice, inlay_hints::InlayHintRefreshReason}; use itertools::{Either, Itertools}; use language::{ AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow, @@ -124,8 +121,8 @@ use language::{ IndentSize, Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery, language_settings::{ - self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, - all_language_settings, language_settings, + self, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings, + language_settings, }, point_from_lsp, point_to_lsp, text_diff_with_options, }; @@ -140,15 +137,14 @@ use mouse_context_menu::MouseContextMenu; use movement::TextLayoutDetails; use multi_buffer::{ ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, - ToOffsetUtf16, }; use parking_lot::Mutex; use persistence::DB; use project::{ BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent, - CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, - Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, ProjectPath, - ProjectTransaction, TaskSourceKind, + CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId, + InvalidationStrategy, Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, + ProjectPath, ProjectTransaction, TaskSourceKind, debugger::{ breakpoint_store::{ Breakpoint, BreakpointEditAction, BreakpointSessionState, 
BreakpointState, @@ -156,8 +152,11 @@ use project::{ }, session::{Session, SessionEvent}, }, - git_store::{GitStoreEvent, RepositoryEvent}, - lsp_store::{CompletionDocumentation, FormatTrigger, LspFormatTarget, OpenLspBufferHandle}, + git_store::GitStoreEvent, + lsp_store::{ + CacheInlayHints, CompletionDocumentation, FormatTrigger, LspFormatTarget, + OpenLspBufferHandle, + }, project_settings::{DiagnosticSeverity, GoToDiagnosticSeverityFilter, ProjectSettings}, }; use rand::seq::SliceRandom; @@ -178,7 +177,7 @@ use std::{ iter::{self, Peekable}, mem, num::NonZeroU32, - ops::{ControlFlow, Deref, DerefMut, Not, Range, RangeInclusive}, + ops::{Deref, DerefMut, Not, Range, RangeInclusive}, path::{Path, PathBuf}, rc::Rc, sync::Arc, @@ -208,6 +207,10 @@ use crate::{ code_context_menus::CompletionsMenuSource, editor_settings::MultiCursorModifier, hover_links::{find_url, find_url_from_range}, + inlays::{ + InlineValueCache, + inlay_hints::{LspInlayHintData, inlay_hint_settings}, + }, scroll::{ScrollOffset, ScrollPixelOffset}, signature_help::{SignatureHelpHiddenBy, SignatureHelpState}, }; @@ -226,6 +229,7 @@ pub const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5); pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5); pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); +pub const FETCH_COLORS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150); pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction"; pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict"; @@ -260,42 +264,6 @@ impl ReportEditorEvent { } } -struct InlineValueCache { - enabled: bool, - inlays: Vec, - refresh_task: Task>, -} - -impl InlineValueCache { - fn new(enabled: bool) -> Self { - Self { - enabled, - inlays: Vec::new(), - refresh_task: Task::ready(None), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum InlayId { - EditPrediction(u32), - DebuggerValue(u32), - // LSP - Hint(u32), - Color(u32), -} - -impl InlayId { - fn id(&self) -> u32 { - match self { - Self::EditPrediction(id) => *id, - Self::DebuggerValue(id) => *id, - Self::Hint(id) => *id, - Self::Color(id) => *id, - } - } -} - pub enum ActiveDebugLine {} pub enum DebugStackFrameLine {} enum DocumentHighlightRead {} @@ -357,6 +325,7 @@ pub fn init(cx: &mut App) { cx.observe_new( |workspace: &mut Workspace, _: Option<&mut Window>, _cx: &mut Context| { workspace.register_action(Editor::new_file); + workspace.register_action(Editor::new_file_split); workspace.register_action(Editor::new_file_vertical); workspace.register_action(Editor::new_file_horizontal); workspace.register_action(Editor::cancel_language_server_work); @@ -875,6 +844,10 @@ pub trait Addon: 'static { None } + fn override_status_for_buffer_id(&self, _: BufferId, _: &App) -> Option { + None + } + fn to_any(&self) -> &dyn std::any::Any; fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> { @@ -1122,9 +1095,8 @@ pub struct Editor { edit_prediction_preview: EditPredictionPreview, edit_prediction_indent_conflict: bool, edit_prediction_requires_modifier_in_indent_conflict: bool, - inlay_hint_cache: InlayHintCache, - next_inlay_id: u32, - next_color_inlay_id: u32, + next_inlay_id: usize, + next_color_inlay_id: usize, _subscriptions: Vec, pixel_position_of_newest_cursor: Option>, gutter_dimensions: GutterDimensions, @@ -1189,10 +1161,21 @@ pub struct Editor { 
inline_value_cache: InlineValueCache, selection_drag_state: SelectionDragState, colors: Option, + post_scroll_update: Task<()>, + refresh_colors_task: Task<()>, + inlay_hints: Option, folding_newlines: Task<()>, pub lookup_key: Option>, } +fn debounce_value(debounce_ms: u64) -> Option { + if debounce_ms > 0 { + Some(Duration::from_millis(debounce_ms)) + } else { + None + } +} + #[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] enum NextScrollCursorCenterTopBottom { #[default] @@ -1298,7 +1281,7 @@ enum SelectionHistoryMode { #[derive(Clone, PartialEq, Eq, Hash)] struct HoveredCursor { - replica_id: u16, + replica_id: ReplicaId, selection_id: usize, } @@ -1617,31 +1600,6 @@ pub enum GotoDefinitionKind { Implementation, } -#[derive(Debug, Clone)] -enum InlayHintRefreshReason { - ModifiersChanged(bool), - Toggle(bool), - SettingsChange(InlayHintSettings), - NewLinesShown, - BufferEdited(HashSet>), - RefreshRequested, - ExcerptsRemoved(Vec), -} - -impl InlayHintRefreshReason { - fn description(&self) -> &'static str { - match self { - Self::ModifiersChanged(_) => "modifiers changed", - Self::Toggle(_) => "toggle", - Self::SettingsChange(_) => "settings change", - Self::NewLinesShown => "new lines shown", - Self::BufferEdited(_) => "buffer edited", - Self::RefreshRequested => "refresh requested", - Self::ExcerptsRemoved(_) => "excerpts removed", - } - } -} - pub enum FormatTarget { Buffers(HashSet>), Ranges(Vec>), @@ -1784,7 +1742,7 @@ impl Editor { fn new_internal( mode: EditorMode, - buffer: Entity, + multi_buffer: Entity, project: Option>, display_map: Option>, window: &mut Window, @@ -1842,7 +1800,7 @@ impl Editor { let display_map = display_map.unwrap_or_else(|| { cx.new(|cx| { DisplayMap::new( - buffer.clone(), + multi_buffer.clone(), style.font(), font_size, None, @@ -1855,7 +1813,7 @@ impl Editor { }) }); - let selections = SelectionsCollection::new(display_map.clone(), buffer.clone()); + let selections = SelectionsCollection::new(display_map.clone(), multi_buffer.clone()); let blink_manager = cx.new(|cx| { let mut blink_manager = BlinkManager::new(CURSOR_BLINK_INTERVAL, cx); @@ -1877,11 +1835,20 @@ impl Editor { project::Event::RefreshCodeLens => { // we always query lens with actions, without storing them, always refreshing them } - project::Event::RefreshInlayHints => { - editor.refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx); + project::Event::RefreshInlayHints(server_id) => { + editor.refresh_inlay_hints( + InlayHintRefreshReason::RefreshRequested(*server_id), + cx, + ); + } + project::Event::LanguageServerRemoved(..) => { + if editor.tasks_update_task.is_none() { + editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); + } + editor.registered_buffers.clear(); + editor.register_visible_buffers(cx); } - project::Event::LanguageServerAdded(..) - | project::Event::LanguageServerRemoved(..) => { + project::Event::LanguageServerAdded(..) => { if editor.tasks_update_task.is_none() { editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); } @@ -1907,8 +1874,14 @@ impl Editor { } } project::Event::LanguageServerBufferRegistered { buffer_id, .. 
} => { - if editor.buffer().read(cx).buffer(*buffer_id).is_some() { - editor.update_lsp_data(false, Some(*buffer_id), window, cx); + let buffer_id = *buffer_id; + if editor.buffer().read(cx).buffer(buffer_id).is_some() { + editor.register_buffer(buffer_id, cx); + editor.update_lsp_data(Some(buffer_id), window, cx); + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + refresh_linked_ranges(editor, window, cx); + editor.refresh_code_actions(window, cx); + editor.refresh_document_highlights(cx); } } @@ -2005,14 +1978,7 @@ impl Editor { let git_store = project.read(cx).git_store().clone(); let project = project.clone(); project_subscriptions.push(cx.subscribe(&git_store, move |this, _, event, cx| { - if let GitStoreEvent::RepositoryUpdated( - _, - RepositoryEvent::Updated { - new_instance: true, .. - }, - _, - ) = event - { + if let GitStoreEvent::RepositoryAdded = event { this.load_diff_task = Some( update_uncommitted_diff_for_buffer( cx.entity(), @@ -2027,7 +1993,7 @@ impl Editor { })); } - let buffer_snapshot = buffer.read(cx).snapshot(cx); + let buffer_snapshot = multi_buffer.read(cx).snapshot(cx); let inlay_hint_settings = inlay_hint_settings(selections.newest_anchor().head(), &buffer_snapshot, cx); @@ -2064,8 +2030,8 @@ impl Editor { update_uncommitted_diff_for_buffer( cx.entity(), &project, - buffer.read(cx).all_buffers(), - buffer.clone(), + multi_buffer.read(cx).all_buffers(), + multi_buffer.clone(), cx, ) .shared(), @@ -2077,7 +2043,7 @@ impl Editor { focus_handle, show_cursor_when_unfocused: false, last_focused_descendant: None, - buffer: buffer.clone(), + buffer: multi_buffer.clone(), display_map: display_map.clone(), placeholder_display_map: None, selections, @@ -2179,7 +2145,6 @@ impl Editor { diagnostics_enabled: full_mode, word_completions_enabled: full_mode, inline_value_cache: InlineValueCache::new(inlay_hint_settings.show_value_hints), - inlay_hint_cache: InlayHintCache::new(inlay_hint_settings), gutter_hovered: false, pixel_position_of_newest_cursor: None, last_bounds: None, @@ -2219,8 +2184,8 @@ impl Editor { _subscriptions: (!is_minimap) .then(|| { vec![ - cx.observe(&buffer, Self::on_buffer_changed), - cx.subscribe_in(&buffer, window, Self::on_buffer_event), + cx.observe(&multi_buffer, Self::on_buffer_changed), + cx.subscribe_in(&multi_buffer, window, Self::on_buffer_event), cx.observe_in(&display_map, window, Self::on_display_map_changed), cx.observe(&blink_manager, |_, _, cx| cx.notify()), cx.observe_global_in::(window, Self::settings_changed), @@ -2244,7 +2209,10 @@ impl Editor { tasks_update_task: None, pull_diagnostics_task: Task::ready(()), colors: None, + refresh_colors_task: Task::ready(()), + inlay_hints: None, next_color_inlay_id: 0, + post_scroll_update: Task::ready(()), linked_edit_ranges: Default::default(), in_project_search: false, previous_search_ranges: None, @@ -2307,21 +2275,22 @@ impl Editor { } EditorEvent::Edited { .. 
} => { if !vim_enabled(cx) { - let (map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); let pop_state = editor .change_list .last() .map(|previous| { previous.len() == selections.len() && previous.iter().enumerate().all(|(ix, p)| { - p.to_display_point(&map).row() + p.to_display_point(&display_map).row() == selections[ix].head().row() }) }) .unwrap_or(false); let new_positions = selections .into_iter() - .map(|s| map.display_point_to_anchor(s.head(), Bias::Left)) + .map(|s| display_map.display_point_to_anchor(s.head(), Bias::Left)) .collect(); editor .change_list @@ -2367,7 +2336,7 @@ impl Editor { editor.selection_history.mode = SelectionHistoryMode::Normal; editor.scroll_manager.show_scrollbars(window, cx); - jsx_tag_auto_close::refresh_enabled_in_any_buffer(&mut editor, &buffer, cx); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(&mut editor, &multi_buffer, cx); if full_mode { let should_auto_hide_scrollbars = cx.should_auto_hide_scrollbars(); @@ -2379,30 +2348,25 @@ impl Editor { editor.go_to_active_debug_line(window, cx); - if let Some(buffer) = buffer.read(cx).as_singleton() - && let Some(project) = editor.project() - { - let handle = project.update(cx, |project, cx| { - project.register_buffer_with_language_servers(&buffer, cx) - }); - editor - .registered_buffers - .insert(buffer.read(cx).remote_id(), handle); - } - editor.minimap = editor.create_minimap(EditorSettings::get_global(cx).minimap, window, cx); editor.colors = Some(LspColorData::new(cx)); - editor.update_lsp_data(false, None, window, cx); - } + editor.inlay_hints = Some(LspInlayHintData::new(inlay_hint_settings)); - if editor.mode.is_full() { + if let Some(buffer) = multi_buffer.read(cx).as_singleton() { + editor.register_buffer(buffer.read(cx).remote_id(), cx); + } + editor.update_lsp_data(None, window, cx); editor.report_editor_event(ReportEditorEvent::EditorOpened, None, cx); } editor } + pub fn display_snapshot(&self, cx: &mut App) -> DisplaySnapshot { + self.selections.display_map(cx) + } + pub fn deploy_mouse_context_menu( &mut self, position: gpui::Point, @@ -2438,7 +2402,7 @@ impl Editor { } self.selections - .disjoint_in_range::(range.clone(), cx) + .disjoint_in_range::(range.clone(), &self.display_snapshot(cx)) .into_iter() .any(|selection| { // This is needed to cover a corner case, if we just check for an existing @@ -2449,15 +2413,15 @@ impl Editor { }) } - pub fn key_context(&self, window: &Window, cx: &App) -> KeyContext { + pub fn key_context(&self, window: &mut Window, cx: &mut App) -> KeyContext { self.key_context_internal(self.has_active_edit_prediction(), window, cx) } fn key_context_internal( &self, has_active_edit_prediction: bool, - window: &Window, - cx: &App, + window: &mut Window, + cx: &mut App, ) -> KeyContext { let mut key_context = KeyContext::new_with_defaults(); key_context.add("Editor"); @@ -2507,12 +2471,15 @@ impl Editor { } if let Some(singleton_buffer) = self.buffer.read(cx).as_singleton() { - if let Some(extension) = singleton_buffer - .read(cx) - .file() - .and_then(|file| file.path().extension()) - { - key_context.set("extension", extension.to_string()); + if let Some(extension) = singleton_buffer.read(cx).file().and_then(|file| { + Some( + file.full_path(cx) + .extension()? 
+ .to_string_lossy() + .into_owned(), + ) + }) { + key_context.set("extension", extension); } } else { key_context.add("multibuffer"); @@ -2531,6 +2498,17 @@ impl Editor { key_context.add("selection_mode"); } + let disjoint = self.selections.disjoint_anchors(); + let snapshot = self.snapshot(window, cx); + let snapshot = snapshot.buffer_snapshot(); + if self.mode == EditorMode::SingleLine + && let [selection] = disjoint + && selection.start == selection.end + && selection.end.to_offset(snapshot) == snapshot.len() + { + key_context.add("end_of_input"); + } + key_context } @@ -2584,8 +2562,8 @@ impl Editor { pub fn accept_edit_prediction_keybind( &self, accept_partial: bool, - window: &Window, - cx: &App, + window: &mut Window, + cx: &mut App, ) -> AcceptEditPredictionBinding { let key_context = self.key_context_internal(true, window, cx); let in_conflict = self.edit_prediction_in_conflict(); @@ -2664,6 +2642,15 @@ impl Editor { Self::new_file_in_direction(workspace, SplitDirection::horizontal(cx), window, cx) } + fn new_file_split( + workspace: &mut Workspace, + action: &workspace::NewFileSplit, + window: &mut Window, + cx: &mut Context, + ) { + Self::new_file_in_direction(workspace, action.0, window, cx) + } + fn new_file_in_direction( workspace: &mut Workspace, direction: SplitDirection, @@ -2718,7 +2705,7 @@ impl Editor { self.buffer().read(cx).title(cx) } - pub fn snapshot(&self, window: &mut Window, cx: &mut App) -> EditorSnapshot { + pub fn snapshot(&self, window: &Window, cx: &mut App) -> EditorSnapshot { let git_blame_gutter_max_author_length = self .render_git_blame_gutter(cx) .then(|| { @@ -2896,20 +2883,6 @@ impl Editor { self.collapse_matches = collapse_matches; } - fn register_buffers_with_language_servers(&mut self, cx: &mut Context) { - let buffers = self.buffer.read(cx).all_buffers(); - let Some(project) = self.project.as_ref() else { - return; - }; - project.update(cx, |project, cx| { - for buffer in buffers { - self.registered_buffers - .entry(buffer.read(cx).remote_id()) - .or_insert_with(|| project.register_buffer_with_language_servers(&buffer, cx)); - } - }) - } - pub fn range_for_match(&self, range: &Range) -> Range { if self.collapse_matches { return range.start..range.start; @@ -3048,7 +3021,7 @@ impl Editor { // Copy selections to primary selection buffer #[cfg(any(target_os = "linux", target_os = "freebsd"))] if local { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); let buffer_handle = self.buffer.read(cx).read(cx); let mut text = String::new(); @@ -3109,19 +3082,8 @@ impl Editor { } if local { - if let Some(buffer_id) = new_cursor_position.buffer_id - && !self.registered_buffers.contains_key(&buffer_id) - && let Some(project) = self.project.as_ref() - { - project.update(cx, |project, cx| { - let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else { - return; - }; - self.registered_buffers.insert( - buffer_id, - project.register_buffer_with_language_servers(&buffer, cx), - ); - }) + if let Some(buffer_id) = new_cursor_position.buffer_id { + self.register_buffer(buffer_id, cx); } let mut context_menu = self.context_menu.borrow_mut(); @@ -3171,11 +3133,12 @@ impl Editor { } self.refresh_code_actions(window, cx); self.refresh_document_highlights(cx); + refresh_linked_ranges(self, window, cx); + self.refresh_selected_text_highlights(false, window, cx); refresh_matching_bracket_highlights(self, cx); self.update_visible_edit_prediction(window, cx); 
self.edit_prediction_requires_modifier_in_indent_conflict = true; - linked_editing_ranges::refresh_linked_ranges(self, window, cx); self.inline_blame_popover.take(); if self.git_blame_inline_enabled { self.start_inline_blame_timer(window, cx); @@ -3510,27 +3473,47 @@ impl Editor { cx: &mut Context, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let tail = self.selections.newest::(cx).tail(); + let tail = self.selections.newest::(&display_map).tail(); + let click_count = click_count.max(match self.selections.select_mode() { + SelectMode::Character => 1, + SelectMode::Word(_) => 2, + SelectMode::Line(_) => 3, + SelectMode::All => 4, + }); self.begin_selection(position, false, click_count, window, cx); - let position = position.to_offset(&display_map, Bias::Left); let tail_anchor = display_map.buffer_snapshot().anchor_before(tail); + let current_selection = match self.selections.select_mode() { + SelectMode::Character | SelectMode::All => tail_anchor..tail_anchor, + SelectMode::Word(range) | SelectMode::Line(range) => range.clone(), + }; + let mut pending_selection = self .selections .pending_anchor() .cloned() .expect("extend_selection not called with pending selection"); - if position >= tail { - pending_selection.start = tail_anchor; - } else { - pending_selection.end = tail_anchor; + + if pending_selection + .start + .cmp(¤t_selection.start, display_map.buffer_snapshot()) + == Ordering::Greater + { + pending_selection.start = current_selection.start; + } + if pending_selection + .end + .cmp(¤t_selection.end, display_map.buffer_snapshot()) + == Ordering::Less + { + pending_selection.end = current_selection.end; pending_selection.reversed = true; } let mut pending_mode = self.selections.pending_mode().unwrap(); match &mut pending_mode { - SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor, + SelectMode::Word(range) | SelectMode::Line(range) => *range = current_selection, _ => {} } @@ -3541,7 +3524,8 @@ impl Editor { }; self.change_selections(effects, window, cx, |s| { - s.set_pending(pending_selection.clone(), pending_mode) + s.set_pending(pending_selection.clone(), pending_mode); + s.set_is_extending(true); }); } @@ -3608,7 +3592,7 @@ impl Editor { let point_to_delete: Option = { let selected_points: Vec> = - self.selections.disjoint_in_range(start..end, cx); + self.selections.disjoint_in_range(start..end, &display_map); if !add || click_count > 1 { None @@ -3684,7 +3668,7 @@ impl Editor { ); }; - let tail = self.selections.newest::(cx).tail(); + let tail = self.selections.newest::(&display_map).tail(); let selection_anchor = display_map.buffer_snapshot().anchor_before(tail); self.columnar_selection_state = match mode { ColumnarMode::FromMouse => Some(ColumnarSelectionState::FromMouse { @@ -3810,11 +3794,16 @@ impl Editor { fn end_selection(&mut self, window: &mut Window, cx: &mut Context) { self.columnar_selection_state.take(); - if self.selections.pending_anchor().is_some() { - let selections = self.selections.all::(cx); + if let Some(pending_mode) = self.selections.pending_mode() { + let selections = self.selections.all::(&self.display_snapshot(cx)); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select(selections); s.clear_pending(); + if s.is_extending() { + s.set_is_extending(false); + } else { + s.set_select_mode(pending_mode); + } }); } } @@ -3870,6 +3859,9 @@ impl Editor { } }) .collect::>(); + if selection_ranges.is_empty() { + return; + } let ranges = match columnar_state { 
ColumnarSelectionState::FromMouse { .. } => { @@ -3892,9 +3884,9 @@ impl Editor { cx.notify(); } - pub fn has_non_empty_selection(&self, cx: &mut App) -> bool { + pub fn has_non_empty_selection(&self, snapshot: &DisplaySnapshot) -> bool { self.selections - .all_adjusted(cx) + .all_adjusted(snapshot) .iter() .any(|selection| !selection.is_empty()) } @@ -3945,6 +3937,10 @@ impl Editor { return true; } + if self.hide_blame_popover(true, cx) { + return true; + } + if hide_hover(self, cx) { return true; } @@ -4043,7 +4039,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - let selections = self.selections.all_adjusted(cx); + let selections = self.selections.all_adjusted(&self.display_snapshot(cx)); let mut bracket_inserted = false; let mut edits = Vec::new(); let mut linked_edits = HashMap::<_, Vec<_>>::default(); @@ -4393,7 +4389,7 @@ impl Editor { let trigger_in_words = this.show_edit_predictions_in_menu() || !had_active_edit_prediction; if this.hard_wrap.is_some() { - let latest: Range = this.selections.newest(cx).range(); + let latest: Range = this.selections.newest(&map).range(); if latest.is_empty() && this .buffer() @@ -4412,7 +4408,7 @@ impl Editor { } } this.trigger_completion_on_input(&text, trigger_in_words, window, cx); - linked_editing_ranges::refresh_linked_ranges(this, window, cx); + refresh_linked_ranges(this, window, cx); this.refresh_edit_prediction(true, false, window, cx); jsx_tag_auto_close::handle_from(this, initial_buffer_versions, window, cx); }); @@ -4469,7 +4465,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = { - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); let multi_buffer = this.buffer.read(cx); let buffer = multi_buffer.snapshot(cx); selections @@ -4761,7 +4757,12 @@ impl Editor { let mut edits = Vec::new(); let mut rows = Vec::new(); - for (rows_inserted, selection) in self.selections.all_adjusted(cx).into_iter().enumerate() { + for (rows_inserted, selection) in self + .selections + .all_adjusted(&self.display_snapshot(cx)) + .into_iter() + .enumerate() + { let cursor = selection.head(); let row = cursor.row; @@ -4821,7 +4822,7 @@ impl Editor { let mut rows = Vec::new(); let mut rows_inserted = 0; - for selection in self.selections.all_adjusted(cx) { + for selection in self.selections.all_adjusted(&self.display_snapshot(cx)) { let cursor = selection.head(); let row = cursor.row; @@ -4893,7 +4894,7 @@ impl Editor { let text: Arc = text.into(); self.transact(window, cx, |this, window, cx| { - let old_selections = this.selections.all_adjusted(cx); + let old_selections = this.selections.all_adjusted(&this.display_snapshot(cx)); let selection_anchors = this.buffer.update(cx, |buffer, cx| { let anchors = { let snapshot = buffer.read(cx); @@ -5003,7 +5004,7 @@ impl Editor { /// If any empty selections is touching the start of its innermost containing autoclose /// region, expand it to select the brackets. 
fn select_autoclose_pair(&mut self, window: &mut Window, cx: &mut Context) { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let new_selections = self .selections_with_autoclose_regions(selections, &buffer) @@ -5144,177 +5145,8 @@ impl Editor { } } - pub fn toggle_inline_values( - &mut self, - _: &ToggleInlineValues, - _: &mut Window, - cx: &mut Context, - ) { - self.inline_value_cache.enabled = !self.inline_value_cache.enabled; - - self.refresh_inline_values(cx); - } - - pub fn toggle_inlay_hints( - &mut self, - _: &ToggleInlayHints, - _: &mut Window, - cx: &mut Context, - ) { - self.refresh_inlay_hints( - InlayHintRefreshReason::Toggle(!self.inlay_hints_enabled()), - cx, - ); - } - - pub fn inlay_hints_enabled(&self) -> bool { - self.inlay_hint_cache.enabled - } - - pub fn inline_values_enabled(&self) -> bool { - self.inline_value_cache.enabled - } - - #[cfg(any(test, feature = "test-support"))] - pub fn inline_value_inlays(&self, cx: &App) -> Vec { - self.display_map - .read(cx) - .current_inlays() - .filter(|inlay| matches!(inlay.id, InlayId::DebuggerValue(_))) - .cloned() - .collect() - } - - #[cfg(any(test, feature = "test-support"))] - pub fn all_inlays(&self, cx: &App) -> Vec { - self.display_map - .read(cx) - .current_inlays() - .cloned() - .collect() - } - - fn refresh_inlay_hints(&mut self, reason: InlayHintRefreshReason, cx: &mut Context) { - if self.semantics_provider.is_none() || !self.mode.is_full() { - return; - } - - let reason_description = reason.description(); - let ignore_debounce = matches!( - reason, - InlayHintRefreshReason::SettingsChange(_) - | InlayHintRefreshReason::Toggle(_) - | InlayHintRefreshReason::ExcerptsRemoved(_) - | InlayHintRefreshReason::ModifiersChanged(_) - ); - let (invalidate_cache, required_languages) = match reason { - InlayHintRefreshReason::ModifiersChanged(enabled) => { - match self.inlay_hint_cache.modifiers_override(enabled) { - Some(enabled) => { - if enabled { - (InvalidationStrategy::RefreshRequested, None) - } else { - self.splice_inlays( - &self - .visible_inlay_hints(cx) - .iter() - .map(|inlay| inlay.id) - .collect::>(), - Vec::new(), - cx, - ); - return; - } - } - None => return, - } - } - InlayHintRefreshReason::Toggle(enabled) => { - if self.inlay_hint_cache.toggle(enabled) { - if enabled { - (InvalidationStrategy::RefreshRequested, None) - } else { - self.splice_inlays( - &self - .visible_inlay_hints(cx) - .iter() - .map(|inlay| inlay.id) - .collect::>(), - Vec::new(), - cx, - ); - return; - } - } else { - return; - } - } - InlayHintRefreshReason::SettingsChange(new_settings) => { - match self.inlay_hint_cache.update_settings( - &self.buffer, - new_settings, - self.visible_inlay_hints(cx), - cx, - ) { - ControlFlow::Break(Some(InlaySplice { - to_remove, - to_insert, - })) => { - self.splice_inlays(&to_remove, to_insert, cx); - return; - } - ControlFlow::Break(None) => return, - ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None), - } - } - InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { - if let Some(InlaySplice { - to_remove, - to_insert, - }) = self.inlay_hint_cache.remove_excerpts(&excerpts_removed) - { - self.splice_inlays(&to_remove, to_insert, cx); - } - self.display_map.update(cx, |display_map, _| { - display_map.remove_inlays_for_excerpts(&excerpts_removed) - }); - return; - } - InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None), - 
InlayHintRefreshReason::BufferEdited(buffer_languages) => { - (InvalidationStrategy::BufferEdited, Some(buffer_languages)) - } - InlayHintRefreshReason::RefreshRequested => { - (InvalidationStrategy::RefreshRequested, None) - } - }; - - if let Some(InlaySplice { - to_remove, - to_insert, - }) = self.inlay_hint_cache.spawn_hint_refresh( - reason_description, - self.visible_excerpts(required_languages.as_ref(), cx), - invalidate_cache, - ignore_debounce, - cx, - ) { - self.splice_inlays(&to_remove, to_insert, cx); - } - } - - fn visible_inlay_hints(&self, cx: &Context) -> Vec { - self.display_map - .read(cx) - .current_inlays() - .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) - .cloned() - .collect() - } - pub fn visible_excerpts( &self, - restrict_to_languages: Option<&HashSet>>, cx: &mut Context, ) -> HashMap, clock::Global, Range)> { let Some(project) = self.project() else { @@ -5333,9 +5165,8 @@ impl Editor { + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), Bias::Left, ); - let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; multi_buffer_snapshot - .range_to_buffer_ranges(multi_buffer_visible_range) + .range_to_buffer_ranges(multi_buffer_visible_start..multi_buffer_visible_end) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { @@ -5345,23 +5176,17 @@ impl Editor { .read(cx) .entry_for_id(buffer_file.project_entry_id()?)?; if worktree_entry.is_ignored { - return None; - } - - let language = buffer.language()?; - if let Some(restrict_to_languages) = restrict_to_languages - && !restrict_to_languages.contains(language) - { - return None; + None + } else { + Some(( + excerpt_id, + ( + multi_buffer.buffer(buffer.remote_id()).unwrap(), + buffer.version().clone(), + excerpt_visible_range, + ), + )) } - Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - excerpt_visible_range, - ), - )) }) .collect() } @@ -5377,18 +5202,6 @@ impl Editor { } } - pub fn splice_inlays( - &self, - to_remove: &[InlayId], - to_insert: Vec, - cx: &mut Context, - ) { - self.display_map.update(cx, |display_map, cx| { - display_map.splice_inlays(to_remove, to_insert, cx) - }); - cx.notify(); - } - fn trigger_on_type_formatting( &self, input: String, @@ -5977,15 +5790,8 @@ impl Editor { let snapshot = self.buffer.read(cx).snapshot(cx); let newest_anchor = self.selections.newest_anchor(); let replace_range_multibuffer = { - let excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap(); - let multibuffer_anchor = snapshot - .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.start)) - .unwrap() - ..snapshot - .anchor_in_excerpt(excerpt.id(), buffer.anchor_before(replace_range.end)) - .unwrap(); - multibuffer_anchor.start.to_offset(&snapshot) - ..multibuffer_anchor.end.to_offset(&snapshot) + let mut excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap(); + excerpt.map_range_from_buffer(replace_range.clone()) }; if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) { return None; @@ -6005,7 +5811,7 @@ impl Editor { let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; let suffix = &old_text[lookbehind.min(old_text.len())..]; - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); let mut ranges = Vec::new(); let mut linked_edits = HashMap::<_, Vec<_>>::default(); @@ 
-6157,7 +5963,10 @@ impl Editor { Some(CodeActionSource::Indicator(row)) | Some(CodeActionSource::RunMenu(row)) => { DisplayPoint::new(*row, 0).to_point(&snapshot) } - _ => self.selections.newest::(cx).head(), + _ => self + .selections + .newest::(&snapshot.display_snapshot) + .head(), }; let Some((buffer, buffer_row)) = snapshot .buffer_snapshot() @@ -6573,7 +6382,7 @@ impl Editor { .when(show_tooltip, |this| { this.tooltip({ let focus_handle = self.focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Code Actions", &ToggleCodeActions { @@ -6581,7 +6390,6 @@ impl Editor { quick_launch: false, }, &focus_handle, - window, cx, ) } @@ -6619,7 +6427,9 @@ impl Editor { if newest_selection.head().diff_base_anchor.is_some() { return None; } - let newest_selection_adjusted = this.selections.newest_adjusted(cx); + let display_snapshot = this.display_snapshot(cx); + let newest_selection_adjusted = + this.selections.newest_adjusted(&display_snapshot); let buffer = this.buffer.read(cx); let (start_buffer, start) = @@ -6694,7 +6504,10 @@ impl Editor { pub fn blame_hover(&mut self, _: &BlameHover, window: &mut Window, cx: &mut Context) { let snapshot = self.snapshot(window, cx); - let cursor = self.selections.newest::(cx).head(); + let cursor = self + .selections + .newest::(&snapshot.display_snapshot) + .head(); let Some((buffer, point, _)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) else { return; @@ -6740,7 +6553,7 @@ impl Editor { if let Some(state) = &mut self.inline_blame_popover { state.hide_task.take(); } else { - let blame_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; + let blame_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; let blame_entry = blame_entry.clone(); let show_task = cx.spawn(async move |editor, cx| { if !ignore_timeout { @@ -6786,13 +6599,15 @@ impl Editor { } } - fn hide_blame_popover(&mut self, cx: &mut Context) { + fn hide_blame_popover(&mut self, ignore_timeout: bool, cx: &mut Context) -> bool { self.inline_blame_popover_show_task.take(); if let Some(state) = &mut self.inline_blame_popover { let hide_task = cx.spawn(async move |editor, cx| { - cx.background_executor() - .timer(std::time::Duration::from_millis(100)) - .await; + if !ignore_timeout { + cx.background_executor() + .timer(std::time::Duration::from_millis(100)) + .await; + } editor .update(cx, |editor, cx| { editor.inline_blame_popover.take(); @@ -6801,6 +6616,9 @@ impl Editor { .ok(); }); state.hide_task = Some(hide_task); + true + } else { + false } } @@ -6831,7 +6649,7 @@ impl Editor { return None; } - let debounce = EditorSettings::get_global(cx).lsp_highlight_debounce; + let debounce = EditorSettings::get_global(cx).lsp_highlight_debounce.0; self.document_highlights_task = Some(cx.spawn(async move |this, cx| { cx.background_executor() .timer(Duration::from_millis(debounce)) @@ -6882,7 +6700,8 @@ impl Editor { continue; } - let range = Anchor::range_in_buffer(excerpt_id, buffer_id, start..end); + let range = + Anchor::range_in_buffer(excerpt_id, buffer_id, *start..*end); if highlight.kind == lsp::DocumentHighlightKind::WRITE { write_ranges.push(range); } else { @@ -6976,6 +6795,7 @@ impl Editor { ) else { return Vec::default(); }; + let query_range = query_range.to_anchors(&multi_buffer_snapshot); for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { match_ranges.extend( regex @@ -7565,7 +7385,10 @@ impl Editor { // Find an insertion that starts at the cursor position. 
let snapshot = self.buffer.read(cx).snapshot(cx); - let cursor_offset = self.selections.newest::(cx).head(); + let cursor_offset = self + .selections + .newest::(&self.display_snapshot(cx)) + .head(); let insertion = edits.iter().find_map(|(range, text)| { let range = range.to_offset(&snapshot); if range.is_empty() && range.start == cursor_offset { @@ -7968,9 +7791,10 @@ impl Editor { let edits = edits .into_iter() .flat_map(|(range, new_text)| { - let start = multibuffer.anchor_in_excerpt(excerpt_id, range.start)?; - let end = multibuffer.anchor_in_excerpt(excerpt_id, range.end)?; - Some((start..end, new_text)) + Some(( + multibuffer.anchor_range_in_excerpt(excerpt_id, range)?, + new_text, + )) }) .collect::>(); if edits.is_empty() { @@ -8430,13 +8254,12 @@ impl Editor { cx, ); })) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta_in( primary_action_text, Some(&ToggleBreakpoint), meta.clone(), &focus_handle, - window, cx, ) }) @@ -8520,7 +8343,11 @@ impl Editor { &mut self, cx: &mut Context, ) -> Option<(Entity, u32, Arc)> { - let cursor_row = self.selections.newest_adjusted(cx).head().row; + let cursor_row = self + .selections + .newest_adjusted(&self.display_snapshot(cx)) + .head() + .row; let ((buffer_id, row), tasks) = self .tasks @@ -8537,7 +8364,10 @@ impl Editor { cx: &mut Context, ) -> Option<(Entity, u32, Arc)> { let snapshot = self.buffer.read(cx).snapshot(cx); - let offset = self.selections.newest::(cx).head(); + let offset = self + .selections + .newest::(&self.display_snapshot(cx)) + .head(); let excerpt = snapshot.excerpt_containing(offset..offset)?; let buffer_id = excerpt.buffer().remote_id(); @@ -9230,7 +9060,7 @@ impl Editor { fn render_edit_prediction_accept_keybind( &self, window: &mut Window, - cx: &App, + cx: &mut App, ) -> Option { let accept_binding = self.accept_edit_prediction_keybind(false, window, cx); let accept_keystroke = accept_binding.keystroke()?; @@ -9276,7 +9106,7 @@ impl Editor { label: impl Into, icon: Option, window: &mut Window, - cx: &App, + cx: &mut App, ) -> Stateful
{ let padding_right = if icon.is_some() { px(4.) } else { px(8.) }; @@ -9854,8 +9684,7 @@ impl Editor { // Check whether the just-entered snippet ends with an auto-closable bracket. if self.autoclose_regions.is_empty() { let snapshot = self.buffer.read(cx).snapshot(cx); - let mut all_selections = self.selections.all::(cx); - for selection in &mut all_selections { + for selection in &mut self.selections.all::(&self.display_snapshot(cx)) { let selection_head = selection.head(); let Some(scope) = snapshot.language_scope_at(selection_head) else { continue; @@ -9993,9 +9822,12 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); + + let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut linked_ranges = HashMap::<_, Vec<_>>::default(); if !this.linked_edit_ranges.is_empty() { - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&display_map); let snapshot = this.buffer.read(cx).snapshot(cx); for selection in selections.iter() { @@ -10014,8 +9846,7 @@ impl Editor { } } - let mut selections = this.selections.all::(cx); - let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + let mut selections = this.selections.all::(&display_map); for selection in &mut selections { if selection.is_empty() { let old_head = selection.head(); @@ -10078,7 +9909,7 @@ impl Editor { }) } this.refresh_edit_prediction(true, false, window, cx); - linked_editing_ranges::refresh_linked_ranges(this, window, cx); + refresh_linked_ranges(this, window, cx); }); } @@ -10130,7 +9961,7 @@ impl Editor { return; } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - let mut selections = self.selections.all_adjusted(cx); + let mut selections = self.selections.all_adjusted(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); let rows_iter = selections.iter().map(|s| s.head().row); @@ -10246,7 +10077,7 @@ impl Editor { } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); - let mut selections = self.selections.all::(cx); + let mut selections = self.selections.all::(&self.display_snapshot(cx)); let mut prev_edited_row = 0; let mut row_delta = 0; let mut edits = Vec::new(); @@ -10355,7 +10186,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut deletion_ranges = Vec::new(); let mut last_outdent = None; { @@ -10416,7 +10247,7 @@ impl Editor { cx, ); }); - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); }); } @@ -10433,7 +10264,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections - .all::(cx) + .all::(&self.display_snapshot(cx)) .into_iter() .map(|s| s.range()); @@ -10441,7 +10272,7 @@ impl Editor { this.buffer.update(cx, |buffer, cx| { buffer.autoindent_ranges(selections, cx); }); - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); }); } @@ -10449,7 +10280,7 @@ impl Editor { pub fn delete_line(&mut self, 
_: &DeleteLine, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut new_cursors = Vec::new(); let mut edit_ranges = Vec::new(); @@ -10470,29 +10301,33 @@ impl Editor { let buffer = display_map.buffer_snapshot(); let mut edit_start = ToOffset::to_offset(&Point::new(rows.start.0, 0), buffer); - let edit_end = if buffer.max_point().row >= rows.end.0 { + let (edit_end, target_row) = if buffer.max_point().row >= rows.end.0 { // If there's a line after the range, delete the \n from the end of the row range - ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer) + ( + ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer), + rows.end, + ) } else { // If there isn't a line after the range, delete the \n from the line before the // start of the row range edit_start = edit_start.saturating_sub(1); - buffer.len() + (buffer.len(), rows.start.previous_row()) }; - let (cursor, goal) = movement::down_by_rows( - &display_map, + let text_layout_details = self.text_layout_details(window); + let x = display_map.x_for_display_point( selection.head().to_display_point(&display_map), - rows.len() as u32, - selection.goal, - false, - &self.text_layout_details(window), + &text_layout_details, ); + let row = Point::new(target_row.0, 0) + .to_display_point(&display_map) + .row(); + let column = display_map.display_column_for_x(row, x, &text_layout_details); new_cursors.push(( selection.id, - buffer.anchor_after(cursor.to_point(&display_map)), - goal, + buffer.anchor_after(DisplayPoint::new(row, column).to_point(&display_map)), + SelectionGoal::None, )); edit_ranges.push(edit_start..edit_end); } @@ -10539,7 +10374,7 @@ impl Editor { return; } let mut row_ranges = Vec::>::new(); - for selection in self.selections.all::(cx) { + for selection in self.selections.all::(&self.display_snapshot(cx)) { let start = MultiBufferRow(selection.start.row); // Treat single line selections as if they include the next line. Otherwise this action // would do nothing for single line selections individual cursors. @@ -10682,7 +10517,11 @@ impl Editor { let mut edits = Vec::new(); let mut boundaries = Vec::new(); - for selection in self.selections.all::(cx).iter() { + for selection in self + .selections + .all::(&self.display_snapshot(cx)) + .iter() + { let Some(wrap_config) = snapshot .language_at(selection.start) .and_then(|lang| lang.config().wrap_characters.clone()) @@ -10752,7 +10591,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_ids = HashSet::default(); let snapshot = self.buffer().read(cx).snapshot(cx); - for selection in self.selections.all::(cx) { + for selection in self.selections.all::(&self.display_snapshot(cx)) { buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range())) } @@ -10769,7 +10608,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let selections = self .selections - .all(cx) + .all(&self.display_snapshot(cx)) .into_iter() .map(|s| s.range()) .collect(); @@ -10803,6 +10642,20 @@ impl Editor { } } + pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { + if let Some(status) = self + .addons + .iter() + .find_map(|(_, addon)| addon.override_status_for_buffer_id(buffer_id, cx)) + { + return Some(status); + } + self.project + .as_ref()? 
+ .read(cx) + .status_for_buffer_id(buffer_id, cx) + } + pub fn open_active_item_in_terminal( &mut self, _: &OpenInTerminal, @@ -11177,7 +11030,7 @@ impl Editor { let mut edits = Vec::new(); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut selections = selections.iter().peekable(); let mut contiguous_row_selections = Vec::new(); let mut new_selections = Vec::new(); @@ -11579,7 +11432,7 @@ impl Editor { let mut edits = Vec::new(); let mut selection_adjustment = 0i32; - for selection in self.selections.all_adjusted(cx) { + for selection in self.selections.all_adjusted(&self.display_snapshot(cx)) { let selection_is_empty = selection.is_empty(); let (start, end) = if selection_is_empty { @@ -11671,7 +11524,7 @@ impl Editor { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut edits = Vec::new(); let mut selections_iter = selections.iter().peekable(); @@ -11713,7 +11566,7 @@ impl Editor { end } else { text.push('\n'); - Point::new(rows.end.0, 0) + Point::new(rows.start.0, 0) } } else { text.push('\n'); @@ -11729,11 +11582,57 @@ impl Editor { } } - self.transact(window, cx, |this, _, cx| { + self.transact(window, cx, |this, window, cx| { this.buffer.update(cx, |buffer, cx| { buffer.edit(edits, None, cx); }); + // When duplicating upward with whole lines, move the cursor to the duplicated line + if upwards && whole_lines { + let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx)); + + this.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + let mut new_ranges = Vec::new(); + let selections = s.all::(&display_map); + let mut selections_iter = selections.iter().peekable(); + + while let Some(first_selection) = selections_iter.next() { + // Group contiguous selections together to find the total row span + let mut group_selections = vec![first_selection]; + let mut rows = first_selection.spanned_rows(false, &display_map); + + while let Some(next_selection) = selections_iter.peek() { + let next_rows = next_selection.spanned_rows(false, &display_map); + if next_rows.start < rows.end { + rows.end = next_rows.end; + group_selections.push(selections_iter.next().unwrap()); + } else { + break; + } + } + + let row_count = rows.end.0 - rows.start.0; + + // Move all selections in this group up by the total number of duplicated rows + for selection in group_selections { + let new_start = Point::new( + selection.start.row.saturating_sub(row_count), + selection.start.column, + ); + + let new_end = Point::new( + selection.end.row.saturating_sub(row_count), + selection.end.column, + ); + + new_ranges.push(new_start..new_end); + } + } + + s.select_ranges(new_ranges); + }); + } + this.request_autoscroll(Autoscroll::fit(), cx); }); } @@ -11779,7 +11678,7 @@ impl Editor { let mut unfold_ranges = Vec::new(); let mut refold_creases = Vec::new(); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut selections = selections.iter().peekable(); let mut contiguous_row_selections = Vec::new(); let mut new_selections = Vec::new(); @@ -11890,7 +11789,7 @@ impl Editor { let mut unfold_ranges = Vec::new(); let mut refold_creases = Vec::new(); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut selections = selections.iter().peekable(); let mut 
contiguous_row_selections = Vec::new(); let mut new_selections = Vec::new(); @@ -12023,7 +11922,7 @@ impl Editor { }); this.buffer .update(cx, |buffer, cx| buffer.edit(edits, None, cx)); - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| { s.select(selections); }); @@ -12042,7 +11941,7 @@ impl Editor { pub fn rewrap_impl(&mut self, options: RewrapOptions, cx: &mut Context) { let buffer = self.buffer.read(cx).snapshot(cx); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); #[derive(Clone, Debug, PartialEq)] enum CommentFormat { @@ -12418,7 +12317,7 @@ impl Editor { ) -> ClipboardItem { let mut text = String::new(); let buffer = self.buffer.read(cx).snapshot(cx); - let mut selections = self.selections.all::(cx); + let mut selections = self.selections.all::(&self.display_snapshot(cx)); let mut clipboard_selections = Vec::with_capacity(selections.len()); { let max_point = buffer.max_point(); @@ -12514,7 +12413,7 @@ impl Editor { } fn do_copy(&self, strip_leading_indents: bool, cx: &mut Context) { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let mut text = String::new(); @@ -12625,8 +12524,9 @@ impl Editor { self.transact(window, cx, |this, window, cx| { let had_active_edit_prediction = this.has_active_edit_prediction(); - let old_selections = this.selections.all::(cx); - let cursor_offset = this.selections.last::(cx).head(); + let display_map = this.display_snapshot(cx); + let old_selections = this.selections.all::(&display_map); + let cursor_offset = this.selections.last::(&display_map).head(); if let Some(mut clipboard_selections) = clipboard_selections { let all_selections_were_entire_line = @@ -12707,7 +12607,7 @@ impl Editor { ); }); - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); } else { let url = url::Url::parse(&clipboard_text).ok(); @@ -12772,7 +12672,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); if selections.is_empty() { log::warn!("There should always be at least one selection in Zed. 
This is a bug."); @@ -14035,7 +13935,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let mut selection = self.selections.last::(cx); + let mut selection = self.selections.last::(&self.display_snapshot(cx)); selection.set_head(Point::zero(), SelectionGoal::None); self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { @@ -14114,7 +14014,7 @@ impl Editor { pub fn select_to_end(&mut self, _: &SelectToEnd, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); - let mut selection = self.selections.first::(cx); + let mut selection = self.selections.first::(&self.display_snapshot(cx)); selection.set_head(buffer.len(), SelectionGoal::None); self.change_selections(Default::default(), window, cx, |s| { s.select(vec![selection]); @@ -14132,7 +14032,7 @@ impl Editor { pub fn select_line(&mut self, _: &SelectLine, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections.all::(cx); + let mut selections = self.selections.all::(&display_map); let max_point = display_map.buffer_snapshot().max_point(); for selection in &mut selections { let rows = selection.spanned_rows(true, &display_map); @@ -14153,7 +14053,7 @@ impl Editor { ) { let selections = self .selections - .all::(cx) + .all::(&self.display_snapshot(cx)) .into_iter() .map(|selection| selection.start..selection.end) .collect::>(); @@ -14206,27 +14106,33 @@ impl Editor { pub fn add_selection_above( &mut self, - _: &AddSelectionAbove, + action: &AddSelectionAbove, window: &mut Window, cx: &mut Context, ) { - self.add_selection(true, window, cx); + self.add_selection(true, action.skip_soft_wrap, window, cx); } pub fn add_selection_below( &mut self, - _: &AddSelectionBelow, + action: &AddSelectionBelow, window: &mut Window, cx: &mut Context, ) { - self.add_selection(false, window, cx); + self.add_selection(false, action.skip_soft_wrap, window, cx); } - fn add_selection(&mut self, above: bool, window: &mut Window, cx: &mut Context) { + fn add_selection( + &mut self, + above: bool, + skip_soft_wrap: bool, + window: &mut Window, + cx: &mut Context, + ) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let all_selections = self.selections.all::(cx); + let all_selections = self.selections.all::(&display_map); let text_layout_details = self.text_layout_details(window); let (mut columnar_selections, new_selections_to_columnarize) = { @@ -14309,12 +14215,19 @@ impl Editor { }; let mut maybe_new_selection = None; + let direction = if above { -1 } else { 1 }; + while row != end_row { - if above { + if skip_soft_wrap { + row = display_map + .start_of_relative_buffer_row(DisplayPoint::new(row, 0), direction) + .row(); + } else if above { row.0 -= 1; } else { row.0 += 1; } + if let Some(new_selection) = self.selections.build_columnar_selection( &display_map, row, @@ -14353,7 +14266,7 @@ impl Editor { let final_selection_ids: HashSet<_> = self .selections - .all::(cx) + .all::(&display_map) .iter() .map(|s| s.id) .collect(); @@ -14411,7 +14324,7 @@ impl Editor { cx: &mut Context, ) -> Result<()> { let buffer = display_map.buffer_snapshot(); - let mut selections = self.selections.all::(cx); + let mut selections = 
self.selections.all::(&display_map); if let Some(mut select_next_state) = self.select_next_state.take() { let query = &select_next_state.query; if !select_next_state.done { @@ -14419,6 +14332,10 @@ impl Editor { let last_selection = selections.iter().max_by_key(|s| s.id).unwrap(); let mut next_selected_range = None; + // Collect and sort selection ranges for efficient overlap checking + let mut selection_ranges: Vec<_> = selections.iter().map(|s| s.range()).collect(); + selection_ranges.sort_by_key(|r| r.start); + let bytes_after_last_selection = buffer.bytes_in_range(last_selection.end..buffer.len()); let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start); @@ -14440,11 +14357,20 @@ impl Editor { || (!buffer.is_inside_word(offset_range.start, None) && !buffer.is_inside_word(offset_range.end, None)) { - // TODO: This is n^2, because we might check all the selections - if !selections - .iter() - .any(|selection| selection.range().overlaps(&offset_range)) - { + // Use binary search to check for overlap (O(log n)) + let overlaps = selection_ranges + .binary_search_by(|range| { + if range.end <= offset_range.start { + std::cmp::Ordering::Less + } else if range.start >= offset_range.end { + std::cmp::Ordering::Greater + } else { + std::cmp::Ordering::Equal + } + }) + .is_ok(); + + if !overlaps { next_selected_range = Some(offset_range); break; } @@ -14572,7 +14498,7 @@ impl Editor { let mut new_selections = Vec::new(); - let reversed = self.selections.oldest::(cx).reversed; + let reversed = self.selections.oldest::(&display_map).reversed; let buffer = display_map.buffer_snapshot(); let query_matches = select_next_state .query @@ -14636,7 +14562,7 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); - let mut selections = self.selections.all::(cx); + let mut selections = self.selections.all::(&display_map); if let Some(mut select_prev_state) = self.select_prev_state.take() { let query = &select_prev_state.query; if !select_prev_state.done { @@ -14824,7 +14750,9 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let text_layout_details = &self.text_layout_details(window); self.transact(window, cx, |this, window, cx| { - let mut selections = this.selections.all::(cx); + let mut selections = this + .selections + .all::(&this.display_snapshot(cx)); let mut edits = Vec::new(); let mut selection_edit_ranges = Vec::new(); let mut last_toggled_row = None; @@ -15055,7 +14983,7 @@ impl Editor { // Adjust selections so that they end before any comment suffixes that // were inserted. 
let mut suffixes_inserted = suffixes_inserted.into_iter().peekable(); - let mut selections = this.selections.all::(cx); + let mut selections = this.selections.all::(&this.display_snapshot(cx)); let snapshot = this.buffer.read(cx).read(cx); for selection in &mut selections { while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() { @@ -15081,7 +15009,7 @@ impl Editor { drop(snapshot); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); let selections_on_single_row = selections.windows(2).all(|selections| { selections[0].start.row == selections[1].start.row && selections[0].end.row == selections[1].end.row @@ -15125,7 +15053,10 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let buffer = self.buffer.read(cx).snapshot(cx); - let old_selections = self.selections.all::(cx).into_boxed_slice(); + let old_selections = self + .selections + .all::(&self.display_snapshot(cx)) + .into_boxed_slice(); fn update_selection( selection: &Selection, @@ -15180,7 +15111,10 @@ impl Editor { let Some(visible_row_count) = self.visible_row_count() else { return; }; - let old_selections: Box<[_]> = self.selections.all::(cx).into(); + let old_selections: Box<[_]> = self + .selections + .all::(&self.display_snapshot(cx)) + .into(); if old_selections.is_empty() { return; } @@ -15338,7 +15272,7 @@ impl Editor { let buffer = self.buffer.read(cx).snapshot(cx); let selections = self .selections - .all::(cx) + .all::(&self.display_snapshot(cx)) .into_iter() // subtracting the offset requires sorting .sorted_by_key(|i| i.start); @@ -15410,7 +15344,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self.selections.all::(cx).into(); + let old_selections: Box<[_]> = self + .selections + .all::(&self.display_snapshot(cx)) + .into(); if old_selections.is_empty() { return; } @@ -15459,7 +15396,10 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self.selections.all::(cx).into(); + let old_selections: Box<[_]> = self + .selections + .all::(&self.display_snapshot(cx)) + .into(); if old_selections.is_empty() { return; } @@ -16008,7 +15948,7 @@ impl Editor { cx: &mut Context, ) { let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); let mut active_group_id = None; if let ActiveDiagnostic::Group(active_group) = &self.active_diagnostics @@ -16089,7 +16029,7 @@ impl Editor { pub fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); self.go_to_hunk_before_or_after_position( &snapshot, selection.head(), @@ -16150,7 +16090,7 @@ impl Editor { ) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&snapshot.display_snapshot); self.go_to_hunk_before_or_after_position( &snapshot, selection.head(), @@ -16240,7 +16180,10 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let snapshot = self.snapshot(window, cx); let 
buffer = &snapshot.buffer_snapshot(); - let position = self.selections.newest::(cx).head(); + let position = self + .selections + .newest::(&snapshot.display_snapshot) + .head(); let anchor_position = buffer.anchor_after(position); // Get all document highlights (both read and write) @@ -16423,7 +16366,10 @@ impl Editor { let Some(provider) = self.semantics_provider.clone() else { return Task::ready(Ok(Navigated::No)); }; - let head = self.selections.newest::(cx).head(); + let head = self + .selections + .newest::(&self.display_snapshot(cx)) + .head(); let buffer = self.buffer.read(cx); let Some((buffer, head)) = buffer.text_anchor_for_position(head, cx) else { return Task::ready(Ok(Navigated::No)); @@ -16754,7 +16700,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option>> { - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); let multi_buffer = self.buffer.read(cx); let head = selection.head(); @@ -16915,40 +16861,31 @@ impl Editor { editor }) }); - editor.update(cx, |editor, cx| { - match multibuffer_selection_mode { - MultibufferSelectionMode::First => { - if let Some(first_range) = ranges.first() { - editor.change_selections( - SelectionEffects::no_scroll(), - window, - cx, - |selections| { - selections.clear_disjoint(); - selections - .select_anchor_ranges(std::iter::once(first_range.clone())); - }, - ); - } - editor.highlight_background::( - &ranges, - |theme| theme.colors().editor_highlighted_line_background, - cx, - ); - } - MultibufferSelectionMode::All => { + editor.update(cx, |editor, cx| match multibuffer_selection_mode { + MultibufferSelectionMode::First => { + if let Some(first_range) = ranges.first() { editor.change_selections( SelectionEffects::no_scroll(), window, cx, |selections| { selections.clear_disjoint(); - selections.select_anchor_ranges(ranges); + selections.select_anchor_ranges(std::iter::once(first_range.clone())); }, ); } + editor.highlight_background::( + &ranges, + |theme| theme.colors().editor_highlighted_line_background, + cx, + ); + } + MultibufferSelectionMode::All => { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { + selections.clear_disjoint(); + selections.select_anchor_ranges(ranges); + }); } - editor.register_buffers_with_language_servers(cx); }); let item = Box::new(editor); @@ -17238,7 +17175,10 @@ impl Editor { if moving_cursor { let cursor_in_rename_editor = rename.editor.update(cx, |editor, cx| { - editor.selections.newest::(cx).head() + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head() }); // Update the selection to match the position of the selection inside @@ -17301,7 +17241,7 @@ impl Editor { let ranges = self .selections - .all_adjusted(cx) + .all_adjusted(&self.display_snapshot(cx)) .into_iter() .map(|selection| selection.range()) .collect_vec(); @@ -17493,9 +17433,9 @@ impl Editor { HashSet::default(), cx, ); - cx.emit(project::Event::RefreshInlayHints); }); }); + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); } } @@ -17795,7 +17735,7 @@ impl Editor { window: &Window, cx: &mut Context, ) -> Option<()> { - if !self.mode().is_full() { + if self.ignore_lsp_data() { return None; } let pull_diagnostics_settings = ProjectSettings::get_global(cx) @@ -17807,8 +17747,14 @@ impl Editor { let project = self.project()?.downgrade(); let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms); let mut buffers = self.buffer.read(cx).all_buffers(); - if let Some(buffer_id) = 
buffer_id { - buffers.retain(|buffer| buffer.read(cx).remote_id() == buffer_id); + buffers.retain(|buffer| { + let buffer_id_to_retain = buffer.read(cx).remote_id(); + buffer_id.is_none_or(|buffer_id| buffer_id == buffer_id_to_retain) + && self.registered_buffers.contains_key(&buffer_id_to_retain) + }); + if buffers.is_empty() { + self.pull_diagnostics_task = Task::ready(()); + return None; } self.pull_diagnostics_task = cx.spawn_in(window, async move |editor, cx| { @@ -17994,9 +17940,9 @@ impl Editor { cx: &mut Context, ) { if self.buffer_kind(cx) == ItemBufferKind::Singleton { - let selection = self.selections.newest::(cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selection = self.selections.newest::(&display_map); + let range = if selection.is_empty() { let point = selection.head().to_display_point(&display_map); let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); @@ -18039,7 +17985,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let range = if selection.is_empty() { @@ -18062,7 +18008,7 @@ impl Editor { if self.buffer_kind(cx) == ItemBufferKind::Singleton { let mut to_fold = Vec::new(); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all_adjusted(cx); + let selections = self.selections.all_adjusted(&display_map); for selection in selections { let range = selection.range().sorted(); @@ -18169,7 +18115,7 @@ impl Editor { let row_ranges_to_keep: Vec> = self .selections - .all::(cx) + .all::(&self.display_snapshot(cx)) .into_iter() .map(|sel| sel.start.row..sel.end.row) .collect(); @@ -18344,7 +18290,7 @@ impl Editor { ) { let mut to_fold = Vec::new(); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all_adjusted(cx); + let selections = self.selections.all_adjusted(&display_map); for selection in selections { let range = selection.range().sorted(); @@ -18388,7 +18334,7 @@ impl Editor { if let Some(crease) = display_map.crease_for_buffer_row(buffer_row) { let autoscroll = self .selections - .all::(cx) + .all::(&display_map) .iter() .any(|selection| crease.range().overlaps(&selection.range())); @@ -18400,7 +18346,7 @@ impl Editor { if self.buffer_kind(cx) == ItemBufferKind::Singleton { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = display_map.buffer_snapshot(); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let ranges = selections .iter() .map(|s| { @@ -18434,7 +18380,7 @@ impl Editor { cx: &mut Context, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let ranges = selections .iter() .map(|s| { @@ -18466,7 +18412,7 @@ impl Editor { let autoscroll = self .selections - .all::(cx) + .all::(&display_map) .iter() .any(|selection| RangeExt::overlaps(&selection.range(), &intersection_range)); @@ -18501,8 +18447,8 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let selections = self.selections.all_adjusted(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all_adjusted(&display_map); let ranges = selections 
.into_iter() .map(|s| Crease::simple(s.range(), display_map.fold_placeholder.clone())) @@ -18681,6 +18627,17 @@ impl Editor { }); } + pub fn collapse_all_diff_hunks( + &mut self, + _: &CollapseAllDiffHunks, + _window: &mut Window, + cx: &mut Context, + ) { + self.buffer.update(cx, |buffer, cx| { + buffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + }); + } + pub fn toggle_selected_diff_hunks( &mut self, _: &ToggleSelectedDiffHunks, @@ -18835,7 +18792,10 @@ impl Editor { self.stage_or_unstage_diff_hunks(stage, ranges, cx); let snapshot = self.snapshot(window, cx); - let position = self.selections.newest::(cx).head(); + let position = self + .selections + .newest::(&snapshot.display_snapshot) + .head(); let mut row = snapshot .buffer_snapshot() .diff_hunks_in_range(position..snapshot.buffer_snapshot().max_point()) @@ -18983,7 +18943,7 @@ impl Editor { let snapshot = self.snapshot(window, cx); let hunks = snapshot.hunks_for_ranges( self.selections - .all(cx) + .all(&snapshot.display_snapshot) .into_iter() .map(|selection| selection.range()), ); @@ -19719,7 +19679,10 @@ impl Editor { ) -> Option<()> { let blame = self.blame.as_ref()?; let snapshot = self.snapshot(window, cx); - let cursor = self.selections.newest::(cx).head(); + let cursor = self + .selections + .newest::(&snapshot.display_snapshot) + .head(); let (buffer, point, _) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)?; let (_, blame_entry) = blame .update(cx, |blame, cx| { @@ -19861,7 +19824,7 @@ impl Editor { fn get_permalink_to_line(&self, cx: &mut Context) -> Task> { let buffer_and_selection = maybe!({ - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); let selection_range = selection.range(); let multi_buffer = self.buffer().read(cx); @@ -19939,7 +19902,12 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - let selection = self.selections.newest::(cx).start.row + 1; + let selection = self + .selections + .newest::(&self.display_snapshot(cx)) + .start + .row + + 1; if let Some(file) = self.target_file(cx) { let path = file.path().display(file.path_style(cx)); cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); @@ -20010,7 +19978,7 @@ impl Editor { self.transact(window, cx, |this, window, cx| { let edits = this .selections - .all::(cx) + .all::(&this.display_snapshot(cx)) .into_iter() .map(|selection| { let uuid = match version { @@ -20655,18 +20623,6 @@ impl Editor { cx.notify(); } - pub(crate) fn highlight_inlays( - &mut self, - highlights: Vec, - style: HighlightStyle, - cx: &mut Context, - ) { - self.display_map.update(cx, |map, _| { - map.highlight_inlays(TypeId::of::(), highlights, style) - }); - cx.notify(); - } - pub fn text_highlights<'a, T: 'static>( &'a self, cx: &'a App, @@ -20805,10 +20761,7 @@ impl Editor { cx: &mut Context, ) { match event { - multi_buffer::Event::Edited { - singleton_buffer_edited, - edited_buffer, - } => { + multi_buffer::Event::Edited { edited_buffer } => { self.scrollbar_marker_state.dirty = true; self.active_indent_guides_state.dirty = true; self.refresh_active_diagnostics(cx); @@ -20819,57 +20772,26 @@ impl Editor { if self.has_active_edit_prediction() { self.update_visible_edit_prediction(window, cx); } - if let Some(project) = self.project.as_ref() - && let Some(edited_buffer) = edited_buffer - { - project.update(cx, |project, cx| { - self.registered_buffers - .entry(edited_buffer.read(cx).remote_id()) - .or_insert_with(|| { - 
project.register_buffer_with_language_servers(edited_buffer, cx) - }); - }); - } - cx.emit(EditorEvent::BufferEdited); - cx.emit(SearchEvent::MatchesInvalidated); if let Some(buffer) = edited_buffer { - self.update_lsp_data(false, Some(buffer.read(cx).remote_id()), window, cx); - } - - if *singleton_buffer_edited { - if let Some(buffer) = edited_buffer - && buffer.read(cx).file().is_none() - { + if buffer.read(cx).file().is_none() { cx.emit(EditorEvent::TitleChanged); } - if let Some(project) = &self.project { - #[allow(clippy::mutable_key_type)] - let languages_affected = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .all_buffers() - .into_iter() - .filter_map(|buffer| { - buffer.update(cx, |buffer, cx| { - let language = buffer.language()?; - let should_discard = project.update(cx, |project, cx| { - project.is_local() - && !project.has_language_servers_for(buffer, cx) - }); - should_discard.not().then_some(language.clone()) - }) - }) - .collect::>() - }); - if !languages_affected.is_empty() { - self.refresh_inlay_hints( - InlayHintRefreshReason::BufferEdited(languages_affected), - cx, - ); - } + + if self.project.is_some() { + let buffer_id = buffer.read(cx).remote_id(); + self.register_buffer(buffer_id, cx); + self.update_lsp_data(Some(buffer_id), window, cx); + self.refresh_inlay_hints( + InlayHintRefreshReason::BufferEdited(buffer_id), + cx, + ); } } + cx.emit(EditorEvent::BufferEdited); + cx.emit(SearchEvent::MatchesInvalidated); + let Some(project) = &self.project else { return }; let (telemetry, is_via_ssh) = { let project = project.read(cx); @@ -20877,7 +20799,6 @@ impl Editor { let is_via_ssh = project.is_via_remote_server(); (telemetry, is_via_ssh) }; - refresh_linked_ranges(self, window, cx); telemetry.log_edit_event("editor", is_via_ssh); } multi_buffer::Event::ExcerptsAdded { @@ -20899,24 +20820,25 @@ impl Editor { ) .detach(); } - if self.active_diagnostics != ActiveDiagnostic::All { - self.update_lsp_data(false, Some(buffer_id), window, cx); - } + self.update_lsp_data(Some(buffer_id), window, cx); + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); cx.emit(EditorEvent::ExcerptsAdded { buffer: buffer.clone(), predecessor: *predecessor, excerpts: excerpts.clone(), }); - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); } multi_buffer::Event::ExcerptsRemoved { ids, removed_buffer_ids, } => { + if let Some(inlay_hints) = &mut self.inlay_hints { + inlay_hints.remove_inlay_chunk_data(removed_buffer_ids); + } self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); - let buffer = self.buffer.read(cx); - self.registered_buffers - .retain(|buffer_id, _| buffer.buffer(*buffer_id).is_some()); + for buffer_id in removed_buffer_ids { + self.registered_buffers.remove(buffer_id); + } jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::ExcerptsRemoved { ids: ids.clone(), @@ -20936,6 +20858,7 @@ impl Editor { } multi_buffer::Event::ExcerptsExpanded { ids } => { self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + self.refresh_document_highlights(cx); cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) } multi_buffer::Event::Reparsed(buffer_id) => { @@ -20948,7 +20871,7 @@ impl Editor { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); } multi_buffer::Event::LanguageChanged(buffer_id) => { - linked_editing_ranges::refresh_linked_ranges(self, window, cx); + self.registered_buffers.remove(&buffer_id); 
jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::Reparsed(*buffer_id)); cx.notify(); @@ -21086,10 +21009,10 @@ impl Editor { if let Some(inlay_splice) = self.colors.as_mut().and_then(|colors| { colors.render_mode_updated(EditorSettings::get_global(cx).lsp_document_colors) }) { - if !inlay_splice.to_insert.is_empty() || !inlay_splice.to_remove.is_empty() { + if !inlay_splice.is_empty() { self.splice_inlays(&inlay_splice.to_remove, inlay_splice.to_insert, cx); } - self.refresh_colors(false, None, window, cx); + self.refresh_colors_for_visible_range(None, window, cx); } cx.notify(); @@ -21103,65 +21026,6 @@ impl Editor { self.searchable } - fn open_proposed_changes_editor( - &mut self, - _: &OpenProposedChangesEditor, - window: &mut Window, - cx: &mut Context, - ) { - let Some(workspace) = self.workspace() else { - cx.propagate(); - return; - }; - - let selections = self.selections.all::(cx); - let multi_buffer = self.buffer.read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let mut new_selections_by_buffer = HashMap::default(); - for selection in selections { - for (buffer, range, _) in - multi_buffer_snapshot.range_to_buffer_ranges(selection.start..selection.end) - { - let mut range = range.to_point(buffer); - range.start.column = 0; - range.end.column = buffer.line_len(range.end.row); - new_selections_by_buffer - .entry(multi_buffer.buffer(buffer.remote_id()).unwrap()) - .or_insert(Vec::new()) - .push(range) - } - } - - let proposed_changes_buffers = new_selections_by_buffer - .into_iter() - .map(|(buffer, ranges)| ProposedChangeLocation { buffer, ranges }) - .collect::>(); - let proposed_changes_editor = cx.new(|cx| { - ProposedChangesEditor::new( - "Proposed changes", - proposed_changes_buffers, - self.project.clone(), - window, - cx, - ) - }); - - window.defer(cx, move |window, cx| { - workspace.update(cx, |workspace, cx| { - workspace.active_pane().update(cx, |pane, cx| { - pane.add_item( - Box::new(proposed_changes_editor), - true, - true, - None, - window, - cx, - ); - }); - }); - }); - } - pub fn open_excerpts_in_split( &mut self, _: &OpenExcerptsSplit, @@ -21238,7 +21102,7 @@ impl Editor { } } None => { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&self.display_snapshot(cx)); let multi_buffer = self.buffer.read(cx); for selection in selections { for (snapshot, range, _, anchor) in multi_buffer @@ -21376,7 +21240,9 @@ impl Editor { range: Range, cx: &mut App, ) -> Vec> { - let selections = self.selections.all::(cx); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); let newest_selection = selections .iter() .max_by_key(|selection| selection.id) @@ -21479,7 +21345,10 @@ impl Editor { if selection.range.is_empty() { None } else { - Some(selection.range) + Some( + snapshot.offset_utf16_to_offset(OffsetUtf16(selection.range.start)) + ..snapshot.offset_utf16_to_offset(OffsetUtf16(selection.range.end)), + ) } }) .unwrap_or_else(|| 0..snapshot.len()); @@ -21539,14 +21408,13 @@ impl Editor { cx: &mut Context, ) { self.request_autoscroll(Autoscroll::newest(), cx); - let position = self.selections.newest_display(cx).start; + let position = self + .selections + .newest_display(&self.display_snapshot(cx)) + .start; mouse_context_menu::deploy_context_menu(self, None, position, window, cx); } - pub fn inlay_hint_cache(&self) -> &InlayHintCache { - &self.inlay_hint_cache - } - pub fn replay_insert_event( &mut self, text: &str, @@ -21559,7 +21427,9 @@ impl Editor { 
return; } if let Some(relative_utf16_range) = relative_utf16_range { - let selections = self.selections.all::(cx); + let selections = self + .selections + .all::(&self.display_snapshot(cx)); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { let new_ranges = selections.into_iter().map(|range| { let start = OffsetUtf16( @@ -21583,21 +21453,6 @@ impl Editor { self.handle_input(text, window, cx); } - pub fn supports_inlay_hints(&self, cx: &mut App) -> bool { - let Some(provider) = self.semantics_provider.as_ref() else { - return false; - }; - - let mut supports = false; - self.buffer().update(cx, |this, cx| { - this.for_each_buffer(|buffer| { - supports |= provider.supports_inlay_hints(buffer, cx); - }); - }); - - supports - } - pub fn is_focused(&self, window: &Window) -> bool { self.focus_handle.is_focused(window) } @@ -21699,7 +21554,7 @@ impl Editor { } let transaction = self.transact(window, cx, |this, window, cx| { - let selections = this.selections.all::(cx); + let selections = this.selections.all::(&this.display_snapshot(cx)); let edits = selections .iter() .map(|selection| (selection.end..selection.end, pending.clone())); @@ -21718,7 +21573,7 @@ impl Editor { let snapshot = self.snapshot(window, cx); let ranges = self .selections - .all::(cx) + .all::(&snapshot.display_snapshot) .into_iter() .map(|selection| { snapshot.buffer_snapshot().anchor_after(selection.end) @@ -22001,13 +21856,44 @@ impl Editor { fn update_lsp_data( &mut self, - ignore_cache: bool, for_buffer: Option, window: &mut Window, cx: &mut Context<'_, Self>, ) { self.pull_diagnostics(for_buffer, window, cx); - self.refresh_colors(ignore_cache, for_buffer, window, cx); + self.refresh_colors_for_visible_range(for_buffer, window, cx); + } + + fn register_visible_buffers(&mut self, cx: &mut Context) { + if self.ignore_lsp_data() { + return; + } + for (_, (visible_buffer, _, _)) in self.visible_excerpts(cx) { + self.register_buffer(visible_buffer.read(cx).remote_id(), cx); + } + } + + fn register_buffer(&mut self, buffer_id: BufferId, cx: &mut Context) { + if !self.registered_buffers.contains_key(&buffer_id) + && let Some(project) = self.project.as_ref() + { + if let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) { + project.update(cx, |project, cx| { + self.registered_buffers.insert( + buffer_id, + project.register_buffer_with_language_servers(&buffer, cx), + ); + }); + } else { + self.registered_buffers.remove(&buffer_id); + } + } + } + + fn ignore_lsp_data(&self) -> bool { + // `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view, + // skip any LSP updates for it. 
+ self.active_diagnostics == ActiveDiagnostic::All || !self.mode().is_full() } } @@ -22709,20 +22595,23 @@ pub trait SemanticsProvider { cx: &mut App, ) -> Option>>>; - fn inlay_hints( + fn applicable_inlay_chunks( &self, - buffer_handle: Entity, - range: Range, + buffer: &Entity, + ranges: &[Range], cx: &mut App, - ) -> Option>>>; + ) -> Vec>; - fn resolve_inlay_hint( + fn invalidate_inlay_hints(&self, for_buffers: &HashSet, cx: &mut App); + + fn inlay_hints( &self, - hint: InlayHint, - buffer_handle: Entity, - server_id: LanguageServerId, + invalidate: InvalidationStrategy, + buffer: Entity, + ranges: Vec>, + known_chunks: Option<(clock::Global, HashSet>)>, cx: &mut App, - ) -> Option>>; + ) -> Option, Task>>>; fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool; @@ -23034,11 +22923,7 @@ fn snippet_completions( }), lsp_defaults: None, }, - label: CodeLabel { - text: matching_prefix.clone(), - runs: Vec::new(), - filter_range: 0..matching_prefix.len(), - }, + label: CodeLabel::plain(matching_prefix.clone(), None), icon_path: None, documentation: Some(CompletionDocumentation::SingleLineAndMultiLinePlainText { single_line: snippet.name.clone().into(), @@ -23219,26 +23104,33 @@ impl SemanticsProvider for Entity { }) } - fn inlay_hints( + fn applicable_inlay_chunks( &self, - buffer_handle: Entity, - range: Range, + buffer: &Entity, + ranges: &[Range], cx: &mut App, - ) -> Option>>> { - Some(self.update(cx, |project, cx| { - project.inlay_hints(buffer_handle, range, cx) - })) + ) -> Vec> { + self.read(cx).lsp_store().update(cx, |lsp_store, cx| { + lsp_store.applicable_inlay_chunks(buffer, ranges, cx) + }) + } + + fn invalidate_inlay_hints(&self, for_buffers: &HashSet, cx: &mut App) { + self.read(cx).lsp_store().update(cx, |lsp_store, _| { + lsp_store.invalidate_inlay_hints(for_buffers) + }); } - fn resolve_inlay_hint( + fn inlay_hints( &self, - hint: InlayHint, - buffer_handle: Entity, - server_id: LanguageServerId, + invalidate: InvalidationStrategy, + buffer: Entity, + ranges: Vec>, + known_chunks: Option<(clock::Global, HashSet>)>, cx: &mut App, - ) -> Option>> { - Some(self.update(cx, |project, cx| { - project.resolve_inlay_hint(hint, buffer_handle, server_id, cx) + ) -> Option, Task>>> { + Some(self.read(cx).lsp_store().update(cx, |lsp_store, cx| { + lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx) })) } @@ -23287,16 +23179,6 @@ impl SemanticsProvider for Entity { } } -fn inlay_hint_settings( - location: Anchor, - snapshot: &MultiBufferSnapshot, - cx: &mut Context, -) -> InlayHintSettings { - let file = snapshot.file_at(location); - let language = snapshot.language_at(location).map(|l| l.name()); - language_settings(language, file, cx).inlay_hints -} - fn consume_contiguous_rows( contiguous_row_selections: &mut Vec>, selection: &Selection, @@ -23351,7 +23233,7 @@ impl EditorSnapshot { self.buffer_snapshot() .selections_in_range(range, false) .filter_map(move |(replica_id, line_mode, cursor_shape, selection)| { - if replica_id == AGENT_REPLICA_ID { + if replica_id == ReplicaId::AGENT { Some(RemoteSelection { replica_id, selection, @@ -23819,7 +23701,9 @@ impl EntityInputHandler for Editor { return None; } - let selection = self.selections.newest::(cx); + let selection = self + .selections + .newest::(&self.display_snapshot(cx)); let range = selection.range(); Some(UTF16Selection { @@ -23862,7 +23746,7 @@ impl EntityInputHandler for Editor { let range_to_replace = new_selected_ranges.as_ref().and_then(|ranges_to_replace| { let newest_selection_id = 
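The snippet-completion change above replaces a hand-written `CodeLabel` literal (empty runs, `filter_range: 0..text.len()`) with `CodeLabel::plain(matching_prefix.clone(), None)`. A sketch of what such a constructor might look like; the real signature and the meaning of the second argument (treated here as optional filter text) are assumptions inferred only from the call sites in this diff.

use std::ops::Range;

// Simplified stand-in for the completion label type.
struct CodeLabel {
    text: String,
    runs: Vec<(Range<usize>, &'static str)>, // (range, highlight id) — illustrative only
    filter_range: Range<usize>,
}

impl CodeLabel {
    /// Build an unstyled label. With no explicit filter text, the whole label is used
    /// for filtering, matching the struct literal this helper replaces in the diff.
    fn plain(text: String, filter_text: Option<&str>) -> Self {
        let filter_range = filter_text
            .and_then(|filter| text.find(filter).map(|start| start..start + filter.len()))
            .unwrap_or(0..text.len());
        Self { runs: Vec::new(), filter_range, text }
    }
}

fn main() {
    let label = CodeLabel::plain("for".to_string(), None);
    assert_eq!(label.filter_range, 0..3);
    assert!(label.runs.is_empty());
    println!("label text: {}", label.text);
}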
this.selections.newest_anchor().id; this.selections - .all::(cx) + .all::(&this.display_snapshot(cx)) .iter() .zip(ranges_to_replace.iter()) .find_map(|(selection, range)| { @@ -23937,7 +23821,7 @@ impl EntityInputHandler for Editor { let range_to_replace = ranges_to_replace.as_ref().and_then(|ranges_to_replace| { let newest_selection_id = this.selections.newest_anchor().id; this.selections - .all::(cx) + .all::(&this.display_snapshot(cx)) .iter() .zip(ranges_to_replace.iter()) .find_map(|(selection, range)| { @@ -24698,12 +24582,11 @@ fn render_diff_hunk_controls( .alpha(if status.is_pending() { 0.66 } else { 1.0 }) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Stage Hunk", &::git::ToggleStaged, &focus_handle, - window, cx, ) } @@ -24725,12 +24608,11 @@ fn render_diff_hunk_controls( .alpha(if status.is_pending() { 0.66 } else { 1.0 }) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Unstage Hunk", &::git::ToggleStaged, &focus_handle, - window, cx, ) } @@ -24752,14 +24634,8 @@ fn render_diff_hunk_controls( Button::new(("restore", row as u64), "Restore") .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Restore Hunk", - &::git::Restore, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("Restore Hunk", &::git::Restore, &focus_handle, cx) } }) .on_click({ @@ -24784,14 +24660,8 @@ fn render_diff_hunk_controls( // .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { - Tooltip::for_action_in( - "Next Hunk", - &GoToHunk, - &focus_handle, - window, - cx, - ) + move |_window, cx| { + Tooltip::for_action_in("Next Hunk", &GoToHunk, &focus_handle, cx) } }) .on_click({ @@ -24820,12 +24690,11 @@ fn render_diff_hunk_controls( // .disabled(!has_multiple_hunks) .tooltip({ let focus_handle = editor.focus_handle(cx); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Previous Hunk", &GoToPreviousHunk, &focus_handle, - window, cx, ) } diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 066d827bb90b96481823a92ea747d8123b95b47d..dc67ab3ed6c8cfdbe88809e32d615789c01eef60 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -1,16 +1,14 @@ use core::num; -use std::num::NonZeroU32; use gpui::App; use language::CursorShape; use project::project_settings::DiagnosticSeverity; +use settings::Settings; pub use settings::{ - CurrentLineHighlight, DisplayIn, DocumentColorsRenderMode, DoubleClickInMultibuffer, + CurrentLineHighlight, DelayMs, DisplayIn, DocumentColorsRenderMode, DoubleClickInMultibuffer, GoToDefinitionFallback, HideMouseMode, MinimapThumb, MinimapThumbBorder, MultiCursorModifier, ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, SnippetSortOrder, - VsCodeSettings, }; -use settings::{Settings, SettingsContent}; use ui::scrollbars::{ScrollbarVisibility, ShowScrollbar}; /// Imports from the VSCode settings at @@ -22,9 +20,9 @@ pub struct EditorSettings { pub current_line_highlight: CurrentLineHighlight, pub selection_highlight: bool, pub rounded_selection: bool, - pub lsp_highlight_debounce: u64, + pub lsp_highlight_debounce: DelayMs, pub hover_popover_enabled: bool, - pub hover_popover_delay: u64, + pub hover_popover_delay: DelayMs, pub toolbar: Toolbar, pub scrollbar: Scrollbar, pub 
minimap: Minimap, @@ -149,7 +147,7 @@ pub struct DragAndDropSelection { /// The delay in milliseconds that must elapse before drag and drop is allowed. Otherwise, a new text selection is created. /// /// Default: 300 - pub delay: u64, + pub delay: DelayMs, } /// Default options for buffer and project search items. @@ -270,208 +268,4 @@ impl Settings for EditorSettings { minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0, } } - - fn import_from_vscode(vscode: &VsCodeSettings, current: &mut SettingsContent) { - vscode.enum_setting( - "editor.cursorBlinking", - &mut current.editor.cursor_blink, - |s| match s { - "blink" | "phase" | "expand" | "smooth" => Some(true), - "solid" => Some(false), - _ => None, - }, - ); - vscode.enum_setting( - "editor.cursorStyle", - &mut current.editor.cursor_shape, - |s| match s { - "block" => Some(settings::CursorShape::Block), - "block-outline" => Some(settings::CursorShape::Hollow), - "line" | "line-thin" => Some(settings::CursorShape::Bar), - "underline" | "underline-thin" => Some(settings::CursorShape::Underline), - _ => None, - }, - ); - - vscode.enum_setting( - "editor.renderLineHighlight", - &mut current.editor.current_line_highlight, - |s| match s { - "gutter" => Some(CurrentLineHighlight::Gutter), - "line" => Some(CurrentLineHighlight::Line), - "all" => Some(CurrentLineHighlight::All), - _ => None, - }, - ); - - vscode.bool_setting( - "editor.selectionHighlight", - &mut current.editor.selection_highlight, - ); - vscode.bool_setting( - "editor.roundedSelection", - &mut current.editor.rounded_selection, - ); - vscode.bool_setting( - "editor.hover.enabled", - &mut current.editor.hover_popover_enabled, - ); - vscode.u64_setting( - "editor.hover.delay", - &mut current.editor.hover_popover_delay, - ); - - let mut gutter = settings::GutterContent::default(); - vscode.enum_setting( - "editor.showFoldingControls", - &mut gutter.folds, - |s| match s { - "always" | "mouseover" => Some(true), - "never" => Some(false), - _ => None, - }, - ); - vscode.enum_setting( - "editor.lineNumbers", - &mut gutter.line_numbers, - |s| match s { - "on" | "relative" => Some(true), - "off" => Some(false), - _ => None, - }, - ); - if let Some(old_gutter) = current.editor.gutter.as_mut() { - if gutter.folds.is_some() { - old_gutter.folds = gutter.folds - } - if gutter.line_numbers.is_some() { - old_gutter.line_numbers = gutter.line_numbers - } - } else if gutter != settings::GutterContent::default() { - current.editor.gutter = Some(gutter) - } - if let Some(b) = vscode.read_bool("editor.scrollBeyondLastLine") { - current.editor.scroll_beyond_last_line = Some(if b { - ScrollBeyondLastLine::OnePage - } else { - ScrollBeyondLastLine::Off - }) - } - - let mut scrollbar_axes = settings::ScrollbarAxesContent::default(); - vscode.enum_setting( - "editor.scrollbar.horizontal", - &mut scrollbar_axes.horizontal, - |s| match s { - "auto" | "visible" => Some(true), - "hidden" => Some(false), - _ => None, - }, - ); - vscode.enum_setting( - "editor.scrollbar.vertical", - &mut scrollbar_axes.horizontal, - |s| match s { - "auto" | "visible" => Some(true), - "hidden" => Some(false), - _ => None, - }, - ); - - if scrollbar_axes != settings::ScrollbarAxesContent::default() { - let scrollbar_settings = current.editor.scrollbar.get_or_insert_default(); - let axes_settings = scrollbar_settings.axes.get_or_insert_default(); - - if let Some(vertical) = scrollbar_axes.vertical { - axes_settings.vertical = Some(vertical); - } - if let Some(horizontal) = 
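The settings above switch `drag_and_drop_selection.delay` and the debounce fields from a bare `u64` to the `DelayMs` wrapper, which later hunks read with `.0` and feed into `Duration::from_millis`. A small sketch of that newtype pattern, assuming `DelayMs` simply wraps a millisecond count as `u64`; the real `settings::DelayMs` may carry different derives and helpers.

use std::time::Duration;

/// Illustrative newtype for millisecond delays in settings.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct DelayMs(pub u64);

impl From<DelayMs> for Duration {
    fn from(delay: DelayMs) -> Self {
        Duration::from_millis(delay.0)
    }
}

struct DragAndDropSelection {
    enabled: bool,
    delay: DelayMs,
}

fn main() {
    let settings = DragAndDropSelection { enabled: true, delay: DelayMs(300) };
    let timeout: Duration = settings.delay.into();
    assert!(settings.enabled);
    assert_eq!(timeout, Duration::from_millis(300));
}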
scrollbar_axes.horizontal { - axes_settings.horizontal = Some(horizontal); - } - } - - // TODO: check if this does the int->float conversion? - vscode.f32_setting( - "editor.cursorSurroundingLines", - &mut current.editor.vertical_scroll_margin, - ); - vscode.f32_setting( - "editor.mouseWheelScrollSensitivity", - &mut current.editor.scroll_sensitivity, - ); - vscode.f32_setting( - "editor.fastScrollSensitivity", - &mut current.editor.fast_scroll_sensitivity, - ); - if Some("relative") == vscode.read_string("editor.lineNumbers") { - current.editor.relative_line_numbers = Some(true); - } - - vscode.enum_setting( - "editor.find.seedSearchStringFromSelection", - &mut current.editor.seed_search_query_from_cursor, - |s| match s { - "always" => Some(SeedQuerySetting::Always), - "selection" => Some(SeedQuerySetting::Selection), - "never" => Some(SeedQuerySetting::Never), - _ => None, - }, - ); - vscode.bool_setting("search.smartCase", &mut current.editor.use_smartcase_search); - vscode.enum_setting( - "editor.multiCursorModifier", - &mut current.editor.multi_cursor_modifier, - |s| match s { - "ctrlCmd" => Some(MultiCursorModifier::CmdOrCtrl), - "alt" => Some(MultiCursorModifier::Alt), - _ => None, - }, - ); - - vscode.bool_setting( - "editor.parameterHints.enabled", - &mut current.editor.auto_signature_help, - ); - vscode.bool_setting( - "editor.parameterHints.enabled", - &mut current.editor.show_signature_help_after_edits, - ); - - if let Some(use_ignored) = vscode.read_bool("search.useIgnoreFiles") { - let search = current.editor.search.get_or_insert_default(); - search.include_ignored = Some(use_ignored); - } - - let mut minimap = settings::MinimapContent::default(); - let minimap_enabled = vscode.read_bool("editor.minimap.enabled").unwrap_or(true); - let autohide = vscode.read_bool("editor.minimap.autohide"); - let mut max_width_columns: Option = None; - vscode.u32_setting("editor.minimap.maxColumn", &mut max_width_columns); - if minimap_enabled { - if let Some(false) = autohide { - minimap.show = Some(ShowMinimap::Always); - } else { - minimap.show = Some(ShowMinimap::Auto); - } - } else { - minimap.show = Some(ShowMinimap::Never); - } - if let Some(max_width_columns) = max_width_columns { - minimap.max_width_columns = NonZeroU32::new(max_width_columns); - } - - vscode.enum_setting( - "editor.minimap.showSlider", - &mut minimap.thumb, - |s| match s { - "always" => Some(MinimapThumb::Always), - "mouseover" => Some(MinimapThumb::Hover), - _ => None, - }, - ); - - if minimap != settings::MinimapContent::default() { - current.editor.minimap = Some(minimap) - } - } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 8f7dac889d13b6ff1d80557811da555b1b7216a3..a319ad654d016204dbad748d0aa169dee545a44f 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -27,11 +27,11 @@ use language::{ LanguageConfigOverride, LanguageMatcher, LanguageName, Override, Point, language_settings::{ CompletionSettingsContent, FormatterList, LanguageSettingsContent, LspInsertMode, - SelectedFormatter, }, tree_sitter_python, }; use language_settings::Formatter; +use languages::rust_lang; use lsp::CompletionParams; use multi_buffer::{IndentGuide, PathKey}; use parking_lot::Mutex; @@ -51,7 +51,7 @@ use std::{ iter, sync::atomic::{self, AtomicUsize}, }; -use test::{build_editor_with_project, editor_lsp_test_context::rust_lang}; +use test::build_editor_with_project; use text::ToPoint as _; use unindent::Unindent; use util::{ @@ -63,7 +63,7 @@ use util::{ 
use workspace::{ CloseActiveItem, CloseAllItems, CloseOtherItems, MoveItemToPaneInDirection, NavigationEntry, OpenOptions, ViewId, - invalid_buffer_view::InvalidBufferView, + invalid_item_view::InvalidItemView, item::{FollowEvent, FollowableItem, Item, ItemHandle, SaveOptions}, register_project_item, }; @@ -220,7 +220,10 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { editor.insert("cd", window, cx); editor.end_transaction_at(now, cx); assert_eq!(editor.text(cx), "12cd56"); - assert_eq!(editor.selections.ranges(cx), vec![4..4]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![4..4] + ); editor.start_transaction_at(now, window, cx); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -229,7 +232,10 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { editor.insert("e", window, cx); editor.end_transaction_at(now, cx); assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![5..5]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![5..5] + ); now += group_interval + Duration::from_millis(1); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -245,30 +251,45 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) { }); assert_eq!(editor.text(cx), "ab2cde6"); - assert_eq!(editor.selections.ranges(cx), vec![3..3]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![3..3] + ); // Last transaction happened past the group interval in a different editor. // Undo it individually and don't restore selections. editor.undo(&Undo, window, cx); assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![2..2]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![2..2] + ); // First two transactions happened within the group interval in this editor. // Undo them together and restore selections. editor.undo(&Undo, window, cx); editor.undo(&Undo, window, cx); // Undo stack is empty here, so this is a no-op. assert_eq!(editor.text(cx), "123456"); - assert_eq!(editor.selections.ranges(cx), vec![0..0]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![0..0] + ); // Redo the first two transactions together. editor.redo(&Redo, window, cx); assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selections.ranges(cx), vec![5..5]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![5..5] + ); // Redo the last transaction on its own. editor.redo(&Redo, window, cx); assert_eq!(editor.text(cx), "ab2cde6"); - assert_eq!(editor.selections.ranges(cx), vec![6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + vec![6..6] + ); // Test empty transactions. 
editor.start_transaction_at(now, window, cx); @@ -619,6 +640,93 @@ fn test_movement_actions_with_pending_selection(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_extending_selection(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple("aaa bbb ccc ddd eee", cx); + build_editor(buffer, window, cx) + }); + + _ = editor.update(cx, |editor, window, cx| { + editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), false, 1, window, cx); + editor.end_selection(window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5)] + ); + + editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx); + editor.end_selection(window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10)] + ); + + editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx); + editor.end_selection(window, cx); + editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 2, window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 11)] + ); + + editor.update_selection( + DisplayPoint::new(DisplayRow(0), 1), + 0, + gpui::Point::::default(), + window, + cx, + ); + editor.end_selection(window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 1, window, cx); + editor.end_selection(window, cx); + editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 2, window, cx); + editor.end_selection(window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)] + ); + + editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 11)] + ); + + editor.update_selection( + DisplayPoint::new(DisplayRow(0), 6), + 0, + gpui::Point::::default(), + window, + cx, + ); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)] + ); + + editor.update_selection( + DisplayPoint::new(DisplayRow(0), 1), + 0, + gpui::Point::::default(), + window, + cx, + ); + editor.end_selection(window, cx); + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(DisplayRow(0), 7)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); +} + #[gpui::test] fn test_clone(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -684,10 +792,14 @@ fn test_clone(cx: &mut TestAppContext) { ); assert_set_eq!( cloned_editor - .update(cx, |editor, _, cx| editor.selections.ranges::(cx)) + .update(cx, |editor, _, cx| editor + .selections + .ranges::(&editor.display_snapshot(cx))) .unwrap(), editor - .update(cx, |editor, _, cx| editor.selections.ranges(cx)) + .update(cx, |editor, _, cx| editor + .selections + .ranges(&editor.display_snapshot(cx))) .unwrap() ); assert_set_eq!( @@ -3075,7 +3187,7 @@ fn test_newline_with_old_selections(cx: &mut TestAppContext) { ); }); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), &[ Point::new(1, 2)..Point::new(1, 2), Point::new(2, 2)..Point::new(2, 2), @@ 
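Most of the test updates in this file follow one mechanical change: selection queries such as `ranges`, `all`, and `newest` now take an explicit `&DisplaySnapshot` (obtained via `editor.display_snapshot(cx)`) instead of deriving it from the context internally. A toy model of why that shape helps, resolving anchors against whichever snapshot the caller names; every type below is an illustrative stand-in, not the editor's real API.

struct Snapshot {
    text: String,
}

struct Anchor {
    offset: usize,
}

struct Selections {
    anchors: Vec<(Anchor, Anchor)>,
}

impl Selections {
    /// Resolve anchor pairs into concrete offset ranges against the given snapshot.
    /// Taking `&Snapshot` makes it explicit which version of the text is being read.
    fn ranges(&self, snapshot: &Snapshot) -> Vec<std::ops::Range<usize>> {
        self.anchors
            .iter()
            .map(|(start, end)| {
                start.offset.min(snapshot.text.len())..end.offset.min(snapshot.text.len())
            })
            .collect()
    }
}

fn main() {
    let snapshot = Snapshot { text: "hello world".to_string() };
    let selections = Selections {
        anchors: vec![(Anchor { offset: 0 }, Anchor { offset: 5 })],
    };
    assert_eq!(selections.ranges(&snapshot), vec![0..5]);
}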
-3097,7 +3209,7 @@ fn test_newline_with_old_selections(cx: &mut TestAppContext) { // The selections are moved after the inserted newlines assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), &[ Point::new(2, 0)..Point::new(2, 0), Point::new(4, 0)..Point::new(4, 0), @@ -3587,13 +3699,19 @@ fn test_insert_with_old_selections(cx: &mut TestAppContext) { buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx); assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent()); }); - assert_eq!(editor.selections.ranges(cx), &[2..2, 7..7, 12..12],); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + &[2..2, 7..7, 12..12], + ); editor.insert("Z", window, cx); assert_eq!(editor.text(cx), "a(Z), b(Z), c(Z)"); // The selections are moved after the inserted characters - assert_eq!(editor.selections.ranges(cx), &[3..3, 9..9, 15..15],); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + &[3..3, 9..9, 15..15], + ); }); } @@ -4300,8 +4418,8 @@ fn test_delete_line(cx: &mut TestAppContext) { assert_eq!( editor.selections.display_ranges(cx), vec![ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1), - DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3), ] ); }); @@ -4323,6 +4441,24 @@ fn test_delete_line(cx: &mut TestAppContext) { vec![DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1)] ); }); + + let editor = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n\njkl\nmno", cx); + build_editor(buffer, window, cx) + }); + _ = editor.update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(2), 1) + ]) + }); + editor.delete_line(&DeleteLine, window, cx); + assert_eq!(editor.display_text(cx), "\njkl\nmno"); + assert_eq!( + editor.selections.display_ranges(cx), + vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); } #[gpui::test] @@ -4335,7 +4471,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { let buffer = buffer.read(cx).as_singleton().unwrap(); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), &[Point::new(0, 0)..Point::new(0, 0)] ); @@ -4343,7 +4481,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.join_lines(&JoinLines, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), &[Point::new(0, 3)..Point::new(0, 3)] ); @@ -4354,7 +4494,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.join_lines(&JoinLines, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb ccc ddd\n\n"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), &[Point::new(0, 11)..Point::new(0, 11)] ); @@ -4362,7 +4504,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.undo(&Undo, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), &[Point::new(0, 5)..Point::new(2, 2)] ); @@ -4373,7 +4517,9 @@ fn 
test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.join_lines(&JoinLines, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [Point::new(2, 3)..Point::new(2, 3)] ); @@ -4381,7 +4527,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.join_lines(&JoinLines, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [Point::new(2, 3)..Point::new(2, 3)] ); @@ -4389,7 +4537,9 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { editor.join_lines(&JoinLines, window, cx); assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [Point::new(2, 3)..Point::new(2, 3)] ); @@ -4446,7 +4596,9 @@ fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) { assert_eq!(buffer.read(cx).text(), "aaa bbb ccc\nddd\n"); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [ Point::new(0, 7)..Point::new(0, 7), Point::new(1, 3)..Point::new(1, 3) @@ -5494,8 +5646,8 @@ fn test_duplicate_line(cx: &mut TestAppContext) { ); }); - // With `move_upwards` the selections stay in place, except for - // the lines inserted above them + // With `duplicate_line_up` the selections move to the duplicated lines, + // which are inserted above the original lines let editor = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); build_editor(buffer, window, cx) @@ -5517,7 +5669,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) { DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1), DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2), DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 0), - DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(6), 0), + DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0), ] ); }); @@ -5804,15 +5956,24 @@ fn test_transpose(cx: &mut TestAppContext) { }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bac"); - assert_eq!(editor.selections.ranges(cx), [2..2]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [2..2] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bca"); - assert_eq!(editor.selections.ranges(cx), [3..3]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [3..3] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bac"); - assert_eq!(editor.selections.ranges(cx), [3..3]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [3..3] + ); editor }); @@ -5825,22 +5986,34 @@ fn test_transpose(cx: &mut TestAppContext) { }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acb\nde"); - assert_eq!(editor.selections.ranges(cx), [3..3]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [3..3] + ); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([4..4]) }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbd\ne"); - assert_eq!(editor.selections.ranges(cx), [5..5]); + assert_eq!( + 
editor.selections.ranges(&editor.display_snapshot(cx)), + [5..5] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbde\n"); - assert_eq!(editor.selections.ranges(cx), [6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [6..6] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "acbd\ne"); - assert_eq!(editor.selections.ranges(cx), [6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [6..6] + ); editor }); @@ -5853,23 +6026,38 @@ fn test_transpose(cx: &mut TestAppContext) { }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bacd\ne"); - assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [2..2, 3..3, 5..5] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcade\n"); - assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [3..3, 4..4, 6..6] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcda\ne"); - assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [4..4, 6..6] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcade\n"); - assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [4..4, 6..6] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "bcaed\n"); - assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [5..5, 6..6] + ); editor }); @@ -5882,15 +6070,24 @@ fn test_transpose(cx: &mut TestAppContext) { }); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "🏀🍐✋"); - assert_eq!(editor.selections.ranges(cx), [8..8]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [8..8] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "🏀✋🍐"); - assert_eq!(editor.selections.ranges(cx), [11..11]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [11..11] + ); editor.transpose(&Default::default(), window, cx); assert_eq!(editor.text(cx), "🏀🍐✋"); - assert_eq!(editor.selections.ranges(cx), [11..11]); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + [11..11] + ); editor }); @@ -9436,7 +9633,7 @@ async fn test_autoindent(cx: &mut TestAppContext) { editor.newline(&Newline, window, cx); assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n"); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), &[ Point::new(1, 4)..Point::new(1, 4), Point::new(3, 4)..Point::new(3, 4), @@ -9512,7 +9709,7 @@ async fn test_autoindent_disabled(cx: &mut TestAppContext) { ) ); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), &[ Point::new(1, 0)..Point::new(1, 0), Point::new(3, 0)..Point::new(3, 0), @@ -10151,7 +10348,9 @@ async fn test_autoclose_with_embedded_language(cx: &mut TestAppContext) { // Precondition: different languages are active at different locations. 
cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let cursors = editor.selections.ranges::(cx); + let cursors = editor + .selections + .ranges::(&editor.display_snapshot(cx)); let languages = cursors .iter() .map(|c| snapshot.language_at(c.start).unwrap().name()) @@ -10596,7 +10795,9 @@ async fn test_delete_autoclose_pair(cx: &mut TestAppContext) { .unindent() ); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [ Point::new(0, 4)..Point::new(0, 4), Point::new(1, 4)..Point::new(1, 4), @@ -10616,7 +10817,9 @@ async fn test_delete_autoclose_pair(cx: &mut TestAppContext) { .unindent() ); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [ Point::new(0, 2)..Point::new(0, 2), Point::new(1, 2)..Point::new(1, 2), @@ -10635,7 +10838,9 @@ async fn test_delete_autoclose_pair(cx: &mut TestAppContext) { .unindent() ); assert_eq!( - editor.selections.ranges::(cx), + editor + .selections + .ranges::(&editor.display_snapshot(cx)), [ Point::new(0, 1)..Point::new(0, 1), Point::new(1, 1)..Point::new(1, 1), @@ -10841,7 +11046,12 @@ async fn test_snippet_placeholder_choices(cx: &mut TestAppContext) { fn assert(editor: &mut Editor, cx: &mut Context, marked_text: &str) { let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false); assert_eq!(editor.text(cx), expected_text); - assert_eq!(editor.selections.ranges::(cx), selection_ranges); + assert_eq!( + editor + .selections + .ranges::(&editor.display_snapshot(cx)), + selection_ranges + ); } assert( @@ -10872,7 +11082,7 @@ async fn test_snippets(cx: &mut TestAppContext) { let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap(); let insertion_ranges = editor .selections - .all(cx) + .all(&editor.display_snapshot(cx)) .iter() .map(|s| s.range()) .collect::>(); @@ -10952,7 +11162,7 @@ async fn test_snippet_indentation(cx: &mut TestAppContext) { .unwrap(); let insertion_ranges = editor .selections - .all(cx) + .all(&editor.display_snapshot(cx)) .iter() .map(|s| s.range()) .collect::>(); @@ -11803,8 +12013,8 @@ async fn test_range_format_respects_language_tab_size_override(cx: &mut TestAppC #[gpui::test] async fn test_document_format_manual_trigger(cx: &mut TestAppContext) { init_test(cx, |settings| { - settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single( - Formatter::LanguageServer { name: None }, + settings.defaults.formatter = Some(FormatterList::Single(Formatter::LanguageServer( + settings::LanguageServerFormatterSpecifier::Current, ))) }); @@ -11929,11 +12139,11 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) { async fn test_multiple_formatters(cx: &mut TestAppContext) { init_test(cx, |settings| { settings.defaults.remove_trailing_whitespace_on_save = Some(true); - settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![ - Formatter::LanguageServer { name: None }, + settings.defaults.formatter = Some(FormatterList::Vec(vec![ + Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current), Formatter::CodeAction("code-action-1".into()), Formatter::CodeAction("code-action-2".into()), - ]))) + ])) }); let fs = FakeFs::new(cx.executor()); @@ -12188,9 +12398,9 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) { #[gpui::test] async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) { init_test(cx, |settings| { - settings.defaults.formatter = 
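The formatter-settings updates above drop the `SelectedFormatter::List(..)` wrapper and use `FormatterList` directly, with `FormatterList::default()` taking over from the old `SelectedFormatter::Auto`. A rough sketch of the shape those call sites imply; the variant set and especially the `Default` impl are assumptions, not the real `settings` types.

#[derive(Debug)]
enum LanguageServerFormatterSpecifier {
    Current,
    Named(String),
}

#[derive(Debug)]
enum Formatter {
    LanguageServer(LanguageServerFormatterSpecifier),
    Prettier,
    CodeAction(String),
}

#[derive(Debug)]
enum FormatterList {
    Single(Formatter),
    Vec(Vec<Formatter>),
}

impl Default for FormatterList {
    fn default() -> Self {
        // Assumption: the default restores the old "Auto" behavior; modeled here as
        // "ask the current language server", which may not match the real crate.
        FormatterList::Single(Formatter::LanguageServer(
            LanguageServerFormatterSpecifier::Current,
        ))
    }
}

fn main() {
    let single = FormatterList::Single(Formatter::Prettier);
    let chain = FormatterList::Vec(vec![
        Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current),
        Formatter::CodeAction("code-action-1".into()),
    ]);
    let named = FormatterList::Single(Formatter::LanguageServer(
        LanguageServerFormatterSpecifier::Named("rust-analyzer".into()),
    ));
    println!("{:?}\n{:?}\n{:?}\n{:?}", single, chain, named, FormatterList::default());
}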
Some(SelectedFormatter::List(FormatterList::Vec(vec![ - Formatter::LanguageServer { name: None }, - ]))) + settings.defaults.formatter = Some(FormatterList::Vec(vec![Formatter::LanguageServer( + settings::LanguageServerFormatterSpecifier::Current, + )])) }); let fs = FakeFs::new(cx.executor()); @@ -12393,7 +12603,7 @@ async fn test_concurrent_format_requests(cx: &mut TestAppContext) { #[gpui::test] async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { init_test(cx, |settings| { - settings.defaults.formatter = Some(SelectedFormatter::Auto) + settings.defaults.formatter = Some(FormatterList::default()) }); let mut cx = EditorLspTestContext::new_rust( @@ -12405,17 +12615,6 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { ) .await; - // Set up a buffer white some trailing whitespace and no trailing newline. - cx.set_state( - &[ - "one ", // - "twoˇ", // - "three ", // - "four", // - ] - .join("\n"), - ); - // Record which buffer changes have been sent to the language server let buffer_changes = Arc::new(Mutex::new(Vec::new())); cx.lsp @@ -12431,6 +12630,11 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { } }); + #[cfg(target_os = "windows")] + let line_ending = "\r\n"; + #[cfg(not(target_os = "windows"))] + let line_ending = "\n"; + // Handle formatting requests to the language server. cx.lsp .set_request_handler::({ @@ -12454,7 +12658,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { ), ( lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)), - "\n".into() + line_ending.into() ), ] ); @@ -12465,20 +12669,32 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) { lsp::Position::new(1, 0), lsp::Position::new(1, 0), ), - new_text: "\n".into(), + new_text: line_ending.into(), }, lsp::TextEdit { range: lsp::Range::new( lsp::Position::new(2, 0), lsp::Position::new(2, 0), ), - new_text: "\n".into(), + new_text: line_ending.into(), }, ])) } } }); + // Set up a buffer white some trailing whitespace and no trailing newline. + cx.set_state( + &[ + "one ", // + "twoˇ", // + "three ", // + "four", // + ] + .join("\n"), + ); + cx.run_until_parked(); + // Submit a format request. 
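The reordered formatting test above computes the expected newline per platform with `cfg`, because on Windows the language server sees CRLF line endings while the editor-side assertions still use `\n`. A standalone illustration of that split, including the LF normalization the later CRLF test relies on; the helper names are illustrative.

fn native_line_ending() -> &'static str {
    #[cfg(target_os = "windows")]
    let line_ending = "\r\n";
    #[cfg(not(target_os = "windows"))]
    let line_ending = "\n";
    line_ending
}

/// Normalize CRLF sequences to LF for editor-internal text APIs.
fn normalize_to_lf(text: &str) -> String {
    text.replace("\r\n", "\n")
}

fn main() {
    let on_disk = "fn main() {\r\n    let a = 5;\r\n}";
    let in_editor = normalize_to_lf(on_disk);
    assert!(!in_editor.contains('\r'));
    assert_eq!(
        native_line_ending().len(),
        if cfg!(target_os = "windows") { 2 } else { 1 }
    );
}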
let format = cx .update_editor(|editor, window, cx| editor.format(&Format, window, cx)) @@ -14774,12 +14990,7 @@ async fn test_multiline_completion(cx: &mut TestAppContext) { } else { item.label.clone() }; - let len = text.len(); - Some(language::CodeLabel { - text, - runs: Vec::new(), - filter_range: 0..len, - }) + Some(language::CodeLabel::plain(text, None)) })), ..FakeLspAdapter::default() }, @@ -15845,7 +16056,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { editor.handle_input("X", window, cx); assert_eq!(editor.text(cx), "Xaaaa\nXbbbb"); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [ Point::new(0, 1)..Point::new(0, 1), Point::new(1, 1)..Point::new(1, 1), @@ -15859,7 +16070,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { editor.backspace(&Default::default(), window, cx); assert_eq!(editor.text(cx), "Xa\nbbb"); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(1, 0)..Point::new(1, 0)] ); @@ -15869,7 +16080,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { editor.backspace(&Default::default(), window, cx); assert_eq!(editor.text(cx), "X\nbb"); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(0, 1)..Point::new(0, 1)] ); }); @@ -15927,7 +16138,10 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { false, ); assert_eq!(editor.text(cx), expected_text); - assert_eq!(editor.selections.ranges(cx), expected_selections); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + expected_selections + ); editor.newline(&Newline, window, cx); let (expected_text, expected_selections) = marked_text_ranges( @@ -15944,7 +16158,10 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { false, ); assert_eq!(editor.text(cx), expected_text); - assert_eq!(editor.selections.ranges(cx), expected_selections); + assert_eq!( + editor.selections.ranges(&editor.display_snapshot(cx)), + expected_selections + ); }); } @@ -15985,7 +16202,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { cx, ); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [ Point::new(1, 3)..Point::new(1, 3), Point::new(2, 1)..Point::new(2, 1), @@ -15998,7 +16215,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [ Point::new(1, 3)..Point::new(1, 3), Point::new(2, 1)..Point::new(2, 1), @@ -16012,7 +16229,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { _ = editor.update(cx, |editor, window, cx| { // Removing an excerpt causes the first selection to become degenerate. assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [ Point::new(0, 0)..Point::new(0, 0), Point::new(0, 1)..Point::new(0, 1) @@ -16023,7 +16240,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { // location. 
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [ Point::new(0, 1)..Point::new(0, 1), Point::new(0, 3)..Point::new(0, 3) @@ -16067,7 +16284,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { cx, ); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(1, 3)..Point::new(1, 3)] ); editor @@ -16078,14 +16295,14 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { }); _ = editor.update(cx, |editor, window, cx| { assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(0, 0)..Point::new(0, 0)] ); // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(0, 3)..Point::new(0, 3)] ); assert!(editor.selections.pending_anchor().is_some()); @@ -16335,7 +16552,10 @@ async fn test_following(cx: &mut TestAppContext) { .await .unwrap(); _ = follower.update(cx, |follower, _, cx| { - assert_eq!(follower.selections.ranges(cx), vec![1..1]); + assert_eq!( + follower.selections.ranges(&follower.display_snapshot(cx)), + vec![1..1] + ); }); assert!(*is_still_following.borrow()); assert_eq!(*follower_edit_event_count.borrow(), 0); @@ -16388,7 +16608,10 @@ async fn test_following(cx: &mut TestAppContext) { .unwrap(); _ = follower.update(cx, |follower, _, cx| { assert_eq!(follower.scroll_position(cx), gpui::Point::new(1.5, 0.0)); - assert_eq!(follower.selections.ranges(cx), vec![0..0]); + assert_eq!( + follower.selections.ranges(&follower.display_snapshot(cx)), + vec![0..0] + ); }); assert!(*is_still_following.borrow()); @@ -16412,7 +16635,10 @@ async fn test_following(cx: &mut TestAppContext) { .await .unwrap(); _ = follower.update(cx, |follower, _, cx| { - assert_eq!(follower.selections.ranges(cx), vec![0..0, 1..1]); + assert_eq!( + follower.selections.ranges(&follower.display_snapshot(cx)), + vec![0..0, 1..1] + ); }); assert!(*is_still_following.borrow()); @@ -16433,7 +16659,10 @@ async fn test_following(cx: &mut TestAppContext) { .await .unwrap(); _ = follower.update(cx, |follower, _, cx| { - assert_eq!(follower.selections.ranges(cx), vec![0..2]); + assert_eq!( + follower.selections.ranges(&follower.display_snapshot(cx)), + vec![0..2] + ); }); // Scrolling locally breaks the follow @@ -18082,9 +18311,7 @@ fn completion_menu_entries(menu: &CompletionsMenu) -> Vec { #[gpui::test] async fn test_document_format_with_prettier(cx: &mut TestAppContext) { init_test(cx, |settings| { - settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single( - Formatter::Prettier, - ))) + settings.defaults.formatter = Some(FormatterList::Single(Formatter::Prettier)) }); let fs = FakeFs::new(cx.executor()); @@ -18151,7 +18378,7 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) { ); update_test_language_settings(cx, |settings| { - settings.defaults.formatter = Some(SelectedFormatter::Auto) + settings.defaults.formatter = Some(FormatterList::default()) }); let format = editor.update_in(cx, |editor, window, cx| { editor.perform_format( @@ -22570,11 +22797,11 @@ fn add_log_breakpoint_at_cursor( .first() .and_then(|(anchor, bp)| 
bp.as_ref().map(|bp| (*anchor, bp.clone()))) .unwrap_or_else(|| { - let cursor_position: Point = editor.selections.newest(cx).head(); + let snapshot = editor.snapshot(window, cx); + let cursor_position: Point = + editor.selections.newest(&snapshot.display_snapshot).head(); - let breakpoint_position = editor - .snapshot(window, cx) - .display_snapshot + let breakpoint_position = snapshot .buffer_snapshot() .anchor_before(Point::new(cursor_position.row, 0)); @@ -23521,7 +23748,7 @@ println!("5"); assert_eq!( editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.range()) .collect::>(), @@ -23564,7 +23791,7 @@ println!("5"); assert_eq!( editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.range()) .collect::>(), @@ -23690,7 +23917,7 @@ println!("5"); assert_eq!( editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.range()) .collect::>(), @@ -23716,7 +23943,7 @@ println!("5"); assert_eq!( editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.range()) .collect::>(), @@ -25113,7 +25340,7 @@ fn assert_selection_ranges(marked_text: &str, editor: &mut Editor, cx: &mut Cont let (text, ranges) = marked_text_ranges(marked_text, true); assert_eq!(editor.text(cx), text); assert_eq!( - editor.selections.ranges(cx), + editor.selections.ranges(&editor.display_snapshot(cx)), ranges, "Assert selections are {}", marked_text @@ -25584,6 +25811,83 @@ async fn test_add_selection_after_moving_with_multiple_cursors(cx: &mut TestAppC ); } +#[gpui::test] +async fn test_add_selection_skip_soft_wrap_option(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state(indoc!( + r#"ˇThis is a very long line that will be wrapped when soft wrapping is enabled + Second line here"# + )); + + cx.update_editor(|editor, window, cx| { + // Enable soft wrapping with a narrow width to force soft wrapping and + // confirm that more than 2 rows are being displayed. 
+ editor.set_wrap_width(Some(100.0.into()), cx); + assert!(editor.display_text(cx).lines().count() > 2); + + editor.add_selection_below( + &AddSelectionBelow { + skip_soft_wrap: true, + }, + window, + cx, + ); + + assert_eq!( + editor.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(8), 0)..DisplayPoint::new(DisplayRow(8), 0), + ] + ); + + editor.add_selection_above( + &AddSelectionAbove { + skip_soft_wrap: true, + }, + window, + cx, + ); + + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + + editor.add_selection_below( + &AddSelectionBelow { + skip_soft_wrap: false, + }, + window, + cx, + ); + + assert_eq!( + editor.selections.display_ranges(cx), + &[ + DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0), + DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0), + ] + ); + + editor.add_selection_above( + &AddSelectionAbove { + skip_soft_wrap: false, + }, + window, + cx, + ); + + assert_eq!( + editor.selections.display_ranges(cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_document_colors(cx: &mut TestAppContext) { let expected_color = Rgba { @@ -25706,7 +26010,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { .set_request_handler::(move |_, _| async move { panic!("Should not be called"); }); - cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT); color_request_handle.next().await.unwrap(); cx.run_until_parked(); assert_eq!( @@ -25790,9 +26094,9 @@ async fn test_document_colors(cx: &mut TestAppContext) { color_request_handle.next().await.unwrap(); cx.run_until_parked(); assert_eq!( - 3, + 2, requests_made.load(atomic::Ordering::Acquire), - "Should query for colors once per save and once per formatting after save" + "Should query for colors once per save (deduplicated) and once per formatting after save" ); drop(editor); @@ -25813,7 +26117,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { .unwrap(); close.await.unwrap(); assert_eq!( - 3, + 2, requests_made.load(atomic::Ordering::Acquire), "After saving and closing all editors, no extra requests should be made" ); @@ -25833,7 +26137,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { }) }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT); cx.run_until_parked(); let editor = workspace .update(cx, |workspace, _, cx| { @@ -25844,9 +26148,9 @@ async fn test_document_colors(cx: &mut TestAppContext) { .expect("Should be an editor") }) .unwrap(); - color_request_handle.next().await.unwrap(); + assert_eq!( - 3, + 2, requests_made.load(atomic::Ordering::Acquire), "Cache should be reused on buffer close and reopen" ); @@ -25887,10 +26191,11 @@ async fn test_document_colors(cx: &mut TestAppContext) { }); save.await.unwrap(); + cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT); empty_color_request_handle.next().await.unwrap(); cx.run_until_parked(); assert_eq!( - 4, + 3, requests_made.load(atomic::Ordering::Acquire), "Should query for colors once per save only, as formatting was not requested" ); @@ -25952,7 +26257,7 @@ async fn test_non_utf_8_opens(cx: &mut TestAppContext) { assert_eq!( handle.to_any().entity_type(), - TypeId::of::() + TypeId::of::() ); } 
@@ -26357,6 +26662,83 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult )); } +#[gpui::test] +async fn test_non_linux_line_endings_registration(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let unix_newlines_file_text = "fn main() { + let a = 5; + }"; + let clrf_file_text = unix_newlines_file_text.lines().join("\r\n"); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/a"), + json!({ + "first.rs": &clrf_file_text, + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let registered_text = Arc::new(Mutex::new(Vec::new())); + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + color_provider: Some(lsp::ColorProviderCapability::Simple(true)), + ..lsp::ServerCapabilities::default() + }, + name: "rust-analyzer", + initializer: Some({ + let registered_text = registered_text.clone(); + Box::new(move |fake_server| { + fake_server.handle_notification::({ + let registered_text = registered_text.clone(); + move |params, _| { + registered_text.lock().push(params.text_document.text); + } + }); + }) + }), + ..FakeLspAdapter::default() + }, + ); + + let editor = workspace + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from(path!("/a/first.rs")), + OpenOptions::default(), + window, + cx, + ) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); + let _fake_language_server = fake_servers.next().await.unwrap(); + cx.executor().run_until_parked(); + + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + unix_newlines_file_text, + "Default text API returns \n-separated text", + ); + assert_eq!( + vec![clrf_file_text], + registered_text.lock().drain(..).collect::>(), + "Expected the language server to receive the exact same text from the FS", + ); +} + #[gpui::test] async fn test_race_in_multibuffer_save(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -26506,8 +26888,8 @@ fn test_duplicate_line_up_on_last_line_without_newline(cx: &mut TestAppContext) assert_eq!( editor.selections.display_ranges(cx), - vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)], - "Selection should remain on the original line" + vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)], + "Selection should move to the duplicated line" ); }) .unwrap(); @@ -26539,3 +26921,24 @@ async fn test_copy_line_without_trailing_newline(cx: &mut TestAppContext) { cx.assert_editor_state("line1\nline2\nˇ"); } + +#[gpui::test] +async fn test_end_of_editor_context(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state("line1\nline2ˇ"); + cx.update_editor(|e, window, cx| { + e.set_mode(EditorMode::SingleLine); + assert!(e.key_context(window, cx).contains("end_of_input")); + }); + cx.set_state("ˇline1\nline2"); + cx.update_editor(|e, window, cx| { + assert!(!e.key_context(window, cx).contains("end_of_input")); + }); + cx.set_state("line1ˇ\nline2"); + cx.update_editor(|e, window, cx| { + assert!(!e.key_context(window, cx).contains("end_of_input")); + }); +} diff --git a/crates/editor/src/element.rs 
b/crates/editor/src/element.rs index 92fd96d54e2ca2d1c352433ce81da99cd78878cd..41a9809bfa75f091c1c03d924ffebf117d4fd2d7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -458,7 +458,6 @@ impl EditorElement { register_action(editor, window, Editor::toggle_code_actions); register_action(editor, window, Editor::open_excerpts); register_action(editor, window, Editor::open_excerpts_in_split); - register_action(editor, window, Editor::open_proposed_changes_editor); register_action(editor, window, Editor::toggle_soft_wrap); register_action(editor, window, Editor::toggle_tab_bar); register_action(editor, window, Editor::toggle_line_numbers); @@ -493,6 +492,7 @@ impl EditorElement { register_action(editor, window, Editor::stage_and_next); register_action(editor, window, Editor::unstage_and_next); register_action(editor, window, Editor::expand_all_diff_hunks); + register_action(editor, window, Editor::collapse_all_diff_hunks); register_action(editor, window, Editor::go_to_previous_change); register_action(editor, window, Editor::go_to_next_change); @@ -651,7 +651,6 @@ impl EditorElement { fn mouse_left_down( editor: &mut Editor, event: &MouseDownEvent, - hovered_hunk: Option>, position_map: &PositionMap, line_numbers: &HashMap, window: &mut Window, @@ -667,7 +666,20 @@ impl EditorElement { let mut click_count = event.click_count; let mut modifiers = event.modifiers; - if let Some(hovered_hunk) = hovered_hunk { + if let Some(hovered_hunk) = + position_map + .display_hunks + .iter() + .find_map(|(hunk, hunk_hitbox)| match hunk { + DisplayDiffHunk::Folded { .. } => None, + DisplayDiffHunk::Unfolded { + multi_buffer_range, .. + } => hunk_hitbox + .as_ref() + .is_some_and(|hitbox| hitbox.is_hovered(window)) + .then(|| multi_buffer_range.clone()), + }) + { editor.toggle_single_diff_hunk(hovered_hunk, cx); cx.notify(); return; @@ -681,6 +693,7 @@ impl EditorElement { .drag_and_drop_selection .enabled && click_count == 1 + && !modifiers.shift { let newest_anchor = editor.selections.newest_anchor(); let snapshot = editor.snapshot(window, cx); @@ -739,6 +752,35 @@ impl EditorElement { } } + if !is_singleton { + let display_row = (ScrollPixelOffset::from( + (event.position - gutter_hitbox.bounds.origin).y / position_map.line_height, + ) + position_map.scroll_position.y) as u32; + let multi_buffer_row = position_map + .snapshot + .display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right) + .row; + if line_numbers + .get(&MultiBufferRow(multi_buffer_row)) + .and_then(|line_number| line_number.hitbox.as_ref()) + .is_some_and(|hitbox| hitbox.contains(&event.position)) + { + let line_offset_from_top = display_row - position_map.scroll_position.y as u32; + + editor.open_excerpts_common( + Some(JumpData::MultiBufferRow { + row: MultiBufferRow(multi_buffer_row), + line_offset_from_top, + }), + modifiers.alt, + window, + cx, + ); + cx.stop_propagation(); + return; + } + } + let position = point_for_position.previous_valid; if let Some(mode) = Editor::columnar_selection_mode(&modifiers, cx) { editor.select( @@ -776,34 +818,6 @@ impl EditorElement { ); } cx.stop_propagation(); - - if !is_singleton { - let display_row = (ScrollPixelOffset::from( - (event.position - gutter_hitbox.bounds.origin).y / position_map.line_height, - ) + position_map.scroll_position.y) as u32; - let multi_buffer_row = position_map - .snapshot - .display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right) - .row; - if line_numbers - .get(&MultiBufferRow(multi_buffer_row)) 
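The element change above folds the hovered-hunk lookup into `mouse_left_down` itself: a `find_map` over the laid-out hunks that returns the multibuffer range of the first unfolded hunk whose hitbox is currently hovered. A simplified, self-contained version of that hit test; the hunk and hitbox types are stand-ins for the real layout types.

use std::ops::Range;

struct Hitbox {
    hovered: bool,
}

enum DisplayDiffHunk {
    Folded,
    Unfolded {
        multi_buffer_range: Range<usize>,
        hitbox: Option<Hitbox>,
    },
}

/// Return the range of the first unfolded hunk whose gutter hitbox is hovered.
fn hovered_hunk_range(hunks: &[DisplayDiffHunk]) -> Option<Range<usize>> {
    hunks.iter().find_map(|hunk| match hunk {
        DisplayDiffHunk::Folded => None,
        DisplayDiffHunk::Unfolded {
            multi_buffer_range,
            hitbox,
        } => hitbox
            .as_ref()
            .is_some_and(|hitbox| hitbox.hovered)
            .then(|| multi_buffer_range.clone()),
    })
}

fn main() {
    let hunks = [
        DisplayDiffHunk::Folded,
        DisplayDiffHunk::Unfolded { multi_buffer_range: 10..20, hitbox: Some(Hitbox { hovered: false }) },
        DisplayDiffHunk::Unfolded { multi_buffer_range: 30..40, hitbox: None },
        DisplayDiffHunk::Unfolded { multi_buffer_range: 50..60, hitbox: Some(Hitbox { hovered: true }) },
    ];
    assert_eq!(hovered_hunk_range(&hunks), Some(50..60));
}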
- .and_then(|line_number| line_number.hitbox.as_ref()) - .is_some_and(|hitbox| hitbox.contains(&event.position)) - { - let line_offset_from_top = display_row - position_map.scroll_position.y as u32; - - editor.open_excerpts_common( - Some(JumpData::MultiBufferRow { - row: MultiBufferRow(multi_buffer_row), - line_offset_from_top, - }), - modifiers.alt, - window, - cx, - ); - cx.stop_propagation(); - } - } } fn mouse_right_down( @@ -1068,7 +1082,10 @@ impl EditorElement { ref mouse_down_time, } => { let drag_and_drop_delay = Duration::from_millis( - EditorSettings::get_global(cx).drag_and_drop_selection.delay, + EditorSettings::get_global(cx) + .drag_and_drop_selection + .delay + .0, ); if mouse_down_time.elapsed() >= drag_and_drop_delay { let drop_cursor = Selection { @@ -1189,10 +1206,10 @@ impl EditorElement { if mouse_over_inline_blame || mouse_over_popover { editor.show_blame_popover(*buffer_id, blame_entry, event.position, false, cx); } else if !keyboard_grace { - editor.hide_blame_popover(cx); + editor.hide_blame_popover(false, cx); } } else { - editor.hide_blame_popover(cx); + editor.hide_blame_popover(false, cx); } let breakpoint_indicator = if gutter_hovered { @@ -1375,7 +1392,7 @@ impl EditorElement { editor_with_selections.update(cx, |editor, cx| { if editor.show_local_selections { let mut layouts = Vec::new(); - let newest = editor.selections.newest(cx); + let newest = editor.selections.newest(&editor.display_snapshot(cx)); for selection in local_selections.iter().cloned() { let is_empty = selection.start == selection.end; let is_newest = selection == newest; @@ -1404,7 +1421,11 @@ impl EditorElement { layouts.push(layout); } - let player = editor.current_user_player_color(cx); + let mut player = editor.current_user_player_color(cx); + if !editor.is_focused(window) { + const UNFOCUS_EDITOR_SELECTION_OPACITY: f32 = 0.5; + player.selection = player.selection.opacity(UNFOCUS_EDITOR_SELECTION_OPACITY); + } selections.push((player, layouts)); if let SelectionDragState::Dragging { @@ -3193,7 +3214,9 @@ impl EditorElement { let (newest_selection_head, is_relative) = self.editor.update(cx, |editor, cx| { let newest_selection_head = newest_selection_head.unwrap_or_else(|| { - let newest = editor.selections.newest::(cx); + let newest = editor + .selections + .newest::(&editor.display_snapshot(cx)); SelectionLayout::new( newest, editor.selections.line_mode(), @@ -3804,13 +3827,7 @@ impl EditorElement { let multi_buffer = editor.buffer.read(cx); let file_status = multi_buffer .all_diff_hunks_expanded() - .then(|| { - editor - .project - .as_ref()? 
- .read(cx) - .status_for_buffer_id(for_excerpt.buffer_id, cx) - }) + .then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx)) .flatten(); let indicator = multi_buffer .buffer(for_excerpt.buffer_id) @@ -3890,7 +3907,7 @@ impl EditorElement { .children(toggle_chevron_icon) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::with_meta_in( "Toggle Excerpt Fold", Some(&ToggleFold), @@ -3903,7 +3920,6 @@ impl EditorElement { ) ), &focus_handle, - window, cx, ) } @@ -4004,15 +4020,11 @@ impl EditorElement { .id("jump-to-file-button") .gap_2p5() .child(Label::new("Jump To File")) - .children( - KeyBinding::for_action_in( - &OpenExcerpts, - &focus_handle, - window, - cx, - ) - .map(|binding| binding.into_any_element()), - ), + .child(KeyBinding::for_action_in( + &OpenExcerpts, + &focus_handle, + cx, + )), ) }, ) @@ -6168,7 +6180,10 @@ impl EditorElement { } = &editor.selection_drag_state { let drag_and_drop_delay = Duration::from_millis( - EditorSettings::get_global(cx).drag_and_drop_selection.delay, + EditorSettings::get_global(cx) + .drag_and_drop_selection + .delay + .0, ); if mouse_down_time.elapsed() >= drag_and_drop_delay { window.set_cursor_style( @@ -7245,26 +7260,6 @@ impl EditorElement { window.on_mouse_event({ let position_map = layout.position_map.clone(); let editor = self.editor.clone(); - let diff_hunk_range = - layout - .display_hunks - .iter() - .find_map(|(hunk, hunk_hitbox)| match hunk { - DisplayDiffHunk::Folded { .. } => None, - DisplayDiffHunk::Unfolded { - multi_buffer_range, .. - } => { - if hunk_hitbox - .as_ref() - .map(|hitbox| hitbox.is_hovered(window)) - .unwrap_or(false) - { - Some(multi_buffer_range.clone()) - } else { - None - } - } - }); let line_numbers = layout.line_numbers.clone(); move |event: &MouseDownEvent, phase, window, cx| { @@ -7281,7 +7276,6 @@ impl EditorElement { Self::mouse_left_down( editor, event, - diff_hunk_range.clone(), &position_map, line_numbers.as_ref(), window, @@ -7455,8 +7449,8 @@ impl EditorElement { } let clipped_start = range.start.max(&buffer_range.start, buffer); let clipped_end = range.end.min(&buffer_range.end, buffer); - let range = buffer_snapshot.anchor_in_excerpt(excerpt_id, clipped_start)? 
- ..buffer_snapshot.anchor_in_excerpt(excerpt_id, clipped_end)?; + let range = buffer_snapshot + .anchor_range_in_excerpt(excerpt_id, *clipped_start..*clipped_end)?; let start = range.start.to_display_point(display_snapshot); let end = range.end.to_display_point(display_snapshot); let selection_layout = SelectionLayout { @@ -8784,7 +8778,8 @@ impl Element for EditorElement { .editor_with_selections(cx) .map(|editor| { editor.update(cx, |editor, cx| { - let all_selections = editor.selections.all::(cx); + let all_selections = + editor.selections.all::(&snapshot.display_snapshot); let selected_buffer_ids = if editor.buffer_kind(cx) == ItemBufferKind::Singleton { Vec::new() @@ -8806,10 +8801,12 @@ impl Element for EditorElement { selected_buffer_ids }; - let mut selections = editor - .selections - .disjoint_in_range(start_anchor..end_anchor, cx); - selections.extend(editor.selections.pending(cx)); + let mut selections = editor.selections.disjoint_in_range( + start_anchor..end_anchor, + &snapshot.display_snapshot, + ); + selections + .extend(editor.selections.pending(&snapshot.display_snapshot)); (selections, selected_buffer_ids) }) @@ -8919,10 +8916,20 @@ impl Element for EditorElement { cx, ); + let merged_highlighted_ranges = + if let Some((_, colors)) = document_colors.as_ref() { + &highlighted_ranges + .clone() + .into_iter() + .chain(colors.clone()) + .collect() + } else { + &highlighted_ranges + }; let bg_segments_per_row = Self::bg_segments_per_row( start_row..end_row, &selections, - &highlighted_ranges, + &merged_highlighted_ranges, self.style.background, ); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 836b61d56674f070abc13dbf6c67981c78818ff6..3d83e3a5cce937b92255810003a6ff951bb84d95 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -16,7 +16,7 @@ use markdown::Markdown; use multi_buffer::{MultiBuffer, RowInfo}; use project::{ Project, ProjectItem as _, - git_store::{GitStoreEvent, Repository, RepositoryEvent}, + git_store::{GitStoreEvent, Repository}, }; use smallvec::SmallVec; use std::{sync::Arc, time::Duration}; @@ -235,8 +235,8 @@ impl GitBlame { let git_store = project.read(cx).git_store().clone(); let git_store_subscription = cx.subscribe(&git_store, move |this, _, event, cx| match event { - GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated { .. }, _) - | GitStoreEvent::RepositoryAdded(_) + GitStoreEvent::RepositoryUpdated(_, _, _) + | GitStoreEvent::RepositoryAdded | GitStoreEvent::RepositoryRemoved(_) => { log::debug!("Status of git repositories updated. 
Regenerating blame data...",); this.generate(cx); diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index d2073633dd149f9536838732f25ee89aa630a57c..f36c82b20277fc748620928e6d7fc49a2b20cd3e 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -1,19 +1,14 @@ use crate::{ Anchor, Editor, EditorSettings, EditorSnapshot, FindAllReferences, GoToDefinition, - GoToDefinitionSplit, GoToTypeDefinition, GoToTypeDefinitionSplit, GotoDefinitionKind, InlayId, - Navigated, PointForPosition, SelectPhase, - editor_settings::GoToDefinitionFallback, - hover_popover::{self, InlayHover}, + GoToDefinitionSplit, GoToTypeDefinition, GoToTypeDefinitionSplit, GotoDefinitionKind, + Navigated, PointForPosition, SelectPhase, editor_settings::GoToDefinitionFallback, scroll::ScrollAmount, }; use gpui::{App, AsyncWindowContext, Context, Entity, Modifiers, Task, Window, px}; use language::{Bias, ToOffset}; use linkify::{LinkFinder, LinkKind}; use lsp::LanguageServerId; -use project::{ - HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink, Project, - ResolveState, ResolvedPath, -}; +use project::{InlayId, LocationLink, Project, ResolvedPath}; use settings::Settings; use std::ops::Range; use theme::ActiveTheme as _; @@ -138,10 +133,9 @@ impl Editor { show_link_definition(modifiers.shift, self, trigger_point, snapshot, window, cx); } None => { - update_inlay_link_and_hover_points( + self.update_inlay_link_and_hover_points( snapshot, point_for_position, - self, hovered_link_modifier, modifiers.shift, window, @@ -283,183 +277,6 @@ impl Editor { } } -pub fn update_inlay_link_and_hover_points( - snapshot: &EditorSnapshot, - point_for_position: PointForPosition, - editor: &mut Editor, - secondary_held: bool, - shift_held: bool, - window: &mut Window, - cx: &mut Context, -) { - let hovered_offset = if point_for_position.column_overshoot_after_line_end == 0 { - Some(snapshot.display_point_to_inlay_offset(point_for_position.exact_unclipped, Bias::Left)) - } else { - None - }; - let mut go_to_definition_updated = false; - let mut hover_updated = false; - if let Some(hovered_offset) = hovered_offset { - let buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let previous_valid_anchor = - buffer_snapshot.anchor_before(point_for_position.previous_valid.to_point(snapshot)); - let next_valid_anchor = - buffer_snapshot.anchor_after(point_for_position.next_valid.to_point(snapshot)); - if let Some(hovered_hint) = editor - .visible_inlay_hints(cx) - .into_iter() - .skip_while(|hint| { - hint.position - .cmp(&previous_valid_anchor, &buffer_snapshot) - .is_lt() - }) - .take_while(|hint| { - hint.position - .cmp(&next_valid_anchor, &buffer_snapshot) - .is_le() - }) - .max_by_key(|hint| hint.id) - { - let inlay_hint_cache = editor.inlay_hint_cache(); - let excerpt_id = previous_valid_anchor.excerpt_id; - if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) { - match cached_hint.resolve_state { - ResolveState::CanResolve(_, _) => { - if let Some(buffer_id) = snapshot - .buffer_snapshot() - .buffer_id_for_anchor(previous_valid_anchor) - { - inlay_hint_cache.spawn_hint_resolve( - buffer_id, - excerpt_id, - hovered_hint.id, - window, - cx, - ); - } - } - ResolveState::Resolved => { - let mut extra_shift_left = 0; - let mut extra_shift_right = 0; - if cached_hint.padding_left { - extra_shift_left += 1; - extra_shift_right += 1; - } - if cached_hint.padding_right { - extra_shift_right += 1; - } - match 
cached_hint.label { - project::InlayHintLabel::String(_) => { - if let Some(tooltip) = cached_hint.tooltip { - hover_popover::hover_at_inlay( - editor, - InlayHover { - tooltip: match tooltip { - InlayHintTooltip::String(text) => HoverBlock { - text, - kind: HoverBlockKind::PlainText, - }, - InlayHintTooltip::MarkupContent(content) => { - HoverBlock { - text: content.value, - kind: content.kind, - } - } - }, - range: InlayHighlight { - inlay: hovered_hint.id, - inlay_position: hovered_hint.position, - range: extra_shift_left - ..hovered_hint.text().len() + extra_shift_right, - }, - }, - window, - cx, - ); - hover_updated = true; - } - } - project::InlayHintLabel::LabelParts(label_parts) => { - let hint_start = - snapshot.anchor_to_inlay_offset(hovered_hint.position); - if let Some((hovered_hint_part, part_range)) = - hover_popover::find_hovered_hint_part( - label_parts, - hint_start, - hovered_offset, - ) - { - let highlight_start = - (part_range.start - hint_start).0 + extra_shift_left; - let highlight_end = - (part_range.end - hint_start).0 + extra_shift_right; - let highlight = InlayHighlight { - inlay: hovered_hint.id, - inlay_position: hovered_hint.position, - range: highlight_start..highlight_end, - }; - if let Some(tooltip) = hovered_hint_part.tooltip { - hover_popover::hover_at_inlay( - editor, - InlayHover { - tooltip: match tooltip { - InlayHintLabelPartTooltip::String(text) => { - HoverBlock { - text, - kind: HoverBlockKind::PlainText, - } - } - InlayHintLabelPartTooltip::MarkupContent( - content, - ) => HoverBlock { - text: content.value, - kind: content.kind, - }, - }, - range: highlight.clone(), - }, - window, - cx, - ); - hover_updated = true; - } - if let Some((language_server_id, location)) = - hovered_hint_part.location - && secondary_held - && !editor.has_pending_nonempty_selection() - { - go_to_definition_updated = true; - show_link_definition( - shift_held, - editor, - TriggerPoint::InlayHint( - highlight, - location, - language_server_id, - ), - snapshot, - window, - cx, - ); - } - } - } - }; - } - ResolveState::Resolving => {} - } - } - } - } - - if !go_to_definition_updated { - editor.hide_hovered_link(cx) - } - if !hover_updated { - hover_popover::hover_at(editor, None, window, cx); - } -} - pub fn show_link_definition( shift_held: bool, editor: &mut Editor, @@ -494,22 +311,15 @@ pub fn show_link_definition( } let trigger_anchor = trigger_point.anchor(); - let Some((buffer, buffer_position)) = editor - .buffer - .read(cx) - .text_anchor_for_position(*trigger_anchor, cx) - else { + let anchor = snapshot.buffer_snapshot().anchor_before(*trigger_anchor); + let Some(buffer) = editor.buffer().read(cx).buffer_for_anchor(anchor, cx) else { return; }; - - let Some((excerpt_id, _, _)) = editor - .buffer() - .read(cx) - .excerpt_containing(*trigger_anchor, cx) - else { - return; - }; - + let Anchor { + excerpt_id, + text_anchor, + .. 
+ } = anchor; let same_kind = hovered_link_state.preferred_kind == preferred_kind || hovered_link_state .links @@ -539,44 +349,40 @@ pub fn show_link_definition( async move { let result = match &trigger_point { TriggerPoint::Text(_) => { - if let Some((url_range, url)) = find_url(&buffer, buffer_position, cx.clone()) { + if let Some((url_range, url)) = find_url(&buffer, text_anchor, cx.clone()) { this.read_with(cx, |_, _| { let range = maybe!({ - let start = - snapshot.anchor_in_excerpt(excerpt_id, url_range.start)?; - let end = snapshot.anchor_in_excerpt(excerpt_id, url_range.end)?; - Some(RangeInEditor::Text(start..end)) + let range = + snapshot.anchor_range_in_excerpt(excerpt_id, url_range)?; + Some(RangeInEditor::Text(range)) }); (range, vec![HoverLink::Url(url)]) }) .ok() } else if let Some((filename_range, filename)) = - find_file(&buffer, project.clone(), buffer_position, cx).await + find_file(&buffer, project.clone(), text_anchor, cx).await { let range = maybe!({ - let start = - snapshot.anchor_in_excerpt(excerpt_id, filename_range.start)?; - let end = snapshot.anchor_in_excerpt(excerpt_id, filename_range.end)?; - Some(RangeInEditor::Text(start..end)) + let range = + snapshot.anchor_range_in_excerpt(excerpt_id, filename_range)?; + Some(RangeInEditor::Text(range)) }); Some((range, vec![HoverLink::File(filename)])) } else if let Some(provider) = provider { let task = cx.update(|_, cx| { - provider.definitions(&buffer, buffer_position, preferred_kind, cx) + provider.definitions(&buffer, text_anchor, preferred_kind, cx) })?; if let Some(task) = task { task.await.ok().flatten().map(|definition_result| { ( definition_result.iter().find_map(|link| { link.origin.as_ref().and_then(|origin| { - let start = snapshot.anchor_in_excerpt( + let range = snapshot.anchor_range_in_excerpt( excerpt_id, - origin.range.start, + origin.range.clone(), )?; - let end = snapshot - .anchor_in_excerpt(excerpt_id, origin.range.end)?; - Some(RangeInEditor::Text(start..end)) + Some(RangeInEditor::Text(range)) }) }), definition_result.into_iter().map(HoverLink::Text).collect(), @@ -924,7 +730,7 @@ mod tests { DisplayPoint, display_map::ToDisplayPoint, editor_tests::init_test, - inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels}, + inlays::inlay_hints::tests::{cached_hint_labels, visible_hint_labels}, test::editor_lsp_test_context::EditorLspTestContext, }; use futures::StreamExt; @@ -1355,7 +1161,7 @@ mod tests { cx.background_executor.run_until_parked(); cx.update_editor(|editor, _window, cx| { let expected_layers = vec![hint_label.to_string()]; - assert_eq!(expected_layers, cached_hint_labels(editor)); + assert_eq!(expected_layers, cached_hint_labels(editor, cx)); assert_eq!(expected_layers, visible_hint_labels(editor, cx)); }); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 863ce297be9149f62bb7e880658e8a084968fb81..19213638f417d20cd54868305ea9e39d57363fca 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -154,7 +154,7 @@ pub fn hover_at_inlay( hide_hover(editor, cx); } - let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; + let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; let task = cx.spawn_in(window, async move |this, cx| { async move { @@ -275,7 +275,7 @@ fn show_hover( return None; } - let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay; + let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; let 
all_diagnostics_active = editor.active_diagnostics == ActiveDiagnostic::All; let active_group_id = if let ActiveDiagnostic::Group(group) = &editor.active_diagnostics { Some(group.group_id) @@ -467,13 +467,10 @@ fn show_hover( let range = hover_result .range .and_then(|range| { - let start = snapshot + let range = snapshot .buffer_snapshot() - .anchor_in_excerpt(excerpt_id, range.start)?; - let end = snapshot - .buffer_snapshot() - .anchor_in_excerpt(excerpt_id, range.end)?; - Some(start..end) + .anchor_range_in_excerpt(excerpt_id, range)?; + Some(range) }) .or_else(|| { let snapshot = &snapshot.buffer_snapshot(); @@ -989,17 +986,17 @@ impl DiagnosticPopover { mod tests { use super::*; use crate::{ - InlayId, PointForPosition, + PointForPosition, actions::ConfirmCompletion, editor_tests::{handle_completion_request, init_test}, - hover_links::update_inlay_link_and_hover_points, - inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels}, + inlays::inlay_hints::tests::{cached_hint_labels, visible_hint_labels}, test::editor_lsp_test_context::EditorLspTestContext, }; use collections::BTreeSet; use gpui::App; use indoc::indoc; use markdown::parser::MarkdownEvent; + use project::InlayId; use settings::InlayHintSettingsContent; use smol::stream::StreamExt; use std::sync::atomic; @@ -1007,7 +1004,7 @@ mod tests { use text::Bias; fn get_hover_popover_delay(cx: &gpui::TestAppContext) -> u64 { - cx.read(|cx: &App| -> u64 { EditorSettings::get_global(cx).hover_popover_delay }) + cx.read(|cx: &App| -> u64 { EditorSettings::get_global(cx).hover_popover_delay.0 }) } impl InfoPopover { @@ -1651,7 +1648,7 @@ mod tests { cx.background_executor.run_until_parked(); cx.update_editor(|editor, _, cx| { let expected_layers = vec![entire_hint_label.to_string()]; - assert_eq!(expected_layers, cached_hint_labels(editor)); + assert_eq!(expected_layers, cached_hint_labels(editor, cx)); assert_eq!(expected_layers, visible_hint_labels(editor, cx)); }); @@ -1690,10 +1687,9 @@ mod tests { } }); cx.update_editor(|editor, window, cx| { - update_inlay_link_and_hover_points( + editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), new_type_hint_part_hover_position, - editor, true, false, window, @@ -1761,10 +1757,9 @@ mod tests { cx.background_executor.run_until_parked(); cx.update_editor(|editor, window, cx| { - update_inlay_link_and_hover_points( + editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), new_type_hint_part_hover_position, - editor, true, false, window, @@ -1816,10 +1811,9 @@ mod tests { } }); cx.update_editor(|editor, window, cx| { - update_inlay_link_and_hover_points( + editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), struct_hint_part_hover_position, - editor, true, false, window, diff --git a/crates/editor/src/indent_guides.rs b/crates/editor/src/indent_guides.rs index 22b57bd80579c61405cf46b5e84d1fa128a38ffb..7c392d27531472a413ce4d32d09cce4eb722e462 100644 --- a/crates/editor/src/indent_guides.rs +++ b/crates/editor/src/indent_guides.rs @@ -69,7 +69,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Option> { - let selection = self.selections.newest::(cx); + let selection = self.selections.newest::(&self.display_snapshot(cx)); let cursor_row = MultiBufferRow(selection.head().row); let state = &mut self.active_indent_guides_state; diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs new file mode 100644 index 0000000000000000000000000000000000000000..f07bf0b315161f0ce9cdf3ef7e2f6db6d60abfb5 --- /dev/null +++ 
b/crates/editor/src/inlays.rs @@ -0,0 +1,193 @@ +//! The logic, responsible for managing [`Inlay`]s in the editor. +//! +//! Inlays are "not real" text that gets mixed into the "real" buffer's text. +//! They are attached to a certain [`Anchor`], and display certain contents (usually, strings) +//! between real text around that anchor. +//! +//! Inlay examples in Zed: +//! * inlay hints, received from LSP +//! * inline values, shown in the debugger +//! * inline predictions, showing the Zeta/Copilot/etc. predictions +//! * document color values, if configured to be displayed as inlays +//! * ... anything else, potentially. +//! +//! Editor uses [`crate::DisplayMap`] and [`crate::display_map::InlayMap`] to manage what's rendered inside the editor, using +//! [`InlaySplice`] to update this state. + +/// Logic, related to managing LSP inlay hint inlays. +pub mod inlay_hints; + +use std::{any::TypeId, sync::OnceLock}; + +use gpui::{Context, HighlightStyle, Hsla, Rgba, Task}; +use multi_buffer::Anchor; +use project::{InlayHint, InlayId}; +use text::Rope; + +use crate::{Editor, hover_links::InlayHighlight}; + +/// A splice to send into the `inlay_map` for updating the visible inlays on the screen. +/// "Visible" inlays may not be displayed in the buffer right away, but those are ready to be displayed on further buffer scroll, pane item activations, etc. right away without additional LSP queries or settings changes. +/// The data in the cache is never used directly for displaying inlays on the screen, to avoid races with updates from LSP queries and sync overhead. +/// Splice is picked to help avoid extra hint flickering and "jumps" on the screen. +#[derive(Debug, Default)] +pub struct InlaySplice { + pub to_remove: Vec, + pub to_insert: Vec, +} + +impl InlaySplice { + pub fn is_empty(&self) -> bool { + self.to_remove.is_empty() && self.to_insert.is_empty() + } +} + +#[derive(Debug, Clone)] +pub struct Inlay { + pub id: InlayId, + pub position: Anchor, + pub content: InlayContent, +} + +#[derive(Debug, Clone)] +pub enum InlayContent { + Text(text::Rope), + Color(Hsla), +} + +impl Inlay { + pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self { + let mut text = hint.text(); + if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { + text.push(" "); + } + if hint.padding_left && text.chars_at(0).next() != Some(' ') { + text.push_front(" "); + } + Self { + id, + position, + content: InlayContent::Text(text), + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn mock_hint(id: usize, position: Anchor, text: impl Into) -> Self { + Self { + id: InlayId::Hint(id), + position, + content: InlayContent::Text(text.into()), + } + } + + pub fn color(id: usize, position: Anchor, color: Rgba) -> Self { + Self { + id: InlayId::Color(id), + position, + content: InlayContent::Color(color.into()), + } + } + + pub fn edit_prediction>(id: usize, position: Anchor, text: T) -> Self { + Self { + id: InlayId::EditPrediction(id), + position, + content: InlayContent::Text(text.into()), + } + } + + pub fn debugger>(id: usize, position: Anchor, text: T) -> Self { + Self { + id: InlayId::DebuggerValue(id), + position, + content: InlayContent::Text(text.into()), + } + } + + pub fn text(&self) -> &Rope { + static COLOR_TEXT: OnceLock = OnceLock::new(); + match &self.content { + InlayContent::Text(text) => text, + InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("◼")), + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn get_color(&self) -> Option { + 
match self.content { + InlayContent::Color(color) => Some(color), + _ => None, + } + } +} + +pub struct InlineValueCache { + pub enabled: bool, + pub inlays: Vec, + pub refresh_task: Task>, +} + +impl InlineValueCache { + pub fn new(enabled: bool) -> Self { + Self { + enabled, + inlays: Vec::new(), + refresh_task: Task::ready(None), + } + } +} + +impl Editor { + /// Modify which hints are displayed in the editor. + pub fn splice_inlays( + &mut self, + to_remove: &[InlayId], + to_insert: Vec, + cx: &mut Context, + ) { + if let Some(inlay_hints) = &mut self.inlay_hints { + for id_to_remove in to_remove { + inlay_hints.added_hints.remove(id_to_remove); + } + } + self.display_map.update(cx, |display_map, cx| { + display_map.splice_inlays(to_remove, to_insert, cx) + }); + cx.notify(); + } + + pub(crate) fn highlight_inlays( + &mut self, + highlights: Vec, + style: HighlightStyle, + cx: &mut Context, + ) { + self.display_map.update(cx, |map, _| { + map.highlight_inlays(TypeId::of::(), highlights, style) + }); + cx.notify(); + } + + pub fn inline_values_enabled(&self) -> bool { + self.inline_value_cache.enabled + } + + #[cfg(any(test, feature = "test-support"))] + pub fn inline_value_inlays(&self, cx: &gpui::App) -> Vec { + self.display_map + .read(cx) + .current_inlays() + .filter(|inlay| matches!(inlay.id, InlayId::DebuggerValue(_))) + .cloned() + .collect() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn all_inlays(&self, cx: &gpui::App) -> Vec { + self.display_map + .read(cx) + .current_inlays() + .cloned() + .collect() + } +} diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlays/inlay_hints.rs similarity index 58% rename from crates/editor/src/inlay_hint_cache.rs rename to crates/editor/src/inlays/inlay_hints.rs index 9a1e07ba3946d0f2b05e2096201287334dd02534..9a9be1d1591e5b9f5303a0706ce0ded5afba3f83 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -1,295 +1,116 @@ -/// Stores and updates all data received from LSP textDocument/inlayHint requests. -/// Has nothing to do with other inlays, e.g. copilot suggestions — those are stored elsewhere. -/// On every update, cache may query for more inlay hints and update inlays on the screen. -/// -/// Inlays stored on screen are in [`crate::display_map::inlay_map`] and this cache is the only way to update any inlay hint data in the visible hints in the inlay map. -/// For determining the update to the `inlay_map`, the cache requires a list of visible inlay hints — all other hints are not relevant and their separate updates are not influencing the cache work. -/// -/// Due to the way the data is stored for both visible inlays and the cache, every inlay (and inlay hint) collection is editor-specific, so a single buffer may have multiple sets of inlays of open on different panes. 
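A minimal sketch of how the `InlaySplice` / `Editor::splice_inlays` pair introduced in the new `crates/editor/src/inlays.rs` above is meant to be driven. The helper name `replace_color_inlays` and its parameters are illustrative assumptions; only `InlaySplice`, `Inlay::color`, and `splice_inlays` come from this diff.

use gpui::{Context, Rgba};
use multi_buffer::Anchor;
use project::InlayId;

use crate::{
    Editor,
    inlays::{Inlay, InlaySplice},
};

// Hypothetical helper: swap the currently shown document-color inlays for a new set.
fn replace_color_inlays(
    editor: &mut Editor,
    stale_ids: Vec<InlayId>,
    new_colors: Vec<(usize, Anchor, Rgba)>,
    cx: &mut Context<Editor>,
) {
    let splice = InlaySplice {
        to_remove: stale_ids,
        to_insert: new_colors
            .into_iter()
            .map(|(id, position, color)| Inlay::color(id, position, color))
            .collect(),
    };
    // Skip the display-map update entirely when there is nothing to change.
    if splice.is_empty() {
        return;
    }
    let InlaySplice { to_remove, to_insert } = splice;
    editor.splice_inlays(&to_remove, to_insert, cx);
}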
use std::{ - cmp, + collections::hash_map, ops::{ControlFlow, Range}, sync::Arc, time::Duration, }; -use crate::{ - Anchor, Editor, ExcerptId, InlayId, MultiBuffer, MultiBufferSnapshot, display_map::Inlay, -}; -use anyhow::Context as _; use clock::Global; -use futures::future; -use gpui::{AppContext as _, AsyncApp, Context, Entity, Task, Window}; +use collections::{HashMap, HashSet}; +use futures::future::join_all; +use gpui::{App, Entity, Task}; use language::{ - Buffer, BufferSnapshot, - language_settings::{InlayHintKind, InlayHintSettings}, + BufferRow, + language_settings::{InlayHintKind, InlayHintSettings, language_settings}, +}; +use lsp::LanguageServerId; +use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot}; +use parking_lot::Mutex; +use project::{ + HoverBlock, HoverBlockKind, InlayHintLabel, InlayHintLabelPartTooltip, InlayHintTooltip, + InvalidationStrategy, ResolveState, + lsp_store::{CacheInlayHints, ResolvedHint}, }; -use parking_lot::RwLock; -use project::{InlayHint, ResolveState}; +use text::{Bias, BufferId}; +use ui::{Context, Window}; +use util::debug_panic; -use collections::{HashMap, HashSet, hash_map}; -use smol::lock::Semaphore; -use sum_tree::Bias; -use text::{BufferId, ToOffset, ToPoint}; -use util::{ResultExt, post_inc}; +use super::{Inlay, InlayId}; +use crate::{ + Editor, EditorSnapshot, PointForPosition, ToggleInlayHints, ToggleInlineValues, debounce_value, + hover_links::{InlayHighlight, TriggerPoint, show_link_definition}, + hover_popover::{self, InlayHover}, + inlays::InlaySplice, +}; -pub struct InlayHintCache { - hints: HashMap>>, - allowed_hint_kinds: HashSet>, - version: usize, - pub(super) enabled: bool, +pub fn inlay_hint_settings( + location: Anchor, + snapshot: &MultiBufferSnapshot, + cx: &mut Context, +) -> InlayHintSettings { + let file = snapshot.file_at(location); + let language = snapshot.language_at(location).map(|l| l.name()); + language_settings(language, file, cx).inlay_hints +} + +#[derive(Debug)] +pub struct LspInlayHintData { + enabled: bool, modifiers_override: bool, enabled_in_settings: bool, - update_tasks: HashMap, - refresh_task: Task<()>, + allowed_hint_kinds: HashSet>, invalidate_debounce: Option, append_debounce: Option, - lsp_request_limiter: Arc, -} - -#[derive(Debug)] -struct TasksForRanges { - tasks: Vec>, - sorted_ranges: Vec>, + hint_refresh_tasks: HashMap>, Vec>>>, + hint_chunk_fetched: HashMap>)>, + pub added_hints: HashMap>, } -#[derive(Debug)] -struct CachedExcerptHints { - version: usize, - buffer_version: Global, - buffer_id: BufferId, - ordered_hints: Vec, - hints_by_id: HashMap, -} - -/// A logic to apply when querying for new inlay hints and deciding what to do with the old entries in the cache in case of conflicts. -#[derive(Debug, Clone, Copy)] -pub(super) enum InvalidationStrategy { - /// Hints reset is requested by the LSP server. - /// Demands to re-query all inlay hints needed and invalidate all cached entries, but does not require instant update with invalidation. - /// - /// Despite nothing forbids language server from sending this request on every edit, it is expected to be sent only when certain internal server state update, invisible for the editor otherwise. - RefreshRequested, - /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited at least on one place. 
- /// Neither editor nor LSP is able to tell which open file hints' are not affected, so all of them have to be invalidated, re-queried and do that fast enough to avoid being slow, but also debounce to avoid loading hints on every fast keystroke sequence. - BufferEdited, - /// A new file got opened/new excerpt was added to a multibuffer/a [multi]buffer was scrolled to a new position. - /// No invalidation should be done at all, all new hints are added to the cache. - /// - /// A special case is the settings change: in addition to LSP capabilities, Zed allows omitting certain hint kinds (defined by the corresponding LSP part: type/parameter/other). - /// This does not lead to cache invalidation, but would require cache usage for determining which hints are not displayed and issuing an update to inlays on the screen. - None, -} - -/// A splice to send into the `inlay_map` for updating the visible inlays on the screen. -/// "Visible" inlays may not be displayed in the buffer right away, but those are ready to be displayed on further buffer scroll, pane item activations, etc. right away without additional LSP queries or settings changes. -/// The data in the cache is never used directly for displaying inlays on the screen, to avoid races with updates from LSP queries and sync overhead. -/// Splice is picked to help avoid extra hint flickering and "jumps" on the screen. -#[derive(Debug, Default)] -pub(super) struct InlaySplice { - pub to_remove: Vec, - pub to_insert: Vec, -} - -#[derive(Debug)] -struct ExcerptHintsUpdate { - excerpt_id: ExcerptId, - remove_from_visible: HashSet, - remove_from_cache: HashSet, - add_to_cache: Vec, -} - -#[derive(Debug, Clone, Copy)] -struct ExcerptQuery { - buffer_id: BufferId, - excerpt_id: ExcerptId, - cache_version: usize, - invalidate: InvalidationStrategy, - reason: &'static str, -} - -impl InvalidationStrategy { - fn should_invalidate(&self) -> bool { - matches!( - self, - InvalidationStrategy::RefreshRequested | InvalidationStrategy::BufferEdited - ) - } -} - -impl TasksForRanges { - fn new(query_ranges: QueryRanges, task: Task<()>) -> Self { +impl LspInlayHintData { + pub fn new(settings: InlayHintSettings) -> Self { Self { - tasks: vec![task], - sorted_ranges: query_ranges.into_sorted_query_ranges(), + modifiers_override: false, + enabled: settings.enabled, + enabled_in_settings: settings.enabled, + hint_refresh_tasks: HashMap::default(), + added_hints: HashMap::default(), + hint_chunk_fetched: HashMap::default(), + invalidate_debounce: debounce_value(settings.edit_debounce_ms), + append_debounce: debounce_value(settings.scroll_debounce_ms), + allowed_hint_kinds: settings.enabled_inlay_hint_kinds(), } } - fn update_cached_tasks( - &mut self, - buffer_snapshot: &BufferSnapshot, - query_ranges: QueryRanges, - invalidate: InvalidationStrategy, - spawn_task: impl FnOnce(QueryRanges) -> Task<()>, - ) { - let query_ranges = if invalidate.should_invalidate() { - self.tasks.clear(); - self.sorted_ranges = query_ranges.clone().into_sorted_query_ranges(); - query_ranges + pub fn modifiers_override(&mut self, new_override: bool) -> Option { + if self.modifiers_override == new_override { + return None; + } + self.modifiers_override = new_override; + if (self.enabled && self.modifiers_override) || (!self.enabled && !self.modifiers_override) + { + self.clear(); + Some(false) } else { - let mut non_cached_query_ranges = query_ranges; - non_cached_query_ranges.before_visible = non_cached_query_ranges - .before_visible - .into_iter() - .flat_map(|query_range| { - 
self.remove_cached_ranges_from_query(buffer_snapshot, query_range) - }) - .collect(); - non_cached_query_ranges.visible = non_cached_query_ranges - .visible - .into_iter() - .flat_map(|query_range| { - self.remove_cached_ranges_from_query(buffer_snapshot, query_range) - }) - .collect(); - non_cached_query_ranges.after_visible = non_cached_query_ranges - .after_visible - .into_iter() - .flat_map(|query_range| { - self.remove_cached_ranges_from_query(buffer_snapshot, query_range) - }) - .collect(); - non_cached_query_ranges - }; - - if !query_ranges.is_empty() { - self.tasks.push(spawn_task(query_ranges)); + Some(true) } } - fn remove_cached_ranges_from_query( - &mut self, - buffer_snapshot: &BufferSnapshot, - query_range: Range, - ) -> Vec> { - let mut ranges_to_query = Vec::new(); - let mut latest_cached_range = None::<&mut Range>; - for cached_range in self - .sorted_ranges - .iter_mut() - .skip_while(|cached_range| { - cached_range - .end - .cmp(&query_range.start, buffer_snapshot) - .is_lt() - }) - .take_while(|cached_range| { - cached_range - .start - .cmp(&query_range.end, buffer_snapshot) - .is_le() - }) - { - match latest_cached_range { - Some(latest_cached_range) => { - if latest_cached_range.end.offset.saturating_add(1) < cached_range.start.offset - { - ranges_to_query.push(latest_cached_range.end..cached_range.start); - cached_range.start = latest_cached_range.end; - } - } - None => { - if query_range - .start - .cmp(&cached_range.start, buffer_snapshot) - .is_lt() - { - ranges_to_query.push(query_range.start..cached_range.start); - cached_range.start = query_range.start; - } - } - } - latest_cached_range = Some(cached_range); + pub fn toggle(&mut self, enabled: bool) -> bool { + if self.enabled == enabled { + return false; } - - match latest_cached_range { - Some(latest_cached_range) => { - if latest_cached_range.end.offset.saturating_add(1) < query_range.end.offset { - ranges_to_query.push(latest_cached_range.end..query_range.end); - latest_cached_range.end = query_range.end; - } - } - None => { - ranges_to_query.push(query_range.clone()); - self.sorted_ranges.push(query_range); - self.sorted_ranges - .sort_by(|range_a, range_b| range_a.start.cmp(&range_b.start, buffer_snapshot)); - } + self.enabled = enabled; + self.modifiers_override = false; + if !enabled { + self.clear(); } - - ranges_to_query + true } - fn invalidate_range(&mut self, buffer: &BufferSnapshot, range: &Range) { - self.sorted_ranges = self - .sorted_ranges - .drain(..) 
- .filter_map(|mut cached_range| { - if cached_range.start.cmp(&range.end, buffer).is_gt() - || cached_range.end.cmp(&range.start, buffer).is_lt() - { - Some(vec![cached_range]) - } else if cached_range.start.cmp(&range.start, buffer).is_ge() - && cached_range.end.cmp(&range.end, buffer).is_le() - { - None - } else if range.start.cmp(&cached_range.start, buffer).is_ge() - && range.end.cmp(&cached_range.end, buffer).is_le() - { - Some(vec![ - cached_range.start..range.start, - range.end..cached_range.end, - ]) - } else if cached_range.start.cmp(&range.start, buffer).is_ge() { - cached_range.start = range.end; - Some(vec![cached_range]) - } else { - cached_range.end = range.start; - Some(vec![cached_range]) - } - }) - .flatten() - .collect(); - } -} - -impl InlayHintCache { - pub(super) fn new(inlay_hint_settings: InlayHintSettings) -> Self { - Self { - allowed_hint_kinds: inlay_hint_settings.enabled_inlay_hint_kinds(), - enabled: inlay_hint_settings.enabled, - modifiers_override: false, - enabled_in_settings: inlay_hint_settings.enabled, - hints: HashMap::default(), - update_tasks: HashMap::default(), - refresh_task: Task::ready(()), - invalidate_debounce: debounce_value(inlay_hint_settings.edit_debounce_ms), - append_debounce: debounce_value(inlay_hint_settings.scroll_debounce_ms), - version: 0, - lsp_request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_LSP_REQUESTS)), - } + pub fn clear(&mut self) { + self.hint_refresh_tasks.clear(); + self.hint_chunk_fetched.clear(); + self.added_hints.clear(); } /// Checks inlay hint settings for enabled hint kinds and general enabled state. /// Generates corresponding inlay_map splice updates on settings changes. /// Does not update inlay hint cache state on disabling or inlay hint kinds change: only reenabling forces new LSP queries. - pub(super) fn update_settings( + fn update_settings( &mut self, - multi_buffer: &Entity, new_hint_settings: InlayHintSettings, visible_hints: Vec, - cx: &mut Context, - ) -> ControlFlow> { + ) -> ControlFlow, Option> { let old_enabled = self.enabled; // If the setting for inlay hints has changed, update `enabled`. This condition avoids inlay // hint visibility changes when other settings change (such as theme). 
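A hedged sketch of the toggle contract above: `LspInlayHintData::toggle` only reports whether the enabled state actually changed, and removing the inlays that are still on screen remains the caller's job. The helper name `hints_to_remove_on_toggle` and its parameters are illustrative assumptions, not part of the diff.

use project::InlayId;

use crate::inlays::inlay_hints::LspInlayHintData;

// Hypothetical caller-side helper: flip the enabled flag and report which of the
// currently visible hint inlays should be spliced out of the display map.
fn hints_to_remove_on_toggle(
    hints: &mut LspInlayHintData,
    visible_hint_ids: &[InlayId],
    enable: bool,
) -> Vec<InlayId> {
    // `toggle` returns false when the state is already `enable`; nothing to do then.
    if !hints.toggle(enable) {
        return Vec::new();
    }
    if enable {
        // Re-enabling does not restore inlays by itself; a refresh pass has to fetch
        // them again, so there is nothing to remove here.
        Vec::new()
    } else {
        // Disabling cleared the per-buffer fetch bookkeeping inside `toggle`; the
        // inlays still on screen must be removed by the caller.
        visible_hint_ids.to_vec()
    }
}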
@@ -314,23 +135,30 @@ impl InlayHintCache { if new_allowed_hint_kinds == self.allowed_hint_kinds { ControlFlow::Break(None) } else { - let new_splice = self.new_allowed_hint_kinds_splice( - multi_buffer, - &visible_hints, - &new_allowed_hint_kinds, - cx, - ); - if new_splice.is_some() { - self.version += 1; - self.allowed_hint_kinds = new_allowed_hint_kinds; - } - ControlFlow::Break(new_splice) + self.allowed_hint_kinds = new_allowed_hint_kinds; + ControlFlow::Continue( + Some(InlaySplice { + to_remove: visible_hints + .iter() + .filter_map(|inlay| { + let inlay_kind = self.added_hints.get(&inlay.id).copied()?; + if !self.allowed_hint_kinds.contains(&inlay_kind) { + Some(inlay.id) + } else { + None + } + }) + .collect(), + to_insert: Vec::new(), + }) + .filter(|splice| !splice.is_empty()), + ) } } (true, false) => { self.modifiers_override = false; self.allowed_hint_kinds = new_allowed_hint_kinds; - if self.hints.is_empty() { + if visible_hints.is_empty() { ControlFlow::Break(None) } else { self.clear(); @@ -343,978 +171,774 @@ impl InlayHintCache { (false, true) => { self.modifiers_override = false; self.allowed_hint_kinds = new_allowed_hint_kinds; - ControlFlow::Continue(()) + ControlFlow::Continue( + Some(InlaySplice { + to_remove: visible_hints + .iter() + .filter_map(|inlay| { + let inlay_kind = self.added_hints.get(&inlay.id).copied()?; + if !self.allowed_hint_kinds.contains(&inlay_kind) { + Some(inlay.id) + } else { + None + } + }) + .collect(), + to_insert: Vec::new(), + }) + .filter(|splice| !splice.is_empty()), + ) } } } - pub(super) fn modifiers_override(&mut self, new_override: bool) -> Option { - if self.modifiers_override == new_override { - return None; - } - self.modifiers_override = new_override; - if (self.enabled && self.modifiers_override) || (!self.enabled && !self.modifiers_override) - { - self.clear(); - Some(false) - } else { - Some(true) - } - } - - pub(super) fn toggle(&mut self, enabled: bool) -> bool { - if self.enabled == enabled { - return false; - } - self.enabled = enabled; - self.modifiers_override = false; - if !enabled { - self.clear(); + pub(crate) fn remove_inlay_chunk_data<'a>( + &'a mut self, + removed_buffer_ids: impl IntoIterator + 'a, + ) { + for buffer_id in removed_buffer_ids { + self.hint_refresh_tasks.remove(buffer_id); + self.hint_chunk_fetched.remove(buffer_id); } - true } +} - /// If needed, queries LSP for new inlay hints, using the invalidation strategy given. - /// To reduce inlay hint jumping, attempts to query a visible range of the editor(s) first, - /// followed by the delayed queries of the same range above and below the visible one. - /// This way, subsequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. 
- pub(super) fn spawn_hint_refresh( - &mut self, - reason_description: &'static str, - excerpts_to_query: HashMap, Global, Range)>, - invalidate: InvalidationStrategy, - ignore_debounce: bool, - cx: &mut Context, - ) -> Option { - if (self.enabled && self.modifiers_override) || (!self.enabled && !self.modifiers_override) - { - return None; - } - let mut invalidated_hints = Vec::new(); - if invalidate.should_invalidate() { - self.update_tasks - .retain(|task_excerpt_id, _| excerpts_to_query.contains_key(task_excerpt_id)); - self.hints.retain(|cached_excerpt, cached_hints| { - let retain = excerpts_to_query.contains_key(cached_excerpt); - if !retain { - invalidated_hints.extend(cached_hints.read().ordered_hints.iter().copied()); - } - retain - }); - } - if excerpts_to_query.is_empty() && invalidated_hints.is_empty() { - return None; - } +#[derive(Debug, Clone)] +pub enum InlayHintRefreshReason { + ModifiersChanged(bool), + Toggle(bool), + SettingsChange(InlayHintSettings), + NewLinesShown, + BufferEdited(BufferId), + RefreshRequested(LanguageServerId), + ExcerptsRemoved(Vec), +} - let cache_version = self.version + 1; - let debounce_duration = if ignore_debounce { - None - } else if invalidate.should_invalidate() { - self.invalidate_debounce - } else { - self.append_debounce +impl Editor { + pub fn supports_inlay_hints(&self, cx: &mut App) -> bool { + let Some(provider) = self.semantics_provider.as_ref() else { + return false; }; - self.refresh_task = cx.spawn(async move |editor, cx| { - if let Some(debounce_duration) = debounce_duration { - cx.background_executor().timer(debounce_duration).await; - } - editor - .update(cx, |editor, cx| { - spawn_new_update_tasks( - editor, - reason_description, - excerpts_to_query, - invalidate, - cache_version, - cx, - ) - }) - .ok(); - }); - - if invalidated_hints.is_empty() { - None - } else { - Some(InlaySplice { - to_remove: invalidated_hints, - to_insert: Vec::new(), - }) - } - } - - fn new_allowed_hint_kinds_splice( - &self, - multi_buffer: &Entity, - visible_hints: &[Inlay], - new_kinds: &HashSet>, - cx: &mut Context, - ) -> Option { - let old_kinds = &self.allowed_hint_kinds; - if new_kinds == old_kinds { - return None; - } - - let mut to_remove = Vec::new(); - let mut to_insert = Vec::new(); - let mut shown_hints_to_remove = visible_hints.iter().fold( - HashMap::>::default(), - |mut current_hints, inlay| { - current_hints - .entry(inlay.position.excerpt_id) - .or_default() - .push((inlay.position, inlay.id)); - current_hints - }, - ); - - let multi_buffer = multi_buffer.read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - - for (excerpt_id, excerpt_cached_hints) in &self.hints { - let shown_excerpt_hints_to_remove = - shown_hints_to_remove.entry(*excerpt_id).or_default(); - let excerpt_cached_hints = excerpt_cached_hints.read(); - let mut excerpt_cache = excerpt_cached_hints.ordered_hints.iter().fuse().peekable(); - shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| { - let Some(buffer) = multi_buffer.buffer_for_anchor(*shown_anchor, cx) else { - return false; - }; - let buffer_snapshot = buffer.read(cx).snapshot(); - loop { - match excerpt_cache.peek() { - Some(&cached_hint_id) => { - let cached_hint = &excerpt_cached_hints.hints_by_id[cached_hint_id]; - if cached_hint_id == shown_hint_id { - excerpt_cache.next(); - return !new_kinds.contains(&cached_hint.kind); - } - - match cached_hint - .position - .cmp(&shown_anchor.text_anchor, &buffer_snapshot) - { - cmp::Ordering::Less | cmp::Ordering::Equal => { - if 
!old_kinds.contains(&cached_hint.kind) - && new_kinds.contains(&cached_hint.kind) - && let Some(anchor) = multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, cached_hint.position) - { - to_insert.push(Inlay::hint( - cached_hint_id.id(), - anchor, - cached_hint, - )); - } - excerpt_cache.next(); - } - cmp::Ordering::Greater => return true, - } - } - None => return true, - } - } + let mut supports = false; + self.buffer().update(cx, |this, cx| { + this.for_each_buffer(|buffer| { + supports |= provider.supports_inlay_hints(buffer, cx); }); + }); - for cached_hint_id in excerpt_cache { - let maybe_missed_cached_hint = &excerpt_cached_hints.hints_by_id[cached_hint_id]; - let cached_hint_kind = maybe_missed_cached_hint.kind; - if !old_kinds.contains(&cached_hint_kind) - && new_kinds.contains(&cached_hint_kind) - && let Some(anchor) = multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, maybe_missed_cached_hint.position) - { - to_insert.push(Inlay::hint( - cached_hint_id.id(), - anchor, - maybe_missed_cached_hint, - )); - } - } - } - - to_remove.extend( - shown_hints_to_remove - .into_values() - .flatten() - .map(|(_, hint_id)| hint_id), - ); - if to_remove.is_empty() && to_insert.is_empty() { - None - } else { - Some(InlaySplice { - to_remove, - to_insert, - }) - } + supports } - /// Completely forget of certain excerpts that were removed from the multibuffer. - pub(super) fn remove_excerpts( + pub fn toggle_inline_values( &mut self, - excerpts_removed: &[ExcerptId], - ) -> Option { - let mut to_remove = Vec::new(); - for excerpt_to_remove in excerpts_removed { - self.update_tasks.remove(excerpt_to_remove); - if let Some(cached_hints) = self.hints.remove(excerpt_to_remove) { - let cached_hints = cached_hints.read(); - to_remove.extend(cached_hints.ordered_hints.iter().copied()); - } - } - if to_remove.is_empty() { - None - } else { - self.version += 1; - Some(InlaySplice { - to_remove, - to_insert: Vec::new(), - }) - } - } - - pub(super) fn clear(&mut self) { - if !self.update_tasks.is_empty() || !self.hints.is_empty() { - self.version += 1; - } - self.update_tasks.clear(); - self.refresh_task = Task::ready(()); - self.hints.clear(); - } - - pub(super) fn hint_by_id(&self, excerpt_id: ExcerptId, hint_id: InlayId) -> Option { - self.hints - .get(&excerpt_id)? - .read() - .hints_by_id - .get(&hint_id) - .cloned() - } + _: &ToggleInlineValues, + _: &mut Window, + cx: &mut Context, + ) { + self.inline_value_cache.enabled = !self.inline_value_cache.enabled; - pub fn hints(&self) -> Vec { - let mut hints = Vec::new(); - for excerpt_hints in self.hints.values() { - let excerpt_hints = excerpt_hints.read(); - hints.extend( - excerpt_hints - .ordered_hints - .iter() - .map(|id| &excerpt_hints.hints_by_id[id]) - .cloned(), - ); - } - hints + self.refresh_inline_values(cx); } - /// Queries a certain hint from the cache for extra data via the LSP resolve request. 
- pub(super) fn spawn_hint_resolve( - &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - id: InlayId, - window: &mut Window, - cx: &mut Context, + pub fn toggle_inlay_hints( + &mut self, + _: &ToggleInlayHints, + _: &mut Window, + cx: &mut Context, ) { - if let Some(excerpt_hints) = self.hints.get(&excerpt_id) { - let mut guard = excerpt_hints.write(); - if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) - && let ResolveState::CanResolve(server_id, _) = &cached_hint.resolve_state - { - let hint_to_resolve = cached_hint.clone(); - let server_id = *server_id; - cached_hint.resolve_state = ResolveState::Resolving; - drop(guard); - cx.spawn_in(window, async move |editor, cx| { - let resolved_hint_task = editor.update(cx, |editor, cx| { - let buffer = editor.buffer().read(cx).buffer(buffer_id)?; - editor.semantics_provider.as_ref()?.resolve_inlay_hint( - hint_to_resolve, - buffer, - server_id, - cx, - ) - })?; - if let Some(resolved_hint_task) = resolved_hint_task { - let mut resolved_hint = - resolved_hint_task.await.context("hint resolve task")?; - editor.read_with(cx, |editor, _| { - if let Some(excerpt_hints) = - editor.inlay_hint_cache.hints.get(&excerpt_id) - { - let mut guard = excerpt_hints.write(); - if let Some(cached_hint) = guard.hints_by_id.get_mut(&id) - && cached_hint.resolve_state == ResolveState::Resolving - { - resolved_hint.resolve_state = ResolveState::Resolved; - *cached_hint = resolved_hint; - } - } - })?; - } - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } - } + self.refresh_inlay_hints( + InlayHintRefreshReason::Toggle(!self.inlay_hints_enabled()), + cx, + ); } -} -fn debounce_value(debounce_ms: u64) -> Option { - if debounce_ms > 0 { - Some(Duration::from_millis(debounce_ms)) - } else { - None + pub fn inlay_hints_enabled(&self) -> bool { + self.inlay_hints.as_ref().is_some_and(|cache| cache.enabled) } -} -fn spawn_new_update_tasks( - editor: &mut Editor, - reason: &'static str, - excerpts_to_query: HashMap, Global, Range)>, - invalidate: InvalidationStrategy, - update_cache_version: usize, - cx: &mut Context, -) { - for (excerpt_id, (excerpt_buffer, new_task_buffer_version, excerpt_visible_range)) in - excerpts_to_query - { - if excerpt_visible_range.is_empty() { - continue; - } - let buffer = excerpt_buffer.read(cx); - let buffer_id = buffer.remote_id(); - let buffer_snapshot = buffer.snapshot(); - if buffer_snapshot - .version() - .changed_since(&new_task_buffer_version) - { - continue; + /// Updates inlay hints for the visible ranges of the singleton buffer(s). + /// Based on its parameters, either invalidates the previous data, or appends to it. 
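To make the refresh entry point documented just above concrete (its signature follows immediately below), a hedged sketch of how editor events might be mapped onto the new `InlayHintRefreshReason` variants. The event enum and function name are illustrative assumptions; only `InlayHintRefreshReason` and `refresh_inlay_hints` come from this diff.

use gpui::Context;
use lsp::LanguageServerId;
use text::BufferId;

use crate::{Editor, inlays::inlay_hints::InlayHintRefreshReason};

// Illustrative event type for this sketch only; not part of the diff.
enum HintEvent {
    Scrolled,
    Edited(BufferId),
    ServerRefreshRequested(LanguageServerId),
}

// Hypothetical dispatch from editor events to the refresh reasons added above.
fn refresh_for_event(editor: &mut Editor, event: HintEvent, cx: &mut Context<Editor>) {
    let reason = match event {
        // Scrolling revealed new rows: append hints without invalidating cached ones.
        HintEvent::Scrolled => InlayHintRefreshReason::NewLinesShown,
        // An edit invalidates hints for that buffer (and buffers sharing its language).
        HintEvent::Edited(buffer_id) => InlayHintRefreshReason::BufferEdited(buffer_id),
        // The language server asked for a refresh of its hints.
        HintEvent::ServerRefreshRequested(server_id) => {
            InlayHintRefreshReason::RefreshRequested(server_id)
        }
    };
    editor.refresh_inlay_hints(reason, cx);
}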
+ pub(crate) fn refresh_inlay_hints( + &mut self, + reason: InlayHintRefreshReason, + cx: &mut Context, + ) { + if !self.mode.is_full() || self.inlay_hints.is_none() { + return; } - - if let Some(cached_excerpt_hints) = editor.inlay_hint_cache.hints.get(&excerpt_id) { - let cached_excerpt_hints = cached_excerpt_hints.read(); - let cached_buffer_version = &cached_excerpt_hints.buffer_version; - if cached_excerpt_hints.version > update_cache_version - || cached_buffer_version.changed_since(&new_task_buffer_version) - { - continue; - } + let Some(semantics_provider) = self.semantics_provider() else { + return; }; - - let Some(query_ranges) = editor.buffer.update(cx, |multi_buffer, cx| { - determine_query_ranges( - multi_buffer, - excerpt_id, - &excerpt_buffer, - excerpt_visible_range, - cx, - ) - }) else { + let Some(invalidate_cache) = self.refresh_editor_data(&reason, cx) else { return; }; - let query = ExcerptQuery { - buffer_id, - excerpt_id, - cache_version: update_cache_version, - invalidate, - reason, + + let debounce = match &reason { + InlayHintRefreshReason::SettingsChange(_) + | InlayHintRefreshReason::Toggle(_) + | InlayHintRefreshReason::ExcerptsRemoved(_) + | InlayHintRefreshReason::ModifiersChanged(_) => None, + _may_need_lsp_call => self.inlay_hints.as_ref().and_then(|inlay_hints| { + if invalidate_cache.should_invalidate() { + inlay_hints.invalidate_debounce + } else { + inlay_hints.append_debounce + } + }), }; - let mut new_update_task = - |query_ranges| new_update_task(query, query_ranges, excerpt_buffer.clone(), cx); + let mut visible_excerpts = self.visible_excerpts(cx); + let mut all_affected_buffers = HashSet::default(); + let ignore_previous_fetches = match reason { + InlayHintRefreshReason::ModifiersChanged(_) + | InlayHintRefreshReason::Toggle(_) + | InlayHintRefreshReason::SettingsChange(_) => true, + InlayHintRefreshReason::NewLinesShown + | InlayHintRefreshReason::RefreshRequested(_) + | InlayHintRefreshReason::ExcerptsRemoved(_) => false, + InlayHintRefreshReason::BufferEdited(buffer_id) => { + let Some(affected_language) = self + .buffer() + .read(cx) + .buffer(buffer_id) + .and_then(|buffer| buffer.read(cx).language().cloned()) + else { + return; + }; - match editor.inlay_hint_cache.update_tasks.entry(excerpt_id) { - hash_map::Entry::Occupied(mut o) => { - o.get_mut().update_cached_tasks( - &buffer_snapshot, - query_ranges, - invalidate, - new_update_task, + all_affected_buffers.extend( + self.buffer() + .read(cx) + .all_buffers() + .into_iter() + .filter_map(|buffer| { + let buffer = buffer.read(cx); + if buffer.language() == Some(&affected_language) { + Some(buffer.remote_id()) + } else { + None + } + }), ); - } - hash_map::Entry::Vacant(v) => { - v.insert(TasksForRanges::new( - query_ranges.clone(), - new_update_task(query_ranges), - )); - } - } - } -} - -#[derive(Debug, Clone)] -struct QueryRanges { - before_visible: Vec>, - visible: Vec>, - after_visible: Vec>, -} -impl QueryRanges { - fn is_empty(&self) -> bool { - self.before_visible.is_empty() && self.visible.is_empty() && self.after_visible.is_empty() - } + semantics_provider.invalidate_inlay_hints(&all_affected_buffers, cx); + visible_excerpts.retain(|_, (visible_buffer, _, _)| { + visible_buffer.read(cx).language() == Some(&affected_language) + }); + false + } + }; - fn into_sorted_query_ranges(self) -> Vec> { - let mut sorted_ranges = Vec::with_capacity( - self.before_visible.len() + self.visible.len() + self.after_visible.len(), - ); - sorted_ranges.extend(self.before_visible); - 
sorted_ranges.extend(self.visible); - sorted_ranges.extend(self.after_visible); - sorted_ranges - } -} + let multi_buffer = self.buffer().clone(); + let Some(inlay_hints) = self.inlay_hints.as_mut() else { + return; + }; -fn determine_query_ranges( - multi_buffer: &mut MultiBuffer, - excerpt_id: ExcerptId, - excerpt_buffer: &Entity, - excerpt_visible_range: Range, - cx: &mut Context, -) -> Option { - let buffer = excerpt_buffer.read(cx); - let full_excerpt_range = multi_buffer - .excerpts_for_buffer(buffer.remote_id(), cx) - .into_iter() - .find(|(id, _)| id == &excerpt_id) - .map(|(_, range)| range.context)?; - let snapshot = buffer.snapshot(); - let excerpt_visible_len = excerpt_visible_range.end - excerpt_visible_range.start; - - let visible_range = if excerpt_visible_range.start == excerpt_visible_range.end { - return None; - } else { - vec![ - buffer.anchor_before(snapshot.clip_offset(excerpt_visible_range.start, Bias::Left)) - ..buffer.anchor_after(snapshot.clip_offset(excerpt_visible_range.end, Bias::Right)), - ] - }; - - let full_excerpt_range_end_offset = full_excerpt_range.end.to_offset(&snapshot); - let after_visible_range_start = excerpt_visible_range - .end - .saturating_add(1) - .min(full_excerpt_range_end_offset) - .min(buffer.len()); - let after_visible_range = if after_visible_range_start == full_excerpt_range_end_offset { - Vec::new() - } else { - let after_range_end_offset = after_visible_range_start - .saturating_add(excerpt_visible_len) - .min(full_excerpt_range_end_offset) - .min(buffer.len()); - vec![ - buffer.anchor_before(snapshot.clip_offset(after_visible_range_start, Bias::Left)) - ..buffer.anchor_after(snapshot.clip_offset(after_range_end_offset, Bias::Right)), - ] - }; - - let full_excerpt_range_start_offset = full_excerpt_range.start.to_offset(&snapshot); - let before_visible_range_end = excerpt_visible_range - .start - .saturating_sub(1) - .max(full_excerpt_range_start_offset); - let before_visible_range = if before_visible_range_end == full_excerpt_range_start_offset { - Vec::new() - } else { - let before_range_start_offset = before_visible_range_end - .saturating_sub(excerpt_visible_len) - .max(full_excerpt_range_start_offset); - vec![ - buffer.anchor_before(snapshot.clip_offset(before_range_start_offset, Bias::Left)) - ..buffer.anchor_after(snapshot.clip_offset(before_visible_range_end, Bias::Right)), - ] - }; - - Some(QueryRanges { - before_visible: before_visible_range, - visible: visible_range, - after_visible: after_visible_range, - }) -} + if invalidate_cache.should_invalidate() { + inlay_hints.clear(); + } -const MAX_CONCURRENT_LSP_REQUESTS: usize = 5; -const INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS: u64 = 400; + let mut buffers_to_query = HashMap::default(); + for (excerpt_id, (buffer, buffer_version, visible_range)) in visible_excerpts { + let buffer_id = buffer.read(cx).remote_id(); + if !self.registered_buffers.contains_key(&buffer_id) { + continue; + } -fn new_update_task( - query: ExcerptQuery, - query_ranges: QueryRanges, - excerpt_buffer: Entity, - cx: &mut Context, -) -> Task<()> { - cx.spawn(async move |editor, cx| { - let visible_range_update_results = future::join_all( - query_ranges - .visible - .into_iter() - .filter_map(|visible_range| { - let fetch_task = editor - .update(cx, |_, cx| { - fetch_and_update_hints( - excerpt_buffer.clone(), - query, - visible_range.clone(), - query.invalidate.should_invalidate(), - cx, - ) - }) - .log_err()?; - Some(async move { (visible_range, fetch_task.await) }) - }), - ) - .await; + let 
buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_anchor_range = buffer_snapshot.anchor_before(visible_range.start) + ..buffer_snapshot.anchor_after(visible_range.end); + + let visible_excerpts = + buffers_to_query + .entry(buffer_id) + .or_insert_with(|| VisibleExcerpts { + excerpts: Vec::new(), + ranges: Vec::new(), + buffer_version: buffer_version.clone(), + buffer: buffer.clone(), + }); + visible_excerpts.buffer_version = buffer_version; + visible_excerpts.excerpts.push(excerpt_id); + visible_excerpts.ranges.push(buffer_anchor_range); + } - let hint_delay = cx.background_executor().timer(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS, - )); - - let query_range_failed = - |range: &Range, e: anyhow::Error, cx: &mut AsyncApp| { - log::error!("inlay hint update task for range failed: {e:#?}"); - editor - .update(cx, |editor, cx| { - if let Some(task_ranges) = editor - .inlay_hint_cache - .update_tasks - .get_mut(&query.excerpt_id) - { - let buffer_snapshot = excerpt_buffer.read(cx).snapshot(); - task_ranges.invalidate_range(&buffer_snapshot, range); - } - }) - .ok() + let all_affected_buffers = Arc::new(Mutex::new(all_affected_buffers)); + for (buffer_id, visible_excerpts) in buffers_to_query { + let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else { + continue; }; + let fetched_tasks = inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); + if visible_excerpts + .buffer_version + .changed_since(&fetched_tasks.0) + { + fetched_tasks.1.clear(); + fetched_tasks.0 = visible_excerpts.buffer_version.clone(); + inlay_hints.hint_refresh_tasks.remove(&buffer_id); + } - for (range, result) in visible_range_update_results { - if let Err(e) = result { - query_range_failed(&range, e, cx); + let applicable_chunks = + semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx); + + match inlay_hints + .hint_refresh_tasks + .entry(buffer_id) + .or_default() + .entry(applicable_chunks) + { + hash_map::Entry::Occupied(mut o) => { + if invalidate_cache.should_invalidate() || ignore_previous_fetches { + o.get_mut().push(spawn_editor_hints_refresh( + buffer_id, + invalidate_cache, + ignore_previous_fetches, + debounce, + visible_excerpts, + all_affected_buffers.clone(), + cx, + )); + } + } + hash_map::Entry::Vacant(v) => { + v.insert(Vec::new()).push(spawn_editor_hints_refresh( + buffer_id, + invalidate_cache, + ignore_previous_fetches, + debounce, + visible_excerpts, + all_affected_buffers.clone(), + cx, + )); + } } } + } - hint_delay.await; - let invisible_range_update_results = future::join_all( - query_ranges - .before_visible - .into_iter() - .chain(query_ranges.after_visible.into_iter()) - .filter_map(|invisible_range| { - let fetch_task = editor - .update(cx, |_, cx| { - fetch_and_update_hints( - excerpt_buffer.clone(), - query, - invisible_range.clone(), - false, // visible screen request already invalidated the entries - cx, - ) - }) - .log_err()?; - Some(async move { (invisible_range, fetch_task.await) }) - }), - ) - .await; - for (range, result) in invisible_range_update_results { - if let Err(e) = result { - query_range_failed(&range, e, cx); - } - } - }) -} + pub fn clear_inlay_hints(&mut self, cx: &mut Context) { + let to_remove = self + .visible_inlay_hints(cx) + .into_iter() + .map(|inlay| { + let inlay_id = inlay.id; + if let Some(inlay_hints) = &mut self.inlay_hints { + inlay_hints.added_hints.remove(&inlay_id); + } + inlay_id + }) + .collect::>(); + self.splice_inlays(&to_remove, Vec::new(), cx); + } -fn 
fetch_and_update_hints( - excerpt_buffer: Entity, - query: ExcerptQuery, - fetch_range: Range, - invalidate: bool, - cx: &mut Context, -) -> Task> { - cx.spawn(async move |editor, cx|{ - let buffer_snapshot = excerpt_buffer.read_with(cx, |buffer, _| buffer.snapshot())?; - let (lsp_request_limiter, multi_buffer_snapshot) = - editor.update(cx, |editor, cx| { - let multi_buffer_snapshot = - editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx)); - let lsp_request_limiter = Arc::clone(&editor.inlay_hint_cache.lsp_request_limiter); - (lsp_request_limiter, multi_buffer_snapshot) - })?; - - let (lsp_request_guard, got_throttled) = if query.invalidate.should_invalidate() { - (None, false) - } else { - match lsp_request_limiter.try_acquire() { - Some(guard) => (Some(guard), false), - None => (Some(lsp_request_limiter.acquire().await), true), - } + fn refresh_editor_data( + &mut self, + reason: &InlayHintRefreshReason, + cx: &mut Context<'_, Editor>, + ) -> Option { + let visible_inlay_hints = self.visible_inlay_hints(cx); + let Some(inlay_hints) = self.inlay_hints.as_mut() else { + return None; }; - let fetch_range_to_log = fetch_range.start.to_point(&buffer_snapshot) - ..fetch_range.end.to_point(&buffer_snapshot); - let inlay_hints_fetch_task = editor - .update(cx, |editor, cx| { - if got_throttled { - let query_not_around_visible_range = match editor - .visible_excerpts(None, cx) - .remove(&query.excerpt_id) - { - Some((_, _, current_visible_range)) => { - let visible_offset_length = current_visible_range.len(); - let double_visible_range = current_visible_range - .start - .saturating_sub(visible_offset_length) - ..current_visible_range - .end - .saturating_add(visible_offset_length) - .min(buffer_snapshot.len()); - !double_visible_range - .contains(&fetch_range.start.to_offset(&buffer_snapshot)) - && !double_visible_range - .contains(&fetch_range.end.to_offset(&buffer_snapshot)) + + let invalidate_cache = match reason { + InlayHintRefreshReason::ModifiersChanged(enabled) => { + match inlay_hints.modifiers_override(*enabled) { + Some(enabled) => { + if enabled { + InvalidationStrategy::None + } else { + self.clear_inlay_hints(cx); + return None; } - None => true, - }; - if query_not_around_visible_range { - log::trace!("Fetching inlay hints for range {fetch_range_to_log:?} got throttled and fell off the current visible range, skipping."); - if let Some(task_ranges) = editor - .inlay_hint_cache - .update_tasks - .get_mut(&query.excerpt_id) + } + None => return None, + } + } + InlayHintRefreshReason::Toggle(enabled) => { + if inlay_hints.toggle(*enabled) { + if *enabled { + InvalidationStrategy::None + } else { + self.clear_inlay_hints(cx); + return None; + } + } else { + return None; + } + } + InlayHintRefreshReason::SettingsChange(new_settings) => { + match inlay_hints.update_settings(*new_settings, visible_inlay_hints) { + ControlFlow::Break(Some(InlaySplice { + to_remove, + to_insert, + })) => { + self.splice_inlays(&to_remove, to_insert, cx); + return None; + } + ControlFlow::Break(None) => return None, + ControlFlow::Continue(splice) => { + if let Some(InlaySplice { + to_remove, + to_insert, + }) = splice { - task_ranges.invalidate_range(&buffer_snapshot, &fetch_range); + self.splice_inlays(&to_remove, to_insert, cx); } - return None; + InvalidationStrategy::None } } + } + InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { + let to_remove = self + .display_map + .read(cx) + .current_inlays() + .filter_map(|inlay| { + if excerpts_removed.contains(&inlay.position.excerpt_id) { 
+ Some(inlay.id) + } else { + None + } + }) + .collect::>(); + self.splice_inlays(&to_remove, Vec::new(), cx); + return None; + } + InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None, + InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited, + InlayHintRefreshReason::RefreshRequested(server_id) => { + InvalidationStrategy::RefreshRequested(*server_id) + } + }; - let buffer = editor.buffer().read(cx).buffer(query.buffer_id)?; + match &mut self.inlay_hints { + Some(inlay_hints) => { + if !inlay_hints.enabled + && !matches!(reason, InlayHintRefreshReason::ModifiersChanged(_)) + { + return None; + } + } + None => return None, + } - if !editor.registered_buffers.contains_key(&query.buffer_id) - && let Some(project) = editor.project.as_ref() { - project.update(cx, |project, cx| { - editor.registered_buffers.insert( - query.buffer_id, - project.register_buffer_with_language_servers(&buffer, cx), - ); - }) - } + Some(invalidate_cache) + } - editor - .semantics_provider - .as_ref()? - .inlay_hints(buffer, fetch_range.clone(), cx) - }) - .ok() - .flatten(); + pub(crate) fn visible_inlay_hints(&self, cx: &Context) -> Vec { + self.display_map + .read(cx) + .current_inlays() + .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) + .cloned() + .collect() + } - let cached_excerpt_hints = editor.read_with(cx, |editor, _| { - editor - .inlay_hint_cache - .hints - .get(&query.excerpt_id) - .cloned() - })?; - - let visible_hints = editor.update(cx, |editor, cx| editor.visible_inlay_hints(cx))?; - let new_hints = match inlay_hints_fetch_task { - Some(fetch_task) => { - log::debug!( - "Fetching inlay hints for range {fetch_range_to_log:?}, reason: {query_reason}, invalidate: {invalidate}", - query_reason = query.reason, - ); - log::trace!( - "Currently visible hints: {visible_hints:?}, cached hints present: {}", - cached_excerpt_hints.is_some(), - ); - fetch_task.await.context("inlay hint fetch task")? 
- } - None => return Ok(()), + pub fn update_inlay_link_and_hover_points( + &mut self, + snapshot: &EditorSnapshot, + point_for_position: PointForPosition, + secondary_held: bool, + shift_held: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some(lsp_store) = self.project().map(|project| project.read(cx).lsp_store()) else { + return; }; - drop(lsp_request_guard); - log::debug!( - "Fetched {} hints for range {fetch_range_to_log:?}", - new_hints.len() - ); - log::trace!("Fetched hints: {new_hints:?}"); - - let background_task_buffer_snapshot = buffer_snapshot.clone(); - let background_fetch_range = fetch_range.clone(); - let new_update = cx.background_spawn(async move { - calculate_hint_updates( - query.excerpt_id, - invalidate, - background_fetch_range, - new_hints, - &background_task_buffer_snapshot, - cached_excerpt_hints, - &visible_hints, + let hovered_offset = if point_for_position.column_overshoot_after_line_end == 0 { + Some( + snapshot + .display_point_to_inlay_offset(point_for_position.exact_unclipped, Bias::Left), ) - }) - .await; - if let Some(new_update) = new_update { - log::debug!( - "Applying update for range {fetch_range_to_log:?}: remove from editor: {}, remove from cache: {}, add to cache: {}", - new_update.remove_from_visible.len(), - new_update.remove_from_cache.len(), - new_update.add_to_cache.len() + } else { + None + }; + let mut go_to_definition_updated = false; + let mut hover_updated = false; + if let Some(hovered_offset) = hovered_offset { + let buffer_snapshot = self.buffer().read(cx).snapshot(cx); + let previous_valid_anchor = buffer_snapshot.anchor_at( + point_for_position.previous_valid.to_point(snapshot), + Bias::Left, ); - log::trace!("New update: {new_update:?}"); - editor - .update(cx, |editor, cx| { - apply_hint_update( - editor, - new_update, - query, - invalidate, - buffer_snapshot, - multi_buffer_snapshot, - cx, - ); + let next_valid_anchor = buffer_snapshot.anchor_at( + point_for_position.next_valid.to_point(snapshot), + Bias::Right, + ); + if let Some(hovered_hint) = self + .visible_inlay_hints(cx) + .into_iter() + .skip_while(|hint| { + hint.position + .cmp(&previous_valid_anchor, &buffer_snapshot) + .is_lt() }) - .ok(); - } - anyhow::Ok(()) - }) -} - -fn calculate_hint_updates( - excerpt_id: ExcerptId, - invalidate: bool, - fetch_range: Range, - new_excerpt_hints: Vec, - buffer_snapshot: &BufferSnapshot, - cached_excerpt_hints: Option>>, - visible_hints: &[Inlay], -) -> Option { - let mut add_to_cache = Vec::::new(); - let mut excerpt_hints_to_persist = HashMap::default(); - for new_hint in new_excerpt_hints { - if !contains_position(&fetch_range, new_hint.position, buffer_snapshot) { - continue; - } - let missing_from_cache = match &cached_excerpt_hints { - Some(cached_excerpt_hints) => { - let cached_excerpt_hints = cached_excerpt_hints.read(); - match cached_excerpt_hints - .ordered_hints - .binary_search_by(|probe| { - cached_excerpt_hints.hints_by_id[probe] - .position - .cmp(&new_hint.position, buffer_snapshot) - }) { - Ok(ix) => { - let mut missing_from_cache = true; - for id in &cached_excerpt_hints.ordered_hints[ix..] 
{ - let cached_hint = &cached_excerpt_hints.hints_by_id[id]; - if new_hint - .position - .cmp(&cached_hint.position, buffer_snapshot) - .is_gt() - { - break; + .take_while(|hint| { + hint.position + .cmp(&next_valid_anchor, &buffer_snapshot) + .is_le() + }) + .max_by_key(|hint| hint.id) + { + if let Some(ResolvedHint::Resolved(cached_hint)) = + hovered_hint.position.buffer_id.and_then(|buffer_id| { + lsp_store.update(cx, |lsp_store, cx| { + lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx) + }) + }) + { + match cached_hint.resolve_state { + ResolveState::Resolved => { + let mut extra_shift_left = 0; + let mut extra_shift_right = 0; + if cached_hint.padding_left { + extra_shift_left += 1; + extra_shift_right += 1; } - if cached_hint == &new_hint { - excerpt_hints_to_persist.insert(*id, cached_hint.kind); - missing_from_cache = false; + if cached_hint.padding_right { + extra_shift_right += 1; } + match cached_hint.label { + InlayHintLabel::String(_) => { + if let Some(tooltip) = cached_hint.tooltip { + hover_popover::hover_at_inlay( + self, + InlayHover { + tooltip: match tooltip { + InlayHintTooltip::String(text) => HoverBlock { + text, + kind: HoverBlockKind::PlainText, + }, + InlayHintTooltip::MarkupContent(content) => { + HoverBlock { + text: content.value, + kind: content.kind, + } + } + }, + range: InlayHighlight { + inlay: hovered_hint.id, + inlay_position: hovered_hint.position, + range: extra_shift_left + ..hovered_hint.text().len() + + extra_shift_right, + }, + }, + window, + cx, + ); + hover_updated = true; + } + } + InlayHintLabel::LabelParts(label_parts) => { + let hint_start = + snapshot.anchor_to_inlay_offset(hovered_hint.position); + if let Some((hovered_hint_part, part_range)) = + hover_popover::find_hovered_hint_part( + label_parts, + hint_start, + hovered_offset, + ) + { + let highlight_start = + (part_range.start - hint_start).0 + extra_shift_left; + let highlight_end = + (part_range.end - hint_start).0 + extra_shift_right; + let highlight = InlayHighlight { + inlay: hovered_hint.id, + inlay_position: hovered_hint.position, + range: highlight_start..highlight_end, + }; + if let Some(tooltip) = hovered_hint_part.tooltip { + hover_popover::hover_at_inlay( + self, + InlayHover { + tooltip: match tooltip { + InlayHintLabelPartTooltip::String(text) => { + HoverBlock { + text, + kind: HoverBlockKind::PlainText, + } + } + InlayHintLabelPartTooltip::MarkupContent( + content, + ) => HoverBlock { + text: content.value, + kind: content.kind, + }, + }, + range: highlight.clone(), + }, + window, + cx, + ); + hover_updated = true; + } + if let Some((language_server_id, location)) = + hovered_hint_part.location + && secondary_held + && !self.has_pending_nonempty_selection() + { + go_to_definition_updated = true; + show_link_definition( + shift_held, + self, + TriggerPoint::InlayHint( + highlight, + location, + language_server_id, + ), + snapshot, + window, + cx, + ); + } + } + } + }; } - missing_from_cache + ResolveState::CanResolve(_, _) => debug_panic!( + "Expected resolved_hint retrieval to return a resolved hint" + ), + ResolveState::Resolving => {} } - Err(_) => true, } } - None => true, - }; - if missing_from_cache { - add_to_cache.push(new_hint); + } + + if !go_to_definition_updated { + self.hide_hovered_link(cx) + } + if !hover_updated { + hover_popover::hover_at(self, None, window, cx); } } - let mut remove_from_visible = HashSet::default(); - let mut remove_from_cache = HashSet::default(); - if invalidate { - remove_from_visible.extend( - visible_hints - .iter() - 
.filter(|hint| hint.position.excerpt_id == excerpt_id) - .map(|inlay_hint| inlay_hint.id) - .filter(|hint_id| !excerpt_hints_to_persist.contains_key(hint_id)), - ); + fn inlay_hints_for_buffer( + &mut self, + invalidate_cache: InvalidationStrategy, + ignore_previous_fetches: bool, + buffer_excerpts: VisibleExcerpts, + cx: &mut Context, + ) -> Option, anyhow::Result)>>> { + let semantics_provider = self.semantics_provider()?; + let inlay_hints = self.inlay_hints.as_mut()?; + let buffer_id = buffer_excerpts.buffer.read(cx).remote_id(); + + let new_hint_tasks = semantics_provider + .inlay_hints( + invalidate_cache, + buffer_excerpts.buffer, + buffer_excerpts.ranges, + inlay_hints + .hint_chunk_fetched + .get(&buffer_id) + .filter(|_| !ignore_previous_fetches && !invalidate_cache.should_invalidate()) + .cloned(), + cx, + ) + .unwrap_or_default(); + + let (known_version, known_chunks) = + inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); + if buffer_excerpts.buffer_version.changed_since(known_version) { + known_chunks.clear(); + *known_version = buffer_excerpts.buffer_version; + } + + let mut hint_tasks = Vec::new(); + for (row_range, new_hints_task) in new_hint_tasks { + let inserted = known_chunks.insert(row_range.clone()); + if inserted || ignore_previous_fetches || invalidate_cache.should_invalidate() { + hint_tasks.push(cx.spawn(async move |_, _| (row_range, new_hints_task.await))); + } + } + + Some(hint_tasks) + } + + fn apply_fetched_hints( + &mut self, + buffer_id: BufferId, + query_version: Global, + invalidate_cache: InvalidationStrategy, + new_hints: Vec<(Range, anyhow::Result)>, + all_affected_buffers: Arc>>, + cx: &mut Context, + ) { + let visible_inlay_hint_ids = self + .visible_inlay_hints(cx) + .iter() + .filter(|inlay| inlay.position.buffer_id == Some(buffer_id)) + .map(|inlay| inlay.id) + .collect::>(); + let Some(inlay_hints) = &mut self.inlay_hints else { + return; + }; - if let Some(cached_excerpt_hints) = &cached_excerpt_hints { - let cached_excerpt_hints = cached_excerpt_hints.read(); - remove_from_cache.extend( - cached_excerpt_hints - .ordered_hints + let mut hints_to_remove = Vec::new(); + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); + + // If we've received hints from the cache, it means `invalidate_cache` had invalidated whatever possible there, + // and most probably there are no more hints with IDs from `visible_inlay_hint_ids` in the cache. + // So, if we hover such hints, no resolve will happen. + // + // Another issue is in the fact that changing one buffer may lead to other buffers' hints changing, so more cache entries may be removed. + // Hence, clear all excerpts' hints in the multi buffer: later, the invalidated ones will re-trigger the LSP query, the rest will be restored + // from the cache. 
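// [Editor's note] The comment above describes the invalidate-then-restore flow: when a fetch
// was triggered with an invalidating strategy, every hint currently shown for the buffer is
// removed in the same splice that inserts the freshly fetched ones, and anything still valid
// is expected to come back from the cache on the next query. The sketch below is a minimal,
// self-contained model of that bookkeeping, assuming simplified stand-in types: `HintId`,
// `FetchedHint`, `plan_splice`, and the `should_invalidate` flag are illustrative only and are
// not Zed's actual `InlayId`, `InlayHint`, or `InvalidationStrategy` APIs.
use std::collections::{HashMap, HashSet};

type HintId = usize;

#[derive(Clone, Debug, PartialEq)]
struct FetchedHint {
    id: HintId,
    label: String,
}

/// Compute one atomic splice: hint IDs to drop from the display and hints to add to it.
fn plan_splice(
    should_invalidate: bool,
    visible_ids: &HashSet<HintId>,
    already_added: &mut HashMap<HintId, String>,
    fetched: Vec<FetchedHint>,
) -> (Vec<HintId>, Vec<FetchedHint>) {
    // When invalidating, every currently visible hint for this buffer is scheduled for
    // removal; still-valid hints re-enter via the fresh batch or the cache.
    let to_remove: Vec<HintId> = if should_invalidate {
        visible_ids.iter().copied().collect()
    } else {
        Vec::new()
    };
    // Insert only hints that have not been added before, mirroring the
    // `added_hints.insert(..).is_none()` check in the surrounding code.
    let to_insert = fetched
        .into_iter()
        .filter(|hint| already_added.insert(hint.id, hint.label.clone()).is_none())
        .collect();
    (to_remove, to_insert)
}

#[test]
fn invalidation_drops_visible_hints_before_reinserting() {
    let visible: HashSet<HintId> = [1, 2].into_iter().collect();
    let mut added = HashMap::new();
    let fetched = vec![FetchedHint { id: 3, label: ": i32".into() }];
    let (to_remove, to_insert) = plan_splice(true, &visible, &mut added, fetched);
    assert_eq!(to_remove.len(), 2, "all previously visible hints are removed");
    assert_eq!(to_insert.len(), 1, "only the newly fetched hint is inserted");
}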
+ if invalidate_cache.should_invalidate() { + hints_to_remove.extend(visible_inlay_hint_ids); + } + + let excerpts = self.buffer.read(cx).excerpt_ids(); + let hints_to_insert = new_hints + .into_iter() + .filter_map(|(chunk_range, hints_result)| match hints_result { + Ok(new_hints) => Some(new_hints), + Err(e) => { + log::error!( + "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}" + ); + if let Some((for_version, chunks_fetched)) = + inlay_hints.hint_chunk_fetched.get_mut(&buffer_id) + { + if for_version == &query_version { + chunks_fetched.remove(&chunk_range); + } + } + None + } + }) + .flat_map(|hints| hints.into_values()) + .flatten() + .filter_map(|(hint_id, lsp_hint)| { + if inlay_hints.allowed_hint_kinds.contains(&lsp_hint.kind) + && inlay_hints + .added_hints + .insert(hint_id, lsp_hint.kind) + .is_none() + { + let position = excerpts.iter().find_map(|excerpt_id| { + multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, lsp_hint.position) + })?; + return Some(Inlay::hint(hint_id, position, &lsp_hint)); + } + None + }) + .collect::>(); + + // We need to invalidate excerpts all buffers with the same language, do that once only, after first new data chunk is inserted. + let all_other_affected_buffers = all_affected_buffers + .lock() + .drain() + .filter(|id| buffer_id != *id) + .collect::>(); + if !all_other_affected_buffers.is_empty() { + hints_to_remove.extend( + self.visible_inlay_hints(cx) .iter() - .filter(|cached_inlay_id| { - !excerpt_hints_to_persist.contains_key(cached_inlay_id) + .filter(|inlay| { + inlay + .position + .buffer_id + .is_none_or(|buffer_id| all_other_affected_buffers.contains(&buffer_id)) }) - .copied(), + .map(|inlay| inlay.id), ); - remove_from_visible.extend(remove_from_cache.iter().cloned()); } - } - if remove_from_visible.is_empty() && remove_from_cache.is_empty() && add_to_cache.is_empty() { - None - } else { - Some(ExcerptHintsUpdate { - excerpt_id, - remove_from_visible, - remove_from_cache, - add_to_cache, - }) + self.splice_inlays(&hints_to_remove, hints_to_insert, cx); } } -fn contains_position( - range: &Range, - position: language::Anchor, - buffer_snapshot: &BufferSnapshot, -) -> bool { - range.start.cmp(&position, buffer_snapshot).is_le() - && range.end.cmp(&position, buffer_snapshot).is_ge() +#[derive(Debug)] +struct VisibleExcerpts { + excerpts: Vec, + ranges: Vec>, + buffer_version: Global, + buffer: Entity, } -fn apply_hint_update( - editor: &mut Editor, - new_update: ExcerptHintsUpdate, - query: ExcerptQuery, - invalidate: bool, - buffer_snapshot: BufferSnapshot, - multi_buffer_snapshot: MultiBufferSnapshot, - cx: &mut Context, -) { - let cached_excerpt_hints = editor - .inlay_hint_cache - .hints - .entry(new_update.excerpt_id) - .or_insert_with(|| { - Arc::new(RwLock::new(CachedExcerptHints { - version: query.cache_version, - buffer_version: buffer_snapshot.version().clone(), - buffer_id: query.buffer_id, - ordered_hints: Vec::new(), - hints_by_id: HashMap::default(), - })) - }); - let mut cached_excerpt_hints = cached_excerpt_hints.write(); - match query.cache_version.cmp(&cached_excerpt_hints.version) { - cmp::Ordering::Less => return, - cmp::Ordering::Greater | cmp::Ordering::Equal => { - cached_excerpt_hints.version = query.cache_version; +fn spawn_editor_hints_refresh( + buffer_id: BufferId, + invalidate_cache: InvalidationStrategy, + ignore_previous_fetches: bool, + debounce: Option, + buffer_excerpts: VisibleExcerpts, + all_affected_buffers: Arc>>, + cx: &mut Context<'_, Editor>, +) -> Task<()> { + cx.spawn(async 
move |editor, cx| { + if let Some(debounce) = debounce { + cx.background_executor().timer(debounce).await; } - } - let mut cached_inlays_changed = !new_update.remove_from_cache.is_empty(); - cached_excerpt_hints - .ordered_hints - .retain(|hint_id| !new_update.remove_from_cache.contains(hint_id)); - cached_excerpt_hints - .hints_by_id - .retain(|hint_id, _| !new_update.remove_from_cache.contains(hint_id)); - let mut splice = InlaySplice::default(); - splice.to_remove.extend(new_update.remove_from_visible); - for new_hint in new_update.add_to_cache { - let insert_position = match cached_excerpt_hints - .ordered_hints - .binary_search_by(|probe| { - cached_excerpt_hints.hints_by_id[probe] - .position - .cmp(&new_hint.position, &buffer_snapshot) - }) { - Ok(i) => { - // When a hint is added to the same position where existing ones are present, - // do not deduplicate it: we split hint queries into non-overlapping ranges - // and each hint batch returned by the server should already contain unique hints. - i + cached_excerpt_hints.ordered_hints[i..].len() + 1 - } - Err(i) => i, + let query_version = buffer_excerpts.buffer_version.clone(); + let Some(hint_tasks) = editor + .update(cx, |editor, cx| { + editor.inlay_hints_for_buffer( + invalidate_cache, + ignore_previous_fetches, + buffer_excerpts, + cx, + ) + }) + .ok() + else { + return; }; - - let new_inlay_id = post_inc(&mut editor.next_inlay_id); - if editor - .inlay_hint_cache - .allowed_hint_kinds - .contains(&new_hint.kind) - && let Some(new_hint_position) = - multi_buffer_snapshot.anchor_in_excerpt(query.excerpt_id, new_hint.position) - { - splice - .to_insert - .push(Inlay::hint(new_inlay_id, new_hint_position, &new_hint)); - } - let new_id = InlayId::Hint(new_inlay_id); - cached_excerpt_hints.hints_by_id.insert(new_id, new_hint); - if cached_excerpt_hints.ordered_hints.len() <= insert_position { - cached_excerpt_hints.ordered_hints.push(new_id); - } else { - cached_excerpt_hints - .ordered_hints - .insert(insert_position, new_id); - } - - cached_inlays_changed = true; - } - cached_excerpt_hints.buffer_version = buffer_snapshot.version().clone(); - drop(cached_excerpt_hints); - - if invalidate { - let mut outdated_excerpt_caches = HashSet::default(); - for (excerpt_id, excerpt_hints) in &editor.inlay_hint_cache().hints { - let excerpt_hints = excerpt_hints.read(); - if excerpt_hints.buffer_id == query.buffer_id - && excerpt_id != &query.excerpt_id - && buffer_snapshot - .version() - .changed_since(&excerpt_hints.buffer_version) - { - outdated_excerpt_caches.insert(*excerpt_id); - splice - .to_remove - .extend(excerpt_hints.ordered_hints.iter().copied()); - } + let hint_tasks = hint_tasks.unwrap_or_default(); + if hint_tasks.is_empty() { + return; } - cached_inlays_changed |= !outdated_excerpt_caches.is_empty(); + let new_hints = join_all(hint_tasks).await; editor - .inlay_hint_cache - .hints - .retain(|excerpt_id, _| !outdated_excerpt_caches.contains(excerpt_id)); - } - - let InlaySplice { - to_remove, - to_insert, - } = splice; - let displayed_inlays_changed = !to_remove.is_empty() || !to_insert.is_empty(); - if cached_inlays_changed || displayed_inlays_changed { - editor.inlay_hint_cache.version += 1; - } - if displayed_inlays_changed { - editor.splice_inlays(&to_remove, to_insert, cx) - } + .update(cx, |editor, cx| { + editor.apply_fetched_hints( + buffer_id, + query_version, + invalidate_cache, + new_hints, + all_affected_buffers, + cx, + ); + }) + .ok(); + }) } #[cfg(test)] pub mod tests { - use crate::SelectionEffects; use 
crate::editor_tests::update_test_language_settings; + use crate::inlays::inlay_hints::InlayHintRefreshReason; use crate::scroll::ScrollAmount; - use crate::{ExcerptRange, scroll::Autoscroll, test::editor_lsp_test_context::rust_lang}; - use futures::StreamExt; + use crate::{Editor, SelectionEffects}; + use crate::{ExcerptRange, scroll::Autoscroll}; + use collections::HashSet; + use futures::{StreamExt, future}; use gpui::{AppContext as _, Context, SemanticVersion, TestAppContext, WindowHandle}; use itertools::Itertools as _; + use language::language_settings::InlayHintKind; use language::{Capability, FakeLspAdapter}; use language::{Language, LanguageConfig, LanguageMatcher}; + use languages::rust_lang; use lsp::FakeLanguageServer; + use multi_buffer::MultiBuffer; use parking_lot::Mutex; + use pretty_assertions::assert_eq; use project::{FakeFs, Project}; use serde_json::json; use settings::{AllLanguageSettingsContent, InlayHintSettingsContent, SettingsStore}; + use std::ops::Range; + use std::sync::Arc; use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering}; - use text::Point; + use std::time::Duration; + use text::{OffsetRangeExt, Point}; + use ui::App; use util::path; - - use super::*; + use util::paths::natural_sort; #[gpui::test] async fn test_basic_cache_update_with_duplicate_hints(cx: &mut gpui::TestAppContext) { @@ -1367,13 +991,13 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its first hints when opening the editor" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - let inlay_cache = editor.inlay_hint_cache(); assert_eq!( - inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + allowed_hint_kinds_for_editor(editor), + allowed_hint_kinds, "Cache should use editor settings to get the allowed hint kinds" ); }) @@ -1387,44 +1011,125 @@ pub mod tests { editor.handle_input("some change", window, cx); }) .unwrap(); - cx.executor().run_until_parked(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _window, cx| { + let expected_hints = vec!["2".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor, cx), + "Should get new hints after an edit" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + allowed_hint_kinds_for_editor(editor), + allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + }) + .unwrap(); + + fake_server + .request::(()) + .await + .into_response() + .expect("inlay refresh request failed"); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _window, cx| { + let expected_hints = vec!["3".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor, cx), + "Should get new hints after hint refresh/ request" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + allowed_hint_kinds_for_editor(editor), + allowed_hint_kinds, + "Cache should use editor settings to get the allowed hint kinds" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_racy_cache_updates(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + let (_, editor, fake_server) = prepare_test_objects(cx, |fake_server, file_with_hints| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.set_request_handler::( + move |params, _| { + 
let task_lsp_request_count = Arc::clone(&lsp_request_count); + async move { + let i = task_lsp_request_count.fetch_add(1, Ordering::Release) + 1; + assert_eq!( + params.text_document.uri, + lsp::Uri::from_file_path(file_with_hints).unwrap(), + ); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + }) + .await; + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let expected_hints = vec!["1".to_string()]; + assert_eq!( + expected_hints, + cached_hint_labels(editor, cx), + "Should get its first hints when opening the editor" + ); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + }) + .unwrap(); + + // Emulate simultaneous events: both editing, refresh and, slightly after, scroll updates are triggered. + editor + .update(cx, |editor, window, cx| { + editor.handle_input("foo", window, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(5)); editor .update(cx, |editor, _window, cx| { - let expected_hints = vec!["2".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should get new hints after an edit" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - let inlay_cache = editor.inlay_hint_cache(); - assert_eq!( - inlay_cache.allowed_hint_kinds, allowed_hint_kinds, - "Cache should use editor settings to get the allowed hint kinds" + editor.refresh_inlay_hints( + InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()), + cx, ); }) .unwrap(); - - fake_server - .request::(()) - .await - .into_response() - .expect("inlay refresh request failed"); + cx.executor().advance_clock(Duration::from_millis(5)); + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); cx.executor().run_until_parked(); editor .update(cx, |editor, _window, cx| { - let expected_hints = vec!["3".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should get new hints after hint refresh/ request" - ); + let expected_hints = vec!["2".to_string()]; + assert_eq!(expected_hints, cached_hint_labels(editor, cx), "Despite multiple simultaneous refreshes, only one inlay hint query should be issued"); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); - let inlay_cache = editor.inlay_hint_cache(); - assert_eq!( - inlay_cache.allowed_hint_kinds, allowed_hint_kinds, - "Cache should use editor settings to get the allowed hint kinds" - ); }) .unwrap(); } @@ -1479,7 +1184,7 @@ pub mod tests { let expected_hints = vec!["0".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its first hints when opening the editor" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1508,7 +1213,7 @@ pub mod tests { let expected_hints = vec!["0".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should not update hints while the work task is running" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1528,7 +1233,7 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - 
cached_hint_labels(editor), + cached_hint_labels(editor, cx), "New hints should be queried after the work task is done" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1663,7 +1368,7 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its first hints when opening the editor" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1688,7 +1393,7 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Markdown editor should have a separate version, repeating Rust editor rules" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1706,15 +1411,10 @@ pub mod tests { cx.executor().run_until_parked(); rs_editor .update(cx, |editor, _window, cx| { - // TODO: Here, we do not get "2", because inserting another language server will trigger `RefreshInlayHints` event from the `LspStore` - // A project is listened in every editor, so each of them will react to this event. - // - // We do not have language server IDs for remote projects, so cannot easily say on the editor level, - // whether we should ignore a particular `RefreshInlayHints` event. - let expected_hints = vec!["3".to_string()]; + let expected_hints = vec!["2".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Rust inlay cache should change after the edit" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1725,7 +1425,7 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Markdown editor should not be affected by Rust editor changes" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1746,7 +1446,7 @@ pub mod tests { let expected_hints = vec!["2".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Rust editor should not be affected by Markdown editor changes" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1754,10 +1454,10 @@ pub mod tests { .unwrap(); rs_editor .update(cx, |editor, _window, cx| { - let expected_hints = vec!["3".to_string()]; + let expected_hints = vec!["2".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Markdown editor should also change independently" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -1852,16 +1552,16 @@ pub mod tests { "parameter hint".to_string(), "other hint".to_string(), ], - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its first hints when opening the editor" ); assert_eq!( vec!["type hint".to_string(), "other hint".to_string()], visible_hint_labels(editor, cx) ); - let inlay_cache = editor.inlay_hint_cache(); assert_eq!( - inlay_cache.allowed_hint_kinds, allowed_hint_kinds, + allowed_hint_kinds_for_editor(editor), + allowed_hint_kinds, "Cache should use editor settings to get the allowed hint kinds" ); }) @@ -1886,7 +1586,7 @@ pub mod tests { "parameter hint".to_string(), "other hint".to_string(), ], - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Cached hints should not change due to allowed hint kinds settings update" ); assert_eq!( @@ -1961,7 +1661,7 @@ pub mod tests { "parameter hint".to_string(), "other hint".to_string(), ], - 
cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its cached hints unchanged after the settings change for hint kinds {new_allowed_hint_kinds:?}" ); assert_eq!( @@ -1969,9 +1669,9 @@ pub mod tests { visible_hint_labels(editor, cx), "Should get its visible hints filtered after the settings change for hint kinds {new_allowed_hint_kinds:?}" ); - let inlay_cache = editor.inlay_hint_cache(); assert_eq!( - inlay_cache.allowed_hint_kinds, new_allowed_hint_kinds, + allowed_hint_kinds_for_editor(editor), + new_allowed_hint_kinds, "Cache should use editor settings to get the allowed hint kinds for hint kinds {new_allowed_hint_kinds:?}" ); }).unwrap(); @@ -2003,17 +1703,23 @@ pub mod tests { 2, "Should not load new hints when hints got disabled" ); - assert!( - cached_hint_labels(editor).is_empty(), - "Should clear the cache when hints got disabled" + assert_eq!( + vec![ + "type hint".to_string(), + "parameter hint".to_string(), + "other hint".to_string(), + ], + cached_hint_labels(editor, cx), + "Should not clear the cache when hints got disabled" ); - assert!( - visible_hint_labels(editor, cx).is_empty(), + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), "Should clear visible hints when hints got disabled" ); - let inlay_cache = editor.inlay_hint_cache(); assert_eq!( - inlay_cache.allowed_hint_kinds, another_allowed_hint_kinds, + allowed_hint_kinds_for_editor(editor), + another_allowed_hint_kinds, "Should update its allowed hint kinds even when hints got disabled" ); }) @@ -2032,8 +1738,15 @@ pub mod tests { 2, "Should not load new hints when they got disabled" ); - assert!(cached_hint_labels(editor).is_empty()); - assert!(visible_hint_labels(editor, cx).is_empty()); + assert_eq!( + vec![ + "type hint".to_string(), + "parameter hint".to_string(), + "other hint".to_string(), + ], + cached_hint_labels(editor, cx) + ); + assert_eq!(Vec::::new(), visible_hint_labels(editor, cx)); }) .unwrap(); @@ -2060,8 +1773,8 @@ pub mod tests { .update(cx, |editor, _, cx| { assert_eq!( lsp_request_count.load(Ordering::Relaxed), - 3, - "Should query for new hints when they got re-enabled" + 2, + "Should not query for new hints when they got re-enabled, as the file version did not change" ); assert_eq!( vec![ @@ -2069,7 +1782,7 @@ pub mod tests { "parameter hint".to_string(), "other hint".to_string(), ], - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get its cached hints fully repopulated after the hints got re-enabled" ); assert_eq!( @@ -2077,9 +1790,9 @@ pub mod tests { visible_hint_labels(editor, cx), "Should get its visible hints repopulated and filtered after the h" ); - let inlay_cache = editor.inlay_hint_cache(); assert_eq!( - inlay_cache.allowed_hint_kinds, final_allowed_hint_kinds, + allowed_hint_kinds_for_editor(editor), + final_allowed_hint_kinds, "Cache should update editor settings when hints got re-enabled" ); }) @@ -2095,7 +1808,7 @@ pub mod tests { .update(cx, |editor, _, cx| { assert_eq!( lsp_request_count.load(Ordering::Relaxed), - 4, + 3, "Should query for new hints again" ); assert_eq!( @@ -2104,7 +1817,7 @@ pub mod tests { "parameter hint".to_string(), "other hint".to_string(), ], - cached_hint_labels(editor), + cached_hint_labels(editor, cx), ); assert_eq!( vec!["parameter hint".to_string()], @@ -2197,7 +1910,7 @@ pub mod tests { let expected_hints = vec!["2".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get hints from the last edit landed only" ); 
assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -2243,7 +1956,7 @@ pub mod tests { let expected_hints = vec!["3".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should get hints from the last edit landed only" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -2289,7 +2002,7 @@ pub mod tests { FakeLspAdapter { capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() + ..lsp::ServerCapabilities::default() }, initializer: Some(Box::new({ let lsp_request_ranges = lsp_request_ranges.clone(); @@ -2327,7 +2040,7 @@ pub mod tests { ); } })), - ..Default::default() + ..FakeLspAdapter::default() }, ); @@ -2339,70 +2052,35 @@ pub mod tests { .unwrap(); let editor = cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); - cx.executor().run_until_parked(); - let _fake_server = fake_servers.next().await.unwrap(); + cx.executor().run_until_parked(); - // in large buffers, requests are made for more than visible range of a buffer. - // invisible parts are queried later, to avoid excessive requests on quick typing. - // wait the timeout needed to get all requests. - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); - cx.executor().run_until_parked(); - let initial_visible_range = editor_visible_range(&editor, cx); - let lsp_initial_visible_range = lsp::Range::new( - lsp::Position::new( - initial_visible_range.start.row, - initial_visible_range.start.column, - ), - lsp::Position::new( - initial_visible_range.end.row, - initial_visible_range.end.column, - ), + let ranges = lsp_request_ranges + .lock() + .drain(..) + .sorted_by_key(|r| r.start) + .collect::>(); + assert_eq!( + ranges.len(), + 1, + "Should query 1 range initially, but got: {ranges:?}" ); - let expected_initial_query_range_end = - lsp::Position::new(initial_visible_range.end.row * 2, 2); - let mut expected_invisible_query_start = lsp_initial_visible_range.end; - expected_invisible_query_start.character += 1; - editor.update(cx, |editor, _window, cx| { - let ranges = lsp_request_ranges.lock().drain(..).collect::>(); - assert_eq!(ranges.len(), 2, - "When scroll is at the edge of a big document, its visible part and the same range further should be queried in order, but got: {ranges:?}"); - let visible_query_range = &ranges[0]; - assert_eq!(visible_query_range.start, lsp_initial_visible_range.start); - assert_eq!(visible_query_range.end, lsp_initial_visible_range.end); - let invisible_query_range = &ranges[1]; - - assert_eq!(invisible_query_range.start, expected_invisible_query_start, "Should initially query visible edge of the document"); - assert_eq!(invisible_query_range.end, expected_initial_query_range_end, "Should initially query visible edge of the document"); - - let requests_count = lsp_request_count.load(Ordering::Acquire); - assert_eq!(requests_count, 2, "Visible + invisible request"); - let expected_hints = vec!["47".to_string(), "94".to_string()]; - assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should have hints from both LSP requests made for a big file" - ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx), "Should display only hints from the visible range"); - }).unwrap(); editor .update(cx, |editor, window, cx| { editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx); }) .unwrap(); + // Wait for the first hints request to fire off + 
cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); editor .update(cx, |editor, window, cx| { editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx); }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); let visible_range_after_scrolls = editor_visible_range(&editor, cx); let visible_line_count = editor @@ -2425,37 +2103,25 @@ pub mod tests { let first_scroll = &ranges[0]; let second_scroll = &ranges[1]; assert_eq!( - first_scroll.end, second_scroll.start, + first_scroll.end.line, second_scroll.start.line, "Should query 2 adjacent ranges after the scrolls, but got: {ranges:?}" ); - assert_eq!( - first_scroll.start, expected_initial_query_range_end, - "First scroll should start the query right after the end of the original scroll", - ); - assert_eq!( - second_scroll.end, - lsp::Position::new( - visible_range_after_scrolls.end.row - + visible_line_count.ceil() as u32, - 1, - ), - "Second scroll should query one more screen down after the end of the visible range" - ); let lsp_requests = lsp_request_count.load(Ordering::Acquire); - assert_eq!(lsp_requests, 4, "Should query for hints after every scroll"); - let expected_hints = vec![ - "47".to_string(), - "94".to_string(), - "139".to_string(), - "184".to_string(), - ]; assert_eq!( - expected_hints, - cached_hint_labels(editor), - "Should have hints from the new LSP response after the edit" + lsp_requests, 3, + "Should query hints initially, and after each scroll (2 times)" + ); + assert_eq!( + vec!["50".to_string(), "100".to_string(), "150".to_string()], + cached_hint_labels(editor, cx), + "Chunks of 50 line width should have been queried each time" + ); + assert_eq!( + vec!["50".to_string(), "100".to_string(), "150".to_string()], + visible_hint_labels(editor, cx), + "Editor should show only hints that it's scrolled to" ); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); let mut selection_in_cached_range = visible_range_after_scrolls.end; selection_in_cached_range.row -= visible_line_count.ceil() as u32; @@ -2473,9 +2139,6 @@ pub mod tests { ); }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); cx.executor().run_until_parked(); editor.update(cx, |_, _, _| { let ranges = lsp_request_ranges @@ -2484,7 +2147,7 @@ pub mod tests { .sorted_by_key(|r| r.start) .collect::>(); assert!(ranges.is_empty(), "No new ranges or LSP queries should be made after returning to the selection with cached hints"); - assert_eq!(lsp_request_count.load(Ordering::Acquire), 4); + assert_eq!(lsp_request_count.load(Ordering::Acquire), 3, "No new requests should be made when selecting within cached chunks"); }).unwrap(); editor @@ -2492,38 +2155,25 @@ pub mod tests { editor.handle_input("++++more text++++", window, cx); }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); cx.executor().run_until_parked(); editor.update(cx, |editor, _window, cx| { let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); ranges.sort_by_key(|r| r.start); - assert_eq!(ranges.len(), 3, - "On edit, should scroll to selection and query a range around it: visible + same range above and below. 
Instead, got query ranges {ranges:?}"); - let above_query_range = &ranges[0]; - let visible_query_range = &ranges[1]; - let below_query_range = &ranges[2]; - assert!(above_query_range.end.character < visible_query_range.start.character || above_query_range.end.line + 1 == visible_query_range.start.line, - "Above range {above_query_range:?} should be before visible range {visible_query_range:?}"); - assert!(visible_query_range.end.character < below_query_range.start.character || visible_query_range.end.line + 1 == below_query_range.start.line, - "Visible range {visible_query_range:?} should be before below range {below_query_range:?}"); - assert!(above_query_range.start.line < selection_in_cached_range.row, + assert_eq!(ranges.len(), 2, + "On edit, should scroll to selection and query a range around it: that range should split into 2 50 rows wide chunks. Instead, got query ranges {ranges:?}"); + let first_chunk = &ranges[0]; + let second_chunk = &ranges[1]; + assert!(first_chunk.end.line == second_chunk.start.line, + "First chunk {first_chunk:?} should be before second chunk {second_chunk:?}"); + assert!(first_chunk.start.line < selection_in_cached_range.row, "Hints should be queried with the selected range after the query range start"); - assert!(below_query_range.end.line > selection_in_cached_range.row, - "Hints should be queried with the selected range before the query range end"); - assert!(above_query_range.start.line <= selection_in_cached_range.row - (visible_line_count * 3.0 / 2.0) as u32, - "Hints query range should contain one more screen before"); - assert!(below_query_range.end.line >= selection_in_cached_range.row + (visible_line_count * 3.0 / 2.0) as u32, - "Hints query range should contain one more screen after"); let lsp_requests = lsp_request_count.load(Ordering::Acquire); - assert_eq!(lsp_requests, 7, "There should be a visible range and two ranges above and below it queried"); - let expected_hints = vec!["67".to_string(), "115".to_string(), "163".to_string()]; - assert_eq!(expected_hints, cached_hint_labels(editor), - "Should have hints from the new LSP response after the edit"); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!(lsp_requests, 5, "Two chunks should be re-queried"); + assert_eq!(vec!["100".to_string(), "150".to_string()], cached_hint_labels(editor, cx), + "Should have (less) hints from the new LSP response after the edit"); + assert_eq!(vec!["100".to_string(), "150".to_string()], visible_hint_labels(editor, cx), "Should show only visible hints (in the center) from the new cached set"); }).unwrap(); } @@ -2532,7 +2182,7 @@ pub mod tests { cx: &mut gpui::TestAppContext, ) -> Range { let ranges = editor - .update(cx, |editor, _window, cx| editor.visible_excerpts(None, cx)) + .update(cx, |editor, _window, cx| editor.visible_excerpts(cx)) .unwrap(); assert_eq!( ranges.len(), @@ -2541,14 +2191,7 @@ pub mod tests { ); let (_, (excerpt_buffer, _, excerpt_visible_range)) = ranges.into_iter().next().unwrap(); excerpt_buffer.read_with(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - let start = buffer - .anchor_before(excerpt_visible_range.start) - .to_point(&snapshot); - let end = buffer - .anchor_after(excerpt_visible_range.end) - .to_point(&snapshot); - start..end + excerpt_visible_range.to_point(&buffer.snapshot()) }) } @@ -2588,9 +2231,9 @@ pub mod tests { FakeLspAdapter { capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() + ..lsp::ServerCapabilities::default() }, - 
..Default::default() + ..FakeLspAdapter::default() }, ); @@ -2722,7 +2365,7 @@ pub mod tests { ]; assert_eq!( expected_hints, - sorted_cached_hint_labels(editor), + sorted_cached_hint_labels(editor, cx), "When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -2747,11 +2390,28 @@ pub mod tests { SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges([Point::new(50, 0)..Point::new(50, 0)]), + |s| s.select_ranges([Point::new(57, 0)..Point::new(57, 0)]), ); }) .unwrap(); cx.executor().run_until_parked(); + editor + .update(cx, |editor, _window, cx| { + let expected_hints = vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + "main hint #4".to_string(), + "main hint #5".to_string(), + ]; + assert_eq!(expected_hints, sorted_cached_hint_labels(editor, cx), + "New hints are not shown right after scrolling, we need to wait for the buffer to be registered"); + assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); editor .update(cx, |editor, _window, cx| { let expected_hints = vec![ @@ -2764,10 +2424,17 @@ pub mod tests { "other hint #0".to_string(), "other hint #1".to_string(), "other hint #2".to_string(), + "other hint #3".to_string(), ]; - assert_eq!(expected_hints, sorted_cached_hint_labels(editor), - "With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits"); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + expected_hints, + sorted_cached_hint_labels(editor, cx), + "After scrolling to the new buffer and waiting for it to be registered, new hints should appear"); + assert_eq!( + expected_hints, + visible_hint_labels(editor, cx), + "Editor should show only visible hints", + ); }) .unwrap(); @@ -2781,9 +2448,7 @@ pub mod tests { ); }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); editor .update(cx, |editor, _window, cx| { @@ -2801,9 +2466,16 @@ pub mod tests { "other hint #4".to_string(), "other hint #5".to_string(), ]; - assert_eq!(expected_hints, sorted_cached_hint_labels(editor), - "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched"); - assert_eq!(expected_hints, visible_hint_labels(editor, cx)); + assert_eq!( + expected_hints, + sorted_cached_hint_labels(editor, cx), + "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched" + ); + assert_eq!( + expected_hints, + visible_hint_labels(editor, cx), + "Editor shows only hints for excerpts that were visible when scrolling" + ); }) .unwrap(); @@ -2817,9 +2489,6 @@ pub mod tests { ); }) .unwrap(); - cx.executor().advance_clock(Duration::from_millis( - INVISIBLE_RANGES_HINTS_REQUEST_DELAY_MILLIS + 100, - )); cx.executor().run_until_parked(); editor .update(cx, |editor, _window, cx| { @@ -2837,41 +2506,301 @@ pub mod tests { "other hint #4".to_string(), "other hint #5".to_string(), ]; - assert_eq!(expected_hints, sorted_cached_hint_labels(editor), - "After multibuffer was scrolled to the end, further scrolls up should not bring more hints"); - assert_eq!(expected_hints, 
visible_hint_labels(editor, cx)); + assert_eq!( + expected_hints, + sorted_cached_hint_labels(editor, cx), + "After multibuffer was scrolled to the end, further scrolls up should not bring more hints" + ); + assert_eq!( + expected_hints, + visible_hint_labels(editor, cx), + ); + }) + .unwrap(); + + // We prepare to change the scrolling on edit, but do not scroll yet + editor + .update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([Point::new(57, 0)..Point::new(57, 0)]) + }); + }) + .unwrap(); + cx.executor().run_until_parked(); + // Edit triggers the scrolling too + editor_edited.store(true, Ordering::Release); + editor + .update(cx, |editor, window, cx| { + editor.handle_input("++++more text++++", window, cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + // Wait again to trigger the inlay hints fetch on scroll + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _window, cx| { + let expected_hints = vec![ + "main hint(edited) #0".to_string(), + "main hint(edited) #1".to_string(), + "main hint(edited) #2".to_string(), + "main hint(edited) #3".to_string(), + "main hint(edited) #4".to_string(), + "main hint(edited) #5".to_string(), + "other hint(edited) #0".to_string(), + "other hint(edited) #1".to_string(), + "other hint(edited) #2".to_string(), + "other hint(edited) #3".to_string(), + ]; + assert_eq!( + expected_hints, + sorted_cached_hint_labels(editor, cx), + "After multibuffer edit, editor gets scrolled back to the last selection; \ + all hints should be invalidated and required for all of its visible excerpts" + ); + assert_eq!( + expected_hints, + visible_hint_labels(editor, cx), + "All excerpts should get their hints" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_editing_in_multi_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": format!("fn main() {{\n{}\n}}", (0..200).map(|i| format!("let i = {i};\n")).collect::>().join("")), + "lib.rs": r#"let a = 1; +let b = 2; +let c = 3;"# + }), + ) + .await; + + let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let language = rust_lang(); + language_registry.add(language); + + let closure_ranges_fetched = lsp_request_ranges.clone(); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new(move |fake_server| { + let closure_ranges_fetched = closure_ranges_fetched.clone(); + fake_server.set_request_handler::( + move |params, _| { + let closure_ranges_fetched = closure_ranges_fetched.clone(); + async move { + let prefix = if params.text_document.uri + == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap() + { + closure_ranges_fetched + .lock() + .push(("main.rs", params.range)); + "main.rs" + } else if params.text_document.uri + == lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap() + { + closure_ranges_fetched.lock().push(("lib.rs", 
params.range)); + "lib.rs" + } else { + panic!("Unexpected file path {:?}", params.text_document.uri); + }; + Ok(Some( + (params.range.start.line..params.range.end.line) + .map(|row| lsp::InlayHint { + position: lsp::Position::new(row, 0), + label: lsp::InlayHintLabel::String(format!( + "{prefix} Inlay hint #{row}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }) + .collect(), + )) + } + }, + ); + })), + ..FakeLspAdapter::default() + }, + ); + + let (buffer_1, _handle_1) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let (buffer_2, _handle_2) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/lib.rs"), cx) + }) + .await + .unwrap(); + let multi_buffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + // Have first excerpt to spawn over 2 chunks (50 lines each). + ExcerptRange::new(Point::new(49, 0)..Point::new(53, 0)), + // Have 2nd excerpt to be in the 2nd chunk only. + ExcerptRange::new(Point::new(70, 0)..Point::new(73, 0)), + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange::new(Point::new(0, 0)..Point::new(4, 0))], + cx, + ); + multibuffer + }); + + let editor = cx.add_window(|window, cx| { + let mut editor = + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx); + editor.change_selections(SelectionEffects::default(), window, cx, |s| { + s.select_ranges([0..0]) + }); + editor + }); + + let _fake_server = fake_servers.next().await.unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + vec![ + ( + "lib.rs", + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(2, 10)) + ), + ( + "main.rs", + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(50, 0)) + ), + ( + "main.rs", + lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 11)) + ), + ], + lsp_request_ranges + .lock() + .drain(..) 
+ .sorted_by_key(|(prefix, r)| (prefix.to_owned(), r.start)) + .collect::>(), + "For large buffers, should query chunks that cover both visible excerpt" + ); + editor + .update(cx, |editor, _window, cx| { + assert_eq!( + (0..2) + .map(|i| format!("lib.rs Inlay hint #{i}")) + .chain((0..100).map(|i| format!("main.rs Inlay hint #{i}"))) + .collect::>(), + sorted_cached_hint_labels(editor, cx), + "Both chunks should provide their inlay hints" + ); + assert_eq!( + vec![ + "main.rs Inlay hint #49".to_owned(), + "main.rs Inlay hint #50".to_owned(), + "main.rs Inlay hint #51".to_owned(), + "main.rs Inlay hint #52".to_owned(), + "main.rs Inlay hint #53".to_owned(), + "main.rs Inlay hint #70".to_owned(), + "main.rs Inlay hint #71".to_owned(), + "main.rs Inlay hint #72".to_owned(), + "main.rs Inlay hint #73".to_owned(), + "lib.rs Inlay hint #0".to_owned(), + "lib.rs Inlay hint #1".to_owned(), + ], + visible_hint_labels(editor, cx), + "Only hints from visible excerpt should be added into the editor" + ); }) .unwrap(); - editor_edited.store(true, Ordering::Release); editor .update(cx, |editor, window, cx| { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Point::new(57, 0)..Point::new(57, 0)]) - }); - editor.handle_input("++++more text++++", window, cx); + editor.handle_input("a", window, cx); }) .unwrap(); + cx.executor().advance_clock(Duration::from_millis(1000)); cx.executor().run_until_parked(); + assert_eq!( + vec![ + ( + "lib.rs", + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(2, 10)) + ), + ( + "main.rs", + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(50, 0)) + ), + ( + "main.rs", + lsp::Range::new(lsp::Position::new(50, 0), lsp::Position::new(100, 11)) + ), + ], + lsp_request_ranges + .lock() + .drain(..) 
+ .sorted_by_key(|(prefix, r)| (prefix.to_owned(), r.start))
+ .collect::<Vec<_>>(),
+ "Same chunks should be re-queried on edit"
+ );
editor
.update(cx, |editor, _window, cx| {
- let expected_hints = vec![
- "main hint #0".to_string(),
- "main hint #1".to_string(),
- "main hint #2".to_string(),
- "main hint #3".to_string(),
- "main hint #4".to_string(),
- "main hint #5".to_string(),
- "other hint(edited) #0".to_string(),
- "other hint(edited) #1".to_string(),
- ];
assert_eq!(
- expected_hints,
- sorted_cached_hint_labels(editor),
- "After multibuffer edit, editor gets scrolled back to the last selection; \
- all hints should be invalidated and required for all of its visible excerpts"
+ (0..2)
+ .map(|i| format!("lib.rs Inlay hint #{i}"))
+ .chain((0..100).map(|i| format!("main.rs Inlay hint #{i}")))
+ .collect::<Vec<_>>(),
+ sorted_cached_hint_labels(editor, cx),
+ "Same hints should be re-inserted after the edit"
+ );
+ assert_eq!(
+ vec![
+ "main.rs Inlay hint #49".to_owned(),
+ "main.rs Inlay hint #50".to_owned(),
+ "main.rs Inlay hint #51".to_owned(),
+ "main.rs Inlay hint #52".to_owned(),
+ "main.rs Inlay hint #53".to_owned(),
+ "main.rs Inlay hint #70".to_owned(),
+ "main.rs Inlay hint #71".to_owned(),
+ "main.rs Inlay hint #72".to_owned(),
+ "main.rs Inlay hint #73".to_owned(),
+ "lib.rs Inlay hint #0".to_owned(),
+ "lib.rs Inlay hint #1".to_owned(),
+ ],
+ visible_hint_labels(editor, cx),
+ "Same hints should be re-inserted into the editor after the edit"
);
- assert_eq!(expected_hints, visible_hint_labels(editor, cx));
})
.unwrap();
}
@@ -2911,9 +2840,9 @@ pub mod tests {
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
inlay_hint_provider: Some(lsp::OneOf::Left(true)),
- ..Default::default()
+ ..lsp::ServerCapabilities::default()
},
- ..Default::default()
+ ..FakeLspAdapter::default()
},
);
@@ -3018,18 +2947,29 @@ pub mod tests {
})
.next()
.await;
+ cx.executor().advance_clock(Duration::from_millis(100));
cx.executor().run_until_parked();
editor
.update(cx, |editor, _, cx| {
assert_eq!(
- vec!["main hint #0".to_string(), "other hint #0".to_string()],
- sorted_cached_hint_labels(editor),
- "Cache should update for both excerpts despite hints display was disabled"
+ vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ "other hint #0".to_string(),
+ "other hint #1".to_string(),
+ "other hint #2".to_string(),
+ "other hint #3".to_string(),
+ ],
+ sorted_cached_hint_labels(editor, cx),
+ "Cache should update for both excerpts despite hint display being disabled; after selecting the 2nd buffer, it's now registered with the language server and should get its hints"
+ );
+ assert_eq!(
+ Vec::<String>::new(),
+ visible_hint_labels(editor, cx),
+ "All hints are disabled and should not be shown despite being present in the cache"
);
- assert!(
- visible_hint_labels(editor, cx).is_empty(),
- "All hints are disabled and should not be shown despite being present in the cache"
- );
})
.unwrap();
@@ -3044,9 +2984,14 @@ pub mod tests {
editor
.update(cx, |editor, _, cx| {
assert_eq!(
- vec!["main hint #0".to_string()],
- cached_hint_labels(editor),
- "For the removed excerpt, should clean corresponding cached hints"
+ vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ ],
+ cached_hint_labels(editor, cx),
+ "For the removed excerpt, should clean up the corresponding cached hints, as its buffer was dropped"
);
assert!(
visible_hint_labels(editor, cx).is_empty(),
@@ -3071,16
+3016,22 @@ pub mod tests { cx.executor().run_until_parked(); editor .update(cx, |editor, _, cx| { - let expected_hints = vec!["main hint #0".to_string()]; assert_eq!( - expected_hints, - cached_hint_labels(editor), + vec![ + "main hint #0".to_string(), + "main hint #1".to_string(), + "main hint #2".to_string(), + "main hint #3".to_string(), + ], + cached_hint_labels(editor, cx), "Hint display settings change should not change the cache" ); assert_eq!( - expected_hints, + vec![ + "main hint #0".to_string(), + ], visible_hint_labels(editor, cx), - "Settings change should make cached hints visible" + "Settings change should make cached hints visible, but only the visible ones, from the remaining excerpt" ); }) .unwrap(); @@ -3121,7 +3072,7 @@ pub mod tests { FakeLspAdapter { capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() + ..lsp::ServerCapabilities::default() }, initializer: Some(Box::new(move |fake_server| { let lsp_request_count = Arc::new(AtomicU32::new(0)); @@ -3148,7 +3099,7 @@ pub mod tests { }, ); })), - ..Default::default() + ..FakeLspAdapter::default() }, ); @@ -3173,7 +3124,7 @@ pub mod tests { editor .update(cx, |editor, _, cx| { let expected_hints = vec!["1".to_string()]; - assert_eq!(expected_hints, cached_hint_labels(editor)); + assert_eq!(expected_hints, cached_hint_labels(editor, cx)); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); }) .unwrap(); @@ -3206,7 +3157,7 @@ pub mod tests { lsp::Uri::from_file_path(file_with_hints).unwrap(), ); - let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1; + let i = lsp_request_count.fetch_add(1, Ordering::AcqRel) + 1; Ok(Some(vec![lsp::InlayHint { position: lsp::Position::new(0, i), label: lsp::InlayHintLabel::String(i.to_string()), @@ -3235,7 +3186,7 @@ pub mod tests { let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Should display inlays after toggle despite them disabled in settings" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -3250,11 +3201,16 @@ pub mod tests { cx.executor().run_until_parked(); editor .update(cx, |editor, _, cx| { - assert!( - cached_hint_labels(editor).is_empty(), + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Cache does not change because of toggles in the editor" + ); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), "Should clear hints after 2nd toggle" ); - assert!(visible_hint_labels(editor, cx).is_empty()); }) .unwrap(); @@ -3274,11 +3230,11 @@ pub mod tests { cx.executor().run_until_parked(); editor .update(cx, |editor, _, cx| { - let expected_hints = vec!["2".to_string()]; + let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), - "Should query LSP hints for the 2nd time after enabling hints in settings" + cached_hint_labels(editor, cx), + "Should not query LSP hints after enabling hints in settings, as file version is the same" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); }) @@ -3292,11 +3248,16 @@ pub mod tests { cx.executor().run_until_parked(); editor .update(cx, |editor, _, cx| { - assert!( - cached_hint_labels(editor).is_empty(), + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Cache does not change because of toggles in the editor" + ); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), "Should clear hints after enabling in settings and a 3rd toggle" ); - 
assert!(visible_hint_labels(editor, cx).is_empty()); }) .unwrap(); @@ -3307,16 +3268,242 @@ pub mod tests { .unwrap(); cx.executor().run_until_parked(); editor.update(cx, |editor, _, cx| { - let expected_hints = vec!["3".to_string()]; + let expected_hints = vec!["1".to_string()]; assert_eq!( expected_hints, - cached_hint_labels(editor), - "Should query LSP hints for the 3rd time after enabling hints in settings and toggling them back on" + cached_hint_labels(editor,cx), + "Should not query LSP hints after enabling hints in settings and toggling them back on" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); }).unwrap(); } + #[gpui::test] + async fn test_modifiers_change(cx: &mut gpui::TestAppContext) { + init_test(cx, |settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + show_value_hints: Some(true), + enabled: Some(true), + edit_debounce_ms: Some(0), + scroll_debounce_ms: Some(0), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + show_background: Some(false), + toggle_on_modifiers_press: None, + }) + }); + + let (_, editor, _fake_server) = prepare_test_objects(cx, |fake_server, file_with_hints| { + let lsp_request_count = Arc::new(AtomicU32::new(0)); + fake_server.set_request_handler::( + move |params, _| { + let lsp_request_count = lsp_request_count.clone(); + async move { + assert_eq!( + params.text_document.uri, + lsp::Uri::from_file_path(file_with_hints).unwrap(), + ); + + let i = lsp_request_count.fetch_add(1, Ordering::AcqRel) + 1; + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, i), + label: lsp::InlayHintLabel::String(i.to_string()), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + }) + .await; + + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Should display inlays after toggle despite them disabled in settings" + ); + assert_eq!(vec!["1".to_string()], visible_hint_labels(editor, cx)); + }) + .unwrap(); + + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(true), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Nothing happens with the cache on modifiers change" + ); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), + "On modifiers change and hints toggled on, should hide editor inlays" + ); + }) + .unwrap(); + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(true), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!(vec!["1".to_string()], cached_hint_labels(editor, cx)); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), + "Nothing changes on consequent modifiers change of the same kind" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "When modifiers change is off, no extra requests are sent" + ); + assert_eq!( + vec!["1".to_string()], + visible_hint_labels(editor, cx), + "When modifiers 
change is off, hints are back into the editor" + ); + }) + .unwrap(); + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!(vec!["1".to_string()], cached_hint_labels(editor, cx)); + assert_eq!( + vec!["1".to_string()], + visible_hint_labels(editor, cx), + "Nothing changes on consequent modifiers change of the same kind (2)" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, window, cx| { + editor.toggle_inlay_hints(&crate::ToggleInlayHints, window, cx) + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Nothing happens with the cache on modifiers change" + ); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), + "When toggled off, should hide editor inlays" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(true), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "Nothing happens with the cache on modifiers change" + ); + assert_eq!( + vec!["1".to_string()], + visible_hint_labels(editor, cx), + "On modifiers change & hints toggled off, should show editor inlays" + ); + }) + .unwrap(); + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(true), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!(vec!["1".to_string()], cached_hint_labels(editor, cx)); + assert_eq!( + vec!["1".to_string()], + visible_hint_labels(editor, cx), + "Nothing changes on consequent modifiers change of the same kind" + ); + }) + .unwrap(); + + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!( + vec!["1".to_string()], + cached_hint_labels(editor, cx), + "When modifiers change is off, no extra requests are sent" + ); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), + "When modifiers change is off, editor hints are back into their toggled off state" + ); + }) + .unwrap(); + editor + .update(cx, |editor, _, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::ModifiersChanged(false), cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + editor + .update(cx, |editor, _, cx| { + assert_eq!(vec!["1".to_string()], cached_hint_labels(editor, cx)); + assert_eq!( + Vec::::new(), + visible_hint_labels(editor, cx), + "Nothing changes on consequent modifiers change of the same kind (3)" + ); + }) + .unwrap(); + } + #[gpui::test] async fn test_inlays_at_the_same_place(cx: &mut gpui::TestAppContext) { init_test(cx, |settings| { @@ -3463,7 +3650,7 @@ pub mod tests { ]; assert_eq!( expected_hints, - cached_hint_labels(editor), + cached_hint_labels(editor, cx), "Editor inlay hints should repeat server's order when placed at the same spot" ); assert_eq!(expected_hints, visible_hint_labels(editor, cx)); @@ -3511,10 +3698,10 @@ pub mod tests { FakeLspAdapter { capabilities: lsp::ServerCapabilities { inlay_hint_provider: Some(lsp::OneOf::Left(true)), - ..Default::default() + 
..lsp::ServerCapabilities::default() }, initializer: Some(Box::new(move |server| initialize(server, file_path))), - ..Default::default() + ..FakeLspAdapter::default() }, ); @@ -3529,7 +3716,7 @@ pub mod tests { editor .update(cx, |editor, _, cx| { - assert!(cached_hint_labels(editor).is_empty()); + assert!(cached_hint_labels(editor, cx).is_empty()); assert!(visible_hint_labels(editor, cx).is_empty()); }) .unwrap(); @@ -3541,30 +3728,35 @@ pub mod tests { // Inlay hints in the cache are stored per excerpt as a key, and those keys are guaranteed to be ordered same as in the multi buffer. // Ensure a stable order for testing. - fn sorted_cached_hint_labels(editor: &Editor) -> Vec { - let mut labels = cached_hint_labels(editor); - labels.sort(); + fn sorted_cached_hint_labels(editor: &Editor, cx: &mut App) -> Vec { + let mut labels = cached_hint_labels(editor, cx); + labels.sort_by(|a, b| natural_sort(a, b)); labels } - pub fn cached_hint_labels(editor: &Editor) -> Vec { - let mut labels = Vec::new(); - for excerpt_hints in editor.inlay_hint_cache().hints.values() { - let excerpt_hints = excerpt_hints.read(); - for id in &excerpt_hints.ordered_hints { - let hint = &excerpt_hints.hints_by_id[id]; - let mut label = hint.text().to_string(); - if hint.padding_left { - label.insert(0, ' '); - } - if hint.padding_right { - label.push_str(" "); - } - labels.push(label); - } + pub fn cached_hint_labels(editor: &Editor, cx: &mut App) -> Vec { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + + let mut all_cached_labels = Vec::new(); + let mut all_fetched_hints = Vec::new(); + for buffer in editor.buffer.read(cx).all_buffers() { + lsp_store.update(cx, |lsp_store, cx| { + let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); + all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| { + let mut label = hint.text().to_string(); + if hint.padding_left { + label.insert(0, ' '); + } + if hint.padding_right { + label.push_str(" "); + } + label + })); + all_fetched_hints.extend(hints.all_fetched_hints()); + }); } - labels + all_cached_labels } pub fn visible_hint_labels(editor: &Editor, cx: &Context) -> Vec { @@ -3574,4 +3766,13 @@ pub mod tests { .map(|hint| hint.text().to_string()) .collect() } + + fn allowed_hint_kinds_for_editor(editor: &Editor) -> HashSet> { + editor + .inlay_hints + .as_ref() + .unwrap() + .allowed_hint_kinds + .clone() + } } diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f56e7dbaf87fa05e1423f639c7473259c8fc956c..0ac8a509554549815b3c43750c517da8954e0e54 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -42,7 +42,7 @@ use ui::{IconDecorationKind, prelude::*}; use util::{ResultExt, TryFutureExt, paths::PathExt}; use workspace::{ CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, - invalid_buffer_view::InvalidBufferView, + invalid_item_view::InvalidItemView, item::{FollowableItem, Item, ItemBufferKind, ItemEvent, ProjectItem, SaveOptions}, searchable::{ Direction, FilteredSearchRange, SearchEvent, SearchableItem, SearchableItemHandle, @@ -226,7 +226,7 @@ impl FollowableItem for Editor { Some(proto::view::Variant::Editor(proto::view::Editor { singleton: buffer.is_singleton(), - title: (!buffer.is_singleton()).then(|| buffer.title(cx).into()), + title: buffer.explicit_title().map(ToOwned::to_owned), excerpts, scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)), scroll_x: scroll_anchor.offset.x, @@ -364,10 +364,9 @@ impl 
FollowableItem for Editor { ) { let buffer = self.buffer.read(cx); let buffer = buffer.read(cx); - let Some((excerpt_id, _, _)) = buffer.as_singleton() else { + let Some(position) = buffer.as_singleton_anchor(location) else { return; }; - let position = buffer.anchor_in_excerpt(*excerpt_id, location).unwrap(); let selection = Selection { id: 0, reversed: false, @@ -595,7 +594,7 @@ impl Item for Editor { cx: &mut Context, ) -> bool { if let Ok(data) = data.downcast::() { - let newest_selection = self.selections.newest::(cx); + let newest_selection = self.selections.newest::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let offset = if buffer.can_resolve(&data.cursor_anchor) { data.cursor_anchor.to_point(&buffer) @@ -939,8 +938,9 @@ impl Item for Editor { fn breadcrumbs(&self, variant: &Theme, cx: &App) -> Option> { let cursor = self.selections.newest_anchor().head(); let multibuffer = &self.buffer().read(cx); - let (buffer_id, symbols) = - multibuffer.symbols_containing(cursor, Some(variant.syntax()), cx)?; + let (buffer_id, symbols) = multibuffer + .read(cx) + .symbols_containing(cursor, Some(variant.syntax()))?; let buffer = multibuffer.buffer(buffer_id)?; let buffer = buffer.read(cx); @@ -1080,12 +1080,17 @@ impl SerializableItem for Editor { } } Ok(None) => { - return Task::ready(Err(anyhow!("No path or contents found for buffer"))); + return Task::ready(Err(anyhow!( + "Unable to deserialize editor: No entry in database for item_id: {item_id} and workspace_id {workspace_id:?}" + ))); } Err(error) => { return Task::ready(Err(error)); } }; + log::debug!( + "Deserialized editor {item_id:?} in workspace {workspace_id:?}, {serialized_editor:?}" + ); match serialized_editor { SerializedEditor { @@ -1113,7 +1118,8 @@ impl SerializableItem for Editor { // First create the empty buffer let buffer = project .update(cx, |project, cx| project.create_buffer(true, cx))? - .await?; + .await + .context("Failed to create buffer while deserializing editor")?; // Then set the text so that the dirty bit is set correctly buffer.update(cx, |buffer, cx| { @@ -1155,7 +1161,9 @@ impl SerializableItem for Editor { match opened_buffer { Some(opened_buffer) => { window.spawn(cx, async move |cx| { - let (_, buffer) = opened_buffer.await?; + let (_, buffer) = opened_buffer + .await + .context("Failed to open path in project")?; // This is a bit wasteful: we're loading the whole buffer from // disk and then overwrite the content. @@ -1221,7 +1229,8 @@ impl SerializableItem for Editor { } => window.spawn(cx, async move |cx| { let buffer = project .update(cx, |project, cx| project.create_buffer(true, cx))? 
- .await?; + .await + .context("Failed to create buffer")?; cx.update(|window, cx| { cx.new(|cx| { @@ -1384,8 +1393,8 @@ impl ProjectItem for Editor { e: &anyhow::Error, window: &mut Window, cx: &mut App, - ) -> Option { - Some(InvalidBufferView::new(abs_path, is_local, e, window, cx)) + ) -> Option { + Some(InvalidItemView::new(abs_path, is_local, e, window, cx)) } } @@ -1540,13 +1549,13 @@ impl SearchableItem for Editor { fn query_suggestion(&mut self, window: &mut Window, cx: &mut Context) -> String { let setting = EditorSettings::get_global(cx).seed_search_query_from_cursor; let snapshot = self.snapshot(window, cx); - let snapshot = snapshot.buffer_snapshot(); - let selection = self.selections.newest_adjusted(cx); + let selection = self.selections.newest_adjusted(&snapshot.display_snapshot); + let buffer_snapshot = snapshot.buffer_snapshot(); match setting { SeedQuerySetting::Never => String::new(), SeedQuerySetting::Selection | SeedQuerySetting::Always if !selection.is_empty() => { - let text: String = snapshot + let text: String = buffer_snapshot .text_for_range(selection.start..selection.end) .collect(); if text.contains('\n') { @@ -1557,10 +1566,10 @@ impl SearchableItem for Editor { } SeedQuerySetting::Selection => String::new(), SeedQuerySetting::Always => { - let (range, kind) = - snapshot.surrounding_word(selection.start, Some(CharScopeContext::Completion)); + let (range, kind) = buffer_snapshot + .surrounding_word(selection.start, Some(CharScopeContext::Completion)); if kind == Some(CharKind::Word) { - let text: String = snapshot.text_for_range(range).collect(); + let text: String = buffer_snapshot.text_for_range(range).collect(); if !text.trim().is_empty() { return text; } diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index 4f1313797f97b1d482effced60b6843541c9e3a7..c883ec14fb4c50a11fb4dfba1031baebf4637f11 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -48,7 +48,7 @@ pub(super) fn refresh_linked_ranges( window: &mut Window, cx: &mut Context, ) -> Option<()> { - if editor.pending_rename.is_some() { + if editor.ignore_lsp_data() || editor.pending_rename.is_some() { return None; } let project = editor.project()?.downgrade(); @@ -59,7 +59,7 @@ pub(super) fn refresh_linked_ranges( let mut applicable_selections = Vec::new(); editor .update(cx, |editor, cx| { - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&editor.display_snapshot(cx)); let snapshot = editor.buffer.read(cx).snapshot(cx); let buffer = editor.buffer.read(cx); for selection in selections { diff --git a/crates/editor/src/lsp_colors.rs b/crates/editor/src/lsp_colors.rs index 4d703d219f88cb10566c9e02faa76cc12408b677..050363f219ee5579a73cf168cce82778df8810ab 100644 --- a/crates/editor/src/lsp_colors.rs +++ b/crates/editor/src/lsp_colors.rs @@ -2,19 +2,19 @@ use std::{cmp, ops::Range}; use collections::HashMap; use futures::future::join_all; -use gpui::{Hsla, Rgba}; +use gpui::{Hsla, Rgba, Task}; use itertools::Itertools; use language::point_from_lsp; use multi_buffer::Anchor; -use project::{DocumentColor, lsp_store::LspFetchStrategy}; +use project::{DocumentColor, InlayId}; use settings::Settings as _; use text::{Bias, BufferId, OffsetRangeExt as _}; use ui::{App, Context, Window}; use util::post_inc; use crate::{ - DisplayPoint, Editor, EditorSettings, EditorSnapshot, InlayId, InlaySplice, RangeToAnchorExt, - display_map::Inlay, 
editor_settings::DocumentColorsRenderMode, + DisplayPoint, Editor, EditorSettings, EditorSnapshot, FETCH_COLORS_DEBOUNCE_TIMEOUT, + InlaySplice, RangeToAnchorExt, editor_settings::DocumentColorsRenderMode, inlays::Inlay, }; #[derive(Debug)] @@ -143,14 +143,13 @@ impl LspColorData { } impl Editor { - pub(super) fn refresh_colors( + pub(super) fn refresh_colors_for_visible_range( &mut self, - ignore_cache: bool, buffer_id: Option, _: &Window, cx: &mut Context, ) { - if !self.mode().is_full() { + if self.ignore_lsp_data() { return; } let Some(project) = self.project.clone() else { @@ -165,11 +164,13 @@ impl Editor { } let visible_buffers = self - .visible_excerpts(None, cx) + .visible_excerpts(cx) .into_values() .map(|(buffer, ..)| buffer) .filter(|editor_buffer| { - buffer_id.is_none_or(|buffer_id| buffer_id == editor_buffer.read(cx).remote_id()) + let editor_buffer_id = editor_buffer.read(cx).remote_id(); + buffer_id.is_none_or(|buffer_id| buffer_id == editor_buffer_id) + && self.registered_buffers.contains_key(&editor_buffer_id) }) .unique_by(|buffer| buffer.read(cx).remote_id()) .collect::>(); @@ -179,21 +180,25 @@ impl Editor { .into_iter() .filter_map(|buffer| { let buffer_id = buffer.read(cx).remote_id(); - let fetch_strategy = if ignore_cache { - LspFetchStrategy::IgnoreCache - } else { - LspFetchStrategy::UseCache { - known_cache_version: self.colors.as_ref().and_then(|colors| { - Some(colors.buffer_colors.get(&buffer_id)?.cache_version_used) - }), - } - }; - let colors_task = lsp_store.document_colors(fetch_strategy, buffer, cx)?; + let known_cache_version = self.colors.as_ref().and_then(|colors| { + Some(colors.buffer_colors.get(&buffer_id)?.cache_version_used) + }); + let colors_task = lsp_store.document_colors(known_cache_version, buffer, cx)?; Some(async move { (buffer_id, colors_task.await) }) }) .collect::>() }); - cx.spawn(async move |editor, cx| { + + if all_colors_task.is_empty() { + self.refresh_colors_task = Task::ready(()); + return; + } + + self.refresh_colors_task = cx.spawn(async move |editor, cx| { + cx.background_executor() + .timer(FETCH_COLORS_DEBOUNCE_TIMEOUT) + .await; + let all_colors = join_all(all_colors_task).await; if all_colors.is_empty() { return; @@ -246,25 +251,14 @@ impl Editor { { continue; } - let Some(color_start_anchor) = multi_buffer_snapshot - .anchor_in_excerpt( - *excerpt_id, - buffer_snapshot.anchor_before( - buffer_snapshot - .clip_point_utf16(color_start, Bias::Left), - ), - ) - else { - continue; - }; - let Some(color_end_anchor) = multi_buffer_snapshot - .anchor_in_excerpt( - *excerpt_id, - buffer_snapshot.anchor_after( - buffer_snapshot - .clip_point_utf16(color_end, Bias::Right), - ), - ) + let start = buffer_snapshot.anchor_before( + buffer_snapshot.clip_point_utf16(color_start, Bias::Left), + ); + let end = buffer_snapshot.anchor_after( + buffer_snapshot.clip_point_utf16(color_end, Bias::Right), + ); + let Some(range) = multi_buffer_snapshot + .anchor_range_in_excerpt(*excerpt_id, start..end) else { continue; }; @@ -280,16 +274,14 @@ impl Editor { new_buffer_colors.binary_search_by(|(probe, _)| { probe .start - .cmp(&color_start_anchor, &multi_buffer_snapshot) + .cmp(&range.start, &multi_buffer_snapshot) .then_with(|| { - probe.end.cmp( - &color_end_anchor, - &multi_buffer_snapshot, - ) + probe + .end + .cmp(&range.end, &multi_buffer_snapshot) }) }); - new_buffer_colors - .insert(i, (color_start_anchor..color_end_anchor, color)); + new_buffer_colors.insert(i, (range, color)); break; } } @@ -408,8 +400,7 @@ impl Editor { } if 
colors.render_mode == DocumentColorsRenderMode::Inlay - && (!colors_splice.to_insert.is_empty() - || !colors_splice.to_remove.is_empty()) + && !colors_splice.is_empty() { editor.splice_inlays(&colors_splice.to_remove, colors_splice.to_insert, cx); updated = true; @@ -420,7 +411,6 @@ impl Editor { } }) .ok(); - }) - .detach(); + }); } } diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 3d8bbb36103f2e82ce421c9ba83dea0bd6396780..7c83113f7837565efc59889e74bf397b392c516b 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -11,6 +11,7 @@ use gpui::{Context, DismissEvent, Entity, Focusable as _, Pixels, Point, Subscri use std::ops::Range; use text::PointUtf16; use workspace::OpenInTerminal; +use zed_actions::agent::AddSelectionToThread; #[derive(Debug)] pub enum MenuPosition { @@ -154,7 +155,7 @@ pub fn deploy_context_menu( return; } - let display_map = editor.selections.display_map(cx); + let display_map = editor.display_snapshot(cx); let source_anchor = display_map.display_point_to_anchor(point, text::Bias::Right); let context_menu = if let Some(custom) = editor.custom_context_menu.take() { let menu = custom(editor, point, window, cx); @@ -169,8 +170,8 @@ pub fn deploy_context_menu( return; }; - let display_map = editor.selections.display_map(cx); let snapshot = editor.snapshot(window, cx); + let display_map = editor.display_snapshot(cx); let buffer = snapshot.buffer_snapshot(); let anchor = buffer.anchor_before(point.to_point(&display_map)); if !display_ranges(&display_map, &editor.selections).any(|r| r.contains(&point)) { @@ -185,7 +186,7 @@ pub fn deploy_context_menu( let has_reveal_target = editor.target_file(cx).is_some(); let has_selections = editor .selections - .all::(cx) + .all::(&display_map) .into_iter() .any(|s| !s.is_empty()); let has_git_repo = buffer @@ -233,6 +234,7 @@ pub fn deploy_context_menu( quick_launch: false, }), ) + .action("Add to Agent Thread", Box::new(AddSelectionToThread)) .separator() .action("Cut", Box::new(Cut)) .action("Copy", Box::new(Copy)) diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 486a14e3741989c1632e361e6ae6324d697cf2c7..418fa4fcb442b1de133972457497c0e592e77d15 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -872,7 +872,7 @@ mod tests { use super::*; use crate::{ Buffer, DisplayMap, DisplayRow, ExcerptRange, FoldPlaceholder, MultiBuffer, - display_map::Inlay, + inlays::Inlay, test::{editor_test_context::EditorTestContext, marked_display_snapshot}, }; use gpui::{AppContext as _, font, px}; diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs deleted file mode 100644 index 2d4710a8d44a023f0c3206ad0c327a34c36fdac4..0000000000000000000000000000000000000000 --- a/crates/editor/src/proposed_changes_editor.rs +++ /dev/null @@ -1,516 +0,0 @@ -use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SelectionEffects, SemanticsProvider}; -use buffer_diff::BufferDiff; -use collections::HashSet; -use futures::{channel::mpsc, future::join_all}; -use gpui::{App, Entity, EventEmitter, Focusable, Render, Subscription, Task}; -use language::{Buffer, BufferEvent, Capability}; -use multi_buffer::{ExcerptRange, MultiBuffer}; -use project::Project; -use smol::stream::StreamExt; -use std::{any::TypeId, ops::Range, rc::Rc, time::Duration}; -use text::ToOffset; -use ui::{ButtonLike, KeyBinding, prelude::*}; -use workspace::{ - Item, ItemHandle as _, 
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, - item::SaveOptions, searchable::SearchableItemHandle, -}; - -pub struct ProposedChangesEditor { - editor: Entity, - multibuffer: Entity, - title: SharedString, - buffer_entries: Vec, - _recalculate_diffs_task: Task>, - recalculate_diffs_tx: mpsc::UnboundedSender, -} - -pub struct ProposedChangeLocation { - pub buffer: Entity, - pub ranges: Vec>, -} - -struct BufferEntry { - base: Entity, - branch: Entity, - _subscription: Subscription, -} - -pub struct ProposedChangesEditorToolbar { - current_editor: Option>, -} - -struct RecalculateDiff { - buffer: Entity, - debounce: bool, -} - -/// A provider of code semantics for branch buffers. -/// -/// Requests in edited regions will return nothing, but requests in unchanged -/// regions will be translated into the base buffer's coordinates. -struct BranchBufferSemanticsProvider(Rc); - -impl ProposedChangesEditor { - pub fn new( - title: impl Into, - locations: Vec>, - project: Option>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); - let mut this = Self { - editor: cx.new(|cx| { - let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, window, cx); - editor.set_expand_all_diff_hunks(cx); - editor.set_completion_provider(None); - editor.clear_code_action_providers(); - editor.set_semantics_provider( - editor - .semantics_provider() - .map(|provider| Rc::new(BranchBufferSemanticsProvider(provider)) as _), - ); - editor - }), - multibuffer, - title: title.into(), - buffer_entries: Vec::new(), - recalculate_diffs_tx, - _recalculate_diffs_task: cx.spawn_in(window, async move |this, cx| { - let mut buffers_to_diff = HashSet::default(); - while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await { - buffers_to_diff.insert(recalculate_diff.buffer); - - while recalculate_diff.debounce { - cx.background_executor() - .timer(Duration::from_millis(50)) - .await; - let mut had_further_changes = false; - while let Ok(next_recalculate_diff) = recalculate_diffs_rx.try_next() { - let next_recalculate_diff = next_recalculate_diff?; - recalculate_diff.debounce &= next_recalculate_diff.debounce; - buffers_to_diff.insert(next_recalculate_diff.buffer); - had_further_changes = true; - } - if !had_further_changes { - break; - } - } - - let recalculate_diff_futures = this - .update(cx, |this, cx| { - buffers_to_diff - .drain() - .filter_map(|buffer| { - let buffer = buffer.read(cx); - let base_buffer = buffer.base_buffer()?; - let buffer = buffer.text_snapshot(); - let diff = - this.multibuffer.read(cx).diff_for(buffer.remote_id())?; - Some(diff.update(cx, |diff, cx| { - diff.set_base_text_buffer(base_buffer.clone(), buffer, cx) - })) - }) - .collect::>() - }) - .ok()?; - - join_all(recalculate_diff_futures).await; - } - None - }), - }; - this.reset_locations(locations, window, cx); - this - } - - pub fn branch_buffer_for_base(&self, base_buffer: &Entity) -> Option> { - self.buffer_entries.iter().find_map(|entry| { - if &entry.base == base_buffer { - Some(entry.branch.clone()) - } else { - None - } - }) - } - - pub fn set_title(&mut self, title: SharedString, cx: &mut Context) { - self.title = title; - cx.notify(); - } - - pub fn reset_locations( - &mut self, - locations: Vec>, - window: &mut Window, - cx: &mut Context, - ) { - // Undo all branch changes - for entry in &self.buffer_entries { - let base_version = 
entry.base.read(cx).version(); - entry.branch.update(cx, |buffer, cx| { - let undo_counts = buffer - .operations() - .iter() - .filter_map(|(timestamp, _)| { - if !base_version.observed(*timestamp) { - Some((*timestamp, u32::MAX)) - } else { - None - } - }) - .collect(); - buffer.undo_operations(undo_counts, cx); - }); - } - - self.multibuffer.update(cx, |multibuffer, cx| { - multibuffer.clear(cx); - }); - - let mut buffer_entries = Vec::new(); - let mut new_diffs = Vec::new(); - for location in locations { - let branch_buffer; - if let Some(ix) = self - .buffer_entries - .iter() - .position(|entry| entry.base == location.buffer) - { - let entry = self.buffer_entries.remove(ix); - branch_buffer = entry.branch.clone(); - buffer_entries.push(entry); - } else { - branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx)); - new_diffs.push(cx.new(|cx| { - let mut diff = BufferDiff::new(&branch_buffer.read(cx).snapshot(), cx); - let _ = diff.set_base_text_buffer( - location.buffer.clone(), - branch_buffer.read(cx).text_snapshot(), - cx, - ); - diff - })); - buffer_entries.push(BufferEntry { - branch: branch_buffer.clone(), - base: location.buffer.clone(), - _subscription: cx.subscribe(&branch_buffer, Self::on_buffer_event), - }); - } - - self.multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( - branch_buffer, - location - .ranges - .into_iter() - .map(|range| ExcerptRange::new(range)), - cx, - ); - }); - } - - self.buffer_entries = buffer_entries; - self.editor.update(cx, |editor, cx| { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.refresh() - }); - editor.buffer.update(cx, |buffer, cx| { - for diff in new_diffs { - buffer.add_diff(diff, cx) - } - }) - }); - } - - pub fn recalculate_all_buffer_diffs(&self) { - for (ix, entry) in self.buffer_entries.iter().enumerate().rev() { - self.recalculate_diffs_tx - .unbounded_send(RecalculateDiff { - buffer: entry.branch.clone(), - debounce: ix > 0, - }) - .ok(); - } - } - - fn on_buffer_event( - &mut self, - buffer: Entity, - event: &BufferEvent, - _cx: &mut Context, - ) { - if let BufferEvent::Operation { .. 
} = event { - self.recalculate_diffs_tx - .unbounded_send(RecalculateDiff { - buffer, - debounce: true, - }) - .ok(); - } - } -} - -impl Render for ProposedChangesEditor { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - div() - .size_full() - .key_context("ProposedChangesEditor") - .child(self.editor.clone()) - } -} - -impl Focusable for ProposedChangesEditor { - fn focus_handle(&self, cx: &App) -> gpui::FocusHandle { - self.editor.focus_handle(cx) - } -} - -impl EventEmitter for ProposedChangesEditor {} - -impl Item for ProposedChangesEditor { - type Event = EditorEvent; - - fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { - Some(Icon::new(IconName::Diff)) - } - - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - self.title.clone() - } - - fn as_searchable(&self, _: &Entity) -> Option> { - Some(Box::new(self.editor.clone())) - } - - fn act_as_type<'a>( - &'a self, - type_id: TypeId, - self_handle: &'a Entity, - _: &'a App, - ) -> Option { - if type_id == TypeId::of::() { - Some(self_handle.to_any()) - } else if type_id == TypeId::of::() { - Some(self.editor.to_any()) - } else { - None - } - } - - fn added_to_workspace( - &mut self, - workspace: &mut Workspace, - window: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, cx| { - Item::added_to_workspace(editor, workspace, window, cx) - }); - } - - fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { - self.editor - .update(cx, |editor, cx| editor.deactivated(window, cx)); - } - - fn navigate( - &mut self, - data: Box, - window: &mut Window, - cx: &mut Context, - ) -> bool { - self.editor - .update(cx, |editor, cx| Item::navigate(editor, data, window, cx)) - } - - fn set_nav_history( - &mut self, - nav_history: workspace::ItemNavHistory, - window: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, cx| { - Item::set_nav_history(editor, nav_history, window, cx) - }); - } - - fn can_save(&self, cx: &App) -> bool { - self.editor.read(cx).can_save(cx) - } - - fn save( - &mut self, - options: SaveOptions, - project: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.editor.update(cx, |editor, cx| { - Item::save(editor, options, project, window, cx) - }) - } -} - -impl ProposedChangesEditorToolbar { - pub fn new() -> Self { - Self { - current_editor: None, - } - } - - fn get_toolbar_item_location(&self) -> ToolbarItemLocation { - if self.current_editor.is_some() { - ToolbarItemLocation::PrimaryRight - } else { - ToolbarItemLocation::Hidden - } - } -} - -impl Render for ProposedChangesEditorToolbar { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All")); - - match &self.current_editor { - Some(editor) => { - let focus_handle = editor.focus_handle(cx); - let keybinding = - KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, window, cx) - .map(|binding| binding.into_any_element()); - - button_like.children(keybinding).on_click({ - move |_event, window, cx| { - focus_handle.dispatch_action(&ApplyAllDiffHunks, window, cx) - } - }) - } - None => button_like.disabled(true), - } - } -} - -impl EventEmitter for ProposedChangesEditorToolbar {} - -impl ToolbarItemView for ProposedChangesEditorToolbar { - fn set_active_pane_item( - &mut self, - active_pane_item: Option<&dyn workspace::ItemHandle>, - _window: &mut Window, - _cx: &mut Context, - ) -> workspace::ToolbarItemLocation { - 
self.current_editor = - active_pane_item.and_then(|item| item.downcast::()); - self.get_toolbar_item_location() - } -} - -impl BranchBufferSemanticsProvider { - fn to_base( - &self, - buffer: &Entity, - positions: &[text::Anchor], - cx: &App, - ) -> Option> { - let base_buffer = buffer.read(cx).base_buffer()?; - let version = base_buffer.read(cx).version(); - if positions - .iter() - .any(|position| !version.observed(position.timestamp)) - { - return None; - } - Some(base_buffer) - } -} - -impl SemanticsProvider for BranchBufferSemanticsProvider { - fn hover( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>> { - let buffer = self.to_base(buffer, &[position], cx)?; - self.0.hover(&buffer, position, cx) - } - - fn inlay_hints( - &self, - buffer: Entity, - range: Range, - cx: &mut App, - ) -> Option>>> { - let buffer = self.to_base(&buffer, &[range.start, range.end], cx)?; - self.0.inlay_hints(buffer, range, cx) - } - - fn inline_values( - &self, - _: Entity, - _: Range, - _: &mut App, - ) -> Option>>> { - None - } - - fn resolve_inlay_hint( - &self, - hint: project::InlayHint, - buffer: Entity, - server_id: lsp::LanguageServerId, - cx: &mut App, - ) -> Option>> { - let buffer = self.to_base(&buffer, &[], cx)?; - self.0.resolve_inlay_hint(hint, buffer, server_id, cx) - } - - fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { - if let Some(buffer) = self.to_base(buffer, &[], cx) { - self.0.supports_inlay_hints(&buffer, cx) - } else { - false - } - } - - fn document_highlights( - &self, - buffer: &Entity, - position: text::Anchor, - cx: &mut App, - ) -> Option>>> { - let buffer = self.to_base(buffer, &[position], cx)?; - self.0.document_highlights(&buffer, position, cx) - } - - fn definitions( - &self, - buffer: &Entity, - position: text::Anchor, - kind: crate::GotoDefinitionKind, - cx: &mut App, - ) -> Option>>>> { - let buffer = self.to_base(buffer, &[position], cx)?; - self.0.definitions(&buffer, position, kind, cx) - } - - fn range_for_rename( - &self, - _: &Entity, - _: text::Anchor, - _: &mut App, - ) -> Option>>>> { - None - } - - fn perform_rename( - &self, - _: &Entity, - _: text::Anchor, - _: String, - _: &mut App, - ) -> Option>> { - None - } -} diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index dae668a4b4f9cd718d034f45259d3706e515fafb..001be45ab814e1627dc34abbba342272d3e15750 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -494,15 +494,15 @@ impl Editor { let opened_first_time = self.scroll_manager.visible_line_count.is_none(); self.scroll_manager.visible_line_count = Some(lines); if opened_first_time { - cx.spawn_in(window, async move |editor, cx| { + self.post_scroll_update = cx.spawn_in(window, async move |editor, cx| { editor .update_in(cx, |editor, window, cx| { + editor.register_visible_buffers(cx); editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - editor.refresh_colors(false, None, window, cx); + editor.update_lsp_data(None, window, cx); }) - .ok() - }) - .detach() + .ok(); + }); } } @@ -613,8 +613,19 @@ impl Editor { cx, ); - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - self.refresh_colors(false, None, window, cx); + self.post_scroll_update = cx.spawn_in(window, async move |editor, cx| { + cx.background_executor() + .timer(Duration::from_millis(50)) + .await; + editor + .update_in(cx, |editor, window, cx| { + editor.register_visible_buffers(cx); + editor.refresh_colors_for_visible_range(None, window, cx); + 
editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .ok(); + }); + editor_was_scrolled } diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 1d98cb537ab8cc9dcf7aac23e6c43f6c1a26ff0a..3b2ed55df724485ee72e6afbc02c7111817869fb 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -72,7 +72,12 @@ impl Editor { cx: &mut Context, ) { let scroll_margin_rows = self.vertical_scroll_margin() as u32; - let new_screen_top = self.selections.newest_display(cx).head().row().0; + let new_screen_top = self + .selections + .newest_display(&self.display_snapshot(cx)) + .head() + .row() + .0; let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); } @@ -86,7 +91,12 @@ impl Editor { let Some(visible_rows) = self.visible_line_count().map(|count| count as u32) else { return; }; - let new_screen_top = self.selections.newest_display(cx).head().row().0; + let new_screen_top = self + .selections + .newest_display(&self.display_snapshot(cx)) + .head() + .row() + .0; let new_screen_top = new_screen_top.saturating_sub(visible_rows / 2); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); } @@ -101,7 +111,12 @@ impl Editor { let Some(visible_rows) = self.visible_line_count().map(|count| count as u32) else { return; }; - let new_screen_top = self.selections.newest_display(cx).head().row().0; + let new_screen_top = self + .selections + .newest_display(&self.display_snapshot(cx)) + .head() + .row() + .0; let new_screen_top = new_screen_top.saturating_sub(visible_rows.saturating_sub(scroll_margin_rows)); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs index 9130e3cbf879d1b38461a34470b79cc5a50a3cac..28fd9442193bbec663d3f72eaa805214375dd8ca 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -148,7 +148,7 @@ impl Editor { target_top = first_highlighted_row.as_f64(); target_bottom = target_top + 1.; } else { - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); target_top = selections .first() @@ -293,7 +293,7 @@ impl Editor { let scroll_width = ScrollOffset::from(scroll_width); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections.all::(cx); + let selections = self.selections.all::(&display_map); let mut scroll_position = self.scroll_manager.scroll_position(&display_map); let mut target_left; diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 5ab6d25eb9abcfa0846a176f83e2f2620245bb47..ab0e78595310da43b803cd53b9177dec53a37d81 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -35,6 +35,8 @@ pub struct SelectionsCollection { disjoint: Arc<[Selection]>, /// A pending selection, such as when the mouse is being dragged pending: Option, + select_mode: SelectMode, + is_extending: bool, } impl SelectionsCollection { @@ -55,6 +57,8 @@ impl SelectionsCollection { }, mode: SelectMode::Character, }), + select_mode: SelectMode::Character, + is_extending: false, } } @@ -106,7 +110,7 @@ impl SelectionsCollection { if self.pending.is_none() { self.disjoint_anchors_arc() } else { - let all_offset_selections = self.all::(cx); + let all_offset_selections = 
self.all::(&self.display_map(cx)); let buffer = self.buffer(cx); all_offset_selections .into_iter() @@ -125,25 +129,23 @@ impl SelectionsCollection { pub fn pending>( &self, - cx: &mut App, + snapshot: &DisplaySnapshot, ) -> Option> { - let map = self.display_map(cx); - - resolve_selections(self.pending_anchor(), &map).next() + resolve_selections(self.pending_anchor(), &snapshot).next() } pub(crate) fn pending_mode(&self) -> Option { self.pending.as_ref().map(|pending| pending.mode.clone()) } - pub fn all<'a, D>(&self, cx: &mut App) -> Vec> + pub fn all<'a, D>(&self, snapshot: &DisplaySnapshot) -> Vec> where D: 'a + TextDimension + Ord + Sub, { - let map = self.display_map(cx); let disjoint_anchors = &self.disjoint; - let mut disjoint = resolve_selections::(disjoint_anchors.iter(), &map).peekable(); - let mut pending_opt = self.pending::(cx); + let mut disjoint = + resolve_selections::(disjoint_anchors.iter(), &snapshot).peekable(); + let mut pending_opt = self.pending::(&snapshot); iter::from_fn(move || { if let Some(pending) = pending_opt.as_mut() { while let Some(next_selection) = disjoint.peek() { @@ -171,12 +173,11 @@ impl SelectionsCollection { } /// Returns all of the selections, adjusted to take into account the selection line_mode - pub fn all_adjusted(&self, cx: &mut App) -> Vec> { - let mut selections = self.all::(cx); + pub fn all_adjusted(&self, snapshot: &DisplaySnapshot) -> Vec> { + let mut selections = self.all::(&snapshot); if self.line_mode { - let map = self.display_map(cx); for selection in &mut selections { - let new_range = map.expand_to_line(selection.range()); + let new_range = snapshot.expand_to_line(selection.range()); selection.start = new_range.start; selection.end = new_range.end; } @@ -206,11 +207,10 @@ impl SelectionsCollection { } /// Returns the newest selection, adjusted to take into account the selection line_mode - pub fn newest_adjusted(&self, cx: &mut App) -> Selection { - let mut selection = self.newest::(cx); + pub fn newest_adjusted(&self, snapshot: &DisplaySnapshot) -> Selection { + let mut selection = self.newest::(&snapshot); if self.line_mode { - let map = self.display_map(cx); - let new_range = map.expand_to_line(selection.range()); + let new_range = snapshot.expand_to_line(selection.range()); selection.start = new_range.start; selection.end = new_range.end; } @@ -219,53 +219,55 @@ impl SelectionsCollection { pub fn all_adjusted_display( &self, - cx: &mut App, - ) -> (DisplaySnapshot, Vec>) { + display_map: &DisplaySnapshot, + ) -> Vec> { if self.line_mode { - let selections = self.all::(cx); - let map = self.display_map(cx); + let selections = self.all::(&display_map); let result = selections .into_iter() .map(|mut selection| { - let new_range = map.expand_to_line(selection.range()); + let new_range = display_map.expand_to_line(selection.range()); selection.start = new_range.start; selection.end = new_range.end; - selection.map(|point| point.to_display_point(&map)) + selection.map(|point| point.to_display_point(&display_map)) }) .collect(); - (map, result) + result } else { - self.all_display(cx) + self.all_display(display_map) } } - pub fn disjoint_in_range<'a, D>(&self, range: Range, cx: &mut App) -> Vec> + pub fn disjoint_in_range<'a, D>( + &self, + range: Range, + snapshot: &DisplaySnapshot, + ) -> Vec> where D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, { - let map = self.display_map(cx); let start_ix = match self .disjoint - .binary_search_by(|probe| probe.end.cmp(&range.start, map.buffer_snapshot())) + .binary_search_by(|probe| 
probe.end.cmp(&range.start, snapshot.buffer_snapshot())) { Ok(ix) | Err(ix) => ix, }; let end_ix = match self .disjoint - .binary_search_by(|probe| probe.start.cmp(&range.end, map.buffer_snapshot())) + .binary_search_by(|probe| probe.start.cmp(&range.end, snapshot.buffer_snapshot())) { Ok(ix) => ix + 1, Err(ix) => ix, }; - resolve_selections(&self.disjoint[start_ix..end_ix], &map).collect() + resolve_selections(&self.disjoint[start_ix..end_ix], snapshot).collect() } - pub fn all_display(&self, cx: &mut App) -> (DisplaySnapshot, Vec>) { - let map = self.display_map(cx); + pub fn all_display(&self, snapshot: &DisplaySnapshot) -> Vec> { let disjoint_anchors = &self.disjoint; - let mut disjoint = resolve_selections_display(disjoint_anchors.iter(), &map).peekable(); - let mut pending_opt = resolve_selections_display(self.pending_anchor(), &map).next(); - let selections = iter::from_fn(move || { + let mut disjoint = + resolve_selections_display(disjoint_anchors.iter(), &snapshot).peekable(); + let mut pending_opt = resolve_selections_display(self.pending_anchor(), &snapshot).next(); + iter::from_fn(move || { if let Some(pending) = pending_opt.as_mut() { while let Some(next_selection) = disjoint.peek() { if pending.start <= next_selection.end && pending.end >= next_selection.start { @@ -288,8 +290,7 @@ impl SelectionsCollection { disjoint.next() } }) - .collect(); - (map, selections) + .collect() } pub fn newest_anchor(&self) -> &Selection { @@ -302,19 +303,15 @@ impl SelectionsCollection { pub fn newest>( &self, - cx: &mut App, + snapshot: &DisplaySnapshot, ) -> Selection { - let map = self.display_map(cx); - - resolve_selections([self.newest_anchor()], &map) + resolve_selections([self.newest_anchor()], &snapshot) .next() .unwrap() } - pub fn newest_display(&self, cx: &mut App) -> Selection { - let map = self.display_map(cx); - - resolve_selections_display([self.newest_anchor()], &map) + pub fn newest_display(&self, snapshot: &DisplaySnapshot) -> Selection { + resolve_selections_display([self.newest_anchor()], &snapshot) .next() .unwrap() } @@ -329,11 +326,9 @@ impl SelectionsCollection { pub fn oldest>( &self, - cx: &mut App, + snapshot: &DisplaySnapshot, ) -> Selection { - let map = self.display_map(cx); - - resolve_selections([self.oldest_anchor()], &map) + resolve_selections([self.oldest_anchor()], &snapshot) .next() .unwrap() } @@ -345,12 +340,18 @@ impl SelectionsCollection { .unwrap_or_else(|| self.disjoint.first().cloned().unwrap()) } - pub fn first>(&self, cx: &mut App) -> Selection { - self.all(cx).first().unwrap().clone() + pub fn first>( + &self, + snapshot: &DisplaySnapshot, + ) -> Selection { + self.all(snapshot).first().unwrap().clone() } - pub fn last>(&self, cx: &mut App) -> Selection { - self.all(cx).last().unwrap().clone() + pub fn last>( + &self, + snapshot: &DisplaySnapshot, + ) -> Selection { + self.all(snapshot).last().unwrap().clone() } /// Returns a list of (potentially backwards!) ranges representing the selections. @@ -358,9 +359,9 @@ impl SelectionsCollection { #[cfg(any(test, feature = "test-support"))] pub fn ranges>( &self, - cx: &mut App, + snapshot: &DisplaySnapshot, ) -> Vec> { - self.all::(cx) + self.all::(snapshot) .iter() .map(|s| { if s.reversed { @@ -388,6 +389,11 @@ impl SelectionsCollection { .collect() } + /// Attempts to build a selection in the provided `DisplayRow` within the + /// same range as the provided range of `Pixels`. 
+ /// Returns `None` if the range is not empty but it starts past the line's + /// length, meaning that the line isn't long enough to be contained within + /// part of the provided range. pub fn build_columnar_selection( &mut self, display_map: &DisplaySnapshot, @@ -456,6 +462,22 @@ impl SelectionsCollection { pub fn set_line_mode(&mut self, line_mode: bool) { self.line_mode = line_mode; } + + pub fn select_mode(&self) -> &SelectMode { + &self.select_mode + } + + pub fn set_select_mode(&mut self, select_mode: SelectMode) { + self.select_mode = select_mode; + } + + pub fn is_extending(&self) -> bool { + self.is_extending + } + + pub fn set_is_extending(&mut self, is_extending: bool) { + self.is_extending = is_extending; + } } pub struct MutableSelectionsCollection<'a> { @@ -571,7 +593,8 @@ impl<'a> MutableSelectionsCollection<'a> { where T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub + std::marker::Copy, { - let mut selections = self.collection.all(self.cx); + let display_map = self.display_map(); + let mut selections = self.collection.all(&display_map); let mut start = range.start.to_offset(&self.buffer()); let mut end = range.end.to_offset(&self.buffer()); let reversed = if start > end { @@ -590,21 +613,32 @@ impl<'a> MutableSelectionsCollection<'a> { self.select(selections); } - pub fn select(&mut self, mut selections: Vec>) + pub fn select(&mut self, selections: Vec>) where - T: ToOffset + ToPoint + Ord + std::marker::Copy + std::fmt::Debug, + T: ToOffset + std::marker::Copy + std::fmt::Debug, { let buffer = self.buffer.read(self.cx).snapshot(self.cx); + let mut selections = selections + .into_iter() + .map(|selection| selection.map(|it| it.to_offset(&buffer))) + .map(|mut selection| { + if selection.start > selection.end { + mem::swap(&mut selection.start, &mut selection.end); + selection.reversed = true + } + selection + }) + .collect::>(); selections.sort_unstable_by_key(|s| s.start); // Merge overlapping selections. 
let mut i = 1; while i < selections.len() { - if selections[i - 1].end >= selections[i].start { + if selections[i].start <= selections[i - 1].end { let removed = selections.remove(i); if removed.start < selections[i - 1].start { selections[i - 1].start = removed.start; } - if removed.end > selections[i - 1].end { + if selections[i - 1].end < removed.end { selections[i - 1].end = removed.end; } } else { @@ -754,7 +788,7 @@ impl<'a> MutableSelectionsCollection<'a> { ) { let mut changed = false; let display_map = self.display_map(); - let (_, selections) = self.collection.all_display(self.cx); + let selections = self.collection.all_display(&display_map); let selections = selections .into_iter() .map(|selection| { @@ -778,9 +812,10 @@ impl<'a> MutableSelectionsCollection<'a> { ) { let mut changed = false; let snapshot = self.buffer().clone(); + let display_map = self.display_map(); let selections = self .collection - .all::(self.cx) + .all::(&display_map) .into_iter() .map(|selection| { let mut moved_selection = selection.clone(); @@ -948,13 +983,10 @@ impl DerefMut for MutableSelectionsCollection<'_> { } } -fn selection_to_anchor_selection( - selection: Selection, +fn selection_to_anchor_selection( + selection: Selection, buffer: &MultiBufferSnapshot, -) -> Selection -where - T: ToOffset + Ord, -{ +) -> Selection { let end_bias = if selection.start == selection.end { Bias::Right } else { @@ -992,7 +1024,7 @@ fn resolve_selections_point<'a>( }) } -// Panics if passed selections are not in order +/// Panics if passed selections are not in order fn resolve_selections_display<'a>( selections: impl 'a + IntoIterator>, map: &'a DisplaySnapshot, @@ -1024,7 +1056,7 @@ fn resolve_selections_display<'a>( coalesce_selections(selections) } -// Panics if passed selections are not in order +/// Panics if passed selections are not in order pub(crate) fn resolve_selections<'a, D, I>( selections: I, map: &'a DisplaySnapshot, diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 150044391a397cc2c35ffc8a85311c1470668ab1..8d74638e4c2aaf356ffabdeef717b9b105487ee3 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -82,7 +82,7 @@ impl Editor { if !(self.signature_help_state.is_shown() || self.auto_signature_help_enabled(cx)) { return false; } - let newest_selection = self.selections.newest::(cx); + let newest_selection = self.selections.newest::(&self.display_snapshot(cx)); let head = newest_selection.head(); if !newest_selection.is_empty() && head != newest_selection.tail() { @@ -396,13 +396,8 @@ impl SignatureHelpPopover { .shape(IconButtonShape::Square) .style(ButtonStyle::Subtle) .icon_size(IconSize::Small) - .tooltip(move |window, cx| { - ui::Tooltip::for_action( - "Previous Signature", - &crate::SignatureHelpPrevious, - window, - cx, - ) + .tooltip(move |_window, cx| { + ui::Tooltip::for_action("Previous Signature", &crate::SignatureHelpPrevious, cx) }) .on_click(cx.listener(|editor, _, window, cx| { editor.signature_help_prev(&crate::SignatureHelpPrevious, window, cx); @@ -412,8 +407,8 @@ impl SignatureHelpPopover { .shape(IconButtonShape::Square) .style(ButtonStyle::Subtle) .icon_size(IconSize::Small) - .tooltip(move |window, cx| { - ui::Tooltip::for_action("Next Signature", &crate::SignatureHelpNext, window, cx) + .tooltip(move |_window, cx| { + ui::Tooltip::for_action("Next Signature", &crate::SignatureHelpNext, cx) }) .on_click(cx.listener(|editor, _, window, cx| { editor.signature_help_next(&crate::SignatureHelpNext, 
window, cx); diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs index d27e4564057ae9b0827ddec98bb3cfaeaf455211..e39880ddc1f575a7b12f40c5496c75c1f473c6e9 100644 --- a/crates/editor/src/tasks.rs +++ b/crates/editor/src/tasks.rs @@ -14,7 +14,7 @@ impl Editor { return Task::ready(None); }; let (selection, buffer, editor_snapshot) = { - let selection = self.selections.newest_adjusted(cx); + let selection = self.selections.newest_adjusted(&self.display_snapshot(cx)); let Some((buffer, _)) = self .buffer() .read(cx) diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index a3a8d81c64a709b65d8d7a894e338800cdeb71c5..9d1003e8c08b3d725ffa13b90eb0ee405520d8cd 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -108,7 +108,7 @@ pub fn assert_text_with_selections( assert_eq!(editor.text(cx), unmarked_text, "text doesn't match"); let actual = generate_marked_text( &editor.text(cx), - &editor.selections.ranges(cx), + &editor.selections.ranges(&editor.display_snapshot(cx)), marked_text.contains("«"), ); assert_eq!(actual, marked_text, "Selections don't match"); diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 72060a11f07d297f578f933b0f6fd809dc915bb5..3132e2e6d5976754d0bdb7fea312fa152d4c35ac 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -6,6 +6,7 @@ use std::{ }; use anyhow::Result; +use language::rust_lang; use serde_json::json; use crate::{Editor, ToPoint}; @@ -18,7 +19,6 @@ use language::{ point_to_lsp, }; use lsp::{notification, request}; -use multi_buffer::ToPointUtf16; use project::Project; use smol::stream::StreamExt; use workspace::{AppState, Workspace, WorkspaceHandle}; @@ -32,55 +32,6 @@ pub struct EditorLspTestContext { pub buffer_lsp_url: lsp::Uri, } -pub(crate) fn rust_lang() -> Arc { - let language = Language::new( - LanguageConfig { - name: "Rust".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()], - ..Default::default() - }, - Some(tree_sitter_rust::LANGUAGE.into()), - ) - .with_queries(LanguageQueries { - indents: Some(Cow::from(indoc! {r#" - [ - ((where_clause) _ @end) - (field_expression) - (call_expression) - (assignment_expression) - (let_declaration) - (let_chain) - (await_expression) - ] @indent - - (_ "[" "]" @end) @indent - (_ "<" ">" @end) @indent - (_ "{" "}" @end) @indent - (_ "(" ")" @end) @indent"#})), - brackets: Some(Cow::from(indoc! {r#" - ("(" @open ")" @close) - ("[" @open "]" @close) - ("{" @open "}" @close) - ("<" @open ">" @close) - ("\"" @open "\"" @close) - (closure_parameters "|" @open "|" @close)"#})), - text_objects: Some(Cow::from(indoc! 
{r#" - (function_item - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around - "#})), - ..Default::default() - }) - .expect("Could not parse queries"); - Arc::new(language) -} - #[cfg(test)] pub(crate) fn git_commit_lang() -> Arc { Arc::new(Language::new( diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index be59a1a16f80809784fa23330dc593dbe9a37459..c6779d1e564deb57233dd9e4719ca87f8d6a2da1 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -1,5 +1,5 @@ use crate::{ - AnchorRangeExt, DisplayPoint, Editor, MultiBuffer, RowExt, + AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt, display_map::{HighlightKey, ToDisplayPoint}, }; use buffer_diff::DiffHunkStatusKind; @@ -24,6 +24,7 @@ use std::{ atomic::{AtomicUsize, Ordering}, }, }; +use text::Selection; use util::{ assert_set_eq, test::{generate_marked_text, marked_text_ranges}, @@ -264,7 +265,10 @@ impl EditorTestContext { pub fn pixel_position_for(&mut self, display_point: DisplayPoint) -> Point { self.update_editor(|editor, window, cx| { - let newest_point = editor.selections.newest_display(cx).head(); + let newest_point = editor + .selections + .newest_display(&editor.display_snapshot(cx)) + .head(); let pixel_position = editor.pixel_position_of_newest_cursor.unwrap(); let line_height = editor .style() @@ -388,6 +392,23 @@ impl EditorTestContext { #[track_caller] pub fn assert_excerpts_with_selections(&mut self, marked_text: &str) { + let actual_text = self.to_format_multibuffer_as_marked_text(); + let fmt_additional_notes = || { + struct Format<'a, T: std::fmt::Display>(&'a str, &'a T); + + impl std::fmt::Display for Format<'_, T> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "\n\n----- EXPECTED: -----\n\n{}\n\n----- ACTUAL: -----\n\n{}\n\n", + self.0, self.1 + ) + } + } + + Format(marked_text, &actual_text) + }; + let expected_excerpts = marked_text .strip_prefix("[EXCERPT]\n") .unwrap() @@ -408,9 +429,10 @@ impl EditorTestContext { assert!( excerpts.len() == expected_excerpts.len(), - "should have {} excerpts, got {}", + "should have {} excerpts, got {}{}", expected_excerpts.len(), - excerpts.len() + excerpts.len(), + fmt_additional_notes(), ); for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() { @@ -424,18 +446,25 @@ impl EditorTestContext { if !expected_selections.is_empty() { assert!( is_selected, - "excerpt {ix} should be selected. got {:?}", + "excerpt {ix} should contain selections. got {:?}{}", self.editor_state(), + fmt_additional_notes(), ); } else { assert!( !is_selected, - "excerpt {ix} should not be selected, got: {selections:?}", + "excerpt {ix} should not contain selections, got: {selections:?}{}", + fmt_additional_notes(), ); } continue; } - assert!(!is_folded, "excerpt {} should not be folded", ix); + assert!( + !is_folded, + "excerpt {} should not be folded{}", + ix, + fmt_additional_notes() + ); assert_eq!( multibuffer_snapshot .text_for_range(Anchor::range_in_buffer( @@ -444,7 +473,9 @@ impl EditorTestContext { range.context.clone() )) .collect::(), - expected_text + expected_text, + "{}", + fmt_additional_notes(), ); let selections = selections @@ -460,13 +491,38 @@ impl EditorTestContext { .collect::>(); // todo: selections that cross excerpt boundaries.. 
assert_eq!( - selections, expected_selections, - "excerpt {} has incorrect selections", + selections, + expected_selections, + "excerpt {} has incorrect selections{}", ix, + fmt_additional_notes() ); } } + fn to_format_multibuffer_as_marked_text(&mut self) -> FormatMultiBufferAsMarkedText { + let (multibuffer_snapshot, selections, excerpts) = self.update_editor(|editor, _, cx| { + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); + + let selections = editor.selections.disjoint_anchors_arc().to_vec(); + let excerpts = multibuffer_snapshot + .excerpts() + .map(|(e_id, snapshot, range)| { + let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx); + (e_id, snapshot.clone(), range, is_folded) + }) + .collect::>(); + + (multibuffer_snapshot, selections, excerpts) + }); + + FormatMultiBufferAsMarkedText { + multibuffer_snapshot, + selections, + excerpts, + } + } + /// Make an assertion about the editor's text and the ranges and directions /// of its selections using a string containing embedded range markers. /// @@ -537,7 +593,7 @@ impl EditorTestContext { fn editor_selections(&mut self) -> Vec> { self.editor .update(&mut self.cx, |editor, cx| { - editor.selections.all::(cx) + editor.selections.all::(&editor.display_snapshot(cx)) }) .into_iter() .map(|s| { @@ -571,6 +627,63 @@ impl EditorTestContext { } } +struct FormatMultiBufferAsMarkedText { + multibuffer_snapshot: MultiBufferSnapshot, + selections: Vec>, + excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange, bool)>, +} + +impl std::fmt::Display for FormatMultiBufferAsMarkedText { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let Self { + multibuffer_snapshot, + selections, + excerpts, + } = self; + + for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() { + write!(f, "[EXCERPT]\n")?; + if *is_folded { + write!(f, "[FOLDED]\n")?; + } + + let mut text = multibuffer_snapshot + .text_for_range(Anchor::range_in_buffer( + *excerpt_id, + snapshot.remote_id(), + range.context.clone(), + )) + .collect::(); + + let selections = selections + .iter() + .filter(|&s| s.head().excerpt_id == *excerpt_id) + .map(|s| { + let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) + - text::ToOffset::to_offset(&range.context.start, &snapshot); + let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) + - text::ToOffset::to_offset(&range.context.start, &snapshot); + tail..head + }) + .rev() + .collect::>(); + + for selection in selections { + if selection.is_empty() { + text.insert(selection.start, 'ˇ'); + continue; + } + text.insert(selection.end, '»'); + text.insert(selection.start, '«'); + } + + write!(f, "{text}")?; + } + + Ok(()) + } +} + #[track_caller] pub fn assert_state_with_diff( editor: &Entity, @@ -578,9 +691,12 @@ pub fn assert_state_with_diff( expected_diff_text: &str, ) { let (snapshot, selections) = editor.update_in(cx, |editor, window, cx| { + let snapshot = editor.snapshot(window, cx); ( - editor.snapshot(window, cx).buffer_snapshot().clone(), - editor.selections.ranges::(cx), + snapshot.buffer_snapshot().clone(), + editor + .selections + .ranges::(&snapshot.display_snapshot), ) }); diff --git a/crates/eval/Cargo.toml b/crates/eval/Cargo.toml index a0214c76a1c7230e071cbc65c1eadbc44c7d6ca8..30908be1e2fde15c0c32894b266d971b7f0ca54f 100644 --- a/crates/eval/Cargo.toml +++ b/crates/eval/Cargo.toml @@ -18,18 +18,17 @@ name = "explorer" path = "src/explorer.rs" [dependencies] -agent.workspace = true +acp_thread.workspace = true +agent = { workspace = true, 
features = ["eval"] } +agent-client-protocol.workspace = true agent_settings.workspace = true agent_ui.workspace = true anyhow.workspace = true -assistant_tool.workspace = true -assistant_tools.workspace = true async-trait.workspace = true buffer_diff.workspace = true chrono.workspace = true clap.workspace = true client.workspace = true -cloud_llm_client.workspace = true collections.workspace = true debug_adapter_extension.workspace = true dirs.workspace = true @@ -54,13 +53,13 @@ pretty_assertions.workspace = true project.workspace = true prompt_store.workspace = true regex.workspace = true +rand.workspace = true release_channel.workspace = true reqwest_client.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true shellexpand.workspace = true -smol.workspace = true telemetry.workspace = true terminal_view.workspace = true toml.workspace = true @@ -68,4 +67,3 @@ unindent.workspace = true util.workspace = true uuid.workspace = true watch.workspace = true -workspace-hack.workspace = true diff --git a/crates/eval/runner_settings.json b/crates/eval/runner_settings.json index 91f193d7b3359bdc9ca5a2255f0fb51c4484f344..ea2ccb051164c4a6c40aed9d6607db0a8911c5d6 100644 --- a/crates/eval/runner_settings.json +++ b/crates/eval/runner_settings.json @@ -1,7 +1,5 @@ { - "assistant": { - "always_allow_tool_actions": true, - "stream_edits": true, - "version": "2" + "agent": { + "always_allow_tool_actions": true } } diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index 40d8c14f4f7ddc441f31581951ee4d6c26376a04..c5b34a63eec33a45e6d1c75e73fa473f845c5e36 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -61,9 +61,22 @@ struct Args { /// Maximum number of examples to run concurrently. #[arg(long, default_value = "4")] concurrency: usize, + /// Output current environment variables as JSON to stdout + #[arg(long, hide = true)] + printenv: bool, } fn main() { + let args = Args::parse(); + + // This prevents errors showing up in the logs, because + // project::environment::load_shell_environment() calls + // std::env::current_exe().unwrap() --printenv + if args.printenv { + util::shell_env::print_env(); + return; + } + dotenvy::from_filename(CARGO_MANIFEST_DIR.join(".env")).ok(); env_logger::init(); @@ -99,7 +112,6 @@ fn main() { let zed_commit_sha = commit_sha_for_path(&root_dir); let zed_branch_name = git_branch_for_path(&root_dir); - let args = Args::parse(); let languages: HashSet = args.languages.into_iter().collect(); let http_client = Arc::new(ReqwestClient::new()); @@ -126,19 +138,20 @@ fn main() { let mut cumulative_tool_metrics = ToolMetrics::default(); - let agent_model = load_model(&args.model, cx).unwrap(); - let judge_model = load_model(&args.judge_model, cx).unwrap(); - - LanguageModelRegistry::global(cx).update(cx, |registry, cx| { - registry.set_default_model(Some(agent_model.clone()), cx); + let tasks = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry.providers().iter().map(|p| p.authenticate(cx)).collect::>() }); - let auth1 = agent_model.provider.authenticate(cx); - let auth2 = judge_model.provider.authenticate(cx); - cx.spawn(async move |cx| { - auth1.await?; - auth2.await?; + future::join_all(tasks).await; + let judge_model = cx.update(|cx| { + let agent_model = load_model(&args.model, cx).unwrap(); + let judge_model = load_model(&args.judge_model, cx).unwrap(); + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry.set_default_model(Some(agent_model.clone()), cx); + }); + 
judge_model + })?; let mut examples = Vec::new(); @@ -268,7 +281,6 @@ fn main() { future::join_all((0..args.concurrency).map(|_| { let app_state = app_state.clone(); - let model = agent_model.model.clone(); let judge_model = judge_model.model.clone(); let zed_commit_sha = zed_commit_sha.clone(); let zed_branch_name = zed_branch_name.clone(); @@ -283,7 +295,7 @@ fn main() { let result = async { example.setup().await?; let run_output = cx - .update(|cx| example.run(model.clone(), app_state.clone(), cx))? + .update(|cx| example.run(app_state.clone(), cx))? .await?; let judge_output = judge_example( example.clone(), @@ -429,7 +441,6 @@ pub fn init(cx: &mut App) -> Arc { true, cx, ); - assistant_tools::init(client.http_client(), cx); SettingsStore::update_global(cx, |store, cx| { store.set_user_settings(include_str!("../runner_settings.json"), cx) @@ -525,7 +536,6 @@ async fn judge_example( diff_evaluation = judge_output.diff.clone(), thread_evaluation = judge_output.thread, tool_metrics = run_output.tool_metrics, - response_count = run_output.response_count, token_usage = run_output.token_usage, model = model.telemetry_id(), model_provider = model.provider_id().to_string(), diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index c0f0900a6cfa5dd942bd27eed852ee4a52896c2c..84c47766e96948bccfc01f3b4472b5100c4b7b64 100644 --- a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -3,22 +3,24 @@ use std::{ fmt::{self, Debug}, sync::{Arc, Mutex}, time::Duration, + u32, }; use crate::{ ToolMetrics, assertions::{AssertionsReport, RanAssertion, RanAssertionResult}, }; -use agent::{ContextLoadResult, Thread, ThreadEvent}; +use acp_thread::UserMessageId; +use agent::{Thread, ThreadEvent, UserMessageContent}; +use agent_client_protocol as acp; use agent_settings::AgentProfileId; use anyhow::{Result, anyhow}; use async_trait::async_trait; use buffer_diff::DiffHunkStatus; -use cloud_llm_client::CompletionIntent; use collections::HashMap; -use futures::{FutureExt as _, StreamExt, channel::mpsc, select_biased}; +use futures::{FutureExt as _, StreamExt, select_biased}; use gpui::{App, AppContext, AsyncApp, Entity}; -use language_model::{LanguageModel, Role, StopReason}; +use language_model::Role; use util::rel_path::RelPath; pub const THREAD_EVENT_TIMEOUT: Duration = Duration::from_secs(60 * 2); @@ -91,7 +93,6 @@ pub struct ExampleContext { log_prefix: String, agent_thread: Entity, app: AsyncApp, - model: Arc, pub assertions: AssertionsReport, pub tool_metrics: Arc>, } @@ -101,7 +102,6 @@ impl ExampleContext { meta: ExampleMetadata, log_prefix: String, agent_thread: Entity, - model: Arc, app: AsyncApp, ) -> Self { let assertions = AssertionsReport::new(meta.max_assertions); @@ -111,26 +111,11 @@ impl ExampleContext { log_prefix, agent_thread, assertions, - model, app, tool_metrics: Arc::new(Mutex::new(ToolMetrics::default())), } } - pub fn push_user_message(&mut self, text: impl ToString) { - self.app - .update_entity(&self.agent_thread, |thread, cx| { - thread.insert_user_message( - text.to_string(), - ContextLoadResult::default(), - None, - Vec::new(), - cx, - ); - }) - .unwrap(); - } - pub fn assert(&mut self, expected: bool, message: impl ToString) -> Result<()> { let message = message.to_string(); self.log_assertion( @@ -202,156 +187,174 @@ impl ExampleContext { result } - pub async fn run_to_end(&mut self) -> Result { - self.run_turns(u32::MAX).await + pub async fn prompt(&mut self, prompt: impl Into) -> Result { + self.prompt_with_max_turns(prompt, u32::MAX).await } - pub 
async fn run_turn(&mut self) -> Result { - self.run_turns(1).await + pub async fn prompt_with_max_turns( + &mut self, + prompt: impl Into, + max_turns: u32, + ) -> Result { + let content = vec![UserMessageContent::Text(prompt.into())]; + self.run_turns(Some(content), max_turns).await } - pub async fn run_turns(&mut self, iterations: u32) -> Result { - let (mut tx, mut rx) = mpsc::channel(1); + pub async fn proceed_with_max_turns(&mut self, max_turns: u32) -> Result { + self.run_turns(None, max_turns).await + } + async fn run_turns( + &mut self, + prompt: Option>, + max_turns: u32, + ) -> Result { let tool_metrics = self.tool_metrics.clone(); let log_prefix = self.log_prefix.clone(); - let _subscription = self.app.subscribe( - &self.agent_thread, - move |thread, event: &ThreadEvent, cx| match event { - ThreadEvent::ShowError(thread_error) => { - tx.try_send(Err(anyhow!(thread_error.clone()))).ok(); - } - ThreadEvent::Stopped(reason) => match reason { - Ok(StopReason::EndTurn) => { - tx.close_channel(); + + let mut remaining_turns = max_turns; + + let mut event_stream = self.agent_thread.update(&mut self.app, |thread, cx| { + if let Some(prompt) = prompt { + let id = UserMessageId::new(); + thread.send(id, prompt, cx) + } else { + thread.proceed(cx) + } + })??; + + let task = self.app.background_spawn(async move { + let mut messages = Vec::new(); + let mut tool_uses_by_id = HashMap::default(); + while let Some(event) = event_stream.next().await { + match event? { + ThreadEvent::UserMessage(user_message) => { + messages.push(Message { + role: Role::User, + text: user_message.to_markdown(), + tool_use: Vec::new(), + }); } - Ok(StopReason::ToolUse) => { - if thread.read(cx).remaining_turns() == 0 { - tx.close_channel(); + ThreadEvent::AgentThinking(text) | ThreadEvent::AgentText(text) => { + if matches!( + messages.last(), + Some(Message { + role: Role::Assistant, + .. + }) + ) { + messages.last_mut().unwrap().text.push_str(&text); + } else { + messages.push(Message { + role: Role::Assistant, + text, + tool_use: Vec::new(), + }); } } - Ok(StopReason::MaxTokens) => { - tx.try_send(Err(anyhow!("Exceeded maximum tokens"))).ok(); - } - Ok(StopReason::Refusal) => { - tx.try_send(Err(anyhow!("Model refused to generate content"))) - .ok(); - } - Err(err) => { - tx.try_send(Err(anyhow!(err.clone()))).ok(); + ThreadEvent::ToolCall(tool_call) => { + let meta = tool_call.meta.expect("Missing meta field in tool_call"); + let tool_name = meta + .get("tool_name") + .expect("Missing tool_name field in meta") + .as_str() + .expect("Unknown tool_name content in meta"); + + tool_uses_by_id.insert( + tool_call.id, + ToolUse { + name: tool_name.to_string(), + value: tool_call.raw_input.unwrap_or_default(), + }, + ); + if matches!( + tool_call.status, + acp::ToolCallStatus::Completed | acp::ToolCallStatus::Failed + ) { + panic!("Tool call completed without update"); + } } - }, - ThreadEvent::NewRequest - | ThreadEvent::StreamedAssistantText(_, _) - | ThreadEvent::StreamedAssistantThinking(_, _) - | ThreadEvent::UsePendingTools { .. } - | ThreadEvent::CompletionCanceled => {} - ThreadEvent::ToolUseLimitReached => {} - ThreadEvent::ToolFinished { - tool_use_id, - pending_tool_use, - .. 
- } => { - thread.update(cx, |thread, _cx| { - if let Some(tool_use) = pending_tool_use { - let mut tool_metrics = tool_metrics.lock().unwrap(); - if let Some(tool_result) = thread.tool_result(tool_use_id) { - let message = if tool_result.is_error { - format!("✖︎ {}", tool_use.name) - } else { + ThreadEvent::ToolCallUpdate(tool_call_update) => { + if let acp_thread::ToolCallUpdate::UpdateFields(update) = tool_call_update { + if let Some(raw_input) = update.fields.raw_input { + if let Some(tool_use) = tool_uses_by_id.get_mut(&update.id) { + tool_use.value = raw_input; + } + } + + if matches!( + update.fields.status, + Some(acp::ToolCallStatus::Completed | acp::ToolCallStatus::Failed) + ) { + let succeeded = + update.fields.status == Some(acp::ToolCallStatus::Completed); + + let tool_use = tool_uses_by_id + .remove(&update.id) + .expect("Unrecognized tool call completed"); + + let log_message = if succeeded { format!("✔︎ {}", tool_use.name) + } else { + format!("✖︎ {}", tool_use.name) }; - println!("{log_prefix}{message}"); + println!("{log_prefix}{log_message}"); + tool_metrics - .insert(tool_result.tool_name.clone(), !tool_result.is_error); - } else { - let message = - format!("TOOL FINISHED WITHOUT RESULT: {}", tool_use.name); - println!("{log_prefix}{message}"); - tool_metrics.insert(tool_use.name.clone(), true); + .lock() + .unwrap() + .insert(tool_use.name.clone().into(), succeeded); + + if let Some(message) = messages.last_mut() { + message.tool_use.push(tool_use); + } else { + messages.push(Message { + role: Role::Assistant, + text: "".to_string(), + tool_use: vec![tool_use], + }); + } + + remaining_turns -= 1; + if remaining_turns == 0 { + return Ok(messages); + } } } - }); - } - ThreadEvent::InvalidToolInput { .. } => { - println!("{log_prefix} invalid tool input"); - } - ThreadEvent::MissingToolUse { - tool_use_id: _, - ui_text, - } => { - println!("{log_prefix} {ui_text}"); - } - ThreadEvent::ToolConfirmationNeeded => { - panic!( + } + ThreadEvent::ToolCallAuthorization(_) => panic!( "{}Bug: Tool confirmation should not be required in eval", log_prefix - ); - } - ThreadEvent::StreamedCompletion - | ThreadEvent::MessageAdded(_) - | ThreadEvent::MessageEdited(_) - | ThreadEvent::MessageDeleted(_) - | ThreadEvent::SummaryChanged - | ThreadEvent::SummaryGenerated - | ThreadEvent::ProfileChanged - | ThreadEvent::ReceivedTextChunk - | ThreadEvent::StreamedToolUse { .. } - | ThreadEvent::CheckpointChanged - | ThreadEvent::CancelEditing => { - tx.try_send(Ok(())).ok(); - if std::env::var("ZED_EVAL_DEBUG").is_ok() { - println!("{}Event: {:#?}", log_prefix, event); - } - } - }, - ); - - let model = self.model.clone(); - - let message_count_before = self.app.update_entity(&self.agent_thread, |thread, cx| { - thread.set_remaining_turns(iterations); - thread.send_to_model(model, CompletionIntent::UserPrompt, None, cx); - thread.messages().len() - })?; - - loop { - select_biased! 
{ - result = rx.next() => { - if let Some(result) = result { - result?; - } else { - break; + ), + ThreadEvent::Retry(status) => { + println!("{log_prefix} Got retry: {status:?}"); } - } - _ = self.app.background_executor().timer(THREAD_EVENT_TIMEOUT).fuse() => { - anyhow::bail!("Agentic loop stalled - waited {THREAD_EVENT_TIMEOUT:?} without any events"); + ThreadEvent::Stop(stop_reason) => match stop_reason { + acp::StopReason::EndTurn => {} + acp::StopReason::MaxTokens => { + return Err(anyhow!("Exceeded maximum tokens")); + } + acp::StopReason::MaxTurnRequests => { + return Err(anyhow!("Exceeded maximum turn requests")); + } + acp::StopReason::Refusal => { + return Err(anyhow!("Refusal")); + } + acp::StopReason::Cancelled => return Err(anyhow!("Cancelled")), + }, } } - } + Ok(messages) + }); - let messages = self.app.read_entity(&self.agent_thread, |thread, cx| { - let mut messages = Vec::new(); - for message in thread.messages().skip(message_count_before) { - messages.push(Message { - _role: message.role, - text: message.to_message_content(), - tool_use: thread - .tool_uses_for_message(message.id, cx) - .into_iter() - .map(|tool_use| ToolUse { - name: tool_use.name.to_string(), - value: tool_use.input, - }) - .collect(), - }); + select_biased! { + result = task.fuse() => { + Ok(Response::new(result?)) } - messages - })?; - - let response = Response::new(messages); - - Ok(response) + _ = self.app.background_executor().timer(THREAD_EVENT_TIMEOUT).fuse() => { + anyhow::bail!("Agentic loop stalled - waited {THREAD_EVENT_TIMEOUT:?} without any events"); + } + } } pub fn edits(&self) -> HashMap, FileEdits> { @@ -486,7 +489,7 @@ impl Response { Self { messages } } - pub fn expect_tool( + pub fn expect_tool_call( &self, tool_name: &'static str, cx: &mut ExampleContext, @@ -503,8 +506,7 @@ impl Response { }) } - #[allow(dead_code)] - pub fn tool_uses(&self) -> impl Iterator { + pub fn tool_calls(&self) -> impl Iterator { self.messages.iter().flat_map(|msg| &msg.tool_use) } @@ -515,7 +517,7 @@ impl Response { #[derive(Debug)] pub struct Message { - _role: Role, + role: Role, text: String, tool_use: Vec, } diff --git a/crates/eval/src/examples/add_arg_to_trait_method.rs b/crates/eval/src/examples/add_arg_to_trait_method.rs index 41fa7c3dc6361c25868e2bbe73b71010b5d07d80..1692932b3304e07ebce261afb75877400e0493f4 100644 --- a/crates/eval/src/examples/add_arg_to_trait_method.rs +++ b/crates/eval/src/examples/add_arg_to_trait_method.rs @@ -27,14 +27,12 @@ impl Example for AddArgToTraitMethod { async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { const FILENAME: &str = "assistant_tool.rs"; - cx.push_user_message(format!( + let _ = cx.prompt(format!( r#" Add a `window: Option` argument to the `Tool::run` trait method in {FILENAME}, and update all the implementations of the trait and call sites accordingly. 
"# - )); - - let _ = cx.run_to_end().await?; + )).await?; // Adds ignored argument to all but `batch_tool` diff --git a/crates/eval/src/examples/code_block_citations.rs b/crates/eval/src/examples/code_block_citations.rs index 8150d68ac3e54772e35fe52f086fb942d8923ffb..c8ba75e99f019b0b0609743b10573bae712f82cd 100644 --- a/crates/eval/src/examples/code_block_citations.rs +++ b/crates/eval/src/examples/code_block_citations.rs @@ -29,16 +29,19 @@ impl Example for CodeBlockCitations { async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { const FILENAME: &str = "assistant_tool.rs"; - cx.push_user_message(format!( - r#" - Show me the method bodies of all the methods of the `Tool` trait in {FILENAME}. - - Please show each method in a separate code snippet. - "# - )); // Verify that the messages all have the correct formatting. - let texts: Vec = cx.run_to_end().await?.texts().collect(); + let texts: Vec = cx + .prompt(format!( + r#" + Show me the method bodies of all the methods of the `Tool` trait in {FILENAME}. + + Please show each method in a separate code snippet. + "# + )) + .await? + .texts() + .collect(); let closing_fence = format!("\n{FENCE}"); for text in texts.iter() { diff --git a/crates/eval/src/examples/comment_translation.rs b/crates/eval/src/examples/comment_translation.rs index b6c9f7376f05fdc38e9f8128c78eb1761bc59c37..421999893a5a39b3d6f61c22d405bf90528758e7 100644 --- a/crates/eval/src/examples/comment_translation.rs +++ b/crates/eval/src/examples/comment_translation.rs @@ -1,7 +1,7 @@ use crate::example::{Example, ExampleContext, ExampleMetadata, JudgeAssertion}; +use agent::{EditFileMode, EditFileToolInput}; use agent_settings::AgentProfileId; use anyhow::Result; -use assistant_tools::{EditFileMode, EditFileToolInput}; use async_trait::async_trait; pub struct CommentTranslation; @@ -22,30 +22,26 @@ impl Example for CommentTranslation { } async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { - cx.push_user_message(r#" - Edit the following files and translate all their comments to italian, in this exact order: + let response = cx.prompt( + r#" + Edit the following files and translate all their comments to italian, in this exact order: - - font-kit/src/family.rs - - font-kit/src/canvas.rs - - font-kit/src/error.rs - "#); - cx.run_to_end().await?; + - font-kit/src/family.rs + - font-kit/src/canvas.rs + - font-kit/src/error.rs + "# + ).await?; let mut create_or_overwrite_count = 0; - cx.agent_thread().read_with(cx, |thread, cx| { - for message in thread.messages() { - for tool_use in thread.tool_uses_for_message(message.id, cx) { - if tool_use.name == "edit_file" { - let input: EditFileToolInput = serde_json::from_value(tool_use.input)?; - if !matches!(input.mode, EditFileMode::Edit) { - create_or_overwrite_count += 1; - } - } + for tool_call in response.tool_calls() { + if tool_call.name == "edit_file" { + let input = tool_call.parse_input::()?; + if !matches!(input.mode, EditFileMode::Edit) { + create_or_overwrite_count += 1; } } + } - anyhow::Ok(()) - })??; cx.assert_eq(create_or_overwrite_count, 0, "no_creation_or_overwrite")?; Ok(()) diff --git a/crates/eval/src/examples/file_change_notification.rs b/crates/eval/src/examples/file_change_notification.rs index 7879ad6f2ebb782bd4a5620f0fdf562c9aad1360..41ce10cd2240f2e81812a51b2ec581422c102c41 100644 --- a/crates/eval/src/examples/file_change_notification.rs +++ b/crates/eval/src/examples/file_change_notification.rs @@ -48,8 +48,8 @@ impl Example for FileChangeNotificationExample { })?; // Start 
conversation (specific message is not important) - cx.push_user_message("Find all files in this repo"); - cx.run_turn().await?; + cx.prompt_with_max_turns("Find all files in this repo", 1) + .await?; // Edit the README buffer - the model should get a notification on next turn buffer.update(cx, |buffer, cx| { @@ -58,7 +58,7 @@ impl Example for FileChangeNotificationExample { // Run for some more turns. // The model shouldn't thank us for letting it know about the file change. - cx.run_turns(3).await?; + cx.proceed_with_max_turns(3).await?; Ok(()) } diff --git a/crates/eval/src/examples/file_search.rs b/crates/eval/src/examples/file_search.rs index f1a482a41a952e889b6053e90e9e243ed546d2db..7de7a07d19184b473fd2cb5ba29b270431b71a4c 100644 --- a/crates/eval/src/examples/file_search.rs +++ b/crates/eval/src/examples/file_search.rs @@ -1,6 +1,6 @@ +use agent::FindPathToolInput; use agent_settings::AgentProfileId; use anyhow::Result; -use assistant_tools::FindPathToolInput; use async_trait::async_trait; use regex::Regex; @@ -25,18 +25,19 @@ impl Example for FileSearchExample { async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { const FILENAME: &str = "find_replace_file_tool.rs"; - cx.push_user_message(format!( - r#" + + let prompt = format!( + r#" Look at the `{FILENAME}`. I want to implement a card for it. The card should implement the `Render` trait. The card should show a diff. It should be a beautifully presented diff. The card "box" should look like what we show for markdown codeblocks (look at `MarkdownElement`). I want to see a red background for lines that were deleted and a green background for lines that were added. We should have a div per diff line. "# - )); + ); - let response = cx.run_turn().await?; - let tool_use = response.expect_tool("find_path", cx)?; + let response = cx.prompt_with_max_turns(prompt, 1).await?; + let tool_use = response.expect_tool_call("find_path", cx)?; let input = tool_use.parse_input::()?; let glob = input.glob; diff --git a/crates/eval/src/examples/grep_params_escapement.rs b/crates/eval/src/examples/grep_params_escapement.rs index 0532698ba28b45bd8111767eb51ea1336e18fa13..57086a1b9bd217e04072754539ddea20aa38c7a8 100644 --- a/crates/eval/src/examples/grep_params_escapement.rs +++ b/crates/eval/src/examples/grep_params_escapement.rs @@ -1,6 +1,6 @@ +use agent::GrepToolInput; use agent_settings::AgentProfileId; use anyhow::Result; -use assistant_tools::GrepToolInput; use async_trait::async_trait; use crate::example::{Example, ExampleContext, ExampleMetadata}; @@ -36,9 +36,9 @@ impl Example for GrepParamsEscapementExample { } async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { - // cx.push_user_message("How does the precedence/specificity work with Keymap contexts? 
I am seeing that `MessageEditor > Editor` is lower precendence than `Editor` which is surprising to me, but might be how it works"); - cx.push_user_message("Search for files containing the characters `>` or `<`"); - let response = cx.run_turns(2).await?; + let response = cx + .prompt_with_max_turns("Search for files containing the characters `>` or `<`", 2) + .await?; let grep_input = response .find_tool_call("grep") .and_then(|tool_use| tool_use.parse_input::().ok()); diff --git a/crates/eval/src/examples/mod.rs b/crates/eval/src/examples/mod.rs index afe258aa76b1abb5406ce212af4f223c56cb2020..aec1bce07957fb81c17666b3e64b00a1fa47240f 100644 --- a/crates/eval/src/examples/mod.rs +++ b/crates/eval/src/examples/mod.rs @@ -144,9 +144,8 @@ impl Example for DeclarativeExample { } async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { - cx.push_user_message(&self.prompt); let max_turns = self.metadata.max_turns.unwrap_or(1000); - let _ = cx.run_turns(max_turns).await; + let _ = cx.prompt_with_max_turns(&self.prompt, max_turns).await; Ok(()) } diff --git a/crates/eval/src/examples/overwrite_file.rs b/crates/eval/src/examples/overwrite_file.rs index df0b75294c31bf7ff365e96aea18c371b817e710..a4df1e97a3f4d9c66262f8679d93324e53df9d53 100644 --- a/crates/eval/src/examples/overwrite_file.rs +++ b/crates/eval/src/examples/overwrite_file.rs @@ -1,6 +1,6 @@ +use agent::{EditFileMode, EditFileToolInput}; use agent_settings::AgentProfileId; use anyhow::Result; -use assistant_tools::{EditFileMode, EditFileToolInput}; use async_trait::async_trait; use crate::example::{Example, ExampleContext, ExampleMetadata}; @@ -36,17 +36,14 @@ impl Example for FileOverwriteExample { } async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { - let response = cx.run_turns(1).await?; - let file_overwritten = if let Some(tool_use) = response.find_tool_call("edit_file") { - let input = tool_use.parse_input::()?; - match input.mode { - EditFileMode::Edit => false, - EditFileMode::Create | EditFileMode::Overwrite => { - input.path.ends_with("src/language_model_selector.rs") - } + let response = cx.proceed_with_max_turns(1).await?; + let tool_use = response.expect_tool_call("edit_file", cx)?; + let input = tool_use.parse_input::()?; + let file_overwritten = match input.mode { + EditFileMode::Edit => false, + EditFileMode::Create | EditFileMode::Overwrite => { + input.path.ends_with("src/language_model_selector.rs") } - } else { - false }; cx.assert(!file_overwritten, "File should be edited, not overwritten") diff --git a/crates/eval/src/examples/planets.rs b/crates/eval/src/examples/planets.rs index f3a69332d2c544479ca4f367699dc3def4d83370..6b6ca0e3fe75633c49f11f24a24835dc58886a01 100644 --- a/crates/eval/src/examples/planets.rs +++ b/crates/eval/src/examples/planets.rs @@ -1,7 +1,6 @@ +use agent::{AgentTool, OpenTool, TerminalTool}; use agent_settings::AgentProfileId; use anyhow::Result; -use assistant_tool::Tool; -use assistant_tools::{OpenTool, TerminalTool}; use async_trait::async_trait; use crate::example::{Example, ExampleContext, ExampleMetadata, JudgeAssertion}; @@ -24,23 +23,22 @@ impl Example for Planets { } async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> { - cx.push_user_message( - r#" + let response = cx + .prompt( + r#" Make a plain JavaScript web page which renders an animated 3D solar system. Let me drag to rotate the camera around. Do not use npm. 
- "# - .to_string(), - ); - - let response = cx.run_to_end().await?; + "#, + ) + .await?; let mut open_tool_uses = 0; let mut terminal_tool_uses = 0; - for tool_use in response.tool_uses() { - if tool_use.name == OpenTool.name() { + for tool_use in response.tool_calls() { + if tool_use.name == OpenTool::name() { open_tool_uses += 1; - } else if tool_use.name == TerminalTool::NAME { + } else if tool_use.name == TerminalTool::name() { terminal_tool_uses += 1; } } diff --git a/crates/eval/src/examples/threads/overwrite-file.json b/crates/eval/src/examples/threads/overwrite-file.json index ffef258193d7b738f2489a8e047cafd76e2dbd05..392ccde5b8e064bdb9d4a124f38e7a99ca6561f3 100644 --- a/crates/eval/src/examples/threads/overwrite-file.json +++ b/crates/eval/src/examples/threads/overwrite-file.json @@ -116,7 +116,7 @@ ], "tool_results": [ { - "content": "[package]\nname = \"language_model_selector\"\nversion = \"0.1.0\"\nedition.workspace = true\npublish.workspace = true\nlicense = \"GPL-3.0-or-later\"\n\n[lints]\nworkspace = true\n\n[lib]\npath = \"src/language_model_selector.rs\"\n\n[dependencies]\ncollections.workspace = true\nfeature_flags.workspace = true\nfuzzy.workspace = true\ngpui.workspace = true\nlanguage_model.workspace = true\nlog.workspace = true\npicker.workspace = true\nproto.workspace = true\nui.workspace = true\nworkspace-hack.workspace = true\nzed_actions.workspace = true\n", + "content": "[package]\nname = \"language_model_selector\"\nversion = \"0.1.0\"\nedition.workspace = true\npublish.workspace = true\nlicense = \"GPL-3.0-or-later\"\n\n[lints]\nworkspace = true\n\n[lib]\npath = \"src/language_model_selector.rs\"\n\n[dependencies]\ncollections.workspace = true\nfeature_flags.workspace = true\nfuzzy.workspace = true\ngpui.workspace = true\nlanguage_model.workspace = true\nlog.workspace = true\npicker.workspace = true\nproto.workspace = true\nui.workspace = true\n\nzed_actions.workspace = true\n", "is_error": false, "output": null, "tool_use_id": "toolu_019Je2MLfJhpJr93g5igoRAH" diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 208147e2f04b26a7337c071d36f4f687ca0fe184..5317f100456748616dfec63819bc0373aaceb4c1 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -1,37 +1,38 @@ -use agent::{Message, MessageSegment, SerializedThread, ThreadStore}; +use agent::ContextServerRegistry; +use agent_client_protocol as acp; use anyhow::{Context as _, Result, anyhow, bail}; -use assistant_tool::ToolWorkingSet; use client::proto::LspWorkProgress; use futures::channel::mpsc; +use futures::future::Shared; use futures::{FutureExt as _, StreamExt as _, future}; use gpui::{App, AppContext as _, AsyncApp, Entity, Task}; use handlebars::Handlebars; use language::{Buffer, DiagnosticSeverity, OffsetRangeExt as _}; use language_model::{ - LanguageModel, LanguageModelCompletionEvent, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelToolResultContent, MessageContent, Role, TokenUsage, + LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelToolResultContent, MessageContent, Role, TokenUsage, }; -use project::lsp_store::OpenLspBufferHandle; -use project::{DiagnosticSummary, Project, ProjectPath}; +use project::{DiagnosticSummary, Project, ProjectPath, lsp_store::OpenLspBufferHandle}; +use prompt_store::{ProjectContext, WorktreeContext}; +use rand::{distr, prelude::*}; use serde::{Deserialize, Serialize}; -use std::cell::RefCell; -use std::fmt::Write as _; 
-use std::fs; -use std::fs::File; -use std::io::Write as _; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; -use std::time::Duration; +use std::{ + fmt::Write as _, + fs::{self, File}, + io::Write as _, + path::{Path, PathBuf}, + rc::Rc, + sync::{Arc, Mutex}, + time::Duration, +}; use unindent::Unindent as _; -use util::ResultExt as _; -use util::command::new_smol_command; -use util::markdown::MarkdownCodeBlock; +use util::{ResultExt as _, command::new_smol_command, markdown::MarkdownCodeBlock}; -use crate::assertions::{AssertionsReport, RanAssertion, RanAssertionResult}; -use crate::example::{Example, ExampleContext, FailedAssertion, JudgeAssertion}; -use crate::{AgentAppState, ToolMetrics}; +use crate::{ + AgentAppState, ToolMetrics, + assertions::{AssertionsReport, RanAssertion, RanAssertionResult}, + example::{Example, ExampleContext, FailedAssertion, JudgeAssertion}, +}; pub const ZED_REPO_URL: &str = "https://github.com/zed-industries/zed.git"; @@ -57,10 +58,9 @@ pub struct RunOutput { pub diagnostic_summary_after: DiagnosticSummary, pub diagnostics_before: Option, pub diagnostics_after: Option, - pub response_count: usize, pub token_usage: TokenUsage, pub tool_metrics: ToolMetrics, - pub all_messages: String, + pub thread_markdown: String, pub programmatic_assertions: AssertionsReport, } @@ -194,12 +194,7 @@ impl ExampleInstance { .join(self.thread.meta().repo_name()) } - pub fn run( - &self, - model: Arc, - app_state: Arc, - cx: &mut App, - ) -> Task> { + pub fn run(&self, app_state: Arc, cx: &mut App) -> Task> { let project = Project::local( app_state.client.clone(), app_state.node_runtime.clone(), @@ -214,15 +209,6 @@ impl ExampleInstance { project.create_worktree(self.worktree_path(), true, cx) }); - let tools = cx.new(|_| ToolWorkingSet::default()); - let prompt_store = None; - let thread_store = ThreadStore::load( - project.clone(), - tools, - prompt_store, - app_state.prompt_builder.clone(), - cx, - ); let meta = self.thread.meta(); let this = self.clone(); @@ -301,74 +287,62 @@ impl ExampleInstance { // history using undo/redo. std::fs::write(&last_diff_file_path, "")?; - let thread_store = thread_store.await?; - + let thread = cx.update(|cx| { + //todo: Do we want to load rules files here? 
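+                // Each visible worktree is summarized as a WorktreeContext (root name and
+                // absolute path, with no rules file) and collected into the ProjectContext
+                // that the eval thread is constructed with below.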
+ let worktrees = project.read(cx).visible_worktrees(cx).map(|worktree| { + let root_name = worktree.read(cx).root_name_str().into(); + let abs_path = worktree.read(cx).abs_path(); - let thread = - thread_store.update(cx, |thread_store, cx| { - let thread = if let Some(json) = &meta.existing_thread_json { - let serialized = SerializedThread::from_json(json.as_bytes()).expect("Can't read serialized thread"); - thread_store.create_thread_from_serialized(serialized, cx) - } else { - thread_store.create_thread(cx) - }; - thread.update(cx, |thread, cx| { - thread.set_profile(meta.profile_id.clone(), cx); - }); - thread - })?; - - - thread.update(cx, |thread, _cx| { - let mut request_count = 0; - let previous_diff = Rc::new(RefCell::new("".to_string())); - let example_output_dir = this.run_directory.clone(); - let last_diff_file_path = last_diff_file_path.clone(); - let messages_json_file_path = example_output_dir.join("last.messages.json"); - let this = this.clone(); - thread.set_request_callback(move |request, response_events| { - request_count += 1; - let messages_file_path = example_output_dir.join(format!("{request_count}.messages.md")); - let diff_file_path = example_output_dir.join(format!("{request_count}.diff")); - let last_messages_file_path = example_output_dir.join("last.messages.md"); - let request_markdown = RequestMarkdown::new(request); - let response_events_markdown = response_events_to_markdown(response_events); - let dialog = ThreadDialog::new(request, response_events); - let dialog_json = serde_json::to_string_pretty(&dialog.to_combined_request()).unwrap_or_default(); - - let messages = format!("{}\n\n{}", request_markdown.messages, response_events_markdown); - fs::write(&messages_file_path, messages.clone()).expect("failed to write messages file"); - fs::write(&last_messages_file_path, messages).expect("failed to write last messages file"); - fs::write(&messages_json_file_path, dialog_json).expect("failed to write last.messages.json"); - - let diff_result = smol::block_on(this.repository_diff()); - match diff_result { - Ok(diff) => { - if diff != previous_diff.borrow().clone() { - fs::write(&diff_file_path, &diff).expect("failed to write diff file"); - fs::write(&last_diff_file_path, &diff).expect("failed to write last diff file"); - *previous_diff.borrow_mut() = diff; - } - } - Err(err) => { - let error_message = format!("{err:?}"); - fs::write(&diff_file_path, &error_message).expect("failed to write diff error to file"); - fs::write(&last_diff_file_path, &error_message).expect("failed to write last diff file"); - } + WorktreeContext { + root_name, + abs_path, + rules_file: None, } + }).collect::>(); + let project_context = cx.new(|_cx| ProjectContext::new(worktrees, vec![])); + let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + + let thread = if let Some(json) = &meta.existing_thread_json { + let session_id = acp::SessionId( + rand::rng() + .sample_iter(&distr::Alphanumeric) + .take(7) + .map(char::from) + .collect::() + .into(), + ); + + let db_thread = agent::DbThread::from_json(json.as_bytes()).expect("Can't read serialized thread"); + cx.new(|cx| agent::Thread::from_db(session_id, db_thread, project.clone(), project_context, context_server_registry, agent::Templates::new(), cx)) + } else { + cx.new(|cx| agent::Thread::new(project.clone(), project_context, context_server_registry, agent::Templates::new(), None, cx)) + }; - if request_count == 1 { - let tools_file_path = 
example_output_dir.join("tools.md"); - fs::write(tools_file_path, request_markdown.tools).expect("failed to write tools file"); - } + thread.update(cx, |thread, cx| { + thread.add_default_tools(Rc::new(EvalThreadEnvironment { + project: project.clone(), + }), cx); + thread.set_profile(meta.profile_id.clone()); + thread.set_model( + LanguageModelInterceptor::new( + LanguageModelRegistry::read_global(cx).default_model().expect("Missing model").model.clone(), + this.run_directory.clone(), + last_diff_file_path.clone(), + this.run_directory.join("last.messages.json"), + this.worktree_path(), + this.repo_url(), + ), + cx, + ); }); - })?; + + thread + }).unwrap(); let mut example_cx = ExampleContext::new( meta.clone(), this.log_prefix.clone(), thread.clone(), - model.clone(), cx.clone(), ); let result = this.thread.conversation(&mut example_cx).await; @@ -381,7 +355,7 @@ impl ExampleInstance { println!("{}Stopped", this.log_prefix); println!("{}Getting repository diff", this.log_prefix); - let repository_diff = this.repository_diff().await?; + let repository_diff = Self::repository_diff(this.worktree_path(), &this.repo_url()).await?; std::fs::write(last_diff_file_path, &repository_diff)?; @@ -416,34 +390,28 @@ impl ExampleInstance { } thread.update(cx, |thread, _cx| { - let response_count = thread - .messages() - .filter(|message| message.role == language_model::Role::Assistant) - .count(); RunOutput { repository_diff, diagnostic_summary_before, diagnostic_summary_after, diagnostics_before, diagnostics_after, - response_count, - token_usage: thread.cumulative_token_usage(), + token_usage: thread.latest_request_token_usage().unwrap(), tool_metrics: example_cx.tool_metrics.lock().unwrap().clone(), - all_messages: messages_to_markdown(thread.messages()), + thread_markdown: thread.to_markdown(), programmatic_assertions: example_cx.assertions, } }) }) } - async fn repository_diff(&self) -> Result { - let worktree_path = self.worktree_path(); - run_git(&worktree_path, &["add", "."]).await?; + async fn repository_diff(repository_path: PathBuf, repository_url: &str) -> Result { + run_git(&repository_path, &["add", "."]).await?; let mut diff_args = vec!["diff", "--staged"]; - if self.thread.meta().url == ZED_REPO_URL { + if repository_url == ZED_REPO_URL { diff_args.push(":(exclude).rules"); } - run_git(&worktree_path, &diff_args).await + run_git(&repository_path, &diff_args).await } pub async fn judge( @@ -543,7 +511,7 @@ impl ExampleInstance { hbs.register_template_string(judge_thread_prompt_name, judge_thread_prompt) .unwrap(); - let complete_messages = &run_output.all_messages; + let complete_messages = &run_output.thread_markdown; let to_prompt = |assertion: String| { hbs.render( judge_thread_prompt_name, @@ -635,6 +603,273 @@ impl ExampleInstance { } } +struct EvalThreadEnvironment { + project: Entity, +} + +struct EvalTerminalHandle { + terminal: Entity, +} + +impl agent::TerminalHandle for EvalTerminalHandle { + fn id(&self, cx: &AsyncApp) -> Result { + self.terminal.read_with(cx, |term, _cx| term.id().clone()) + } + + fn wait_for_exit(&self, cx: &AsyncApp) -> Result>> { + self.terminal + .read_with(cx, |term, _cx| term.wait_for_exit()) + } + + fn current_output(&self, cx: &AsyncApp) -> Result { + self.terminal + .read_with(cx, |term, cx| term.current_output(cx)) + } +} + +impl agent::ThreadEnvironment for EvalThreadEnvironment { + fn create_terminal( + &self, + command: String, + cwd: Option, + output_byte_limit: Option, + cx: &mut AsyncApp, + ) -> Task>> { + let project = self.project.clone(); 
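+        // Spawn the requested command in the project, wrap it in an `acp_thread::Terminal`
+        // (honoring the optional output byte limit), and hand back a handle the eval
+        // harness can use to await the exit status and read the captured output.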
+ cx.spawn(async move |cx| { + let language_registry = + project.read_with(cx, |project, _cx| project.languages().clone())?; + let id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into()); + let terminal = + acp_thread::create_terminal_entity(command, &[], vec![], cwd.clone(), &project, cx) + .await?; + let terminal = cx.new(|cx| { + acp_thread::Terminal::new( + id, + "", + cwd, + output_byte_limit.map(|limit| limit as usize), + terminal, + language_registry, + cx, + ) + })?; + Ok(Rc::new(EvalTerminalHandle { terminal }) as Rc) + }) + } +} + +struct LanguageModelInterceptor { + model: Arc, + request_count: Arc>, + previous_diff: Arc>, + example_output_dir: PathBuf, + last_diff_file_path: PathBuf, + messages_json_file_path: PathBuf, + repository_path: PathBuf, + repository_url: String, +} + +impl LanguageModelInterceptor { + fn new( + model: Arc, + example_output_dir: PathBuf, + last_diff_file_path: PathBuf, + messages_json_file_path: PathBuf, + repository_path: PathBuf, + repository_url: String, + ) -> Arc { + Arc::new(Self { + model, + request_count: Arc::new(Mutex::new(0)), + previous_diff: Arc::new(Mutex::new("".to_string())), + example_output_dir, + last_diff_file_path, + messages_json_file_path, + repository_path, + repository_url, + }) + } +} + +impl language_model::LanguageModel for LanguageModelInterceptor { + fn id(&self) -> language_model::LanguageModelId { + self.model.id() + } + + fn name(&self) -> language_model::LanguageModelName { + self.model.name() + } + + fn provider_id(&self) -> language_model::LanguageModelProviderId { + self.model.provider_id() + } + + fn provider_name(&self) -> language_model::LanguageModelProviderName { + self.model.provider_name() + } + + fn telemetry_id(&self) -> String { + self.model.telemetry_id() + } + + fn supports_images(&self) -> bool { + self.model.supports_images() + } + + fn supports_tools(&self) -> bool { + self.model.supports_tools() + } + + fn supports_tool_choice(&self, choice: language_model::LanguageModelToolChoice) -> bool { + self.model.supports_tool_choice(choice) + } + + fn max_token_count(&self) -> u64 { + self.model.max_token_count() + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> future::BoxFuture<'static, Result> { + self.model.count_tokens(request, cx) + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> future::BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + language_model::LanguageModelCompletionError, + >, + > { + let stream = self.model.stream_completion(request.clone(), cx); + let request_count = self.request_count.clone(); + let previous_diff = self.previous_diff.clone(); + let example_output_dir = self.example_output_dir.clone(); + let last_diff_file_path = self.last_diff_file_path.clone(); + let messages_json_file_path = self.messages_json_file_path.clone(); + let repository_path = self.repository_path.clone(); + let repository_url = self.repository_url.clone(); + + Box::pin(async move { + let stream = stream.await?; + + let response_events = Arc::new(Mutex::new(Vec::new())); + let request_clone = request.clone(); + + let wrapped_stream = stream.then(move |event| { + let response_events = response_events.clone(); + let request = request_clone.clone(); + let request_count = request_count.clone(); + let previous_diff = previous_diff.clone(); + let example_output_dir = example_output_dir.clone(); + let last_diff_file_path = last_diff_file_path.clone(); + let messages_json_file_path = 
messages_json_file_path.clone(); + let repository_path = repository_path.clone(); + let repository_url = repository_url.clone(); + + async move { + let event_result = match &event { + Ok(ev) => Ok(ev.clone()), + Err(err) => Err(err.to_string()), + }; + response_events.lock().unwrap().push(event_result); + + let should_execute = matches!( + &event, + Ok(LanguageModelCompletionEvent::Stop { .. }) | Err(_) + ); + + if should_execute { + let current_request_count = { + let mut count = request_count.lock().unwrap(); + *count += 1; + *count + }; + + let messages_file_path = + example_output_dir.join(format!("{current_request_count}.messages.md")); + let diff_file_path = + example_output_dir.join(format!("{current_request_count}.diff")); + let last_messages_file_path = example_output_dir.join("last.messages.md"); + + let collected_events = response_events.lock().unwrap().clone(); + let request_markdown = RequestMarkdown::new(&request); + let response_events_markdown = + response_events_to_markdown(&collected_events); + let dialog = ThreadDialog::new(&request, &collected_events); + let dialog_json = + serde_json::to_string_pretty(&dialog.to_combined_request()) + .unwrap_or_default(); + + let messages = format!( + "{}\n\n{}", + request_markdown.messages, response_events_markdown + ); + fs::write(&messages_file_path, messages.clone()) + .expect("failed to write messages file"); + fs::write(&last_messages_file_path, messages) + .expect("failed to write last messages file"); + fs::write(&messages_json_file_path, dialog_json) + .expect("failed to write last.messages.json"); + + // Get repository diff + let diff_result = + ExampleInstance::repository_diff(repository_path, &repository_url) + .await; + + match diff_result { + Ok(diff) => { + let prev_diff = previous_diff.lock().unwrap().clone(); + if diff != prev_diff { + fs::write(&diff_file_path, &diff) + .expect("failed to write diff file"); + fs::write(&last_diff_file_path, &diff) + .expect("failed to write last diff file"); + *previous_diff.lock().unwrap() = diff; + } + } + Err(err) => { + let error_message = format!("{err:?}"); + fs::write(&diff_file_path, &error_message) + .expect("failed to write diff error to file"); + fs::write(&last_diff_file_path, &error_message) + .expect("failed to write last diff file"); + } + } + + if current_request_count == 1 { + let tools_file_path = example_output_dir.join("tools.md"); + fs::write(tools_file_path, request_markdown.tools) + .expect("failed to write tools file"); + } + } + + event + } + }); + + Ok(Box::pin(wrapped_stream) + as futures::stream::BoxStream< + 'static, + Result< + LanguageModelCompletionEvent, + language_model::LanguageModelCompletionError, + >, + >) + }) + } +} + pub fn wait_for_lang_server( project: &Entity, buffer: &Entity, @@ -826,40 +1061,6 @@ pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result { Ok(String::from_utf8(output.stdout)?.trim().to_string()) } -fn messages_to_markdown<'a>(message_iter: impl IntoIterator) -> String { - let mut messages = String::new(); - let mut assistant_message_number: u32 = 1; - - for message in message_iter { - push_role(&message.role, &mut messages, &mut assistant_message_number); - - for segment in &message.segments { - match segment { - MessageSegment::Text(text) => { - messages.push_str(text); - messages.push_str("\n\n"); - } - MessageSegment::Thinking { text, signature } => { - messages.push_str("**Thinking**:\n\n"); - if let Some(sig) = signature { - messages.push_str(&format!("Signature: {}\n\n", sig)); - } - messages.push_str(text); - 
messages.push_str("\n"); - } - MessageSegment::RedactedThinking(items) => { - messages.push_str(&format!( - "**Redacted Thinking**: {} item(s)\n\n", - items.len() - )); - } - } - } - } - - messages -} - fn push_role(role: &Role, buf: &mut String, assistant_message_number: &mut u32) { match role { Role::System => buf.push_str("# ⚙️ SYSTEM\n\n"), diff --git a/crates/explorer_command_injector/Cargo.toml b/crates/explorer_command_injector/Cargo.toml index e929ba6fc824d6fa7a9b2f995828d8081cf2c2a0..8530329358dd5006ca883974c4298ad0787e9d42 100644 --- a/crates/explorer_command_injector/Cargo.toml +++ b/crates/explorer_command_injector/Cargo.toml @@ -25,4 +25,3 @@ windows-core.workspace = true windows-registry = "0.5" [dependencies] -workspace-hack.workspace = true diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 42189f20b3477b4581103807445a397e65dd89eb..59b208cb50ec4183f7a0b8751f85658344d1e742 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -36,7 +36,6 @@ url.workspace = true util.workspace = true wasm-encoder.workspace = true wasmparser.workspace = true -workspace-hack.workspace = true [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index f5296198b06ffeeb83dd21be35d27be6b4387294..1e39ceca58fa8b0da450d98db2d6cc8fb0921f12 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -1,4 +1,4 @@ -use anyhow::{Context as _, Result, bail}; +use anyhow::{Context as _, Result, anyhow, bail}; use collections::{BTreeMap, HashMap}; use fs::Fs; use language::LanguageName; @@ -82,6 +82,8 @@ pub struct ExtensionManifest { #[serde(default)] pub context_servers: BTreeMap, ContextServerManifestEntry>, #[serde(default)] + pub agent_servers: BTreeMap, AgentServerManifestEntry>, + #[serde(default)] pub slash_commands: BTreeMap, SlashCommandManifestEntry>, #[serde(default)] pub snippets: Option, @@ -138,6 +140,48 @@ pub struct LibManifestEntry { pub version: Option, } +#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct AgentServerManifestEntry { + /// Display name for the agent (shown in menus). + pub name: String, + /// Environment variables to set when launching the agent server. + #[serde(default)] + pub env: HashMap, + /// Optional icon path (relative to extension root, e.g., "ai.svg"). + /// Should be a small SVG icon for display in menus. + #[serde(default)] + pub icon: Option, + /// Per-target configuration for archive-based installation. + /// The key format is "{os}-{arch}" where: + /// - os: "darwin" (macOS), "linux", "windows" + /// - arch: "aarch64" (arm64), "x86_64" + /// + /// Example: + /// ```toml + /// [agent_servers.myagent.targets.darwin-aarch64] + /// archive = "https://example.com/myagent-darwin-arm64.zip" + /// cmd = "./myagent" + /// args = ["--serve"] + /// sha256 = "abc123..." # optional + /// ``` + pub targets: HashMap, +} + +#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] +pub struct TargetConfig { + /// URL to download the archive from (e.g., "https://github.com/owner/repo/releases/download/v1.0.0/myagent-darwin-arm64.zip") + pub archive: String, + /// Command to run (e.g., "./myagent" or "./myagent.exe") + pub cmd: String, + /// Command-line arguments to pass to the agent server. + #[serde(default)] + pub args: Vec, + /// Optional SHA-256 hash of the archive for verification. 
+ /// If not provided and the URL is a GitHub release, we'll attempt to fetch it from GitHub. + #[serde(default)] + pub sha256: Option, +} + #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] pub enum ExtensionLibraryKind { Rust, @@ -226,8 +270,9 @@ impl ExtensionManifest { .load(&extension_manifest_path) .await .with_context(|| format!("failed to load {extension_name} extension.toml"))?; - toml::from_str(&manifest_content) - .with_context(|| format!("invalid extension.toml for extension {extension_name}")) + toml::from_str(&manifest_content).map_err(|err| { + anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}") + }) } } } @@ -265,6 +310,7 @@ fn manifest_from_old_manifest( .collect(), language_servers: Default::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: Vec::new(), @@ -297,6 +343,7 @@ mod tests { grammars: BTreeMap::default(), language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: vec![], @@ -403,4 +450,31 @@ mod tests { ); assert!(manifest.allow_exec("docker", &["ps"]).is_err()); // wrong first arg } + #[test] + fn parse_manifest_with_agent_server_archive_launcher() { + let toml_src = r#" +id = "example.agent-server-ext" +name = "Agent Server Example" +version = "1.0.0" +schema_version = 0 + +[agent_servers.foo] +name = "Foo Agent" + +[agent_servers.foo.targets.linux-x86_64] +archive = "https://example.com/agent-linux-x64.tar.gz" +cmd = "./agent" +args = ["--serve"] +"#; + + let manifest: ExtensionManifest = toml::from_str(toml_src).expect("manifest should parse"); + assert_eq!(manifest.id.as_ref(), "example.agent-server-ext"); + assert!(manifest.agent_servers.contains_key("foo")); + let entry = manifest.agent_servers.get("foo").unwrap(); + assert!(entry.targets.contains_key("linux-x86_64")); + let target = entry.targets.get("linux-x86_64").unwrap(); + assert_eq!(target.archive, "https://example.com/agent-linux-x64.tar.gz"); + assert_eq!(target.cmd, "./agent"); + assert_eq!(target.args, vec!["--serve"]); + } } diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index b2909ec6c9c281012f7814a39d5571baadce1bab..b2562a8e82f68b7d4113dec9e01d89183c0a92ec 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -30,4 +30,3 @@ tokio = { workspace = true, features = ["full"] } toml.workspace = true tree-sitter.workspace = true wasmtime.workspace = true -workspace-hack.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 367dba98a32f5e8b0ade64095fbac5cad641b5ad..1dd65fe446232effc932a497601212cd039b6eed 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -235,6 +235,21 @@ async fn copy_extension_resources( .with_context(|| "failed to copy icons")?; } + for (_, agent_entry) in &manifest.agent_servers { + if let Some(icon_path) = &agent_entry.icon { + let source_icon = extension_path.join(icon_path); + let dest_icon = output_dir.join(icon_path); + + // Create parent directory if needed + if let Some(parent) = dest_icon.parent() { + fs::create_dir_all(parent)?; + } + + fs::copy(&source_icon, &dest_icon) + .with_context(|| format!("failed to copy agent server icon '{}'", icon_path))?; + } + } + if !manifest.languages.is_empty() { let output_languages_dir = output_dir.join("languages"); 
fs::create_dir_all(&output_languages_dir)?; diff --git a/crates/extension_host/Cargo.toml b/crates/extension_host/Cargo.toml index ba7f056866f41dbc61e7aea38dac8d8aca35979f..16cbd9ac0c0ef938322f2b57789c7542549a570a 100644 --- a/crates/extension_host/Cargo.toml +++ b/crates/extension_host/Cargo.toml @@ -27,6 +27,7 @@ extension.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true +gpui_tokio.workspace = true http_client.workspace = true language.workspace = true log.workspace = true @@ -51,7 +52,6 @@ util.workspace = true wasmparser.workspace = true wasmtime-wasi.workspace = true wasmtime.workspace = true -workspace-hack.workspace = true [dev-dependencies] criterion.workspace = true diff --git a/crates/extension_host/benches/extension_compilation_benchmark.rs b/crates/extension_host/benches/extension_compilation_benchmark.rs index 6f0897af6edbb38acef305ff03b76569a741aca5..9cb57fc1fb800df3f20d277cff5c85ecddadf5ad 100644 --- a/crates/extension_host/benches/extension_compilation_benchmark.rs +++ b/crates/extension_host/benches/extension_compilation_benchmark.rs @@ -19,6 +19,7 @@ use util::test::TempTree; fn extension_benchmarks(c: &mut Criterion) { let cx = init(); + cx.update(gpui_tokio::init); let mut group = c.benchmark_group("load"); @@ -37,7 +38,7 @@ fn extension_benchmarks(c: &mut Criterion) { |wasm_bytes| { let _extension = cx .executor() - .block(wasm_host.load_extension(wasm_bytes, &manifest, cx.executor())) + .block(wasm_host.load_extension(wasm_bytes, &manifest, &cx.to_async())) .unwrap(); }, BatchSize::SmallInput, @@ -131,6 +132,7 @@ fn manifest() -> ExtensionManifest { .into_iter() .collect(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: vec![ExtensionCapability::ProcessExec( diff --git a/crates/extension_host/src/capability_granter.rs b/crates/extension_host/src/capability_granter.rs index 5491967e080fc4d12a52f0360dab1896b77e19d3..9f27b5e480bc3c22faefe67cd49a06af21614096 100644 --- a/crates/extension_host/src/capability_granter.rs +++ b/crates/extension_host/src/capability_granter.rs @@ -107,6 +107,7 @@ mod tests { grammars: BTreeMap::default(), language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: vec![], diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 855077bcf87c58fb8e751d6477921d7e8bba8ad9..41b7b35d463a520888d4419f141ffdeca332fdac 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -159,6 +159,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { .collect(), language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: Vec::new(), @@ -189,6 +190,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { grammars: BTreeMap::default(), language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), slash_commands: BTreeMap::default(), snippets: None, capabilities: Vec::new(), @@ -368,6 +370,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { grammars: BTreeMap::default(), language_servers: BTreeMap::default(), context_servers: BTreeMap::default(), + agent_servers: BTreeMap::default(), 
slash_commands: BTreeMap::default(), snippets: None, capabilities: Vec::new(), @@ -868,5 +871,6 @@ fn init_test(cx: &mut TestAppContext) { Project::init_settings(cx); ExtensionSettings::register(cx); language::init(cx); + gpui_tokio::init(cx); }); } diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index f77258e8957fa1be7579b931de82fd633a0f6ae4..00e6321fdb5450a08aa331380a5d410652b66582 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -30,12 +30,14 @@ use node_runtime::NodeRuntime; use release_channel::ReleaseChannel; use semantic_version::SemanticVersion; use settings::Settings; -use std::borrow::Cow; -use std::sync::{LazyLock, OnceLock}; -use std::time::Duration; use std::{ + borrow::Cow, path::{Path, PathBuf}, - sync::Arc, + sync::{ + Arc, LazyLock, OnceLock, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, }; use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig}; use util::paths::SanitizedPath; @@ -495,6 +497,11 @@ pub struct WasmState { pub(crate) capability_granter: CapabilityGranter, } +std::thread_local! { + /// Used by the crash handler to ignore panics in extension-related threads. + pub static IS_WASM_THREAD: AtomicBool = const { AtomicBool::new(false) }; +} + type MainThreadCall = Box FnOnce(&'a mut AsyncApp) -> LocalBoxFuture<'a, ()>>; type ExtensionCall = Box< @@ -529,6 +536,7 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine { let engine_ref = engine.weak(); executor .spawn(async move { + IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release)); // Somewhat arbitrary interval, as it isn't a guaranteed interval. // But this is a rough upper bound for how long the extension execution can block on // `Future::poll`. 
@@ -591,11 +599,12 @@ impl WasmHost { self: &Arc, wasm_bytes: Vec, manifest: &Arc, - executor: BackgroundExecutor, + cx: &AsyncApp, ) -> Task> { let this = self.clone(); let manifest = manifest.clone(); - executor.clone().spawn(async move { + let executor = cx.background_executor().clone(); + let load_extension_task = async move { let zed_api_version = parse_wasm_extension_version(&manifest.id, &wasm_bytes)?; let component = Component::from_binary(&this.engine, &wasm_bytes) @@ -632,20 +641,29 @@ impl WasmHost { .context("failed to initialize wasm extension")?; let (tx, mut rx) = mpsc::unbounded::(); - executor - .spawn(async move { - while let Some(call) = rx.next().await { - (call)(&mut extension, &mut store).await; - } - }) - .detach(); + let extension_task = async move { + while let Some(call) = rx.next().await { + (call)(&mut extension, &mut store).await; + } + }; - Ok(WasmExtension { - manifest: manifest.clone(), - work_dir: this.work_dir.join(manifest.id.as_ref()).into(), - tx, - zed_api_version, - }) + anyhow::Ok(( + extension_task, + WasmExtension { + manifest: manifest.clone(), + work_dir: this.work_dir.join(manifest.id.as_ref()).into(), + tx, + zed_api_version, + }, + )) + }; + cx.spawn(async move |cx| { + let (extension_task, extension) = load_extension_task.await?; + // we need to run the task in an extension context as wasmtime_wasi may + // call into tokio, accessing its runtime handle + gpui_tokio::Tokio::spawn(cx, extension_task)?.detach(); + + Ok(extension) }) } @@ -747,7 +765,7 @@ impl WasmExtension { .context("failed to read wasm")?; wasm_host - .load_extension(wasm_bytes, manifest, cx.background_executor().clone()) + .load_extension(wasm_bytes, manifest, cx) .await .with_context(|| format!("failed to load wasm extension {}", manifest.id)) } diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index 26347ec4426c421909e5cd01b73fdc89751f1dfb..6e6eca975d92f9c8cf5eb206f04da5fccc3f097c 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -520,7 +520,7 @@ impl ExtensionImports for WasmState { anyhow::ensure!( response.status().is_success(), "download failed with status {}", - response.status().to_string() + response.status() ); let body = BufReader::new(response.body_mut()); diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index 9942f8aeea04b8c466f5d16fc82ae7545b8bb7b1..8b44efdfb196d93df0a609983c2b97147bbe38a8 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -1051,7 +1051,7 @@ impl ExtensionImports for WasmState { anyhow::ensure!( response.status().is_success(), "download failed with status {}", - response.status().to_string() + response.status() ); let body = BufReader::new(response.body_mut()); diff --git a/crates/extensions_ui/Cargo.toml b/crates/extensions_ui/Cargo.toml index c31483d763d963edbd0e64d5dc26a4aaf2ed6aeb..87c76b684725dd9f88031d70c67bff76670cdcf5 100644 --- a/crates/extensions_ui/Cargo.toml +++ b/crates/extensions_ui/Cargo.toml @@ -38,7 +38,6 @@ theme.workspace = true ui.workspace = true util.workspace = true vim_mode_setting.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/extensions_ui/src/components/extension_card.rs
b/crates/extensions_ui/src/components/extension_card.rs index abdd32fee99cd056e9fece60a2ff7646f55cd264..524f90c7f0e32c0cc60143070c10288c441089e9 100644 --- a/crates/extensions_ui/src/components/extension_card.rs +++ b/crates/extensions_ui/src/components/extension_card.rs @@ -32,14 +32,14 @@ impl RenderOnce for ExtensionCard { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { div().w_full().child( v_flex() + .mt_4() .w_full() - .h(rems(7.)) + .h(rems_from_px(110.)) .p_3() - .mt_4() .gap_2() - .bg(cx.theme().colors().elevated_surface_background) + .bg(cx.theme().colors().elevated_surface_background.opacity(0.5)) .border_1() - .border_color(cx.theme().colors().border) + .border_color(cx.theme().colors().border_variant) .rounded_md() .children(self.children) .when(self.overridden_by_dev_extension, |card| { @@ -51,7 +51,6 @@ impl RenderOnce for ExtensionCard { .block_mouse_except_scroll() .cursor_default() .size_full() - .items_center() .justify_center() .bg(cx.theme().colors().elevated_surface_background.alpha(0.8)) .child(Label::new("Overridden by dev extension.")), diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index dc40bad4e0476f5e714aa24fa3ef4d618d2bdcc9..1fc1384a133946651f16b3b9bdba742c2882b9a8 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -13,8 +13,8 @@ use editor::{Editor, EditorElement, EditorStyle}; use extension_host::{ExtensionManifest, ExtensionOperation, ExtensionStore}; use fuzzy::{StringMatchCandidate, match_strings}; use gpui::{ - Action, App, ClipboardItem, Context, Entity, EventEmitter, Flatten, Focusable, - InteractiveElement, KeyContext, ParentElement, Render, Styled, Task, TextStyle, + Action, App, ClipboardItem, Context, Corner, Entity, EventEmitter, Flatten, Focusable, + InteractiveElement, KeyContext, ParentElement, Point, Render, Styled, Task, TextStyle, UniformListScrollHandle, WeakEntity, Window, actions, point, uniform_list, }; use num_format::{Locale, ToFormattedString}; @@ -66,6 +66,7 @@ pub fn init(cx: &mut App) { ExtensionCategoryFilter::ContextServers => { ExtensionProvides::ContextServers } + ExtensionCategoryFilter::AgentServers => ExtensionProvides::AgentServers, ExtensionCategoryFilter::SlashCommands => ExtensionProvides::SlashCommands, ExtensionCategoryFilter::IndexedDocsProviders => { ExtensionProvides::IndexedDocsProviders @@ -189,6 +190,7 @@ fn extension_provides_label(provides: ExtensionProvides) -> &'static str { ExtensionProvides::Grammars => "Grammars", ExtensionProvides::LanguageServers => "Language Servers", ExtensionProvides::ContextServers => "MCP Servers", + ExtensionProvides::AgentServers => "Agent Servers", ExtensionProvides::SlashCommands => "Slash Commands", ExtensionProvides::IndexedDocsProviders => "Indexed Docs Providers", ExtensionProvides::Snippets => "Snippets", @@ -727,7 +729,7 @@ impl ExtensionsPage { .gap_2() .child( Headline::new(extension.manifest.name.clone()) - .size(HeadlineSize::Medium), + .size(HeadlineSize::Small), ) .child(Headline::new(format!("v{version}")).size(HeadlineSize::XSmall)) .children( @@ -777,20 +779,12 @@ impl ExtensionsPage { h_flex() .gap_2() .justify_between() - .child( - Label::new(format!( - "{}: {}", - if extension.manifest.authors.len() > 1 { - "Authors" - } else { - "Author" - }, - extension.manifest.authors.join(", ") - )) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), - ) + .children(extension.manifest.description.as_ref().map(|description| { + 
Label::new(description.clone()) + .size(LabelSize::Small) + .color(Color::Default) + .truncate() + })) .child( Label::new(format!( "Downloads: {}", @@ -803,21 +797,29 @@ impl ExtensionsPage { h_flex() .gap_2() .justify_between() - .children(extension.manifest.description.as_ref().map(|description| { - Label::new(description.clone()) - .size(LabelSize::Small) - .color(Color::Default) - .truncate() - })) .child( h_flex() - .gap_2() + .gap_1() + .child( + Icon::new(IconName::Person) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(extension.manifest.authors.join(", ")) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), + ), + ) + .child( + h_flex() + .gap_1() .child( IconButton::new( SharedString::from(format!("repository-{}", extension.id)), IconName::Github, ) - .icon_color(Color::Accent) .icon_size(IconSize::Small) .on_click(cx.listener({ let repository_url = repository_url.clone(); @@ -837,9 +839,13 @@ impl ExtensionsPage { SharedString::from(format!("more-{}", extension.id)), IconName::Ellipsis, ) - .icon_color(Color::Accent) .icon_size(IconSize::Small), ) + .anchor(Corner::TopRight) + .offset(Point { + x: px(0.0), + y: px(2.0), + }) .menu(move |window, cx| { Some(Self::render_remote_extension_context_menu( &this, @@ -961,6 +967,11 @@ impl ExtensionsPage { SharedString::from(extension.id.clone()), "Install", ) + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .icon(IconName::Download) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) .on_click({ let extension_id = extension.id.clone(); move |_, _, cx| { @@ -978,6 +989,11 @@ impl ExtensionsPage { SharedString::from(extension.id.clone()), "Install", ) + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .icon(IconName::Download) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) .disabled(true), configure: None, upgrade: None, @@ -987,6 +1003,7 @@ impl ExtensionsPage { SharedString::from(extension.id.clone()), "Uninstall", ) + .style(ButtonStyle::OutlinedGhost) .disabled(true), configure: is_configurable.then(|| { Button::new( @@ -1004,6 +1021,7 @@ impl ExtensionsPage { SharedString::from(extension.id.clone()), "Uninstall", ) + .style(ButtonStyle::OutlinedGhost) .on_click({ let extension_id = extension.id.clone(); move |_, _, cx| { @@ -1020,6 +1038,7 @@ impl ExtensionsPage { SharedString::from(format!("configure-{}", extension.id)), "Configure", ) + .style(ButtonStyle::OutlinedGhost) .on_click({ let extension_id = extension.id.clone(); move |_, _, cx| { @@ -1044,6 +1063,7 @@ impl ExtensionsPage { } else { Some( Button::new(SharedString::from(extension.id.clone()), "Upgrade") + .style(ButtonStyle::Tinted(ui::TintColor::Accent)) .when(!is_compatible, |upgrade_button| { upgrade_button.disabled(true).tooltip({ let version = extension.manifest.version.clone(); @@ -1082,6 +1102,7 @@ impl ExtensionsPage { SharedString::from(extension.id.clone()), "Uninstall", ) + .style(ButtonStyle::OutlinedGhost) .disabled(true), configure: is_configurable.then(|| { Button::new( diff --git a/crates/feature_flags/Cargo.toml b/crates/feature_flags/Cargo.toml index e4cc1e9330b90a5fca3933d86825974740864811..65d6942d501137ba9e84892470876a3755fdb69d 100644 --- a/crates/feature_flags/Cargo.toml +++ b/crates/feature_flags/Cargo.toml @@ -15,4 +15,3 @@ path = "src/feature_flags.rs" futures.workspace = true gpui.workspace = true smol.workspace = true -workspace-hack.workspace = true diff --git a/crates/feedback/Cargo.toml 
b/crates/feedback/Cargo.toml index 34ff692f446c776218cf100b8c912b496a1b41cc..0a53a1b6f38d1af0a6b913d61969d4df105a6a10 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -19,7 +19,6 @@ gpui.workspace = true system_specs.workspace = true urlencoding.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index edb7031f939f117ae5d5ec126a1edec58cb157c3..46257b1f49dc4b5e225373d69576d2f54de8c79e 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -32,7 +32,6 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 979cfa72fffffd0ef9ffc74cec5a8f33aa23488c..d78d789b9b0c8041975da6337620b840896a61f6 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -21,7 +21,9 @@ use gpui::{ }; use open_path_prompt::OpenPathPrompt; use picker::{Picker, PickerDelegate}; -use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId}; +use project::{ + PathMatchCandidateSet, Project, ProjectPath, WorktreeId, worktree_store::WorktreeStore, +}; use search::ToggleIncludeIgnored; use settings::Settings; use std::{ @@ -538,11 +540,14 @@ impl Matches { fn push_new_matches<'a>( &'a mut self, + worktree_store: Entity, + cx: &'a App, history_items: impl IntoIterator + Clone, currently_opened: Option<&'a FoundPath>, query: Option<&FileSearchQuery>, new_search_matches: impl Iterator, extend_old_matches: bool, + path_style: PathStyle, ) { let Some(query) = query else { // assuming that if there's no query, then there's no search matches. 
@@ -556,8 +561,25 @@ impl Matches { .extend(history_items.into_iter().map(path_to_entry)); return; }; - - let new_history_matches = matching_history_items(history_items, currently_opened, query); + // If several worktrees are open, we have to set the worktree root names in the path prefix + let several_worktrees = worktree_store.read(cx).worktrees().count() > 1; + let worktree_name_by_id = several_worktrees.then(|| { + worktree_store + .read(cx) + .worktrees() + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + (snapshot.id(), snapshot.root_name().into()) + }) + .collect() + }); + let new_history_matches = matching_history_items( + history_items, + currently_opened, + worktree_name_by_id, + query, + path_style, + ); let new_search_matches: Vec = new_search_matches .filter(|path_match| { !new_history_matches.contains_key(&ProjectPath { @@ -694,7 +716,9 @@ impl Matches { fn matching_history_items<'a>( history_items: impl IntoIterator, currently_opened: Option<&'a FoundPath>, + worktree_name_by_id: Option>>, query: &FileSearchQuery, + path_style: PathStyle, ) -> HashMap { let mut candidates_paths = HashMap::default(); @@ -734,13 +758,18 @@ fn matching_history_items<'a>( let mut matching_history_paths = HashMap::default(); for (worktree, candidates) in history_items_by_worktrees { let max_results = candidates.len() + 1; + let worktree_root_name = worktree_name_by_id + .as_ref() + .and_then(|w| w.get(&worktree).cloned()); matching_history_paths.extend( fuzzy::match_fixed_path_set( candidates, worktree.to_usize(), + worktree_root_name, query.path_query(), false, max_results, + path_style, ) .into_iter() .filter_map(|path_match| { @@ -866,7 +895,9 @@ impl FileFinderDelegate { let worktrees = self .project .read(cx) - .visible_worktrees(cx) + .worktree_store() + .read(cx) + .visible_worktrees_and_single_files(cx) .collect::>(); let include_root_name = worktrees.len() > 1; let candidate_sets = worktrees @@ -935,15 +966,18 @@ impl FileFinderDelegate { self.matches.get(self.selected_index).cloned() }; + let path_style = self.project.read(cx).path_style(cx); self.matches.push_new_matches( + self.project.read(cx).worktree_store(), + cx, &self.history_items, self.currently_opened_path.as_ref(), Some(&query), matches.into_iter(), extend_old_matches, + path_style, ); - let path_style = self.project.read(cx).path_style(cx); let query_path = query.raw_query.as_str(); if let Ok(mut query_path) = RelPath::new(Path::new(query_path), path_style) { let available_worktree = self @@ -1363,7 +1397,11 @@ impl PickerDelegate for FileFinderDelegate { separate_history: self.separate_history, ..Matches::default() }; + let path_style = self.project.read(cx).path_style(cx); + self.matches.push_new_matches( + project.worktree_store(), + cx, self.history_items.iter().filter(|history_item| { project .worktree_for_id(history_item.project.worktree_id, cx) @@ -1375,6 +1413,7 @@ impl PickerDelegate for FileFinderDelegate { None, None.into_iter(), false, + path_style, ); self.first_update = false; @@ -1624,11 +1663,7 @@ impl PickerDelegate for FileFinderDelegate { ) } - fn render_footer( - &self, - window: &mut Window, - cx: &mut Context>, - ) -> Option { + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { let focus_handle = self.focus_handle.clone(); Some( @@ -1657,12 +1692,11 @@ impl PickerDelegate for FileFinderDelegate { }), { let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Filter Options", &ToggleFilterMenu, &focus_handle, -
window, cx, ) } @@ -1712,14 +1746,13 @@ impl PickerDelegate for FileFinderDelegate { ButtonLike::new("split-trigger") .child(Label::new("Split…")) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .children( + .child( KeyBinding::for_action_in( &ToggleSplitMenu, &focus_handle, - window, cx, ) - .map(|kb| kb.size(rems_from_px(12.))), + .size(rems_from_px(12.)), ), ) .menu({ @@ -1751,13 +1784,8 @@ impl PickerDelegate for FileFinderDelegate { .child( Button::new("open-selection", "Open") .key_binding( - KeyBinding::for_action_in( - &menu::Confirm, - &focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_, window, cx| { window.dispatch_action(menu::Confirm.boxed_clone(), cx) diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs index 8689e0ad1e3df2c90c2c033953f08eb31aff052d..4d826211c70b24c9f9bad7e23b8981fa8cb7bdd0 100644 --- a/crates/file_finder/src/file_finder_settings.rs +++ b/crates/file_finder/src/file_finder_settings.rs @@ -18,7 +18,11 @@ impl Settings for FileFinderSettings { file_icons: file_finder.file_icons.unwrap(), modal_max_width: file_finder.modal_max_width.unwrap().into(), skip_focus_for_active_in_search: file_finder.skip_focus_for_active_in_search.unwrap(), - include_ignored: file_finder.include_ignored, + include_ignored: match file_finder.include_ignored.unwrap() { + settings::IncludeIgnoredContent::All => Some(true), + settings::IncludeIgnoredContent::Indexed => Some(false), + settings::IncludeIgnoredContent::Smart => None, + }, } } } diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 00b47bf44ec0817d0ab37ae9a610316fcdf5d3e1..9670de072a5d7c10c2a82c2e384bd7bc4adcd848 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -8,7 +8,7 @@ use pretty_assertions::{assert_eq, assert_matches}; use project::{FS_WATCH_LATENCY, RemoveOptions}; use serde_json::json; use util::{path, rel_path::rel_path}; -use workspace::{AppState, CloseActiveItem, OpenOptions, ToggleFileFinder, Workspace}; +use workspace::{AppState, CloseActiveItem, OpenOptions, ToggleFileFinder, Workspace, open_paths}; #[ctor::ctor] fn init_logger() { @@ -490,7 +490,7 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) { cx.executor().advance_clock(Duration::from_secs(2)); editor.update(cx, |editor, cx| { - let all_selections = editor.selections.all_adjusted(cx); + let all_selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); assert_eq!( all_selections.len(), 1, @@ -565,7 +565,7 @@ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) { cx.executor().advance_clock(Duration::from_secs(2)); editor.update(cx, |editor, cx| { - let all_selections = editor.selections.all_adjusted(cx); + let all_selections = editor.selections.all_adjusted(&editor.display_snapshot(cx)); assert_eq!( all_selections.len(), 1, @@ -2337,7 +2337,6 @@ async fn test_search_results_refreshed_on_worktree_updates(cx: &mut gpui::TestAp assert_match_at_position(finder, 1, "main.rs"); assert_match_at_position(finder, 2, "rs"); }); - // Delete main.rs app_state .fs @@ -2370,6 +2369,64 @@ async fn test_search_results_refreshed_on_worktree_updates(cx: &mut gpui::TestAp }); } +#[gpui::test] +async fn test_search_results_refreshed_on_standalone_file_creation(cx: &mut 
gpui::TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + "/src", + json!({ + "lib.rs": "// Lib file", + "main.rs": "// Bar file", + "read.me": "// Readme file", + }), + ) + .await; + app_state + .fs + .as_fake() + .insert_tree( + "/test", + json!({ + "new.rs": "// New file", + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + cx.update(|_, cx| { + open_paths( + &[PathBuf::from(path!("/test/new.rs"))], + app_state, + workspace::OpenOptions::default(), + cx, + ) + }) + .await + .unwrap(); + assert_eq!(cx.update(|_, cx| cx.windows().len()), 1); + + let initial_history = open_close_queried_buffer("new", 1, "new.rs", &workspace, cx).await; + assert_eq!( + initial_history.first().unwrap().absolute, + PathBuf::from(path!("/test/new.rs")), + "Should show 1st opened item in the history when opening the 2nd item" + ); + + let history_after_first = open_close_queried_buffer("lib", 1, "lib.rs", &workspace, cx).await; + assert_eq!( + history_after_first.first().unwrap().absolute, + PathBuf::from(path!("/test/new.rs")), + "Should show 1st opened item in the history when opening the 2nd item" + ); +} + #[gpui::test] async fn test_search_results_refreshed_on_adding_and_removing_worktrees( cx: &mut gpui::TestAppContext, @@ -2446,6 +2503,147 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees( }); } +#[gpui::test] +async fn test_history_items_uniqueness_for_multiple_worktree_open_all_files( + cx: &mut TestAppContext, +) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/repo1"), + json!({ + "package.json": r#"{"name": "repo1"}"#, + "src": { + "index.js": "// Repo 1 index", + } + }), + ) + .await; + + app_state + .fs + .as_fake() + .insert_tree( + path!("/repo2"), + json!({ + "package.json": r#"{"name": "repo2"}"#, + "src": { + "index.js": "// Repo 2 index", + } + }), + ) + .await; + + let project = Project::test( + app_state.fs.clone(), + [path!("/repo1").as_ref(), path!("/repo2").as_ref()], + cx, + ) + .await; + + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (worktree_id1, worktree_id2) = cx.read(|cx| { + let worktrees = workspace.read(cx).worktrees(cx).collect::>(); + (worktrees[0].read(cx).id(), worktrees[1].read(cx).id()) + }); + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path( + ProjectPath { + worktree_id: worktree_id1, + path: rel_path("package.json").into(), + }, + None, + true, + window, + cx, + ) + }) + .await + .unwrap(); + + cx.dispatch_action(workspace::CloseActiveItem { + save_intent: None, + close_pinned: false, + }); + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path( + ProjectPath { + worktree_id: worktree_id2, + path: rel_path("package.json").into(), + }, + None, + true, + window, + cx, + ) + }) + .await + .unwrap(); + + cx.dispatch_action(workspace::CloseActiveItem { + save_intent: None, + close_pinned: false, + }); + + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("package.json"); + + picker.update(cx, |finder, _| { + let matches = &finder.delegate.matches.matches; + + assert_eq!( + matches.len(), + 2, + "Expected 1 history match + 1 search matches, but got {} matches: {:?}", + matches.len(), + matches + ); + + assert_matches!(matches[0], Match::History { .. 
}); + + let search_matches = collect_search_matches(finder); + assert_eq!( + search_matches.history.len(), + 2, + "Should have exactly 2 history matches" + ); + assert_eq!( + search_matches.search.len(), + 0, + "Should have exactly 0 search matches (because we already opened both package.json files)" + ); + + if let Match::History { path, panel_match } = &matches[0] { + assert_eq!(path.project.worktree_id, worktree_id2); + assert_eq!(path.project.path.as_ref(), rel_path("package.json")); + let panel_match = panel_match.as_ref().unwrap(); + assert_eq!(panel_match.0.path_prefix, rel_path("repo2").into()); + assert_eq!(panel_match.0.path, rel_path("package.json").into()); + assert_eq!( + panel_match.0.positions, + vec![6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17] + ); + } + + if let Match::History { path, panel_match } = &matches[1] { + assert_eq!(path.project.worktree_id, worktree_id1); + assert_eq!(path.project.path.as_ref(), rel_path("package.json")); + let panel_match = panel_match.as_ref().unwrap(); + assert_eq!(panel_match.0.path_prefix, rel_path("repo1").into()); + assert_eq!(panel_match.0.path, rel_path("package.json").into()); + assert_eq!( + panel_match.0.positions, + vec![6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17] + ); + } + }); +} + #[gpui::test] async fn test_selected_match_stays_selected_after_matches_refreshed(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index b0417b1d13fc4f82c8b16b0ac87249405b6f4129..694ef1eaceb720c3b63d4ca9d243ab73e9442970 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -669,7 +669,7 @@ impl PickerDelegate for OpenPathDelegate { ) -> Option { let settings = FileFinderSettings::get_global(cx); let candidate = self.get_entry(ix)?; - let match_positions = match &self.directory_state { + let mut match_positions = match &self.directory_state { DirectoryState::List { .. } => self.string_matches.get(ix)?.positions.clone(), DirectoryState::Create { user_input, .. } => { if let Some(user_input) = user_input { @@ -710,29 +710,38 @@ impl PickerDelegate for OpenPathDelegate { }); match &self.directory_state { - DirectoryState::List { parent_path, .. } => Some( - ListItem::new(ix) - .spacing(ListItemSpacing::Sparse) - .start_slot::(file_icon) - .inset(true) - .toggle_state(selected) - .child(HighlightedLabel::new( - if parent_path == &self.prompt_root { - format!("{}{}", self.prompt_root, candidate.path.string) - } else if is_current_dir_candidate { - "open this directory".to_string() - } else { - candidate.path.string - }, + DirectoryState::List { parent_path, .. } => { + let (label, indices) = if *parent_path == self.prompt_root { + match_positions.iter_mut().for_each(|position| { + *position += self.prompt_root.len(); + }); + ( + format!("{}{}", self.prompt_root, candidate.path.string), match_positions, - )), - ), + ) + } else if is_current_dir_candidate { + ("open this directory".to_string(), vec![]) + } else { + (candidate.path.string, match_positions) + }; + Some( + ListItem::new(ix) + .spacing(ListItemSpacing::Sparse) + .start_slot::(file_icon) + .inset(true) + .toggle_state(selected) + .child(HighlightedLabel::new(label, indices)), + ) + } DirectoryState::Create { parent_path, user_input, ..
} => { - let (label, delta) = if parent_path == &self.prompt_root { + let (label, delta) = if *parent_path == self.prompt_root { + match_positions.iter_mut().for_each(|position| { + *position += self.prompt_root.len(); + }); ( format!("{}{}", self.prompt_root, candidate.path.string), self.prompt_root.len(), @@ -740,10 +749,10 @@ impl PickerDelegate for OpenPathDelegate { } else { (candidate.path.string.clone(), 0) }; - let label_len = label.len(); let label_with_highlights = match user_input { Some(user_input) => { + let label_len = label.len(); if user_input.file.string == candidate.path.string { if user_input.exists { let label = if user_input.is_dir { @@ -755,7 +764,7 @@ impl PickerDelegate for OpenPathDelegate { .with_default_highlights( &window.text_style(), vec![( - delta..delta + label_len, + delta..label_len, HighlightStyle::color(Color::Conflict.color(cx)), )], ) @@ -765,27 +774,17 @@ impl PickerDelegate for OpenPathDelegate { .with_default_highlights( &window.text_style(), vec![( - delta..delta + label_len, + delta..label_len, HighlightStyle::color(Color::Created.color(cx)), )], ) .into_any_element() } } else { - let mut highlight_positions = match_positions; - highlight_positions.iter_mut().for_each(|position| { - *position += delta; - }); - HighlightedLabel::new(label, highlight_positions).into_any_element() + HighlightedLabel::new(label, match_positions).into_any_element() } } - None => { - let mut highlight_positions = match_positions; - highlight_positions.iter_mut().for_each(|position| { - *position += delta; - }); - HighlightedLabel::new(label, highlight_positions).into_any_element() - } + None => HighlightedLabel::new(label, match_positions).into_any_element(), }; Some( diff --git a/crates/file_icons/Cargo.toml b/crates/file_icons/Cargo.toml index b87827618e8a927f882f177854c41fa90eeebd0b..d45b606e5a8e6ca9c0b20db955e28c0c982c2f38 100644 --- a/crates/file_icons/Cargo.toml +++ b/crates/file_icons/Cargo.toml @@ -17,4 +17,3 @@ gpui.workspace = true serde.workspace = true theme.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 1d4161134ee7ff43c15c450284a570a08d7841cd..d6413cb7a07b5aeb72efea012ae7e00f3493837e 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -33,7 +33,6 @@ tempfile.workspace = true text.workspace = true time.workspace = true util.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "macos")'.dependencies] fsevent.workspace = true diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 2c6db5b53987013d24a3a922e8f3b67adc9d43f5..8e9f8501dbcd4858f709dd5bd08f7f4d65aab986 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -9,16 +9,25 @@ use git::{ AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode, }, - status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus}, + status::{ + DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, + UnmergedStatus, + }, }; -use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task}; +use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task, TaskLabel}; use ignore::gitignore::GitignoreBuilder; use parking_lot::Mutex; use rope::Rope; use smol::future::FutureExt as _; -use std::{path::PathBuf, sync::Arc}; +use std::{ + path::PathBuf, + sync::{Arc, LazyLock}, +}; use util::{paths::PathStyle, 
rel_path::RelPath}; +pub static LOAD_INDEX_TEXT_TASK: LazyLock = LazyLock::new(TaskLabel::new); +pub static LOAD_HEAD_TEXT_TASK: LazyLock = LazyLock::new(TaskLabel::new); + #[derive(Clone)] pub struct FakeGitRepository { pub(crate) fs: Arc, @@ -35,6 +44,9 @@ pub struct FakeGitRepositoryState { pub unmerged_paths: HashMap, pub head_contents: HashMap, pub index_contents: HashMap, + // everything in commit contents is in oids + pub merge_base_contents: HashMap, + pub oids: HashMap, pub blames: HashMap, pub current_branch_name: Option, pub branches: HashSet, @@ -54,6 +66,8 @@ impl FakeGitRepositoryState { branches: Default::default(), simulated_index_write_error_message: Default::default(), refs: HashMap::from_iter([("HEAD".into(), "abc".into())]), + merge_base_contents: Default::default(), + oids: Default::default(), } } } @@ -79,32 +93,35 @@ impl GitRepository for FakeGitRepository { fn reload_index(&self) {} fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option> { - async { - self.with_state_async(false, move |state| { - state - .index_contents - .get(&path) - .context("not present in index") - .cloned() - }) - .await - .ok() - } - .boxed() + let fut = self.with_state_async(false, move |state| { + state + .index_contents + .get(&path) + .context("not present in index") + .cloned() + }); + self.executor + .spawn_labeled(*LOAD_INDEX_TEXT_TASK, async move { fut.await.ok() }) + .boxed() } fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option> { - async { - self.with_state_async(false, move |state| { - state - .head_contents - .get(&path) - .context("not present in HEAD") - .cloned() - }) - .await - .ok() - } + let fut = self.with_state_async(false, move |state| { + state + .head_contents + .get(&path) + .context("not present in HEAD") + .cloned() + }); + self.executor + .spawn_labeled(*LOAD_HEAD_TEXT_TASK, async move { fut.await.ok() }) + .boxed() + } + + fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result> { + self.with_state_async(false, move |state| { + state.oids.get(&oid).cloned().context("oid does not exist") + }) .boxed() } @@ -138,6 +155,34 @@ impl GitRepository for FakeGitRepository { None } + fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result> { + let mut entries = HashMap::default(); + self.with_state_async(false, |state| { + for (path, content) in &state.head_contents { + let status = if let Some((oid, original)) = state + .merge_base_contents + .get(path) + .map(|oid| (oid, &state.oids[oid])) + { + if original == content { + continue; + } + TreeDiffStatus::Modified { old: *oid } + } else { + TreeDiffStatus::Added + }; + entries.insert(path.clone(), status); + } + for (path, oid) in &state.merge_base_contents { + if !entries.contains_key(path) { + entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid }); + } + } + Ok(TreeDiff { entries }) + }) + .boxed() + } + fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { self.with_state_async(false, |state| { Ok(revs @@ -521,7 +566,7 @@ impl GitRepository for FakeGitRepository { let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf(); async move { executor.simulate_random_delay().await; - let oid = Oid::random(&mut executor.rng()); + let oid = git::Oid::random(&mut executor.rng()); let entry = fs.entry(&repository_dir_path)?; checkpoints.lock().insert(oid, entry); Ok(GitRepositoryCheckpoint { commit_sha: oid }) @@ -577,7 +622,7 @@ impl GitRepository for FakeGitRepository { } fn default_branch(&self) -> BoxFuture<'_, Result>> { - 
unimplemented!() + async { Ok(Some("main".into())) }.boxed() } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 03cf78d74eb0e0ed8caf22c710acc131960e97c0..c794303ef71232d5a162b51ec8db7d472328b767 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -7,6 +7,7 @@ pub mod fs_watcher; use anyhow::{Context as _, Result, anyhow}; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use ashpd::desktop::trash; +use futures::stream::iter; use gpui::App; use gpui::BackgroundExecutor; use gpui::Global; @@ -57,6 +58,9 @@ use smol::io::AsyncReadExt; #[cfg(any(test, feature = "test-support"))] use std::ffi::OsStr; +#[cfg(any(test, feature = "test-support"))] +pub use fake_git_repo::{LOAD_HEAD_TEXT_TASK, LOAD_INDEX_TEXT_TASK}; + pub trait Watcher: Send + Sync { fn add(&self, path: &Path) -> Result<()>; fn remove(&self, path: &Path) -> Result<()>; @@ -320,7 +324,33 @@ impl FileHandle for std::fs::File { #[cfg(target_os = "windows")] fn current_path(&self, _: &Arc) -> Result { - anyhow::bail!("unimplemented") + use std::ffi::OsString; + use std::os::windows::ffi::OsStringExt; + use std::os::windows::io::AsRawHandle; + + use windows::Win32::Foundation::HANDLE; + use windows::Win32::Storage::FileSystem::{ + FILE_NAME_NORMALIZED, GetFinalPathNameByHandleW, + }; + + let handle = HANDLE(self.as_raw_handle() as _); + + // Query required buffer size (in wide chars) + let required_len = + unsafe { GetFinalPathNameByHandleW(handle, &mut [], FILE_NAME_NORMALIZED) }; + if required_len == 0 { + anyhow::bail!("GetFinalPathNameByHandleW returned 0 length"); + } + + // Allocate buffer and retrieve the path + let mut buf: Vec = vec![0u16; required_len as usize + 1]; + let written = unsafe { GetFinalPathNameByHandleW(handle, &mut buf, FILE_NAME_NORMALIZED) }; + if written == 0 { + anyhow::bail!("GetFinalPathNameByHandleW failed to write path"); + } + + let os_str: OsString = OsString::from_wide(&buf[..written as usize]); + Ok(PathBuf::from(os_str)) } } @@ -557,17 +587,29 @@ impl Fs for RealFs { } async fn open_handle(&self, path: &Path) -> Result> { - Ok(Arc::new(std::fs::File::open(path)?)) + let mut options = std::fs::OpenOptions::new(); + options.read(true); + #[cfg(windows)] + { + use std::os::windows::fs::OpenOptionsExt; + options.custom_flags(windows::Win32::Storage::FileSystem::FILE_FLAG_BACKUP_SEMANTICS.0); + } + Ok(Arc::new(options.open(path)?)) } async fn load(&self, path: &Path) -> Result { let path = path.to_path_buf(); - let text = smol::unblock(|| std::fs::read_to_string(path)).await?; - Ok(text) + self.executor + .spawn(async move { Ok(std::fs::read_to_string(path)?) }) + .await } + async fn load_bytes(&self, path: &Path) -> Result> { let path = path.to_path_buf(); - let bytes = smol::unblock(|| std::fs::read(path)).await?; + let bytes = self + .executor + .spawn(async move { std::fs::read(path) }) + .await?; Ok(bytes) } @@ -635,30 +677,46 @@ impl Fs for RealFs { if let Some(path) = path.parent() { self.create_dir(path).await?; } - smol::fs::write(path, content).await?; - Ok(()) + let path = path.to_owned(); + let contents = content.to_owned(); + self.executor + .spawn(async move { + std::fs::write(path, contents)?; + Ok(()) + }) + .await } async fn canonicalize(&self, path: &Path) -> Result { - Ok(smol::fs::canonicalize(path) + let path = path.to_owned(); + self.executor + .spawn(async move { + std::fs::canonicalize(&path).with_context(|| format!("canonicalizing {path:?}")) + }) .await - .with_context(|| format!("canonicalizing {path:?}"))?) 
} async fn is_file(&self, path: &Path) -> bool { - smol::fs::metadata(path) + let path = path.to_owned(); + self.executor + .spawn(async move { std::fs::metadata(path).is_ok_and(|metadata| metadata.is_file()) }) .await - .is_ok_and(|metadata| metadata.is_file()) } async fn is_dir(&self, path: &Path) -> bool { - smol::fs::metadata(path) + let path = path.to_owned(); + self.executor + .spawn(async move { std::fs::metadata(path).is_ok_and(|metadata| metadata.is_dir()) }) .await - .is_ok_and(|metadata| metadata.is_dir()) } async fn metadata(&self, path: &Path) -> Result> { - let symlink_metadata = match smol::fs::symlink_metadata(path).await { + let path_buf = path.to_owned(); + let symlink_metadata = match self + .executor + .spawn(async move { std::fs::symlink_metadata(&path_buf) }) + .await + { Ok(metadata) => metadata, Err(err) => { return match (err.kind(), err.raw_os_error()) { @@ -669,19 +727,28 @@ impl Fs for RealFs { } }; - let path_buf = path.to_path_buf(); - let path_exists = smol::unblock(move || { - path_buf - .try_exists() - .with_context(|| format!("checking existence for path {path_buf:?}")) - }) - .await?; let is_symlink = symlink_metadata.file_type().is_symlink(); - let metadata = match (is_symlink, path_exists) { - (true, true) => smol::fs::metadata(path) - .await - .with_context(|| "accessing symlink for path {path}")?, - _ => symlink_metadata, + let metadata = if is_symlink { + let path_buf = path.to_path_buf(); + let path_exists = self + .executor + .spawn(async move { + path_buf + .try_exists() + .with_context(|| format!("checking existence for path {path_buf:?}")) + }) + .await?; + if path_exists { + let path_buf = path.to_path_buf(); + self.executor + .spawn(async move { std::fs::metadata(path_buf) }) + .await + .with_context(|| "accessing symlink for path {path}")? + } else { + symlink_metadata + } + } else { + symlink_metadata }; #[cfg(unix)] @@ -707,7 +774,11 @@ impl Fs for RealFs { } async fn read_link(&self, path: &Path) -> Result { - let path = smol::fs::read_link(path).await?; + let path = path.to_owned(); + let path = self + .executor + .spawn(async move { std::fs::read_link(&path) }) + .await?; Ok(path) } @@ -715,7 +786,13 @@ impl Fs for RealFs { &self, path: &Path, ) -> Result>>>> { - let result = smol::fs::read_dir(path).await?.map(|entry| match entry { + let path = path.to_owned(); + let result = iter( + self.executor + .spawn(async move { std::fs::read_dir(path) }) + .await?, + ) + .map(|entry| match entry { Ok(entry) => Ok(entry.path()), Err(error) => Err(anyhow!("failed to read dir entry {error:?}")), }); @@ -749,6 +826,7 @@ impl Fs for RealFs { events .into_iter() .map(|event| { + log::trace!("fs path event: {event:?}"); let kind = if event.flags.contains(StreamFlags::ITEM_REMOVED) { Some(PathEventKind::Removed) } else if event.flags.contains(StreamFlags::ITEM_CREATED) { @@ -806,6 +884,7 @@ impl Fs for RealFs { // Check if path is a symlink and follow the target parent if let Some(mut target) = self.read_link(path).await.ok() { + log::trace!("watch symlink {path:?} -> {target:?}"); // Check if symlink target is relative path, if so make it absolute if target.is_relative() && let Some(parent) = path.parent() @@ -1673,6 +1752,26 @@ impl FakeFs { .unwrap(); } + pub fn set_merge_base_content_for_repo( + &self, + dot_git: &Path, + contents_by_path: &[(&str, String)], + ) { + self.with_git_state(dot_git, true, |state| { + use git::Oid; + + state.merge_base_contents.clear(); + let oids = (1..) 
+ .map(|n| n.to_string()) + .map(|n| Oid::from_bytes(n.repeat(20).as_bytes()).unwrap()); + for ((path, content), oid) in contents_by_path.iter().zip(oids) { + state.merge_base_contents.insert(repo_path(path), oid); + state.oids.insert(oid, content.clone()); + } + }) + .unwrap(); + } + pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) { self.with_git_state(dot_git, true, |state| { state.blames.clear(); diff --git a/crates/fs/src/fs_watcher.rs b/crates/fs/src/fs_watcher.rs index 0d6d914ae4c0bc3f8b92ec9f8be67d92c9ac6b64..32be1112d0b235281d33dd14534ebb87d8a3bc55 100644 --- a/crates/fs/src/fs_watcher.rs +++ b/crates/fs/src/fs_watcher.rs @@ -46,6 +46,7 @@ impl Drop for FsWatcher { impl Watcher for FsWatcher { fn add(&self, path: &std::path::Path) -> anyhow::Result<()> { + log::trace!("watcher add: {path:?}"); let tx = self.tx.clone(); let pending_paths = self.pending_path_events.clone(); @@ -63,11 +64,15 @@ impl Watcher for FsWatcher { .next_back() && path.starts_with(watched_path.as_ref()) { + log::trace!( + "path to watch is covered by existing registration: {path:?}, {watched_path:?}" + ); return Ok(()); } } #[cfg(target_os = "linux")] { + log::trace!("path to watch is already watched: {path:?}"); if self.registrations.lock().contains_key(path) { return Ok(()); } @@ -85,6 +90,7 @@ impl Watcher for FsWatcher { let path = path.clone(); |g| { g.add(path, mode, move |event: ¬ify::Event| { + log::trace!("watcher received event: {event:?}"); let kind = match event.kind { EventKind::Create(_) => Some(PathEventKind::Created), EventKind::Modify(_) => Some(PathEventKind::Changed), @@ -126,6 +132,7 @@ impl Watcher for FsWatcher { } fn remove(&self, path: &std::path::Path) -> anyhow::Result<()> { + log::trace!("remove watched path: {path:?}"); let Some(registration) = self.registrations.lock().remove(path) else { return Ok(()); }; @@ -215,6 +222,7 @@ static FS_WATCHER_INSTANCE: OnceLock) { + log::trace!("global handle event: {event:?}"); // Filter out access events, which could lead to a weird bug on Linux after upgrading notify // https://github.com/zed-industries/zed/actions/runs/14085230504/job/39449448832 let Some(event) = event diff --git a/crates/fs/src/mac_watcher.rs b/crates/fs/src/mac_watcher.rs index 698014de9716f6505ccd23cd344a62815d9ba0f7..b781a231ba2bc33a895480ea278a7ccfe3364fe7 100644 --- a/crates/fs/src/mac_watcher.rs +++ b/crates/fs/src/mac_watcher.rs @@ -32,6 +32,7 @@ impl MacWatcher { impl Watcher for MacWatcher { fn add(&self, path: &Path) -> Result<()> { + log::trace!("mac watcher add: {:?}", path); let handles = self .handles .upgrade() @@ -44,6 +45,9 @@ impl Watcher for MacWatcher { .next_back() && path.starts_with(watched_path) { + log::trace!( + "mac watched path starts with existing watched path: {watched_path:?}, {path:?}" + ); return Ok(()); } diff --git a/crates/fs_benchmarks/Cargo.toml b/crates/fs_benchmarks/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..f207a2db3b7354ca96347aaffb5c1915a514ef7c --- /dev/null +++ b/crates/fs_benchmarks/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "fs_benchmarks" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[dependencies] +fs.workspace = true +gpui = {workspace = true, features = ["windows-manifest"]} + +[lints] +workspace = true diff --git a/crates/assistant_context/LICENSE-GPL b/crates/fs_benchmarks/LICENSE-GPL similarity index 100% rename from crates/assistant_context/LICENSE-GPL rename to crates/fs_benchmarks/LICENSE-GPL diff --git 
a/crates/fs_benchmarks/src/main.rs b/crates/fs_benchmarks/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..12df32f0763e02a95c3f261d2c14fa6e295c304e --- /dev/null +++ b/crates/fs_benchmarks/src/main.rs @@ -0,0 +1,32 @@ +use fs::Fs; +use gpui::{AppContext, Application}; +fn main() { + let Some(path_to_read) = std::env::args().nth(1) else { + println!("Expected path to read as 1st argument."); + return; + }; + + let _ = Application::headless().run(|cx| { + let fs = fs::RealFs::new(None, cx.background_executor().clone()); + cx.background_spawn(async move { + let timer = std::time::Instant::now(); + let result = fs.load_bytes(path_to_read.as_ref()).await; + let elapsed = timer.elapsed(); + if let Err(e) = result { + println!("Failed `load_bytes` after {elapsed:?} with error `{e}`"); + } else { + println!("Took {elapsed:?} to read {} bytes", result.unwrap().len()); + }; + let timer = std::time::Instant::now(); + let result = fs.metadata(path_to_read.as_ref()).await; + let elapsed = timer.elapsed(); + if let Err(e) = result { + println!("Failed `metadata` after {elapsed:?} with error `{e}`"); + } else { + println!("Took {elapsed:?} to query metadata"); + }; + std::process::exit(0); + }) + .detach(); + }); +} diff --git a/crates/fsevent/Cargo.toml b/crates/fsevent/Cargo.toml index a421294785cee5546db9d9c2066cde530f0c8b2c..635b36ebe14ee6823f8773bb38ff085516e320b9 100644 --- a/crates/fsevent/Cargo.toml +++ b/crates/fsevent/Cargo.toml @@ -16,7 +16,6 @@ doctest = false bitflags.workspace = true parking_lot.workspace = true log.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true diff --git a/crates/fuzzy/Cargo.toml b/crates/fuzzy/Cargo.toml index 35e134236d619e51467ef96a204df3fc8cc7681c..7df2142fa1862a39f83bb74af773a410e5823b4f 100644 --- a/crates/fuzzy/Cargo.toml +++ b/crates/fuzzy/Cargo.toml @@ -16,7 +16,6 @@ doctest = false gpui.workspace = true util.workspace = true log.workspace = true -workspace-hack.workspace = true [dev-dependencies] util = {workspace = true, features = ["test-support"]} diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index 6fc52361e37750400aa308733865fc6fee435134..b35f0c1ce6cec73995838eb82bf782d00f0129af 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -88,9 +88,11 @@ impl Ord for PathMatch { pub fn match_fixed_path_set( candidates: Vec, worktree_id: usize, + worktree_root_name: Option>, query: &str, smart_case: bool, max_results: usize, + path_style: PathStyle, ) -> Vec { let lowercase_query = query.to_lowercase().chars().collect::>(); let query = query.chars().collect::>(); @@ -98,10 +100,31 @@ pub fn match_fixed_path_set( let mut matcher = Matcher::new(&query, &lowercase_query, query_char_bag, smart_case, true); - let mut results = Vec::new(); + let mut results = Vec::with_capacity(candidates.len()); + let (path_prefix, path_prefix_chars, lowercase_prefix) = match worktree_root_name { + Some(worktree_root_name) => { + let mut path_prefix_chars = worktree_root_name + .display(path_style) + .chars() + .collect::>(); + path_prefix_chars.extend(path_style.separator().chars()); + let lowercase_pfx = path_prefix_chars + .iter() + .map(|c| c.to_ascii_lowercase()) + .collect::>(); + + (worktree_root_name, path_prefix_chars, lowercase_pfx) + } + None => ( + RelPath::empty().into(), + Default::default(), + Default::default(), + ), + }; + matcher.match_candidates( - &[], - &[], + &path_prefix_chars, + &lowercase_prefix, 
candidates.into_iter(), &mut results, &AtomicBool::new(false), @@ -111,7 +134,7 @@ pub fn match_fixed_path_set( positions: positions.clone(), is_dir: candidate.is_dir, path: candidate.path.into(), - path_prefix: RelPath::empty().into(), + path_prefix: path_prefix.clone(), distance_to_relative_ancestor: usize::MAX, }, ); diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index e91b5c7c6480076481cd25b963a7d8012e67ee34..0a99b0ad27a9e24cee9f59c9180ca5292b050549 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -41,7 +41,6 @@ urlencoding.workspace = true util.workspace = true uuid.workspace = true futures.workspace = true -workspace-hack.workspace = true [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 2e132d4eaca55c9307bf3368c412f77ed6726df2..06bc5ec4114af01ae4c90f12d676ad027d0c5cc0 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,6 +1,6 @@ use crate::commit::parse_git_diff_name_status; use crate::stash::GitStash; -use crate::status::{GitStatus, StatusCode}; +use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff}; use crate::{Oid, SHORT_SHA_LENGTH}; use anyhow::{Context as _, Result, anyhow, bail}; use collections::HashMap; @@ -350,6 +350,7 @@ pub trait GitRepository: Send + Sync { /// /// Also returns `None` for symlinks. fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option>; + fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result>; fn set_index_text( &self, @@ -379,6 +380,7 @@ pub trait GitRepository: Send + Sync { fn merge_message(&self) -> BoxFuture<'_, Option>; fn status(&self, path_prefixes: &[RepoPath]) -> Task>; + fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result>; fn stash_entries(&self) -> BoxFuture<'_, Result>; @@ -693,10 +695,11 @@ impl GitRepository for RealGitRepository { .args([ "--no-optional-locks", "show", - "--format=%P", + "--format=", "-z", "--no-renames", "--name-status", + "--first-parent", ]) .arg(&commit) .stdin(Stdio::null()) @@ -707,9 +710,8 @@ impl GitRepository for RealGitRepository { .context("starting git show process")?; let show_stdout = String::from_utf8_lossy(&show_output.stdout); - let mut lines = show_stdout.split('\n'); - let parent_sha = lines.next().unwrap().trim().trim_end_matches('\0'); - let changes = parse_git_diff_name_status(lines.next().unwrap_or("")); + let changes = parse_git_diff_name_status(&show_stdout); + let parent_sha = format!("{}^", commit); let mut cat_file_process = util::command::new_smol_command(&git_binary_path) .current_dir(&working_directory) @@ -908,6 +910,17 @@ impl GitRepository for RealGitRepository { .boxed() } + fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result> { + let repo = self.repository.clone(); + self.executor + .spawn(async move { + let repo = repo.lock(); + let content = repo.find_blob(oid.0)?.content().to_owned(); + Ok(String::from_utf8(content)?) 
+ }) + .boxed() + } + fn set_index_text( &self, path: RepoPath, @@ -1060,6 +1073,50 @@ impl GitRepository for RealGitRepository { }) } + fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result> { + let git_binary_path = self.any_git_binary_path.clone(); + let working_directory = match self.working_directory() { + Ok(working_directory) => working_directory, + Err(e) => return Task::ready(Err(e)).boxed(), + }; + + let mut args = vec![ + OsString::from("--no-optional-locks"), + OsString::from("diff-tree"), + OsString::from("-r"), + OsString::from("-z"), + OsString::from("--no-renames"), + ]; + match request { + DiffTreeType::MergeBase { base, head } => { + args.push("--merge-base".into()); + args.push(OsString::from(base.as_str())); + args.push(OsString::from(head.as_str())); + } + DiffTreeType::Since { base, head } => { + args.push(OsString::from(base.as_str())); + args.push(OsString::from(head.as_str())); + } + } + + self.executor + .spawn(async move { + let output = new_smol_command(&git_binary_path) + .current_dir(working_directory) + .args(args) + .output() + .await?; + if output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + stdout.parse() + } else { + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!("git status failed: {stderr}"); + } + }) + .boxed() + } + fn stash_entries(&self) -> BoxFuture<'_, Result> { let git_binary_path = self.any_git_binary_path.clone(); let working_directory = self.working_directory(); @@ -1827,13 +1884,23 @@ impl GitRepository for RealGitRepository { return Ok(output); } - let output = git - .run(&["symbolic-ref", "refs/remotes/origin/HEAD"]) - .await?; + if let Ok(output) = git.run(&["symbolic-ref", "refs/remotes/origin/HEAD"]).await { + return Ok(output + .strip_prefix("refs/remotes/origin/") + .map(|s| SharedString::from(s.to_owned()))); + } + + if let Ok(default_branch) = git.run(&["config", "init.defaultBranch"]).await { + if git.run(&["rev-parse", &default_branch]).await.is_ok() { + return Ok(Some(default_branch.into())); + } + } + + if git.run(&["rev-parse", "master"]).await.is_ok() { + return Ok(Some("master".into())); + } - Ok(output - .strip_prefix("refs/remotes/origin/") - .map(|s| SharedString::from(s.to_owned()))) + Ok(None) }) .boxed() } diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index c3f28aa2040446822f81990804a63fcb5a53300c..f3401a0e93990c61df80e0e88e28292c4f2b28e2 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -1,5 +1,7 @@ -use crate::repository::RepoPath; -use anyhow::Result; +use crate::{Oid, repository::RepoPath}; +use anyhow::{Result, anyhow}; +use collections::HashMap; +use gpui::SharedString; use serde::{Deserialize, Serialize}; use std::{str::FromStr, sync::Arc}; use util::{ResultExt, rel_path::RelPath}; @@ -190,7 +192,11 @@ impl FileStatus { } pub fn is_deleted(self) -> bool { - matches!(self, FileStatus::Tracked(tracked) if matches!((tracked.index_status, tracked.worktree_status), (StatusCode::Deleted, _) | (_, StatusCode::Deleted))) + let FileStatus::Tracked(tracked) = self else { + return false; + }; + tracked.index_status == StatusCode::Deleted && tracked.worktree_status != StatusCode::Added + || tracked.worktree_status == StatusCode::Deleted } pub fn is_untracked(self) -> bool { @@ -486,3 +492,128 @@ impl Default for GitStatus { } } } + +pub enum DiffTreeType { + MergeBase { + base: SharedString, + head: SharedString, + }, + Since { + base: SharedString, + head: SharedString, + }, +} + +impl DiffTreeType { + pub fn 
base(&self) -> &SharedString { + match self { + DiffTreeType::MergeBase { base, .. } => base, + DiffTreeType::Since { base, .. } => base, + } + } + + pub fn head(&self) -> &SharedString { + match self { + DiffTreeType::MergeBase { head, .. } => head, + DiffTreeType::Since { head, .. } => head, + } + } +} + +#[derive(Debug, PartialEq)] +pub struct TreeDiff { + pub entries: HashMap, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum TreeDiffStatus { + Added, + Modified { old: Oid }, + Deleted { old: Oid }, +} + +impl FromStr for TreeDiff { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let mut fields = s.split('\0'); + let mut parsed = HashMap::default(); + while let Some((status, path)) = fields.next().zip(fields.next()) { + let path = RepoPath(RelPath::unix(path)?.into()); + + let mut fields = status.split(" ").skip(2); + let old_sha = fields + .next() + .ok_or_else(|| anyhow!("expected to find old_sha"))? + .to_owned() + .parse()?; + let _new_sha = fields + .next() + .ok_or_else(|| anyhow!("expected to find new_sha"))?; + let status = fields + .next() + .and_then(|s| { + if s.len() == 1 { + s.as_bytes().first() + } else { + None + } + }) + .ok_or_else(|| anyhow!("expected to find status"))?; + + let result = match StatusCode::from_byte(*status)? { + StatusCode::Modified => TreeDiffStatus::Modified { old: old_sha }, + StatusCode::Added => TreeDiffStatus::Added, + StatusCode::Deleted => TreeDiffStatus::Deleted { old: old_sha }, + _status => continue, + }; + + parsed.insert(path, result); + } + + Ok(Self { entries: parsed }) + } +} + +#[cfg(test)] +mod tests { + + use crate::{ + repository::RepoPath, + status::{TreeDiff, TreeDiffStatus}, + }; + + #[test] + fn test_tree_diff_parsing() { + let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() + + ":100644 000000 bb3e9ed2e97a8c02545bae243264d342c069afb3 0000000000000000000000000000000000000000 D\x00README.md\x00" + + ":100644 100644 42f097005a1f21eb2260fad02ec8c991282beee8 a437d85f63bb8c62bd78f83f40c506631fabf005 M\x00parallel.go\x00"; + + let output: TreeDiff = input.parse().unwrap(); + assert_eq!( + output, + TreeDiff { + entries: [ + ( + RepoPath::new(".zed/settings.json").unwrap(), + TreeDiffStatus::Added, + ), + ( + RepoPath::new("README.md").unwrap(), + TreeDiffStatus::Deleted { + old: "bb3e9ed2e97a8c02545bae243264d342c069afb3".parse().unwrap() + } + ), + ( + RepoPath::new("parallel.go").unwrap(), + TreeDiffStatus::Modified { + old: "42f097005a1f21eb2260fad02ec8c991282beee8".parse().unwrap(), + } + ), + ] + .into_iter() + .collect() + } + ) + } +} diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index dd189f64bf08dabafa18d1132bc36d36e6f5ab8e..2b3e8f235ff6e5f351c1875107443f51838c6da9 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -24,7 +24,6 @@ serde_json.workspace = true settings.workspace = true url.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] indoc.workspace = true diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 6905b3eb890b5ac96d9ccd75a08dd88f0288787b..486e43fea94f53e2ad9fd67d88cfe2279afb353c 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -44,7 +44,6 @@ multi_buffer.workspace = true notifications.workspace = true panel.workspace = true picker.workspace = true -postage.workspace = true project.workspace = true schemars.workspace = 
true serde.workspace = true @@ -58,7 +57,6 @@ time_format.workspace = true ui.workspace = true util.workspace = true watch.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true zeroize.workspace = true diff --git a/crates/git_ui/src/blame_ui.rs b/crates/git_ui/src/blame_ui.rs index b1dca288763c5cf6251c7ce06c38a057bee86a2e..6059bc9e83b63e710815891165fe6e530a0efa1a 100644 --- a/crates/git_ui/src/blame_ui.rs +++ b/crates/git_ui/src/blame_ui.rs @@ -8,8 +8,8 @@ use git::{ repository::CommitSummary, }; use gpui::{ - ClipboardItem, Entity, Hsla, MouseButton, ScrollHandle, Subscription, TextStyle, WeakEntity, - prelude::*, + ClipboardItem, Entity, Hsla, MouseButton, ScrollHandle, Subscription, TextStyle, + TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, }; use markdown::{Markdown, MarkdownElement}; use project::{git_store::Repository, project_settings::ProjectSettings}; @@ -17,7 +17,7 @@ use settings::Settings as _; use theme::ThemeSettings; use time::OffsetDateTime; use time_format::format_local_timestamp; -use ui::{ContextMenu, Divider, IconButtonShape, prelude::*, tooltip_container}; +use ui::{ContextMenu, Divider, prelude::*, tooltip_container}; use workspace::Workspace; const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20; @@ -61,16 +61,15 @@ impl BlameRenderer for GitBlameRenderer { .mr_2() .child( h_flex() + .id(("blame", ix)) .w_full() + .gap_2() .justify_between() .font_family(style.font().family) .line_height(style.line_height) - .id(("blame", ix)) .text_color(cx.theme().status().hint) - .gap_2() .child( h_flex() - .items_center() .gap_2() .child(div().text_color(sha_color).child(short_commit_id)) .children(avatar) @@ -99,25 +98,10 @@ impl BlameRenderer for GitBlameRenderer { let workspace = workspace.clone(); move |_, window, cx| { CommitView::open( - CommitSummary { - sha: blame_entry.sha.to_string().into(), - subject: blame_entry - .summary - .clone() - .unwrap_or_default() - .into(), - commit_timestamp: blame_entry - .committer_time - .unwrap_or_default(), - author_name: blame_entry - .committer_name - .clone() - .unwrap_or_default() - .into(), - has_parent: true, - }, + blame_entry.sha.to_string(), repository.downgrade(), workspace.clone(), + None, window, cx, ) @@ -209,11 +193,21 @@ impl BlameRenderer for GitBlameRenderer { OffsetDateTime::now_utc(), time_format::TimestampFormat::MediumAbsolute, ); + let link_color = cx.theme().colors().text_accent; let markdown_style = { let mut style = hover_markdown_style(window, cx); if let Some(code_block) = &style.code_block.text { style.base_text_style.refine(code_block); } + style.link.refine(&TextStyleRefinement { + color: Some(link_color), + underline: Some(UnderlineStyle { + color: Some(link_color.opacity(0.4)), + thickness: px(1.0), + ..Default::default() + }), + ..Default::default() + }); style }; @@ -250,20 +244,21 @@ impl BlameRenderer for GitBlameRenderer { }; Some( - tooltip_container(cx, |d, cx| { - d.occlude() + tooltip_container(cx, |this, cx| { + this.occlude() .on_mouse_move(|_, _, cx| cx.stop_propagation()) .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) .child( v_flex() .w(gpui::rems(30.)) - .gap_4() .child( h_flex() - .pb_1p5() - .gap_x_2() + .pb_1() + .gap_2() .overflow_x_hidden() .flex_wrap() + .border_b_1() + .border_color(cx.theme().colors().border_variant) .children(avatar) .child(author) .when(!author_email.is_empty(), |this| { @@ -272,30 +267,29 @@ impl BlameRenderer for GitBlameRenderer { 
.text_color(cx.theme().colors().text_muted) .child(author_email.to_owned()), ) - }) - .border_b_1() - .border_color(cx.theme().colors().border_variant), + }), ) .child( div() .id("inline-blame-commit-message") - .child(message) + .track_scroll(&scroll_handle) + .py_1p5() .max_h(message_max_height) .overflow_y_scroll() - .track_scroll(&scroll_handle), + .child(message), ) .child( h_flex() .text_color(cx.theme().colors().text_muted) .w_full() .justify_between() - .pt_1p5() + .pt_1() .border_t_1() .border_color(cx.theme().colors().border_variant) .child(absolute_timestamp) .child( h_flex() - .gap_1p5() + .gap_1() .when_some(pull_request, |this, pr| { this.child( Button::new( @@ -306,29 +300,30 @@ impl BlameRenderer for GitBlameRenderer { .icon(IconName::PullRequest) .icon_color(Color::Muted) .icon_position(IconPosition::Start) - .style(ButtonStyle::Subtle) + .icon_size(IconSize::Small) .on_click(move |_, _, cx| { cx.stop_propagation(); cx.open_url(pr.url.as_str()) }), ) + .child(Divider::vertical()) }) - .child(Divider::vertical()) .child( Button::new( "commit-sha-button", short_commit_id.clone(), ) - .style(ButtonStyle::Subtle) .color(Color::Muted) .icon(IconName::FileGit) .icon_color(Color::Muted) .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) .on_click(move |_, window, cx| { CommitView::open( - commit_summary.clone(), + commit_summary.sha.clone().into(), repository.downgrade(), workspace.clone(), + None, window, cx, ); @@ -337,7 +332,6 @@ impl BlameRenderer for GitBlameRenderer { ) .child( IconButton::new("copy-sha-button", IconName::Copy) - .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _, cx| { @@ -366,15 +360,10 @@ impl BlameRenderer for GitBlameRenderer { cx: &mut App, ) { CommitView::open( - CommitSummary { - sha: blame_entry.sha.to_string().into(), - subject: blame_entry.summary.clone().unwrap_or_default().into(), - commit_timestamp: blame_entry.committer_time.unwrap_or_default(), - author_name: blame_entry.committer_name.unwrap_or_default().into(), - has_parent: true, - }, + blame_entry.sha.to_string(), repository.downgrade(), workspace, + None, window, cx, ) diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index b9a8dfea9ea167bf7ee807ee2b459444f4fa4f4d..662e1cc1d712757eb2f31b11a0d6340576c29317 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -137,13 +137,13 @@ impl BranchList { }) .await; - this.update_in(cx, |this, window, cx| { + let _ = this.update_in(cx, |this, window, cx| { this.picker.update(cx, |picker, cx| { picker.delegate.default_branch = default_branch; picker.delegate.all_branches = Some(all_branches); picker.refresh(window, cx); }) - })?; + }); anyhow::Ok(()) }) @@ -410,37 +410,20 @@ impl PickerDelegate for BranchListDelegate { return; } - cx.spawn_in(window, { - let branch = entry.branch.clone(); - async move |picker, cx| { - let branch_change_task = picker.update(cx, |this, cx| { - let repo = this - .delegate - .repo - .as_ref() - .context("No active repository")? - .clone(); - - let mut cx = cx.to_async(); - - anyhow::Ok(async move { - repo.update(&mut cx, |repo, _| { - repo.change_branch(branch.name().to_string()) - })? - .await? 
- }) - })??; - - branch_change_task.await?; + let Some(repo) = self.repo.clone() else { + return; + }; - picker.update(cx, |_, cx| { - cx.emit(DismissEvent); + let branch = entry.branch.clone(); + cx.spawn(async move |_, cx| { + repo.update(cx, |repo, _| repo.change_branch(branch.name().to_string()))? + .await??; - anyhow::Ok(()) - }) - } + anyhow::Ok(()) }) .detach_and_prompt_err("Failed to change branch", window, cx, |_, _, _| None); + + cx.emit(DismissEvent); } fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { @@ -483,11 +466,10 @@ impl PickerDelegate for BranchListDelegate { this.delegate.set_selected_index(ix, window, cx); this.delegate.confirm(true, window, cx); })) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( format!("Create branch based off default: {default_branch}"), &menu::SecondaryConfirm, - window, cx, ) }), @@ -511,8 +493,12 @@ impl PickerDelegate for BranchListDelegate { ) .into_any_element() } else { - HighlightedLabel::new(entry.branch.name().to_owned(), entry.positions.clone()) - .truncate() + h_flex() + .max_w_48() + .child( + HighlightedLabel::new(entry.branch.name().to_owned(), entry.positions.clone()) + .truncate(), + ) .into_any_element() }; diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 6c93e03e4bf4009a622206195c12b49bbedf4038..45b1563dca0ceed5ed2ac488026fe94084050780 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -327,7 +327,7 @@ impl CommitModal { .anchor(Corner::TopRight) } - pub fn render_footer(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + pub fn render_footer(&self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let ( can_commit, tooltip, @@ -388,7 +388,7 @@ impl CommitModal { }); let focus_handle = self.focus_handle(cx); - let close_kb_hint = ui::KeyBinding::for_action(&menu::Cancel, window, cx).map(|close_kb| { + let close_kb_hint = ui::KeyBinding::for_action(&menu::Cancel, cx).map(|close_kb| { KeybindingHint::new(close_kb, cx.theme().colors().editor_background).suffix("Cancel") }); @@ -423,7 +423,7 @@ impl CommitModal { .flex_none() .px_1() .gap_4() - .children(close_kb_hint) + .child(close_kb_hint) .child(SplitButton::new( ui::ButtonLike::new_rounded_left(ElementId::Name( format!("split-button-left-{}", commit_label).into(), @@ -452,7 +452,7 @@ impl CommitModal { .disabled(!can_commit) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { if can_commit { Tooltip::with_meta_in( tooltip, @@ -467,7 +467,6 @@ impl CommitModal { if is_signoff_enabled { " --signoff" } else { "" } ), &focus_handle.clone(), - window, cx, ) } else { diff --git a/crates/git_ui/src/commit_tooltip.rs b/crates/git_ui/src/commit_tooltip.rs index 84ecc0b3a9c0c708ec81a0af1234506ec0208cd0..97224840debcc4cfd8dcc74a56d448ef0d2826c1 100644 --- a/crates/git_ui/src/commit_tooltip.rs +++ b/crates/git_ui/src/commit_tooltip.rs @@ -318,9 +318,10 @@ impl Render for CommitTooltip { .on_click( move |_, window, cx| { CommitView::open( - commit_summary.clone(), + commit_summary.sha.to_string(), repo.downgrade(), workspace.clone(), + None, window, cx, ); diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 201a699e2f0e8527ed62babdc941febcf9426a2d..9738e13984a0b032b09a218990f3466052e9fa61 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -1,14 +1,15 @@ use anyhow::{Context as _, Result}; use buffer_diff::{BufferDiff, 
BufferDiffSnapshot}; use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_context_lines}; -use git::repository::{CommitDetails, CommitDiff, CommitSummary, RepoPath}; +use git::repository::{CommitDetails, CommitDiff, RepoPath}; use gpui::{ - AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, - FocusHandle, Focusable, IntoElement, Render, WeakEntity, Window, + Action, AnyElement, AnyView, App, AppContext as _, AsyncApp, AsyncWindowContext, Context, + Entity, EventEmitter, FocusHandle, Focusable, IntoElement, PromptLevel, Render, WeakEntity, + Window, actions, }; use language::{ Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, OffsetRangeExt as _, - Point, Rope, TextBuffer, + Point, ReplicaId, Rope, TextBuffer, }; use multi_buffer::PathKey; use project::{Project, WorktreeId, git_store::Repository}; @@ -18,17 +19,42 @@ use std::{ path::PathBuf, sync::Arc, }; -use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString}; +use ui::{ + Button, Color, Icon, IconName, Label, LabelCommon as _, SharedString, Tooltip, prelude::*, +}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; use workspace::{ - Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace, + Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, + Workspace, item::{BreadcrumbText, ItemEvent, TabContentParams}, + notifications::NotifyTaskExt, + pane::SaveIntent, searchable::SearchableItemHandle, }; +use crate::git_panel::GitPanel; + +actions!(git, [ApplyCurrentStash, PopCurrentStash, DropCurrentStash,]); + +pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _window, _cx| { + register_workspace_action(workspace, |toolbar, _: &ApplyCurrentStash, window, cx| { + toolbar.apply_stash(window, cx); + }); + register_workspace_action(workspace, |toolbar, _: &DropCurrentStash, window, cx| { + toolbar.remove_stash(window, cx); + }); + register_workspace_action(workspace, |toolbar, _: &PopCurrentStash, window, cx| { + toolbar.pop_stash(window, cx); + }); + }) + .detach(); +} + pub struct CommitView { commit: CommitDetails, editor: Entity, + stash: Option, multibuffer: Entity, } @@ -48,17 +74,18 @@ const FILE_NAMESPACE_SORT_PREFIX: u64 = 1; impl CommitView { pub fn open( - commit: CommitSummary, + commit_sha: String, repo: WeakEntity, workspace: WeakEntity, + stash: Option, window: &mut Window, cx: &mut App, ) { let commit_diff = repo - .update(cx, |repo, _| repo.load_commit_diff(commit.sha.to_string())) + .update(cx, |repo, _| repo.load_commit_diff(commit_sha.clone())) .ok(); let commit_details = repo - .update(cx, |repo, _| repo.show(commit.sha.to_string())) + .update(cx, |repo, _| repo.show(commit_sha.clone())) .ok(); window @@ -77,6 +104,7 @@ impl CommitView { commit_diff, repo, project.clone(), + stash, window, cx, ) @@ -87,7 +115,7 @@ impl CommitView { let ix = pane.items().position(|item| { let commit_view = item.downcast::(); commit_view - .is_some_and(|view| view.read(cx).commit.sha == commit.sha) + .is_some_and(|view| view.read(cx).commit.sha == commit_sha) }); if let Some(ix) = ix { pane.activate_item(ix, true, true, window, cx); @@ -106,6 +134,7 @@ impl CommitView { commit_diff: CommitDiff, repository: Entity, project: Entity, + stash: Option, window: &mut Window, cx: &mut Context, ) -> Self { @@ -127,18 +156,21 @@ impl CommitView { let mut metadata_buffer_id = None; if let Some(worktree_id) = first_worktree_id { + let title = if let 
Some(stash) = stash { + format!("stash@{{{}}}", stash) + } else { + format!("commit {}", commit.sha) + }; let file = Arc::new(CommitMetadataFile { - title: RelPath::unix(&format!("commit {}", commit.sha)) - .unwrap() - .into(), + title: RelPath::unix(&title).unwrap().into(), worktree_id, }); let buffer = cx.new(|cx| { let buffer = TextBuffer::new_normalized( - 0, + ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), LineEnding::default(), - format_commit(&commit).into(), + format_commit(&commit, stash.is_some()).into(), ); metadata_buffer_id = Some(buffer.remote_id()); Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite) @@ -211,6 +243,7 @@ impl CommitView { commit, editor, multibuffer, + stash, } } } @@ -316,7 +349,7 @@ async fn build_buffer( }; let buffer = cx.new(|cx| { let buffer = TextBuffer::new_normalized( - 0, + ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), line_ending, text, @@ -369,9 +402,13 @@ async fn build_buffer_diff( }) } -fn format_commit(commit: &CommitDetails) -> String { +fn format_commit(commit: &CommitDetails, is_stash: bool) -> String { let mut result = String::new(); - writeln!(&mut result, "commit {}", commit.sha).unwrap(); + if is_stash { + writeln!(&mut result, "stash commit {}", commit.sha).unwrap(); + } else { + writeln!(&mut result, "commit {}", commit.sha).unwrap(); + } writeln!( &mut result, "Author: {} <{}>", @@ -538,13 +575,296 @@ impl Item for CommitView { editor, multibuffer, commit: self.commit.clone(), + stash: self.stash, } })) } } impl Render for CommitView { - fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { - self.editor.clone() + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_stash = self.stash.is_some(); + div() + .key_context(if is_stash { "StashDiff" } else { "CommitDiff" }) + .bg(cx.theme().colors().editor_background) + .flex() + .items_center() + .justify_center() + .size_full() + .child(self.editor.clone()) + } +} + +pub struct CommitViewToolbar { + commit_view: Option>, + workspace: WeakEntity, +} + +impl CommitViewToolbar { + pub fn new(workspace: &Workspace, _: &mut Context) -> Self { + Self { + commit_view: None, + workspace: workspace.weak_handle(), + } + } + + fn commit_view(&self, _: &App) -> Option> { + self.commit_view.as_ref()?.upgrade() + } + + async fn close_commit_view( + commit_view: Entity, + workspace: WeakEntity, + cx: &mut AsyncWindowContext, + ) -> anyhow::Result<()> { + workspace + .update_in(cx, |workspace, window, cx| { + let active_pane = workspace.active_pane(); + let commit_view_id = commit_view.entity_id(); + active_pane.update(cx, |pane, cx| { + pane.close_item_by_id(commit_view_id, SaveIntent::Skip, window, cx) + }) + })? 
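+            // The task returned by `close_item_by_id` is awaited so the stash tab is
+            // actually closed before the caller reports success.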
+ .await?; + anyhow::Ok(()) + } + + fn apply_stash(&mut self, window: &mut Window, cx: &mut Context) { + self.stash_action( + "Apply", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, not applying")); + } + Ok(repo.stash_apply(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await?, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); + } + + fn pop_stash(&mut self, window: &mut Window, cx: &mut Context) { + self.stash_action( + "Pop", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, pop aborted")); + } + Ok(repo.stash_pop(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await?, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); + } + + fn remove_stash(&mut self, window: &mut Window, cx: &mut Context) { + self.stash_action( + "Drop", + window, + cx, + async move |repository, sha, stash, commit_view, workspace, cx| { + let result = repository.update(cx, |repo, cx| { + if !stash_matches_index(&sha, stash, repo) { + return Err(anyhow::anyhow!("Stash has changed, drop aborted")); + } + Ok(repo.stash_drop(Some(stash), cx)) + })?; + + match result { + Ok(task) => task.await??, + Err(err) => { + Self::close_commit_view(commit_view, workspace, cx).await?; + return Err(err); + } + }; + Self::close_commit_view(commit_view, workspace, cx).await?; + anyhow::Ok(()) + }, + ); + } + + fn stash_action( + &mut self, + str_action: &str, + window: &mut Window, + cx: &mut Context, + callback: AsyncFn, + ) where + AsyncFn: AsyncFnOnce( + Entity, + &SharedString, + usize, + Entity, + WeakEntity, + &mut AsyncWindowContext, + ) -> anyhow::Result<()> + + 'static, + { + let Some(commit_view) = self.commit_view(cx) else { + return; + }; + let Some(stash) = commit_view.read(cx).stash else { + return; + }; + let sha = commit_view.read(cx).commit.sha.clone(); + let answer = window.prompt( + PromptLevel::Info, + &format!("{} stash@{{{}}}?", str_action, stash), + None, + &[str_action, "Cancel"], + cx, + ); + + let workspace = self.workspace.clone(); + cx.spawn_in(window, async move |_, cx| { + if answer.await != Ok(0) { + return anyhow::Ok(()); + } + let repo = workspace.update(cx, |workspace, cx| { + workspace + .panel::(cx) + .and_then(|p| p.read(cx).active_repository.clone()) + })?; + + let Some(repo) = repo else { + return Ok(()); + }; + callback(repo, &sha, stash, commit_view, workspace, cx).await?; + anyhow::Ok(()) + }) + .detach_and_notify_err(window, cx); + } +} + +impl EventEmitter for CommitViewToolbar {} + +impl ToolbarItemView for CommitViewToolbar { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _: &mut Window, + cx: &mut Context, + ) -> ToolbarItemLocation { + if let Some(entity) = active_pane_item.and_then(|i| i.act_as::(cx)) + && entity.read(cx).stash.is_some() + { + self.commit_view = Some(entity.downgrade()); + return ToolbarItemLocation::PrimaryRight; + } + ToolbarItemLocation::Hidden + } + + fn 
pane_focus_update( + &mut self, + _pane_focused: bool, + _window: &mut Window, + _cx: &mut Context, + ) { + } +} + +impl Render for CommitViewToolbar { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let Some(commit_view) = self.commit_view(cx) else { + return div(); + }; + + let is_stash = commit_view.read(cx).stash.is_some(); + if !is_stash { + return div(); + } + + let focus_handle = commit_view.focus_handle(cx); + + h_group_xl().my_neg_1().py_1().items_center().child( + h_group_sm() + .child( + Button::new("apply-stash", "Apply") + .tooltip(Tooltip::for_action_title_in( + "Apply current stash", + &ApplyCurrentStash, + &focus_handle, + )) + .on_click(cx.listener(|this, _, window, cx| this.apply_stash(window, cx))), + ) + .child( + Button::new("pop-stash", "Pop") + .tooltip(Tooltip::for_action_title_in( + "Pop current stash", + &PopCurrentStash, + &focus_handle, + )) + .on_click(cx.listener(|this, _, window, cx| this.pop_stash(window, cx))), + ) + .child( + Button::new("remove-stash", "Remove") + .icon(IconName::Trash) + .tooltip(Tooltip::for_action_title_in( + "Remove current stash", + &DropCurrentStash, + &focus_handle, + )) + .on_click(cx.listener(|this, _, window, cx| this.remove_stash(window, cx))), + ), + ) + } +} + +fn register_workspace_action( + workspace: &mut Workspace, + callback: fn(&mut CommitViewToolbar, &A, &mut Window, &mut Context), +) { + workspace.register_action(move |workspace, action: &A, window, cx| { + if workspace.has_active_modal(window, cx) { + cx.propagate(); + return; + } + + workspace.active_pane().update(cx, |pane, cx| { + pane.toolbar().update(cx, move |workspace, cx| { + if let Some(toolbar) = workspace.item_of_type::() { + toolbar.update(cx, move |toolbar, cx| { + callback(toolbar, action, window, cx); + cx.notify(); + }); + } + }); + }) + }); +} + +fn stash_matches_index(sha: &str, index: usize, repo: &mut Repository) -> bool { + match repo + .cached_stash() + .entries + .iter() + .find(|entry| entry.index == index) + { + Some(entry) => entry.oid.to_string() == sha, + None => false, } } diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index ee1b82920d7621f6e5b1d4ab9a9b44e151fbf82a..91cc3ce76b3f10aa310185b566b6c6086580b69c 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -234,11 +234,7 @@ fn conflicts_updated( continue; }; let excerpt_id = *excerpt_id; - let Some(range) = snapshot - .anchor_in_excerpt(excerpt_id, conflict_range.start) - .zip(snapshot.anchor_in_excerpt(excerpt_id, conflict_range.end)) - .map(|(start, end)| start..end) - else { + let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else { continue; }; removed_highlighted_ranges.push(range.clone()); @@ -321,27 +317,12 @@ fn update_conflict_highlighting( buffer: &editor::MultiBufferSnapshot, excerpt_id: editor::ExcerptId, cx: &mut Context, -) { +) -> Option<()> { log::debug!("update conflict highlighting for {conflict:?}"); - let outer_start = buffer - .anchor_in_excerpt(excerpt_id, conflict.range.start) - .unwrap(); - let outer_end = buffer - .anchor_in_excerpt(excerpt_id, conflict.range.end) - .unwrap(); - let our_start = buffer - .anchor_in_excerpt(excerpt_id, conflict.ours.start) - .unwrap(); - let our_end = buffer - .anchor_in_excerpt(excerpt_id, conflict.ours.end) - .unwrap(); - let their_start = buffer - .anchor_in_excerpt(excerpt_id, conflict.theirs.start) - .unwrap(); - let their_end = buffer - .anchor_in_excerpt(excerpt_id, conflict.theirs.end) 
- .unwrap(); + let outer = buffer.anchor_range_in_excerpt(excerpt_id, conflict.range.clone())?; + let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?; + let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?; let ours_background = cx.theme().colors().version_control_conflict_marker_ours; let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs; @@ -352,32 +333,29 @@ fn update_conflict_highlighting( }; editor.insert_gutter_highlight::( - outer_start..their_end, + outer.start..theirs.end, |cx| cx.theme().colors().editor_background, cx, ); // Prevent diff hunk highlighting within the entire conflict region. - editor.highlight_rows::(outer_start..outer_end, theirs_background, options, cx); - editor.highlight_rows::(our_start..our_end, ours_background, options, cx); + editor.highlight_rows::(outer.clone(), theirs_background, options, cx); + editor.highlight_rows::(ours.clone(), ours_background, options, cx); editor.highlight_rows::( - outer_start..our_start, + outer.start..ours.start, ours_background, options, cx, ); - editor.highlight_rows::( - their_start..their_end, - theirs_background, - options, - cx, - ); + editor.highlight_rows::(theirs.clone(), theirs_background, options, cx); editor.highlight_rows::( - their_end..outer_end, + theirs.end..outer.end, theirs_background, options, cx, ); + + Some(()) } fn render_conflict_buttons( @@ -488,20 +466,16 @@ pub(crate) fn resolve_conflict( }) .ok()?; let &(_, block_id) = &state.block_ids[ix]; - let start = snapshot - .anchor_in_excerpt(excerpt_id, resolved_conflict.range.start) - .unwrap(); - let end = snapshot - .anchor_in_excerpt(excerpt_id, resolved_conflict.range.end) - .unwrap(); - - editor.remove_gutter_highlights::(vec![start..end], cx); - - editor.remove_highlighted_rows::(vec![start..end], cx); - editor.remove_highlighted_rows::(vec![start..end], cx); - editor.remove_highlighted_rows::(vec![start..end], cx); - editor.remove_highlighted_rows::(vec![start..end], cx); - editor.remove_highlighted_rows::(vec![start..end], cx); + let range = + snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?; + + editor.remove_gutter_highlights::(vec![range.clone()], cx); + + editor.remove_highlighted_rows::(vec![range.clone()], cx); + editor.remove_highlighted_rows::(vec![range.clone()], cx); + editor.remove_highlighted_rows::(vec![range.clone()], cx); + editor.remove_highlighted_rows::(vec![range.clone()], cx); + editor.remove_highlighted_rows::(vec![range], cx); editor.remove_blocks(HashSet::from_iter([block_id]), None, cx); Some((workspace, project, multibuffer, buffer)) }) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index ce6ddf43f6dbf1e4df32c45f7c96f7c08447df06..9ff8602a18fd1a7eec5804deecee5c21921c6eee 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -425,13 +425,20 @@ impl GitPanel { } GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::Updated { full_scan, .. 
}, + RepositoryEvent::StatusesChanged { full_scan: true } + | RepositoryEvent::BranchChanged + | RepositoryEvent::MergeHeadsChanged, true, ) => { - this.schedule_update(*full_scan, window, cx); + this.schedule_update(true, window, cx); } - - GitStoreEvent::RepositoryAdded(_) | GitStoreEvent::RepositoryRemoved(_) => { + GitStoreEvent::RepositoryUpdated( + _, + RepositoryEvent::StatusesChanged { full_scan: false }, + true, + ) + | GitStoreEvent::RepositoryAdded + | GitStoreEvent::RepositoryRemoved(_) => { this.schedule_update(false, window, cx); } GitStoreEvent::IndexWriteError(error) => { @@ -3091,13 +3098,12 @@ impl GitPanel { IconButton::new("generate-commit-message", IconName::AiEdit) .shape(ui::IconButtonShape::Square) .icon_color(Color::Muted) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if can_commit { Tooltip::for_action_in( "Generate Commit Message", &git::GenerateCommitMessage, &editor_focus_handle, - window, cx, ) } else { @@ -3459,12 +3465,11 @@ impl GitPanel { panel_icon_button("expand-commit-editor", IconName::Maximize) .icon_size(IconSize::Small) .size(ui::ButtonSize::Default) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action_in( "Open Commit Modal", &git::ExpandCommitEditor, &expand_tooltip_focus_handle, - window, cx, ) }) @@ -3526,7 +3531,7 @@ impl GitPanel { .disabled(!can_commit || self.modal_open) .tooltip({ let handle = commit_tooltip_focus_handle.clone(); - move |window, cx| { + move |_window, cx| { if can_commit { Tooltip::with_meta_in( tooltip, @@ -3537,7 +3542,6 @@ impl GitPanel { if signoff { " --signoff" } else { "" } ), &handle.clone(), - window, cx, ) } else { @@ -3611,9 +3615,10 @@ impl GitPanel { let repo = active_repository.downgrade(); move |_, window, cx| { CommitView::open( - commit.clone(), + commit.sha.to_string(), repo.clone(), workspace.clone(), + None, window, cx, ); @@ -3639,7 +3644,7 @@ impl GitPanel { panel_icon_button("undo", IconName::Undo) .icon_size(IconSize::XSmall) .icon_color(Color::Muted) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Uncommit", Some(&git::Uncommit), @@ -3648,7 +3653,6 @@ impl GitPanel { } else { "git reset HEAD^" }, - window, cx, ) }) @@ -4119,13 +4123,13 @@ impl GitPanel { .ok(); } }) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { let is_staged = entry_staging.is_fully_staged(); let action = if is_staged { "Unstage" } else { "Stage" }; let tooltip_name = action.to_string(); - Tooltip::for_action(tooltip_name, &ToggleStaged, window, cx) + Tooltip::for_action(tooltip_name, &ToggleStaged, cx) }), ), ) @@ -4419,6 +4423,10 @@ impl Panel for GitPanel { "GitPanel" } + fn panel_key() -> &'static str { + GIT_PANEL_KEY + } + fn position(&self, _: &Window, cx: &App) -> DockPosition { GitPanelSettings::get_global(cx).dock } diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index f98493d1d9ef4bcf9b53393671091c8b72dcd998..83259b228b59c5bb063473cc4a04710a0520808c 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -2,7 +2,7 @@ use editor::EditorSettings; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsContent, StatusStyle}; +use settings::{Settings, StatusStyle}; use ui::{ px, scrollbars::{ScrollbarVisibility, ShowScrollbar}, @@ -58,16 +58,4 @@ impl Settings for GitPanelSettings { collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(), } } - - fn 
import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - if let Some(git_enabled) = vscode.read_bool("git.enabled") { - current.git_panel.get_or_insert_default().button = Some(git_enabled); - } - if let Some(default_branch) = vscode.read_string("git.defaultBranchName") { - current - .git_panel - .get_or_insert_default() - .fallback_branch_name = Some(default_branch.to_string()); - } - } } diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index da2e2ca032aa005ad619eabf094ae6981975b050..919cdf154d438e8ee5b38422032aa150edc5dd34 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -34,7 +34,7 @@ mod askpass_modal; pub mod branch_picker; mod commit_modal; pub mod commit_tooltip; -mod commit_view; +pub mod commit_view; mod conflict_view; pub mod file_diff_view; pub mod git_panel; @@ -59,6 +59,7 @@ pub fn init(cx: &mut App) { GitPanelSettings::register(cx); editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx); + commit_view::init(cx); cx.observe_new(|editor: &mut Editor, _, cx| { conflict_view::register_editor(editor, editor.buffer().clone(), cx); @@ -434,13 +435,12 @@ mod remote_button { move |_, window, cx| { window.dispatch_action(Box::new(git::Fetch), cx); }, - move |window, cx| { + move |_window, cx| { git_action_tooltip( "Fetch updates from remote", &git::Fetch, "git fetch", keybinding_target.clone(), - window, cx, ) }, @@ -462,13 +462,12 @@ mod remote_button { move |_, window, cx| { window.dispatch_action(Box::new(git::Push), cx); }, - move |window, cx| { + move |_window, cx| { git_action_tooltip( "Push committed changes to remote", &git::Push, "git push", keybinding_target.clone(), - window, cx, ) }, @@ -491,13 +490,12 @@ mod remote_button { move |_, window, cx| { window.dispatch_action(Box::new(git::Pull), cx); }, - move |window, cx| { + move |_window, cx| { git_action_tooltip( "Pull", &git::Pull, "git pull", keybinding_target.clone(), - window, cx, ) }, @@ -518,13 +516,12 @@ mod remote_button { move |_, window, cx| { window.dispatch_action(Box::new(git::Push), cx); }, - move |window, cx| { + move |_window, cx| { git_action_tooltip( "Publish branch to remote", &git::Push, "git push --set-upstream", keybinding_target.clone(), - window, cx, ) }, @@ -545,13 +542,12 @@ mod remote_button { move |_, window, cx| { window.dispatch_action(Box::new(git::Push), cx); }, - move |window, cx| { + move |_window, cx| { git_action_tooltip( "Re-publish branch to remote", &git::Push, "git push --set-upstream", keybinding_target.clone(), - window, cx, ) }, @@ -563,16 +559,15 @@ mod remote_button { action: &dyn Action, command: impl Into, focus_handle: Option, - window: &mut Window, cx: &mut App, ) -> AnyView { let label = label.into(); let command = command.into(); if let Some(handle) = focus_handle { - Tooltip::with_meta_in(label, Some(action), command, &handle, window, cx) + Tooltip::with_meta_in(label, Some(action), command, &handle, cx) } else { - Tooltip::with_meta(label, Some(action), command, window, cx) + Tooltip::with_meta(label, Some(action), command, cx) } } diff --git a/crates/git_ui/src/picker_prompt.rs b/crates/git_ui/src/picker_prompt.rs index 9997b0590cedfeab7cad6a7c52bce63f10657a80..6161c62af571f3a90c3110d63cc26ea3a7e032ae 100644 --- a/crates/git_ui/src/picker_prompt.rs +++ b/crates/git_ui/src/picker_prompt.rs @@ -228,7 +228,7 @@ impl PickerDelegate for PickerPromptDelegate { let highlights: Vec<_> = hit .positions .iter() - .filter(|index| index < &&self.max_match_length) + .filter(|&&index| index < 
self.max_match_length) .copied() .collect(); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 6b70f1975e8f361b04fb2ce2eb4966b5da968936..5c74cd6a9689313f343ee97241a7934c0949108a 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -4,16 +4,15 @@ use crate::{ git_panel_settings::GitPanelSettings, remote_button::{render_publish_button, render_push_button}, }; -use anyhow::Result; +use anyhow::{Context as _, Result, anyhow}; use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus}; -use collections::HashSet; +use collections::{HashMap, HashSet}; use editor::{ - Editor, EditorEvent, SelectionEffects, + Addon, Editor, EditorEvent, SelectionEffects, actions::{GoToHunk, GoToPreviousHunk}, multibuffer_context_lines, scroll::Autoscroll, }; -use futures::StreamExt; use git::{ Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext, repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus}, @@ -27,18 +26,23 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt}; use multi_buffer::{MultiBuffer, PathKey}; use project::{ Project, ProjectPath, - git_store::{GitStore, GitStoreEvent, Repository}, + git_store::{ + Repository, + branch_diff::{self, BranchDiffEvent, DiffBase}, + }, }; use settings::{Settings, SettingsStore}; use std::any::{Any, TypeId}; use std::ops::Range; +use std::sync::Arc; use theme::ActiveTheme; use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider}; -use util::ResultExt as _; +use util::{ResultExt as _, rel_path::RelPath}; use workspace::{ CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, item::{BreadcrumbText, Item, ItemEvent, ItemHandle, SaveOptions, TabContentParams}, + notifications::NotifyTaskExt, searchable::SearchableItemHandle, }; @@ -48,31 +52,26 @@ actions!( /// Shows the diff between the working directory and the index. Diff, /// Adds files to the git staging area. - Add + Add, + /// Shows the diff between the working directory and your default + /// branch (typically main or master). + BranchDiff ] ); pub struct ProjectDiff { project: Entity, multibuffer: Entity, + branch_diff: Entity, editor: Entity, - git_store: Entity, + buffer_diff_subscriptions: HashMap, (Entity, Subscription)>, workspace: WeakEntity, focus_handle: FocusHandle, - update_needed: postage::watch::Sender<()>, pending_scroll: Option, _task: Task>, _subscription: Subscription, } -#[derive(Debug)] -struct DiffBuffer { - path_key: PathKey, - buffer: Entity, - diff: Entity, - file_status: FileStatus, -} - const CONFLICT_SORT_PREFIX: u64 = 1; const TRACKED_SORT_PREFIX: u64 = 2; const NEW_SORT_PREFIX: u64 = 3; @@ -80,6 +79,7 @@ const NEW_SORT_PREFIX: u64 = 3; impl ProjectDiff { pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context) { workspace.register_action(Self::deploy); + workspace.register_action(Self::deploy_branch_diff); workspace.register_action(|workspace, _: &Add, window, cx| { Self::deploy(workspace, &Diff, window, cx); }); @@ -95,6 +95,40 @@ impl ProjectDiff { Self::deploy_at(workspace, None, window, cx) } + fn deploy_branch_diff( + workspace: &mut Workspace, + _: &BranchDiff, + window: &mut Window, + cx: &mut Context, + ) { + telemetry::event!("Git Branch Diff Opened"); + let project = workspace.project().clone(); + + let existing = workspace + .items_of_type::(cx) + .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Merge { .. 
})); + if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + return; + } + let workspace = cx.entity(); + window + .spawn(cx, async move |cx| { + let this = cx + .update(|window, cx| { + Self::new_with_default_branch(project, workspace.clone(), window, cx) + })? + .await?; + workspace + .update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(this), None, true, window, cx); + }) + .ok(); + anyhow::Ok(()) + }) + .detach_and_notify_err(window, cx); + } + pub fn deploy_at( workspace: &mut Workspace, entry: Option, @@ -109,7 +143,10 @@ impl ProjectDiff { "Action" } ); - let project_diff = if let Some(existing) = workspace.item_of_type::(cx) { + let existing = workspace + .items_of_type::(cx) + .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Head)); + let project_diff = if let Some(existing) = existing { workspace.activate_item(&existing, true, true, window, cx); existing } else { @@ -138,11 +175,54 @@ impl ProjectDiff { }) } + fn new_with_default_branch( + project: Entity, + workspace: Entity, + window: &mut Window, + cx: &mut App, + ) -> Task>> { + let Some(repo) = project.read(cx).git_store().read(cx).active_repository() else { + return Task::ready(Err(anyhow!("No active repository"))); + }; + let main_branch = repo.update(cx, |repo, _| repo.default_branch()); + window.spawn(cx, async move |cx| { + let main_branch = main_branch + .await?? + .context("Could not determine default branch")?; + + let branch_diff = cx.new_window_entity(|window, cx| { + branch_diff::BranchDiff::new( + DiffBase::Merge { + base_ref: main_branch, + }, + project.clone(), + window, + cx, + ) + })?; + cx.new_window_entity(|window, cx| { + Self::new_impl(branch_diff, project, workspace, window, cx) + }) + }) + } + fn new( project: Entity, workspace: Entity, window: &mut Window, cx: &mut Context, + ) -> Self { + let branch_diff = + cx.new(|cx| branch_diff::BranchDiff::new(DiffBase::Head, project.clone(), window, cx)); + Self::new_impl(branch_diff, project, workspace, window, cx) + } + + fn new_impl( + branch_diff: Entity, + project: Entity, + workspace: Entity, + window: &mut Window, + cx: &mut Context, ) -> Self { let focus_handle = cx.focus_handle(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); @@ -152,9 +232,25 @@ impl ProjectDiff { Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx); diff_display_editor.disable_diagnostics(cx); diff_display_editor.set_expand_all_diff_hunks(cx); - diff_display_editor.register_addon(GitPanelAddon { - workspace: workspace.downgrade(), - }); + + match branch_diff.read(cx).diff_base() { + DiffBase::Head => { + diff_display_editor.register_addon(GitPanelAddon { + workspace: workspace.downgrade(), + }); + } + DiffBase::Merge { .. 
} => { + diff_display_editor.register_addon(BranchDiffAddon { + branch_diff: branch_diff.clone(), + }); + diff_display_editor.start_temporary_diff_override(); + diff_display_editor.set_render_diff_hunk_controls( + Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()), + cx, + ); + // + } + } diff_display_editor }); window.defer(cx, { @@ -171,66 +267,71 @@ impl ProjectDiff { cx.subscribe_in(&editor, window, Self::handle_editor_event) .detach(); - let git_store = project.read(cx).git_store().clone(); - let git_store_subscription = cx.subscribe_in( - &git_store, + let branch_diff_subscription = cx.subscribe_in( + &branch_diff, window, - move |this, _git_store, event, _window, _cx| match event { - GitStoreEvent::ActiveRepositoryChanged(_) - | GitStoreEvent::RepositoryUpdated(_, _, true) - | GitStoreEvent::ConflictsUpdated => { - *this.update_needed.borrow_mut() = (); + move |this, _git_store, event, window, cx| match event { + BranchDiffEvent::FileListChanged => { + this._task = window.spawn(cx, { + let this = cx.weak_entity(); + async |cx| Self::refresh(this, cx).await + }) } - _ => {} }, ); let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let mut was_collapse_untracked_diff = GitPanelSettings::get_global(cx).collapse_untracked_diff; - cx.observe_global::(move |this, cx| { + cx.observe_global_in::(window, move |this, window, cx| { let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let is_collapse_untracked_diff = GitPanelSettings::get_global(cx).collapse_untracked_diff; if is_sort_by_path != was_sort_by_path || is_collapse_untracked_diff != was_collapse_untracked_diff { - *this.update_needed.borrow_mut() = (); + this._task = { + window.spawn(cx, { + let this = cx.weak_entity(); + async |cx| Self::refresh(this, cx).await + }) + } } was_sort_by_path = is_sort_by_path; was_collapse_untracked_diff = is_collapse_untracked_diff; }) .detach(); - let (mut send, recv) = postage::watch::channel::<()>(); - let worker = window.spawn(cx, { + let task = window.spawn(cx, { let this = cx.weak_entity(); - async |cx| Self::handle_status_updates(this, recv, cx).await + async |cx| Self::refresh(this, cx).await }); - // Kick off a refresh immediately - *send.borrow_mut() = (); Self { project, - git_store: git_store.clone(), workspace: workspace.downgrade(), + branch_diff, focus_handle, editor, multibuffer, + buffer_diff_subscriptions: Default::default(), pending_scroll: None, - update_needed: send, - _task: worker, - _subscription: git_store_subscription, + _task: task, + _subscription: branch_diff_subscription, } } + pub fn diff_base<'a>(&'a self, cx: &'a App) -> &'a DiffBase { + self.branch_diff.read(cx).diff_base() + } + pub fn move_to_entry( &mut self, entry: GitStatusEntry, window: &mut Window, cx: &mut Context, ) { - let Some(git_repo) = self.git_store.read(cx).active_repository() else { + let Some(git_repo) = self.branch_diff.read(cx).repo() else { return; }; let repo = git_repo.read(cx); @@ -360,68 +461,28 @@ impl ProjectDiff { } } - fn load_buffers(&mut self, cx: &mut Context) -> Vec>> { - let Some(repo) = self.git_store.read(cx).active_repository() else { - self.multibuffer.update(cx, |multibuffer, cx| { - multibuffer.clear(cx); - }); - return vec![]; - }; - - let mut previous_paths = self.multibuffer.read(cx).paths().collect::>(); - - let mut result = vec![]; - repo.update(cx, |repo, cx| { - for entry in repo.cached_status() { - if !entry.status.has_changes() { - continue; - } - let Some(project_path) = 
repo.repo_path_to_project_path(&entry.repo_path, cx) - else { - continue; - }; - let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx); - let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone()); - - previous_paths.remove(&path_key); - let load_buffer = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); - - let project = self.project.clone(); - result.push(cx.spawn(async move |_, cx| { - let buffer = load_buffer.await?; - let changes = project - .update(cx, |project, cx| { - project.open_uncommitted_diff(buffer.clone(), cx) - })? - .await?; - Ok(DiffBuffer { - path_key, - buffer, - diff: changes, - file_status: entry.status, - }) - })); - } - }); - self.multibuffer.update(cx, |multibuffer, cx| { - for path in previous_paths { - multibuffer.remove_excerpts_for_path(path, cx); - } - }); - result - } - fn register_buffer( &mut self, - diff_buffer: DiffBuffer, + path_key: PathKey, + file_status: FileStatus, + buffer: Entity, + diff: Entity, window: &mut Window, cx: &mut Context, ) { - let path_key = diff_buffer.path_key; - let buffer = diff_buffer.buffer; - let diff = diff_buffer.diff; + if self.branch_diff.read(cx).diff_base().is_merge_base() { + self.multibuffer.update(cx, |multibuffer, cx| { + multibuffer.add_diff(diff.clone(), cx); + }); + } + let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| { + this._task = window.spawn(cx, { + let this = cx.weak_entity(); + async |cx| Self::refresh(this, cx).await + }) + }); + self.buffer_diff_subscriptions + .insert(path_key.path.clone(), (diff.clone(), subscription)); let conflict_addon = self .editor @@ -440,9 +501,10 @@ impl ProjectDiff { .unwrap_or_default(); let conflicts = conflicts.iter().map(|conflict| conflict.range.clone()); - let excerpt_ranges = merge_anchor_ranges(diff_hunk_ranges, conflicts, &snapshot) - .map(|range| range.to_point(&snapshot)) - .collect::>(); + let excerpt_ranges = + merge_anchor_ranges(diff_hunk_ranges.into_iter(), conflicts, &snapshot) + .map(|range| range.to_point(&snapshot)) + .collect::>(); let (was_empty, is_excerpt_newly_added) = self.multibuffer.update(cx, |multibuffer, cx| { let was_empty = multibuffer.is_empty(); @@ -464,8 +526,8 @@ impl ProjectDiff { }); } if is_excerpt_newly_added - && (diff_buffer.file_status.is_deleted() - || (diff_buffer.file_status.is_untracked() + && (file_status.is_deleted() + || (file_status.is_untracked() && GitPanelSettings::get_global(cx).collapse_untracked_diff)) { editor.fold_buffer(snapshot.text.remote_id(), cx) @@ -490,26 +552,51 @@ impl ProjectDiff { } } - pub async fn handle_status_updates( - this: WeakEntity, - mut recv: postage::watch::Receiver<()>, - cx: &mut AsyncWindowContext, - ) -> Result<()> { - while (recv.next().await).is_some() { - let buffers_to_load = this.update(cx, |this, cx| this.load_buffers(cx))?; - for buffer_to_load in buffers_to_load { - if let Some(buffer) = buffer_to_load.await.log_err() { - cx.update(|window, cx| { - this.update(cx, |this, cx| this.register_buffer(buffer, window, cx)) - .ok(); - })?; + pub async fn refresh(this: WeakEntity, cx: &mut AsyncWindowContext) -> Result<()> { + let mut path_keys = Vec::new(); + let buffers_to_load = this.update(cx, |this, cx| { + let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| { + let load_buffers = branch_diff.load_buffers(cx); + (branch_diff.repo().cloned(), load_buffers) + }); + let mut previous_paths = this.multibuffer.read(cx).paths().collect::>(); + + if let Some(repo) = 
repo { + let repo = repo.read(cx); + + path_keys = Vec::with_capacity(buffers_to_load.len()); + for entry in buffers_to_load.iter() { + let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx); + let path_key = + PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone()); + previous_paths.remove(&path_key); + path_keys.push(path_key) } } - this.update(cx, |this, cx| { - this.pending_scroll.take(); - cx.notify(); - })?; + + this.multibuffer.update(cx, |multibuffer, cx| { + for path in previous_paths { + this.buffer_diff_subscriptions.remove(&path.path); + multibuffer.remove_excerpts_for_path(path, cx); + } + }); + buffers_to_load + })?; + + for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) { + if let Some((buffer, diff)) = entry.load.await.log_err() { + cx.update(|window, cx| { + this.update(cx, |this, cx| { + this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx) + }) + .ok(); + })?; + } } + this.update(cx, |this, cx| { + this.pending_scroll.take(); + cx.notify(); + })?; Ok(()) } @@ -519,8 +606,7 @@ impl ProjectDiff { self.multibuffer .read(cx) .excerpt_paths() - .map(|key| key.path()) - .cloned() + .map(|key| key.path.clone()) .collect() } } @@ -579,8 +665,8 @@ impl Item for ProjectDiff { Some("Project Diff".into()) } - fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement { - Label::new("Uncommitted Changes") + fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement { + Label::new(self.tab_content_text(0, cx)) .color(if params.selected { Color::Default } else { @@ -589,8 +675,11 @@ impl Item for ProjectDiff { .into_any_element() } - fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString { - "Uncommitted Changes".into() + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + match self.branch_diff.read(cx).diff_base() { + DiffBase::Head => "Uncommitted Changes".into(), + DiffBase::Merge { base_ref } => format!("Changes since {}", base_ref).into(), + } } fn telemetry_event_text(&self) -> Option<&'static str> { @@ -710,7 +799,7 @@ impl Item for ProjectDiff { } impl Render for ProjectDiff { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_empty = self.multibuffer.read(cx).is_empty(); div() @@ -755,7 +844,6 @@ impl Render for ProjectDiff { .key_binding(KeyBinding::for_action_in( &CloseActiveItem::default(), &keybinding_focus_handle, - window, cx, )) .on_click(move |_, window, cx| { @@ -788,30 +876,47 @@ impl SerializableItem for ProjectDiff { } fn deserialize( - _project: Entity, + project: Entity, workspace: WeakEntity, - _workspace_id: workspace::WorkspaceId, - _item_id: workspace::ItemId, + workspace_id: workspace::WorkspaceId, + item_id: workspace::ItemId, window: &mut Window, cx: &mut App, ) -> Task>> { window.spawn(cx, async move |cx| { - workspace.update_in(cx, |workspace, window, cx| { - let workspace_handle = cx.entity(); - cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx)) - }) + let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?; + + let diff = cx.update(|window, cx| { + let branch_diff = cx + .new(|cx| branch_diff::BranchDiff::new(diff_base, project.clone(), window, cx)); + let workspace = workspace.upgrade().context("workspace gone")?; + anyhow::Ok( + cx.new(|cx| ProjectDiff::new_impl(branch_diff, project, workspace, 
window, cx)), + ) + })??; + + Ok(diff) }) } fn serialize( &mut self, - _workspace: &mut Workspace, - _item_id: workspace::ItemId, + workspace: &mut Workspace, + item_id: workspace::ItemId, _closing: bool, _window: &mut Window, - _cx: &mut Context, + cx: &mut Context, ) -> Option>> { - None + let workspace_id = workspace.database_id()?; + let diff_base = self.diff_base(cx).clone(); + + Some(cx.background_spawn({ + async move { + persistence::PROJECT_DIFF_DB + .save_diff_base(item_id, workspace_id, diff_base.clone()) + .await + } + })) } fn should_serialize(&self, _: &Self::Event) -> bool { @@ -819,6 +924,80 @@ impl SerializableItem for ProjectDiff { } } +mod persistence { + + use anyhow::Context as _; + use db::{ + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, + }; + use project::git_store::branch_diff::DiffBase; + use workspace::{ItemId, WorkspaceDb, WorkspaceId}; + + pub struct ProjectDiffDb(ThreadSafeConnection); + + impl Domain for ProjectDiffDb { + const NAME: &str = stringify!(ProjectDiffDb); + + const MIGRATIONS: &[&str] = &[sql!( + CREATE TABLE project_diffs( + workspace_id INTEGER, + item_id INTEGER UNIQUE, + + diff_base TEXT, + + PRIMARY KEY(workspace_id, item_id), + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT; + )]; + } + + db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]); + + impl ProjectDiffDb { + pub async fn save_diff_base( + &self, + item_id: ItemId, + workspace_id: WorkspaceId, + diff_base: DiffBase, + ) -> anyhow::Result<()> { + self.write(move |connection| { + let sql_stmt = sql!( + INSERT OR REPLACE INTO project_diffs(item_id, workspace_id, diff_base) VALUES (?, ?, ?) + ); + let diff_base_str = serde_json::to_string(&diff_base)?; + let mut query = connection.exec_bound::<(ItemId, WorkspaceId, String)>(sql_stmt)?; + query((item_id, workspace_id, diff_base_str)).context(format!( + "exec_bound failed to execute or parse for: {}", + sql_stmt + )) + }) + .await + } + + pub fn get_diff_base( + &self, + item_id: ItemId, + workspace_id: WorkspaceId, + ) -> anyhow::Result { + let sql_stmt = + sql!(SELECT diff_base FROM project_diffs WHERE item_id = ?AND workspace_id = ?); + let diff_base_str = self.select_row_bound::<(ItemId, WorkspaceId), String>(sql_stmt)?( + (item_id, workspace_id), + ) + .context(::std::format!( + "Error in get_diff_base, select_row_bound failed to execute or parse for: {}", + sql_stmt + ))?; + let Some(diff_base_str) = diff_base_str else { + return Ok(DiffBase::Head); + }; + serde_json::from_str(&diff_base_str).context("deserializing diff base") + } + } +} + pub struct ProjectDiffToolbar { project_diff: Option>, workspace: WeakEntity, @@ -883,6 +1062,7 @@ impl ToolbarItemView for ProjectDiffToolbar { ) -> ToolbarItemLocation { self.project_diff = active_pane_item .and_then(|item| item.act_as::(cx)) + .filter(|item| item.read(cx).diff_base(cx) == &DiffBase::Head) .map(|entity| entity.downgrade()); if self.project_diff.is_some() { ToolbarItemLocation::PrimaryRight @@ -947,6 +1127,11 @@ impl Render for ProjectDiffToolbar { &StageAndNext, &focus_handle, )) + .disabled( + !button_states.prev_next + && !button_states.stage_all + && !button_states.unstage_all, + ) .on_click(cx.listener(|this, _, window, cx| { this.dispatch_action(&StageAndNext, window, cx) })), @@ -958,6 +1143,11 @@ impl Render for ProjectDiffToolbar { &UnstageAndNext, &focus_handle, )) + .disabled( + !button_states.prev_next + && !button_states.stage_all + && 
!button_states.unstage_all, + ) .on_click(cx.listener(|this, _, window, cx| { this.dispatch_action(&UnstageAndNext, window, cx) })), @@ -1342,18 +1532,42 @@ fn merge_anchor_ranges<'a>( }) } +struct BranchDiffAddon { + branch_diff: Entity, +} + +impl Addon for BranchDiffAddon { + fn to_any(&self) -> &dyn std::any::Any { + self + } + + fn override_status_for_buffer_id( + &self, + buffer_id: language::BufferId, + cx: &App, + ) -> Option { + self.branch_diff + .read(cx) + .status_for_buffer_id(buffer_id, cx) + } +} + #[cfg(test)] mod tests { + use collections::HashMap; use db::indoc; use editor::test::editor_test_context::{EditorTestContext, assert_state_with_diff}; - use git::status::{UnmergedStatus, UnmergedStatusCode}; + use git::status::{TrackedStatus, UnmergedStatus, UnmergedStatusCode}; use gpui::TestAppContext; use project::FakeFs; use serde_json::json; use settings::SettingsStore; use std::path::Path; use unindent::Unindent as _; - use util::{path, rel_path::rel_path}; + use util::{ + path, + rel_path::{RelPath, rel_path}, + }; use super::*; @@ -1608,8 +1822,8 @@ mod tests { cx, &" - original - + different - ˇ" + + ˇdifferent + " .unindent(), ); } @@ -1619,14 +1833,13 @@ mod tests { project_diff::{self, ProjectDiff}, }; - #[cfg_attr(windows, ignore = "currently fails on windows")] #[gpui::test] async fn test_go_to_prev_hunk_multibuffer(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ ".git": {}, "a.txt": "created\n", @@ -1637,7 +1850,7 @@ mod tests { .await; fs.set_head_and_index_for_repo( - Path::new("/a/.git"), + Path::new(path!("/a/.git")), &[ ("b.txt", "before\n".to_string()), ("c.txt", "unchanged\n".to_string()), @@ -1645,7 +1858,7 @@ mod tests { ], ); - let project = Project::test(fs, [Path::new("/a")], cx).await; + let project = Project::test(fs, [Path::new(path!("/a"))], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); @@ -1707,7 +1920,6 @@ mod tests { )); } - #[cfg_attr(windows, ignore = "currently fails on windows")] #[gpui::test] async fn test_excerpts_splitting_after_restoring_the_middle_excerpt(cx: &mut TestAppContext) { init_test(cx); @@ -1747,7 +1959,7 @@ mod tests { let fs = FakeFs::new(cx.executor()); fs.insert_tree( - "/a", + path!("/a"), json!({ ".git": {}, "main.rs": buffer_contents, @@ -1756,11 +1968,11 @@ mod tests { .await; fs.set_head_and_index_for_repo( - Path::new("/a/.git"), + Path::new(path!("/a/.git")), &[("main.rs", git_contents.to_owned())], ); - let project = Project::test(fs, [Path::new("/a")], cx).await; + let project = Project::test(fs, [Path::new(path!("/a"))], cx).await; let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); @@ -1925,6 +2137,7 @@ mod tests { cx.run_until_parked(); let editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + assert_state_with_diff( &editor, cx, @@ -1938,6 +2151,7 @@ mod tests { .unindent(), ); + // The project diff updates its excerpts when a new hunk appears in a buffer that already has a diff. 
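+        // (A hedged sketch of that flow, using only names that appear elsewhere in this
+        // diff; the exact call chain may differ: an edit that introduces a new hunk causes
+        // the underlying BranchDiff to recompute the file's status and notify subscribers,
+        // and ProjectDiff then reloads the affected buffer, keying the fresh excerpt with
+        // PathKey::with_sort_prefix(..) and registering it via register_buffer(..), while
+        // stale paths are dropped through multibuffer.remove_excerpts_for_path(..).)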
let buffer = project .update(cx, |project, cx| { project.open_local_buffer(path!("/project/foo.txt"), cx) @@ -1990,4 +2204,156 @@ mod tests { .unindent(), ); } + + #[gpui::test] + async fn test_branch_diff(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "C", + "b.txt": "new", + "c.txt": "in-merge-base-and-work-tree", + "d.txt": "created-in-head", + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let diff = cx + .update(|window, cx| { + ProjectDiff::new_with_default_branch(project.clone(), workspace, window, cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + fs.set_head_for_repo( + Path::new(path!("/project/.git")), + &[("a.txt", "B".into()), ("d.txt", "created-in-head".into())], + "sha", + ); + // fs.set_index_for_repo(dot_git, index_state); + fs.set_merge_base_content_for_repo( + Path::new(path!("/project/.git")), + &[ + ("a.txt", "A".into()), + ("c.txt", "in-merge-base-and-work-tree".into()), + ], + ); + cx.run_until_parked(); + + let editor = diff.read_with(cx, |diff, _| diff.editor.clone()); + + assert_state_with_diff( + &editor, + cx, + &" + - A + + ˇC + + new + + created-in-head" + .unindent(), + ); + + let statuses: HashMap, Option> = + editor.update(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .all_buffers() + .iter() + .map(|buffer| { + ( + buffer.read(cx).file().unwrap().path().clone(), + editor.status_for_buffer_id(buffer.read(cx).remote_id(), cx), + ) + }) + .collect() + }); + + assert_eq!( + statuses, + HashMap::from_iter([ + ( + rel_path("a.txt").into_arc(), + Some(FileStatus::Tracked(TrackedStatus { + index_status: git::status::StatusCode::Modified, + worktree_status: git::status::StatusCode::Modified + })) + ), + (rel_path("b.txt").into_arc(), Some(FileStatus::Untracked)), + ( + rel_path("d.txt").into_arc(), + Some(FileStatus::Tracked(TrackedStatus { + index_status: git::status::StatusCode::Added, + worktree_status: git::status::StatusCode::Added + })) + ) + ]) + ); + } + + #[gpui::test] + async fn test_update_on_uncommit(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "README.md": "# My cool project\n".to_owned() + }), + ) + .await; + fs.set_head_and_index_for_repo( + Path::new(path!("/project/.git")), + &[("README.md", "# My cool project\n".to_owned())], + ); + let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await; + let worktree_id = project.read_with(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }); + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + cx.run_until_parked(); + + let _editor = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path((worktree_id, rel_path("README.md")), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + cx.focus(&workspace); + cx.update(|window, cx| { + window.dispatch_action(project_diff::Diff.boxed_clone(), cx); + }); + cx.run_until_parked(); + let item = workspace.update(cx, |workspace, cx| { + workspace.active_item_as::(cx).unwrap() + }); + cx.focus(&item); + let editor = item.read_with(cx, |item, _| item.editor.clone()); + + fs.set_head_and_index_for_repo( + 
Path::new(path!("/project/.git")), + &[( + "README.md", + "# My cool project\nDetails to come.\n".to_owned(), + )], + ); + cx.run_until_parked(); + + let mut cx = EditorTestContext::for_editor_in(editor, cx).await; + + cx.assert_excerpts_with_selections("[EXCERPT]\nˇ# My cool project\nDetails to come.\n"); + } } diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index d82498007d3d38e509e34e86044fa0a0e188c910..58f17d7a3bb087ff058878f7889d6d83bc1727a6 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -5,18 +5,21 @@ use git::stash::StashEntry; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, - SharedString, Styled, Subscription, Task, Window, actions, rems, + SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, svg, }; use picker::{Picker, PickerDelegate}; use project::git_store::{Repository, RepositoryEvent}; use std::sync::Arc; use time::{OffsetDateTime, UtcOffset}; use time_format; -use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; +use ui::{ + ButtonLike, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, +}; use util::ResultExt; use workspace::notifications::DetachAndPromptErr; use workspace::{ModalView, Workspace}; +use crate::commit_view::CommitView; use crate::stash_picker; actions!( @@ -24,6 +27,8 @@ actions!( [ /// Drop the selected stash entry. DropStashItem, + /// Show the diff view of the selected stash entry. + ShowStashItem, ] ); @@ -38,8 +43,9 @@ pub fn open( cx: &mut Context, ) { let repository = workspace.project().read(cx).active_repository(cx); + let weak_workspace = workspace.weak_handle(); workspace.toggle_modal(window, cx, |window, cx| { - StashList::new(repository, rems(34.), window, cx) + StashList::new(repository, weak_workspace, rems(34.), window, cx) }) } @@ -53,6 +59,7 @@ pub struct StashList { impl StashList { fn new( repository: Option>, + workspace: WeakEntity, width: Rems, window: &mut Window, cx: &mut Context, @@ -65,7 +72,7 @@ impl StashList { if let Some(repo) = repository.clone() { _subscriptions.push( cx.subscribe_in(&repo, window, |this, _, event, window, cx| { - if matches!(event, RepositoryEvent::Updated { .. 
}) { + if matches!(event, RepositoryEvent::StashEntriesChanged) { let stash_entries = this.picker.read_with(cx, |picker, cx| { picker .delegate @@ -98,7 +105,7 @@ impl StashList { }) .detach_and_log_err(cx); - let delegate = StashListDelegate::new(repository, window, cx); + let delegate = StashListDelegate::new(repository, workspace, window, cx); let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); let picker_focus_handle = picker.focus_handle(cx); picker.update(cx, |picker, _| { @@ -131,6 +138,20 @@ impl StashList { cx.notify(); } + fn handle_show_stash( + &mut self, + _: &ShowStashItem, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + picker + .delegate + .show_stash_at(picker.delegate.selected_index(), window, cx); + }); + cx.notify(); + } + fn handle_modifiers_changed( &mut self, ev: &ModifiersChangedEvent, @@ -157,6 +178,7 @@ impl Render for StashList { .w(self.width) .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed)) .on_action(cx.listener(Self::handle_drop_stash)) + .on_action(cx.listener(Self::handle_show_stash)) .child(self.picker.clone()) } } @@ -172,6 +194,7 @@ pub struct StashListDelegate { matches: Vec, all_stash_entries: Option>, repo: Option>, + workspace: WeakEntity, selected_index: usize, last_query: String, modifiers: Modifiers, @@ -182,6 +205,7 @@ pub struct StashListDelegate { impl StashListDelegate { fn new( repo: Option>, + workspace: WeakEntity, _window: &mut Window, cx: &mut Context, ) -> Self { @@ -192,6 +216,7 @@ impl StashListDelegate { Self { matches: vec![], repo, + workspace, all_stash_entries: None, selected_index: 0, last_query: Default::default(), @@ -235,6 +260,25 @@ impl StashListDelegate { }); } + fn show_stash_at(&self, ix: usize, window: &mut Window, cx: &mut Context>) { + let Some(entry_match) = self.matches.get(ix) else { + return; + }; + let stash_sha = entry_match.entry.oid.to_string(); + let stash_index = entry_match.entry.index; + let Some(repo) = self.repo.clone() else { + return; + }; + CommitView::open( + stash_sha, + repo.downgrade(), + self.workspace.clone(), + Some(stash_index), + window, + cx, + ); + } + fn pop_stash(&self, stash_index: usize, window: &mut Window, cx: &mut Context>) { let Some(repo) = self.repo.clone() else { return; @@ -390,7 +434,7 @@ impl PickerDelegate for StashListDelegate { ix: usize, selected: bool, _window: &mut Window, - _cx: &mut Context>, + cx: &mut Context>, ) -> Option { let entry_match = &self.matches[ix]; @@ -432,11 +476,35 @@ impl PickerDelegate for StashListDelegate { .size(LabelSize::Small), ); + let show_button = div() + .group("show-button-hover") + .child( + ButtonLike::new("show-button") + .child( + svg() + .size(IconSize::Medium.rems()) + .flex_none() + .path(IconName::Eye.path()) + .text_color(Color::Default.color(cx)) + .group_hover("show-button-hover", |this| { + this.text_color(Color::Accent.color(cx)) + }) + .hover(|this| this.text_color(Color::Accent.color(cx))), + ) + .tooltip(Tooltip::for_action_title("Show Stash", &ShowStashItem)) + .on_click(cx.listener(move |picker, _, window, cx| { + cx.stop_propagation(); + picker.delegate.show_stash_at(ix, window, cx); + })), + ) + .into_any_element(); + Some( ListItem::new(SharedString::from(format!("stash-{ix}"))) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) + .end_slot(show_button) .child( v_flex() .w_full() @@ -455,11 +523,7 @@ impl PickerDelegate for StashListDelegate { Some("No stashes found".into()) } - fn render_footer( - &self, - window: &mut 
Window, - cx: &mut Context>, - ) -> Option { + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { let focus_handle = self.focus_handle.clone(); Some( @@ -473,7 +537,7 @@ impl PickerDelegate for StashListDelegate { .child( Button::new("apply-stash", "Apply") .key_binding( - KeyBinding::for_action_in(&menu::Confirm, &focus_handle, window, cx) + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_, window, cx| { @@ -483,13 +547,8 @@ impl PickerDelegate for StashListDelegate { .child( Button::new("pop-stash", "Pop") .key_binding( - KeyBinding::for_action_in( - &menu::SecondaryConfirm, - &focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + KeyBinding::for_action_in(&menu::SecondaryConfirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_, window, cx| { window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx) @@ -501,7 +560,6 @@ impl PickerDelegate for StashListDelegate { KeyBinding::for_action_in( &stash_picker::DropStashItem, &focus_handle, - window, cx, ) .map(|kb| kb.size(rems_from_px(12.))), diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 8f7dac4e4049a65dbd630966cea249664d22ba61..fd8cd3597377a6de78b3153ccc430afe81b1127e 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -49,7 +49,7 @@ impl TextDiffView { let selection_data = source_editor.update(cx, |editor, cx| { let multibuffer = editor.buffer().read(cx); let source_buffer = multibuffer.as_singleton()?; - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&editor.display_snapshot(cx)); let buffer_snapshot = source_buffer.read(cx); let first_selection = selections.first()?; let max_point = buffer_snapshot.max_point(); diff --git a/crates/go_to_line/Cargo.toml b/crates/go_to_line/Cargo.toml index 54a9b4d37c7a237cdedf20e0cc683895384caa03..0260cd2d122f83f2c11505be9e6e8a84f69f8569 100644 --- a/crates/go_to_line/Cargo.toml +++ b/crates/go_to_line/Cargo.toml @@ -24,7 +24,6 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 0d17e746701759aef4a0521f1fb0afcb578eb02a..5c10537e2869e0ca51e3178598f55c1589ceacd7 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -1,5 +1,5 @@ use editor::{Editor, MultiBufferSnapshot}; -use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity}; +use gpui::{App, Entity, FocusHandle, Focusable, Styled, Subscription, Task, WeakEntity}; use settings::Settings; use std::{fmt::Write, num::NonZeroU32, time::Duration}; use text::{Point, Selection}; @@ -208,7 +208,7 @@ impl CursorPosition { impl Render for CursorPosition { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { if !StatusBarSettings::get_global(cx).cursor_position_button { - return div(); + return div().hidden(); } div().when_some(self.position, |el, position| { @@ -238,18 +238,16 @@ impl Render for CursorPosition { }); } })) - .tooltip(move |window, cx| match context.as_ref() { + .tooltip(move |_window, cx| match context.as_ref() { Some(context) => Tooltip::for_action_in( "Go to Line/Column", &editor::actions::ToggleGoToLine, context, - window, cx, 
), None => Tooltip::for_action( "Go to Line/Column", &editor::actions::ToggleGoToLine, - window, cx, ), }), diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index f9dd0178922b1a479caade3953d2eb0c0e75c83d..9b0fb6d8c16b0e44b1bbfd1464f44bb7e88b0cde 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -74,7 +74,9 @@ impl GoToLine { ) -> Self { let (user_caret, last_line, scroll_position) = active_editor.update(cx, |editor, cx| { let user_caret = UserCaretPosition::at_selection_end( - &editor.selections.last::(cx), + &editor + .selections + .last::(&editor.display_snapshot(cx)), &editor.buffer().read(cx).snapshot(cx), ); @@ -739,7 +741,7 @@ mod tests { let selections = editor.update(cx, |editor, cx| { editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.start..s.end) .collect::>() diff --git a/crates/google_ai/Cargo.toml b/crates/google_ai/Cargo.toml index ce759698ed5f986663fe1cae4a83b65cd76a8e4f..81e05e4836529e9b73b58b72683a7e72a4d5c984 100644 --- a/crates/google_ai/Cargo.toml +++ b/crates/google_ai/Cargo.toml @@ -23,4 +23,3 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index fca9a0c4d7a3d2915ea3ec6409067c3735fae5ed..af23a336f6230a16040cd98f1f3377c817af05fb 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "gpui" -version = "0.2.0" +version = "0.2.2" edition.workspace = true authors = ["Nathan Sobo "] description = "Zed's GPU-accelerated UI framework" @@ -133,12 +133,13 @@ util.workspace = true uuid.workspace = true waker-fn = "1.2.0" lyon = "1.0" -workspace-hack.workspace = true libc.workspace = true +pin-project = "1.1.10" [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" cocoa.workspace = true +cocoa-foundation.workspace = true core-foundation.workspace = true core-foundation-sys.workspace = true core-graphics = "0.24" diff --git a/crates/gpui/README.md b/crates/gpui/README.md index 4b2ba8818b7e983a52730c8399e8afaab79d5e5e..2c411f76cd4782904f5e704c446a6f0e76f7d9ab 100644 --- a/crates/gpui/README.md +++ b/crates/gpui/README.md @@ -11,7 +11,7 @@ GPUI is still in active development as we work on the Zed code editor, and is st gpui = { version = "*" } ``` - - [Ownership and data flow](_ownership_and_data_flow) + - [Ownership and data flow](src/_ownership_and_data_flow.rs) Everything in GPUI starts with an `Application`. You can create one with `Application::new()`, and kick off your application by passing a callback to `Application::run()`. Inside this callback, you can create a new window with `App::open_window()`, and register your first root view. See [gpui.rs](https://www.gpui.rs/) for a complete example. 
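A minimal bootstrap sketch of that flow (illustrative only: `HelloView`, the window options, and the styling are assumptions, not part of this patch):

```rust
use gpui::{App, Application, Context, Window, WindowOptions, div, prelude::*};

// A single root view; GPUI calls `render` whenever the view needs to be drawn.
struct HelloView;

impl Render for HelloView {
    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
        div().size_full().bg(gpui::white()).child("Hello from GPUI")
    }
}

fn main() {
    Application::new().run(|cx: &mut App| {
        // Open a window whose root view is a freshly created `HelloView` entity.
        cx.open_window(WindowOptions::default(), |_window, cx| cx.new(|_cx| HelloView))
            .unwrap();
        cx.activate(true);
    });
}
```

Views are just entities created with `cx.new`; calling `cx.notify()` from a view's context marks it for re-render on the next frame.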
diff --git a/crates/gpui/build.rs b/crates/gpui/build.rs index e48594101f9cab5f945623df5b1ae47f8fd5a058..83aea8a17911aa3d8f63938d3cccdd00dd0935c3 100644 --- a/crates/gpui/build.rs +++ b/crates/gpui/build.rs @@ -49,7 +49,7 @@ fn check_wgsl_shaders() { // All clear } Err(e) => { - eprintln!("WGSL shader compilation failed:\n{}", e); + println!("cargo::error=WGSL shader compilation failed:\n{}", e); process::exit(1); } } @@ -220,8 +220,8 @@ mod macos { .unwrap(); if !output.status.success() { - eprintln!( - "metal shader compilation failed:\n{}", + println!( + "cargo::error=metal shader compilation failed:\n{}", String::from_utf8_lossy(&output.stderr) ); process::exit(1); @@ -236,8 +236,8 @@ mod macos { .unwrap(); if !output.status.success() { - eprintln!( - "metallib compilation failed:\n{}", + println!( + "cargo::error=metallib compilation failed:\n{}", String::from_utf8_lossy(&output.stderr) ); process::exit(1); @@ -418,15 +418,15 @@ mod windows { if result.status.success() { return; } - eprintln!( - "Shader compilation failed for {}:\n{}", + println!( + "cargo::error=Shader compilation failed for {}:\n{}", entry_point, String::from_utf8_lossy(&result.stderr) ); process::exit(1); } Err(e) => { - eprintln!("Failed to run fxc for {}: {}", entry_point, e); + println!("cargo::error=Failed to run fxc for {}: {}", entry_point, e); process::exit(1); } } diff --git a/crates/gpui/examples/data_table.rs b/crates/gpui/examples/data_table.rs index e176c44d530ecbc6d5d3140f5c2defaa30a6149e..56c9625ed3039b872cf4fcc70e84719ce903e268 100644 --- a/crates/gpui/examples/data_table.rs +++ b/crates/gpui/examples/data_table.rs @@ -374,7 +374,6 @@ impl DataTable { impl Render for DataTable { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { div() - .font_family(".SystemUIFont") .bg(gpui::white()) .text_sm() .size_full() diff --git a/crates/gpui/examples/focus_visible.rs b/crates/gpui/examples/focus_visible.rs new file mode 100644 index 0000000000000000000000000000000000000000..737317cabadb7d3358c9c0497b52d4c2ff2e1028 --- /dev/null +++ b/crates/gpui/examples/focus_visible.rs @@ -0,0 +1,214 @@ +use gpui::{ + App, Application, Bounds, Context, Div, ElementId, FocusHandle, KeyBinding, SharedString, + Stateful, Window, WindowBounds, WindowOptions, actions, div, prelude::*, px, size, +}; + +actions!(example, [Tab, TabPrev, Quit]); + +struct Example { + focus_handle: FocusHandle, + items: Vec<(FocusHandle, &'static str)>, + message: SharedString, +} + +impl Example { + fn new(window: &mut Window, cx: &mut Context) -> Self { + let items = vec![ + ( + cx.focus_handle().tab_index(1).tab_stop(true), + "Button with .focus() - always shows border when focused", + ), + ( + cx.focus_handle().tab_index(2).tab_stop(true), + "Button with .focus_visible() - only shows border with keyboard", + ), + ( + cx.focus_handle().tab_index(3).tab_stop(true), + "Button with both .focus() and .focus_visible()", + ), + ]; + + let focus_handle = cx.focus_handle(); + window.focus(&focus_handle); + + Self { + focus_handle, + items, + message: SharedString::from( + "Try clicking vs tabbing! 
Click shows no border, Tab shows border.", + ), + } + } + + fn on_tab(&mut self, _: &Tab, window: &mut Window, _: &mut Context) { + window.focus_next(); + self.message = SharedString::from("Pressed Tab - focus-visible border should appear!"); + } + + fn on_tab_prev(&mut self, _: &TabPrev, window: &mut Window, _: &mut Context) { + window.focus_prev(); + self.message = + SharedString::from("Pressed Shift-Tab - focus-visible border should appear!"); + } + + fn on_quit(&mut self, _: &Quit, _window: &mut Window, cx: &mut Context) { + cx.quit(); + } +} + +impl Render for Example { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn button_base(id: impl Into, label: &'static str) -> Stateful
{ + div() + .id(id) + .h_16() + .w_full() + .flex() + .justify_center() + .items_center() + .bg(gpui::rgb(0x2563eb)) + .text_color(gpui::white()) + .rounded_md() + .cursor_pointer() + .hover(|style| style.bg(gpui::rgb(0x1d4ed8))) + .child(label) + } + + div() + .id("app") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::on_tab)) + .on_action(cx.listener(Self::on_tab_prev)) + .on_action(cx.listener(Self::on_quit)) + .size_full() + .flex() + .flex_col() + .p_8() + .gap_6() + .bg(gpui::rgb(0xf3f4f6)) + .child( + div() + .text_2xl() + .font_weight(gpui::FontWeight::BOLD) + .text_color(gpui::rgb(0x111827)) + .child("CSS focus-visible Demo"), + ) + .child( + div() + .p_4() + .rounded_md() + .bg(gpui::rgb(0xdbeafe)) + .text_color(gpui::rgb(0x1e3a8a)) + .child(self.message.clone()), + ) + .child( + div() + .flex() + .flex_col() + .gap_4() + .child( + div() + .flex() + .flex_col() + .gap_2() + .child( + div() + .text_sm() + .font_weight(gpui::FontWeight::BOLD) + .text_color(gpui::rgb(0x374151)) + .child("1. Regular .focus() - always visible:"), + ) + .child( + button_base("button1", self.items[0].1) + .track_focus(&self.items[0].0) + .focus(|style| { + style.border_4().border_color(gpui::rgb(0xfbbf24)) + }) + .on_click(cx.listener(|this, _, _, cx| { + this.message = + "Clicked button 1 - focus border is visible!".into(); + cx.notify(); + })), + ), + ) + .child( + div() + .flex() + .flex_col() + .gap_2() + .child( + div() + .text_sm() + .font_weight(gpui::FontWeight::BOLD) + .text_color(gpui::rgb(0x374151)) + .child("2. New .focus_visible() - only keyboard:"), + ) + .child( + button_base("button2", self.items[1].1) + .track_focus(&self.items[1].0) + .focus_visible(|style| { + style.border_4().border_color(gpui::rgb(0x10b981)) + }) + .on_click(cx.listener(|this, _, _, cx| { + this.message = + "Clicked button 2 - no border! Try Tab instead.".into(); + cx.notify(); + })), + ), + ) + .child( + div() + .flex() + .flex_col() + .gap_2() + .child( + div() + .text_sm() + .font_weight(gpui::FontWeight::BOLD) + .text_color(gpui::rgb(0x374151)) + .child( + "3. Both .focus() (yellow) and .focus_visible() (green):", + ), + ) + .child( + button_base("button3", self.items[2].1) + .track_focus(&self.items[2].0) + .focus(|style| { + style.border_4().border_color(gpui::rgb(0xfbbf24)) + }) + .focus_visible(|style| { + style.border_4().border_color(gpui::rgb(0x10b981)) + }) + .on_click(cx.listener(|this, _, _, cx| { + this.message = + "Clicked button 3 - yellow border. Tab shows green!" 
+ .into(); + cx.notify(); + })), + ), + ), + ) + } +} + +fn main() { + Application::new().run(|cx: &mut App| { + cx.bind_keys([ + KeyBinding::new("tab", Tab, None), + KeyBinding::new("shift-tab", TabPrev, None), + KeyBinding::new("cmd-q", Quit, None), + ]); + + let bounds = Bounds::centered(None, size(px(800.), px(600.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |window, cx| cx.new(|cx| Example::new(window, cx)), + ) + .unwrap(); + + cx.activate(true); + }); +} diff --git a/crates/gpui/examples/gradient.rs b/crates/gpui/examples/gradient.rs index 4a84d2319d1d0b3d432d35dd2b66e168d733cffd..30fb3090a30d4f6c70e968d637dbf98b73559529 100644 --- a/crates/gpui/examples/gradient.rs +++ b/crates/gpui/examples/gradient.rs @@ -20,7 +20,6 @@ impl Render for GradientViewer { let color_space = self.color_space; div() - .font_family(".SystemUIFont") .bg(gpui::white()) .size_full() .p_4() diff --git a/crates/gpui/examples/image_gallery.rs b/crates/gpui/examples/image_gallery.rs index e7abb196c75ef2cd4a9376b10c253e54a89374e5..1fa7a8678f4794b50d245a02e210ea0c2d423ca3 100644 --- a/crates/gpui/examples/image_gallery.rs +++ b/crates/gpui/examples/image_gallery.rs @@ -47,7 +47,6 @@ impl Render for ImageGallery { div() .image_cache(self.image_cache.clone()) .id("main") - .font_family(".SystemUIFont") .text_color(gpui::black()) .bg(rgb(0xE9E9E9)) .overflow_y_scroll() @@ -102,7 +101,6 @@ impl Render for ImageGallery { .child(image_cache(simple_lru_cache("lru-cache", IMAGES_IN_GALLERY)).child( div() .id("main") - .font_family(".SystemUIFont") .bg(rgb(0xE9E9E9)) .text_color(gpui::black()) .overflow_y_scroll() diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index 668aed23772d32a84a81cc0648d6b60dd05e21cf..e7055cbdbbd781523edbc851d143bf56a551728f 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -328,7 +328,6 @@ impl Render for PaintingViewer { let dashed = self.dashed; div() - .font_family(".SystemUIFont") .bg(gpui::white()) .size_full() .p_4() diff --git a/crates/gpui/examples/text_layout.rs b/crates/gpui/examples/text_layout.rs index c4cbcd4e5edc142dde58a1dd5d9b61a1daee0c3a..8929955ba824c36c90951ece2cf9ba710259ddac 100644 --- a/crates/gpui/examples/text_layout.rs +++ b/crates/gpui/examples/text_layout.rs @@ -1,6 +1,6 @@ use gpui::{ - App, Application, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px, - size, + App, Application, Bounds, Context, FontStyle, FontWeight, StyledText, Window, WindowBounds, + WindowOptions, div, prelude::*, px, size, }; struct HelloWorld {} @@ -71,6 +71,12 @@ impl Render for HelloWorld { .child("100%"), ), ) + .child(div().flex().gap_2().justify_between().child( + StyledText::new("ABCD").with_highlights([ + (0..1, FontWeight::EXTRA_BOLD.into()), + (2..3, FontStyle::Italic.into()), + ]), + )) } } diff --git a/crates/gpui/examples/text_wrapper.rs b/crates/gpui/examples/text_wrapper.rs index 4c6e5e2ac89bac4f805aa5ed45733035a3f0fb7e..18372ea9e137cc3cfb11f3df59ce698660ad06be 100644 --- a/crates/gpui/examples/text_wrapper.rs +++ b/crates/gpui/examples/text_wrapper.rs @@ -7,7 +7,11 @@ struct HelloWorld {} impl Render for HelloWorld { fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - let text = "The longest word 你好世界这段是中文,こんにちはこの段落は日本語です in any of the major English language dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that refers to a lung disease contracted 
from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis."; + let text = "The longest word 你好世界这段是中文,こんにちはこの段落は日本語です in any of the major \ + English language dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that \ + refers to a lung disease contracted from the inhalation of very fine silica particles, \ + a url https://github.com/zed-industries/zed/pull/35724?query=foo&bar=2, \ + specifically from a volcano; medically, it is the same as silicosis."; div() .id("page") .size_full() diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 07ff04e32abc19dbe681ab6214d06469fe7917ff..d4bd7798187a5b7a358106965d9e41fd85efeffe 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -344,13 +344,9 @@ impl SystemWindowTabController { let tab_group = self .tab_groups .iter() - .find_map(|(group, tabs)| tabs.iter().find(|tab| tab.id == id).map(|_| *group)); + .find_map(|(group, tabs)| tabs.iter().find(|tab| tab.id == id).map(|_| *group))?; - if let Some(tab_group) = tab_group { - self.tab_groups.get(&tab_group) - } else { - None - } + self.tab_groups.get(&tab_group) } /// Initialize the visibility of the system window tab controller. @@ -415,7 +411,8 @@ impl SystemWindowTabController { for windows in controller.tab_groups.values_mut() { for tab in windows.iter_mut() { if tab.id == id { - tab.title = title.clone(); + tab.title = title; + return; } } } @@ -556,7 +553,7 @@ pub struct App { pub(crate) entities: EntityMap, pub(crate) window_update_stack: Vec, pub(crate) new_entity_observers: SubscriberSet, - pub(crate) windows: SlotMap>, + pub(crate) windows: SlotMap>>, pub(crate) window_handles: FxHashMap, pub(crate) focus_handles: Arc, pub(crate) keymap: Rc>, @@ -967,7 +964,7 @@ impl App { clear.clear(); cx.window_handles.insert(id, window.handle); - cx.windows.get_mut(id).unwrap().replace(window); + cx.windows.get_mut(id).unwrap().replace(Box::new(window)); Ok(handle) } Err(e) => { @@ -1242,7 +1239,7 @@ impl App { .windows .values() .filter_map(|window| { - let window = window.as_ref()?; + let window = window.as_deref()?; window.invalidator.is_dirty().then_some(window.handle) }) .collect::>() @@ -1323,7 +1320,7 @@ impl App { fn apply_refresh_effect(&mut self) { for window in self.windows.values_mut() { - if let Some(window) = window.as_mut() { + if let Some(window) = window.as_deref_mut() { window.refreshing = true; window.invalidator.set_dirty(true); } @@ -2202,7 +2199,7 @@ impl AppContext for App { .windows .get(window.id) .context("window not found")? 
- .as_ref() + .as_deref() .expect("attempted to read a window that is already on the stack"); let root_view = window.root.clone().unwrap(); diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index ea52b46d9fce958f8cb6e878581fb988c146c43b..bea98cb06a5f80fc8141a52bc47f48e8734b40c9 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -378,11 +378,9 @@ pub struct Entity { #[deref] #[deref_mut] pub(crate) any_entity: AnyEntity, - pub(crate) entity_type: PhantomData, + pub(crate) entity_type: PhantomData T>, } -unsafe impl Send for Entity {} -unsafe impl Sync for Entity {} impl Sealed for Entity {} impl Entity { @@ -657,7 +655,7 @@ pub struct WeakEntity { #[deref] #[deref_mut] any_entity: AnyWeakEntity, - entity_type: PhantomData, + entity_type: PhantomData T>, } impl std::fmt::Debug for WeakEntity { @@ -669,9 +667,6 @@ impl std::fmt::Debug for WeakEntity { } } -unsafe impl Send for WeakEntity {} -unsafe impl Sync for WeakEntity {} - impl Clone for WeakEntity { fn clone(&self) -> Self { Self { diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index b3d342b09bf1dceb27413d3ec24fbcc0d2f541e9..d974823396d9f0d546a6b035f47b569145eb021b 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -455,7 +455,7 @@ impl TestAppContext { .windows .get_mut(window.id) .unwrap() - .as_mut() + .as_deref_mut() .unwrap() .platform_window .as_test() @@ -836,7 +836,7 @@ impl VisualTestContext { }) } - /// Simulate an event from the platform, e.g. a SrollWheelEvent + /// Simulate an event from the platform, e.g. a ScrollWheelEvent /// Make sure you've called [VisualTestContext::draw] first! pub fn simulate_event(&mut self, event: E) { self.test_window(self.window) @@ -888,7 +888,9 @@ impl VisualTestContext { // safety: on_quit will be called after the test has finished. // the executor will ensure that all tasks related to the test have stopped. // so there is no way for cx to be accessed after on_quit is called. - let cx = Box::leak(unsafe { Box::from_raw(ptr) }); + // todo: This is unsound under stacked borrows (also tree borrows probably?) 
+ // the mutable reference invalidates `ptr` which is later used in the closure + let cx = unsafe { &mut *ptr }; cx.on_quit(move || unsafe { drop(Box::from_raw(ptr)); }); diff --git a/crates/gpui/src/arena.rs b/crates/gpui/src/arena.rs index a0d0c23987472de46d5b23129adb5a4ec8ee00cb..9898c8056ab0240abd32ee34992dbe96f8ebab57 100644 --- a/crates/gpui/src/arena.rs +++ b/crates/gpui/src/arena.rs @@ -15,9 +15,7 @@ struct ArenaElement { impl Drop for ArenaElement { #[inline(always)] fn drop(&mut self) { - unsafe { - (self.drop)(self.value); - } + unsafe { (self.drop)(self.value) }; } } @@ -40,33 +38,29 @@ impl Drop for Chunk { impl Chunk { fn new(chunk_size: NonZeroUsize) -> Self { - unsafe { - // this only fails if chunk_size is unreasonably huge - let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap(); - let start = alloc::alloc(layout); - if start.is_null() { - handle_alloc_error(layout); - } - let end = start.add(chunk_size.get()); - Self { - start, - end, - offset: start, - } + // this only fails if chunk_size is unreasonably huge + let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap(); + let start = unsafe { alloc::alloc(layout) }; + if start.is_null() { + handle_alloc_error(layout); + } + let end = unsafe { start.add(chunk_size.get()) }; + Self { + start, + end, + offset: start, } } fn allocate(&mut self, layout: alloc::Layout) -> Option> { - unsafe { - let aligned = self.offset.add(self.offset.align_offset(layout.align())); - let next = aligned.add(layout.size()); - - if next <= self.end { - self.offset = next; - NonNull::new(aligned) - } else { - None - } + let aligned = unsafe { self.offset.add(self.offset.align_offset(layout.align())) }; + let next = unsafe { aligned.add(layout.size()) }; + + if next <= self.end { + self.offset = next; + NonNull::new(aligned) + } else { + None } } @@ -122,54 +116,48 @@ impl Arena { where F: FnOnce() -> T, { - unsafe { - ptr::write(ptr, f()); - } + unsafe { ptr::write(ptr, f()) }; } unsafe fn drop(ptr: *mut u8) { - unsafe { - std::ptr::drop_in_place(ptr.cast::()); - } + unsafe { std::ptr::drop_in_place(ptr.cast::()) }; } - unsafe { - let layout = alloc::Layout::new::(); - let mut current_chunk = &mut self.chunks[self.current_chunk_index]; - let ptr = if let Some(ptr) = current_chunk.allocate(layout) { + let layout = alloc::Layout::new::(); + let mut current_chunk = &mut self.chunks[self.current_chunk_index]; + let ptr = if let Some(ptr) = current_chunk.allocate(layout) { + ptr.as_ptr() + } else { + self.current_chunk_index += 1; + if self.current_chunk_index >= self.chunks.len() { + self.chunks.push(Chunk::new(self.chunk_size)); + assert_eq!(self.current_chunk_index, self.chunks.len() - 1); + log::trace!( + "increased element arena capacity to {}kb", + self.capacity() / 1024, + ); + } + current_chunk = &mut self.chunks[self.current_chunk_index]; + if let Some(ptr) = current_chunk.allocate(layout) { ptr.as_ptr() } else { - self.current_chunk_index += 1; - if self.current_chunk_index >= self.chunks.len() { - self.chunks.push(Chunk::new(self.chunk_size)); - assert_eq!(self.current_chunk_index, self.chunks.len() - 1); - log::trace!( - "increased element arena capacity to {}kb", - self.capacity() / 1024, - ); - } - current_chunk = &mut self.chunks[self.current_chunk_index]; - if let Some(ptr) = current_chunk.allocate(layout) { - ptr.as_ptr() - } else { - panic!( - "Arena chunk_size of {} is too small to allocate {} bytes", - self.chunk_size, - layout.size() - ); - } - }; - - inner_writer(ptr.cast(), f); - 
self.elements.push(ArenaElement { - value: ptr, - drop: drop::, - }); - - ArenaBox { - ptr: ptr.cast(), - valid: self.valid.clone(), + panic!( + "Arena chunk_size of {} is too small to allocate {} bytes", + self.chunk_size, + layout.size() + ); } + }; + + unsafe { inner_writer(ptr.cast(), f) }; + self.elements.push(ArenaElement { + value: ptr, + drop: drop::, + }); + + ArenaBox { + ptr: ptr.cast(), + valid: self.valid.clone(), } } } diff --git a/crates/gpui/src/bounds_tree.rs b/crates/gpui/src/bounds_tree.rs index a96bfe55b9ff431a96da7bf42692288264eb184c..d621609bf7334801059513e03dfd11b4036ea816 100644 --- a/crates/gpui/src/bounds_tree.rs +++ b/crates/gpui/src/bounds_tree.rs @@ -34,15 +34,14 @@ where pub fn insert(&mut self, new_bounds: Bounds) -> u32 { // If the tree is empty, make the root the new leaf. - if self.root.is_none() { + let Some(mut index) = self.root else { let new_node = self.push_leaf(new_bounds, 1); self.root = Some(new_node); return 1; - } + }; // Search for the best place to add the new leaf based on heuristics. let mut max_intersecting_ordering = 0; - let mut index = self.root.unwrap(); while let Node::Internal { left, right, diff --git a/crates/gpui/src/element.rs b/crates/gpui/src/element.rs index a3fc6269f33d8726b55f8e8be4aadb52109a7606..2c695486c5d09103f69fb211076aec6629a29f1b 100644 --- a/crates/gpui/src/element.rs +++ b/crates/gpui/src/element.rs @@ -37,11 +37,11 @@ use crate::{ util::FluentBuilder, }; use derive_more::{Deref, DerefMut}; -pub(crate) use smallvec::SmallVec; use std::{ any::{Any, type_name}, fmt::{self, Debug, Display}, mem, panic, + sync::Arc, }; /// Implemented by types that participate in laying out and painting the contents of a window. @@ -272,8 +272,8 @@ impl IntoElement for Component { } /// A globally unique identifier for an element, used to track state across frames. 
-#[derive(Deref, DerefMut, Default, Debug, Eq, PartialEq, Hash)] -pub struct GlobalElementId(pub(crate) SmallVec<[ElementId; 32]>); +#[derive(Deref, DerefMut, Clone, Default, Debug, Eq, PartialEq, Hash)] +pub struct GlobalElementId(pub(crate) Arc<[ElementId]>); impl Display for GlobalElementId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -353,7 +353,7 @@ impl Drawable { ElementDrawPhase::Start => { let global_id = self.element.id().map(|element_id| { window.element_id_stack.push(element_id); - GlobalElementId(window.element_id_stack.clone()) + GlobalElementId(Arc::from(&*window.element_id_stack)) }); let inspector_id; @@ -361,7 +361,7 @@ impl Drawable { { inspector_id = self.element.source_location().map(|source| { let path = crate::InspectorElementPath { - global_id: GlobalElementId(window.element_id_stack.clone()), + global_id: GlobalElementId(Arc::from(&*window.element_id_stack)), source_location: source, }; window.build_inspector_element_id(path) @@ -412,7 +412,7 @@ impl Drawable { } => { if let Some(element_id) = self.element.id() { window.element_id_stack.push(element_id); - debug_assert_eq!(global_id.as_ref().unwrap().0, window.element_id_stack); + debug_assert_eq!(&*global_id.as_ref().unwrap().0, &*window.element_id_stack); } let bounds = window.layout_bounds(layout_id); @@ -461,7 +461,7 @@ impl Drawable { } => { if let Some(element_id) = self.element.id() { window.element_id_stack.push(element_id); - debug_assert_eq!(global_id.as_ref().unwrap().0, window.element_id_stack); + debug_assert_eq!(&*global_id.as_ref().unwrap().0, &*window.element_id_stack); } window.next_frame.dispatch_tree.set_active_node(node_id); @@ -741,7 +741,17 @@ impl Element for Empty { window: &mut Window, cx: &mut App, ) -> (LayoutId, Self::RequestLayoutState) { - (window.request_layout(Style::default(), None, cx), ()) + ( + window.request_layout( + Style { + display: crate::Display::None, + ..Default::default() + }, + None, + cx, + ), + (), + ) } fn prepaint( diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 06fe5902bf6eca527f5f16d84e88c9c847e3e08a..efc931f05ffbed2a0b20f23967f20f9e0704b454 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -17,12 +17,13 @@ use crate::{ AbsoluteLength, Action, AnyDrag, AnyElement, AnyTooltip, AnyView, App, Bounds, ClickEvent, - DispatchPhase, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, Hitbox, - HitboxBehavior, HitboxId, InspectorElementId, IntoElement, IsZero, KeyContext, KeyDownEvent, - KeyUpEvent, KeyboardButton, KeyboardClickEvent, LayoutId, ModifiersChangedEvent, MouseButton, - MouseClickEvent, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Overflow, ParentElement, Pixels, - Point, Render, ScrollWheelEvent, SharedString, Size, Style, StyleRefinement, Styled, Task, - TooltipId, Visibility, Window, WindowControlArea, point, px, size, + DispatchPhase, Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, + Hitbox, HitboxBehavior, HitboxId, InspectorElementId, IntoElement, IsZero, KeyContext, + KeyDownEvent, KeyUpEvent, KeyboardButton, KeyboardClickEvent, LayoutId, ModifiersChangedEvent, + MouseButton, MouseClickEvent, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Overflow, + ParentElement, Pixels, Point, Render, ScrollWheelEvent, SharedString, Size, Style, + StyleRefinement, Styled, Task, TooltipId, Visibility, Window, WindowControlArea, point, px, + size, }; use collections::HashMap; use refineable::Refineable; @@ -1033,6 +1034,18 @@ pub trait 
InteractiveElement: Sized { self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default()))); self } + + /// Set the given styles to be applied when this element is focused via keyboard navigation. + /// This is similar to CSS's `:focus-visible` pseudo-class - it only applies when the element + /// is focused AND the user is navigating via keyboard (not mouse clicks). + /// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`]. + fn focus_visible(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self + where + Self: Sized, + { + self.interactivity().focus_visible_style = Some(Box::new(f(StyleRefinement::default()))); + self + } } /// A trait for elements that want to use the standard GPUI interactivity features @@ -1403,7 +1416,12 @@ impl Element for Div { content_size, window, cx, - |_style, scroll_offset, hitbox, window, cx| { + |style, scroll_offset, hitbox, window, cx| { + // skip children + if style.display == Display::None { + return hitbox; + } + window.with_element_offset(scroll_offset, |window| { for child in &mut self.children { child.prepaint(window, cx); @@ -1443,7 +1461,12 @@ impl Element for Div { hitbox.as_ref(), window, cx, - |_style, window, cx| { + |style, window, cx| { + // skip children + if style.display == Display::None { + return; + } + for child in &mut self.children { child.paint(window, cx); } @@ -1486,6 +1509,7 @@ pub struct Interactivity { pub base_style: Box, pub(crate) focus_style: Option>, pub(crate) in_focus_style: Option>, + pub(crate) focus_visible_style: Option>, pub(crate) hover_style: Option>, pub(crate) group_hover_style: Option, pub(crate) active_style: Option>, @@ -2481,6 +2505,13 @@ impl Interactivity { { style.refine(focus_style); } + + if let Some(focus_visible_style) = self.focus_visible_style.as_ref() + && focus_handle.is_focused(window) + && window.last_input_was_keyboard() + { + style.refine(focus_visible_style); + } } if let Some(hitbox) = hitbox { diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 9760dd7d9ed953c9a335bcdeee42dc60cac08fde..075c7cf32beb400d800ca4f8970f51dae6da7afe 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -1,9 +1,9 @@ use crate::{ - AbsoluteLength, AnyElement, AnyImageCache, App, Asset, AssetLogger, Bounds, DefiniteLength, - Element, ElementId, Entity, GlobalElementId, Hitbox, Image, ImageCache, InspectorElementId, - InteractiveElement, Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, - RenderImage, Resource, SMOOTH_SVG_SCALE_FACTOR, SharedString, SharedUri, StyleRefinement, - Styled, SvgSize, Task, Window, px, swap_rgba_pa_to_bgra, + AnyElement, AnyImageCache, App, Asset, AssetLogger, Bounds, DefiniteLength, Element, ElementId, + Entity, GlobalElementId, Hitbox, Image, ImageCache, InspectorElementId, InteractiveElement, + Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, RenderImage, Resource, + SMOOTH_SVG_SCALE_FACTOR, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, Task, + Window, px, swap_rgba_pa_to_bgra, }; use anyhow::{Context as _, Result}; @@ -337,24 +337,28 @@ impl Element for Img { if let Length::Auto = style.size.width { style.size.width = match style.size.height { - Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(height), - )) => Length::Definite( - px(image_size.width.0 * height.0 / image_size.height.0) + Length::Definite(DefiniteLength::Absolute(abs_length)) => { + let height_px = 
abs_length.to_pixels(window.rem_size()); + Length::Definite( + px(image_size.width.0 * height_px.0 + / image_size.height.0) .into(), - ), + ) + } _ => Length::Definite(image_size.width.into()), }; } if let Length::Auto = style.size.height { style.size.height = match style.size.width { - Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(width), - )) => Length::Definite( - px(image_size.height * f32::from(width) / image_size.width) + Length::Definite(DefiniteLength::Absolute(abs_length)) => { + let width_px = abs_length.to_pixels(window.rem_size()); + Length::Definite( + px(image_size.height.0 * width_px.0 + / image_size.width.0) .into(), - ), + ) + } _ => Length::Definite(image_size.height.into()), }; } diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index d82d7a67a12190a19acde5715378d295c2eb9bc8..78566208c89a7d6bf73804f611b45aa70e4933ec 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -70,6 +70,7 @@ struct StateInner { #[allow(clippy::type_complexity)] scroll_handler: Option>, scrollbar_drag_start_height: Option, + measuring_behavior: ListMeasuringBehavior, } /// Whether the list is scrolling from top to bottom or bottom to top. @@ -103,6 +104,26 @@ pub enum ListSizingBehavior { Auto, } +/// The measuring behavior to apply during layout. +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ListMeasuringBehavior { + /// Measure all items in the list. + /// Note: This can be expensive for the first frame in a large list. + Measure(bool), + /// Only measure visible items + #[default] + Visible, +} + +impl ListMeasuringBehavior { + fn reset(&mut self) { + match self { + ListMeasuringBehavior::Measure(has_measured) => *has_measured = false, + ListMeasuringBehavior::Visible => {} + } + } +} + /// The horizontal sizing behavior to apply during layout. #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ListHorizontalSizingBehavior { @@ -203,11 +224,20 @@ impl ListState { scroll_handler: None, reset: false, scrollbar_drag_start_height: None, + measuring_behavior: ListMeasuringBehavior::default(), }))); this.splice(0..0, item_count); this } + /// Set the list to measure all items in the list in the first layout phase. + /// + /// This is useful for ensuring that the scrollbar size is correct instead of based on only rendered elements. + pub fn measure_all(self) -> Self { + self.0.borrow_mut().measuring_behavior = ListMeasuringBehavior::Measure(false); + self + } + /// Reset this instantiation of the list state. /// /// Note that this will cause scroll events to be dropped until the next paint. @@ -215,6 +245,7 @@ impl ListState { let old_count = { let state = &mut *self.0.borrow_mut(); state.reset = true; + state.measuring_behavior.reset(); state.logical_scroll_top = None; state.scrollbar_drag_start_height = None; state.items.summary().count @@ -478,10 +509,11 @@ impl StateInner { if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; } else { - let mut cursor = self.items.cursor::(()); - cursor.seek(&Height(new_scroll_top), Bias::Right); - let item_ix = cursor.start().count; - let offset_in_item = new_scroll_top - cursor.start().height; + let (start, ..) 
= + self.items + .find::((), &Height(new_scroll_top), Bias::Right); + let item_ix = start.count; + let offset_in_item = new_scroll_top - start.height; self.logical_scroll_top = Some(ListOffset { item_ix, offset_in_item, @@ -519,9 +551,54 @@ impl StateInner { } fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels { - let mut cursor = self.items.cursor::(()); - cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right); - cursor.start().height + logical_scroll_top.offset_in_item + let (start, ..) = self.items.find::( + (), + &Count(logical_scroll_top.item_ix), + Bias::Right, + ); + start.height + logical_scroll_top.offset_in_item + } + + fn layout_all_items( + &mut self, + available_width: Pixels, + render_item: &mut RenderItemFn, + window: &mut Window, + cx: &mut App, + ) { + match &mut self.measuring_behavior { + ListMeasuringBehavior::Visible => { + return; + } + ListMeasuringBehavior::Measure(has_measured) => { + if *has_measured { + return; + } + *has_measured = true; + } + } + + let mut cursor = self.items.cursor::(()); + let available_item_space = size( + AvailableSpace::Definite(available_width), + AvailableSpace::MinContent, + ); + + let mut measured_items = Vec::default(); + + for (ix, item) in cursor.enumerate() { + let size = item.size().unwrap_or_else(|| { + let mut element = render_item(ix, window, cx); + element.layout_as_root(available_item_space, window, cx) + }); + + measured_items.push(ListItem::Measured { + size, + focus_handle: item.focus_handle(), + }); + } + + self.items = SumTree::from_iter(measured_items, ()); } fn layout_items( @@ -711,6 +788,13 @@ impl StateInner { cx: &mut App, ) -> Result { window.transact(|window| { + match self.measuring_behavior { + ListMeasuringBehavior::Measure(has_measured) if !has_measured => { + self.layout_all_items(bounds.size.width, render_item, window, cx); + } + _ => {} + } + let mut layout_response = self.layout_items( Some(bounds.size.width), bounds.size.height, @@ -802,11 +886,12 @@ impl StateInner { if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; } else { - let mut cursor = self.items.cursor::(()); - cursor.seek(&Height(new_scroll_top), Bias::Right); + let (start, _, _) = + self.items + .find::((), &Height(new_scroll_top), Bias::Right); - let item_ix = cursor.start().count; - let offset_in_item = new_scroll_top - cursor.start().height; + let item_ix = start.count; + let offset_in_item = new_scroll_top - start.height; self.logical_scroll_top = Some(ListOffset { item_ix, offset_in_item, diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index b5e071279623611685ea744e38b072284e764e2a..5d34ccfa5d78e09d47b36a2e061fdaa0fbbca45f 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -180,8 +180,7 @@ impl StyledText { "Can't use `with_default_highlights` and `with_highlights`" ); let runs = Self::compute_runs(&self.text, default_style, highlights); - self.runs = Some(runs); - self + self.with_runs(runs) } /// Set the styling attributes for the given text, as well as @@ -194,7 +193,15 @@ impl StyledText { self.runs.is_none(), "Can't use `with_highlights` and `with_default_highlights`" ); - self.delayed_highlights = Some(highlights.into_iter().collect::>()); + self.delayed_highlights = Some( + highlights + .into_iter() + .inspect(|(run, _)| { + debug_assert!(self.text.is_char_boundary(run.start)); + debug_assert!(self.text.is_char_boundary(run.end)); + }) + .collect::>(), + ); self } @@ -207,8 +214,10 @@ 
impl StyledText { let mut ix = 0; for (range, highlight) in highlights { if ix < range.start { + debug_assert!(text.is_char_boundary(range.start)); runs.push(default_style.clone().to_run(range.start - ix)); } + debug_assert!(text.is_char_boundary(range.end)); runs.push( default_style .clone() @@ -225,6 +234,11 @@ impl StyledText { /// Set the text runs for this piece of text. pub fn with_runs(mut self, runs: Vec) -> Self { + let mut text = &**self.text; + for run in &runs { + text = text.get(run.len..).expect("invalid text run"); + } + assert!(text.is_empty(), "invalid text run"); self.runs = Some(runs); self } diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 949d4339e616cd9f49b3783f46da0f80424c474f..93082563c02f4168b1d73e2929a6bf9dbd153237 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -343,7 +343,7 @@ impl Element for UniformList { }; let content_size = Size { width: content_width, - height: longest_item_size.height * self.item_count + padding.top + padding.bottom, + height: longest_item_size.height * self.item_count, }; let shared_scroll_offset = self.interactivity.scroll_offset.clone().unwrap(); @@ -364,17 +364,7 @@ impl Element for UniformList { content_size, window, cx, - |style, mut scroll_offset, hitbox, window, cx| { - let border = style.border_widths.to_pixels(window.rem_size()); - let padding = style - .padding - .to_pixels(bounds.size.into(), window.rem_size()); - - let padded_bounds = Bounds::from_corners( - bounds.origin + point(border.left + padding.left, border.top), - bounds.bottom_right() - point(border.right + padding.right, border.bottom), - ); - + |_style, mut scroll_offset, hitbox, window, cx| { let y_flipped = if let Some(scroll_handle) = &self.scroll_handle { let scroll_state = scroll_handle.0.borrow(); scroll_state.y_flipped @@ -383,13 +373,14 @@ impl Element for UniformList { }; if self.item_count > 0 { - let content_height = - item_height * self.item_count + padding.top + padding.bottom; + let content_height = item_height * self.item_count; + let is_scrolled_vertically = !scroll_offset.y.is_zero(); - let min_vertical_scroll_offset = padded_bounds.size.height - content_height; - if is_scrolled_vertically && scroll_offset.y < min_vertical_scroll_offset { - shared_scroll_offset.borrow_mut().y = min_vertical_scroll_offset; - scroll_offset.y = min_vertical_scroll_offset; + let max_scroll_offset = padded_bounds.size.height - content_height; + + if is_scrolled_vertically && scroll_offset.y < max_scroll_offset { + shared_scroll_offset.borrow_mut().y = max_scroll_offset; + scroll_offset.y = max_scroll_offset; } let content_width = content_size.width + padding.left + padding.right; @@ -407,18 +398,19 @@ impl Element for UniformList { } let list_height = padded_bounds.size.height; let mut updated_scroll_offset = shared_scroll_offset.borrow_mut(); - let item_top = item_height * ix + padding.top; + let item_top = item_height * ix; let item_bottom = item_top + item_height; let scroll_top = -updated_scroll_offset.y; let offset_pixels = item_height * deferred_scroll.offset; let mut scrolled_to_top = false; - if item_top < scroll_top + padding.top + offset_pixels { + if item_top < scroll_top + offset_pixels { scrolled_to_top = true; - updated_scroll_offset.y = -(item_top) + padding.top + offset_pixels; - } else if item_bottom > scroll_top + list_height - padding.bottom { + // todo: using the padding here is wrong - this only works well for few scenarios + 
updated_scroll_offset.y = -item_top + padding.top + offset_pixels; + } else if item_bottom > scroll_top + list_height { scrolled_to_top = true; - updated_scroll_offset.y = -(item_bottom - list_height) - padding.bottom; + updated_scroll_offset.y = -(item_bottom - list_height); } if deferred_scroll.scroll_strict @@ -480,14 +472,9 @@ impl Element for UniformList { window.with_content_mask(Some(content_mask), |window| { for (mut item, ix) in items.into_iter().zip(visible_range.clone()) { let item_origin = padded_bounds.origin - + point( - if can_scroll_horizontally { - scroll_offset.x + padding.left - } else { - scroll_offset.x - }, - item_height * ix + scroll_offset.y + padding.top, - ); + + scroll_offset + + point(Pixels::ZERO, item_height * ix); + let available_width = if can_scroll_horizontally { padded_bounds.size.width + scroll_offset.x.abs() } else { @@ -502,18 +489,8 @@ impl Element for UniformList { frame_state.items.push(item); } - let bounds = Bounds::new( - padded_bounds.origin - + point( - if can_scroll_horizontally { - scroll_offset.x + padding.left - } else { - scroll_offset.x - }, - scroll_offset.y + padding.top, - ), - padded_bounds.size, - ); + let bounds = + Bounds::new(padded_bounds.origin + scroll_offset, padded_bounds.size); for decoration in &self.decorations { let mut decoration = decoration.as_ref().compute( visible_range.clone(), diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 0b28dd030baff6bc95ede07e50e358660a9c1353..841fbe924cd011bd2afa7d8d344e3a1c5a51e7a1 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -2,21 +2,20 @@ use crate::{App, PlatformDispatcher}; use async_task::Runnable; use futures::channel::mpsc; use smol::prelude::*; -use std::mem::ManuallyDrop; -use std::panic::Location; -use std::thread::{self, ThreadId}; use std::{ fmt::Debug, marker::PhantomData, - mem, + mem::{self, ManuallyDrop}, num::NonZeroUsize, + panic::Location, pin::Pin, rc::Rc, sync::{ Arc, - atomic::{AtomicUsize, Ordering::SeqCst}, + atomic::{AtomicUsize, Ordering}, }, task::{Context, Poll}, + thread::{self, ThreadId}, time::{Duration, Instant}, }; use util::TryFutureExt; @@ -123,7 +122,12 @@ impl TaskLabel { /// Construct a new task label. 
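    /// Labels are allocated from a process-wide counter, so each call returns a distinct value;
    /// the test dispatcher uses labels to identify and deprioritize specific tasks.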
pub fn new() -> Self { static NEXT_TASK_LABEL: AtomicUsize = AtomicUsize::new(1); - Self(NEXT_TASK_LABEL.fetch_add(1, SeqCst).try_into().unwrap()) + Self( + NEXT_TASK_LABEL + .fetch_add(1, Ordering::SeqCst) + .try_into() + .unwrap(), + ) } } @@ -210,7 +214,8 @@ impl BackgroundExecutor { } let deadline = timeout.map(|timeout| Instant::now() + timeout); - let unparker = self.dispatcher.unparker(); + let parker = parking::Parker::new(); + let unparker = parker.unparker(); let waker = waker_fn(move || { unparker.unpark(); }); @@ -222,10 +227,14 @@ impl BackgroundExecutor { Poll::Pending => { let timeout = deadline.map(|deadline| deadline.saturating_duration_since(Instant::now())); - if !self.dispatcher.park(timeout) - && deadline.is_some_and(|deadline| deadline < Instant::now()) - { - return Err(future); + if let Some(timeout) = timeout { + if !parker.park_timeout(timeout) + && deadline.is_some_and(|deadline| deadline < Instant::now()) + { + return Err(future); + } + } else { + parker.park(); } } } @@ -242,6 +251,8 @@ impl BackgroundExecutor { ) -> Result + use> { use std::sync::atomic::AtomicBool; + use parking::Parker; + let mut future = Box::pin(future); if timeout == Some(Duration::ZERO) { return Err(future); @@ -255,12 +266,16 @@ impl BackgroundExecutor { } else { usize::MAX }; - let unparker = self.dispatcher.unparker(); + + let parker = Parker::new(); + let unparker = parker.unparker(); + let awoken = Arc::new(AtomicBool::new(false)); let waker = waker_fn({ let awoken = awoken.clone(); + let unparker = unparker.clone(); move || { - awoken.store(true, SeqCst); + awoken.store(true, Ordering::SeqCst); unparker.unpark(); } }); @@ -276,7 +291,7 @@ impl BackgroundExecutor { max_ticks -= 1; if !dispatcher.tick(background_only) { - if awoken.swap(false, SeqCst) { + if awoken.swap(false, Ordering::SeqCst) { continue; } @@ -297,7 +312,8 @@ impl BackgroundExecutor { "parked with nothing left to run{waiting_message}{backtrace_message}", ) } - self.dispatcher.park(None); + dispatcher.set_unparker(unparker.clone()); + parker.park(); } } } @@ -513,9 +529,7 @@ where "local task dropped by a thread that didn't spawn it. 
Task spawned at {}", self.location ); - unsafe { - ManuallyDrop::drop(&mut self.inner); - } + unsafe { ManuallyDrop::drop(&mut self.inner) }; } } diff --git a/crates/gpui/src/inspector.rs b/crates/gpui/src/inspector.rs index 9f86576a599845bb9e09760e8001333b9dea745d..ad3ba6a4b693ef3270d570dc98b4e03f7927d388 100644 --- a/crates/gpui/src/inspector.rs +++ b/crates/gpui/src/inspector.rs @@ -39,7 +39,7 @@ mod conditional { impl Clone for InspectorElementPath { fn clone(&self) -> Self { Self { - global_id: crate::GlobalElementId(self.global_id.0.clone()), + global_id: self.global_id.clone(), source_location: self.source_location, } } diff --git a/crates/gpui/src/key_dispatch.rs b/crates/gpui/src/key_dispatch.rs index 03ee31fdad5bdfc48e10dbf74a2557ea7ee0036e..f0c857abd6f3c353105b4272b51ca519f1906078 100644 --- a/crates/gpui/src/key_dispatch.rs +++ b/crates/gpui/src/key_dispatch.rs @@ -572,18 +572,14 @@ impl DispatchTree { focus_path } - pub fn view_path(&self, view_id: EntityId) -> SmallVec<[EntityId; 8]> { - let mut view_path: SmallVec<[EntityId; 8]> = SmallVec::new(); + pub fn view_path_reversed(&self, view_id: EntityId) -> impl Iterator { let mut current_node_id = self.view_node_ids.get(&view_id).copied(); - while let Some(node_id) = current_node_id { - let node = self.node(node_id); - if let Some(view_id) = node.view_id { - view_path.push(view_id); - } - current_node_id = node.parent; - } - view_path.reverse(); // Reverse the path so it goes from the root to the view node. - view_path + + std::iter::successors( + current_node_id.map(|node_id| self.node(node_id)), + |node_id| Some(self.node(node_id.parent?)), + ) + .filter_map(|node| node.view_id) } pub fn node(&self, node_id: DispatchNodeId) -> &DispatchNode { diff --git a/crates/gpui/src/keymap.rs b/crates/gpui/src/keymap.rs index e26123339bd65fecd6ff9e5356098e29cee30890..33d956917055942cce365e9069cbb007e202eaf2 100644 --- a/crates/gpui/src/keymap.rs +++ b/crates/gpui/src/keymap.rs @@ -118,10 +118,12 @@ impl Keymap { pub fn all_bindings_for_input(&self, input: &[Keystroke]) -> Vec { self.bindings() .rev() - .filter_map(|binding| { - binding.match_keystrokes(input).filter(|pending| !pending)?; - Some(binding.clone()) + .filter(|binding| { + binding + .match_keystrokes(input) + .is_some_and(|pending| !pending) }) + .cloned() .collect() } diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 555a75879795d85bc20698b5a4c7cf76555f11ac..dd50a08c6b12ab198f1898ba79bae35969e6a5d0 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -48,7 +48,6 @@ use async_task::Runnable; use futures::channel::oneshot; use image::codecs::gif::GifDecoder; use image::{AnimationDecoder as _, Frame}; -use parking::Unparker; use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use schemars::JsonSchema; use seahash::SeaHasher; @@ -290,10 +289,13 @@ pub trait PlatformDisplay: Send + Sync + Debug { /// Get the default bounds for this display to place a window fn default_bounds(&self) -> Bounds { - let center = self.bounds().center(); - let offset = DEFAULT_WINDOW_SIZE / 2.0; + let bounds = self.bounds(); + let center = bounds.center(); + let clipped_window_size = DEFAULT_WINDOW_SIZE.min(&bounds.size); + + let offset = clipped_window_size / 2.0; let origin = point(center.x - offset.width, center.y - offset.height); - Bounds::new(origin, DEFAULT_WINDOW_SIZE) + Bounds::new(origin, clipped_window_size) } } @@ -349,8 +351,6 @@ impl Debug for DisplayId { } } -unsafe impl Send for DisplayId {} - /// Which part of the window to 
resize #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ResizeEdge { @@ -564,8 +564,6 @@ pub trait PlatformDispatcher: Send + Sync { fn dispatch(&self, runnable: Runnable, label: Option); fn dispatch_on_main_thread(&self, runnable: Runnable); fn dispatch_after(&self, duration: Duration, runnable: Runnable); - fn park(&self, timeout: Option) -> bool; - fn unparker(&self) -> Unparker; fn now(&self) -> Instant { Instant::now() } @@ -713,6 +711,41 @@ impl PlatformTextSystem for NoopTextSystem { } } +// Adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +#[allow(dead_code)] +pub(crate) fn get_gamma_correction_ratios(gamma: f32) -> [f32; 4] { + const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [ + [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0 + [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1 + [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2 + [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3 + [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4 + [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5 + [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6 + [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7 + [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8 + [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9 + [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0 + [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1 + [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2 + ]; + + const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32; + const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32; + + let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10; + let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index]; + + [ + ratios[0] * NORM13, + ratios[1] * NORM24, + ratios[2] * NORM13, + ratios[3] * NORM24, + ] +} + #[derive(PartialEq, Eq, Hash, Clone)] pub(crate) enum AtlasKey { Glyph(RenderGlyphParams), @@ -1646,6 +1679,8 @@ pub enum ImageFormat { Bmp, /// .tif or .tiff Tiff, + /// .ico + Ico, } impl ImageFormat { @@ -1659,6 +1694,7 @@ impl ImageFormat { ImageFormat::Svg => "image/svg+xml", ImageFormat::Bmp => "image/bmp", ImageFormat::Tiff => "image/tiff", + ImageFormat::Ico => "image/ico", } } @@ -1672,6 +1708,7 @@ impl ImageFormat { "image/svg+xml" => Some(Self::Svg), "image/bmp" => Some(Self::Bmp), "image/tiff" | "image/tif" => Some(Self::Tiff), + "image/ico" => Some(Self::Ico), _ => None, } } @@ -1778,6 +1815,7 @@ impl Image { ImageFormat::Webp => frames_for_image(&self.bytes, image::ImageFormat::WebP)?, ImageFormat::Bmp => frames_for_image(&self.bytes, image::ImageFormat::Bmp)?, ImageFormat::Tiff => frames_for_image(&self.bytes, image::ImageFormat::Tiff)?, + ImageFormat::Ico => frames_for_image(&self.bytes, image::ImageFormat::Ico)?, ImageFormat::Svg => { let pixmap = svg_renderer.render_pixmap(&self.bytes, SvgSize::ScaleFactor(1.0))?; diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index d00fbdc7f128e2f51ce7a2786aa7fdb57f296ea2..dd0be7db437fba573a1a552b52cf12d7c72f0361 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ 
b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -5,6 +5,7 @@ use super::{BladeAtlas, BladeContext}; use crate::{ Background, Bounds, DevicePixels, GpuSpecs, MonochromeSprite, Path, Point, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline, + get_gamma_correction_ratios, }; use blade_graphics as gpu; use blade_util::{BufferBelt, BufferBeltDescriptor}; @@ -1023,7 +1024,7 @@ impl RenderingParameters { .and_then(|v| v.parse().ok()) .unwrap_or(1.8_f32) .clamp(1.0, 2.2); - let gamma_ratios = Self::get_gamma_ratios(gamma); + let gamma_ratios = get_gamma_correction_ratios(gamma); let grayscale_enhanced_contrast = env::var("ZED_FONTS_GRAYSCALE_ENHANCED_CONTRAST") .ok() .and_then(|v| v.parse().ok()) @@ -1036,37 +1037,4 @@ impl RenderingParameters { grayscale_enhanced_contrast, } } - - // Gamma ratios for brightening/darkening edges for better contrast - // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50 - fn get_gamma_ratios(gamma: f32) -> [f32; 4] { - const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [ - [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0 - [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1 - [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2 - [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3 - [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4 - [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5 - [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6 - [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7 - [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8 - [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9 - [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0 - [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1 - [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2 - ]; - - const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32; - const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32; - - let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10; - let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index]; - - [ - ratios[0] * NORM13, - ratios[1] * NORM24, - ratios[2] * NORM13, - ratios[3] * NORM24, - ] - } } diff --git a/crates/gpui/src/platform/blade/shaders.wgsl b/crates/gpui/src/platform/blade/shaders.wgsl index 1de8ad442018624b4322901136ec777e66d96b18..2981b1446c6d5a2c6bd670e6a040b6a830a8e1d9 100644 --- a/crates/gpui/src/platform/blade/shaders.wgsl +++ b/crates/gpui/src/platform/blade/shaders.wgsl @@ -28,6 +28,9 @@ fn heat_map_color(value: f32, minValue: f32, maxValue: f32, position: vec2) */ +// Contrast and gamma correction adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.hlsl +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. fn color_brightness(color: vec3) -> f32 { // REC. 
601 luminance coefficients for perceived brightness return dot(color, vec3(0.30, 0.59, 0.11)); diff --git a/crates/gpui/src/platform/linux/dispatcher.rs b/crates/gpui/src/platform/linux/dispatcher.rs index 2f6cd83756054bdbca2c764b046b0c37f51d3515..9ca1f76fd6996ffbd376d8254cbbe63a1c8d8fd0 100644 --- a/crates/gpui/src/platform/linux/dispatcher.rs +++ b/crates/gpui/src/platform/linux/dispatcher.rs @@ -5,8 +5,6 @@ use calloop::{ channel::{self, Sender}, timer::TimeoutAction, }; -use parking::{Parker, Unparker}; -use parking_lot::Mutex; use std::{ thread, time::{Duration, Instant}, @@ -19,7 +17,6 @@ struct TimerAfter { } pub(crate) struct LinuxDispatcher { - parker: Mutex, main_sender: Sender, timer_sender: Sender, background_sender: flume::Sender, @@ -92,7 +89,6 @@ impl LinuxDispatcher { background_threads.push(timer_thread); Self { - parker: Mutex::new(Parker::new()), main_sender, timer_sender, background_sender, @@ -130,17 +126,4 @@ impl PlatformDispatcher for LinuxDispatcher { .send(TimerAfter { duration, runnable }) .ok(); } - - fn park(&self, timeout: Option) -> bool { - if let Some(timeout) = timeout { - self.parker.lock().park_timeout(timeout) - } else { - self.parker.lock().park(); - true - } - } - - fn unparker(&self) -> Unparker { - self.parker.lock().unparker() - } } diff --git a/crates/gpui/src/platform/linux/x11/clipboard.rs b/crates/gpui/src/platform/linux/x11/clipboard.rs index 65ad16e82bf103c4ef08e79c692196d3fae58777..3be5008505446e8ca6c6fd93b559fec4779ae85c 100644 --- a/crates/gpui/src/platform/linux/x11/clipboard.rs +++ b/crates/gpui/src/platform/linux/x11/clipboard.rs @@ -86,6 +86,7 @@ x11rb::atom_manager! { SVG__MIME: ImageFormat::mime_type(ImageFormat::Svg ).as_bytes(), BMP__MIME: ImageFormat::mime_type(ImageFormat::Bmp ).as_bytes(), TIFF_MIME: ImageFormat::mime_type(ImageFormat::Tiff).as_bytes(), + ICO__MIME: ImageFormat::mime_type(ImageFormat::Ico ).as_bytes(), // This is just some random name for the property on our window, into which // the clipboard owner writes the data we requested. 
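The ICO entry above only works if the format-to-MIME mapping and its inverse stay in sync. Below is a minimal standalone sketch of that round-trip, mirroring the enum shape and the "image/ico" string used in this patch; the names (including from_mime_type) are illustrative, not gpui's actual API surface.

#[derive(Clone, Copy, PartialEq, Debug)]
enum ImageFormat { Png, Tiff, Ico }

impl ImageFormat {
    // Forward mapping: the MIME string advertised for each format.
    fn mime_type(self) -> &'static str {
        match self {
            ImageFormat::Png => "image/png",
            ImageFormat::Tiff => "image/tiff",
            ImageFormat::Ico => "image/ico",
        }
    }

    // Reverse mapping: used when deciding which clipboard target to accept.
    fn from_mime_type(mime: &str) -> Option<Self> {
        match mime {
            "image/png" => Some(ImageFormat::Png),
            "image/tiff" | "image/tif" => Some(ImageFormat::Tiff),
            "image/ico" => Some(ImageFormat::Ico),
            _ => None,
        }
    }
}

fn main() {
    // Every format must round-trip through its MIME string; otherwise a newly
    // added variant could be written to the clipboard but never matched on paste.
    for format in [ImageFormat::Png, ImageFormat::Tiff, ImageFormat::Ico] {
        assert_eq!(ImageFormat::from_mime_type(format.mime_type()), Some(format));
    }
}

Covering both directions with a check like this is a cheap way to catch a variant that was added to one match but not the other.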
@@ -1003,6 +1004,7 @@ impl Clipboard { ImageFormat::Svg => self.inner.atoms.SVG__MIME, ImageFormat::Bmp => self.inner.atoms.BMP__MIME, ImageFormat::Tiff => self.inner.atoms.TIFF_MIME, + ImageFormat::Ico => self.inner.atoms.ICO__MIME, }; let data = vec![ClipboardData { bytes: image.bytes, diff --git a/crates/gpui/src/platform/mac/dispatcher.rs b/crates/gpui/src/platform/mac/dispatcher.rs index 137295fb916e893c193962a3076b83ee5dce1436..c72f791f850469287cf66021558032902982ccec 100644 --- a/crates/gpui/src/platform/mac/dispatcher.rs +++ b/crates/gpui/src/platform/mac/dispatcher.rs @@ -9,12 +9,9 @@ use objc::{ runtime::{BOOL, YES}, sel, sel_impl, }; -use parking::{Parker, Unparker}; -use parking_lot::Mutex; use std::{ ffi::c_void, ptr::{NonNull, addr_of}, - sync::Arc, time::Duration, }; @@ -29,23 +26,7 @@ pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t { addr_of!(_dispatch_main_q) as *const _ as dispatch_queue_t } -pub(crate) struct MacDispatcher { - parker: Arc>, -} - -impl Default for MacDispatcher { - fn default() -> Self { - Self::new() - } -} - -impl MacDispatcher { - pub fn new() -> Self { - MacDispatcher { - parker: Arc::new(Mutex::new(Parker::new())), - } - } -} +pub(crate) struct MacDispatcher; impl PlatformDispatcher for MacDispatcher { fn is_main_thread(&self) -> bool { @@ -86,19 +67,6 @@ impl PlatformDispatcher for MacDispatcher { ); } } - - fn park(&self, timeout: Option) -> bool { - if let Some(timeout) = timeout { - self.parker.lock().park_timeout(timeout) - } else { - self.parker.lock().park(); - true - } - } - - fn unparker(&self) -> Unparker { - self.parker.lock().unparker() - } } extern "C" fn trampoline(runnable: *mut c_void) { diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index bf92ca6dfb649a6b7b3850e796742464785889a8..244350169caffef10ea2740a30e36772506e6145 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -187,7 +187,7 @@ impl Default for MacPlatform { impl MacPlatform { pub(crate) fn new(headless: bool) -> Self { - let dispatcher = Arc::new(MacDispatcher::new()); + let dispatcher = Arc::new(MacDispatcher); #[cfg(feature = "font-kit")] let text_system = Arc::new(crate::MacTextSystem::new()); @@ -1607,6 +1607,7 @@ impl From for UTType { ImageFormat::Gif => Self::gif(), ImageFormat::Bmp => Self::bmp(), ImageFormat::Svg => Self::svg(), + ImageFormat::Ico => Self::ico(), } } } @@ -1645,6 +1646,11 @@ impl UTType { Self(unsafe { ns_string("public.svg-image") }) } + pub fn ico() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/ico + Self(unsafe { ns_string("com.microsoft.ico") }) + } + pub fn tiff() -> Self { // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index aa9c5850d5375cad41166161ba595c3fab85d457..a0f90587d862d885e85b2052ce4f55f3cd5da55d 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -43,7 +43,7 @@ use pathfinder_geometry::{ vector::{Vector2F, Vector2I}, }; use smallvec::SmallVec; -use std::{borrow::Cow, char, cmp, convert::TryFrom, sync::Arc}; +use std::{borrow::Cow, char, convert::TryFrom, sync::Arc}; use super::open_type::apply_features_and_fallbacks; @@ -432,26 +432,21 @@ impl MacTextSystemState 
{ let mut string = CFMutableAttributedString::new(); let mut max_ascent = 0.0f32; let mut max_descent = 0.0f32; - { - string.replace_str(&CFString::new(text), CFRange::init(0, 0)); - let utf16_line_len = string.char_len() as usize; - let mut ix_converter = StringIndexConverter::new(text); + { + let mut ix_converter = StringIndexConverter::new(&text); for run in font_runs { - let utf8_end = ix_converter.utf8_ix + run.len; - let utf16_start = ix_converter.utf16_ix; - - if utf16_start >= utf16_line_len { - break; - } + let text = &text[ix_converter.utf8_ix..][..run.len]; - ix_converter.advance_to_utf8_ix(utf8_end); - let utf16_end = cmp::min(ix_converter.utf16_ix, utf16_line_len); + let utf16_start = string.char_len(); // insert at end of string + ix_converter.advance_to_utf8_ix(ix_converter.utf8_ix + run.len); - let cf_range = - CFRange::init(utf16_start as isize, (utf16_end - utf16_start) as isize); + // note: replace_str may silently ignore codepoints it dislikes (e.g., BOM at start of string) + string.replace_str(&CFString::new(text), CFRange::init(utf16_start, 0)); + let utf16_end = string.char_len(); - let font: &FontKitFont = &self.fonts[run.font_id.0]; + let cf_range = CFRange::init(utf16_start, utf16_end - utf16_start); + let font = &self.fonts[run.font_id.0]; let font_metrics = font.metrics(); let font_scale = font_size.0 / font_metrics.units_per_em as f32; @@ -465,17 +460,12 @@ impl MacTextSystemState { &font.native_font().clone_with_font_size(font_size.into()), ); } - - if utf16_end == utf16_line_len { - break; - } } } - // Retrieve the glyphs from the shaped line, converting UTF16 offsets to UTF8 offsets. let line = CTLine::new_with_attributed_string(string.as_concrete_TypeRef()); let glyph_runs = line.glyph_runs(); - let mut runs = Vec::with_capacity(glyph_runs.len() as usize); + let mut runs = >::with_capacity(glyph_runs.len() as usize); let mut ix_converter = StringIndexConverter::new(text); for run in glyph_runs.into_iter() { let attributes = run.attributes().unwrap(); @@ -487,28 +477,35 @@ impl MacTextSystemState { }; let font_id = self.id_for_native_font(font); - let mut glyphs = Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0)); - for ((glyph_id, position), glyph_utf16_ix) in run + let mut glyphs = match runs.last_mut() { + Some(run) if run.font_id == font_id => &mut run.glyphs, + _ => { + runs.push(ShapedRun { + font_id, + glyphs: Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0)), + }); + &mut runs.last_mut().unwrap().glyphs + } + }; + for ((&glyph_id, position), &glyph_utf16_ix) in run .glyphs() .iter() .zip(run.positions().iter()) .zip(run.string_indices().iter()) { - let glyph_utf16_ix = usize::try_from(*glyph_utf16_ix).unwrap(); + let mut glyph_utf16_ix = usize::try_from(glyph_utf16_ix).unwrap(); if ix_converter.utf16_ix > glyph_utf16_ix { // We cannot reuse current index converter, as it can only seek forward. Restart the search. 
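                    // (CTLine returns glyph runs in visual order, so for bidirectional or
                    // reordered text the UTF-16 string indices reported by a later run can be
                    // smaller than those seen in an earlier one.)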
ix_converter = StringIndexConverter::new(text); } ix_converter.advance_to_utf16_ix(glyph_utf16_ix); glyphs.push(ShapedGlyph { - id: GlyphId(*glyph_id as u32), + id: GlyphId(glyph_id as u32), position: point(position.x as f32, position.y as f32).map(px), index: ix_converter.utf8_ix, is_emoji: self.is_emoji(font_id), }); } - - runs.push(ShapedRun { font_id, glyphs }); } let typographic_bounds = line.get_typographic_bounds(); LineLayout { @@ -522,10 +519,12 @@ impl MacTextSystemState { } } -#[derive(Clone)] +#[derive(Debug, Clone)] struct StringIndexConverter<'a> { text: &'a str, + /// Index in UTF-8 bytes utf8_ix: usize, + /// Index in UTF-16 code units utf16_ix: usize, } @@ -706,5 +705,113 @@ mod tests { assert_eq!(layout.runs[0].glyphs[0].id, GlyphId(68u32)); // a // There's no glyph for \u{feff} assert_eq!(layout.runs[0].glyphs[1].id, GlyphId(69u32)); // b + + let line = "\u{feff}ab"; + let font_runs = &[ + FontRun { + len: "\u{feff}".len(), + font_id, + }, + FontRun { + len: "ab".len(), + font_id, + }, + ]; + let layout = fonts.layout_line(line, px(16.), font_runs); + assert_eq!(layout.len, line.len()); + assert_eq!(layout.runs.len(), 1); + assert_eq!(layout.runs[0].glyphs.len(), 2); + // There's no glyph for \u{feff} + assert_eq!(layout.runs[0].glyphs[0].id, GlyphId(68u32)); // a + assert_eq!(layout.runs[0].glyphs[1].id, GlyphId(69u32)); // b + } + + #[test] + fn test_layout_line_zwnj_insertion() { + let fonts = MacTextSystem::new(); + let font_id = fonts.font_id(&font("Helvetica")).unwrap(); + + let text = "hello world"; + let font_runs = &[ + FontRun { font_id, len: 5 }, // "hello" + FontRun { font_id, len: 6 }, // " world" + ]; + + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + for run in &layout.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + + // Test with different font runs - should not insert ZWNJ + let font_id2 = fonts.font_id(&font("Times")).unwrap_or(font_id); + let font_runs_different = &[ + FontRun { font_id, len: 5 }, // "hello" + // " world" + FontRun { + font_id: font_id2, + len: 6, + }, + ]; + + let layout2 = fonts.layout_line(text, px(16.), font_runs_different); + assert_eq!(layout2.len, text.len()); + + for run in &layout2.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + } + + #[test] + fn test_layout_line_zwnj_edge_cases() { + let fonts = MacTextSystem::new(); + let font_id = fonts.font_id(&font("Helvetica")).unwrap(); + + let text = "hello"; + let font_runs = &[FontRun { font_id, len: 5 }]; + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + let text = "abc"; + let font_runs = &[ + FontRun { font_id, len: 1 }, // "a" + FontRun { font_id, len: 1 }, // "b" + FontRun { font_id, len: 1 }, // "c" + ]; + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, text.len()); + + for run in &layout.runs { + for glyph in &run.glyphs { + assert!( + glyph.index < text.len(), + "Glyph index {} is out of bounds for text length {}", + glyph.index, + text.len() + ); + } + } + + // Test with empty text + let text = ""; + let font_runs = &[]; + let layout = fonts.layout_line(text, px(16.), font_runs); + assert_eq!(layout.len, 0); + assert!(layout.runs.is_empty()); } } diff --git 
a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index e19710effda9299c6eb72e8c4acc2f615ac077ee..017c29bfb558f77874a9729a52b518d9d41fb256 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -2,7 +2,7 @@ use crate::{PlatformDispatcher, TaskLabel}; use async_task::Runnable; use backtrace::Backtrace; use collections::{HashMap, HashSet, VecDeque}; -use parking::{Parker, Unparker}; +use parking::Unparker; use parking_lot::Mutex; use rand::prelude::*; use std::{ @@ -22,8 +22,6 @@ struct TestDispatcherId(usize); pub struct TestDispatcher { id: TestDispatcherId, state: Arc>, - parker: Arc>, - unparker: Unparker, } struct TestDispatcherState { @@ -41,11 +39,11 @@ struct TestDispatcherState { waiting_backtrace: Option, deprioritized_task_labels: HashSet, block_on_ticks: RangeInclusive, + last_parked: Option, } impl TestDispatcher { pub fn new(random: StdRng) -> Self { - let (parker, unparker) = parking::pair(); let state = TestDispatcherState { random, foreground: HashMap::default(), @@ -61,13 +59,12 @@ impl TestDispatcher { waiting_backtrace: None, deprioritized_task_labels: Default::default(), block_on_ticks: 0..=1000, + last_parked: None, }; TestDispatcher { id: TestDispatcherId(0), state: Arc::new(Mutex::new(state)), - parker: Arc::new(Mutex::new(parker)), - unparker, } } @@ -243,6 +240,21 @@ impl TestDispatcher { let block_on_ticks = lock.block_on_ticks.clone(); lock.random.random_range(block_on_ticks) } + pub fn unpark_last(&self) { + self.state + .lock() + .last_parked + .take() + .as_ref() + .map(Unparker::unpark); + } + + pub fn set_unparker(&self, unparker: Unparker) { + let last = { self.state.lock().last_parked.replace(unparker) }; + if let Some(last) = last { + last.unpark(); + } + } } impl Clone for TestDispatcher { @@ -251,8 +263,6 @@ impl Clone for TestDispatcher { Self { id: TestDispatcherId(id), state: self.state.clone(), - parker: self.parker.clone(), - unparker: self.unparker.clone(), } } } @@ -276,7 +286,7 @@ impl PlatformDispatcher for TestDispatcher { state.background.push(runnable); } } - self.unparker.unpark(); + self.unpark_last(); } fn dispatch_on_main_thread(&self, runnable: Runnable) { @@ -286,7 +296,7 @@ impl PlatformDispatcher for TestDispatcher { .entry(self.id) .or_default() .push_back(runnable); - self.unparker.unpark(); + self.unpark_last(); } fn dispatch_after(&self, duration: std::time::Duration, runnable: Runnable) { @@ -297,14 +307,6 @@ impl PlatformDispatcher for TestDispatcher { }; state.delayed.insert(ix, (next_time, runnable)); } - fn park(&self, _: Option) -> bool { - self.parker.lock().park(); - true - } - - fn unparker(&self) -> Unparker { - self.unparker.clone() - } fn as_test(&self) -> Option<&TestDispatcher> { Some(self) diff --git a/crates/gpui/src/platform/windows/alpha_correction.hlsl b/crates/gpui/src/platform/windows/alpha_correction.hlsl index dc8d0b5dc52e9ef1484bfdf776161b5d5d8ce1b9..b0a9ca2e6b60a515ad2c1f9d95cd3e19079d326c 100644 --- a/crates/gpui/src/platform/windows/alpha_correction.hlsl +++ b/crates/gpui/src/platform/windows/alpha_correction.hlsl @@ -1,3 +1,7 @@ +// Adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.hlsl +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + float color_brightness(float3 color) { // REC. 
601 luminance coefficients for perceived brightness return dot(color, float3(0.30f, 0.59f, 0.11f)); diff --git a/crates/gpui/src/platform/windows/directx_renderer.rs b/crates/gpui/src/platform/windows/directx_renderer.rs index 2baa237cdaa196da225070c241232fc6af0f0ff4..220876b4a98693f514886c14ca4b58725f2583d2 100644 --- a/crates/gpui/src/platform/windows/directx_renderer.rs +++ b/crates/gpui/src/platform/windows/directx_renderer.rs @@ -612,44 +612,11 @@ impl DirectXRenderer { let render_params: IDWriteRenderingParams1 = factory.CreateRenderingParams().unwrap().cast().unwrap(); FontInfo { - gamma_ratios: Self::get_gamma_ratios(render_params.GetGamma()), + gamma_ratios: get_gamma_correction_ratios(render_params.GetGamma()), grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(), } }) } - - // Gamma ratios for brightening/darkening edges for better contrast - // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50 - fn get_gamma_ratios(gamma: f32) -> [f32; 4] { - const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [ - [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0 - [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1 - [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2 - [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3 - [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4 - [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5 - [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6 - [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7 - [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8 - [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9 - [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0 - [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1 - [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2 - ]; - - const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32; - const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32; - - let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10; - let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index]; - - [ - ratios[0] * NORM13, - ratios[1] * NORM24, - ratios[2] * NORM13, - ratios[3] * NORM24, - ] - } } impl DirectXResources { diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index 3707a69047cf53cf68a40b3711e135f77dff8be3..8d3e6305f6b4bb60f6c282280bafa7f76f59eecb 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -5,8 +5,6 @@ use std::{ use async_task::Runnable; use flume::Sender; -use parking::Parker; -use parking_lot::Mutex; use util::ResultExt; use windows::{ System::Threading::{ @@ -24,7 +22,6 @@ use crate::{ pub(crate) struct WindowsDispatcher { main_sender: Sender, - parker: Mutex, main_thread_id: ThreadId, platform_window_handle: SafeHwnd, validation_number: usize, @@ -36,13 +33,11 @@ impl WindowsDispatcher { platform_window_handle: HWND, validation_number: usize, ) -> Self { - let parker = Mutex::new(Parker::new()); let main_thread_id = current().id(); let platform_window_handle = platform_window_handle.into(); WindowsDispatcher { main_sender, - parker, main_thread_id, platform_window_handle, validation_number, @@ -112,17 
+107,4 @@ impl PlatformDispatcher for WindowsDispatcher { fn dispatch_after(&self, duration: Duration, runnable: Runnable) { self.dispatch_on_threadpool_after(runnable, duration); } - - fn park(&self, timeout: Option) -> bool { - if let Some(timeout) = timeout { - self.parker.lock().park_timeout(timeout) - } else { - self.parker.lock().park(); - true - } - } - - fn unparker(&self) -> parking::Unparker { - self.parker.lock().unparker() - } } diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index a9873c109c7547547ea13b5e7ed4f71194c778a0..9c10dcec4bb629bfbc78b76e74db099ed605d8be 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -530,8 +530,18 @@ impl WindowsWindowInner { }; let scale_factor = lock.scale_factor; let wheel_scroll_amount = match modifiers.shift { - true => lock.system_settings.mouse_wheel_settings.wheel_scroll_chars, - false => lock.system_settings.mouse_wheel_settings.wheel_scroll_lines, + true => { + self.system_settings + .borrow() + .mouse_wheel_settings + .wheel_scroll_chars + } + false => { + self.system_settings + .borrow() + .mouse_wheel_settings + .wheel_scroll_lines + } }; drop(lock); @@ -574,7 +584,11 @@ impl WindowsWindowInner { return Some(1); }; let scale_factor = lock.scale_factor; - let wheel_scroll_chars = lock.system_settings.mouse_wheel_settings.wheel_scroll_chars; + let wheel_scroll_chars = self + .system_settings + .borrow() + .mouse_wheel_settings + .wheel_scroll_chars; drop(lock); let wheel_distance = @@ -707,11 +721,8 @@ impl WindowsWindowInner { // used by Chrome. However, it may result in one row of pixels being obscured // in our client area. But as Chrome says, "there seems to be no better solution." if is_maximized - && let Some(ref taskbar_position) = self - .state - .borrow() - .system_settings - .auto_hide_taskbar_position + && let Some(ref taskbar_position) = + self.system_settings.borrow().auto_hide_taskbar_position { // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, // so the window isn't treated as a "fullscreen app", which would cause @@ -1101,9 +1112,11 @@ impl WindowsWindowInner { if wparam.0 != 0 { let mut lock = self.state.borrow_mut(); let display = lock.display; - lock.system_settings.update(display, wparam.0); lock.click_state.system_update(wparam.0); lock.border_offset.update(handle).log_err(); + // system settings may emit a window message which wants to take the refcell lock, so drop it + drop(lock); + self.system_settings.borrow_mut().update(display, wparam.0); } else { self.handle_system_theme_changed(handle, lparam)?; }; @@ -1294,10 +1307,10 @@ where F: FnOnce(Keystroke) -> PlatformInput, { let virtual_key = VIRTUAL_KEY(wparam.loword()); - let mut modifiers = current_modifiers(); + let modifiers = current_modifiers(); match virtual_key { - VK_SHIFT | VK_CONTROL | VK_MENU | VK_LWIN | VK_RWIN => { + VK_SHIFT | VK_CONTROL | VK_MENU | VK_LMENU | VK_RMENU | VK_LWIN | VK_RWIN => { if state .last_reported_modifiers .is_some_and(|prev_modifiers| prev_modifiers == modifiers) @@ -1447,13 +1460,25 @@ fn is_virtual_key_pressed(vkey: VIRTUAL_KEY) -> bool { unsafe { GetKeyState(vkey.0 as i32) < 0 } } +fn keyboard_uses_altgr() -> bool { + use crate::platform::windows::keyboard::WindowsKeyboardLayout; + WindowsKeyboardLayout::new() + .map(|layout| layout.uses_altgr()) + .unwrap_or(false) +} + #[inline] pub(crate) fn current_modifiers() -> Modifiers { - let altgr = is_virtual_key_pressed(VK_RMENU) && 
is_virtual_key_pressed(VK_LCONTROL); + let lmenu_pressed = is_virtual_key_pressed(VK_LMENU); + let rmenu_pressed = is_virtual_key_pressed(VK_RMENU); + let lcontrol_pressed = is_virtual_key_pressed(VK_LCONTROL); + + // Only treat right Alt + left Ctrl as AltGr on keyboards that actually use it + let altgr = keyboard_uses_altgr() && rmenu_pressed && lcontrol_pressed; Modifiers { control: is_virtual_key_pressed(VK_CONTROL) && !altgr, - alt: is_virtual_key_pressed(VK_MENU) && !altgr, + alt: (lmenu_pressed || rmenu_pressed) && !altgr, shift: is_virtual_key_pressed(VK_SHIFT), platform: is_virtual_key_pressed(VK_LWIN) || is_virtual_key_pressed(VK_RWIN), function: false, diff --git a/crates/gpui/src/platform/windows/keyboard.rs b/crates/gpui/src/platform/windows/keyboard.rs index 259ebaebff794d4ed7203420c8c66188998c5fa4..7a8478d5910d35fb98a913ed799f2fa1447e9a65 100644 --- a/crates/gpui/src/platform/windows/keyboard.rs +++ b/crates/gpui/src/platform/windows/keyboard.rs @@ -110,6 +110,38 @@ impl WindowsKeyboardLayout { name: "unknown".to_string(), } } + + pub(crate) fn uses_altgr(&self) -> bool { + // Check if this is a known AltGr layout by examining the layout ID + // The layout ID is a hex string like "00000409" (US) or "00000407" (German) + // Extract the language ID (last 4 bytes) + let id_bytes = self.id.as_bytes(); + if id_bytes.len() >= 4 { + let lang_id = &id_bytes[id_bytes.len() - 4..]; + // List of keyboard layouts that use AltGr (non-exhaustive) + matches!( + lang_id, + b"0407" | // German + b"040C" | // French + b"040A" | // Spanish + b"0415" | // Polish + b"0413" | // Dutch + b"0816" | // Portuguese + b"041D" | // Swedish + b"0414" | // Norwegian + b"040B" | // Finnish + b"041F" | // Turkish + b"0419" | // Russian + b"0405" | // Czech + b"040E" | // Hungarian + b"0424" | // Slovenian + b"041B" | // Slovak + b"0418" // Romanian + ) + } else { + false + } + } } impl WindowsKeyboardMapper { diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 5219d8c8177f7c3dc76a7afab258e2c58a0ce6f8..361d8e114308323da8629fae93d257cc38147dba 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -951,17 +951,30 @@ fn file_save_dialog( ) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; if !directory.to_string_lossy().is_empty() - && let Some(full_path) = directory.canonicalize().log_err() + && let Some(full_path) = directory + .canonicalize() + .context("failed to canonicalize directory") + .log_err() { let full_path = SanitizedPath::new(&full_path); let full_path_string = full_path.to_string(); let path_item: IShellItem = unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_string), None)? 
}; - unsafe { dialog.SetFolder(&path_item).log_err() }; + unsafe { + dialog + .SetFolder(&path_item) + .context("failed to set dialog folder") + .log_err() + }; } if let Some(suggested_name) = suggested_name { - unsafe { dialog.SetFileName(&HSTRING::from(suggested_name)).log_err() }; + unsafe { + dialog + .SetFileName(&HSTRING::from(suggested_name)) + .context("failed to set file name") + .log_err() + }; } unsafe { diff --git a/crates/gpui/src/platform/windows/shaders.hlsl b/crates/gpui/src/platform/windows/shaders.hlsl index c3ad2952dbf7122672104135f432e0257471642d..d6168eea09b4c7da705f5ecfc3e4002222f3149d 100644 --- a/crates/gpui/src/platform/windows/shaders.hlsl +++ b/crates/gpui/src/platform/windows/shaders.hlsl @@ -390,7 +390,7 @@ float4 gradient_color(Background background, float pattern_period = pattern_height * sin(stripe_angle); float2x2 rotation = rotate2d(stripe_angle); float2 relative_position = position - bounds.origin; - float2 rotated_point = mul(rotation, relative_position); + float2 rotated_point = mul(relative_position, rotation); float pattern = fmod(rotated_point.x, pattern_period); float distance = min(pattern, pattern_period - pattern) - pattern_period * (pattern_width / pattern_height) / 2.0f; color = solid_color; diff --git a/crates/gpui/src/platform/windows/vsync.rs b/crates/gpui/src/platform/windows/vsync.rs index 5cbcb8e99e2741c4b37cad4d550e290c4cab869f..73c32cf9b92b93278d4f88a8c784fd3b2a8fc2d3 100644 --- a/crates/gpui/src/platform/windows/vsync.rs +++ b/crates/gpui/src/platform/windows/vsync.rs @@ -5,23 +5,10 @@ use std::{ use anyhow::{Context, Result}; use util::ResultExt; -use windows::{ - Win32::{ - Foundation::{HANDLE, HWND}, - Graphics::{ - DirectComposition::{ - COMPOSITION_FRAME_ID_COMPLETED, COMPOSITION_FRAME_ID_TYPE, COMPOSITION_FRAME_STATS, - COMPOSITION_TARGET_ID, - }, - Dwm::{DWM_TIMING_INFO, DwmFlush, DwmGetCompositionTimingInfo}, - }, - System::{ - LibraryLoader::{GetModuleHandleA, GetProcAddress}, - Performance::QueryPerformanceFrequency, - Threading::INFINITE, - }, - }, - core::{HRESULT, s}, +use windows::Win32::{ + Foundation::HWND, + Graphics::Dwm::{DWM_TIMING_INFO, DwmFlush, DwmGetCompositionTimingInfo}, + System::Performance::QueryPerformanceFrequency, }; static QPC_TICKS_PER_SECOND: LazyLock = LazyLock::new(|| { @@ -35,20 +22,6 @@ static QPC_TICKS_PER_SECOND: LazyLock = LazyLock::new(|| { const VSYNC_INTERVAL_THRESHOLD: Duration = Duration::from_millis(1); const DEFAULT_VSYNC_INTERVAL: Duration = Duration::from_micros(16_666); // ~60Hz -// Here we are using dynamic loading of DirectComposition functions, -// or the app will refuse to start on windows systems that do not support DirectComposition. 
-type DCompositionGetFrameId = - unsafe extern "system" fn(frameidtype: COMPOSITION_FRAME_ID_TYPE, frameid: *mut u64) -> HRESULT; -type DCompositionGetStatistics = unsafe extern "system" fn( - frameid: u64, - framestats: *mut COMPOSITION_FRAME_STATS, - targetidcount: u32, - targetids: *mut COMPOSITION_TARGET_ID, - actualtargetidcount: *mut u32, -) -> HRESULT; -type DCompositionWaitForCompositorClock = - unsafe extern "system" fn(count: u32, handles: *const HANDLE, timeoutinms: u32) -> u32; - pub(crate) struct VSyncProvider { interval: Duration, f: Box bool>, @@ -56,35 +29,12 @@ pub(crate) struct VSyncProvider { impl VSyncProvider { pub(crate) fn new() -> Self { - if let Some((get_frame_id, get_statistics, wait_for_comp_clock)) = - initialize_direct_composition() - .context("Retrieving DirectComposition functions") - .log_with_level(log::Level::Warn) - { - let interval = get_dwm_interval_from_direct_composition(get_frame_id, get_statistics) - .context("Failed to get DWM interval from DirectComposition") - .log_err() - .unwrap_or(DEFAULT_VSYNC_INTERVAL); - log::info!( - "DirectComposition is supported for VSync, interval: {:?}", - interval - ); - let f = Box::new(move || unsafe { - wait_for_comp_clock(0, std::ptr::null(), INFINITE) == 0 - }); - Self { interval, f } - } else { - let interval = get_dwm_interval() - .context("Failed to get DWM interval") - .log_err() - .unwrap_or(DEFAULT_VSYNC_INTERVAL); - log::info!( - "DirectComposition is not supported for VSync, falling back to DWM, interval: {:?}", - interval - ); - let f = Box::new(|| unsafe { DwmFlush().is_ok() }); - Self { interval, f } - } + let interval = get_dwm_interval() + .context("Failed to get DWM interval") + .log_err() + .unwrap_or(DEFAULT_VSYNC_INTERVAL); + let f = Box::new(|| unsafe { DwmFlush().is_ok() }); + Self { interval, f } } pub(crate) fn wait_for_vsync(&self) { @@ -105,49 +55,6 @@ impl VSyncProvider { } } -fn initialize_direct_composition() -> Result<( - DCompositionGetFrameId, - DCompositionGetStatistics, - DCompositionWaitForCompositorClock, -)> { - unsafe { - // Load DLL at runtime since older Windows versions don't have dcomp. 
- let hmodule = GetModuleHandleA(s!("dcomp.dll")).context("Loading dcomp.dll")?; - let get_frame_id_addr = GetProcAddress(hmodule, s!("DCompositionGetFrameId")) - .context("Function DCompositionGetFrameId not found")?; - let get_statistics_addr = GetProcAddress(hmodule, s!("DCompositionGetStatistics")) - .context("Function DCompositionGetStatistics not found")?; - let wait_for_compositor_clock_addr = - GetProcAddress(hmodule, s!("DCompositionWaitForCompositorClock")) - .context("Function DCompositionWaitForCompositorClock not found")?; - let get_frame_id: DCompositionGetFrameId = std::mem::transmute(get_frame_id_addr); - let get_statistics: DCompositionGetStatistics = std::mem::transmute(get_statistics_addr); - let wait_for_compositor_clock: DCompositionWaitForCompositorClock = - std::mem::transmute(wait_for_compositor_clock_addr); - Ok((get_frame_id, get_statistics, wait_for_compositor_clock)) - } -} - -fn get_dwm_interval_from_direct_composition( - get_frame_id: DCompositionGetFrameId, - get_statistics: DCompositionGetStatistics, -) -> Result { - let mut frame_id = 0; - unsafe { get_frame_id(COMPOSITION_FRAME_ID_COMPLETED, &mut frame_id) }.ok()?; - let mut stats = COMPOSITION_FRAME_STATS::default(); - unsafe { - get_statistics( - frame_id, - &mut stats, - 0, - std::ptr::null_mut(), - std::ptr::null_mut(), - ) - } - .ok()?; - Ok(retrieve_duration(stats.framePeriod, *QPC_TICKS_PER_SECOND)) -} - fn get_dwm_interval() -> Result { let mut timing_info = DWM_TIMING_INFO { cbSize: std::mem::size_of::() as u32, diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 7abb4ee21a1a28356e15d09be3c22c688bb7e033..e765fa1a22d54a645d094f0df3250f75c94387af 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -51,7 +51,6 @@ pub struct WindowsWindowState { pub renderer: DirectXRenderer, pub click_state: ClickState, - pub system_settings: WindowsSystemSettings, pub current_cursor: Option, pub nc_button_pressed: Option, @@ -66,6 +65,7 @@ pub(crate) struct WindowsWindowInner { pub(super) this: Weak, drop_target_helper: IDropTargetHelper, pub(crate) state: RefCell, + pub(crate) system_settings: RefCell, pub(crate) handle: AnyWindowHandle, pub(crate) hide_title_bar: bool, pub(crate) is_movable: bool, @@ -115,7 +115,6 @@ impl WindowsWindowState { let system_key_handled = false; let hovered = false; let click_state = ClickState::new(); - let system_settings = WindowsSystemSettings::new(display); let nc_button_pressed = None; let fullscreen = None; let initial_placement = None; @@ -138,7 +137,6 @@ impl WindowsWindowState { hovered, renderer, click_state, - system_settings, current_cursor, nc_button_pressed, display, @@ -171,7 +169,9 @@ impl WindowsWindowState { length: std::mem::size_of::() as u32, ..Default::default() }; - GetWindowPlacement(self.hwnd, &mut placement).log_err(); + GetWindowPlacement(self.hwnd, &mut placement) + .context("failed to get window placement") + .log_err(); placement }; ( @@ -231,6 +231,7 @@ impl WindowsWindowInner { validation_number: context.validation_number, main_receiver: context.main_receiver.clone(), platform_window_handle: context.platform_window_handle, + system_settings: RefCell::new(WindowsSystemSettings::new(context.display)), })) } @@ -255,7 +256,9 @@ impl WindowsWindowInner { lock.fullscreen_restore_bounds = window_bounds; let style = WINDOW_STYLE(unsafe { get_window_long(this.hwnd, GWL_STYLE) } as _); let mut rc = RECT::default(); - unsafe { GetWindowRect(this.hwnd, 
&mut rc) }.log_err(); + unsafe { GetWindowRect(this.hwnd, &mut rc) } + .context("failed to get window rect") + .log_err(); let _ = lock.fullscreen.insert(StyleAndBounds { style, x: rc.left, @@ -302,15 +305,20 @@ impl WindowsWindowInner { }; match open_status.state { WindowOpenState::Maximized => unsafe { - SetWindowPlacement(self.hwnd, &open_status.placement)?; + SetWindowPlacement(self.hwnd, &open_status.placement) + .context("failed to set window placement")?; ShowWindowAsync(self.hwnd, SW_MAXIMIZE).ok()?; }, WindowOpenState::Fullscreen => { - unsafe { SetWindowPlacement(self.hwnd, &open_status.placement)? }; + unsafe { + SetWindowPlacement(self.hwnd, &open_status.placement) + .context("failed to set window placement")? + }; self.toggle_fullscreen(); } WindowOpenState::Windowed => unsafe { - SetWindowPlacement(self.hwnd, &open_status.placement)?; + SetWindowPlacement(self.hwnd, &open_status.placement) + .context("failed to set window placement")?; }, } Ok(()) @@ -644,10 +652,12 @@ impl PlatformWindow for WindowsWindow { let mut btn_encoded = Vec::new(); for (index, btn) in answers.iter().enumerate() { let encoded = HSTRING::from(btn.label().as_ref()); - let button_id = if btn.is_cancel() { - IDCANCEL.0 - } else { - index as i32 - 100 + let button_id = match btn { + PromptButton::Ok(_) => IDOK.0, + PromptButton::Cancel(_) => IDCANCEL.0, + // the first few low integer values are reserved for known buttons + // so for simplicity we just go backwards from -1 + PromptButton::Other(_) => -(index as i32) - 1, }; button_id_map.push(button_id); buttons.push(TASKDIALOG_BUTTON { @@ -665,11 +675,11 @@ impl PlatformWindow for WindowsWindow { .context("unable to create task dialog") .log_err(); - let clicked = button_id_map - .iter() - .position(|&button_id| button_id == res) - .unwrap(); - let _ = done_tx.send(clicked); + if let Some(clicked) = + button_id_map.iter().position(|&button_id| button_id == res) + { + let _ = done_tx.send(clicked); + } } }) .detach(); diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index 8afb4e4eb8af70a78c1cd4fc0176a7fe3baf3c3e..42f8f25e47620fe673720055037b7f91f44165a2 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -403,13 +403,7 @@ impl Default for TextStyle { TextStyle { color: black(), // todo(linux) make this configurable or choose better default - font_family: if cfg!(any(target_os = "linux", target_os = "freebsd")) { - "FreeMono".into() - } else if cfg!(target_os = "windows") { - "Segoe UI".into() - } else { - "Helvetica".into() - }, + font_family: ".SystemUIFont".into(), font_features: FontFeatures::default(), font_fallbacks: None, font_size: rems(1.).into(), diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index c714cac14fe894410a05d40c4c5b30d6fbf61e2d..4475718675b7feee4abcfcded814ae3cc38d5fdb 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -53,6 +53,13 @@ pub trait Styled: Sized { self } + /// Sets the display type of the element to `none`. + /// [Docs](https://tailwindcss.com/docs/display) + fn hidden(mut self) -> Self { + self.style().display = Some(Display::None); + self + } + /// Sets the whitespace of the element to `normal`. 
/// [Docs](https://tailwindcss.com/docs/whitespace#normal) fn whitespace_normal(mut self) -> Self { diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index bc35dcec1eb126931cd71a6f3a17ce05054e8dbe..11cb0872861321c3c06c3f8a5bf79fdd30eb2275 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -3,7 +3,6 @@ use crate::{ point, size, }; use collections::{FxHashMap, FxHashSet}; -use smallvec::SmallVec; use stacksafe::{StackSafe, stacksafe}; use std::{fmt::Debug, ops::Range}; use taffy::{ @@ -31,6 +30,7 @@ pub struct TaffyLayoutEngine { taffy: TaffyTree, absolute_layout_bounds: FxHashMap>, computed_layouts: FxHashSet, + layout_bounds_scratch_space: Vec, } const EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by construction if possible"; @@ -43,6 +43,7 @@ impl TaffyLayoutEngine { taffy, absolute_layout_bounds: FxHashMap::default(), computed_layouts: FxHashSet::default(), + layout_bounds_scratch_space: Vec::new(), } } @@ -69,9 +70,7 @@ impl TaffyLayoutEngine { } else { self.taffy // This is safe because LayoutId is repr(transparent) to taffy::tree::NodeId. - .new_with_children(taffy_style, unsafe { - std::mem::transmute::<&[LayoutId], &[taffy::NodeId]>(children) - }) + .new_with_children(taffy_style, LayoutId::to_taffy_slice(children)) .expect(EXPECT_MESSAGE) .into() } @@ -170,7 +169,7 @@ impl TaffyLayoutEngine { // if !self.computed_layouts.insert(id) { - let mut stack = SmallVec::<[LayoutId; 64]>::new(); + let mut stack = &mut self.layout_bounds_scratch_space; stack.push(id); while let Some(id) = stack.pop() { self.absolute_layout_bounds.remove(&id); @@ -179,7 +178,7 @@ impl TaffyLayoutEngine { .children(id.into()) .expect(EXPECT_MESSAGE) .into_iter() - .map(Into::into), + .map(LayoutId::from), ); } } @@ -265,6 +264,13 @@ impl TaffyLayoutEngine { #[repr(transparent)] pub struct LayoutId(NodeId); +impl LayoutId { + fn to_taffy_slice(node_ids: &[Self]) -> &[taffy::NodeId] { + // SAFETY: LayoutId is repr(transparent) to taffy::tree::NodeId. + unsafe { std::mem::transmute::<&[LayoutId], &[taffy::NodeId]>(node_ids) } + } +} + impl std::hash::Hash for LayoutId { fn hash(&self, state: &mut H) { u64::from(self.0).hash(state); diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index efac0087387e394e0e43859ea1dabdeb087d6b34..85a3133ca6c9e559c1cae76f595426d702bfd3f3 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -73,12 +73,15 @@ impl TextSystem { fallback_font_stack: smallvec![ // TODO: Remove this when Linux have implemented setting fallbacks. 
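                // (Assumption: earlier entries in this stack take precedence when
                // resolving glyphs missing from the primary font.)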
font(".ZedMono"), + font(".ZedSans"), font("Helvetica"), - font("Segoe UI"), // Windows - font("Cantarell"), // Gnome - font("Ubuntu"), // Gnome (Ubuntu) - font("Noto Sans"), // KDE - font("DejaVu Sans") + font("Segoe UI"), // Windows + font("Ubuntu"), // Gnome (Ubuntu) + font("Adwaita Sans"), // Gnome 47 + font("Cantarell"), // Gnome + font("Noto Sans"), // KDE + font("DejaVu Sans"), + font("Arial"), // macOS, Windows ], } } @@ -420,9 +423,9 @@ impl WindowTextSystem { let mut wrapped_lines = 0; let mut process_line = |line_text: SharedString| { + font_runs.clear(); let line_end = line_start + line_text.len(); - let mut last_font: Option = None; let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new(); let mut run_start = line_start; while run_start < line_end { @@ -432,23 +435,14 @@ impl WindowTextSystem { let run_len_within_line = cmp::min(line_end, run_start + run.len) - run_start; - if last_font == Some(run.font.clone()) { - font_runs.last_mut().unwrap().len += run_len_within_line; - } else { - last_font = Some(run.font.clone()); - font_runs.push(FontRun { - len: run_len_within_line, - font_id: self.resolve_font(&run.font), - }); - } - - if decoration_runs.last().is_some_and(|last_run| { - last_run.color == run.color - && last_run.underline == run.underline - && last_run.strikethrough == run.strikethrough - && last_run.background_color == run.background_color - }) { - decoration_runs.last_mut().unwrap().len += run_len_within_line as u32; + let decoration_changed = if let Some(last_run) = decoration_runs.last_mut() + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + && last_run.background_color == run.background_color + { + last_run.len += run_len_within_line as u32; + false } else { decoration_runs.push(DecorationRun { len: run_len_within_line as u32, @@ -457,6 +451,20 @@ impl WindowTextSystem { underline: run.underline, strikethrough: run.strikethrough, }); + true + }; + + let font_id = self.resolve_font(&run.font); + if let Some(font_run) = font_runs.last_mut() + && font_id == font_run.font_id + && !decoration_changed + { + font_run.len += run_len_within_line; + } else { + font_runs.push(FontRun { + len: run_len_within_line, + font_id, + }); } if run_len_within_line == run.len { @@ -491,8 +499,6 @@ impl WindowTextSystem { runs.next(); } } - - font_runs.clear(); }; let mut split_lines = text.split('\n'); @@ -526,37 +532,54 @@ impl WindowTextSystem { /// Subsets of the line can be styled independently with the `runs` parameter. /// Generally, you should prefer to use [`Self::shape_line`] instead, which /// can be painted directly. 
- pub fn layout_line( + pub fn layout_line( &self, - text: Text, + text: &str, font_size: Pixels, runs: &[TextRun], force_width: Option, - ) -> Arc - where - Text: AsRef, - SharedString: From, - { + ) -> Arc { + let mut last_run = None::<&TextRun>; + let mut last_font: Option = None; let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + for run in runs.iter() { - let font_id = self.resolve_font(&run.font); - if let Some(last_run) = font_runs.last_mut() - && last_run.font_id == font_id + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color { - last_run.len += run.len; - continue; + false + } else { + last_run = Some(run); + true + }; + + if let Some(font_run) = font_runs.last_mut() + && Some(font_run.font_id) == last_font + && !decoration_changed + { + font_run.len += run.len; + } else { + let font_id = self.resolve_font(&run.font); + last_font = Some(font_id); + font_runs.push(FontRun { + len: run.len, + font_id, + }); } - font_runs.push(FontRun { - len: run.len, - font_id, - }); } - let layout = - self.line_layout_cache - .layout_line_internal(text, font_size, &font_runs, force_width); + let layout = self.line_layout_cache.layout_line( + &SharedString::new(text), + font_size, + &font_runs, + force_width, + ); - font_runs.clear(); self.font_runs_pool.lock().push(font_runs); layout diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index eff4e640efb28a9b70c0da2008cd2293ee2dae47..375a9bdc7bccdddb9d34409c5ced138b2d5aebd2 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -501,7 +501,7 @@ impl LineLayoutCache { } else { drop(current_frame); let text = SharedString::from(text); - let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs); + let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs, None); let wrap_boundaries = if let Some(wrap_width) = wrap_width { unwrapped_layout.compute_wrap_boundaries(text.as_ref(), wrap_width, max_lines) } else { @@ -535,19 +535,6 @@ impl LineLayoutCache { text: Text, font_size: Pixels, runs: &[FontRun], - ) -> Arc - where - Text: AsRef, - SharedString: From, - { - self.layout_line_internal(text, font_size, runs, None) - } - - pub fn layout_line_internal( - &self, - text: Text, - font_size: Pixels, - runs: &[FontRun], force_width: Option, ) -> Arc where diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index d499d78551a5e0e268b575496bbdac5ddf59369c..55599cc0535dfdd94bfd895ce6001f3a83a27cf6 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -163,6 +163,8 @@ impl LineWrapper { line } + /// Any character in this list should be treated as a word character, + /// meaning it can be part of a word that should not be wrapped. pub(crate) fn is_word_char(c: char) -> bool { // ASCII alphanumeric characters, for English, numbers: `Hello123`, etc. 
c.is_ascii_alphanumeric() || @@ -180,10 +182,9 @@ impl LineWrapper { // https://en.wikipedia.org/wiki/Cyrillic_script_in_Unicode matches!(c, '\u{0400}'..='\u{04FF}') || // Some other known special characters that should be treated as word characters, - // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, `2^3`, `a~b`, etc. - matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '!' | ';' | '*') || - // Characters that used in URL, e.g. `https://github.com/zed-industries/zed?a=1&b=2` for better wrapping a long URL. - matches!(c, '/' | ':' | '?' | '&' | '=') || + // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, + // `2^3`, `a~b`, `a=1`, `Self::new`, etc. + matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',' | '=' | ':') || // `⋯` character is special used in Zed, to keep this at the end of the line. matches!(c, '⋯') } @@ -225,19 +226,15 @@ impl LineWrapper { fn update_runs_after_truncation(result: &str, ellipsis: &str, runs: &mut Vec) { let mut truncate_at = result.len() - ellipsis.len(); - let mut run_end = None; for (run_index, run) in runs.iter_mut().enumerate() { if run.len <= truncate_at { truncate_at -= run.len; } else { run.len = truncate_at + ellipsis.len(); - run_end = Some(run_index + 1); + runs.truncate(run_index + 1); break; } } - if let Some(run_end) = run_end { - runs.truncate(run_end); - } } /// A fragment of a line that can be wrapped. @@ -648,15 +645,19 @@ mod tests { assert_word("@mention"); assert_word("#hashtag"); assert_word("$variable"); + assert_word("a=1"); + assert_word("Self::is_word_char"); assert_word("more⋯"); // Space assert_not_word("foo bar"); // URL case - assert_word("https://github.com/zed-industries/zed/"); assert_word("github.com"); - assert_word("a=1&b=2"); + assert_not_word("zed-industries/zed"); + assert_not_word("zed-industries\\zed"); + assert_not_word("a=1&b=2"); + assert_not_word("foo?b=2"); // Latin-1 Supplement assert_word("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏ"); diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index badb68008216400464e997f3252e9467edb234c6..92c86810c5e30c4c1bc614788b0f16f4966f3b4c 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -83,8 +83,11 @@ impl FutureExt for T { } } +#[pin_project::pin_project] pub struct WithTimeout { + #[pin] future: T, + #[pin] timer: Task<()>, } @@ -97,15 +100,11 @@ impl Future for WithTimeout { type Output = Result; fn poll(self: Pin<&mut Self>, cx: &mut task::Context) -> task::Poll { - // SAFETY: the fields of Timeout are private and we never move the future ourselves - // And its already pinned since we are being polled (all futures need to be pinned to be polled) - let this = unsafe { &raw mut *self.get_unchecked_mut() }; - let future = unsafe { Pin::new_unchecked(&mut (*this).future) }; - let timer = unsafe { Pin::new_unchecked(&mut (*this).timer) }; + let this = self.project(); - if let task::Poll::Ready(output) = future.poll(cx) { + if let task::Poll::Ready(output) = this.future.poll(cx) { task::Poll::Ready(Ok(output)) - } else if timer.poll(cx).is_ready() { + } else if this.timer.poll(cx).is_ready() { task::Poll::Ready(Err(Timeout)) } else { task::Poll::Pending diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index caf78fe407ea0a61a88efd9462be5fce005dedbf..0610ea96cb5150cfbad72b2b70b4432df9b76ca2 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -60,6 +60,13 @@ pub use prompts::*; pub(crate) const DEFAULT_WINDOW_SIZE: Size = size(px(1536.), 
px(864.)); +/// A 6:5 aspect ratio minimum window size to be used for functional, +/// additional-to-main-Zed windows, like the settings and rules library windows. +pub const DEFAULT_ADDITIONAL_WINDOW_SIZE: Size = Size { + width: Pixels(900.), + height: Pixels(750.), +}; + /// Represents the two different phases when dispatching events. #[derive(Default, Copy, Clone, Debug, Eq, PartialEq)] pub enum DispatchPhase { @@ -863,6 +870,7 @@ pub struct Window { hovered: Rc>, pub(crate) needs_present: Rc>, pub(crate) last_input_timestamp: Rc>, + last_input_was_keyboard: bool, pub(crate) refreshing: bool, pub(crate) activation_observers: SubscriberSet<(), AnyObserver>, pub(crate) focus: Option, @@ -1246,6 +1254,7 @@ impl Window { hovered, needs_present, last_input_timestamp, + last_input_was_keyboard: false, refreshing: false, activation_observers: SubscriberSet::new(), focus: None, @@ -1307,9 +1316,7 @@ impl Window { for view_id in self .rendered_frame .dispatch_tree - .view_path(view_id) - .into_iter() - .rev() + .view_path_reversed(view_id) { if !self.dirty_views.insert(view_id) { break; @@ -1839,7 +1846,8 @@ impl Window { f: impl FnOnce(&GlobalElementId, &mut Self) -> R, ) -> R { self.element_id_stack.push(element_id); - let global_id = GlobalElementId(self.element_id_stack.clone()); + let global_id = GlobalElementId(Arc::from(&*self.element_id_stack)); + let result = f(&global_id, self); self.element_id_stack.pop(); result @@ -1899,6 +1907,12 @@ impl Window { self.modifiers } + /// Returns true if the last input event was keyboard-based (key press, tab navigation, etc.) + /// This is used for focus-visible styling to show focus indicators only for keyboard navigation. + pub fn last_input_was_keyboard(&self) -> bool { + self.last_input_was_keyboard + } + /// The current state of the keyboard's capslock pub fn capslock(&self) -> Capslock { self.capslock @@ -2245,7 +2259,7 @@ impl Window { self.rendered_frame.accessed_element_states[range.start.accessed_element_states_index ..range.end.accessed_element_states_index] .iter() - .map(|(id, type_id)| (GlobalElementId(id.0.clone()), *type_id)), + .map(|(id, type_id)| (id.clone(), *type_id)), ); self.text_system .reuse_layouts(range.start.line_layout_index..range.end.line_layout_index); @@ -2313,7 +2327,7 @@ impl Window { self.rendered_frame.accessed_element_states[range.start.accessed_element_states_index ..range.end.accessed_element_states_index] .iter() - .map(|(id, type_id)| (GlobalElementId(id.0.clone()), *type_id)), + .map(|(id, type_id)| (id.clone(), *type_id)), ); self.next_frame.tab_stops.replay( &self.rendered_frame.tab_stops.insertion_history @@ -2635,10 +2649,8 @@ impl Window { { self.invalidator.debug_assert_paint_or_prepaint(); - let key = (GlobalElementId(global_id.0.clone()), TypeId::of::()); - self.next_frame - .accessed_element_states - .push((GlobalElementId(key.0.clone()), TypeId::of::())); + let key = (global_id.clone(), TypeId::of::()); + self.next_frame.accessed_element_states.push(key.clone()); if let Some(any) = self .next_frame @@ -3580,6 +3592,15 @@ impl Window { #[profiling::function] pub fn dispatch_event(&mut self, event: PlatformInput, cx: &mut App) -> DispatchEventResult { self.last_input_timestamp.set(Instant::now()); + + // Track whether this input was keyboard-based for focus-visible styling + self.last_input_was_keyboard = matches!( + event, + PlatformInput::KeyDown(_) + | PlatformInput::KeyUp(_) + | PlatformInput::ModifiersChanged(_) + ); + // Handlers may set this to false by calling `stop_propagation`. 
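The `last_input_was_keyboard` flag above is recomputed on every `dispatch_event` call and read back through `Window::last_input_was_keyboard()` to decide whether focus-visible styling should apply. A small stand-alone sketch of that idea; `Input` and `FocusTracker` are stand-ins for `PlatformInput` and the window state, not real GPUI types:

```rust
// Stand-in for PlatformInput: only the shape matters for this sketch.
enum Input {
    KeyDown(char),
    KeyUp(char),
    ModifiersChanged,
    MouseDown { x: f32, y: f32 },
}

#[derive(Default)]
struct FocusTracker {
    last_input_was_keyboard: bool,
}

impl FocusTracker {
    fn dispatch(&mut self, input: &Input) {
        // Mirrors the `matches!` in `dispatch_event`: only key events count.
        self.last_input_was_keyboard = matches!(
            input,
            Input::KeyDown(_) | Input::KeyUp(_) | Input::ModifiersChanged
        );
    }

    /// Whether a focused element should draw a visible focus indicator.
    fn focus_visible(&self, is_focused: bool) -> bool {
        is_focused && self.last_input_was_keyboard
    }
}

fn main() {
    let mut tracker = FocusTracker::default();
    tracker.dispatch(&Input::KeyDown('\t')); // tab navigation
    assert!(tracker.focus_visible(true));
    tracker.dispatch(&Input::MouseDown { x: 10.0, y: 20.0 });
    assert!(!tracker.focus_visible(true)); // clicked: hide the focus ring
}
```

Any non-keyboard event clears the flag, so a mouse click hides the focus ring until the user navigates by keyboard again.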
cx.propagate_event = true; // Handlers may set this to true by calling `prevent_default`. @@ -4313,14 +4334,14 @@ impl Window { } /// Returns a generic handler that invokes the given handler with the view and context associated with the given view handle. - pub fn handler_for) + 'static>( + pub fn handler_for) + 'static>( &self, - view: &Entity, + entity: &Entity, f: Callback, - ) -> impl Fn(&mut Window, &mut App) + use { - let view = view.downgrade(); + ) -> impl Fn(&mut Window, &mut App) + 'static { + let entity = entity.downgrade(); move |window: &mut Window, cx: &mut App| { - view.update(cx, |view, cx| f(view, window, cx)).ok(); + entity.update(cx, |entity, cx| f(entity, window, cx)).ok(); } } @@ -4650,7 +4671,7 @@ pub struct WindowHandle { #[deref] #[deref_mut] pub(crate) any_handle: AnyWindowHandle, - state_type: PhantomData, + state_type: PhantomData V>, } impl Debug for WindowHandle { @@ -4718,7 +4739,7 @@ impl WindowHandle { .get(self.id) .and_then(|window| { window - .as_ref() + .as_deref() .and_then(|window| window.root.clone()) .map(|root_view| root_view.downcast::()) }) @@ -4786,9 +4807,6 @@ impl From> for AnyWindowHandle { } } -unsafe impl Send for WindowHandle {} -unsafe impl Sync for WindowHandle {} - /// A handle to a window with any root view type, which can be downcast to a window with a specific root view type. #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct AnyWindowHandle { @@ -4882,7 +4900,7 @@ pub enum ElementId { /// A code location. CodeLocation(core::panic::Location<'static>), /// A labeled child of an element. - NamedChild(Box, SharedString), + NamedChild(Arc, SharedString), } impl ElementId { @@ -4996,7 +5014,7 @@ impl From<(&'static str, u32)> for ElementId { impl> From<(ElementId, T)> for ElementId { fn from((id, name): (ElementId, T)) -> Self { - ElementId::NamedChild(Box::new(id), name.into()) + ElementId::NamedChild(Arc::new(id), name.into()) } } diff --git a/crates/gpui_macros/Cargo.toml b/crates/gpui_macros/Cargo.toml index 0722d8d229108c999a8f4b0ff45621b1d53587a6..2ee8da52fb7a013cefdd5fe79520a5d18f1e5b3f 100644 --- a/crates/gpui_macros/Cargo.toml +++ b/crates/gpui_macros/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "gpui-macros" +name = "gpui_macros" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "Macros used by gpui" @@ -22,7 +22,6 @@ heck.workspace = true proc-macro2.workspace = true quote.workspace = true syn.workspace = true -workspace-hack.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["inspector"] } diff --git a/crates/gpui_tokio/Cargo.toml b/crates/gpui_tokio/Cargo.toml index 2d4abf40631a2f011306d7216a5f96864ccdb0da..e9d72b8ec25c1464e622ec1e531298cbd2df8c37 100644 --- a/crates/gpui_tokio/Cargo.toml +++ b/crates/gpui_tokio/Cargo.toml @@ -17,4 +17,3 @@ anyhow.workspace = true util.workspace = true gpui.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } -workspace-hack.workspace = true diff --git a/crates/html_to_markdown/Cargo.toml b/crates/html_to_markdown/Cargo.toml index 16f10d0cbc1343c0ce7bc439fa860b8002e3d94a..70ff3b3555ee3a2e03debe6aaa24f68ddbc4196a 100644 --- a/crates/html_to_markdown/Cargo.toml +++ b/crates/html_to_markdown/Cargo.toml @@ -20,7 +20,6 @@ anyhow.workspace = true html5ever.workspace = true markup5ever_rcdom.workspace = true regex.workspace = true -workspace-hack.workspace = true [dev-dependencies] indoc.workspace = true diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml 
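The `WindowHandle` hunk above removes the hand-written `unsafe impl Send`/`Sync` at the same time as it changes the `PhantomData` marker; a function-pointer marker along the lines of `PhantomData<fn() -> V>` has exactly that effect, because `fn() -> V` is `Send + Sync` (and owns no `V`) regardless of `V`, so the auto traits are derived for the handle. A hedged sketch with an illustrative `Handle` type, not the real `WindowHandle`:

```rust
use std::marker::PhantomData;
use std::rc::Rc;

// ID-like handle that only names a V without storing one.
struct Handle<V> {
    id: u64,
    _marker: PhantomData<fn() -> V>,
}

fn assert_send_sync<T: Send + Sync>() {}

fn main() {
    // Rc<()> is neither Send nor Sync, yet the handle still is, because the
    // marker mentions V only inside a function-pointer type.
    assert_send_sync::<Handle<Rc<()>>>();

    let handle = Handle::<Rc<()>> { id: 1, _marker: PhantomData };
    println!("handle {}", handle.id);
}
```

The same trick is common for handles that merely identify a `V` rather than containing one, since it avoids both the unsafe impls and any accidental drop or ownership implications.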
index 3a4d875f6a99a868d4adfe39bc83bf5ee2007023..f4ce028b1c650ba3c85081d7737c99e9d1434e44 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-http-client" +name = "http_client" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A HTTP client library for Zed and GPUI" @@ -35,4 +35,3 @@ sha2.workspace = true tempfile.workspace = true url.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 76bf0b905dbdc827f38aa37a95edc0e3b9e834eb..056cee4e346e34b5689a0dfe3278c880b7297986 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -6,13 +6,12 @@ pub use anyhow::{Result, anyhow}; pub use async_body::{AsyncBody, Inner}; use derive_more::Deref; use http::HeaderValue; -pub use http::{self, Method, Request, Response, StatusCode, Uri}; +pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder}; use futures::{ FutureExt as _, future::{self, BoxFuture}, }; -use http::request::Builder; use parking_lot::Mutex; #[cfg(feature = "test-support")] use std::fmt; diff --git a/crates/http_client_tls/Cargo.toml b/crates/http_client_tls/Cargo.toml index d0b45d70346de1b0ff5e3a0f5a62d643622778ba..a55268ac314ebe4a45d2aaa53c6281f8ebac6aa2 100644 --- a/crates/http_client_tls/Cargo.toml +++ b/crates/http_client_tls/Cargo.toml @@ -18,4 +18,3 @@ doctest = true [dependencies] rustls.workspace = true rustls-platform-verifier.workspace = true -workspace-hack.workspace = true diff --git a/crates/icons/Cargo.toml b/crates/icons/Cargo.toml index c2574014eabef20017fae91cfc0d35bbfeb38ee8..fc00165843a84d7948c7bbcc1b83a9d7c43b67a1 100644 --- a/crates/icons/Cargo.toml +++ b/crates/icons/Cargo.toml @@ -14,4 +14,3 @@ path = "src/icons.rs" [dependencies] serde.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/image_viewer/Cargo.toml b/crates/image_viewer/Cargo.toml index 1afa2c5f9dd90956b93c2e9dfac3537c7253a610..92386e8ba8a38f79711ee50343a6e7cf4a393cbd 100644 --- a/crates/image_viewer/Cargo.toml +++ b/crates/image_viewer/Cargo.toml @@ -30,7 +30,6 @@ theme.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/image_viewer/src/image_info.rs b/crates/image_viewer/src/image_info.rs index 70a92736aa3d8715a3974ddc5709743e001d9fe8..6e8956abc67868457f071e04f3c2a1957ff6c19c 100644 --- a/crates/image_viewer/src/image_info.rs +++ b/crates/image_viewer/src/image_info.rs @@ -47,7 +47,7 @@ impl Render for ImageInfo { let settings = ImageViewerSettings::get_global(cx); let Some(metadata) = self.metadata.as_ref() else { - return div(); + return div().hidden(); }; let mut components = Vec::new(); diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 8a4f2ebfe237c66d33e5d97a461b34552d0e5974..6162d77241c5b5e85d3f41a3cbc9bdaba6766d65 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -1,6 +1,8 @@ mod image_info; mod image_viewer_settings; +use std::path::Path; + use anyhow::Context as _; use editor::{EditorSettings, items::entry_git_aware_label_color}; use file_icons::FileIcons; @@ -13,11 +15,12 @@ use language::{DiskState, File as _}; use 
persistence::IMAGE_VIEWER; use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent}; use settings::Settings; -use theme::Theme; +use theme::{Theme, ThemeSettings}; use ui::prelude::*; use util::paths::PathExt; use workspace::{ ItemId, ItemSettings, Pane, ToolbarItemLocation, Workspace, WorkspaceId, delete_unloaded_items, + invalid_item_view::InvalidItemView, item::{BreadcrumbText, Item, ProjectItem, SerializableItem, TabContentParams}, }; @@ -162,10 +165,12 @@ impl Item for ImageView { fn breadcrumbs(&self, _theme: &Theme, cx: &App) -> Option> { let text = breadcrumbs_text_for_image(self.project.read(cx), self.image_item.read(cx), cx); + let settings = ThemeSettings::get_global(cx); + Some(vec![BreadcrumbText { text, highlights: None, - font: None, + font: Some(settings.buffer_font.clone()), }]) } @@ -188,6 +193,9 @@ impl Item for ImageView { fn has_deleted_file(&self, cx: &App) -> bool { self.image_item.read(cx).file.disk_state() == DiskState::Deleted } + fn buffer_kind(&self, _: &App) -> workspace::item::ItemBufferKind { + workspace::item::ItemBufferKind::Singleton + } } fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &App) -> String { @@ -293,72 +301,79 @@ impl Focusable for ImageView { impl Render for ImageView { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let image = self.image_item.read(cx).image.clone(); - let checkered_background = |bounds: Bounds, - _, - window: &mut Window, - _cx: &mut App| { - let square_size = 32.0; - - let start_y = bounds.origin.y.into(); - let height: f32 = bounds.size.height.into(); - let start_x = bounds.origin.x.into(); - let width: f32 = bounds.size.width.into(); - - let mut y = start_y; - let mut x = start_x; - let mut color_swapper = true; - // draw checkerboard pattern - while y <= start_y + height { - // Keeping track of the grid in order to be resilient to resizing - let start_swap = color_swapper; - while x <= start_x + width { - let rect = - Bounds::new(point(px(x), px(y)), size(px(square_size), px(square_size))); - - let color = if color_swapper { - opaque_grey(0.6, 0.4) - } else { - opaque_grey(0.7, 0.4) - }; - - window.paint_quad(fill(rect, color)); - color_swapper = !color_swapper; - x += square_size; + let checkered_background = + |bounds: Bounds, _, window: &mut Window, _cx: &mut App| { + let square_size: f32 = 32.0; + + let start_y = bounds.origin.y.into(); + let height: f32 = bounds.size.height.into(); + let start_x = bounds.origin.x.into(); + let width: f32 = bounds.size.width.into(); + + let mut y = start_y; + let mut x = start_x; + let mut color_swapper = true; + // draw checkerboard pattern + while y < start_y + height { + // Keeping track of the grid in order to be resilient to resizing + let start_swap = color_swapper; + while x < start_x + width { + // Clamp square dimensions to not exceed bounds + let square_width = square_size.min(start_x + width - x); + let square_height = square_size.min(start_y + height - y); + + let rect = Bounds::new( + point(px(x), px(y)), + size(px(square_width), px(square_height)), + ); + + let color = if color_swapper { + opaque_grey(0.6, 0.4) + } else { + opaque_grey(0.7, 0.4) + }; + + window.paint_quad(fill(rect, color)); + color_swapper = !color_swapper; + x += square_size; + } + x = start_x; + color_swapper = !start_swap; + y += square_size; } - x = start_x; - color_swapper = !start_swap; - y += square_size; - } - }; + }; - let checkered_background = canvas(|_, _, _| (), checkered_background) - .border_2() - 
.border_color(cx.theme().styles.colors.border) - .size_full() - .absolute() - .top_0() - .left_0(); - - div() - .track_focus(&self.focus_handle(cx)) - .size_full() - .child(checkered_background) - .child( - div() - .flex() - .justify_center() - .items_center() - .w_full() - // TODO: In browser based Tailwind & Flex this would be h-screen and we'd use w-full - .h_full() - .child( - img(image) - .object_fit(ObjectFit::ScaleDown) - .max_w_full() - .max_h_full() - .id("img"), - ), - ) + div().track_focus(&self.focus_handle(cx)).size_full().child( + div() + .flex() + .justify_center() + .items_center() + .w_full() + // TODO: In browser based Tailwind & Flex this would be h-screen and we'd use w-full + .h_full() + .child( + div() + .relative() + .max_w_full() + .max_h_full() + .child( + canvas(|_, _, _| (), checkered_background) + .border_2() + .border_color(cx.theme().styles.colors.border) + .size_full() + .absolute() + .top_0() + .left_0(), + ) + .child( + img(image) + .object_fit(ObjectFit::ScaleDown) + .max_w_full() + .max_h_full() + .id("img"), + ), + ), + ) } } @@ -377,6 +392,19 @@ impl ProjectItem for ImageView { { Self::new(item, project, window, cx) } + + fn for_broken_project_item( + abs_path: &Path, + is_local: bool, + e: &anyhow::Error, + window: &mut Window, + cx: &mut App, + ) -> Option + where + Self: Sized, + { + Some(InvalidItemView::new(abs_path, is_local, e, window, cx)) + } } pub fn init(cx: &mut App) { diff --git a/crates/inspector_ui/Cargo.toml b/crates/inspector_ui/Cargo.toml index 9272e5e72be941adc610ec343583a5e04448394f..aaf40b2f8d11aa324f2f76e71988ada87415237b 100644 --- a/crates/inspector_ui/Cargo.toml +++ b/crates/inspector_ui/Cargo.toml @@ -26,6 +26,5 @@ title_bar.workspace = true ui.workspace = true util.workspace = true util_macros.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/install_cli/Cargo.toml b/crates/install_cli/Cargo.toml index 4679f9e54fc8139b6d91a32f897e5b5c1802aa04..1eede025e50a236523b35137a56c02887436c257 100644 --- a/crates/install_cli/Cargo.toml +++ b/crates/install_cli/Cargo.toml @@ -21,5 +21,4 @@ gpui.workspace = true release_channel.workspace = true smol.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true diff --git a/crates/journal/Cargo.toml b/crates/journal/Cargo.toml index 1b32c9cdbb7a20a13e39a2d61554e4dd7018d81b..a78a2cc3b2ef465c38367255019e0bda104b5ef2 100644 --- a/crates/journal/Cargo.toml +++ b/crates/journal/Cargo.toml @@ -22,7 +22,6 @@ serde.workspace = true settings.workspace = true shellexpand.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/json_schema_store/Cargo.toml b/crates/json_schema_store/Cargo.toml index 05c8cbfd9d5a83011f51b6a38f0f776e535c732f..efb1b36e7978805ec9c5a07baf9339f66a9d2f9f 100644 --- a/crates/json_schema_store/Cargo.toml +++ b/crates/json_schema_store/Cargo.toml @@ -30,7 +30,6 @@ snippet_provider.workspace = true task.workspace = true theme.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/keymap_editor/Cargo.toml b/crates/keymap_editor/Cargo.toml index ccd42dfa01a081efe940e6f309d87d61aa9472c7..b6086566c3be01b60527d497b836fc53d101e467 100644 --- a/crates/keymap_editor/Cargo.toml +++ b/crates/keymap_editor/Cargo.toml @@ -42,7 +42,6 @@ ui_input.workspace = true ui.workspace = true util.workspace = true vim.workspace = true 
-workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 76c14ccfe48dbd85b6b02249ac4a26a73c129f4f..8e50a7303fb98febb492eb3f8b4aed4d928a879e 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -1,6 +1,7 @@ use std::{ cmp::{self}, ops::{Not as _, Range}, + rc::Rc, sync::Arc, time::Duration, }; @@ -23,14 +24,16 @@ use gpui::{ use language::{Language, LanguageConfig, ToOffset as _}; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{CompletionDisplayOptions, Project}; -use settings::{BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets}; +use settings::{ + BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets, infer_json_indent_size, +}; use ui::{ ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, IconButtonShape, Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _, PopoverMenu, Render, Section, SharedString, Styled as _, Table, TableColumnWidths, TableInteractionState, TableResizeBehavior, Tooltip, Window, prelude::*, }; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::ResultExt; use workspace::{ Item, ModalView, SerializableItem, Workspace, notifications::NotifyTaskExt as _, @@ -171,7 +174,7 @@ impl FilterState { #[derive(Debug, Default, PartialEq, Eq, Clone, Hash)] struct ActionMapping { - keystrokes: Vec, + keystrokes: Rc<[KeybindingKeystroke]>, context: Option, } @@ -233,7 +236,7 @@ struct ConflictState { } type ConflictKeybindMapping = HashMap< - Vec, + Rc<[KeybindingKeystroke]>, Vec<( Option, Vec, @@ -255,7 +258,7 @@ impl ConflictState { .context .and_then(|ctx| gpui::KeyBindingContextPredicate::parse(&ctx).ok()); let entry = action_keybind_mapping - .entry(mapping.keystrokes) + .entry(mapping.keystrokes.clone()) .or_default(); let origin = ConflictOrigin::new(binding.source, index); if let Some((_, origins)) = @@ -683,8 +686,7 @@ impl KeymapEditor { .unwrap_or(KeybindSource::Unknown); let keystroke_text = ui::text_for_keybinding_keystrokes(key_binding.keystrokes(), cx); - let ui_key_binding = ui::KeyBinding::new_from_gpui(key_binding.clone(), cx) - .vim_mode(source == KeybindSource::Vim); + let binding = KeyBinding::new(key_binding, source); let context = key_binding .predicate() @@ -715,7 +717,7 @@ impl KeymapEditor { StringMatchCandidate::new(index, &action_information.humanized_name); processed_bindings.push(ProcessedBinding::new_mapped( keystroke_text, - ui_key_binding, + binding, context, source, action_information, @@ -973,12 +975,11 @@ impl KeymapEditor { if conflict.is_user_keybind_conflict() { base_button_style(index, IconName::Warning) .icon_color(Color::Warning) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "View conflicts", Some(&ToggleConflictFilter), "Use alt+click to show all conflicts", - window, cx, ) }) @@ -993,12 +994,11 @@ impl KeymapEditor { })) } else if self.search_mode.exact_match() { base_button_style(index, IconName::Info) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Edit this binding", Some(&ShowMatchingKeybinds), "This binding is overridden by other bindings.", - window, cx, ) }) @@ -1009,12 +1009,11 @@ impl KeymapEditor { })) } else { base_button_style(index, IconName::Info) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Show matching keybinds", Some(&ShowMatchingKeybinds), "This binding is 
overridden by other bindings.\nUse alt+click to edit this binding", - window, cx, ) }) @@ -1198,13 +1197,12 @@ impl KeymapEditor { else { return; }; - let tab_size = cx.global::().json_tab_size(); self.previous_edit = Some(PreviousEdit::ScrollBarOffset( self.table_interaction_state.read(cx).scroll_offset(), )); let keyboard_mapper = cx.keyboard_mapper().clone(); cx.spawn(async move |_, _| { - remove_keybinding(to_remove, &fs, tab_size, keyboard_mapper.as_ref()).await + remove_keybinding(to_remove, &fs, keyboard_mapper.as_ref()).await }) .detach_and_notify_err(window, cx); } @@ -1347,10 +1345,25 @@ impl HumanizedActionNameCache { } } +#[derive(Clone)] +struct KeyBinding { + keystrokes: Rc<[KeybindingKeystroke]>, + source: KeybindSource, +} + +impl KeyBinding { + fn new(binding: &gpui::KeyBinding, source: KeybindSource) -> Self { + Self { + keystrokes: Rc::from(binding.keystrokes()), + source, + } + } +} + #[derive(Clone)] struct KeybindInformation { keystroke_text: SharedString, - ui_binding: ui::KeyBinding, + binding: KeyBinding, context: KeybindContextString, source: KeybindSource, } @@ -1358,7 +1371,7 @@ struct KeybindInformation { impl KeybindInformation { fn get_action_mapping(&self) -> ActionMapping { ActionMapping { - keystrokes: self.ui_binding.keystrokes.clone(), + keystrokes: self.binding.keystrokes.clone(), context: self.context.local().cloned(), } } @@ -1400,7 +1413,7 @@ enum ProcessedBinding { impl ProcessedBinding { fn new_mapped( keystroke_text: impl Into, - ui_key_binding: ui::KeyBinding, + binding: KeyBinding, context: KeybindContextString, source: KeybindSource, action_information: ActionInformation, @@ -1408,7 +1421,7 @@ impl ProcessedBinding { Self::Mapped( KeybindInformation { keystroke_text: keystroke_text.into(), - ui_binding: ui_key_binding, + binding, context, source, }, @@ -1426,8 +1439,8 @@ impl ProcessedBinding { } fn keystrokes(&self) -> Option<&[KeybindingKeystroke]> { - self.ui_key_binding() - .map(|binding| binding.keystrokes.as_slice()) + self.key_binding() + .map(|binding| binding.keystrokes.as_ref()) } fn keybind_information(&self) -> Option<&KeybindInformation> { @@ -1445,9 +1458,8 @@ impl ProcessedBinding { self.keybind_information().map(|keybind| &keybind.context) } - fn ui_key_binding(&self) -> Option<&ui::KeyBinding> { - self.keybind_information() - .map(|keybind| &keybind.ui_binding) + fn key_binding(&self) -> Option<&KeyBinding> { + self.keybind_information().map(|keybind| &keybind.binding) } fn keystroke_text(&self) -> Option<&SharedString> { @@ -1598,12 +1610,11 @@ impl Render for KeymapEditor { .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Search by Keystroke", &ToggleKeystrokeSearch, &focus_handle.clone(), - window, cx, ) } @@ -1635,7 +1646,7 @@ impl Render for KeymapEditor { let filter_state = self.filter_state; let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( match filter_state { FilterState::All => "Show Conflicts", @@ -1645,7 +1656,6 @@ impl Render for KeymapEditor { }, &ToggleConflictFilter, &focus_handle.clone(), - window, cx, ) } @@ -1697,12 +1707,11 @@ impl Render for KeymapEditor { .icon_size(IconSize::Small), { let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "View Default...", &zed_actions::OpenKeymapFile, &focus_handle, - window, cx, ) } @@ -1744,12 +1753,11 @@ impl Render for KeymapEditor { let keystroke_focus_handle = 
self.keystroke_editor.read(cx).focus_handle(cx); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Exact Match Mode", &ToggleExactKeystrokeMatching, &keystroke_focus_handle, - window, cx, ) } @@ -1855,13 +1863,13 @@ impl Render for KeymapEditor { ) .into_any_element(); - let keystrokes = binding.ui_key_binding().cloned().map_or( + let keystrokes = binding.key_binding().map_or( binding .keystroke_text() .cloned() .unwrap_or_default() .into_any_element(), - IntoElement::into_any_element, + |binding| ui::KeyBinding::from_keystrokes(binding.keystrokes.clone(), binding.source).into_any_element() ); let action_arguments = match binding.action().arguments.clone() @@ -2113,7 +2121,7 @@ struct KeybindingEditorModal { editing_keybind: ProcessedBinding, editing_keybind_idx: usize, keybind_editor: Entity, - context_editor: Entity, + context_editor: Entity, action_arguments_editor: Option>, fs: Arc, error: Option, @@ -2147,8 +2155,8 @@ impl KeybindingEditorModal { let keybind_editor = cx .new(|cx| KeystrokeInput::new(editing_keybind.keystrokes().map(Vec::from), window, cx)); - let context_editor: Entity = cx.new(|cx| { - let input = SingleLineInput::new(window, cx, "Keybinding Context") + let context_editor: Entity = cx.new(|cx| { + let input = InputField::new(window, cx, "Keybinding Context") .label("Edit Context") .label_size(LabelSize::Default); @@ -2288,7 +2296,6 @@ impl KeybindingEditorModal { fn save(&mut self, cx: &mut Context) -> Result<(), InputError> { let existing_keybind = self.editing_keybind.clone(); let fs = self.fs.clone(); - let tab_size = cx.global::().json_tab_size(); let mut new_keystrokes = self.validate_keystrokes(cx).map_err(InputError::error)?; new_keystrokes @@ -2301,7 +2308,7 @@ impl KeybindingEditorModal { .map_err(InputError::error)?; let action_mapping = ActionMapping { - keystrokes: new_keystrokes, + keystrokes: Rc::from(new_keystrokes.as_slice()), context: new_context.map(SharedString::from), }; @@ -2367,7 +2374,6 @@ impl KeybindingEditorModal { &action_mapping, new_action_args.as_deref(), &fs, - tab_size, keyboard_mapper.as_ref(), ) .await @@ -3019,13 +3025,14 @@ async fn save_keybinding_update( action_mapping: &ActionMapping, new_args: Option<&str>, fs: &Arc, - tab_size: usize, keyboard_mapper: &dyn PlatformKeyboardMapper, ) -> anyhow::Result<()> { let keymap_contents = settings::KeymapFile::load_keymap_file(fs) .await .context("Failed to load keymap file")?; + let tab_size = infer_json_indent_size(&keymap_contents); + let existing_keystrokes = existing.keystrokes().unwrap_or_default(); let existing_context = existing.context().and_then(KeybindContextString::local_str); let existing_args = existing @@ -3089,7 +3096,6 @@ async fn save_keybinding_update( async fn remove_keybinding( existing: ProcessedBinding, fs: &Arc, - tab_size: usize, keyboard_mapper: &dyn PlatformKeyboardMapper, ) -> anyhow::Result<()> { let Some(keystrokes) = existing.keystrokes() else { @@ -3098,6 +3104,7 @@ async fn remove_keybinding( let keymap_contents = settings::KeymapFile::load_keymap_file(fs) .await .context("Failed to load keymap file")?; + let tab_size = infer_json_indent_size(&keymap_contents); let operation = settings::KeybindUpdateOperation::Remove { target: settings::KeybindUpdateTarget { diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 4a4f51a58be7e4ffc61617dd74c2ed0ad7b49d34..bbbf9e31a5b39069e93a5f52f18df16bbc9f9671 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -67,7 +67,7 @@ tree-sitter.workspace = 
true unicase = "2.6" util.workspace = true watch.workspace = true -workspace-hack.workspace = true +zlog.workspace = true diffy = "0.4.2" [dev-dependencies] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1605eea051b660f7285481223b0b3b9f97aef732..41c0e3eec8e8f4daaf5dff706dceea4159fedae1 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -18,8 +18,8 @@ pub use crate::{ proto, }; use anyhow::{Context as _, Result}; +use clock::Lamport; pub use clock::ReplicaId; -use clock::{AGENT_REPLICA_ID, Lamport}; use collections::HashMap; use fs::MTime; use futures::channel::oneshot; @@ -506,15 +506,15 @@ pub struct Chunk<'a> { pub highlight_style: Option, /// The severity of diagnostic associated with this chunk, if any. pub diagnostic_severity: Option, - /// Whether this chunk of text is marked as unnecessary. - pub is_unnecessary: bool, - /// Whether this chunk of text was originally a tab character. - pub is_tab: bool, /// A bitset of which characters are tabs in this string. pub tabs: u128, /// Bitmap of character indices in this chunk pub chars: u128, + /// Whether this chunk of text is marked as unnecessary. + pub is_unnecessary: bool, /// Whether this chunk of text was originally a tab character. + pub is_tab: bool, + /// Whether this chunk of text was originally an inlay. pub is_inlay: bool, /// Whether to underline the corresponding text range in the editor. pub underline: bool, @@ -828,7 +828,11 @@ impl Buffer { /// Create a new buffer with the given base text. pub fn local>(base_text: T, cx: &Context) -> Self { Self::build( - TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()), + TextBuffer::new( + ReplicaId::LOCAL, + cx.entity_id().as_non_zero_u64().into(), + base_text.into(), + ), None, Capability::ReadWrite, ) @@ -842,7 +846,7 @@ impl Buffer { ) -> Self { Self::build( TextBuffer::new_normalized( - 0, + ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), line_ending, base_text_normalized, @@ -991,10 +995,10 @@ impl Buffer { language: None, remote_selections: Default::default(), diagnostics: Default::default(), - diagnostics_timestamp: Default::default(), + diagnostics_timestamp: Lamport::MIN, completion_triggers: Default::default(), completion_triggers_per_language_server: Default::default(), - completion_triggers_timestamp: Default::default(), + completion_triggers_timestamp: Lamport::MIN, deferred_ops: OperationQueue::new(), has_conflict: false, change_bits: Default::default(), @@ -1012,7 +1016,8 @@ impl Buffer { let buffer_id = entity_id.as_non_zero_u64().into(); async move { let text = - TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); + TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text) + .snapshot(); let mut syntax = SyntaxMap::new(&text).snapshot(); if let Some(language) = language.clone() { let language_registry = language_registry.clone(); @@ -1033,8 +1038,13 @@ impl Buffer { pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot { let entity_id = cx.reserve_entity::().entity_id(); let buffer_id = entity_id.as_non_zero_u64().into(); - let text = - TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot(); + let text = TextBuffer::new_normalized( + ReplicaId::LOCAL, + buffer_id, + Default::default(), + Rope::new(), + ) + .snapshot(); let syntax = SyntaxMap::new(&text).snapshot(); BufferSnapshot { text, @@ -1056,7 +1066,9 @@ impl Buffer { ) -> BufferSnapshot { let entity_id = 
cx.reserve_entity::().entity_id(); let buffer_id = entity_id.as_non_zero_u64().into(); - let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); + let text = + TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text) + .snapshot(); let mut syntax = SyntaxMap::new(&text).snapshot(); if let Some(language) = language.clone() { syntax.reparse(&text, language_registry, language); @@ -1996,7 +2008,7 @@ impl Buffer { self.end_transaction(cx) } - fn has_unsaved_edits(&self) -> bool { + pub fn has_unsaved_edits(&self) -> bool { let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take(); if last_version == self.version { @@ -2066,12 +2078,15 @@ impl Buffer { } } + /// Set the change bit for all "listeners". fn was_changed(&mut self) { self.change_bits.retain(|change_bit| { - change_bit.upgrade().is_some_and(|bit| { - bit.replace(true); - true - }) + change_bit + .upgrade() + .inspect(|bit| { + _ = bit.replace(true); + }) + .is_some() }); } @@ -2260,7 +2275,7 @@ impl Buffer { ) { let lamport_timestamp = self.text.lamport_clock.tick(); self.remote_selections.insert( - AGENT_REPLICA_ID, + ReplicaId::AGENT, SelectionSet { selections, lamport_timestamp, @@ -2917,7 +2932,7 @@ impl Buffer { edits.push((range, new_text)); } - log::info!("mutating buffer {} with {:?}", self.replica_id(), edits); + log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits); self.edit(edits, None, cx); } @@ -4970,7 +4985,7 @@ impl<'a> Iterator for BufferChunks<'a> { text: chunk, chars: chars_map, tabs, - }) = self.chunks.peek_tabs() + }) = self.chunks.peek_with_bitmaps() { let chunk_start = self.range.start; let mut chunk_end = (self.chunks.offset() + chunk.len()) @@ -4983,18 +4998,14 @@ impl<'a> Iterator for BufferChunks<'a> { chunk_end = chunk_end.min(*parent_capture_end); highlight_id = Some(*parent_highlight_id); } - - let slice = - &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; + let bit_start = chunk_start - self.chunks.offset(); let bit_end = chunk_end - self.chunks.offset(); - let mask = if bit_end >= 128 { - u128::MAX - } else { - (1u128 << bit_end) - 1 - }; - let tabs = (tabs >> (chunk_start - self.chunks.offset())) & mask; - let chars_map = (chars_map >> (chunk_start - self.chunks.offset())) & mask; + let slice = &chunk[bit_start..bit_end]; + + let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1); + let tabs = (tabs >> bit_start) & mask; + let chars = (chars_map >> bit_start) & mask; self.range.start = chunk_end; if self.range.start == self.chunks.offset() + chunk.len() { @@ -5008,7 +5019,7 @@ impl<'a> Iterator for BufferChunks<'a> { diagnostic_severity: self.current_diagnostic_severity(), is_unnecessary: self.current_code_is_unnecessary(), tabs, - chars: chars_map, + chars, ..Chunk::default() }) } else { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 6c87ec5b5183bd5b37e9dd52d53c8fa0f8f28db1..f824639ad762191f4168586551af51fb4e37c8dc 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -70,7 +70,13 @@ fn test_line_endings(cx: &mut gpui::App) { fn test_set_line_ending(cx: &mut TestAppContext) { let base = cx.new(|cx| Buffer::local("one\ntwo\nthree\n", cx)); let base_replica = cx.new(|cx| { - Buffer::from_proto(1, Capability::ReadWrite, base.read(cx).to_proto(cx), None).unwrap() + Buffer::from_proto( + ReplicaId::new(1), + Capability::ReadWrite, + base.read(cx).to_proto(cx), + None, + ) + .unwrap() }); 
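The `BufferChunks` hunk above slices the per-chunk `tabs` and `chars` bitmaps with a shift plus a mask built via `1u128.unbounded_shl(...).wrapping_sub(1)`, which saturates cleanly instead of overflowing when the sub-range spans all 128 bits (replacing the old `if bit_end >= 128` special case). A distilled, self-contained version of that shift-and-mask idea; `slice_bitmap` and its exact mask width are illustrative, the real code computes the mask inline:

```rust
/// Extract bits [start, end) of a 128-bit per-chunk bitmap, re-based to bit 0.
fn slice_bitmap(bits: u128, start: u32, end: u32) -> u128 {
    // 1 << 128 saturates to 0 with unbounded_shl, and 0 - 1 wraps to u128::MAX,
    // so the full-width mask falls out without a branch.
    let mask = 1u128.unbounded_shl(end - start).wrapping_sub(1);
    (bits >> start) & mask
}

fn main() {
    // Tabs at byte offsets 0, 3, and 127 of a chunk.
    let tabs = (1u128 << 0) | (1u128 << 3) | (1u128 << 127);

    // Bytes 2..8: only the tab at offset 3 survives, re-based to offset 1.
    assert_eq!(slice_bitmap(tabs, 2, 8), 1 << 1);

    // The full-width slice would overflow a plain `1 << 128`; here it simply
    // keeps every bit, including bit 127.
    assert_eq!(slice_bitmap(tabs, 0, 128), tabs);
}
```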
base.update(cx, |_buffer, cx| { cx.subscribe(&base_replica, |this, _, event, cx| { @@ -269,7 +275,7 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { async fn test_language_for_file_with_custom_file_types(cx: &mut TestAppContext) { cx.update(|cx| { init_settings(cx, |settings| { - settings.file_types.extend([ + settings.file_types.get_or_insert_default().extend([ ("TypeScript".into(), vec!["js".into()].into()), ( "JavaScript".into(), @@ -397,7 +403,7 @@ fn test_edit_events(cx: &mut gpui::App) { let buffer2 = cx.new(|cx| { Buffer::remote( BufferId::from(cx.entity_id().as_non_zero_u64()), - 1, + ReplicaId::new(1), Capability::ReadWrite, "abcdef", ) @@ -2775,7 +2781,8 @@ fn test_serialization(cx: &mut gpui::App) { .background_executor() .block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new(|cx| { - let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); + let mut buffer = + Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -2794,7 +2801,13 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { // Create a remote replica of the base buffer. let base_replica = cx.new(|cx| { - Buffer::from_proto(1, Capability::ReadWrite, base.read(cx).to_proto(cx), None).unwrap() + Buffer::from_proto( + ReplicaId::new(1), + Capability::ReadWrite, + base.read(cx).to_proto(cx), + None, + ) + .unwrap() }); base.update(cx, |_buffer, cx| { cx.subscribe(&base_replica, |this, _, event, cx| { @@ -3108,7 +3121,8 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { .background_executor() .block(base_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = - Buffer::from_proto(i as ReplicaId, Capability::ReadWrite, state, None).unwrap(); + Buffer::from_proto(ReplicaId::new(i as u16), Capability::ReadWrite, state, None) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -3133,9 +3147,9 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { }); buffers.push(buffer); - replica_ids.push(i as ReplicaId); - network.lock().add_peer(i as ReplicaId); - log::info!("Adding initial peer with replica id {}", i); + replica_ids.push(ReplicaId::new(i as u16)); + network.lock().add_peer(ReplicaId::new(i as u16)); + log::info!("Adding initial peer with replica id {:?}", replica_ids[i]); } log::info!("initial text: {:?}", base_text); @@ -3155,14 +3169,14 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { buffer.start_transaction_at(now); buffer.randomly_edit(&mut rng, 5, cx); buffer.end_transaction_at(now, cx); - log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text()); + log::info!("buffer {:?} text: {:?}", buffer.replica_id(), buffer.text()); }); mutation_count -= 1; } 30..=39 if mutation_count != 0 => { buffer.update(cx, |buffer, cx| { if rng.random_bool(0.2) { - log::info!("peer {} clearing active selections", replica_id); + log::info!("peer {:?} clearing active selections", replica_id); active_selections.remove(&replica_id); buffer.remove_active_selections(cx); } else { @@ -3179,7 +3193,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { } let selections: Arc<[Selection]> = selections.into(); log::info!( - "peer {} setting active selections: {:?}", + "peer {:?} setting active selections: {:?}", replica_id, selections ); @@ -3189,7 +3203,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { }); mutation_count -= 1; } - 40..=49 
if mutation_count != 0 && replica_id == 0 => { + 40..=49 if mutation_count != 0 && replica_id == ReplicaId::REMOTE_SERVER => { let entry_count = rng.random_range(1..=5); buffer.update(cx, |buffer, cx| { let diagnostics = DiagnosticSet::new( @@ -3207,7 +3221,11 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { }), buffer, ); - log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics); + log::info!( + "peer {:?} setting diagnostics: {:?}", + replica_id, + diagnostics + ); buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx); }); mutation_count -= 1; @@ -3217,12 +3235,13 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { let old_buffer_ops = cx .background_executor() .block(buffer.read(cx).serialize_ops(None, cx)); - let new_replica_id = (0..=replica_ids.len() as ReplicaId) + let new_replica_id = (0..=replica_ids.len() as u16) + .map(ReplicaId::new) .filter(|replica_id| *replica_id != buffer.read(cx).replica_id()) .choose(&mut rng) .unwrap(); log::info!( - "Adding new replica {} (replicating from {})", + "Adding new replica {:?} (replicating from {:?})", new_replica_id, replica_id ); @@ -3241,7 +3260,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { cx, ); log::info!( - "New replica {} text: {:?}", + "New replica {:?} text: {:?}", new_buffer.replica_id(), new_buffer.text() ); @@ -3264,7 +3283,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { })); network.lock().replicate(replica_id, new_replica_id); - if new_replica_id as usize == replica_ids.len() { + if new_replica_id.as_u16() as usize == replica_ids.len() { replica_ids.push(new_replica_id); } else { let new_buffer = new_buffer.take().unwrap(); @@ -3276,7 +3295,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { .map(|op| proto::deserialize_operation(op).unwrap()); if ops.len() > 0 { log::info!( - "peer {} (version: {:?}) applying {} ops from the network. {:?}", + "peer {:?} (version: {:?}) applying {} ops from the network. {:?}", new_replica_id, buffer.read(cx).version(), ops.len(), @@ -3287,13 +3306,13 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { }); } } - buffers[new_replica_id as usize] = new_buffer; + buffers[new_replica_id.as_u16() as usize] = new_buffer; } } 60..=69 if mutation_count != 0 => { buffer.update(cx, |buffer, cx| { buffer.randomly_undo_redo(&mut rng, cx); - log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text()); + log::info!("buffer {:?} text: {:?}", buffer.replica_id(), buffer.text()); }); mutation_count -= 1; } @@ -3305,7 +3324,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { .map(|op| proto::deserialize_operation(op).unwrap()); if ops.len() > 0 { log::info!( - "peer {} (version: {:?}) applying {} ops from the network. {:?}", + "peer {:?} (version: {:?}) applying {} ops from the network. 
{:?}", replica_id, buffer.read(cx).version(), ops.len(), @@ -3335,13 +3354,13 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { assert_eq!( buffer.version(), first_buffer.version(), - "Replica {} version != Replica 0 version", + "Replica {:?} version != Replica 0 version", buffer.replica_id() ); assert_eq!( buffer.text(), first_buffer.text(), - "Replica {} text != Replica 0 text", + "Replica {:?} text != Replica 0 text", buffer.replica_id() ); assert_eq!( @@ -3351,7 +3370,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { first_buffer .diagnostics_in_range::<_, usize>(0..first_buffer.len(), false) .collect::>(), - "Replica {} diagnostics != Replica 0 diagnostics", + "Replica {:?} diagnostics != Replica 0 diagnostics", buffer.replica_id() ); } @@ -3370,7 +3389,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { assert_eq!( actual_remote_selections, expected_remote_selections, - "Replica {} remote selections != expected selections", + "Replica {:?} remote selections != expected selections", buffer.replica_id() ); } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index c16e90bd0f6c02fe49e2845ab24f8d767b32d82b..e3fb6733dd5176906f0a9a9d208305d67470ba15 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -670,6 +670,16 @@ pub struct CodeLabel { pub filter_range: Range, } +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct CodeLabelBuilder { + /// The text to display. + text: String, + /// Syntax highlighting runs. + runs: Vec<(Range, HighlightId)>, + /// The portion of the text that should be used in fuzzy filtering. + filter_range: Range, +} + #[derive(Clone, Deserialize, JsonSchema)] pub struct LanguageConfig { /// Human-readable name of the language. 
@@ -2223,6 +2233,34 @@ impl Grammar { } } +impl CodeLabelBuilder { + pub fn respan_filter_range(&mut self, filter_text: Option<&str>) { + self.filter_range = filter_text + .and_then(|filter| self.text.find(filter).map(|ix| ix..ix + filter.len())) + .unwrap_or(0..self.text.len()); + } + + pub fn push_str(&mut self, text: &str, highlight: Option) { + let start_ix = self.text.len(); + self.text.push_str(text); + if let Some(highlight) = highlight { + let end_ix = self.text.len(); + self.runs.push((start_ix..end_ix, highlight)); + } + } + + pub fn build(mut self) -> CodeLabel { + if self.filter_range.end == 0 { + self.respan_filter_range(None); + } + CodeLabel { + text: self.text, + runs: self.runs, + filter_range: self.filter_range, + } + } +} + impl CodeLabel { pub fn fallback_for_completion( item: &lsp::CompletionItem, @@ -2286,25 +2324,39 @@ impl CodeLabel { } pub fn plain(text: String, filter_text: Option<&str>) -> Self { + Self::filtered(text, filter_text, Vec::new()) + } + + pub fn filtered( + text: String, + filter_text: Option<&str>, + runs: Vec<(Range, HighlightId)>, + ) -> Self { let filter_range = filter_text .and_then(|filter| text.find(filter).map(|ix| ix..ix + filter.len())) .unwrap_or(0..text.len()); + Self::new(text, filter_range, runs) + } + + pub fn new( + text: String, + filter_range: Range, + runs: Vec<(Range, HighlightId)>, + ) -> Self { + assert!( + text.get(filter_range.clone()).is_some(), + "invalid filter range" + ); + runs.iter().for_each(|(range, _)| { + assert!(text.get(range.clone()).is_some(), "invalid run range"); + }); Self { - runs: Vec::new(), + runs, filter_range, text, } } - pub fn push_str(&mut self, text: &str, highlight: Option) { - let start_ix = self.text.len(); - self.text.push_str(text); - let end_ix = self.text.len(); - if let Some(highlight) = highlight { - self.runs.push((start_ix..end_ix, highlight)); - } - } - pub fn text(&self) -> &str { self.text.as_str() } @@ -2537,12 +2589,76 @@ pub fn range_from_lsp(range: lsp::Range) -> Range> { let mut start = point_from_lsp(range.start); let mut end = point_from_lsp(range.end); if start > end { - log::warn!("range_from_lsp called with inverted range {start:?}-{end:?}"); + // We debug instead of warn so that this is not logged by default unless explicitly requested. + // Using warn would write to the log file, and since we receive an enormous amount of + // range_from_lsp calls (especially during completions), that can hang the main thread. + // + // See issue #36223. + zlog::debug!("range_from_lsp called with inverted range {start:?}-{end:?}"); mem::swap(&mut start, &mut end); } start..end } +#[doc(hidden)] +#[cfg(any(test, feature = "test-support"))] +pub fn rust_lang() -> Arc { + use std::borrow::Cow; + + let language = Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + line_comments: vec!["// ".into(), "/// ".into(), "//! 
".into()], + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_queries(LanguageQueries { + indents: Some(Cow::from( + r#" +[ + ((where_clause) _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) +] @indent + +(_ "[" "]" @end) @indent +(_ "<" ">" @end) @indent +(_ "{" "}" @end) @indent +(_ "(" ")" @end) @indent"#, + )), + brackets: Some(Cow::from( + r#" +("(" @open ")" @close) +("[" @open "]" @close) +("{" @open "}" @close) +("<" @open ">" @close) +("\"" @open "\"" @close) +(closure_parameters "|" @open "|" @close)"#, + )), + text_objects: Some(Cow::from( + r#" +(function_item + body: (_ + "{" + (_)* @function.inside + "}" )) @function.around + "#, + )), + ..LanguageQueries::default() + }) + .expect("Could not parse queries"); + Arc::new(language) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 689a3f90f5a1d8826cd68ccc26bc7427c0aab29d..b6c65ede0596fe96ba1a750bcbcbcb971a3be617 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -13,9 +13,9 @@ use itertools::{Either, Itertools}; pub use settings::{ CompletionSettingsContent, EditPredictionProvider, EditPredictionsMode, FormatOnSave, Formatter, FormatterList, InlayHintKind, LanguageSettingsContent, LspInsertMode, - RewrapBehavior, SelectedFormatter, ShowWhitespaceSetting, SoftWrap, WordsCompletionMode, + RewrapBehavior, ShowWhitespaceSetting, SoftWrap, WordsCompletionMode, }; -use settings::{ExtendingVec, Settings, SettingsContent, SettingsLocation, SettingsStore}; +use settings::{Settings, SettingsLocation, SettingsStore}; use shellexpand; use std::{borrow::Cow, num::NonZeroU32, path::Path, sync::Arc}; @@ -96,7 +96,7 @@ pub struct LanguageSettings { /// when saving it. pub ensure_final_newline_on_save: bool, /// How to perform a buffer format. - pub formatter: settings::SelectedFormatter, + pub formatter: settings::FormatterList, /// Zed's Prettier integration settings. pub prettier: PrettierSettings, /// Whether to automatically close JSX tags. @@ -142,6 +142,8 @@ pub struct LanguageSettings { pub auto_indent_on_paste: bool, /// Controls how the editor handles the autoclosed characters. pub always_treat_brackets_as_autoclosed: bool, + /// Which code actions to run on save + pub code_actions_on_format: HashMap, /// Whether to perform linked edits pub linked_edits: bool, /// Task configuration for this language. 
@@ -576,6 +578,7 @@ impl settings::Settings for AllLanguageSettings { always_treat_brackets_as_autoclosed: settings .always_treat_brackets_as_autoclosed .unwrap(), + code_actions_on_format: settings.code_actions_on_format.unwrap(), linked_edits: settings.linked_edits.unwrap(), tasks: LanguageTaskSettings { variables: tasks.variables.unwrap_or_default(), @@ -639,7 +642,7 @@ impl settings::Settings for AllLanguageSettings { let mut file_types: FxHashMap, GlobSet> = FxHashMap::default(); - for (language, patterns) in &all_languages.file_types { + for (language, patterns) in all_languages.file_types.iter().flatten() { let mut builder = GlobSetBuilder::new(); for pattern in &patterns.0 { @@ -676,130 +679,6 @@ impl settings::Settings for AllLanguageSettings { file_types, } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - let d = &mut current.project.all_languages.defaults; - if let Some(size) = vscode - .read_value("editor.tabSize") - .and_then(|v| v.as_u64()) - .and_then(|n| NonZeroU32::new(n as u32)) - { - d.tab_size = Some(size); - } - if let Some(v) = vscode.read_bool("editor.insertSpaces") { - d.hard_tabs = Some(!v); - } - - vscode.enum_setting("editor.wordWrap", &mut d.soft_wrap, |s| match s { - "on" => Some(SoftWrap::EditorWidth), - "wordWrapColumn" => Some(SoftWrap::PreferLine), - "bounded" => Some(SoftWrap::Bounded), - "off" => Some(SoftWrap::None), - _ => None, - }); - vscode.u32_setting("editor.wordWrapColumn", &mut d.preferred_line_length); - - if let Some(arr) = vscode - .read_value("editor.rulers") - .and_then(|v| v.as_array()) - .map(|v| v.iter().map(|n| n.as_u64().map(|n| n as usize)).collect()) - { - d.wrap_guides = arr; - } - if let Some(b) = vscode.read_bool("editor.guides.indentation") { - d.indent_guides.get_or_insert_default().enabled = Some(b); - } - - if let Some(b) = vscode.read_bool("editor.guides.formatOnSave") { - d.format_on_save = Some(if b { - FormatOnSave::On - } else { - FormatOnSave::Off - }); - } - vscode.bool_setting( - "editor.trimAutoWhitespace", - &mut d.remove_trailing_whitespace_on_save, - ); - vscode.bool_setting( - "files.insertFinalNewline", - &mut d.ensure_final_newline_on_save, - ); - vscode.bool_setting("editor.inlineSuggest.enabled", &mut d.show_edit_predictions); - vscode.enum_setting("editor.renderWhitespace", &mut d.show_whitespaces, |s| { - Some(match s { - "boundary" => ShowWhitespaceSetting::Boundary, - "trailing" => ShowWhitespaceSetting::Trailing, - "selection" => ShowWhitespaceSetting::Selection, - "all" => ShowWhitespaceSetting::All, - _ => ShowWhitespaceSetting::None, - }) - }); - vscode.enum_setting( - "editor.autoSurround", - &mut d.use_auto_surround, - |s| match s { - "languageDefined" | "quotes" | "brackets" => Some(true), - "never" => Some(false), - _ => None, - }, - ); - vscode.bool_setting("editor.formatOnType", &mut d.use_on_type_format); - vscode.bool_setting("editor.linkedEditing", &mut d.linked_edits); - vscode.bool_setting("editor.formatOnPaste", &mut d.auto_indent_on_paste); - vscode.bool_setting( - "editor.suggestOnTriggerCharacters", - &mut d.show_completions_on_input, - ); - if let Some(b) = vscode.read_bool("editor.suggest.showWords") { - let mode = if b { - WordsCompletionMode::Enabled - } else { - WordsCompletionMode::Disabled - }; - d.completions.get_or_insert_default().words = Some(mode); - } - // TODO: pull ^ out into helper and reuse for per-language settings - - // vscodes file association map is inverted from ours, so we flip the mapping before merging - let mut 
associations: HashMap, ExtendingVec> = HashMap::default(); - if let Some(map) = vscode - .read_value("files.associations") - .and_then(|v| v.as_object()) - { - for (k, v) in map { - let Some(v) = v.as_str() else { continue }; - associations.entry(v.into()).or_default().0.push(k.clone()); - } - } - - // TODO: do we want to merge imported globs per filetype? for now we'll just replace - current - .project - .all_languages - .file_types - .extend(associations); - - // cursor global ignore list applies to cursor-tab, so transfer it to edit_predictions.disabled_globs - if let Some(disabled_globs) = vscode - .read_value("cursor.general.globalCursorIgnoreList") - .and_then(|v| v.as_array()) - { - current - .project - .all_languages - .edit_predictions - .get_or_insert_default() - .disabled_globs - .get_or_insert_default() - .extend( - disabled_globs - .iter() - .filter_map(|glob| glob.as_str()) - .map(|s| s.to_string()), - ); - } - } } #[derive(Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index bc85b10859632fc3e2cf61c663b7159a023f4f3a..5c8200b84002c104ce1e2c3d1a42aff5876bd1ee 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -39,14 +39,14 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { crate::Operation::Buffer(text::Operation::Undo(undo)) => { proto::operation::Variant::Undo(proto::operation::Undo { - replica_id: undo.timestamp.replica_id as u32, + replica_id: undo.timestamp.replica_id.as_u16() as u32, lamport_timestamp: undo.timestamp.value, version: serialize_version(&undo.version), counts: undo .counts .iter() .map(|(edit_id, count)| proto::UndoCount { - replica_id: edit_id.replica_id as u32, + replica_id: edit_id.replica_id.as_u16() as u32, lamport_timestamp: edit_id.value, count: *count, }) @@ -60,7 +60,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { lamport_timestamp, cursor_shape, } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { - replica_id: lamport_timestamp.replica_id as u32, + replica_id: lamport_timestamp.replica_id.as_u16() as u32, lamport_timestamp: lamport_timestamp.value, selections: serialize_selections(selections), line_mode: *line_mode, @@ -72,7 +72,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { server_id, diagnostics, } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { - replica_id: lamport_timestamp.replica_id as u32, + replica_id: lamport_timestamp.replica_id.as_u16() as u32, lamport_timestamp: lamport_timestamp.value, server_id: server_id.0 as u64, diagnostics: serialize_diagnostics(diagnostics.iter()), @@ -84,7 +84,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { server_id, } => proto::operation::Variant::UpdateCompletionTriggers( proto::operation::UpdateCompletionTriggers { - replica_id: lamport_timestamp.replica_id as u32, + replica_id: lamport_timestamp.replica_id.as_u16() as u32, lamport_timestamp: lamport_timestamp.value, triggers: triggers.clone(), language_server_id: server_id.to_proto(), @@ -95,7 +95,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { line_ending, lamport_timestamp, } => proto::operation::Variant::UpdateLineEnding(proto::operation::UpdateLineEnding { - replica_id: lamport_timestamp.replica_id as u32, + replica_id: lamport_timestamp.replica_id.as_u16() as u32, lamport_timestamp: lamport_timestamp.value, line_ending: 
serialize_line_ending(*line_ending) as i32, }), @@ -106,7 +106,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { /// Serializes an [`EditOperation`] to be sent over RPC. pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit { proto::operation::Edit { - replica_id: operation.timestamp.replica_id as u32, + replica_id: operation.timestamp.replica_id.as_u16() as u32, lamport_timestamp: operation.timestamp.value, version: serialize_version(&operation.version), ranges: operation.ranges.iter().map(serialize_range).collect(), @@ -123,12 +123,12 @@ pub fn serialize_undo_map_entry( (edit_id, counts): (&clock::Lamport, &[(clock::Lamport, u32)]), ) -> proto::UndoMapEntry { proto::UndoMapEntry { - replica_id: edit_id.replica_id as u32, + replica_id: edit_id.replica_id.as_u16() as u32, local_timestamp: edit_id.value, counts: counts .iter() .map(|(undo_id, count)| proto::UndoCount { - replica_id: undo_id.replica_id as u32, + replica_id: undo_id.replica_id.as_u16() as u32, lamport_timestamp: undo_id.value, count: *count, }) @@ -246,7 +246,7 @@ pub fn serialize_diagnostics<'a>( /// Serializes an [`Anchor`] to be sent over RPC. pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { proto::Anchor { - replica_id: anchor.timestamp.replica_id as u32, + replica_id: anchor.timestamp.replica_id.as_u16() as u32, timestamp: anchor.timestamp.value, offset: anchor.offset as u64, bias: match anchor.bias { @@ -283,7 +283,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { crate::Operation::Buffer(text::Operation::Undo(UndoOperation { timestamp: clock::Lamport { - replica_id: undo.replica_id as ReplicaId, + replica_id: ReplicaId::new(undo.replica_id as u16), value: undo.lamport_timestamp, }, version: deserialize_version(&undo.version), @@ -293,7 +293,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result Result Result { crate::Operation::UpdateDiagnostics { lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, + replica_id: ReplicaId::new(message.replica_id as u16), value: message.lamport_timestamp, }, server_id: LanguageServerId(message.server_id as usize), @@ -344,7 +344,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result Result { crate::Operation::UpdateLineEnding { lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, + replica_id: ReplicaId::new(message.replica_id as u16), value: message.lamport_timestamp, }, line_ending: deserialize_line_ending( @@ -370,7 +370,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result EditOperation { EditOperation { timestamp: clock::Lamport { - replica_id: edit.replica_id as ReplicaId, + replica_id: ReplicaId::new(edit.replica_id as u16), value: edit.lamport_timestamp, }, version: deserialize_version(&edit.version), @@ -385,7 +385,7 @@ pub fn deserialize_undo_map_entry( ) -> (clock::Lamport, Vec<(clock::Lamport, u32)>) { ( clock::Lamport { - replica_id: entry.replica_id as u16, + replica_id: ReplicaId::new(entry.replica_id as u16), value: entry.local_timestamp, }, entry @@ -394,7 +394,7 @@ pub fn deserialize_undo_map_entry( .map(|undo_count| { ( clock::Lamport { - replica_id: undo_count.replica_id as u16, + replica_id: ReplicaId::new(undo_count.replica_id as u16), value: undo_count.lamport_timestamp, }, undo_count.count, @@ -480,7 +480,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { }; Some(Anchor { timestamp: clock::Lamport { - replica_id: anchor.replica_id as 
ReplicaId, + replica_id: ReplicaId::new(anchor.replica_id as u16), value: anchor.timestamp, }, offset: anchor.offset as usize, @@ -524,7 +524,7 @@ pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option Result proto::LamportTimestamp { proto::LamportTimestamp { - replica_id: timestamp.replica_id as u32, + replica_id: timestamp.replica_id.as_u16() as u32, value: timestamp.value, } } @@ -567,7 +567,7 @@ pub fn serialize_timestamp(timestamp: clock::Lamport) -> proto::LamportTimestamp /// Deserializes a [`clock::Lamport`] timestamp from the RPC representation. pub fn deserialize_timestamp(timestamp: proto::LamportTimestamp) -> clock::Lamport { clock::Lamport { - replica_id: timestamp.replica_id as ReplicaId, + replica_id: ReplicaId::new(timestamp.replica_id as u16), value: timestamp.value, } } @@ -590,7 +590,7 @@ pub fn deserialize_version(message: &[proto::VectorClockEntry]) -> clock::Global let mut version = clock::Global::new(); for entry in message { version.observe(clock::Lamport { - replica_id: entry.replica_id as ReplicaId, + replica_id: ReplicaId::new(entry.replica_id as u16), value: entry.timestamp, }); } @@ -602,7 +602,7 @@ pub fn serialize_version(version: &clock::Global) -> Vec (Buf .now_or_never() .unwrap() .unwrap(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); let mut mutated_syntax_map = SyntaxMap::new(&buffer); mutated_syntax_map.set_language_registry(registry.clone()); diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index d3466307f368e7008eedbc8881aa78ab854bc08b..2896d4827c5e16047a471138122ef0256a24480e 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -98,6 +98,7 @@ pub trait ToolchainLister: Send + Sync + 'static { worktree_root: PathBuf, subroot_relative_path: Arc, project_env: Option>, + fs: &dyn Fs, ) -> ToolchainList; /// Given a user-created toolchain, resolve lister-specific details. @@ -106,14 +107,11 @@ pub trait ToolchainLister: Send + Sync + 'static { &self, path: PathBuf, project_env: Option>, + fs: &dyn Fs, ) -> anyhow::Result; - async fn activation_script( - &self, - toolchain: &Toolchain, - shell: ShellKind, - fs: &dyn Fs, - ) -> Vec; + fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind) -> Vec; + /// Returns various "static" bits of information about this toolchain lister. This function should be pure. 
fn meta(&self) -> ToolchainMetadata; } diff --git a/crates/language_extension/Cargo.toml b/crates/language_extension/Cargo.toml index 565563611e7065dd2aa20fec64f73f81e51f0d48..de5af2246c9dfb2dd385875894a694da5e2a9c23 100644 --- a/crates/language_extension/Cargo.toml +++ b/crates/language_extension/Cargo.toml @@ -26,4 +26,3 @@ project.workspace = true serde.workspace = true serde_json.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 407b18314f025b7d3b3f43b0735fd0265f6eb9c4..01b726748649e29b4fe69ce26df5564819894985 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -463,11 +463,7 @@ fn build_code_label( let filter_range = label.filter_range.clone(); text.get(filter_range.clone())?; - Some(CodeLabel { - text, - runs, - filter_range, - }) + Some(CodeLabel::new(text, filter_range, runs)) } fn lsp_completion_to_extension(value: lsp::CompletionItem) -> extension::Completion { @@ -615,11 +611,7 @@ fn test_build_code_label() { assert_eq!( label, - CodeLabel { - text: label_text, - runs: label_runs, - filter_range: label.filter_range.clone() - } + CodeLabel::new(label_text, label.filter_range.clone(), label_runs) ) } diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index a85283cf121bc10a82e1022071d6a136dd5716f5..f572561f6a78b3cf2d9bfc2f7272895836f11614 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,7 +32,6 @@ image.workspace = true log.workspace = true parking_lot.workspace = true proto.workspace = true -schemars.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true @@ -40,7 +39,6 @@ smol.workspace = true telemetry_events.workspace = true thiserror.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 38f2b0959072599900cb8a13c16f4e2f8e9c55db..24f9b84afcfa7b9a40b4a1b7684e9a9b036a5a85 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -19,8 +19,7 @@ use http_client::{StatusCode, http}; use icons::IconName; use open_router::OpenRouterError; use parking_lot::Mutex; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use serde::{Deserialize, Serialize}; pub use settings::LanguageModelCacheConfiguration; use std::ops::{Add, Sub}; use std::str::FromStr; @@ -669,11 +668,6 @@ pub trait LanguageModelExt: LanguageModel { } impl LanguageModelExt for dyn LanguageModel {} -pub trait LanguageModelTool: 'static + DeserializeOwned + JsonSchema { - fn name() -> String; - fn description() -> String; -} - /// An error that occurred when trying to authenticate the language model provider. #[derive(Debug, Error)] pub enum AuthenticateError { diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 2902e9ae5aaa45ea4607317bee12a3f91abbbe55..d0f7789e40dd71ada8dcae2712cefcef966ad52f 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -77,7 +77,7 @@ impl std::fmt::Debug for LanguageModelImage { } /// Anthropic wants uploaded images to be smaller than this in both dimensions. 
-const ANTHROPIC_SIZE_LIMT: f32 = 1568.; +const ANTHROPIC_SIZE_LIMIT: f32 = 1568.; impl LanguageModelImage { pub fn empty() -> Self { @@ -112,13 +112,13 @@ impl LanguageModelImage { let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32)); let base64_image = { - if image_size.width.0 > ANTHROPIC_SIZE_LIMT as i32 - || image_size.height.0 > ANTHROPIC_SIZE_LIMT as i32 + if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32 + || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32 { let new_bounds = ObjectFit::ScaleDown.get_bounds( gpui::Bounds { origin: point(px(0.0), px(0.0)), - size: size(px(ANTHROPIC_SIZE_LIMT), px(ANTHROPIC_SIZE_LIMT)), + size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)), }, image_size, ); diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 28cfe69d96d15f26c567e7e61e5ebb5f329371f3..7d4cd3a618d725429a3979951f04445b5a1fc8eb 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -58,7 +58,6 @@ ui.workspace = true ui_input.workspace = true util.workspace = true vercel = { workspace = true, features = ["schemars"] } -workspace-hack.workspace = true x_ai = { workspace = true, features = ["schemars"] } zed_env_vars.workspace = true diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 2897b836b12d7bcaabfe3841a9f0c77ba6ab497e..9eb96cb79815bdbdc06c58ca4156e68e2962b0a4 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -21,7 +21,7 @@ use std::str::FromStr; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::{EnvVar, env_var}; @@ -823,7 +823,7 @@ fn convert_usage(usage: &Usage) -> language_model::TokenUsage { } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, target_agent: ConfigurationViewTargetAgent, @@ -862,7 +862,7 @@ impl ConfigurationView { })); Self { - api_key_editor: cx.new(|cx| SingleLineInput::new(window, cx, Self::PLACEHOLDER_TEXT)), + api_key_editor: cx.new(|cx| InputField::new(window, cx, Self::PLACEHOLDER_TEXT)), state, load_credentials_task, target_agent, diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 47dd565f6af64d5ddb1d19cd6ed95ceeffd57cc9..f3e265e925822b2de7950af9fbef5b121da3ed82 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -42,7 +42,7 @@ use settings::{BedrockAvailableModel as AvailableModel, Settings, SettingsStore} use smol::lock::OnceCell; use strum::{EnumIter, IntoEnumIterator, IntoStaticStr}; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::ResultExt; use crate::AllLanguageModelSettings; @@ -1006,10 +1006,10 @@ pub fn map_to_language_model_completion_events( } struct ConfigurationView { - access_key_id_editor: Entity, - secret_access_key_editor: Entity, - session_token_editor: Entity, - region_editor: Entity, + access_key_id_editor: Entity, + secret_access_key_editor: Entity, + session_token_editor: Entity, + region_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -1047,20 +1047,19 @@ impl ConfigurationView { Self { access_key_id_editor: 
cx.new(|cx| { - SingleLineInput::new(window, cx, Self::PLACEHOLDER_ACCESS_KEY_ID_TEXT) + InputField::new(window, cx, Self::PLACEHOLDER_ACCESS_KEY_ID_TEXT) .label("Access Key ID") }), secret_access_key_editor: cx.new(|cx| { - SingleLineInput::new(window, cx, Self::PLACEHOLDER_SECRET_ACCESS_KEY_TEXT) + InputField::new(window, cx, Self::PLACEHOLDER_SECRET_ACCESS_KEY_TEXT) .label("Secret Access Key") }), session_token_editor: cx.new(|cx| { - SingleLineInput::new(window, cx, Self::PLACEHOLDER_SESSION_TOKEN_TEXT) + InputField::new(window, cx, Self::PLACEHOLDER_SESSION_TOKEN_TEXT) .label("Session Token (Optional)") }), - region_editor: cx.new(|cx| { - SingleLineInput::new(window, cx, Self::PLACEHOLDER_REGION).label("Region") - }), + region_editor: cx + .new(|cx| InputField::new(window, cx, Self::PLACEHOLDER_REGION).label("Region")), state, load_credentials_task, } diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 1c21e566d68df3191932a860b6ed06ea603083a6..d85533ecce63441fe5aaa7a382bf04af79992f63 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -810,15 +810,11 @@ impl LanguageModel for CloudLanguageModel { } cloud_llm_client::LanguageModelProvider::OpenAi => { let client = self.client.clone(); - let model = match open_ai::Model::from_id(&self.model.id.0) { - Ok(model) => model, - Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(), - }; let request = into_open_ai( request, - model.id(), - model.supports_parallel_tool_calls(), - model.supports_prompt_cache_key(), + &self.model.id.0, + self.model.supports_parallel_tool_calls, + true, None, None, ); @@ -860,15 +856,11 @@ impl LanguageModel for CloudLanguageModel { } cloud_llm_client::LanguageModelProvider::XAi => { let client = self.client.clone(); - let model = match x_ai::Model::from_id(&self.model.id.0) { - Ok(model) => model, - Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(), - }; let request = into_open_ai( request, - model.id(), - model.supports_parallel_tool_calls(), - model.supports_prompt_cache_key(), + &self.model.id.0, + self.model.supports_parallel_tool_calls, + false, None, None, ); diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 64a2c65f0d2bcc4240e980922930e24240ce3249..1941bd903951420266ba5c4609cb34c15130224e 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -15,6 +15,7 @@ use futures::future::BoxFuture; use futures::stream::BoxStream; use futures::{FutureExt, Stream, StreamExt}; use gpui::{Action, AnyView, App, AsyncApp, Entity, Render, Subscription, Task, svg}; +use http_client::StatusCode; use language::language_settings::all_language_settings; use language_model::{ AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, @@ -306,6 +307,23 @@ impl LanguageModel for CopilotChatLanguageModel { | CompletionIntent::EditFile => false, }); + if self.model.supports_response() { + let responses_request = into_copilot_responses(&self.model, request); + let request_limiter = self.request_limiter.clone(); + let future = cx.spawn(async move |cx| { + let request = + CopilotChat::stream_response(responses_request, is_user_initiated, cx.clone()); + request_limiter + .stream(async move { + let stream = request.await?; + let mapper = CopilotResponsesEventMapper::new(); + Ok(mapper.map_stream(stream).boxed()) 
+ }) + .await + }); + return async move { Ok(future.await?.boxed()) }.boxed(); + } + let copilot_request = match into_copilot_chat(&self.model, request) { Ok(request) => request, Err(err) => return futures::future::ready(Err(err.into())).boxed(), @@ -380,11 +398,9 @@ pub fn map_to_language_model_completion_events( events.push(Ok(LanguageModelCompletionEvent::Text(content))); } - for tool_call in &delta.tool_calls { - let entry = state - .tool_calls_by_index - .entry(tool_call.index) - .or_default(); + for (index, tool_call) in delta.tool_calls.iter().enumerate() { + let tool_index = tool_call.index.unwrap_or(index); + let entry = state.tool_calls_by_index.entry(tool_index).or_default(); if let Some(tool_id) = tool_call.id.clone() { entry.id = tool_id; @@ -433,11 +449,11 @@ pub fn map_to_language_model_completion_events( match arguments { Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { - id: tool_call.id.clone().into(), + id: tool_call.id.into(), name: tool_call.name.as_str().into(), is_input_complete: true, input, - raw_input: tool_call.arguments.clone(), + raw_input: tool_call.arguments, }, )), Err(error) => Ok( @@ -477,6 +493,191 @@ pub fn map_to_language_model_completion_events( .flat_map(futures::stream::iter) } +pub struct CopilotResponsesEventMapper { + pending_stop_reason: Option, +} + +impl CopilotResponsesEventMapper { + pub fn new() -> Self { + Self { + pending_stop_reason: None, + } + } + + pub fn map_stream( + mut self, + events: Pin>>>, + ) -> impl Stream> + { + events.flat_map(move |event| { + futures::stream::iter(match event { + Ok(event) => self.map_event(event), + Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))], + }) + }) + } + + fn map_event( + &mut self, + event: copilot::copilot_responses::StreamEvent, + ) -> Vec> { + match event { + copilot::copilot_responses::StreamEvent::OutputItemAdded { item, .. } => match item { + copilot::copilot_responses::ResponseOutputItem::Message { id, .. } => { + vec![Ok(LanguageModelCompletionEvent::StartMessage { + message_id: id, + })] + } + _ => Vec::new(), + }, + + copilot::copilot_responses::StreamEvent::OutputTextDelta { delta, .. } => { + if delta.is_empty() { + Vec::new() + } else { + vec![Ok(LanguageModelCompletionEvent::Text(delta))] + } + } + + copilot::copilot_responses::StreamEvent::OutputItemDone { item, .. } => match item { + copilot::copilot_responses::ResponseOutputItem::Message { .. } => Vec::new(), + copilot::copilot_responses::ResponseOutputItem::FunctionCall { + call_id, + name, + arguments, + .. + } => { + let mut events = Vec::new(); + match serde_json::from_str::(&arguments) { + Ok(input) => events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: call_id.into(), + name: name.as_str().into(), + is_input_complete: true, + input, + raw_input: arguments.clone(), + }, + ))), + Err(error) => { + events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError { + id: call_id.into(), + tool_name: name.as_str().into(), + raw_input: arguments.clone().into(), + json_parse_error: error.to_string(), + })) + } + } + // Record that we already emitted a tool-use stop so we can avoid duplicating + // a Stop event on Completed. + self.pending_stop_reason = Some(StopReason::ToolUse); + events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse))); + events + } + copilot::copilot_responses::ResponseOutputItem::Reasoning { + summary, + encrypted_content, + .. 
+ } => { + let mut events = Vec::new(); + + if let Some(blocks) = summary { + let mut text = String::new(); + for block in blocks { + text.push_str(&block.text); + } + if !text.is_empty() { + events.push(Ok(LanguageModelCompletionEvent::Thinking { + text, + signature: None, + })); + } + } + + if let Some(data) = encrypted_content { + events.push(Ok(LanguageModelCompletionEvent::RedactedThinking { data })); + } + + events + } + }, + + copilot::copilot_responses::StreamEvent::Completed { response } => { + let mut events = Vec::new(); + if let Some(usage) = response.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.input_tokens.unwrap_or(0), + output_tokens: usage.output_tokens.unwrap_or(0), + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + if self.pending_stop_reason.take() != Some(StopReason::ToolUse) { + events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); + } + events + } + + copilot::copilot_responses::StreamEvent::Incomplete { response } => { + let reason = response + .incomplete_details + .as_ref() + .and_then(|details| details.reason.as_ref()); + let stop_reason = match reason { + Some(copilot::copilot_responses::IncompleteReason::MaxOutputTokens) => { + StopReason::MaxTokens + } + Some(copilot::copilot_responses::IncompleteReason::ContentFilter) => { + StopReason::Refusal + } + _ => self + .pending_stop_reason + .take() + .unwrap_or(StopReason::EndTurn), + }; + + let mut events = Vec::new(); + if let Some(usage) = response.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.input_tokens.unwrap_or(0), + output_tokens: usage.output_tokens.unwrap_or(0), + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason))); + events + } + + copilot::copilot_responses::StreamEvent::Failed { response } => { + let provider = PROVIDER_NAME; + let (status_code, message) = match response.error { + Some(error) => { + let status_code = StatusCode::from_str(&error.code) + .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); + (status_code, error.message) + } + None => ( + StatusCode::INTERNAL_SERVER_ERROR, + "response.failed".to_string(), + ), + }; + vec![Err(LanguageModelCompletionError::HttpResponseError { + provider, + status_code, + message, + })] + } + + copilot::copilot_responses::StreamEvent::GenericError { error } => vec![Err( + LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))), + )], + + copilot::copilot_responses::StreamEvent::Created { .. 
} + | copilot::copilot_responses::StreamEvent::Unknown => Vec::new(), + } + } +} + fn into_copilot_chat( model: &copilot::copilot_chat::Model, request: LanguageModelRequest, @@ -635,6 +836,470 @@ fn into_copilot_chat( }) } +fn into_copilot_responses( + model: &copilot::copilot_chat::Model, + request: LanguageModelRequest, +) -> copilot::copilot_responses::Request { + use copilot::copilot_responses as responses; + + let LanguageModelRequest { + thread_id: _, + prompt_id: _, + intent: _, + mode: _, + messages, + tools, + tool_choice, + stop: _, + temperature, + thinking_allowed: _, + } = request; + + let mut input_items: Vec = Vec::new(); + + for message in messages { + match message.role { + Role::User => { + for content in &message.content { + if let MessageContent::ToolResult(tool_result) = content { + let output = if let Some(out) = &tool_result.output { + match out { + serde_json::Value::String(s) => { + responses::ResponseFunctionOutput::Text(s.clone()) + } + serde_json::Value::Null => { + responses::ResponseFunctionOutput::Text(String::new()) + } + other => responses::ResponseFunctionOutput::Text(other.to_string()), + } + } else { + match &tool_result.content { + LanguageModelToolResultContent::Text(text) => { + responses::ResponseFunctionOutput::Text(text.to_string()) + } + LanguageModelToolResultContent::Image(image) => { + if model.supports_vision() { + responses::ResponseFunctionOutput::Content(vec![ + responses::ResponseInputContent::InputImage { + image_url: Some(image.to_base64_url()), + detail: Default::default(), + }, + ]) + } else { + debug_panic!( + "This should be caught at {} level", + tool_result.tool_name + ); + responses::ResponseFunctionOutput::Text( + "[Tool responded with an image, but this model does not support vision]".into(), + ) + } + } + } + }; + + input_items.push(responses::ResponseInputItem::FunctionCallOutput { + call_id: tool_result.tool_use_id.to_string(), + output, + status: None, + }); + } + } + + let mut parts: Vec = Vec::new(); + for content in &message.content { + match content { + MessageContent::Text(text) => { + parts.push(responses::ResponseInputContent::InputText { + text: text.clone(), + }); + } + + MessageContent::Image(image) => { + if model.supports_vision() { + parts.push(responses::ResponseInputContent::InputImage { + image_url: Some(image.to_base64_url()), + detail: Default::default(), + }); + } + } + _ => {} + } + } + + if !parts.is_empty() { + input_items.push(responses::ResponseInputItem::Message { + role: "user".into(), + content: Some(parts), + status: None, + }); + } + } + + Role::Assistant => { + for content in &message.content { + if let MessageContent::ToolUse(tool_use) = content { + input_items.push(responses::ResponseInputItem::FunctionCall { + call_id: tool_use.id.to_string(), + name: tool_use.name.to_string(), + arguments: tool_use.raw_input.clone(), + status: None, + }); + } + } + + for content in &message.content { + if let MessageContent::RedactedThinking(data) = content { + input_items.push(responses::ResponseInputItem::Reasoning { + id: None, + summary: Vec::new(), + encrypted_content: data.clone(), + }); + } + } + + let mut parts: Vec = Vec::new(); + for content in &message.content { + match content { + MessageContent::Text(text) => { + parts.push(responses::ResponseInputContent::OutputText { + text: text.clone(), + }); + } + MessageContent::Image(_) => { + parts.push(responses::ResponseInputContent::OutputText { + text: "[image omitted]".to_string(), + }); + } + _ => {} + } + } + + if !parts.is_empty() { + 
input_items.push(responses::ResponseInputItem::Message { + role: "assistant".into(), + content: Some(parts), + status: Some("completed".into()), + }); + } + } + + Role::System => { + let mut parts: Vec = Vec::new(); + for content in &message.content { + if let MessageContent::Text(text) = content { + parts.push(responses::ResponseInputContent::InputText { + text: text.clone(), + }); + } + } + + if !parts.is_empty() { + input_items.push(responses::ResponseInputItem::Message { + role: "system".into(), + content: Some(parts), + status: None, + }); + } + } + } + } + + let converted_tools: Vec = tools + .into_iter() + .map(|tool| responses::ToolDefinition::Function { + name: tool.name, + description: Some(tool.description), + parameters: Some(tool.input_schema), + strict: None, + }) + .collect(); + + let mapped_tool_choice = tool_choice.map(|choice| match choice { + LanguageModelToolChoice::Auto => responses::ToolChoice::Auto, + LanguageModelToolChoice::Any => responses::ToolChoice::Any, + LanguageModelToolChoice::None => responses::ToolChoice::None, + }); + + responses::Request { + model: model.id().to_string(), + input: input_items, + stream: model.uses_streaming(), + temperature, + tools: converted_tools, + tool_choice: mapped_tool_choice, + reasoning: None, // We would need to add support for setting from user settings. + include: Some(vec![ + copilot::copilot_responses::ResponseIncludable::ReasoningEncryptedContent, + ]), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use copilot::copilot_responses as responses; + use futures::StreamExt; + + fn map_events(events: Vec) -> Vec { + futures::executor::block_on(async { + CopilotResponsesEventMapper::new() + .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok)))) + .collect::>() + .await + .into_iter() + .map(Result::unwrap) + .collect() + }) + } + + #[test] + fn responses_stream_maps_text_and_usage() { + let events = vec![ + responses::StreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: responses::ResponseOutputItem::Message { + id: "msg_1".into(), + role: "assistant".into(), + content: Some(Vec::new()), + }, + }, + responses::StreamEvent::OutputTextDelta { + item_id: "msg_1".into(), + output_index: 0, + delta: "Hello".into(), + }, + responses::StreamEvent::Completed { + response: responses::Response { + usage: Some(responses::ResponseUsage { + input_tokens: Some(5), + output_tokens: Some(3), + total_tokens: Some(8), + }), + ..Default::default() + }, + }, + ]; + + let mapped = map_events(events); + assert!(matches!( + mapped[0], + LanguageModelCompletionEvent::StartMessage { ref message_id } if message_id == "msg_1" + )); + assert!(matches!( + mapped[1], + LanguageModelCompletionEvent::Text(ref text) if text == "Hello" + )); + assert!(matches!( + mapped[2], + LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: 5, + output_tokens: 3, + .. 
+ }) + )); + assert!(matches!( + mapped[3], + LanguageModelCompletionEvent::Stop(StopReason::EndTurn) + )); + } + + #[test] + fn responses_stream_maps_tool_calls() { + let events = vec![responses::StreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: responses::ResponseOutputItem::FunctionCall { + id: Some("fn_1".into()), + call_id: "call_1".into(), + name: "do_it".into(), + arguments: "{\"x\":1}".into(), + status: None, + }, + }]; + + let mapped = map_events(events); + assert!(matches!( + mapped[0], + LanguageModelCompletionEvent::ToolUse(ref use_) if use_.id.to_string() == "call_1" && use_.name.as_ref() == "do_it" + )); + assert!(matches!( + mapped[1], + LanguageModelCompletionEvent::Stop(StopReason::ToolUse) + )); + } + + #[test] + fn responses_stream_handles_json_parse_error() { + let events = vec![responses::StreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: responses::ResponseOutputItem::FunctionCall { + id: Some("fn_1".into()), + call_id: "call_1".into(), + name: "do_it".into(), + arguments: "{not json}".into(), + status: None, + }, + }]; + + let mapped = map_events(events); + assert!(matches!( + mapped[0], + LanguageModelCompletionEvent::ToolUseJsonParseError { ref id, ref tool_name, .. } + if id.to_string() == "call_1" && tool_name.as_ref() == "do_it" + )); + assert!(matches!( + mapped[1], + LanguageModelCompletionEvent::Stop(StopReason::ToolUse) + )); + } + + #[test] + fn responses_stream_maps_reasoning_summary_and_encrypted_content() { + let events = vec![responses::StreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: responses::ResponseOutputItem::Reasoning { + id: "r1".into(), + summary: Some(vec![responses::ResponseReasoningItem { + kind: "summary_text".into(), + text: "Chain".into(), + }]), + encrypted_content: Some("ENC".into()), + }, + }]; + + let mapped = map_events(events); + assert!(matches!( + mapped[0], + LanguageModelCompletionEvent::Thinking { ref text, signature: None } if text == "Chain" + )); + assert!(matches!( + mapped[1], + LanguageModelCompletionEvent::RedactedThinking { ref data } if data == "ENC" + )); + } + + #[test] + fn responses_stream_handles_incomplete_max_tokens() { + let events = vec![responses::StreamEvent::Incomplete { + response: responses::Response { + usage: Some(responses::ResponseUsage { + input_tokens: Some(10), + output_tokens: Some(0), + total_tokens: Some(10), + }), + incomplete_details: Some(responses::IncompleteDetails { + reason: Some(responses::IncompleteReason::MaxOutputTokens), + }), + ..Default::default() + }, + }]; + + let mapped = map_events(events); + assert!(matches!( + mapped[0], + LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: 10, + output_tokens: 0, + .. 
+ }) + )); + assert!(matches!( + mapped[1], + LanguageModelCompletionEvent::Stop(StopReason::MaxTokens) + )); + } + + #[test] + fn responses_stream_handles_incomplete_content_filter() { + let events = vec![responses::StreamEvent::Incomplete { + response: responses::Response { + usage: None, + incomplete_details: Some(responses::IncompleteDetails { + reason: Some(responses::IncompleteReason::ContentFilter), + }), + ..Default::default() + }, + }]; + + let mapped = map_events(events); + assert!(matches!( + mapped.last().unwrap(), + LanguageModelCompletionEvent::Stop(StopReason::Refusal) + )); + } + + #[test] + fn responses_stream_completed_no_duplicate_after_tool_use() { + let events = vec![ + responses::StreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: responses::ResponseOutputItem::FunctionCall { + id: Some("fn_1".into()), + call_id: "call_1".into(), + name: "do_it".into(), + arguments: "{}".into(), + status: None, + }, + }, + responses::StreamEvent::Completed { + response: responses::Response::default(), + }, + ]; + + let mapped = map_events(events); + + let mut stop_count = 0usize; + let mut saw_tool_use_stop = false; + for event in mapped { + if let LanguageModelCompletionEvent::Stop(reason) = event { + stop_count += 1; + if matches!(reason, StopReason::ToolUse) { + saw_tool_use_stop = true; + } + } + } + assert_eq!(stop_count, 1, "should emit exactly one Stop event"); + assert!(saw_tool_use_stop, "Stop reason should be ToolUse"); + } + + #[test] + fn responses_stream_failed_maps_http_response_error() { + let events = vec![responses::StreamEvent::Failed { + response: responses::Response { + error: Some(responses::ResponseError { + code: "429".into(), + message: "too many requests".into(), + }), + ..Default::default() + }, + }]; + + let mapped_results = futures::executor::block_on(async { + CopilotResponsesEventMapper::new() + .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok)))) + .collect::>() + .await + }); + + assert_eq!(mapped_results.len(), 1); + match &mapped_results[0] { + Err(LanguageModelCompletionError::HttpResponseError { + status_code, + message, + .. 
+ }) => { + assert_eq!(*status_code, http_client::StatusCode::TOO_MANY_REQUESTS); + assert_eq!(message, "too many requests"); + } + other => panic!("expected HttpResponseError, got {:?}", other), + } + } +} struct ConfigurationView { copilot_status: Option, state: Entity, diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index ec420bfd83b427701ffa6eb13a9eb6035604f0b1..8784d3805f22974ffa441ecd04ddea4b56be911b 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -20,7 +20,7 @@ use std::str::FromStr; use std::sync::{Arc, LazyLock}; use ui::{Icon, IconName, List, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::{EnvVar, env_var}; @@ -525,7 +525,7 @@ impl DeepSeekEventMapper { } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -533,7 +533,7 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = - cx.new(|cx| SingleLineInput::new(window, cx, "sk-00000000000000000000000000000000")); + cx.new(|cx| InputField::new(window, cx, "sk-00000000000000000000000000000000")); cx.observe(&state, |_, _, cx| { cx.notify(); diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index f6ac364611c0e121115a4bd692893ed8bfa89ab3..a4d1202bee4fc4b2f1e071a815bc2f5887d2457d 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -29,7 +29,7 @@ use std::sync::{ }; use strum::IntoEnumIterator; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::EnvVar; @@ -751,7 +751,7 @@ fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage { } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, target_agent: language_model::ConfigurationViewTargetAgent, load_credentials_task: Option>, @@ -788,7 +788,7 @@ impl ConfigurationView { })); Self { - api_key_editor: cx.new(|cx| SingleLineInput::new(window, cx, "AIzaSy...")), + api_key_editor: cx.new(|cx| InputField::new(window, cx, "AIzaSy...")), target_agent, state, load_credentials_task, diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index ad7bf600d56354ee12e72c9ebc2bfe09f0094da7..66527792ff0b82348457fd28ae04dba60d10de5b 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -20,7 +20,7 @@ use std::str::FromStr; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::{EnvVar, env_var}; @@ -744,8 +744,8 @@ struct RawToolCall { } struct ConfigurationView { - api_key_editor: Entity, - codestral_api_key_editor: Entity, + api_key_editor: Entity, + codestral_api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -753,9 +753,9 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = - cx.new(|cx| SingleLineInput::new(window, 
cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); + cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); let codestral_api_key_editor = - cx.new(|cx| SingleLineInput::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); + cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); cx.observe(&state, |_, _, cx| { cx.notify(); diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index a25ecbe01aa659817b41cac54d76871b4742ea66..2150966c1af0fdb1bdcc028cba67bcb7b7cbf89f 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -23,7 +23,7 @@ use std::sync::LazyLock; use std::sync::atomic::{AtomicU64, Ordering}; use std::{collections::HashMap, sync::Arc}; use ui::{ButtonLike, ElevationIndex, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use zed_env_vars::{EnvVar, env_var}; use crate::AllLanguageModelSettings; @@ -623,18 +623,17 @@ fn map_to_language_model_completion_events( } struct ConfigurationView { - api_key_editor: Entity, - api_url_editor: Entity, + api_key_editor: Entity, + api_url_editor: Entity, state: Entity, } impl ConfigurationView { pub fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { - let api_key_editor = - cx.new(|cx| SingleLineInput::new(window, cx, "63e02e...").label("API key")); + let api_key_editor = cx.new(|cx| InputField::new(window, cx, "63e02e...").label("API key")); let api_url_editor = cx.new(|cx| { - let input = SingleLineInput::new(window, cx, OLLAMA_API_URL).label("API URL"); + let input = InputField::new(window, cx, OLLAMA_API_URL).label("API URL"); input.set_text(OllamaLanguageModelProvider::api_url(cx), window, cx); input }); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 3eaaa8b58598328e6e843b1f86b8f4e5cbd04c1e..6c3f063c1111f31a37325f0767a14e8533c1b23f 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -21,7 +21,7 @@ use std::str::FromStr as _; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; use ui::{ElevationIndex, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::{EnvVar, env_var}; @@ -675,7 +675,7 @@ pub fn count_open_ai_tokens( } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -683,7 +683,7 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = cx.new(|cx| { - SingleLineInput::new( + InputField::new( window, cx, "sk-000000000000000000000000000000000000000000000000", diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index cca49c982ce6e93614c4574a84ffaa4bd1d8f0c6..c8a1da5f5af9feb72ec514854403d15d6e73774b 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -14,7 +14,7 @@ use open_ai::{ResponseStreamEvent, stream_completion}; use settings::{Settings, SettingsStore}; use std::sync::Arc; use ui::{ElevationIndex, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::EnvVar; @@ -340,7 
+340,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -348,7 +348,7 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = cx.new(|cx| { - SingleLineInput::new( + InputField::new( window, cx, "000000000000000000000000000000000000000000000000000", diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 5bfc97c41f60351288ccf08c3a86b4b0947ee997..50131f0a8ef7076420df9a9dc1dbdcd4c840a5c2 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -18,7 +18,7 @@ use std::pin::Pin; use std::str::FromStr as _; use std::sync::{Arc, LazyLock}; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use zed_env_vars::{EnvVar, env_var}; @@ -692,7 +692,7 @@ pub fn count_open_router_tokens( } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -700,7 +700,7 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = cx.new(|cx| { - SingleLineInput::new( + InputField::new( window, cx, "sk_or_000000000000000000000000000000000000000000000000", diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index ad12e5a628779eddf333bdd4f91bddbea016c402..ff5d4567c60423939c38d00a1203f613df353ccf 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -15,7 +15,7 @@ use settings::{Settings, SettingsStore}; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; use ui::{ElevationIndex, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use vercel::{Model, VERCEL_API_URL}; use zed_env_vars::{EnvVar, env_var}; @@ -362,7 +362,7 @@ pub fn count_vercel_tokens( } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -370,7 +370,7 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = cx.new(|cx| { - SingleLineInput::new( + InputField::new( window, cx, "v1:0000000000000000000000000000000000000000000000000", diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 243a2e3e0217f10d580a278d25e7168f4f62fe21..8b51ca12099691e4ae70084b509c6c40547bd432 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -15,7 +15,7 @@ use settings::{Settings, SettingsStore}; use std::sync::{Arc, LazyLock}; use strum::IntoEnumIterator; use ui::{ElevationIndex, List, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use util::{ResultExt, truncate_and_trailoff}; use x_ai::{Model, XAI_API_URL}; use zed_env_vars::{EnvVar, env_var}; @@ -359,7 +359,7 @@ pub fn count_xai_tokens( } struct ConfigurationView { - api_key_editor: Entity, + api_key_editor: Entity, state: Entity, load_credentials_task: Option>, } @@ -367,7 +367,7 @@ struct 
ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = cx.new(|cx| { - SingleLineInput::new( + InputField::new( window, cx, "xai-0000000000000000000000000000000000000000000000000", diff --git a/crates/language_onboarding/Cargo.toml b/crates/language_onboarding/Cargo.toml index a437adf1191a3b76fbd828dacaa60b75b1f7df28..38cf8a604a87f403e2d2720be6a2ba69a61e7484 100644 --- a/crates/language_onboarding/Cargo.toml +++ b/crates/language_onboarding/Cargo.toml @@ -21,7 +21,6 @@ gpui.workspace = true project.workspace = true ui.workspace = true workspace.workspace = true -workspace-hack.workspace = true # Uncomment other workspace dependencies as needed # assistant.workspace = true diff --git a/crates/language_selector/Cargo.toml b/crates/language_selector/Cargo.toml index a7312c71198e46fcef33d9c272eabb86cc544220..47ad9b9f8802a3964ababad593b7c6a604f1c98f 100644 --- a/crates/language_selector/Cargo.toml +++ b/crates/language_selector/Cargo.toml @@ -26,7 +26,6 @@ settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index 38d010e33bc89012b0dc1a35d1638a6a443f9075..c75c3954cc6590c2e0cb4326c073ed004eaac280 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -1,6 +1,7 @@ use editor::Editor; use gpui::{ - Context, Entity, IntoElement, ParentElement, Render, Subscription, WeakEntity, Window, div, + Context, Entity, IntoElement, ParentElement, Render, Styled, Subscription, WeakEntity, Window, + div, }; use language::LanguageName; use settings::Settings as _; @@ -41,7 +42,7 @@ impl ActiveBufferLanguage { impl Render for ActiveBufferLanguage { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { if !StatusBarSettings::get_global(cx).active_language_button { - return div(); + return div().hidden(); } div().when_some(self.active_language.as_ref(), |el, active_language| { @@ -61,9 +62,7 @@ impl Render for ActiveBufferLanguage { }); } })) - .tooltip(|window, cx| { - Tooltip::for_action("Select Language", &Toggle, window, cx) - }), + .tooltip(|_window, cx| Tooltip::for_action("Select Language", &Toggle, cx)), ) }) } diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index bbac900cded75e9ca680a1813734f57423ce0ee9..d251a297d4d0fd71b9c464230e2180c0e34fdfa4 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -34,7 +34,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/language_tools/src/key_context_view.rs b/crates/language_tools/src/key_context_view.rs index 1d3cd451f19ce6bc28540f10b2a91a7a6319214a..12b49ddf290f6a49348e7bc0de1b98e238b1fea1 100644 --- a/crates/language_tools/src/key_context_view.rs +++ b/crates/language_tools/src/key_context_view.rs @@ -166,7 +166,7 @@ impl Item for KeyContextView { } impl Render for KeyContextView { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl ui::IntoElement { use 
itertools::Itertools; let key_equivalents = cx.keyboard_mapper().get_key_equivalents(); @@ -211,7 +211,6 @@ impl Render for KeyContextView { .style(ButtonStyle::Filled) .key_binding(ui::KeyBinding::for_action( &zed_actions::OpenDefaultKeymap, - window, cx )) .on_click(|_, window, cx| { @@ -221,7 +220,7 @@ impl Render for KeyContextView { .child( Button::new("edit_your_keymap", "Edit Keymap File") .style(ButtonStyle::Filled) - .key_binding(ui::KeyBinding::for_action(&zed_actions::OpenKeymapFile, window, cx)) + .key_binding(ui::KeyBinding::for_action(&zed_actions::OpenKeymapFile, cx)) .on_click(|_, window, cx| { window.dispatch_action(zed_actions::OpenKeymapFile.boxed_clone(), cx); }), diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 614eaa4f05a0c4cac2572b9d014a04a8de5c4554..7dc2e93a5c707eaa3829caba6d6d2a04773883b1 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -1011,7 +1011,7 @@ impl StatusItemView for LspButton { impl Render for LspButton { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl ui::IntoElement { if self.server_state.read(cx).language_servers.is_empty() || self.lsp_menu.is_none() { - return div(); + return div().hidden(); } let mut has_errors = false; @@ -1065,14 +1065,8 @@ impl Render for LspButton { .when_some(indicator, IconButton::indicator) .icon_size(IconSize::Small) .indicator_border_color(Some(cx.theme().colors().status_bar_background)), - move |window, cx| { - Tooltip::with_meta( - "Language Servers", - Some(&ToggleMenu), - description, - window, - cx, - ) + move |_window, cx| { + Tooltip::with_meta("Language Servers", Some(&ToggleMenu), description, cx) }, ), ) diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index 1c24bfdcf44c09a1729065835debd4ef5fbb2252..e834dd6aec003930d68ed745f67aff50b2c8f66b 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -229,8 +229,11 @@ impl LspLogView { log_view.editor.update(cx, |editor, cx| { editor.set_read_only(false); let last_offset = editor.buffer().read(cx).len(cx); - let newest_cursor_is_at_end = - editor.selections.newest::(cx).start >= last_offset; + let newest_cursor_is_at_end = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .start + >= last_offset; editor.edit( vec![ (last_offset..last_offset, text.as_str()), diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index eea8c8a43b9b0dad854cc2a24878123ffeed6837..464d518c2e9c697d292d7bffda7ee7bae68dd254 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -252,7 +252,10 @@ impl SyntaxTreeView { .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { - let selection_range = editor.selections.last::(cx).range(); + let selection_range = editor + .selections + .last::(&editor.display_snapshot(cx)) + .range(); let multi_buffer = editor.buffer().read(cx); let (buffer, range, excerpt_id) = snapshot .buffer_snapshot() @@ -356,12 +359,7 @@ impl SyntaxTreeView { let multibuffer = editor_state.editor.read(cx).buffer(); let multibuffer = multibuffer.read(cx).snapshot(cx); let excerpt_id = buffer_state.excerpt_id; - let range = multibuffer - .anchor_in_excerpt(excerpt_id, range.start) - .unwrap() - ..multibuffer - .anchor_in_excerpt(excerpt_id, range.end) - 
.unwrap(); + let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?; // Update the editor with the anchor range. editor_state.editor.update(cx, |editor, cx| { diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 650a785b4686b8afcd5cfe351d7b31ce76e87970..2f123bb70fc3977784f5137a68fb63c09fb302c7 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -90,7 +90,6 @@ tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } tree-sitter-yaml = { workspace = true, optional = true } util.workspace = true -workspace-hack.workspace = true [dev-dependencies] pretty_assertions.workspace = true diff --git a/crates/languages/src/bash/injections.scm b/crates/languages/src/bash/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..9117c713b98fdd2896b13e4949a77c6489b9ee36 --- /dev/null +++ b/crates/languages/src/bash/injections.scm @@ -0,0 +1,3 @@ +((comment) @injection.content + (#set! injection.language "comment") +) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index f30120a635655af6c11889d3af110e6c2dca81fc..3463f4505044c83c9ba8a0e602cf5bfa82e93e3f 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -188,11 +188,7 @@ impl super::LspAdapter for CLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(detail.len() + 1..text.len()); - return Some(CodeLabel { - filter_range, - text, - runs, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some(lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE) if completion.detail.is_some() => @@ -208,11 +204,7 @@ impl super::LspAdapter for CLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(detail.len() + 1..text.len()); - return Some(CodeLabel { - filter_range, - text, - runs, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD) if completion.detail.is_some() => @@ -236,11 +228,7 @@ impl super::LspAdapter for CLspAdapter { filter_start..filter_end }); - return Some(CodeLabel { - filter_range, - text, - runs, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some(kind) => { let highlight_name = match kind { @@ -324,11 +312,11 @@ impl super::LspAdapter for CLspAdapter { _ => return None, }; - Some(CodeLabel { - runs: language.highlight_text(&text.as_str().into(), display_range.clone()), - text: text[display_range].to_string(), + Some(CodeLabel::new( + text[display_range.clone()].to_string(), filter_range, - }) + language.highlight_text(&text.as_str().into(), display_range), + )) } fn prepare_initialize_params( diff --git a/crates/languages/src/c/highlights.scm b/crates/languages/src/c/highlights.scm index b80c462ae6d32974d27d8a532bf6edd15ba86a82..40e0d7147e98287f5ed7587d690e25bc8bacaa0b 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/languages/src/c/highlights.scm @@ -1,27 +1,30 @@ +[ + "const" + "enum" + "extern" + "inline" + "sizeof" + "static" + "struct" + "typedef" + "union" + "volatile" +] @keyword + [ "break" "case" - "const" "continue" "default" "do" "else" - "enum" - "extern" "for" "goto" "if" - "inline" "return" - "sizeof" - "static" - "struct" "switch" - "typedef" - "union" - "volatile" "while" -] @keyword +] @keyword.control [ "#define" diff --git a/crates/languages/src/c/injections.scm b/crates/languages/src/c/injections.scm index 
73d2628225f05db94d53381fc9a9e10c29b6189d..447897340cc735ed77099b20fd6fc8c52ac19ec8 100644 --- a/crates/languages/src/c/injections.scm +++ b/crates/languages/src/c/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (preproc_def value: (preproc_arg) @injection.content (#set! injection.language "c")) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index bd988445bb155e8851ffa8bc3771bdd235fc7dff..af906e67122333b6e1834f1280d4458189daf105 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -106,32 +106,19 @@ type: (primitive_type) @type.builtin [ "alignas" "alignof" - "break" - "case" - "catch" "class" - "co_await" - "co_return" - "co_yield" "concept" "consteval" "constexpr" "constinit" - "continue" "decltype" - "default" "delete" - "do" - "else" "enum" "explicit" "export" "extern" "final" - "for" "friend" - "goto" - "if" "import" "inline" "module" @@ -144,24 +131,40 @@ type: (primitive_type) @type.builtin "protected" "public" "requires" - "return" "sizeof" "struct" - "switch" "template" "thread_local" - "throw" - "try" "typedef" "typename" "union" "using" "virtual" - "while" (storage_class_specifier) (type_qualifier) ] @keyword +[ + "break" + "case" + "catch" + "co_await" + "co_return" + "co_yield" + "continue" + "default" + "do" + "else" + "for" + "goto" + "if" + "return" + "switch" + "throw" + "try" + "while" +] @keyword.control + [ "#define" "#elif" diff --git a/crates/languages/src/cpp/injections.scm b/crates/languages/src/cpp/injections.scm index e903e1affd53a5c641a30736599ebedb2f53169f..160770f3cc1d69f5cb3d1679c8a48726d8d437ed 100644 --- a/crates/languages/src/cpp/injections.scm +++ b/crates/languages/src/cpp/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (preproc_def value: (preproc_arg) @injection.content (#set! injection.language "c++")) diff --git a/crates/languages/src/gitcommit/injections.scm b/crates/languages/src/gitcommit/injections.scm index db0af176578cfe1ba50db0cc7543d9b805ed8163..8fb9b459679489be7588d1ab9b6d53e40ea10c60 100644 --- a/crates/languages/src/gitcommit/injections.scm +++ b/crates/languages/src/gitcommit/injections.scm @@ -1,3 +1,7 @@ +((comment) @content + (#set! injection.language "comment") +) + ((scissors) @content (#set! 
"language" "diff")) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 13a4cec85ff8554cd14cb835a4320662f79a41d4..6c75abf123af62b3f4ab43a6e94d3b040e2f010a 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -222,7 +222,7 @@ impl LspAdapter for GoLspAdapter { Some((lsp::CompletionItemKind::MODULE, detail)) => { let text = format!("{label} {detail}"); let source = Rope::from(format!("import {text}").as_str()); - let runs = language.highlight_text(&source, 7..7 + text.len()); + let runs = language.highlight_text(&source, 7..7 + text[name_offset..].len()); let filter_range = completion .filter_text .as_deref() @@ -231,11 +231,7 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - text, - runs, - filter_range, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some(( lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE, @@ -246,7 +242,7 @@ impl LspAdapter for GoLspAdapter { Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str()); let runs = adjust_runs( name_offset, - language.highlight_text(&source, 4..4 + text.len()), + language.highlight_text(&source, 4..4 + text[name_offset..].len()), ); let filter_range = completion .filter_text @@ -256,18 +252,14 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - text, - runs, - filter_range, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some((lsp::CompletionItemKind::STRUCT, _)) => { let text = format!("{label} struct {{}}"); let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, - language.highlight_text(&source, 5..5 + text.len()), + language.highlight_text(&source, 5..5 + text[name_offset..].len()), ); let filter_range = completion .filter_text @@ -277,18 +269,14 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - text, - runs, - filter_range, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some((lsp::CompletionItemKind::INTERFACE, _)) => { let text = format!("{label} interface {{}}"); let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, - language.highlight_text(&source, 5..5 + text.len()), + language.highlight_text(&source, 5..5 + text[name_offset..].len()), ); let filter_range = completion .filter_text @@ -298,11 +286,7 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - text, - runs, - filter_range, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } Some((lsp::CompletionItemKind::FIELD, detail)) => { let text = format!("{label} {detail}"); @@ -310,7 +294,7 @@ impl LspAdapter for GoLspAdapter { Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, - language.highlight_text(&source, 16..16 + text.len()), + language.highlight_text(&source, 16..16 + text[name_offset..].len()), ); let filter_range = completion .filter_text @@ -320,11 +304,7 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - text, - runs, - filter_range, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } 
Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => { if let Some(signature) = detail.strip_prefix("func") { @@ -332,7 +312,7 @@ impl LspAdapter for GoLspAdapter { let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, - language.highlight_text(&source, 5..5 + text.len()), + language.highlight_text(&source, 5..5 + text[name_offset..].len()), ); let filter_range = completion .filter_text @@ -342,11 +322,7 @@ impl LspAdapter for GoLspAdapter { .map(|start| start..start + filter_text.len()) }) .unwrap_or(0..label.len()); - return Some(CodeLabel { - filter_range, - text, - runs, - }); + return Some(CodeLabel::new(text, filter_range, runs)); } } _ => {} @@ -406,11 +382,11 @@ impl LspAdapter for GoLspAdapter { _ => return None, }; - Some(CodeLabel { - runs: language.highlight_text(&text.as_str().into(), display_range.clone()), - text: text[display_range].to_string(), + Some(CodeLabel::new( + text[display_range.clone()].to_string(), filter_range, - }) + language.highlight_text(&text.as_str().into(), display_range), + )) } fn diagnostic_message_to_markdown(&self, message: &str) -> Option { @@ -810,15 +786,15 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "Hello(a B) c.D".to_string(), - filter_range: 0..5, - runs: vec![ + Some(CodeLabel::new( + "Hello(a B) c.D".to_string(), + 0..5, + vec![ (0..5, highlight_function), (8..9, highlight_type), (13..14, highlight_type), - ], - }) + ] + )) ); // Nested methods @@ -834,15 +810,15 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "one.two.Three() [3]interface{}".to_string(), - filter_range: 0..13, - runs: vec![ + Some(CodeLabel::new( + "one.two.Three() [3]interface{}".to_string(), + 0..13, + vec![ (8..13, highlight_function), (17..18, highlight_number), (19..28, highlight_keyword), ], - }) + )) ); // Nested fields @@ -858,11 +834,11 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "two.Three a.Bcd".to_string(), - filter_range: 0..9, - runs: vec![(4..9, highlight_field), (12..15, highlight_type)], - }) + Some(CodeLabel::new( + "two.Three a.Bcd".to_string(), + 0..9, + vec![(4..9, highlight_field), (12..15, highlight_type)], + )) ); } diff --git a/crates/languages/src/go/injections.scm b/crates/languages/src/go/injections.scm index 7bb68d760e1a556ef93a9477dc578c88d9350dcb..52edce417798bcc8cd9cbc38ba3443ff3fc561c6 100644 --- a/crates/languages/src/go/injections.scm +++ b/crates/languages/src/go/injections.scm @@ -1,4 +1,8 @@ ; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41 +((comment) @injection.content + (#set! injection.language "comment") +) + (call_expression (selector_expression) @_function (#any-of? @_function diff --git a/crates/languages/src/gomod/injections.scm b/crates/languages/src/gomod/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..321c90add3710f35721daeb6b42abe38af094953 --- /dev/null +++ b/crates/languages/src/gomod/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/crates/languages/src/gowork/injections.scm b/crates/languages/src/gowork/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..321c90add3710f35721daeb6b42abe38af094953 --- /dev/null +++ b/crates/languages/src/gowork/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index ebeac7efffb8770616fbc94ee4bbf3c25275a198..e5b84ab68df2b32061691f469046569a6597750e 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -171,47 +171,52 @@ "as" "async" "await" - "break" - "case" - "catch" "class" "const" - "continue" "debugger" "default" "delete" - "do" - "else" "export" "extends" - "finally" - "for" "from" "function" "get" - "if" "import" "in" "instanceof" "let" "new" "of" - "return" "set" "static" - "switch" "target" - "throw" - "try" "typeof" "using" "var" "void" - "while" "with" - "yield" ] @keyword +[ + "break" + "case" + "catch" + "continue" + "do" + "else" + "finally" + "for" + "if" + "return" + "switch" + "throw" + "try" + "while" + "yield" +] @keyword.control + +(switch_default "default" @keyword.control) + (template_substitution "${" @punctuation.special "}" @punctuation.special) @embedded diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm index 987be660d3c5ebd706284990d7d21a481b24a2af..f79cd788d78964f61f611023d0645c95c88aaf17 100644 --- a/crates/languages/src/javascript/injections.scm +++ b/crates/languages/src/javascript/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content (#set! injection.language "jsdoc")) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index ca16c27a27be3e1e09ced16cd2eef7aa28345f9e..5f72103bc63bdfab73f7b858c01abe8d34317b22 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -31,38 +31,103 @@ (export_statement (lexical_declaration ["let" "const"] @context - ; Multiple names may be exported - @item is on the declarator to keep - ; ranges distinct. (variable_declarator - name: (_) @name) @item))) + name: (identifier) @name) @item))) + +; Exported array destructuring +(program + (export_statement + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ]))))) + +; Exported object destructuring +(program + (export_statement + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)]))))) (program (lexical_declaration ["let" "const"] @context - ; Multiple names may be defined - @item is on the declarator to keep - ; ranges distinct. 
(variable_declarator - name: (_) @name) @item)) + name: (identifier) @name) @item)) + +; Top-level array destructuring +(program + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ])))) + +; Top-level object destructuring +(program + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)])))) (class_declaration "class" @context name: (_) @name) @item -(method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item +; Method definitions in classes (not in object literals) +(class_body + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) + +; Object literal methods +(variable_declarator + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition [ @@ -116,4 +181,43 @@ ) ) @item +; Object properties +(pair + key: [ + (property_identifier) @name + (string (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item + +; Nested variables in function bodies +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Nested array destructuring in functions +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ])))) + +; Nested object destructuring in functions +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern value: (identifier) @name @item) + (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)])))) + (comment) @annotation diff --git a/crates/languages/src/package_json.rs b/crates/languages/src/package_json.rs index 8c1cb9f068d34873a4cd27c1b2f21deb236c789d..80e9e3cc0d5789b79592fdb490089b8d2f7879eb 100644 --- a/crates/languages/src/package_json.rs +++ b/crates/languages/src/package_json.rs @@ -15,6 +15,8 @@ pub struct PackageJsonData { pub mocha_package_path: Option>, pub vitest_package_path: Option>, pub jasmine_package_path: Option>, + pub bun_package_path: Option>, + pub node_package_path: Option>, pub scripts: BTreeSet<(Arc, String)>, pub package_manager: Option<&'static str>, } @@ -35,6 +37,8 @@ impl PackageJsonData { let mut mocha_package_path = None; let mut vitest_package_path = None; let mut jasmine_package_path = None; + let mut bun_package_path = None; + let mut node_package_path = None; if let Some(Value::Object(dependencies)) = package_json.get("devDependencies") { if 
dependencies.contains_key("jest") { jest_package_path.get_or_insert_with(|| path.clone()); @@ -48,6 +52,12 @@ impl PackageJsonData { if dependencies.contains_key("jasmine") { jasmine_package_path.get_or_insert_with(|| path.clone()); } + if dependencies.contains_key("@types/bun") { + bun_package_path.get_or_insert_with(|| path.clone()); + } + if dependencies.contains_key("@types/node") { + node_package_path.get_or_insert_with(|| path.clone()); + } } if let Some(Value::Object(dev_dependencies)) = package_json.get("dependencies") { if dev_dependencies.contains_key("jest") { @@ -62,6 +72,12 @@ impl PackageJsonData { if dev_dependencies.contains_key("jasmine") { jasmine_package_path.get_or_insert_with(|| path.clone()); } + if dev_dependencies.contains_key("@types/bun") { + bun_package_path.get_or_insert_with(|| path.clone()); + } + if dev_dependencies.contains_key("@types/node") { + node_package_path.get_or_insert_with(|| path.clone()); + } } let package_manager = package_json @@ -74,6 +90,8 @@ impl PackageJsonData { Some("yarn") } else if value.starts_with("npm") { Some("npm") + } else if value.starts_with("bun") { + Some("bun") } else { None } @@ -84,6 +102,8 @@ impl PackageJsonData { mocha_package_path, vitest_package_path, jasmine_package_path, + bun_package_path, + node_package_path, scripts, package_manager, } @@ -100,6 +120,8 @@ impl PackageJsonData { .jasmine_package_path .take() .or(other.jasmine_package_path); + self.bun_package_path = self.bun_package_path.take().or(other.bun_package_path); + self.node_package_path = self.node_package_path.take().or(other.node_package_path); self.scripts.extend(other.scripts); self.package_manager = self.package_manager.or(other.package_manager); } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 2ac4a5b9f543576944e1ce30b52593afaef8d34a..c255ed3f09f733321c1066520b12355f76941931 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -19,10 +19,12 @@ use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind}; use pet_virtualenv::is_virtualenv_dir; use project::Fs; use project::lsp_store::language_server_settings; +use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use smol::lock::OnceCell; use std::cmp::Ordering; use std::env::consts; +use util::command::new_smol_command; use util::fs::{make_file_executable, remove_matching}; use util::rel_path::RelPath; @@ -38,6 +40,14 @@ use std::{ use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName}; use util::{ResultExt, maybe}; +#[derive(Debug, Serialize, Deserialize)] +pub(crate) struct PythonToolchainData { + #[serde(flatten)] + environment: PythonEnvironment, + #[serde(skip_serializing_if = "Option::is_none")] + activation_scripts: Option>, +} + pub(crate) struct PyprojectTomlManifestProvider; impl ManifestProvider for PyprojectTomlManifestProvider { @@ -164,11 +174,12 @@ impl LspAdapter for TyLspAdapter { })? 
.unwrap_or_else(|| json!({})); if let Some(toolchain) = toolchain.and_then(|toolchain| { - serde_json::from_value::(toolchain.as_json).ok() + serde_json::from_value::(toolchain.as_json).ok() }) { _ = maybe!({ - let uri = url::Url::from_file_path(toolchain.executable?).ok()?; - let sys_prefix = toolchain.prefix.clone()?; + let uri = + url::Url::from_file_path(toolchain.environment.executable.as_ref()?).ok()?; + let sys_prefix = toolchain.environment.prefix.clone()?; let environment = json!({ "executable": { "uri": uri, @@ -406,11 +417,6 @@ impl LspAdapter for PyrightLspAdapter { return None; } }; - let filter_range = item - .filter_text - .as_deref() - .and_then(|filter| label.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..label.len()); let mut text = label.clone(); if let Some(completion_details) = item .label_details @@ -419,14 +425,14 @@ impl LspAdapter for PyrightLspAdapter { { write!(&mut text, " {}", completion_details).ok(); } - Some(language::CodeLabel { - runs: highlight_id + Some(language::CodeLabel::filtered( + text, + item.filter_text.as_deref(), + highlight_id .map(|id| (0..label.len(), id)) .into_iter() .collect(), - text, - filter_range, - }) + )) } async fn label_for_symbol( @@ -457,11 +463,11 @@ impl LspAdapter for PyrightLspAdapter { _ => return None, }; - Some(language::CodeLabel { - runs: language.highlight_text(&text.as_str().into(), display_range.clone()), - text: text[display_range].to_string(), + Some(language::CodeLabel::new( + text[display_range.clone()].to_string(), filter_range, - }) + language.highlight_text(&text.as_str().into(), display_range), + )) } async fn workspace_configuration( @@ -478,9 +484,8 @@ impl LspAdapter for PyrightLspAdapter { // If we have a detected toolchain, configure Pyright to use it if let Some(toolchain) = toolchain - && let Ok(env) = serde_json::from_value::< - pet_core::python_environment::PythonEnvironment, - >(toolchain.as_json.clone()) + && let Ok(env) = + serde_json::from_value::(toolchain.as_json.clone()) { if !user_settings.is_object() { user_settings = Value::Object(serde_json::Map::default()); @@ -488,7 +493,7 @@ impl LspAdapter for PyrightLspAdapter { let object = user_settings.as_object_mut().unwrap(); let interpreter_path = toolchain.path.to_string(); - if let Some(venv_dir) = env.prefix { + if let Some(venv_dir) = &env.environment.prefix { // Set venvPath and venv at the root level // This matches the format of a pyrightconfig.json file if let Some(parent) = venv_dir.parent() { @@ -1027,6 +1032,7 @@ impl ToolchainLister for PythonToolchainProvider { worktree_root: PathBuf, subroot_relative_path: Arc, project_env: Option>, + fs: &dyn Fs, ) -> ToolchainList { let env = project_env.unwrap_or_default(); let environment = EnvironmentApi::from_env(&env); @@ -1118,13 +1124,16 @@ impl ToolchainLister for PythonToolchainProvider { .then_with(exe_ordering) }); - let mut toolchains: Vec<_> = toolchains - .into_iter() - .filter_map(venv_to_toolchain) - .collect(); - toolchains.dedup(); + let mut out_toolchains = Vec::new(); + for toolchain in toolchains { + let Some(toolchain) = venv_to_toolchain(toolchain, fs).await else { + continue; + }; + out_toolchains.push(toolchain); + } + out_toolchains.dedup(); ToolchainList { - toolchains, + toolchains: out_toolchains, default: None, groups: Default::default(), } @@ -1143,6 +1152,7 @@ impl ToolchainLister for PythonToolchainProvider { &self, path: PathBuf, env: Option>, + fs: &dyn Fs, ) -> anyhow::Result { let env = env.unwrap_or_default(); let environment = 
EnvironmentApi::from_env(&env); @@ -1154,58 +1164,48 @@ impl ToolchainLister for PythonToolchainProvider { let toolchain = pet::resolve::resolve_environment(&path, &locators, &environment) .context("Could not find a virtual environment in provided path")?; let venv = toolchain.resolved.unwrap_or(toolchain.discovered); - venv_to_toolchain(venv).context("Could not convert a venv into a toolchain") + venv_to_toolchain(venv, fs) + .await + .context("Could not convert a venv into a toolchain") } - async fn activation_script( - &self, - toolchain: &Toolchain, - shell: ShellKind, - fs: &dyn Fs, - ) -> Vec { - let Ok(toolchain) = serde_json::from_value::( - toolchain.as_json.clone(), - ) else { + fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind) -> Vec { + let Ok(toolchain) = + serde_json::from_value::(toolchain.as_json.clone()) + else { return vec![]; }; + + log::debug!("(Python) Composing activation script for toolchain {toolchain:?}"); + let mut activation_script = vec![]; - match toolchain.kind { + match toolchain.environment.kind { Some(PythonEnvironmentKind::Conda) => { - if let Some(name) = &toolchain.name { + if let Some(name) = &toolchain.environment.name { activation_script.push(format!("conda activate {name}")); } else { activation_script.push("conda activate".to_string()); } } Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => { - if let Some(prefix) = &toolchain.prefix { - let activate_keyword = shell.activate_keyword(); - let activate_script_name = match shell { - ShellKind::Posix | ShellKind::Rc => "activate", - ShellKind::Csh => "activate.csh", - ShellKind::Tcsh => "activate.csh", - ShellKind::Fish => "activate.fish", - ShellKind::Nushell => "activate.nu", - ShellKind::PowerShell => "activate.ps1", - ShellKind::Cmd => "activate.bat", - ShellKind::Xonsh => "activate.xsh", - }; - let path = prefix.join(BINARY_DIR).join(activate_script_name); - - if let Some(quoted) = shell.try_quote(&path.to_string_lossy()) - && fs.is_file(&path).await - { - activation_script.push(format!("{activate_keyword} {quoted}")); + if let Some(activation_scripts) = &toolchain.activation_scripts { + if let Some(activate_script_path) = activation_scripts.get(&shell) { + let activate_keyword = shell.activate_keyword(); + if let Some(quoted) = + shell.try_quote(&activate_script_path.to_string_lossy()) + { + activation_script.push(format!("{activate_keyword} {quoted}")); + } } } } Some(PythonEnvironmentKind::Pyenv) => { - let Some(manager) = toolchain.manager else { + let Some(manager) = &toolchain.environment.manager else { return vec![]; }; - let version = toolchain.version.as_deref().unwrap_or("system"); - let pyenv = manager.executable; + let version = toolchain.environment.version.as_deref().unwrap_or("system"); + let pyenv = &manager.executable; let pyenv = pyenv.display(); activation_script.extend(match shell { ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")), @@ -1225,7 +1225,7 @@ impl ToolchainLister for PythonToolchainProvider { } } -fn venv_to_toolchain(venv: PythonEnvironment) -> Option { +async fn venv_to_toolchain(venv: PythonEnvironment, fs: &dyn Fs) -> Option { let mut name = String::from("Python"); if let Some(ref version) = venv.version { _ = write!(name, " {version}"); @@ -1242,14 +1242,61 @@ fn venv_to_toolchain(venv: PythonEnvironment) -> Option { _ = write!(name, " {nk}"); } + let mut activation_scripts = HashMap::default(); + match venv.kind { + Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => { + 
resolve_venv_activation_scripts(&venv, fs, &mut activation_scripts).await + } + _ => {} + } + let data = PythonToolchainData { + environment: venv, + activation_scripts: Some(activation_scripts), + }; + Some(Toolchain { name: name.into(), - path: venv.executable.as_ref()?.to_str()?.to_owned().into(), + path: data + .environment + .executable + .as_ref()? + .to_str()? + .to_owned() + .into(), language_name: LanguageName::new("Python"), - as_json: serde_json::to_value(venv).ok()?, + as_json: serde_json::to_value(data).ok()?, }) } +async fn resolve_venv_activation_scripts( + venv: &PythonEnvironment, + fs: &dyn Fs, + activation_scripts: &mut HashMap, +) { + log::debug!("(Python) Resolving activation scripts for venv toolchain {venv:?}"); + if let Some(prefix) = &venv.prefix { + for (shell_kind, script_name) in &[ + (ShellKind::Posix, "activate"), + (ShellKind::Rc, "activate"), + (ShellKind::Csh, "activate.csh"), + (ShellKind::Tcsh, "activate.csh"), + (ShellKind::Fish, "activate.fish"), + (ShellKind::Nushell, "activate.nu"), + (ShellKind::PowerShell, "activate.ps1"), + (ShellKind::Cmd, "activate.bat"), + (ShellKind::Xonsh, "activate.xsh"), + ] { + let path = prefix.join(BINARY_DIR).join(script_name); + + log::debug!("Trying path: {}", path.display()); + + if fs.is_file(&path).await { + activation_scripts.insert(*shell_kind, path); + } + } + } +} + pub struct EnvironmentApi<'a> { global_search_locations: Arc>>, project_env: &'a HashMap, @@ -1297,9 +1344,13 @@ impl pet_core::os_environment::Environment for EnvironmentApi<'_> { fn get_know_global_search_locations(&self) -> Vec { if self.global_search_locations.lock().is_empty() { - let mut paths = - std::env::split_paths(&self.get_env_var("PATH".to_string()).unwrap_or_default()) - .collect::>(); + let mut paths = std::env::split_paths( + &self + .get_env_var("PATH".to_string()) + .or_else(|| self.get_env_var("Path".to_string())) + .unwrap_or_default(), + ) + .collect::>(); log::trace!("Env PATH: {:?}", paths); for p in self.pet_env.get_know_global_search_locations() { @@ -1332,7 +1383,13 @@ impl PyLspAdapter { async fn ensure_venv(delegate: &dyn LspAdapterDelegate) -> Result> { let python_path = Self::find_base_python(delegate) .await - .context("Could not find Python installation for PyLSP")?; + .with_context(|| { + let mut message = "Could not find Python installation for PyLSP".to_owned(); + if cfg!(windows){ + message.push_str(". Install Python from the Microsoft Store, or manually from https://www.python.org/downloads/windows.") + } + message + })?; let work_dir = delegate .language_server_download_dir(&Self::SERVER_NAME) .await @@ -1355,9 +1412,24 @@ impl PyLspAdapter { // Find "baseline", user python version from which we'll create our own venv. async fn find_base_python(delegate: &dyn LspAdapterDelegate) -> Option { for path in ["python3", "python"] { - if let Some(path) = delegate.which(path.as_ref()).await { - return Some(path); + let Some(path) = delegate.which(path.as_ref()).await else { + continue; + }; + // Try to detect situations where `python3` exists but is not a real Python interpreter. + // Notably, on fresh Windows installs, `python3` is a shim that opens the Microsoft Store app + // when run with no arguments, and just fails otherwise. 
+ let Some(output) = new_smol_command(&path) + .args(["-c", "print(1 + 2)"]) + .output() + .await + .ok() + else { + continue; + }; + if output.stdout.trim_ascii() != b"3" { + continue; } + return Some(path); } None } @@ -1402,16 +1474,11 @@ impl LspAdapter for PyLspAdapter { lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant")?, _ => return None, }; - let filter_range = item - .filter_text - .as_deref() - .and_then(|filter| label.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..label.len()); - Some(language::CodeLabel { - text: label.clone(), - runs: vec![(0..label.len(), highlight_id)], - filter_range, - }) + Some(language::CodeLabel::filtered( + label.clone(), + item.filter_text.as_deref(), + vec![(0..label.len(), highlight_id)], + )) } async fn label_for_symbol( @@ -1441,12 +1508,11 @@ impl LspAdapter for PyLspAdapter { } _ => return None, }; - - Some(language::CodeLabel { - runs: language.highlight_text(&text.as_str().into(), display_range.clone()), - text: text[display_range].to_string(), + Some(language::CodeLabel::new( + text[display_range.clone()].to_string(), filter_range, - }) + language.highlight_text(&text.as_str().into(), display_range), + )) } async fn workspace_configuration( @@ -1686,11 +1752,6 @@ impl LspAdapter for BasedPyrightLspAdapter { return None; } }; - let filter_range = item - .filter_text - .as_deref() - .and_then(|filter| label.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..label.len()); let mut text = label.clone(); if let Some(completion_details) = item .label_details @@ -1699,14 +1760,14 @@ impl LspAdapter for BasedPyrightLspAdapter { { write!(&mut text, " {}", completion_details).ok(); } - Some(language::CodeLabel { - runs: highlight_id + Some(language::CodeLabel::filtered( + text, + item.filter_text.as_deref(), + highlight_id .map(|id| (0..label.len(), id)) .into_iter() .collect(), - text, - filter_range, - }) + )) } async fn label_for_symbol( @@ -1736,12 +1797,11 @@ impl LspAdapter for BasedPyrightLspAdapter { } _ => return None, }; - - Some(language::CodeLabel { - runs: language.highlight_text(&text.as_str().into(), display_range.clone()), - text: text[display_range].to_string(), + Some(language::CodeLabel::new( + text[display_range.clone()].to_string(), filter_range, - }) + language.highlight_text(&text.as_str().into(), display_range), + )) } async fn workspace_configuration( diff --git a/crates/languages/src/python/injections.scm b/crates/languages/src/python/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..9117c713b98fdd2896b13e4949a77c6489b9ee36 --- /dev/null +++ b/crates/languages/src/python/injections.scm @@ -0,0 +1,3 @@ +((comment) @injection.content + (#set! 
injection.language "comment") +) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index b315e945a73d1792dcf2b6aeebfe29c0db6cdf7f..4b56a617735ab1a5932a56a4f6e51397721d8a86 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -40,7 +40,7 @@ impl RustLspAdapter { #[cfg(target_os = "linux")] impl RustLspAdapter { const GITHUB_ASSET_KIND: AssetKind = AssetKind::Gz; - const ARCH_SERVER_NAME: &str = "unknown-linux-gnu"; + const ARCH_SERVER_NAME: &str = "unknown-linux"; } #[cfg(target_os = "freebsd")] @@ -57,19 +57,89 @@ impl RustLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer"); +#[cfg(target_os = "linux")] +enum LibcType { + Gnu, + Musl, +} + impl RustLspAdapter { - fn build_asset_name() -> String { + #[cfg(target_os = "linux")] + async fn determine_libc_type() -> LibcType { + use futures::pin_mut; + use smol::process::Command; + + async fn from_ldd_version() -> Option { + let ldd_output = Command::new("ldd").arg("--version").output().await.ok()?; + let ldd_version = String::from_utf8_lossy(&ldd_output.stdout); + + if ldd_version.contains("GNU libc") || ldd_version.contains("GLIBC") { + Some(LibcType::Gnu) + } else if ldd_version.contains("musl") { + Some(LibcType::Musl) + } else { + None + } + } + + if let Some(libc_type) = from_ldd_version().await { + return libc_type; + } + + let Ok(dir_entries) = smol::fs::read_dir("/lib").await else { + // defaulting to gnu because nix doesn't have /lib files due to not following FHS + return LibcType::Gnu; + }; + let dir_entries = dir_entries.filter_map(async move |e| e.ok()); + pin_mut!(dir_entries); + + let mut has_musl = false; + let mut has_gnu = false; + + while let Some(entry) = dir_entries.next().await { + let file_name = entry.file_name(); + let file_name = file_name.to_string_lossy(); + if file_name.starts_with("ld-musl-") { + has_musl = true; + } else if file_name.starts_with("ld-linux-") { + has_gnu = true; + } + } + + match (has_musl, has_gnu) { + (true, _) => LibcType::Musl, + (_, true) => LibcType::Gnu, + _ => LibcType::Gnu, + } + } + + #[cfg(target_os = "linux")] + async fn build_arch_server_name_linux() -> String { + let libc = match Self::determine_libc_type().await { + LibcType::Musl => "musl", + LibcType::Gnu => "gnu", + }; + + format!("{}-{}", Self::ARCH_SERVER_NAME, libc) + } + + async fn build_asset_name() -> String { let extension = match Self::GITHUB_ASSET_KIND { AssetKind::TarGz => "tar.gz", AssetKind::Gz => "gz", AssetKind::Zip => "zip", }; + #[cfg(target_os = "linux")] + let arch_server_name = Self::build_arch_server_name_linux().await; + #[cfg(not(target_os = "linux"))] + let arch_server_name = Self::ARCH_SERVER_NAME.to_string(); + format!( "{}-{}-{}.{}", SERVER_NAME, std::env::consts::ARCH, - Self::ARCH_SERVER_NAME, + &arch_server_name, extension ) } @@ -175,11 +245,7 @@ impl LspAdapter for RustLspAdapter { }) .unwrap_or_else(filter_range); - CodeLabel { - text, - runs, - filter_range, - } + CodeLabel::new(text, filter_range, runs) }; let mut label = match (detail_right, completion.kind) { (Some(signature), Some(lsp::CompletionItemKind::FIELD)) => { @@ -330,11 +396,11 @@ impl LspAdapter for RustLspAdapter { let filter_range = prefix.len()..prefix.len() + name.len(); let display_range = 0..filter_range.end; - Some(CodeLabel { - runs: language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range), - text: format!("{prefix}{name}"), + Some(CodeLabel::new( + format!("{prefix}{name}"), filter_range, - }) + 
language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range), + )) } fn prepare_initialize_params( @@ -413,7 +479,7 @@ impl LspInstaller for RustLspAdapter { delegate.http_client(), ) .await?; - let asset_name = Self::build_asset_name(); + let asset_name = Self::build_asset_name().await; let asset = release .assets .into_iter() @@ -1132,10 +1198,10 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "hello(&mut Option) -> Vec (use crate::foo)".to_string(), - filter_range: 0..5, - runs: vec![ + Some(CodeLabel::new( + "hello(&mut Option) -> Vec (use crate::foo)".to_string(), + 0..5, + vec![ (0..5, highlight_function), (7..10, highlight_keyword), (11..17, highlight_type), @@ -1143,7 +1209,7 @@ mod tests { (25..28, highlight_type), (29..30, highlight_type), ], - }) + )) ); assert_eq!( adapter @@ -1160,10 +1226,10 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "hello(&mut Option) -> Vec (use crate::foo)".to_string(), - filter_range: 0..5, - runs: vec![ + Some(CodeLabel::new( + "hello(&mut Option) -> Vec (use crate::foo)".to_string(), + 0..5, + vec![ (0..5, highlight_function), (7..10, highlight_keyword), (11..17, highlight_type), @@ -1171,7 +1237,7 @@ mod tests { (25..28, highlight_type), (29..30, highlight_type), ], - }) + )) ); assert_eq!( adapter @@ -1185,11 +1251,11 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "len: usize".to_string(), - filter_range: 0..3, - runs: vec![(0..3, highlight_field), (5..10, highlight_type),], - }) + Some(CodeLabel::new( + "len: usize".to_string(), + 0..3, + vec![(0..3, highlight_field), (5..10, highlight_type),], + )) ); assert_eq!( @@ -1208,10 +1274,10 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "hello(&mut Option) -> Vec (use crate::foo)".to_string(), - filter_range: 0..5, - runs: vec![ + Some(CodeLabel::new( + "hello(&mut Option) -> Vec (use crate::foo)".to_string(), + 0..5, + vec![ (0..5, highlight_function), (7..10, highlight_keyword), (11..17, highlight_type), @@ -1219,7 +1285,7 @@ mod tests { (25..28, highlight_type), (29..30, highlight_type), ], - }) + )) ); assert_eq!( @@ -1237,10 +1303,10 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "hello(&mut Option) -> Vec (use crate::foo)".to_string(), - filter_range: 0..5, - runs: vec![ + Some(CodeLabel::new( + "hello(&mut Option) -> Vec (use crate::foo)".to_string(), + 0..5, + vec![ (0..5, highlight_function), (7..10, highlight_keyword), (11..17, highlight_type), @@ -1248,7 +1314,7 @@ mod tests { (25..28, highlight_type), (29..30, highlight_type), ], - }) + )) ); assert_eq!( @@ -1267,16 +1333,16 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), - filter_range: 6..18, - runs: vec![ + Some(CodeLabel::new( + "await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), + 6..18, + vec![ (6..18, HighlightId(2)), (20..23, HighlightId(1)), (33..40, HighlightId(0)), (45..46, HighlightId(0)) ], - }) + )) ); assert_eq!( @@ -1297,10 +1363,10 @@ mod tests { &language ) .await, - Some(CodeLabel { - text: "pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), - filter_range: 7..19, - runs: vec![ + Some(CodeLabel::new( + "pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), + 7..19, + vec![ (0..3, HighlightId(1)), (4..6, HighlightId(1)), (7..19, HighlightId(2)), @@ -1308,7 +1374,7 @@ mod tests { (34..41, HighlightId(0)), (46..47, HighlightId(0)) ], - }) + )) ); assert_eq!( @@ -1324,11 +1390,11 @@ mod tests { &language, ) .await, - 
Some(CodeLabel { - text: "inner_value: String".to_string(), - filter_range: 6..11, - runs: vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))], - }) + Some(CodeLabel::new( + "inner_value: String".to_string(), + 6..11, + vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))], + )) ); } @@ -1354,22 +1420,22 @@ mod tests { adapter .label_for_symbol("hello", lsp::SymbolKind::FUNCTION, &language) .await, - Some(CodeLabel { - text: "fn hello".to_string(), - filter_range: 3..8, - runs: vec![(0..2, highlight_keyword), (3..8, highlight_function)], - }) + Some(CodeLabel::new( + "fn hello".to_string(), + 3..8, + vec![(0..2, highlight_keyword), (3..8, highlight_function)], + )) ); assert_eq!( adapter .label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER, &language) .await, - Some(CodeLabel { - text: "type World".to_string(), - filter_range: 5..10, - runs: vec![(0..4, highlight_keyword), (5..10, highlight_type)], - }) + Some(CodeLabel::new( + "type World".to_string(), + 5..10, + vec![(0..4, highlight_keyword), (5..10, highlight_type)], + )) ); } diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index b0daac71a097b922aa810aadef64a18e95b5b649..36f638e825b117673bd88b3abaf75d0fc433f4e7 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -83,29 +83,20 @@ "as" "async" "await" - "break" "const" - "continue" "default" "dyn" - "else" "enum" "extern" "fn" - "for" - "if" "impl" - "in" "let" - "loop" "macro_rules!" - "match" "mod" "move" "pub" "raw" "ref" - "return" "static" "struct" "trait" @@ -114,13 +105,25 @@ "unsafe" "use" "where" - "while" - "yield" (crate) (mutable_specifier) (super) ] @keyword +[ + "break" + "continue" + "else" + "for" + "if" + "in" + "loop" + "match" + "return" + "while" + "yield" +] @keyword.control + [ (string_literal) (raw_string_literal) diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index e5921501bc613e8adae652e41b4b621b932281d1..91c092b353b615c5dff1f7189af816c9205cbf21 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -2,7 +2,7 @@ (#set! injection.language "comment")) (macro_invocation - macro: (identifier) @_macro_name + macro: [(identifier) (scoped_identifier)] @_macro_name (#not-any-of? @_macro_name "view" "html") (token_tree) @injection.content (#set! injection.language "rust")) @@ -11,7 +11,7 @@ ; it wants to inject inside of rust, instead of modifying the rust ; injections to support leptos injections (macro_invocation - macro: (identifier) @_macro_name + macro: [(identifier) (scoped_identifier)] @_macro_name (#any-of? @_macro_name "view" "html") (token_tree) @injection.content (#set! 
injection.language "rstml") diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index f7cb987831578f1d3e78decbf89f71c91d3a3b7e..ef12b3d7913e07109e32bb5bf41909511aa2b555 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -171,25 +171,16 @@ "as" "async" "await" - "break" - "case" - "catch" "class" "const" - "continue" "debugger" "default" "delete" - "do" - "else" "export" "extends" - "finally" - "for" "from" "function" "get" - "if" "import" "in" "instanceof" @@ -197,23 +188,37 @@ "let" "new" "of" - "return" "satisfies" "set" "static" - "switch" "target" - "throw" - "try" "typeof" "using" "var" "void" - "while" "with" - "yield" ] @keyword +[ + "break" + "case" + "catch" + "continue" + "do" + "else" + "finally" + "for" + "if" + "return" + "switch" + "throw" + "try" + "while" + "yield" +] @keyword.control + +(switch_default "default" @keyword.control) + (template_substitution "${" @punctuation.special "}" @punctuation.special) @embedded diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm index f749aac43a713dadc6abe81a0523f241610b2675..3cca9e8e81c31d3565554595456fa62be89bc81f 100644 --- a/crates/languages/src/tsx/injections.scm +++ b/crates/languages/src/tsx/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content (#set! injection.language "jsdoc")) diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 68fb11bf3526e6e4301d118e6be33dfcc3b3ee2c..334fd4c4a717d2b0a9890611ff5cc21f3d898aeb 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -54,6 +54,12 @@ const TYPESCRIPT_VITEST_PACKAGE_PATH_VARIABLE: VariableName = const TYPESCRIPT_JASMINE_PACKAGE_PATH_VARIABLE: VariableName = VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JASMINE_PACKAGE_PATH")); +const TYPESCRIPT_BUN_PACKAGE_PATH_VARIABLE: VariableName = + VariableName::Custom(Cow::Borrowed("TYPESCRIPT_BUN_PACKAGE_PATH")); + +const TYPESCRIPT_NODE_PACKAGE_PATH_VARIABLE: VariableName = + VariableName::Custom(Cow::Borrowed("TYPESCRIPT_NODE_PACKAGE_PATH")); + #[derive(Clone, Debug, Default)] struct PackageJsonContents(Arc>>); @@ -220,6 +226,65 @@ impl PackageJsonData { }); } + if self.bun_package_path.is_some() { + task_templates.0.push(TaskTemplate { + label: format!("{} file test", "bun test".to_owned()), + command: "bun".to_owned(), + args: vec!["test".to_owned(), VariableName::File.template_value()], + cwd: Some(TYPESCRIPT_BUN_PACKAGE_PATH_VARIABLE.template_value()), + ..TaskTemplate::default() + }); + task_templates.0.push(TaskTemplate { + label: format!("bun test {}", VariableName::Symbol.template_value(),), + command: "bun".to_owned(), + args: vec![ + "test".to_owned(), + "--test-name-pattern".to_owned(), + format!("\"{}\"", VariableName::Symbol.template_value()), + VariableName::File.template_value(), + ], + tags: vec![ + "ts-test".to_owned(), + "js-test".to_owned(), + "tsx-test".to_owned(), + ], + cwd: Some(TYPESCRIPT_BUN_PACKAGE_PATH_VARIABLE.template_value()), + ..TaskTemplate::default() + }); + } + + if self.node_package_path.is_some() { + task_templates.0.push(TaskTemplate { + label: format!("{} file test", "node test".to_owned()), + command: "node".to_owned(), + args: vec!["--test".to_owned(), VariableName::File.template_value()], + tags: vec![ + "ts-test".to_owned(), + 
"js-test".to_owned(), + "tsx-test".to_owned(), + ], + cwd: Some(TYPESCRIPT_NODE_PACKAGE_PATH_VARIABLE.template_value()), + ..TaskTemplate::default() + }); + task_templates.0.push(TaskTemplate { + label: format!("node test {}", VariableName::Symbol.template_value()), + command: "node".to_owned(), + args: vec![ + "--test".to_owned(), + "--test-name-pattern".to_owned(), + format!("\"{}\"", VariableName::Symbol.template_value()), + VariableName::File.template_value(), + ], + tags: vec![ + "ts-test".to_owned(), + "js-test".to_owned(), + "tsx-test".to_owned(), + ], + cwd: Some(TYPESCRIPT_NODE_PACKAGE_PATH_VARIABLE.template_value()), + ..TaskTemplate::default() + }); + } + let script_name_counts: HashMap<_, usize> = self.scripts .iter() @@ -493,6 +558,26 @@ impl ContextProvider for TypeScriptContextProvider { .to_string(), ); } + + if let Some(path) = package_json_data.bun_package_path { + vars.insert( + TYPESCRIPT_BUN_PACKAGE_PATH_VARIABLE, + path.parent() + .unwrap_or(Path::new("")) + .to_string_lossy() + .to_string(), + ); + } + + if let Some(path) = package_json_data.node_package_path { + vars.insert( + TYPESCRIPT_NODE_PACKAGE_PATH_VARIABLE, + path.parent() + .unwrap_or(Path::new("")) + .to_string_lossy() + .to_string(), + ); + } } } Ok(vars) @@ -692,16 +777,11 @@ impl LspAdapter for TypeScriptLspAdapter { } else { item.label.clone() }; - let filter_range = item - .filter_text - .as_deref() - .and_then(|filter| text.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..len); - Some(language::CodeLabel { + Some(language::CodeLabel::filtered( text, - runs: vec![(0..len, highlight_id)], - filter_range, - }) + item.filter_text.as_deref(), + vec![(0..len, highlight_id)], + )) } async fn initialization_options( @@ -1030,7 +1110,7 @@ mod tests { let text = r#" function a() { - // local variables are omitted + // local variables are included let a1 = 1; // all functions are included async function a2() {} @@ -1053,6 +1133,7 @@ mod tests { .collect::>(), &[ ("function a()", 0), + ("let a1", 1), ("async function a2()", 1), ("let b", 0), ("function getB()", 0), @@ -1061,6 +1142,223 @@ mod tests { ); } + #[gpui::test] + async fn test_outline_with_destructuring(cx: &mut TestAppContext) { + let language = crate::language( + "typescript", + tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(), + ); + + let text = r#" + // Top-level destructuring + const { a1, a2 } = a; + const [b1, b2] = b; + + // Defaults and rest + const [c1 = 1, , c2, ...rest1] = c; + const { d1, d2: e1, f1 = 2, g1: h1 = 3, ...rest2 } = d; + + function processData() { + // Nested object destructuring + const { c1, c2 } = c; + // Nested array destructuring + const [d1, d2, d3] = d; + // Destructuring with renaming + const { f1: g1 } = f; + // With defaults + const [x = 10, y] = xy; + } + + class DataHandler { + method() { + // Destructuring in class method + const { a1, a2 } = a; + const [b1, ...b2] = b; + } + } + "# + .unindent(); + + let buffer = cx.new(|cx| language::Buffer::local(text, cx).with_language(language, cx)); + let outline = buffer.read_with(cx, |buffer, _| buffer.snapshot().outline(None)); + assert_eq!( + outline + .items + .iter() + .map(|item| (item.text.as_str(), item.depth)) + .collect::>(), + &[ + ("const a1", 0), + ("const a2", 0), + ("const b1", 0), + ("const b2", 0), + ("const c1", 0), + ("const c2", 0), + ("const rest1", 0), + ("const d1", 0), + ("const e1", 0), + ("const h1", 0), + ("const rest2", 0), + ("function processData()", 0), + ("const c1", 1), + ("const c2", 1), + ("const d1", 1), + ("const d2", 1), 
+ ("const d3", 1), + ("const g1", 1), + ("const x", 1), + ("const y", 1), + ("class DataHandler", 0), + ("method()", 1), + ("const a1", 2), + ("const a2", 2), + ("const b1", 2), + ("const b2", 2), + ] + ); + } + + #[gpui::test] + async fn test_outline_with_object_properties(cx: &mut TestAppContext) { + let language = crate::language( + "typescript", + tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(), + ); + + let text = r#" + // Object with function properties + const o = { m() {}, async n() {}, g: function* () {}, h: () => {}, k: function () {} }; + + // Object with primitive properties + const p = { p1: 1, p2: "hello", p3: true }; + + // Nested objects + const q = { + r: { + // won't be included due to one-level depth limit + s: 1 + }, + t: 2 + }; + + function getData() { + const local = { x: 1, y: 2 }; + return local; + } + "# + .unindent(); + + let buffer = cx.new(|cx| language::Buffer::local(text, cx).with_language(language, cx)); + let outline = buffer.read_with(cx, |buffer, _| buffer.snapshot().outline(None)); + assert_eq!( + outline + .items + .iter() + .map(|item| (item.text.as_str(), item.depth)) + .collect::>(), + &[ + ("const o", 0), + ("m()", 1), + ("async n()", 1), + ("g", 1), + ("h", 1), + ("k", 1), + ("const p", 0), + ("p1", 1), + ("p2", 1), + ("p3", 1), + ("const q", 0), + ("r", 1), + ("s", 2), + ("t", 1), + ("function getData()", 0), + ("const local", 1), + ("x", 2), + ("y", 2), + ] + ); + } + + #[gpui::test] + async fn test_outline_with_computed_property_names(cx: &mut TestAppContext) { + let language = crate::language( + "typescript", + tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into(), + ); + + let text = r#" + // Symbols as object keys + const sym = Symbol("test"); + const obj1 = { + [sym]: 1, + [Symbol("inline")]: 2, + normalKey: 3 + }; + + // Enums as object keys + enum Color { Red, Blue, Green } + + const obj2 = { + [Color.Red]: "red value", + [Color.Blue]: "blue value", + regularProp: "normal" + }; + + // Mixed computed properties + const key = "dynamic"; + const obj3 = { + [key]: 1, + ["string" + "concat"]: 2, + [1 + 1]: 3, + static: 4 + }; + + // Nested objects with computed properties + const obj4 = { + [sym]: { + nested: 1 + }, + regular: { + [key]: 2 + } + }; + "# + .unindent(); + + let buffer = cx.new(|cx| language::Buffer::local(text, cx).with_language(language, cx)); + let outline = buffer.read_with(cx, |buffer, _| buffer.snapshot().outline(None)); + assert_eq!( + outline + .items + .iter() + .map(|item| (item.text.as_str(), item.depth)) + .collect::>(), + &[ + ("const sym", 0), + ("const obj1", 0), + ("[sym]", 1), + ("[Symbol(\"inline\")]", 1), + ("normalKey", 1), + ("enum Color", 0), + ("const obj2", 0), + ("[Color.Red]", 1), + ("[Color.Blue]", 1), + ("regularProp", 1), + ("const key", 0), + ("const obj3", 0), + ("[key]", 1), + ("[\"string\" + \"concat\"]", 1), + ("[1 + 1]", 1), + ("static", 1), + ("const obj4", 0), + ("[sym]", 1), + ("nested", 2), + ("regular", 1), + ("[key]", 2), + ] + ); + } + #[gpui::test] async fn test_generator_function_outline(cx: &mut TestAppContext) { let language = crate::language("javascript", tree_sitter_typescript::LANGUAGE_TSX.into()); @@ -1178,6 +1476,8 @@ mod tests { mocha_package_path: Some(Path::new(path!("/root/package.json")).into()), vitest_package_path: Some(Path::new(path!("/root/sub/package.json")).into()), jasmine_package_path: None, + bun_package_path: None, + node_package_path: None, scripts: [ ( Path::new(path!("/root/package.json")).into(), @@ -1231,6 +1531,7 @@ mod tests { ] ); } + #[test] fn 
test_escaping_name() { let cases = [ @@ -1264,4 +1565,110 @@ mod tests { assert_eq!(replace_test_name_parameters(input), expected); } } + + // The order of test runner tasks is based on inferred user preference: + // 1. Dedicated test runners (e.g., Jest, Vitest, Mocha, Jasmine) are prioritized. + // 2. Bun's built-in test runner (`bun test`) comes next. + // 3. Node.js's built-in test runner (`node --test`) is last. + // This hierarchy assumes that if a dedicated test framework is installed, it is the + // preferred testing mechanism. Between runtime-specific options, `bun test` is + // typically preferred over `node --test` when @types/bun is present. + #[gpui::test] + async fn test_task_ordering_with_multiple_test_runners( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + cx.update(|cx| { + settings::init(cx); + Project::init_settings(cx); + language_settings::init(cx); + }); + + // Test case with all test runners present + let package_json_all_runners = json!({ + "devDependencies": { + "@types/bun": "1.0.0", + "@types/node": "^20.0.0", + "jest": "29.0.0", + "mocha": "10.0.0", + "vitest": "1.0.0", + "jasmine": "5.0.0", + }, + "scripts": { + "test": "jest" + } + }) + .to_string(); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/root"), + json!({ + "package.json": package_json_all_runners, + "file.js": "", + }), + ) + .await; + + let provider = TypeScriptContextProvider::new(fs.clone()); + + let package_json_data = cx + .update(|cx| { + provider.combined_package_json_data( + fs.clone(), + path!("/root").as_ref(), + rel_path("file.js"), + cx, + ) + }) + .await + .unwrap(); + + assert!(package_json_data.jest_package_path.is_some()); + assert!(package_json_data.mocha_package_path.is_some()); + assert!(package_json_data.vitest_package_path.is_some()); + assert!(package_json_data.jasmine_package_path.is_some()); + assert!(package_json_data.bun_package_path.is_some()); + assert!(package_json_data.node_package_path.is_some()); + + let mut task_templates = TaskTemplates::default(); + package_json_data.fill_task_templates(&mut task_templates); + + let test_tasks: Vec<_> = task_templates + .0 + .iter() + .filter(|template| { + template.tags.contains(&"ts-test".to_owned()) + || template.tags.contains(&"js-test".to_owned()) + }) + .map(|template| &template.label) + .collect(); + + let node_test_index = test_tasks + .iter() + .position(|label| label.contains("node test")); + let jest_test_index = test_tasks.iter().position(|label| label.contains("jest")); + let bun_test_index = test_tasks + .iter() + .position(|label| label.contains("bun test")); + + assert!( + node_test_index.is_some(), + "Node test tasks should be present" + ); + assert!( + jest_test_index.is_some(), + "Jest test tasks should be present" + ); + assert!(bun_test_index.is_some(), "Bun test tasks should be present"); + + assert!( + jest_test_index.unwrap() < bun_test_index.unwrap(), + "Jest should come before Bun" + ); + assert!( + bun_test_index.unwrap() < node_test_index.unwrap(), + "Bun should come before Node" + ); + } } diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 84cbbae77d43c96e62578c444ee913055604e11a..8a85dfea07fe4f50cb271f65ec1bdeeaf2ea150c 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -218,27 +218,18 @@ "as" "async" "await" - "break" - "case" - "catch" "class" "const" - "continue" "debugger" "declare" "default" "delete" - "do" - "else" "enum" "export" 
"extends" - "finally" - "for" "from" "function" "get" - "if" "implements" "import" "in" @@ -257,20 +248,34 @@ "protected" "public" "readonly" - "return" "satisfies" "set" "static" - "switch" "target" - "throw" - "try" "type" "typeof" "using" "var" "void" - "while" "with" - "yield" ] @keyword + +[ + "break" + "case" + "catch" + "continue" + "do" + "else" + "finally" + "for" + "if" + "return" + "switch" + "throw" + "try" + "while" + "yield" +] @keyword.control + +(switch_default "default" @keyword.control) diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm index f98e36b72d049080eb98ffe2b69a67c6b852e4a7..5321e606c118a41df127c8aa37c7c2811dc8bd23 100644 --- a/crates/languages/src/typescript/injections.scm +++ b/crates/languages/src/typescript/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content (#set! injection.language "jsdoc")) diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index f4261b9697d376f517b717bc942387190e0b6dde..54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -34,18 +34,64 @@ (export_statement (lexical_declaration ["let" "const"] @context - ; Multiple names may be exported - @item is on the declarator to keep - ; ranges distinct. (variable_declarator - name: (_) @name) @item)) + name: (identifier) @name) @item)) +; Exported array destructuring +(export_statement + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ])))) + +; Exported object destructuring +(export_statement + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)])))) + +(program + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Top-level array destructuring (program (lexical_declaration ["let" "const"] @context - ; Multiple names may be defined - @item is on the declarator to keep - ; ranges distinct. 
(variable_declarator - name: (_) @name) @item)) + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ])))) + +; Top-level object destructuring +(program + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)])))) (class_declaration "class" @context @@ -56,21 +102,38 @@ "class" @context name: (_) @name) @item -(method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item +; Method definitions in classes (not in object literals) +(class_body + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) + +; Object literal methods +(variable_declarator + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition [ @@ -124,4 +187,44 @@ ) ) @item +; Object properties +(pair + key: [ + (property_identifier) @name + (string (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item + + +; Nested variables in function bodies +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (identifier) @name) @item)) + +; Nested array destructuring in functions +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern left: (identifier) @name @item) + (rest_pattern (identifier) @name @item) + ])))) + +; Nested object destructuring in functions +(statement_block + (lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (object_pattern + [(shorthand_property_identifier_pattern) @name @item + (pair_pattern value: (identifier) @name @item) + (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) + (rest_pattern (identifier) @name @item)])))) + (comment) @annotation diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 9124a64227f91aa256063f012c960e92afbd8b9e..8cbb9f307f6f4222e0e9a65fe2a6954f97fc7f21 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -12,7 +12,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{ResultExt, maybe, merge_json_value_into, rel_path::RelPath}; +use util::{ResultExt, maybe, merge_json_value_into}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -29,19 +29,19 @@ impl VtslsLspAdapter { const TYPESCRIPT_PACKAGE_NAME: &'static str = "typescript"; const TYPESCRIPT_TSDK_PATH: &'static str = "node_modules/typescript/lib"; + const TYPESCRIPT_YARN_TSDK_PATH: &'static str = ".yarn/sdks/typescript/lib"; pub fn new(node: NodeRuntime, fs: Arc) -> Self { VtslsLspAdapter { node, fs } } async fn tsdk_path(&self, adapter: &Arc) -> Option<&'static str> { - let is_yarn = adapter - 
.read_text_file(RelPath::unix(".yarn/sdks/typescript/lib/typescript.js").unwrap()) - .await - .is_ok(); + let yarn_sdk = adapter + .worktree_root_path() + .join(Self::TYPESCRIPT_YARN_TSDK_PATH); - let tsdk_path = if is_yarn { - ".yarn/sdks/typescript/lib" + let tsdk_path = if self.fs.is_dir(&yarn_sdk).await { + Self::TYPESCRIPT_YARN_TSDK_PATH } else { Self::TYPESCRIPT_TSDK_PATH }; @@ -201,16 +201,11 @@ impl LspAdapter for VtslsLspAdapter { } else { item.label.clone() }; - let filter_range = item - .filter_text - .as_deref() - .and_then(|filter| text.find(filter).map(|ix| ix..ix + filter.len())) - .unwrap_or(0..len); - Some(language::CodeLabel { + Some(language::CodeLabel::filtered( text, - runs: vec![(0..len, highlight_id)], - filter_range, - }) + item.filter_text.as_deref(), + vec![(0..len, highlight_id)], + )) } async fn workspace_configuration( diff --git a/crates/languages/src/yaml/injections.scm b/crates/languages/src/yaml/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..9117c713b98fdd2896b13e4949a77c6489b9ee36 --- /dev/null +++ b/crates/languages/src/yaml/injections.scm @@ -0,0 +1,3 @@ +((comment) @injection.content + (#set! injection.language "comment") +) diff --git a/crates/line_ending_selector/Cargo.toml b/crates/line_ending_selector/Cargo.toml index 7c5c8f6d8f3996771f832c28d5d71b857bb0b3b6..462404b150b4e9862662fc76a5b7170def19f404 100644 --- a/crates/line_ending_selector/Cargo.toml +++ b/crates/line_ending_selector/Cargo.toml @@ -21,4 +21,3 @@ project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/line_ending_selector/src/line_ending_indicator.rs b/crates/line_ending_selector/src/line_ending_indicator.rs new file mode 100644 index 0000000000000000000000000000000000000000..ee858d706b3a8152c868a5bd629c112a4d1b225f --- /dev/null +++ b/crates/line_ending_selector/src/line_ending_indicator.rs @@ -0,0 +1,68 @@ +use editor::Editor; +use gpui::{Entity, Subscription, WeakEntity}; +use language::LineEnding; +use ui::{Tooltip, prelude::*}; +use workspace::{StatusBarSettings, StatusItemView, item::ItemHandle, item::Settings}; + +use crate::{LineEndingSelector, Toggle}; + +#[derive(Default)] +pub struct LineEndingIndicator { + line_ending: Option, + active_editor: Option>, + _observe_active_editor: Option, +} + +impl LineEndingIndicator { + fn update(&mut self, editor: Entity, _: &mut Window, cx: &mut Context) { + self.line_ending = None; + self.active_editor = None; + + if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + let line_ending = buffer.read(cx).line_ending(); + self.line_ending = Some(line_ending); + self.active_editor = Some(editor.downgrade()); + } + + cx.notify(); + } +} + +impl Render for LineEndingIndicator { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + if !StatusBarSettings::get_global(cx).line_endings_button { + return div(); + } + + div().when_some(self.line_ending.as_ref(), |el, line_ending| { + el.child( + Button::new("change-line-ending", line_ending.label()) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + if let Some(editor) = this.active_editor.as_ref() { + LineEndingSelector::toggle(editor, window, cx); + } + })) + .tooltip(|_window, cx| Tooltip::for_action("Select Line Ending", &Toggle, cx)), + ) + }) + } +} + +impl StatusItemView for LineEndingIndicator { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + window: 
&mut Window, + cx: &mut Context, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.downcast::()) { + self._observe_active_editor = Some(cx.observe_in(&editor, window, Self::update)); + self.update(editor, window, cx); + } else { + self.line_ending = None; + self._observe_active_editor = None; + } + cx.notify(); + } +} diff --git a/crates/line_ending_selector/src/line_ending_selector.rs b/crates/line_ending_selector/src/line_ending_selector.rs index 7f75a1ebe3550595c8fa78643ef5446ab2fa3a44..504c327a349c97214e801f6bd375d61c7847f2be 100644 --- a/crates/line_ending_selector/src/line_ending_selector.rs +++ b/crates/line_ending_selector/src/line_ending_selector.rs @@ -1,6 +1,9 @@ +mod line_ending_indicator; + use editor::Editor; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, actions}; use language::{Buffer, LineEnding}; +pub use line_ending_indicator::LineEndingIndicator; use picker::{Picker, PickerDelegate}; use project::Project; use std::sync::Arc; @@ -9,7 +12,7 @@ use util::ResultExt; use workspace::ModalView; actions!( - line_ending, + line_ending_selector, [ /// Toggles the line ending selector modal. Toggle @@ -172,10 +175,7 @@ impl PickerDelegate for LineEndingSelectorDelegate { _: &mut Context>, ) -> Option { let line_ending = self.matches.get(ix)?; - let label = match line_ending { - LineEnding::Unix => "LF", - LineEnding::Windows => "CRLF", - }; + let label = line_ending.label(); let mut list_item = ListItem::new(ix) .inset(true) diff --git a/crates/livekit_api/Cargo.toml b/crates/livekit_api/Cargo.toml index 6835ec4d561546c30ded4bb3d1b8c8e027ccf139..421deee113e7d1e4967c268f58ccc132d0284b01 100644 --- a/crates/livekit_api/Cargo.toml +++ b/crates/livekit_api/Cargo.toml @@ -22,7 +22,6 @@ prost.workspace = true prost-types.workspace = true reqwest.workspace = true serde.workspace = true -workspace-hack.workspace = true [build-dependencies] prost-build.workspace = true diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index 2400092c1c154b8d6a4ee24f43c0556a26dc532e..a7766b5ba5b857e0ec46733efb1105c938f63719 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -35,7 +35,7 @@ log.workspace = true nanoid.workspace = true parking_lot.workspace = true postage.workspace = true -rodio = { workspace = true, features = ["wav_output", "recording"] } +rodio.workspace = true serde.workspace = true serde_urlencoded.workspace = true settings.workspace = true @@ -43,7 +43,6 @@ smallvec.workspace = true tokio-tungstenite.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } diff --git a/crates/lmstudio/Cargo.toml b/crates/lmstudio/Cargo.toml index da5e5c5e46172c92622ab2260935c20352f931b8..825507b9152bc099f23fe66ade235b8b11601875 100644 --- a/crates/lmstudio/Cargo.toml +++ b/crates/lmstudio/Cargo.toml @@ -22,4 +22,3 @@ http_client.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index bc1f8b341b76b1be3e23824033f057a3a00201b3..39a86547f29c90f507bbd908f3af3a2c1a0cdec8 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -31,7 +31,6 @@ 
schemars.workspace = true smol.workspace = true util.workspace = true release_channel.workspace = true -workspace-hack.workspace = true [dev-dependencies] async-pipe.workspace = true diff --git a/crates/markdown/Cargo.toml b/crates/markdown/Cargo.toml index 650338ef4f05485535313e408db64f0b7fe1188d..9e852d8074add0f835dafd6bcfb4245eaa52214c 100644 --- a/crates/markdown/Cargo.toml +++ b/crates/markdown/Cargo.toml @@ -31,7 +31,6 @@ sum_tree.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] assets.workspace = true diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 55646cdcf43617223665e9dc48f13c55f966d99d..c351ad8634be45f3c7b845eecdbc24e89d1fd190 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -32,7 +32,6 @@ settings.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true [dev-dependencies] diff --git a/crates/markdown_preview/src/markdown_elements.rs b/crates/markdown_preview/src/markdown_elements.rs index 827c11f0453817b00431a1f32db8c645aced4e86..b0a36a4cf29c386204f6fd1a347a839009e1c357 100644 --- a/crates/markdown_preview/src/markdown_elements.rs +++ b/crates/markdown_preview/src/markdown_elements.rs @@ -1,5 +1,5 @@ use gpui::{ - DefiniteLength, FontStyle, FontWeight, HighlightStyle, Hsla, SharedString, StrikethroughStyle, + DefiniteLength, FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle, UnderlineStyle, px, }; use language::HighlightId; @@ -104,25 +104,34 @@ pub enum HeadingLevel { #[derive(Debug)] pub struct ParsedMarkdownTable { pub source_range: Range, - pub header: ParsedMarkdownTableRow, + pub header: Vec, pub body: Vec, pub column_alignments: Vec, } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, Default)] #[cfg_attr(test, derive(PartialEq))] pub enum ParsedMarkdownTableAlignment { - /// Default text alignment. + #[default] None, Left, Center, Right, } +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub struct ParsedMarkdownTableColumn { + pub col_span: usize, + pub row_span: usize, + pub is_header: bool, + pub children: MarkdownParagraph, +} + #[derive(Debug)] #[cfg_attr(test, derive(PartialEq))] pub struct ParsedMarkdownTableRow { - pub children: Vec, + pub columns: Vec, } impl Default for ParsedMarkdownTableRow { @@ -134,12 +143,12 @@ impl Default for ParsedMarkdownTableRow { impl ParsedMarkdownTableRow { pub fn new() -> Self { Self { - children: Vec::new(), + columns: Vec::new(), } } - pub fn with_children(children: Vec) -> Self { - Self { children } + pub fn with_columns(columns: Vec) -> Self { + Self { columns } } } @@ -175,11 +184,7 @@ pub enum MarkdownHighlight { impl MarkdownHighlight { /// Converts this [`MarkdownHighlight`] to a [`HighlightStyle`]. 
- pub fn to_highlight_style( - &self, - theme: &theme::SyntaxTheme, - link_color: Hsla, - ) -> Option { + pub fn to_highlight_style(&self, theme: &theme::SyntaxTheme) -> Option { match self { MarkdownHighlight::Style(style) => { let mut highlight = HighlightStyle::default(); @@ -209,10 +214,8 @@ impl MarkdownHighlight { if style.link { highlight.underline = Some(UnderlineStyle { thickness: px(1.), - color: Some(link_color), ..Default::default() }); - highlight.color = Some(link_color); } Some(highlight) diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index b7eb73c60a049ca7adb03160d124904bab4cee89..28388923a75f14c601dcafecb2008570e309561f 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -9,7 +9,9 @@ use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink}; use language::LanguageRegistry; use markup5ever_rcdom::RcDom; use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; -use std::{cell::RefCell, collections::HashMap, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec}; +use std::{ + cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec, +}; pub async fn parse_markdown( markdown_input: &str, @@ -290,18 +292,16 @@ impl<'a> MarkdownParser<'a> { finder.kinds(&[linkify::LinkKind::Url]); let mut last_link_len = prev_len; for link in finder.links(t) { - let start = link.start(); - let end = link.end(); - let range = (prev_len + start)..(prev_len + end); + let start = prev_len + link.start(); + let end = prev_len + link.end(); + let range = start..end; link_ranges.push(range.clone()); link_urls.push(link.as_str().to_string()); // If there is a style before we match a link, we have to add this to the highlighted ranges - if style != MarkdownHighlightStyle::default() - && last_link_len < link.start() - { + if style != MarkdownHighlightStyle::default() && last_link_len < start { highlights.push(( - last_link_len..link.start(), + last_link_len..start, MarkdownHighlight::Style(style.clone()), )); } @@ -374,15 +374,11 @@ impl<'a> MarkdownParser<'a> { if !text.is_empty() { let parsed_regions = MarkdownParagraphChunk::Text(ParsedMarkdownText { source_range: source_range.clone(), - contents: text.into(), - highlights: highlights.clone(), - region_ranges: region_ranges.clone(), - regions: regions.clone(), + contents: mem::take(&mut text).into(), + highlights: mem::take(&mut highlights), + region_ranges: mem::take(&mut region_ranges), + regions: mem::take(&mut regions), }); - text = String::new(); - highlights = vec![]; - region_ranges = vec![]; - regions = vec![]; markdown_text_like.push(parsed_regions); } image = Image::identify( @@ -407,6 +403,9 @@ impl<'a> MarkdownParser<'a> { if let Some(mut image) = image.take() { if !text.is_empty() { image.set_alt_text(std::mem::take(&mut text).into()); + mem::take(&mut highlights); + mem::take(&mut region_ranges); + mem::take(&mut regions); } markdown_text_like.push(MarkdownParagraphChunk::Image(image)); } @@ -463,9 +462,9 @@ impl<'a> MarkdownParser<'a> { fn parse_table(&mut self, alignment: Vec) -> ParsedMarkdownTable { let (_event, source_range) = self.previous().unwrap(); let source_range = source_range.clone(); - let mut header = ParsedMarkdownTableRow::new(); + let mut header = vec![]; let mut body = vec![]; - let mut current_row = vec![]; + let mut row_columns = vec![]; let mut in_header = true; let column_alignments = 
alignment.iter().map(Self::convert_alignment).collect(); @@ -485,17 +484,21 @@ impl<'a> MarkdownParser<'a> { Event::Start(Tag::TableCell) => { self.cursor += 1; let cell_contents = self.parse_text(false, Some(source_range)); - current_row.push(cell_contents); + row_columns.push(ParsedMarkdownTableColumn { + col_span: 1, + row_span: 1, + is_header: in_header, + children: cell_contents, + }); } Event::End(TagEnd::TableHead) | Event::End(TagEnd::TableRow) => { self.cursor += 1; - let new_row = std::mem::take(&mut current_row); + let columns = std::mem::take(&mut row_columns); if in_header { - header.children = new_row; + header.push(ParsedMarkdownTableRow { columns: columns }); in_header = false; } else { - let row = ParsedMarkdownTableRow::with_children(new_row); - body.push(row); + body.push(ParsedMarkdownTableRow::with_columns(columns)); } } Event::End(TagEnd::Table) => { @@ -942,6 +945,70 @@ impl<'a> MarkdownParser<'a> { } } + fn parse_table_row( + &self, + source_range: Range, + node: &Rc, + ) -> Option { + let mut columns = Vec::new(); + + match &node.data { + markup5ever_rcdom::NodeData::Element { name, .. } => { + if local_name!("tr") != name.local { + return None; + } + + for node in node.children.borrow().iter() { + if let Some(column) = self.parse_table_column(source_range.clone(), node) { + columns.push(column); + } + } + } + _ => {} + } + + if columns.is_empty() { + None + } else { + Some(ParsedMarkdownTableRow { columns }) + } + } + + fn parse_table_column( + &self, + source_range: Range, + node: &Rc, + ) -> Option { + match &node.data { + markup5ever_rcdom::NodeData::Element { name, attrs, .. } => { + if !matches!(name.local, local_name!("th") | local_name!("td")) { + return None; + } + + let mut children = MarkdownParagraph::new(); + self.consume_paragraph(source_range, node, &mut children); + + Some(ParsedMarkdownTableColumn { + col_span: std::cmp::max( + Self::attr_value(attrs, local_name!("colspan")) + .and_then(|span| span.parse().ok()) + .unwrap_or(1), + 1, + ), + row_span: std::cmp::max( + Self::attr_value(attrs, local_name!("rowspan")) + .and_then(|span| span.parse().ok()) + .unwrap_or(1), + 1, + ), + is_header: matches!(name.local, local_name!("th")), + children, + }) + } + _ => None, + } + } + fn consume_children( &self, source_range: Range, @@ -1057,7 +1124,7 @@ impl<'a> MarkdownParser<'a> { node: &Rc, source_range: Range, ) -> Option { - let mut header_columns = Vec::new(); + let mut header_rows = Vec::new(); let mut body_rows = Vec::new(); // node should be a thead or tbody element @@ -1067,21 +1134,16 @@ impl<'a> MarkdownParser<'a> { if local_name!("thead") == name.local { // node should be a tr element for node in node.children.borrow().iter() { - let mut paragraph = MarkdownParagraph::new(); - self.consume_paragraph(source_range.clone(), node, &mut paragraph); - - for paragraph in paragraph.into_iter() { - header_columns.push(vec![paragraph]); + if let Some(row) = self.parse_table_row(source_range.clone(), node) { + header_rows.push(row); } } } else if local_name!("tbody") == name.local { // node should be a tr element for node in node.children.borrow().iter() { - let mut row = MarkdownParagraph::new(); - self.consume_paragraph(source_range.clone(), node, &mut row); - body_rows.push(ParsedMarkdownTableRow::with_children( - row.into_iter().map(|column| vec![column]).collect(), - )); + if let Some(row) = self.parse_table_row(source_range.clone(), node) { + body_rows.push(row); + } } } } @@ -1089,12 +1151,12 @@ impl<'a> MarkdownParser<'a> { } } - if 
!header_columns.is_empty() || !body_rows.is_empty() { + if !header_rows.is_empty() || !body_rows.is_empty() { Some(ParsedMarkdownTable { source_range, body: body_rows, column_alignments: Vec::default(), - header: ParsedMarkdownTableRow::with_children(header_columns), + header: header_rows, }) } else { None @@ -1275,17 +1337,40 @@ mod tests { panic!("Expected a paragraph"); }; assert_eq!( - paragraph[0], - MarkdownParagraphChunk::Image(Image { - source_range: 0..111, - link: Link::Web { - url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(), - }, - alt_text: Some("test".into()), - height: None, - width: None, - },) - ); + paragraph[0], + MarkdownParagraphChunk::Image(Image { + source_range: 0..111, + link: Link::Web { + url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(), + }, + alt_text: Some("test".into()), + height: None, + width: None, + },) + ); + } + + #[gpui::test] + async fn test_image_alt_text() { + let parsed = parse("[![Zed](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/zed-industries/zed/main/assets/badge/v0.json)](https://zed.dev)\n ").await; + + let paragraph = if let ParsedMarkdownElement::Paragraph(text) = &parsed.children[0] { + text + } else { + panic!("Expected a paragraph"); + }; + assert_eq!( + paragraph[0], + MarkdownParagraphChunk::Image(Image { + source_range: 0..142, + link: Link::Web { + url: "https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/zed-industries/zed/main/assets/badge/v0.json".to_string(), + }, + alt_text: Some("Zed".into()), + height: None, + width: None, + },) + ); } #[gpui::test] @@ -1567,10 +1652,19 @@ mod tests { ParsedMarkdown { children: vec![ParsedMarkdownElement::Table(table( 0..366, - row(vec![text("Id", 0..366), text("Name ", 0..366)]), + vec![row(vec![ + column(1, 1, true, text("Id", 0..366)), + column(1, 1, true, text("Name ", 0..366)) + ])], vec![ - row(vec![text("1", 0..366), text("Chris", 0..366)]), - row(vec![text("2", 0..366), text("Dennis", 0..366)]), + row(vec![ + column(1, 1, false, text("1", 0..366)), + column(1, 1, false, text("Chris", 0..366)) + ]), + row(vec![ + column(1, 1, false, text("2", 0..366)), + column(1, 1, false, text("Dennis", 0..366)) + ]), ], ))], }, @@ -1600,10 +1694,16 @@ mod tests { ParsedMarkdown { children: vec![ParsedMarkdownElement::Table(table( 0..240, - row(vec![]), + vec![], vec![ - row(vec![text("1", 0..240), text("Chris", 0..240)]), - row(vec![text("2", 0..240), text("Dennis", 0..240)]), + row(vec![ + column(1, 1, false, text("1", 0..240)), + column(1, 1, false, text("Chris", 0..240)) + ]), + row(vec![ + column(1, 1, false, text("2", 0..240)), + column(1, 1, false, text("Dennis", 0..240)) + ]), ], ))], }, @@ -1629,7 +1729,10 @@ mod tests { ParsedMarkdown { children: vec![ParsedMarkdownElement::Table(table( 0..150, - row(vec![text("Id", 0..150), text("Name", 0..150)]), + vec![row(vec![ + column(1, 1, true, text("Id", 0..150)), + column(1, 1, true, text("Name", 0..150)) + ])], vec![], ))], }, @@ -1811,7 +1914,10 @@ Some other content let expected_table = table( 0..48, - row(vec![text("Header 1", 1..11), text("Header 2", 12..22)]), + vec![row(vec![ + column(1, 1, true, text("Header 1", 1..11)), + column(1, 1, true, text("Header 2", 12..22)), + ])], vec![], ); @@ -1831,10 +1937,19 @@ Some other content let expected_table = table( 0..95, - row(vec![text("Header 1", 1..11), text("Header 2", 12..22)]), + vec![row(vec![ + column(1, 
1, true, text("Header 1", 1..11)), + column(1, 1, true, text("Header 2", 12..22)), + ])], vec![ - row(vec![text("Cell 1", 49..59), text("Cell 2", 60..70)]), - row(vec![text("Cell 3", 73..83), text("Cell 4", 84..94)]), + row(vec![ + column(1, 1, false, text("Cell 1", 49..59)), + column(1, 1, false, text("Cell 2", 60..70)), + ]), + row(vec![ + column(1, 1, false, text("Cell 3", 73..83)), + column(1, 1, false, text("Cell 4", 84..94)), + ]), ], ); @@ -2291,7 +2406,7 @@ fn main() { fn table( source_range: Range, - header: ParsedMarkdownTableRow, + header: Vec, body: Vec, ) -> ParsedMarkdownTable { ParsedMarkdownTable { @@ -2302,8 +2417,22 @@ fn main() { } } - fn row(children: Vec) -> ParsedMarkdownTableRow { - ParsedMarkdownTableRow { children } + fn row(columns: Vec) -> ParsedMarkdownTableRow { + ParsedMarkdownTableRow { columns } + } + + fn column( + col_span: usize, + row_span: usize, + is_header: bool, + children: MarkdownParagraph, + ) -> ParsedMarkdownTableColumn { + ParsedMarkdownTableColumn { + col_span, + row_span, + is_header, + children, + } } impl PartialEq for ParsedMarkdownTable { diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index d20ed40b7928186e2caf564be5ff66b0bd04f0d1..f62ff0874df8079f44868dfeaa1ad2fd0348e474 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -278,8 +278,12 @@ impl MarkdownPreviewView { this.parse_markdown_from_active_editor(true, window, cx); } EditorEvent::SelectionsChanged { .. } => { - let selection_range = editor - .update(cx, |editor, cx| editor.selections.last::(cx).range()); + let selection_range = editor.update(cx, |editor, cx| { + editor + .selections + .last::(&editor.display_snapshot(cx)) + .range() + }); this.selected_block = this.get_block_index_under_cursor(selection_range); this.list_state.scroll_to_reveal_item(this.selected_block); cx.notify(); diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index a873771e001f594149acc83ea46ce45608a9ed87..0abb12015af317702ff3afd853eab74a40817941 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -8,8 +8,8 @@ use fs::normalize_path; use gpui::{ AbsoluteLength, AnyElement, App, AppContext as _, ClipboardItem, Context, DefiniteLength, Div, Element, ElementId, Entity, HighlightStyle, Hsla, ImageSource, InteractiveText, IntoElement, - Keystroke, Length, Modifiers, ParentElement, Render, Resource, SharedString, Styled, - StyledText, TextStyle, WeakEntity, Window, div, img, rems, + Keystroke, Modifiers, ParentElement, Render, Resource, SharedString, Styled, StyledText, + TextStyle, WeakEntity, Window, div, img, rems, }; use settings::Settings; use std::{ @@ -22,7 +22,7 @@ use ui::{ ButtonCommon, Clickable, Color, FluentBuilder, IconButton, IconName, IconSize, InteractiveElement, Label, LabelCommon, LabelSize, LinkPreview, Pixels, Rems, StatefulInteractiveElement, StyledExt, StyledImage, ToggleState, Tooltip, VisibleOnHover, - h_flex, relative, tooltip_container, v_flex, + h_flex, tooltip_container, v_flex, }; use workspace::{OpenOptions, OpenVisible, Workspace}; @@ -51,7 +51,8 @@ pub struct RenderContext { buffer_text_style: TextStyle, text_style: TextStyle, border_color: Hsla, - element_background_color: Hsla, + title_bar_background_color: Hsla, + panel_background_color: Hsla, text_color: Hsla, link_color: Hsla, window_rem_size: 
Pixels, @@ -61,6 +62,7 @@ pub struct RenderContext { syntax_theme: Arc, indent: usize, checkbox_clicked_callback: Option, + is_last_child: bool, } impl RenderContext { @@ -86,7 +88,8 @@ impl RenderContext { text_style: window.text_style(), syntax_theme: theme.syntax().clone(), border_color: theme.colors().border, - element_background_color: theme.colors().element_background, + title_bar_background_color: theme.colors().title_bar_background, + panel_background_color: theme.colors().panel_background, text_color: theme.colors().text, link_color: theme.colors().text_accent, window_rem_size: window.rem_size(), @@ -94,6 +97,7 @@ impl RenderContext { code_block_background_color: theme.colors().surface_background, code_span_background_color: theme.colors().editor_document_highlight_read_background, checkbox_clicked_callback: None, + is_last_child: false, } } @@ -135,12 +139,25 @@ impl RenderContext { /// We give padding between "This is a block quote." /// and "And this is the next paragraph." fn with_common_p(&self, element: Div) -> Div { - if self.indent > 0 { + if self.indent > 0 && !self.is_last_child { element.pb(self.scaled_rems(0.75)) } else { element } } + + /// This is used to indicate whether the current element is the last child of its parent. + /// + /// That way we can avoid adding padding to the bottom of the last child. + fn with_last_child<R>(&mut self, is_last: bool, render: R) -> AnyElement + where + R: FnOnce(&mut Self) -> AnyElement, + { + self.is_last_child = is_last; + let element = render(self); + self.is_last_child = false; + element + } } pub fn render_parsed_markdown( @@ -452,128 +469,100 @@ impl gpui::RenderOnce for MarkdownCheckbox { } } -fn paragraph_len(paragraphs: &MarkdownParagraph) -> usize { - paragraphs - .iter() - .map(|paragraph| match paragraph { - MarkdownParagraphChunk::Text(text) => text.contents.len(), - // TODO: Scale column width based on image size - MarkdownParagraphChunk::Image(_) => 1, - }) - .sum() +fn calculate_table_columns_count(rows: &Vec<ParsedMarkdownTableRow>) -> usize { + let mut actual_column_count = 0; + for row in rows { + actual_column_count = actual_column_count.max( + row.columns + .iter() + .map(|column| column.col_span) + .sum::<usize>(), + ); + } + actual_column_count } fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -> AnyElement { - let mut max_lengths: Vec<usize> = vec![0; parsed.header.children.len()]; - - for (index, cell) in parsed.header.children.iter().enumerate() { - let length = paragraph_len(cell); - max_lengths[index] = length; - } + let actual_header_column_count = calculate_table_columns_count(&parsed.header); + let actual_body_column_count = calculate_table_columns_count(&parsed.body); + let max_column_count = std::cmp::max(actual_header_column_count, actual_body_column_count); - for row in &parsed.body { - for (index, cell) in row.children.iter().enumerate() { - let length = paragraph_len(cell); + let total_rows = parsed.header.len() + parsed.body.len(); - if index >= max_lengths.len() { - max_lengths.resize(index + 1, length); - } - - if length > max_lengths[index] { - max_lengths[index] = length; - } - } - } + // Track which grid cells are occupied by spanning cells + let mut grid_occupied = vec![vec![false; max_column_count]; total_rows]; - let total_max_length: usize = max_lengths.iter().sum(); - let max_column_widths: Vec<f32> = max_lengths - .iter() - .map(|&length| length as f32 / total_max_length as f32) - .collect(); + let mut cells = Vec::with_capacity(total_rows * max_column_count); - let header =
render_markdown_table_row( - &parsed.header, - &parsed.column_alignments, - &max_column_widths, - true, - cx, - ); + for (row_idx, row) in parsed.header.iter().chain(parsed.body.iter()).enumerate() { + let mut col_idx = 0; - let body: Vec = parsed - .body - .iter() - .map(|row| { - render_markdown_table_row( - row, - &parsed.column_alignments, - &max_column_widths, - false, - cx, - ) - }) - .collect(); + for (cell_idx, cell) in row.columns.iter().enumerate() { + // Skip columns occupied by row-spanning cells from previous rows + while col_idx < max_column_count && grid_occupied[row_idx][col_idx] { + col_idx += 1; + } - cx.with_common_p(v_flex()) - .w_full() - .child(header) - .children(body) - .into_any() -} + if col_idx >= max_column_count { + break; + } -fn render_markdown_table_row( - parsed: &ParsedMarkdownTableRow, - alignments: &Vec, - max_column_widths: &Vec, - is_header: bool, - cx: &mut RenderContext, -) -> AnyElement { - let mut items = Vec::with_capacity(parsed.children.len()); - let count = parsed.children.len(); + let alignment = parsed + .column_alignments + .get(cell_idx) + .copied() + .unwrap_or_else(|| { + if cell.is_header { + ParsedMarkdownTableAlignment::Center + } else { + ParsedMarkdownTableAlignment::None + } + }); - for (index, cell) in parsed.children.iter().enumerate() { - let alignment = alignments - .get(index) - .copied() - .unwrap_or(ParsedMarkdownTableAlignment::None); + let container = match alignment { + ParsedMarkdownTableAlignment::Left | ParsedMarkdownTableAlignment::None => div(), + ParsedMarkdownTableAlignment::Center => v_flex().items_center(), + ParsedMarkdownTableAlignment::Right => v_flex().items_end(), + }; - let contents = render_markdown_text(cell, cx); + let cell_element = container + .col_span(cell.col_span.min(max_column_count - col_idx) as u16) + .row_span(cell.row_span.min(total_rows - row_idx) as u16) + .children(render_markdown_text(&cell.children, cx)) + .px_2() + .py_1() + .border_1() + .size_full() + .border_color(cx.border_color) + .when(cell.is_header, |this| { + this.bg(cx.title_bar_background_color) + }) + .when(cell.row_span > 1, |this| this.justify_center()) + .when(row_idx % 2 == 1, |this| this.bg(cx.panel_background_color)); - let container = match alignment { - ParsedMarkdownTableAlignment::Left | ParsedMarkdownTableAlignment::None => div(), - ParsedMarkdownTableAlignment::Center => v_flex().items_center(), - ParsedMarkdownTableAlignment::Right => v_flex().items_end(), - }; + cells.push(cell_element); - let max_width = max_column_widths.get(index).unwrap_or(&0.0); - let mut cell = container - .w(Length::Definite(relative(*max_width))) - .h_full() - .children(contents) - .px_2() - .py_1() - .border_color(cx.border_color) - .border_l_1(); - - if count == index + 1 { - cell = cell.border_r_1(); - } + // Mark grid positions as occupied for row-spanning cells + for r in 0..cell.row_span { + for c in 0..cell.col_span { + if row_idx + r < total_rows && col_idx + c < max_column_count { + grid_occupied[row_idx + r][col_idx + c] = true; + } + } + } - if is_header { - cell = cell.bg(cx.element_background_color) + col_idx += cell.col_span; } - - items.push(cell); - } - - let mut row = h_flex().border_color(cx.border_color); - - if is_header { - row = row.border_y_1(); - } else { - row = row.border_b_1(); } - row.children(items).into_any_element() + cx.with_common_p(div()) + .grid() + .size_full() + .grid_cols(max_column_count as u16) + .border_1() + .border_color(cx.border_color) + .children(cells) + .into_any() } fn 
render_markdown_block_quote( @@ -585,7 +574,12 @@ fn render_markdown_block_quote( let children: Vec = parsed .children .iter() - .map(|child| render_markdown_block(child, cx)) + .enumerate() + .map(|(ix, child)| { + cx.with_last_child(ix + 1 == parsed.children.len(), |cx| { + render_markdown_block(child, cx) + }) + }) .collect(); cx.indent -= 1; @@ -672,7 +666,7 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) let highlights = gpui::combine_highlights( parsed.highlights.iter().filter_map(|(range, highlight)| { highlight - .to_highlight_style(&syntax_theme, link_color) + .to_highlight_style(&syntax_theme) .map(|style| (range.clone(), style)) }), parsed.regions.iter().zip(&parsed.region_ranges).filter_map( @@ -685,6 +679,14 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext) ..Default::default() }, )) + } else if region.link.is_some() { + Some(( + range.clone(), + HighlightStyle { + color: Some(link_color), + ..Default::default() + }, + )) } else { None } @@ -871,3 +873,143 @@ impl Render for InteractiveMarkdownElementTooltip { }) } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::markdown_elements::ParsedMarkdownTableColumn; + use crate::markdown_elements::ParsedMarkdownText; + + fn text(text: &str) -> MarkdownParagraphChunk { + MarkdownParagraphChunk::Text(ParsedMarkdownText { + source_range: 0..text.len(), + contents: SharedString::new(text), + highlights: Default::default(), + region_ranges: Default::default(), + regions: Default::default(), + }) + } + + fn column( + col_span: usize, + row_span: usize, + children: Vec, + ) -> ParsedMarkdownTableColumn { + ParsedMarkdownTableColumn { + col_span, + row_span, + is_header: false, + children, + } + } + + fn column_with_row_span( + col_span: usize, + row_span: usize, + children: Vec, + ) -> ParsedMarkdownTableColumn { + ParsedMarkdownTableColumn { + col_span, + row_span, + is_header: false, + children, + } + } + + #[test] + fn test_calculate_table_columns_count() { + assert_eq!(0, calculate_table_columns_count(&vec![])); + + assert_eq!( + 1, + calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ + column(1, 1, vec![text("column1")]) + ])]) + ); + + assert_eq!( + 2, + calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ + column(1, 1, vec![text("column1")]), + column(1, 1, vec![text("column2")]), + ])]) + ); + + assert_eq!( + 2, + calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ + column(2, 1, vec![text("column1")]) + ])]) + ); + + assert_eq!( + 3, + calculate_table_columns_count(&vec![ParsedMarkdownTableRow::with_columns(vec![ + column(1, 1, vec![text("column1")]), + column(2, 1, vec![text("column2")]), + ])]) + ); + + assert_eq!( + 2, + calculate_table_columns_count(&vec![ + ParsedMarkdownTableRow::with_columns(vec![ + column(1, 1, vec![text("column1")]), + column(1, 1, vec![text("column2")]), + ]), + ParsedMarkdownTableRow::with_columns(vec![column(1, 1, vec![text("column1")]),]) + ]) + ); + + assert_eq!( + 3, + calculate_table_columns_count(&vec![ + ParsedMarkdownTableRow::with_columns(vec![ + column(1, 1, vec![text("column1")]), + column(1, 1, vec![text("column2")]), + ]), + ParsedMarkdownTableRow::with_columns(vec![column(3, 3, vec![text("column1")]),]) + ]) + ); + } + + #[test] + fn test_row_span_support() { + assert_eq!( + 3, + calculate_table_columns_count(&vec![ + ParsedMarkdownTableRow::with_columns(vec![ + column_with_row_span(1, 2, vec![text("spans 2 rows")]), + column(1, 1, 
vec![text("column2")]), + column(1, 1, vec![text("column3")]), + ]), + ParsedMarkdownTableRow::with_columns(vec![ + // First column is covered by row span from above + column(1, 1, vec![text("column2 row2")]), + column(1, 1, vec![text("column3 row2")]), + ]) + ]) + ); + + assert_eq!( + 4, + calculate_table_columns_count(&vec![ + ParsedMarkdownTableRow::with_columns(vec![ + column_with_row_span(1, 3, vec![text("spans 3 rows")]), + column_with_row_span(2, 1, vec![text("spans 2 cols")]), + column(1, 1, vec![text("column4")]), + ]), + ParsedMarkdownTableRow::with_columns(vec![ + // First column covered by row span + column(1, 1, vec![text("column2")]), + column(1, 1, vec![text("column3")]), + column(1, 1, vec![text("column4")]), + ]), + ParsedMarkdownTableRow::with_columns(vec![ + // First column still covered by row span + column(3, 1, vec![text("spans 3 cols")]), + ]) + ]) + ); + } +} diff --git a/crates/media/Cargo.toml b/crates/media/Cargo.toml index be72aa0b08b215c49937e0ca6a992d1470bec4df..90a3d938333d66a258bca1bafec92f338c0374b6 100644 --- a/crates/media/Cargo.toml +++ b/crates/media/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-media" +name = "media" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "Bindings to macos media handling APIs for Zed" @@ -15,7 +15,6 @@ doctest = false [dependencies] anyhow.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true diff --git a/crates/menu/Cargo.toml b/crates/menu/Cargo.toml index bbe69903ce32af5cc8e48c052a1ff9d728d42754..fcb209df8892bde42f50b1f7e90f1097ebd10905 100644 --- a/crates/menu/Cargo.toml +++ b/crates/menu/Cargo.toml @@ -14,4 +14,3 @@ doctest = false [dependencies] gpui.workspace = true -workspace-hack.workspace = true diff --git a/crates/migrator/Cargo.toml b/crates/migrator/Cargo.toml index e60233a06d379060537e104279d95b3f20383f0d..edb48a00e2ca93232d9022b6fb778449d2ecc7e4 100644 --- a/crates/migrator/Cargo.toml +++ b/crates/migrator/Cargo.toml @@ -20,7 +20,6 @@ log.workspace = true streaming-iterator.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true -workspace-hack.workspace = true serde_json_lenient.workspace = true serde_json.workspace = true settings.workspace = true diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 1b8ede68b1eb8686325c896723d1fdc762d02b73..084a3348b54acd9d2fc6ba043e1fb1648bbb3f8b 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -103,7 +103,7 @@ pub(crate) mod m_2025_07_08 { pub(crate) mod m_2025_10_01 { mod settings; - pub(crate) use settings::SETTINGS_PATTERNS; + pub(crate) use settings::flatten_code_actions_formatters; } pub(crate) mod m_2025_10_02 { @@ -118,8 +118,14 @@ pub(crate) mod m_2025_10_03 { pub(crate) use settings::SETTINGS_PATTERNS; } -pub(crate) mod m_2025_10_10 { +pub(crate) mod m_2025_10_16 { + mod settings; + + pub(crate) use settings::restore_code_actions_on_format; +} + +pub(crate) mod m_2025_10_17 { mod settings; - pub(crate) use settings::remove_code_actions_on_format; + pub(crate) use settings::make_file_finder_include_ignored_an_enum; } diff --git a/crates/migrator/src/migrations/m_2025_10_01/settings.rs b/crates/migrator/src/migrations/m_2025_10_01/settings.rs index 4f1e7a642f2fff3702886f9f37929976b8ad4d76..84cf95049154b44048e92982fd00a11a3514bc16 100644 --- a/crates/migrator/src/migrations/m_2025_10_01/settings.rs +++ 
b/crates/migrator/src/migrations/m_2025_10_01/settings.rs @@ -1,109 +1,74 @@ -use std::ops::Range; -use tree_sitter::{Query, QueryMatch}; +use crate::patterns::migrate_language_setting; +use anyhow::Result; +use serde_json::Value; -use crate::MigrationPatterns; - -pub const SETTINGS_PATTERNS: MigrationPatterns = - &[(FORMATTER_PATTERN, migrate_code_action_formatters)]; - -const FORMATTER_PATTERN: &str = r#" - (object - (pair - key: (string (string_content) @formatter) (#any-of? @formatter "formatter" "format_on_save") - value: [ - (array - (object - (pair - key: (string (string_content) @code-actions-key) (#eq? @code-actions-key "code_actions") - value: (object - ((pair) @code-action ","?)* - ) - ) - ) @code-actions-obj - ) @formatter-array - (object - (pair - key: (string (string_content) @code-actions-key) (#eq? @code-actions-key "code_actions") - value: (object - ((pair) @code-action ","?)* - ) - ) - ) @code-actions-obj - ] - ) - ) -"#; - -pub fn migrate_code_action_formatters( - contents: &str, - mat: &QueryMatch, - query: &Query, -) -> Option<(Range, String)> { - let code_actions_obj_ix = query.capture_index_for_name("code-actions-obj")?; - let code_actions_obj_node = mat.nodes_for_capture_index(code_actions_obj_ix).next()?; - - let mut code_actions = vec![]; - - let code_actions_ix = query.capture_index_for_name("code-action")?; - for code_action_node in mat.nodes_for_capture_index(code_actions_ix) { - let Some(enabled) = code_action_node - .child_by_field_name("value") - .map(|n| n.kind() != "false") - else { - continue; - }; - if !enabled { - continue; - } - let Some(name) = code_action_node - .child_by_field_name("key") - .and_then(|n| n.child(1)) - .map(|n| &contents[n.byte_range()]) - else { - continue; +pub fn flatten_code_actions_formatters(value: &mut Value) -> Result<()> { + migrate_language_setting(value, |value, _path| { + let Some(obj) = value.as_object_mut() else { + return Ok(()); }; - code_actions.push(name); - } - - let indent = query - .capture_index_for_name("formatter") - .and_then(|ix| mat.nodes_for_capture_index(ix).next()) - .map(|node| node.start_position().column + 1) - .unwrap_or(2); + for key in ["formatter", "format_on_save"] { + let Some(formatter) = obj.get_mut(key) else { + continue; + }; + let new_formatter = match formatter { + Value::Array(arr) => { + let mut new_arr = Vec::new(); + let mut found_code_actions = false; + for item in arr { + let Some(obj) = item.as_object() else { + new_arr.push(item.clone()); + continue; + }; + let code_actions_obj = obj + .get("code_actions") + .and_then(|code_actions| code_actions.as_object()); + let Some(code_actions) = code_actions_obj else { + new_arr.push(item.clone()); + continue; + }; + found_code_actions = true; + for (name, enabled) in code_actions { + if !enabled.as_bool().unwrap_or(true) { + continue; + } + new_arr.push(serde_json::json!({ + "code_action": name + })); + } + } + if !found_code_actions { + continue; + } + Value::Array(new_arr) + } + Value::Object(obj) => { + let mut new_arr = Vec::new(); + let code_actions_obj = obj + .get("code_actions") + .and_then(|code_actions| code_actions.as_object()); + let Some(code_actions) = code_actions_obj else { + continue; + }; + for (name, enabled) in code_actions { + if !enabled.as_bool().unwrap_or(true) { + continue; + } + new_arr.push(serde_json::json!({ + "code_action": name + })); + } + if new_arr.len() == 1 { + new_arr.pop().unwrap() + } else { + Value::Array(new_arr) + } + } + _ => continue, + }; - let mut code_actions_str = code_actions - .into_iter() - 
.map(|code_action| format!(r#"{{ "code_action": "{}" }}"#, code_action)) - .collect::>() - .join(&format!(",\n{}", " ".repeat(indent))); - let is_array = query - .capture_index_for_name("formatter-array") - .map(|ix| mat.nodes_for_capture_index(ix).count() > 0) - .unwrap_or(false); - if !is_array { - code_actions_str.insert_str(0, &" ".repeat(indent)); - code_actions_str.insert_str(0, "[\n"); - code_actions_str.push('\n'); - code_actions_str.push_str(&" ".repeat(indent.saturating_sub(2))); - code_actions_str.push_str("]"); - } - let mut replace_range = code_actions_obj_node.byte_range(); - if is_array && code_actions_str.is_empty() { - let mut cursor = code_actions_obj_node.parent().unwrap().walk(); - cursor.goto_first_child(); - while cursor.node().id() != code_actions_obj_node.id() && cursor.goto_next_sibling() {} - while cursor.goto_next_sibling() - && (cursor.node().is_extra() - || cursor.node().is_missing() - || cursor.node().kind() == "comment") - {} - if cursor.node().kind() == "," { - // found comma, delete up to next node - while cursor.goto_next_sibling() - && (cursor.node().is_extra() || cursor.node().is_missing()) - {} - replace_range.end = cursor.node().range().start_byte; + obj.insert(key.to_string(), new_formatter); } - } - Some((replace_range, code_actions_str)) + return Ok(()); + }) } diff --git a/crates/migrator/src/migrations/m_2025_10_02/settings.rs b/crates/migrator/src/migrations/m_2025_10_02/settings.rs index 2434ae4d0e100ce58e4cfdd2eee1039188c1d7bc..cb0d63ca8570952818e74e021f5dd2edc2523786 100644 --- a/crates/migrator/src/migrations/m_2025_10_02/settings.rs +++ b/crates/migrator/src/migrations/m_2025_10_02/settings.rs @@ -1,19 +1,10 @@ use anyhow::Result; use serde_json::Value; +use crate::patterns::migrate_language_setting; + pub fn remove_formatters_on_save(value: &mut Value) -> Result<()> { - remove_formatters_on_save_inner(value, &[])?; - let languages = value - .as_object_mut() - .and_then(|obj| obj.get_mut("languages")) - .and_then(|languages| languages.as_object_mut()); - if let Some(languages) = languages { - for (language_name, language) in languages.iter_mut() { - let path = vec!["languages", language_name]; - remove_formatters_on_save_inner(language, &path)?; - } - } - Ok(()) + migrate_language_setting(value, remove_formatters_on_save_inner) } fn remove_formatters_on_save_inner(value: &mut Value, path: &[&str]) -> Result<()> { diff --git a/crates/migrator/src/migrations/m_2025_10_10/settings.rs b/crates/migrator/src/migrations/m_2025_10_10/settings.rs deleted file mode 100644 index 1d07be71a139b60e4b362d26c68b25922f04a233..0000000000000000000000000000000000000000 --- a/crates/migrator/src/migrations/m_2025_10_10/settings.rs +++ /dev/null @@ -1,70 +0,0 @@ -use anyhow::Result; -use serde_json::Value; - -pub fn remove_code_actions_on_format(value: &mut Value) -> Result<()> { - remove_code_actions_on_format_inner(value, &[])?; - let languages = value - .as_object_mut() - .and_then(|obj| obj.get_mut("languages")) - .and_then(|languages| languages.as_object_mut()); - if let Some(languages) = languages { - for (language_name, language) in languages.iter_mut() { - let path = vec!["languages", language_name]; - remove_code_actions_on_format_inner(language, &path)?; - } - } - Ok(()) -} - -fn remove_code_actions_on_format_inner(value: &mut Value, path: &[&str]) -> Result<()> { - let Some(obj) = value.as_object_mut() else { - return Ok(()); - }; - let Some(code_actions_on_format) = obj.get("code_actions_on_format").cloned() else { - return Ok(()); - }; - - fn 
fmt_path(path: &[&str], key: &str) -> String { - let mut path = path.to_vec(); - path.push(key); - path.join(".") - } - - anyhow::ensure!( - code_actions_on_format.is_object(), - r#"The `code_actions_on_format` setting is deprecated, but it is in an invalid state and cannot be migrated at {}. Please ensure the code_actions_on_format setting is a Map"#, - fmt_path(path, "code_actions_on_format"), - ); - - let code_actions_map = code_actions_on_format.as_object().unwrap(); - let mut code_actions = vec![]; - for (code_action, code_action_enabled) in code_actions_map { - if code_action_enabled.as_bool().map_or(false, |b| !b) { - continue; - } - code_actions.push(code_action.clone()); - } - - let mut formatter_array = vec![]; - if let Some(formatter) = obj.get("formatter") { - if let Some(array) = formatter.as_array() { - formatter_array = array.clone(); - } else { - formatter_array.insert(0, formatter.clone()); - } - }; - let found_code_actions = !code_actions.is_empty(); - formatter_array.splice( - 0..0, - code_actions - .into_iter() - .map(|code_action| serde_json::json!({"code_action": code_action})), - ); - - obj.remove("code_actions_on_format"); - if found_code_actions { - obj.insert("formatter".to_string(), Value::Array(formatter_array)); - } - - Ok(()) -} diff --git a/crates/migrator/src/migrations/m_2025_10_16/settings.rs b/crates/migrator/src/migrations/m_2025_10_16/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..3fa8c509b1f3910f48603a10a0fd0f448992c151 --- /dev/null +++ b/crates/migrator/src/migrations/m_2025_10_16/settings.rs @@ -0,0 +1,71 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::patterns::migrate_language_setting; + +pub fn restore_code_actions_on_format(value: &mut Value) -> Result<()> { + migrate_language_setting(value, restore_code_actions_on_format_inner) +} + +fn restore_code_actions_on_format_inner(value: &mut Value, path: &[&str]) -> Result<()> { + let Some(obj) = value.as_object_mut() else { + return Ok(()); + }; + let code_actions_on_format = obj + .get("code_actions_on_format") + .cloned() + .unwrap_or_else(|| Value::Object(Default::default())); + + fn fmt_path(path: &[&str], key: &str) -> String { + let mut path = path.to_vec(); + path.push(key); + path.join(".") + } + + let Some(mut code_actions_map) = code_actions_on_format.as_object().cloned() else { + anyhow::bail!( + r#"The `code_actions_on_format` is in an invalid state and cannot be migrated at {}. Please ensure the code_actions_on_format setting is a Map"#, + fmt_path(path, "code_actions_on_format"), + ); + }; + + let Some(formatter) = obj.get("formatter") else { + return Ok(()); + }; + let formatter_array = if let Some(array) = formatter.as_array() { + array.clone() + } else { + vec![formatter.clone()] + }; + if formatter_array.is_empty() { + return Ok(()); + } + let mut code_action_formatters = Vec::new(); + for formatter in formatter_array { + let Some(code_action) = formatter.get("code_action") else { + return Ok(()); + }; + let Some(code_action_name) = code_action.as_str() else { + anyhow::bail!( + r#"The `code_action` is in an invalid state and cannot be migrated at {}. 
Please ensure the code_action setting is a String"#, + fmt_path(path, "formatter"), + ); + }; + code_action_formatters.push(code_action_name.to_string()); + } + + code_actions_map.extend( + code_action_formatters + .into_iter() + .rev() + .map(|code_action| (code_action, Value::Bool(true))), + ); + + obj.insert("formatter".to_string(), Value::Array(vec![])); + obj.insert( + "code_actions_on_format".into(), + Value::Object(code_actions_map), + ); + + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2025_10_17/settings.rs b/crates/migrator/src/migrations/m_2025_10_17/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..519ec740346ed5cb954477b2ae4f0cff341a21b2 --- /dev/null +++ b/crates/migrator/src/migrations/m_2025_10_17/settings.rs @@ -0,0 +1,24 @@ +use anyhow::Result; +use serde_json::Value; + +pub fn make_file_finder_include_ignored_an_enum(value: &mut Value) -> Result<()> { + let Some(file_finder) = value.get_mut("file_finder") else { + return Ok(()); + }; + + let Some(file_finder_obj) = file_finder.as_object_mut() else { + anyhow::bail!("Expected file_finder to be an object"); + }; + + let Some(include_ignored) = file_finder_obj.get_mut("include_ignored") else { + return Ok(()); + }; + *include_ignored = match include_ignored { + Value::Bool(true) => Value::String("all".to_string()), + Value::Bool(false) => Value::String("indexed".to_string()), + Value::Null => Value::String("smart".to_string()), + Value::String(s) if s == "all" || s == "indexed" || s == "smart" => return Ok(()), + _ => anyhow::bail!("Expected include_ignored to be a boolean or null"), + }; + Ok(()) +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index aea11f98c460cc6c72120138ca1068be9ea60923..28021042825988ee70c04993ca71c5e9abe86bb4 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -74,6 +74,7 @@ fn run_migrations(text: &str, migrations: &[MigrationType]) -> Result = None; + let json_indent_size = settings::infer_json_indent_size(¤t_text); for migration in migrations.iter() { let migrated_text = match migration { MigrationType::TreeSitter(patterns, query) => migrate(¤t_text, patterns, query)?, @@ -92,7 +93,7 @@ fn run_migrations(text: &str, migrations: &[MigrationType]) -> Result Result> { migrations::m_2025_07_08::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_07_08, ), - MigrationType::TreeSitter( - migrations::m_2025_10_01::SETTINGS_PATTERNS, - &SETTINGS_QUERY_2025_10_01, - ), + MigrationType::Json(migrations::m_2025_10_01::flatten_code_actions_formatters), MigrationType::Json(migrations::m_2025_10_02::remove_formatters_on_save), MigrationType::TreeSitter( migrations::m_2025_10_03::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_10_03, ), - MigrationType::Json(migrations::m_2025_10_10::remove_code_actions_on_format), + MigrationType::Json(migrations::m_2025_10_16::restore_code_actions_on_format), + MigrationType::Json(migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum), ]; run_migrations(text, migrations) } @@ -328,10 +327,6 @@ define_query!( SETTINGS_QUERY_2025_07_08, migrations::m_2025_07_08::SETTINGS_PATTERNS ); -define_query!( - SETTINGS_QUERY_2025_10_01, - migrations::m_2025_10_01::SETTINGS_PATTERNS -); define_query!( SETTINGS_QUERY_2025_10_03, migrations::m_2025_10_03::SETTINGS_PATTERNS @@ -351,10 +346,11 @@ mod tests { use super::*; use unindent::Unindent as _; + #[track_caller] fn assert_migrated_correctly(migrated: Option, expected: Option<&str>) { match (&migrated, &expected) { (Some(migrated), 
Some(expected)) => { - pretty_assertions::assert_str_eq!(migrated, expected); + pretty_assertions::assert_str_eq!(expected, migrated); } _ => { pretty_assertions::assert_eq!(migrated.as_deref(), expected); @@ -367,18 +363,32 @@ mod tests { pretty_assertions::assert_eq!(migrated.as_deref(), output); } + #[track_caller] fn assert_migrate_settings(input: &str, output: Option<&str>) { let migrated = migrate_settings(input).unwrap(); - assert_migrated_correctly(migrated, output); + assert_migrated_correctly(migrated.clone(), output); + + // expect that rerunning the migration does not result in another migration + if let Some(migrated) = migrated { + let rerun = migrate_settings(&migrated).unwrap(); + assert_migrated_correctly(rerun, None); + } } + #[track_caller] fn assert_migrate_settings_with_migrations( migrations: &[MigrationType], input: &str, output: Option<&str>, ) { let migrated = run_migrations(input, migrations).unwrap(); - assert_migrated_correctly(migrated, output); + assert_migrated_correctly(migrated.clone(), output); + + // expect that rerunning the migration does not result in another migration + if let Some(migrated) = migrated { + let rerun = run_migrations(&migrated, migrations).unwrap(); + assert_migrated_correctly(rerun, None); + } } #[test] @@ -1341,26 +1351,33 @@ mod tests { #[test] fn test_flatten_code_action_formatters_basic_array() { - assert_migrate_settings( + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_10_01::flatten_code_actions_formatters, + )], &r#"{ - "formatter": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - } - ] - }"# + "formatter": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, + } + } + ] + }"# .unindent(), Some( &r#"{ - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" } - ] - }"# + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + } + ] + }"# .unindent(), ), ); @@ -1368,23 +1385,30 @@ mod tests { #[test] fn test_flatten_code_action_formatters_basic_object() { - assert_migrate_settings( + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_10_01::flatten_code_actions_formatters, + )], &r#"{ - "formatter": { - "code_actions": { - "included-1": true, - "excluded": false, - "included-2": true - } - } - }"# + "formatter": { + "code_actions": { + "included-1": true, + "excluded": false, + "included-2": true + } + } + }"# .unindent(), Some( &r#"{ - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" } - ] + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + } + ] }"# .unindent(), ), @@ -1394,47 +1418,57 @@ mod tests { #[test] fn test_flatten_code_action_formatters_array_with_multiple_action_blocks() { assert_migrate_settings( - r#"{ - "formatter": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - }, - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - }"#, + &r#"{ + "formatter": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, + } + }, + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + 
"code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + }"# + .unindent(), Some( - r#"{ - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" }, - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - }"#, + &r#"{ + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + }, + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + }"# + .unindent(), ), ); } @@ -1443,55 +1477,63 @@ mod tests { fn test_flatten_code_action_formatters_array_with_multiple_action_blocks_in_languages() { assert_migrate_settings( &r#"{ - "languages": { - "Rust": { - "formatter": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - }, - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - } + "languages": { + "Rust": { + "formatter": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, } - }"# + }, + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + } + } + }"# .unindent(), Some( &r#"{ - "languages": { - "Rust": { - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" }, - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - } - } - }"# + "languages": { + "Rust": { + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + }, + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + } + } + }"# .unindent(), ), ); @@ -1500,102 +1542,125 @@ mod tests { #[test] fn test_flatten_code_action_formatters_array_with_multiple_action_blocks_in_defaults_and_multiple_languages() { - assert_migrate_settings( + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_10_01::flatten_code_actions_formatters, + )], &r#"{ - "formatter": { - "code_actions": { - "default-1": true, - "default-2": true, - "default-3": true, - "default-4": true, - } - }, - "languages": { - "Rust": { - "formatter": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - }, - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - }, - "Python": { - "formatter": [ - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - } + "formatter": { + "code_actions": { + "default-1": true, + "default-2": true, + "default-3": true, + "default-4": true, + } + }, + "languages": { + "Rust": { + "formatter": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, } - }"# + }, + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + 
"excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + }, + "Python": { + "formatter": [ + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + } + } + }"# .unindent(), Some( &r#"{ - "formatter": [ - { "code_action": "default-1" }, - { "code_action": "default-2" }, - { "code_action": "default-3" }, - { "code_action": "default-4" } - ], - "languages": { - "Rust": { - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" }, - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - }, - "Python": { - "formatter": [ - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - } - } - }"# + "formatter": [ + { + "code_action": "default-1" + }, + { + "code_action": "default-2" + }, + { + "code_action": "default-3" + }, + { + "code_action": "default-4" + } + ], + "languages": { + "Rust": { + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + }, + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + }, + "Python": { + "formatter": [ + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + } + } + }"# .unindent(), ), ); @@ -1604,153 +1669,185 @@ mod tests { #[test] fn test_flatten_code_action_formatters_array_with_format_on_save_and_multiple_languages() { assert_migrate_settings_with_migrations( - &[MigrationType::TreeSitter( - migrations::m_2025_10_01::SETTINGS_PATTERNS, - &SETTINGS_QUERY_2025_10_01, + &[MigrationType::Json( + migrations::m_2025_10_01::flatten_code_actions_formatters, )], &r#"{ - "formatter": { - "code_actions": { - "default-1": true, - "default-2": true, - "default-3": true, - "default-4": true, - } - }, - "format_on_save": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - }, - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ], - "languages": { - "Rust": { - "format_on_save": "prettier", - "formatter": [ - { - "code_actions": { - "included-1": true, - "included-2": true, - "excluded": false, - } - }, - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - }, - "Python": { - "format_on_save": { - "code_actions": { - "on-save-1": true, - "on-save-2": true, - } - }, - "formatter": [ - { - "language_server": "ruff" - }, - { - "code_actions": { - "excluded": false, - "excluded-2": false, - } - } - // some comment - , - { - "code_actions": { - "excluded": false, - "included-3": true, - "included-4": true, - } - }, - ] - } + "formatter": { + "code_actions": { + "default-1": true, + "default-2": true, + "default-3": true, + "default-4": true, + } + }, + "format_on_save": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, + } + }, + { + "language_server": "ruff" + }, + 
{ + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ], + "languages": { + "Rust": { + "format_on_save": "prettier", + "formatter": [ + { + "code_actions": { + "included-1": true, + "included-2": true, + "excluded": false, } - }"# + }, + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + }, + "Python": { + "format_on_save": { + "code_actions": { + "on-save-1": true, + "on-save-2": true, + } + }, + "formatter": [ + { + "language_server": "ruff" + }, + { + "code_actions": { + "excluded": false, + "excluded-2": false, + } + } + // some comment + , + { + "code_actions": { + "excluded": false, + "included-3": true, + "included-4": true, + } + }, + ] + } + } + }"# .unindent(), Some( - &r#"{ - "formatter": [ - { "code_action": "default-1" }, - { "code_action": "default-2" }, - { "code_action": "default-3" }, - { "code_action": "default-4" } - ], - "format_on_save": [ - { "code_action": "included-1" }, - { "code_action": "included-2" }, - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ], - "languages": { - "Rust": { - "format_on_save": "prettier", - "formatter": [ - { "code_action": "included-1" }, - { "code_action": "included-2" }, - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - }, - "Python": { - "format_on_save": [ - { "code_action": "on-save-1" }, - { "code_action": "on-save-2" } - ], - "formatter": [ - { - "language_server": "ruff" - }, - { "code_action": "included-3" }, - { "code_action": "included-4" }, - ] - } - } - }"# + &r#" + { + "formatter": [ + { + "code_action": "default-1" + }, + { + "code_action": "default-2" + }, + { + "code_action": "default-3" + }, + { + "code_action": "default-4" + } + ], + "format_on_save": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + }, + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ], + "languages": { + "Rust": { + "format_on_save": "prettier", + "formatter": [ + { + "code_action": "included-1" + }, + { + "code_action": "included-2" + }, + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + }, + "Python": { + "format_on_save": [ + { + "code_action": "on-save-1" + }, + { + "code_action": "on-save-2" + } + ], + "formatter": [ + { + "language_server": "ruff" + }, + { + "code_action": "included-3" + }, + { + "code_action": "included-4" + } + ] + } + } + }"# .unindent(), ), ); @@ -1916,297 +2013,179 @@ mod tests { } #[test] - fn test_code_actions_on_format_migration_basic() { + fn test_restore_code_actions_on_format() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_16::restore_code_actions_on_format, )], &r#"{ - "code_actions_on_format": { - "source.organizeImports": true, - "source.fixAll": true + "formatter": { + "code_action": "foo" } }"# .unindent(), Some( &r#"{ - "formatter": [ - { - "code_action": "source.organizeImports" - }, - { - "code_action": "source.fixAll" - } - ] - } - "# + "code_actions_on_format": { + "foo": true + }, + "formatter": [] + 
}"# .unindent(), ), ); - } - #[test] - fn test_code_actions_on_format_migration_filters_false_values() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_16::restore_code_actions_on_format, )], &r#"{ - "code_actions_on_format": { - "a": true, - "b": false, - "c": true - } + "formatter": [ + { "code_action": "foo" }, + "auto" + ] }"# .unindent(), - Some( - &r#"{ - "formatter": [ - { - "code_action": "a" - }, - { - "code_action": "c" - } - ] - } - "# - .unindent(), - ), + None, ); - } - #[test] - fn test_code_actions_on_format_migration_with_existing_formatter_object() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_16::restore_code_actions_on_format, )], &r#"{ - "formatter": "prettier", - "code_actions_on_format": { - "source.organizeImports": true - } + "formatter": { + "code_action": "foo" + }, + "code_actions_on_format": { + "bar": true, + "baz": false + } }"# .unindent(), Some( &r#"{ - "formatter": [ - { - "code_action": "source.organizeImports" - }, - "prettier" - ] + "formatter": [], + "code_actions_on_format": { + "foo": true, + "bar": true, + "baz": false + } }"# .unindent(), ), ); - } - #[test] - fn test_code_actions_on_format_migration_with_existing_formatter_array() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_16::restore_code_actions_on_format, )], &r#"{ - "formatter": ["prettier", {"language_server": "eslint"}], - "code_actions_on_format": { - "source.organizeImports": true, - "source.fixAll": true - } + "formatter": [ + { "code_action": "foo" }, + { "code_action": "qux" }, + ], + "code_actions_on_format": { + "bar": true, + "baz": false + } }"# .unindent(), Some( &r#"{ - "formatter": [ - { - "code_action": "source.organizeImports" - }, - { - "code_action": "source.fixAll" - }, - "prettier", - { - "language_server": "eslint" + "formatter": [], + "code_actions_on_format": { + "foo": true, + "qux": true, + "bar": true, + "baz": false } - ] }"# .unindent(), ), ); - } - #[test] - fn test_code_actions_on_format_migration_in_languages() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_16::restore_code_actions_on_format, )], &r#"{ - "languages": { - "JavaScript": { - "code_actions_on_format": { - "source.fixAll.eslint": true - } - }, - "Go": { - "code_actions_on_format": { - "source.organizeImports": true - } - } + "formatter": [], + "code_actions_on_format": { + "bar": true, + "baz": false } }"# .unindent(), - Some( - &r#"{ - "languages": { - "JavaScript": { - "formatter": [ - { - "code_action": "source.fixAll.eslint" - } - ] - }, - "Go": { - "formatter": [ - { - "code_action": "source.organizeImports" - } - ] - } - } - }"# - .unindent(), - ), + None, ); } #[test] - fn test_code_actions_on_format_migration_in_languages_with_existing_formatter() { + fn test_make_file_finder_include_ignored_an_enum() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], &r#"{ - 
"languages": { - "JavaScript": { - "formatter": "prettier", - "code_actions_on_format": { - "source.fixAll.eslint": true, - "source.organizeImports": false - } + "file_finder": { + "include_ignored": true } - } }"# .unindent(), Some( &r#"{ - "languages": { - "JavaScript": { - "formatter": [ - { - "code_action": "source.fixAll.eslint" - }, - "prettier" - ] + "file_finder": { + "include_ignored": "all" } - } }"# .unindent(), ), ); - } - #[test] - fn test_code_actions_on_format_migration_mixed_global_and_languages() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], &r#"{ - "formatter": "prettier", - "code_actions_on_format": { - "source.fixAll": true - }, - "languages": { - "Rust": { - "formatter": "rust-analyzer", - "code_actions_on_format": { - "source.organizeImports": true - } - }, - "Python": { - "code_actions_on_format": { - "source.organizeImports": true, - "source.fixAll": false - } + "file_finder": { + "include_ignored": false } - } }"# .unindent(), Some( &r#"{ - "formatter": [ - { - "code_action": "source.fixAll" - }, - "prettier" - ], - "languages": { - "Rust": { - "formatter": [ - { - "code_action": "source.organizeImports" - }, - "rust-analyzer" - ] - }, - "Python": { - "formatter": [ - { - "code_action": "source.organizeImports" - } - ] - } - } + "file_finder": { + "include_ignored": "indexed" + } }"# .unindent(), ), ); - } - #[test] - fn test_code_actions_on_format_no_migration_when_not_present() { assert_migrate_settings_with_migrations( &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, + migrations::m_2025_10_17::make_file_finder_include_ignored_an_enum, )], &r#"{ - "formatter": ["prettier"] - }"# - .unindent(), - None, - ); - } - - #[test] - fn test_code_actions_on_format_migration_all_false_values() { - assert_migrate_settings_with_migrations( - &[MigrationType::Json( - migrations::m_2025_10_10::remove_code_actions_on_format, - )], - &r#"{ - "code_actions_on_format": { - "a": false, - "b": false - }, - "formatter": "prettier" + "file_finder": { + "include_ignored": null + } }"# .unindent(), Some( &r#"{ - "formatter": "prettier" + "file_finder": { + "include_ignored": "smart" + } }"# .unindent(), ), diff --git a/crates/migrator/src/patterns.rs b/crates/migrator/src/patterns.rs index 3848baf23ba0d324995f18e3a53921948291153b..4132c93d9367a8dee200200e03dcc46ee073e67f 100644 --- a/crates/migrator/src/patterns.rs +++ b/crates/migrator/src/patterns.rs @@ -10,4 +10,5 @@ pub(crate) use settings::{ SETTINGS_ASSISTANT_PATTERN, SETTINGS_ASSISTANT_TOOLS_PATTERN, SETTINGS_DUPLICATED_AGENT_PATTERN, SETTINGS_EDIT_PREDICTIONS_ASSISTANT_PATTERN, SETTINGS_LANGUAGES_PATTERN, SETTINGS_NESTED_KEY_VALUE_PATTERN, SETTINGS_ROOT_KEY_VALUE_PATTERN, + migrate_language_setting, }; diff --git a/crates/migrator/src/patterns/settings.rs b/crates/migrator/src/patterns/settings.rs index 72fd02b153a5cf6e3158790f1c5d09a9f643ebf9..a068cce23b013a3435188c03ceebe866883c4e6d 100644 --- a/crates/migrator/src/patterns/settings.rs +++ b/crates/migrator/src/patterns/settings.rs @@ -108,3 +108,24 @@ pub const SETTINGS_DUPLICATED_AGENT_PATTERN: &str = r#"(document (#eq? @agent1 "agent") (#eq? 
@agent2 "agent") )"#; + +/// Migrate language settings, +/// calls `migrate_fn` with the top level object as well as all language settings under the "languages" key +/// Fails early if `migrate_fn` returns an error at any point +pub fn migrate_language_setting( + value: &mut serde_json::Value, + migrate_fn: fn(&mut serde_json::Value, path: &[&str]) -> anyhow::Result<()>, +) -> anyhow::Result<()> { + migrate_fn(value, &[])?; + let languages = value + .as_object_mut() + .and_then(|obj| obj.get_mut("languages")) + .and_then(|languages| languages.as_object_mut()); + if let Some(languages) = languages { + for (language_name, language) in languages.iter_mut() { + let path = vec!["languages", language_name]; + migrate_fn(language, &path)?; + } + } + Ok(()) +} diff --git a/crates/mistral/Cargo.toml b/crates/mistral/Cargo.toml index 95f44b4f959522617c41e61acc041c43f80d82fe..c4d475f014a035005e4749d7dd7904d0a045cc78 100644 --- a/crates/mistral/Cargo.toml +++ b/crates/mistral/Cargo.toml @@ -23,4 +23,3 @@ schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index d5a38f539824c4b17f0c654148362ca5f906c8ba..93747140c1960b70b9a9ddffe2a609e8a32a7dc7 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -43,7 +43,6 @@ text.workspace = true theme.workspace = true tree-sitter.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] buffer_diff = { workspace = true, features = ["test-support"] } diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index a2498cb02fb836c6a70af9407d2a4e520c9d3d3b..d5009172084d6d683f722a8ad2aa5b8b21ae0493 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -1,4 +1,4 @@ -use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToOffsetUtf16, ToPoint}; +use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint}; use language::{OffsetUtf16, Point, TextDimension}; use std::{ cmp::Ordering, @@ -185,9 +185,6 @@ impl ToOffset for Anchor { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize { self.summary(snapshot) } -} - -impl ToOffsetUtf16 for Anchor { fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { self.summary(snapshot) } @@ -197,6 +194,9 @@ impl ToPoint for Anchor { fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { self.summary(snapshot) } + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 { + self.summary(snapshot) + } } pub trait AnchorRangeExt { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index be01c4b6a1f9f67703f99bcd0b9b331574a6b360..0163a49c95eeea5372a61824d2754a233ec07740 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1,7 +1,11 @@ mod anchor; #[cfg(test)] mod multi_buffer_tests; +mod path_key; mod position; +mod transaction; + +use self::transaction::History; pub use anchor::{Anchor, AnchorRangeExt, Offset}; pub use position::{TypedOffset, TypedPoint, TypedRow}; @@ -13,7 +17,7 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; -use gpui::{App, AppContext as _, Context, Entity, EntityId, EventEmitter, Task}; +use gpui::{App, Context, Entity, EntityId, EventEmitter}; use itertools::Itertools; use language::{ AutoindentMode, Buffer, 
BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, @@ -24,6 +28,9 @@ use language::{ language_settings::{LanguageSettings, language_settings}, }; +#[cfg(any(test, feature = "test-support"))] +use gpui::AppContext as _; + use rope::DimensionPair; use smallvec::SmallVec; use smol::future::yield_now; @@ -40,7 +47,7 @@ use std::{ rc::Rc, str, sync::Arc, - time::{Duration, Instant}, + time::Duration, }; use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, Summary, TreeMap}; use text::{ @@ -49,9 +56,9 @@ use text::{ subscription::{Subscription, Topic}, }; use theme::SyntaxTheme; -use util::{post_inc, rel_path::RelPath}; +use util::post_inc; -const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize]; +pub use self::path_key::PathKey; #[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ExcerptId(u32); @@ -64,17 +71,22 @@ pub struct MultiBuffer { /// Use [`MultiBuffer::snapshot`] to get a up-to-date snapshot. snapshot: RefCell, /// Contains the state of the buffers being edited - buffers: RefCell>, - // only used by consumers using `set_excerpts_for_buffer` + buffers: HashMap, + /// Mapping from path keys to their excerpts. excerpts_by_path: BTreeMap>, + /// Mapping from excerpt IDs to their path key. paths_by_excerpt: HashMap, + /// Mapping from buffer IDs to their diff states diffs: HashMap, - // all_diff_hunks_expanded: bool, subscriptions: Topic, /// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`] singleton: bool, + /// The history of the multi-buffer. history: History, + /// The explicit title of the multi-buffer. + /// If `None`, it will be derived from the underlying path or content. title: Option, + /// The writing capability of the multi-buffer. capability: Capability, buffer_changed_since_sync: Rc>, } @@ -99,7 +111,6 @@ pub enum Event { }, DiffHunksToggled, Edited { - singleton_buffer_edited: bool, edited_buffer: Option>, }, TransactionUndone { @@ -159,40 +170,6 @@ impl MultiBufferDiffHunk { } } -#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] -pub struct PathKey { - // Used by the derived PartialOrd & Ord - sort_prefix: Option, - path: Arc, -} - -impl PathKey { - pub fn with_sort_prefix(sort_prefix: u64, path: Arc) -> Self { - Self { - sort_prefix: Some(sort_prefix), - path, - } - } - - pub fn for_buffer(buffer: &Entity, cx: &App) -> Self { - if let Some(file) = buffer.read(cx).file() { - Self::with_sort_prefix(file.worktree_id(cx).to_proto(), file.path().clone()) - } else { - Self { - sort_prefix: None, - path: RelPath::unix(&buffer.entity_id().to_string()) - .unwrap() - .into_arc(), - } - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn path(&self) -> &Arc { - &self.path - } -} - pub type MultiBufferPoint = Point; type ExcerptOffset = TypedOffset; type ExcerptPoint = TypedPoint; @@ -214,44 +191,20 @@ impl std::ops::Add for MultiBufferRow { } } -#[derive(Clone)] -struct History { - next_transaction_id: TransactionId, - undo_stack: Vec, - redo_stack: Vec, - transaction_depth: usize, - group_interval: Duration, -} - -#[derive(Clone)] -struct Transaction { - id: TransactionId, - buffer_transactions: HashMap, - first_edit_at: Instant, - last_edit_at: Instant, - suppress_grouping: bool, -} - pub trait ToOffset: 'static + fmt::Debug { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize; -} - -pub trait ToOffsetUtf16: 'static + fmt::Debug { fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16; } pub trait ToPoint: 'static + fmt::Debug { fn 
to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; -} - -pub trait ToPointUtf16: 'static + fmt::Debug { fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16; } struct BufferState { buffer: Entity, - last_version: clock::Global, - last_non_text_state_update_count: usize, + last_version: RefCell, + last_non_text_state_update_count: Cell, excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -282,19 +235,20 @@ impl DiffState { /// The contents of a [`MultiBuffer`] at a single point in time. #[derive(Clone, Default)] pub struct MultiBufferSnapshot { - singleton: bool, excerpts: SumTree, - excerpt_ids: SumTree, diffs: TreeMap, diff_transforms: SumTree, - replaced_excerpts: TreeMap, - trailing_excerpt_update_count: usize, - all_diff_hunks_expanded: bool, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, has_deleted_file: bool, has_conflict: bool, + /// immutable fields + singleton: bool, + excerpt_ids: SumTree, + replaced_excerpts: TreeMap, + trailing_excerpt_update_count: usize, + all_diff_hunks_expanded: bool, show_headers: bool, } @@ -551,7 +505,7 @@ struct MultiBufferRegion<'a, D: TextDimension> { struct ExcerptChunks<'a> { excerpt_id: ExcerptId, content_chunks: BufferChunks<'a>, - footer_height: usize, + has_footer: bool, } #[derive(Debug)] @@ -613,56 +567,57 @@ impl IndentGuide { impl MultiBuffer { pub fn new(capability: Capability) -> Self { - Self { - snapshot: RefCell::new(MultiBufferSnapshot { + Self::new_( + capability, + MultiBufferSnapshot { show_headers: true, ..MultiBufferSnapshot::default() - }), - buffers: RefCell::default(), - diffs: HashMap::default(), - subscriptions: Topic::default(), - singleton: false, - capability, - title: None, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), - buffer_changed_since_sync: Default::default(), - history: History { - next_transaction_id: clock::Lamport::default(), - undo_stack: Vec::new(), - redo_stack: Vec::new(), - transaction_depth: 0, - group_interval: Duration::from_millis(300), }, - } + ) } pub fn without_headers(capability: Capability) -> Self { + Self::new_(capability, Default::default()) + } + + pub fn singleton(buffer: Entity, cx: &mut Context) -> Self { + let mut this = Self::new_( + buffer.read(cx).capability(), + MultiBufferSnapshot { + singleton: true, + ..MultiBufferSnapshot::default() + }, + ); + this.singleton = true; + this.push_excerpts( + buffer, + [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + cx, + ); + this + } + + #[inline] + pub fn new_(capability: Capability, snapshot: MultiBufferSnapshot) -> Self { Self { - snapshot: Default::default(), + snapshot: RefCell::new(snapshot), buffers: Default::default(), - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), diffs: HashMap::default(), - subscriptions: Default::default(), + subscriptions: Topic::default(), singleton: false, capability, + title: None, + excerpts_by_path: Default::default(), + paths_by_excerpt: Default::default(), buffer_changed_since_sync: Default::default(), - history: History { - next_transaction_id: Default::default(), - undo_stack: Default::default(), - redo_stack: Default::default(), - transaction_depth: 0, - group_interval: Duration::from_millis(300), - }, - title: Default::default(), + history: History::default(), } } pub fn clone(&self, new_cx: &mut Context) -> Self { let mut buffers = HashMap::default(); let buffer_changed_since_sync = Rc::new(Cell::new(false)); - for (buffer_id, buffer_state) in 
self.buffers.borrow().iter() { + for (buffer_id, buffer_state) in self.buffers.iter() { buffer_state.buffer.update(new_cx, |buffer, _| { buffer.record_changes(Rc::downgrade(&buffer_changed_since_sync)); }); @@ -671,7 +626,9 @@ impl MultiBuffer { BufferState { buffer: buffer_state.buffer.clone(), last_version: buffer_state.last_version.clone(), - last_non_text_state_update_count: buffer_state.last_non_text_state_update_count, + last_non_text_state_update_count: buffer_state + .last_non_text_state_update_count + .clone(), excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), @@ -686,7 +643,7 @@ impl MultiBuffer { } Self { snapshot: RefCell::new(self.snapshot.borrow().clone()), - buffers: RefCell::new(buffers), + buffers: buffers, excerpts_by_path: Default::default(), paths_by_excerpt: Default::default(), diffs: diff_bases, @@ -699,6 +656,10 @@ impl MultiBuffer { } } + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.history.set_group_interval(group_interval); + } + pub fn with_title(mut self, title: String) -> Self { self.title = Some(title); self @@ -708,18 +669,6 @@ impl MultiBuffer { self.capability == Capability::ReadOnly } - pub fn singleton(buffer: Entity, cx: &mut Context) -> Self { - let mut this = Self::new(buffer.read(cx).capability()); - this.singleton = true; - this.push_excerpts( - buffer, - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], - cx, - ); - this.snapshot.borrow_mut().singleton = true; - this - } - /// Returns an up-to-date snapshot of the MultiBuffer. pub fn snapshot(&self, cx: &App) -> MultiBufferSnapshot { self.sync(cx); @@ -733,15 +682,7 @@ impl MultiBuffer { pub fn as_singleton(&self) -> Option> { if self.singleton { - Some( - self.buffers - .borrow() - .values() - .next() - .unwrap() - .buffer - .clone(), - ) + Some(self.buffers.values().next().unwrap().buffer.clone()) } else { None } @@ -774,20 +715,11 @@ impl MultiBuffer { } pub fn is_empty(&self) -> bool { - self.buffers.borrow().is_empty() - } - - pub fn symbols_containing( - &self, - offset: T, - theme: Option<&SyntaxTheme>, - cx: &App, - ) -> Option<(BufferId, Vec>)> { - self.read(cx).symbols_containing(offset, theme) + self.buffers.is_empty() } pub fn edit( - &self, + &mut self, edits: I, autoindent_mode: Option, cx: &mut Context, @@ -796,11 +728,15 @@ impl MultiBuffer { S: ToOffset, T: Into>, { - let snapshot = self.read(cx); + if self.read_only() || self.buffers.is_empty() { + return; + } + self.sync_mut(cx); let edits = edits .into_iter() .map(|(range, new_text)| { - let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + let mut range = range.start.to_offset(self.snapshot.get_mut()) + ..range.end.to_offset(self.snapshot.get_mut()); if range.start > range.end { mem::swap(&mut range.start, &mut range.end); } @@ -808,20 +744,15 @@ impl MultiBuffer { }) .collect::>(); - return edit_internal(self, snapshot, edits, autoindent_mode, cx); + return edit_internal(self, edits, autoindent_mode, cx); // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR. 
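// Editor's aside — a minimal, self-contained sketch of the hoisting pattern the
// comment above refers to (hypothetical names, not the Zed API): keep the generic
// shim tiny and forward to a single non-generic function, so the large body is
// compiled once instead of once per instantiation of the generic parameters.
fn replace_ranges<I, S>(text: &mut String, edits: I)
where
    I: IntoIterator<Item = (std::ops::Range<usize>, S)>,
    S: Into<String>,
{
    // Generic part: only normalizes the input into concrete types.
    let edits: Vec<(std::ops::Range<usize>, String)> = edits
        .into_iter()
        .map(|(range, new_text)| (range, new_text.into()))
        .collect();
    return replace_ranges_internal(text, edits);

    // Non-generic part, hoisted out so it is compiled exactly once.
    fn replace_ranges_internal(
        text: &mut String,
        mut edits: Vec<(std::ops::Range<usize>, String)>,
    ) {
        // Apply back to front so earlier offsets remain valid.
        edits.sort_by_key(|(range, _)| std::cmp::Reverse(range.start));
        for (range, new_text) in edits {
            text.replace_range(range, &new_text);
        }
    }
}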
fn edit_internal( - this: &MultiBuffer, - snapshot: Ref, + this: &mut MultiBuffer, edits: Vec<(Range, Arc)>, mut autoindent_mode: Option, cx: &mut Context, ) { - if this.read_only() || this.buffers.borrow().is_empty() { - return; - } - let original_indent_columns = match &mut autoindent_mode { Some(AutoindentMode::Block { original_indent_columns, @@ -829,86 +760,84 @@ impl MultiBuffer { _ => Default::default(), }; - let (buffer_edits, edited_excerpt_ids) = - this.convert_edits_to_buffer_edits(edits, &snapshot, &original_indent_columns); - drop(snapshot); + let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits( + edits, + this.snapshot.get_mut(), + &original_indent_columns, + ); let mut buffer_ids = Vec::with_capacity(buffer_edits.len()); for (buffer_id, mut edits) in buffer_edits { buffer_ids.push(buffer_id); edits.sort_by_key(|edit| edit.range.start); - this.buffers.borrow()[&buffer_id] - .buffer - .update(cx, |buffer, cx| { - let mut edits = edits.into_iter().peekable(); - let mut insertions = Vec::new(); - let mut original_indent_columns = Vec::new(); - let mut deletions = Vec::new(); - let empty_str: Arc = Arc::default(); + this.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { + let mut edits = edits.into_iter().peekable(); + let mut insertions = Vec::new(); + let mut original_indent_columns = Vec::new(); + let mut deletions = Vec::new(); + let empty_str: Arc = Arc::default(); + while let Some(BufferEdit { + mut range, + mut new_text, + mut is_insertion, + original_indent_column, + excerpt_id, + }) = edits.next() + { while let Some(BufferEdit { - mut range, - mut new_text, - mut is_insertion, - original_indent_column, - excerpt_id, - }) = edits.next() + range: next_range, + is_insertion: next_is_insertion, + new_text: next_new_text, + excerpt_id: next_excerpt_id, + .. + }) = edits.peek() { - while let Some(BufferEdit { - range: next_range, - is_insertion: next_is_insertion, - new_text: next_new_text, - excerpt_id: next_excerpt_id, - .. - }) = edits.peek() - { - if range.end >= next_range.start { - range.end = cmp::max(next_range.end, range.end); - is_insertion |= *next_is_insertion; - if excerpt_id == *next_excerpt_id { - new_text = format!("{new_text}{next_new_text}").into(); - } - edits.next(); - } else { - break; + if range.end >= next_range.start { + range.end = cmp::max(next_range.end, range.end); + is_insertion |= *next_is_insertion; + if excerpt_id == *next_excerpt_id { + new_text = format!("{new_text}{next_new_text}").into(); } + edits.next(); + } else { + break; } + } - if is_insertion { - original_indent_columns.push(original_indent_column); - insertions.push(( - buffer.anchor_before(range.start) - ..buffer.anchor_before(range.end), - new_text.clone(), - )); - } else if !range.is_empty() { - deletions.push(( - buffer.anchor_before(range.start) - ..buffer.anchor_before(range.end), - empty_str.clone(), - )); - } + if is_insertion { + original_indent_columns.push(original_indent_column); + insertions.push(( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + new_text.clone(), + )); + } else if !range.is_empty() { + deletions.push(( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + empty_str.clone(), + )); } + } - let deletion_autoindent_mode = - if let Some(AutoindentMode::Block { .. }) = autoindent_mode { - Some(AutoindentMode::Block { - original_indent_columns: Default::default(), - }) - } else { - autoindent_mode.clone() - }; - let insertion_autoindent_mode = - if let Some(AutoindentMode::Block { .. 
}) = autoindent_mode { - Some(AutoindentMode::Block { - original_indent_columns, - }) - } else { - autoindent_mode.clone() - }; + let deletion_autoindent_mode = + if let Some(AutoindentMode::Block { .. }) = autoindent_mode { + Some(AutoindentMode::Block { + original_indent_columns: Default::default(), + }) + } else { + autoindent_mode.clone() + }; + let insertion_autoindent_mode = + if let Some(AutoindentMode::Block { .. }) = autoindent_mode { + Some(AutoindentMode::Block { + original_indent_columns, + }) + } else { + autoindent_mode.clone() + }; - buffer.edit(deletions, deletion_autoindent_mode, cx); - buffer.edit(insertions, insertion_autoindent_mode, cx); - }) + buffer.edit(deletions, deletion_autoindent_mode, cx); + buffer.edit(insertions, insertion_autoindent_mode, cx); + }) } cx.emit(Event::ExcerptsEdited { @@ -919,7 +848,6 @@ impl MultiBuffer { } fn convert_edits_to_buffer_edits( - &self, edits: Vec<(Range, Arc)>, snapshot: &MultiBufferSnapshot, original_indent_columns: &[Option], @@ -1039,17 +967,21 @@ impl MultiBuffer { (buffer_edits, edited_excerpt_ids) } - pub fn autoindent_ranges(&self, ranges: I, cx: &mut Context) + pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut Context) where I: IntoIterator>, S: ToOffset, { - let snapshot = self.read(cx); + if self.read_only() || self.buffers.is_empty() { + return; + } + self.sync_mut(cx); let empty = Arc::::from(""); let edits = ranges .into_iter() .map(|range| { - let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot); + let mut range = range.start.to_offset(self.snapshot.get_mut()) + ..range.end.to_offset(&self.snapshot.get_mut()); if range.start > range.end { mem::swap(&mut range.start, &mut range.end); } @@ -1057,21 +989,15 @@ impl MultiBuffer { }) .collect::>(); - return autoindent_ranges_internal(self, snapshot, edits, cx); + return autoindent_ranges_internal(self, edits, cx); fn autoindent_ranges_internal( - this: &MultiBuffer, - snapshot: Ref, + this: &mut MultiBuffer, edits: Vec<(Range, Arc)>, cx: &mut Context, ) { - if this.read_only() || this.buffers.borrow().is_empty() { - return; - } - let (buffer_edits, edited_excerpt_ids) = - this.convert_edits_to_buffer_edits(edits, &snapshot, &[]); - drop(snapshot); + MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]); let mut buffer_ids = Vec::new(); for (buffer_id, mut edits) in buffer_edits { @@ -1089,11 +1015,9 @@ impl MultiBuffer { ranges.push(edit.range); } - this.buffers.borrow()[&buffer_id] - .buffer - .update(cx, |buffer, cx| { - buffer.autoindent_ranges(ranges, cx); - }) + this.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { + buffer.autoindent_ranges(ranges, cx); + }) } cx.emit(Event::ExcerptsEdited { @@ -1103,9 +1027,9 @@ impl MultiBuffer { } } - // Inserts newlines at the given position to create an empty line, returning the start of the new line. - // You can also request the insertion of empty lines above and below the line starting at the returned point. - // Panics if the given position is invalid. + /// Inserts newlines at the given position to create an empty line, returning the start of the new line. + /// You can also request the insertion of empty lines above and below the line starting at the returned point. + /// Panics if the given position is invalid. 
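// Editor's aside — a tiny sketch (hypothetical type, not the Zed API) of the
// `&mut self` refactor running through these hunks: once a method takes
// `&mut self`, a `RefCell` field can be read through `RefCell::get_mut`, which
// is checked at compile time and cannot panic, instead of the runtime-checked
// `borrow_mut` used before.
struct Counter {
    value: std::cell::RefCell<u32>,
}

impl Counter {
    // Shared access still goes through the runtime-checked borrow.
    fn get(&self) -> u32 {
        *self.value.borrow()
    }

    // Exclusive access can skip the runtime check entirely.
    fn bump(&mut self) -> u32 {
        let value = self.value.get_mut();
        *value += 1;
        *value
    }
}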
pub fn insert_empty_line( &mut self, position: impl ToPoint, @@ -1123,187 +1047,6 @@ impl MultiBuffer { multibuffer_point + (empty_line_start - buffer_point) } - pub fn start_transaction(&mut self, cx: &mut Context) -> Option { - self.start_transaction_at(Instant::now(), cx) - } - - pub fn start_transaction_at( - &mut self, - now: Instant, - cx: &mut Context, - ) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); - } - - for BufferState { buffer, .. } in self.buffers.borrow().values() { - buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); - } - self.history.start_transaction(now) - } - - pub fn last_transaction_id(&self, cx: &App) -> Option { - if let Some(buffer) = self.as_singleton() { - buffer - .read(cx) - .peek_undo_stack() - .map(|history_entry| history_entry.transaction_id()) - } else { - let last_transaction = self.history.undo_stack.last()?; - Some(last_transaction.id) - } - } - - pub fn end_transaction(&mut self, cx: &mut Context) -> Option { - self.end_transaction_at(Instant::now(), cx) - } - - pub fn end_transaction_at( - &mut self, - now: Instant, - cx: &mut Context, - ) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); - } - - let mut buffer_transactions = HashMap::default(); - for BufferState { buffer, .. } in self.buffers.borrow().values() { - if let Some(transaction_id) = - buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) - { - buffer_transactions.insert(buffer.read(cx).remote_id(), transaction_id); - } - } - - if self.history.end_transaction(now, buffer_transactions) { - let transaction_id = self.history.group().unwrap(); - Some(transaction_id) - } else { - None - } - } - - pub fn edited_ranges_for_transaction( - &self, - transaction_id: TransactionId, - cx: &App, - ) -> Vec> - where - D: TextDimension + Ord + Sub, - { - let Some(transaction) = self.history.transaction(transaction_id) else { - return Vec::new(); - }; - - let mut ranges = Vec::new(); - let snapshot = self.read(cx); - let buffers = self.buffers.borrow(); - let mut cursor = snapshot.excerpts.cursor::(()); - - for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { - let Some(buffer_state) = buffers.get(buffer_id) else { - continue; - }; - - let buffer = buffer_state.buffer.read(cx); - for range in buffer.edited_ranges_for_transaction_id::(*buffer_transaction) { - for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *excerpt_id - { - let excerpt_buffer_start = excerpt.range.context.start.summary::(buffer); - let excerpt_buffer_end = excerpt.range.context.end.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_text_summary(&cursor.start().text); - - let mut start = excerpt_start; - start.add_assign(&(range.start - excerpt_buffer_start)); - let mut end = excerpt_start; - end.add_assign(&(range.end - excerpt_buffer_start)); - - ranges.push(start..end); - break; - } - } - } - } - } - - ranges.sort_by_key(|range| range.start); - ranges - } - - pub fn merge_transactions( - &mut self, - transaction: TransactionId, - destination: TransactionId, - cx: &mut Context, - ) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - 
buffer.merge_transactions(transaction, destination) - }); - } else if let Some(transaction) = self.history.forget(transaction) - && let Some(destination) = self.history.transaction_mut(destination) - { - for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { - if let Some(destination_buffer_transaction_id) = - destination.buffer_transactions.get(&buffer_id) - { - if let Some(state) = self.buffers.borrow().get(&buffer_id) { - state.buffer.update(cx, |buffer, _| { - buffer.merge_transactions( - buffer_transaction_id, - *destination_buffer_transaction_id, - ) - }); - } - } else { - destination - .buffer_transactions - .insert(buffer_id, buffer_transaction_id); - } - } - } - } - - pub fn finalize_last_transaction(&mut self, cx: &mut Context) { - self.history.finalize_last_transaction(); - for BufferState { buffer, .. } in self.buffers.borrow().values() { - buffer.update(cx, |buffer, _| { - buffer.finalize_last_transaction(); - }); - } - } - - pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T, cx: &Context) - where - T: IntoIterator, &'a language::Transaction)>, - { - self.history - .push_transaction(buffer_transactions, Instant::now(), cx); - self.history.finalize_last_transaction(); - } - - pub fn group_until_transaction( - &mut self, - transaction_id: TransactionId, - cx: &mut Context, - ) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - buffer.group_until_transaction(transaction_id) - }); - } else { - self.history.group_until(transaction_id); - } - } - pub fn set_active_selections( &self, selections: &[Selection], @@ -1346,7 +1089,7 @@ impl MultiBuffer { } } - for (buffer_id, buffer_state) in self.buffers.borrow().iter() { + for (buffer_id, buffer_state) in self.buffers.iter() { if !selections_by_buffer.contains_key(buffer_id) { buffer_state .buffer @@ -1355,128 +1098,36 @@ impl MultiBuffer { } for (buffer_id, mut selections) in selections_by_buffer { - self.buffers.borrow()[&buffer_id] - .buffer - .update(cx, |buffer, cx| { - selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer)); - let mut selections = selections.into_iter().peekable(); - let merged_selections = Arc::from_iter(iter::from_fn(|| { - let mut selection = selections.next()?; - while let Some(next_selection) = selections.peek() { - if selection.end.cmp(&next_selection.start, buffer).is_ge() { - let next_selection = selections.next().unwrap(); - if next_selection.end.cmp(&selection.end, buffer).is_ge() { - selection.end = next_selection.end; - } - } else { - break; + self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { + selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer)); + let mut selections = selections.into_iter().peekable(); + let merged_selections = Arc::from_iter(iter::from_fn(|| { + let mut selection = selections.next()?; + while let Some(next_selection) = selections.peek() { + if selection.end.cmp(&next_selection.start, buffer).is_ge() { + let next_selection = selections.next().unwrap(); + if next_selection.end.cmp(&selection.end, buffer).is_ge() { + selection.end = next_selection.end; } + } else { + break; } - Some(selection) - })); - buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); - }); + } + Some(selection) + })); + buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); + }); } } pub fn remove_active_selections(&self, cx: &mut Context) { - for buffer in self.buffers.borrow().values() { + for buffer in self.buffers.values() { buffer .buffer .update(cx, |buffer, cx| 
buffer.remove_active_selections(cx)); } } - pub fn undo(&mut self, cx: &mut Context) -> Option { - let mut transaction_id = None; - if let Some(buffer) = self.as_singleton() { - transaction_id = buffer.update(cx, |buffer, cx| buffer.undo(cx)); - } else { - while let Some(transaction) = self.history.pop_undo() { - let mut undone = false; - for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) { - undone |= buffer.update(cx, |buffer, cx| { - let undo_to = *buffer_transaction_id; - if let Some(entry) = buffer.peek_undo_stack() { - *buffer_transaction_id = entry.transaction_id(); - } - buffer.undo_to_transaction(undo_to, cx) - }); - } - } - - if undone { - transaction_id = Some(transaction.id); - break; - } - } - } - - if let Some(transaction_id) = transaction_id { - cx.emit(Event::TransactionUndone { transaction_id }); - } - - transaction_id - } - - pub fn redo(&mut self, cx: &mut Context) -> Option { - if let Some(buffer) = self.as_singleton() { - return buffer.update(cx, |buffer, cx| buffer.redo(cx)); - } - - while let Some(transaction) = self.history.pop_redo() { - let mut redone = false; - for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) { - redone |= buffer.update(cx, |buffer, cx| { - let redo_to = *buffer_transaction_id; - if let Some(entry) = buffer.peek_redo_stack() { - *buffer_transaction_id = entry.transaction_id(); - } - buffer.redo_to_transaction(redo_to, cx) - }); - } - } - - if redone { - return Some(transaction.id); - } - } - - None - } - - pub fn undo_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx)); - } else if let Some(transaction) = self.history.remove_from_undo(transaction_id) { - for (buffer_id, transaction_id) in &transaction.buffer_transactions { - if let Some(BufferState { buffer, .. 
}) = self.buffers.borrow().get(buffer_id) { - buffer.update(cx, |buffer, cx| { - buffer.undo_transaction(*transaction_id, cx) - }); - } - } - } - } - - pub fn forget_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { - if let Some(buffer) = self.as_singleton() { - buffer.update(cx, |buffer, _| { - buffer.forget_transaction(transaction_id); - }); - } else if let Some(transaction) = self.history.forget(transaction_id) { - for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { - if let Some(state) = self.buffers.borrow_mut().get_mut(&buffer_id) { - state.buffer.update(cx, |buffer, _| { - buffer.forget_transaction(buffer_transaction_id); - }); - } - } - } - } - pub fn push_excerpts( &mut self, buffer: Entity, @@ -1489,216 +1140,6 @@ impl MultiBuffer { self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx) } - pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.snapshot(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - Some(Anchor::in_buffer( - *excerpt_id, - excerpt.buffer_id, - excerpt.range.context.start, - )) - } - - pub fn excerpt_paths(&self) -> impl Iterator { - self.excerpts_by_path.keys() - } - - fn expand_excerpts_with_paths( - &mut self, - ids: impl IntoIterator, - line_count: u32, - direction: ExpandExcerptDirection, - cx: &mut Context, - ) { - let grouped = ids - .into_iter() - .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) - .into_iter() - .flat_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) - .collect::>(); - let snapshot = self.snapshot(cx); - - for (path, ids) in grouped.into_iter() { - let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { - continue; - }; - - let ids_to_expand = HashSet::from_iter(ids); - let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { - let excerpt = snapshot.excerpt(*excerpt_id)?; - - let mut context = excerpt.range.context.to_point(&excerpt.buffer); - if ids_to_expand.contains(excerpt_id) { - match direction { - ExpandExcerptDirection::Up => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - } - ExpandExcerptDirection::Down => { - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); - } - ExpandExcerptDirection::UpAndDown => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); - } - } - } - - Some(ExcerptRange { - context, - primary: excerpt.range.primary.to_point(&excerpt.buffer), - }) - }); - let mut merged_ranges: Vec> = Vec::new(); - for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() - && last_range.context.end >= range.context.start - { - last_range.context.end = range.context.end; - continue; - } - merged_ranges.push(range) - } - let Some(excerpt_id) = excerpt_ids.first() else { - continue; - }; - let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(*excerpt_id) else { - continue; - }; - - let Some(buffer) = self - .buffers - .borrow() - .get(buffer_id) - .map(|b| b.buffer.clone()) - else { - continue; - }; - - let buffer_snapshot = buffer.read(cx).snapshot(); - self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); - } - } - - /// Sets 
excerpts, returns `true` if at least one new excerpt was added. - pub fn set_excerpts_for_path( - &mut self, - path: PathKey, - buffer: Entity, - ranges: impl IntoIterator>, - context_line_count: u32, - cx: &mut Context, - ) -> (Vec>, bool) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); - - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - &buffer_snapshot, - new, - counts, - cx, - ) - } - - pub fn set_excerpt_ranges_for_path( - &mut self, - path: PathKey, - buffer: Entity, - buffer_snapshot: &BufferSnapshot, - excerpt_ranges: Vec>, - cx: &mut Context, - ) -> (Vec>, bool) { - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - buffer_snapshot, - new, - counts, - cx, - ) - } - - pub fn set_anchored_excerpts_for_path( - &self, - buffer: Entity, - ranges: Vec>, - context_line_count: u32, - cx: &mut Context, - ) -> Task>> { - let buffer_snapshot = buffer.read(cx).snapshot(); - let path_key = PathKey::for_buffer(&buffer, cx); - cx.spawn(async move |multi_buffer, cx| { - let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = cx - .background_spawn(async move { - let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); - let excerpt_ranges = - build_excerpt_ranges(ranges, context_line_count, &snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - (excerpt_ranges, new, counts) - }) - .await; - - multi_buffer - .update(cx, move |multi_buffer, cx| { - let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( - path_key, - buffer, - excerpt_ranges, - &buffer_snapshot, - new, - counts, - cx, - ); - ranges - }) - .ok() - .unwrap_or_default() - }) - } - - /// Sets excerpts, returns `true` if at least one new excerpt was added. 
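// Editor's aside — a simplified sketch (hypothetical free function, plain row
// ranges instead of excerpt ranges) of the merging performed by
// `merge_excerpt_ranges` below: ranges that overlap or start on the very next
// row are folded into one entry, and `counts[i]` records how many inputs were
// folded into `merged[i]` so callers can map results back to their requests.
fn merge_row_ranges(sorted: &[std::ops::Range<u32>]) -> (Vec<std::ops::Range<u32>>, Vec<usize>) {
    let mut merged: Vec<std::ops::Range<u32>> = Vec::new();
    let mut counts: Vec<usize> = Vec::new();
    for range in sorted {
        if let Some(last) = merged.last_mut() {
            // Overlapping, or starting on the adjacent row: extend the previous range.
            if last.end >= range.start || last.end + 1 == range.start {
                last.end = last.end.max(range.end);
                *counts.last_mut().unwrap() += 1;
                continue;
            }
        }
        merged.push(range.clone());
        counts.push(1);
    }
    (merged, counts)
}
// e.g. merge_row_ranges(&[0..3, 2..5, 6..8, 20..22]) yields (vec![0..8, 20..22], vec![3, 1]).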
- fn set_merged_excerpt_ranges_for_path( - &mut self, - path: PathKey, - buffer: Entity, - ranges: Vec>, - buffer_snapshot: &BufferSnapshot, - new: Vec>, - counts: Vec, - cx: &mut Context, - ) -> (Vec>, bool) { - let (excerpt_ids, added_a_new_excerpt) = - self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); - - let mut result = Vec::new(); - let mut ranges = ranges.into_iter(); - for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(counts.into_iter()) { - for range in ranges.by_ref().take(range_count) { - let range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.remote_id(), - buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - ); - result.push(range) - } - } - (result, added_a_new_excerpt) - } - fn merge_excerpt_ranges<'a>( expanded_ranges: impl IntoIterator> + 'a, ) -> (Vec>, Vec) { @@ -1712,184 +1153,16 @@ impl MultiBuffer { ); if last_range.context.end >= range.context.start || last_range.context.end.row + 1 == range.context.start.row - { - last_range.context.end = range.context.end.max(last_range.context.end); - *counts.last_mut().unwrap() += 1; - continue; - } - } - merged_ranges.push(range.clone()); - counts.push(1); - } - (merged_ranges, counts) - } - - fn update_path_excerpts( - &mut self, - path: PathKey, - buffer: Entity, - buffer_snapshot: &BufferSnapshot, - new: Vec>, - cx: &mut Context, - ) -> (Vec, bool) { - let mut insert_after = self - .excerpts_by_path - .range(..path.clone()) - .next_back() - .map(|(_, value)| *value.last().unwrap()) - .unwrap_or(ExcerptId::min()); - - let existing = self - .excerpts_by_path - .get(&path) - .cloned() - .unwrap_or_default(); - - let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = existing.into_iter().peekable(); - - let mut excerpt_ids = Vec::new(); - let mut to_remove = Vec::new(); - let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); - let mut added_a_new_excerpt = false; - let snapshot = self.snapshot(cx); - - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; - - let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); - - let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); - excerpts_cursor.next(); - - loop { - let new = new_iter.peek(); - let existing = if let Some(existing_id) = existing_iter.peek() { - let locator = snapshot.excerpt_locator_for_id(*existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts_cursor.item() { - if excerpt.buffer_id != buffer_snapshot.remote_id() { - to_remove.push(*existing_id); - existing_iter.next(); - continue; - } - Some(( - *existing_id, - excerpt.range.context.to_point(buffer_snapshot), - )) - } else { - None - } - } else { - None - }; - - if let Some((last_id, last)) = to_insert.last_mut() { - if let Some(new) = new - && last.context.end >= new.context.start - { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } - if let Some((existing_id, existing_range)) = &existing - && last.context.end >= existing_range.start - { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - self.snapshot - .borrow_mut() - .replaced_excerpts - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } - } - - match (new, existing) { - (None, None) => break, - (None, Some((existing_id, _))) => { - 
existing_iter.next(); - to_remove.push(existing_id); - continue; - } - (Some(_), None) => { - added_a_new_excerpt = true; - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); + { + last_range.context.end = range.context.end.max(last_range.context.end); + *counts.last_mut().unwrap() += 1; continue; } - (Some(new), Some((_, existing_range))) => { - if existing_range.end < new.context.start { - let existing_id = existing_iter.next().unwrap(); - to_remove.push(existing_id); - continue; - } else if existing_range.start > new.context.end { - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - continue; - } - - if existing_range.start == new.context.start - && existing_range.end == new.context.end - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - excerpt_ids.push(insert_after); - new_iter.next(); - } else { - let existing_id = existing_iter.next().unwrap(); - let new_id = next_excerpt_id(); - self.snapshot - .borrow_mut() - .replaced_excerpts - .insert(existing_id, new_id); - to_remove.push(existing_id); - let mut range = new_iter.next().unwrap(); - range.context.start = range.context.start.min(existing_range.start); - range.context.end = range.context.end.max(existing_range.end); - excerpt_ids.push(new_id); - to_insert.push((new_id, range)); - } - } - }; - } - - self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); - self.remove_excerpts(to_remove, cx); - if excerpt_ids.is_empty() { - self.excerpts_by_path.remove(&path); - } else { - for excerpt_id in &excerpt_ids { - self.paths_by_excerpt.insert(*excerpt_id, path.clone()); } - self.excerpts_by_path - .insert(path, excerpt_ids.iter().dedup().cloned().collect()); - } - - (excerpt_ids, added_a_new_excerpt) - } - - pub fn paths(&self) -> impl Iterator + '_ { - self.excerpts_by_path.keys().cloned() - } - - pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - if let Some(to_remove) = self.excerpts_by_path.remove(&path) { - self.remove_excerpts(to_remove, cx) + merged_ranges.push(range.clone()); + counts.push(1); } + (merged_ranges, counts) } pub fn insert_excerpts_after( @@ -1931,26 +1204,27 @@ impl MultiBuffer { ) where O: text::ToOffset, { - assert_eq!(self.history.transaction_depth, 0); + assert_eq!(self.history.transaction_depth(), 0); let mut ranges = ranges.into_iter().peekable(); if ranges.peek().is_none() { return Default::default(); } - self.sync(cx); + self.sync_mut(cx); let buffer_snapshot = buffer.read(cx).snapshot(); let buffer_id = buffer_snapshot.remote_id(); - let mut buffers = self.buffers.borrow_mut(); - let buffer_state = buffers.entry(buffer_id).or_insert_with(|| { + let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| { self.buffer_changed_since_sync.replace(true); buffer.update(cx, |buffer, _| { buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); }); BufferState { - last_version: buffer_snapshot.version().clone(), - last_non_text_state_update_count: buffer_snapshot.non_text_state_update_count(), + last_version: RefCell::new(buffer_snapshot.version().clone()), + last_non_text_state_update_count: Cell::new( + buffer_snapshot.non_text_state_update_count(), + ), excerpts: Default::default(), _subscriptions: [ cx.observe(&buffer, |_, _, cx| cx.notify()), @@ -1960,7 +1234,7 @@ impl MultiBuffer { } }); - let mut 
snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.get_mut(); let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); @@ -2024,7 +1298,7 @@ impl MultiBuffer { snapshot.trailing_excerpt_update_count += 1; } - self.sync_diff_transforms( + let edits = Self::sync_diff_transforms( &mut snapshot, vec![Edit { old: edit_start..edit_start, @@ -2032,8 +1306,11 @@ impl MultiBuffer { }], DiffChangeKind::BufferEdited, ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } + cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); cx.emit(Event::ExcerptsAdded { @@ -2045,36 +1322,48 @@ impl MultiBuffer { } pub fn clear(&mut self, cx: &mut Context) { - self.sync(cx); + self.sync_mut(cx); let ids = self.excerpt_ids(); - let removed_buffer_ids = self - .buffers - .borrow_mut() - .drain() - .map(|(id, _)| id) - .collect(); + let removed_buffer_ids = self.buffers.drain().map(|(id, _)| id).collect(); self.excerpts_by_path.clear(); self.paths_by_excerpt.clear(); - let mut snapshot = self.snapshot.borrow_mut(); + let MultiBufferSnapshot { + excerpts, + diffs: _, + diff_transforms: _, + non_text_state_update_count: _, + edit_count: _, + is_dirty, + has_deleted_file, + has_conflict, + singleton: _, + excerpt_ids: _, + replaced_excerpts, + trailing_excerpt_update_count, + all_diff_hunks_expanded: _, + show_headers: _, + } = self.snapshot.get_mut(); let start = ExcerptOffset::new(0); - let prev_len = ExcerptOffset::new(snapshot.excerpts.summary().text.len); - snapshot.excerpts = Default::default(); - snapshot.trailing_excerpt_update_count += 1; - snapshot.is_dirty = false; - snapshot.has_deleted_file = false; - snapshot.has_conflict = false; - snapshot.replaced_excerpts.clear(); - - self.sync_diff_transforms( - &mut snapshot, + let prev_len = ExcerptOffset::new(excerpts.summary().text.len); + *excerpts = Default::default(); + *trailing_excerpt_update_count += 1; + *is_dirty = false; + *has_deleted_file = false; + *has_conflict = false; + replaced_excerpts.clear(); + + let edits = Self::sync_diff_transforms( + self.snapshot.get_mut(), vec![Edit { old: start..prev_len, new: start..start, }], DiffChangeKind::BufferEdited, ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); cx.emit(Event::ExcerptsRemoved { @@ -2091,9 +1380,8 @@ impl MultiBuffer { ) -> Vec<(ExcerptId, ExcerptRange)> { let mut excerpts = Vec::new(); let snapshot = self.read(cx); - let buffers = self.buffers.borrow(); let mut cursor = snapshot.excerpts.cursor::>(()); - if let Some(locators) = buffers.get(&buffer_id).map(|state| &state.excerpts) { + if let Some(locators) = self.buffers.get(&buffer_id).map(|state| &state.excerpts) { for locator in locators { cursor.seek_forward(&Some(locator), Bias::Left); if let Some(excerpt) = cursor.item() @@ -2109,7 +1397,6 @@ impl MultiBuffer { pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec> { let snapshot = self.read(cx); - let buffers = self.buffers.borrow(); let mut excerpts = snapshot .excerpts .cursor::, ExcerptDimension>>(()); @@ -2117,7 +1404,8 @@ impl MultiBuffer { .diff_transforms .cursor::, OutputDimension>>(()); diff_transforms.next(); - let locators = buffers + let locators = self + .buffers .get(&buffer_id) .into_iter() .flat_map(|state| &state.excerpts); @@ -2178,12 +1466,7 @@ impl MultiBuffer { .map(|excerpt| { ( excerpt.id, - 
self.buffers - .borrow() - .get(&excerpt.buffer_id) - .unwrap() - .buffer - .clone(), + self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(), excerpt.range.context.clone(), ) }) @@ -2207,11 +1490,7 @@ impl MultiBuffer { let snapshot = self.read(cx); let (buffer, offset) = snapshot.point_to_buffer_offset(point)?; Some(( - self.buffers - .borrow() - .get(&buffer.remote_id())? - .buffer - .clone(), + self.buffers.get(&buffer.remote_id())?.buffer.clone(), offset, )) } @@ -2226,11 +1505,7 @@ impl MultiBuffer { let (buffer, point, is_main_buffer) = snapshot.point_to_buffer_point(point.to_point(&snapshot))?; Some(( - self.buffers - .borrow() - .get(&buffer.remote_id())? - .buffer - .clone(), + self.buffers.get(&buffer.remote_id())?.buffer.clone(), point, is_main_buffer, )) @@ -2270,14 +1545,14 @@ impl MultiBuffer { excerpt_ids: impl IntoIterator, cx: &mut Context, ) { - self.sync(cx); + self.sync_mut(cx); let ids = excerpt_ids.into_iter().collect::>(); if ids.is_empty() { return; } + self.buffer_changed_since_sync.replace(true); - let mut buffers = self.buffers.borrow_mut(); - let mut snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.get_mut(); let mut new_excerpts = SumTree::default(); let mut cursor = snapshot .excerpts @@ -2300,14 +1575,14 @@ impl MultiBuffer { // Skip over the removed excerpt. 'remove_excerpts: loop { - if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { + if let Some(buffer_state) = self.buffers.get_mut(&excerpt.buffer_id) { buffer_state.excerpts.retain(|l| l != &excerpt.locator); if buffer_state.excerpts.is_empty() { log::debug!( "removing buffer and diff for buffer {}", excerpt.buffer_id ); - buffers.remove(&excerpt.buffer_id); + self.buffers.remove(&excerpt.buffer_id); removed_buffer_ids.push(excerpt.buffer_id); } } @@ -2358,10 +1633,11 @@ impl MultiBuffer { snapshot.trailing_excerpt_update_count += 1; } - self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - self.buffer_changed_since_sync.replace(true); + let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); cx.emit(Event::ExcerptsRemoved { @@ -2376,12 +1652,11 @@ impl MultiBuffer { anchors: Anchors, cx: &mut Context, ) -> impl 'static + Future> + use { - let borrow = self.buffers.borrow(); let mut error = None; let mut futures = Vec::new(); for anchor in anchors { if let Some(buffer_id) = anchor.buffer_id { - if let Some(buffer) = borrow.get(&buffer_id) { + if let Some(buffer) = self.buffers.get(&buffer_id) { buffer.buffer.update(cx, |buffer, _| { futures.push(buffer.wait_for_anchors([anchor.text_anchor])) }); @@ -2411,12 +1686,7 @@ impl MultiBuffer { ) -> Option<(Entity, language::Anchor)> { let snapshot = self.read(cx); let anchor = snapshot.anchor_before(position); - let buffer = self - .buffers - .borrow() - .get(&anchor.buffer_id?)? 
- .buffer - .clone(); + let buffer = self.buffers.get(&anchor.buffer_id?)?.buffer.clone(); Some((buffer, anchor.text_anchor)) } @@ -2429,7 +1699,6 @@ impl MultiBuffer { use language::BufferEvent; cx.emit(match event { BufferEvent::Edited => Event::Edited { - singleton_buffer_edited: true, edited_buffer: Some(buffer), }, BufferEvent::DirtyChanged => Event::DirtyChanged, @@ -2448,12 +1717,10 @@ impl MultiBuffer { } fn buffer_diff_language_changed(&mut self, diff: Entity, cx: &mut Context) { - self.sync(cx); - let mut snapshot = self.snapshot.borrow_mut(); let diff = diff.read(cx); let buffer_id = diff.buffer_id; let diff = diff.snapshot(cx); - snapshot.diffs.insert(buffer_id, diff); + self.snapshot.get_mut().diffs.insert(buffer_id, diff); } fn buffer_diff_changed( @@ -2462,21 +1729,20 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync(cx); - self.buffer_changed_since_sync.replace(true); + self.sync_mut(cx); let diff = diff.read(cx); let buffer_id = diff.buffer_id; - let buffers = self.buffers.borrow(); - let Some(buffer_state) = buffers.get(&buffer_id) else { + let Some(buffer_state) = self.buffers.get(&buffer_id) else { return; }; + self.buffer_changed_since_sync.replace(true); let buffer = buffer_state.buffer.read(cx); let diff_change_range = range.to_offset(buffer); let new_diff = diff.snapshot(cx); - let mut snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.get_mut(); let base_text_changed = snapshot .diffs .get(&buffer_id) @@ -2520,34 +1786,34 @@ impl MultiBuffer { } } - self.sync_diff_transforms( + let edits = Self::sync_diff_transforms( &mut snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, }, ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); } pub fn all_buffers(&self) -> HashSet> { self.buffers - .borrow() .values() .map(|state| state.buffer.clone()) .collect() } pub fn all_buffer_ids(&self) -> Vec { - self.buffers.borrow().keys().copied().collect() + self.buffers.keys().copied().collect() } pub fn buffer(&self, buffer_id: BufferId) -> Option> { self.buffers - .borrow() .get(&buffer_id) .map(|state| state.buffer.clone()) } @@ -2589,10 +1855,11 @@ impl MultiBuffer { } pub fn for_each_buffer(&self, mut f: impl FnMut(&Entity)) { - self.buffers - .borrow() - .values() - .for_each(|state| f(&state.buffer)) + self.buffers.values().for_each(|state| f(&state.buffer)) + } + + pub fn explicit_title(&self) -> Option<&str> { + self.title.as_deref() } pub fn title<'a>(&'a self, cx: &'a App) -> Cow<'a, str> { @@ -2661,7 +1928,7 @@ impl MultiBuffer { /// Preserve preview tabs containing this multibuffer until additional edits occur. pub fn refresh_preview(&self, cx: &mut Context) { - for buffer_state in self.buffers.borrow().values() { + for buffer_state in self.buffers.values() { buffer_state .buffer .update(cx, |buffer, _cx| buffer.refresh_preview()); @@ -2671,7 +1938,6 @@ impl MultiBuffer { /// Whether we should preserve the preview status of a tab containing this multi-buffer. 
pub fn preserve_preview(&self, cx: &App) -> bool { self.buffers - .borrow() .values() .all(|state| state.buffer.read(cx).preserve_preview()) } @@ -2700,7 +1966,7 @@ impl MultiBuffer { } pub fn set_all_diff_hunks_expanded(&mut self, cx: &mut Context) { - self.snapshot.borrow_mut().all_diff_hunks_expanded = true; + self.snapshot.get_mut().all_diff_hunks_expanded = true; self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx); } @@ -2709,7 +1975,7 @@ impl MultiBuffer { } pub fn set_all_diff_hunks_collapsed(&mut self, cx: &mut Context) { - self.snapshot.borrow_mut().all_diff_hunks_expanded = false; + self.snapshot.get_mut().all_diff_hunks_expanded = false; self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], false, cx); } @@ -2769,8 +2035,8 @@ impl MultiBuffer { if self.snapshot.borrow().all_diff_hunks_expanded && !expand { return; } - self.sync(cx); - let mut snapshot = self.snapshot.borrow_mut(); + self.sync_mut(cx); + let mut snapshot = self.snapshot.get_mut(); let mut excerpt_edits = Vec::new(); let mut last_hunk_row = None; for (range, end_excerpt_id) in ranges { @@ -2801,14 +2067,16 @@ impl MultiBuffer { } } - self.sync_diff_transforms( + let edits = Self::sync_diff_transforms( &mut snapshot, excerpt_edits, DiffChangeKind::ExpandOrCollapseHunks { expand }, ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::DiffHunksToggled); cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); } @@ -2838,9 +2106,9 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync(cx); + self.sync_mut(cx); - let mut snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.get_mut(); let locator = snapshot.excerpt_locator_for_id(id); let mut new_excerpts = SumTree::default(); let mut cursor = snapshot @@ -2890,9 +2158,11 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); @@ -2909,12 +2179,12 @@ impl MultiBuffer { if line_count == 0 { return; } - self.sync(cx); + self.sync_mut(cx); if !self.excerpts_by_path.is_empty() { self.expand_excerpts_with_paths(ids, line_count, direction, cx); return; } - let mut snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.get_mut(); let ids = ids.into_iter().collect::>(); let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); @@ -2995,9 +2265,11 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } cx.emit(Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }); cx.emit(Event::ExcerptsExpanded { ids }); @@ -3009,26 +2281,71 @@ impl MultiBuffer { if !changed { return; } + let edits = Self::sync_( + &mut self.snapshot.borrow_mut(), + &self.buffers, + &self.diffs, + cx, + ); + if !edits.is_empty() { + self.subscriptions.publish(edits); + } + } + + fn sync_mut(&mut self, cx: &App) { + let changed = self.buffer_changed_since_sync.replace(false); + if !changed { 
+ return; + } + let edits = Self::sync_(self.snapshot.get_mut(), &self.buffers, &self.diffs, cx); + + if !edits.is_empty() { + self.subscriptions.publish(edits); + } + } + + fn sync_( + snapshot: &mut MultiBufferSnapshot, + buffers: &HashMap, + diffs: &HashMap, + cx: &App, + ) -> Vec> { + let MultiBufferSnapshot { + excerpts, + diffs: buffer_diff, + diff_transforms: _, + non_text_state_update_count, + edit_count, + is_dirty, + has_deleted_file, + has_conflict, + singleton: _, + excerpt_ids: _, + replaced_excerpts: _, + trailing_excerpt_update_count: _, + all_diff_hunks_expanded: _, + show_headers: _, + } = snapshot; + *is_dirty = false; + *has_deleted_file = false; + *has_conflict = false; - let mut snapshot = self.snapshot.borrow_mut(); let mut excerpts_to_edit = Vec::new(); let mut non_text_state_updated = false; - let mut is_dirty = false; - let mut has_deleted_file = false; - let mut has_conflict = false; let mut edited = false; - let mut buffers = self.buffers.borrow_mut(); - for buffer_state in buffers.values_mut() { + for buffer_state in buffers.values() { let buffer = buffer_state.buffer.read(cx); let version = buffer.version(); let non_text_state_update_count = buffer.non_text_state_update_count(); - let buffer_edited = version.changed_since(&buffer_state.last_version); + let buffer_edited = version.changed_since(&buffer_state.last_version.borrow()); let buffer_non_text_state_updated = - non_text_state_update_count > buffer_state.last_non_text_state_update_count; + non_text_state_update_count > buffer_state.last_non_text_state_update_count.get(); if buffer_edited || buffer_non_text_state_updated { - buffer_state.last_version = version; - buffer_state.last_non_text_state_update_count = non_text_state_update_count; + *buffer_state.last_version.borrow_mut() = version; + buffer_state + .last_non_text_state_update_count + .set(non_text_state_update_count); excerpts_to_edit.extend( buffer_state .excerpts @@ -3039,25 +2356,22 @@ impl MultiBuffer { edited |= buffer_edited; non_text_state_updated |= buffer_non_text_state_updated; - is_dirty |= buffer.is_dirty(); - has_deleted_file |= buffer + *is_dirty |= buffer.is_dirty(); + *has_deleted_file |= buffer .file() .is_some_and(|file| file.disk_state() == DiskState::Deleted); - has_conflict |= buffer.has_conflict(); + *has_conflict |= buffer.has_conflict(); } if edited { - snapshot.edit_count += 1; + *edit_count += 1; } if non_text_state_updated { - snapshot.non_text_state_update_count += 1; + *non_text_state_update_count += 1; } - snapshot.is_dirty = is_dirty; - snapshot.has_deleted_file = has_deleted_file; - snapshot.has_conflict = has_conflict; - for (id, diff) in self.diffs.iter() { - if snapshot.diffs.get(id).is_none() { - snapshot.diffs.insert(*id, diff.diff.read(cx).snapshot(cx)); + for (id, diff) in diffs.iter() { + if buffer_diff.get(id).is_none() { + buffer_diff.insert(*id, diff.diff.read(cx).snapshot(cx)); } } @@ -3065,9 +2379,7 @@ impl MultiBuffer { let mut edits = Vec::new(); let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); + let mut cursor = excerpts.cursor::, ExcerptOffset>>(()); for (locator, buffer, buffer_edited) in excerpts_to_edit { new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); @@ -3117,19 +2429,17 @@ impl MultiBuffer { new_excerpts.append(cursor.suffix(), ()); drop(cursor); - snapshot.excerpts = new_excerpts; - - self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); + *excerpts = new_excerpts; + 
Self::sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited) } fn sync_diff_transforms( - &self, snapshot: &mut MultiBufferSnapshot, excerpt_edits: Vec>, change_kind: DiffChangeKind, - ) { + ) -> Vec> { if excerpt_edits.is_empty() { - return; + return vec![]; } let mut excerpts = snapshot.excerpts.cursor::(()); @@ -3154,12 +2464,12 @@ impl MultiBuffer { if at_transform_boundary { at_transform_boundary = false; let transforms_before_edit = old_diff_transforms.slice(&edit.old.start, Bias::Left); - self.append_diff_transforms(&mut new_diff_transforms, transforms_before_edit); + Self::append_diff_transforms(&mut new_diff_transforms, transforms_before_edit); if let Some(transform) = old_diff_transforms.item() && old_diff_transforms.end().0 == edit.old.start && old_diff_transforms.start().0 < edit.old.start { - self.push_diff_transform(&mut new_diff_transforms, transform.clone()); + Self::push_diff_transform(&mut new_diff_transforms, transform.clone()); old_diff_transforms.next(); } } @@ -3169,7 +2479,7 @@ impl MultiBuffer { let edit_old_start = old_diff_transforms.start().1 + edit_start_overshoot; let edit_new_start = (edit_old_start as isize + output_delta) as usize; - let changed_diff_hunks = self.recompute_diff_transforms_for_edit( + let changed_diff_hunks = Self::recompute_diff_transforms_for_edit( &edit, &mut excerpts, &mut old_diff_transforms, @@ -3222,7 +2532,7 @@ impl MultiBuffer { } old_expanded_hunks.clear(); - self.push_buffer_content_transform( + Self::push_buffer_content_transform( snapshot, &mut new_diff_transforms, excerpt_offset, @@ -3233,7 +2543,7 @@ impl MultiBuffer { } // Keep any transforms that are after the last edit. - self.append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix()); + Self::append_diff_transforms(&mut new_diff_transforms, old_diff_transforms.suffix()); // Ensure there's always at least one buffer content transform. 
if new_diff_transforms.is_empty() { @@ -3246,7 +2556,6 @@ impl MultiBuffer { ); } - self.subscriptions.publish(output_edits); drop(old_diff_transforms); drop(excerpts); snapshot.diff_transforms = new_diff_transforms; @@ -3254,10 +2563,10 @@ impl MultiBuffer { #[cfg(any(test, feature = "test-support"))] snapshot.check_invariants(); + output_edits } fn recompute_diff_transforms_for_edit( - &self, edit: &Edit>, excerpts: &mut Cursor>, old_diff_transforms: &mut Cursor, usize>>, @@ -3342,7 +2651,7 @@ impl MultiBuffer { + ExcerptOffset::new(hunk_buffer_range.end - excerpt_buffer_start), ); - self.push_buffer_content_transform( + Self::push_buffer_content_transform( snapshot, new_diff_transforms, hunk_excerpt_start, @@ -3423,7 +2732,6 @@ impl MultiBuffer { } fn append_diff_transforms( - &self, new_transforms: &mut SumTree, subtree: SumTree, ) { @@ -3431,7 +2739,7 @@ impl MultiBuffer { inserted_hunk_info, summary, }) = subtree.first() - && self.extend_last_buffer_content_transform( + && Self::extend_last_buffer_content_transform( new_transforms, *inserted_hunk_info, *summary, @@ -3446,16 +2754,12 @@ impl MultiBuffer { new_transforms.append(subtree, ()); } - fn push_diff_transform( - &self, - new_transforms: &mut SumTree, - transform: DiffTransform, - ) { + fn push_diff_transform(new_transforms: &mut SumTree, transform: DiffTransform) { if let DiffTransform::BufferContent { inserted_hunk_info: inserted_hunk_anchor, summary, } = transform - && self.extend_last_buffer_content_transform( + && Self::extend_last_buffer_content_transform( new_transforms, inserted_hunk_anchor, summary, @@ -3467,7 +2771,6 @@ impl MultiBuffer { } fn push_buffer_content_transform( - &self, old_snapshot: &MultiBufferSnapshot, new_transforms: &mut SumTree, end_offset: ExcerptOffset, @@ -3487,7 +2790,7 @@ impl MultiBuffer { let summary_to_add = old_snapshot .text_summary_for_excerpt_offset_range::(start_offset..end_offset); - if !self.extend_last_buffer_content_transform( + if !Self::extend_last_buffer_content_transform( new_transforms, inserted_hunk_info, summary_to_add, @@ -3504,7 +2807,6 @@ impl MultiBuffer { } fn extend_last_buffer_content_transform( - &self, new_transforms: &mut SumTree, new_inserted_hunk_info: Option, summary_to_add: TextSummary, @@ -3664,7 +2966,7 @@ impl MultiBuffer { let excerpt_ids = self.excerpt_ids(); if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) { - let buffer_handle = if rng.random() || self.buffers.borrow().is_empty() { + let buffer_handle = if rng.random() || self.buffers.is_empty() { let text = RandomCharIter::new(&mut *rng).take(10).collect::(); buffers.push(cx.new(|cx| Buffer::local(text, cx))); let buffer = buffers.last().unwrap().read(cx); @@ -3675,13 +2977,7 @@ impl MultiBuffer { ); buffers.last().unwrap().clone() } else { - self.buffers - .borrow() - .values() - .choose(rng) - .unwrap() - .buffer - .clone() + self.buffers.values().choose(rng).unwrap().buffer.clone() }; let buffer = buffer_handle.read(cx); @@ -3732,7 +3028,6 @@ impl MultiBuffer { if rng.random_bool(0.7) || self.singleton { let buffer = self .buffers - .borrow() .values() .choose(rng) .map(|state| state.buffer.clone()); @@ -4412,10 +3707,18 @@ impl MultiBufferSnapshot { self.convert_dimension(point, text::BufferSnapshot::point_to_point_utf16) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_point) + } + pub fn point_to_offset(&self, point: Point) -> usize { self.convert_dimension(point, 
text::BufferSnapshot::point_to_offset) } + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + self.convert_dimension(point, text::BufferSnapshot::point_to_offset_utf16) + } + pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize { self.convert_dimension(offset, text::BufferSnapshot::offset_utf16_to_offset) } @@ -4428,6 +3731,10 @@ impl MultiBufferSnapshot { self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_offset) } + pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 { + self.convert_dimension(point, text::BufferSnapshot::point_utf16_to_offset_utf16) + } + fn clip_dimension( &self, position: D, @@ -4500,10 +3807,23 @@ impl MultiBufferSnapshot { && region.has_trailing_newline && !region.is_main_buffer { - return Some((&cursor.excerpt()?.buffer, cursor.main_buffer_position()?)); + let main_buffer_position = cursor.main_buffer_position()?; + let buffer_snapshot = &cursor.excerpt()?.buffer; + // remove this assert once we figure out the cause of the panics for #40453 + buffer_snapshot + .text + .as_rope() + .assert_char_boundary(main_buffer_position); + return Some((buffer_snapshot, main_buffer_position)); } else if buffer_offset > region.buffer.len() { return None; } + // remove this assert once we figure out the cause of the panics for #40453 + region + .buffer + .text + .as_rope() + .assert_char_boundary(buffer_offset); Some((region.buffer, buffer_offset)) } @@ -5239,8 +4559,30 @@ impl MultiBufferSnapshot { } } + /// Wraps the [`text::Anchor`] in a [`multi_buffer::Anchor`] if this multi-buffer is a singleton. + pub fn as_singleton_anchor(&self, text_anchor: text::Anchor) -> Option { + let (excerpt, buffer, _) = self.as_singleton()?; + Some(Anchor::in_buffer(*excerpt, buffer, text_anchor)) + } + + /// Returns an anchor for the given excerpt and text anchor, + /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. + pub fn anchor_range_in_excerpt( + &self, + excerpt_id: ExcerptId, + text_anchor: Range, + ) -> Option> { + let excerpt_id = self.latest_excerpt_id(excerpt_id); + let excerpt = self.excerpt(excerpt_id)?; + + Some( + self.anchor_in_excerpt_(excerpt, text_anchor.start)? + ..self.anchor_in_excerpt_(excerpt, text_anchor.end)?, + ) + } + /// Returns an anchor for the given excerpt and text anchor, - /// returns None if the excerpt_id is no longer valid. + /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. 
pub fn anchor_in_excerpt( &self, excerpt_id: ExcerptId, @@ -5248,8 +4590,32 @@ impl MultiBufferSnapshot { ) -> Option { let excerpt_id = self.latest_excerpt_id(excerpt_id); let excerpt = self.excerpt(excerpt_id)?; + self.anchor_in_excerpt_(excerpt, text_anchor) + } + + fn anchor_in_excerpt_(&self, excerpt: &Excerpt, text_anchor: text::Anchor) -> Option { + match text_anchor.buffer_id { + Some(buffer_id) if buffer_id == excerpt.buffer_id => (), + Some(_) => return None, + None if text_anchor == text::Anchor::MAX || text_anchor == text::Anchor::MIN => { + return Some(Anchor::in_buffer( + excerpt.id, + excerpt.buffer_id, + text_anchor, + )); + } + None => return None, + } + + let context = &excerpt.range.context; + if context.start.cmp(&text_anchor, &excerpt.buffer).is_gt() + || context.end.cmp(&text_anchor, &excerpt.buffer).is_lt() + { + return None; + } + Some(Anchor::in_buffer( - excerpt_id, + excerpt.id, excerpt.buffer_id, text_anchor, )) @@ -6084,22 +5450,15 @@ impl MultiBufferSnapshot { .flat_map(|item| { Some(OutlineItem { depth: item.depth, - range: self.anchor_in_excerpt(*excerpt_id, item.range.start)? - ..self.anchor_in_excerpt(*excerpt_id, item.range.end)?, + range: self.anchor_range_in_excerpt(*excerpt_id, item.range)?, text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, body_range: item.body_range.and_then(|body_range| { - Some( - self.anchor_in_excerpt(*excerpt_id, body_range.start)? - ..self.anchor_in_excerpt(*excerpt_id, body_range.end)?, - ) + self.anchor_range_in_excerpt(*excerpt_id, body_range) }), annotation_range: item.annotation_range.and_then(|annotation_range| { - Some( - self.anchor_in_excerpt(*excerpt_id, annotation_range.start)? - ..self.anchor_in_excerpt(*excerpt_id, annotation_range.end)?, - ) + self.anchor_range_in_excerpt(*excerpt_id, annotation_range) }), }) }) @@ -6147,9 +5506,8 @@ impl MultiBufferSnapshot { } else if id == ExcerptId::max() { Locator::max_ref() } else { - let mut cursor = self.excerpt_ids.cursor::(()); - cursor.seek(&id, Bias::Left); - if let Some(entry) = cursor.item() + let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); + if let Some(entry) = item && entry.id == id { return &entry.locator; @@ -6165,22 +5523,20 @@ impl MultiBufferSnapshot { ) -> SmallVec<[Locator; 1]> { let mut sorted_ids = ids.into_iter().collect::>(); sorted_ids.sort_unstable(); + sorted_ids.dedup(); let mut locators = SmallVec::new(); while sorted_ids.last() == Some(&ExcerptId::max()) { sorted_ids.pop(); - if let Some(mapping) = self.excerpt_ids.last() { - locators.push(mapping.locator.clone()); - } + locators.push(Locator::max()); } - let mut sorted_ids = sorted_ids.into_iter().dedup().peekable(); - if sorted_ids.peek() == Some(&ExcerptId::min()) { - sorted_ids.next(); - if let Some(mapping) = self.excerpt_ids.first() { - locators.push(mapping.locator.clone()); - } - } + let mut sorted_ids = sorted_ids.into_iter().peekable(); + locators.extend( + sorted_ids + .peeking_take_while(|excerpt| *excerpt == ExcerptId::min()) + .map(|_| Locator::min()), + ); let mut cursor = self.excerpt_ids.cursor::(()); for id in sorted_ids { @@ -6728,208 +6084,6 @@ where } } -impl History { - fn start_transaction(&mut self, now: Instant) -> Option { - self.transaction_depth += 1; - if self.transaction_depth == 1 { - let id = self.next_transaction_id.tick(); - self.undo_stack.push(Transaction { - id, - buffer_transactions: Default::default(), - first_edit_at: now, - last_edit_at: now, - suppress_grouping: false, - }); - Some(id) - } else { - 
None - } - } - - fn end_transaction( - &mut self, - now: Instant, - buffer_transactions: HashMap, - ) -> bool { - assert_ne!(self.transaction_depth, 0); - self.transaction_depth -= 1; - if self.transaction_depth == 0 { - if buffer_transactions.is_empty() { - self.undo_stack.pop(); - false - } else { - self.redo_stack.clear(); - let transaction = self.undo_stack.last_mut().unwrap(); - transaction.last_edit_at = now; - for (buffer_id, transaction_id) in buffer_transactions { - transaction - .buffer_transactions - .entry(buffer_id) - .or_insert(transaction_id); - } - true - } - } else { - false - } - } - - fn push_transaction<'a, T>( - &mut self, - buffer_transactions: T, - now: Instant, - cx: &Context, - ) where - T: IntoIterator, &'a language::Transaction)>, - { - assert_eq!(self.transaction_depth, 0); - let transaction = Transaction { - id: self.next_transaction_id.tick(), - buffer_transactions: buffer_transactions - .into_iter() - .map(|(buffer, transaction)| (buffer.read(cx).remote_id(), transaction.id)) - .collect(), - first_edit_at: now, - last_edit_at: now, - suppress_grouping: false, - }; - if !transaction.buffer_transactions.is_empty() { - self.undo_stack.push(transaction); - self.redo_stack.clear(); - } - } - - fn finalize_last_transaction(&mut self) { - if let Some(transaction) = self.undo_stack.last_mut() { - transaction.suppress_grouping = true; - } - } - - fn forget(&mut self, transaction_id: TransactionId) -> Option { - if let Some(ix) = self - .undo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id) - { - Some(self.undo_stack.remove(ix)) - } else if let Some(ix) = self - .redo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id) - { - Some(self.redo_stack.remove(ix)) - } else { - None - } - } - - fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> { - self.undo_stack - .iter() - .find(|transaction| transaction.id == transaction_id) - .or_else(|| { - self.redo_stack - .iter() - .find(|transaction| transaction.id == transaction_id) - }) - } - - fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> { - self.undo_stack - .iter_mut() - .find(|transaction| transaction.id == transaction_id) - .or_else(|| { - self.redo_stack - .iter_mut() - .find(|transaction| transaction.id == transaction_id) - }) - } - - fn pop_undo(&mut self) -> Option<&mut Transaction> { - assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.undo_stack.pop() { - self.redo_stack.push(transaction); - self.redo_stack.last_mut() - } else { - None - } - } - - fn pop_redo(&mut self) -> Option<&mut Transaction> { - assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.redo_stack.pop() { - self.undo_stack.push(transaction); - self.undo_stack.last_mut() - } else { - None - } - } - - fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { - let ix = self - .undo_stack - .iter() - .rposition(|transaction| transaction.id == transaction_id)?; - let transaction = self.undo_stack.remove(ix); - self.redo_stack.push(transaction); - self.redo_stack.last() - } - - fn group(&mut self) -> Option { - let mut count = 0; - let mut transactions = self.undo_stack.iter(); - if let Some(mut transaction) = transactions.next_back() { - while let Some(prev_transaction) = transactions.next_back() { - if !prev_transaction.suppress_grouping - && transaction.first_edit_at - prev_transaction.last_edit_at - <= self.group_interval - { - transaction = prev_transaction; - 
count += 1; - } else { - break; - } - } - } - self.group_trailing(count) - } - - fn group_until(&mut self, transaction_id: TransactionId) { - let mut count = 0; - for transaction in self.undo_stack.iter().rev() { - if transaction.id == transaction_id { - self.group_trailing(count); - break; - } else if transaction.suppress_grouping { - break; - } else { - count += 1; - } - } - } - - fn group_trailing(&mut self, n: usize) -> Option { - let new_len = self.undo_stack.len() - n; - let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); - if let Some(last_transaction) = transactions_to_keep.last_mut() { - if let Some(transaction) = transactions_to_merge.last() { - last_transaction.last_edit_at = transaction.last_edit_at; - } - for to_merge in transactions_to_merge { - for (buffer_id, transaction_id) in &to_merge.buffer_transactions { - last_transaction - .buffer_transactions - .entry(*buffer_id) - .or_insert(*transaction_id); - } - } - } - - self.undo_stack.truncate(new_len); - self.undo_stack.last().map(|t| t.id) - } -} - impl Excerpt { fn new( id: ExcerptId, @@ -6957,21 +6111,16 @@ impl Excerpt { let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); - let footer_height = if self.has_trailing_newline + let has_footer = self.has_trailing_newline && range.start <= self.text_summary.len - && range.end > self.text_summary.len - { - 1 - } else { - 0 - }; + && range.end > self.text_summary.len; let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware); ExcerptChunks { excerpt_id: self.id, content_chunks, - footer_height, + has_footer, } } @@ -6980,14 +6129,9 @@ impl Excerpt { let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); excerpt_chunks.content_chunks.seek(chunks_start..chunks_end); - excerpt_chunks.footer_height = if self.has_trailing_newline + excerpt_chunks.has_footer = self.has_trailing_newline && range.start <= self.text_summary.len - && range.end > self.text_summary.len - { - 1 - } else { - 0 - }; + && range.end > self.text_summary.len; } fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { @@ -7727,7 +6871,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { let split_idx = diff_transform_end - self.range.start; let (before, after) = chunk.text.split_at(split_idx); self.range.start = diff_transform_end; - let mask = (1 << split_idx) - 1; + let mask = 1u128.unbounded_shl(split_idx as u32).wrapping_sub(1); let chars = chunk.chars & mask; let tabs = chunk.tabs & mask; @@ -7877,10 +7021,10 @@ impl<'a> Iterator for ExcerptChunks<'a> { return Some(chunk); } - if self.footer_height > 0 { - let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.footer_height]) }; - let chars = (1 << self.footer_height) - 1; - self.footer_height = 0; + if self.has_footer { + let text = "\n"; + let chars = 0b1; + self.has_footer = false; return Some(Chunk { text, chars, @@ -7896,6 +7040,9 @@ impl ToOffset for Point { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_to_offset(*self) } + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { + snapshot.point_to_offset_utf16(*self) + } } impl ToOffset for usize { @@ -7909,29 +7056,27 @@ impl ToOffset for usize { ); *self } + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { + snapshot.offset_to_offset_utf16(*self) + } } impl ToOffset for OffsetUtf16 { fn 
to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.offset_utf16_to_offset(*self) } -} - -impl ToOffset for PointUtf16 { - fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { - snapshot.point_utf16_to_offset(*self) - } -} -impl ToOffsetUtf16 for OffsetUtf16 { fn to_offset_utf16(&self, _snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { *self } } -impl ToOffsetUtf16 for usize { +impl ToOffset for PointUtf16 { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { + snapshot.point_utf16_to_offset(*self) + } fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 { - snapshot.offset_to_offset_utf16(*self) + snapshot.point_utf16_to_offset_utf16(*self) } } @@ -7939,27 +7084,24 @@ impl ToPoint for usize { fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { snapshot.offset_to_point(*self) } + fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { + snapshot.offset_to_point_utf16(*self) + } } impl ToPoint for Point { fn to_point<'a>(&self, _: &MultiBufferSnapshot) -> Point { *self } -} - -impl ToPointUtf16 for usize { - fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { - snapshot.offset_to_point_utf16(*self) - } -} - -impl ToPointUtf16 for Point { fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { snapshot.point_to_point_utf16(*self) } } -impl ToPointUtf16 for PointUtf16 { +impl ToPoint for PointUtf16 { + fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { + snapshot.point_utf16_to_point(*self) + } fn to_point_utf16<'a>(&self, _: &MultiBufferSnapshot) -> PointUtf16 { *self } diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index bad99d5412cd009ecaeabe9c6ad7686f07d30862..a9121b9104400d88d5f22801db1bfebaeeb060d6 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -7,6 +7,7 @@ use parking_lot::RwLock; use rand::prelude::*; use settings::SettingsStore; use std::env; +use std::time::{Duration, Instant}; use util::RandomCharIter; use util::rel_path::rel_path; use util::test::sample_text; @@ -78,7 +79,9 @@ fn test_remote(cx: &mut App) { let ops = cx .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); + let mut buffer = + Buffer::from_proto(ReplicaId::REMOTE_SERVER, Capability::ReadWrite, state, None) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| language::proto::deserialize_operation(op).unwrap()), @@ -157,15 +160,12 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { events.read().as_slice(), &[ Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }, Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, }, Event::Edited { - singleton_buffer_edited: false, edited_buffer: None, } ] @@ -800,7 +800,13 @@ async fn test_set_anchored_excerpts_for_path(cx: &mut TestAppContext) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges_1 = multibuffer .update(cx, |multibuffer, cx| { - multibuffer.set_anchored_excerpts_for_path(buffer_1.clone(), ranges_1, 2, cx) + multibuffer.set_anchored_excerpts_for_path( + PathKey::for_buffer(&buffer_1, cx), + buffer_1.clone(), + ranges_1, + 2, + cx, + ) }) .await; let snapshot_1 = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); @@ -817,7 +823,13 @@ async fn 
test_set_anchored_excerpts_for_path(cx: &mut TestAppContext) { ); let anchor_ranges_2 = multibuffer .update(cx, |multibuffer, cx| { - multibuffer.set_anchored_excerpts_for_path(buffer_2.clone(), ranges_2, 2, cx) + multibuffer.set_anchored_excerpts_for_path( + PathKey::for_buffer(&buffer_2, cx), + buffer_2.clone(), + ranges_2, + 2, + cx, + ) }) .await; let snapshot_2 = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); @@ -2973,7 +2985,7 @@ fn test_history(cx: &mut App) { }); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |this, _| { - this.history.group_interval = group_interval; + this.set_group_interval(group_interval); }); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( @@ -3627,7 +3639,7 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { fn assert_line_indents(snapshot: &MultiBufferSnapshot) { let max_row = snapshot.max_point().row; let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); - let text = text::Buffer::new(0, buffer_id, snapshot.text()); + let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) .collect::>(); diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs new file mode 100644 index 0000000000000000000000000000000000000000..b6175b7aaab4f631728bcfaf8094120068032994 --- /dev/null +++ b/crates/multi_buffer/src/path_key.rs @@ -0,0 +1,417 @@ +use std::{mem, ops::Range, sync::Arc}; + +use collections::HashSet; +use gpui::{App, AppContext, Context, Entity, Task}; +use itertools::Itertools; +use language::{Buffer, BufferSnapshot}; +use rope::Point; +use text::{Bias, OffsetRangeExt, locator::Locator}; +use util::{post_inc, rel_path::RelPath}; + +use crate::{ + Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges, +}; + +#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] +pub struct PathKey { + // Used by the derived PartialOrd & Ord + pub sort_prefix: Option, + pub path: Arc, +} + +impl PathKey { + pub fn with_sort_prefix(sort_prefix: u64, path: Arc) -> Self { + Self { + sort_prefix: Some(sort_prefix), + path, + } + } + + pub fn for_buffer(buffer: &Entity, cx: &App) -> Self { + if let Some(file) = buffer.read(cx).file() { + Self::with_sort_prefix(file.worktree_id(cx).to_proto(), file.path().clone()) + } else { + Self { + sort_prefix: None, + path: RelPath::unix(&buffer.entity_id().to_string()) + .unwrap() + .into_arc(), + } + } + } +} + +impl MultiBuffer { + pub fn paths(&self) -> impl Iterator + '_ { + self.excerpts_by_path.keys().cloned() + } + + pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { + if let Some(to_remove) = self.excerpts_by_path.remove(&path) { + self.remove_excerpts(to_remove, cx) + } + } + + pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { + let excerpt_id = self.excerpts_by_path.get(path)?.first()?; + let snapshot = self.read(cx); + let excerpt = snapshot.excerpt(*excerpt_id)?; + Some(Anchor::in_buffer( + *excerpt_id, + excerpt.buffer_id, + excerpt.range.context.start, + )) + } + + pub fn excerpt_paths(&self) -> impl Iterator { + self.excerpts_by_path.keys() + } + + /// Sets excerpts, returns `true` if at least one new excerpt was added. 
+ pub fn set_excerpts_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> (Vec>, bool) { + let buffer_snapshot = buffer.read(cx).snapshot(); + let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); + + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + self.set_merged_excerpt_ranges_for_path( + path, + buffer, + excerpt_ranges, + &buffer_snapshot, + new, + counts, + cx, + ) + } + + pub fn set_excerpt_ranges_for_path( + &mut self, + path: PathKey, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + excerpt_ranges: Vec>, + cx: &mut Context, + ) -> (Vec>, bool) { + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + self.set_merged_excerpt_ranges_for_path( + path, + buffer, + excerpt_ranges, + buffer_snapshot, + new, + counts, + cx, + ) + } + + pub fn set_anchored_excerpts_for_path( + &self, + path_key: PathKey, + buffer: Entity, + ranges: Vec>, + context_line_count: u32, + cx: &mut Context, + ) -> Task>> { + let buffer_snapshot = buffer.read(cx).snapshot(); + cx.spawn(async move |multi_buffer, cx| { + let snapshot = buffer_snapshot.clone(); + let (excerpt_ranges, new, counts) = cx + .background_spawn(async move { + let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); + let excerpt_ranges = + build_excerpt_ranges(ranges, context_line_count, &snapshot); + let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); + (excerpt_ranges, new, counts) + }) + .await; + + multi_buffer + .update(cx, move |multi_buffer, cx| { + let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( + path_key, + buffer, + excerpt_ranges, + &buffer_snapshot, + new, + counts, + cx, + ); + ranges + }) + .ok() + .unwrap_or_default() + }) + } + + pub(super) fn expand_excerpts_with_paths( + &mut self, + ids: impl IntoIterator, + line_count: u32, + direction: ExpandExcerptDirection, + cx: &mut Context, + ) { + let grouped = ids + .into_iter() + .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) + .into_iter() + .flat_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) + .collect::>(); + let snapshot = self.snapshot(cx); + + for (path, ids) in grouped.into_iter() { + let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { + continue; + }; + + let ids_to_expand = HashSet::from_iter(ids); + let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { + let excerpt = snapshot.excerpt(*excerpt_id)?; + + let mut context = excerpt.range.context.to_point(&excerpt.buffer); + if ids_to_expand.contains(excerpt_id) { + match direction { + ExpandExcerptDirection::Up => { + context.start.row = context.start.row.saturating_sub(line_count); + context.start.column = 0; + } + ExpandExcerptDirection::Down => { + context.end.row = + (context.end.row + line_count).min(excerpt.buffer.max_point().row); + context.end.column = excerpt.buffer.line_len(context.end.row); + } + ExpandExcerptDirection::UpAndDown => { + context.start.row = context.start.row.saturating_sub(line_count); + context.start.column = 0; + context.end.row = + (context.end.row + line_count).min(excerpt.buffer.max_point().row); + context.end.column = excerpt.buffer.line_len(context.end.row); + } + } + } + + Some(ExcerptRange { + context, + primary: excerpt.range.primary.to_point(&excerpt.buffer), + }) + }); + let mut merged_ranges: Vec> = Vec::new(); + for range in expanded_ranges { + if let Some(last_range) = merged_ranges.last_mut() + && last_range.context.end >= 
range.context.start + { + last_range.context.end = range.context.end; + continue; + } + merged_ranges.push(range) + } + let Some(excerpt_id) = excerpt_ids.first() else { + continue; + }; + let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(*excerpt_id) else { + continue; + }; + + let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { + continue; + }; + + let buffer_snapshot = buffer.read(cx).snapshot(); + self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); + } + } + + /// Sets excerpts, returns `true` if at least one new excerpt was added. + fn set_merged_excerpt_ranges_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: Vec>, + buffer_snapshot: &BufferSnapshot, + new: Vec>, + counts: Vec, + cx: &mut Context, + ) -> (Vec>, bool) { + let (excerpt_ids, added_a_new_excerpt) = + self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); + + let mut result = Vec::new(); + let mut ranges = ranges.into_iter(); + for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(counts.into_iter()) { + for range in ranges.by_ref().take(range_count) { + let range = Anchor::range_in_buffer( + excerpt_id, + buffer_snapshot.remote_id(), + buffer_snapshot.anchor_before(&range.primary.start) + ..buffer_snapshot.anchor_after(&range.primary.end), + ); + result.push(range) + } + } + (result, added_a_new_excerpt) + } + + fn update_path_excerpts( + &mut self, + path: PathKey, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + new: Vec>, + cx: &mut Context, + ) -> (Vec, bool) { + let mut insert_after = self + .excerpts_by_path + .range(..path.clone()) + .next_back() + .map(|(_, value)| *value.last().unwrap()) + .unwrap_or(ExcerptId::min()); + + let existing = self + .excerpts_by_path + .get(&path) + .cloned() + .unwrap_or_default(); + + let mut new_iter = new.into_iter().peekable(); + let mut existing_iter = existing.into_iter().peekable(); + + let mut excerpt_ids = Vec::new(); + let mut to_remove = Vec::new(); + let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); + let mut added_a_new_excerpt = false; + let snapshot = self.snapshot(cx); + + let mut next_excerpt_id = + if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { + last_entry.id.0 + 1 + } else { + 1 + }; + + let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); + + let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); + excerpts_cursor.next(); + + loop { + let new = new_iter.peek(); + let existing = if let Some(existing_id) = existing_iter.peek() { + let locator = snapshot.excerpt_locator_for_id(*existing_id); + excerpts_cursor.seek_forward(&Some(locator), Bias::Left); + if let Some(excerpt) = excerpts_cursor.item() { + if excerpt.buffer_id != buffer_snapshot.remote_id() { + to_remove.push(*existing_id); + existing_iter.next(); + continue; + } + Some(( + *existing_id, + excerpt.range.context.to_point(buffer_snapshot), + )) + } else { + None + } + } else { + None + }; + + if let Some((last_id, last)) = to_insert.last_mut() { + if let Some(new) = new + && last.context.end >= new.context.start + { + last.context.end = last.context.end.max(new.context.end); + excerpt_ids.push(*last_id); + new_iter.next(); + continue; + } + if let Some((existing_id, existing_range)) = &existing + && last.context.end >= existing_range.start + { + last.context.end = last.context.end.max(existing_range.end); + to_remove.push(*existing_id); + self.snapshot + .get_mut() + .replaced_excerpts + .insert(*existing_id, *last_id); + 
existing_iter.next(); + continue; + } + } + + match (new, existing) { + (None, None) => break, + (None, Some((existing_id, _))) => { + existing_iter.next(); + to_remove.push(existing_id); + continue; + } + (Some(_), None) => { + added_a_new_excerpt = true; + let new_id = next_excerpt_id(); + excerpt_ids.push(new_id); + to_insert.push((new_id, new_iter.next().unwrap())); + continue; + } + (Some(new), Some((_, existing_range))) => { + if existing_range.end < new.context.start { + let existing_id = existing_iter.next().unwrap(); + to_remove.push(existing_id); + continue; + } else if existing_range.start > new.context.end { + let new_id = next_excerpt_id(); + excerpt_ids.push(new_id); + to_insert.push((new_id, new_iter.next().unwrap())); + continue; + } + + if existing_range.start == new.context.start + && existing_range.end == new.context.end + { + self.insert_excerpts_with_ids_after( + insert_after, + buffer.clone(), + mem::take(&mut to_insert), + cx, + ); + insert_after = existing_iter.next().unwrap(); + excerpt_ids.push(insert_after); + new_iter.next(); + } else { + let existing_id = existing_iter.next().unwrap(); + let new_id = next_excerpt_id(); + self.snapshot + .get_mut() + .replaced_excerpts + .insert(existing_id, new_id); + to_remove.push(existing_id); + let mut range = new_iter.next().unwrap(); + range.context.start = range.context.start.min(existing_range.start); + range.context.end = range.context.end.max(existing_range.end); + excerpt_ids.push(new_id); + to_insert.push((new_id, range)); + } + } + }; + } + + self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); + self.remove_excerpts(to_remove, cx); + if excerpt_ids.is_empty() { + self.excerpts_by_path.remove(&path); + } else { + for excerpt_id in &excerpt_ids { + self.paths_by_excerpt.insert(*excerpt_id, path.clone()); + } + self.excerpts_by_path + .insert(path, excerpt_ids.iter().dedup().cloned().collect()); + } + + (excerpt_ids, added_a_new_excerpt) + } +} diff --git a/crates/multi_buffer/src/transaction.rs b/crates/multi_buffer/src/transaction.rs new file mode 100644 index 0000000000000000000000000000000000000000..062d25d8233777190113aaa3e6a7f62396cfd08f --- /dev/null +++ b/crates/multi_buffer/src/transaction.rs @@ -0,0 +1,524 @@ +use gpui::{App, Context, Entity}; +use language::{self, Buffer, TextDimension, TransactionId}; +use std::{ + collections::HashMap, + ops::{Range, Sub}, + time::{Duration, Instant}, +}; +use sum_tree::Bias; +use text::BufferId; + +use crate::BufferState; + +use super::{Event, ExcerptSummary, MultiBuffer}; + +#[derive(Clone)] +pub(super) struct History { + next_transaction_id: TransactionId, + undo_stack: Vec, + redo_stack: Vec, + transaction_depth: usize, + group_interval: Duration, +} + +impl Default for History { + fn default() -> Self { + History { + next_transaction_id: clock::Lamport::MIN, + undo_stack: Vec::new(), + redo_stack: Vec::new(), + transaction_depth: 0, + group_interval: Duration::from_millis(300), + } + } +} + +#[derive(Clone)] +struct Transaction { + id: TransactionId, + buffer_transactions: HashMap, + first_edit_at: Instant, + last_edit_at: Instant, + suppress_grouping: bool, +} + +impl History { + fn start_transaction(&mut self, now: Instant) -> Option { + self.transaction_depth += 1; + if self.transaction_depth == 1 { + let id = self.next_transaction_id.tick(); + self.undo_stack.push(Transaction { + id, + buffer_transactions: Default::default(), + first_edit_at: now, + last_edit_at: now, + suppress_grouping: false, + }); + Some(id) + } else { + None + } + } + + 
fn end_transaction( + &mut self, + now: Instant, + buffer_transactions: HashMap, + ) -> bool { + assert_ne!(self.transaction_depth, 0); + self.transaction_depth -= 1; + if self.transaction_depth == 0 { + if buffer_transactions.is_empty() { + self.undo_stack.pop(); + false + } else { + self.redo_stack.clear(); + let transaction = self.undo_stack.last_mut().unwrap(); + transaction.last_edit_at = now; + for (buffer_id, transaction_id) in buffer_transactions { + transaction + .buffer_transactions + .entry(buffer_id) + .or_insert(transaction_id); + } + true + } + } else { + false + } + } + + fn push_transaction<'a, T>( + &mut self, + buffer_transactions: T, + now: Instant, + cx: &Context, + ) where + T: IntoIterator, &'a language::Transaction)>, + { + assert_eq!(self.transaction_depth, 0); + let transaction = Transaction { + id: self.next_transaction_id.tick(), + buffer_transactions: buffer_transactions + .into_iter() + .map(|(buffer, transaction)| (buffer.read(cx).remote_id(), transaction.id)) + .collect(), + first_edit_at: now, + last_edit_at: now, + suppress_grouping: false, + }; + if !transaction.buffer_transactions.is_empty() { + self.undo_stack.push(transaction); + self.redo_stack.clear(); + } + } + + fn finalize_last_transaction(&mut self) { + if let Some(transaction) = self.undo_stack.last_mut() { + transaction.suppress_grouping = true; + } + } + + fn forget(&mut self, transaction_id: TransactionId) -> Option { + if let Some(ix) = self + .undo_stack + .iter() + .rposition(|transaction| transaction.id == transaction_id) + { + Some(self.undo_stack.remove(ix)) + } else if let Some(ix) = self + .redo_stack + .iter() + .rposition(|transaction| transaction.id == transaction_id) + { + Some(self.redo_stack.remove(ix)) + } else { + None + } + } + + fn transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> { + self.undo_stack + .iter() + .find(|transaction| transaction.id == transaction_id) + .or_else(|| { + self.redo_stack + .iter() + .find(|transaction| transaction.id == transaction_id) + }) + } + + fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> { + self.undo_stack + .iter_mut() + .find(|transaction| transaction.id == transaction_id) + .or_else(|| { + self.redo_stack + .iter_mut() + .find(|transaction| transaction.id == transaction_id) + }) + } + + fn pop_undo(&mut self) -> Option<&mut Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction) = self.undo_stack.pop() { + self.redo_stack.push(transaction); + self.redo_stack.last_mut() + } else { + None + } + } + + fn pop_redo(&mut self) -> Option<&mut Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction) = self.redo_stack.pop() { + self.undo_stack.push(transaction); + self.undo_stack.last_mut() + } else { + None + } + } + + fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + let ix = self + .undo_stack + .iter() + .rposition(|transaction| transaction.id == transaction_id)?; + let transaction = self.undo_stack.remove(ix); + self.redo_stack.push(transaction); + self.redo_stack.last() + } + + fn group(&mut self) -> Option { + let mut count = 0; + let mut transactions = self.undo_stack.iter(); + if let Some(mut transaction) = transactions.next_back() { + while let Some(prev_transaction) = transactions.next_back() { + if !prev_transaction.suppress_grouping + && transaction.first_edit_at - prev_transaction.last_edit_at + <= self.group_interval + { + transaction = prev_transaction; + count += 1; + } 
else { + break; + } + } + } + self.group_trailing(count) + } + + fn group_until(&mut self, transaction_id: TransactionId) { + let mut count = 0; + for transaction in self.undo_stack.iter().rev() { + if transaction.id == transaction_id { + self.group_trailing(count); + break; + } else if transaction.suppress_grouping { + break; + } else { + count += 1; + } + } + } + + fn group_trailing(&mut self, n: usize) -> Option { + let new_len = self.undo_stack.len() - n; + let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); + if let Some(last_transaction) = transactions_to_keep.last_mut() { + if let Some(transaction) = transactions_to_merge.last() { + last_transaction.last_edit_at = transaction.last_edit_at; + } + for to_merge in transactions_to_merge { + for (buffer_id, transaction_id) in &to_merge.buffer_transactions { + last_transaction + .buffer_transactions + .entry(*buffer_id) + .or_insert(*transaction_id); + } + } + } + + self.undo_stack.truncate(new_len); + self.undo_stack.last().map(|t| t.id) + } + + pub(super) fn transaction_depth(&self) -> usize { + self.transaction_depth + } + + pub fn set_group_interval(&mut self, group_interval: Duration) { + self.group_interval = group_interval; + } +} + +impl MultiBuffer { + pub fn start_transaction(&mut self, cx: &mut Context) -> Option { + self.start_transaction_at(Instant::now(), cx) + } + + pub fn start_transaction_at( + &mut self, + now: Instant, + cx: &mut Context, + ) -> Option { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + + for BufferState { buffer, .. } in self.buffers.values() { + buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + self.history.start_transaction(now) + } + + pub fn last_transaction_id(&self, cx: &App) -> Option { + if let Some(buffer) = self.as_singleton() { + buffer + .read(cx) + .peek_undo_stack() + .map(|history_entry| history_entry.transaction_id()) + } else { + let last_transaction = self.history.undo_stack.last()?; + Some(last_transaction.id) + } + } + + pub fn end_transaction(&mut self, cx: &mut Context) -> Option { + self.end_transaction_at(Instant::now(), cx) + } + + pub fn end_transaction_at( + &mut self, + now: Instant, + cx: &mut Context, + ) -> Option { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); + } + + let mut buffer_transactions = HashMap::default(); + for BufferState { buffer, .. 
} in self.buffers.values() { + if let Some(transaction_id) = + buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + { + buffer_transactions.insert(buffer.read(cx).remote_id(), transaction_id); + } + } + + if self.history.end_transaction(now, buffer_transactions) { + let transaction_id = self.history.group().unwrap(); + Some(transaction_id) + } else { + None + } + } + + pub fn edited_ranges_for_transaction<D>( + &self, + transaction_id: TransactionId, + cx: &App, + ) -> Vec<Range<D>> + where + D: TextDimension + Ord + Sub<D, Output = D>, + { + let Some(transaction) = self.history.transaction(transaction_id) else { + return Vec::new(); + }; + + let mut ranges = Vec::new(); + let snapshot = self.read(cx); + let mut cursor = snapshot.excerpts.cursor::(()); + + for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { + let Some(buffer_state) = self.buffers.get(buffer_id) else { + continue; + }; + + let buffer = buffer_state.buffer.read(cx); + for range in buffer.edited_ranges_for_transaction_id::<D>(*buffer_transaction) { + for excerpt_id in &buffer_state.excerpts { + cursor.seek(excerpt_id, Bias::Left); + if let Some(excerpt) = cursor.item() + && excerpt.locator == *excerpt_id + { + let excerpt_buffer_start = excerpt.range.context.start.summary::<D>(buffer); + let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(buffer); + let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; + if excerpt_range.contains(&range.start) + && excerpt_range.contains(&range.end) + { + let excerpt_start = D::from_text_summary(&cursor.start().text); + + let mut start = excerpt_start; + start.add_assign(&(range.start - excerpt_buffer_start)); + let mut end = excerpt_start; + end.add_assign(&(range.end - excerpt_buffer_start)); + + ranges.push(start..end); + break; + } + } + } + } + } + + ranges.sort_by_key(|range| range.start); + ranges + } + + pub fn merge_transactions( + &mut self, + transaction: TransactionId, + destination: TransactionId, + cx: &mut Context<Self>, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.merge_transactions(transaction, destination) + }); + } else if let Some(transaction) = self.history.forget(transaction) + && let Some(destination) = self.history.transaction_mut(destination) + { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(destination_buffer_transaction_id) = + destination.buffer_transactions.get(&buffer_id) + { + if let Some(state) = self.buffers.get(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.merge_transactions( + buffer_transaction_id, + *destination_buffer_transaction_id, + ) + }); + } + } else { + destination + .buffer_transactions + .insert(buffer_id, buffer_transaction_id); + } + } + } + } + + pub fn finalize_last_transaction(&mut self, cx: &mut Context<Self>) { + self.history.finalize_last_transaction(); + for BufferState { buffer, ..
} in self.buffers.values() { + buffer.update(cx, |buffer, _| { + buffer.finalize_last_transaction(); + }); + } + } + + pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T, cx: &Context<Self>) + where + T: IntoIterator<Item = (&'a Entity<Buffer>, &'a language::Transaction)>, + { + self.history + .push_transaction(buffer_transactions, Instant::now(), cx); + self.history.finalize_last_transaction(); + } + + pub fn group_until_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut Context<Self>, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.group_until_transaction(transaction_id) + }); + } else { + self.history.group_until(transaction_id); + } + } + pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> { + let mut transaction_id = None; + if let Some(buffer) = self.as_singleton() { + transaction_id = buffer.update(cx, |buffer, cx| buffer.undo(cx)); + } else { + while let Some(transaction) = self.history.pop_undo() { + let mut undone = false; + for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { + if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { + undone |= buffer.update(cx, |buffer, cx| { + let undo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_undo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.undo_to_transaction(undo_to, cx) + }); + } + } + + if undone { + transaction_id = Some(transaction.id); + break; + } + } + } + + if let Some(transaction_id) = transaction_id { + cx.emit(Event::TransactionUndone { transaction_id }); + } + + transaction_id + } + + pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> { + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.redo(cx)); + } + + while let Some(transaction) = self.history.pop_redo() { + let mut redone = false; + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions.iter_mut() { + if let Some(BufferState { buffer, .. }) = self.buffers.get(buffer_id) { + redone |= buffer.update(cx, |buffer, cx| { + let redo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_redo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.redo_to_transaction(redo_to, cx) + }); + } + } + + if redone { + return Some(transaction.id); + } + } + + None + } + + pub fn undo_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context<Self>) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx)); + } else if let Some(transaction) = self.history.remove_from_undo(transaction_id) { + for (buffer_id, transaction_id) in &transaction.buffer_transactions { + if let Some(BufferState { buffer, ..
}) = self.buffers.get(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(*transaction_id, cx) + }); + } + } + } + } + + pub fn forget_transaction(&mut self, transaction_id: TransactionId, cx: &mut Context) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.forget_transaction(transaction_id); + }); + } else if let Some(transaction) = self.history.forget(transaction_id) { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(state) = self.buffers.get_mut(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.forget_transaction(buffer_transaction_id); + }); + } + } + } + } +} diff --git a/crates/nc/Cargo.toml b/crates/nc/Cargo.toml index 46ef2d3c62e233cc8693b3fdb3082749c05d9ed5..534ec2271ca44e8880db973c977948aa7d9a9f53 100644 --- a/crates/nc/Cargo.toml +++ b/crates/nc/Cargo.toml @@ -17,4 +17,3 @@ anyhow.workspace = true futures.workspace = true net.workspace = true smol.workspace = true -workspace-hack.workspace = true diff --git a/crates/net/Cargo.toml b/crates/net/Cargo.toml index fc08bc89f53550fe926ce4a00ac68ce4b0502409..8ce273e30ce891dc981e433237921b44e8ca3fd7 100644 --- a/crates/net/Cargo.toml +++ b/crates/net/Cargo.toml @@ -14,7 +14,6 @@ doctest = false [dependencies] smol.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "windows")'.dependencies] anyhow.workspace = true diff --git a/crates/node_runtime/Cargo.toml b/crates/node_runtime/Cargo.toml index 144fc2ae8545619b2548e9f7f3eb070363a02900..dfa40ad666e982c5f037114135c6cf7388f9a910 100644 --- a/crates/node_runtime/Cargo.toml +++ b/crates/node_runtime/Cargo.toml @@ -31,7 +31,6 @@ smol.workspace = true util.workspace = true watch.workspace = true which.workspace = true -workspace-hack.workspace = true [target.'cfg(windows)'.dependencies] async-std = { version = "1.12.0", features = ["unstable"] } diff --git a/crates/notifications/Cargo.toml b/crates/notifications/Cargo.toml index 39acfe2b384c8a2264c5c2dac91024edad89d33a..8304c788fdd1ca840d68dbb4eb24bf5e3e79abdc 100644 --- a/crates/notifications/Cargo.toml +++ b/crates/notifications/Cargo.toml @@ -33,7 +33,6 @@ time.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true zed_actions.workspace = true [dev-dependencies] diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 0964a648b0bead5d46fe7d63113d6bc966673116..7cae74a7293694ebedd603ded656af00201c7366 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -123,14 +123,16 @@ impl NotificationStore { return None; } let ix = count - 1 - ix; - let mut cursor = self.notifications.cursor::(()); - cursor.seek(&Count(ix), Bias::Right); - cursor.item() + let (.., item) = self + .notifications + .find::((), &Count(ix), Bias::Right); + item } pub fn notification_for_id(&self, id: u64) -> Option<&NotificationEntry> { - let mut cursor = self.notifications.cursor::(()); - cursor.seek(&NotificationId(id), Bias::Left); - if let Some(item) = cursor.item() + let (.., item) = + self.notifications + .find::((), &NotificationId(id), Bias::Left); + if let Some(item) = item && item.id == id { return Some(item); diff --git a/crates/ollama/Cargo.toml b/crates/ollama/Cargo.toml index 0cf1a5505d8035808b8c3f2a0407557535b15008..fed74993fa4050b5ae690735d9b90f229f33ff5c 100644 --- a/crates/ollama/Cargo.toml +++ b/crates/ollama/Cargo.toml @@ -23,4 +23,3 
@@ schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true settings.workspace = true -workspace-hack.workspace = true diff --git a/crates/onboarding/Cargo.toml b/crates/onboarding/Cargo.toml index 2e9797f717b446177efa08713489aed49892c8c8..2ff3467c4804f7c0a50488a2c4a1e283ea571292 100644 --- a/crates/onboarding/Cargo.toml +++ b/crates/onboarding/Cargo.toml @@ -34,10 +34,8 @@ settings.workspace = true telemetry.workspace = true theme.workspace = true ui.workspace = true -ui_input.workspace = true util.workspace = true vim_mode_setting.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true zlog.workspace = true diff --git a/crates/onboarding/src/onboarding.rs b/crates/onboarding/src/onboarding.rs index 9273f0d7d87851b5118d7835244074502fc128c7..70b6d878ac55c5da470455695542cc0597341c1f 100644 --- a/crates/onboarding/src/onboarding.rs +++ b/crates/onboarding/src/onboarding.rs @@ -17,7 +17,6 @@ use ui::{ Divider, KeyBinding, ParentElement as _, StatefulInteractiveElement, Vector, VectorName, WithScrollbar as _, prelude::*, rems_from_px, }; -pub use ui_input::font_picker; use workspace::{ AppState, Workspace, WorkspaceId, dock::DockPosition, @@ -338,10 +337,9 @@ impl Render for Onboarding { KeyBinding::for_action_in( &Finish, &self.focus_handle, - window, cx, ) - .map(|kb| kb.size(rems_from_px(12.))), + .size(rems_from_px(12.)), ) .on_click(|_, window, cx| { window.dispatch_action(Finish.boxed_clone(), cx); diff --git a/crates/onboarding/src/welcome.rs b/crates/onboarding/src/welcome.rs index 50f0d83698adbd1b8bff0d7e73a5f342d8fe11cd..b2711cd52d61a51711bd8ec90581b981d7bcf784 100644 --- a/crates/onboarding/src/welcome.rs +++ b/crates/onboarding/src/welcome.rs @@ -78,13 +78,7 @@ struct Section { } impl Section { - fn render( - self, - index_offset: usize, - focus: &FocusHandle, - window: &mut Window, - cx: &mut App, - ) -> impl IntoElement { + fn render(self, index_offset: usize, focus: &FocusHandle, cx: &mut App) -> impl IntoElement { v_flex() .min_w_full() .child( @@ -104,7 +98,7 @@ impl Section { self.entries .iter() .enumerate() - .map(|(index, entry)| entry.render(index_offset + index, focus, window, cx)), + .map(|(index, entry)| entry.render(index_offset + index, focus, cx)), ) } } @@ -116,13 +110,7 @@ struct SectionEntry { } impl SectionEntry { - fn render( - &self, - button_index: usize, - focus: &FocusHandle, - window: &Window, - cx: &App, - ) -> impl IntoElement { + fn render(&self, button_index: usize, focus: &FocusHandle, cx: &App) -> impl IntoElement { ButtonLike::new(("onboarding-button-id", button_index)) .tab_index(button_index as isize) .full_width() @@ -141,9 +129,8 @@ impl SectionEntry { ) .child(Label::new(self.title)), ) - .children( - KeyBinding::for_action_in(self.action, focus, window, cx) - .map(|s| s.size(rems_from_px(12.))), + .child( + KeyBinding::for_action_in(self.action, focus, cx).size(rems_from_px(12.)), ), ) .on_click(|_, window, cx| window.dispatch_action(self.action.boxed_clone(), cx)) @@ -151,7 +138,6 @@ impl SectionEntry { } pub struct WelcomePage { - first_paint: bool, focus_handle: FocusHandle, } @@ -168,11 +154,7 @@ impl WelcomePage { } impl Render for WelcomePage { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - if self.first_paint { - window.request_animation_frame(); - self.first_paint = false; - } + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let (first_section, second_section) = CONTENT; let 
first_section_entries = first_section.entries.len(); let last_index = first_section_entries + second_section.entries.len(); @@ -220,13 +202,11 @@ impl Render for WelcomePage { .child(first_section.render( Default::default(), &self.focus_handle, - window, cx, )) .child(second_section.render( first_section_entries, &self.focus_handle, - window, cx, )) .child( @@ -316,10 +296,7 @@ impl WelcomePage { cx.on_focus(&focus_handle, window, |_, _, cx| cx.notify()) .detach(); - WelcomePage { - first_paint: true, - focus_handle, - } + WelcomePage { focus_handle } }) } } diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index 776e308c490bf464c641800399ffaf8a1f301702..49284eff79c11414c0811abd107f7c16ca701179 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -25,4 +25,3 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/open_router/Cargo.toml b/crates/open_router/Cargo.toml index f9728673bd891488b90c062acf7cb507bb9e329f..cccb92c33b05b8fff0e5e78277c9f7fa29844ace 100644 --- a/crates/open_router/Cargo.toml +++ b/crates/open_router/Cargo.toml @@ -25,4 +25,3 @@ serde_json.workspace = true settings.workspace = true strum.workspace = true thiserror.workspace = true -workspace-hack.workspace = true diff --git a/crates/outline/Cargo.toml b/crates/outline/Cargo.toml index d4c69acbf9f72f498623e2f253dabe3e960209f0..5069fa2373d16e7afb69f8f9899d86edb09d55a9 100644 --- a/crates/outline/Cargo.toml +++ b/crates/outline/Cargo.toml @@ -26,7 +26,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index ac74d6284f4fe2fe62bcad7be447b142255056b4..9e49fabb474d765aa79703ef55c1c98842bee209 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -245,7 +245,10 @@ impl PickerDelegate for OutlineViewDelegate { let (buffer, cursor_offset) = self.active_editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx).snapshot(cx); - let cursor_offset = editor.selections.newest::(cx).head(); + let cursor_offset = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head(); (buffer, cursor_offset) }); selected_index = self @@ -673,7 +676,7 @@ mod tests { let selections = editor.update(cx, |editor, cx| { editor .selections - .all::(cx) + .all::(&editor.display_snapshot(cx)) .into_iter() .map(|s| s.start..s.end) .collect::>() diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index b851ae672df0ba4a99b25e19c8c2ebaf49676346..72e2d1eb63b1253e66bf2b7ef46dfb714fb24db6 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -38,7 +38,6 @@ util.workspace = true workspace.workspace = true worktree.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] search = { workspace = true, features = ["test-support"] } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 6dcc572e6c022112f886b9c192a65064040cf1af..ebc5946acf97b763d7ec06d264aeaa7169d7c68b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3099,7 +3099,10 @@ impl OutlinePanel { cx: &mut Context, ) -> Option { let selection = editor.update(cx, |editor, cx| { - 
editor.selections.newest::(cx).head() + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .head() }); let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); let multi_buffer = editor.read(cx).buffer(); @@ -3192,13 +3195,13 @@ impl OutlinePanel { .into_iter() .flat_map(|excerpt| excerpt.iter_outlines()) .flat_map(|outline| { - let start = multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, outline.range.start)? - .to_display_point(&editor_snapshot); - let end = multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, outline.range.end)? - .to_display_point(&editor_snapshot); - Some((start..end, outline)) + let range = multi_buffer_snapshot + .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?; + Some(( + range.start.to_display_point(&editor_snapshot) + ..range.end.to_display_point(&editor_snapshot), + outline, + )) }) .collect::>(); @@ -4835,6 +4838,10 @@ impl Panel for OutlinePanel { "Outline Panel" } + fn panel_key() -> &'static str { + OUTLINE_PANEL_KEY + } + fn position(&self, _: &Window, cx: &App) -> DockPosition { match OutlinePanelSettings::get_global(cx).dock { DockSide::Left => DockPosition::Left, @@ -6957,13 +6964,13 @@ outline: struct OutlineEntryExcerpt fn selected_row_text(editor: &Entity, cx: &mut App) -> String { editor.update(cx, |editor, cx| { - let selections = editor.selections.all::(cx); - assert_eq!(selections.len(), 1, "Active editor should have exactly one selection after any outline panel interactions"); - let selection = selections.first().unwrap(); - let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let line_start = language::Point::new(selection.start.row, 0); - let line_end = multi_buffer_snapshot.clip_point(language::Point::new(selection.end.row, u32::MAX), language::Bias::Right); - multi_buffer_snapshot.text_for_range(line_start..line_end).collect::().trim().to_owned() + let selections = editor.selections.all::(&editor.display_snapshot(cx)); + assert_eq!(selections.len(), 1, "Active editor should have exactly one selection after any outline panel interactions"); + let selection = selections.first().unwrap(); + let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let line_start = language::Point::new(selection.start.row, 0); + let line_end = multi_buffer_snapshot.clip_point(language::Point::new(selection.end.row, u32::MAX), language::Bias::Right); + multi_buffer_snapshot.text_for_range(line_start..line_end).collect::().trim().to_owned() }) } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index 58598bdb4f9089e2c6284976869b82be600825ae..77fb15ddeb273b6fbe928e5f364f4a135321e7be 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -62,20 +62,4 @@ impl Settings for OutlinePanelSettings { expand_outlines_with_depth: panel.expand_outlines_with_depth.unwrap(), } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if let Some(b) = vscode.read_bool("outline.icons") { - let outline_panel = current.outline_panel.get_or_insert_default(); - outline_panel.file_icons = Some(b); - outline_panel.folder_icons = Some(b); - } - - if let Some(b) = vscode.read_bool("git.decorations.enabled") { - let outline_panel = current.outline_panel.get_or_insert_default(); - outline_panel.git_status = Some(b); - } - } } diff --git a/crates/panel/Cargo.toml b/crates/panel/Cargo.toml index 
530a92356c2403edfa4ddcb7c6afd35b99630823..3c51e6d6dcdb31922c07bd1d16923fdd10eeceb7 100644 --- a/crates/panel/Cargo.toml +++ b/crates/panel/Cargo.toml @@ -18,4 +18,3 @@ settings.workspace = true theme.workspace = true ui.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml index 44bb0953e2c45f36511a21f05c608c17c60c9f48..24da7d46e9e7d14c8577550b34c592d12a19af74 100644 --- a/crates/paths/Cargo.toml +++ b/crates/paths/Cargo.toml @@ -18,4 +18,3 @@ path = "src/paths.rs" dirs.workspace = true ignore.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index bbb6ddb976312b7baca5a11ace863b4a3be8d2bc..207e1f3bb4324d17784b1d8df53ba4bfbc4adddb 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -288,7 +288,7 @@ pub fn snippets_dir() -> &'static PathBuf { /// Returns the path to the contexts directory. /// /// This is where the saved contexts from the Assistant are stored. -pub fn contexts_dir() -> &'static PathBuf { +pub fn text_threads_dir() -> &'static PathBuf { static CONTEXTS_DIR: OnceLock = OnceLock::new(); CONTEXTS_DIR.get_or_init(|| { if cfg!(target_os = "macos") { diff --git a/crates/picker/Cargo.toml b/crates/picker/Cargo.toml index 23c867b6f30aa64d5916e8939d836dda27ebf6c9..1344d177f42f9ab6a15d8f5f1353b98eadfd175f 100644 --- a/crates/picker/Cargo.toml +++ b/crates/picker/Cargo.toml @@ -25,7 +25,6 @@ serde.workspace = true theme.workspace = true ui.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/picker/src/highlighted_match_with_paths.rs b/crates/picker/src/highlighted_match_with_paths.rs index 6e91b997da2dab2ac61befd2f596e6f3a4207c85..74271047621b26be573dc2eebfffe9e9e0f1a138 100644 --- a/crates/picker/src/highlighted_match_with_paths.rs +++ b/crates/picker/src/highlighted_match_with_paths.rs @@ -2,6 +2,7 @@ use ui::{HighlightedLabel, prelude::*}; #[derive(Clone)] pub struct HighlightedMatchWithPaths { + pub prefix: Option, pub match_label: HighlightedMatch, pub paths: Vec, } @@ -67,7 +68,14 @@ impl HighlightedMatchWithPaths { impl RenderOnce for HighlightedMatchWithPaths { fn render(mut self, _window: &mut Window, _: &mut App) -> impl IntoElement { v_flex() - .child(self.match_label.clone()) + .child( + h_flex().gap_1().child(self.match_label.clone()).when_some( + self.prefix.as_ref(), + |this, prefix| { + this.child(Label::new(format!("({})", prefix)).color(Color::Muted)) + }, + ), + ) .when(!self.paths.is_empty(), |this| { self.render_paths_children(this) }) diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 247fcbdd875ffc2e52d90d9b1309f874c508e588..90423bcace0ad405e0c88703efe09f39a8763778 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -352,6 +352,16 @@ impl Picker { self } + pub fn list_measure_all(mut self) -> Self { + match self.element_container { + ElementContainer::List(state) => { + self.element_container = ElementContainer::List(state.measure_all()); + } + _ => {} + } + self + } + pub fn focus(&self, window: &mut Window, cx: &mut App) { self.focus_handle(cx).focus(window); } diff --git a/crates/picker/src/popover_menu.rs b/crates/picker/src/popover_menu.rs index baf0918fd6c8e20211d04a150af9220cb2d66839..42eedb2492149aa56de527e38fcf4f2b0e4da608 100644 --- a/crates/picker/src/popover_menu.rs +++ b/crates/picker/src/popover_menu.rs @@ -1,9 +1,9 @@ 
use gpui::{ - AnyView, Corner, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, + AnyView, Corner, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Pixels, Point, + Subscription, }; use ui::{ - App, ButtonCommon, FluentBuilder as _, IntoElement, PopoverMenu, PopoverMenuHandle, - PopoverTrigger, RenderOnce, Window, px, + FluentBuilder as _, IntoElement, PopoverMenu, PopoverMenuHandle, PopoverTrigger, prelude::*, }; use crate::{Picker, PickerDelegate}; @@ -19,6 +19,7 @@ where tooltip: TT, handle: Option>>, anchor: Corner, + offset: Option>, _subscriptions: Vec, } @@ -43,6 +44,10 @@ where trigger, tooltip, handle: None, + offset: Some(Point { + x: px(0.0), + y: px(-2.0), + }), anchor, } } @@ -51,6 +56,11 @@ where self.handle = Some(handle); self } + + pub fn offset(mut self, offset: Point) -> Self { + self.offset = Some(offset); + self + } } impl EventEmitter for PickerPopoverMenu @@ -86,9 +96,6 @@ where .trigger_with_tooltip(self.trigger, self.tooltip) .anchor(self.anchor) .when_some(self.handle, |menu, handle| menu.with_handle(handle)) - .offset(gpui::Point { - x: px(0.0), - y: px(-2.0), - }) + .when_some(self.offset, |menu, offset| menu.offset(offset)) } } diff --git a/crates/prettier/Cargo.toml b/crates/prettier/Cargo.toml index fb31f9ea1fe52fd7445fce708cdfe3db22dd06bb..9da1e4c8d67fe60e8f0ead9448b73440f6053172 100644 --- a/crates/prettier/Cargo.toml +++ b/crates/prettier/Cargo.toml @@ -29,7 +29,6 @@ paths.workspace = true serde.workspace = true serde_json.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 39dc0621732bfd42b3a24735ad803915fbf2885c..d9285a8c24ec5130dd8ce8abf5bbd77c830e0f3f 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -72,7 +72,6 @@ serde_json.workspace = true settings.workspace = true sha2.workspace = true shellexpand.workspace = true -shlex.workspace = true smallvec.workspace = true smol.workspace = true snippet.workspace = true @@ -90,7 +89,6 @@ which.workspace = true worktree.workspace = true zeroize.workspace = true zlog.workspace = true -workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 73a65f98d5a1bae0fb24c1710ecece0175bfa34f..8a950d2820c123b302cac23fd309df40528a3837 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1,6 +1,7 @@ use std::{ any::Any, borrow::Borrow, + collections::HashSet, path::{Path, PathBuf}, str::FromStr as _, sync::Arc, @@ -21,6 +22,7 @@ use rpc::{AnyProtoClient, TypedEnvelope, proto}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::SettingsStore; +use task::Shell; use util::{ResultExt as _, debug_panic}; use crate::ProjectEnvironment; @@ -125,13 +127,198 @@ enum AgentServerStoreState { pub struct AgentServerStore { state: AgentServerStoreState, external_agents: HashMap>, + agent_icons: HashMap, } pub struct AgentServersUpdated; impl EventEmitter for AgentServerStore {} +#[cfg(test)] +mod ext_agent_tests { + use super::*; + use std::fmt::Write as _; + + // Helper to build a store in Collab mode so we can mutate internal maps without + // needing to spin up a full project environment. 
+ fn collab_store() -> AgentServerStore { + AgentServerStore { + state: AgentServerStoreState::Collab, + external_agents: HashMap::default(), + agent_icons: HashMap::default(), + } + } + + // A simple fake that implements ExternalAgentServer without needing async plumbing. + struct NoopExternalAgent; + + impl ExternalAgentServer for NoopExternalAgent { + fn get_command( + &mut self, + _root_dir: Option<&str>, + _extra_env: HashMap, + _status_tx: Option>, + _new_version_available_tx: Option>>, + _cx: &mut AsyncApp, + ) -> Task)>> { + Task::ready(Ok(( + AgentServerCommand { + path: PathBuf::from("noop"), + args: Vec::new(), + env: None, + }, + "".to_string(), + None, + ))) + } + + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + } + + #[test] + fn external_agent_server_name_display() { + let name = ExternalAgentServerName(SharedString::from("Ext: Tool")); + let mut s = String::new(); + write!(&mut s, "{name}").unwrap(); + assert_eq!(s, "Ext: Tool"); + } + + #[test] + fn sync_extension_agents_removes_previous_extension_entries() { + let mut store = collab_store(); + + // Seed with a couple of agents that will be replaced by extensions + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("foo-agent")), + Box::new(NoopExternalAgent) as Box, + ); + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("bar-agent")), + Box::new(NoopExternalAgent) as Box, + ); + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("custom")), + Box::new(NoopExternalAgent) as Box, + ); + + // Simulate the removal phase: if we're syncing extensions that provide + // "foo-agent" and "bar-agent", those should be removed first + let extension_agent_names: HashSet = + ["foo-agent".to_string(), "bar-agent".to_string()] + .into_iter() + .collect(); + + let keys_to_remove: Vec<_> = store + .external_agents + .keys() + .filter(|name| extension_agent_names.contains(name.0.as_ref())) + .cloned() + .collect(); + + for key in keys_to_remove { + store.external_agents.remove(&key); + } + + // Only the custom entry should remain. + let remaining: Vec<_> = store + .external_agents + .keys() + .map(|k| k.0.to_string()) + .collect(); + assert_eq!(remaining, vec!["custom".to_string()]); + } +} + impl AgentServerStore { + /// Synchronizes extension-provided agent servers with the store. + pub fn sync_extension_agents<'a, I>( + &mut self, + manifests: I, + extensions_dir: PathBuf, + cx: &mut Context, + ) where + I: IntoIterator, + { + // Collect manifests first so we can iterate twice + let manifests: Vec<_> = manifests.into_iter().collect(); + + // Remove existing extension-provided agents by tracking which ones we're about to add + let extension_agent_names: HashSet<_> = manifests + .iter() + .flat_map(|(_, manifest)| manifest.agent_servers.keys().map(|k| k.to_string())) + .collect(); + + let keys_to_remove: Vec<_> = self + .external_agents + .keys() + .filter(|name| { + // Remove if it matches an extension agent name from any extension + extension_agent_names.contains(name.0.as_ref()) + }) + .cloned() + .collect(); + for key in &keys_to_remove { + self.external_agents.remove(key); + self.agent_icons.remove(key); + } + + // Insert agent servers from extension manifests + match &self.state { + AgentServerStoreState::Local { + project_environment, + fs, + http_client, + .. 
+ } => { + for (ext_id, manifest) in manifests { + for (agent_name, agent_entry) in &manifest.agent_servers { + let display = SharedString::from(agent_entry.name.clone()); + + // Store absolute icon path if provided, resolving symlinks for dev extensions + if let Some(icon) = &agent_entry.icon { + let icon_path = extensions_dir.join(ext_id).join(icon); + // Canonicalize to resolve symlinks (dev extensions are symlinked) + let absolute_icon_path = icon_path + .canonicalize() + .unwrap_or(icon_path) + .to_string_lossy() + .to_string(); + self.agent_icons.insert( + ExternalAgentServerName(display.clone()), + SharedString::from(absolute_icon_path), + ); + } + + // Archive-based launcher (download from URL) + self.external_agents.insert( + ExternalAgentServerName(display), + Box::new(LocalExtensionArchiveAgent { + fs: fs.clone(), + http_client: http_client.clone(), + project_environment: project_environment.clone(), + extension_id: Arc::from(ext_id), + agent_id: agent_name.clone(), + targets: agent_entry.targets.clone(), + env: agent_entry.env.clone(), + }) as Box, + ); + } + } + } + _ => { + // Only local projects support local extension agents + } + } + + cx.emit(AgentServersUpdated); + } + + pub fn agent_icon(&self, name: &ExternalAgentServerName) -> Option { + self.agent_icons.get(name).cloned() + } + pub fn init_remote(session: &AnyProtoClient) { session.add_entity_message_handler(Self::handle_external_agents_updated); session.add_entity_message_handler(Self::handle_loading_status_updated); @@ -201,7 +388,7 @@ impl AgentServerStore { .gemini .as_ref() .and_then(|settings| settings.ignore_system_version) - .unwrap_or(true), + .unwrap_or(false), }), ); self.external_agents.insert( @@ -278,7 +465,9 @@ impl AgentServerStore { _subscriptions: [subscription], }, external_agents: Default::default(), + agent_icons: Default::default(), }; + if let Some(_events) = extension::ExtensionEvents::try_global(cx) {} this.agent_servers_settings_changed(cx); this } @@ -287,7 +476,7 @@ impl AgentServerStore { // Set up the builtin agents here so they're immediately available in // remote projects--we know that the HeadlessProject on the other end // will have them. - let external_agents = [ + let external_agents: [(ExternalAgentServerName, Box); 3] = [ ( CLAUDE_CODE_NAME.into(), Box::new(RemoteExternalAgentServer { @@ -318,16 +507,15 @@ impl AgentServerStore { new_version_available_tx: None, }) as Box, ), - ] - .into_iter() - .collect(); + ]; Self { state: AgentServerStoreState::Remote { project_id, upstream_client, }, - external_agents, + external_agents: external_agents.into_iter().collect(), + agent_icons: HashMap::default(), } } @@ -335,6 +523,7 @@ impl AgentServerStore { Self { state: AgentServerStoreState::Collab, external_agents: Default::default(), + agent_icons: Default::default(), } } @@ -391,7 +580,7 @@ impl AgentServerStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let (command, root_dir, login) = this + let (command, root_dir, login_command) = this .update(&mut cx, |this, cx| { let AgentServerStoreState::Local { downstream_client, .. 
@@ -465,7 +654,7 @@ impl AgentServerStore { .map(|env| env.into_iter().collect()) .unwrap_or_default(), root_dir: root_dir, - login: login.map(|login| login.to_proto()), + login: login_command.map(|cmd| cmd.to_proto()), }) } @@ -647,9 +836,11 @@ fn get_or_npm_install_builtin_agent( let dir = dir.clone(); let fs = fs.clone(); async move { - let latest_version = - node_runtime.npm_package_latest_version(&package_name).await; - if let Ok(latest_version) = latest_version + let latest_version = node_runtime + .npm_package_latest_version(&package_name) + .await + .ok(); + if let Some(latest_version) = latest_version && &latest_version != &file_name.to_string_lossy() { let download_result = download_latest_version( @@ -808,9 +999,7 @@ impl ExternalAgentServer for RemoteExternalAgentServer { env: Some(command.env), }, root_dir, - response - .login - .map(|login| task::SpawnInTerminal::from_proto(login)), + None, )) }) } @@ -850,7 +1039,11 @@ impl ExternalAgentServer for LocalGemini { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_directory_environment(root_dir.clone(), cx) + project_environment.get_local_directory_environment( + &Shell::System, + root_dir.clone(), + cx, + ) })? .await .unwrap_or_default(); @@ -873,7 +1066,12 @@ impl ExternalAgentServer for LocalGemini { GEMINI_NAME.into(), "@google/gemini-cli".into(), "node_modules/@google/gemini-cli/dist/index.js".into(), - Some("0.2.1".parse().unwrap()), + if cfg!(windows) { + // v0.8.x on Windows has a bug that causes the initialize request to hang forever + Some("0.9.0".parse().unwrap()) + } else { + Some("0.2.1".parse().unwrap()) + }, status_tx, new_version_available_tx, fs, @@ -937,13 +1135,17 @@ impl ExternalAgentServer for LocalClaudeCode { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_directory_environment(root_dir.clone(), cx) + project_environment.get_local_directory_environment( + &Shell::System, + root_dir.clone(), + cx, + ) })? .await .unwrap_or_default(); env.insert("ANTHROPIC_API_KEY".into(), "".into()); - let (mut command, login) = if let Some(mut custom_command) = custom_command { + let (mut command, login_command) = if let Some(mut custom_command) = custom_command { env.extend(custom_command.env.unwrap_or_default()); custom_command.env = Some(env); (custom_command, None) @@ -984,7 +1186,11 @@ impl ExternalAgentServer for LocalClaudeCode { }; command.env.get_or_insert_default().extend(extra_env); - Ok((command, root_dir.to_string_lossy().into_owned(), login)) + Ok(( + command, + root_dir.to_string_lossy().into_owned(), + login_command, + )) }) } @@ -1023,7 +1229,11 @@ impl ExternalAgentServer for LocalCodex { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_directory_environment(root_dir.clone(), cx) + project_environment.get_local_directory_environment( + &Shell::System, + root_dir.clone(), + cx, + ) })? 
.await .unwrap_or_default(); @@ -1060,10 +1270,15 @@ impl ExternalAgentServer for LocalCodex { .into_iter() .find(|asset| asset.name == asset_name) .with_context(|| format!("no asset found matching `{asset_name:?}`"))?; + // Strip "sha256:" prefix from digest if present (GitHub API format) + let digest = asset + .digest + .as_deref() + .and_then(|d| d.strip_prefix("sha256:").or(Some(d))); ::http_client::github_download::download_server_binary( &*http, &asset.browser_download_url, - asset.digest.as_deref(), + digest, &version_dir, if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") { AssetKind::Zip @@ -1107,11 +1322,7 @@ impl ExternalAgentServer for LocalCodex { pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp"; -/// Assemble Codex release URL for the current OS/arch and the given version number. -/// Returns None if the current target is unsupported. -/// Example output: -/// https://github.com/zed-industries/codex-acp/releases/download/v{version}/codex-acp-{version}-{arch}-{platform}.{ext} -fn asset_name(version: &str) -> Option { +fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> { let arch = if cfg!(target_arch = "x86_64") { "x86_64" } else if cfg!(target_arch = "aarch64") { @@ -1137,14 +1348,220 @@ fn asset_name(version: &str) -> Option { "tar.gz" }; + Some((arch, platform, ext)) +} + +fn asset_name(version: &str) -> Option { + let (arch, platform, ext) = get_platform_info()?; Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}")) } +struct LocalExtensionArchiveAgent { + fs: Arc, + http_client: Arc, + project_environment: Entity, + extension_id: Arc, + agent_id: Arc, + targets: HashMap, + env: HashMap, +} + struct LocalCustomAgent { project_environment: Entity, command: AgentServerCommand, } +impl ExternalAgentServer for LocalExtensionArchiveAgent { + fn get_command( + &mut self, + root_dir: Option<&str>, + extra_env: HashMap, + _status_tx: Option>, + _new_version_available_tx: Option>>, + cx: &mut AsyncApp, + ) -> Task)>> { + let fs = self.fs.clone(); + let http_client = self.http_client.clone(); + let project_environment = self.project_environment.downgrade(); + let extension_id = self.extension_id.clone(); + let agent_id = self.agent_id.clone(); + let targets = self.targets.clone(); + let base_env = self.env.clone(); + + let root_dir: Arc = root_dir + .map(|root_dir| Path::new(root_dir)) + .unwrap_or(paths::home_dir()) + .into(); + + cx.spawn(async move |cx| { + // Get project environment + let mut env = project_environment + .update(cx, |project_environment, cx| { + project_environment.get_local_directory_environment( + &Shell::System, + root_dir.clone(), + cx, + ) + })? + .await + .unwrap_or_default(); + + // Merge manifest env and extra env + env.extend(base_env); + env.extend(extra_env); + + let cache_key = format!("{}/{}", extension_id, agent_id); + let dir = paths::data_dir().join("external_agents").join(&cache_key); + fs.create_dir(&dir).await?; + + // Determine platform key + let os = if cfg!(target_os = "macos") { + "darwin" + } else if cfg!(target_os = "linux") { + "linux" + } else if cfg!(target_os = "windows") { + "windows" + } else { + anyhow::bail!("unsupported OS"); + }; + + let arch = if cfg!(target_arch = "aarch64") { + "aarch64" + } else if cfg!(target_arch = "x86_64") { + "x86_64" + } else { + anyhow::bail!("unsupported architecture"); + }; + + let platform_key = format!("{}-{}", os, arch); + let target_config = targets.get(&platform_key).with_context(|| { + format!( + "no target specified for platform '{}'. 
Available platforms: {}", + platform_key, + targets + .keys() + .map(|k| k.as_str()) + .collect::>() + .join(", ") + ) + })?; + + let archive_url = &target_config.archive; + + // Use URL as version identifier for caching + // Hash the URL to get a stable directory name + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, Hasher}; + let mut hasher = DefaultHasher::new(); + archive_url.hash(&mut hasher); + let url_hash = hasher.finish(); + let version_dir = dir.join(format!("v_{:x}", url_hash)); + + if !fs.is_dir(&version_dir).await { + // Determine SHA256 for verification + let sha256 = if let Some(provided_sha) = &target_config.sha256 { + // Use provided SHA256 + Some(provided_sha.clone()) + } else if archive_url.starts_with("https://github.com/") { + // Try to fetch SHA256 from GitHub API + // Parse URL to extract repo and tag/file info + // Format: https://github.com/owner/repo/releases/download/tag/file.zip + if let Some(caps) = archive_url.strip_prefix("https://github.com/") { + let parts: Vec<&str> = caps.split('/').collect(); + if parts.len() >= 6 && parts[2] == "releases" && parts[3] == "download" { + let repo = format!("{}/{}", parts[0], parts[1]); + let tag = parts[4]; + let filename = parts[5..].join("/"); + + // Try to get release info from GitHub + if let Ok(release) = ::http_client::github::get_release_by_tag_name( + &repo, + tag, + http_client.clone(), + ) + .await + { + // Find matching asset + if let Some(asset) = + release.assets.iter().find(|a| a.name == filename) + { + // Strip "sha256:" prefix if present + asset.digest.as_ref().and_then(|d| { + d.strip_prefix("sha256:") + .map(|s| s.to_string()) + .or_else(|| Some(d.clone())) + }) + } else { + None + } + } else { + None + } + } else { + None + } + } else { + None + } + } else { + None + }; + + // Determine archive type from URL + let asset_kind = if archive_url.ends_with(".zip") { + AssetKind::Zip + } else if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") { + AssetKind::TarGz + } else { + anyhow::bail!("unsupported archive type in URL: {}", archive_url); + }; + + // Download and extract + ::http_client::github_download::download_server_binary( + &*http_client, + archive_url, + sha256.as_deref(), + &version_dir, + asset_kind, + ) + .await?; + } + + // Validate and resolve cmd path + let cmd = &target_config.cmd; + if cmd.contains("..") { + anyhow::bail!("command path cannot contain '..': {}", cmd); + } + + let cmd_path = if cmd.starts_with("./") || cmd.starts_with(".\\") { + // Relative to extraction directory + version_dir.join(&cmd[2..]) + } else { + // On PATH + anyhow::bail!("command must be relative (start with './'): {}", cmd); + }; + + anyhow::ensure!( + fs.is_file(&cmd_path).await, + "Missing command {} after extraction", + cmd_path.to_string_lossy() + ); + + let command = AgentServerCommand { + path: cmd_path, + args: target_config.args.clone(), + env: Some(env), + }; + + Ok((command, root_dir.to_string_lossy().into_owned(), None)) + }) + } + + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } +} + impl ExternalAgentServer for LocalCustomAgent { fn get_command( &mut self, @@ -1163,7 +1580,11 @@ impl ExternalAgentServer for LocalCustomAgent { cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { - project_environment.get_directory_environment(root_dir.clone(), cx) + project_environment.get_local_directory_environment( + &Shell::System, + root_dir.clone(), + cx, + ) })? 
.await .unwrap_or_default(); @@ -1179,42 +1600,6 @@ impl ExternalAgentServer for LocalCustomAgent { } } -#[cfg(test)] -mod tests { - #[test] - fn assembles_codex_release_url_for_current_target() { - let version_number = "0.1.0"; - - // This test fails the build if we are building a version of Zed - // which does not have a known build of codex-acp, to prevent us - // from accidentally doing a release on a new target without - // realizing that codex-acp support will not work on that target! - // - // Additionally, it verifies that our logic for assembling URLs - // correctly resolves to a known-good URL on each of our targets. - let allowed = [ - "codex-acp-0.1.0-aarch64-apple-darwin.tar.gz", - "codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz", - "codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz", - "codex-acp-0.1.0-x86_64-apple-darwin.tar.gz", - "codex-acp-0.1.0-x86_64-pc-windows-msvc.zip", - "codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz", - ]; - - if let Some(url) = super::asset_name(version_number) { - assert!( - allowed.contains(&url.as_str()), - "Assembled asset name {} not in allowed list", - url - ); - } else { - panic!( - "This target does not have a known codex-acp release! We should fix this by building a release of codex-acp for this target, as otherwise codex-acp will not be usable with this Zed build." - ); - } - } -} - pub const GEMINI_NAME: &'static str = "gemini"; pub const CLAUDE_CODE_NAME: &'static str = "claude"; pub const CODEX_NAME: &'static str = "codex"; @@ -1307,3 +1692,200 @@ impl settings::Settings for AllAgentServersSettings { } } } + +#[cfg(test)] +mod extension_agent_tests { + use super::*; + use gpui::TestAppContext; + use std::sync::Arc; + + #[test] + fn extension_agent_constructs_proper_display_names() { + // Verify the display name format for extension-provided agents + let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent")); + assert!(name1.0.contains(": ")); + + let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent")); + assert_eq!(name2.0, "MyExt: MyAgent"); + + // Non-extension agents shouldn't have the separator + let custom = ExternalAgentServerName(SharedString::from("custom")); + assert!(!custom.0.contains(": ")); + } + + struct NoopExternalAgent; + + impl ExternalAgentServer for NoopExternalAgent { + fn get_command( + &mut self, + _root_dir: Option<&str>, + _extra_env: HashMap, + _status_tx: Option>, + _new_version_available_tx: Option>>, + _cx: &mut AsyncApp, + ) -> Task)>> { + Task::ready(Ok(( + AgentServerCommand { + path: PathBuf::from("noop"), + args: Vec::new(), + env: None, + }, + "".to_string(), + None, + ))) + } + + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + } + + #[test] + fn sync_removes_only_extension_provided_agents() { + let mut store = AgentServerStore { + state: AgentServerStoreState::Collab, + external_agents: HashMap::default(), + agent_icons: HashMap::default(), + }; + + // Seed with extension agents (contain ": ") and custom agents (don't contain ": ") + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("Ext1: Agent1")), + Box::new(NoopExternalAgent) as Box, + ); + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("Ext2: Agent2")), + Box::new(NoopExternalAgent) as Box, + ); + store.external_agents.insert( + ExternalAgentServerName(SharedString::from("custom-agent")), + Box::new(NoopExternalAgent) as Box, + ); + + // Simulate removal phase + let keys_to_remove: Vec<_> = store + .external_agents + .keys() + .filter(|name| 
name.0.contains(": ")) + .cloned() + .collect(); + + for key in keys_to_remove { + store.external_agents.remove(&key); + } + + // Only custom-agent should remain + assert_eq!(store.external_agents.len(), 1); + assert!( + store + .external_agents + .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent"))) + ); + } + + #[test] + fn archive_launcher_constructs_with_all_fields() { + use extension::AgentServerManifestEntry; + + let mut env = HashMap::default(); + env.insert("GITHUB_TOKEN".into(), "secret".into()); + + let mut targets = HashMap::default(); + targets.insert( + "darwin-aarch64".to_string(), + extension::TargetConfig { + archive: + "https://github.com/owner/repo/releases/download/v1.0.0/agent-darwin-arm64.zip" + .into(), + cmd: "./agent".into(), + args: vec![], + sha256: None, + }, + ); + + let _entry = AgentServerManifestEntry { + name: "GitHub Agent".into(), + targets, + env, + icon: None, + }; + + // Verify display name construction + let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent")); + assert_eq!(expected_name.0, "GitHub Agent"); + } + + #[gpui::test] + async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) { + let fs = fs::FakeFs::new(cx.background_executor.clone()); + let http_client = http_client::FakeHttpClient::with_404_response(); + let project_environment = cx.new(|cx| crate::ProjectEnvironment::new(None, cx)); + + let agent = LocalExtensionArchiveAgent { + fs, + http_client, + project_environment, + extension_id: Arc::from("my-extension"), + agent_id: Arc::from("my-agent"), + targets: { + let mut map = HashMap::default(); + map.insert( + "darwin-aarch64".to_string(), + extension::TargetConfig { + archive: "https://example.com/my-agent-darwin-arm64.zip".into(), + cmd: "./my-agent".into(), + args: vec!["--serve".into()], + sha256: None, + }, + ); + map + }, + env: { + let mut map = HashMap::default(); + map.insert("PORT".into(), "8080".into()); + map + }, + }; + + // Verify agent is properly constructed + assert_eq!(agent.extension_id.as_ref(), "my-extension"); + assert_eq!(agent.agent_id.as_ref(), "my-agent"); + assert_eq!(agent.env.get("PORT"), Some(&"8080".to_string())); + assert!(agent.targets.contains_key("darwin-aarch64")); + } + + #[test] + fn sync_extension_agents_registers_archive_launcher() { + use extension::AgentServerManifestEntry; + + let expected_name = ExternalAgentServerName(SharedString::from("Release Agent")); + assert_eq!(expected_name.0, "Release Agent"); + + // Verify the manifest entry structure for archive-based installation + let mut env = HashMap::default(); + env.insert("API_KEY".into(), "secret".into()); + + let mut targets = HashMap::default(); + targets.insert( + "linux-x86_64".to_string(), + extension::TargetConfig { + archive: "https://github.com/org/project/releases/download/v2.1.0/release-agent-linux-x64.tar.gz".into(), + cmd: "./release-agent".into(), + args: vec!["serve".into()], + sha256: None, + }, + ); + + let manifest_entry = AgentServerManifestEntry { + name: "Release Agent".into(), + targets: targets.clone(), + env, + icon: None, + }; + + // Verify target config is present + assert!(manifest_entry.targets.contains_key("linux-x86_64")); + let target = manifest_entry.targets.get("linux-x86_64").unwrap(); + assert_eq!(target.cmd, "./release-agent"); + } +} diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 8a4d4f7918c12abd94cf7bf8fc97c939db7ce033..b9249d36e2ca8da6b17f342a8db9f3dcca113515 100644 --- 
a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -25,8 +25,8 @@ use rpc::{ }; use smol::channel::Receiver; use std::{io, pin::pin, sync::Arc, time::Instant}; -use text::BufferId; -use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath}; +use text::{BufferId, ReplicaId}; +use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath}; use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId}; /// A set of open buffers. @@ -158,7 +158,7 @@ impl RemoteBufferStore { pub fn handle_create_buffer_for_peer( &mut self, envelope: TypedEnvelope, - replica_id: u16, + replica_id: ReplicaId, capability: Capability, cx: &mut Context, ) -> Result>> { @@ -623,10 +623,15 @@ impl LocalBufferStore { let load_file = worktree.load_file(path.as_ref(), cx); let reservation = cx.reserve_entity(); let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + let path = path.clone(); cx.spawn(async move |_, cx| { - let loaded = load_file.await?; + let loaded = load_file.await.with_context(|| { + format!("Could not open path: {}", path.display(PathStyle::local())) + })?; let text_buffer = cx - .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) + .background_spawn(async move { + text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text) + }) .await; cx.insert_entity(reservation, |_| { Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite) @@ -639,7 +644,7 @@ impl LocalBufferStore { Ok(buffer) => Ok(buffer), Err(error) if is_not_found_error(&error) => cx.new(|cx| { let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new(0, buffer_id, ""); + let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, ""); Buffer::build( text_buffer, Some(Arc::new(File { @@ -904,7 +909,14 @@ impl BufferStore { }; cx.spawn(async move |this, cx| { task.await?; - this.update(cx, |_, cx| { + this.update(cx, |this, cx| { + old_file.clone().and_then(|file| { + this.path_to_buffer_id.remove(&ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path().clone(), + }) + }); + cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); }) }) @@ -917,7 +929,7 @@ impl BufferStore { path: file.path.clone(), worktree_id: file.worktree_id(cx), }); - let is_remote = buffer.replica_id() != 0; + let is_remote = buffer.replica_id().is_remote(); let open_buffer = OpenBuffer::Complete { buffer: buffer_entity.downgrade(), }; @@ -1317,7 +1329,7 @@ impl BufferStore { pub fn handle_create_buffer_for_peer( &mut self, envelope: TypedEnvelope, - replica_id: u16, + replica_id: ReplicaId, capability: Capability, cx: &mut Context, ) -> Result<()> { diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index b7f5360d189489415032be6e5271b3880a421e57..42663ab9852a5dc2e9850d20dd20940c6723d03c 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -164,7 +164,6 @@ pub struct BreakpointStore { impl BreakpointStore { pub fn init(client: &AnyProtoClient) { - log::error!("breakpoint store init"); client.add_entity_request_handler(Self::handle_toggle_breakpoint); client.add_entity_message_handler(Self::handle_breakpoints_for_file); } diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index c6fc1ddf73ec7e619bf9c13a60db6fe024fa20f1..7d80c563e9678ec097dab030bdca047a967e2cf0 100644 --- 
a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -49,7 +49,7 @@ use std::{ path::{Path, PathBuf}, sync::{Arc, Once}, }; -use task::{DebugScenario, SpawnInTerminal, TaskContext, TaskTemplate}; +use task::{DebugScenario, Shell, SpawnInTerminal, TaskContext, TaskTemplate}; use util::{ResultExt as _, rel_path::RelPath}; use worktree::Worktree; @@ -264,13 +264,21 @@ impl DapStore { DapBinary::Custom(binary) => Some(PathBuf::from(binary)), }); let user_args = dap_settings.map(|s| s.args.clone()); + let user_env = dap_settings.map(|s| s.env.clone()); let delegate = self.delegate(worktree, console, cx); let cwd: Arc = worktree.read(cx).abs_path().as_ref().into(); cx.spawn(async move |this, cx| { let mut binary = adapter - .get_binary(&delegate, &definition, user_installed_path, user_args, cx) + .get_binary( + &delegate, + &definition, + user_installed_path, + user_args, + user_env, + cx, + ) .await?; let env = this @@ -279,7 +287,11 @@ impl DapStore { .unwrap() .environment .update(cx, |environment, cx| { - environment.get_directory_environment(cwd, cx) + environment.get_local_directory_environment( + &Shell::System, + cwd, + cx, + ) }) })? .await; diff --git a/crates/project/src/debugger/locators/cargo.rs b/crates/project/src/debugger/locators/cargo.rs index a9bb206301562130976864ab949938968271f1e0..662b9ca7efcd53b8792127e531a9baba24967ea1 100644 --- a/crates/project/src/debugger/locators/cargo.rs +++ b/crates/project/src/debugger/locators/cargo.rs @@ -117,7 +117,7 @@ impl DapLocator for CargoLocator { .cwd .clone() .context("Couldn't get cwd from debug config which is needed for locators")?; - let builder = ShellBuilder::new(&build_config.shell).non_interactive(); + let builder = ShellBuilder::new(&build_config.shell, cfg!(windows)).non_interactive(); let (program, args) = builder.build( Some("cargo".into()), &build_config diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 19c088e6e8767bd56bf19759fbddd9947c4ef0ba..b5fbfd80d6152faf9d04715138859dc565e8cba8 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -14,12 +14,13 @@ use super::dap_command::{ TerminateCommand, TerminateThreadsCommand, ThreadsCommand, VariablesCommand, }; use super::dap_store::DapStore; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result, anyhow, bail}; use base64::Engine; use collections::{HashMap, HashSet, IndexMap}; use dap::adapters::{DebugAdapterBinary, DebugAdapterName}; use dap::messages::Response; use dap::requests::{Request, RunInTerminal, StartDebugging}; +use dap::transport::TcpTransport; use dap::{ Capabilities, ContinueArguments, EvaluateArgumentsContext, Module, Source, StackFrameId, SteppingGranularity, StoppedEvent, VariableReference, @@ -47,12 +48,14 @@ use remote::RemoteClient; use rpc::ErrorExt; use serde::{Deserialize, Serialize}; use serde_json::Value; -use smol::net::TcpListener; +use smol::net::{TcpListener, TcpStream}; use std::any::TypeId; use std::collections::BTreeMap; +use std::net::Ipv4Addr; use std::ops::RangeInclusive; use std::path::PathBuf; use std::process::Stdio; +use std::time::Duration; use std::u64; use std::{ any::Any, @@ -63,6 +66,7 @@ use std::{ }; use task::TaskContext; use text::{PointUtf16, ToPointUtf16}; +use url::Url; use util::command::new_smol_command; use util::{ResultExt, debug_panic, maybe}; use worktree::Worktree; @@ -2768,31 +2772,42 @@ impl Session { let mut console_output = self.console_output(cx); let 
task = cx.spawn(async move |this, cx| { - let (dap_port, _child) = - if remote_client.read_with(cx, |client, _| client.shares_network_interface())? { - (request.server_port, None) - } else { - let port = { - let listener = TcpListener::bind("127.0.0.1:0") - .await - .context("getting port for DAP")?; - listener.local_addr()?.port() - }; - let child = remote_client.update(cx, |client, _| { - let command = client.build_forward_port_command( - port, - "localhost".into(), - request.server_port, - )?; - let child = new_smol_command(command.program) - .args(command.args) - .envs(command.env) - .spawn() - .context("spawning port forwarding process")?; - anyhow::Ok(child) - })??; - (port, Some(child)) - }; + let forward_ports_process = if remote_client + .read_with(cx, |client, _| client.shares_network_interface())? + { + request.other.insert( + "proxyUri".into(), + format!("127.0.0.1:{}", request.server_port).into(), + ); + None + } else { + let port = TcpTransport::unused_port(Ipv4Addr::LOCALHOST) + .await + .context("getting port for DAP")?; + request + .other + .insert("proxyUri".into(), format!("127.0.0.1:{port}").into()); + let mut port_forwards = vec![(port, "localhost".to_owned(), request.server_port)]; + + if let Some(value) = request.params.get("url") + && let Some(url) = value.as_str() + && let Some(url) = Url::parse(url).ok() + && let Some(frontend_port) = url.port() + { + port_forwards.push((frontend_port, "localhost".to_owned(), frontend_port)); + } + + let child = remote_client.update(cx, |client, _| { + let command = client.build_forward_ports_command(port_forwards)?; + let child = new_smol_command(command.program) + .args(command.args) + .envs(command.env) + .spawn() + .context("spawning port forwarding process")?; + anyhow::Ok(child) + })??; + Some(child) + }; let mut companion_process = None; let companion_port = @@ -2814,14 +2829,17 @@ impl Session { } } }; - this.update(cx, |this, cx| { - this.companion_port = Some(companion_port); - let Some(mut child) = companion_process else { - return; - }; - if let Some(stderr) = child.stderr.take() { + + let mut background_tasks = Vec::new(); + if let Some(mut forward_ports_process) = forward_ports_process { + background_tasks.push(cx.spawn(async move |_| { + forward_ports_process.status().await.log_err(); + })); + }; + if let Some(mut companion_process) = companion_process { + if let Some(stderr) = companion_process.stderr.take() { let mut console_output = console_output.clone(); - this.background_tasks.push(cx.spawn(async move |_, _| { + background_tasks.push(cx.spawn(async move |_| { let mut stderr = BufReader::new(stderr); let mut line = String::new(); while let Ok(n) = stderr.read_line(&mut line).await @@ -2835,9 +2853,9 @@ impl Session { } })); } - this.background_tasks.push(cx.spawn({ + background_tasks.push(cx.spawn({ let mut console_output = console_output.clone(); - async move |_, _| match child.status().await { + async move |_| match companion_process.status().await { Ok(status) => { if status.success() { console_output @@ -2860,17 +2878,33 @@ impl Session { .ok(); } } - })) - })?; + })); + } - request - .other - .insert("proxyUri".into(), format!("127.0.0.1:{dap_port}").into()); // TODO pass wslInfo as needed + let companion_address = format!("127.0.0.1:{companion_port}"); + let mut companion_started = false; + for _ in 0..10 { + if TcpStream::connect(&companion_address).await.is_ok() { + companion_started = true; + break; + } + cx.background_executor() + .timer(Duration::from_millis(100)) + .await; + } + if 
!companion_started { + console_output + .send("Browser companion failed to start".into()) + .await + .ok(); + bail!("Browser companion failed to start"); + } + let response = http_client .post_json( - &format!("http://127.0.0.1:{companion_port}/launch-and-attach"), + &format!("http://{companion_address}/launch-and-attach"), serde_json::to_string(&request) .context("serializing request")? .into(), @@ -2895,6 +2929,11 @@ impl Session { } } + this.update(cx, |this, _| { + this.background_tasks.extend(background_tasks); + this.companion_port = Some(companion_port); + })?; + anyhow::Ok(()) }); self.background_tasks.push(cx.spawn(async move |_, _| { @@ -2926,15 +2965,16 @@ impl Session { } } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct LaunchBrowserInCompanionParams { server_port: u16, + params: HashMap, #[serde(flatten)] other: HashMap, } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct KillCompanionBrowserParams { launch_id: u64, diff --git a/crates/project/src/direnv.rs b/crates/project/src/direnv.rs deleted file mode 100644 index 75c381dda96eb4f014310f9233c7557177f6eec9..0000000000000000000000000000000000000000 --- a/crates/project/src/direnv.rs +++ /dev/null @@ -1,82 +0,0 @@ -use crate::environment::EnvironmentErrorMessage; -use std::process::ExitStatus; - -use {collections::HashMap, std::path::Path, util::ResultExt}; - -#[derive(Clone)] -pub enum DirenvError { - NotFound, - FailedRun, - NonZeroExit(ExitStatus, Vec), - InvalidJson, -} - -impl From for Option { - fn from(value: DirenvError) -> Self { - match value { - DirenvError::NotFound => None, - DirenvError::FailedRun | DirenvError::NonZeroExit(_, _) => { - Some(EnvironmentErrorMessage(String::from( - "Failed to run direnv. See logs for more info", - ))) - } - DirenvError::InvalidJson => Some(EnvironmentErrorMessage(String::from( - "Direnv returned invalid json. 
See logs for more info", - ))), - } - } -} - -pub async fn load_direnv_environment( - env: &HashMap, - dir: &Path, -) -> Result>, DirenvError> { - let Ok(direnv_path) = which::which("direnv") else { - return Err(DirenvError::NotFound); - }; - - let args = &["export", "json"]; - let Some(direnv_output) = smol::process::Command::new(&direnv_path) - .args(args) - .envs(env) - .env("TERM", "dumb") - .current_dir(dir) - .output() - .await - .log_err() - else { - return Err(DirenvError::FailedRun); - }; - - if !direnv_output.status.success() { - log::error!( - "Loading direnv environment failed ({}), stderr: {}", - direnv_output.status, - String::from_utf8_lossy(&direnv_output.stderr) - ); - return Err(DirenvError::NonZeroExit( - direnv_output.status, - direnv_output.stderr, - )); - } - - let output = String::from_utf8_lossy(&direnv_output.stdout); - if output.is_empty() { - // direnv outputs nothing when it has no changes to apply to environment variables - return Ok(HashMap::default()); - } - - match serde_json::from_str(&output) { - Ok(env) => Ok(env), - Err(err) => { - log::error!( - "json parse error {}, while parsing output of `{} {}`:\n{}", - err, - direnv_path.display(), - args.join(" "), - output - ); - Err(DirenvError::InvalidJson) - } - } -} diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index fc86702901e1e4ad90acbe0504eaf2913d3c8326..0f713b7deb3aca07ea7f867fc768ab2af9716c15 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -1,7 +1,10 @@ -use futures::{FutureExt, future::Shared}; +use anyhow::{Context as _, bail}; +use futures::{FutureExt, StreamExt as _, channel::mpsc, future::Shared}; use language::Buffer; -use std::{path::Path, sync::Arc}; -use task::Shell; +use remote::RemoteClient; +use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID}; +use std::{collections::VecDeque, path::Path, sync::Arc}; +use task::{Shell, shell_to_proto}; use util::ResultExt; use worktree::Worktree; @@ -16,10 +19,11 @@ use crate::{ pub struct ProjectEnvironment { cli_environment: Option>, - environments: HashMap, Shared>>>>, - shell_based_environments: - HashMap<(Shell, Arc), Shared>>>>, - environment_error_messages: HashMap, EnvironmentErrorMessage>, + local_environments: HashMap<(Shell, Arc), Shared>>>>, + remote_environments: HashMap<(Shell, Arc), Shared>>>>, + environment_error_messages: VecDeque, + environment_error_messages_tx: mpsc::UnboundedSender, + _tasks: Vec>, } pub enum ProjectEnvironmentEvent { @@ -29,12 +33,24 @@ pub enum ProjectEnvironmentEvent { impl EventEmitter for ProjectEnvironment {} impl ProjectEnvironment { - pub fn new(cli_environment: Option>) -> Self { + pub fn new(cli_environment: Option>, cx: &mut Context) -> Self { + let (tx, mut rx) = mpsc::unbounded(); + let task = cx.spawn(async move |this, cx| { + while let Some(message) = rx.next().await { + this.update(cx, |this, cx| { + this.environment_error_messages.push_back(message); + cx.emit(ProjectEnvironmentEvent::ErrorsUpdated); + }) + .ok(); + } + }); Self { cli_environment, - environments: Default::default(), - shell_based_environments: Default::default(), + local_environments: Default::default(), + remote_environments: Default::default(), environment_error_messages: Default::default(), + environment_error_messages_tx: tx, + _tasks: vec![task], } } @@ -48,19 +64,6 @@ impl ProjectEnvironment { } } - /// Returns an iterator over all pairs `(abs_path, error_message)` of - /// environment errors associated with this project environment. 
- pub(crate) fn environment_errors( - &self, - ) -> impl Iterator, &EnvironmentErrorMessage)> { - self.environment_error_messages.iter() - } - - pub(crate) fn remove_environment_error(&mut self, abs_path: &Path, cx: &mut Context) { - self.environment_error_messages.remove(abs_path); - cx.emit(ProjectEnvironmentEvent::ErrorsUpdated); - } - pub(crate) fn get_buffer_environment( &mut self, buffer: &Entity, @@ -115,15 +118,16 @@ impl ProjectEnvironment { abs_path = parent.into(); } - self.get_directory_environment(abs_path, cx) + self.get_local_directory_environment(&Shell::System, abs_path, cx) } /// Returns the project environment, if possible. /// If the project was opened from the CLI, then the inherited CLI environment is returned. /// If it wasn't opened from the CLI, and an absolute path is given, then a shell is spawned in /// that directory, to get environment variables as if the user has `cd`'d there. - pub fn get_directory_environment( + pub fn get_local_directory_environment( &mut self, + shell: &Shell, abs_path: Arc, cx: &mut Context, ) -> Shared>>> { @@ -136,26 +140,83 @@ impl ProjectEnvironment { return Task::ready(Some(cli_environment)).shared(); } - self.environments - .entry(abs_path.clone()) + self.local_environments + .entry((shell.clone(), abs_path.clone())) .or_insert_with(|| { - get_directory_env_impl(&Shell::System, abs_path.clone(), cx).shared() + let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); + let shell = shell.clone(); + let tx = self.environment_error_messages_tx.clone(); + cx.spawn(async move |_, cx| { + let mut shell_env = cx + .background_spawn(load_directory_shell_environment( + shell, + abs_path.clone(), + load_direnv, + tx, + )) + .await + .log_err(); + + if let Some(shell_env) = shell_env.as_mut() { + let path = shell_env + .get("PATH") + .map(|path| path.as_str()) + .unwrap_or_default(); + log::debug!( + "using project environment variables shell launched in {:?}. PATH={:?}", + abs_path, + path + ); + + set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); + } + + shell_env + }) + .shared() }) .clone() } - /// Returns the project environment, if possible, with the given shell. 
- pub fn get_directory_environment_for_shell( + pub fn get_remote_directory_environment( &mut self, shell: &Shell, abs_path: Arc, + remote_client: Entity, cx: &mut Context, ) -> Shared>>> { - self.shell_based_environments + if cfg!(any(test, feature = "test-support")) { + return Task::ready(Some(HashMap::default())).shared(); + } + + self.remote_environments .entry((shell.clone(), abs_path.clone())) - .or_insert_with(|| get_directory_env_impl(shell, abs_path.clone(), cx).shared()) + .or_insert_with(|| { + let response = + remote_client + .read(cx) + .proto_client() + .request(proto::GetDirectoryEnvironment { + project_id: REMOTE_SERVER_PROJECT_ID, + shell: Some(shell_to_proto(shell.clone())), + directory: abs_path.to_string_lossy().to_string(), + }); + cx.spawn(async move |_, _| { + let environment = response.await.log_err()?; + Some(environment.environment.into_iter().collect()) + }) + .shared() + }) .clone() } + + pub fn peek_environment_error(&self) -> Option<&String> { + self.environment_error_messages.front() + } + + pub fn pop_environment_error(&mut self) -> Option { + self.environment_error_messages.pop_front() + } } fn set_origin_marker(env: &mut HashMap, origin: EnvironmentOrigin) { @@ -178,120 +239,72 @@ impl From for String { } } -#[derive(Debug)] -pub struct EnvironmentErrorMessage(pub String); - -impl std::fmt::Display for EnvironmentErrorMessage { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl EnvironmentErrorMessage { - #[allow(dead_code)] - fn from_str(s: &str) -> Self { - Self(String::from(s)) - } -} - async fn load_directory_shell_environment( - shell: &Shell, - abs_path: &Path, - load_direnv: &DirenvSettings, -) -> ( - Option>, - Option, -) { - match smol::fs::metadata(abs_path).await { - Ok(meta) => { - let dir = if meta.is_dir() { - abs_path - } else if let Some(parent) = abs_path.parent() { - parent - } else { - return ( - None, - Some(EnvironmentErrorMessage(format!( - "Failed to load shell environment in {}: not a directory", - abs_path.display() - ))), - ); - }; - - load_shell_environment(shell, dir, load_direnv).await - } - Err(err) => ( - None, - Some(EnvironmentErrorMessage(format!( - "Failed to load shell environment in {}: {}", - abs_path.display(), - err - ))), - ), - } -} - -async fn load_shell_environment( - shell: &Shell, - dir: &Path, - load_direnv: &DirenvSettings, -) -> ( - Option>, - Option, -) { - use crate::direnv::load_direnv_environment; - use util::shell_env; - - if cfg!(any(test, feature = "test-support")) { - let fake_env = [("ZED_FAKE_TEST_ENV".into(), "true".into())] - .into_iter() - .collect(); - (Some(fake_env), None) - } else if cfg!(target_os = "windows",) { - let (shell, args) = shell.program_and_args(); - let envs = match shell_env::capture(shell, args, dir).await { - Ok(envs) => envs, - Err(err) => { - util::log_err(&err); - return ( - None, - Some(EnvironmentErrorMessage(format!( - "Failed to load environment variables: {}", - err - ))), - ); - } - }; - + shell: Shell, + abs_path: Arc, + load_direnv: DirenvSettings, + tx: mpsc::UnboundedSender, +) -> anyhow::Result> { + let meta = smol::fs::metadata(&abs_path).await.with_context(|| { + tx.unbounded_send(format!("Failed to open {}", abs_path.display())) + .ok(); + format!("stat {abs_path:?}") + })?; + + let dir = if meta.is_dir() { + abs_path.clone() + } else { + abs_path + .parent() + .with_context(|| { + tx.unbounded_send(format!("Failed to open {}", abs_path.display())) + .ok(); + format!("getting parent of 
{abs_path:?}") + })? + .into() + }; + + if cfg!(target_os = "windows") { // Note: direnv is not available on Windows, so we skip direnv processing // and just return the shell environment - (Some(envs), None) + let (shell, args) = shell.program_and_args(); + let mut envs = util::shell_env::capture(shell.clone(), args, abs_path) + .await + .with_context(|| { + tx.unbounded_send("Failed to load environment variables".into()) + .ok(); + format!("capturing shell environment with {shell:?}") + })?; + if let Some(path) = envs.remove("Path") { + // windows env vars are case-insensitive, so normalize the path var + // so we can just assume `PATH` in other places + envs.insert("PATH".into(), path); + } + Ok(envs) } else { - let dir_ = dir.to_owned(); let (shell, args) = shell.program_and_args(); - let mut envs = match shell_env::capture(shell, args, &dir_).await { - Ok(envs) => envs, - Err(err) => { - util::log_err(&err); - return ( - None, - Some(EnvironmentErrorMessage::from_str( - "Failed to load environment variables. See log for details", - )), - ); - } - }; + let mut envs = util::shell_env::capture(shell.clone(), args, abs_path) + .await + .with_context(|| { + tx.unbounded_send("Failed to load environment variables".into()) + .ok(); + format!("capturing shell environment with {shell:?}") + })?; // If the user selects `Direct` for direnv, it would set an environment // variable that later uses to know that it should not run the hook. // We would include in `.envs` call so it is okay to run the hook // even if direnv direct mode is enabled. - let (direnv_environment, direnv_error) = match load_direnv { - DirenvSettings::ShellHook => (None, None), - DirenvSettings::Direct => match load_direnv_environment(&envs, dir).await { - Ok(env) => (Some(env), None), - Err(err) => (None, err.into()), - }, + let direnv_environment = match load_direnv { + DirenvSettings::ShellHook => None, + DirenvSettings::Direct => load_direnv_environment(&envs, &dir) + .await + .with_context(|| { + tx.unbounded_send("Failed to load direnv environment".into()) + .ok(); + "load direnv environment" + }) + .log_err(), }; if let Some(direnv_environment) = direnv_environment { for (key, value) in direnv_environment { @@ -303,51 +316,41 @@ async fn load_shell_environment( } } - (Some(envs), direnv_error) + Ok(envs) } } -fn get_directory_env_impl( - shell: &Shell, - abs_path: Arc, - cx: &Context, -) -> Task>> { - let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); - - let shell = shell.clone(); - cx.spawn(async move |this, cx| { - let (mut shell_env, error_message) = cx - .background_spawn({ - let abs_path = abs_path.clone(); - async move { - load_directory_shell_environment(&shell, &abs_path, &load_direnv).await - } - }) - .await; - - if let Some(shell_env) = shell_env.as_mut() { - let path = shell_env - .get("PATH") - .map(|path| path.as_str()) - .unwrap_or_default(); - log::info!( - "using project environment variables shell launched in {:?}. 
PATH={:?}", - abs_path, - path - ); - - set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); - } +async fn load_direnv_environment( + env: &HashMap, + dir: &Path, +) -> anyhow::Result>> { + let Some(direnv_path) = which::which("direnv").ok() else { + return Ok(HashMap::default()); + }; + + let args = &["export", "json"]; + let direnv_output = smol::process::Command::new(&direnv_path) + .args(args) + .envs(env) + .env("TERM", "dumb") + .current_dir(dir) + .output() + .await + .context("running direnv")?; + + if !direnv_output.status.success() { + bail!( + "Loading direnv environment failed ({}), stderr: {}", + direnv_output.status, + String::from_utf8_lossy(&direnv_output.stderr) + ); + } - if let Some(error) = error_message { - this.update(cx, |this, cx| { - log::error!("{error}",); - this.environment_error_messages.insert(abs_path, error); - cx.emit(ProjectEnvironmentEvent::ErrorsUpdated) - }) - .log_err(); - } + let output = String::from_utf8_lossy(&direnv_output.stdout); + if output.is_empty() { + // direnv outputs nothing when it has no changes to apply to environment variables + return Ok(HashMap::default()); + } - shell_env - }) + serde_json::from_str(&output).context("parsing direnv json") } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 40ed16183bf3d4d7182f4a13df97af4704720ef9..736c96f34e171c4fde83c2db032484456144ae5a 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -1,3 +1,4 @@ +pub mod branch_diff; mod conflict_set; pub mod git_traversal; @@ -30,7 +31,8 @@ use git::{ }, stash::{GitStash, StashEntry}, status::{ - FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode, + DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, + UnmergedStatus, UnmergedStatusCode, }, }; use gpui::{ @@ -55,6 +57,7 @@ use std::{ mem, ops::Range, path::{Path, PathBuf}, + str::FromStr, sync::{ Arc, atomic::{self, AtomicU64}, @@ -62,6 +65,7 @@ use std::{ time::Instant, }; use sum_tree::{Edit, SumTree, TreeSet}; +use task::Shell; use text::{Bias, BufferId}; use util::{ ResultExt, debug_panic, @@ -300,9 +304,13 @@ pub enum RepositoryState { #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { - Updated { full_scan: bool, new_instance: bool }, + StatusesChanged { + // TODO could report which statuses changed here + full_scan: bool, + }, MergeHeadsChanged, - PathsChanged, + BranchChanged, + StashEntriesChanged, } #[derive(Clone, Debug)] @@ -312,7 +320,7 @@ pub struct JobsUpdated; pub enum GitStoreEvent { ActiveRepositoryChanged(Option), RepositoryUpdated(RepositoryId, RepositoryEvent, bool), - RepositoryAdded(RepositoryId), + RepositoryAdded, RepositoryRemoved(RepositoryId), IndexWriteError(anyhow::Error), JobsUpdated, @@ -427,6 +435,8 @@ impl GitStore { client.add_entity_request_handler(Self::handle_askpass); client.add_entity_request_handler(Self::handle_check_for_pushed_commits); client.add_entity_request_handler(Self::handle_git_diff); + client.add_entity_request_handler(Self::handle_tree_diff); + client.add_entity_request_handler(Self::handle_get_blob_content); client.add_entity_request_handler(Self::handle_open_unstaged_diff); client.add_entity_request_handler(Self::handle_open_uncommitted_diff); client.add_entity_message_handler(Self::handle_update_diff_bases); @@ -614,6 +624,52 @@ impl GitStore { cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } + pub fn open_diff_since( + &mut self, + oid: Option, + buffer: 
Entity, + repo: Entity, + languages: Arc, + cx: &mut Context, + ) -> Task>> { + cx.spawn(async move |this, cx| { + let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?; + let content = match oid { + None => None, + Some(oid) => Some( + repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))? + .await?, + ), + }; + let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?; + + buffer_diff + .update(cx, |buffer_diff, cx| { + buffer_diff.set_base_text( + content.map(Arc::new), + buffer_snapshot.language().cloned(), + Some(languages.clone()), + buffer_snapshot.text, + cx, + ) + })? + .await?; + let unstaged_diff = this + .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))? + .await?; + buffer_diff.update(cx, |buffer_diff, _| { + buffer_diff.set_secondary_diff(unstaged_diff); + })?; + + this.update(cx, |_, cx| { + cx.subscribe(&buffer_diff, Self::on_buffer_diff_event) + .detach(); + })?; + + Ok(buffer_diff) + }) + } + pub fn open_uncommitted_diff( &mut self, buffer: Entity, @@ -1217,7 +1273,7 @@ impl GitStore { self._subscriptions .push(cx.subscribe(&repo, Self::on_jobs_updated)); self.repositories.insert(id, repo); - cx.emit(GitStoreEvent::RepositoryAdded(id)); + cx.emit(GitStoreEvent::RepositoryAdded); self.active_repo_id.get_or_insert_with(|| { cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); id @@ -1484,11 +1540,10 @@ impl GitStore { let id = RepositoryId::from_proto(update.id); let client = this.upstream_client().context("no upstream client")?; - let mut is_new = false; + let mut repo_subscription = None; let repo = this.repositories.entry(id).or_insert_with(|| { - is_new = true; let git_store = cx.weak_entity(); - cx.new(|cx| { + let repo = cx.new(|cx| { Repository::remote( id, Path::new(&update.abs_path).into(), @@ -1498,16 +1553,16 @@ impl GitStore { git_store, cx, ) - }) + }); + repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event)); + cx.emit(GitStoreEvent::RepositoryAdded); + repo }); - if is_new { - this._subscriptions - .push(cx.subscribe(repo, Self::on_repository_event)) - } + this._subscriptions.extend(repo_subscription); repo.update(cx, { let update = update.clone(); - |repo, cx| repo.apply_remote_update(update, is_new, cx) + |repo, cx| repo.apply_remote_update(update, cx) })?; this.active_repo_id.get_or_insert_with(|| { @@ -2164,6 +2219,75 @@ impl GitStore { Ok(proto::GitDiffResponse { diff }) } + async fn handle_tree_diff( + this: Entity, + request: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId(request.payload.repository_id); + let diff_type = if request.payload.is_merge { + DiffTreeType::MergeBase { + base: request.payload.base.into(), + head: request.payload.head.into(), + } + } else { + DiffTreeType::Since { + base: request.payload.base.into(), + head: request.payload.head.into(), + } + }; + + let diff = this + .update(&mut cx, |this, cx| { + let repository = this.repositories().get(&repository_id)?; + Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx))) + })? + .context("missing repository")? + .await??; + + Ok(proto::GetTreeDiffResponse { + entries: diff + .entries + .into_iter() + .map(|(path, status)| proto::TreeDiffStatus { + path: path.0.to_proto(), + status: match status { + TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(), + TreeDiffStatus::Modified { .. } => { + proto::tree_diff_status::Status::Modified.into() + } + TreeDiffStatus::Deleted { .. 
} => { + proto::tree_diff_status::Status::Deleted.into() + } + }, + oid: match status { + TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => { + Some(old.to_string()) + } + TreeDiffStatus::Added => None, + }, + }) + .collect(), + }) + } + + async fn handle_get_blob_content( + this: Entity, + request: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let oid = git::Oid::from_str(&request.payload.oid)?; + let repository_id = RepositoryId(request.payload.repository_id); + let content = this + .update(&mut cx, |this, cx| { + let repository = this.repositories().get(&repository_id)?; + Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx))) + })? + .context("missing repository")? + .await?; + Ok(proto::GetBlobContentResponse { content }) + } + async fn handle_open_unstaged_diff( this: Entity, request: TypedEnvelope, @@ -2909,6 +3033,13 @@ impl RepositorySnapshot { Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style) } + fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf { + self.path_style + .join(&self.work_directory_abs_path, repo_path.as_std_path()) + .unwrap() + .into() + } + #[inline] fn abs_path_to_repo_path_inner( work_directory_abs_path: &Path, @@ -3348,10 +3479,7 @@ impl Repository { pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option { let git_store = self.git_store.upgrade()?; let worktree_store = git_store.read(cx).worktree_store.read(cx); - let abs_path = self - .snapshot - .work_directory_abs_path - .join(path.as_std_path()); + let abs_path = self.snapshot.repo_path_to_abs_path(path); let abs_path = SanitizedPath::new(&abs_path); let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?; Some(ProjectPath { @@ -3610,6 +3738,7 @@ impl Repository { .read(cx) .file() .is_some_and(|file| file.disk_state().exists()) + && buffer.read(cx).has_unsaved_edits() { save_futures.push(buffer_store.save_buffer(buffer, cx)); } @@ -3676,6 +3805,7 @@ impl Repository { .read(cx) .file() .is_some_and(|file| file.disk_state().exists()) + && buffer.read(cx).has_unsaved_edits() { save_futures.push(buffer_store.save_buffer(buffer, cx)); } @@ -3872,18 +4002,15 @@ impl Repository { environment, .. 
} => { + // TODO would be nice to not have to do this manually let result = backend.stash_drop(index, environment).await; if result.is_ok() && let Ok(stash_entries) = backend.stash_entries().await { let snapshot = this.update(&mut cx, |this, cx| { this.snapshot.stash_entries = stash_entries; - let snapshot = this.snapshot.clone(); - cx.emit(RepositoryEvent::Updated { - full_scan: false, - new_instance: false, - }); - snapshot + cx.emit(RepositoryEvent::StashEntriesChanged); + this.snapshot.clone() })?; if let Some(updates_tx) = updates_tx { updates_tx @@ -4025,7 +4152,7 @@ impl Repository { let this = cx.weak_entity(); self.send_job( - Some(format!("git push {} {} {}", args, branch, remote).into()), + Some(format!("git push {} {} {}", args, remote, branch).into()), move |git_repo, mut cx| async move { match git_repo { RepositoryState::Local { @@ -4043,18 +4170,15 @@ impl Repository { cx.clone(), ) .await; + // TODO would be nice to not have to do this manually if result.is_ok() { let branches = backend.branches().await?; let branch = branches.into_iter().find(|branch| branch.is_head); log::info!("head branch after scan is {branch:?}"); let snapshot = this.update(&mut cx, |this, cx| { this.snapshot.branch = branch; - let snapshot = this.snapshot.clone(); - cx.emit(RepositoryEvent::Updated { - full_scan: false, - new_instance: false, - }); - snapshot + cx.emit(RepositoryEvent::BranchChanged); + this.snapshot.clone() })?; if let Some(updates_tx) = updates_tx { updates_tx @@ -4299,6 +4423,62 @@ impl Repository { }) } + pub fn diff_tree( + &mut self, + diff_type: DiffTreeType, + _cx: &App, + ) -> oneshot::Receiver> { + let repository_id = self.snapshot.id; + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await, + RepositoryState::Remote { client, project_id } => { + let response = client + .request(proto::GetTreeDiff { + project_id: project_id.0, + repository_id: repository_id.0, + is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. 
}), + base: diff_type.base().to_string(), + head: diff_type.head().to_string(), + }) + .await?; + + let entries = response + .entries + .into_iter() + .filter_map(|entry| { + let status = match entry.status() { + proto::tree_diff_status::Status::Added => TreeDiffStatus::Added, + proto::tree_diff_status::Status::Modified => { + TreeDiffStatus::Modified { + old: git::Oid::from_str( + &entry.oid.context("missing oid").log_err()?, + ) + .log_err()?, + } + } + proto::tree_diff_status::Status::Deleted => { + TreeDiffStatus::Deleted { + old: git::Oid::from_str( + &entry.oid.context("missing oid").log_err()?, + ) + .log_err()?, + } + } + }; + Some(( + RepoPath(RelPath::from_proto(&entry.path).log_err()?), + status, + )) + }) + .collect(); + + Ok(TreeDiff { entries }) + } + } + }) + } + pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver> { let id = self.id; self.send_job(None, move |repo, _cx| async move { @@ -4453,7 +4633,6 @@ impl Repository { pub(crate) fn apply_remote_update( &mut self, update: proto::UpdateRepository, - is_new: bool, cx: &mut Context, ) -> Result<()> { let conflicted_paths = TreeSet::from_ordered_entries( @@ -4462,21 +4641,30 @@ impl Repository { .into_iter() .filter_map(|path| RepoPath::from_proto(&path).log_err()), ); - self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch); - self.snapshot.head_commit = update + let new_branch = update.branch_summary.as_ref().map(proto_to_branch); + let new_head_commit = update .head_commit_details .as_ref() .map(proto_to_commit_details); + if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit { + cx.emit(RepositoryEvent::BranchChanged) + } + self.snapshot.branch = new_branch; + self.snapshot.head_commit = new_head_commit; self.snapshot.merge.conflicted_paths = conflicted_paths; self.snapshot.merge.message = update.merge_message.map(SharedString::from); - self.snapshot.stash_entries = GitStash { + let new_stash_entries = GitStash { entries: update .stash_entries .iter() .filter_map(|entry| proto_to_stash(entry).ok()) .collect(), }; + if self.snapshot.stash_entries != new_stash_entries { + cx.emit(RepositoryEvent::StashEntriesChanged) + } + self.snapshot.stash_entries = new_stash_entries; let edits = update .removed_statuses @@ -4495,14 +4683,13 @@ impl Repository { }), ) .collect::>(); + if !edits.is_empty() { + cx.emit(RepositoryEvent::StatusesChanged { full_scan: true }); + } self.snapshot.statuses_by_path.edit(edits, ()); if update.is_last_update { self.snapshot.scan_id = update.scan_id; } - cx.emit(RepositoryEvent::Updated { - full_scan: true, - new_instance: is_new, - }); Ok(()) } @@ -4599,7 +4786,7 @@ impl Repository { .upgrade() .context("missing project environment")? .update(cx, |project_environment, cx| { - project_environment.get_directory_environment(work_directory_abs_path.clone(), cx) + project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx) })? .await .unwrap_or_else(|| { @@ -4764,6 +4951,25 @@ impl Repository { cx.spawn(|_: &mut AsyncApp| async move { rx.await? }) } + fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task> { + let repository_id = self.snapshot.id; + let rx = self.send_job(None, move |state, _| async move { + match state { + RepositoryState::Local { backend, .. 
} => backend.load_blob_content(oid).await, + RepositoryState::Remote { client, project_id } => { + let response = client + .request(proto::GetBlobContent { + project_id: project_id.to_proto(), + repository_id: repository_id.0, + oid: oid.to_string(), + }) + .await?; + Ok(response.content) + } + } + }); + cx.spawn(|_: &mut AsyncApp| async move { rx.await? }) + } fn paths_changed( &mut self, @@ -4825,23 +5031,19 @@ impl Repository { .await; this.update(&mut cx, |this, cx| { - let needs_update = !changed_path_statuses.is_empty() - || this.snapshot.stash_entries != stash_entries; - this.snapshot.stash_entries = stash_entries; + if this.snapshot.stash_entries != stash_entries { + cx.emit(RepositoryEvent::StashEntriesChanged); + this.snapshot.stash_entries = stash_entries; + } + if !changed_path_statuses.is_empty() { + cx.emit(RepositoryEvent::StatusesChanged { full_scan: false }); this.snapshot .statuses_by_path .edit(changed_path_statuses, ()); this.snapshot.scan_id += 1; } - if needs_update { - cx.emit(RepositoryEvent::Updated { - full_scan: false, - new_instance: false, - }); - } - if let Some(updates_tx) = updates_tx { updates_tx .unbounded_send(DownstreamUpdate::UpdateRepository( @@ -4849,7 +5051,6 @@ impl Repository { )) .ok(); } - cx.emit(RepositoryEvent::PathsChanged); }) }, ); @@ -5112,28 +5313,24 @@ async fn compute_snapshot( MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?; log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}"); - if merge_heads_changed - || branch != prev_snapshot.branch - || statuses_by_path != prev_snapshot.statuses_by_path - { - events.push(RepositoryEvent::Updated { - full_scan: true, - new_instance: false, - }); - } - - // Cache merge conflict paths so they don't change from staging/unstaging, - // until the merge heads change (at commit time, etc.). 
if merge_heads_changed { events.push(RepositoryEvent::MergeHeadsChanged); } + if statuses_by_path != prev_snapshot.statuses_by_path { + events.push(RepositoryEvent::StatusesChanged { full_scan: true }) + } + // Useful when branch is None in detached head state let head_commit = match backend.head_sha().await { Some(head_sha) => backend.show(head_sha).await.log_err(), None => None, }; + if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit { + events.push(RepositoryEvent::BranchChanged); + } + // Used by edit prediction data collection let remote_origin_url = backend.remote_url("origin"); let remote_upstream_url = backend.remote_url("upstream"); diff --git a/crates/project/src/git_store/branch_diff.rs b/crates/project/src/git_store/branch_diff.rs new file mode 100644 index 0000000000000000000000000000000000000000..554b5b83a10afc5cc38b1568ad8d175b2cb94b83 --- /dev/null +++ b/crates/project/src/git_store/branch_diff.rs @@ -0,0 +1,386 @@ +use anyhow::Result; +use buffer_diff::BufferDiff; +use collections::HashSet; +use futures::StreamExt; +use git::{ + repository::RepoPath, + status::{DiffTreeType, FileStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus}, +}; +use gpui::{ + App, AsyncWindowContext, Context, Entity, EventEmitter, SharedString, Subscription, Task, + WeakEntity, Window, +}; + +use language::Buffer; +use text::BufferId; +use util::ResultExt; + +use crate::{ + Project, + git_store::{GitStoreEvent, Repository, RepositoryEvent}, +}; + +#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +pub enum DiffBase { + Head, + Merge { base_ref: SharedString }, +} + +impl DiffBase { + pub fn is_merge_base(&self) -> bool { + matches!(self, DiffBase::Merge { .. }) + } +} + +pub struct BranchDiff { + diff_base: DiffBase, + repo: Option>, + project: Entity, + base_commit: Option, + head_commit: Option, + tree_diff: Option, + _subscription: Subscription, + update_needed: postage::watch::Sender<()>, + _task: Task<()>, +} + +pub enum BranchDiffEvent { + FileListChanged, +} + +impl EventEmitter for BranchDiff {} + +impl BranchDiff { + pub fn new( + source: DiffBase, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let git_store = project.read(cx).git_store().clone(); + let git_store_subscription = cx.subscribe_in( + &git_store, + window, + move |this, _git_store, event, _window, cx| match event { + GitStoreEvent::ActiveRepositoryChanged(_) + | GitStoreEvent::RepositoryUpdated( + _, + RepositoryEvent::StatusesChanged { full_scan: _ }, + true, + ) + | GitStoreEvent::ConflictsUpdated => { + cx.emit(BranchDiffEvent::FileListChanged); + *this.update_needed.borrow_mut() = (); + } + _ => {} + }, + ); + + let (send, recv) = postage::watch::channel::<()>(); + let worker = window.spawn(cx, { + let this = cx.weak_entity(); + async |cx| Self::handle_status_updates(this, recv, cx).await + }); + let repo = git_store.read(cx).active_repository(); + + Self { + diff_base: source, + repo, + project, + tree_diff: None, + base_commit: None, + head_commit: None, + _subscription: git_store_subscription, + _task: worker, + update_needed: send, + } + } + + pub fn diff_base(&self) -> &DiffBase { + &self.diff_base + } + + pub async fn handle_status_updates( + this: WeakEntity, + mut recv: postage::watch::Receiver<()>, + cx: &mut AsyncWindowContext, + ) { + Self::reload_tree_diff(this.clone(), cx).await.log_err(); + while recv.next().await.is_some() { + let Ok(needs_update) = this.update(cx, |this, cx| { + let mut needs_update = false; + let 
active_repo = this + .project + .read(cx) + .git_store() + .read(cx) + .active_repository(); + if active_repo != this.repo { + needs_update = true; + this.repo = active_repo; + } else if let Some(repo) = this.repo.as_ref() { + repo.update(cx, |repo, _| { + if let Some(branch) = &repo.branch + && let DiffBase::Merge { base_ref } = &this.diff_base + && let Some(commit) = branch.most_recent_commit.as_ref() + && &branch.ref_name == base_ref + && this.base_commit.as_ref() != Some(&commit.sha) + { + this.base_commit = Some(commit.sha.clone()); + needs_update = true; + } + + if repo.head_commit.as_ref().map(|c| &c.sha) != this.head_commit.as_ref() { + this.head_commit = repo.head_commit.as_ref().map(|c| c.sha.clone()); + needs_update = true; + } + }) + } + needs_update + }) else { + return; + }; + + if needs_update { + Self::reload_tree_diff(this.clone(), cx).await.log_err(); + } + } + } + + pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { + let (repo, path) = self + .project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_id, cx)?; + if self.repo() == Some(&repo) { + return self.merge_statuses( + repo.read(cx) + .status_for_path(&path) + .map(|status| status.status), + self.tree_diff + .as_ref() + .and_then(|diff| diff.entries.get(&path)), + ); + } + None + } + + pub fn merge_statuses( + &self, + diff_from_head: Option, + diff_from_merge_base: Option<&TreeDiffStatus>, + ) -> Option { + match (diff_from_head, diff_from_merge_base) { + (None, None) => None, + (Some(diff_from_head), None) => Some(diff_from_head), + (Some(diff_from_head @ FileStatus::Unmerged(_)), _) => Some(diff_from_head), + + // file does not exist in HEAD + // but *does* exist in work-tree + // and *does* exist in merge-base + ( + Some(FileStatus::Untracked) + | Some(FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Added, + worktree_status: _, + })), + Some(_), + ) => Some(FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Modified, + worktree_status: StatusCode::Modified, + })), + + // file exists in HEAD + // but *does not* exist in work-tree + (Some(diff_from_head), Some(diff_from_merge_base)) if diff_from_head.is_deleted() => { + match diff_from_merge_base { + TreeDiffStatus::Added => None, // unchanged, didn't exist in merge base or worktree + _ => Some(diff_from_head), + } + } + + // file exists in HEAD + // and *does* exist in work-tree + (Some(FileStatus::Tracked(_)), Some(tree_status)) => { + Some(FileStatus::Tracked(TrackedStatus { + index_status: match tree_status { + TreeDiffStatus::Added { .. 
} => StatusCode::Added, + _ => StatusCode::Modified, + }, + worktree_status: match tree_status { + TreeDiffStatus::Added => StatusCode::Added, + _ => StatusCode::Modified, + }, + })) + } + + (_, Some(diff_from_merge_base)) => { + Some(diff_status_to_file_status(diff_from_merge_base)) + } + } + } + + pub async fn reload_tree_diff( + this: WeakEntity, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let task = this.update(cx, |this, cx| { + let DiffBase::Merge { base_ref } = this.diff_base.clone() else { + return None; + }; + let Some(repo) = this.repo.as_ref() else { + this.tree_diff.take(); + return None; + }; + repo.update(cx, |repo, cx| { + Some(repo.diff_tree( + DiffTreeType::MergeBase { + base: base_ref, + head: "HEAD".into(), + }, + cx, + )) + }) + })?; + let Some(task) = task else { return Ok(()) }; + + let diff = task.await??; + this.update(cx, |this, cx| { + this.tree_diff = Some(diff); + cx.emit(BranchDiffEvent::FileListChanged); + cx.notify(); + }) + } + + pub fn repo(&self) -> Option<&Entity> { + self.repo.as_ref() + } + + pub fn load_buffers(&mut self, cx: &mut Context) -> Vec { + let mut output = Vec::default(); + let Some(repo) = self.repo.clone() else { + return output; + }; + + self.project.update(cx, |_project, cx| { + let mut seen = HashSet::default(); + + for item in repo.read(cx).cached_status() { + seen.insert(item.repo_path.clone()); + let branch_diff = self + .tree_diff + .as_ref() + .and_then(|t| t.entries.get(&item.repo_path)) + .cloned(); + let status = self + .merge_statuses(Some(item.status), branch_diff.as_ref()) + .unwrap(); + if !status.has_changes() { + continue; + } + + let Some(project_path) = + repo.read(cx).repo_path_to_project_path(&item.repo_path, cx) + else { + continue; + }; + let task = Self::load_buffer(branch_diff, project_path, repo.clone(), cx); + + output.push(DiffBuffer { + repo_path: item.repo_path.clone(), + load: task, + file_status: item.status, + }); + } + let Some(tree_diff) = self.tree_diff.as_ref() else { + return; + }; + + for (path, branch_diff) in tree_diff.entries.iter() { + if seen.contains(&path) { + continue; + } + + let Some(project_path) = repo.read(cx).repo_path_to_project_path(&path, cx) else { + continue; + }; + let task = + Self::load_buffer(Some(branch_diff.clone()), project_path, repo.clone(), cx); + + let file_status = diff_status_to_file_status(branch_diff); + + output.push(DiffBuffer { + repo_path: path.clone(), + load: task, + file_status, + }); + } + }); + output + } + + fn load_buffer( + branch_diff: Option, + project_path: crate::ProjectPath, + repo: Entity, + cx: &Context<'_, Project>, + ) -> Task, Entity)>> { + let task = cx.spawn(async move |project, cx| { + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx))? + .await?; + + let languages = project.update(cx, |project, _cx| project.languages().clone())?; + + let changes = if let Some(entry) = branch_diff { + let oid = match entry { + git::status::TreeDiffStatus::Added { .. } => None, + git::status::TreeDiffStatus::Modified { old, .. } + | git::status::TreeDiffStatus::Deleted { old } => Some(old), + }; + project + .update(cx, |project, cx| { + project.git_store().update(cx, |git_store, cx| { + git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx) + }) + })? + .await? + } else { + project + .update(cx, |project, cx| { + project.open_uncommitted_diff(buffer.clone(), cx) + })? + .await? 
+ }; + Ok((buffer, changes)) + }); + task + } +} + +fn diff_status_to_file_status(branch_diff: &git::status::TreeDiffStatus) -> FileStatus { + let file_status = match branch_diff { + git::status::TreeDiffStatus::Added { .. } => FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Added, + worktree_status: StatusCode::Added, + }), + git::status::TreeDiffStatus::Modified { .. } => FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Modified, + worktree_status: StatusCode::Modified, + }), + git::status::TreeDiffStatus::Deleted { .. } => FileStatus::Tracked(TrackedStatus { + index_status: StatusCode::Deleted, + worktree_status: StatusCode::Deleted, + }), + }; + file_status +} + +#[derive(Debug)] +pub struct DiffBuffer { + pub repo_path: RepoPath, + pub file_status: FileStatus, + pub load: Task, Entity)>>, +} diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 13a082b35024b11870fb14fb3419c76841566193..160a384a4a0ff4481c97b6eda75faded28f01624 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -72,13 +72,15 @@ impl ConflictSetSnapshot { (None, None) => None, (None, Some(conflict)) => Some(conflict.range.start), (Some(conflict), None) => Some(conflict.range.start), - (Some(first), Some(second)) => Some(first.range.start.min(&second.range.start, buffer)), + (Some(first), Some(second)) => { + Some(*first.range.start.min(&second.range.start, buffer)) + } }; let end = match (old_conflicts.last(), new_conflicts.last()) { (None, None) => None, (None, Some(conflict)) => Some(conflict.range.end), (Some(first), None) => Some(first.range.end), - (Some(first), Some(second)) => Some(first.range.end.max(&second.range.end, buffer)), + (Some(first), Some(second)) => Some(*first.range.end.max(&second.range.end, buffer)), }; ConflictSetUpdate { buffer_range: start.zip(end).map(|(start, end)| start..end), @@ -269,7 +271,7 @@ mod tests { use language::language_settings::AllLanguageSettings; use serde_json::json; use settings::Settings as _; - use text::{Buffer, BufferId, Point, ToOffset as _}; + use text::{Buffer, BufferId, Point, ReplicaId, ToOffset as _}; use unindent::Unindent as _; use util::{path, rel_path::rel_path}; use worktree::WorktreeSettings; @@ -297,7 +299,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -372,7 +374,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -403,7 +405,7 @@ mod tests { >>>>>>> "# .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -445,7 +447,7 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(0, buffer_id, test_content.clone()); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone()); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); 
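Note on the new `git_store/branch_diff.rs` above: when a path appears in the merge-base tree diff, `diff_status_to_file_status` projects the tree-diff variant onto identical index and worktree status codes, and `merge_statuses` then reconciles that with the ordinary HEAD-relative status. Below is a minimal, self-contained sketch of that projection; the simplified enums, the `tree_diff_to_tracked_status` helper, and the `main` driver are illustrative stand-ins for the `git` crate types used in the diff, not the crate's real definitions.

// Simplified stand-ins for `git::status::{TreeDiffStatus, TrackedStatus, StatusCode}`;
// the real `TreeDiffStatus::Modified`/`Deleted` variants also carry the old blob `Oid`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum StatusCode {
    Added,
    Modified,
    Deleted,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TrackedStatus {
    index_status: StatusCode,
    worktree_status: StatusCode,
}

enum TreeDiffStatus {
    Added,
    Modified,
    Deleted,
}

// Mirrors the shape of `diff_status_to_file_status` above: each merge-base
// diff variant maps to the same code in both the index and worktree slots.
fn tree_diff_to_tracked_status(entry: &TreeDiffStatus) -> TrackedStatus {
    let code = match entry {
        TreeDiffStatus::Added => StatusCode::Added,
        TreeDiffStatus::Modified => StatusCode::Modified,
        TreeDiffStatus::Deleted => StatusCode::Deleted,
    };
    TrackedStatus {
        index_status: code,
        worktree_status: code,
    }
}

fn main() {
    let status = tree_diff_to_tracked_status(&TreeDiffStatus::Modified);
    assert_eq!(status.index_status, StatusCode::Modified);
    assert_eq!(status.worktree_status, StatusCode::Modified);
    println!("{status:?}");
}

The real code additionally wraps the result in `FileStatus::Tracked(..)`, and `merge_statuses` gives precedence to the HEAD-relative status for unmerged entries and handles deletions case by case; the wrapper is omitted here to keep the sketch dependency-free.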
diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index 71394ead2eb27067706023d4870c78c557c3747b..8fcf9c8a6172f866d819e34cbf3b0b4810a8fc8d 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -687,6 +687,7 @@ fn create_gpui_image(content: Vec) -> anyhow::Result> { image::ImageFormat::Gif => gpui::ImageFormat::Gif, image::ImageFormat::Bmp => gpui::ImageFormat::Bmp, image::ImageFormat::Tiff => gpui::ImageFormat::Tiff, + image::ImageFormat::Ico => gpui::ImageFormat::Ico, format => anyhow::bail!("Image format {format:?} not supported"), }, content, diff --git a/crates/workspace/src/invalid_buffer_view.rs b/crates/project/src/invalid_item_view.rs similarity index 94% rename from crates/workspace/src/invalid_buffer_view.rs rename to crates/project/src/invalid_item_view.rs index 05f409653b69e76654fa11d70b57d61fd6c0b73b..fdcdd16a69ce73d8471f8387d55cf91576f114af 100644 --- a/crates/workspace/src/invalid_buffer_view.rs +++ b/crates/project/src/invalid_item_view.rs @@ -11,7 +11,8 @@ use zed_actions::workspace::OpenWithSystem; use crate::Item; /// A view to display when a certain buffer fails to open. -pub struct InvalidBufferView { +#[derive(Debug)] +pub struct InvalidItemView { /// Which path was attempted to open. pub abs_path: Arc, /// An error message, happened when opening the buffer. @@ -20,7 +21,7 @@ pub struct InvalidBufferView { focus_handle: FocusHandle, } -impl InvalidBufferView { +impl InvalidItemView { pub fn new( abs_path: &Path, is_local: bool, @@ -37,7 +38,7 @@ impl InvalidBufferView { } } -impl Item for InvalidBufferView { +impl Item for InvalidItemView { type Event = (); fn tab_content_text(&self, mut detail: usize, _: &App) -> SharedString { @@ -66,15 +67,15 @@ impl Item for InvalidBufferView { } } -impl EventEmitter<()> for InvalidBufferView {} +impl EventEmitter<()> for InvalidItemView {} -impl Focusable for InvalidBufferView { +impl Focusable for InvalidItemView { fn focus_handle(&self, _: &App) -> FocusHandle { self.focus_handle.clone() } } -impl Render for InvalidBufferView { +impl Render for InvalidItemView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl gpui::IntoElement { let abs_path = self.abs_path.clone(); v_flex() diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 5ec6e502bd85a25b6755c6994feff7a3062c919c..55742c284ddcc7dfa6669ea3924fc60a77b2e1ab 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -234,7 +234,7 @@ pub(crate) struct OnTypeFormatting { pub push_to_history: bool, } -#[derive(Debug)] +#[derive(Clone, Debug)] pub(crate) struct InlayHints { pub range: Range, } @@ -1834,13 +1834,20 @@ impl LspCommand for GetSignatureHelp { message: Option, lsp_store: Entity, _: Entity, - _: LanguageServerId, + id: LanguageServerId, cx: AsyncApp, ) -> Result { let Some(message) = message else { return Ok(None); }; - cx.update(|cx| SignatureHelp::new(message, Some(lsp_store.read(cx).languages.clone()), cx)) + cx.update(|cx| { + SignatureHelp::new( + message, + Some(lsp_store.read(cx).languages.clone()), + Some(id), + cx, + ) + }) } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest { @@ -1900,7 +1907,12 @@ impl LspCommand for GetSignatureHelp { .signature_help .map(proto_to_lsp_signature) .and_then(|signature| { - SignatureHelp::new(signature, Some(lsp_store.read(cx).languages.clone()), cx) + SignatureHelp::new( + signature, + Some(lsp_store.read(cx).languages.clone()), + None, + cx, + ) }) 
}) } diff --git a/crates/project/src/lsp_command/signature_help.rs b/crates/project/src/lsp_command/signature_help.rs index 8adb69ac7726becada3f6123f9f350237e2aa22e..6a499311837b8ebd70874c89d9fac223b3c8ede1 100644 --- a/crates/project/src/lsp_command/signature_help.rs +++ b/crates/project/src/lsp_command/signature_help.rs @@ -2,8 +2,10 @@ use std::{ops::Range, sync::Arc}; use gpui::{App, AppContext, Entity, FontWeight, HighlightStyle, SharedString}; use language::LanguageRegistry; +use lsp::LanguageServerId; use markdown::Markdown; use rpc::proto::{self, documentation}; +use util::maybe; #[derive(Debug)] pub struct SignatureHelp { @@ -31,6 +33,7 @@ impl SignatureHelp { pub fn new( help: lsp::SignatureHelp, language_registry: Option>, + lang_server_id: Option, cx: &mut App, ) -> Option { if help.signatures.is_empty() { @@ -39,6 +42,7 @@ impl SignatureHelp { let active_signature = help.active_signature.unwrap_or(0) as usize; let mut signatures = Vec::::with_capacity(help.signatures.capacity()); for signature in &help.signatures { + let label = SharedString::from(signature.label.clone()); let active_parameter = signature .active_parameter .unwrap_or_else(|| help.active_parameter.unwrap_or(0)) @@ -49,39 +53,53 @@ impl SignatureHelp { if let Some(parameters) = &signature.parameters { for (index, parameter) in parameters.iter().enumerate() { let label_range = match ¶meter.label { - lsp::ParameterLabel::LabelOffsets(parameter_label_offsets) => { - let range = *parameter_label_offsets.get(0)? as usize - ..*parameter_label_offsets.get(1)? as usize; - if index == active_parameter { - highlights.push(( - range.clone(), - HighlightStyle { - font_weight: Some(FontWeight::EXTRA_BOLD), - ..HighlightStyle::default() - }, - )); - } - Some(range) + &lsp::ParameterLabel::LabelOffsets([offset1, offset2]) => { + maybe!({ + let offset1 = offset1 as usize; + let offset2 = offset2 as usize; + if offset1 < offset2 { + let mut indices = label.char_indices().scan( + 0, + |utf16_offset_acc, (offset, c)| { + let utf16_offset = *utf16_offset_acc; + *utf16_offset_acc += c.len_utf16(); + Some((utf16_offset, offset)) + }, + ); + let (_, offset1) = indices + .find(|(utf16_offset, _)| *utf16_offset == offset1)?; + let (_, offset2) = indices + .find(|(utf16_offset, _)| *utf16_offset == offset2)?; + Some(offset1..offset2) + } else { + log::warn!( + "language server {lang_server_id:?} produced invalid parameter label range: {offset1:?}..{offset2:?}", + ); + None + } + }) } lsp::ParameterLabel::Simple(parameter_label) => { if let Some(start) = signature.label.find(parameter_label) { - let range = start..start + parameter_label.len(); - if index == active_parameter { - highlights.push(( - range.clone(), - HighlightStyle { - font_weight: Some(FontWeight::EXTRA_BOLD), - ..HighlightStyle::default() - }, - )); - } - Some(range) + Some(start..start + parameter_label.len()) } else { None } } }; + if let Some(label_range) = &label_range + && index == active_parameter + { + highlights.push(( + label_range.clone(), + HighlightStyle { + font_weight: Some(FontWeight::EXTRA_BOLD), + ..HighlightStyle::default() + }, + )); + } + let documentation = parameter .documentation .as_ref() @@ -94,7 +112,6 @@ impl SignatureHelp { } } - let label = SharedString::from(signature.label.clone()); let documentation = signature .documentation .as_ref() @@ -290,7 +307,7 @@ mod tests { active_signature: Some(0), active_parameter: Some(0), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = 
cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -336,7 +353,7 @@ mod tests { active_signature: Some(0), active_parameter: Some(1), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -396,7 +413,7 @@ mod tests { active_signature: Some(0), active_parameter: Some(0), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -449,7 +466,7 @@ mod tests { active_signature: Some(1), active_parameter: Some(0), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -502,7 +519,7 @@ mod tests { active_signature: Some(1), active_parameter: Some(1), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -555,7 +572,7 @@ mod tests { active_signature: Some(1), active_parameter: None, }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -623,7 +640,7 @@ mod tests { active_signature: Some(2), active_parameter: Some(1), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -645,7 +662,7 @@ mod tests { active_signature: None, active_parameter: None, }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_none()); } @@ -670,7 +687,7 @@ mod tests { active_signature: Some(0), active_parameter: Some(0), }; - let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_markdown.is_some()); let markdown = maybe_markdown.unwrap(); @@ -708,7 +725,8 @@ mod tests { active_signature: Some(0), active_parameter: Some(0), }; - let maybe_signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, cx)); + let maybe_signature_help = + cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); assert!(maybe_signature_help.is_some()); let signature_help = maybe_signature_help.unwrap(); @@ -736,4 +754,40 @@ mod tests { // Check that the active parameter is correct assert_eq!(signature.active_parameter, Some(0)); } + + #[gpui::test] + fn test_create_signature_help_implements_utf16_spec(cx: &mut TestAppContext) { + let signature_help = lsp::SignatureHelp { + signatures: vec![lsp::SignatureInformation { + label: "fn test(🦀: u8, 🦀: 
&str)".to_string(), + documentation: None, + parameters: Some(vec![ + lsp::ParameterInformation { + label: lsp::ParameterLabel::LabelOffsets([8, 10]), + documentation: None, + }, + lsp::ParameterInformation { + label: lsp::ParameterLabel::LabelOffsets([16, 18]), + documentation: None, + }, + ]), + active_parameter: None, + }], + active_signature: Some(0), + active_parameter: Some(0), + }; + let signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx)); + assert!(signature_help.is_some()); + + let markdown = signature_help.unwrap(); + let signature = markdown.signatures[markdown.active_signature].clone(); + let markdown = (signature.label, signature.highlights); + assert_eq!( + markdown, + ( + SharedString::new("fn test(🦀: u8, 🦀: &str)"), + vec![(8..12, current_parameter())] + ) + ); + } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4f21255681ce27aa9bbdfe3a393d1b0666bd4733..1d6d4240de0ae8a6781b49f78341d10b5127cdc1 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -14,17 +14,22 @@ pub mod json_language_server_ext; pub mod log_store; pub mod lsp_ext_command; pub mod rust_analyzer_ext; +pub mod vue_language_server_ext; +mod inlay_hint_cache; + +use self::inlay_hint_cache::BufferInlayHints; use crate::{ CodeAction, ColorPresentation, Completion, CompletionDisplayOptions, CompletionResponse, - CompletionSource, CoreCompletion, DocumentColor, Hover, InlayHint, LocationLink, LspAction, - LspPullDiagnostics, ManifestProvidersStore, Project, ProjectItem, ProjectPath, + CompletionSource, CoreCompletion, DocumentColor, Hover, InlayHint, InlayId, LocationLink, + LspAction, LspPullDiagnostics, ManifestProvidersStore, Project, ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics, ResolveState, Symbol, buffer_store::{BufferStore, BufferStoreEvent}, environment::ProjectEnvironment, lsp_command::{self, *}, lsp_store::{ self, + inlay_hint_cache::BufferChunk, log_store::{GlobalLogStore, LanguageServerKind}, }, manifest_tree::{ @@ -56,14 +61,12 @@ use gpui::{ use http_client::HttpClient; use itertools::Itertools as _; use language::{ - Bias, BinaryStatus, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, + Bias, BinaryStatus, Buffer, BufferRow, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff, File as _, Language, LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate, LspInstaller, ManifestDelegate, ManifestName, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Toolchain, Transaction, Unclipped, - language_settings::{ - FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, language_settings, - }, + language_settings::{FormatOnSave, Formatter, LanguageSettings, language_settings}, point_to_lsp, proto::{ deserialize_anchor, deserialize_lsp_edit, deserialize_version, serialize_anchor, @@ -86,7 +89,7 @@ use parking_lot::Mutex; use postage::{mpsc, sink::Sink, stream::Stream, watch}; use rand::prelude::*; use rpc::{ - AnyProtoClient, + AnyProtoClient, ErrorCode, ErrorExt as _, proto::{LspRequestId, LspRequestMessage as _}, }; use serde::Serialize; @@ -107,11 +110,14 @@ use std::{ path::{self, Path, PathBuf}, pin::pin, rc::Rc, - sync::Arc, + sync::{ + Arc, + atomic::{self, AtomicUsize}, + }, time::{Duration, Instant}, }; use sum_tree::Dimensions; -use text::{Anchor, BufferId, LineEnding, OffsetRangeExt, ToPoint as _}; +use text::{Anchor, BufferId, LineEnding, OffsetRangeExt, Point, 
ToPoint as _}; use util::{ ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into, @@ -122,6 +128,7 @@ use util::{ pub use fs::*; pub use language::Location; +pub use lsp_store::inlay_hint_cache::{CacheInlayHints, InvalidationStrategy}; #[cfg(any(test, feature = "test-support"))] pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; pub use worktree::{ @@ -566,8 +573,7 @@ impl LocalLspStore { } fn setup_lsp_messages( - this: WeakEntity, - + lsp_store: WeakEntity, language_server: &LanguageServer, delegate: Arc, adapter: Arc, @@ -577,7 +583,7 @@ impl LocalLspStore { language_server .on_notification::({ let adapter = adapter.clone(); - let this = this.clone(); + let this = lsp_store.clone(); move |mut params, cx| { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { @@ -621,8 +627,7 @@ impl LocalLspStore { .on_request::({ let adapter = adapter.adapter.clone(); let delegate = delegate.clone(); - let this = this.clone(); - + let this = lsp_store.clone(); move |params, cx| { let adapter = adapter.clone(); let delegate = delegate.clone(); @@ -667,7 +672,7 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); move |_, cx| { let this = this.clone(); let cx = cx.clone(); @@ -695,7 +700,7 @@ impl LocalLspStore { // to these requests when initializing. language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); move |params, cx| { let this = this.clone(); let mut cx = cx.clone(); @@ -716,7 +721,7 @@ impl LocalLspStore { language_server .on_request::({ - let lsp_store = this.clone(); + let lsp_store = lsp_store.clone(); move |params, cx| { let lsp_store = lsp_store.clone(); let mut cx = cx.clone(); @@ -745,7 +750,7 @@ impl LocalLspStore { language_server .on_request::({ - let lsp_store = this.clone(); + let lsp_store = lsp_store.clone(); move |params, cx| { let lsp_store = lsp_store.clone(); let mut cx = cx.clone(); @@ -774,7 +779,7 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); move |params, cx| { let mut cx = cx.clone(); let this = this.clone(); @@ -793,18 +798,22 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let lsp_store = lsp_store.clone(); move |(), cx| { - let this = this.clone(); + let this = lsp_store.clone(); let mut cx = cx.clone(); async move { - this.update(&mut cx, |this, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints); - this.downstream_client.as_ref().map(|(client, project_id)| { - client.send(proto::RefreshInlayHints { - project_id: *project_id, + this.update(&mut cx, |lsp_store, cx| { + cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); + lsp_store + .downstream_client + .as_ref() + .map(|(client, project_id)| { + client.send(proto::RefreshInlayHints { + project_id: *project_id, + server_id: server_id.to_proto(), + }) }) - }) })? 
.transpose()?; Ok(()) @@ -815,7 +824,7 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); move |(), cx| { let this = this.clone(); let mut cx = cx.clone(); @@ -837,7 +846,7 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); move |(), cx| { let this = this.clone(); let mut cx = cx.clone(); @@ -863,7 +872,7 @@ impl LocalLspStore { language_server .on_request::({ - let this = this.clone(); + let this = lsp_store.clone(); let name = name.to_string(); move |params, cx| { let this = this.clone(); @@ -901,7 +910,7 @@ impl LocalLspStore { .detach(); language_server .on_notification::({ - let this = this.clone(); + let this = lsp_store.clone(); let name = name.to_string(); move |params, cx| { let this = this.clone(); @@ -933,7 +942,7 @@ impl LocalLspStore { language_server .on_notification::({ - let this = this.clone(); + let this = lsp_store.clone(); move |params, cx| { if let Some(this) = this.upgrade() { this.update(cx, |this, cx| { @@ -952,7 +961,7 @@ impl LocalLspStore { language_server .on_notification::({ - let this = this.clone(); + let this = lsp_store.clone(); move |params, cx| { if let Some(this) = this.upgrade() { this.update(cx, |_, cx| { @@ -970,7 +979,7 @@ impl LocalLspStore { language_server .on_notification::({ - let this = this.clone(); + let this = lsp_store.clone(); move |params, cx| { let mut cx = cx.clone(); if let Some(this) = this.upgrade() { @@ -989,9 +998,10 @@ impl LocalLspStore { }) .detach(); - json_language_server_ext::register_requests(this.clone(), language_server); - rust_analyzer_ext::register_notifications(this.clone(), language_server); - clangd_ext::register_notifications(this, language_server, adapter); + vue_language_server_ext::register_requests(lsp_store.clone(), language_server); + json_language_server_ext::register_requests(lsp_store.clone(), language_server); + rust_analyzer_ext::register_notifications(lsp_store.clone(), language_server); + clangd_ext::register_notifications(lsp_store, language_server, adapter); } fn shutdown_language_servers_on_quit( @@ -1335,26 +1345,58 @@ impl LocalLspStore { })?; } + // Formatter for `code_actions_on_format` that runs before + // the rest of the formatters + let mut code_actions_on_format_formatters = None; + let should_run_code_actions_on_format = !matches!( + (trigger, &settings.format_on_save), + (FormatTrigger::Save, &FormatOnSave::Off) + ); + if should_run_code_actions_on_format { + let have_code_actions_to_run_on_format = settings + .code_actions_on_format + .values() + .any(|enabled| *enabled); + if have_code_actions_to_run_on_format { + zlog::trace!(logger => "going to run code actions on format"); + code_actions_on_format_formatters = Some( + settings + .code_actions_on_format + .iter() + .filter_map(|(action, enabled)| enabled.then_some(action)) + .cloned() + .map(Formatter::CodeAction) + .collect::>(), + ); + } + } + let formatters = match (trigger, &settings.format_on_save) { (FormatTrigger::Save, FormatOnSave::Off) => &[], (FormatTrigger::Manual, _) | (FormatTrigger::Save, FormatOnSave::On) => { - match &settings.formatter { - SelectedFormatter::Auto => { - if settings.prettier.allowed { - zlog::trace!(logger => "Formatter set to auto: defaulting to prettier"); - std::slice::from_ref(&Formatter::Prettier) - } else { - zlog::trace!(logger => "Formatter set to auto: defaulting to primary language server"); - std::slice::from_ref(&Formatter::LanguageServer { name: None }) - } - } - 
SelectedFormatter::List(formatter_list) => formatter_list.as_ref(), - } + settings.formatter.as_ref() } }; + let formatters = code_actions_on_format_formatters + .iter() + .flatten() + .chain(formatters); + for formatter in formatters { + let formatter = if formatter == &Formatter::Auto { + if settings.prettier.allowed { + zlog::trace!(logger => "Formatter set to auto: defaulting to prettier"); + &Formatter::Prettier + } else { + zlog::trace!(logger => "Formatter set to auto: defaulting to primary language server"); + &Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current) + } + } else { + formatter + }; match formatter { + Formatter::Auto => unreachable!("Auto resolved above"), Formatter::Prettier => { let logger = zlog::scoped!(logger => "prettier"); zlog::trace!(logger => "formatting"); @@ -1409,7 +1451,7 @@ impl LocalLspStore { }, )?; } - Formatter::LanguageServer { name } => { + Formatter::LanguageServer(specifier) => { let logger = zlog::scoped!(logger => "language-server"); zlog::trace!(logger => "formatting"); let _timer = zlog::time!(logger => "Formatting buffer using language server"); @@ -1419,16 +1461,19 @@ impl LocalLspStore { continue; }; - let language_server = if let Some(name) = name.as_deref() { - adapters_and_servers.iter().find_map(|(adapter, server)| { - if adapter.name.0.as_ref() == name { - Some(server.clone()) - } else { - None - } - }) - } else { - adapters_and_servers.first().map(|e| e.1.clone()) + let language_server = match specifier { + settings::LanguageServerFormatterSpecifier::Specific { name } => { + adapters_and_servers.iter().find_map(|(adapter, server)| { + if adapter.name.0.as_ref() == name { + Some(server.clone()) + } else { + None + } + }) + } + settings::LanguageServerFormatterSpecifier::Current => { + adapters_and_servers.first().map(|e| e.1.clone()) + } }; let Some(language_server) = language_server else { @@ -2442,7 +2487,7 @@ impl LocalLspStore { uri.clone(), adapter.language_id(&language.name()), 0, - initial_snapshot.text(), + initial_snapshot.text_with_original_line_endings(), ); vec![snapshot] @@ -2990,9 +3035,8 @@ impl LocalLspStore { Some(buffer_to_edit.read(cx).saved_version().clone()) }; - let most_recent_edit = version.and_then(|version| { - version.iter().max_by_key(|timestamp| timestamp.value) - }); + let most_recent_edit = + version.and_then(|version| version.most_recent()); // Check if the edit that triggered that edit has been made by this participant. 
if let Some(most_recent_edit) = most_recent_edit { @@ -3464,9 +3508,55 @@ pub struct LspStore { diagnostic_summaries: HashMap, HashMap>>, pub lsp_server_capabilities: HashMap, - lsp_document_colors: HashMap, - lsp_code_lens: HashMap, - running_lsp_requests: HashMap>)>, + lsp_data: HashMap, + next_hint_id: Arc, +} + +#[derive(Debug)] +pub struct BufferLspData { + buffer_version: Global, + document_colors: Option, + code_lens: Option, + inlay_hints: BufferInlayHints, + lsp_requests: HashMap>>, + chunk_lsp_requests: HashMap>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +struct LspKey { + request_type: TypeId, + server_queried: Option, +} + +impl BufferLspData { + fn new(buffer: &Entity, cx: &mut App) -> Self { + Self { + buffer_version: buffer.read(cx).version(), + document_colors: None, + code_lens: None, + inlay_hints: BufferInlayHints::new(buffer, cx), + lsp_requests: HashMap::default(), + chunk_lsp_requests: HashMap::default(), + } + } + + fn remove_server_data(&mut self, for_server: LanguageServerId) { + if let Some(document_colors) = &mut self.document_colors { + document_colors.colors.remove(&for_server); + document_colors.cache_version += 1; + } + + if let Some(code_lens) = &mut self.code_lens { + code_lens.lens.remove(&for_server); + } + + self.inlay_hints.remove_server_data(for_server); + } + + #[cfg(any(test, feature = "test-support"))] + pub fn inlay_hints(&self) -> &BufferInlayHints { + &self.inlay_hints + } } #[derive(Debug, Default, Clone)] @@ -3480,7 +3570,6 @@ type CodeLensTask = Shared>, Arc #[derive(Debug, Default)] struct DocumentColorData { - colors_for_version: Global, colors: HashMap>, cache_version: usize, colors_update: Option<(Global, DocumentColorTask)>, @@ -3488,17 +3577,10 @@ struct DocumentColorData { #[derive(Debug, Default)] struct CodeLensData { - lens_for_version: Global, lens: HashMap>, update: Option<(Global, CodeLensTask)>, } -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum LspFetchStrategy { - IgnoreCache, - UseCache { known_cache_version: Option }, -} - #[derive(Debug)] pub enum LspStoreEvent { LanguageServerAdded(LanguageServerId, LanguageServerName, Option), @@ -3515,7 +3597,7 @@ pub enum LspStoreEvent { new_language: Option>, }, Notification(String), - RefreshInlayHints, + RefreshInlayHints(LanguageServerId), RefreshCodeLens, DiagnosticsUpdated { server_id: LanguageServerId, @@ -3587,7 +3669,6 @@ impl LspStore { client.add_entity_request_handler(Self::handle_apply_code_action_kind); client.add_entity_request_handler(Self::handle_resolve_completion_documentation); client.add_entity_request_handler(Self::handle_apply_code_action); - client.add_entity_request_handler(Self::handle_inlay_hints); client.add_entity_request_handler(Self::handle_get_project_symbols); client.add_entity_request_handler(Self::handle_resolve_inlay_hint); client.add_entity_request_handler(Self::handle_get_color_presentation); @@ -3737,9 +3818,8 @@ impl LspStore { nonce: StdRng::from_os_rng().random(), diagnostic_summaries: HashMap::default(), lsp_server_capabilities: HashMap::default(), - lsp_document_colors: HashMap::default(), - lsp_code_lens: HashMap::default(), - running_lsp_requests: HashMap::default(), + lsp_data: HashMap::default(), + next_hint_id: Arc::default(), active_entry: None, _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx), @@ -3798,9 +3878,8 @@ impl LspStore { nonce: StdRng::from_os_rng().random(), diagnostic_summaries: HashMap::default(), lsp_server_capabilities: HashMap::default(), - 
lsp_document_colors: HashMap::default(), - lsp_code_lens: HashMap::default(), - running_lsp_requests: HashMap::default(), + next_hint_id: Arc::default(), + lsp_data: HashMap::default(), active_entry: None, _maintain_workspace_config, @@ -3997,8 +4076,7 @@ impl LspStore { *refcount }; if refcount == 0 { - lsp_store.lsp_document_colors.remove(&buffer_id); - lsp_store.lsp_code_lens.remove(&buffer_id); + lsp_store.lsp_data.remove(&buffer_id); let local = lsp_store.as_local_mut().unwrap(); local.registered_buffers.remove(&buffer_id); local.buffers_opened_in_servers.remove(&buffer_id); @@ -4265,7 +4343,7 @@ impl LspStore { &self, buffer: &Entity, request: &R, - cx: &Context, + cx: &App, ) -> bool where R: LspCommand, @@ -4286,7 +4364,7 @@ impl LspStore { &self, buffer: &Entity, check: F, - cx: &Context, + cx: &App, ) -> bool where F: Fn(&lsp::ServerCapabilities) -> bool, @@ -4772,7 +4850,65 @@ impl LspStore { } } - pub fn resolve_inlay_hint( + pub fn resolved_hint( + &mut self, + buffer_id: BufferId, + id: InlayId, + cx: &mut Context, + ) -> Option { + let buffer = self.buffer_store.read(cx).get(buffer_id)?; + + let lsp_data = self.lsp_data.get_mut(&buffer_id)?; + let buffer_lsp_hints = &mut lsp_data.inlay_hints; + let hint = buffer_lsp_hints.hint_for_id(id)?.clone(); + let (server_id, resolve_data) = match &hint.resolve_state { + ResolveState::Resolved => return Some(ResolvedHint::Resolved(hint)), + ResolveState::Resolving => { + return Some(ResolvedHint::Resolving( + buffer_lsp_hints.hint_resolves.get(&id)?.clone(), + )); + } + ResolveState::CanResolve(server_id, resolve_data) => (*server_id, resolve_data.clone()), + }; + + let resolve_task = self.resolve_inlay_hint(hint, buffer, server_id, cx); + let buffer_lsp_hints = &mut self.lsp_data.get_mut(&buffer_id)?.inlay_hints; + let previous_task = buffer_lsp_hints.hint_resolves.insert( + id, + cx.spawn(async move |lsp_store, cx| { + let resolved_hint = resolve_task.await; + lsp_store + .update(cx, |lsp_store, _| { + if let Some(old_inlay_hint) = lsp_store + .lsp_data + .get_mut(&buffer_id) + .and_then(|buffer_lsp_data| buffer_lsp_data.inlay_hints.hint_for_id(id)) + { + match resolved_hint { + Ok(resolved_hint) => { + *old_inlay_hint = resolved_hint; + } + Err(e) => { + old_inlay_hint.resolve_state = + ResolveState::CanResolve(server_id, resolve_data); + log::error!("Inlay hint resolve failed: {e:#}"); + } + } + } + }) + .ok(); + }) + .shared(), + ); + debug_assert!( + previous_task.is_none(), + "Did not change hint's resolve state after spawning its resolve" + ); + buffer_lsp_hints.hint_for_id(id)?.resolve_state = ResolveState::Resolving; + None + } + + fn resolve_inlay_hint( &self, mut hint: InlayHint, buffer: Entity, @@ -5121,6 +5257,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -5186,6 +5323,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -5251,6 +5389,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -5316,6 +5455,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ 
-5382,6 +5522,7 @@ impl LspStore { let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -5449,6 +5590,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -5510,32 +5652,38 @@ impl LspStore { ) -> CodeLensTask { let version_queried_for = buffer.read(cx).version(); let buffer_id = buffer.read(cx).remote_id(); + let existing_servers = self.as_local().map(|local| { + local + .buffers_opened_in_servers + .get(&buffer_id) + .cloned() + .unwrap_or_default() + }); - if let Some(cached_data) = self.lsp_code_lens.get(&buffer_id) - && !version_queried_for.changed_since(&cached_data.lens_for_version) - { - let has_different_servers = self.as_local().is_some_and(|local| { - local - .buffers_opened_in_servers - .get(&buffer_id) - .cloned() - .unwrap_or_default() - != cached_data.lens.keys().copied().collect() - }); - if !has_different_servers { - return Task::ready(Ok(Some( - cached_data.lens.values().flatten().cloned().collect(), - ))) - .shared(); + if let Some(lsp_data) = self.current_lsp_data(buffer_id) { + if let Some(cached_lens) = &lsp_data.code_lens { + if !version_queried_for.changed_since(&lsp_data.buffer_version) { + let has_different_servers = existing_servers.is_some_and(|existing_servers| { + existing_servers != cached_lens.lens.keys().copied().collect() + }); + if !has_different_servers { + return Task::ready(Ok(Some( + cached_lens.lens.values().flatten().cloned().collect(), + ))) + .shared(); + } + } else if let Some((updating_for, running_update)) = cached_lens.update.as_ref() { + if !version_queried_for.changed_since(updating_for) { + return running_update.clone(); + } + } } } - let lsp_data = self.lsp_code_lens.entry(buffer_id).or_default(); - if let Some((updating_for, running_update)) = &lsp_data.update - && !version_queried_for.changed_since(updating_for) - { - return running_update.clone(); - } + let lens_lsp_data = self + .latest_lsp_data(buffer, cx) + .code_lens + .get_or_insert_default(); let buffer = buffer.clone(); let query_version_queried_for = version_queried_for.clone(); let new_task = cx @@ -5554,7 +5702,13 @@ impl LspStore { Err(e) => { lsp_store .update(cx, |lsp_store, _| { - lsp_store.lsp_code_lens.entry(buffer_id).or_default().update = None; + if let Some(lens_lsp_data) = lsp_store + .lsp_data + .get_mut(&buffer_id) + .and_then(|lsp_data| lsp_data.code_lens.as_mut()) + { + lens_lsp_data.update = None; + } }) .ok(); return Err(e); @@ -5563,25 +5717,26 @@ impl LspStore { lsp_store .update(cx, |lsp_store, _| { - let lsp_data = lsp_store.lsp_code_lens.entry(buffer_id).or_default(); + let lsp_data = lsp_store.current_lsp_data(buffer_id)?; + let code_lens = lsp_data.code_lens.as_mut()?; if let Some(fetched_lens) = fetched_lens { - if lsp_data.lens_for_version == query_version_queried_for { - lsp_data.lens.extend(fetched_lens); + if lsp_data.buffer_version == query_version_queried_for { + code_lens.lens.extend(fetched_lens); } else if !lsp_data - .lens_for_version + .buffer_version .changed_since(&query_version_queried_for) { - lsp_data.lens_for_version = query_version_queried_for; - lsp_data.lens = fetched_lens; + lsp_data.buffer_version = query_version_queried_for; + code_lens.lens = fetched_lens; } } - lsp_data.update = None; - Some(lsp_data.lens.values().flatten().cloned().collect()) + code_lens.update = None; 
+ Some(code_lens.lens.values().flatten().cloned().collect()) }) .map_err(Arc::new) }) .shared(); - lsp_data.update = Some((version_queried_for, new_task.clone())); + lens_lsp_data.update = Some((version_queried_for, new_task.clone())); new_task } @@ -5597,6 +5752,7 @@ impl LspStore { } let request_task = upstream_client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -6299,6 +6455,7 @@ impl LspStore { } let request_task = client.request_lsp( upstream_project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(upstream_project_id, buffer.read(cx)), @@ -6341,58 +6498,305 @@ impl LspStore { } } + pub fn applicable_inlay_chunks( + &mut self, + buffer: &Entity, + ranges: &[Range], + cx: &mut Context, + ) -> Vec> { + self.latest_lsp_data(buffer, cx) + .inlay_hints + .applicable_chunks(ranges) + .map(|chunk| chunk.start..chunk.end) + .collect() + } + + pub fn invalidate_inlay_hints<'a>( + &'a mut self, + for_buffers: impl IntoIterator + 'a, + ) { + for buffer_id in for_buffers { + if let Some(lsp_data) = self.lsp_data.get_mut(buffer_id) { + lsp_data.inlay_hints.clear(); + } + } + } + pub fn inlay_hints( &mut self, + invalidate: InvalidationStrategy, buffer: Entity, - range: Range, + ranges: Vec>, + known_chunks: Option<(clock::Global, HashSet>)>, cx: &mut Context, - ) -> Task>> { - let range_start = range.start; - let range_end = range.end; - let buffer_id = buffer.read(cx).remote_id().into(); - let request = InlayHints { range }; + ) -> HashMap, Task>> { + let buffer_snapshot = buffer.read(cx).snapshot(); + let for_server = if let InvalidationStrategy::RefreshRequested(server_id) = invalidate { + Some(server_id) + } else { + None + }; + let invalidate_cache = invalidate.should_invalidate(); + let next_hint_id = self.next_hint_id.clone(); + let lsp_data = self.latest_lsp_data(&buffer, cx); + let existing_inlay_hints = &mut lsp_data.inlay_hints; + let known_chunks = known_chunks + .filter(|(known_version, _)| !lsp_data.buffer_version.changed_since(known_version)) + .map(|(_, known_chunks)| known_chunks) + .unwrap_or_default(); - if let Some((client, project_id)) = self.upstream_client() { - if !self.is_capable_for_proto_request(&buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + let mut hint_fetch_tasks = Vec::new(); + let mut cached_inlay_hints = HashMap::default(); + let mut ranges_to_query = Vec::new(); + let applicable_chunks = existing_inlay_hints + .applicable_chunks(ranges.as_slice()) + .filter(|chunk| !known_chunks.contains(&(chunk.start..chunk.end))) + .collect::>(); + if applicable_chunks.is_empty() { + return HashMap::default(); + } + + let last_chunk_number = applicable_chunks.len() - 1; + + for (i, row_chunk) in applicable_chunks.into_iter().enumerate() { + match ( + existing_inlay_hints + .cached_hints(&row_chunk) + .filter(|_| !invalidate_cache) + .cloned(), + existing_inlay_hints + .fetched_hints(&row_chunk) + .as_ref() + .filter(|_| !invalidate_cache) + .cloned(), + ) { + (None, None) => { + let end = if last_chunk_number == i { + Point::new(row_chunk.end, buffer_snapshot.line_len(row_chunk.end)) + } else { + Point::new(row_chunk.end, 0) + }; + ranges_to_query.push(( + row_chunk, + buffer_snapshot.anchor_before(Point::new(row_chunk.start, 0)) + ..buffer_snapshot.anchor_after(end), + )); + } + (None, Some(fetched_hints)) => { + hint_fetch_tasks.push((row_chunk, fetched_hints.clone())) + } + (Some(cached_hints), None) => { + for (server_id, 
cached_hints) in cached_hints { + if for_server.is_none_or(|for_server| for_server == server_id) { + cached_inlay_hints + .entry(row_chunk.start..row_chunk.end) + .or_insert_with(HashMap::default) + .entry(server_id) + .or_insert_with(Vec::new) + .extend(cached_hints); + } + } + } + (Some(cached_hints), Some(fetched_hints)) => { + hint_fetch_tasks.push((row_chunk, fetched_hints.clone())); + for (server_id, cached_hints) in cached_hints { + if for_server.is_none_or(|for_server| for_server == server_id) { + cached_inlay_hints + .entry(row_chunk.start..row_chunk.end) + .or_insert_with(HashMap::default) + .entry(server_id) + .or_insert_with(Vec::new) + .extend(cached_hints); + } + } + } } - let proto_request = proto::InlayHints { - project_id, - buffer_id, - start: Some(serialize_anchor(&range_start)), - end: Some(serialize_anchor(&range_end)), - version: serialize_version(&buffer.read(cx).version()), - }; - cx.spawn(async move |project, cx| { - let response = client - .request(proto_request) - .await - .context("inlay hints proto request")?; - LspCommand::response_from_proto( - request, - response, - project.upgrade().context("No project")?, - buffer.clone(), - cx.clone(), + } + + let cached_chunk_data = cached_inlay_hints + .into_iter() + .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) + .collect(); + if hint_fetch_tasks.is_empty() && ranges_to_query.is_empty() { + cached_chunk_data + } else { + if invalidate_cache { + lsp_data.inlay_hints.clear(); + } + + for (chunk, range_to_query) in ranges_to_query { + let next_hint_id = next_hint_id.clone(); + let buffer = buffer.clone(); + let new_inlay_hints = cx + .spawn(async move |lsp_store, cx| { + let new_fetch_task = lsp_store.update(cx, |lsp_store, cx| { + lsp_store.fetch_inlay_hints(for_server, &buffer, range_to_query, cx) + })?; + new_fetch_task + .await + .and_then(|new_hints_by_server| { + lsp_store.update(cx, |lsp_store, cx| { + let lsp_data = lsp_store.latest_lsp_data(&buffer, cx); + let update_cache = !lsp_data + .buffer_version + .changed_since(&buffer.read(cx).version()); + new_hints_by_server + .into_iter() + .map(|(server_id, new_hints)| { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + (server_id, new_hints) + }) + .collect() + }) + }) + .map_err(Arc::new) + }) + .shared(); + + let fetch_task = lsp_data.inlay_hints.fetched_hints(&chunk); + *fetch_task = Some(new_inlay_hints.clone()); + hint_fetch_tasks.push((chunk, new_inlay_hints)); + } + + let mut combined_data = cached_chunk_data; + combined_data.extend(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| { + ( + chunk.start..chunk.end, + cx.spawn(async move |_, _| { + hints_fetch.await.map_err(|e| { + if e.error_code() != ErrorCode::Internal { + anyhow!(e.error_code()) + } else { + anyhow!("{e:#}") + } + }) + }), ) - .await - .context("inlay hints proto response conversion") + })); + combined_data + } + } + + fn fetch_inlay_hints( + &mut self, + for_server: Option, + buffer: &Entity, + range: Range, + cx: &mut Context, + ) -> Task>>> { + let request = InlayHints { + range: range.clone(), + }; + if let Some((upstream_client, project_id)) = self.upstream_client() { + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(HashMap::default())); + } + let request_task = 
upstream_client.request_lsp( + project_id, + for_server.map(|id| id.to_proto()), + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); + let buffer = buffer.clone(); + cx.spawn(async move |weak_lsp_store, cx| { + let Some(lsp_store) = weak_lsp_store.upgrade() else { + return Ok(HashMap::default()); + }; + let Some(responses) = request_task.await? else { + return Ok(HashMap::default()); + }; + + let inlay_hints = join_all(responses.payload.into_iter().map(|response| { + let lsp_store = lsp_store.clone(); + let buffer = buffer.clone(); + let cx = cx.clone(); + let request = request.clone(); + async move { + ( + LanguageServerId::from_proto(response.server_id), + request + .response_from_proto(response.response, lsp_store, buffer, cx) + .await, + ) + } + })) + .await; + + let mut has_errors = false; + let inlay_hints = inlay_hints + .into_iter() + .filter_map(|(server_id, inlay_hints)| match inlay_hints { + Ok(inlay_hints) => Some((server_id, inlay_hints)), + Err(e) => { + has_errors = true; + log::error!("{e:#}"); + None + } + }) + .collect::>(); + anyhow::ensure!( + !has_errors || !inlay_hints.is_empty(), + "Failed to fetch inlay hints" + ); + Ok(inlay_hints) }) } else { - let lsp_request_task = self.request_lsp( - buffer.clone(), - LanguageServerToQuery::FirstCapable, - request, - cx, - ); - cx.spawn(async move |_, cx| { - buffer - .update(cx, |buffer, _| { - buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp]) - })? + let inlay_hints_task = match for_server { + Some(server_id) => { + let server_task = self.request_lsp( + buffer.clone(), + LanguageServerToQuery::Other(server_id), + request, + cx, + ); + cx.background_spawn(async move { + let mut responses = Vec::new(); + match server_task.await { + Ok(response) => responses.push((server_id, response)), + Err(e) => log::error!( + "Error handling response for inlay hints request: {e:#}" + ), + } + responses + }) + } + None => self.request_multiple_lsp_locally(buffer, None::, request, cx), + }; + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + cx.background_spawn(async move { + Ok(inlay_hints_task .await - .context("waiting for inlay hint request range edits")?; - lsp_request_task.await.context("inlay hints LSP request") + .into_iter() + .map(|(server_id, mut new_hints)| { + new_hints.retain(|hint| { + hint.position.is_valid(&buffer_snapshot) + && range.start.is_valid(&buffer_snapshot) + && range.end.is_valid(&buffer_snapshot) + && hint.position.cmp(&range.start, &buffer_snapshot).is_ge() + && hint.position.cmp(&range.end, &buffer_snapshot).is_le() + }); + (server_id, new_hints) + }) + .collect()) }) } } @@ -6496,42 +6900,43 @@ impl LspStore { pub fn document_colors( &mut self, - fetch_strategy: LspFetchStrategy, + known_cache_version: Option, buffer: Entity, cx: &mut Context, ) -> Option { let version_queried_for = buffer.read(cx).version(); let buffer_id = buffer.read(cx).remote_id(); - match fetch_strategy { - LspFetchStrategy::IgnoreCache => {} - LspFetchStrategy::UseCache { - known_cache_version, - } => { - if let Some(cached_data) = self.lsp_document_colors.get(&buffer_id) - && !version_queried_for.changed_since(&cached_data.colors_for_version) - { - let has_different_servers = self.as_local().is_some_and(|local| { - local - .buffers_opened_in_servers - .get(&buffer_id) - .cloned() - .unwrap_or_default() - != cached_data.colors.keys().copied().collect() - }); + let current_language_servers = self.as_local().map(|local| { + local + 
.buffers_opened_in_servers + .get(&buffer_id) + .cloned() + .unwrap_or_default() + }); + + if let Some(lsp_data) = self.current_lsp_data(buffer_id) { + if let Some(cached_colors) = &lsp_data.document_colors { + if !version_queried_for.changed_since(&lsp_data.buffer_version) { + let has_different_servers = + current_language_servers.is_some_and(|current_language_servers| { + current_language_servers + != cached_colors.colors.keys().copied().collect() + }); if !has_different_servers { - if Some(cached_data.cache_version) == known_cache_version { + let cache_version = cached_colors.cache_version; + if Some(cache_version) == known_cache_version { return None; } else { return Some( Task::ready(Ok(DocumentColors { - colors: cached_data + colors: cached_colors .colors .values() .flatten() .cloned() .collect(), - cache_version: Some(cached_data.cache_version), + cache_version: Some(cache_version), })) .shared(), ); @@ -6541,13 +6946,16 @@ impl LspStore { } } - let lsp_data = self.lsp_document_colors.entry(buffer_id).or_default(); - if let Some((updating_for, running_update)) = &lsp_data.colors_update + let color_lsp_data = self + .latest_lsp_data(&buffer, cx) + .document_colors + .get_or_insert_default(); + if let Some((updating_for, running_update)) = &color_lsp_data.colors_update && !version_queried_for.changed_since(updating_for) { return Some(running_update.clone()); } - let query_version_queried_for = version_queried_for.clone(); + let buffer_version_queried_for = version_queried_for.clone(); let new_task = cx .spawn(async move |lsp_store, cx| { cx.background_executor() @@ -6562,13 +6970,12 @@ impl LspStore { .map_err(Arc::new); let fetched_colors = match fetched_colors { Ok(fetched_colors) => { - if fetch_strategy != LspFetchStrategy::IgnoreCache - && Some(true) - == buffer - .update(cx, |buffer, _| { - buffer.version() != query_version_queried_for - }) - .ok() + if Some(true) + == buffer + .update(cx, |buffer, _| { + buffer.version() != buffer_version_queried_for + }) + .ok() { return Ok(DocumentColors::default()); } @@ -6577,11 +6984,11 @@ impl LspStore { Err(e) => { lsp_store .update(cx, |lsp_store, _| { - lsp_store - .lsp_document_colors - .entry(buffer_id) - .or_default() - .colors_update = None; + if let Some(lsp_data) = lsp_store.lsp_data.get_mut(&buffer_id) { + if let Some(document_colors) = &mut lsp_data.document_colors { + document_colors.colors_update = None; + } + } }) .ok(); return Err(e); @@ -6589,24 +6996,25 @@ impl LspStore { }; lsp_store - .update(cx, |lsp_store, _| { - let lsp_data = lsp_store.lsp_document_colors.entry(buffer_id).or_default(); + .update(cx, |lsp_store, cx| { + let lsp_data = lsp_store.latest_lsp_data(&buffer, cx); + let lsp_colors = lsp_data.document_colors.get_or_insert_default(); if let Some(fetched_colors) = fetched_colors { - if lsp_data.colors_for_version == query_version_queried_for { - lsp_data.colors.extend(fetched_colors); - lsp_data.cache_version += 1; + if lsp_data.buffer_version == buffer_version_queried_for { + lsp_colors.colors.extend(fetched_colors); + lsp_colors.cache_version += 1; } else if !lsp_data - .colors_for_version - .changed_since(&query_version_queried_for) + .buffer_version + .changed_since(&buffer_version_queried_for) { - lsp_data.colors_for_version = query_version_queried_for; - lsp_data.colors = fetched_colors; - lsp_data.cache_version += 1; + lsp_data.buffer_version = buffer_version_queried_for; + lsp_colors.colors = fetched_colors; + lsp_colors.cache_version += 1; } } - lsp_data.colors_update = None; - let colors = lsp_data + 
lsp_colors.colors_update = None; + let colors = lsp_colors .colors .values() .flatten() @@ -6614,13 +7022,13 @@ impl LspStore { .collect::>(); DocumentColors { colors, - cache_version: Some(lsp_data.cache_version), + cache_version: Some(lsp_colors.cache_version), } }) .map_err(Arc::new) }) .shared(); - lsp_data.colors_update = Some((version_queried_for, new_task.clone())); + color_lsp_data.colors_update = Some((version_queried_for, new_task.clone())); Some(new_task) } @@ -6637,6 +7045,7 @@ impl LspStore { let request_task = client.request_lsp( project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(project_id, buffer.read(cx)), @@ -6715,6 +7124,7 @@ impl LspStore { } let request_task = client.request_lsp( upstream_project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(upstream_project_id, buffer.read(cx)), @@ -6778,6 +7188,7 @@ impl LspStore { } let request_task = client.request_lsp( upstream_project_id, + None, LSP_REQUEST_TIMEOUT, cx.background_executor().clone(), request.to_proto(upstream_project_id, buffer.read(cx)), @@ -7111,6 +7522,7 @@ impl LspStore { let previous_snapshot = buffer_snapshots.last()?; let build_incremental_change = || { + let line_ending = next_snapshot.line_ending(); buffer .edits_since::>( previous_snapshot.snapshot.version(), @@ -7118,16 +7530,18 @@ impl LspStore { .map(|edit| { let edit_start = edit.new.start.0; let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); - let new_text = next_snapshot - .text_for_range(edit.new.start.1..edit.new.end.1) - .collect(); lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new( point_to_lsp(edit_start), point_to_lsp(edit_end), )), range_length: None, - text: new_text, + // Collect changed text and preserve line endings. + // text_for_range returns chunks with normalized \n, so we need to + // convert to the buffer's actual line ending for LSP. + text: line_ending.into_string( + next_snapshot.text_for_range(edit.new.start.1..edit.new.end.1), + ), } }) .collect() @@ -7147,7 +7561,7 @@ impl LspStore { vec![lsp::TextDocumentContentChangeEvent { range: None, range_length: None, - text: next_snapshot.text(), + text: next_snapshot.text_with_original_line_endings(), }] } Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(), @@ -7884,8 +8298,9 @@ impl LspStore { cx.background_spawn(async move { let mut responses = Vec::with_capacity(response_results.len()); while let Some((server_id, response_result)) = response_results.next().await { - if let Some(response) = response_result.log_err() { - responses.push((server_id, response)); + match response_result { + Ok(response) => responses.push((server_id, response)), + Err(e) => log::error!("Error handling response for request {request:?}: {e:#}"), } } responses @@ -7943,27 +8358,30 @@ impl LspStore { let sender_id = envelope.original_sender_id().unwrap_or_default(); let lsp_query = envelope.payload; let lsp_request_id = LspRequestId(lsp_query.lsp_request_id); + let server_id = lsp_query.server_id.map(LanguageServerId::from_proto); match lsp_query.request.context("invalid LSP query request")? 
{ Request::GetReferences(get_references) => { let position = get_references.position.clone().and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_references, position, - cx.clone(), + &mut cx, ) .await?; } Request::GetDocumentColor(get_document_color) => { Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_document_color, None, - cx.clone(), + &mut cx, ) .await?; } @@ -7971,22 +8389,24 @@ impl LspStore { let position = get_hover.position.clone().and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_hover, position, - cx.clone(), + &mut cx, ) .await?; } Request::GetCodeActions(get_code_actions) => { Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_code_actions, None, - cx.clone(), + &mut cx, ) .await?; } @@ -7997,22 +8417,24 @@ impl LspStore { .and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_signature_help, position, - cx.clone(), + &mut cx, ) .await?; } Request::GetCodeLens(get_code_lens) => { Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_code_lens, None, - cx.clone(), + &mut cx, ) .await?; } @@ -8020,11 +8442,12 @@ impl LspStore { let position = get_definition.position.clone().and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_definition, position, - cx.clone(), + &mut cx, ) .await?; } @@ -8035,11 +8458,12 @@ impl LspStore { .and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_declaration, position, - cx.clone(), + &mut cx, ) .await?; } @@ -8050,11 +8474,12 @@ impl LspStore { .and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_type_definition, position, - cx.clone(), + &mut cx, ) .await?; } @@ -8065,15 +8490,15 @@ impl LspStore { .and_then(deserialize_anchor); Self::query_lsp_locally::( lsp_store, + server_id, sender_id, lsp_request_id, get_implementation, position, - cx.clone(), + &mut cx, ) .await?; } - // Diagnostics pull synchronizes internally via the buffer state, and cannot be handled generically as the other requests. Request::GetDocumentDiagnostics(get_document_diagnostics) => { let buffer_id = BufferId::new(get_document_diagnostics.buffer_id())?; let version = deserialize_version(get_document_diagnostics.buffer_version()); @@ -8086,16 +8511,20 @@ impl LspStore { })? 
.await?; lsp_store.update(&mut cx, |lsp_store, cx| { - let existing_queries = lsp_store - .running_lsp_requests - .entry(TypeId::of::()) - .or_default(); + let lsp_data = lsp_store.latest_lsp_data(&buffer, cx); + let key = LspKey { + request_type: TypeId::of::(), + server_queried: server_id, + }; if ::ProtoRequest::stop_previous_requests( - ) || buffer.read(cx).version.changed_since(&existing_queries.0) - { - existing_queries.1.clear(); + ) { + if let Some(lsp_requests) = lsp_data.lsp_requests.get_mut(&key) { + lsp_requests.clear(); + }; } - existing_queries.1.insert( + + let existing_queries = lsp_data.lsp_requests.entry(key).or_default(); + existing_queries.insert( lsp_request_id, cx.spawn(async move |lsp_store, cx| { let diagnostics_pull = lsp_store @@ -8113,6 +8542,39 @@ impl LspStore { ); })?; } + Request::InlayHints(inlay_hints) => { + let query_start = inlay_hints + .start + .clone() + .and_then(deserialize_anchor) + .context("invalid inlay hints range start")?; + let query_end = inlay_hints + .end + .clone() + .and_then(deserialize_anchor) + .context("invalid inlay hints range end")?; + Self::deduplicate_range_based_lsp_requests::( + &lsp_store, + server_id, + lsp_request_id, + &inlay_hints, + query_start..query_end, + &mut cx, + ) + .await + .context("preparing inlay hints request")?; + Self::query_lsp_locally::( + lsp_store, + server_id, + sender_id, + lsp_request_id, + inlay_hints, + None, + &mut cx, + ) + .await + .context("querying for inlay hints")? + } } Ok(proto::Ack {}) } @@ -9028,7 +9490,7 @@ impl LspStore { if let Some(work) = status.pending_work.remove(&token) && !work.is_disk_based_diagnostics_progress { - cx.emit(LspStoreEvent::RefreshInlayHints); + cx.emit(LspStoreEvent::RefreshInlayHints(language_server_id)); } cx.notify(); } @@ -9160,12 +9622,14 @@ impl LspStore { } async fn handle_refresh_inlay_hints( - this: Entity, - _: TypedEnvelope, + lsp_store: Entity, + envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints); + lsp_store.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::RefreshInlayHints( + LanguageServerId::from_proto(envelope.payload.server_id), + )); })?; Ok(proto::Ack {}) } @@ -9182,51 +9646,6 @@ impl LspStore { Ok(proto::Ack {}) } - async fn handle_inlay_hints( - this: Entity, - envelope: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result { - let sender_id = envelope.original_sender_id().unwrap_or_default(); - let buffer_id = BufferId::new(envelope.payload.buffer_id)?; - let buffer = this.update(&mut cx, |this, cx| { - this.buffer_store.read(cx).get_existing(buffer_id) - })??; - buffer - .update(&mut cx, |buffer, _| { - buffer.wait_for_version(deserialize_version(&envelope.payload.version)) - })? - .await - .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?; - - let start = envelope - .payload - .start - .and_then(deserialize_anchor) - .context("missing range start")?; - let end = envelope - .payload - .end - .and_then(deserialize_anchor) - .context("missing range end")?; - let buffer_hints = this - .update(&mut cx, |lsp_store, cx| { - lsp_store.inlay_hints(buffer.clone(), start..end, cx) - })? 
- .await - .context("inlay hints fetch")?; - - this.update(&mut cx, |project, cx| { - InlayHints::response_to_proto( - buffer_hints, - project, - sender_id, - &buffer.read(cx).version(), - cx, - ) - }) - } - async fn handle_get_color_presentation( lsp_store: Entity, envelope: TypedEnvelope, @@ -9292,7 +9711,7 @@ impl LspStore { } async fn handle_resolve_inlay_hint( - this: Entity, + lsp_store: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { @@ -9302,13 +9721,13 @@ impl LspStore { .expect("incorrect protobuf resolve inlay hint message: missing the inlay hint"); let hint = InlayHints::proto_to_project_hint(proto_hint) .context("resolved proto inlay hint conversion")?; - let buffer = this.update(&mut cx, |this, cx| { + let buffer = lsp_store.update(&mut cx, |lsp_store, cx| { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; - this.buffer_store.read(cx).get_existing(buffer_id) + lsp_store.buffer_store.read(cx).get_existing(buffer_id) })??; - let response_hint = this - .update(&mut cx, |this, cx| { - this.resolve_inlay_hint( + let response_hint = lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store.resolve_inlay_hint( hint, buffer, LanguageServerId(envelope.payload.language_server_id as usize), @@ -9363,11 +9782,7 @@ impl LspStore { name: symbol.name, kind: symbol.kind, range: symbol.range, - label: CodeLabel { - text: Default::default(), - runs: Default::default(), - filter_range: Default::default(), - }, + label: CodeLabel::default(), }, cx, ) @@ -9557,11 +9972,7 @@ impl LspStore { new_text: completion.new_text, source: completion.source, documentation: None, - label: CodeLabel { - text: Default::default(), - runs: Default::default(), - filter_range: Default::default(), - }, + label: CodeLabel::default(), insert_text_mode: None, icon_path: None, confirm: None, @@ -10422,7 +10833,7 @@ impl LspStore { language_server.name(), Some(key.worktree_id), )); - cx.emit(LspStoreEvent::RefreshInlayHints); + cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); let server_capabilities = language_server.capabilities(); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { @@ -10514,13 +10925,12 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; - let initial_snapshot = &snapshot.snapshot; let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), version, - initial_snapshot.text(), + buffer_handle.read(cx).text_with_original_line_endings(), ); buffer_paths_registered.push((buffer_id, file.abs_path(cx))); local @@ -11040,12 +11450,8 @@ impl LspStore { fn cleanup_lsp_data(&mut self, for_server: LanguageServerId) { self.lsp_server_capabilities.remove(&for_server); - for buffer_colors in self.lsp_document_colors.values_mut() { - buffer_colors.colors.remove(&for_server); - buffer_colors.cache_version += 1; - } - for buffer_lens in self.lsp_code_lens.values_mut() { - buffer_lens.lens.remove(&for_server); + for lsp_data in self.lsp_data.values_mut() { + lsp_data.remove_server_data(for_server); } if let Some(local) = self.as_local_mut() { local.buffer_pull_diagnostics_result_ids.remove(&for_server); @@ -11444,9 +11850,25 @@ impl LspStore { .map(serde_json::from_value) .transpose()? { + let state = self + .as_local_mut() + .context("Expected LSP Store to be local")? 
+ .language_servers + .get_mut(&server_id) + .context("Could not obtain Language Servers state")?; server.update_capabilities(|capabilities| { capabilities.diagnostic_provider = Some(caps); }); + if let LanguageServerState::Running { + workspace_refresh_task, + .. + } = state + && workspace_refresh_task.is_none() + { + *workspace_refresh_task = + lsp_workspace_diagnostics_refresh(server.clone(), cx) + } + notify_server_capabilities_updated(&server, cx); } } @@ -11610,6 +12032,19 @@ impl LspStore { server.update_capabilities(|capabilities| { capabilities.diagnostic_provider = None; }); + let state = self + .as_local_mut() + .context("Expected LSP Store to be local")? + .language_servers + .get_mut(&server_id) + .context("Could not obtain Language Servers state")?; + if let LanguageServerState::Running { + workspace_refresh_task, + .. + } = state + { + _ = workspace_refresh_task.take(); + } notify_server_capabilities_updated(&server, cx); } "textDocument/documentColor" => { @@ -11625,13 +12060,71 @@ impl LspStore { Ok(()) } + async fn deduplicate_range_based_lsp_requests( + lsp_store: &Entity, + server_id: Option, + lsp_request_id: LspRequestId, + proto_request: &T::ProtoRequest, + range: Range, + cx: &mut AsyncApp, + ) -> Result<()> + where + T: LspCommand, + T::ProtoRequest: proto::LspRequestMessage, + { + let buffer_id = BufferId::new(proto_request.buffer_id())?; + let version = deserialize_version(proto_request.buffer_version()); + let buffer = lsp_store.update(cx, |this, cx| { + this.buffer_store.read(cx).get_existing(buffer_id) + })??; + buffer + .update(cx, |buffer, _| buffer.wait_for_version(version))? + .await?; + lsp_store.update(cx, |lsp_store, cx| { + let lsp_data = lsp_store + .lsp_data + .entry(buffer_id) + .or_insert_with(|| BufferLspData::new(&buffer, cx)); + let chunks_queried_for = lsp_data + .inlay_hints + .applicable_chunks(&[range]) + .collect::>(); + match chunks_queried_for.as_slice() { + &[chunk] => { + let key = LspKey { + request_type: TypeId::of::(), + server_queried: server_id, + }; + let previous_request = lsp_data + .chunk_lsp_requests + .entry(key) + .or_default() + .insert(chunk, lsp_request_id); + if let Some((previous_request, running_requests)) = + previous_request.zip(lsp_data.lsp_requests.get_mut(&key)) + { + running_requests.remove(&previous_request); + } + } + _ambiguous_chunks => { + // Have not found a unique chunk for the query range — be lenient and let the query to be spawned, + // there, a buffer version-based check will be performed and outdated requests discarded. + } + } + anyhow::Ok(()) + })??; + + Ok(()) + } + async fn query_lsp_locally( lsp_store: Entity, + for_server_id: Option, sender_id: proto::PeerId, lsp_request_id: LspRequestId, proto_request: T::ProtoRequest, position: Option, - mut cx: AsyncApp, + cx: &mut AsyncApp, ) -> Result<()> where T: LspCommand + Clone, @@ -11641,30 +12134,48 @@ impl LspStore { { let buffer_id = BufferId::new(proto_request.buffer_id())?; let version = deserialize_version(proto_request.buffer_version()); - let buffer = lsp_store.update(&mut cx, |this, cx| { + let buffer = lsp_store.update(cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) })??; buffer - .update(&mut cx, |buffer, _| { - buffer.wait_for_version(version.clone()) - })? + .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))? 
.await?; - let buffer_version = buffer.read_with(&cx, |buffer, _| buffer.version())?; + let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version())?; let request = T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?; - lsp_store.update(&mut cx, |lsp_store, cx| { - let request_task = - lsp_store.request_multiple_lsp_locally(&buffer, position, request, cx); - let existing_queries = lsp_store - .running_lsp_requests - .entry(TypeId::of::()) - .or_default(); - if T::ProtoRequest::stop_previous_requests() - || buffer_version.changed_since(&existing_queries.0) - { - existing_queries.1.clear(); + let key = LspKey { + request_type: TypeId::of::(), + server_queried: for_server_id, + }; + lsp_store.update(cx, |lsp_store, cx| { + let request_task = match for_server_id { + Some(server_id) => { + let server_task = lsp_store.request_lsp( + buffer.clone(), + LanguageServerToQuery::Other(server_id), + request.clone(), + cx, + ); + cx.background_spawn(async move { + let mut responses = Vec::new(); + match server_task.await { + Ok(response) => responses.push((server_id, response)), + Err(e) => log::error!( + "Error handling response for request {request:?}: {e:#}" + ), + } + responses + }) + } + None => lsp_store.request_multiple_lsp_locally(&buffer, position, request, cx), + }; + let lsp_data = lsp_store.latest_lsp_data(&buffer, cx); + if T::ProtoRequest::stop_previous_requests() { + if let Some(lsp_requests) = lsp_data.lsp_requests.get_mut(&key) { + lsp_requests.clear(); + } } - existing_queries.1.insert( + lsp_data.lsp_requests.entry(key).or_default().insert( lsp_request_id, cx.spawn(async move |lsp_store, cx| { let response = request_task.await; @@ -11723,8 +12234,15 @@ impl LspStore { #[cfg(any(test, feature = "test-support"))] pub fn forget_code_lens_task(&mut self, buffer_id: BufferId) -> Option { - let data = self.lsp_code_lens.get_mut(&buffer_id)?; - Some(data.update.take()?.1) + Some( + self.lsp_data + .get_mut(&buffer_id)? + .code_lens + .take()? + .update + .take()? + .1, + ) } pub fn downstream_client(&self) -> Option<(AnyProtoClient, u64)> { @@ -11734,6 +12252,26 @@ impl LspStore { pub fn worktree_store(&self) -> Entity { self.worktree_store.clone() } + + /// Gets what's stored in the LSP data for the given buffer. + pub fn current_lsp_data(&mut self, buffer_id: BufferId) -> Option<&mut BufferLspData> { + self.lsp_data.get_mut(&buffer_id) + } + + /// Gets the most recent LSP data for the given buffer: if the data is absent or out of date, + /// new [`BufferLspData`] will be created to replace the previous state. + pub fn latest_lsp_data(&mut self, buffer: &Entity, cx: &mut App) -> &mut BufferLspData { + let (buffer_id, buffer_version) = + buffer.read_with(cx, |buffer, _| (buffer.remote_id(), buffer.version())); + let lsp_data = self + .lsp_data + .entry(buffer_id) + .or_insert_with(|| BufferLspData::new(buffer, cx)); + if buffer_version.changed_since(&lsp_data.buffer_version) { + *lsp_data = BufferLspData::new(buffer, cx); + } + lsp_data + } } // Registration with registerOptions as null, should fallback to true. 
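// A minimal sketch of how a caller might drive the chunk-based inlay hint API that the hunks above and the new `inlay_hint_cache` module below introduce. The function name, the `visible_ranges` parameter, and the import paths are assumptions for illustration; `InvalidationStrategy`, `LspStore::inlay_hints`, and the per-chunk result map are the pieces added by this change.
use std::ops::Range;
use gpui::{Context, Entity};
use language::Buffer;
use crate::lsp_store::{InvalidationStrategy, LspStore};

fn query_visible_inlay_hints(
    lsp_store: &mut LspStore,
    buffer: Entity<Buffer>,
    visible_ranges: Vec<Range<text::Anchor>>,
    cx: &mut Context<LspStore>,
) {
    // Scrolling to new rows should not drop cached hints, so request no invalidation.
    let strategy = InvalidationStrategy::None;
    // One task per ~50-row chunk that overlaps the queried ranges; chunks that are
    // already cached resolve immediately with their cached per-server hints.
    let hints_by_chunk = lsp_store.inlay_hints(strategy, buffer, visible_ranges, None, cx);
    for (row_range, hints_task) in hints_by_chunk {
        cx.spawn(async move |_, _| {
            if let Ok(hints_by_server) = hints_task.await {
                // Each entry maps a LanguageServerId to its (InlayId, InlayHint) pairs.
                log::debug!(
                    "rows {row_range:?}: hints from {} servers",
                    hints_by_server.len()
                );
            }
        })
        .detach();
    }
}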
@@ -12487,6 +13025,11 @@ impl From for CompletionDocumentation { } } +pub enum ResolvedHint { + Resolved(InlayHint), + Resolving(Shared>), +} + fn glob_literal_prefix(glob: &Path) -> PathBuf { glob.components() .take_while(|component| match component { @@ -12719,22 +13262,17 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { return Ok(None); } - #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { let mut worktree_abs_path = self.worktree_root_path().to_path_buf(); if self.fs.is_file(&worktree_abs_path).await { worktree_abs_path.pop(); } - let shell_path = self.shell_env().await.get("PATH").cloned(); - which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() - } - #[cfg(target_os = "windows")] - async fn which(&self, command: &OsStr) -> Option { - // todo(windows) Getting the shell env variables in a current directory on Windows is more complicated than other platforms - // there isn't a 'default shell' necessarily. The closest would be the default profile on the windows terminal - // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup - which::which(command).ok() + let env = self.shell_env().await; + + let shell_path = env.get("PATH").cloned(); + + which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() } async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { @@ -13038,19 +13576,19 @@ mod tests { #[test] fn test_multi_len_chars_normalization() { - let mut label = CodeLabel { - text: "myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(), - runs: vec![(0..6, HighlightId(1))], - filter_range: 0..6, - }; + let mut label = CodeLabel::new( + "myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(), + 0..6, + vec![(0..6, HighlightId(1))], + ); ensure_uniform_list_compatible_label(&mut label); assert_eq!( label, - CodeLabel { - text: "myElˇ (parameter) myElˇ: { foo: string; }".to_string(), - runs: vec![(0..6, HighlightId(1))], - filter_range: 0..6, - } + CodeLabel::new( + "myElˇ (parameter) myElˇ: { foo: string; }".to_string(), + 0..6, + vec![(0..6, HighlightId(1))], + ) ); } } diff --git a/crates/project/src/lsp_store/inlay_hint_cache.rs b/crates/project/src/lsp_store/inlay_hint_cache.rs new file mode 100644 index 0000000000000000000000000000000000000000..0d527b83d2eef03b9473edc2711041c0ebccadb6 --- /dev/null +++ b/crates/project/src/lsp_store/inlay_hint_cache.rs @@ -0,0 +1,221 @@ +use std::{collections::hash_map, ops::Range, sync::Arc}; + +use collections::HashMap; + use futures::future::Shared; +use gpui::{App, Entity, Task}; +use language::{Buffer, BufferRow, BufferSnapshot}; +use lsp::LanguageServerId; +use text::OffsetRangeExt; + +use crate::{InlayHint, InlayId}; + +pub type CacheInlayHints = HashMap>; +pub type CacheInlayHintsTask = Shared>>>; + +/// The logic to apply when querying for new inlay hints and deciding what to do with old cache entries in case of conflicts. +#[derive(Debug, Clone, Copy)] +pub enum InvalidationStrategy { + /// A language server reset its hints via a refresh request. + /// Demands that all needed inlay hints be re-queried and all cached entries invalidated, but does not require an instant update along with the invalidation. + /// + /// Although nothing forbids a language server from sending this request on every edit, it is expected to be sent only when certain internal server state changes that would otherwise be invisible to the editor. + RefreshRequested(LanguageServerId), + /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited in at least one place. 
+ /// Neither the editor nor the LSP server can tell which open files' hints are unaffected, so all of them have to be invalidated and re-queried, quickly enough to not feel slow, yet debounced to avoid re-loading hints on every rapid keystroke sequence. + BufferEdited, + /// A new file was opened, a new excerpt was added to a multibuffer, or a [multi]buffer was scrolled to a new position. + /// No invalidation should be done at all; all new hints are added to the cache. + /// + /// A special case is editor toggles and settings changes: + /// in addition to LSP capabilities, Zed allows omitting certain hint kinds (defined by the corresponding LSP part: type/parameter/other) and toggling hints. + /// This does not lead to cache invalidation, but requires using the cache to determine which hints are not displayed and to issue an update to the inlays on screen. + None, +} + +impl InvalidationStrategy { + pub fn should_invalidate(&self) -> bool { + matches!( + self, + InvalidationStrategy::RefreshRequested(_) | InvalidationStrategy::BufferEdited + ) + } +} + +pub struct BufferInlayHints { + snapshot: BufferSnapshot, + buffer_chunks: Vec, + hints_by_chunks: Vec>, + fetches_by_chunks: Vec>, + hints_by_id: HashMap, + pub(super) hint_resolves: HashMap>>, +} + +#[derive(Debug, Clone, Copy)] +struct HintForId { + chunk_id: usize, + server_id: LanguageServerId, + position: usize, +} + +/// An end-exclusive range of rows, as [`lsp::Range`] and +/// +/// denote. +/// +/// Represents an area in a text editor, adjacent to the other chunks. +/// Together, the chunks form the entire document at a particular version [`clock::Global`]. +/// Each chunk is queried for inlays as `(start_row, 0)..(end_exclusive, 0)` via +/// +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct BufferChunk { + id: usize, + pub start: BufferRow, + pub end: BufferRow, +} + +impl std::fmt::Debug for BufferInlayHints { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BufferInlayHints") + .field("buffer_chunks", &self.buffer_chunks) + .field("hints_by_chunks", &self.hints_by_chunks) + .field("fetches_by_chunks", &self.fetches_by_chunks) + .field("hints_by_id", &self.hints_by_id) + .finish_non_exhaustive() + } +} + +const MAX_ROWS_IN_A_CHUNK: u32 = 50; + +impl BufferInlayHints { + pub fn new(buffer: &Entity, cx: &mut App) -> Self { + let buffer = buffer.read(cx); + let snapshot = buffer.snapshot(); + let buffer_point_range = (0..buffer.len()).to_point(&snapshot); + let last_row = buffer_point_range.end.row; + let buffer_chunks = (buffer_point_range.start.row..=last_row) + .step_by(MAX_ROWS_IN_A_CHUNK as usize) + .enumerate() + .map(|(id, chunk_start)| BufferChunk { + id, + start: chunk_start, + end: (chunk_start + MAX_ROWS_IN_A_CHUNK).min(last_row), + }) + .collect::>(); + + Self { + hints_by_chunks: vec![None; buffer_chunks.len()], + fetches_by_chunks: vec![None; buffer_chunks.len()], + hints_by_id: HashMap::default(), + hint_resolves: HashMap::default(), + snapshot, + buffer_chunks, + } + } + + pub fn applicable_chunks( + &self, + ranges: &[Range], + ) -> impl Iterator { + let row_ranges = ranges + .iter() + .map(|range| range.to_point(&self.snapshot)) + .map(|point_range| point_range.start.row..=point_range.end.row) + .collect::>(); + self.buffer_chunks + .iter() + .filter(move |chunk| -> bool { + // Be lenient and yield multiple chunks if they "touch" the exclusive part of the range. 
+ // This will result in LSP hints [re-]queried for more ranges, but also more hints already visible when scrolling around. + let chunk_range = chunk.start..=chunk.end; + row_ranges.iter().any(|row_range| { + chunk_range.contains(&row_range.start()) + || chunk_range.contains(&row_range.end()) + }) + }) + .copied() + } + + pub fn cached_hints(&mut self, chunk: &BufferChunk) -> Option<&CacheInlayHints> { + self.hints_by_chunks[chunk.id].as_ref() + } + + pub fn fetched_hints(&mut self, chunk: &BufferChunk) -> &mut Option { + &mut self.fetches_by_chunks[chunk.id] + } + + #[cfg(any(test, feature = "test-support"))] + pub fn all_cached_hints(&self) -> Vec { + self.hints_by_chunks + .iter() + .filter_map(|hints| hints.as_ref()) + .flat_map(|hints| hints.values().cloned()) + .flatten() + .map(|(_, hint)| hint) + .collect() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn all_fetched_hints(&self) -> Vec { + self.fetches_by_chunks + .iter() + .filter_map(|fetches| fetches.clone()) + .collect() + } + + pub fn remove_server_data(&mut self, for_server: LanguageServerId) { + for (chunk_index, hints) in self.hints_by_chunks.iter_mut().enumerate() { + if let Some(hints) = hints { + if hints.remove(&for_server).is_some() { + self.fetches_by_chunks[chunk_index] = None; + } + } + } + } + + pub fn clear(&mut self) { + self.hints_by_chunks = vec![None; self.buffer_chunks.len()]; + self.fetches_by_chunks = vec![None; self.buffer_chunks.len()]; + self.hints_by_id.clear(); + self.hint_resolves.clear(); + } + + pub fn insert_new_hints( + &mut self, + chunk: BufferChunk, + server_id: LanguageServerId, + new_hints: Vec<(InlayId, InlayHint)>, + ) { + let existing_hints = self.hints_by_chunks[chunk.id] + .get_or_insert_default() + .entry(server_id) + .or_insert_with(Vec::new); + let existing_count = existing_hints.len(); + existing_hints.extend(new_hints.into_iter().enumerate().filter_map( + |(i, (id, new_hint))| { + let new_hint_for_id = HintForId { + chunk_id: chunk.id, + server_id, + position: existing_count + i, + }; + if let hash_map::Entry::Vacant(vacant_entry) = self.hints_by_id.entry(id) { + vacant_entry.insert(new_hint_for_id); + Some((id, new_hint)) + } else { + None + } + }, + )); + *self.fetched_hints(&chunk) = None; + } + + pub fn hint_for_id(&mut self, id: InlayId) -> Option<&mut InlayHint> { + let hint_for_id = self.hints_by_id.get(&id)?; + let (hint_id, hint) = self + .hints_by_chunks + .get_mut(hint_for_id.chunk_id)? + .as_mut()? + .get_mut(&hint_for_id.server_id)? + .get_mut(hint_for_id.position)?; + debug_assert_eq!(*hint_id, id, "Invalid pointer {hint_for_id:?}"); + Some(hint) + } +} diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index c79e3df178290fa614e08a8abd85a527a696b003..5066143244da890a63ead6650cb61fdb71d3964a 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -657,6 +657,7 @@ impl LspCommand for GetLspRunnables { ); task_template.args.extend(cargo.cargo_args); if !cargo.executable_args.is_empty() { + let shell_kind = task_template.shell.shell_kind(cfg!(windows)); task_template.args.push("--".to_string()); task_template.args.extend( cargo @@ -682,7 +683,7 @@ impl LspCommand for GetLspRunnables { // That bit is not auto-expanded when using single quotes. // Escape extra cargo args unconditionally as those are unlikely to contain `~`. 
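For context, the removed path below called `shlex::try_quote` directly. A minimal sketch of that quoting step, assuming the `shlex` crate as a dependency (`quote_extra_args` is a hypothetical helper; the new code delegates to Zed's `ShellKind::try_quote` instead):

```rust
// Sketch of why extra cargo args get quoted before being spliced into a single
// shell command line; uses the `shlex` crate that the removed code path relied on.
fn quote_extra_args(args: &[&str]) -> Vec<String> {
    args.iter()
        .filter_map(|arg| shlex::try_quote(arg).ok().map(|quoted| quoted.to_string()))
        .collect()
}

fn main() {
    let args = ["--nocapture", "--message-format json"];
    let quoted = quote_extra_args(&args);

    // Joining and re-splitting the quoted form recovers the original arguments,
    // which is what the spawned shell will effectively do.
    let rejoined = shlex::split(&quoted.join(" ")).unwrap();
    assert_eq!(rejoined, args);
}
```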
.flat_map(|extra_arg| { - shlex::try_quote(&extra_arg).ok().map(|s| s.to_string()) + shell_kind.try_quote(&extra_arg).map(|s| s.to_string()) }), ); } diff --git a/crates/project/src/lsp_store/vue_language_server_ext.rs b/crates/project/src/lsp_store/vue_language_server_ext.rs new file mode 100644 index 0000000000000000000000000000000000000000..28249745403d2c6afe3532582ee92bb94de7dde7 --- /dev/null +++ b/crates/project/src/lsp_store/vue_language_server_ext.rs @@ -0,0 +1,124 @@ +use anyhow::Context as _; +use gpui::{AppContext, WeakEntity}; +use lsp::{LanguageServer, LanguageServerName}; +use serde_json::Value; + +use crate::LspStore; + +struct VueServerRequest; +struct TypescriptServerResponse; + +impl lsp::notification::Notification for VueServerRequest { + type Params = Vec<(u64, String, serde_json::Value)>; + + const METHOD: &'static str = "tsserver/request"; +} + +impl lsp::notification::Notification for TypescriptServerResponse { + type Params = Vec<(u64, serde_json::Value)>; + + const METHOD: &'static str = "tsserver/response"; +} + +const VUE_SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vue-language-server"); +const VTSLS: LanguageServerName = LanguageServerName::new_static("vtsls"); +const TS_LS: LanguageServerName = LanguageServerName::new_static("typescript-language-server"); + +pub fn register_requests(lsp_store: WeakEntity, language_server: &LanguageServer) { + let language_server_name = language_server.name(); + if language_server_name == VUE_SERVER_NAME { + let vue_server_id = language_server.server_id(); + language_server + .on_notification::({ + move |params, cx| { + let lsp_store = lsp_store.clone(); + let Ok(Some(vue_server)) = lsp_store.read_with(cx, |this, _| { + this.language_server_for_id(vue_server_id) + }) else { + return; + }; + + let requests = params; + let target_server = match lsp_store.read_with(cx, |this, _| { + let language_server_id = this + .as_local() + .and_then(|local| { + local.language_server_ids.iter().find_map(|(seed, v)| { + [VTSLS, TS_LS].contains(&seed.name).then_some(v.id) + }) + }) + .context("Could not find language server")?; + + this.language_server_for_id(language_server_id) + .context("language server not found") + }) { + Ok(Ok(server)) => server, + other => { + log::warn!( + "vue-language-server forwarding skipped: {other:?}. 
\ + Returning null tsserver responses" + ); + if !requests.is_empty() { + let null_responses = requests + .into_iter() + .map(|(id, _, _)| (id, Value::Null)) + .collect::>(); + let _ = vue_server + .notify::(null_responses); + } + return; + } + }; + + let cx = cx.clone(); + for (request_id, command, payload) in requests.into_iter() { + let target_server = target_server.clone(); + let vue_server = vue_server.clone(); + cx.background_spawn(async move { + let response = target_server + .request::( + lsp::ExecuteCommandParams { + command: "typescript.tsserverRequest".to_owned(), + arguments: vec![Value::String(command), payload], + ..Default::default() + }, + ) + .await; + + let response_body = match response { + util::ConnectionResult::Result(Ok(result)) => match result { + Some(Value::Object(mut map)) => map + .remove("body") + .unwrap_or(Value::Object(map)), + Some(other) => other, + None => Value::Null, + }, + util::ConnectionResult::Result(Err(error)) => { + log::warn!( + "typescript.tsserverRequest failed: {error:?} for request {request_id}" + ); + Value::Null + } + other => { + log::warn!( + "typescript.tsserverRequest did not return a response: {other:?} for request {request_id}" + ); + Value::Null + } + }; + + if let Err(err) = vue_server + .notify::(vec![(request_id, response_body)]) + { + log::warn!( + "Failed to notify vue-language-server of tsserver response: {err:?}" + ); + } + }) + .detach(); + } + } + }) + .detach(); + } +} diff --git a/crates/project/src/manifest_tree/server_tree.rs b/crates/project/src/manifest_tree/server_tree.rs index 0d80b845dc63ce00ed7221162b323b8549ad4075..b6828fdb281d51600096bbcad411f94a91c69e54 100644 --- a/crates/project/src/manifest_tree/server_tree.rs +++ b/crates/project/src/manifest_tree/server_tree.rs @@ -47,7 +47,7 @@ pub struct LanguageServerTree { /// A node in language server tree represents either: /// - A language server that has already been initialized/updated for a given project /// - A soon-to-be-initialized language server. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct LanguageServerTreeNode(Weak); /// Describes a request to launch a language server. 
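The forwarding bridge in `vue_language_server_ext.rs` above ultimately reduces each reply to the `body` field of the `typescript.tsserverRequest` result, with `null` as the fallback for every failure mode. A small sketch of just that unwrapping step, using `serde_json` as the new module already does (`extract_body` is a hypothetical helper name, not part of the real code):

```rust
use serde_json::{Value, json};

/// Hypothetical helper mirroring the response handling above: prefer the
/// `body` field, otherwise pass the value through, and treat "no result"
/// as an explicit `null` for the Vue server.
fn extract_body(result: Option<Value>) -> Value {
    match result {
        Some(Value::Object(mut map)) => map.remove("body").unwrap_or(Value::Object(map)),
        Some(other) => other,
        None => Value::Null,
    }
}

fn main() {
    // A successful tsserver-style reply: only the `body` payload is forwarded back.
    let reply = json!({ "seq": 1, "success": true, "body": { "quickInfo": "string" } });
    assert_eq!(extract_body(Some(reply)), json!({ "quickInfo": "string" }));

    // No reply at all becomes a null response.
    assert_eq!(extract_body(None), Value::Null);
}
```

The rest of the module is plumbing: carrying request ids across the bridge, and answering with nulls when the target TypeScript server cannot be found at all.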
diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 625f239b28ed0c6e4ad2a0fa3886896f0adeb723..40deac76404ddb4378fe08cae931d0f0e3583487 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -16,7 +16,7 @@ use futures::{ use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; use language::{ Buffer, LanguageRegistry, LocalFile, - language_settings::{Formatter, LanguageSettings, SelectedFormatter}, + language_settings::{Formatter, LanguageSettings}, }; use lsp::{LanguageServer, LanguageServerId, LanguageServerName}; use node_runtime::NodeRuntime; @@ -700,14 +700,11 @@ impl PrettierStore { pub fn prettier_plugins_for_language( language_settings: &LanguageSettings, ) -> Option<&HashSet> { - match &language_settings.formatter { - SelectedFormatter::Auto => Some(&language_settings.prettier.plugins), - - SelectedFormatter::List(list) => list - .as_ref() - .contains(&Formatter::Prettier) - .then_some(&language_settings.prettier.plugins), + let formatters = language_settings.formatter.as_ref(); + if formatters.contains(&Formatter::Prettier) || formatters.contains(&Formatter::Auto) { + return Some(&language_settings.prettier.plugins); } + None } pub(super) async fn format_with_prettier( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f6108ec05680522e51469e8e79a5417a4dd7daf1..910e217a67785249b4d83b7929b32c21b079a5d7 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -15,6 +15,7 @@ pub mod project_settings; pub mod search; mod task_inventory; pub mod task_store; +pub mod telemetry_snapshot; pub mod terminals; pub mod toolchain_store; pub mod worktree_store; @@ -22,11 +23,10 @@ pub mod worktree_store; #[cfg(test)] mod project_tests; -mod direnv; mod environment; use buffer_diff::BufferDiff; use context_server_store::ContextServerStore; -pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent}; +pub use environment::ProjectEnvironmentEvent; use git::repository::get_git_committer; use git_store::{Repository, RepositoryId}; pub mod search_history; @@ -40,7 +40,7 @@ use crate::{ git_store::GitStore, lsp_store::{SymbolLocation, log_store::LogKind}, }; -pub use agent_server_store::{AgentServerStore, AgentServersUpdated}; +pub use agent_server_store::{AgentServerStore, AgentServersUpdated, ExternalAgentServerName}; pub use git_store::{ ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate, git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal}, @@ -144,9 +144,9 @@ pub use task_inventory::{ pub use buffer_store::ProjectTransaction; pub use lsp_store::{ - DiagnosticSummary, LanguageServerLogType, LanguageServerProgress, LanguageServerPromptRequest, - LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, - SERVER_PROGRESS_THROTTLE_TIMEOUT, + DiagnosticSummary, InvalidationStrategy, LanguageServerLogType, LanguageServerProgress, + LanguageServerPromptRequest, LanguageServerStatus, LanguageServerToQuery, LspStore, + LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; pub use toolchain_store::{ToolchainStore, Toolchains}; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; @@ -337,7 +337,7 @@ pub enum Event { HostReshared, Reshared, Rejoined, - RefreshInlayHints, + RefreshInlayHints(LanguageServerId), RefreshCodeLens, RevealInProjectPanel(ProjectEntryId), SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>), @@ -401,6 +401,26 @@ pub enum PrepareRenameResponse { InvalidPosition, 
} +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum InlayId { + EditPrediction(usize), + DebuggerValue(usize), + // LSP + Hint(usize), + Color(usize), +} + +impl InlayId { + pub fn id(&self) -> usize { + match self { + Self::EditPrediction(id) => *id, + Self::DebuggerValue(id) => *id, + Self::Hint(id) => *id, + Self::Color(id) => *id, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct InlayHint { pub position: language::Anchor, @@ -985,12 +1005,6 @@ impl settings::Settings for DisableAiSettings { disable_ai: content.disable_ai.unwrap().0, } } - - fn import_from_vscode( - _vscode: &settings::VsCodeSettings, - _current: &mut settings::SettingsContent, - ) { - } } impl Project { @@ -1056,7 +1070,7 @@ impl Project { let context_server_store = cx.new(|cx| ContextServerStore::new(worktree_store.clone(), weak_self, cx)); - let environment = cx.new(|_| ProjectEnvironment::new(env)); + let environment = cx.new(|cx| ProjectEnvironment::new(env, cx)); let manifest_tree = ManifestTree::new(worktree_store.clone(), cx); let toolchain_store = cx.new(|cx| { ToolchainStore::local( @@ -1064,6 +1078,7 @@ impl Project { worktree_store.clone(), environment.clone(), manifest_tree.clone(), + fs.clone(), cx, ) }); @@ -1290,7 +1305,7 @@ impl Project { cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); - let environment = cx.new(|_| ProjectEnvironment::new(None)); + let environment = cx.new(|cx| ProjectEnvironment::new(None, cx)); let lsp_store = cx.new(|cx| { LspStore::new_remote( @@ -1503,7 +1518,7 @@ impl Project { ImageStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) })?; - let environment = cx.new(|_| ProjectEnvironment::new(None))?; + let environment = cx.new(|cx| ProjectEnvironment::new(None, cx))?; let breakpoint_store = cx.new(|_| BreakpointStore::remote(remote_id, client.clone().into()))?; @@ -1566,7 +1581,7 @@ impl Project { })?; let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx))?; - let replica_id = response.payload.replica_id as ReplicaId; + let replica_id = ReplicaId::new(response.payload.replica_id as u16); let project = cx.new(|cx| { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx); @@ -1829,10 +1844,12 @@ impl Project { project } + #[inline] pub fn dap_store(&self) -> Entity { self.dap_store.clone() } + #[inline] pub fn breakpoint_store(&self) -> Entity { self.breakpoint_store.clone() } @@ -1846,50 +1863,62 @@ impl Project { Some((session, active_position.clone())) } + #[inline] pub fn lsp_store(&self) -> Entity { self.lsp_store.clone() } + #[inline] pub fn worktree_store(&self) -> Entity { self.worktree_store.clone() } + #[inline] pub fn context_server_store(&self) -> Entity { self.context_server_store.clone() } + #[inline] pub fn buffer_for_id(&self, remote_id: BufferId, cx: &App) -> Option> { self.buffer_store.read(cx).get(remote_id) } + #[inline] pub fn languages(&self) -> &Arc { &self.languages } + #[inline] pub fn client(&self) -> Arc { self.collab_client.clone() } + #[inline] pub fn remote_client(&self) -> Option> { self.remote_client.clone() } + #[inline] pub fn user_store(&self) -> Entity { self.user_store.clone() } + #[inline] pub fn node_runtime(&self) -> Option<&NodeRuntime> { self.node.as_ref() } + #[inline] pub fn opened_buffers(&self, cx: &App) -> Vec> { self.buffer_store.read(cx).buffers().collect() } + #[inline] pub fn environment(&self) -> &Entity { &self.environment } + #[inline] pub fn cli_environment(&self, cx: &App) -> Option> { 
self.environment.read(cx).get_cli_environment() } @@ -1912,24 +1941,28 @@ impl Project { cx: &mut App, ) -> Shared>>> { self.environment.update(cx, |environment, cx| { - environment.get_directory_environment_for_shell(shell, abs_path, cx) + if let Some(remote_client) = self.remote_client.clone() { + environment.get_remote_directory_environment(shell, abs_path, remote_client, cx) + } else { + environment.get_local_directory_environment(shell, abs_path, cx) + } }) } - pub fn shell_environment_errors<'a>( - &'a self, - cx: &'a App, - ) -> impl Iterator, &'a EnvironmentErrorMessage)> { - self.environment.read(cx).environment_errors() + #[inline] + pub fn peek_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a String> { + self.environment.read(cx).peek_environment_error() } - pub fn remove_environment_error(&mut self, abs_path: &Path, cx: &mut Context) { - self.environment.update(cx, |environment, cx| { - environment.remove_environment_error(abs_path, cx); + #[inline] + pub fn pop_environment_error(&mut self, cx: &mut Context) { + self.environment.update(cx, |environment, _| { + environment.pop_environment_error(); }); } #[cfg(any(test, feature = "test-support"))] + #[inline] pub fn has_open_buffer(&self, path: impl Into, cx: &App) -> bool { self.buffer_store .read(cx) @@ -1937,10 +1970,12 @@ impl Project { .is_some() } + #[inline] pub fn fs(&self) -> &Arc { &self.fs } + #[inline] pub fn remote_id(&self) -> Option { match self.client_state { ProjectClientState::Local => None, @@ -1949,6 +1984,7 @@ impl Project { } } + #[inline] pub fn supports_terminal(&self, _cx: &App) -> bool { if self.is_local() { return true; @@ -1960,39 +1996,45 @@ impl Project { false } + #[inline] pub fn remote_connection_state(&self, cx: &App) -> Option { self.remote_client .as_ref() .map(|remote| remote.read(cx).connection_state()) } + #[inline] pub fn remote_connection_options(&self, cx: &App) -> Option { self.remote_client .as_ref() .map(|remote| remote.read(cx).connection_options()) } + #[inline] pub fn replica_id(&self) -> ReplicaId { match self.client_state { ProjectClientState::Remote { replica_id, .. } => replica_id, _ => { if self.remote_client.is_some() { - 1 + ReplicaId::REMOTE_SERVER } else { - 0 + ReplicaId::LOCAL } } } } + #[inline] pub fn task_store(&self) -> &Entity { &self.task_store } + #[inline] pub fn snippets(&self) -> &Entity { &self.snippets } + #[inline] pub fn search_history(&self, kind: SearchInputKind) -> &SearchHistory { match kind { SearchInputKind::Query => &self.search_history, @@ -2001,6 +2043,7 @@ impl Project { } } + #[inline] pub fn search_history_mut(&mut self, kind: SearchInputKind) -> &mut SearchHistory { match kind { SearchInputKind::Query => &mut self.search_history, @@ -2009,14 +2052,17 @@ impl Project { } } + #[inline] pub fn collaborators(&self) -> &HashMap { &self.collaborators } + #[inline] pub fn host(&self) -> Option<&Collaborator> { self.collaborators.values().find(|c| c.is_host) } + #[inline] pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool, cx: &mut App) { self.worktree_store.update(cx, |store, _| { store.set_worktrees_reordered(worktrees_reordered); @@ -2024,6 +2070,7 @@ impl Project { } /// Collect all worktrees, including ones that don't appear in the project panel + #[inline] pub fn worktrees<'a>( &self, cx: &'a App, @@ -2032,6 +2079,7 @@ impl Project { } /// Collect all user-visible worktrees, the ones that appear in the project panel. 
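The environment-error API above moves from per-path removal to a peek/pop surface. A minimal sketch of that shape (the `EnvironmentErrors` type and its `VecDeque` backing are assumptions for illustration, not the real `ProjectEnvironment` internals):

```rust
use std::collections::VecDeque;

/// Minimal sketch of the peek/pop error surface; the real ProjectEnvironment
/// stores richer state than a plain queue of strings.
#[derive(Default)]
struct EnvironmentErrors {
    errors: VecDeque<String>,
}

impl EnvironmentErrors {
    fn push(&mut self, message: impl Into<String>) {
        self.errors.push_back(message.into());
    }

    /// Mirrors `peek_environment_error`: show the oldest error without consuming it.
    fn peek(&self) -> Option<&String> {
        self.errors.front()
    }

    /// Mirrors `pop_environment_error`: dismiss the error currently shown.
    fn pop(&mut self) -> Option<String> {
        self.errors.pop_front()
    }
}

fn main() {
    let mut errors = EnvironmentErrors::default();
    errors.push("Failed to load directory environment for /project");
    assert!(errors.peek().is_some());
    errors.pop();
    assert!(errors.peek().is_none());
}
```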
+ #[inline] pub fn visible_worktrees<'a>( &'a self, cx: &'a App, @@ -2039,16 +2087,19 @@ impl Project { self.worktree_store.read(cx).visible_worktrees(cx) } + #[inline] pub fn worktree_for_root_name(&self, root_name: &str, cx: &App) -> Option> { self.visible_worktrees(cx) .find(|tree| tree.read(cx).root_name() == root_name) } + #[inline] pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { self.visible_worktrees(cx) .map(|tree| tree.read(cx).root_name().as_unix_str()) } + #[inline] pub fn worktree_for_id(&self, id: WorktreeId, cx: &App) -> Option> { self.worktree_store.read(cx).worktree_for_id(id, cx) } @@ -2063,12 +2114,14 @@ impl Project { .worktree_for_entry(entry_id, cx) } + #[inline] pub fn worktree_id_for_entry(&self, entry_id: ProjectEntryId, cx: &App) -> Option { self.worktree_for_entry(entry_id, cx) .map(|worktree| worktree.read(cx).id()) } /// Checks if the entry is the root of a worktree. + #[inline] pub fn entry_is_worktree_root(&self, entry_id: ProjectEntryId, cx: &App) -> bool { self.worktree_for_entry(entry_id, cx) .map(|worktree| { @@ -2080,6 +2133,7 @@ impl Project { .unwrap_or(false) } + #[inline] pub fn project_path_git_status( &self, project_path: &ProjectPath, @@ -2090,6 +2144,7 @@ impl Project { .project_path_git_status(project_path, cx) } + #[inline] pub fn visibility_for_paths( &self, paths: &[PathBuf], @@ -2141,6 +2196,7 @@ impl Project { }) } + #[inline] pub fn copy_entry( &mut self, entry_id: ProjectEntryId, @@ -2219,6 +2275,7 @@ impl Project { }) } + #[inline] pub fn delete_file( &mut self, path: ProjectPath, @@ -2229,6 +2286,7 @@ impl Project { self.delete_entry(entry.id, trash, cx) } + #[inline] pub fn delete_entry( &mut self, entry_id: ProjectEntryId, @@ -2242,6 +2300,7 @@ impl Project { }) } + #[inline] pub fn expand_entry( &mut self, worktree_id: WorktreeId, @@ -2393,6 +2452,7 @@ impl Project { Ok(()) } + #[inline] pub fn unshare(&mut self, cx: &mut Context) -> Result<()> { self.unshare_internal(cx)?; cx.emit(Event::RemoteIdChanged(None)); @@ -2489,10 +2549,12 @@ impl Project { } } + #[inline] pub fn close(&mut self, cx: &mut Context) { cx.emit(Event::Closed); } + #[inline] pub fn is_disconnected(&self, cx: &App) -> bool { match &self.client_state { ProjectClientState::Remote { @@ -2506,6 +2568,7 @@ impl Project { } } + #[inline] fn remote_client_is_disconnected(&self, cx: &App) -> bool { self.remote_client .as_ref() @@ -2513,6 +2576,7 @@ impl Project { .unwrap_or(false) } + #[inline] pub fn capability(&self) -> Capability { match &self.client_state { ProjectClientState::Remote { capability, .. } => *capability, @@ -2520,10 +2584,12 @@ impl Project { } } + #[inline] pub fn is_read_only(&self, cx: &App) -> bool { self.is_disconnected(cx) || self.capability() == Capability::ReadOnly } + #[inline] pub fn is_local(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { @@ -2533,6 +2599,8 @@ impl Project { } } + /// Whether this project is a remote server (not counting collab). + #[inline] pub fn is_via_remote_server(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { @@ -2542,6 +2610,8 @@ impl Project { } } + /// Whether this project is from collab (not counting remote servers). + #[inline] pub fn is_via_collab(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. 
} => false, @@ -2549,6 +2619,17 @@ impl Project { } } + /// `!self.is_local()` + #[inline] + pub fn is_remote(&self) -> bool { + debug_assert_eq!( + !self.is_local(), + self.is_via_collab() || self.is_via_remote_server() + ); + !self.is_local() + } + + #[inline] pub fn create_buffer( &mut self, searchable: bool, @@ -2559,6 +2640,7 @@ impl Project { }) } + #[inline] pub fn create_local_buffer( &mut self, text: &str, @@ -2566,7 +2648,7 @@ impl Project { project_searchable: bool, cx: &mut Context, ) -> Entity { - if self.is_via_collab() || self.is_via_remote_server() { + if self.is_remote() { panic!("called create_local_buffer on a remote project") } self.buffer_store.update(cx, |buffer_store, cx| { @@ -2992,7 +3074,9 @@ impl Project { return; }; } - LspStoreEvent::RefreshInlayHints => cx.emit(Event::RefreshInlayHints), + LspStoreEvent::RefreshInlayHints(server_id) => { + cx.emit(Event::RefreshInlayHints(*server_id)) + } LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens), LspStoreEvent::LanguageServerPrompt(prompt) => { cx.emit(Event::LanguageServerPrompt(prompt.clone())) @@ -3912,31 +3996,6 @@ impl Project { }) } - pub fn inlay_hints( - &mut self, - buffer_handle: Entity, - range: Range, - cx: &mut Context, - ) -> Task>> { - let buffer = buffer_handle.read(cx); - let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.inlay_hints(buffer_handle, range, cx) - }) - } - - pub fn resolve_inlay_hint( - &self, - hint: InlayHint, - buffer_handle: Entity, - server_id: LanguageServerId, - cx: &mut Context, - ) -> Task> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.resolve_inlay_hint(hint, buffer_handle, server_id, cx) - }) - } - pub fn search(&mut self, query: SearchQuery, cx: &mut Context) -> Receiver { let (result_tx, result_rx) = smol::channel::unbounded(); @@ -5196,6 +5255,7 @@ impl Project { }) } + #[cfg(any(test, feature = "test-support"))] pub fn has_language_servers_for(&self, buffer: &Buffer, cx: &mut App) -> bool { self.lsp_store.update(cx, |this, cx| { this.language_servers_for_local_buffer(buffer, cx) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 1790313dcad43994359c07637ff3b8b534293970..676fac507252646a0650be87dc7a22689a1e70d0 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -331,7 +331,7 @@ pub struct InlineBlameSettings { /// after a delay once the cursor stops moving. /// /// Default: 0 - pub delay_ms: std::time::Duration, + pub delay_ms: settings::DelayMs, /// The amount of padding between the end of the source line and the start /// of the inline blame in units of columns. 
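With `delay_ms` now stored as a millisecond newtype rather than a `Duration`, the conversion happens at read time. A minimal sketch under that assumption (`DelayMs` here is a local stand-in for `settings::DelayMs`):

```rust
use std::time::Duration;

// Local stand-in for `settings::DelayMs`; the real type lives in the settings crate.
#[derive(Clone, Copy, Debug, PartialEq)]
struct DelayMs(u64);

/// Mirrors the `GitSettings::inline_blame_delay` change: zero means
/// "no delay configured", anything else becomes a real Duration.
fn inline_blame_delay(delay_ms: DelayMs) -> Option<Duration> {
    if delay_ms.0 > 0 {
        Some(Duration::from_millis(delay_ms.0))
    } else {
        None
    }
}

fn main() {
    assert_eq!(inline_blame_delay(DelayMs(0)), None);
    assert_eq!(inline_blame_delay(DelayMs(600)), Some(Duration::from_millis(600)));
}
```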
/// @@ -357,8 +357,8 @@ pub struct BlameSettings { impl GitSettings { pub fn inline_blame_delay(&self) -> Option { - if self.inline_blame.delay_ms.as_millis() > 0 { - Some(self.inline_blame.delay_ms) + if self.inline_blame.delay_ms.0 > 0 { + Some(Duration::from_millis(self.inline_blame.delay_ms.0)) } else { None } @@ -452,7 +452,7 @@ impl Settings for ProjectSettings { let inline = git.inline_blame.unwrap(); InlineBlameSettings { enabled: inline.enabled.unwrap(), - delay_ms: std::time::Duration::from_millis(inline.delay_ms.unwrap()), + delay_ms: inline.delay_ms.unwrap(), padding: inline.padding.unwrap(), min_column: inline.min_column.unwrap(), show_commit_summary: inline.show_commit_summary.unwrap(), @@ -504,11 +504,11 @@ impl Settings for ProjectSettings { include_warnings: diagnostics.include_warnings.unwrap(), lsp_pull_diagnostics: LspPullDiagnosticsSettings { enabled: lsp_pull_diagnostics.enabled.unwrap(), - debounce_ms: lsp_pull_diagnostics.debounce_ms.unwrap(), + debounce_ms: lsp_pull_diagnostics.debounce_ms.unwrap().0, }, inline: InlineDiagnosticsSettings { enabled: inline_diagnostics.enabled.unwrap(), - update_debounce_ms: inline_diagnostics.update_debounce_ms.unwrap(), + update_debounce_ms: inline_diagnostics.update_debounce_ms.unwrap().0, padding: inline_diagnostics.padding.unwrap(), min_column: inline_diagnostics.min_column.unwrap(), max_severity: inline_diagnostics.max_severity.map(Into::into), @@ -522,65 +522,6 @@ impl Settings for ProjectSettings { }, } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - // this just sets the binary name instead of a full path so it relies on path lookup - // resolving to the one you want - let npm_path = vscode.read_enum("npm.packageManager", |s| match s { - v @ ("npm" | "yarn" | "bun" | "pnpm") => Some(v.to_owned()), - _ => None, - }); - if npm_path.is_some() { - current.node.get_or_insert_default().npm_path = npm_path; - } - - if let Some(b) = vscode.read_bool("git.blame.editorDecoration.enabled") { - current - .git - .get_or_insert_default() - .inline_blame - .get_or_insert_default() - .enabled = Some(b); - } - - #[derive(Deserialize)] - struct VsCodeContextServerCommand { - command: PathBuf, - args: Option>, - env: Option>, - // note: we don't support envFile and type - } - if let Some(mcp) = vscode.read_value("mcp").and_then(|v| v.as_object()) { - current - .project - .context_servers - .extend(mcp.iter().filter_map(|(k, v)| { - Some(( - k.clone().into(), - settings::ContextServerSettingsContent::Custom { - enabled: true, - command: serde_json::from_value::( - v.clone(), - ) - .ok() - .map(|cmd| { - settings::ContextServerCommand { - path: cmd.command, - args: cmd.args.unwrap_or_default(), - env: cmd.env, - timeout: None, - } - })?, - }, - )) - })); - } - - // TODO: translate lsp settings for rust-analyzer and other popular ones to old.lsp - } } pub enum SettingsObserverMode { @@ -1215,6 +1156,7 @@ pub fn local_settings_kind_to_proto(kind: LocalSettingsKind) -> proto::LocalSett pub struct DapSettings { pub binary: DapBinary, pub args: Vec, + pub env: HashMap, } impl From for DapSettings { @@ -1224,6 +1166,7 @@ impl From for DapSettings { .binary .map_or_else(|| DapBinary::Default, |binary| DapBinary::Custom(binary)), args: content.args.unwrap_or_default(), + env: content.env.unwrap_or_default(), } } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 
14bdc18fbf3f0956267fb7452b017e6a80369e39..e3714cddf15d7623ab32403ea9cdd889c27abedc 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -94,6 +94,9 @@ async fn test_block_via_smol(cx: &mut gpui::TestAppContext) { task.await; } +// NOTE: +// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus +// we assume that they are not supported out of the box. #[cfg(not(windows))] #[gpui::test] async fn test_symlinks(cx: &mut gpui::TestAppContext) { @@ -1812,7 +1815,10 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { fake_server .start_progress(format!("{}/0", progress_token)) .await; - assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints); + assert_eq!( + events.next().await.unwrap(), + Event::RefreshInlayHints(fake_server.server.server_id()) + ); assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { @@ -1951,7 +1957,10 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC Some(worktree_id) ) ); - assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints); + assert_eq!( + events.next().await.unwrap(), + Event::RefreshInlayHints(fake_server.server.server_id()) + ); fake_server.start_progress(progress_token).await; assert_eq!( events.next().await.unwrap(), @@ -4242,6 +4251,73 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { assert_eq!(opened_buffer, buffer); } +#[gpui::test] +async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + + fs.insert_tree( + path!("/dir"), + json!({ + "data_a.txt": "data about a" + }), + ) + .await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/data_a.txt"), cx) + }) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + buffer.edit([(11..12, "b")], None, cx); + }); + + // Save buffer's contents as a new file and confirm that the buffer's now + // associated with `data_b.txt` instead of `data_a.txt`, confirming that the + // file associated with the buffer has now been updated to `data_b.txt` + project + .update(cx, |project, cx| { + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); + let new_path = ProjectPath { + worktree_id, + path: rel_path("data_b.txt").into(), + }; + + project.save_buffer_as(buffer.clone(), new_path, cx) + }) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + assert_eq!( + buffer.file().unwrap().full_path(cx), + Path::new("dir/data_b.txt") + ) + }); + + // Open the original `data_a.txt` file, confirming that its contents are + // unchanged and the resulting buffer's associated file is `data_a.txt`. 
+ let original_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/data_a.txt"), cx) + }) + .await + .unwrap(); + + original_buffer.update(cx, |buffer, cx| { + assert_eq!(buffer.text(), "data about a"); + assert_eq!( + buffer.file().unwrap().full_path(cx), + Path::new("dir/data_a.txt") + ) + }); +} + #[gpui::test(retries = 5)] async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { use worktree::WorktreeModelHandle as _; @@ -4304,7 +4380,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { let remote = cx.update(|cx| { Worktree::remote( 0, - 1, + ReplicaId::REMOTE_SERVER, metadata, project.read(cx).client().into(), project.read(cx).path_style(cx), @@ -8536,6 +8612,7 @@ async fn test_update_gitignore(cx: &mut gpui::TestAppContext) { // a directory which some program has already open. // This is a limitation of the Windows. // See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information #[gpui::test] #[cfg_attr(target_os = "windows", ignore)] async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) { @@ -8615,7 +8692,8 @@ async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) { // NOTE: This test always fails on Windows, because on Windows, unlike on Unix, // you can't rename a directory which some program has already open. This is a // limitation of the Windows. See: -// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information #[gpui::test] #[cfg_attr(target_os = "windows", ignore)] async fn test_file_status(cx: &mut gpui::TestAppContext) { @@ -8841,7 +8919,7 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) { } #[gpui::test] -#[cfg_attr(target_os = "windows", ignore)] +#[ignore] async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { init_test(cx); cx.executor().allow_parking(); @@ -8925,10 +9003,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::Updated { - full_scan: true, - new_instance: false, - }, + RepositoryEvent::StatusesChanged { full_scan: true }, RepositoryEvent::MergeHeadsChanged, ], "Initial worktree scan should produce a repo update event" @@ -8989,7 +9064,6 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { repository_updates .lock() .iter() - .filter(|update| !matches!(update, RepositoryEvent::PathsChanged)) .cloned() .collect::>(), Vec::new(), @@ -9093,17 +9167,10 @@ async fn test_odd_events_for_ignored_dirs( }); assert_eq!( - repository_updates - .lock() - .drain(..) 
- .filter(|update| !matches!(update, RepositoryEvent::PathsChanged)) - .collect::>(), + repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::Updated { - full_scan: true, - new_instance: false, - }, RepositoryEvent::MergeHeadsChanged, + RepositoryEvent::BranchChanged ], "Initial worktree scan should produce a repo update event" ); @@ -9131,7 +9198,6 @@ async fn test_odd_events_for_ignored_dirs( repository_updates .lock() .iter() - .filter(|update| !matches!(update, RepositoryEvent::PathsChanged)) .cloned() .collect::>(), Vec::new(), @@ -9648,6 +9714,7 @@ fn python_lang(fs: Arc) -> Arc { worktree_root: PathBuf, subroot_relative_path: Arc, _: Option>, + _: &dyn Fs, ) -> ToolchainList { // This lister will always return a path .venv directories within ancestors let ancestors = subroot_relative_path.ancestors().collect::>(); @@ -9672,6 +9739,7 @@ fn python_lang(fs: Arc) -> Arc { &self, _: PathBuf, _: Option>, + _: &dyn Fs, ) -> anyhow::Result { Err(anyhow::anyhow!("Not implemented")) } @@ -9684,7 +9752,7 @@ fn python_lang(fs: Arc) -> Arc { manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")), } } - async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec { + fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec { vec![] } } diff --git a/crates/project/src/telemetry_snapshot.rs b/crates/project/src/telemetry_snapshot.rs new file mode 100644 index 0000000000000000000000000000000000000000..79fe2bd8b3f21df03b4cf7a59f73df93b22f3a6c --- /dev/null +++ b/crates/project/src/telemetry_snapshot.rs @@ -0,0 +1,125 @@ +use git::repository::DiffType; +use gpui::{App, Entity, Task}; +use serde::{Deserialize, Serialize}; +use worktree::Worktree; + +use crate::{ + Project, + git_store::{GitStore, RepositoryState}, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct TelemetrySnapshot { + pub worktree_snapshots: Vec, +} + +impl TelemetrySnapshot { + pub fn new(project: &Entity, cx: &mut App) -> Task { + let git_store = project.read(cx).git_store().clone(); + let worktree_snapshots: Vec<_> = project + .read(cx) + .visible_worktrees(cx) + .map(|worktree| TelemetryWorktreeSnapshot::new(worktree, git_store.clone(), cx)) + .collect(); + + cx.spawn(async move |_| { + let worktree_snapshots = futures::future::join_all(worktree_snapshots).await; + + Self { worktree_snapshots } + }) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct TelemetryWorktreeSnapshot { + pub worktree_path: String, + pub git_state: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct GitState { + pub remote_url: Option, + pub head_sha: Option, + pub current_branch: Option, + pub diff: Option, +} + +impl TelemetryWorktreeSnapshot { + fn new( + worktree: Entity, + git_store: Entity, + cx: &App, + ) -> Task { + cx.spawn(async move |cx| { + // Get worktree path and snapshot + let worktree_info = cx.update(|app_cx| { + let worktree = worktree.read(app_cx); + let path = worktree.abs_path().to_string_lossy().into_owned(); + let snapshot = worktree.snapshot(); + (path, snapshot) + }); + + let Ok((worktree_path, _snapshot)) = worktree_info else { + return TelemetryWorktreeSnapshot { + worktree_path: String::new(), + git_state: None, + }; + }; + + let git_state = git_store + .update(cx, |git_store, cx| { + git_store + .repositories() + .values() + .find(|repo| { + repo.read(cx) + .abs_path_to_repo_path(&worktree.read(cx).abs_path()) + .is_some() + }) + .cloned() + }) + .ok() + .flatten() + 
.map(|repo| { + repo.update(cx, |repo, _| { + let current_branch = + repo.branch.as_ref().map(|branch| branch.name().to_owned()); + repo.send_job(None, |state, _| async move { + let RepositoryState::Local { backend, .. } = state else { + return GitState { + remote_url: None, + head_sha: None, + current_branch, + diff: None, + }; + }; + + let remote_url = backend.remote_url("origin"); + let head_sha = backend.head_sha().await; + let diff = backend.diff(DiffType::HeadToWorktree).await.ok(); + + GitState { + remote_url, + head_sha, + current_branch, + diff, + } + }) + }) + }); + + let git_state = match git_state { + Some(git_state) => match git_state.ok() { + Some(git_state) => git_state.await.ok(), + None => None, + }, + None => None, + }; + + TelemetryWorktreeSnapshot { + worktree_path, + git_state, + } + }) + } +} diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index dc4224a7ff6b867ecdc959b2e4be1030cfc24aba..f51331a3b073522d09e36c7a2ea96585c66a452f 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -101,6 +101,8 @@ impl Project { None => settings.shell.program(), }; + let is_windows = self.path_style(cx).is_windows(); + let project_path_contexts = self .active_entry() .and_then(|entry_id| self.path_for_entry(entry_id, cx)) @@ -118,9 +120,8 @@ impl Project { .map(|p| self.active_toolchain(p, LanguageName::new("Python"), cx)) .collect::>(); let lang_registry = self.languages.clone(); - let fs = self.fs.clone(); cx.spawn(async move |project, cx| { - let shell_kind = ShellKind::new(&shell); + let shell_kind = ShellKind::new(&shell, is_windows); let activation_script = maybe!(async { for toolchain in toolchains { let Some(toolchain) = toolchain.await else { @@ -131,11 +132,7 @@ impl Project { .await .ok(); let lister = language?.toolchain_lister(); - return Some( - lister? 
- .activation_script(&toolchain, shell_kind, fs.as_ref()) - .await, - ); + return Some(lister?.activation_script(&toolchain, shell_kind)); } None }) @@ -170,18 +167,19 @@ impl Project { match remote_client { Some(remote_client) => match activation_script.clone() { activation_script if !activation_script.is_empty() => { - let activation_script = activation_script.join("; "); + let separator = shell_kind.sequential_commands_separator(); + let activation_script = + activation_script.join(&format!("{separator} ")); let to_run = format_to_run(); - let args = - vec!["-c".to_owned(), format!("{activation_script}; {to_run}")]; + let shell = remote_client + .read(cx) + .shell() + .unwrap_or_else(get_default_system_shell); + let arg = format!("{activation_script}{separator} {to_run}"); + let args = shell_kind.args_for_shell(false, arg); + create_remote_shell( - Some(( - &remote_client - .read(cx) - .shell() - .unwrap_or_else(get_default_system_shell), - &args, - )), + Some((&shell, &args)), env, path, remote_client, @@ -329,16 +327,17 @@ impl Project { .map(|p| self.active_toolchain(p, LanguageName::new("Python"), cx)) .collect::>(); let remote_client = self.remote_client.clone(); - let shell_kind = ShellKind::new(&match &remote_client { + let shell = match &remote_client { Some(remote_client) => remote_client .read(cx) .shell() .unwrap_or_else(get_default_system_shell), None => settings.shell.program(), - }); + }; + + let shell_kind = ShellKind::new(&shell, self.path_style(cx).is_windows()); let lang_registry = self.languages.clone(); - let fs = self.fs.clone(); cx.spawn(async move |project, cx| { let activation_script = maybe!(async { for toolchain in toolchains { @@ -350,11 +349,7 @@ impl Project { .await .ok(); let lister = language?.toolchain_lister(); - return Some( - lister? 
- .activation_script(&toolchain, shell_kind, fs.as_ref()) - .await, - ); + return Some(lister?.activation_script(&toolchain, shell_kind)); } None }) @@ -476,7 +471,8 @@ impl Project { .and_then(|remote_client| remote_client.read(cx).shell()) .map(Shell::Program) .unwrap_or_else(|| settings.shell.clone()); - let builder = ShellBuilder::new(&shell).non_interactive(); + let is_windows = self.path_style(cx).is_windows(); + let builder = ShellBuilder::new(&shell, is_windows).non_interactive(); let (command, args) = builder.build(Some(command), &Vec::new()); let mut env = self @@ -552,7 +548,7 @@ fn create_remote_shell( Shell::WithArguments { program: command.program, args: command.args, - title_override: Some(format!("{} — Terminal", host).into()), + title_override: Some(format!("{} — Terminal", host)), }, command.env, )) diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 2b967ef2304af197e5a95cdeb661d746dbc7e6f1..d1c4fc629698bb70d156786837bc2540533d4867 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -4,6 +4,7 @@ use anyhow::{Context as _, Result, bail}; use async_trait::async_trait; use collections::{BTreeMap, IndexSet}; +use fs::Fs; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, }; @@ -19,6 +20,7 @@ use rpc::{ }, }; use settings::WorktreeId; +use task::Shell; use util::{ResultExt as _, rel_path::RelPath}; use crate::{ @@ -59,6 +61,7 @@ impl ToolchainStore { worktree_store: Entity, project_environment: Entity, manifest_tree: Entity, + fs: Arc, cx: &mut Context, ) -> Self { let entity = cx.new(|_| LocalToolchainStore { @@ -67,6 +70,7 @@ impl ToolchainStore { project_environment, active_toolchains: Default::default(), manifest_tree, + fs, }); let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { cx.emit(e.clone()) @@ -396,6 +400,7 @@ pub struct LocalToolchainStore { project_environment: Entity, active_toolchains: BTreeMap<(WorktreeId, LanguageName), BTreeMap, Toolchain>>, manifest_tree: Entity, + fs: Arc, } #[async_trait(?Send)] @@ -484,6 +489,7 @@ impl LocalToolchainStore { let registry = self.languages.clone(); let manifest_tree = self.manifest_tree.downgrade(); + let fs = self.fs.clone(); let environment = self.project_environment.clone(); cx.spawn(async move |this, cx| { @@ -521,7 +527,11 @@ impl LocalToolchainStore { let project_env = environment .update(cx, |environment, cx| { - environment.get_directory_environment(abs_path.as_path().into(), cx) + environment.get_local_directory_environment( + &Shell::System, + abs_path.as_path().into(), + cx, + ) }) .ok()? .await; @@ -529,7 +539,12 @@ impl LocalToolchainStore { cx.background_spawn(async move { Some(( toolchains - .list(worktree_root, relative_path.path.clone(), project_env) + .list( + worktree_root, + relative_path.path.clone(), + project_env, + fs.as_ref(), + ) .await, relative_path.path, )) @@ -563,6 +578,7 @@ impl LocalToolchainStore { ) -> Task> { let registry = self.languages.clone(); let environment = self.project_environment.clone(); + let fs = self.fs.clone(); cx.spawn(async move |_, cx| { let language = cx .background_spawn(registry.language_for_name(&language_name.0)) @@ -574,11 +590,19 @@ impl LocalToolchainStore { let project_env = environment .update(cx, |environment, cx| { - environment.get_directory_environment(path.as_path().into(), cx) + environment.get_local_directory_environment( + &Shell::System, + path.as_path().into(), + cx, + ) })? 
.await; - cx.background_spawn(async move { toolchain_lister.resolve(path, project_env).await }) - .await + cx.background_spawn(async move { + toolchain_lister + .resolve(path, project_env, fs.as_ref()) + .await + }) + .await }) } } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 8e18a8085dd311d8e5c34da783dc720570553bc5..e6da207dadbde3ebc725fbb84ed19b3b35414f87 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -5,7 +5,7 @@ use std::{ sync::{Arc, atomic::AtomicUsize}, }; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result, anyhow, bail}; use collections::{HashMap, HashSet}; use fs::{Fs, copy_recursive}; use futures::{ @@ -138,6 +138,15 @@ impl WorktreeStore { .filter(|worktree| worktree.read(cx).is_visible()) } + /// Iterates through all user-visible worktrees (directories and files that appear in the project panel) and other, invisible single files that could appear e.g. due to drag and drop. + pub fn visible_worktrees_and_single_files<'a>( + &'a self, + cx: &'a App, + ) -> impl 'a + DoubleEndedIterator> { + self.worktrees() + .filter(|worktree| worktree.read(cx).is_visible() || worktree.read(cx).is_single_file()) + } + pub fn worktree_for_id(&self, id: WorktreeId, cx: &App) -> Option> { self.worktrees() .find(|worktree| worktree.read(cx).id() == id) @@ -542,7 +551,7 @@ impl WorktreeStore { let worktree = cx.update(|cx| { Worktree::remote( REMOTE_SERVER_PROJECT_ID, - 0, + ReplicaId::REMOTE_SERVER, proto::WorktreeMetadata { id: response.worktree_id, root_name, @@ -1194,6 +1203,16 @@ impl WorktreeStore { RelPath::from_proto(&envelope.payload.new_path)?, ); let (scan_id, entry) = this.update(&mut cx, |this, cx| { + let Some((_, project_id)) = this.downstream_client else { + bail!("no downstream client") + }; + let Some(entry) = this.entry_for_id(entry_id, cx) else { + bail!("no such entry"); + }; + if entry.is_private && project_id != REMOTE_SERVER_PROJECT_ID { + bail!("entry is private") + } + let new_worktree = this .worktree_for_id(new_worktree_id, cx) .context("no such worktree")?; @@ -1217,6 +1236,15 @@ impl WorktreeStore { ) -> Result { let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); let worktree = this.update(&mut cx, |this, cx| { + let Some((_, project_id)) = this.downstream_client else { + bail!("no downstream client") + }; + let Some(entry) = this.entry_for_id(entry_id, cx) else { + bail!("no entry") + }; + if entry.is_private && project_id != REMOTE_SERVER_PROJECT_ID { + bail!("entry is private") + } this.worktree_for_entry(entry_id, cx) .context("worktree not found") })??; @@ -1237,6 +1265,18 @@ impl WorktreeStore { let worktree = this .worktree_for_entry(entry_id, cx) .context("no such worktree")?; + + let Some((_, project_id)) = this.downstream_client else { + bail!("no downstream client") + }; + let entry = worktree + .read(cx) + .entry_for_id(entry_id) + .ok_or_else(|| anyhow!("missing entry"))?; + if entry.is_private && project_id != REMOTE_SERVER_PROJECT_ID { + bail!("entry is private") + } + let scan_id = worktree.read(cx).scan_id(); anyhow::Ok(( scan_id, diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 1597b60704daab06ef881b60dea8801c5d704af6..a1238990db8617977494d151b1ab9e46a17d715f 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -45,7 +45,6 @@ workspace.workspace = true language.workspace = true zed_actions.workspace = true telemetry.workspace = true 
-workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/project_panel/benches/sorting.rs b/crates/project_panel/benches/sorting.rs index 448ec51270dcf5f960c479173d270fd02b5cf98b..73d92ccd4913a008020a1480422c020117a723ca 100644 --- a/crates/project_panel/benches/sorting.rs +++ b/crates/project_panel/benches/sorting.rs @@ -29,6 +29,7 @@ fn load_linux_repo_snapshot() -> Vec { is_always_included: false, is_external: false, is_private: false, + is_hidden: false, char_bag: Default::default(), is_fifo: false, }; diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c50b491a102ef2bc1ff65a4bafebab62e17a996f..8794b625e2b63384041264d67b7d8bf729707735 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -64,7 +64,7 @@ use workspace::{ DraggedSelection, OpenInTerminal, OpenOptions, OpenVisible, PreviewTabsSettings, SelectedEntry, SplitDirection, Workspace, dock::{DockPosition, Panel, PanelEvent}, - notifications::{DetachAndPromptErr, NotifyTaskExt}, + notifications::{DetachAndPromptErr, NotifyResultExt, NotifyTaskExt}, }; use worktree::CreatedEntry; use zed_actions::workspace::OpenWithSystem; @@ -491,17 +491,17 @@ impl ProjectPanel { let project_panel = cx.new(|cx| { let focus_handle = cx.focus_handle(); cx.on_focus(&focus_handle, window, Self::focus_in).detach(); - cx.on_focus_out(&focus_handle, window, |this, _, window, cx| { - this.focus_out(window, cx); - }) - .detach(); cx.subscribe_in( &git_store, window, |this, _, event, window, cx| match event { - GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated { .. }, _) - | GitStoreEvent::RepositoryAdded(_) + GitStoreEvent::RepositoryUpdated( + _, + RepositoryEvent::StatusesChanged { full_scan: _ }, + _, + ) + | GitStoreEvent::RepositoryAdded | GitStoreEvent::RepositoryRemoved(_) => { this.update_visible_entries(None, false, false, window, cx); cx.notify(); @@ -615,8 +615,11 @@ impl ProjectPanel { .detach(); let trash_action = [TypeId::of::()]; - let is_remote = project.read(cx).is_via_collab(); + let is_remote = project.read(cx).is_remote(); + // Make sure the trash option is never displayed anywhere on remote + // hosts since they may not support trashing. May want to dynamically + // detect this in the future. 
if is_remote { CommandPaletteFilter::update_global(cx, |filter, _cx| { filter.hide_action_types(&trash_action); @@ -643,7 +646,7 @@ impl ProjectPanel { .as_ref() .is_some_and(|state| state.processing_filename.is_none()) { - match project_panel.confirm_edit(window, cx) { + match project_panel.confirm_edit(false, window, cx) { Some(task) => { task.detach_and_notify_err(window, cx); } @@ -676,6 +679,9 @@ impl ProjectPanel { if project_panel_settings.hide_root != new_settings.hide_root { this.update_visible_entries(None, false, false, window, cx); } + if project_panel_settings.hide_hidden != new_settings.hide_hidden { + this.update_visible_entries(None, false, false, window, cx); + } if project_panel_settings.sticky_scroll && !new_settings.sticky_scroll { this.sticky_items_count = 0; } @@ -944,12 +950,6 @@ impl ProjectPanel { } } - fn focus_out(&mut self, window: &mut Window, cx: &mut Context) { - if !self.focus_handle.is_focused(window) { - self.confirm(&Confirm, window, cx); - } - } - fn deploy_context_menu( &mut self, position: Point, @@ -978,7 +978,7 @@ impl ProjectPanel { let is_foldable = auto_fold_dirs && self.is_foldable(entry, worktree); let is_unfoldable = auto_fold_dirs && self.is_unfoldable(entry, worktree); let is_read_only = project.is_read_only(cx); - let is_remote = project.is_via_collab(); + let is_remote = project.is_remote(); let is_local = project.is_local(); let settings = ProjectPanelSettings::get_global(cx); @@ -1042,13 +1042,13 @@ impl ProjectPanel { .when(!should_hide_rename, |menu| { menu.action("Rename", Box::new(Rename)) }) - .when(!is_root & !is_remote, |menu| { + .when(!is_root && !is_remote, |menu| { menu.action("Trash", Box::new(Trash { skip_prompt: false })) }) .when(!is_root, |menu| { menu.action("Delete", Box::new(Delete { skip_prompt: false })) }) - .when(!is_remote & is_root, |menu| { + .when(!is_remote && is_root, |menu| { menu.separator() .action( "Add Folder to Project…", @@ -1418,7 +1418,7 @@ impl ProjectPanel { } fn confirm(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context) { - if let Some(task) = self.confirm_edit(window, cx) { + if let Some(task) = self.confirm_edit(true, window, cx) { task.detach_and_notify_err(window, cx); } } @@ -1550,6 +1550,7 @@ impl ProjectPanel { fn confirm_edit( &mut self, + refocus: bool, window: &mut Window, cx: &mut Context, ) -> Option>> { @@ -1603,7 +1604,7 @@ impl ProjectPanel { filename.clone() }; if let Some(existing) = worktree.read(cx).entry_for_path(&new_path) { - if existing.id == entry.id { + if existing.id == entry.id && refocus { window.focus(&self.focus_handle); } return None; @@ -1614,7 +1615,9 @@ impl ProjectPanel { }); }; - window.focus(&self.focus_handle); + if refocus { + window.focus(&self.focus_handle); + } edit_state.processing_filename = Some(filename); cx.notify(); @@ -2674,12 +2677,14 @@ impl ProjectPanel { for task in paste_tasks { match task { PasteTask::Rename(task) => { - if let Some(CreatedEntry::Included(entry)) = task.await.log_err() { + if let Some(CreatedEntry::Included(entry)) = + task.await.notify_async_err(cx) + { last_succeed = Some(entry); } } PasteTask::Copy(task) => { - if let Some(Some(entry)) = task.await.log_err() { + if let Some(Some(entry)) = task.await.notify_async_err(cx) { last_succeed = Some(entry); } } @@ -2695,8 +2700,10 @@ impl ProjectPanel { }); if item_count == 1 { - // open entry if not dir, and only focus if rename is not pending - if !entry.is_dir() { + // open entry if not dir, setting is enabled, and only focus if rename is not pending + if 
!entry.is_dir() + && ProjectPanelSettings::get_global(cx).open_file_on_paste + { project_panel.open_entry( entry.id, disambiguation_range.is_none(), @@ -3172,6 +3179,7 @@ impl ProjectPanel { mtime: parent_entry.mtime, size: parent_entry.size, is_ignored: parent_entry.is_ignored, + is_hidden: parent_entry.is_hidden, is_external: false, is_private: false, is_always_included: parent_entry.is_always_included, @@ -3212,6 +3220,7 @@ impl ProjectPanel { .map(|worktree| worktree.read(cx).snapshot()) .collect(); let hide_root = settings.hide_root && visible_worktrees.len() == 1; + let hide_hidden = settings.hide_hidden; self.update_visible_entries_task = cx.spawn_in(window, async move |this, cx| { let new_state = cx .background_spawn(async move { @@ -3303,7 +3312,9 @@ impl ProjectPanel { } } auto_folded_ancestors.clear(); - if !hide_gitignore || !entry.is_ignored { + if (!hide_gitignore || !entry.is_ignored) + && (!hide_hidden || !entry.is_hidden) + { visible_worktree_entries.push(entry.to_owned()); } let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id @@ -3316,7 +3327,10 @@ impl ProjectPanel { } else { false }; - if precedes_new_entry && (!hide_gitignore || !entry.is_ignored) { + if precedes_new_entry + && (!hide_gitignore || !entry.is_ignored) + && (!hide_hidden || !entry.is_hidden) + { visible_worktree_entries.push(Self::create_new_git_entry( entry.entry, entry.git_summary, @@ -4668,12 +4682,11 @@ impl ProjectPanel { div() .id("symlink_icon") .pr_3() - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( path.to_string(), None, "Symbolic Link", - window, cx, ) }) @@ -5444,33 +5457,13 @@ impl Render for ProjectPanel { .on_action(cx.listener(Self::new_directory)) .on_action(cx.listener(Self::rename)) .on_action(cx.listener(Self::delete)) - .on_action(cx.listener(Self::trash)) .on_action(cx.listener(Self::cut)) .on_action(cx.listener(Self::copy)) .on_action(cx.listener(Self::paste)) .on_action(cx.listener(Self::duplicate)) - .on_click(cx.listener(|this, event: &gpui::ClickEvent, window, cx| { - if event.click_count() > 1 - && let Some(entry_id) = this.state.last_worktree_root_id - { - let project = this.project.read(cx); - - let worktree_id = if let Some(worktree) = - project.worktree_for_entry(entry_id, cx) - { - worktree.read(cx).id() - } else { - return; - }; - - this.state.selection = Some(SelectedEntry { - worktree_id, - entry_id, - }); - - this.new_file(&NewFile, window, cx); - } - })) + .when(!project.is_remote(), |el| { + el.on_action(cx.listener(Self::trash)) + }) }) .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) @@ -5805,7 +5798,34 @@ impl Render for ProjectPanel { ); } }), - ), + ) + .when(!project.is_read_only(cx), |el| { + el.on_click(cx.listener( + |this, event: &gpui::ClickEvent, window, cx| { + if event.click_count() > 1 + && let Some(entry_id) = + this.state.last_worktree_root_id + { + let project = this.project.read(cx); + + let worktree_id = if let Some(worktree) = + project.worktree_for_entry(entry_id, cx) + { + worktree.read(cx).id() + } else { + return; + }; + + this.state.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + + this.new_file(&NewFile, window, cx); + } + }, + )) + }), ) .size_full(), ) @@ -5846,7 +5866,6 @@ impl Render for ProjectPanel { .key_binding(KeyBinding::for_action_in( &workspace::Open, &focus_handle, - window, cx, )) .on_click(cx.listener(|this, _, window, cx| { @@ -5997,6 +6016,10 @@ impl Panel for ProjectPanel { "Project Panel" } + fn panel_key() -> 
&'static str { + PROJECT_PANEL_KEY + } + fn starts_open(&self, _: &Window, cx: &App) -> bool { if !ProjectPanelSettings::get_global(cx).starts_open { return false; diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index c8bd287c33c9ddf131369897d0897e9edf5311c3..632537fc0213f3702755144c045e58fcb737ed30 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,10 +2,7 @@ use editor::EditorSettings; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{ - DockSide, ProjectPanelEntrySpacing, Settings, SettingsContent, ShowDiagnostics, - ShowIndentGuides, -}; +use settings::{DockSide, ProjectPanelEntrySpacing, Settings, ShowDiagnostics, ShowIndentGuides}; use ui::{ px, scrollbars::{ScrollbarVisibility, ShowScrollbar}, @@ -30,7 +27,9 @@ pub struct ProjectPanelSettings { pub scrollbar: ScrollbarSettings, pub show_diagnostics: ShowDiagnostics, pub hide_root: bool, + pub hide_hidden: bool, pub drag_and_drop: bool, + pub open_file_on_paste: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -79,42 +78,9 @@ impl Settings for ProjectPanelSettings { }, show_diagnostics: project_panel.show_diagnostics.unwrap(), hide_root: project_panel.hide_root.unwrap(), + hide_hidden: project_panel.hide_hidden.unwrap(), drag_and_drop: project_panel.drag_and_drop.unwrap(), - } - } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - if let Some(hide_gitignore) = vscode.read_bool("explorer.excludeGitIgnore") { - current.project_panel.get_or_insert_default().hide_gitignore = Some(hide_gitignore); - } - if let Some(auto_reveal) = vscode.read_bool("explorer.autoReveal") { - current - .project_panel - .get_or_insert_default() - .auto_reveal_entries = Some(auto_reveal); - } - if let Some(compact_folders) = vscode.read_bool("explorer.compactFolders") { - current.project_panel.get_or_insert_default().auto_fold_dirs = Some(compact_folders); - } - - if Some(false) == vscode.read_bool("git.decorations.enabled") { - current.project_panel.get_or_insert_default().git_status = Some(false); - } - if Some(false) == vscode.read_bool("problems.decorations.enabled") { - current - .project_panel - .get_or_insert_default() - .show_diagnostics = Some(ShowDiagnostics::Off); - } - if let (Some(false), Some(false)) = ( - vscode.read_bool("explorer.decorations.badges"), - vscode.read_bool("explorer.decorations.colors"), - ) { - current.project_panel.get_or_insert_default().git_status = Some(false); - current - .project_panel - .get_or_insert_default() - .show_diagnostics = Some(ShowDiagnostics::Off); + open_file_on_paste: project_panel.open_file_on_paste.unwrap(), } } } diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index 04f52d3ab14bf280cc1a18e6d39c8f92e3cfbf3c..b6cd1da132ad5c1633001bd53fe365f24870cd7c 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -556,7 +556,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text("the-new-filename", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), @@ -616,7 +616,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { 
panel.filename_editor.update(cx, |editor, cx| { editor.set_text("another-filename.txt", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }) .await .unwrap(); @@ -657,7 +657,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { let confirm = panel.update_in(cx, |panel, window, cx| { panel.filename_editor.update(cx, |editor, cx| { - let file_name_selections = editor.selections.all::(cx); + let file_name_selections = editor.selections.all::(&editor.display_snapshot(cx)); assert_eq!( file_name_selections.len(), 1, @@ -676,7 +676,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { editor.set_text("a-different-filename.tar.gz", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); assert_eq!( visible_entries_as_strings(&panel, 0..10, cx), @@ -731,7 +731,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { panel.update_in(cx, |panel, window, cx| { panel.filename_editor.update(cx, |editor, cx| { - let file_name_selections = editor.selections.all::(cx); + let file_name_selections = editor.selections.all::(&editor.display_snapshot(cx)); assert_eq!(file_name_selections.len(), 1, "File editing should have a single selection, but got: {file_name_selections:?}"); let file_name_selection = &file_name_selections[0]; assert_eq!(file_name_selection.start, 0, "Should select the file name from the start"); @@ -765,7 +765,7 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("new-dir", window, cx)); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); panel.update_in(cx, |panel, window, cx| { panel.select_next(&Default::default(), window, cx) @@ -863,11 +863,11 @@ async fn test_editing_files(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text("", window, cx); }); - assert!(panel.confirm_edit(window, cx).is_none()); + assert!(panel.confirm_edit(true, window, cx).is_none()); panel.filename_editor.update(cx, |editor, cx| { editor.set_text(" ", window, cx); }); - assert!(panel.confirm_edit(window, cx).is_none()); + assert!(panel.confirm_edit(true, window, cx).is_none()); panel.cancel(&menu::Cancel, window, cx); panel.update_visible_entries(None, false, false, window, cx); }); @@ -986,7 +986,7 @@ async fn test_adding_directories_via_file(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text("/bdir1/dir2/the-new-filename", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); assert_eq!( @@ -1082,7 +1082,7 @@ async fn test_adding_directory_via_file(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("new_dir/", window, cx)); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); assert_eq!( @@ -1115,7 +1115,7 @@ async fn test_adding_directory_via_file(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("new dir 2/", window, cx)); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); confirm.await.unwrap(); cx.run_until_parked(); @@ -1140,7 +1140,7 @@ async fn test_adding_directory_via_file(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("new_dir_3\\", window, cx)); - panel.confirm_edit(window, 
cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); confirm.await.unwrap(); cx.run_until_parked(); @@ -1214,7 +1214,7 @@ async fn test_copy_paste(cx: &mut gpui::TestAppContext) { panel.update_in(cx, |panel, window, cx| { panel.filename_editor.update(cx, |editor, cx| { - let file_name_selections = editor.selections.all::(cx); + let file_name_selections = editor.selections.all::(&editor.display_snapshot(cx)); assert_eq!( file_name_selections.len(), 1, @@ -1232,7 +1232,7 @@ async fn test_copy_paste(cx: &mut gpui::TestAppContext) { "Should select the file name disambiguation until the extension" ); }); - assert!(panel.confirm_edit(window, cx).is_none()); + assert!(panel.confirm_edit(true, window, cx).is_none()); }); panel.update_in(cx, |panel, window, cx| { @@ -1253,7 +1253,7 @@ async fn test_copy_paste(cx: &mut gpui::TestAppContext) { ); panel.update_in(cx, |panel, window, cx| { - assert!(panel.confirm_edit(window, cx).is_none()) + assert!(panel.confirm_edit(true, window, cx).is_none()) }); } @@ -1672,7 +1672,7 @@ async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("c", window, cx)); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); assert_eq!( visible_entries_as_strings(&panel, 0..50, cx), @@ -2060,7 +2060,7 @@ async fn test_create_duplicate_items(cx: &mut gpui::TestAppContext) { .filename_editor .update(cx, |editor, cx| editor.set_text("test", window, cx)); assert!( - panel.confirm_edit(window, cx).is_none(), + panel.confirm_edit(true, window, cx).is_none(), "Should not allow to confirm on conflicting new directory name" ); }); @@ -2116,7 +2116,7 @@ async fn test_create_duplicate_items(cx: &mut gpui::TestAppContext) { .filename_editor .update(cx, |editor, cx| editor.set_text("first.rs", window, cx)); assert!( - panel.confirm_edit(window, cx).is_none(), + panel.confirm_edit(true, window, cx).is_none(), "Should not allow to confirm on conflicting new file name" ); }); @@ -2174,7 +2174,7 @@ async fn test_create_duplicate_items(cx: &mut gpui::TestAppContext) { .filename_editor .update(cx, |editor, cx| editor.set_text("second.rs", window, cx)); assert!( - panel.confirm_edit(window, cx).is_none(), + panel.confirm_edit(true, window, cx).is_none(), "Should not allow to confirm on conflicting file rename" ) }); @@ -2983,6 +2983,12 @@ async fn test_new_file_move(cx: &mut gpui::TestAppContext) { ); } +// NOTE: This test is skipped on Windows, because on Windows, unlike on Unix, +// you can't rename a directory that some program already has open. This is a +// limitation of Windows. Since Zed will have the root open, it will hold an open handle +// to it, and thus renaming it will fail on Windows.
+// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information #[gpui::test] #[cfg_attr(target_os = "windows", ignore)] async fn test_rename_root_of_worktree(cx: &mut gpui::TestAppContext) { @@ -3035,7 +3041,7 @@ async fn test_rename_root_of_worktree(cx: &mut gpui::TestAppContext) { panel .filename_editor .update(cx, |editor, cx| editor.set_text("new_root1", window, cx)); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); confirm.await.unwrap(); cx.run_until_parked(); @@ -4167,7 +4173,7 @@ async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text(excluded_file_path, window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }) .await .unwrap(); @@ -4223,7 +4229,7 @@ async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text(excluded_file_path, window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }) .await .unwrap(); @@ -4267,7 +4273,7 @@ async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text(excluded_dir_path, window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }) .await .unwrap(); @@ -5688,7 +5694,7 @@ async fn test_create_entries_without_selection(cx: &mut gpui::TestAppContext) { panel.filename_editor.update(cx, |editor, cx| { editor.set_text("hello_from_no_selections", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }) .await .unwrap(); @@ -5786,7 +5792,7 @@ async fn test_create_entries_without_selection_hide_root(cx: &mut gpui::TestAppC panel.filename_editor.update(cx, |editor, cx| { editor.set_text("new_file_at_root.txt", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); confirm.await.unwrap(); cx.run_until_parked(); @@ -5837,7 +5843,7 @@ async fn test_create_entries_without_selection_hide_root(cx: &mut gpui::TestAppC panel.filename_editor.update(cx, |editor, cx| { editor.set_text("new_dir_at_root", window, cx) }); - panel.confirm_edit(window, cx).unwrap() + panel.confirm_edit(true, window, cx).unwrap() }); confirm.await.unwrap(); cx.run_until_parked(); @@ -6678,6 +6684,142 @@ async fn test_compare_files_context_menu(cx: &mut gpui::TestAppContext) { } } +#[gpui::test] +async fn test_hide_hidden_entries(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + ".hidden-file.txt": "hidden file content", + "visible-file.txt": "visible file content", + ".hidden-parent-dir": { + "nested-dir": { + "file.txt": "file content", + } + }, + "visible-dir": { + "file-in-visible.txt": "file content", + "nested": { + ".hidden-nested-dir": { + ".double-hidden-dir": { + "deep-file-1.txt": "deep content 1", + "deep-file-2.txt": "deep content 2" + }, + "hidden-nested-file-1.txt": "hidden nested 1", + "hidden-nested-file-2.txt": "hidden nested 2" + }, + "visible-nested-file.txt": "visible nested content" + } + } + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let workspace = cx.add_window(|window, cx| 
Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + cx.update(|_, cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_hidden: false, + ..settings + }, + cx, + ); + }); + + let panel = workspace.update(cx, ProjectPanel::new).unwrap(); + cx.run_until_parked(); + + toggle_expand_dir(&panel, "root/.hidden-parent-dir", cx); + toggle_expand_dir(&panel, "root/.hidden-parent-dir/nested-dir", cx); + toggle_expand_dir(&panel, "root/visible-dir", cx); + toggle_expand_dir(&panel, "root/visible-dir/nested", cx); + toggle_expand_dir(&panel, "root/visible-dir/nested/.hidden-nested-dir", cx); + toggle_expand_dir( + &panel, + "root/visible-dir/nested/.hidden-nested-dir/.double-hidden-dir", + cx, + ); + + let expanded = [ + "v root", + " v .hidden-parent-dir", + " v nested-dir", + " file.txt", + " v visible-dir", + " v nested", + " v .hidden-nested-dir", + " v .double-hidden-dir <== selected", + " deep-file-1.txt", + " deep-file-2.txt", + " hidden-nested-file-1.txt", + " hidden-nested-file-2.txt", + " visible-nested-file.txt", + " file-in-visible.txt", + " .hidden-file.txt", + " visible-file.txt", + ]; + + assert_eq!( + visible_entries_as_strings(&panel, 0..30, cx), + &expanded, + "With hide_hidden=false, contents of hidden nested directory should be visible" + ); + + cx.update(|_, cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_hidden: true, + ..settings + }, + cx, + ); + }); + + panel.update_in(cx, |panel, window, cx| { + panel.update_visible_entries(None, false, false, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&panel, 0..30, cx), + &[ + "v root", + " v visible-dir", + " v nested", + " visible-nested-file.txt", + " file-in-visible.txt", + " visible-file.txt", + ], + "With hide_hidden=true, hidden entries and the contents of hidden directories should not be visible" + ); + + panel.update_in(cx, |panel, window, cx| { + let settings = *ProjectPanelSettings::get_global(cx); + ProjectPanelSettings::override_global( + ProjectPanelSettings { + hide_hidden: false, + ..settings + }, + cx, + ); + panel.update_visible_entries(None, false, false, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&panel, 0..30, cx), + &expanded, + "With hide_hidden=false, deeply nested hidden directories and their contents should be visible" + ); +} + fn select_path(panel: &Entity, path: &str, cx: &mut VisualTestContext) { let path = rel_path(path); panel.update_in(cx, |panel, window, cx| { diff --git a/crates/project_symbols/Cargo.toml b/crates/project_symbols/Cargo.toml index 8033751c683abe3ea132ee655b8d8d5f23b1887c..034e95de8245c59624e7a5e6be3665eb869d8563 100644 --- a/crates/project_symbols/Cargo.toml +++ b/crates/project_symbols/Cargo.toml @@ -25,7 +25,6 @@ settings.workspace = true theme.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/prompt_store/Cargo.toml b/crates/prompt_store/Cargo.toml index d7493781383a43943d0a1ed335f784d7286e8f97..1e9552f1978857f04920406eadb3a64df0c51d88 100644 --- a/crates/prompt_store/Cargo.toml +++ b/crates/prompt_store/Cargo.toml @@ -32,4 +32,3 @@ serde_json.workspace = true text.workspace = true util.workspace = true uuid.workspace = true
-workspace-hack.workspace = true diff --git a/crates/prompt_store/src/prompts.rs b/crates/prompt_store/src/prompts.rs index 8790e8039957632dcc9508839ada1f6ac026e174..e6a9144a23a7bb31c18a119fc197709aebf935f4 100644 --- a/crates/prompt_store/src/prompts.rs +++ b/crates/prompt_store/src/prompts.rs @@ -45,7 +45,8 @@ impl ProjectContext { user_rules: default_user_rules, os: std::env::consts::OS.to_string(), arch: std::env::consts::ARCH.to_string(), - shell: ShellKind::new(&get_default_system_shell_preferring_bash()).to_string(), + shell: ShellKind::new(&get_default_system_shell_preferring_bash(), cfg!(windows)) + .to_string(), } } } diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 6cae4394bdb9fa13ab31b42f7ee4031ef5449d4b..5b5b8b985cbc102cc451050403cff2e3699f612f 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -20,7 +20,6 @@ doctest = false anyhow.workspace = true prost.workspace = true serde.workspace = true -workspace-hack.workspace = true [build-dependencies] prost-build.workspace = true diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 7004b0c9a0b4aff54434fac6b1f6ecc9be773ed4..34b57d610be5703f581d363a238eb28e3533606f 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -472,3 +472,37 @@ message GetDefaultBranch { message GetDefaultBranchResponse { optional string branch = 1; } + +message GetTreeDiff { + uint64 project_id = 1; + uint64 repository_id = 2; + bool is_merge = 3; + string base = 4; + string head = 5; +} + +message GetTreeDiffResponse { + repeated TreeDiffStatus entries = 1; +} + +message TreeDiffStatus { + enum Status { + ADDED = 0; + MODIFIED = 1; + DELETED = 2; + } + + Status status = 1; + string path = 2; + optional string oid = 3; +} + +message GetBlobContent { + uint64 project_id = 1; + uint64 repository_id = 2; + string oid =3; +} + +message GetBlobContentResponse { + string content = 1; +} diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index d50c1924cdf237d603b78062b3335354a6d6127f..7e446a915febbc03f2dd5920faf12a58a5d9b639 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -465,6 +465,7 @@ message ResolveInlayHintResponse { message RefreshInlayHints { uint64 project_id = 1; + uint64 server_id = 2; } message CodeLens { @@ -781,6 +782,7 @@ message TextEdit { message LspQuery { uint64 project_id = 1; uint64 lsp_request_id = 2; + optional uint64 server_id = 15; oneof request { GetReferences get_references = 3; GetDocumentColor get_document_color = 4; @@ -793,6 +795,7 @@ message LspQuery { GetDeclaration get_declaration = 11; GetTypeDefinition get_type_definition = 12; GetImplementation get_implementation = 13; + InlayHints inlay_hints = 14; } } @@ -815,6 +818,7 @@ message LspResponse { GetTypeDefinitionResponse get_type_definition_response = 10; GetImplementationResponse get_implementation_response = 11; GetReferencesResponse get_references_response = 12; + InlayHintsResponse inlay_hints_response = 13; } uint64 server_id = 7; } diff --git a/crates/proto/proto/task.proto b/crates/proto/proto/task.proto index 8fc3a6d18e1398d8647ba3daaa419829177e55f8..1844087d623cc3eac0e5d7500a50dfb31028f304 100644 --- a/crates/proto/proto/task.proto +++ b/crates/proto/proto/task.proto @@ -48,3 +48,13 @@ message SpawnInTerminal { map env = 4; optional string cwd = 5; } + +message GetDirectoryEnvironment { + uint64 project_id = 1; + Shell shell = 2; + string directory = 3; +} + +message DirectoryEnvironment { + map environment = 1; +} diff --git 
a/crates/proto/proto/worktree.proto b/crates/proto/proto/worktree.proto index 4f5451f23440929767455f4d1ecfc345422c200b..9ab9e95438d220834351308ea83ffe9a18dec999 100644 --- a/crates/proto/proto/worktree.proto +++ b/crates/proto/proto/worktree.proto @@ -27,6 +27,7 @@ message Entry { bool is_fifo = 10; optional uint64 size = 11; optional string canonical_path = 12; + bool is_hidden = 13; } message AddWorktree { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 2c8380661a31f37687ac932771df15e15362c428..44450f83d1e6554e80fac951f5cb5e28dd830de1 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -418,7 +418,16 @@ message Envelope { GitRenameBranch git_rename_branch = 380; - RemoteStarted remote_started = 381; // current max + RemoteStarted remote_started = 381; + + GetDirectoryEnvironment get_directory_environment = 382; + DirectoryEnvironment directory_environment = 383; + + GetTreeDiff get_tree_diff = 384; + GetTreeDiffResponse get_tree_diff_response = 385; + + GetBlobContent get_blob_content = 386; + GetBlobContentResponse get_blob_content_response = 387; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 2217cabbba96271bec7306bb48e950aae04ee167..af9bf99a721b4450b746043fbb7411dfa182d1fa 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -316,9 +316,15 @@ messages!( (PullWorkspaceDiagnostics, Background), (GetDefaultBranch, Background), (GetDefaultBranchResponse, Background), + (GetTreeDiff, Background), + (GetTreeDiffResponse, Background), + (GetBlobContent, Background), + (GetBlobContentResponse, Background), (GitClone, Background), (GitCloneResponse, Background), (ToggleLspLogs, Background), + (GetDirectoryEnvironment, Background), + (DirectoryEnvironment, Background), (GetAgentServerCommand, Background), (AgentServerCommand, Background), (ExternalAgentsUpdated, Background), @@ -495,8 +501,11 @@ request_messages!( (GetDocumentDiagnostics, GetDocumentDiagnosticsResponse), (PullWorkspaceDiagnostics, Ack), (GetDefaultBranch, GetDefaultBranchResponse), + (GetBlobContent, GetBlobContentResponse), + (GetTreeDiff, GetTreeDiffResponse), (GitClone, GitCloneResponse), (ToggleLspLogs, Ack), + (GetDirectoryEnvironment, DirectoryEnvironment), (GetProcesses, GetProcessesResponse), (GetAgentServerCommand, AgentServerCommand), (RemoteStarted, Ack), @@ -514,6 +523,7 @@ lsp_messages!( (GetDeclaration, GetDeclarationResponse, true), (GetTypeDefinition, GetTypeDefinitionResponse, true), (GetImplementation, GetImplementationResponse, true), + (InlayHints, InlayHintsResponse, false), ); entity_messages!( @@ -634,6 +644,7 @@ entity_messages!( GitCheckoutFiles, SetIndexText, ToggleLspLogs, + GetDirectoryEnvironment, Push, Fetch, @@ -654,6 +665,8 @@ entity_messages!( GetDocumentDiagnostics, PullWorkspaceDiagnostics, GetDefaultBranch, + GetTreeDiff, + GetBlobContent, GitClone, GetAgentServerCommand, ExternalAgentsUpdated, @@ -843,6 +856,7 @@ impl LspQuery { Some(lsp_query::Request::GetImplementation(_)) => ("GetImplementation", false), Some(lsp_query::Request::GetReferences(_)) => ("GetReferences", false), Some(lsp_query::Request::GetDocumentColor(_)) => ("GetDocumentColor", false), + Some(lsp_query::Request::InlayHints(_)) => ("InlayHints", false), None => ("", true), } } diff --git a/crates/recent_projects/Cargo.toml b/crates/recent_projects/Cargo.toml index d81ac89e7cdacb96198313b2b8cdea86430dd0b2..51e55d94ea20e6c929c1424243b96b27c11ab1df 100644 --- 
a/crates/recent_projects/Cargo.toml +++ b/crates/recent_projects/Cargo.toml @@ -42,7 +42,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true indoc.workspace = true [target.'cfg(target_os = "windows")'.dependencies] diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b97966692f6377d9761b9b689df1471031e0a011..13013c9189749f77b8619ac19d59f96e5adb1e1d 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -311,8 +311,7 @@ impl PickerDelegate for RecentProjectsDelegate { .filter(|(_, (id, _, _))| !self.is_current_workspace(*id, cx)) .map(|(id, (_, _, paths))| { let combined_string = paths - .paths() - .iter() + .ordered_paths() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() .join(""); @@ -462,8 +461,7 @@ impl PickerDelegate for RecentProjectsDelegate { let mut path_start_offset = 0; let (match_labels, paths): (Vec<_>, Vec<_>) = paths - .paths() - .iter() + .ordered_paths() .map(|p| p.compact()) .map(|path| { let highlighted_text = @@ -473,7 +471,15 @@ impl PickerDelegate for RecentProjectsDelegate { }) .unzip(); + let prefix = match &location { + SerializedWorkspaceLocation::Remote(RemoteConnectionOptions::Wsl(wsl)) => { + Some(SharedString::from(&wsl.distro_name)) + } + _ => None, + }; + let highlighted_match = HighlightedMatchWithPaths { + prefix, match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "), paths, }; @@ -541,11 +547,7 @@ impl PickerDelegate for RecentProjectsDelegate { ) } - fn render_footer( - &self, - window: &mut Window, - cx: &mut Context>, - ) -> Option { + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { Some( h_flex() .w_full() @@ -561,7 +563,6 @@ impl PickerDelegate for RecentProjectsDelegate { from_existing_connection: false, create_new_window: false, }, - window, cx, )) .on_click(|_, window, cx| { @@ -577,7 +578,7 @@ impl PickerDelegate for RecentProjectsDelegate { ) .child( Button::new("local", "Open Local Folder") - .key_binding(KeyBinding::for_action(&workspace::Open, window, cx)) + .key_binding(KeyBinding::for_action(&workspace::Open, cx)) .on_click(|_, window, cx| { window.dispatch_action(workspace::Open.boxed_clone(), cx) }), diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index 4431c49a2d28ccfc5d799f3646cb9f46714183f1..8744bacf420b28ccb38c96dc949515e6e6ebadaf 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -1,4 +1,7 @@ -use std::{path::PathBuf, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use anyhow::{Context as _, Result}; use askpass::EncryptedPassword; @@ -12,11 +15,11 @@ use gpui::{ TextStyleRefinement, WeakEntity, }; -use language::CursorShape; +use language::{CursorShape, Point}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use release_channel::ReleaseChannel; use remote::{ - ConnectionIdentifier, RemoteClient, RemoteConnectionOptions, RemotePlatform, + ConnectionIdentifier, RemoteClient, RemoteConnection, RemoteConnectionOptions, RemotePlatform, SshConnectionOptions, }; pub use settings::SshConnection; @@ -26,6 +29,7 @@ use ui::{ ActiveTheme, Color, CommonAnimationExt, Context, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, Window, prelude::*, }; +use 
util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; pub struct SshSettings { @@ -533,34 +537,6 @@ impl RemoteClientDelegate { } } -pub fn connect_over_ssh( - unique_identifier: ConnectionIdentifier, - connection_options: SshConnectionOptions, - ui: Entity, - window: &mut Window, - cx: &mut App, -) -> Task>>> { - let window = window.window_handle(); - let known_password = connection_options - .password - .as_deref() - .and_then(|pw| EncryptedPassword::try_from(pw).ok()); - let (tx, rx) = oneshot::channel(); - ui.update(cx, |ui, _cx| ui.set_cancellation_tx(tx)); - - remote::RemoteClient::ssh( - unique_identifier, - connection_options, - rx, - Arc::new(RemoteClientDelegate { - window, - ui: ui.downgrade(), - known_password, - }), - cx, - ) -} - pub fn connect( unique_identifier: ConnectionIdentifier, connection_options: RemoteConnectionOptions, @@ -579,17 +555,17 @@ pub fn connect( let (tx, rx) = oneshot::channel(); ui.update(cx, |ui, _cx| ui.set_cancellation_tx(tx)); - remote::RemoteClient::new( - unique_identifier, - connection_options, - rx, - Arc::new(RemoteClientDelegate { - window, - ui: ui.downgrade(), - known_password, - }), - cx, - ) + let delegate = Arc::new(RemoteClientDelegate { + window, + ui: ui.downgrade(), + known_password, + }); + + cx.spawn(async move |cx| { + let connection = remote::connect(connection_options, delegate.clone(), cx).await?; + cx.update(|cx| remote::RemoteClient::new(unique_identifier, connection, rx, delegate, cx))? + .await + }) } pub async fn open_remote_project( @@ -604,6 +580,7 @@ pub async fn open_remote_project( } else { let workspace_position = cx .update(|cx| { + // todo: These paths are wrong they may have column and line information workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx) })? .await @@ -671,11 +648,16 @@ pub async fn open_remote_project( let Some(delegate) = delegate else { break }; - let did_open_project = cx + let remote_connection = + remote::connect(connection_options.clone(), delegate.clone(), cx).await?; + let (paths, paths_with_positions) = + determine_paths_with_positions(&remote_connection, paths.clone()).await; + + let opened_items = cx .update(|cx| { workspace::open_remote_project_with_new_connection( window, - connection_options.clone(), + remote_connection, cancel_rx, delegate.clone(), app_state.clone(), @@ -693,25 +675,51 @@ pub async fn open_remote_project( }) .ok(); - if let Err(e) = did_open_project { - log::error!("Failed to open project: {e:?}"); - let response = window - .update(cx, |_, window, cx| { - window.prompt( - PromptLevel::Critical, - match connection_options { - RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH", - RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", - }, - Some(&e.to_string()), - &["Retry", "Ok"], - cx, - ) - })? - .await; - - if response == Ok(0) { - continue; + match opened_items { + Err(e) => { + log::error!("Failed to open project: {e:?}"); + let response = window + .update(cx, |_, window, cx| { + window.prompt( + PromptLevel::Critical, + match connection_options { + RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH", + RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", + }, + Some(&e.to_string()), + &["Retry", "Ok"], + cx, + ) + })? 
+ .await; + if response == Ok(0) { + continue; + } + } + Ok(items) => { + for (item, path) in items.into_iter().zip(paths_with_positions) { + let Some(item) = item else { + continue; + }; + let Some(row) = path.row else { + continue; + }; + if let Some(active_editor) = item.downcast::() { + window + .update(cx, |_, window, cx| { + active_editor.update(cx, |editor, cx| { + let row = row.saturating_sub(1); + let col = path.column.unwrap_or(0).saturating_sub(1); + editor.go_to_singleton_buffer_point( + Point::new(row, col), + window, + cx, + ); + }); + }) + .ok(); + } + } } } @@ -730,3 +738,44 @@ pub async fn open_remote_project( // Already showed the error to the user Ok(()) } + +pub(crate) async fn determine_paths_with_positions( + remote_connection: &Arc, + mut paths: Vec, +) -> (Vec, Vec) { + let mut paths_with_positions = Vec::::new(); + for path in &mut paths { + if let Some(path_str) = path.to_str() { + let path_with_position = PathWithPosition::parse_str(&path_str); + if path_with_position.row.is_some() { + if !path_exists(&remote_connection, &path).await { + *path = path_with_position.path.clone(); + paths_with_positions.push(path_with_position); + continue; + } + } + } + paths_with_positions.push(PathWithPosition::from_path(path.clone())) + } + (paths, paths_with_positions) +} + +async fn path_exists(connection: &Arc, path: &Path) -> bool { + let Ok(command) = connection.build_command( + Some("test".to_string()), + &["-e".to_owned(), path.to_string_lossy().to_string()], + &Default::default(), + None, + None, + ) else { + return false; + }; + let Ok(mut child) = util::command::new_smol_command(command.program) + .args(command.args) + .envs(command.env) + .spawn() + else { + return false; + }; + child.status().await.is_ok_and(|status| status.success()) +} diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 7a144308f938c70134ba58eddef235902f9a0933..2596a3d41604ac3710b9d5302718c18b2f948b4f 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1,7 +1,8 @@ use crate::{ remote_connections::{ Connection, RemoteConnectionModal, RemoteConnectionPrompt, SshConnection, - SshConnectionHeader, SshSettings, connect, connect_over_ssh, open_remote_project, + SshConnectionHeader, SshSettings, connect, determine_paths_with_positions, + open_remote_project, }, ssh_config::parse_ssh_config_hosts, }; @@ -13,6 +14,7 @@ use gpui::{ FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, WeakEntity, Window, canvas, }; +use language::Point; use log::info; use paths::{global_ssh_config_file, user_ssh_config_file}; use picker::Picker; @@ -233,6 +235,15 @@ impl ProjectPicker { .read_with(cx, |workspace, _| workspace.app_state().clone()) .ok()?; + let remote_connection = project + .read_with(cx, |project, cx| { + project.remote_client()?.read(cx).connection() + }) + .ok()??; + + let (paths, paths_with_positions) = + determine_paths_with_positions(&remote_connection, paths).await; + cx.update(|_, cx| { let fs = app_state.fs.clone(); update_settings_file(fs, cx, { @@ -278,12 +289,38 @@ impl ProjectPicker { }) .log_err()?; - open_remote_project_with_existing_connection( + let items = open_remote_project_with_existing_connection( connection, project, paths, app_state, window, cx, ) .await .log_err(); + if let Some(items) = items { + for (item, path) in items.into_iter().zip(paths_with_positions) { + let Some(item) = item else { + continue; + }; + let Some(row) = path.row else { + 
continue; + }; + if let Some(active_editor) = item.downcast::() { + window + .update(cx, |_, window, cx| { + active_editor.update(cx, |editor, cx| { + let row = row.saturating_sub(1); + let col = path.column.unwrap_or(0).saturating_sub(1); + editor.go_to_singleton_buffer_point( + Point::new(row, col), + window, + cx, + ); + }); + }) + .ok(); + } + } + } + this.update(cx, |_, cx| { cx.emit(DismissEvent); }) @@ -671,9 +708,9 @@ impl RemoteServerProjects { ) }); - let connection = connect_over_ssh( + let connection = connect( ConnectionIdentifier::setup(), - connection_options.clone(), + RemoteConnectionOptions::Ssh(connection_options.clone()), ssh_prompt.clone(), window, cx, @@ -1281,31 +1318,25 @@ impl RemoteServerProjects { let secondary_confirm = e.modifiers().platform; callback(this, secondary_confirm, window, cx) })) - .when( - is_from_zed && matches!(server_ix, ServerIndex::Ssh(_)), - |server_list_item| { - let ServerIndex::Ssh(server_ix) = server_ix else { - unreachable!() - }; - server_list_item.end_hover_slot::(Some( - div() - .mr_2() - .child({ - let project = project.clone(); - // Right-margin to offset it from the Scrollbar - IconButton::new("remove-remote-project", IconName::Trash) - .icon_size(IconSize::Small) - .shape(IconButtonShape::Square) - .size(ButtonSize::Large) - .tooltip(Tooltip::text("Delete Remote Project")) - .on_click(cx.listener(move |this, _, _, cx| { - this.delete_ssh_project(server_ix, &project, cx) - })) - }) - .into_any_element(), - )) - }, - ), + .when(is_from_zed, |server_list_item| { + server_list_item.end_hover_slot::(Some( + div() + .mr_2() + .child({ + let project = project.clone(); + // Right-margin to offset it from the Scrollbar + IconButton::new("remove-remote-project", IconName::Trash) + .icon_size(IconSize::Small) + .shape(IconButtonShape::Square) + .size(ButtonSize::Large) + .tooltip(Tooltip::text("Delete Remote Project")) + .on_click(cx.listener(move |this, _, _, cx| { + this.delete_remote_project(server_ix, &project, cx) + })) + }) + .into_any_element(), + )) + }), ) } @@ -1332,6 +1363,22 @@ impl RemoteServerProjects { }); } + fn delete_remote_project( + &mut self, + server: ServerIndex, + project: &SshProject, + cx: &mut Context, + ) { + match server { + ServerIndex::Ssh(server) => { + self.delete_ssh_project(server, project, cx); + } + ServerIndex::Wsl(server) => { + self.delete_wsl_project(server, project, cx); + } + } + } + fn delete_ssh_project( &mut self, server: SshServerIndex, @@ -1350,6 +1397,24 @@ impl RemoteServerProjects { }); } + fn delete_wsl_project( + &mut self, + server: WslServerIndex, + project: &SshProject, + cx: &mut Context, + ) { + let project = project.clone(); + self.update_settings_file(cx, move |setting, _| { + if let Some(server) = setting + .wsl_connections + .as_mut() + .and_then(|connections| connections.get_mut(server.0)) + { + server.projects.remove(&project); + } + }); + } + #[cfg(target_os = "windows")] fn add_wsl_distro( &mut self, @@ -1357,14 +1422,21 @@ impl RemoteServerProjects { cx: &mut Context, ) { self.update_settings_file(cx, move |setting, _| { - setting - .wsl_connections - .get_or_insert(Default::default()) - .push(settings::WslConnection { - distro_name: SharedString::from(connection_options.distro_name), - user: connection_options.user, + let connections = setting.wsl_connections.get_or_insert(Default::default()); + + let distro_name = SharedString::from(connection_options.distro_name); + let user = connection_options.user; + + if !connections + .iter() + .any(|conn| conn.distro_name == 
distro_name && conn.user == user) + { + connections.push(settings::WslConnection { + distro_name, + user, projects: BTreeSet::new(), }) + } }); } diff --git a/crates/refineable/Cargo.toml b/crates/refineable/Cargo.toml index 76ed82548d1bfc8cda3976a09f5f0e424b1857b0..4c4e02851b7fac3aa3b60a474fc9ab05504a6319 100644 --- a/crates/refineable/Cargo.toml +++ b/crates/refineable/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-refineable" +name = "refineable" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A macro for creating 'refinement' types that can be used to partially initialize or mutate a complex struct" @@ -15,4 +15,3 @@ doctest = false [dependencies] derive_refineable.workspace = true -workspace-hack.workspace = true diff --git a/crates/refineable/derive_refineable/Cargo.toml b/crates/refineable/derive_refineable/Cargo.toml index 42b9ef9ca69deef406c3f64229a9f392d064d33c..75e9ef1f8f2de9919127ecb41b66f64bbbc70ea9 100644 --- a/crates/refineable/derive_refineable/Cargo.toml +++ b/crates/refineable/derive_refineable/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-derive-refineable" +name = "derive_refineable" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A derive macro for creating refinement types in Rust" @@ -19,4 +19,3 @@ doctest = false proc-macro2.workspace = true quote.workspace = true syn.workspace = true -workspace-hack.workspace = true diff --git a/crates/release_channel/Cargo.toml b/crates/release_channel/Cargo.toml index 0d64aff691586de018baa363f3eb41f8481a4ab8..53ae53504579e54eb7432edeb54cfc114ef8f17e 100644 --- a/crates/release_channel/Cargo.toml +++ b/crates/release_channel/Cargo.toml @@ -10,4 +10,3 @@ workspace = true [dependencies] gpui.workspace = true -workspace-hack.workspace = true diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 0b2bf4e2fccef3ed1ccd175eda8bde13b179f696..d1a91af9a5decc88b4c70c69001ba6dad18e4b8b 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -34,14 +34,12 @@ rpc = { workspace = true, features = ["gpui"] } serde.workspace = true serde_json.workspace = true settings.workspace = true -shlex.workspace = true smol.workspace = true tempfile.workspace = true thiserror.workspace = true urlencoding.workspace = true util.workspace = true which.workspace = true -workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index 74d45b1a696ff1a02a9f2b4d9afc3844f82196cd..62fe40f7649b9a1f8e4697a5c6b4c7d1690715e4 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -6,7 +6,7 @@ mod transport; pub use remote_client::{ ConnectionIdentifier, ConnectionState, RemoteClient, RemoteClientDelegate, RemoteClientEvent, - RemoteConnectionOptions, RemotePlatform, + RemoteConnection, RemoteConnectionOptions, RemotePlatform, connect, }; pub use transport::ssh::{SshConnectionOptions, SshPortForwardOption}; pub use transport::wsl::WslConnectionOptions; diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index e2f51c8e2ba59d02a4d6ac8e4bdbea2e443a4590..e9eafa25b0467f29d1dd12816aa17d65b94bf1d4 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -93,7 +93,7 @@ const MAX_RECONNECT_ATTEMPTS: usize = 3; enum State { Connecting, Connected { - ssh_connection: Arc, + remote_connection: Arc, delegate: Arc, multiplex_task: Task>, @@ -137,7 +137,10 @@ impl 
fmt::Display for State { impl State { fn remote_connection(&self) -> Option> { match self { - Self::Connected { ssh_connection, .. } => Some(ssh_connection.clone()), + Self::Connected { + remote_connection: ssh_connection, + .. + } => Some(ssh_connection.clone()), Self::HeartbeatMissed { ssh_connection, .. } => Some(ssh_connection.clone()), Self::ReconnectFailed { ssh_connection, .. } => Some(ssh_connection.clone()), _ => None, @@ -181,7 +184,7 @@ impl State { heartbeat_task, .. } => Self::Connected { - ssh_connection, + remote_connection: ssh_connection, delegate, multiplex_task, heartbeat_task, @@ -193,7 +196,7 @@ impl State { fn heartbeat_missed(self) -> Self { match self { Self::Connected { - ssh_connection, + remote_connection: ssh_connection, delegate, multiplex_task, heartbeat_task, @@ -260,8 +263,8 @@ pub enum RemoteClientEvent { impl EventEmitter for RemoteClient {} -// Identifies the socket on the remote server so that reconnects -// can re-join the same project. +/// Identifies the socket on the remote server so that reconnects +/// can re-join the same project. pub enum ConnectionIdentifier { Setup(u64), Workspace(i64), @@ -294,26 +297,24 @@ impl ConnectionIdentifier { } } -impl RemoteClient { - pub fn ssh( - unique_identifier: ConnectionIdentifier, - connection_options: SshConnectionOptions, - cancellation: oneshot::Receiver<()>, - delegate: Arc, - cx: &mut App, - ) -> Task>>> { - Self::new( - unique_identifier, - RemoteConnectionOptions::Ssh(connection_options), - cancellation, - delegate, - cx, - ) - } +pub async fn connect( + connection_options: RemoteConnectionOptions, + delegate: Arc, + cx: &mut AsyncApp, +) -> Result> { + cx.update(|cx| { + cx.update_default_global(|pool: &mut ConnectionPool, cx| { + pool.connect(connection_options.clone(), delegate.clone(), cx) + }) + })? + .await + .map_err(|e| e.cloned()) +} +impl RemoteClient { pub fn new( unique_identifier: ConnectionIdentifier, - connection_options: RemoteConnectionOptions, + remote_connection: Arc, cancellation: oneshot::Receiver<()>, delegate: Arc, cx: &mut App, @@ -328,25 +329,16 @@ impl RemoteClient { let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "client"))?; - let ssh_connection = cx - .update(|cx| { - cx.update_default_global(|pool: &mut ConnectionPool, cx| { - pool.connect(connection_options.clone(), &delegate, cx) - }) - })? 
- .await - .map_err(|e| e.cloned())?; - - let path_style = ssh_connection.path_style(); + let path_style = remote_connection.path_style(); let this = cx.new(|_| Self { client: client.clone(), unique_identifier: unique_identifier.clone(), - connection_options, + connection_options: remote_connection.connection_options(), path_style, state: Some(State::Connecting), })?; - let io_task = ssh_connection.start_proxy( + let io_task = remote_connection.start_proxy( unique_identifier, false, incoming_tx, @@ -402,7 +394,7 @@ impl RemoteClient { this.update(cx, |this, _| { this.state = Some(State::Connected { - ssh_connection, + remote_connection, delegate, multiplex_task, heartbeat_task, @@ -441,7 +433,7 @@ impl RemoteClient { let State::Connected { multiplex_task, heartbeat_task, - ssh_connection, + remote_connection: ssh_connection, delegate, } = state else { @@ -488,7 +480,7 @@ impl RemoteClient { let state = self.state.take().unwrap(); let (attempts, remote_connection, delegate) = match state { State::Connected { - ssh_connection, + remote_connection: ssh_connection, delegate, multiplex_task, heartbeat_task, @@ -561,7 +553,7 @@ impl RemoteClient { let (ssh_connection, io_task) = match async { let ssh_connection = cx .update_global(|pool: &mut ConnectionPool, cx| { - pool.connect(connection_options, &delegate, cx) + pool.connect(connection_options, delegate.clone(), cx) })? .await .map_err(|error| error.cloned())?; @@ -593,7 +585,7 @@ impl RemoteClient { }; State::Connected { - ssh_connection, + remote_connection: ssh_connection, delegate, multiplex_task, heartbeat_task: Self::heartbeat(this.clone(), connection_activity_rx, cx), @@ -836,16 +828,14 @@ impl RemoteClient { connection.build_command(program, args, env, working_dir, port_forward) } - pub fn build_forward_port_command( + pub fn build_forward_ports_command( &self, - local_port: u16, - host: String, - remote_port: u16, + forwards: Vec<(u16, String, u16)>, ) -> Result { let Some(connection) = self.remote_connection() else { return Err(anyhow!("no ssh connection")); }; - connection.build_forward_port_command(local_port, host, remote_port) + connection.build_forward_ports_command(forwards) } pub fn upload_directory( @@ -868,6 +858,17 @@ impl RemoteClient { self.connection_options.clone() } + pub fn connection(&self) -> Option> { + if let State::Connected { + remote_connection, .. + } = self.state.as_ref()? 
+ { + Some(remote_connection.clone()) + } else { + None + } + } + pub fn connection_state(&self) -> ConnectionState { self.state .as_ref() @@ -949,11 +950,15 @@ impl RemoteClient { client_cx: &mut gpui::TestAppContext, ) -> Entity { let (_tx, rx) = oneshot::channel(); + let mut cx = client_cx.to_async(); + let connection = connect(opts, Arc::new(fake::Delegate), &mut cx) + .await + .unwrap(); client_cx .update(|cx| { Self::new( ConnectionIdentifier::setup(), - opts, + connection, rx, Arc::new(fake::Delegate), cx, @@ -987,7 +992,7 @@ impl ConnectionPool { pub fn connect( &mut self, opts: RemoteConnectionOptions, - delegate: &Arc, + delegate: Arc, cx: &mut App, ) -> Shared, Arc>>> { let connection = self.connections.get(&opts); @@ -1086,7 +1091,7 @@ impl From for RemoteConnectionOptions { } #[async_trait(?Send)] -pub(crate) trait RemoteConnection: Send + Sync { +pub trait RemoteConnection: Send + Sync { fn start_proxy( &self, unique_identifier: String, @@ -1116,11 +1121,9 @@ pub(crate) trait RemoteConnection: Send + Sync { working_dir: Option, port_forward: Option<(u16, String, u16)>, ) -> Result; - fn build_forward_port_command( + fn build_forward_ports_command( &self, - local_port: u16, - remote: String, - remote_port: u16, + forwards: Vec<(u16, String, u16)>, ) -> Result; fn connection_options(&self) -> RemoteConnectionOptions; fn path_style(&self) -> PathStyle; @@ -1551,19 +1554,17 @@ mod fake { }) } - fn build_forward_port_command( + fn build_forward_ports_command( &self, - local_port: u16, - host: String, - remote_port: u16, + forwards: Vec<(u16, String, u16)>, ) -> anyhow::Result { Ok(CommandTemplate { program: "ssh".into(), - args: vec![ - "-N".into(), - "-L".into(), - format!("{local_port}:{host}:{remote_port}"), - ], + args: std::iter::once("-N".to_owned()) + .chain(forwards.into_iter().map(|(local_port, host, remote_port)| { + format!("{local_port}:{host}:{remote_port}") + })) + .collect(), env: Default::default(), }) } diff --git a/crates/remote/src/transport.rs b/crates/remote/src/transport.rs index 62144d565348b65a5ef242124277a8325e77d1a7..6f76977ff9fdeaa1bbc0b7cb5008d7b0cb292d69 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -121,14 +121,22 @@ async fn build_remote_server_from_source( delegate: &dyn crate::RemoteClientDelegate, cx: &mut AsyncApp, ) -> Result> { + use smol::process::{Command, Stdio}; + use std::env::VarError; use std::path::Path; - let Some(build_remote_server) = std::env::var("ZED_BUILD_REMOTE_SERVER").ok() else { - return Ok(None); - }; + // By default, we make building remote server from source opt-out and we do not force artifact compression + // for quicker builds. 
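// Editorial sketch (not part of the change): a minimal illustration of how the code below
// interprets ZED_BUILD_REMOTE_SERVER, assuming the same opt-out semantics; the
// `remote_server_build_requested` helper is hypothetical and exists only for clarity.
fn remote_server_build_requested(var: Option<&str>) -> bool {
    // Unset behaves like "nocompress" (build from source, skip artifact compression);
    // only the explicit "off" spellings disable building from source.
    !matches!(var.unwrap_or("nocompress"), "false" | "no" | "off" | "0")
}
// e.g. remote_server_build_requested(None) == true,
//      remote_server_build_requested(Some("0")) == false.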
+ let build_remote_server = + std::env::var("ZED_BUILD_REMOTE_SERVER").unwrap_or("nocompress".into()); - use smol::process::{Command, Stdio}; - use std::env::VarError; + if build_remote_server == "false" + || build_remote_server == "no" + || build_remote_server == "off" + || build_remote_server == "0" + { + return Ok(None); + } async fn run_cmd(command: &mut Command) -> Result<()> { let output = command @@ -178,6 +186,7 @@ async fn build_remote_server_from_source( log::info!("building remote server binary from source"); run_cmd( Command::new("cargo") + .current_dir(concat!(env!("CARGO_MANIFEST_DIR"), "/../..")) .args([ "build", "--package", @@ -192,50 +201,6 @@ async fn build_remote_server_from_source( .env("RUSTFLAGS", &rust_flags), ) .await?; - } else if build_remote_server.contains("cross") { - use util::paths::SanitizedPath; - - delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx); - log::info!("installing cross"); - run_cmd(Command::new("cargo").args([ - "install", - "cross", - "--git", - "https://github.com/cross-rs/cross", - ])) - .await?; - - delegate.set_status( - Some(&format!( - "Building remote server binary from source for {} with Docker", - &triple - )), - cx, - ); - log::info!("building remote server binary from source for {}", &triple); - - let src = SanitizedPath::new(&smol::fs::canonicalize("target").await?).to_string(); - - run_cmd( - Command::new("cross") - .args([ - "build", - "--package", - "remote_server", - "--features", - "debug-embed", - "--target-dir", - "target/remote_server", - "--target", - &triple, - ]) - .env( - "CROSS_CONTAINER_OPTS", - format!("--mount type=bind,src={src},dst=/app/target"), - ) - .env("RUSTFLAGS", &rust_flags), - ) - .await?; } else { let which = cx .background_spawn(async move { which::which("zig") }) @@ -245,13 +210,13 @@ async fn build_remote_server_from_source( #[cfg(not(target_os = "windows"))] { anyhow::bail!( - "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" + "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup)" ) } #[cfg(target_os = "windows")] { anyhow::bail!( - "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" + "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup)" ) } } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 909ff93169a8a93cea1474348008981a4fdaa36b..9099caea67d280e37575ebe478ff2b6006c4777b 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -29,11 +29,12 @@ use tempfile::TempDir; use util::{ paths::{PathStyle, RemotePathBuf}, rel_path::RelPath, + shell::ShellKind, }; pub(crate) struct SshRemoteConnection { socket: SshSocket, - master_process: Mutex>, + master_process: Mutex>, remote_binary_path: Option>, ssh_platform: RemotePlatform, ssh_path_style: PathStyle, @@ -79,15 +80,127 @@ struct SshSocket { _proxy: askpass::PasswordProxy, } -macro_rules! shell_script { - ($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) 
=> {{ - format!( - $fmt, - $( - $name = shlex::try_quote($arg).unwrap() - ),+ - ) - }}; +struct MasterProcess { + process: Child, +} + +#[cfg(not(target_os = "windows"))] +impl MasterProcess { + pub fn new( + askpass_script_path: &std::ffi::OsStr, + additional_args: Vec, + socket_path: &std::path::Path, + url: &str, + ) -> Result { + let args = [ + "-N", + "-o", + "ControlPersist=no", + "-o", + "ControlMaster=yes", + "-o", + ]; + + let mut master_process = util::command::new_smol_command("ssh"); + master_process + .kill_on_drop(true) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .env("SSH_ASKPASS_REQUIRE", "force") + .env("SSH_ASKPASS", askpass_script_path) + .args(additional_args) + .args(args); + + master_process.arg(format!("ControlPath={}", socket_path.display())); + + let process = master_process.arg(&url).spawn()?; + + Ok(MasterProcess { process }) + } + + pub async fn wait_connected(&mut self) -> Result<()> { + let Some(mut stdout) = self.process.stdout.take() else { + anyhow::bail!("ssh process stdout capture failed"); + }; + + let mut output = Vec::new(); + stdout.read_to_end(&mut output).await?; + Ok(()) + } +} + +#[cfg(target_os = "windows")] +impl MasterProcess { + const CONNECTION_ESTABLISHED_MAGIC: &str = "ZED_SSH_CONNECTION_ESTABLISHED"; + + pub fn new( + askpass_script_path: &std::ffi::OsStr, + additional_args: Vec, + url: &str, + ) -> Result { + // On Windows, `ControlMaster` and `ControlPath` are not supported: + // https://github.com/PowerShell/Win32-OpenSSH/issues/405 + // https://github.com/PowerShell/Win32-OpenSSH/wiki/Project-Scope + // + // Using an ugly workaround to detect connection establishment + // -N doesn't work with JumpHosts as windows openssh never closes stdin in that case + let args = [ + "-t", + &format!("echo '{}'; exec $0", Self::CONNECTION_ESTABLISHED_MAGIC), + ]; + + let mut master_process = util::command::new_smol_command("ssh"); + master_process + .kill_on_drop(true) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .env("SSH_ASKPASS_REQUIRE", "force") + .env("SSH_ASKPASS", askpass_script_path) + .args(additional_args) + .arg(url) + .args(args); + + let process = master_process.spawn()?; + + Ok(MasterProcess { process }) + } + + pub async fn wait_connected(&mut self) -> Result<()> { + use smol::io::AsyncBufReadExt; + + let Some(stdout) = self.process.stdout.take() else { + anyhow::bail!("ssh process stdout capture failed"); + }; + + let mut reader = smol::io::BufReader::new(stdout); + + let mut line = String::new(); + + loop { + let n = reader.read_line(&mut line).await?; + if n == 0 { + anyhow::bail!("ssh process exited before connection established"); + } + + if line.contains(Self::CONNECTION_ESTABLISHED_MAGIC) { + return Ok(()); + } + } + } +} + +impl AsRef for MasterProcess { + fn as_ref(&self) -> &Child { + &self.process + } +} + +impl AsMut for MasterProcess { + fn as_mut(&mut self) -> &mut Child { + &mut self.process + } } #[async_trait(?Send)] @@ -96,8 +209,8 @@ impl RemoteConnection for SshRemoteConnection { let Some(mut process) = self.master_process.lock().take() else { return Ok(()); }; - process.kill().ok(); - process.status().await?; + process.as_mut().kill().ok(); + process.as_mut().status().await?; Ok(()) } @@ -145,19 +258,20 @@ impl RemoteConnection for SshRemoteConnection { ) } - fn build_forward_port_command( + fn build_forward_ports_command( &self, - local_port: u16, - host: String, - remote_port: u16, + forwards: Vec<(u16, String, u16)>, ) -> Result { + let Self { 
socket, .. } = self; + let mut args = socket.ssh_args(); + args.push("-N".into()); + for (local_port, host, remote_port) in forwards { + args.push("-L".into()); + args.push(format!("{local_port}:{host}:{remote_port}")); + } Ok(CommandTemplate { program: "ssh".into(), - args: vec![ - "-N".into(), - "-L".into(), - format!("{local_port}:{host}:{remote_port}"), - ], + args, env: Default::default(), }) } @@ -168,35 +282,44 @@ impl RemoteConnection for SshRemoteConnection { dest_path: RemotePathBuf, cx: &App, ) -> Task> { - let mut command = util::command::new_smol_command("scp"); - let output = self - .socket - .ssh_options(&mut command) - .args( - self.socket - .connection_options - .port - .map(|port| vec!["-P".to_string(), port.to_string()]) - .unwrap_or_default(), - ) - .arg("-C") - .arg("-r") - .arg(&src_path) - .arg(format!( - "{}:{}", - self.socket.connection_options.scp_url(), - dest_path - )) - .output(); + let dest_path_str = dest_path.to_string(); + let src_path_display = src_path.display().to_string(); + + let mut sftp_command = self.build_sftp_command(); + let mut scp_command = + self.build_scp_command(&src_path, &dest_path_str, Some(&["-C", "-r"])); cx.background_spawn(async move { - let output = output.await?; + if Self::is_sftp_available().await { + log::debug!("using SFTP for directory upload"); + let mut child = sftp_command.spawn()?; + if let Some(mut stdin) = child.stdin.take() { + use futures::AsyncWriteExt; + let sftp_batch = format!("put -r {} {}\n", src_path.display(), dest_path_str); + stdin.write_all(sftp_batch.as_bytes()).await?; + drop(stdin); + } + + let output = child.output().await?; + anyhow::ensure!( + output.status.success(), + "failed to upload directory via SFTP {} -> {}: {}", + src_path_display, + dest_path_str, + String::from_utf8_lossy(&output.stderr) + ); + + return Ok(()); + } + + log::debug!("using SCP for directory upload"); + let output = scp_command.output().await?; anyhow::ensure!( output.status.success(), - "failed to upload directory {} -> {}: {}", - src_path.display(), - dest_path.to_string(), + "failed to upload directory via SCP {} -> {}: {}", + src_path_display, + dest_path_str, String::from_utf8_lossy(&output.stderr) ); @@ -270,8 +393,6 @@ impl SshRemoteConnection { ) -> Result { use askpass::AskPassResult; - delegate.set_status(Some("Connecting"), cx); - let url = connection_options.ssh_url(); let temp_dir = tempfile::Builder::new() @@ -285,51 +406,33 @@ impl SshRemoteConnection { let mut askpass = askpass::AskPassSession::new(cx.background_executor(), askpass_delegate).await?; + delegate.set_status(Some("Connecting"), cx); + // Start the master SSH process, which does not do anything except for establish // the connection and keep it open, allowing other ssh commands to reuse it // via a control socket. 
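// Editorial sketch (not part of the change): the OpenSSH multiplexing flags the master
// process relies on, shown as a standalone hypothetical helper for clarity; the actual
// argument assembly lives in `MasterProcess::new` above.
fn control_master_args(socket_path: &std::path::Path, url: &str) -> Vec<String> {
    vec![
        "-N".into(), // no remote command, just hold the authenticated connection open
        "-o".into(),
        "ControlMaster=yes".into(), // this process owns the control socket
        "-o".into(),
        "ControlPersist=no".into(), // the socket goes away when this process exits
        "-o".into(),
        format!("ControlPath={}", socket_path.display()),
        url.into(),
    ]
}
// Later ssh/scp/sftp invocations pass the same ControlPath so they reuse this
// authenticated connection instead of prompting for credentials again.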
#[cfg(not(target_os = "windows"))] let socket_path = temp_dir.path().join("ssh.sock"); - let mut master_process = { - #[cfg(not(target_os = "windows"))] - let args = [ - "-N", - "-o", - "ControlPersist=no", - "-o", - "ControlMaster=yes", - "-o", - ]; - // On Windows, `ControlMaster` and `ControlPath` are not supported: - // https://github.com/PowerShell/Win32-OpenSSH/issues/405 - // https://github.com/PowerShell/Win32-OpenSSH/wiki/Project-Scope - #[cfg(target_os = "windows")] - let args = ["-N"]; - let mut master_process = util::command::new_smol_command("ssh"); - master_process - .kill_on_drop(true) - .stdin(Stdio::null()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .env("SSH_ASKPASS_REQUIRE", "force") - .env("SSH_ASKPASS", askpass.script_path()) - .args(connection_options.additional_args()) - .args(args); - #[cfg(not(target_os = "windows"))] - master_process.arg(format!("ControlPath={}", socket_path.display())); - master_process.arg(&url).spawn()? - }; - // Wait for this ssh process to close its stdout, indicating that authentication - // has completed. - let mut stdout = master_process.stdout.take().unwrap(); - let mut output = Vec::new(); + #[cfg(target_os = "windows")] + let mut master_process = MasterProcess::new( + askpass.script_path().as_ref(), + connection_options.additional_args(), + &url, + )?; + #[cfg(not(target_os = "windows"))] + let mut master_process = MasterProcess::new( + askpass.script_path().as_ref(), + connection_options.additional_args(), + &socket_path, + &url, + )?; let result = select_biased! { result = askpass.run().fuse() => { match result { AskPassResult::CancelledByUser => { - master_process.kill().ok(); + master_process.as_mut().kill().ok(); anyhow::bail!("SSH connection canceled") } AskPassResult::Timedout => { @@ -337,7 +440,7 @@ impl SshRemoteConnection { } } } - _ = stdout.read_to_end(&mut output).fuse() => { + _ = master_process.wait_connected().fuse() => { anyhow::Ok(()) } }; @@ -346,9 +449,10 @@ impl SshRemoteConnection { return Err(e.context("Failed to connect to host")); } - if master_process.try_status()?.is_some() { + if master_process.as_mut().try_status()?.is_some() { + let mut output = Vec::new(); output.clear(); - let mut stderr = master_process.stderr.take().unwrap(); + let mut stderr = master_process.as_mut().stderr.take().unwrap(); stderr.read_to_end(&mut output).await?; let error_message = format!( @@ -372,12 +476,12 @@ impl SshRemoteConnection { .await?; drop(askpass); - let ssh_platform = socket.platform().await?; + let ssh_shell = socket.shell().await; + let ssh_platform = socket.platform(ShellKind::new(&ssh_shell, false)).await?; let ssh_path_style = match ssh_platform.os { "windows" => PathStyle::Windows, _ => PathStyle::Posix, }; - let ssh_shell = socket.shell().await; let ssh_default_system_shell = String::from("/bin/sh"); let mut this = Self { @@ -623,54 +727,112 @@ impl SshRemoteConnection { delegate.set_status(Some("Extracting remote development server"), cx); let server_mode = 0o755; + let shell_kind = ShellKind::Posix; let orig_tmp_path = tmp_path.display(self.path_style()); + let server_mode = format!("{:o}", server_mode); + let server_mode = shell_kind + .try_quote(&server_mode) + .context("shell quoting")?; + let dst_path = dst_path.display(self.path_style()); + let dst_path = shell_kind.try_quote(&dst_path).context("shell quoting")?; let script = if let Some(tmp_path) = orig_tmp_path.strip_suffix(".gz") { - shell_script!( + format!( "gunzip -f {orig_tmp_path} && chmod {server_mode} {tmp_path} && mv {tmp_path} 
{dst_path}", - server_mode = &format!("{:o}", server_mode), - dst_path = &dst_path.display(self.path_style()), ) } else { - shell_script!( - "chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}", - server_mode = &format!("{:o}", server_mode), - dst_path = &dst_path.display(self.path_style()) - ) + format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",) }; - self.socket.run_command("sh", &["-c", &script]).await?; + let args = shell_kind.args_for_shell(false, script.to_string()); + self.socket.run_command("sh", &args).await?; Ok(()) } - async fn upload_file(&self, src_path: &Path, dest_path: &RelPath) -> Result<()> { - log::debug!("uploading file {:?} to {:?}", src_path, dest_path); + fn build_scp_command( + &self, + src_path: &Path, + dest_path_str: &str, + args: Option<&[&str]>, + ) -> process::Command { let mut command = util::command::new_smol_command("scp"); - let output = self - .socket - .ssh_options(&mut command) - .args( - self.socket - .connection_options - .port - .map(|port| vec!["-P".to_string(), port.to_string()]) - .unwrap_or_default(), - ) - .arg(src_path) - .arg(format!( - "{}:{}", - self.socket.connection_options.scp_url(), - dest_path.display(self.path_style()) - )) - .output() - .await?; + self.socket.ssh_options(&mut command, false).args( + self.socket + .connection_options + .port + .map(|port| vec!["-P".to_string(), port.to_string()]) + .unwrap_or_default(), + ); + if let Some(args) = args { + command.args(args); + } + command.arg(src_path).arg(format!( + "{}:{}", + self.socket.connection_options.scp_url(), + dest_path_str + )); + command + } - anyhow::ensure!( - output.status.success(), - "failed to upload file {} -> {}: {}", - src_path.display(), - dest_path.display(self.path_style()), - String::from_utf8_lossy(&output.stderr) + fn build_sftp_command(&self) -> process::Command { + let mut command = util::command::new_smol_command("sftp"); + self.socket.ssh_options(&mut command, false).args( + self.socket + .connection_options + .port + .map(|port| vec!["-P".to_string(), port.to_string()]) + .unwrap_or_default(), ); - Ok(()) + command.arg("-b").arg("-"); + command.arg(self.socket.connection_options.scp_url()); + command.stdin(Stdio::piped()); + command + } + + async fn upload_file(&self, src_path: &Path, dest_path: &RelPath) -> Result<()> { + log::debug!("uploading file {:?} to {:?}", src_path, dest_path); + + let dest_path_str = dest_path.display(self.path_style()); + + if Self::is_sftp_available().await { + log::debug!("using SFTP for file upload"); + let mut command = self.build_sftp_command(); + let sftp_batch = format!("put {} {}\n", src_path.display(), dest_path_str); + + let mut child = command.spawn()?; + if let Some(mut stdin) = child.stdin.take() { + use futures::AsyncWriteExt; + stdin.write_all(sftp_batch.as_bytes()).await?; + drop(stdin); + } + + let output = child.output().await?; + anyhow::ensure!( + output.status.success(), + "failed to upload file via SFTP {} -> {}: {}", + src_path.display(), + dest_path_str, + String::from_utf8_lossy(&output.stderr) + ); + + Ok(()) + } else { + log::debug!("using SCP for file upload"); + let mut command = self.build_scp_command(src_path, &dest_path_str, None); + let output = command.output().await?; + + anyhow::ensure!( + output.status.success(), + "failed to upload file via SCP {} -> {}: {}", + src_path.display(), + dest_path_str, + String::from_utf8_lossy(&output.stderr) + ); + + Ok(()) + } + } + + async fn is_sftp_available() -> bool { + which::which("sftp").is_ok() } } @@ 
-715,8 +877,12 @@ impl SshSocket { // into a machine. You must use `cd` to get back to $HOME. // You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'" fn ssh_command(&self, program: &str, args: &[impl AsRef]) -> process::Command { + let shell_kind = ShellKind::Posix; let mut command = util::command::new_smol_command("ssh"); - let mut to_run = shlex::try_quote(program).unwrap().into_owned(); + let mut to_run = shell_kind + .try_quote(program) + .expect("shell quoting") + .into_owned(); for arg in args { // We're trying to work with: sh, bash, zsh, fish, tcsh, ...? debug_assert!( @@ -724,10 +890,11 @@ impl SshSocket { "multiline arguments do not work in all shells" ); to_run.push(' '); - to_run.push_str(&shlex::try_quote(arg.as_ref()).unwrap()); + to_run.push_str(&shell_kind.try_quote(arg.as_ref()).expect("shell quoting")); } - let to_run = format!("cd; {to_run}"); - self.ssh_options(&mut command) + let separator = shell_kind.sequential_commands_separator(); + let to_run = format!("cd{separator} {to_run}"); + self.ssh_options(&mut command, true) .arg(self.connection_options.ssh_url()) .arg("-T") .arg(to_run); @@ -735,7 +902,7 @@ impl SshSocket { command } - async fn run_command(&self, program: &str, args: &[&str]) -> Result { + async fn run_command(&self, program: &str, args: &[impl AsRef]) -> Result { let output = self.ssh_command(program, args).output().await?; anyhow::ensure!( output.status.success(), @@ -746,23 +913,43 @@ impl SshSocket { } #[cfg(not(target_os = "windows"))] - fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { + fn ssh_options<'a>( + &self, + command: &'a mut process::Command, + include_port_forwards: bool, + ) -> &'a mut process::Command { + let args = if include_port_forwards { + self.connection_options.additional_args() + } else { + self.connection_options.additional_args_for_scp() + }; + command .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .args(self.connection_options.additional_args()) + .args(args) .args(["-o", "ControlMaster=no", "-o"]) .arg(format!("ControlPath={}", self.socket_path.display())) } #[cfg(target_os = "windows")] - fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { + fn ssh_options<'a>( + &self, + command: &'a mut process::Command, + include_port_forwards: bool, + ) -> &'a mut process::Command { + let args = if include_port_forwards { + self.connection_options.additional_args() + } else { + self.connection_options.additional_args_for_scp() + }; + command .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .args(self.connection_options.additional_args()) + .args(args) .envs(self.envs.clone()) } @@ -788,8 +975,13 @@ impl SshSocket { arguments } - async fn platform(&self) -> Result { - let uname = self.run_command("uname", &["-sm"]).await?; + async fn platform(&self, shell: ShellKind) -> Result { + let program = if shell == ShellKind::Nushell { + "^uname" + } else { + "uname" + }; + let uname = self.run_command(program, &["-sm"]).await?; let Some((os, arch)) = uname.split_once(" ") else { anyhow::bail!("unknown uname: {uname:?}") }; @@ -884,7 +1076,10 @@ impl SshConnectionOptions { "-w", ]; - let mut tokens = shlex::split(input).context("invalid input")?.into_iter(); + let mut tokens = ShellKind::Posix + .split(input) + .context("invalid input")? 
+ .into_iter(); 'outer: while let Some(arg) = tokens.next() { if ALLOWED_OPTS.contains(&(&arg as &str)) { @@ -984,8 +1179,12 @@ impl SshConnectionOptions { result } + pub fn additional_args_for_scp(&self) -> Vec { + self.args.iter().flatten().cloned().collect::>() + } + pub fn additional_args(&self) -> Vec { - let mut args = self.args.iter().flatten().cloned().collect::>(); + let mut args = self.additional_args_for_scp(); if let Some(forwards) = &self.port_forwards { args.extend(forwards.iter().map(|pf| { @@ -1043,6 +1242,7 @@ fn build_command( ) -> Result { use std::fmt::Write as _; + let shell_kind = ShellKind::new(ssh_shell, false); let mut exec = String::new(); if let Some(working_dir) = working_dir { let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string(); @@ -1052,29 +1252,38 @@ fn build_command( const TILDE_PREFIX: &'static str = "~/"; if working_dir.starts_with(TILDE_PREFIX) { let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/"); - write!(exec, "cd \"$HOME/{working_dir}\" && ",).unwrap(); + write!(exec, "cd \"$HOME/{working_dir}\" && ",)?; } else { - write!(exec, "cd \"{working_dir}\" && ",).unwrap(); + write!(exec, "cd \"{working_dir}\" && ",)?; } } else { - write!(exec, "cd && ").unwrap(); + write!(exec, "cd && ")?; }; - write!(exec, "exec env ").unwrap(); + write!(exec, "exec env ")?; for (k, v) in input_env.iter() { - if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) { - write!(exec, "{}={} ", k, v).unwrap(); - } + write!( + exec, + "{}={} ", + k, + shell_kind.try_quote(v).context("shell quoting")? + )?; } if let Some(input_program) = input_program { - write!(exec, "{}", shlex::try_quote(&input_program).unwrap()).unwrap(); + write!( + exec, + "{}", + shell_kind + .try_quote(&input_program) + .context("shell quoting")? 
+ )?; for arg in input_args { - let arg = shlex::try_quote(&arg)?; - write!(exec, " {}", &arg).unwrap(); + let arg = shell_kind.try_quote(&arg).context("shell quoting")?; + write!(exec, " {}", &arg)?; } } else { - write!(exec, "{ssh_shell} -l").unwrap(); + write!(exec, "{ssh_shell} -l")?; }; let mut args = Vec::new(); @@ -1162,4 +1371,45 @@ mod tests { Ok(()) } + + #[test] + fn scp_args_exclude_port_forward_flags() { + let options = SshConnectionOptions { + host: "example.com".into(), + args: Some(vec![ + "-p".to_string(), + "2222".to_string(), + "-o".to_string(), + "StrictHostKeyChecking=no".to_string(), + ]), + port_forwards: Some(vec![SshPortForwardOption { + local_host: Some("127.0.0.1".to_string()), + local_port: 8080, + remote_host: Some("127.0.0.1".to_string()), + remote_port: 80, + }]), + ..Default::default() + }; + + let ssh_args = options.additional_args(); + assert!( + ssh_args.iter().any(|arg| arg.starts_with("-L")), + "expected ssh args to include port-forward: {ssh_args:?}" + ); + + let scp_args = options.additional_args_for_scp(); + assert_eq!( + scp_args, + vec![ + "-p".to_string(), + "2222".to_string(), + "-o".to_string(), + "StrictHostKeyChecking=no".to_string() + ] + ); + assert!( + scp_args.iter().all(|arg| !arg.starts_with("-L")), + "scp args should not contain port forward flags: {scp_args:?}" + ); + } } diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 2ec2571aae0b91f8d8c7b1c75cd94d45f73531f6..d3d92b0f436f2a6ce1615426ac22916d4823a4fb 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -2,7 +2,7 @@ use crate::{ RemoteClientDelegate, RemotePlatform, remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions}, }; -use anyhow::{Result, anyhow, bail}; +use anyhow::{Context, Result, anyhow, bail}; use async_trait::async_trait; use collections::HashMap; use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender}; @@ -21,6 +21,7 @@ use std::{ use util::{ paths::{PathStyle, RemotePathBuf}, rel_path::RelPath, + shell::ShellKind, }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -38,12 +39,14 @@ impl From for WslConnectionOptions { } } +#[derive(Debug)] pub(crate) struct WslRemoteConnection { remote_binary_path: Option>, platform: RemotePlatform, shell: String, default_system_shell: String, connection_options: WslConnectionOptions, + can_exec: bool, } impl WslRemoteConnection { @@ -71,20 +74,60 @@ impl WslRemoteConnection { platform: RemotePlatform { os: "", arch: "" }, shell: String::new(), default_system_shell: String::from("/bin/sh"), + can_exec: true, }; delegate.set_status(Some("Detecting WSL environment"), cx); - this.platform = this.detect_platform().await?; this.shell = this.detect_shell().await?; + let shell = ShellKind::new(&this.shell, false); + this.can_exec = this.detect_can_exec(shell).await?; + this.platform = this.detect_platform(shell).await?; this.remote_binary_path = Some( - this.ensure_server_binary(&delegate, release_channel, version, commit, cx) + this.ensure_server_binary(&delegate, release_channel, version, commit, shell, cx) .await?, ); + log::debug!("Detected WSL environment: {this:#?}"); Ok(this) } - async fn detect_platform(&self) -> Result { - let arch_str = self.run_wsl_command("uname", &["-m"]).await?; + async fn detect_can_exec(&self, shell: ShellKind) -> Result { + let options = &self.connection_options; + let program = if shell == ShellKind::Nushell { + "^uname" + } else { + "uname" + }; + let args = &["-m"]; + let output = 
wsl_command_impl(options, program, args, true) + .output() + .await?; + + if !output.status.success() { + let output = wsl_command_impl(options, program, args, false) + .output() + .await?; + + if !output.status.success() { + return Err(anyhow!( + "Command '{}' failed: {}", + program, + String::from_utf8_lossy(&output.stderr).trim() + )); + } + + Ok(false) + } else { + Ok(true) + } + } + async fn detect_platform(&self, shell: ShellKind) -> Result { + let arch_str = if shell == ShellKind::Nushell { + // https://github.com/nushell/nushell/issues/12570 + self.run_wsl_command("sh", &["-c", "uname -m"]) + } else { + self.run_wsl_command("uname", &["-m"]) + } + .await?; let arch_str = arch_str.trim().to_string(); let arch = match arch_str.as_str() { "x86_64" => "x86_64", @@ -99,19 +142,19 @@ impl WslRemoteConnection { .run_wsl_command("sh", &["-c", "echo $SHELL"]) .await .ok() - .unwrap_or_else(|| "bash".to_string())) + .unwrap_or_else(|| "/bin/sh".to_string())) } async fn windows_path_to_wsl_path(&self, source: &Path) -> Result { - windows_path_to_wsl_path_impl(&self.connection_options, source).await + windows_path_to_wsl_path_impl(&self.connection_options, source, self.can_exec).await } fn wsl_command(&self, program: &str, args: &[impl AsRef]) -> process::Command { - wsl_command_impl(&self.connection_options, program, args) + wsl_command_impl(&self.connection_options, program, args, self.can_exec) } async fn run_wsl_command(&self, program: &str, args: &[&str]) -> Result { - run_wsl_command_impl(&self.connection_options, program, args).await + run_wsl_command_impl(&self.connection_options, program, args, self.can_exec).await } async fn ensure_server_binary( @@ -120,6 +163,7 @@ impl WslRemoteConnection { release_channel: ReleaseChannel, version: SemanticVersion, commit: Option, + shell: ShellKind, cx: &mut AsyncApp, ) -> Result> { let version_str = match release_channel { @@ -141,9 +185,13 @@ impl WslRemoteConnection { paths::remote_wsl_server_dir_relative().join(RelPath::unix(&binary_name).unwrap()); if let Some(parent) = dst_path.parent() { - self.run_wsl_command("mkdir", &["-p", &parent.display(PathStyle::Posix)]) - .await - .map_err(|e| anyhow!("Failed to create directory: {}", e))?; + let parent = parent.display(PathStyle::Posix); + if shell == ShellKind::Nushell { + self.run_wsl_command("mkdir", &[&parent]).await + } else { + self.run_wsl_command("mkdir", &["-p", &parent]).await + } + .map_err(|e| anyhow!("Failed to create directory: {}", e))?; } #[cfg(debug_assertions)] @@ -158,7 +206,7 @@ impl WslRemoteConnection { )) .unwrap(), ); - self.upload_file(&remote_server_path, &tmp_path, delegate, cx) + self.upload_file(&remote_server_path, &tmp_path, delegate, &shell, cx) .await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -191,7 +239,8 @@ impl WslRemoteConnection { ); let tmp_path = RelPath::unix(&tmp_path).unwrap(); - self.upload_file(&src_path, &tmp_path, delegate, cx).await?; + self.upload_file(&src_path, &tmp_path, delegate, &shell, cx) + .await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -203,14 +252,19 @@ impl WslRemoteConnection { src_path: &Path, dst_path: &RelPath, delegate: &Arc, + shell: &ShellKind, cx: &mut AsyncApp, ) -> Result<()> { delegate.set_status(Some("Uploading remote server to WSL"), cx); if let Some(parent) = dst_path.parent() { - self.run_wsl_command("mkdir", &["-p", &parent.display(PathStyle::Posix)]) - .await - .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; + let parent = 
parent.display(PathStyle::Posix); + if *shell == ShellKind::Nushell { + self.run_wsl_command("mkdir", &[&parent]).await + } else { + self.run_wsl_command("mkdir", &["-p", &parent]).await + } + .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; } let t0 = Instant::now(); @@ -296,7 +350,10 @@ impl RemoteConnection for WslRemoteConnection { let mut proxy_args = vec![]; for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { if let Some(value) = std::env::var(env_var).ok() { - proxy_args.push(format!("{}='{}'", env_var, value)); + // We don't quote the value here as it seems excessive and may result in invalid envs for the + // proxy server. For example, `RUST_LOG='debug'` will result in a warning "invalid logging spec 'debug'', ignoring it" + // in the proxy server. Therefore, we pass the env vars as is. + proxy_args.push(format!("{}={}", env_var, value)); } } proxy_args.push(remote_binary_path.display(PathStyle::Posix).into_owned()); @@ -335,19 +392,25 @@ impl RemoteConnection for WslRemoteConnection { ) -> Task> { cx.background_spawn({ let options = self.connection_options.clone(); + let can_exec = self.can_exec; async move { - let wsl_src = windows_path_to_wsl_path_impl(&options, &src_path).await?; - - run_wsl_command_impl(&options, "cp", &["-r", &wsl_src, &dest_path.to_string()]) - .await - .map_err(|e| { - anyhow!( - "failed to upload directory {} -> {}: {}", - src_path.display(), - dest_path.to_string(), - e - ) - })?; + let wsl_src = windows_path_to_wsl_path_impl(&options, &src_path, can_exec).await?; + + run_wsl_command_impl( + &options, + "cp", + &["-r", &wsl_src, &dest_path.to_string()], + can_exec, + ) + .await + .map_err(|e| { + anyhow!( + "failed to upload directory {} -> {}: {}", + src_path.display(), + dest_path, + e + ) + })?; Ok(()) } @@ -378,6 +441,7 @@ impl RemoteConnection for WslRemoteConnection { bail!("WSL shares the network interface with the host system"); } + let shell_kind = ShellKind::new(&self.shell, false); let working_dir = working_dir .map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string()) .unwrap_or("~".to_string()); @@ -385,19 +449,26 @@ impl RemoteConnection for WslRemoteConnection { let mut exec = String::from("exec env "); for (k, v) in env.iter() { - if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) { - write!(exec, "{}={} ", k, v).unwrap(); - } + write!( + exec, + "{}={} ", + k, + shell_kind.try_quote(v).context("shell quoting")? + )?; } if let Some(program) = program { - write!(exec, "{}", shlex::try_quote(&program)?).unwrap(); + write!( + exec, + "{}", + shell_kind.try_quote(&program).context("shell quoting")? 
+ )?; for arg in args { - let arg = shlex::try_quote(&arg)?; - write!(exec, " {}", &arg).unwrap(); + let arg = shell_kind.try_quote(&arg).context("shell quoting")?; + write!(exec, " {}", &arg)?; } } else { - write!(&mut exec, "{} -l", self.shell).unwrap(); + write!(&mut exec, "{} -l", self.shell)?; } let wsl_args = if let Some(user) = &self.connection_options.user { @@ -433,11 +504,9 @@ impl RemoteConnection for WslRemoteConnection { }) } - fn build_forward_port_command( + fn build_forward_ports_command( &self, - _: u16, - _: String, - _: u16, + _: Vec<(u16, String, u16)>, ) -> anyhow::Result { Err(anyhow!("WSL shares a network interface with the host")) } @@ -472,17 +541,21 @@ async fn sanitize_path(path: &Path) -> Result { async fn windows_path_to_wsl_path_impl( options: &WslConnectionOptions, source: &Path, + exec: bool, ) -> Result { let source = sanitize_path(source).await?; - run_wsl_command_impl(options, "wslpath", &["-u", &source]).await + run_wsl_command_impl(options, "wslpath", &["-u", &source], exec).await } async fn run_wsl_command_impl( options: &WslConnectionOptions, program: &str, args: &[&str], + exec: bool, ) -> Result { - let output = wsl_command_impl(options, program, args).output().await?; + let output = wsl_command_impl(options, program, args, exec) + .output() + .await?; if !output.status.success() { return Err(anyhow!( @@ -502,6 +575,7 @@ fn wsl_command_impl( options: &WslConnectionOptions, program: &str, args: &[impl AsRef], + exec: bool, ) -> process::Command { let mut command = util::command::new_smol_command("wsl.exe"); @@ -516,10 +590,13 @@ fn wsl_command_impl( .arg("--distribution") .arg(&options.distro_name) .arg("--cd") - .arg("~") - .arg("--exec") - .arg(program) - .args(args); + .arg("~"); + + if exec { + command.arg("--exec"); + } + + command.arg(program).args(args); log::debug!("wsl {:?}", command); command diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index 92777d1a5950cf67f5ba060e6e891ea213ea504d..3d28f6ba565330a5fc3c0ea0249aaf760c880439 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -60,6 +60,7 @@ settings.workspace = true shellexpand.workspace = true smol.workspace = true sysinfo.workspace = true +task.workspace = true util.workspace = true watch.workspace = true worktree.workspace = true @@ -74,8 +75,7 @@ minidumper.workspace = true [dev-dependencies] action_log.workspace = true -assistant_tool.workspace = true -assistant_tools.workspace = true +agent.workspace = true client = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } collections.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index be9dbca50c709accfc48dc9c33ae2cd9371b4efa..5d50853601b3949835a350559d48ef755419c93d 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -32,7 +32,7 @@ use std::{ path::{Path, PathBuf}, sync::{Arc, atomic::AtomicUsize}, }; -use sysinfo::System; +use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; use worktree::Worktree; @@ -50,6 +50,7 @@ pub struct HeadlessProject { pub languages: Arc, pub extensions: Entity, pub git_store: Entity, + pub environment: Entity, // Used mostly to keep alive the toolchain store for RPC handlers. // Local variant is used within LSP store, but that's a separate entity. 
pub _toolchain_store: Entity, @@ -93,7 +94,7 @@ impl HeadlessProject { store }); - let environment = cx.new(|_| ProjectEnvironment::new(None)); + let environment = cx.new(|cx| ProjectEnvironment::new(None, cx)); let manifest_tree = ManifestTree::new(worktree_store.clone(), cx); let toolchain_store = cx.new(|cx| { ToolchainStore::local( @@ -101,6 +102,7 @@ impl HeadlessProject { worktree_store.clone(), environment.clone(), manifest_tree.clone(), + fs.clone(), cx, ) }); @@ -199,7 +201,7 @@ impl HeadlessProject { let mut agent_server_store = AgentServerStore::local( node_runtime.clone(), fs.clone(), - environment, + environment.clone(), http_client.clone(), cx, ); @@ -255,6 +257,7 @@ impl HeadlessProject { session.add_entity_request_handler(Self::handle_open_new_buffer); session.add_entity_request_handler(Self::handle_find_search_candidates); session.add_entity_request_handler(Self::handle_open_server_settings); + session.add_entity_request_handler(Self::handle_get_directory_environment); session.add_entity_message_handler(Self::handle_toggle_lsp_logs); session.add_entity_request_handler(BufferStore::handle_update_buffer); @@ -295,6 +298,7 @@ impl HeadlessProject { languages, extensions, git_store, + environment, _toolchain_store: toolchain_store, } } @@ -743,9 +747,16 @@ impl HeadlessProject { _cx: AsyncApp, ) -> Result { let mut processes = Vec::new(); - let system = System::new_all(); + let refresh_kind = RefreshKind::nothing().with_processes( + ProcessRefreshKind::nothing() + .without_tasks() + .with_cmd(UpdateKind::Always), + ); - for (_pid, process) in system.processes() { + for process in System::new_with_specifics(refresh_kind) + .processes() + .values() + { let name = process.name().to_string_lossy().into_owned(); let command = process .cmd() @@ -764,6 +775,26 @@ impl HeadlessProject { Ok(proto::GetProcessesResponse { processes }) } + + async fn handle_get_directory_environment( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let shell = task::shell_from_proto(envelope.payload.shell.context("missing shell")?)?; + let directory = PathBuf::from(envelope.payload.directory); + let environment = this + .update(&mut cx, |this, cx| { + this.environment.update(cx, |environment, cx| { + environment.get_local_directory_environment(&shell, directory.into(), cx) + }) + })? + .await + .context("failed to get directory environment")? + .into_iter() + .collect(); + Ok(proto::DirectoryEnvironment { environment }) + } } fn prompt_to_proto( diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index fc21e7548272b56f2c482ec2e2843f812c2b11bc..4010d033c09473cb475ae40b977af70fca390b82 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2,12 +2,11 @@ /// The tests in this file assume that server_cx is running on Windows too. /// We neead to find a way to test Windows-Non-Windows interactions. 
use crate::headless_project::HeadlessProject; -use assistant_tool::{Tool as _, ToolResultContent}; -use assistant_tools::{ReadFileTool, ReadFileToolInput}; +use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream}; use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; -use language_model::{LanguageModelRequest, fake_provider::FakeLanguageModel}; +use language_model::LanguageModelToolResultContent; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; @@ -1327,8 +1326,6 @@ async fn test_copy_file_into_remote_project( ); } -// TODO: this test fails on Windows. -#[cfg(not(windows))] #[gpui::test] async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let text_2 = " @@ -1723,47 +1720,26 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu .unwrap(); let action_log = cx.new(|_| action_log::ActionLog::new(project.clone())); - let model = Arc::new(FakeLanguageModel::default()); - let request = Arc::new(LanguageModelRequest::default()); let input = ReadFileToolInput { path: "project/b.txt".into(), start_line: None, end_line: None, }; - let exists_result = cx.update(|cx| { - ReadFileTool::run( - Arc::new(ReadFileTool), - serde_json::to_value(input).unwrap(), - request.clone(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }); - let output = exists_result.output.await.unwrap().content; - assert_eq!(output, ToolResultContent::Text("B".to_string())); + let read_tool = Arc::new(ReadFileTool::new(project, action_log)); + let (event_stream, _) = ToolCallEventStream::test(); + + let exists_result = cx.update(|cx| read_tool.clone().run(input, event_stream.clone(), cx)); + let output = exists_result.await.unwrap(); + assert_eq!(output, LanguageModelToolResultContent::Text("B".into())); let input = ReadFileToolInput { path: "project/c.txt".into(), start_line: None, end_line: None, }; - let does_not_exist_result = cx.update(|cx| { - ReadFileTool::run( - Arc::new(ReadFileTool), - serde_json::to_value(input).unwrap(), - request.clone(), - project.clone(), - action_log.clone(), - model.clone(), - None, - cx, - ) - }); - does_not_exist_result.output.await.unwrap_err(); + let does_not_exist_result = cx.update(|cx| read_tool.run(input, event_stream, cx)); + does_not_exist_result.await.unwrap_err(); } #[gpui::test] diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index cb09b91fc72404032b1f9c6334b35e24bf3d610d..3cfb73adbb82af2d83182400d725d859ebad29f8 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -6,7 +6,7 @@ use util::ResultExt; use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; -use futures::channel::mpsc; +use futures::channel::{mpsc, oneshot}; use futures::{AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt, SinkExt, select, select_biased}; use git::GitHostingProviderRegistry; use gpui::{App, AppContext as _, Context, Entity, SemanticVersion, UpdateGlobal as _}; @@ -103,7 +103,9 @@ fn init_logging_server(log_file_path: PathBuf) -> Result>> { buffer: Vec::new(), }); - env_logger::Builder::from_default_env() + env_logger::Builder::new() + .filter_level(log::LevelFilter::Info) + .parse_default_env() .target(env_logger::Target::Pipe(target)) .format(|buf, record| { let mut log_record = LogRecord::new(record); @@ -368,6 +370,14 @@ pub fn execute_run( let listeners = ServerListeners::new(stdin_socket, stdout_socket, stderr_socket)?; + let (shell_env_loaded_tx, shell_env_loaded_rx) 
= oneshot::channel(); + app.background_executor() + .spawn(async { + util::load_login_shell_environment().await.log_err(); + shell_env_loaded_tx.send(()).ok(); + }) + .detach(); + let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); app.run(move |cx| { settings::init(cx); @@ -413,7 +423,11 @@ pub fn execute_run( ) }; - let node_runtime = NodeRuntime::new(http_client.clone(), None, node_settings_rx); + let node_runtime = NodeRuntime::new( + http_client.clone(), + Some(shell_env_loaded_rx), + node_settings_rx, + ); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); languages.set_language_server_download_dir(paths::languages_dir().clone()); @@ -988,10 +1002,9 @@ fn is_new_version(version: &str) -> bool { } fn is_file_in_use(file_name: &OsStr) -> bool { - let info = - sysinfo::System::new_with_specifics(sysinfo::RefreshKind::new().with_processes( - sysinfo::ProcessRefreshKind::new().with_exe(sysinfo::UpdateKind::Always), - )); + let info = sysinfo::System::new_with_specifics(sysinfo::RefreshKind::nothing().with_processes( + sysinfo::ProcessRefreshKind::nothing().with_exe(sysinfo::UpdateKind::Always), + )); for process in info.processes().values() { if process diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 6386dc330af8fd1eb46380cb39c71f4adffea1e6..14040ba4847d710be0a24a8bbddeb67a6aeb748b 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -16,7 +16,7 @@ doctest = false alacritty_terminal.workspace = true anyhow.workspace = true async-dispatcher.workspace = true -async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] } +async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots", "tokio-runtime"] } base64.workspace = true client.workspace = true collections.workspace = true @@ -51,7 +51,6 @@ util.workspace = true uuid.workspace = true workspace.workspace = true picker.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 20b2bc62e001cc565495924ffa80cbe466abe649..b1050bce338903c5fdfa3a1867f615fa1dfd6497 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -326,7 +326,7 @@ impl NotebookEditor { cx, ) .tooltip(move |window, cx| { - Tooltip::for_action("Execute all cells", &RunAll, window, cx) + Tooltip::for_action("Execute all cells", &RunAll, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(RunAll), cx); @@ -341,12 +341,7 @@ impl NotebookEditor { ) .disabled(!has_outputs) .tooltip(move |window, cx| { - Tooltip::for_action( - "Clear all outputs", - &ClearOutputs, - window, - cx, - ) + Tooltip::for_action("Clear all outputs", &ClearOutputs, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(ClearOutputs), cx); @@ -363,7 +358,7 @@ impl NotebookEditor { cx, ) .tooltip(move |window, cx| { - Tooltip::for_action("Move cell up", &MoveCellUp, window, cx) + Tooltip::for_action("Move cell up", &MoveCellUp, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(MoveCellUp), cx); @@ -377,7 +372,7 @@ impl NotebookEditor { cx, ) .tooltip(move |window, cx| { - Tooltip::for_action("Move cell down", &MoveCellDown, window, cx) + Tooltip::for_action("Move cell down", &MoveCellDown, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(MoveCellDown), cx); @@ -394,12 +389,7 @@ impl 
NotebookEditor { cx, ) .tooltip(move |window, cx| { - Tooltip::for_action( - "Add markdown block", - &AddMarkdownBlock, - window, - cx, - ) + Tooltip::for_action("Add markdown block", &AddMarkdownBlock, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(AddMarkdownBlock), cx); @@ -413,7 +403,7 @@ impl NotebookEditor { cx, ) .tooltip(move |window, cx| { - Tooltip::for_action("Add code block", &AddCodeBlock, window, cx) + Tooltip::for_action("Add code block", &AddCodeBlock, cx) }) .on_click(|_, window, cx| { window.dispatch_action(Box::new(AddCodeBlock), cx); @@ -716,6 +706,10 @@ impl Item for NotebookEditor { Some(cx.new(|cx| Self::new(self.project.clone(), self.notebook_item.clone(), window, cx))) } + fn buffer_kind(&self, _: &App) -> workspace::item::ItemBufferKind { + workspace::item::ItemBufferKind::Singleton + } + fn for_each_project_item( &self, cx: &App, diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 0cabbbbae4715181a76b3730bf492b481d0a6e1b..fefdbec2fa2770baa279a832bd55278bd502380d 100644 --- a/crates/repl/src/outputs/image.rs +++ b/crates/repl/src/outputs/image.rs @@ -51,6 +51,7 @@ impl ImageView { image::ImageFormat::WebP => ImageFormat::Webp, image::ImageFormat::Tiff => ImageFormat::Tiff, image::ImageFormat::Bmp => ImageFormat::Bmp, + image::ImageFormat::Ico => ImageFormat::Ico, format => { anyhow::bail!("unsupported image format {format:?}"); } diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index f58cf6bd3f9574c79a978b650287ca8ace40092b..6addd9a9f49b5094fcbedd148d8ca7c38e1ccd1b 100644 --- a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -198,7 +198,16 @@ impl TerminalOutput { } fn full_text(&self) -> String { - let mut full_text = String::new(); + fn sanitize(mut line: String) -> Option { + line.retain(|ch| ch != '\u{0}' && ch != '\r'); + if line.trim().is_empty() { + return None; + } + let trimmed = line.trim_end_matches([' ', '\t']); + Some(trimmed.to_owned()) + } + + let mut lines = Vec::new(); // Get the total number of lines, including history let total_lines = self.handler.grid().total_lines(); @@ -210,11 +219,8 @@ impl TerminalOutput { let line_index = Line(-(line as i32) - 1); let start = Point::new(line_index, Column(0)); let end = Point::new(line_index, Column(self.handler.columns() - 1)); - let line_content = self.handler.bounds_to_string(start, end); - - if !line_content.trim().is_empty() { - full_text.push_str(&line_content); - full_text.push('\n'); + if let Some(cleaned) = sanitize(self.handler.bounds_to_string(start, end)) { + lines.push(cleaned); } } @@ -223,15 +229,18 @@ impl TerminalOutput { let line_index = Line(line as i32); let start = Point::new(line_index, Column(0)); let end = Point::new(line_index, Column(self.handler.columns() - 1)); - let line_content = self.handler.bounds_to_string(start, end); - - if !line_content.trim().is_empty() { - full_text.push_str(&line_content); - full_text.push('\n'); + if let Some(cleaned) = sanitize(self.handler.bounds_to_string(start, end)) { + lines.push(cleaned); } } - full_text + if lines.is_empty() { + String::new() + } else { + let mut full_text = lines.join("\n"); + full_text.push('\n'); + full_text + } } } diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index b4c928c33e021229caaa68e11b7cdd7228ed934d..a47d680e9bfe7a82cee25db360a59223e89df93e 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -85,7 +85,11 @@ pub fn run( let editor = 
editor.upgrade().context("editor was dropped")?; let selected_range = editor - .update(cx, |editor, cx| editor.selections.newest_adjusted(cx)) + .update(cx, |editor, cx| { + editor + .selections + .newest_adjusted(&editor.display_snapshot(cx)) + }) .range(); let multibuffer = editor.read(cx).buffer().clone(); let Some(buffer) = multibuffer.read(cx).as_singleton() else { @@ -473,7 +477,9 @@ fn language_supported(language: &Arc, cx: &mut App) -> bool { fn get_language(editor: WeakEntity, cx: &mut App) -> Option> { editor .update(cx, |editor, cx| { - let selection = editor.selections.newest::(cx); + let selection = editor + .selections + .newest::(&editor.display_snapshot(cx)); let buffer = editor.buffer().read(cx).snapshot(cx); buffer.language_at(selection.head()).cloned() }) diff --git a/crates/repl/src/repl_sessions_ui.rs b/crates/repl/src/repl_sessions_ui.rs index 36936641b050012968ec4ac586c540c2567db350..d8bd8869f28ac4a9bdf396073f8948d15aef9e3e 100644 --- a/crates/repl/src/repl_sessions_ui.rs +++ b/crates/repl/src/repl_sessions_ui.rs @@ -197,7 +197,7 @@ impl Item for ReplSessionsPage { } impl Render for ReplSessionsPage { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let store = ReplStore::global(cx); let (kernel_specifications, sessions) = store.update(cx, |store, _cx| { @@ -241,7 +241,7 @@ impl Render for ReplSessionsPage { return ReplSessionsContainer::new("No Jupyter Kernel Sessions").child( v_flex() .child(Label::new(instructions)) - .children(KeyBinding::for_action(&Run, window, cx)), + .child(KeyBinding::for_action(&Run, cx)), ); } diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index b9a36a18aec44a3460e099858cc33360b76ee4f9..a5dc7b6c7b642b3c8afb46924fcb6e54620cb19b 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -214,8 +214,9 @@ impl ReplStore { let selected_kernelspec = self.selected_kernel_for_worktree.get(&worktree_id).cloned(); if let Some(language_at_cursor) = language_at_cursor { - selected_kernelspec - .or_else(|| self.kernelspec_legacy_by_lang_only(language_at_cursor, cx)) + selected_kernelspec.or_else(|| { + self.kernelspec_legacy_by_lang_only(worktree_id, language_at_cursor, cx) + }) } else { selected_kernelspec } @@ -223,6 +224,7 @@ impl ReplStore { fn kernelspec_legacy_by_lang_only( &self, + worktree_id: WorktreeId, language_at_cursor: Arc, cx: &App, ) -> Option { @@ -232,8 +234,7 @@ impl ReplStore { .get(language_at_cursor.code_fence_block_name().as_ref()); let found_by_name = self - .kernel_specifications - .iter() + .kernel_specifications_for_worktree(worktree_id) .find(|runtime_specification| { if let (Some(selected), KernelSpecification::Jupyter(runtime_specification)) = (selected_kernel, runtime_specification) @@ -249,8 +250,7 @@ impl ReplStore { return Some(found_by_name); } - self.kernel_specifications - .iter() + self.kernel_specifications_for_worktree(worktree_id) .find(|kernel_option| match kernel_option { KernelSpecification::Jupyter(runtime_specification) => { runtime_specification.kernelspec.language.to_lowercase() diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index 68a354c13b94c01336791d021a926cacc6da4d62..7fd50237d9dc257f0ee7fe75134cd0456ad4928f 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -26,7 +26,6 @@ log.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } 
regex.workspace = true reqwest.workspace = true -workspace-hack.workspace = true [dev-dependencies] gpui.workspace = true diff --git a/crates/rich_text/Cargo.toml b/crates/rich_text/Cargo.toml index 5d788abea82780a7e90c7f279bdaa0b7e1438828..17bd8d2a4b8977b2bf0079b84dc8f27a9999974b 100644 --- a/crates/rich_text/Cargo.toml +++ b/crates/rich_text/Cargo.toml @@ -27,4 +27,3 @@ pulldown-cmark.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 682b9aad92e355538333da358713d8c97f765b97..f099248a5db49ac1e857900b7d00294a11cfbff2 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -15,11 +15,11 @@ path = "src/rope.rs" arrayvec = "0.7.1" log.workspace = true rayon.workspace = true +regex.workspace = true smallvec.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index bf891a4f837c3e63975aea1233b15c913758c175..030bec01df4d223cd5288842ba0f9c1386dac31b 100644 --- a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -9,18 +9,31 @@ use rope::{Point, Rope}; use sum_tree::Bias; use util::RandomCharIter; -fn generate_random_text(mut rng: StdRng, text_len: usize) -> String { - RandomCharIter::new(&mut rng).take(text_len).collect() +/// Returns a biased random string whose UTF-8 length is close to but no more than `len` bytes. +/// +/// The string is biased towards characters expected to occur in text or likely to exercise edge +/// cases. +fn generate_random_text(rng: &mut StdRng, len: usize) -> String { + let mut str = String::with_capacity(len); + let mut chars = RandomCharIter::new(rng); + loop { + let ch = chars.next().unwrap(); + if str.len() + ch.len_utf8() > len { + break; + } + str.push(ch); + } + str } -fn generate_random_rope(rng: StdRng, text_len: usize) -> Rope { +fn generate_random_rope(rng: &mut StdRng, text_len: usize) -> Rope { let text = generate_random_text(rng, text_len); let mut rope = Rope::new(); rope.push(&text); rope } -fn generate_random_rope_ranges(mut rng: StdRng, rope: &Rope) -> Vec> { +fn generate_random_rope_ranges(rng: &mut StdRng, rope: &Rope) -> Vec> { let range_max_len = 50; let num_ranges = rope.len() / range_max_len; @@ -47,7 +60,7 @@ fn generate_random_rope_ranges(mut rng: StdRng, rope: &Rope) -> Vec ranges } -fn generate_random_rope_points(mut rng: StdRng, rope: &Rope) -> Vec { +fn generate_random_rope_points(rng: &mut StdRng, rope: &Rope) -> Vec { let num_points = rope.len() / 10; let mut points = Vec::new(); @@ -61,14 +74,14 @@ fn rope_benchmarks(c: &mut Criterion) { static SEED: u64 = 9999; static KB: usize = 1024; - let rng = StdRng::seed_from_u64(SEED); let sizes = [4 * KB, 64 * KB]; let mut group = c.benchmark_group("push"); for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let text = generate_random_text(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let text = generate_random_text(&mut rng, *size); b.iter(|| { let mut rope = Rope::new(); @@ -84,9 +97,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { 
+ let mut rng = StdRng::seed_from_u64(SEED); let mut random_ropes = Vec::new(); for _ in 0..5 { - random_ropes.push(generate_random_rope(rng.clone(), *size)); + let rope = generate_random_rope(&mut rng, *size); + random_ropes.push(rope); } b.iter(|| { @@ -103,10 +118,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( - || generate_random_rope_ranges(rng.clone(), &rope), + || generate_random_rope_ranges(&mut rng, &rope), |ranges| { for range in ranges.iter() { rope.slice(range.clone()); @@ -122,10 +138,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( - || generate_random_rope_ranges(rng.clone(), &rope), + || generate_random_rope_ranges(&mut rng, &rope), |ranges| { for range in ranges.iter() { let bytes = rope.bytes_in_range(range.clone()); @@ -142,7 +159,8 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter(|| { let chars = rope.chars().count(); @@ -156,10 +174,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( - || generate_random_rope_points(rng.clone(), &rope), + || generate_random_rope_points(&mut rng, &rope), |offsets| { for offset in offsets.iter() { black_box(rope.clip_point(*offset, Bias::Left)); @@ -176,10 +195,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( - || generate_random_rope_points(rng.clone(), &rope), + || generate_random_rope_points(&mut rng, &rope), |offsets| { for offset in offsets.iter() { black_box(rope.point_to_offset(*offset)); @@ -195,11 +215,11 @@ fn rope_benchmarks(c: &mut Criterion) { for size in sizes.iter() { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { - let rope = generate_random_rope(rng.clone(), *size); + let mut rng = StdRng::seed_from_u64(SEED); + let rope = generate_random_rope(&mut rng, *size); b.iter_batched( || { - let mut rng = rng.clone(); let num_points = rope.len() / 10; let mut points = Vec::new(); diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index 
91e61a517144f5b2408902173a8c34ccb865e9d5..2fa6112dd439a5835891db813dc9ce12cb22809d 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -5,29 +5,36 @@ use sum_tree::Bias; use unicode_segmentation::GraphemeCursor; use util::debug_panic; -pub(crate) const MIN_BASE: usize = if cfg!(test) { 6 } else { 64 }; -pub(crate) const MAX_BASE: usize = MIN_BASE * 2; +#[cfg(not(all(test, not(rust_analyzer))))] +pub(crate) type Bitmap = u128; +#[cfg(all(test, not(rust_analyzer)))] +pub(crate) type Bitmap = u16; + +pub(crate) const MIN_BASE: usize = MAX_BASE / 2; +pub(crate) const MAX_BASE: usize = Bitmap::BITS as usize; #[derive(Clone, Debug, Default)] pub struct Chunk { /// If bit[i] is set, then the character at index i is the start of a UTF-8 character in the /// text. - chars: u128, + chars: Bitmap, /// The number of set bits is the number of UTF-16 code units it would take to represent the /// text. /// /// Bit[i] is set if text[i] is the start of a UTF-8 character. If the character would /// take two UTF-16 code units, then bit[i+1] is also set. (Rust chars never take more /// than two UTF-16 code units.) - chars_utf16: u128, + chars_utf16: Bitmap, /// If bit[i] is set, then the character at index i is an ascii newline. - newlines: u128, + newlines: Bitmap, /// If bit[i] is set, then the character at index i is an ascii tab. - pub tabs: u128, + tabs: Bitmap, pub text: ArrayString, } impl Chunk { + pub const MASK_BITS: usize = Bitmap::BITS as usize; + #[inline(always)] pub fn new(text: &str) -> Self { let mut this = Chunk::default(); @@ -41,9 +48,9 @@ impl Chunk { let ix = self.text.len() + char_ix; self.chars |= 1 << ix; self.chars_utf16 |= 1 << ix; - self.chars_utf16 |= (c.len_utf16() as u128) << ix; - self.newlines |= ((c == '\n') as u128) << ix; - self.tabs |= ((c == '\t') as u128) << ix; + self.chars_utf16 |= (c.len_utf16() as Bitmap) << ix; + self.newlines |= ((c == '\n') as Bitmap) << ix; + self.tabs |= ((c == '\t') as Bitmap) << ix; } self.text.push_str(text); } @@ -79,17 +86,85 @@ impl Chunk { } #[inline(always)] - pub fn chars(&self) -> u128 { + pub fn chars(&self) -> Bitmap { self.chars } + + pub fn tabs(&self) -> Bitmap { + self.tabs + } + + #[inline(always)] + pub fn is_char_boundary(&self, offset: usize) -> bool { + (1 as Bitmap).unbounded_shl(offset as u32) & self.chars != 0 || offset == self.text.len() + } + + pub fn floor_char_boundary(&self, index: usize) -> usize { + #[inline] + pub(crate) const fn is_utf8_char_boundary(u8: u8) -> bool { + // This is bit magic equivalent to: b < 128 || b >= 192 + (u8 as i8) >= -0x40 + } + + if index >= self.text.len() { + self.text.len() + } else { + let mut i = index; + while i > 0 { + if is_utf8_char_boundary(self.text.as_bytes()[i]) { + break; + } + i -= 1; + } + + i + } + } + + #[track_caller] + #[inline(always)] + pub fn assert_char_boundary(&self, offset: usize) { + if self.is_char_boundary(offset) { + return; + } + panic_char_boundary(self, offset); + + #[cold] + #[inline(never)] + fn panic_char_boundary(chunk: &Chunk, offset: usize) { + if offset > chunk.text.len() { + panic!( + "byte index {} is out of bounds of `{:?}` (length: {})", + offset, + chunk.text, + chunk.text.len() + ); + } + // find the character + let char_start = chunk.floor_char_boundary(offset); + // `char_start` must be less than len and a char boundary + let ch = chunk + .text + .get(char_start..) 
+ .unwrap() + .chars() + .next() + .unwrap(); + let char_range = char_start..char_start + ch.len_utf8(); + panic!( + "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})", + offset, ch, char_range, + ); + } + } } #[derive(Clone, Copy, Debug)] pub struct ChunkSlice<'a> { - chars: u128, - chars_utf16: u128, - newlines: u128, - tabs: u128, + chars: Bitmap, + chars_utf16: Bitmap, + newlines: Bitmap, + tabs: Bitmap, text: &'a str, } @@ -112,8 +187,8 @@ impl<'a> ChunkSlice<'a> { } #[inline(always)] - pub fn is_char_boundary(self, offset: usize) -> bool { - self.text.is_char_boundary(offset) + pub fn is_char_boundary(&self, offset: usize) -> bool { + (1 as Bitmap).unbounded_shl(offset as u32) & self.chars != 0 || offset == self.text.len() } #[inline(always)] @@ -129,7 +204,7 @@ impl<'a> ChunkSlice<'a> { }; (left, right) } else { - let mask = (1u128 << mid) - 1; + let mask = ((1 as Bitmap) << mid) - 1; let (left_text, right_text) = self.text.split_at(mid); let left = ChunkSlice { chars: self.chars & mask, @@ -151,17 +226,9 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn slice(self, range: Range) -> Self { - let mask = if range.end == MAX_BASE { - u128::MAX - } else { - debug_assert!( - self.is_char_boundary(range.end), - "Invalid range end {} in {:?}", - range.end, - self - ); - (1u128 << range.end) - 1 - }; + let mask = (1 as Bitmap) + .unbounded_shl(range.end as u32) + .wrapping_sub(1); if range.start == MAX_BASE { Self { chars: 0, @@ -171,12 +238,8 @@ impl<'a> ChunkSlice<'a> { text: "", } } else { - debug_assert!( - self.is_char_boundary(range.start), - "Invalid range start {} in {:?}", - range.start, - self - ); + self.assert_char_boundary(range.start); + self.assert_char_boundary(range.end); Self { chars: (self.chars & mask) >> range.start, chars_utf16: (self.chars_utf16 & mask) >> range.start, @@ -220,7 +283,7 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn lines(&self) -> Point { let row = self.newlines.count_ones(); - let column = self.newlines.leading_zeros() - (u128::BITS - self.text.len() as u32); + let column = self.newlines.leading_zeros() - (Bitmap::BITS - self.text.len() as u32); Point::new(row, column) } @@ -230,7 +293,7 @@ impl<'a> ChunkSlice<'a> { if self.newlines == 0 { self.chars.count_ones() } else { - let mask = (1u128 << self.newlines.trailing_zeros()) - 1; + let mask = ((1 as Bitmap) << self.newlines.trailing_zeros()) - 1; (self.chars & mask).count_ones() } } @@ -241,7 +304,7 @@ impl<'a> ChunkSlice<'a> { if self.newlines == 0 { self.chars.count_ones() } else { - let mask = !(u128::MAX >> self.newlines.leading_zeros()); + let mask = !(Bitmap::MAX >> self.newlines.leading_zeros()); (self.chars & mask).count_ones() } } @@ -252,7 +315,7 @@ impl<'a> ChunkSlice<'a> { if self.newlines == 0 { self.chars_utf16.count_ones() } else { - let mask = !(u128::MAX >> self.newlines.leading_zeros()); + let mask = !(Bitmap::MAX >> self.newlines.leading_zeros()); (self.chars_utf16 & mask).count_ones() } } @@ -295,13 +358,9 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn offset_to_point(&self, offset: usize) -> Point { - let mask = if offset == MAX_BASE { - u128::MAX - } else { - (1u128 << offset) - 1 - }; + let mask = (1 as Bitmap).unbounded_shl(offset as u32).wrapping_sub(1); let row = (self.newlines & mask).count_ones(); - let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let newline_ix = Bitmap::BITS - (self.newlines & mask).leading_zeros(); let column = (offset - newline_ix as usize) as u32; Point::new(row, column) } @@ -330,13 
+389,81 @@ impl<'a> ChunkSlice<'a> { } } + #[track_caller] #[inline(always)] - pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { - let mask = if offset == MAX_BASE { - u128::MAX + pub fn assert_char_boundary(&self, offset: usize) { + if self.is_char_boundary(offset) { + return; + } + panic_char_boundary(self, offset); + + #[cold] + #[inline(never)] + fn panic_char_boundary(chunk: &ChunkSlice, offset: usize) { + if offset > chunk.text.len() { + panic!( + "byte index {} is out of bounds of `{:?}` (length: {})", + offset, + chunk.text, + chunk.text.len() + ); + } + // find the character + let char_start = chunk.floor_char_boundary(offset); + // `char_start` must be less than len and a char boundary + let ch = chunk + .text + .get(char_start..) + .unwrap() + .chars() + .next() + .unwrap(); + let char_range = char_start..char_start + ch.len_utf8(); + panic!( + "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})", + offset, ch, char_range, + ); + } + } + + pub fn floor_char_boundary(&self, index: usize) -> usize { + #[inline] + pub(crate) const fn is_utf8_char_boundary(u8: u8) -> bool { + // This is bit magic equivalent to: b < 128 || b >= 192 + (u8 as i8) >= -0x40 + } + + if index >= self.text.len() { + self.text.len() } else { - (1u128 << offset) - 1 - }; + let mut i = index; + while i > 0 { + if is_utf8_char_boundary(self.text.as_bytes()[i]) { + break; + } + i -= 1; + } + + i + } + } + + #[inline(always)] + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + if point.row > self.lines().row { + debug_panic!( + "point {:?} extends beyond rows for string {:?}", + point, + self.text + ); + return self.len_utf16(); + } + self.offset_to_offset_utf16(self.point_to_offset(point)) + } + + #[inline(always)] + pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { + let mask = (1 as Bitmap).unbounded_shl(offset as u32).wrapping_sub(1); OffsetUtf16((self.chars_utf16 & mask).count_ones() as usize) } @@ -345,7 +472,11 @@ impl<'a> ChunkSlice<'a> { if target.0 == 0 { 0 } else { - let ix = nth_set_bit(self.chars_utf16, target.0) + 1; + #[cfg(not(test))] + let chars_utf16 = self.chars_utf16; + #[cfg(test)] + let chars_utf16 = self.chars_utf16 as u128; + let ix = nth_set_bit(chars_utf16, target.0) + 1; if ix == MAX_BASE { MAX_BASE } else { @@ -360,13 +491,9 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { - let mask = if offset == MAX_BASE { - u128::MAX - } else { - (1u128 << offset) - 1 - }; + let mask = (1 as Bitmap).unbounded_shl(offset as u32).wrapping_sub(1); let row = (self.newlines & mask).count_ones(); - let newline_ix = u128::BITS - (self.newlines & mask).leading_zeros(); + let newline_ix = Bitmap::BITS - (self.newlines & mask).leading_zeros(); let column = if newline_ix as usize == MAX_BASE { 0 } else { @@ -520,7 +647,11 @@ impl<'a> ChunkSlice<'a> { #[inline(always)] fn offset_range_for_row(&self, row: u32) -> Range { let row_start = if row > 0 { - nth_set_bit(self.newlines, row as usize) + 1 + #[cfg(not(test))] + let newlines = self.newlines; + #[cfg(test)] + let newlines = self.newlines as u128; + nth_set_bit(newlines, row as usize) + 1 } else { 0 }; @@ -545,8 +676,8 @@ impl<'a> ChunkSlice<'a> { } pub struct Tabs { - tabs: u128, - chars: u128, + tabs: Bitmap, + chars: Bitmap, } #[derive(Debug, PartialEq, Eq)] @@ -647,8 +778,8 @@ mod tests { // Verify Chunk::chars() bitmap let expected_chars = char_offsets(&text) .into_iter() - .inspect(|i| assert!(*i < 128)) - 
.fold(0u128, |acc, i| acc | (1 << i)); + .inspect(|i| assert!(*i < MAX_BASE)) + .fold(0 as Bitmap, |acc, i| acc | (1 << i)); assert_eq!(chunk.chars(), expected_chars); for _ in 0..10 { diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index c349f3f4513ae0f229ac866cfae36c63734a5c22..b5c5cd069e07a0957b01130eec2b4ecdf7f7120e 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -4,22 +4,115 @@ mod point; mod point_utf16; mod unclipped; -use chunk::Chunk; use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; +use regex::Regex; use smallvec::SmallVec; use std::{ + borrow::Cow, cmp, fmt, io, mem, ops::{self, AddAssign, Range}, str, + sync::{Arc, LazyLock}, }; use sum_tree::{Bias, Dimension, Dimensions, SumTree}; -pub use chunk::ChunkSlice; +pub use chunk::{Chunk, ChunkSlice}; pub use offset_utf16::OffsetUtf16; pub use point::Point; pub use point_utf16::PointUtf16; pub use unclipped::Unclipped; +use crate::chunk::Bitmap; + +static LINE_SEPARATORS_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); + +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum LineEnding { + Unix, + Windows, +} + +impl Default for LineEnding { + fn default() -> Self { + #[cfg(unix)] + return Self::Unix; + + #[cfg(not(unix))] + return Self::Windows; + } +} + +impl LineEnding { + pub fn as_str(&self) -> &'static str { + match self { + LineEnding::Unix => "\n", + LineEnding::Windows => "\r\n", + } + } + + pub fn label(&self) -> &'static str { + match self { + LineEnding::Unix => "LF", + LineEnding::Windows => "CRLF", + } + } + + pub fn detect(text: &str) -> Self { + let mut max_ix = cmp::min(text.len(), 1000); + while !text.is_char_boundary(max_ix) { + max_ix -= 1; + } + + if let Some(ix) = text[..max_ix].find(['\n']) { + if ix > 0 && text.as_bytes()[ix - 1] == b'\r' { + Self::Windows + } else { + Self::Unix + } + } else { + Self::default() + } + } + + pub fn normalize(text: &mut String) { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") { + *text = replaced; + } + } + + pub fn normalize_arc(text: Arc) -> Arc { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { + replaced.into() + } else { + text + } + } + + pub fn normalize_cow(text: Cow) -> Cow { + if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { + replaced.into() + } else { + text + } + } + + /// Converts text chunks into a [`String`] using the current line ending. + pub fn into_string(&self, chunks: Chunks<'_>) -> String { + match self { + LineEnding::Unix => chunks.collect(), + LineEnding::Windows => { + let line_ending = self.as_str(); + let mut result = String::new(); + for chunk in chunks { + result.push_str(&chunk.replace('\n', line_ending)); + } + result + } + } + } +} + #[derive(Clone, Default)] pub struct Rope { chunks: SumTree, @@ -30,19 +123,45 @@ impl Rope { Self::default() } + /// Checks that `index`-th byte is the first byte in a UTF-8 code point + /// sequence or the end of the string. + /// + /// The start and end of the string (when `index == self.len()`) are + /// considered to be boundaries. + /// + /// Returns `false` if `index` is greater than `self.len()`. 
pub fn is_char_boundary(&self, offset: usize) -> bool { if self.chunks.is_empty() { return offset == 0; } - let mut cursor = self.chunks.cursor::(()); - cursor.seek(&offset, Bias::Left); - let chunk_offset = offset - cursor.start(); - cursor - .item() - .map(|chunk| chunk.text.is_char_boundary(chunk_offset)) + let (start, _, item) = self.chunks.find::((), &offset, Bias::Left); + let chunk_offset = offset - start; + item.map(|chunk| chunk.is_char_boundary(chunk_offset)) .unwrap_or(false) } + #[track_caller] + #[inline(always)] + pub fn assert_char_boundary(&self, offset: usize) { + if self.chunks.is_empty() && offset == 0 { + return; + } + let (start, _, item) = self.chunks.find::((), &offset, Bias::Left); + match item { + Some(chunk) => { + let chunk_offset = offset - start; + chunk.assert_char_boundary(chunk_offset); + } + None => { + panic!( + "byte index {} is out of bounds of rope (length: {})", + offset, + self.len() + ); + } + } + } + pub fn floor_char_boundary(&self, index: usize) -> usize { if index >= self.len() { self.len() @@ -53,10 +172,9 @@ impl Rope { (u8 as i8) >= -0x40 } - let mut cursor = self.chunks.cursor::(()); - cursor.seek(&index, Bias::Left); - let chunk_offset = index - cursor.start(); - let lower_idx = cursor.item().map(|chunk| { + let (start, _, item) = self.chunks.find::((), &index, Bias::Left); + let chunk_offset = index - start; + let lower_idx = item.map(|chunk| { let lower_bound = chunk_offset.saturating_sub(3); chunk .text @@ -71,7 +189,7 @@ impl Rope { }) .unwrap_or(chunk.text.len()) }); - lower_idx.map_or_else(|| self.len(), |idx| cursor.start() + idx) + lower_idx.map_or_else(|| self.len(), |idx| start + idx) } } @@ -85,10 +203,9 @@ impl Rope { (u8 as i8) >= -0x40 } - let mut cursor = self.chunks.cursor::(()); - cursor.seek(&index, Bias::Left); - let chunk_offset = index - cursor.start(); - let upper_idx = cursor.item().map(|chunk| { + let (start, _, item) = self.chunks.find::((), &index, Bias::Left); + let chunk_offset = index - start; + let upper_idx = item.map(|chunk| { let upper_bound = Ord::min(chunk_offset + 4, chunk.text.len()); chunk.text.as_bytes()[chunk_offset..upper_bound] .iter() @@ -96,7 +213,7 @@ impl Rope { .map_or(upper_bound, |pos| pos + chunk_offset) }); - upper_idx.map_or_else(|| self.len(), |idx| cursor.start() + idx) + upper_idx.map_or_else(|| self.len(), |idx| start + idx) } } @@ -345,15 +462,26 @@ impl Rope { Chunks::new(self, range, true) } + /// Formats the rope's text with the specified line ending string. + /// This replaces all `\n` characters with the provided line ending. + /// + /// The rope internally stores all line breaks as `\n` (see `Display` impl). + /// Use this method to convert to different line endings for file operations, + /// LSP communication, or other scenarios requiring specific line ending formats. 
+ pub fn to_string_with_line_ending(&self, line_ending: LineEnding) -> String { + line_ending.into_string(self.chunks()) + } + pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 { if offset >= self.summary().len { return self.summary().len_utf16; } - let mut cursor = self.chunks.cursor::>(()); - cursor.seek(&offset, Bias::Left); - let overshoot = offset - cursor.start().0; - cursor.start().1 - + cursor.item().map_or(Default::default(), |chunk| { + let (start, _, item) = + self.chunks + .find::, _>((), &offset, Bias::Left); + let overshoot = offset - start.0; + start.1 + + item.map_or(Default::default(), |chunk| { chunk.as_slice().offset_to_offset_utf16(overshoot) }) } @@ -362,11 +490,12 @@ impl Rope { if offset >= self.summary().len_utf16 { return self.summary().len; } - let mut cursor = self.chunks.cursor::>(()); - cursor.seek(&offset, Bias::Left); - let overshoot = offset - cursor.start().0; - cursor.start().1 - + cursor.item().map_or(Default::default(), |chunk| { + let (start, _, item) = + self.chunks + .find::, _>((), &offset, Bias::Left); + let overshoot = offset - start.0; + start.1 + + item.map_or(Default::default(), |chunk| { chunk.as_slice().offset_utf16_to_offset(overshoot) }) } @@ -375,11 +504,12 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines; } - let mut cursor = self.chunks.cursor::>(()); - cursor.seek(&offset, Bias::Left); - let overshoot = offset - cursor.start().0; - cursor.start().1 - + cursor.item().map_or(Point::zero(), |chunk| { + let (start, _, item) = + self.chunks + .find::, _>((), &offset, Bias::Left); + let overshoot = offset - start.0; + start.1 + + item.map_or(Point::zero(), |chunk| { chunk.as_slice().offset_to_point(overshoot) }) } @@ -388,11 +518,12 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::>(()); - cursor.seek(&offset, Bias::Left); - let overshoot = offset - cursor.start().0; - cursor.start().1 - + cursor.item().map_or(PointUtf16::zero(), |chunk| { + let (start, _, item) = + self.chunks + .find::, _>((), &offset, Bias::Left); + let overshoot = offset - start.0; + start.1 + + item.map_or(PointUtf16::zero(), |chunk| { chunk.as_slice().offset_to_point_utf16(overshoot) }) } @@ -401,12 +532,28 @@ impl Rope { if point >= self.summary().lines { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::>(()); + let (start, _, item) = + self.chunks + .find::, _>((), &point, Bias::Left); + let overshoot = point - start.0; + start.1 + + item.map_or(PointUtf16::zero(), |chunk| { + chunk.as_slice().point_to_point_utf16(overshoot) + }) + } + + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + if point >= self.summary().lines_utf16() { + return self.summary().lines; + } + let mut cursor = self.chunks.cursor::>(()); cursor.seek(&point, Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 - + cursor.item().map_or(PointUtf16::zero(), |chunk| { - chunk.as_slice().point_to_point_utf16(overshoot) + + cursor.item().map_or(Point::zero(), |chunk| { + chunk + .as_slice() + .offset_to_point(chunk.as_slice().point_utf16_to_offset(overshoot, false)) }) } @@ -414,19 +561,34 @@ impl Rope { if point >= self.summary().lines { return self.summary().len; } - let mut cursor = self.chunks.cursor::>(()); + let (start, _, item) = + self.chunks + .find::, _>((), &point, Bias::Left); + let overshoot = point - start.0; + start.1 + item.map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot)) + } + + pub fn 
point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + if point >= self.summary().lines { + return self.summary().len_utf16; + } + let mut cursor = self.chunks.cursor::>(()); cursor.seek(&point, Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 - + cursor - .item() - .map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot)) + + cursor.item().map_or(OffsetUtf16(0), |chunk| { + chunk.as_slice().point_to_offset_utf16(overshoot) + }) } pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { self.point_utf16_to_offset_impl(point, false) } + pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 { + self.point_utf16_to_offset_utf16_impl(point, false) + } + pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped) -> usize { self.point_utf16_to_offset_impl(point.0, true) } @@ -435,12 +597,30 @@ impl Rope { if point >= self.summary().lines_utf16() { return self.summary().len; } - let mut cursor = self.chunks.cursor::>(()); + let (start, _, item) = + self.chunks + .find::, _>((), &point, Bias::Left); + let overshoot = point - start.0; + start.1 + + item.map_or(0, |chunk| { + chunk.as_slice().point_utf16_to_offset(overshoot, clip) + }) + } + + fn point_utf16_to_offset_utf16_impl(&self, point: PointUtf16, clip: bool) -> OffsetUtf16 { + if point >= self.summary().lines_utf16() { + return self.summary().len_utf16; + } + let mut cursor = self + .chunks + .cursor::>(()); cursor.seek(&point, Bias::Left); let overshoot = point - cursor.start().0; cursor.start().1 - + cursor.item().map_or(0, |chunk| { - chunk.as_slice().point_utf16_to_offset(overshoot, clip) + + cursor.item().map_or(OffsetUtf16(0), |chunk| { + chunk + .as_slice() + .offset_to_offset_utf16(chunk.as_slice().point_utf16_to_offset(overshoot, clip)) }) } @@ -448,11 +628,12 @@ impl Rope { if point.0 >= self.summary().lines_utf16() { return self.summary().lines; } - let mut cursor = self.chunks.cursor::>(()); - cursor.seek(&point.0, Bias::Left); - let overshoot = Unclipped(point.0 - cursor.start().0); - cursor.start().1 - + cursor.item().map_or(Point::zero(), |chunk| { + let (start, _, item) = + self.chunks + .find::, _>((), &point.0, Bias::Left); + let overshoot = Unclipped(point.0 - start.0); + start.1 + + item.map_or(Point::zero(), |chunk| { chunk.as_slice().unclipped_point_utf16_to_point(overshoot) }) } @@ -465,33 +646,30 @@ impl Rope { } pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 { - let mut cursor = self.chunks.cursor::(()); - cursor.seek(&offset, Bias::Right); - if let Some(chunk) = cursor.item() { - let overshoot = offset - cursor.start(); - *cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias) + let (start, _, item) = self.chunks.find::((), &offset, Bias::Right); + if let Some(chunk) = item { + let overshoot = offset - start; + start + chunk.as_slice().clip_offset_utf16(overshoot, bias) } else { self.summary().len_utf16 } } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - let mut cursor = self.chunks.cursor::(()); - cursor.seek(&point, Bias::Right); - if let Some(chunk) = cursor.item() { - let overshoot = point - cursor.start(); - *cursor.start() + chunk.as_slice().clip_point(overshoot, bias) + let (start, _, item) = self.chunks.find::((), &point, Bias::Right); + if let Some(chunk) = item { + let overshoot = point - start; + start + chunk.as_slice().clip_point(overshoot, bias) } else { self.summary().lines } } pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { - let mut 
cursor = self.chunks.cursor::(()); - cursor.seek(&point.0, Bias::Right); - if let Some(chunk) = cursor.item() { - let overshoot = Unclipped(point.0 - cursor.start()); - *cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias) + let (start, _, item) = self.chunks.find::((), &point.0, Bias::Right); + if let Some(chunk) = item { + let overshoot = Unclipped(point.0 - start); + start + chunk.as_slice().clip_point_utf16(overshoot, bias) } else { self.summary().lines_utf16() } @@ -535,10 +713,16 @@ impl From<&String> for Rope { } } +/// Display implementation for Rope. +/// +/// Note: This always uses `\n` as the line separator, regardless of the original +/// file's line endings. The rope internally normalizes all line breaks to `\n`. +/// If you need to preserve original line endings (e.g., for LSP communication), +/// use `to_string_with_line_ending` instead. impl fmt::Display for Rope { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for chunk in self.chunks() { - write!(f, "{}", chunk)?; + write!(f, "{chunk}")?; } Ok(()) } @@ -650,9 +834,9 @@ pub struct ChunkBitmaps<'a> { /// A slice of text up to 128 bytes in size pub text: &'a str, /// Bitmap of character locations in text. LSB ordered - pub chars: u128, + pub chars: Bitmap, /// Bitmap of tab locations in text. LSB ordered - pub tabs: u128, + pub tabs: Bitmap, } #[derive(Clone)] @@ -673,6 +857,12 @@ impl<'a> Chunks<'a> { chunks.seek(&range.start, Bias::Right); range.start }; + let chunk_offset = offset - chunks.start(); + if let Some(chunk) = chunks.item() + && !chunk.text.is_char_boundary(chunk_offset) + { + panic!("byte index {} is not a char boundary", offset); + } Self { chunks, range, @@ -818,39 +1008,6 @@ impl<'a> Chunks<'a> { self.offset < initial_offset && self.offset == 0 } - /// Returns bitmaps that represent character positions and tab positions - pub fn peek_with_bitmaps(&self) -> Option> { - if !self.offset_is_valid() { - return None; - } - - let chunk = self.chunks.item()?; - let chunk_start = *self.chunks.start(); - let slice_range = if self.reversed { - let slice_start = cmp::max(chunk_start, self.range.start) - chunk_start; - let slice_end = self.offset - chunk_start; - slice_start..slice_end - } else { - let slice_start = self.offset - chunk_start; - let slice_end = cmp::min(self.chunks.end(), self.range.end) - chunk_start; - slice_start..slice_end - }; - - // slice range has a bounds between 0 and 128 in non test builds - // We use a non wrapping sub because we want to overflow in the case where slice_range.end == 128 - // because that represents a full chunk and the bitmask shouldn't remove anything - let bitmask = (1u128.unbounded_shl(slice_range.end as u32)).wrapping_sub(1); - - let chars = (chunk.chars() & bitmask) >> slice_range.start; - let tabs = (chunk.tabs & bitmask) >> slice_range.start; - - Some(ChunkBitmaps { - text: &chunk.text[slice_range], - chars, - tabs, - }) - } - pub fn peek(&self) -> Option<&'a str> { if !self.offset_is_valid() { return None; @@ -871,7 +1028,8 @@ impl<'a> Chunks<'a> { Some(&chunk.text[slice_range]) } - pub fn peek_tabs(&self) -> Option> { + /// Returns bitmaps that represent character positions and tab positions + pub fn peek_with_bitmaps(&self) -> Option> { if !self.offset_is_valid() { return None; } @@ -891,7 +1049,7 @@ impl<'a> Chunks<'a> { let slice_text = &chunk.text[slice_range]; // Shift the tabs to align with our slice window - let shifted_tabs = chunk.tabs >> chunk_start_offset; + let shifted_tabs = chunk.tabs() >> chunk_start_offset; let shifted_chars 
= chunk.chars() >> chunk_start_offset; Some(ChunkBitmaps { @@ -2214,6 +2372,53 @@ mod tests { } } + #[test] + fn test_to_string_with_line_ending() { + // Test Unix line endings (no conversion) + let rope = Rope::from("line1\nline2\nline3"); + assert_eq!( + rope.to_string_with_line_ending(LineEnding::Unix), + "line1\nline2\nline3" + ); + + // Test Windows line endings + assert_eq!( + rope.to_string_with_line_ending(LineEnding::Windows), + "line1\r\nline2\r\nline3" + ); + + // Test empty rope + let empty_rope = Rope::from(""); + assert_eq!( + empty_rope.to_string_with_line_ending(LineEnding::Windows), + "" + ); + + // Test single line (no newlines) + let single_line = Rope::from("single line"); + assert_eq!( + single_line.to_string_with_line_ending(LineEnding::Windows), + "single line" + ); + + // Test rope ending with newline + let ending_newline = Rope::from("line1\nline2\n"); + assert_eq!( + ending_newline.to_string_with_line_ending(LineEnding::Windows), + "line1\r\nline2\r\n" + ); + + // Test large rope with multiple chunks + let mut large_rope = Rope::new(); + for i in 0..100 { + large_rope.push(&format!("line{}\n", i)); + } + let result = large_rope.to_string_with_line_ending(LineEnding::Windows); + assert!(result.contains("\r\n")); + assert!(!result.contains("\n\n")); + assert_eq!(result.matches("\r\n").count(), 100); + } + fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 81764917a7e888a766571e4114f614f7391bc000..10ebde26b6b9242ecee9ef52cdb4a00323efaf3f 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -36,7 +36,6 @@ strum.workspace = true tracing = { version = "0.1.34", features = ["log"] } util.workspace = true zstd.workspace = true -workspace-hack.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/rpc/src/extension.rs b/crates/rpc/src/extension.rs index 1ee55d5ccef2e7b3aaa1d4d16e9bbad13cf11ade..1b00312bad033a542648252565e062e0209248bc 100644 --- a/crates/rpc/src/extension.rs +++ b/crates/rpc/src/extension.rs @@ -42,6 +42,7 @@ pub enum ExtensionProvides { Grammars, LanguageServers, ContextServers, + AgentServers, SlashCommands, IndexedDocsProviders, Snippets, diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index a90797ff5dfb44c22fa7aa61751ad3baefd2b745..d7e3ba1e461b28ac264afcc05a8ae941e6da0c32 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -226,6 +226,7 @@ impl AnyProtoClient { pub fn request_lsp( &self, project_id: u64, + server_id: Option, timeout: Duration, executor: BackgroundExecutor, request: T, @@ -247,6 +248,7 @@ impl AnyProtoClient { let query = proto::LspQuery { project_id, + server_id, lsp_request_id: new_id.0, request: Some(request.to_proto_query()), }; @@ -361,6 +363,9 @@ impl AnyProtoClient { Response::GetImplementationResponse(response) => { to_any_envelope(&envelope, response) } + Response::InlayHintsResponse(response) => { + to_any_envelope(&envelope, response) + } }; Some(proto::ProtoLspResponse { server_id, diff --git a/crates/rules_library/Cargo.toml b/crates/rules_library/Cargo.toml index 298f77a2d2472dc8e01bbf0355d5193ed8832ff8..d2fdd765e044181ecb16535076fd31175ddb87c9 100644 --- a/crates/rules_library/Cargo.toml +++ b/crates/rules_library/Cargo.toml @@ -30,6 +30,5 @@ theme.workspace = true title_bar.workspace = true ui.workspace = true util.workspace = true 
-workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index abb0b4e3a1a84cf7ecf40939b33aee19b874bcdf..207a9841e41bf35e1f63bb00b0c62073c1cf0224 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -3,9 +3,9 @@ use collections::{HashMap, HashSet}; use editor::{CompletionProvider, SelectionEffects}; use editor::{CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle, actions::Tab}; use gpui::{ - Action, App, Bounds, Entity, EventEmitter, Focusable, PromptLevel, Subscription, Task, - TextStyle, TitlebarOptions, WindowBounds, WindowHandle, WindowOptions, actions, point, size, - transparent_black, + Action, App, Bounds, DEFAULT_ADDITIONAL_WINDOW_SIZE, Entity, EventEmitter, Focusable, + PromptLevel, Subscription, Task, TextStyle, TitlebarOptions, WindowBounds, WindowHandle, + WindowOptions, actions, point, size, transparent_black, }; use language::{Buffer, LanguageRegistry, language_settings::SoftWrap}; use language_model::{ @@ -129,13 +129,13 @@ pub fn open_rules_library( titlebar: Some(TitlebarOptions { title: Some("Rules Library".into()), appears_transparent: true, - traffic_light_position: Some(point(px(9.0), px(9.0))), + traffic_light_position: Some(point(px(12.0), px(12.0))), }), app_id: Some(app_id.to_owned()), window_bounds: Some(WindowBounds::Windowed(bounds)), window_background: cx.theme().window_background_appearance(), window_decorations: Some(window_decorations), - window_min_size: Some(size(px(800.), px(600.))), // 4:3 Aspect Ratio + window_min_size: Some(DEFAULT_ADDITIONAL_WINDOW_SIZE), kind: gpui::WindowKind::Floating, ..Default::default() }, @@ -369,10 +369,9 @@ impl PickerDelegate for RulePickerDelegate { .spacing(ListItemSpacing::Sparse) .toggle_state(selected) .child( - h_flex() - .h_5() - .line_height(relative(1.)) - .child(Label::new(rule.title.clone().unwrap_or("Untitled".into()))), + Label::new(rule.title.clone().unwrap_or("Untitled".into())) + .truncate() + .mr_10(), ) .end_slot::(default.then(|| { IconButton::new("toggle-default-rule", IconName::Paperclip) @@ -390,12 +389,11 @@ impl PickerDelegate for RulePickerDelegate { div() .id("built-in-rule") .child(Icon::new(IconName::FileLock).color(Color::Muted)) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Built-in rule", None, BUILT_IN_TOOLTIP_TEXT, - window, cx, ) }) @@ -426,12 +424,11 @@ impl PickerDelegate for RulePickerDelegate { "Remove from Default Rules", )) } else { - this.tooltip(move |window, cx| { + this.tooltip(move |_window, cx| { Tooltip::with_meta( "Add to Default Rules", None, "Always included in every thread.", - window, cx, ) }) @@ -455,13 +452,15 @@ impl PickerDelegate for RulePickerDelegate { cx: &mut Context>, ) -> Div { h_flex() - .bg(cx.theme().colors().editor_background) - .rounded_sm() - .overflow_hidden() - .flex_none() .py_1() - .px_2() + .px_1p5() .mx_1() + .gap_1p5() + .rounded_sm() + .bg(cx.theme().colors().editor_background) + .border_1() + .border_color(cx.theme().colors().border) + .child(Icon::new(IconName::MagnifyingGlass).color(Color::Muted)) .child(editor.clone()) } } @@ -1098,41 +1097,99 @@ impl RulesLibrary { v_flex() .id("rule-list") .capture_action(cx.listener(Self::focus_active_rule)) - .bg(cx.theme().colors().panel_background) + .px_1p5() .h_full() - .px_1() - .w_1_3() + .w_64() .overflow_x_hidden() - .child( - h_flex() - 
.p(DynamicSpacing::Base04.rems(cx)) - .h_9() - .w_full() - .flex_none() - .justify_end() - .child( - IconButton::new("new-rule", IconName::Plus) - .tooltip(move |window, cx| { - Tooltip::for_action("New Rule", &NewRule, window, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(Box::new(NewRule), cx); - }), - ), - ) + .bg(cx.theme().colors().panel_background) + .map(|this| { + if cfg!(target_os = "macos") { + this.child( + h_flex() + .p(DynamicSpacing::Base04.rems(cx)) + .h_9() + .w_full() + .flex_none() + .justify_end() + .child( + IconButton::new("new-rule", IconName::Plus) + .tooltip(move |_window, cx| { + Tooltip::for_action("New Rule", &NewRule, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(Box::new(NewRule), cx); + }), + ), + ) + } else { + this.child( + h_flex().p_1().w_full().child( + Button::new("new-rule", "New Rule") + .full_width() + .style(ButtonStyle::Outlined) + .icon(IconName::Plus) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .icon_color(Color::Muted) + .on_click(|_, window, cx| { + window.dispatch_action(Box::new(NewRule), cx); + }), + ), + ) + } + }) .child(div().flex_grow().child(self.picker.clone())) } + fn render_active_rule_editor( + &self, + editor: &Entity, + cx: &mut Context, + ) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + + div() + .w_full() + .on_action(cx.listener(Self::move_down_from_title)) + .pl_1() + .border_1() + .border_color(transparent_black()) + .rounded_sm() + .group_hover("active-editor-header", |this| { + this.border_color(cx.theme().colors().border_variant) + }) + .child(EditorElement::new( + &editor, + EditorStyle { + background: cx.theme().system().transparent, + local_player: cx.theme().players().local(), + text: TextStyle { + color: cx.theme().colors().editor_foreground, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: HeadlineSize::Large.rems().into(), + font_weight: settings.ui_font.weight, + line_height: relative(settings.buffer_line_height.value()), + ..Default::default() + }, + scrollbar_width: Pixels::ZERO, + syntax: cx.theme().syntax().clone(), + status: cx.theme().status().clone(), + inlay_hints_style: editor::make_inlay_hints_style(cx), + edit_prediction_styles: editor::make_suggestion_styles(cx), + ..EditorStyle::default() + }, + )) + } + fn render_active_rule(&mut self, cx: &mut Context) -> gpui::Stateful
{ div() - .w_2_3() - .h_full() .id("rule-editor") + .h_full() + .flex_grow() .border_l_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background) - .flex_none() - .min_w_64() .children(self.active_rule_id.and_then(|prompt_id| { let rule_metadata = self.store.read(cx).metadata(prompt_id)?; let rule_editor = &self.rule_editors[&prompt_id]; @@ -1140,7 +1197,6 @@ impl RulesLibrary { let model = LanguageModelRegistry::read_global(cx) .default_model() .map(|default| default.model); - let settings = ThemeSettings::get_global(cx); Some( v_flex() @@ -1160,46 +1216,7 @@ impl RulesLibrary { .gap_2() .justify_between() .child( - div() - .w_full() - .on_action(cx.listener(Self::move_down_from_title)) - .pl_1() - .border_1() - .border_color(transparent_black()) - .rounded_sm() - .group_hover("active-editor-header", |this| { - this.border_color(cx.theme().colors().border_variant) - }) - .child(EditorElement::new( - &rule_editor.title_editor, - EditorStyle { - background: cx.theme().system().transparent, - local_player: cx.theme().players().local(), - text: TextStyle { - color: cx.theme().colors().editor_foreground, - font_family: settings.ui_font.family.clone(), - font_features: settings - .ui_font - .features - .clone(), - font_size: HeadlineSize::Large.rems().into(), - font_weight: settings.ui_font.weight, - line_height: relative( - settings.buffer_line_height.value(), - ), - ..Default::default() - }, - scrollbar_width: Pixels::ZERO, - syntax: cx.theme().syntax().clone(), - status: cx.theme().status().clone(), - inlay_hints_style: editor::make_inlay_hints_style( - cx, - ), - edit_prediction_styles: - editor::make_suggestion_styles(cx), - ..EditorStyle::default() - }, - )), + self.render_active_rule_editor(&rule_editor.title_editor, cx), ) .child( h_flex() @@ -1215,7 +1232,7 @@ impl RulesLibrary { .id("token_count") .mr_1() .flex_shrink_0() - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Token Estimation", None, @@ -1226,7 +1243,6 @@ impl RulesLibrary { .map(|model| model.name().0) .unwrap_or_default() ), - window, cx, ) }) @@ -1245,23 +1261,21 @@ impl RulesLibrary { Icon::new(IconName::FileLock) .color(Color::Muted), ) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Built-in rule", None, BUILT_IN_TOOLTIP_TEXT, - window, cx, ) }) .into_any() } else { IconButton::new("delete-rule", IconName::Trash) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( "Delete Rule", &DeleteRule, - window, cx, ) }) @@ -1273,11 +1287,10 @@ impl RulesLibrary { }) .child( IconButton::new("duplicate-rule", IconName::BookCopy) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( "Duplicate Rule", &DuplicateRule, - window, cx, ) }) @@ -1305,12 +1318,11 @@ impl RulesLibrary { "Remove from Default Rules", )) } else { - this.tooltip(move |window, cx| { + this.tooltip(move |_window, cx| { Tooltip::with_meta( "Add to Default Rules", None, "Always included in every thread.", - window, cx, ) }) @@ -1355,9 +1367,8 @@ impl Render for RulesLibrary { client_side_decorations( v_flex() - .bg(theme.colors().background) .id("rules-library") - .key_context("PromptLibrary") + .key_context("RulesLibrary") .on_action(cx.listener(|this, &NewRule, window, cx| this.new_rule(window, cx))) .on_action( cx.listener(|this, &DeleteRule, window, cx| { @@ -1375,60 +1386,33 @@ impl Render for RulesLibrary { .font(ui_font) .text_color(theme.colors().text) .children(self.title_bar.clone()) + 
.bg(theme.colors().background) .child( h_flex() .flex_1() + .when(!cfg!(target_os = "macos"), |this| { + this.border_t_1().border_color(cx.theme().colors().border) + }) .child(self.render_rule_list(cx)) .map(|el| { if self.store.read(cx).prompt_count() == 0 { el.child( v_flex() - .w_2_3() .h_full() + .flex_1() .items_center() .justify_center() - .gap_4() + .border_l_1() + .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background) .child( - h_flex() - .gap_2() - .child( - Icon::new(IconName::Book) - .size(IconSize::Medium) - .color(Color::Muted), - ) - .child( - Label::new("No rules yet") - .size(LabelSize::Large) - .color(Color::Muted), - ), - ) - .child( - h_flex() - .child(h_flex()) - .child( - v_flex() - .gap_1() - .child(Label::new( - "Create your first rule:", - )) - .child( - Button::new("create-rule", "New Rule") - .full_width() - .key_binding( - KeyBinding::for_action( - &NewRule, window, cx, - ), - ) - .on_click(|_, window, cx| { - window.dispatch_action( - NewRule.boxed_clone(), - cx, - ) - }), - ), - ) - .child(h_flex()), + Button::new("create-rule", "New Rule") + .style(ButtonStyle::Outlined) + .key_binding(KeyBinding::for_action(&NewRule, cx)) + .on_click(|_, window, cx| { + window + .dispatch_action(NewRule.boxed_clone(), cx) + }), ), ) } else { diff --git a/crates/scheduler/Cargo.toml b/crates/scheduler/Cargo.toml index 44436b34d490b94588af54b79abfbf3d60974a93..bbab41dcdb04bad70f390aac3625dcf73e68baa6 100644 --- a/crates/scheduler/Cargo.toml +++ b/crates/scheduler/Cargo.toml @@ -22,4 +22,3 @@ chrono.workspace = true futures.workspace = true parking_lot.workspace = true rand.workspace = true -workspace-hack.workspace = true diff --git a/crates/schema_generator/Cargo.toml b/crates/schema_generator/Cargo.toml index 09fe20adc3b0056ff6b8b269d2918445a684fd37..865f76f4af917606af5d61d173950493fdde07c7 100644 --- a/crates/schema_generator/Cargo.toml +++ b/crates/schema_generator/Cargo.toml @@ -16,4 +16,3 @@ schemars = { workspace = true, features = ["indexmap2"] } serde.workspace = true serde_json.workspace = true theme.workspace = true -workspace-hack.workspace = true diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index 613f229d4d0ed0c0097a64ef4ead331331860ef8..7d8efbb11a5f1461da5b63152e2277a38ad272b4 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -39,13 +39,15 @@ smol.workspace = true theme.workspace = true ui.workspace = true util.workspace = true +util_macros.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +language = { workspace = true, features = ["test-support"] } +lsp.workspace = true unindent.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 84be844532931eaf8c0ce5ced5bfc56c14a62dd0..49c1fc5b297aedcf86c66140d0d803901b18c52a 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -266,12 +266,11 @@ impl Render for BufferSearchBar { .toggle_state(self.selection_search_enabled.is_some()) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Search Selection", &ToggleSelection, &focus_handle, - window, cx, ) } @@ -1524,6 +1523,7 @@ mod 
tests { use settings::{SearchSettingsContent, SettingsStore}; use smol::stream::StreamExt as _; use unindent::Unindent as _; + use util_macros::perf; fn init_globals(cx: &mut TestAppContext) { cx.update(|cx| { @@ -1580,6 +1580,7 @@ mod tests { (editor.unwrap(), search_bar, cx) } + #[perf] #[gpui::test] async fn test_search_simple(cx: &mut TestAppContext) { let (editor, search_bar, cx) = init_test(cx); @@ -1860,6 +1861,7 @@ mod tests { .collect::>() } + #[perf] #[gpui::test] async fn test_search_option_handling(cx: &mut TestAppContext) { let (editor, search_bar, cx) = init_test(cx); @@ -1920,6 +1922,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_search_select_all_matches(cx: &mut TestAppContext) { init_globals(cx); @@ -2128,6 +2131,7 @@ mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_search_query_with_match_whole_word(cx: &mut TestAppContext) { init_globals(cx); @@ -2213,6 +2217,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_search_query_history(cx: &mut TestAppContext) { let (_editor, search_bar, cx) = init_test(cx); @@ -2362,6 +2367,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_replace_simple(cx: &mut TestAppContext) { let (editor, search_bar, cx) = init_test(cx); @@ -2529,6 +2535,7 @@ mod tests { ); } + #[perf] #[gpui::test] async fn test_replace_special_characters(cx: &mut TestAppContext) { let (editor, search_bar, cx) = init_test(cx); @@ -2592,6 +2599,7 @@ mod tests { .await; } + #[perf] #[gpui::test] async fn test_find_matches_in_selections_singleton_buffer_multiple_selections( cx: &mut TestAppContext, @@ -2658,6 +2666,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_find_matches_in_selections_multiple_excerpts_buffer_multiple_selections( cx: &mut TestAppContext, @@ -2744,6 +2753,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_invalid_regexp_search_after_valid(cx: &mut TestAppContext) { let (editor, search_bar, cx) = init_test(cx); @@ -2779,6 +2789,7 @@ mod tests { }); } + #[perf] #[gpui::test] async fn test_search_options_changes(cx: &mut TestAppContext) { let (_editor, search_bar, cx) = init_test(cx); diff --git a/crates/search/src/buffer_search/registrar.rs b/crates/search/src/buffer_search/registrar.rs index 0e227cbb7c3a465892a2eed867a001aa48b80ff9..2c640e67cee98fb5e56eda1484ecf6e2fab41976 100644 --- a/crates/search/src/buffer_search/registrar.rs +++ b/crates/search/src/buffer_search/registrar.rs @@ -62,7 +62,7 @@ impl SearchActionsRegistrar for DivRegistrar<'_, '_, T> { impl SearchActionsRegistrar for Workspace { fn register_handler(&mut self, callback: impl ActionExecutor) { self.register_action(move |workspace, action: &A, window, cx| { - if workspace.has_active_modal(window, cx) { + if workspace.has_active_modal(window, cx) && !workspace.hide_modal(window, cx) { cx.propagate(); return; } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 9014f1764d7a6097908624e0d93bc6174e998445..c5fa7ecd0461c70c924fbbfc02b5090456143fa1 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -8,7 +8,8 @@ use crate::{ use anyhow::Context as _; use collections::HashMap; use editor::{ - Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, SelectionEffects, + Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey, + SelectionEffects, actions::{Backtab, SelectAll, Tab}, items::active_match_index, multibuffer_context_lines, @@ -153,7 +154,7 @@ pub fn init(cx: &mut App) { // Both on 
present and dismissed search, we need to unconditionally handle those actions to focus from the editor. workspace.register_action(move |workspace, action: &DeploySearch, window, cx| { - if workspace.has_active_modal(window, cx) { + if workspace.has_active_modal(window, cx) && !workspace.hide_modal(window, cx) { cx.propagate(); return; } @@ -161,7 +162,7 @@ pub fn init(cx: &mut App) { cx.notify(); }); workspace.register_action(move |workspace, action: &NewSearch, window, cx| { - if workspace.has_active_modal(window, cx) { + if workspace.has_active_modal(window, cx) && !workspace.hide_modal(window, cx) { cx.propagate(); return; } @@ -340,6 +341,7 @@ impl ProjectSearch { .into_iter() .map(|(buffer, ranges)| { excerpts.set_anchored_excerpts_for_path( + PathKey::for_buffer(&buffer, cx), buffer, ranges, multibuffer_context_lines(cx), @@ -389,7 +391,7 @@ pub enum ViewEvent { impl EventEmitter for ProjectSearchView {} impl Render for ProjectSearchView { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { if self.has_matches() { div() .flex_1() @@ -424,7 +426,7 @@ impl Render for ProjectSearchView { None } } else { - Some(self.landing_text_minor(window, cx).into_any_element()) + Some(self.landing_text_minor(cx).into_any_element()) }; let page_content = page_content.map(|text| div().child(text)); @@ -1442,7 +1444,7 @@ impl ProjectSearchView { self.active_match_index.is_some() } - fn landing_text_minor(&self, window: &mut Window, cx: &App) -> impl IntoElement { + fn landing_text_minor(&self, cx: &App) -> impl IntoElement { let focus_handle = self.focus_handle.clone(); v_flex() .gap_1() @@ -1456,12 +1458,7 @@ impl ProjectSearchView { .icon(IconName::Filter) .icon_position(IconPosition::Start) .icon_size(IconSize::Small) - .key_binding(KeyBinding::for_action_in( - &ToggleFilters, - &focus_handle, - window, - cx, - )) + .key_binding(KeyBinding::for_action_in(&ToggleFilters, &focus_handle, cx)) .on_click(|_event, window, cx| { window.dispatch_action(ToggleFilters.boxed_clone(), cx) }), @@ -1471,12 +1468,7 @@ impl ProjectSearchView { .icon(IconName::Replace) .icon_position(IconPosition::Start) .icon_size(IconSize::Small) - .key_binding(KeyBinding::for_action_in( - &ToggleReplace, - &focus_handle, - window, - cx, - )) + .key_binding(KeyBinding::for_action_in(&ToggleReplace, &focus_handle, cx)) .on_click(|_event, window, cx| { window.dispatch_action(ToggleReplace.boxed_clone(), cx) }), @@ -1486,12 +1478,7 @@ impl ProjectSearchView { .icon(IconName::Regex) .icon_position(IconPosition::Start) .icon_size(IconSize::Small) - .key_binding(KeyBinding::for_action_in( - &ToggleRegex, - &focus_handle, - window, - cx, - )) + .key_binding(KeyBinding::for_action_in(&ToggleRegex, &focus_handle, cx)) .on_click(|_event, window, cx| { window.dispatch_action(ToggleRegex.boxed_clone(), cx) }), @@ -1504,7 +1491,6 @@ impl ProjectSearchView { .key_binding(KeyBinding::for_action_in( &ToggleCaseSensitive, &focus_handle, - window, cx, )) .on_click(|_event, window, cx| { @@ -1519,7 +1505,6 @@ impl ProjectSearchView { .key_binding(KeyBinding::for_action_in( &ToggleWholeWord, &focus_handle, - window, cx, )) .on_click(|_event, window, cx| { @@ -2045,8 +2030,8 @@ impl Render for ProjectSearchBar { .child( IconButton::new("project-search-filter-button", IconName::Filter) .shape(IconButtonShape::Square) - .tooltip(|window, cx| { - Tooltip::for_action("Toggle Filters", &ToggleFilters, window, cx) + .tooltip(|_window, cx| { + 
Tooltip::for_action("Toggle Filters", &ToggleFilters, cx) }) .on_click(cx.listener(|this, _, window, cx| { this.toggle_filters(window, cx); @@ -2059,12 +2044,11 @@ impl Render for ProjectSearchBar { ) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_window, cx| { Tooltip::for_action_in( "Toggle Filters", &ToggleFilters, &focus_handle, - window, cx, ) } @@ -2281,7 +2265,7 @@ fn register_workspace_action( callback: fn(&mut ProjectSearchBar, &A, &mut Window, &mut Context), ) { workspace.register_action(move |workspace, action: &A, window, cx| { - if workspace.has_active_modal(window, cx) { + if workspace.has_active_modal(window, cx) && !workspace.hide_modal(window, cx) { cx.propagate(); return; } @@ -2308,7 +2292,7 @@ fn register_workspace_action_for_present_search( callback: fn(&mut Workspace, &A, &mut Window, &mut Context), ) { workspace.register_action(move |workspace, action: &A, window, cx| { - if workspace.has_active_modal(window, cx) { + if workspace.has_active_modal(window, cx) && !workspace.hide_modal(window, cx) { cx.propagate(); return; } @@ -2353,12 +2337,15 @@ pub mod tests { use super::*; use editor::{DisplayPoint, display_map::DisplayRow}; use gpui::{Action, TestAppContext, VisualTestContext, WindowHandle}; + use language::{FakeLspAdapter, rust_lang}; use project::FakeFs; use serde_json::json; - use settings::SettingsStore; + use settings::{InlayHintSettingsContent, SettingsStore}; use util::{path, paths::PathStyle, rel_path::rel_path}; + use util_macros::perf; use workspace::DeploySearch; + #[perf] #[gpui::test] async fn test_project_search(cx: &mut TestAppContext) { init_test(cx); @@ -2496,6 +2483,7 @@ pub mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_deploy_project_search_focus(cx: &mut TestAppContext) { init_test(cx); @@ -2736,6 +2724,7 @@ pub mod tests { }).unwrap(); } + #[perf] #[gpui::test] async fn test_filters_consider_toggle_state(cx: &mut TestAppContext) { init_test(cx); @@ -2856,6 +2845,7 @@ pub mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_new_project_search_focus(cx: &mut TestAppContext) { init_test(cx); @@ -3151,6 +3141,7 @@ pub mod tests { });}).unwrap(); } + #[perf] #[gpui::test] async fn test_new_project_search_in_directory(cx: &mut TestAppContext) { init_test(cx); @@ -3277,6 +3268,7 @@ pub mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_search_query_history(cx: &mut TestAppContext) { init_test(cx); @@ -3607,6 +3599,7 @@ pub mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_search_query_history_with_multiple_views(cx: &mut TestAppContext) { init_test(cx); @@ -3830,6 +3823,7 @@ pub mod tests { assert_eq!(active_query(&search_view_1, cx), ""); } + #[perf] #[gpui::test] async fn test_deploy_search_with_multiple_panes(cx: &mut TestAppContext) { init_test(cx); @@ -3989,6 +3983,7 @@ pub mod tests { .unwrap(); } + #[perf] #[gpui::test] async fn test_scroll_search_results_to_top(cx: &mut TestAppContext) { init_test(cx); @@ -4069,6 +4064,7 @@ pub mod tests { .expect("unable to update search view"); } + #[perf] #[gpui::test] async fn test_buffer_search_query_reused(cx: &mut TestAppContext) { init_test(cx); @@ -4148,6 +4144,162 @@ pub mod tests { }); } + #[gpui::test] + async fn test_search_dismisses_modal(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/dir"), + json!({ + "one.rs": "const ONE: usize = 1;", + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], 
cx).await; + let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + struct EmptyModalView { + focus_handle: gpui::FocusHandle, + } + impl EventEmitter for EmptyModalView {} + impl Render for EmptyModalView { + fn render(&mut self, _: &mut Window, _: &mut Context<'_, Self>) -> impl IntoElement { + div() + } + } + impl Focusable for EmptyModalView { + fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle { + self.focus_handle.clone() + } + } + impl workspace::ModalView for EmptyModalView {} + + window + .update(cx, |workspace, window, cx| { + workspace.toggle_modal(window, cx, |_, cx| EmptyModalView { + focus_handle: cx.focus_handle(), + }); + assert!(workspace.has_active_modal(window, cx)); + }) + .unwrap(); + + cx.dispatch_action(window.into(), Deploy::find()); + + window + .update(cx, |workspace, window, cx| { + assert!(!workspace.has_active_modal(window, cx)); + workspace.toggle_modal(window, cx, |_, cx| EmptyModalView { + focus_handle: cx.focus_handle(), + }); + assert!(workspace.has_active_modal(window, cx)); + }) + .unwrap(); + + cx.dispatch_action(window.into(), DeploySearch::find()); + + window + .update(cx, |workspace, window, cx| { + assert!(!workspace.has_active_modal(window, cx)); + }) + .unwrap(); + } + + #[perf] + #[gpui::test] + async fn test_search_with_inlays(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.inlay_hints = + Some(InlayHintSettingsContent { + enabled: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + }); + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/dir"), + // `\n` , a trailing line on the end, is important for the test case + json!({ + "main.rs": "fn main() { let a = 2; }\n", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let language = rust_lang(); + language_registry.add(language); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new(|fake_server| { + fake_server.set_request_handler::( + move |_, _| async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 17), + label: lsp::InlayHintLabel::String(": i32".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + }, + ); + })), + ..FakeLspAdapter::default() + }, + ); + + let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let workspace = window.root(cx).unwrap(); + let search = cx.new(|cx| ProjectSearch::new(project.clone(), cx)); + let search_view = cx.add_window(|window, cx| { + ProjectSearchView::new(workspace.downgrade(), search.clone(), window, cx, None) + }); + + perform_search(search_view, "let ", cx); + let _fake_server = fake_servers.next().await.unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + search_view + .update(cx, |search_view, _, cx| { + assert_eq!( + search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)), + "\n\nfn main() { let a: i32 = 2; }\n" + ); + }) + 
.unwrap(); + + // Can do the 2nd search without any panics + perform_search(search_view, "let ", cx); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + search_view + .update(cx, |search_view, _, cx| { + assert_eq!( + search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)), + "\n\nfn main() { let a: i32 = 2; }\n" + ); + }) + .unwrap(); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings = SettingsStore::test(cx); diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index 147ffcbbfb1956a4e258b7242729d366f4c2d1be..6663f8c3184aba9fedbcd5faa3d80d5889181074 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -158,9 +158,7 @@ impl SearchOption { .style(ButtonStyle::Subtle) .shape(IconButtonShape::Square) .toggle_state(active.contains(self.as_options())) - .tooltip({ - move |window, cx| Tooltip::for_action_in(label, action, &focus_handle, window, cx) - }) + .tooltip(move |_window, cx| Tooltip::for_action_in(label, action, &focus_handle, cx)) } } diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 631b96b69f3b9aedd4ed299953edf6e63665ba99..14a5fefcf7341694260da96a8f2c43d149356074 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -32,7 +32,7 @@ pub(super) fn render_action_button( window.dispatch_action(action.boxed_clone(), cx) } }) - .tooltip(move |window, cx| Tooltip::for_action_in(tooltip, action, &focus_handle, window, cx)) + .tooltip(move |_window, cx| Tooltip::for_action_in(tooltip, action, &focus_handle, cx)) .when_some(button_state, |this, state| match state { ActionButtonState::Toggled => this.toggle_state(true), ActionButtonState::Disabled => this.disabled(true), diff --git a/crates/search/src/search_status_button.rs b/crates/search/src/search_status_button.rs index fcf36e86fa84a96117fa9b1f257d422d0bc50978..712a322c1094f28ea601d6d170e7be1e395e25f7 100644 --- a/crates/search/src/search_status_button.rs +++ b/crates/search/src/search_status_button.rs @@ -18,19 +18,14 @@ impl Render for SearchButton { let button = div(); if !EditorSettings::get_global(cx).search.button { - return button.w_0().invisible(); + return button.hidden(); } button.child( IconButton::new("project-search-indicator", SEARCH_ICON) .icon_size(IconSize::Small) - .tooltip(|window, cx| { - Tooltip::for_action( - "Project Search", - &workspace::DeploySearch::default(), - window, - cx, - ) + .tooltip(|_window, cx| { + Tooltip::for_action("Project Search", &workspace::DeploySearch::default(), cx) }) .on_click(cx.listener(|_this, _, window, cx| { window.dispatch_action(Box::new(workspace::DeploySearch::default()), cx); diff --git a/crates/semantic_version/Cargo.toml b/crates/semantic_version/Cargo.toml index b0ecef94d71026a0b596baafc7460b96ade7d4c0..a8bd3ab5ccba24700cc8de9607f825d022967b0b 100644 --- a/crates/semantic_version/Cargo.toml +++ b/crates/semantic_version/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-semantic-version" +name = "semantic_version" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A library for working with semantic versioning in gpui and Zed" @@ -15,4 +15,3 @@ path = "src/semantic_version.rs" [dependencies] anyhow.workspace = true serde.workspace = true -workspace-hack.workspace = true diff --git a/crates/session/Cargo.toml b/crates/session/Cargo.toml index 
a0b9c5f2200f40f5c5583b9d23739551f79218f1..15c3acb8f08e3dce0a4c8a0698d0dd383d79cdd9 100644 --- a/crates/session/Cargo.toml +++ b/crates/session/Cargo.toml @@ -23,4 +23,3 @@ gpui.workspace = true uuid.workspace = true util.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index cc2a39ce230ea0a8e8f9774cc2d8ee33d4e13037..c4b6bb878a4a1d960c3774fc393d138b530aa7ca 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -41,7 +41,6 @@ strum.workspace = true tree-sitter-json.workspace = true tree-sitter.workspace = true util.workspace = true -workspace-hack.workspace = true zlog.workspace = true [dev-dependencies] diff --git a/crates/settings/src/base_keymap_setting.rs b/crates/settings/src/base_keymap_setting.rs index b2b19864256704fe1a8e1eb929743d37b7ba4407..4915bdd85319e4abaf3ea575d387a39cc14f302d 100644 --- a/crates/settings/src/base_keymap_setting.rs +++ b/crates/settings/src/base_keymap_setting.rs @@ -1,12 +1,9 @@ use std::fmt::{Display, Formatter}; -use crate::{ - self as settings, - settings_content::{BaseKeymapContent, SettingsContent}, -}; +use crate::{self as settings, settings_content::BaseKeymapContent}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, VsCodeSettings}; +use settings::Settings; /// Base key bindings scheme. Base keymaps can be overridden with user keymaps. /// @@ -56,7 +53,7 @@ impl Into for BaseKeymap { impl Display for BaseKeymap { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - BaseKeymap::VSCode => write!(f, "VSCode"), + BaseKeymap::VSCode => write!(f, "VS Code"), BaseKeymap::JetBrains => write!(f, "JetBrains"), BaseKeymap::SublimeText => write!(f, "Sublime Text"), BaseKeymap::Atom => write!(f, "Atom"), @@ -71,7 +68,7 @@ impl Display for BaseKeymap { impl BaseKeymap { #[cfg(target_os = "macos")] pub const OPTIONS: [(&'static str, Self); 7] = [ - ("VSCode (Default)", Self::VSCode), + ("VS Code (Default)", Self::VSCode), ("Atom", Self::Atom), ("JetBrains", Self::JetBrains), ("Sublime Text", Self::SublimeText), @@ -82,7 +79,7 @@ impl BaseKeymap { #[cfg(not(target_os = "macos"))] pub const OPTIONS: [(&'static str, Self); 6] = [ - ("VSCode (Default)", Self::VSCode), + ("VS Code (Default)", Self::VSCode), ("Atom", Self::Atom), ("JetBrains", Self::JetBrains), ("Sublime Text", Self::SublimeText), @@ -133,8 +130,4 @@ impl Settings for BaseKeymap { fn from_settings(s: &crate::settings_content::SettingsContent) -> Self { s.base_keymap.unwrap().into() } - - fn import_from_vscode(_vscode: &VsCodeSettings, current: &mut SettingsContent) { - current.base_keymap = Some(BaseKeymapContent::VSCode); - } } diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index cab35c1ad2a69fabf95ac1772d84c108ad1b93b5..b69b498a6c5596d8bbc78799e8edbe0befc9c35e 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -360,11 +360,10 @@ impl KeymapFile { } } - fn build_keymap_action( + pub fn parse_action( action: &KeymapAction, - cx: &App, - ) -> std::result::Result<(Box, Option), String> { - let (build_result, action_input_string) = match &action.0 { + ) -> Result)>, String> { + let name_and_input = match &action.0 { Value::Array(items) => { if items.len() != 2 { return Err(format!( @@ -380,22 +379,10 @@ impl KeymapFile { MarkdownInlineCode(&action.0.to_string()) )); }; - let action_input = items[1].clone(); - if name.as_str() == 
ActionSequence::name_for_type() { - (ActionSequence::build_sequence(action_input, cx), None) - } else { - let action_input_string = action_input.to_string(); - ( - cx.build_action(name, Some(action_input)), - Some(action_input_string), - ) - } + Some((name, Some(&items[1]))) } - Value::String(name) if name.as_str() == ActionSequence::name_for_type() => { - (Err(ActionSequence::expected_array_error()), None) - } - Value::String(name) => (cx.build_action(name, None), None), - Value::Null => (Ok(NoAction.boxed_clone()), None), + Value::String(name) => Some((name, None)), + Value::Null => None, _ => { return Err(format!( "expected two-element array of `[name, input]`. \ @@ -404,6 +391,33 @@ )); } }; + Ok(name_and_input) + } + + fn build_keymap_action( + action: &KeymapAction, + cx: &App, + ) -> std::result::Result<(Box<dyn Action>, Option<String>), String> { + let (build_result, action_input_string) = match Self::parse_action(action)? { + Some((name, action_input)) if name.as_str() == ActionSequence::name_for_type() => { + match action_input { + Some(action_input) => ( + ActionSequence::build_sequence(action_input.clone(), cx), + None, + ), + None => (Err(ActionSequence::expected_array_error()), None), + } + } + Some((name, Some(action_input))) => { + let action_input_string = action_input.to_string(); + ( + cx.build_action(name, Some(action_input.clone())), + Some(action_input_string), + ) + } + Some((name, None)) => (cx.build_action(name, None), None), + None => (Ok(NoAction.boxed_clone()), None), + }; let action = match build_result { Ok(action) => action, diff --git a/crates/settings/src/serde_helper.rs b/crates/settings/src/serde_helper.rs new file mode 100644 index 0000000000000000000000000000000000000000..1c1826abd4a66dcfdb51652a331218b769aa0881 --- /dev/null +++ b/crates/settings/src/serde_helper.rs @@ -0,0 +1,135 @@ +use serde::Serializer; + +/// Serializes an f32 value with 2 decimal places of precision. +/// +/// This function rounds the value to 2 decimal places and formats it as a string, +/// then parses it back to f64 before serialization. This ensures clean JSON output +/// without IEEE 754 floating-point artifacts. +/// +/// # Arguments +/// +/// * `value` - The f32 value to serialize +/// * `serializer` - The serde serializer to use +/// +/// # Returns +/// +/// Result of the serialization operation +/// +/// # Usage +/// +/// This function can be used with Serde's `serialize_with` attribute: +/// ``` +/// use serde::Serialize; +/// use settings::serialize_f32_with_two_decimal_places; +/// +/// #[derive(Serialize)] +/// struct ExampleStruct(#[serde(serialize_with = "serialize_f32_with_two_decimal_places")] f32); +/// ``` +pub fn serialize_f32_with_two_decimal_places<S>( + value: &f32, + serializer: S, +) -> Result<S::Ok, S::Error> +where + S: Serializer, +{ + let rounded = (value * 100.0).round() / 100.0; + let formatted = format!("{:.2}", rounded); + let clean_value: f64 = formatted.parse().unwrap_or(rounded as f64); + serializer.serialize_f64(clean_value) +} + +/// Serializes an optional f32 value with 2 decimal places of precision. +/// +/// This function handles `Option<f32>` types, serializing `Some` values with 2 decimal +/// places of precision and `None` values as null. For `Some` values, it rounds to 2 decimal +/// places and formats as a string, then parses back to f64 before serialization. This ensures +/// clean JSON output without IEEE 754 floating-point artifacts. 
+/// +/// # Arguments +/// +/// * `value` - The optional f32 value to serialize +/// * `serializer` - The serde serializer to use +/// +/// # Returns +/// +/// Result of the serialization operation +/// +/// # Behavior +/// +/// * `Some(v)` - Serializes the value rounded to 2 decimal places +/// * `None` - Serializes as JSON null +/// +/// # Usage +/// +/// This function can be used with Serde's `serialize_with` attribute: +/// ``` +/// use serde::Serialize; +/// use settings::serialize_optional_f32_with_two_decimal_places; +/// +/// #[derive(Serialize)] +/// struct ExampleStruct { +/// #[serde(serialize_with = "serialize_optional_f32_with_two_decimal_places")] +/// optional_value: Option, +/// } +/// ``` +pub fn serialize_optional_f32_with_two_decimal_places( + value: &Option, + serializer: S, +) -> Result +where + S: Serializer, +{ + match value { + Some(v) => { + let rounded = (v * 100.0).round() / 100.0; + let formatted = format!("{:.2}", rounded); + let clean_value: f64 = formatted.parse().unwrap_or(rounded as f64); + serializer.serialize_some(&clean_value) + } + None => serializer.serialize_none(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde::{Deserialize, Serialize}; + + #[derive(Serialize, Deserialize)] + struct TestOptional { + #[serde(serialize_with = "serialize_optional_f32_with_two_decimal_places")] + value: Option, + } + + #[derive(Serialize, Deserialize)] + struct TestNonOptional { + #[serde(serialize_with = "serialize_f32_with_two_decimal_places")] + value: f32, + } + + #[test] + fn test_serialize_optional_f32_with_two_decimal_places() { + let cases = [ + (Some(123.456789), r#"{"value":123.46}"#), + (Some(1.2), r#"{"value":1.2}"#), + (Some(300.00000), r#"{"value":300.0}"#), + ]; + for (value, expected) in cases { + let value = TestOptional { value }; + assert_eq!(serde_json::to_string(&value).unwrap(), expected); + } + } + + #[test] + fn test_serialize_f32_with_two_decimal_places() { + let cases = [ + (123.456789, r#"{"value":123.46}"#), + (1.200, r#"{"value":1.2}"#), + (300.00000, r#"{"value":300.0}"#), + ]; + for (value, expected) in cases { + let value = TestNonOptional { value }; + assert_eq!(serde_json::to_string(&value).unwrap(), expected); + } + } +} diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 6fe078301abab974ba202660319966e7df42027a..5dad953b32afcd027c0b6c4ec4be36f9659ce022 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -2,6 +2,7 @@ mod base_keymap_setting; mod editable_setting_control; mod keymap_file; pub mod merge_from; +mod serde_helper; mod settings_content; mod settings_file; mod settings_json; @@ -21,6 +22,7 @@ pub use keymap_file::{ KeyBindingValidator, KeyBindingValidatorRegistration, KeybindSource, KeybindUpdateOperation, KeybindUpdateTarget, KeymapFile, KeymapFileLoadResult, }; +pub use serde_helper::*; pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index 3599ac4110360c62071ea40bd2c73935fc5116ec..42b88bd3654159ca3ad55dfecffbe3d4e2b547d0 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -234,7 +234,6 @@ impl UserSettingsContent { Eq, Default, strum::VariantArray, - strum::VariantNames, )] pub enum BaseKeymapContent { #[default] @@ -248,6 +247,19 @@ pub enum BaseKeymapContent { None, } +impl strum::VariantNames for BaseKeymapContent { + const VARIANTS: &'static [&'static str] = &[ + 
"VSCode", + "JetBrains", + "Sublime Text", + "Atom", + "TextMate", + "Emacs", + "Cursor", + "None", + ]; +} + #[skip_serializing_none] #[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] pub struct TitleBarSettingsContent { @@ -473,6 +485,7 @@ pub struct GitPanelSettingsContent { /// Default width of the panel in pixels. /// /// Default: 360 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, /// How entry statuses are displayed. /// @@ -502,7 +515,18 @@ pub struct GitPanelSettingsContent { } #[derive( - Default, Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq, + Default, + Copy, + Clone, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + Eq, + strum::VariantArray, + strum::VariantNames, )] #[serde(rename_all = "snake_case")] pub enum StatusStyle { @@ -512,7 +536,9 @@ pub enum StatusStyle { } #[skip_serializing_none] -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq)] +#[derive( + Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq, +)] pub struct ScrollbarSettings { pub show: Option, } @@ -531,6 +557,7 @@ pub struct NotificationPanelSettingsContent { /// Default width of the panel in pixels. /// /// Default: 300 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, } @@ -548,6 +575,7 @@ pub struct PanelSettingsContent { /// Default width of the panel in pixels. /// /// Default: 240 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, } @@ -583,14 +611,33 @@ pub struct FileFinderSettingsContent { /// Whether to use gitignored files when searching. /// Only the file Zed had indexed will be used, not necessary all the gitignored files. /// - /// Can accept 3 values: - /// * `Some(true)`: Use all gitignored files - /// * `Some(false)`: Use only the files Zed had indexed - /// * `None`: Be smart and search for ignored when called from a gitignored worktree - /// - /// Default: None - /// todo() -> Change this type to an enum - pub include_ignored: Option, + /// Default: Smart + pub include_ignored: Option, +} + +#[derive( + Debug, + PartialEq, + Eq, + Clone, + Copy, + Default, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum IncludeIgnoredContent { + /// Use all gitignored files + All, + /// Use only the files Zed had indexed + Indexed, + /// Be smart and search for ignored when called from a gitignored worktree + #[default] + Smart, } #[derive( @@ -703,6 +750,7 @@ pub struct OutlinePanelSettingsContent { /// Customize default width (in pixels) taken by outline panel /// /// Default: 240 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, /// The position of outline panel /// @@ -723,6 +771,7 @@ pub struct OutlinePanelSettingsContent { /// Amount of indentation (in pixels) for nested items. /// /// Default: 20 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub indent_size: Option, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. 
@@ -814,7 +863,19 @@ pub struct ImageViewerSettingsContent { } #[skip_serializing_none] -#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, Default, PartialEq)] +#[derive( + Clone, + Copy, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + Default, + PartialEq, + strum::VariantArray, + strum::VariantNames, +)] #[serde(rename_all = "snake_case")] pub enum ImageFileSizeUnit { /// Displays file size in binary units (e.g., KiB, MiB). @@ -948,3 +1009,248 @@ impl merge_from::MergeFrom for SaturatingBool { self.0 |= other.0 } } + +#[derive( + Copy, + Clone, + Default, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Deserialize, + MergeFrom, + JsonSchema, + derive_more::FromStr, +)] +#[serde(transparent)] +pub struct DelayMs(pub u64); + +impl From for DelayMs { + fn from(n: u64) -> Self { + Self(n) + } +} + +impl std::fmt::Display for DelayMs { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}ms", self.0) + } +} + +/// A wrapper type that distinguishes between an explicitly set value (including null) and an unset value. +/// +/// This is useful for configuration where you need to differentiate between: +/// - A field that is not present in the configuration file (`Maybe::Unset`) +/// - A field that is explicitly set to `null` (`Maybe::Set(None)`) +/// - A field that is explicitly set to a value (`Maybe::Set(Some(value))`) +/// +/// # Examples +/// +/// In JSON: +/// - `{}` (field missing) deserializes to `Maybe::Unset` +/// - `{"field": null}` deserializes to `Maybe::Set(None)` +/// - `{"field": "value"}` deserializes to `Maybe::Set(Some("value"))` +/// +/// WARN: This type should not be wrapped in an option inside of settings, otherwise the default `serde_json` behavior +/// of treating `null` and missing as the `Option::None` will be used +#[derive(Debug, Clone, PartialEq, Eq, strum::EnumDiscriminants, Default)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] +pub enum Maybe { + /// An explicitly set value, which may be `None` (representing JSON `null`) or `Some(value)`. + Set(Option), + /// A value that was not present in the configuration. 
+ #[default] + Unset, +} + +impl merge_from::MergeFrom for Maybe { + fn merge_from(&mut self, other: &Self) { + if self.is_unset() { + *self = other.clone(); + } + } +} + +impl From>> for Maybe { + fn from(value: Option>) -> Self { + match value { + Some(value) => Maybe::Set(value), + None => Maybe::Unset, + } + } +} + +impl Maybe { + pub fn is_set(&self) -> bool { + matches!(self, Maybe::Set(_)) + } + + pub fn is_unset(&self) -> bool { + matches!(self, Maybe::Unset) + } + + pub fn into_inner(self) -> Option { + match self { + Maybe::Set(value) => value, + Maybe::Unset => None, + } + } + + pub fn as_ref(&self) -> Option<&Option> { + match self { + Maybe::Set(value) => Some(value), + Maybe::Unset => None, + } + } +} + +impl serde::Serialize for Maybe { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + Maybe::Set(value) => value.serialize(serializer), + Maybe::Unset => serializer.serialize_none(), + } + } +} + +impl<'de, T: serde::Deserialize<'de>> serde::Deserialize<'de> for Maybe { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Option::::deserialize(deserializer).map(Maybe::Set) + } +} + +impl JsonSchema for Maybe { + fn schema_name() -> std::borrow::Cow<'static, str> { + format!("Nullable<{}>", T::schema_name()).into() + } + + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + let mut schema = generator.subschema_for::>(); + // Add description explaining that null is an explicit value + let description = if let Some(existing_desc) = + schema.get("description").and_then(|desc| desc.as_str()) + { + format!( + "{}. Note: `null` is treated as an explicit value, different from omitting the field entirely.", + existing_desc + ) + } else { + "This field supports explicit `null` values. 
Omitting the field is different from setting it to `null`.".to_string() + }; + + schema.insert("description".to_string(), description.into()); + + schema + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json; + + #[test] + fn test_maybe() { + #[derive(Debug, PartialEq, Serialize, Deserialize)] + struct TestStruct { + #[serde(default)] + #[serde(skip_serializing_if = "Maybe::is_unset")] + field: Maybe, + } + + #[derive(Debug, PartialEq, Serialize, Deserialize)] + struct NumericTest { + #[serde(default)] + value: Maybe, + } + + let json = "{}"; + let result: TestStruct = serde_json::from_str(json).unwrap(); + assert!(result.field.is_unset()); + assert_eq!(result.field, Maybe::Unset); + + let json = r#"{"field": null}"#; + let result: TestStruct = serde_json::from_str(json).unwrap(); + assert!(result.field.is_set()); + assert_eq!(result.field, Maybe::Set(None)); + + let json = r#"{"field": "hello"}"#; + let result: TestStruct = serde_json::from_str(json).unwrap(); + assert!(result.field.is_set()); + assert_eq!(result.field, Maybe::Set(Some("hello".to_string()))); + + let test = TestStruct { + field: Maybe::Unset, + }; + let json = serde_json::to_string(&test).unwrap(); + assert_eq!(json, "{}"); + + let test = TestStruct { + field: Maybe::Set(None), + }; + let json = serde_json::to_string(&test).unwrap(); + assert_eq!(json, r#"{"field":null}"#); + + let test = TestStruct { + field: Maybe::Set(Some("world".to_string())), + }; + let json = serde_json::to_string(&test).unwrap(); + assert_eq!(json, r#"{"field":"world"}"#); + + let default_maybe: Maybe = Maybe::default(); + assert!(default_maybe.is_unset()); + + let unset: Maybe = Maybe::Unset; + assert!(unset.is_unset()); + assert!(!unset.is_set()); + + let set_none: Maybe = Maybe::Set(None); + assert!(set_none.is_set()); + assert!(!set_none.is_unset()); + + let set_some: Maybe = Maybe::Set(Some("value".to_string())); + assert!(set_some.is_set()); + assert!(!set_some.is_unset()); + + let original = TestStruct { + field: Maybe::Set(Some("test".to_string())), + }; + let json = serde_json::to_string(&original).unwrap(); + let deserialized: TestStruct = serde_json::from_str(&json).unwrap(); + assert_eq!(original, deserialized); + + let json = r#"{"value": 42}"#; + let result: NumericTest = serde_json::from_str(json).unwrap(); + assert_eq!(result.value, Maybe::Set(Some(42))); + + let json = r#"{"value": null}"#; + let result: NumericTest = serde_json::from_str(json).unwrap(); + assert_eq!(result.value, Maybe::Set(None)); + + let json = "{}"; + let result: NumericTest = serde_json::from_str(json).unwrap(); + assert_eq!(result.value, Maybe::Unset); + + // Test JsonSchema implementation + use schemars::schema_for; + let schema = schema_for!(Maybe); + let schema_json = serde_json::to_value(&schema).unwrap(); + + // Verify the description mentions that null is an explicit value + let description = schema_json["description"].as_str().unwrap(); + assert!( + description.contains("null") && description.contains("explicit"), + "Schema description should mention that null is an explicit value. 
Got: {}", + description + ); + } +} diff --git a/crates/settings/src/settings_content/agent.rs b/crates/settings/src/settings_content/agent.rs index 9644cbb3bd455f42052d0c4c45d958d9a492d712..c641f280e177669a2af14e91c844f2a5f059b648 100644 --- a/crates/settings/src/settings_content/agent.rs +++ b/crates/settings/src/settings_content/agent.rs @@ -26,10 +26,12 @@ pub struct AgentSettingsContent { /// Default width in pixels when the agent panel is docked to the left or right. /// /// Default: 640 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, /// Default height in pixels when the agent panel is docked to the bottom. /// /// Default: 320 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_height: Option, /// The default model to use when creating new chats and for other features when a specific model is not specified. pub default_model: Option, @@ -68,10 +70,6 @@ pub struct AgentSettingsContent { /// /// Default: false pub play_sound_when_agent_done: Option, - /// Whether to stream edits from the agent as they are received. - /// - /// Default: false - pub stream_edits: Option, /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane. /// /// Default: true @@ -194,7 +192,19 @@ pub enum DefaultAgentView { TextThread, } -#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] +#[derive( + Copy, + Clone, + Default, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + strum::VariantArray, + strum::VariantNames, +)] #[serde(rename_all = "snake_case")] pub enum NotifyWhenAgentWaiting { #[default] @@ -224,6 +234,7 @@ pub enum CompletionMode { pub struct LanguageModelParameters { pub provider: Option, pub model: Option, + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub temperature: Option, } diff --git a/crates/settings/src/settings_content/editor.rs b/crates/settings/src/settings_content/editor.rs index 5bd1b0daf9206b6ea97374a0281c7f737e0fc2e0..920f02a0f6597454c82d421247787e8ad6f7f74b 100644 --- a/crates/settings/src/settings_content/editor.rs +++ b/crates/settings/src/settings_content/editor.rs @@ -7,7 +7,9 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use settings_macros::MergeFrom; -use crate::{DiagnosticSeverityContent, ShowScrollbar}; +use crate::{ + DelayMs, DiagnosticSeverityContent, ShowScrollbar, serialize_f32_with_two_decimal_places, +}; #[skip_serializing_none] #[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] @@ -45,7 +47,7 @@ pub struct EditorSettingsContent { /// server based on the current cursor location. /// /// Default: 75 - pub lsp_highlight_debounce: Option, + pub lsp_highlight_debounce: Option, /// Whether to show the informational hover box when moving the mouse /// over symbols in the editor. /// @@ -54,7 +56,7 @@ pub struct EditorSettingsContent { /// Time to wait in milliseconds before showing the informational hover box. /// /// Default: 300 - pub hover_popover_delay: Option, + pub hover_popover_delay: Option, /// Toolbar related settings pub toolbar: Option, /// Scrollbar related settings @@ -70,6 +72,7 @@ pub struct EditorSettingsContent { /// The number of lines to keep above/below the cursor when auto-scrolling. /// /// Default: 3. 
+ #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub vertical_scroll_margin: Option, /// Whether to scroll when clicking near the edge of the visible text area. /// @@ -78,17 +81,20 @@ pub struct EditorSettingsContent { /// The number of characters to keep on either side when scrolling with the mouse. /// /// Default: 5. + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub horizontal_scroll_margin: Option, /// Scroll sensitivity multiplier. This multiplier is applied /// to both the horizontal and vertical delta values while scrolling. /// /// Default: 1.0 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub scroll_sensitivity: Option, /// Scroll sensitivity multiplier for fast scrolling. This multiplier is applied /// to both the horizontal and vertical delta values while scrolling. Fast scrolling /// happens when a user holds the alt or option key while scrolling. /// /// Default: 4.0 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub fast_scroll_sensitivity: Option, /// Whether the line numbers on editors gutter are relative or not. /// @@ -334,7 +340,18 @@ pub struct GutterContent { /// How to render LSP `textDocument/documentColor` colors in the editor. #[derive( - Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom, + Copy, + Clone, + Debug, + Default, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, )] #[serde(rename_all = "snake_case")] pub enum DocumentColorsRenderMode { @@ -711,7 +728,7 @@ pub struct DragAndDropSelectionContent { /// The delay in milliseconds that must elapse before drag and drop is allowed. Otherwise, a new text selection is created. /// /// Default: 300 - pub delay: Option, + pub delay: Option, } /// When to show the minimap in the editor. @@ -785,10 +802,113 @@ pub enum DisplayIn { derive_more::FromStr, )] #[serde(transparent)] -pub struct MinimumContrast(pub f32); +pub struct MinimumContrast( + #[serde(serialize_with = "crate::serialize_f32_with_two_decimal_places")] pub f32, +); impl Display for MinimumContrast { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:.1}", self.0) } } + +impl From for MinimumContrast { + fn from(x: f32) -> Self { + Self(x) + } +} + +/// Opacity of the inactive panes. 0 means transparent, 1 means opaque. +/// +/// Valid range: 0.0 to 1.0 +/// Default: 1.0 +#[derive( + Clone, + Copy, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + PartialOrd, + derive_more::FromStr, +)] +#[serde(transparent)] +pub struct InactiveOpacity( + #[serde(serialize_with = "serialize_f32_with_two_decimal_places")] pub f32, +); + +impl Display for InactiveOpacity { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:.1}", self.0) + } +} + +impl From for InactiveOpacity { + fn from(x: f32) -> Self { + Self(x) + } +} + +/// Centered layout related setting (left/right). 
+/// +/// Valid range: 0.0 to 0.4 +/// Default: 2.0 +#[derive( + Clone, + Copy, + Debug, + Serialize, + Deserialize, + MergeFrom, + PartialEq, + PartialOrd, + derive_more::FromStr, +)] +#[serde(transparent)] +pub struct CenteredPaddingSettings( + #[serde(serialize_with = "serialize_f32_with_two_decimal_places")] pub f32, +); + +impl CenteredPaddingSettings { + pub const MIN_PADDING: f32 = 0.0; + // This is an f64 so serde_json can give a type hint without random numbers in the back + pub const DEFAULT_PADDING: f64 = 0.2; + pub const MAX_PADDING: f32 = 0.4; +} + +impl Display for CenteredPaddingSettings { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:.2}", self.0) + } +} + +impl From for CenteredPaddingSettings { + fn from(x: f32) -> Self { + Self(x) + } +} + +impl Default for CenteredPaddingSettings { + fn default() -> Self { + Self(Self::DEFAULT_PADDING as f32) + } +} + +impl schemars::JsonSchema for CenteredPaddingSettings { + fn schema_name() -> std::borrow::Cow<'static, str> { + "CenteredPaddingSettings".into() + } + + fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema { + use schemars::json_schema; + json_schema!({ + "type": "number", + "minimum": Self::MIN_PADDING, + "maximum": Self::MAX_PADDING, + "default": Self::DEFAULT_PADDING, + "description": "Centered layout related setting (left/right)." + }) + } +} diff --git a/crates/settings/src/settings_content/language.rs b/crates/settings/src/settings_content/language.rs index 856a0d9394626eb42583bdc32940fb3bebfcb552..a5dbd682d2ca4943e6230789acad96c5d7e2a742 100644 --- a/crates/settings/src/settings_content/language.rs +++ b/crates/settings/src/settings_content/language.rs @@ -1,12 +1,9 @@ -use std::{borrow::Cow, num::NonZeroU32}; +use std::num::NonZeroU32; use collections::{HashMap, HashSet}; use gpui::{Modifiers, SharedString}; -use schemars::{JsonSchema, json_schema}; -use serde::{ - Deserialize, Deserializer, Serialize, - de::{self, IntoDeserializer, MapAccess, SeqAccess, Visitor}, -}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use settings_macros::MergeFrom; use std::sync::Arc; @@ -30,8 +27,7 @@ pub struct AllLanguageSettingsContent { pub languages: LanguageToSettingsMap, /// Settings for associating file extensions and filenames /// with languages. - #[serde(default)] - pub file_types: HashMap, ExtendingVec>, + pub file_types: Option, ExtendingVec>>, } impl merge_from::MergeFrom for AllLanguageSettingsContent { @@ -246,7 +242,7 @@ pub struct LanguageSettingsContent { /// How to perform a buffer format. /// /// Default: auto - pub formatter: Option, + pub formatter: Option, /// Zed's Prettier integration settings. /// Allows to enable/disable formatting with Prettier /// and configure default Prettier, used when no project-level Prettier installation is found. @@ -300,12 +296,12 @@ pub struct LanguageSettingsContent { /// Inlay hint related settings. pub inlay_hints: Option, /// Whether to automatically type closing characters for you. For example, - /// when you type (, Zed will automatically add a closing ) at the correct position. + /// when you type '(', Zed will automatically add a closing ')' at the correct position. /// /// Default: true pub use_autoclose: Option, /// Whether to automatically surround text with characters for you. For example, - /// when you select text and type (, Zed will automatically surround text with (). 
+ /// when you select text and type '(', Zed will automatically surround text with (). /// /// Default: true pub use_auto_surround: Option, @@ -322,6 +318,11 @@ pub struct LanguageSettingsContent { /// /// Default: true pub use_on_type_format: Option, + /// Which code actions to run on save after the formatter. + /// These are not run if formatting is off. + /// + /// Default: {} (or {"source.organizeImports": true} for Go). + pub code_actions_on_format: Option>, /// Whether to perform linked edits of associated ranges, if the language server supports it. /// For example, when editing opening tag, the contents of the closing tag will be edited as well. /// @@ -639,102 +640,6 @@ pub enum FormatOnSave { Off, } -/// Controls which formatter should be used when formatting code. -#[derive(Clone, Debug, Default, PartialEq, Eq, MergeFrom)] -pub enum SelectedFormatter { - /// Format files using Zed's Prettier integration (if applicable), - /// or falling back to formatting via language server. - #[default] - Auto, - List(FormatterList), -} - -impl JsonSchema for SelectedFormatter { - fn schema_name() -> Cow<'static, str> { - "Formatter".into() - } - - fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { - let formatter_schema = Formatter::json_schema(generator); - - json_schema!({ - "oneOf": [ - { - "type": "array", - "items": formatter_schema - }, - { - "type": "string", - "enum": ["auto", "language_server"] - }, - formatter_schema - ] - }) - } -} - -impl Serialize for SelectedFormatter { - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - match self { - SelectedFormatter::Auto => serializer.serialize_str("auto"), - SelectedFormatter::List(list) => list.serialize(serializer), - } - } -} - -impl<'de> Deserialize<'de> for SelectedFormatter { - fn deserialize(deserializer: D) -> std::result::Result - where - D: Deserializer<'de>, - { - struct FormatDeserializer; - - impl<'d> Visitor<'d> for FormatDeserializer { - type Value = SelectedFormatter; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("a valid formatter kind") - } - fn visit_str(self, v: &str) -> std::result::Result - where - E: serde::de::Error, - { - if v == "auto" { - Ok(Self::Value::Auto) - } else if v == "language_server" { - Ok(Self::Value::List(FormatterList::Single( - Formatter::LanguageServer { name: None }, - ))) - } else { - let ret: Result = - Deserialize::deserialize(v.into_deserializer()); - ret.map(SelectedFormatter::List) - } - } - fn visit_map(self, map: A) -> Result - where - A: MapAccess<'d>, - { - let ret: Result = - Deserialize::deserialize(de::value::MapAccessDeserializer::new(map)); - ret.map(SelectedFormatter::List) - } - fn visit_seq(self, map: A) -> Result - where - A: SeqAccess<'d>, - { - let ret: Result = - Deserialize::deserialize(de::value::SeqAccessDeserializer::new(map)); - ret.map(SelectedFormatter::List) - } - } - deserializer.deserialize_any(FormatDeserializer) - } -} - /// Controls which formatters should be used when formatting code. #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] #[serde(untagged)] @@ -762,10 +667,11 @@ impl AsRef<[Formatter]> for FormatterList { #[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] #[serde(rename_all = "snake_case")] pub enum Formatter { - /// Format code using the current language server. 
- LanguageServer { name: Option }, - /// Format code using Zed's Prettier integration. + /// Format files using Zed's Prettier integration (if applicable), + /// or falling back to formatting via language server. #[default] + Auto, + /// Format code using Zed's Prettier integration. Prettier, /// Format code using an external command. External { @@ -776,6 +682,73 @@ pub enum Formatter { }, /// Files should be formatted using a code action executed by language servers. CodeAction(String), + /// Format code using a language server. + #[serde(untagged)] + LanguageServer(LanguageServerFormatterSpecifier), +} + +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[serde( + rename_all = "snake_case", + // allow specifying language servers as "language_server" or {"language_server": {"name": ...}} + from = "LanguageServerVariantContent", + into = "LanguageServerVariantContent" +)] +pub enum LanguageServerFormatterSpecifier { + Specific { + name: String, + }, + #[default] + Current, +} + +impl From for LanguageServerFormatterSpecifier { + fn from(value: LanguageServerVariantContent) -> Self { + match value { + LanguageServerVariantContent::Specific { + language_server: LanguageServerSpecifierContent { name: Some(name) }, + } => Self::Specific { name }, + _ => Self::Current, + } + } +} + +impl From for LanguageServerVariantContent { + fn from(value: LanguageServerFormatterSpecifier) -> Self { + match value { + LanguageServerFormatterSpecifier::Specific { name } => Self::Specific { + language_server: LanguageServerSpecifierContent { name: Some(name) }, + }, + LanguageServerFormatterSpecifier::Current => { + Self::Current(CurrentLanguageServerContent::LanguageServer) + } + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[serde(rename_all = "snake_case", untagged)] +enum LanguageServerVariantContent { + /// Format code using a specific language server. + Specific { + language_server: LanguageServerSpecifierContent, + }, + /// Format code using the current language server. + Current(CurrentLanguageServerContent), +} + +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[serde(rename_all = "snake_case")] +enum CurrentLanguageServerContent { + #[default] + LanguageServer, +} + +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[serde(rename_all = "snake_case")] +struct LanguageServerSpecifierContent { + /// The name of the language server to format with + name: Option, } /// The settings for indent guides. 
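The `LanguageServerFormatterSpecifier` introduced above leans on serde's `from`/`into` container attributes plus an untagged helper enum, so both the bare "language_server" string and the `{"language_server": {"name": ...}}` object deserialize into one type. The following is a self-contained sketch of that pattern under shortened, hypothetical names (`Specifier`, `Content`, `Inner`, `Keyword` are not crate items), not the actual implementation.

use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(from = "Content", into = "Content")]
enum Specifier {
    Specific { name: String },
    Current,
}

// Untagged: serde tries the object form first, then falls back to the keyword string.
#[derive(Serialize, Deserialize)]
#[serde(untagged)]
enum Content {
    Specific { language_server: Inner },
    Current(Keyword),
}

#[derive(Serialize, Deserialize)]
struct Inner {
    name: Option<String>,
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum Keyword {
    LanguageServer,
}

impl From<Content> for Specifier {
    fn from(c: Content) -> Self {
        match c {
            Content::Specific { language_server: Inner { name: Some(name) } } => {
                Specifier::Specific { name }
            }
            // `{"language_server": {"name": null}}` and `"language_server"`
            // both mean "whatever language server is current".
            _ => Specifier::Current,
        }
    }
}

impl From<Specifier> for Content {
    fn from(s: Specifier) -> Self {
        match s {
            Specifier::Specific { name } => Content::Specific {
                language_server: Inner { name: Some(name) },
            },
            Specifier::Current => Content::Current(Keyword::LanguageServer),
        }
    }
}

fn main() {
    let a: Specifier = serde_json::from_str(r#""language_server""#).unwrap();
    let b: Specifier = serde_json::from_str(r#"{"language_server":{"name":null}}"#).unwrap();
    let c: Specifier = serde_json::from_str(r#"{"language_server":{"name":"ruff"}}"#).unwrap();
    assert_eq!(a, Specifier::Current);
    assert_eq!(b, Specifier::Current);
    assert_eq!(c, Specifier::Specific { name: "ruff".into() });
    // Serialization goes back through `Content`, so `Current` prints as the keyword.
    assert_eq!(serde_json::to_string(&Specifier::Current).unwrap(), r#""language_server""#);
}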
@@ -884,31 +857,53 @@ mod test { fn test_formatter_deserialization() { let raw_auto = "{\"formatter\": \"auto\"}"; let settings: LanguageSettingsContent = serde_json::from_str(raw_auto).unwrap(); - assert_eq!(settings.formatter, Some(SelectedFormatter::Auto)); + assert_eq!( + settings.formatter, + Some(FormatterList::Single(Formatter::Auto)) + ); let raw = "{\"formatter\": \"language_server\"}"; let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap(); assert_eq!( settings.formatter, - Some(SelectedFormatter::List(FormatterList::Single( - Formatter::LanguageServer { name: None } + Some(FormatterList::Single(Formatter::LanguageServer( + LanguageServerFormatterSpecifier::Current ))) ); + let raw = "{\"formatter\": [{\"language_server\": {\"name\": null}}]}"; let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap(); assert_eq!( settings.formatter, - Some(SelectedFormatter::List(FormatterList::Vec(vec![ - Formatter::LanguageServer { name: None } - ]))) + Some(FormatterList::Vec(vec![Formatter::LanguageServer( + LanguageServerFormatterSpecifier::Current + )])) ); - let raw = "{\"formatter\": [{\"language_server\": {\"name\": null}}, \"prettier\"]}"; + let raw = "{\"formatter\": [{\"language_server\": {\"name\": null}}, \"language_server\", \"prettier\"]}"; let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap(); assert_eq!( settings.formatter, - Some(SelectedFormatter::List(FormatterList::Vec(vec![ - Formatter::LanguageServer { name: None }, + Some(FormatterList::Vec(vec![ + Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current), + Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current), Formatter::Prettier - ]))) + ])) + ); + + let raw = "{\"formatter\": [{\"language_server\": {\"name\": \"ruff\"}}, \"prettier\"]}"; + let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap(); + assert_eq!( + settings.formatter, + Some(FormatterList::Vec(vec![ + Formatter::LanguageServer(LanguageServerFormatterSpecifier::Specific { + name: "ruff".to_string() + }), + Formatter::Prettier + ])) + ); + + assert_eq!( + serde_json::to_string(&LanguageServerFormatterSpecifier::Current).unwrap(), + "\"language_server\"", ); } diff --git a/crates/settings/src/settings_content/language_model.rs b/crates/settings/src/settings_content/language_model.rs index 7139aac5eaca5c521007b21dbbb665bb4855347a..a0aa57a970c2483e4d9c617506d7b869c223cdf0 100644 --- a/crates/settings/src/settings_content/language_model.rs +++ b/crates/settings/src/settings_content/language_model.rs @@ -46,6 +46,7 @@ pub struct AnthropicAvailableModel { /// Configuration of Anthropic's caching API. pub cache_configuration: Option, pub max_output_tokens: Option, + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_temperature: Option, #[serde(default)] pub extra_beta_headers: Vec, @@ -71,6 +72,7 @@ pub struct BedrockAvailableModel { pub max_tokens: u64, pub cache_configuration: Option, pub max_output_tokens: Option, + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_temperature: Option, pub mode: Option, } @@ -332,6 +334,7 @@ pub struct ZedDotDevAvailableModel { /// Indicates whether this custom model supports caching. pub cache_configuration: Option, /// The default temperature to use for this model. 
+ #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_temperature: Option, /// Any extra beta headers to provide when using the model. #[serde(default)] diff --git a/crates/settings/src/settings_content/project.rs b/crates/settings/src/settings_content/project.rs index 88d9f9803e1579a77d7140e826961ad01f5eedac..6a77b815fa547d41e6f38541fe1d681c82b3347b 100644 --- a/crates/settings/src/settings_content/project.rs +++ b/crates/settings/src/settings_content/project.rs @@ -8,7 +8,8 @@ use settings_macros::MergeFrom; use util::serde::default_true; use crate::{ - AllLanguageSettingsContent, ExtendingVec, ProjectTerminalSettingsContent, SlashCommandSettings, + AllLanguageSettingsContent, DelayMs, ExtendingVec, Maybe, ProjectTerminalSettingsContent, + SlashCommandSettings, }; #[skip_serializing_none] @@ -55,11 +56,13 @@ pub struct ProjectSettingsContent { #[skip_serializing_none] #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct WorktreeSettingsContent { - /// The displayed name of this project. If not set or empty, the root directory name + /// The displayed name of this project. If not set or null, the root directory name /// will be displayed. /// - /// Default: "" - pub project_name: Option, + /// Default: null + #[serde(default)] + #[serde(skip_serializing_if = "Maybe::is_unset")] + pub project_name: Maybe, /// Completely ignore files matching globs from `file_scan_exclusions`. Overrides /// `file_scan_inclusions`. @@ -154,6 +157,8 @@ pub struct DapSettingsContent { pub binary: Option, #[serde(default)] pub args: Option>, + #[serde(default)] + pub env: Option>, } #[skip_serializing_none] @@ -308,7 +313,7 @@ pub struct InlineBlameSettings { /// after a delay once the cursor stops moving. /// /// Default: 0 - pub delay_ms: Option, + pub delay_ms: Option, /// The amount of padding between the end of the source line and the start /// of the inline blame in units of columns. /// @@ -395,7 +400,7 @@ pub struct LspPullDiagnosticsSettingsContent { /// 0 turns the debounce off. /// /// Default: 50 - pub debounce_ms: Option, + pub debounce_ms: Option, } #[skip_serializing_none] @@ -411,7 +416,7 @@ pub struct InlineDiagnosticsSettingsContent { /// last editor event. /// /// Default: 150 - pub update_debounce_ms: Option, + pub update_debounce_ms: Option, /// The amount of padding between the end of the source line and the start /// of the inline diagnostic in units of columns. /// diff --git a/crates/settings/src/settings_content/terminal.rs b/crates/settings/src/settings_content/terminal.rs index e5d3ba60b52073963115934afdd368c582ccfff2..d6e82b40b439ba2308a3c9be594e5d895e9c1dad 100644 --- a/crates/settings/src/settings_content/terminal.rs +++ b/crates/settings/src/settings_content/terminal.rs @@ -41,6 +41,7 @@ pub struct TerminalSettingsContent { /// /// If this option is not included, /// the terminal will default to matching the buffer's font size. + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub font_size: Option, /// Sets the terminal's font family. /// @@ -61,6 +62,7 @@ pub struct TerminalSettingsContent { pub line_height: Option, pub font_features: Option, /// Sets the terminal's font weight in CSS weight units 0-900. + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub font_weight: Option, /// Default cursor shape for the terminal. /// Can be "bar", "block", "underline", or "hollow". 
@@ -99,10 +101,12 @@ pub struct TerminalSettingsContent { /// Default width when the terminal is docked to the left or right. /// /// Default: 640 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, /// Default height when the terminal is docked to the bottom. /// /// Default: 320 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_height: Option, /// The maximum number of lines to keep in the scrollback history. /// Maximum allowed value is 100_000, all values above that will be treated as 100_000. @@ -130,11 +134,24 @@ pub struct TerminalSettingsContent { /// - 90: Preferred for body text /// /// Default: 45 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub minimum_contrast: Option, } /// Shell configuration to open the terminal with. -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[derive( + Clone, + Debug, + Default, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(rename_all = "snake_case")] pub enum Shell { /// Use the system's default terminal configuration in /etc/passwd @@ -153,7 +170,18 @@ pub enum Shell { }, } -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[derive( + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(rename_all = "snake_case")] pub enum WorkingDirectory { /// Use the current file's project directory. Will Fallback to the @@ -190,7 +218,7 @@ pub enum TerminalLineHeight { /// particularly if they use box characters Standard, /// Use a custom line height. - Custom(f32), + Custom(#[serde(serialize_with = "crate::serialize_f32_with_two_decimal_places")] f32), } impl TerminalLineHeight { diff --git a/crates/settings/src/settings_content/theme.rs b/crates/settings/src/settings_content/theme.rs index 67cfff6da1051247b2f462c96febd0f09c882963..80b543a20aa389ca05b13371f235ebc0dda9c82e 100644 --- a/crates/settings/src/settings_content/theme.rs +++ b/crates/settings/src/settings_content/theme.rs @@ -9,6 +9,8 @@ use std::{fmt::Display, sync::Arc}; use serde_with::skip_serializing_none; +use crate::serialize_f32_with_two_decimal_places; + /// Settings for rendering text in UI and text buffers. #[skip_serializing_none] @@ -16,6 +18,7 @@ use serde_with::skip_serializing_none; pub struct ThemeSettingsContent { /// The default font size for text in the UI. #[serde(default)] + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub ui_font_size: Option, /// The name of a font to use for rendering in the UI. #[serde(default)] @@ -42,6 +45,7 @@ pub struct ThemeSettingsContent { pub buffer_font_fallbacks: Option>, /// The default font size for rendering in text buffers. #[serde(default)] + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub buffer_font_size: Option, /// The weight of the editor font in CSS units from 100 to 900. #[serde(default)] @@ -56,9 +60,11 @@ pub struct ThemeSettingsContent { pub buffer_font_features: Option, /// The font size for agent responses in the agent panel. Falls back to the UI font size if unset. 
#[serde(default)] + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub agent_ui_font_size: Option, /// The font size for user messages in the agent panel. #[serde(default)] + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub agent_buffer_font_size: Option, /// The name of the Zed theme to use. #[serde(default)] @@ -104,7 +110,7 @@ pub struct ThemeSettingsContent { derive_more::FromStr, )] #[serde(transparent)] -pub struct CodeFade(pub f32); +pub struct CodeFade(#[serde(serialize_with = "serialize_f32_with_two_decimal_places")] pub f32); impl Display for CodeFade { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -112,6 +118,12 @@ impl Display for CodeFade { } } +impl From for CodeFade { + fn from(x: f32) -> Self { + Self(x) + } +} + fn default_font_features() -> Option { Some(FontFeatures::default()) } @@ -125,7 +137,18 @@ fn default_buffer_font_weight() -> Option { } /// Represents the selection of a theme, which can be either static or dynamic. -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq)] +#[derive( + Clone, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + Eq, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(untagged)] pub enum ThemeSelection { /// A static theme selection, represented by a single theme name. @@ -143,7 +166,18 @@ pub enum ThemeSelection { } /// Represents the selection of an icon theme, which can be either static or dynamic. -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Eq)] +#[derive( + Clone, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + Eq, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(untagged)] pub enum IconThemeSelection { /// A static icon theme selection, represented by a single icon theme name. @@ -167,7 +201,18 @@ pub enum IconThemeSelection { /// /// `System` will select the theme based on the system's appearance. #[derive( - Debug, PartialEq, Eq, Clone, Copy, Default, Serialize, Deserialize, JsonSchema, MergeFrom, + Debug, + PartialEq, + Eq, + Clone, + Copy, + Default, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, )] #[serde(rename_all = "snake_case")] pub enum ThemeMode { @@ -262,7 +307,19 @@ impl From for String { } /// The buffer's line height. -#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom, Default)] +#[derive( + Clone, + Copy, + Debug, + Serialize, + Deserialize, + PartialEq, + JsonSchema, + MergeFrom, + Default, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(rename_all = "snake_case")] pub enum BufferLineHeight { /// A less dense line height. @@ -838,6 +895,35 @@ pub struct ThemeColorsContent { /// Deprecated in favor of `version_control_conflict_marker_theirs`. #[deprecated] pub version_control_conflict_theirs_background: Option, + + /// Background color for Vim Normal mode indicator. + #[serde(rename = "vim.normal.background")] + pub vim_normal_background: Option, + /// Background color for Vim Insert mode indicator. + #[serde(rename = "vim.insert.background")] + pub vim_insert_background: Option, + /// Background color for Vim Replace mode indicator. 
+ #[serde(rename = "vim.replace.background")] + pub vim_replace_background: Option, + /// Background color for Vim Visual mode indicator. + #[serde(rename = "vim.visual.background")] + pub vim_visual_background: Option, + /// Background color for Vim Visual Line mode indicator. + #[serde(rename = "vim.visual_line.background")] + pub vim_visual_line_background: Option, + /// Background color for Vim Visual Block mode indicator. + #[serde(rename = "vim.visual_block.background")] + pub vim_visual_block_background: Option, + /// Background color for Vim Helix Normal mode indicator. + #[serde(rename = "vim.helix_normal.background")] + pub vim_helix_normal_background: Option, + /// Background color for Vim Helix Select mode indicator. + #[serde(rename = "vim.helix_select.background")] + pub vim_helix_select_background: Option, + + /// Text color for Vim mode indicator label. + #[serde(rename = "vim.mode.text")] + pub vim_mode_text: Option, } #[skip_serializing_none] diff --git a/crates/settings/src/settings_content/workspace.rs b/crates/settings/src/settings_content/workspace.rs index 511c883a4386c6b2ea634dc751c0f38fe5c8079c..c901d7010b37c685180ca67a3c4775da41be87ee 100644 --- a/crates/settings/src/settings_content/workspace.rs +++ b/crates/settings/src/settings_content/workspace.rs @@ -6,13 +6,16 @@ use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; use settings_macros::MergeFrom; -use crate::{DockPosition, DockSide, ScrollbarSettingsContent, ShowIndentGuides}; +use crate::{ + CenteredPaddingSettings, DelayMs, DockPosition, DockSide, InactiveOpacity, + ScrollbarSettingsContent, ShowIndentGuides, serialize_optional_f32_with_two_decimal_places, +}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct WorkspaceSettingsContent { /// Active pane styling settings. - pub active_pane_modifiers: Option, + pub active_pane_modifiers: Option, /// Layout mode for the bottom dock /// /// Default: contained @@ -56,6 +59,7 @@ pub struct WorkspaceSettingsContent { /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. /// /// Default: `0.2` (20% of the smaller dimension of the workspace) + #[serde(serialize_with = "serialize_optional_f32_with_two_decimal_places")] pub drop_target_size: Option, /// Whether to close the window when using 'close active item' on a workspace with no tabs /// @@ -243,12 +247,13 @@ pub enum ActivateOnClose { #[skip_serializing_none] #[derive(Copy, Clone, PartialEq, Debug, Default, Serialize, Deserialize, JsonSchema, MergeFrom)] #[serde(rename_all = "snake_case")] -pub struct ActivePanelModifiers { +pub struct ActivePaneModifiers { /// Size of the border surrounding the active pane. /// When set to 0, the active pane doesn't have any border. /// The border is drawn inset. /// /// Default: `0.0` + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub border_size: Option, /// Opacity of inactive panels. /// When set to 1.0, the inactive panes have the same opacity as the active one. @@ -256,7 +261,8 @@ pub struct ActivePanelModifiers { /// Values are clamped to the [0.0, 1.0] range. /// /// Default: `1.0` - pub inactive_opacity: Option, + #[schemars(range(min = 0.0, max = 1.0))] + pub inactive_opacity: Option, } #[derive( @@ -377,15 +383,31 @@ pub struct StatusBarSettingsContent { /// /// Default: true pub cursor_position_button: Option, + /// Whether to show active line endings button in the status bar. 
+ /// + /// Default: false + pub line_endings_button: Option, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[derive( + Copy, + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::EnumDiscriminants, +)] +#[strum_discriminants(derive(strum::VariantArray, strum::VariantNames, strum::FromRepr))] #[serde(rename_all = "snake_case")] pub enum AutosaveSetting { /// Disable autosave. Off, /// Save after inactivity period of `milliseconds`. - AfterDelay { milliseconds: u64 }, + AfterDelay { milliseconds: DelayMs }, /// Autosave when focus changes. OnFocusChange, /// Autosave when the active window changes. @@ -403,34 +425,58 @@ impl AutosaveSetting { } } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[derive( + Copy, + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] #[serde(rename_all = "snake_case")] pub enum PaneSplitDirectionHorizontal { Up, Down, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, MergeFrom)] +#[derive( + Copy, + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] #[serde(rename_all = "snake_case")] pub enum PaneSplitDirectionVertical { Left, Right, } -#[skip_serializing_none] #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq, Default)] #[serde(rename_all = "snake_case")] +#[skip_serializing_none] pub struct CenteredLayoutSettings { /// The relative width of the left padding of the central pane from the /// workspace when the centered layout is used. /// /// Default: 0.2 - pub left_padding: Option, + pub left_padding: Option, // The relative width of the right padding of the central pane from the // workspace when the centered layout is used. /// /// Default: 0.2 - pub right_padding: Option, + pub right_padding: Option, } #[derive( @@ -478,6 +524,7 @@ pub struct ProjectPanelSettingsContent { /// Customize default width (in pixels) taken by project panel /// /// Default: 240 + #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub default_width: Option, /// The position of project panel /// @@ -502,6 +549,7 @@ pub struct ProjectPanelSettingsContent { /// Amount of indentation (in pixels) for nested items. /// /// Default: 20 + #[serde(serialize_with = "serialize_optional_f32_with_two_decimal_places")] pub indent_size: Option, /// Whether to reveal it in the project panel automatically, /// when a corresponding project entry becomes active. @@ -530,6 +578,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: false pub hide_root: Option, + /// Whether to hide the hidden entries in the project panel. + /// + /// Default: false + pub hide_hidden: Option, /// Whether to stick parent directories at top of the project panel. /// /// Default: true @@ -538,6 +590,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: true pub drag_and_drop: Option, + /// Whether to automatically open files when pasting them in the project panel. 
+ /// + /// Default: true + pub open_file_on_paste: Option, } #[derive( diff --git a/crates/settings/src/settings_json.rs b/crates/settings/src/settings_json.rs index 555a48e9f0972d708eaf9aaaaaf467852ccf7dd6..5e83b11b339245c9e8beb6038cb8c5532b551ad8 100644 --- a/crates/settings/src/settings_json.rs +++ b/crates/settings/src/settings_json.rs @@ -262,8 +262,8 @@ pub fn replace_value_in_json_text>( } else { // We don't have the key, construct the nested objects let new_value = construct_json_value(&key_path[depth..], new_value); - let indent_prefix_len = 4 * depth; - let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len); + let indent_prefix_len = tab_size * depth; + let mut new_val = to_pretty_json(&new_value, tab_size, indent_prefix_len); if depth == 0 { new_val.push('\n'); } @@ -628,6 +628,100 @@ pub fn append_top_level_array_value_in_json_text( } } +/// Infers the indentation size used in JSON text by analyzing the tree structure. +/// Returns the detected indent size, or a default of 2 if no indentation is found. +pub fn infer_json_indent_size(text: &str) -> usize { + const MAX_INDENT_SIZE: usize = 64; + + let mut parser = tree_sitter::Parser::new(); + parser + .set_language(&tree_sitter_json::LANGUAGE.into()) + .unwrap(); + + let Some(syntax_tree) = parser.parse(text, None) else { + return 4; + }; + + let mut cursor = syntax_tree.walk(); + let mut indent_counts = [0u32; MAX_INDENT_SIZE]; + + // Traverse the tree to find indentation patterns + fn visit_node( + cursor: &mut tree_sitter::TreeCursor, + indent_counts: &mut [u32; MAX_INDENT_SIZE], + depth: usize, + ) { + if depth >= 3 { + return; + } + let node = cursor.node(); + let node_kind = node.kind(); + + // For objects and arrays, check the indentation of their first content child + if matches!(node_kind, "object" | "array") { + let container_column = node.start_position().column; + let container_row = node.start_position().row; + + if cursor.goto_first_child() { + // Skip the opening bracket + loop { + let child = cursor.node(); + let child_kind = child.kind(); + + // Look for the first actual content (pair for objects, value for arrays) + if (node_kind == "object" && child_kind == "pair") + || (node_kind == "array" + && !matches!(child_kind, "[" | "]" | "," | "comment")) + { + let child_column = child.start_position().column; + let child_row = child.start_position().row; + + // Only count if the child is on a different line + if child_row > container_row && child_column > container_column { + let indent = child_column - container_column; + if indent > 0 && indent < MAX_INDENT_SIZE { + indent_counts[indent] += 1; + } + } + break; + } + + if !cursor.goto_next_sibling() { + break; + } + } + cursor.goto_parent(); + } + } + + // Recurse to children + if cursor.goto_first_child() { + loop { + visit_node(cursor, indent_counts, depth + 1); + if !cursor.goto_next_sibling() { + break; + } + } + cursor.goto_parent(); + } + } + + visit_node(&mut cursor, &mut indent_counts, 0); + + // Find the indent size with the highest count + let mut max_count = 0; + let mut max_indent = 4; + + for (indent, &count) in indent_counts.iter().enumerate() { + if count > max_count { + max_count = count; + max_indent = indent; + } + } + + if max_count == 0 { 2 } else { max_indent } +} + pub fn to_pretty_json( value: &impl Serialize, indent_size: usize, @@ -2486,4 +2580,69 @@ mod tests { .unindent(), ) } + + #[test] + fn test_infer_json_indent_size() { + let json_2_spaces = r#"{ + "key1": "value1", + "nested": { + "key2": "value2", + "array": [ + 1, + 2, 
+ 3 + ] + } +}"#; + assert_eq!(infer_json_indent_size(json_2_spaces), 2); + + let json_4_spaces = r#"{ + "key1": "value1", + "nested": { + "key2": "value2", + "array": [ + 1, + 2, + 3 + ] + } +}"#; + assert_eq!(infer_json_indent_size(json_4_spaces), 4); + + let json_8_spaces = r#"{ + "key1": "value1", + "nested": { + "key2": "value2" + } +}"#; + assert_eq!(infer_json_indent_size(json_8_spaces), 8); + + let json_single_line = r#"{"key": "value", "nested": {"inner": "data"}}"#; + assert_eq!(infer_json_indent_size(json_single_line), 2); + + let json_empty = r#"{}"#; + assert_eq!(infer_json_indent_size(json_empty), 2); + + let json_array = r#"[ + { + "id": 1, + "name": "first" + }, + { + "id": 2, + "name": "second" + } +]"#; + assert_eq!(infer_json_indent_size(json_array), 2); + + let json_mixed = r#"{ + "a": { + "b": { + "c": "value" + } + }, + "d": "value2" +}"#; + assert_eq!(infer_json_indent_size(json_mixed), 2); + } } diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 33ad826482a21c61e83beaa06c723b0caf5b519a..e971aedd4cd87b4706a465b532846970c2772e23 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -33,6 +33,7 @@ pub type EditorconfigProperties = ec4rs::Properties; use crate::{ ActiveSettingsProfileName, FontFamilyName, IconThemeName, LanguageSettingsContent, LanguageToSettingsMap, SettingsJsonSchemaParams, ThemeName, VsCodeSettings, WorktreeId, + infer_json_indent_size, merge_from::MergeFrom, parse_json_with_comments, settings_content::{ @@ -69,10 +70,6 @@ pub trait Settings: 'static + Send + Sync + Sized { /// and you should add a default to default.json for documentation. fn from_settings(content: &SettingsContent) -> Self; - /// Use [the helpers in the vscode_import module](crate::vscode_import) to apply known - /// equivalent settings from a vscode config to our config - fn import_from_vscode(_vscode: &VsCodeSettings, _current: &mut SettingsContent) {} - #[track_caller] fn register(cx: &mut App) where @@ -151,9 +148,10 @@ pub struct SettingsStore { _setting_file_updates: Task<()>, setting_file_updates_tx: mpsc::UnboundedSender LocalBoxFuture<'static, Result<()>>>>, + file_errors: BTreeMap, } -#[derive(Clone, PartialEq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug)] pub enum SettingsFile { User, Server, @@ -162,6 +160,34 @@ pub enum SettingsFile { Project((WorktreeId, Arc)), } +impl PartialOrd for SettingsFile { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// Sorted in order of precedence +impl Ord for SettingsFile { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + use SettingsFile::*; + use std::cmp::Ordering; + match (self, other) { + (User, User) => Ordering::Equal, + (Server, Server) => Ordering::Equal, + (Default, Default) => Ordering::Equal, + (Project((id1, rel_path1)), Project((id2, rel_path2))) => id1 + .cmp(id2) + .then_with(|| rel_path1.cmp(rel_path2).reverse()), + (Project(_), _) => Ordering::Less, + (_, Project(_)) => Ordering::Greater, + (Server, _) => Ordering::Less, + (_, Server) => Ordering::Greater, + (User, _) => Ordering::Less, + (_, User) => Ordering::Greater, + } + } +} + #[derive(Clone)] pub struct Editorconfig { pub is_root: bool, @@ -207,11 +233,6 @@ trait AnySettingValue: 'static + Send + Sync { fn all_local_values(&self) -> Vec<(WorktreeId, Arc, &dyn Any)>; fn set_global_value(&mut self, value: Box); fn set_local_value(&mut self, root_id: WorktreeId, path: Arc, value: Box); - fn import_from_vscode( - &self, - 
vscode_settings: &VsCodeSettings, - settings_content: &mut SettingsContent, - ); } impl SettingsStore { @@ -236,6 +257,7 @@ impl SettingsStore { (setting_file_update)(cx.clone()).await.log_err(); } }), + file_errors: BTreeMap::default(), } } @@ -484,7 +506,7 @@ impl SettingsStore { files } - fn get_content_for_file(&self, file: SettingsFile) -> Option<&SettingsContent> { + pub fn get_content_for_file(&self, file: SettingsFile) -> Option<&SettingsContent> { match file { SettingsFile::User => self .user_settings @@ -534,11 +556,30 @@ impl SettingsStore { /// Returns the first file found that contains the value. /// The value will only be None if no file contains the value. /// I.e. if no file contains the value, returns `(File::Default, None)` - pub fn get_value_from_file( - &self, + pub fn get_value_from_file<'a, T: 'a>( + &'a self, + target_file: SettingsFile, + pick: fn(&'a SettingsContent) -> Option, + ) -> (SettingsFile, Option) { + self.get_value_from_file_inner(target_file, pick, true) + } + + /// Same as `Self::get_value_from_file` except that it does not include the current file. + /// Therefore it returns the value that was potentially overloaded by the target file. + pub fn get_value_up_to_file<'a, T: 'a>( + &'a self, target_file: SettingsFile, - pick: fn(&SettingsContent) -> &Option, - ) -> (SettingsFile, Option<&T>) { + pick: fn(&'a SettingsContent) -> Option, + ) -> (SettingsFile, Option) { + self.get_value_from_file_inner(target_file, pick, false) + } + + fn get_value_from_file_inner<'a, T: 'a>( + &'a self, + target_file: SettingsFile, + pick: fn(&'a SettingsContent) -> Option, + include_target_file: bool, + ) -> (SettingsFile, Option) { // todo(settings_ui): Add a metadata field for overriding the "overrides" tag, for contextually different settings // e.g. 
disable AI isn't overridden, or a vec that gets extended instead or some such @@ -547,10 +588,15 @@ impl SettingsStore { let mut found_file = false; for file in all_files.into_iter() { - if !found_file && file != target_file && file != SettingsFile::Default { - continue; + if !found_file && file != SettingsFile::Default { + if file != target_file { + continue; + } + found_file = true; + if !include_target_file { + continue; + } } - found_file = true; if let SettingsFile::Project((worktree_id, ref path)) = file && let SettingsFile::Project((target_worktree_id, ref target_path)) = target_file @@ -563,13 +609,31 @@ impl SettingsStore { let Some(content) = self.get_content_for_file(file.clone()) else { continue; }; - if let Some(value) = pick(content).as_ref() { + if let Some(value) = pick(content) { return (file, Some(value)); } } (SettingsFile::Default, None) } + + fn handle_potential_file_error( + &mut self, + file: SettingsFile, + result: Result, + ) -> Result { + if let Err(err) = result.as_ref() { + let message = err.to_string(); + self.file_errors.insert(file, message); + } else { + self.file_errors.remove(&file); + } + return result; + } + + pub fn error_for_file(&self, file: SettingsFile) -> Option { + self.file_errors.get(&file).cloned() + } } impl SettingsStore { @@ -589,10 +653,8 @@ impl SettingsStore { } pub fn get_vscode_edits(&self, old_text: String, vscode: &VsCodeSettings) -> String { - self.new_text_for_update(old_text, |settings_content| { - for v in self.setting_values.values() { - v.import_from_vscode(vscode, settings_content) - } + self.new_text_for_update(old_text, |content| { + content.merge_from(&vscode.settings_content()) }) } @@ -613,7 +675,7 @@ impl SettingsStore { let mut key_path = Vec::new(); let mut edits = Vec::new(); - let tab_size = self.json_tab_size(); + let tab_size = infer_json_indent_size(&text); let mut text = text.to_string(); update_value_in_json_text( &mut text, @@ -626,10 +688,6 @@ impl SettingsStore { edits } - pub fn json_tab_size(&self) -> usize { - 2 - } - /// Sets the default settings via a JSON string. /// /// The string should contain a JSON object with a default value for every setting. @@ -648,7 +706,10 @@ impl SettingsStore { let settings: UserSettingsContent = if user_settings_content.is_empty() { parse_json_with_comments("{}")? } else { - parse_json_with_comments(user_settings_content)? + self.handle_potential_file_error( + SettingsFile::User, + parse_json_with_comments(user_settings_content), + )? }; self.user_settings = Some(settings); @@ -681,7 +742,10 @@ impl SettingsStore { let settings: Option = if server_settings_content.is_empty() { None } else { - parse_json_with_comments(server_settings_content)? + self.handle_potential_file_error( + SettingsFile::Server, + parse_json_with_comments(server_settings_content), + )? 
}; // Rewrite the server settings into a content type @@ -730,20 +794,24 @@ impl SettingsStore { zed_settings_changed = self .local_settings .remove(&(root_id, directory_path.clone())) - .is_some() + .is_some(); + self.file_errors + .remove(&SettingsFile::Project((root_id, directory_path.clone()))); } (LocalSettingsKind::Editorconfig, None) => { self.raw_editorconfig_settings .remove(&(root_id, directory_path.clone())); } (LocalSettingsKind::Settings, Some(settings_contents)) => { - let new_settings = parse_json_with_comments::( - settings_contents, - ) - .map_err(|e| InvalidSettingsError::LocalSettings { - path: directory_path.join(local_settings_file_relative_path()), - message: e.to_string(), - })?; + let new_settings = self + .handle_potential_file_error( + SettingsFile::Project((root_id, directory_path.clone())), + parse_json_with_comments::(settings_contents), + ) + .map_err(|e| InvalidSettingsError::LocalSettings { + path: directory_path.join(local_settings_file_relative_path()), + message: e.to_string(), + })?; match self.local_settings.entry((root_id, directory_path.clone())) { btree_map::Entry::Vacant(v) => { v.insert(SettingsContent { @@ -921,6 +989,7 @@ impl SettingsStore { .to_value() } + // todo -> this function never fails, and should not return a result fn recompute_values( &mut self, changed_local_path: Option<(WorktreeId, &RelPath)>, @@ -1108,14 +1177,6 @@ impl AnySettingValue for SettingValue { Err(ix) => self.local_values.insert(ix, (root_id, path, value)), } } - - fn import_from_vscode( - &self, - vscode_settings: &VsCodeSettings, - settings_content: &mut SettingsContent, - ) { - T::import_from_vscode(vscode_settings, settings_content); - } } #[cfg(test)] @@ -1158,19 +1219,6 @@ mod tests { git_status: content.git_status.unwrap(), } } - - fn import_from_vscode(vscode: &VsCodeSettings, content: &mut SettingsContent) { - let mut show = None; - - vscode.bool_setting("workbench.editor.decorations.colors", &mut show); - if let Some(show) = show { - content - .tabs - .get_or_insert_default() - .git_status - .replace(show); - } - } } #[derive(Debug, PartialEq)] @@ -1187,18 +1235,6 @@ mod tests { preferred_line_length: content.preferred_line_length.unwrap(), } } - - fn import_from_vscode(vscode: &VsCodeSettings, content: &mut SettingsContent) { - let content = &mut content.project.all_languages.defaults; - - if let Some(size) = vscode - .read_value("editor.tabSize") - .and_then(|v| v.as_u64()) - .and_then(|n| NonZeroU32::new(n as u32)) - { - content.tab_size = Some(size); - } - } } #[derive(Debug, PartialEq)] @@ -1215,16 +1251,6 @@ mod tests { buffer_font_fallbacks: content.buffer_font_fallbacks.unwrap(), } } - - fn import_from_vscode(vscode: &VsCodeSettings, content: &mut SettingsContent) { - let content = &mut content.theme; - - vscode.font_family_setting( - "editor.fontFamily", - &mut content.buffer_font_family, - &mut content.buffer_font_fallbacks, - ); - } } #[gpui::test] @@ -1516,9 +1542,9 @@ mod tests { }) }, r#"{ - "tabs": { - "git_status": true - } + "tabs": { + "git_status": true + } } "# .unindent(), @@ -1533,9 +1559,9 @@ mod tests { .unindent(), |settings| settings.title_bar.get_or_insert_default().show_branch_name = Some(true), r#"{ - "title_bar": { - "show_branch_name": true - } + "title_bar": { + "show_branch_name": true + } } "# .unindent(), @@ -1560,7 +1586,8 @@ mod tests { .unindent(), r#" { "editor.tabSize": 37 } "#.to_owned(), r#"{ - "tab_size": 37 + "base_keymap": "VSCode", + "tab_size": 37 } "# .unindent(), @@ -1577,6 +1604,7 @@ mod tests { .unindent(), 
r#"{ "editor.tabSize": 42 }"#.to_owned(), r#"{ + "base_keymap": "VSCode", "tab_size": 42, "preferred_line_length": 99, } @@ -1596,6 +1624,7 @@ mod tests { .unindent(), r#"{}"#.to_owned(), r#"{ + "base_keymap": "VSCode", "preferred_line_length": 99, "tab_size": 42 } @@ -1611,11 +1640,18 @@ mod tests { } "# .unindent(), - r#"{ "workbench.editor.decorations.colors": true }"#.to_owned(), + r#"{ "git.decorations.enabled": true }"#.to_owned(), r#"{ - "tabs": { - "git_status": true - } + "project_panel": { + "git_status": true + }, + "outline_panel": { + "git_status": true + }, + "base_keymap": "VSCode", + "tabs": { + "git_status": true + } } "# .unindent(), @@ -1631,11 +1667,12 @@ mod tests { .unindent(), r#"{ "editor.fontFamily": "Cascadia Code, 'Consolas', Courier New" }"#.to_owned(), r#"{ - "buffer_font_fallbacks": [ - "Consolas", - "Courier New" - ], - "buffer_font_family": "Cascadia Code" + "base_keymap": "VSCode", + "buffer_font_fallbacks": [ + "Consolas", + "Courier New" + ], + "buffer_font_family": "Cascadia Code" } "# .unindent(), @@ -1671,16 +1708,16 @@ mod tests { .get_or_insert_default() .enabled = Some(true); }); - assert_eq!( + pretty_assertions::assert_str_eq!( actual, r#"{ - "git": { + "git": { "inline_blame": { - "enabled": true + "enabled": true } + } } - } - "# + "# .unindent() ); } @@ -1753,11 +1790,16 @@ mod tests { ) .unwrap(); - fn get(content: &SettingsContent) -> &Option { - &content.project.all_languages.defaults.preferred_line_length + fn get(content: &SettingsContent) -> Option<&u32> { + content + .project + .all_languages + .defaults + .preferred_line_length + .as_ref() } - let default_value = get(&store.default_settings).unwrap(); + let default_value = *get(&store.default_settings).unwrap(); assert_eq!( store.get_value_from_file(SettingsFile::Project(local.clone()), get), @@ -1820,8 +1862,13 @@ mod tests { .into_arc(), ); - fn get(content: &SettingsContent) -> &Option { - &content.project.all_languages.defaults.preferred_line_length + fn get(content: &SettingsContent) -> Option<&u32> { + content + .project + .all_languages + .defaults + .preferred_line_length + .as_ref() } store @@ -2044,4 +2091,45 @@ mod tests { let overrides = store.get_overrides_for_field(SettingsFile::Project(wt0_child1), get); assert_eq!(overrides, vec![]); } + + #[test] + fn test_file_ord() { + let wt0_root = + SettingsFile::Project((WorktreeId::from_usize(0), RelPath::empty().into_arc())); + let wt0_child1 = + SettingsFile::Project((WorktreeId::from_usize(0), rel_path("child1").into_arc())); + let wt0_child2 = + SettingsFile::Project((WorktreeId::from_usize(0), rel_path("child2").into_arc())); + + let wt1_root = + SettingsFile::Project((WorktreeId::from_usize(1), RelPath::empty().into_arc())); + let wt1_subdir = + SettingsFile::Project((WorktreeId::from_usize(1), rel_path("subdir").into_arc())); + + let mut files = vec![ + &wt1_root, + &SettingsFile::Default, + &wt0_root, + &wt1_subdir, + &wt0_child2, + &SettingsFile::Server, + &wt0_child1, + &SettingsFile::User, + ]; + + files.sort(); + pretty_assertions::assert_eq!( + files, + vec![ + &wt0_child2, + &wt0_child1, + &wt0_root, + &wt1_subdir, + &wt1_root, + &SettingsFile::Server, + &SettingsFile::User, + &SettingsFile::Default, + ] + ) + } } diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index c0c1085684b448dbd3d4ef83faabf21ca1cfbf7f..fd9b343ad9cf6b0fd93ac31bf2dd2e1f2f6023bf 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -1,10 +1,15 @@ +use crate::*; use 
anyhow::{Context as _, Result, anyhow}; +use collections::HashMap; use fs::Fs; use paths::{cursor_settings_file_paths, vscode_settings_file_paths}; +use serde::Deserialize; use serde_json::{Map, Value}; -use std::{path::Path, sync::Arc}; - -use crate::FontFamilyName; +use std::{ + num::{NonZeroU32, NonZeroUsize}, + path::{Path, PathBuf}, + sync::Arc, +}; #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum VsCodeSettingsSource { @@ -79,83 +84,53 @@ impl VsCodeSettings { }) } - pub fn read_value(&self, setting: &str) -> Option<&Value> { + fn read_value(&self, setting: &str) -> Option<&Value> { self.content.get(setting) } - pub fn read_string(&self, setting: &str) -> Option<&str> { + fn read_str(&self, setting: &str) -> Option<&str> { self.read_value(setting).and_then(|v| v.as_str()) } - pub fn read_bool(&self, setting: &str) -> Option { - self.read_value(setting).and_then(|v| v.as_bool()) - } - - pub fn string_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_str) { - *setting = Some(s.to_owned()) - } - } - - pub fn bool_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_bool) { - *setting = Some(s) - } + fn read_string(&self, setting: &str) -> Option { + self.read_value(setting) + .and_then(|v| v.as_str()) + .map(|s| s.to_owned()) } - pub fn u32_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_u64) { - *setting = Some(s as u32) - } + fn read_bool(&self, setting: &str) -> Option { + self.read_value(setting).and_then(|v| v.as_bool()) } - pub fn u64_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_u64) { - *setting = Some(s) - } + fn read_f32(&self, setting: &str) -> Option { + self.read_value(setting) + .and_then(|v| v.as_f64()) + .map(|v| v as f32) } - pub fn usize_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_u64) { - *setting = Some(s.try_into().unwrap()) - } + fn read_u64(&self, setting: &str) -> Option { + self.read_value(setting).and_then(|v| v.as_u64()) } - pub fn f32_setting(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_f64) { - *setting = Some(s as f32) - } + fn read_usize(&self, setting: &str) -> Option { + self.read_value(setting) + .and_then(|v| v.as_u64()) + .and_then(|v| v.try_into().ok()) } - pub fn from_f32_setting>(&self, key: &str, setting: &mut Option) { - if let Some(s) = self.content.get(key).and_then(Value::as_f64) { - *setting = Some(T::from(s as f32)) - } + fn read_u32(&self, setting: &str) -> Option { + self.read_value(setting) + .and_then(|v| v.as_u64()) + .and_then(|v| v.try_into().ok()) } - pub fn enum_setting( - &self, - key: &str, - setting: &mut Option, - f: impl FnOnce(&str) -> Option, - ) { - if let Some(s) = self.content.get(key).and_then(Value::as_str).and_then(f) { - *setting = Some(s) - } - } - - pub fn read_enum(&self, key: &str, f: impl FnOnce(&str) -> Option) -> Option { + fn read_enum(&self, key: &str, f: impl FnOnce(&str) -> Option) -> Option { self.content.get(key).and_then(Value::as_str).and_then(f) } - pub fn font_family_setting( - &self, - key: &str, - font_family: &mut Option, - font_fallbacks: &mut Option>, - ) { + fn read_fonts(&self, key: &str) -> (Option, Option>) { let Some(css_name) = self.content.get(key).and_then(Value::as_str) else { - return; + return (None, None); }; let mut 
name_buffer = String::new(); @@ -188,12 +163,725 @@ impl VsCodeSettings { } add_font(&mut name_buffer); + if fonts.is_empty() { + return (None, None); + } + (Some(fonts.remove(0)), skip_default(fonts)) + } + + pub fn settings_content(&self) -> SettingsContent { + SettingsContent { + agent: self.agent_settings_content(), + agent_servers: None, + audio: None, + auto_update: None, + base_keymap: Some(BaseKeymapContent::VSCode), + calls: None, + collaboration_panel: None, + debugger: None, + diagnostics: None, + disable_ai: None, + editor: self.editor_settings_content(), + extension: ExtensionSettingsContent::default(), + file_finder: None, + git: self.git_settings_content(), + git_panel: self.git_panel_settings_content(), + global_lsp_settings: None, + helix_mode: None, + image_viewer: None, + journal: None, + language_models: None, + line_indicator_format: None, + log: None, + message_editor: None, + node: self.node_binary_settings(), + notification_panel: None, + outline_panel: self.outline_panel_settings_content(), + preview_tabs: self.preview_tabs_settings_content(), + project: self.project_settings_content(), + project_panel: self.project_panel_settings_content(), + proxy: self.read_string("http.proxy"), + remote: RemoteSettingsContent::default(), + repl: None, + server_url: None, + session: None, + status_bar: self.status_bar_settings_content(), + tab_bar: self.tab_bar_settings_content(), + tabs: self.item_settings_content(), + telemetry: self.telemetry_settings_content(), + terminal: self.terminal_settings_content(), + theme: Box::new(self.theme_settings_content()), + title_bar: None, + vim: None, + vim_mode: None, + workspace: self.workspace_settings_content(), + } + } + + fn agent_settings_content(&self) -> Option { + let enabled = self.read_bool("chat.agent.enabled"); + skip_default(AgentSettingsContent { + enabled: enabled, + button: enabled, + ..Default::default() + }) + } + + fn editor_settings_content(&self) -> EditorSettingsContent { + EditorSettingsContent { + auto_signature_help: self.read_bool("editor.parameterHints.enabled"), + autoscroll_on_clicks: None, + cursor_blink: self.read_enum("editor.cursorBlinking", |s| match s { + "blink" | "phase" | "expand" | "smooth" => Some(true), + "solid" => Some(false), + _ => None, + }), + cursor_shape: self.read_enum("editor.cursorStyle", |s| match s { + "block" => Some(CursorShape::Block), + "block-outline" => Some(CursorShape::Hollow), + "line" | "line-thin" => Some(CursorShape::Bar), + "underline" | "underline-thin" => Some(CursorShape::Underline), + _ => None, + }), + current_line_highlight: self.read_enum("editor.renderLineHighlight", |s| match s { + "gutter" => Some(CurrentLineHighlight::Gutter), + "line" => Some(CurrentLineHighlight::Line), + "all" => Some(CurrentLineHighlight::All), + _ => None, + }), + diagnostics_max_severity: None, + double_click_in_multibuffer: None, + drag_and_drop_selection: None, + excerpt_context_lines: None, + expand_excerpt_lines: None, + fast_scroll_sensitivity: self.read_f32("editor.fastScrollSensitivity"), + go_to_definition_fallback: None, + gutter: self.gutter_content(), + hide_mouse: None, + horizontal_scroll_margin: None, + hover_popover_delay: self.read_u64("editor.hover.delay").map(Into::into), + hover_popover_enabled: self.read_bool("editor.hover.enabled"), + inline_code_actions: None, + jupyter: None, + lsp_document_colors: None, + lsp_highlight_debounce: None, + middle_click_paste: None, + minimap: self.minimap_content(), + minimum_contrast_for_highlights: None, + multi_cursor_modifier: 
self.read_enum("editor.multiCursorModifier", |s| match s { + "ctrlCmd" => Some(MultiCursorModifier::CmdOrCtrl), + "alt" => Some(MultiCursorModifier::Alt), + _ => None, + }), + redact_private_values: None, + relative_line_numbers: self.read_enum("editor.lineNumbers", |s| match s { + "relative" => Some(true), + _ => None, + }), + rounded_selection: self.read_bool("editor.roundedSelection"), + scroll_beyond_last_line: None, + scroll_sensitivity: self.read_f32("editor.mouseWheelScrollSensitivity"), + scrollbar: self.scrollbar_content(), + search: self.search_content(), + search_wrap: None, + seed_search_query_from_cursor: self.read_enum( + "editor.find.seedSearchStringFromSelection", + |s| match s { + "always" => Some(SeedQuerySetting::Always), + "selection" => Some(SeedQuerySetting::Selection), + "never" => Some(SeedQuerySetting::Never), + _ => None, + }, + ), + selection_highlight: self.read_bool("editor.selectionHighlight"), + show_signature_help_after_edits: self.read_bool("editor.parameterHints.enabled"), + snippet_sort_order: None, + toolbar: None, + use_smartcase_search: self.read_bool("search.smartCase"), + vertical_scroll_margin: self.read_f32("editor.cursorSurroundingLines"), + } + } + + fn gutter_content(&self) -> Option { + skip_default(GutterContent { + line_numbers: self.read_enum("editor.lineNumbers", |s| match s { + "on" | "relative" => Some(true), + "off" => Some(false), + _ => None, + }), + min_line_number_digits: None, + runnables: None, + breakpoints: None, + folds: self.read_enum("editor.showFoldingControls", |s| match s { + "always" | "mouseover" => Some(true), + "never" => Some(false), + _ => None, + }), + }) + } + + fn scrollbar_content(&self) -> Option { + let scrollbar_axes = skip_default(ScrollbarAxesContent { + horizontal: self.read_enum("editor.scrollbar.horizontal", |s| match s { + "auto" | "visible" => Some(true), + "hidden" => Some(false), + _ => None, + }), + vertical: self.read_enum("editor.scrollbar.vertical", |s| match s { + "auto" | "visible" => Some(true), + "hidden" => Some(false), + _ => None, + }), + })?; + + Some(ScrollbarContent { + axes: Some(scrollbar_axes), + ..Default::default() + }) + } + + fn search_content(&self) -> Option { + skip_default(SearchSettingsContent { + include_ignored: self.read_bool("search.useIgnoreFiles"), + ..Default::default() + }) + } + + fn minimap_content(&self) -> Option { + let minimap_enabled = self.read_bool("editor.minimap.enabled"); + let autohide = self.read_bool("editor.minimap.autohide"); + let show = match (minimap_enabled, autohide) { + (Some(true), Some(false)) => Some(ShowMinimap::Always), + (Some(true), _) => Some(ShowMinimap::Auto), + (Some(false), _) => Some(ShowMinimap::Never), + _ => None, + }; + + skip_default(MinimapContent { + show, + thumb: self.read_enum("editor.minimap.showSlider", |s| match s { + "always" => Some(MinimapThumb::Always), + "mouseover" => Some(MinimapThumb::Hover), + _ => None, + }), + max_width_columns: self + .read_u32("editor.minimap.maxColumn") + .and_then(|v| NonZeroU32::new(v)), + ..Default::default() + }) + } - let mut iter = fonts.into_iter(); - *font_family = iter.next(); - let fallbacks: Vec<_> = iter.collect(); - if !fallbacks.is_empty() { - *font_fallbacks = Some(fallbacks); + fn git_panel_settings_content(&self) -> Option { + skip_default(GitPanelSettingsContent { + button: self.read_bool("git.enabled"), + fallback_branch_name: self.read_string("git.defaultBranchName"), + ..Default::default() + }) + } + + fn project_settings_content(&self) -> ProjectSettingsContent { + 
ProjectSettingsContent { + all_languages: AllLanguageSettingsContent { + features: None, + edit_predictions: self.edit_predictions_settings_content(), + defaults: self.default_language_settings_content(), + languages: Default::default(), + file_types: self.file_types(), + }, + worktree: self.worktree_settings_content(), + lsp: Default::default(), + terminal: None, + dap: Default::default(), + context_servers: self.context_servers(), + load_direnv: None, + slash_commands: None, + git_hosting_providers: None, + } + } + + fn default_language_settings_content(&self) -> LanguageSettingsContent { + LanguageSettingsContent { + allow_rewrap: None, + always_treat_brackets_as_autoclosed: None, + auto_indent: None, + auto_indent_on_paste: self.read_bool("editor.formatOnPaste"), + code_actions_on_format: None, + completions: skip_default(CompletionSettingsContent { + words: self.read_bool("editor.suggest.showWords").map(|b| { + if b { + WordsCompletionMode::Enabled + } else { + WordsCompletionMode::Disabled + } + }), + ..Default::default() + }), + debuggers: None, + edit_predictions_disabled_in: None, + enable_language_server: None, + ensure_final_newline_on_save: self.read_bool("files.insertFinalNewline"), + extend_comment_on_newline: None, + format_on_save: self.read_bool("editor.guides.formatOnSave").map(|b| { + if b { + FormatOnSave::On + } else { + FormatOnSave::Off + } + }), + formatter: None, + hard_tabs: self.read_bool("editor.insertSpaces").map(|v| !v), + indent_guides: skip_default(IndentGuideSettingsContent { + enabled: self.read_bool("editor.guides.indentation"), + ..Default::default() + }), + inlay_hints: None, + jsx_tag_auto_close: None, + language_servers: None, + linked_edits: self.read_bool("editor.linkedEditing"), + preferred_line_length: self.read_u32("editor.wordWrapColumn"), + prettier: None, + remove_trailing_whitespace_on_save: self.read_bool("editor.trimAutoWhitespace"), + show_completion_documentation: None, + show_completions_on_input: self.read_bool("editor.suggestOnTriggerCharacters"), + show_edit_predictions: self.read_bool("editor.inlineSuggest.enabled"), + show_whitespaces: self.read_enum("editor.renderWhitespace", |s| { + Some(match s { + "boundary" => ShowWhitespaceSetting::Boundary, + "trailing" => ShowWhitespaceSetting::Trailing, + "selection" => ShowWhitespaceSetting::Selection, + "all" => ShowWhitespaceSetting::All, + _ => ShowWhitespaceSetting::None, + }) + }), + show_wrap_guides: None, + soft_wrap: self.read_enum("editor.wordWrap", |s| match s { + "on" => Some(SoftWrap::EditorWidth), + "wordWrapColumn" => Some(SoftWrap::PreferLine), + "bounded" => Some(SoftWrap::Bounded), + "off" => Some(SoftWrap::None), + _ => None, + }), + tab_size: self + .read_u32("editor.tabSize") + .and_then(|n| NonZeroU32::new(n)), + tasks: None, + use_auto_surround: self.read_enum("editor.autoSurround", |s| match s { + "languageDefined" | "quotes" | "brackets" => Some(true), + "never" => Some(false), + _ => None, + }), + use_autoclose: None, + use_on_type_format: self.read_bool("editor.formatOnType"), + whitespace_map: None, + wrap_guides: self + .read_value("editor.rulers") + .and_then(|v| v.as_array()) + .map(|v| { + v.iter() + .flat_map(|n| n.as_u64().map(|n| n as usize)) + .collect() + }), } } + + fn file_types(&self) -> Option, ExtendingVec>> { + // vscodes file association map is inverted from ours, so we flip the mapping before merging + let mut associations: HashMap, ExtendingVec> = HashMap::default(); + let map = self.read_value("files.associations")?.as_object()?; + for (k, v) 
in map { + let Some(v) = v.as_str() else { continue }; + associations.entry(v.into()).or_default().0.push(k.clone()); + } + skip_default(associations) + } + + fn edit_predictions_settings_content(&self) -> Option { + let disabled_globs = self + .read_value("cursor.general.globalCursorIgnoreList")? + .as_array()?; + + skip_default(EditPredictionSettingsContent { + disabled_globs: skip_default( + disabled_globs + .iter() + .filter_map(|glob| glob.as_str()) + .map(|s| s.to_string()) + .collect(), + ), + ..Default::default() + }) + } + + fn outline_panel_settings_content(&self) -> Option { + skip_default(OutlinePanelSettingsContent { + file_icons: self.read_bool("outline.icons"), + folder_icons: self.read_bool("outline.icons"), + git_status: self.read_bool("git.decorations.enabled"), + ..Default::default() + }) + } + + fn node_binary_settings(&self) -> Option { + // this just sets the binary name instead of a full path so it relies on path lookup + // resolving to the one you want + skip_default(NodeBinarySettings { + npm_path: self.read_enum("npm.packageManager", |s| match s { + v @ ("npm" | "yarn" | "bun" | "pnpm") => Some(v.to_owned()), + _ => None, + }), + ..Default::default() + }) + } + + fn git_settings_content(&self) -> Option { + let inline_blame = self.read_bool("git.blame.editorDecoration.enabled")?; + skip_default(GitSettings { + inline_blame: Some(InlineBlameSettings { + enabled: Some(inline_blame), + ..Default::default() + }), + ..Default::default() + }) + } + + fn context_servers(&self) -> HashMap, ContextServerSettingsContent> { + #[derive(Deserialize)] + struct VsCodeContextServerCommand { + command: PathBuf, + args: Option>, + env: Option>, + // note: we don't support envFile and type + } + let Some(mcp) = self.read_value("mcp").and_then(|v| v.as_object()) else { + return Default::default(); + }; + mcp.iter() + .filter_map(|(k, v)| { + Some(( + k.clone().into(), + ContextServerSettingsContent::Custom { + enabled: true, + command: serde_json::from_value::(v.clone()) + .ok() + .map(|cmd| ContextServerCommand { + path: cmd.command, + args: cmd.args.unwrap_or_default(), + env: cmd.env, + timeout: None, + })?, + }, + )) + }) + .collect() + } + + fn item_settings_content(&self) -> Option { + skip_default(ItemSettingsContent { + git_status: self.read_bool("git.decorations.enabled"), + close_position: self.read_enum("workbench.editor.tabActionLocation", |s| match s { + "right" => Some(ClosePosition::Right), + "left" => Some(ClosePosition::Left), + _ => None, + }), + file_icons: self.read_bool("workbench.editor.showIcons"), + activate_on_close: self + .read_bool("workbench.editor.focusRecentEditorAfterClose") + .map(|b| { + if b { + ActivateOnClose::History + } else { + ActivateOnClose::LeftNeighbour + } + }), + show_diagnostics: None, + show_close_button: self + .read_bool("workbench.editor.tabActionCloseVisibility") + .map(|b| { + if b { + ShowCloseButton::Always + } else { + ShowCloseButton::Hidden + } + }), + }) + } + + fn preview_tabs_settings_content(&self) -> Option { + skip_default(PreviewTabsSettingsContent { + enabled: self.read_bool("workbench.editor.enablePreview"), + enable_preview_from_file_finder: self + .read_bool("workbench.editor.enablePreviewFromQuickOpen"), + enable_preview_from_code_navigation: self + .read_bool("workbench.editor.enablePreviewFromCodeNavigation"), + }) + } + + fn tab_bar_settings_content(&self) -> Option { + skip_default(TabBarSettingsContent { + show: self.read_enum("workbench.editor.showTabs", |s| match s { + "multiple" => Some(true), + "single" 
| "none" => Some(false), + _ => None, + }), + show_nav_history_buttons: None, + show_tab_bar_buttons: self + .read_str("workbench.editor.editorActionsLocation") + .and_then(|str| if str == "hidden" { Some(false) } else { None }), + }) + } + + fn status_bar_settings_content(&self) -> Option { + skip_default(StatusBarSettingsContent { + show: self.read_bool("workbench.statusBar.visible"), + active_language_button: None, + cursor_position_button: None, + line_endings_button: None, + }) + } + + fn project_panel_settings_content(&self) -> Option { + let mut project_panel_settings = ProjectPanelSettingsContent { + auto_fold_dirs: self.read_bool("explorer.compactFolders"), + auto_reveal_entries: self.read_bool("explorer.autoReveal"), + button: None, + default_width: None, + dock: None, + drag_and_drop: None, + entry_spacing: None, + file_icons: None, + folder_icons: None, + git_status: self.read_bool("git.decorations.enabled"), + hide_gitignore: self.read_bool("explorer.excludeGitIgnore"), + hide_hidden: None, + hide_root: None, + indent_guides: None, + indent_size: None, + open_file_on_paste: None, + scrollbar: None, + show_diagnostics: self + .read_bool("problems.decorations.enabled") + .and_then(|b| if b { Some(ShowDiagnostics::Off) } else { None }), + starts_open: None, + sticky_scroll: None, + }; + + if let (Some(false), Some(false)) = ( + self.read_bool("explorer.decorations.badges"), + self.read_bool("explorer.decorations.colors"), + ) { + project_panel_settings.git_status = Some(false); + project_panel_settings.show_diagnostics = Some(ShowDiagnostics::Off); + } + + skip_default(project_panel_settings) + } + + fn telemetry_settings_content(&self) -> Option { + self.read_enum("telemetry.telemetryLevel", |level| { + let (metrics, diagnostics) = match level { + "all" => (true, true), + "error" | "crash" => (false, true), + "off" => (false, false), + _ => return None, + }; + Some(TelemetrySettingsContent { + metrics: Some(metrics), + diagnostics: Some(diagnostics), + }) + }) + } + + fn terminal_settings_content(&self) -> Option { + let (font_family, font_fallbacks) = self.read_fonts("terminal.integrated.fontFamily"); + skip_default(TerminalSettingsContent { + alternate_scroll: None, + blinking: self + .read_bool("terminal.integrated.cursorBlinking") + .map(|b| { + if b { + TerminalBlink::On + } else { + TerminalBlink::Off + } + }), + button: None, + copy_on_select: self.read_bool("terminal.integrated.copyOnSelection"), + cursor_shape: self.read_enum("terminal.integrated.cursorStyle", |s| match s { + "block" => Some(CursorShapeContent::Block), + "line" => Some(CursorShapeContent::Bar), + "underline" => Some(CursorShapeContent::Underline), + _ => None, + }), + default_height: None, + default_width: None, + dock: None, + font_fallbacks, + font_family, + font_features: None, + font_size: self.read_f32("terminal.integrated.fontSize"), + font_weight: None, + keep_selection_on_copy: None, + line_height: self + .read_f32("terminal.integrated.lineHeight") + .map(|lh| TerminalLineHeight::Custom(lh)), + max_scroll_history_lines: self.read_usize("terminal.integrated.scrollback"), + minimum_contrast: None, + option_as_meta: self.read_bool("terminal.integrated.macOptionIsMeta"), + project: self.project_terminal_settings_content(), + scrollbar: None, + toolbar: None, + }) + } + + fn project_terminal_settings_content(&self) -> ProjectTerminalSettingsContent { + #[cfg(target_os = "windows")] + let platform = "windows"; + #[cfg(target_os = "linux")] + let platform = "linux"; + #[cfg(target_os = "macos")] + let 
platform = "osx"; + #[cfg(target_os = "freebsd")] + let platform = "freebsd"; + let env = self + .read_value(&format!("terminal.integrated.env.{platform}")) + .and_then(|v| v.as_object()) + .map(|v| v.iter().map(|(k, v)| (k.clone(), v.to_string())).collect()); + + ProjectTerminalSettingsContent { + // TODO: handle arguments + shell: self + .read_string(&format!("terminal.integrated.{platform}Exec")) + .map(|s| Shell::Program(s)), + working_directory: None, + env, + detect_venv: None, + } + } + + fn theme_settings_content(&self) -> ThemeSettingsContent { + let (buffer_font_family, buffer_font_fallbacks) = self.read_fonts("editor.fontFamily"); + ThemeSettingsContent { + ui_font_size: None, + ui_font_family: None, + ui_font_fallbacks: None, + ui_font_features: None, + ui_font_weight: None, + buffer_font_family, + buffer_font_fallbacks, + buffer_font_size: self.read_f32("editor.fontSize"), + buffer_font_weight: self.read_f32("editor.fontWeight").map(|w| w.into()), + buffer_line_height: None, + buffer_font_features: None, + agent_ui_font_size: None, + agent_buffer_font_size: None, + theme: None, + icon_theme: None, + ui_density: None, + unnecessary_code_fade: None, + experimental_theme_overrides: None, + theme_overrides: Default::default(), + } + } + + fn workspace_settings_content(&self) -> WorkspaceSettingsContent { + WorkspaceSettingsContent { + active_pane_modifiers: self.active_pane_modifiers(), + autosave: self.read_enum("files.autoSave", |s| match s { + "off" => Some(AutosaveSetting::Off), + "afterDelay" => Some(AutosaveSetting::AfterDelay { + milliseconds: self + .read_value("files.autoSaveDelay") + .and_then(|v| v.as_u64()) + .unwrap_or(1000) + .into(), + }), + "onFocusChange" => Some(AutosaveSetting::OnFocusChange), + "onWindowChange" => Some(AutosaveSetting::OnWindowChange), + _ => None, + }), + bottom_dock_layout: None, + centered_layout: None, + close_on_file_delete: None, + command_aliases: Default::default(), + confirm_quit: self.read_enum("window.confirmBeforeClose", |s| match s { + "always" | "keyboardOnly" => Some(true), + "never" => Some(false), + _ => None, + }), + drop_target_size: None, + // workbench.editor.limit contains "enabled", "value", and "perEditorGroup" + // our semantics match if those are set to true, some N, and true respectively. 
+ // we'll ignore "perEditorGroup" for now since we only support a global max + max_tabs: if self.read_bool("workbench.editor.limit.enabled") == Some(true) { + self.read_usize("workbench.editor.limit.value") + .and_then(|n| NonZeroUsize::new(n)) + } else { + None + }, + on_last_window_closed: None, + pane_split_direction_horizontal: None, + pane_split_direction_vertical: None, + resize_all_panels_in_dock: None, + restore_on_file_reopen: self.read_bool("workbench.editor.restoreViewState"), + restore_on_startup: None, + show_call_status_icon: None, + use_system_path_prompts: self.read_bool("files.simpleDialog.enable"), + use_system_prompts: None, + use_system_window_tabs: self.read_bool("window.nativeTabs"), + when_closing_with_no_tabs: self.read_bool("window.closeWhenEmpty").map(|b| { + if b { + CloseWindowWhenNoItems::CloseWindow + } else { + CloseWindowWhenNoItems::KeepWindowOpen + } + }), + zoomed_padding: None, + } + } + + fn active_pane_modifiers(&self) -> Option { + if self.read_bool("accessibility.dimUnfocused.enabled") == Some(true) + && let Some(opacity) = self.read_f32("accessibility.dimUnfocused.opacity") + { + Some(ActivePaneModifiers { + border_size: None, + inactive_opacity: Some(InactiveOpacity(opacity)), + }) + } else { + None + } + } + + fn worktree_settings_content(&self) -> WorktreeSettingsContent { + WorktreeSettingsContent { + project_name: crate::Maybe::Unset, + file_scan_exclusions: self + .read_value("files.watcherExclude") + .and_then(|v| v.as_array()) + .map(|v| { + v.iter() + .filter_map(|n| n.as_str().map(str::to_owned)) + .collect::>() + }) + .filter(|r| !r.is_empty()), + file_scan_inclusions: self + .read_value("files.watcherInclude") + .and_then(|v| v.as_array()) + .map(|v| { + v.iter() + .filter_map(|n| n.as_str().map(str::to_owned)) + .collect::>() + }) + .filter(|r| !r.is_empty()), + private_files: None, + } + } +} + +fn skip_default(value: T) -> Option { + if value == T::default() { + None + } else { + Some(value) + } } diff --git a/crates/settings_macros/Cargo.toml b/crates/settings_macros/Cargo.toml index 06ce1d01e5fa9d25b5c0d3c742a2d325ec996e39..175c2f26a3a37e5f93db25ad69aa10912c3c6adb 100644 --- a/crates/settings_macros/Cargo.toml +++ b/crates/settings_macros/Cargo.toml @@ -18,7 +18,6 @@ default = [] [dependencies] quote.workspace = true syn.workspace = true -workspace-hack.workspace = true [dev-dependencies] settings.workspace = true diff --git a/crates/settings_profile_selector/Cargo.toml b/crates/settings_profile_selector/Cargo.toml index 189272e54be02ac46840838f6874be64d1e06321..23ccac2e43dec6c1ab335eeb2ffb4d9159d85859 100644 --- a/crates/settings_profile_selector/Cargo.toml +++ b/crates/settings_profile_selector/Cargo.toml @@ -18,7 +18,6 @@ gpui.workspace = true picker.workspace = true settings.workspace = true ui.workspace = true -workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index ab5e1b839510a990e17e7abea63e6412e4a10e4b..bbb1cb397b5a806bdbc5ff29b4954f1996ca32a5 100644 --- a/crates/settings_ui/Cargo.toml +++ b/crates/settings_ui/Cargo.toml @@ -26,7 +26,9 @@ fuzzy.workspace = true gpui.workspace = true menu.workspace = true paths.workspace = true +picker.workspace = true project.workspace = true +release_channel.workspace = true schemars.workspace = true search.workspace = true serde.workspace = true @@ -36,7 +38,6 @@ theme.workspace = true ui_input.workspace = true ui.workspace = true util.workspace = true 
-workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true log.workspace = true diff --git a/crates/settings_ui/src/components.rs b/crates/settings_ui/src/components.rs index a29aae3bb1f2ef086e6d3289b03fbe29000d0f45..5026ca806128baf27e0c95e0c45b47eba24c8e41 100644 --- a/crates/settings_ui/src/components.rs +++ b/crates/settings_ui/src/components.rs @@ -1,95 +1,9 @@ -use editor::Editor; -use gpui::{Focusable, div}; -use ui::{ - ActiveTheme as _, App, FluentBuilder as _, InteractiveElement as _, IntoElement, - ParentElement as _, RenderOnce, Styled as _, Window, -}; - -#[derive(IntoElement)] -pub struct SettingsEditor { - initial_text: Option, - placeholder: Option<&'static str>, - confirm: Option, &mut App)>>, - tab_index: Option, -} - -impl SettingsEditor { - pub fn new() -> Self { - Self { - initial_text: None, - placeholder: None, - confirm: None, - tab_index: None, - } - } - - pub fn with_initial_text(mut self, initial_text: String) -> Self { - self.initial_text = Some(initial_text); - self - } - - pub fn with_placeholder(mut self, placeholder: &'static str) -> Self { - self.placeholder = Some(placeholder); - self - } - - pub fn on_confirm(mut self, confirm: impl Fn(Option, &mut App) + 'static) -> Self { - self.confirm = Some(Box::new(confirm)); - self - } - - pub(crate) fn tab_index(mut self, arg: isize) -> Self { - self.tab_index = Some(arg); - self - } -} - -impl RenderOnce for SettingsEditor { - fn render(self, window: &mut Window, cx: &mut App) -> impl ui::IntoElement { - let editor = window.use_state(cx, { - move |window, cx| { - let mut editor = Editor::single_line(window, cx); - if let Some(text) = self.initial_text { - editor.set_text(text, window, cx); - } - - if let Some(placeholder) = self.placeholder { - editor.set_placeholder_text(placeholder, window, cx); - } - // todo(settings_ui): We should have an observe global use for settings store - // so whenever a settings file is updated, the settings ui updates too - editor - } - }); - - if let Some(tab_index) = self.tab_index { - editor.focus_handle(cx).tab_index(tab_index); - } - - let weak_editor = editor.downgrade(); - - let theme_colors = cx.theme().colors(); - - div() - .py_1() - .px_2() - .min_w_64() - .rounded_md() - .border_1() - .border_color(theme_colors.border) - .bg(theme_colors.editor_background) - .child(editor) - .when_some(self.confirm, |this, confirm| { - this.on_action::({ - move |_, _, cx| { - let Some(editor) = weak_editor.upgrade() else { - return; - }; - let new_value = editor.read_with(cx, |editor, cx| editor.text(cx)); - let new_value = (!new_value.is_empty()).then_some(new_value); - confirm(new_value, cx); - } - }) - }) - } -} +mod font_picker; +mod icon_theme_picker; +mod input_field; +mod theme_picker; + +pub use font_picker::font_picker; +pub use icon_theme_picker::icon_theme_picker; +pub use input_field::*; +pub use theme_picker::theme_picker; diff --git a/crates/ui_input/src/font_picker.rs b/crates/settings_ui/src/components/font_picker.rs similarity index 100% rename from crates/ui_input/src/font_picker.rs rename to crates/settings_ui/src/components/font_picker.rs diff --git a/crates/settings_ui/src/components/icon_theme_picker.rs b/crates/settings_ui/src/components/icon_theme_picker.rs new file mode 100644 index 0000000000000000000000000000000000000000..33a648f81bcacdc961d77d7a5532c9807072dbd2 --- /dev/null +++ b/crates/settings_ui/src/components/icon_theme_picker.rs @@ -0,0 +1,189 @@ +use std::sync::Arc; + +use fuzzy::{StringMatch, StringMatchCandidate}; +use 
gpui::{AnyElement, App, Context, DismissEvent, SharedString, Task, Window}; +use picker::{Picker, PickerDelegate}; +use theme::ThemeRegistry; +use ui::{ListItem, ListItemSpacing, prelude::*}; + +type IconThemePicker = Picker; + +pub struct IconThemePickerDelegate { + icon_themes: Vec, + filtered_themes: Vec, + selected_index: usize, + current_theme: SharedString, + on_theme_changed: Arc, +} + +impl IconThemePickerDelegate { + fn new( + current_theme: SharedString, + on_theme_changed: impl Fn(SharedString, &mut App) + 'static, + cx: &mut Context, + ) -> Self { + let theme_registry = ThemeRegistry::global(cx); + + let icon_themes: Vec = theme_registry + .list_icon_themes() + .into_iter() + .map(|theme_meta| theme_meta.name) + .collect(); + + let selected_index = icon_themes + .iter() + .position(|icon_themes| *icon_themes == current_theme) + .unwrap_or(0); + + let filtered_themes = icon_themes + .iter() + .enumerate() + .map(|(index, icon_themes)| StringMatch { + candidate_id: index, + string: icon_themes.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect(); + + Self { + icon_themes, + filtered_themes, + selected_index, + current_theme, + on_theme_changed: Arc::new(on_theme_changed), + } + } +} + +impl PickerDelegate for IconThemePickerDelegate { + type ListItem = AnyElement; + + fn match_count(&self) -> usize { + self.filtered_themes.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context) { + self.selected_index = ix.min(self.filtered_themes.len().saturating_sub(1)); + cx.notify(); + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Search icon theme…".into() + } + + fn update_matches( + &mut self, + query: String, + _window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let icon_themes = self.icon_themes.clone(); + let current_theme = self.current_theme.clone(); + + let matches: Vec = if query.is_empty() { + icon_themes + .iter() + .enumerate() + .map(|(index, icon_theme)| StringMatch { + candidate_id: index, + string: icon_theme.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + let _candidates: Vec = icon_themes + .iter() + .enumerate() + .map(|(id, icon_theme)| StringMatchCandidate::new(id, icon_theme.as_ref())) + .collect(); + + icon_themes + .iter() + .enumerate() + .filter(|(_, icon_theme)| icon_theme.to_lowercase().contains(&query.to_lowercase())) + .map(|(index, icon_theme)| StringMatch { + candidate_id: index, + string: icon_theme.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect() + }; + + let selected_index = if query.is_empty() { + icon_themes + .iter() + .position(|icon_theme| *icon_theme == current_theme) + .unwrap_or(0) + } else { + matches + .iter() + .position(|m| icon_themes[m.candidate_id] == current_theme) + .unwrap_or(0) + }; + + self.filtered_themes = matches; + self.selected_index = selected_index; + cx.notify(); + + Task::ready(()) + } + + fn confirm( + &mut self, + _secondary: bool, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(theme_match) = self.filtered_themes.get(self.selected_index) { + let theme = theme_match.string.clone(); + (self.on_theme_changed)(theme.into(), cx); + } + } + + fn dismissed(&mut self, window: &mut Window, cx: &mut Context) { + cx.defer_in(window, |picker, window, cx| { + picker.set_query("", window, cx); + }); + cx.emit(DismissEvent); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut 
Window, + _cx: &mut Context, + ) -> Option { + let theme_match = self.filtered_themes.get(ix)?; + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(selected) + .child(Label::new(theme_match.string.clone())) + .into_any_element(), + ) + } +} + +pub fn icon_theme_picker( + current_theme: SharedString, + on_theme_changed: impl Fn(SharedString, &mut App) + 'static, + window: &mut Window, + cx: &mut Context, +) -> IconThemePicker { + let delegate = IconThemePickerDelegate::new(current_theme, on_theme_changed, cx); + + Picker::uniform_list(delegate, window, cx) + .show_scrollbar(true) + .width(rems_from_px(210.)) + .max_height(Some(rems(18.).into())) +} diff --git a/crates/settings_ui/src/components/input_field.rs b/crates/settings_ui/src/components/input_field.rs new file mode 100644 index 0000000000000000000000000000000000000000..57917c321127baf2e96e3862106461331afaf86f --- /dev/null +++ b/crates/settings_ui/src/components/input_field.rs @@ -0,0 +1,96 @@ +use editor::Editor; +use gpui::{Focusable, div}; +use ui::{ + ActiveTheme as _, App, FluentBuilder as _, InteractiveElement as _, IntoElement, + ParentElement as _, RenderOnce, Styled as _, Window, +}; + +#[derive(IntoElement)] +pub struct SettingsInputField { + initial_text: Option, + placeholder: Option<&'static str>, + confirm: Option, &mut App)>>, + tab_index: Option, +} + +impl SettingsInputField { + pub fn new() -> Self { + Self { + initial_text: None, + placeholder: None, + confirm: None, + tab_index: None, + } + } + + pub fn with_initial_text(mut self, initial_text: String) -> Self { + self.initial_text = Some(initial_text); + self + } + + pub fn with_placeholder(mut self, placeholder: &'static str) -> Self { + self.placeholder = Some(placeholder); + self + } + + pub fn on_confirm(mut self, confirm: impl Fn(Option, &mut App) + 'static) -> Self { + self.confirm = Some(Box::new(confirm)); + self + } + + pub(crate) fn tab_index(mut self, arg: isize) -> Self { + self.tab_index = Some(arg); + self + } +} + +impl RenderOnce for SettingsInputField { + fn render(self, window: &mut Window, cx: &mut App) -> impl ui::IntoElement { + let editor = window.use_state(cx, { + move |window, cx| { + let mut editor = Editor::single_line(window, cx); + if let Some(text) = self.initial_text { + editor.set_text(text, window, cx); + } + + if let Some(placeholder) = self.placeholder { + editor.set_placeholder_text(placeholder, window, cx); + } + // todo(settings_ui): We should have an observe global use for settings store + // so whenever a settings file is updated, the settings ui updates too + editor + } + }); + + let weak_editor = editor.downgrade(); + + let theme_colors = cx.theme().colors(); + + div() + .py_1() + .px_2() + .min_w_64() + .rounded_md() + .border_1() + .border_color(theme_colors.border) + .bg(theme_colors.editor_background) + .when_some(self.tab_index, |this, tab_index| { + let focus_handle = editor.focus_handle(cx).tab_index(tab_index).tab_stop(true); + this.track_focus(&focus_handle) + .focus(|s| s.border_color(theme_colors.border_focused)) + }) + .child(editor) + .when_some(self.confirm, |this, confirm| { + this.on_action::({ + move |_, _, cx| { + let Some(editor) = weak_editor.upgrade() else { + return; + }; + let new_value = editor.read_with(cx, |editor, cx| editor.text(cx)); + let new_value = (!new_value.is_empty()).then_some(new_value); + confirm(new_value, cx); + } + }) + }) + } +} diff --git a/crates/settings_ui/src/components/theme_picker.rs 
b/crates/settings_ui/src/components/theme_picker.rs new file mode 100644 index 0000000000000000000000000000000000000000..2146ab314f94bb0c0535a462566e6673fc5601bc --- /dev/null +++ b/crates/settings_ui/src/components/theme_picker.rs @@ -0,0 +1,179 @@ +use std::sync::Arc; + +use fuzzy::{StringMatch, StringMatchCandidate}; +use gpui::{AnyElement, App, Context, DismissEvent, SharedString, Task, Window}; +use picker::{Picker, PickerDelegate}; +use theme::ThemeRegistry; +use ui::{ListItem, ListItemSpacing, prelude::*}; + +type ThemePicker = Picker; + +pub struct ThemePickerDelegate { + themes: Vec, + filtered_themes: Vec, + selected_index: usize, + current_theme: SharedString, + on_theme_changed: Arc, +} + +impl ThemePickerDelegate { + fn new( + current_theme: SharedString, + on_theme_changed: impl Fn(SharedString, &mut App) + 'static, + cx: &mut Context, + ) -> Self { + let theme_registry = ThemeRegistry::global(cx); + + let themes = theme_registry.list_names(); + let selected_index = themes + .iter() + .position(|theme| *theme == current_theme) + .unwrap_or(0); + + let filtered_themes = themes + .iter() + .enumerate() + .map(|(index, theme)| StringMatch { + candidate_id: index, + string: theme.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect(); + + Self { + themes, + filtered_themes, + selected_index, + current_theme, + on_theme_changed: Arc::new(on_theme_changed), + } + } +} + +impl PickerDelegate for ThemePickerDelegate { + type ListItem = AnyElement; + + fn match_count(&self) -> usize { + self.filtered_themes.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context) { + self.selected_index = ix.min(self.filtered_themes.len().saturating_sub(1)); + cx.notify(); + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Search theme…".into() + } + + fn update_matches( + &mut self, + query: String, + _window: &mut Window, + cx: &mut Context, + ) -> Task<()> { + let themes = self.themes.clone(); + let current_theme = self.current_theme.clone(); + + let matches: Vec = if query.is_empty() { + themes + .iter() + .enumerate() + .map(|(index, theme)| StringMatch { + candidate_id: index, + string: theme.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + let _candidates: Vec = themes + .iter() + .enumerate() + .map(|(id, theme)| StringMatchCandidate::new(id, theme.as_ref())) + .collect(); + + themes + .iter() + .enumerate() + .filter(|(_, theme)| theme.to_lowercase().contains(&query.to_lowercase())) + .map(|(index, theme)| StringMatch { + candidate_id: index, + string: theme.to_string(), + positions: Vec::new(), + score: 0.0, + }) + .collect() + }; + + let selected_index = if query.is_empty() { + themes + .iter() + .position(|theme| *theme == current_theme) + .unwrap_or(0) + } else { + matches + .iter() + .position(|m| themes[m.candidate_id] == current_theme) + .unwrap_or(0) + }; + + self.filtered_themes = matches; + self.selected_index = selected_index; + cx.notify(); + + Task::ready(()) + } + + fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context) { + if let Some(theme_match) = self.filtered_themes.get(self.selected_index) { + let theme = theme_match.string.clone(); + (self.on_theme_changed)(theme.into(), cx); + } + } + + fn dismissed(&mut self, window: &mut Window, cx: &mut Context) { + cx.defer_in(window, |picker, window, cx| { + picker.set_query("", window, cx); + }); + cx.emit(DismissEvent); + } + 
+ fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut Window, + _cx: &mut Context, + ) -> Option { + let theme_match = self.filtered_themes.get(ix)?; + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(selected) + .child(Label::new(theme_match.string.clone())) + .into_any_element(), + ) + } +} + +pub fn theme_picker( + current_theme: SharedString, + on_theme_changed: impl Fn(SharedString, &mut App) + 'static, + window: &mut Window, + cx: &mut Context, +) -> ThemePicker { + let delegate = ThemePickerDelegate::new(current_theme, on_theme_changed, cx); + + Picker::uniform_list(delegate, window, cx) + .show_scrollbar(true) + .width(rems_from_px(210.)) + .max_height(Some(rems(18.).into())) +} diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index afedde65962ed21f57277d5d9be95ff67b83ad4d..394e6821c85f68e08450ba18fe2e44959e0cf865 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1,111 +1,175 @@ +use gpui::App; use settings::{LanguageSettingsContent, SettingsContent}; use std::sync::Arc; +use strum::IntoDiscriminant as _; use ui::{IntoElement, SharedString}; use crate::{ - LOCAL, SettingField, SettingItem, SettingsFieldMetadata, SettingsPage, SettingsPageItem, - SubPageLink, USER, sub_page_stack, + DynamicItem, PROJECT, SettingField, SettingItem, SettingsFieldMetadata, SettingsPage, + SettingsPageItem, SubPageLink, USER, all_language_names, sub_page_stack, }; -pub(crate) fn settings_data() -> Vec { +const DEFAULT_STRING: String = String::new(); +/// A default empty string reference. Useful in `pick` functions for cases either in dynamic item fields, or when dealing with `settings::Maybe` +/// to avoid the "NO DEFAULT" case. +const DEFAULT_EMPTY_STRING: Option<&String> = Some(&DEFAULT_STRING); + +const DEFAULT_SHARED_STRING: SharedString = SharedString::new_static(""); +/// A default empty string reference. Useful in `pick` functions for cases either in dynamic item fields, or when dealing with `settings::Maybe` +/// to avoid the "NO DEFAULT" case. +const DEFAULT_EMPTY_SHARED_STRING: Option<&SharedString> = Some(&DEFAULT_SHARED_STRING); + +pub(crate) fn settings_data(cx: &App) -> Vec { vec![ SettingsPage { title: "General", items: vec![ SettingsPageItem::SectionHeader("General Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Confirm Quit", - description: "Whether to confirm before quitting Zed", + files: PROJECT, + title: "Project Name", + description: "The displayed name of this project. 
If left empty, the root directory name will be displayed.", + field: Box::new( + SettingField { + json_path: Some("project_name"), + pick: |settings_content| { + settings_content.project.worktree.project_name.as_ref()?.as_ref().or(DEFAULT_EMPTY_STRING) + }, + write: |settings_content, value| { + settings_content.project.worktree.project_name = settings::Maybe::Set(value.filter(|name| !name.is_empty())); + }, + } + ), + metadata: Some(Box::new(SettingsFieldMetadata { placeholder: Some("Project Name"), ..Default::default() })), + }), + SettingsPageItem::SettingItem(SettingItem { + title: "When Closing With No Tabs", + description: "What to do when using the 'close active item' action with no tabs.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.confirm_quit, - pick_mut: |settings_content| &mut settings_content.workspace.confirm_quit, + json_path: Some("when_closing_with_no_tabs"), + pick: |settings_content| { + settings_content + .workspace + .when_closing_with_no_tabs + .as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.when_closing_with_no_tabs = value; + }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Restore On Startup", - description: "Whether to restore previous session when opening Zed", + title: "On Last Window Closed", + description: "What to do when the last window is closed.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.restore_on_startup, - pick_mut: |settings_content| { - &mut settings_content.workspace.restore_on_startup + json_path: Some("on_last_window_closed"), + pick: |settings_content| { + settings_content.workspace.on_last_window_closed.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.on_last_window_closed = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Restore File State", - description: "Whether to restore previous file state when reopening", + title: "Use System Path Prompts", + description: "Use native OS dialogs for 'Open' and 'Save As'.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.restore_on_file_reopen, - pick_mut: |settings_content| { - &mut settings_content.workspace.restore_on_file_reopen + json_path: Some("use_system_path_prompts"), + pick: |settings_content| { + settings_content.workspace.use_system_path_prompts.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.use_system_path_prompts = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Close on File Delete", - description: "Whether to automatically close files that have been deleted", + title: "Use System Prompts", + description: "Use native OS dialogs for confirmations.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.close_on_file_delete, - pick_mut: |settings_content| { - &mut settings_content.workspace.close_on_file_delete + json_path: Some("use_system_prompts"), + pick: |settings_content| { + settings_content.workspace.use_system_prompts.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.use_system_prompts = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "When Closing With No Tabs", - description: "What to do when using the 'close active item' action with no tabs", + title: "Redact Private Values", + description: "Hide the 
values of variables in private files.", field: Box::new(SettingField { + json_path: Some("redact_private_values"), pick: |settings_content| { - &settings_content.workspace.when_closing_with_no_tabs + settings_content.editor.redact_private_values.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.workspace.when_closing_with_no_tabs + write: |settings_content, value| { + settings_content.editor.redact_private_values = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "On Last Window Closed", - description: "What to do when the last window is closed", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.on_last_window_closed, - pick_mut: |settings_content| { - &mut settings_content.workspace.on_last_window_closed - }, - }), + title: "Private Files", + description: "Globs to match against file paths to determine if a file is private.", + field: Box::new( + SettingField { + json_path: Some("worktree.private_files"), + pick: |settings_content| { + settings_content.project.worktree.private_files.as_ref() + }, + write: |settings_content, value| { + settings_content.project.worktree.private_files = value; + }, + } + .unimplemented(), + ), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Workspace Restoration"), SettingsPageItem::SettingItem(SettingItem { - title: "Use System Path Prompts", - description: "Whether to use native OS dialogs for 'Open' and 'Save As'", + title: "Restore Unsaved Buffers", + description: "Whether or not to restore unsaved buffers on restart.", field: Box::new(SettingField { + json_path: Some("session.restore_unsaved_buffers"), pick: |settings_content| { - &settings_content.workspace.use_system_path_prompts - }, - pick_mut: |settings_content| { - &mut settings_content.workspace.use_system_path_prompts + settings_content + .session + .as_ref() + .and_then(|session| session.restore_unsaved_buffers.as_ref()) + }, + write: |settings_content, value| { + settings_content + .session + .get_or_insert_default() + .restore_unsaved_buffers = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Use System Prompts", - description: "Whether to use native OS dialogs for confirmations", + title: "Restore On Startup", + description: "What to restore from the previous session when opening Zed.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.use_system_prompts, - pick_mut: |settings_content| { - &mut settings_content.workspace.use_system_prompts + json_path: Some("restore_on_startup"), + pick: |settings_content| { + settings_content.workspace.restore_on_startup.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.restore_on_startup = value; }, }), metadata: None, @@ -116,12 +180,15 @@ pub(crate) fn settings_data() -> Vec { // todo(settings_ui): Implement another setting item type that just shows an edit in settings.json files: USER, title: "Preview Channel", - description: "Which settings should be activated only in Preview build of Zed", + description: "Which settings should be activated only in Preview build of Zed.", field: Box::new( SettingField { - pick: |settings_content| &settings_content.workspace.use_system_prompts, - pick_mut: |settings_content| { - &mut settings_content.workspace.use_system_prompts + json_path: Some("use_system_prompts"), + pick: |settings_content| { + settings_content.workspace.use_system_prompts.as_ref() + }, + write: 
|settings_content, value| { + settings_content.workspace.use_system_prompts = value; }, } .unimplemented(), @@ -131,12 +198,15 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SettingItem(SettingItem { files: USER, title: "Settings Profiles", - description: "Any number of settings profiles that are temporarily applied on top of your existing user settings", + description: "Any number of settings profiles that are temporarily applied on top of your existing user settings.", field: Box::new( SettingField { - pick: |settings_content| &settings_content.workspace.use_system_prompts, - pick_mut: |settings_content| { - &mut settings_content.workspace.use_system_prompts + json_path: Some(""), + pick: |settings_content| { + settings_content.workspace.use_system_prompts.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.use_system_prompts = value; }, } .unimplemented(), @@ -146,20 +216,20 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Privacy"), SettingsPageItem::SettingItem(SettingItem { title: "Telemetry Diagnostics", - description: "Send debug information like crash reports", + description: "Send debug information like crash reports.", field: Box::new(SettingField { + json_path: Some("telemetry.diagnostics"), pick: |settings_content| { - if let Some(telemetry) = &settings_content.telemetry { - &telemetry.diagnostics - } else { - &None - } + settings_content + .telemetry + .as_ref() + .and_then(|telemetry| telemetry.diagnostics.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .telemetry .get_or_insert_default() - .diagnostics + .diagnostics = value; }, }), metadata: None, @@ -167,17 +237,31 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Telemetry Metrics", - description: "Send anonymized usage data like what languages you're using Zed with", + description: "Send anonymized usage data like what languages you're using Zed with.", field: Box::new(SettingField { + json_path: Some("telemetry.metrics"), pick: |settings_content| { - if let Some(telemetry) = &settings_content.telemetry { - &telemetry.metrics - } else { - &None - } + settings_content + .telemetry + .as_ref() + .and_then(|telemetry| telemetry.metrics.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content.telemetry.get_or_insert_default().metrics + write: |settings_content, value| { + settings_content.telemetry.get_or_insert_default().metrics = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Auto Update"), + SettingsPageItem::SettingItem(SettingItem { + title: "Auto Update", + description: "Whether or not to automatically check for updates.", + field: Box::new(SettingField { + json_path: Some("auto_update"), + pick: |settings_content| settings_content.auto_update.as_ref(), + write: |settings_content, value| { + settings_content.auto_update = value; }, }), metadata: None, @@ -186,79 +270,470 @@ pub(crate) fn settings_data() -> Vec { ], }, SettingsPage { - title: "Appearance & Behavior", + title: "Appearance", items: vec![ SettingsPageItem::SectionHeader("Theme"), - // todo(settings_ui): Figure out how we want to add these - SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Theme Mode", - description: "How to select the theme", - field: Box::new( - SettingField { - pick: |settings_content| &settings_content.theme.theme, - pick_mut: |settings_content| &mut 
settings_content.theme.theme, + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER, + title: "Theme Mode", + description: "Choose a static, fixed theme or dynamically select themes based on appearance and light/dark modes.", + field: Box::new(SettingField { + json_path: Some("theme$"), + pick: |settings_content| { + Some(&dynamic_variants::()[ + settings_content + .theme + .theme + .as_ref()? + .discriminant() as usize]) + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content.theme.theme.get_or_insert_with(|| { + settings::ThemeSelection::Static(theme::ThemeName(theme::default_theme(theme::SystemAppearance::default().0).into())) + }); + *settings_value = match value { + settings::ThemeSelectionDiscriminants::Static => { + let name = match settings_value { + settings::ThemeSelection::Static(_) => return, + settings::ThemeSelection::Dynamic { mode, light, dark } => { + match mode { + theme::ThemeMode::Light => light.clone(), + theme::ThemeMode::Dark => dark.clone(), + theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice + } + }, + }; + settings::ThemeSelection::Static(name) + }, + settings::ThemeSelectionDiscriminants::Dynamic => { + let static_name = match settings_value { + settings::ThemeSelection::Static(theme_name) => theme_name.clone(), + settings::ThemeSelection::Dynamic {..} => return, + }; + + settings::ThemeSelection::Dynamic { + mode: settings::ThemeMode::System, + light: static_name.clone(), + dark: static_name, + } + }, + }; + }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.theme.theme.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::ThemeSelectionDiscriminants::Static => vec![ + SettingItem { + files: USER, + title: "Theme Name", + description: "The name of your selected theme.", + field: Box::new(SettingField { + json_path: Some("theme"), + pick: |settings_content| { + match settings_content.theme.theme.as_ref() { + Some(settings::ThemeSelection::Static(name)) => Some(name), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .theme.as_mut() { + Some(settings::ThemeSelection::Static(theme_name)) => *theme_name = value, + _ => return + } + }, + }), + metadata: None, + } + ], + settings::ThemeSelectionDiscriminants::Dynamic => vec![ + SettingItem { + files: USER, + title: "Mode", + description: "Choose whether to use the selected light or dark theme or to follow your OS appearance configuration.", + field: Box::new(SettingField { + json_path: Some("theme.mode"), + pick: |settings_content| { + match settings_content.theme.theme.as_ref() { + Some(settings::ThemeSelection::Dynamic { mode, ..}) => Some(mode), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .theme.as_mut() { + Some(settings::ThemeSelection::Dynamic{ mode, ..}) => *mode = value, + _ => return + } + }, + }), + metadata: None, + }, + SettingItem { + files: USER, + title: "Light Theme", + description: "The theme to use when mode is set to light, or when mode is set to system and it is in light mode.", + field: Box::new(SettingField { + json_path: Some("theme.light"), + pick: |settings_content| { + match settings_content.theme.theme.as_ref() { + 
Some(settings::ThemeSelection::Dynamic { light, ..}) => Some(light), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .theme.as_mut() { + Some(settings::ThemeSelection::Dynamic{ light, ..}) => *light = value, + _ => return + } + }, + }), + metadata: None, + }, + SettingItem { + files: USER, + title: "Dark Theme", + description: "The theme to use when mode is set to dark, or when mode is set to system and it is in dark mode.", + field: Box::new(SettingField { + json_path: Some("theme.dark"), + pick: |settings_content| { + match settings_content.theme.theme.as_ref() { + Some(settings::ThemeSelection::Dynamic { dark, ..}) => Some(dark), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .theme.as_mut() { + Some(settings::ThemeSelection::Dynamic{ dark, ..}) => *dark = value, + _ => return + } + }, + }), + metadata: None, + } + ], } - .unimplemented(), - ), - metadata: None, + }).collect(), }), - SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Icon Theme", - // todo(settings_ui) - // This description is misleading because the icon theme is used in more places than the file explorer) - description: "Choose the icon theme for file explorer", - field: Box::new( - SettingField { - pick: |settings_content| &settings_content.theme.icon_theme, - pick_mut: |settings_content| &mut settings_content.theme.icon_theme, + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER, + title: "Icon Theme", + description: "The custom set of icons Zed will associate with files and directories.", + field: Box::new(SettingField { + json_path: Some("icon_theme$"), + pick: |settings_content| { + Some(&dynamic_variants::()[ + settings_content + .theme + .icon_theme + .as_ref()? 
+ .discriminant() as usize]) + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content.theme.icon_theme.get_or_insert_with(|| { + settings::IconThemeSelection::Static(settings::IconThemeName(theme::default_icon_theme().name.clone().into())) + }); + *settings_value = match value { + settings::IconThemeSelectionDiscriminants::Static => { + let name = match settings_value { + settings::IconThemeSelection::Static(_) => return, + settings::IconThemeSelection::Dynamic { mode, light, dark } => { + match mode { + theme::ThemeMode::Light => light.clone(), + theme::ThemeMode::Dark => dark.clone(), + theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice + } + }, + }; + settings::IconThemeSelection::Static(name) + }, + settings::IconThemeSelectionDiscriminants::Dynamic => { + let static_name = match settings_value { + settings::IconThemeSelection::Static(theme_name) => theme_name.clone(), + settings::IconThemeSelection::Dynamic {..} => return, + }; + + settings::IconThemeSelection::Dynamic { + mode: settings::ThemeMode::System, + light: static_name.clone(), + dark: static_name, + } + }, + }; + }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.theme.icon_theme.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::IconThemeSelectionDiscriminants::Static => vec![ + SettingItem { + files: USER, + title: "Icon Theme Name", + description: "The name of your selected icon theme.", + field: Box::new(SettingField { + json_path: Some("icon_theme$string"), + pick: |settings_content| { + match settings_content.theme.icon_theme.as_ref() { + Some(settings::IconThemeSelection::Static(name)) => Some(name), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .icon_theme.as_mut() { + Some(settings::IconThemeSelection::Static(theme_name)) => *theme_name = value, + _ => return + } + }, + }), + metadata: None, + } + ], + settings::IconThemeSelectionDiscriminants::Dynamic => vec![ + SettingItem { + files: USER, + title: "Mode", + description: "Choose whether to use the selected light or dark icon theme or to follow your OS appearance configuration.", + field: Box::new(SettingField { + json_path: Some("icon_theme"), + pick: |settings_content| { + match settings_content.theme.icon_theme.as_ref() { + Some(settings::IconThemeSelection::Dynamic { mode, ..}) => Some(mode), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .icon_theme.as_mut() { + Some(settings::IconThemeSelection::Dynamic{ mode, ..}) => *mode = value, + _ => return + } + }, + }), + metadata: None, + }, + SettingItem { + files: USER, + title: "Light Icon Theme", + description: "The icon theme to use when mode is set to light, or when mode is set to system and it is in light mode.", + field: Box::new(SettingField { + json_path: Some("icon_theme.light"), + pick: |settings_content| { + match settings_content.theme.icon_theme.as_ref() { + Some(settings::IconThemeSelection::Dynamic { light, ..}) => Some(light), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .icon_theme.as_mut() { + Some(settings::IconThemeSelection::Dynamic{ light, ..}) => *light = value, + _ => return + } + }, 
+ }), + metadata: None, + }, + SettingItem { + files: USER, + title: "Dark Icon Theme", + description: "The icon theme to use when mode is set to dark, or when mode is set to system and it is in dark mode.", + field: Box::new(SettingField { + json_path: Some("icon_theme.dark"), + pick: |settings_content| { + match settings_content.theme.icon_theme.as_ref() { + Some(settings::IconThemeSelection::Dynamic { dark, ..}) => Some(dark), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .icon_theme.as_mut() { + Some(settings::IconThemeSelection::Dynamic{ dark, ..}) => *dark = value, + _ => return + } + }, + }), + metadata: None, + } + ], } - .unimplemented(), - ), - metadata: None, + }).collect(), }), - SettingsPageItem::SectionHeader("Fonts"), + SettingsPageItem::SectionHeader("Buffer Font"), SettingsPageItem::SettingItem(SettingItem { - title: "Buffer Font Family", - description: "Font family for editor text", + title: "Font Family", + description: "Font family for editor text.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.buffer_font_family, - pick_mut: |settings_content| &mut settings_content.theme.buffer_font_family, + json_path: Some("buffer_font_family"), + pick: |settings_content| settings_content.theme.buffer_font_family.as_ref(), + write: |settings_content, value|{ settings_content.theme.buffer_font_family = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Buffer Font Size", - description: "Font size for editor text", + title: "Font Size", + description: "Font size for editor text.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.buffer_font_size, - pick_mut: |settings_content| &mut settings_content.theme.buffer_font_size, + json_path: Some("buffer_font_size"), + pick: |settings_content| settings_content.theme.buffer_font_size.as_ref(), + write: |settings_content, value|{ settings_content.theme.buffer_font_size = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Buffer Font Weight", - description: "Font weight for editor text (100-900)", + title: "Font Weight", + description: "Font weight for editor text (100-900).", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.buffer_font_weight, - pick_mut: |settings_content| &mut settings_content.theme.buffer_font_weight, + json_path: Some("buffer_font_weight"), + pick: |settings_content| settings_content.theme.buffer_font_weight.as_ref(), + write: |settings_content, value|{ settings_content.theme.buffer_font_weight = value;}, }), metadata: None, files: USER, }), - // todo(settings_ui): This needs custom ui + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER, + title: "Line Height", + description: "Line height for editor text.", + field: Box::new(SettingField { + json_path: Some("buffer_line_height$"), + pick: |settings_content| { + Some(&dynamic_variants::()[ + settings_content + .theme + .buffer_line_height + .as_ref()? 
+ .discriminant() as usize]) + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content.theme.buffer_line_height.get_or_insert_with(|| { + settings::BufferLineHeight::default() + }); + *settings_value = match value { + settings::BufferLineHeightDiscriminants::Comfortable => { + settings::BufferLineHeight::Comfortable + }, + settings::BufferLineHeightDiscriminants::Standard => { + settings::BufferLineHeight::Standard + }, + settings::BufferLineHeightDiscriminants::Custom => { + let custom_value = theme::BufferLineHeight::from(*settings_value).value(); + settings::BufferLineHeight::Custom(custom_value) + }, + }; + }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.theme.buffer_line_height.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::BufferLineHeightDiscriminants::Comfortable => vec![], + settings::BufferLineHeightDiscriminants::Standard => vec![], + settings::BufferLineHeightDiscriminants::Custom => vec![ + SettingItem { + files: USER, + title: "Custom Line Height", + description: "Custom line height value (must be at least 1.0).", + field: Box::new(SettingField { + json_path: Some("buffer_line_height"), + pick: |settings_content| { + match settings_content.theme.buffer_line_height.as_ref() { + Some(settings::BufferLineHeight::Custom(value)) => Some(value), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .theme + .buffer_line_height.as_mut() { + Some(settings::BufferLineHeight::Custom(line_height)) => *line_height = f32::max(value, 1.0), + _ => return + } + }, + }), + metadata: None, + } + ], + } + }).collect(), + }), SettingsPageItem::SettingItem(SettingItem { files: USER, - title: "Buffer Line Height", - description: "Line height for editor text", + title: "Font Features", + description: "The OpenType features to enable for rendering in text buffers.", field: Box::new( SettingField { - pick: |settings_content| &settings_content.theme.buffer_line_height, - pick_mut: |settings_content| { - &mut settings_content.theme.buffer_line_height + json_path: Some("buffer_font_features"), + pick: |settings_content| { + settings_content.theme.buffer_font_features.as_ref() + }, + write: |settings_content, value| { + settings_content.theme.buffer_font_features = value; + }, } .unimplemented(), @@ -266,135 +741,180 @@ pub(crate) fn settings_data() -> Vec { metadata: None, }), SettingsPageItem::SettingItem(SettingItem { - title: "UI Font Family", - description: "Font family for UI elements", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.ui_font_family, - pick_mut: |settings_content| &mut settings_content.theme.ui_font_family, - }), - metadata: None, files: USER, + title: "Font Fallbacks", + description: "The font fallbacks to use for rendering in text buffers.", + field: Box::new( + SettingField { + json_path: Some("buffer_font_fallbacks"), + pick: |settings_content| { + settings_content.theme.buffer_font_fallbacks.as_ref() + }, + write: |settings_content, value| { + settings_content.theme.buffer_font_fallbacks = value; + + }, + } + .unimplemented(), + ), + metadata: None, }), + SettingsPageItem::SectionHeader("UI Font"), SettingsPageItem::SettingItem(SettingItem { - title: "UI Font Size", - description: "Font size for UI elements", + title: "Font Family", + description: "Font family for 
UI elements.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.ui_font_size, - pick_mut: |settings_content| &mut settings_content.theme.ui_font_size, + json_path: Some("ui_font_family"), + pick: |settings_content| settings_content.theme.ui_font_family.as_ref(), + write: |settings_content, value|{ settings_content.theme.ui_font_family = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "UI Font Weight", - description: "Font weight for UI elements (100-900)", + title: "Font Size", + description: "Font size for UI elements.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.ui_font_weight, - pick_mut: |settings_content| &mut settings_content.theme.ui_font_weight, + json_path: Some("ui_font_size"), + pick: |settings_content| settings_content.theme.ui_font_size.as_ref(), + write: |settings_content, value|{ settings_content.theme.ui_font_size = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Agent Panel UI Font Size", - description: "Font size for agent response text in the agent panel. Falls back to the regular UI font size.", + title: "Font Weight", + description: "Font weight for UI elements (100-900).", field: Box::new(SettingField { - pick: |settings_content| { - if settings_content.theme.agent_ui_font_size.is_some() { - &settings_content.theme.agent_ui_font_size - } else { - &settings_content.theme.ui_font_size - } - }, - pick_mut: |settings_content| &mut settings_content.theme.agent_ui_font_size, + json_path: Some("ui_font_weight"), + pick: |settings_content| settings_content.theme.ui_font_weight.as_ref(), + write: |settings_content, value|{ settings_content.theme.ui_font_weight = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Agent Panel Buffer Font Size", - description: "Font size for user messages text in the agent panel", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.agent_buffer_font_size, - pick_mut: |settings_content| { - &mut settings_content.theme.agent_buffer_font_size - }, - }), - metadata: None, files: USER, + title: "Font Features", + description: "The OpenType features to enable for rendering in UI elements.", + field: Box::new( + SettingField { + json_path: Some("ui_font_features"), + pick: |settings_content| { + settings_content.theme.ui_font_features.as_ref() + }, + write: |settings_content, value| { + settings_content.theme.ui_font_features = value; + + }, + } + .unimplemented(), + ), + metadata: None, }), - SettingsPageItem::SectionHeader("Keymap"), SettingsPageItem::SettingItem(SettingItem { - title: "Base Keymap", - description: "The name of a base set of key bindings to use", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.base_keymap, - pick_mut: |settings_content| &mut settings_content.base_keymap, - }), - metadata: None, files: USER, + title: "Font Fallbacks", + description: "The font fallbacks to use for rendering in the UI.", + field: Box::new( + SettingField { + json_path: Some("ui_font_fallbacks"), + pick: |settings_content| { + settings_content.theme.ui_font_fallbacks.as_ref() + }, + write: |settings_content, value| { + settings_content.theme.ui_font_fallbacks = value; + + }, + } + .unimplemented(), + ), + metadata: None, }), - // todo(settings_ui): Vim/Helix Mode should be apart of one type because it's undefined - // behavior to have them both enabled at the same time + 
SettingsPageItem::SectionHeader("Agent Panel Font"), SettingsPageItem::SettingItem(SettingItem { - title: "Vim Mode", - description: "Whether to enable vim modes and key bindings", + title: "UI Font Size", + description: "Font size for agent response text in the agent panel. Falls back to the regular UI font size.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.vim_mode, - pick_mut: |settings_content| &mut settings_content.vim_mode, + json_path: Some("agent_ui_font_size"), + pick: |settings_content| { + settings_content + .theme + .agent_ui_font_size + .as_ref() + .or(settings_content.theme.ui_font_size.as_ref()) + }, + write: |settings_content, value|{ settings_content.theme.agent_ui_font_size = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Helix Mode", - description: "Whether to enable helix modes and key bindings", + title: "Buffer Font Size", + description: "Font size for user messages text in the agent panel.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.helix_mode, - pick_mut: |settings_content| &mut settings_content.helix_mode, + json_path: Some("agent_buffer_font_size"), + pick: |settings_content| { + settings_content + .theme + .agent_buffer_font_size + .as_ref() + .or(settings_content.theme.buffer_font_size.as_ref()) + }, + write: |settings_content, value| { + settings_content.theme.agent_buffer_font_size = value; + + }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Cursor"), SettingsPageItem::SettingItem(SettingItem { title: "Multi Cursor Modifier", - description: "Modifier key for adding multiple cursors", + description: "Modifier key for adding multiple cursors.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.multi_cursor_modifier, - pick_mut: |settings_content| { - &mut settings_content.editor.multi_cursor_modifier + json_path: Some("multi_cursor_modifier"), + pick: |settings_content| { + settings_content.editor.multi_cursor_modifier.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.multi_cursor_modifier = value; + }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Cursor"), SettingsPageItem::SettingItem(SettingItem { title: "Cursor Blink", - description: "Whether the cursor blinks in the editor", + description: "Whether the cursor blinks in the editor.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.cursor_blink, - pick_mut: |settings_content| &mut settings_content.editor.cursor_blink, + json_path: Some("cursor_blink"), + pick: |settings_content| settings_content.editor.cursor_blink.as_ref(), + write: |settings_content, value|{ settings_content.editor.cursor_blink = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { title: "Cursor Shape", - description: "Cursor shape for the editor", + description: "Cursor shape for the editor.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.cursor_shape, - pick_mut: |settings_content| &mut settings_content.editor.cursor_shape, + json_path: Some("cursor_shape"), + pick: |settings_content| settings_content.editor.cursor_shape.as_ref(), + write: |settings_content, value|{ settings_content.editor.cursor_shape = value;}, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { title: "Hide Mouse", - description: "When to hide the mouse cursor", + description: "When to hide the mouse 
cursor.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.hide_mouse, - pick_mut: |settings_content| &mut settings_content.editor.hide_mouse, + json_path: Some("hide_mouse"), + pick: |settings_content| settings_content.editor.hide_mouse.as_ref(), + write: |settings_content, value|{ settings_content.editor.hide_mouse = value;}, }), metadata: None, files: USER, @@ -402,11 +922,15 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Highlighting"), SettingsPageItem::SettingItem(SettingItem { title: "Unnecessary Code Fade", - description: "How much to fade out unused code (0.0 - 0.9)", + description: "How much to fade out unused code (0.0 - 0.9).", field: Box::new(SettingField { - pick: |settings_content| &settings_content.theme.unnecessary_code_fade, - pick_mut: |settings_content| { - &mut settings_content.theme.unnecessary_code_fade + json_path: Some("unnecessary_code_fade"), + pick: |settings_content| { + settings_content.theme.unnecessary_code_fade.as_ref() + }, + write: |settings_content, value| { + settings_content.theme.unnecessary_code_fade = value; + }, }), metadata: None, @@ -414,11 +938,15 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Current Line Highlight", - description: "How to highlight the current line", + description: "How to highlight the current line.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.current_line_highlight, - pick_mut: |settings_content| { - &mut settings_content.editor.current_line_highlight + json_path: Some("current_line_highlight"), + pick: |settings_content| { + settings_content.editor.current_line_highlight.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.current_line_highlight = value; + }, }), metadata: None, @@ -426,11 +954,15 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Selection Highlight", - description: "Whether to highlight all occurrences of selected text", + description: "Highlight all occurrences of selected text.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.selection_highlight, - pick_mut: |settings_content| { - &mut settings_content.editor.selection_highlight + json_path: Some("selection_highlight"), + pick: |settings_content| { + settings_content.editor.selection_highlight.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.selection_highlight = value; + }, }), metadata: None, @@ -438,10 +970,30 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Rounded Selection", - description: "Whether the text selection should have rounded corners", + description: "Whether the text selection should have rounded corners.", + field: Box::new(SettingField { + json_path: Some("rounded_selection"), + pick: |settings_content| settings_content.editor.rounded_selection.as_ref(), + write: |settings_content, value|{ settings_content.editor.rounded_selection = value;}, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Minimum Contrast For Highlights", + description: "The minimum APCA perceptual contrast to maintain when rendering text over highlight backgrounds.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.rounded_selection, - pick_mut: |settings_content| &mut settings_content.editor.rounded_selection, + json_path: 
Some("minimum_contrast_for_highlights"), + pick: |settings_content| { + settings_content + .editor + .minimum_contrast_for_highlights + .as_ref() + }, + write: |settings_content, value| { + settings_content.editor.minimum_contrast_for_highlights = value; + + }, }), metadata: None, files: USER, @@ -449,147 +1001,99 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Guides"), SettingsPageItem::SettingItem(SettingItem { title: "Show Wrap Guides", - description: "Whether to show wrap guides (vertical rulers)", + description: "Show wrap guides (vertical rulers).", field: Box::new(SettingField { + json_path: Some("show_wrap_guides"), pick: |settings_content| { - &settings_content + settings_content .project .all_languages .defaults .show_wrap_guides + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content + .project .all_languages .defaults - .show_wrap_guides + .show_wrap_guides = value; }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), // todo(settings_ui): This needs a custom component SettingsPageItem::SettingItem(SettingItem { title: "Wrap Guides", - description: "Character counts at which to show wrap guides", + description: "Character counts at which to show wrap guides.", field: Box::new( SettingField { + json_path: Some("wrap_guides"), pick: |settings_content| { - &settings_content.project.all_languages.defaults.wrap_guides + settings_content + .project + .all_languages + .defaults + .wrap_guides + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.project.all_languages.defaults.wrap_guides + write: |settings_content, value| { + settings_content.project.all_languages.defaults.wrap_guides = value; }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("Whitespace"), + ], + }, + SettingsPage { + title: "Keymap", + items: vec![ + SettingsPageItem::SectionHeader("Base Keymap"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Whitespace", - description: "Whether to show tabs and spaces", + title: "Base Keymap", + description: "The name of a base set of key bindings to use.", field: Box::new(SettingField { - pick: |settings_content| { - &settings_content - .project - .all_languages - .defaults - .show_whitespaces - }, - pick_mut: |settings_content| { - &mut settings_content - .project - .all_languages - .defaults - .show_whitespaces + json_path: Some("base_keymap"), + pick: |settings_content| settings_content.base_keymap.as_ref(), + write: |settings_content, value| { + settings_content.base_keymap = value; }, }), - metadata: None, - files: USER | LOCAL, + metadata: Some(Box::new(SettingsFieldMetadata { + should_do_titlecase: Some(false), + ..Default::default() + })), + files: USER, }), - SettingsPageItem::SectionHeader("Layout"), + SettingsPageItem::SectionHeader("Modal Editing"), + // todo(settings_ui): Vim/Helix Mode should be apart of one type because it's undefined + // behavior to have them both enabled at the same time SettingsPageItem::SettingItem(SettingItem { - title: "Bottom Dock Layout", - description: "Layout mode for the bottom dock", + title: "Vim Mode", + description: "Enable Vim mode and key bindings.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.bottom_dock_layout, - pick_mut: |settings_content| { - &mut settings_content.workspace.bottom_dock_layout + json_path: Some("vim_mode"), + pick: |settings_content| 
settings_content.vim_mode.as_ref(), + write: |settings_content, value| { + settings_content.vim_mode = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Centered Layout Left Padding", - description: "Left padding for centered layout", + title: "Helix Mode", + description: "Enable Helix mode and key bindings.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(centered_layout) = - &settings_content.workspace.centered_layout - { - ¢ered_layout.left_padding - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .workspace - .centered_layout - .get_or_insert_default() - .left_padding - }, - }), - metadata: None, - }), - SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Centered Layout Right Padding", - description: "Right padding for centered layout", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(centered_layout) = - &settings_content.workspace.centered_layout - { - ¢ered_layout.right_padding - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .workspace - .centered_layout - .get_or_insert_default() - .right_padding - }, - }), - metadata: None, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Zoomed Padding", - description: "Whether to show padding for zoomed panels", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.zoomed_padding, - pick_mut: |settings_content| &mut settings_content.workspace.zoomed_padding, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SectionHeader("Window"), - // todo(settings_ui): Should we filter by platform? - SettingsPageItem::SettingItem(SettingItem { - title: "Use System Window Tabs", - description: "(macOS only) Whether to allow windows to tab together", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.workspace.use_system_window_tabs, - pick_mut: |settings_content| { - &mut settings_content.workspace.use_system_window_tabs + json_path: Some("helix_mode"), + pick: |settings_content| settings_content.helix_mode.as_ref(), + write: |settings_content, value| { + settings_content.helix_mode = value; }, }), metadata: None, @@ -601,103 +1105,116 @@ pub(crate) fn settings_data() -> Vec { title: "Editor", items: { let mut items = vec![ - SettingsPageItem::SectionHeader("Search"), - SettingsPageItem::SettingItem(SettingItem { - title: "Search Wrap", - description: "Whether the editor search results will loop", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.search_wrap, - pick_mut: |settings_content| &mut settings_content.editor.search_wrap, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Seed Search Query From Cursor", - description: "When to populate a new search's query based on the text under the cursor", - field: Box::new(SettingField { - pick: |settings_content| { - &settings_content.editor.seed_search_query_from_cursor - }, - pick_mut: |settings_content| { - &mut settings_content.editor.seed_search_query_from_cursor - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Use Smartcase Search", - description: "Whether to use smartcase (i.e., case-sensitive) search", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.use_smartcase_search, - pick_mut: |settings_content| { - &mut 
settings_content.editor.use_smartcase_search - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SectionHeader("Editor Behavior"), - SettingsPageItem::SettingItem(SettingItem { - title: "Redact Private Values", - description: "Hide the values of variables in private files", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.redact_private_values, - pick_mut: |settings_content| { - &mut settings_content.editor.redact_private_values - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Middle Click Paste", - description: "Whether to enable middle-click paste on Linux", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.middle_click_paste, - pick_mut: |settings_content| { - &mut settings_content.editor.middle_click_paste - }, - }), - metadata: None, - files: USER, + SettingsPageItem::SectionHeader("Auto Save"), + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER, + title: "Auto Save Mode", + description: "When to auto save buffer changes.", + field: Box::new(SettingField { + json_path: Some("autosave$"), + pick: |settings_content| { + Some(&dynamic_variants::()[ + settings_content + .workspace + .autosave + .as_ref()? + .discriminant() as usize]) + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content.workspace.autosave.get_or_insert_with(|| { + settings::AutosaveSetting::Off + }); + *settings_value = match value { + settings::AutosaveSettingDiscriminants::Off => { + settings::AutosaveSetting::Off + }, + settings::AutosaveSettingDiscriminants::AfterDelay => { + let milliseconds = match settings_value { + settings::AutosaveSetting::AfterDelay { milliseconds } => *milliseconds, + _ => settings::DelayMs(1000), + }; + settings::AutosaveSetting::AfterDelay { milliseconds } + }, + settings::AutosaveSettingDiscriminants::OnFocusChange => { + settings::AutosaveSetting::OnFocusChange + }, + settings::AutosaveSettingDiscriminants::OnWindowChange => { + settings::AutosaveSetting::OnWindowChange + }, + }; + }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.workspace.autosave.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::AutosaveSettingDiscriminants::Off => vec![], + settings::AutosaveSettingDiscriminants::AfterDelay => vec![ + SettingItem { + files: USER, + title: "Delay (milliseconds)", + description: "Save after inactivity period (in milliseconds).", + field: Box::new(SettingField { + json_path: Some("autosave.after_delay.milliseconds"), + pick: |settings_content| { + match settings_content.workspace.autosave.as_ref() { + Some(settings::AutosaveSetting::AfterDelay { milliseconds }) => Some(milliseconds), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .workspace + .autosave.as_mut() { + Some(settings::AutosaveSetting::AfterDelay { milliseconds }) => *milliseconds = value, + _ => return + } + }, + }), + metadata: None, + } + ], + settings::AutosaveSettingDiscriminants::OnFocusChange => vec![], + settings::AutosaveSettingDiscriminants::OnWindowChange => vec![], + } + }).collect(), }), + SettingsPageItem::SectionHeader("Multibuffer"), SettingsPageItem::SettingItem(SettingItem { title: "Double Click In Multibuffer", - description: "What to do when 
multibuffer is double-clicked in some of its excerpts", + description: "What to do when multibuffer is double-clicked in some of its excerpts.", field: Box::new(SettingField { + json_path: Some("double_click_in_multibuffer"), pick: |settings_content| { - &settings_content.editor.double_click_in_multibuffer + settings_content.editor.double_click_in_multibuffer.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.double_click_in_multibuffer + write: |settings_content, value| { + settings_content.editor.double_click_in_multibuffer = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Go To Definition Fallback", - description: "Whether to follow-up empty go to definition responses from the language server", + title: "Expand Excerpt Lines", + description: "How many lines to expand the multibuffer excerpts by default.", field: Box::new(SettingField { + json_path: Some("expand_excerpt_lines"), pick: |settings_content| { - &settings_content.editor.go_to_definition_fallback + settings_content.editor.expand_excerpt_lines.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.go_to_definition_fallback - }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Expand Excerpt Lines", - description: "How many lines to expand the multibuffer excerpts by default", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.expand_excerpt_lines, - pick_mut: |settings_content| { - &mut settings_content.editor.expand_excerpt_lines + write: |settings_content, value| { + settings_content.editor.expand_excerpt_lines = value; }, }), metadata: None, @@ -705,25 +1222,37 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Excerpt Context Lines", - description: "How many lines of context to provide in multibuffer excerpts by default", + description: "How many lines of context to provide in multibuffer excerpts by default.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.excerpt_context_lines, - pick_mut: |settings_content| { - &mut settings_content.editor.excerpt_context_lines + json_path: Some("excerpt_context_lines"), + pick: |settings_content| { + settings_content.editor.excerpt_context_lines.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.excerpt_context_lines = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Minimum Contrast For Highlights", - description: "The minimum APCA perceptual contrast to maintain when rendering text over highlight backgrounds", + title: "Expand Outlines With Depth", + description: "Default depth to expand outline items in the current file.", field: Box::new(SettingField { + json_path: Some("outline_panel.expand_outlines_with_depth"), pick: |settings_content| { - &settings_content.editor.minimum_contrast_for_highlights + settings_content + .outline_panel + .as_ref() + .and_then(|outline_panel| { + outline_panel.expand_outlines_with_depth.as_ref() + }) }, - pick_mut: |settings_content| { - &mut settings_content.editor.minimum_contrast_for_highlights + write: |settings_content, value| { + settings_content + .outline_panel + .get_or_insert_default() + .expand_outlines_with_depth = value; }, }), metadata: None, @@ -732,13 +1261,14 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Scrolling"), SettingsPageItem::SettingItem(SettingItem { title: 
"Scroll Beyond Last Line", - description: "Whether the editor will scroll beyond the last line", + description: "Whether the editor will scroll beyond the last line.", field: Box::new(SettingField { + json_path: Some("scroll_beyond_last_line"), pick: |settings_content| { - &settings_content.editor.scroll_beyond_last_line + settings_content.editor.scroll_beyond_last_line.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.scroll_beyond_last_line + write: |settings_content, value| { + settings_content.editor.scroll_beyond_last_line = value; }, }), metadata: None, @@ -746,13 +1276,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Vertical Scroll Margin", - description: "The number of lines to keep above/below the cursor when auto-scrolling", + description: "The number of lines to keep above/below the cursor when auto-scrolling.", field: Box::new(SettingField { + json_path: Some("vertical_scroll_margin"), pick: |settings_content| { - &settings_content.editor.vertical_scroll_margin + settings_content.editor.vertical_scroll_margin.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.vertical_scroll_margin + write: |settings_content, value| { + settings_content.editor.vertical_scroll_margin = value; }, }), metadata: None, @@ -760,13 +1291,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Horizontal Scroll Margin", - description: "The number of characters to keep on either side when scrolling with the mouse", + description: "The number of characters to keep on either side when scrolling with the mouse.", field: Box::new(SettingField { + json_path: Some("horizontal_scroll_margin"), pick: |settings_content| { - &settings_content.editor.horizontal_scroll_margin + settings_content.editor.horizontal_scroll_margin.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.horizontal_scroll_margin + write: |settings_content, value| { + settings_content.editor.horizontal_scroll_margin = value; }, }), metadata: None, @@ -774,11 +1306,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Scroll Sensitivity", - description: "Scroll sensitivity multiplier for both horizontal and vertical scrolling", + description: "Scroll sensitivity multiplier for both horizontal and vertical scrolling.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.scroll_sensitivity, - pick_mut: |settings_content| { - &mut settings_content.editor.scroll_sensitivity + json_path: Some("scroll_sensitivity"), + pick: |settings_content| { + settings_content.editor.scroll_sensitivity.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.scroll_sensitivity = value; }, }), metadata: None, @@ -786,13 +1321,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Fast Scroll Sensitivity", - description: "Fast Scroll sensitivity multiplier for both horizontal and vertical scrolling", + description: "Fast scroll sensitivity multiplier for both horizontal and vertical scrolling.", field: Box::new(SettingField { + json_path: Some("fast_scroll_sensitivity"), pick: |settings_content| { - &settings_content.editor.fast_scroll_sensitivity + settings_content.editor.fast_scroll_sensitivity.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.fast_scroll_sensitivity + write: |settings_content, value| { + 
settings_content.editor.fast_scroll_sensitivity = value; }, }), metadata: None, @@ -800,11 +1336,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Autoscroll On Clicks", - description: "Whether to scroll when clicking near the edge of the visible text area", + description: "Whether to scroll when clicking near the edge of the visible text area.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.autoscroll_on_clicks, - pick_mut: |settings_content| { - &mut settings_content.editor.autoscroll_on_clicks + json_path: Some("autoscroll_on_clicks"), + pick: |settings_content| { + settings_content.editor.autoscroll_on_clicks.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.autoscroll_on_clicks = value; }, }), metadata: None, @@ -813,11 +1352,14 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Signature Help"), SettingsPageItem::SettingItem(SettingItem { title: "Auto Signature Help", - description: "Whether to automatically show a signature help pop-up or not", + description: "Automatically show a signature help pop-up.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.auto_signature_help, - pick_mut: |settings_content| { - &mut settings_content.editor.auto_signature_help + json_path: Some("auto_signature_help"), + pick: |settings_content| { + settings_content.editor.auto_signature_help.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.auto_signature_help = value; }, }), metadata: None, @@ -825,13 +1367,17 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Show Signature Help After Edits", - description: "Whether to show the signature help pop-up after completions or bracket pairs inserted", + description: "Show the signature help pop-up after completions or bracket pairs are inserted.", field: Box::new(SettingField { + json_path: Some("show_signature_help_after_edits"), pick: |settings_content| { - &settings_content.editor.show_signature_help_after_edits + settings_content + .editor + .show_signature_help_after_edits + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.show_signature_help_after_edits + write: |settings_content, value| { + settings_content.editor.show_signature_help_after_edits = value; }, }), metadata: None, @@ -839,24 +1385,30 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Snippet Sort Order", - description: "Determines how snippets are sorted relative to other completion items", + description: "Determines how snippets are sorted relative to other completion items.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.snippet_sort_order, - pick_mut: |settings_content| { - &mut settings_content.editor.snippet_sort_order + json_path: Some("snippet_sort_order"), + pick: |settings_content| { + settings_content.editor.snippet_sort_order.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.snippet_sort_order = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Hover"), + SettingsPageItem::SectionHeader("Hover Popover"), SettingsPageItem::SettingItem(SettingItem { - title: "Hover Popover Enabled", - description: "Whether to show the informational hover box when moving the mouse over symbols in the editor", + title: "Enabled", + description: "Show the informational hover box when 
moving the mouse over symbols in the editor.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.hover_popover_enabled, - pick_mut: |settings_content| { - &mut settings_content.editor.hover_popover_enabled + json_path: Some("hover_popover_enabled"), + pick: |settings_content| { + settings_content.editor.hover_popover_enabled.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.hover_popover_enabled = value; }, }), metadata: None, @@ -864,74 +1416,62 @@ pub(crate) fn settings_data() -> Vec { }), // todo(settings ui): add units to this number input SettingsPageItem::SettingItem(SettingItem { - title: "Hover Popover Delay", - description: "Time to wait in milliseconds before showing the informational hover box", + title: "Delay", + description: "Time to wait in milliseconds before showing the informational hover box.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.hover_popover_delay, - pick_mut: |settings_content| { - &mut settings_content.editor.hover_popover_delay + json_path: Some("hover_popover_enabled"), + pick: |settings_content| { + settings_content.editor.hover_popover_delay.as_ref() }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SectionHeader("Code Actions"), - SettingsPageItem::SettingItem(SettingItem { - title: "Inline Code Actions", - description: "Whether to show code action button at start of buffer line", - field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.inline_code_actions, - pick_mut: |settings_content| { - &mut settings_content.editor.inline_code_actions + write: |settings_content, value| { + settings_content.editor.hover_popover_delay = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Selection"), + SettingsPageItem::SectionHeader("Drag And Drop Selection"), SettingsPageItem::SettingItem(SettingItem { - title: "Drag And Drop Selection", - description: "Whether to enable drag and drop selection", + title: "Enabled", + description: "Enable drag and drop selection.", field: Box::new(SettingField { + json_path: Some("drag_and_drop_selection.enabled"), pick: |settings_content| { - if let Some(drag_and_drop) = - &settings_content.editor.drag_and_drop_selection - { - &drag_and_drop.enabled - } else { - &None - } + settings_content + .editor + .drag_and_drop_selection + .as_ref() + .and_then(|drag_and_drop| drag_and_drop.enabled.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .drag_and_drop_selection .get_or_insert_default() - .enabled + .enabled = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Drag And Drop Selection Delay", - description: "Delay in milliseconds before drag and drop selection starts", + title: "Delay", + description: "Delay in milliseconds before drag and drop selection starts.", field: Box::new(SettingField { + json_path: Some("drag_and_drop_selection.delay"), pick: |settings_content| { - if let Some(drag_and_drop) = - &settings_content.editor.drag_and_drop_selection - { - &drag_and_drop.delay - } else { - &None - } + settings_content + .editor + .drag_and_drop_selection + .as_ref() + .and_then(|drag_and_drop| drag_and_drop.delay.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .drag_and_drop_selection .get_or_insert_default() - .delay + .delay = value; }, }), 
metadata: None, @@ -940,21 +1480,22 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Gutter"), SettingsPageItem::SettingItem(SettingItem { title: "Show Line Numbers", - description: "Whether to show line numbers in the gutter", + description: "Show line numbers in the gutter.", field: Box::new(SettingField { + json_path: Some("gutter.line_numbers"), pick: |settings_content| { - if let Some(gutter) = &settings_content.editor.gutter { - &gutter.line_numbers - } else { - &None - } + settings_content + .editor + .gutter + .as_ref() + .and_then(|gutter| gutter.line_numbers.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .gutter .get_or_insert_default() - .line_numbers + .line_numbers = value; }, }), metadata: None, @@ -962,11 +1503,14 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Relative Line Numbers", - description: "Whether the line numbers on editors gutter are relative or not", + description: "Whether the line numbers in the editor's gutter are relative or not.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.relative_line_numbers, - pick_mut: |settings_content| { - &mut settings_content.editor.relative_line_numbers + json_path: Some("relative_line_numbers"), + pick: |settings_content| { + settings_content.editor.relative_line_numbers.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.relative_line_numbers = value; }, }), metadata: None, @@ -974,21 +1518,22 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Show Runnables", - description: "Whether to show runnable buttons in the gutter", + description: "Show runnable buttons in the gutter.", field: Box::new(SettingField { + json_path: Some("gutter.runnables"), pick: |settings_content| { - if let Some(gutter) = &settings_content.editor.gutter { - &gutter.runnables - } else { - &None - } + settings_content + .editor + .gutter + .as_ref() + .and_then(|gutter| gutter.runnables.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .gutter .get_or_insert_default() - .runnables + .runnables = value; }, }), metadata: None, @@ -996,21 +1541,22 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Show Breakpoints", - description: "Whether to show breakpoints in the gutter", + description: "Show breakpoints in the gutter.", field: Box::new(SettingField { + json_path: Some("gutter.breakpoints"), pick: |settings_content| { - if let Some(gutter) = &settings_content.editor.gutter { - &gutter.breakpoints - } else { - &None - } + settings_content + .editor + .gutter + .as_ref() + .and_then(|gutter| gutter.breakpoints.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .gutter .get_or_insert_default() - .breakpoints + .breakpoints = value; }, }), metadata: None, @@ -1018,17 +1564,19 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Show Folds", - description: "Whether to show code folding controls in the gutter", + description: "Show code folding controls in the gutter.", field: Box::new(SettingField { + json_path: Some("gutter.folds"), pick: |settings_content| { - if let Some(gutter) = &settings_content.editor.gutter { - &gutter.folds - } 
else { - &None - } + settings_content + .editor + .gutter + .as_ref() + .and_then(|gutter| gutter.folds.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content.editor.gutter.get_or_insert_default().folds + write: |settings_content, value| { + settings_content.editor.gutter.get_or_insert_default().folds = + value; }, }), metadata: None, @@ -1036,21 +1584,37 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Min Line Number Digits", - description: "Minimum number of characters to reserve space for in the gutter", + description: "Minimum number of characters to reserve space for in the gutter.", field: Box::new(SettingField { + json_path: Some("gutter.min_line_number_digits"), pick: |settings_content| { - if let Some(gutter) = &settings_content.editor.gutter { - &gutter.min_line_number_digits - } else { - &None - } + settings_content + .editor + .gutter + .as_ref() + .and_then(|gutter| gutter.min_line_number_digits.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .gutter .get_or_insert_default() - .min_line_number_digits + .min_line_number_digits = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Inline Code Actions", + description: "Show code action button at start of buffer line.", + field: Box::new(SettingField { + json_path: Some("inline_code_actions"), + pick: |settings_content| { + settings_content.editor.inline_code_actions.as_ref() + }, + write: |settings_content, value| { + settings_content.editor.inline_code_actions = value; }, }), metadata: None, @@ -1059,21 +1623,18 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Scrollbar"), SettingsPageItem::SettingItem(SettingItem { title: "Show", - description: "When to show the scrollbar in the editor", + description: "When to show the scrollbar in the editor.", field: Box::new(SettingField { + json_path: Some("scrollbar"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.show - } else { - &None - } + settings_content.editor.scrollbar.as_ref()?.show.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .show + .show = value; }, }), metadata: None, @@ -1081,21 +1642,18 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Cursors", - description: "Whether to show cursor positions in the scrollbar", + description: "Show cursor positions in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.cursors"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.cursors - } else { - &None - } + settings_content.editor.scrollbar.as_ref()?.cursors.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .cursors + .cursors = value; }, }), metadata: None, @@ -1103,21 +1661,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Git Diff", - description: "Whether to show git diff indicators in the scrollbar", + description: "Show Git diff indicators in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.git_diff"), pick: |settings_content| { - if let 
Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.git_diff - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .git_diff + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .git_diff + .git_diff = value; }, }), metadata: None, @@ -1125,21 +1685,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Search Results", - description: "Whether to show buffer search result indicators in the scrollbar", + description: "Show buffer search result indicators in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.search_results"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.search_results - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .search_results + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .search_results + .search_results = value; }, }), metadata: None, @@ -1147,21 +1709,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Selected Text", - description: "Whether to show selected text occurrences in the scrollbar", + description: "Show selected text occurrences in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.selected_text"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.selected_text - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .selected_text + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .selected_text + .selected_text = value; }, }), metadata: None, @@ -1169,21 +1733,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Selected Symbol", - description: "Whether to show selected symbol occurrences in the scrollbar", + description: "Show selected symbol occurrences in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.selected_symbol"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.selected_symbol - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .selected_symbol + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .selected_symbol + .selected_symbol = value; }, }), metadata: None, @@ -1191,21 +1757,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Diagnostics", - description: "Which diagnostic indicators to show in the scrollbar", + description: "Which diagnostic indicators to show in the scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.diagnostics"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.diagnostics - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? 
+ .diagnostics + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() - .diagnostics + .diagnostics = value; }, }), metadata: None, @@ -1213,27 +1781,27 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Horizontal Scrollbar", - description: "When false, forcefully disables the horizontal scrollbar", + description: "When false, forcefully disables the horizontal scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.axes.horizontal"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - if let Some(axes) = &scrollbar.axes { - &axes.horizontal - } else { - &None - } - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .axes + .as_ref()? + .horizontal + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() .axes .get_or_insert_default() - .horizontal + .horizontal = value; }, }), metadata: None, @@ -1241,27 +1809,27 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Vertical Scrollbar", - description: "When false, forcefully disables the vertical scrollbar", + description: "When false, forcefully disables the vertical scrollbar.", field: Box::new(SettingField { + json_path: Some("scrollbar.axes.vertical"), pick: |settings_content| { - if let Some(scrollbar) = &settings_content.editor.scrollbar { - if let Some(axes) = &scrollbar.axes { - &axes.vertical - } else { - &None - } - } else { - &None - } + settings_content + .editor + .scrollbar + .as_ref()? + .axes + .as_ref()? + .vertical + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .scrollbar .get_or_insert_default() .axes .get_or_insert_default() - .vertical + .vertical = value; }, }), metadata: None, @@ -1270,17 +1838,15 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Minimap"), SettingsPageItem::SettingItem(SettingItem { title: "Show", - description: "When to show the minimap in the editor", + description: "When to show the minimap in the editor.", field: Box::new(SettingField { + json_path: Some("minimap.show"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap { - &minimap.show - } else { - &None - } + settings_content.editor.minimap.as_ref()?.show.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.minimap.get_or_insert_default().show + write: |settings_content, value| { + settings_content.editor.minimap.get_or_insert_default().show = + value; }, }), metadata: None, @@ -1288,21 +1854,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Display In", - description: "Where to show the minimap in the editor", + description: "Where to show the minimap in the editor.", field: Box::new(SettingField { + json_path: Some("minimap.display_in"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap { - &minimap.display_in - } else { - &None - } + settings_content + .editor + .minimap + .as_ref()? 
+ .display_in + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .minimap .get_or_insert_default() - .display_in + .display_in = value; }, }), metadata: None, @@ -1310,21 +1878,18 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Thumb", - description: "When to show the minimap thumb", + description: "When to show the minimap thumb.", field: Box::new(SettingField { + json_path: Some("minimap.thumb"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap { - &minimap.thumb - } else { - &None - } + settings_content.editor.minimap.as_ref()?.thumb.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .minimap .get_or_insert_default() - .thumb + .thumb = value; }, }), metadata: None, @@ -1332,21 +1897,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Thumb Border", - description: "Border style for the minimap's scrollbar thumb", + description: "Border style for the minimap's scrollbar thumb.", field: Box::new(SettingField { + json_path: Some("minimap.thumb_border"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap { - &minimap.thumb_border - } else { - &None - } + settings_content + .editor + .minimap + .as_ref()? + .thumb_border + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .minimap .get_or_insert_default() - .thumb_border + .thumb_border = value; }, }), metadata: None, @@ -1354,23 +1921,23 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Current Line Highlight", - description: "How to highlight the current line in the minimap", + description: "How to highlight the current line in the minimap.", field: Box::new(SettingField { + json_path: Some("minimap.current_line_highlight"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap - && minimap.current_line_highlight.is_some() - { - &minimap.current_line_highlight - } else { - &settings_content.editor.current_line_highlight - } + settings_content + .editor + .minimap + .as_ref() + .and_then(|minimap| minimap.current_line_highlight.as_ref()) + .or(settings_content.editor.current_line_highlight.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .minimap .get_or_insert_default() - .current_line_highlight + .current_line_highlight = value; }, }), metadata: None, @@ -1378,1408 +1945,1464 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Max Width Columns", - description: "Maximum number of columns to display in the minimap", + description: "Maximum number of columns to display in the minimap.", field: Box::new(SettingField { + json_path: Some("minimap.max_width_columns"), pick: |settings_content| { - if let Some(minimap) = &settings_content.editor.minimap { - &minimap.max_width_columns - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content + settings_content .editor .minimap - .get_or_insert_default() + .as_ref()? 
.max_width_columns + .as_ref() }, - }), - metadata: None, - files: USER, - }), - SettingsPageItem::SectionHeader("Tabs"), - SettingsPageItem::SettingItem(SettingItem { - title: "Show Tab Bar", - description: "Whether or not to show the tab bar in the editor", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(tab_bar) = &settings_content.tab_bar { - &tab_bar.show - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content.tab_bar.get_or_insert_default().show + write: |settings_content, value| { + settings_content + .editor + .minimap + .get_or_insert_default() + .max_width_columns = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Toolbar"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Git Status In Tabs", - description: "Whether to show the Git file status on a tab item", + title: "Breadcrumbs", + description: "Show breadcrumbs.", field: Box::new(SettingField { + json_path: Some("toolbar.breadcrumbs"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.git_status - } else { - &None - } + settings_content + .editor + .toolbar + .as_ref()? + .breadcrumbs + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.tabs.get_or_insert_default().git_status + write: |settings_content, value| { + settings_content + .editor + .toolbar + .get_or_insert_default() + .breadcrumbs = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show File Icons In Tabs", - description: "Whether to show the file icon for a tab", + title: "Quick Actions", + description: "Show quick action buttons (e.g., search, selection, editor controls, etc.).", field: Box::new(SettingField { + json_path: Some("toolbar.quick_actions"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.file_icons - } else { - &None - } + settings_content + .editor + .toolbar + .as_ref()? + .quick_actions + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.tabs.get_or_insert_default().file_icons + write: |settings_content, value| { + settings_content + .editor + .toolbar + .get_or_insert_default() + .quick_actions = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Tab Close Position", - description: "Position of the close button in a tab", + title: "Selections Menu", + description: "Show the selections menu in the editor toolbar.", field: Box::new(SettingField { + json_path: Some("toolbar.selections_menu"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.close_position - } else { - &None - } + settings_content + .editor + .toolbar + .as_ref()? + .selections_menu + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.tabs.get_or_insert_default().close_position + write: |settings_content, value| { + settings_content + .editor + .toolbar + .get_or_insert_default() + .selections_menu = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Maximum Tabs", - description: "Maximum open tabs in a pane. 
Will not close an unsaved tab", - // todo(settings_ui): The default for this value is null and it's use in code - // is complex, so I'm going to come back to this later - field: Box::new( - SettingField { - pick: |settings_content| &settings_content.workspace.max_tabs, - pick_mut: |settings_content| { - &mut settings_content.workspace.max_tabs - }, - } - .unimplemented(), - ), - metadata: None, - }), - SettingsPageItem::SectionHeader("Toolbar"), - SettingsPageItem::SettingItem(SettingItem { - title: "Breadcrumbs", - description: "Whether to show breadcrumbs", + title: "Agent Review", + description: "Show agent review buttons in the editor toolbar.", field: Box::new(SettingField { + json_path: Some("toolbar.agent_review"), pick: |settings_content| { - if let Some(toolbar) = &settings_content.editor.toolbar { - &toolbar.breadcrumbs - } else { - &None - } + settings_content + .editor + .toolbar + .as_ref()? + .agent_review + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .toolbar .get_or_insert_default() - .breadcrumbs + .agent_review = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Quick Actions", - description: "Whether to show quick action buttons (e.g., search, selection, editor controls, etc.)", + title: "Code Actions", + description: "Show code action buttons in the editor toolbar.", field: Box::new(SettingField { + json_path: Some("toolbar.code_actions"), pick: |settings_content| { - if let Some(toolbar) = &settings_content.editor.toolbar { - &toolbar.quick_actions - } else { - &None - } + settings_content + .editor + .toolbar + .as_ref()? + .code_actions + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .editor .toolbar .get_or_insert_default() - .quick_actions + .code_actions = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SettingItem(SettingItem { - title: "Selections Menu", - description: "Whether to show the selections menu in the editor toolbar", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(toolbar) = &settings_content.editor.toolbar { - &toolbar.selections_menu - } else { - &None - } + ]; + items.extend(language_settings_data()); + items + }, + }, + SettingsPage { + title: "Languages & Tools", + items: { + let mut items = vec![]; + items.extend(non_editor_language_settings_data()); + items.extend([ + SettingsPageItem::SectionHeader("File Types"), + SettingsPageItem::SettingItem(SettingItem { + title: "File Type Associations", + description: "A mapping from languages to files and file extensions that should be treated as that language.", + field: Box::new( + SettingField { + json_path: Some("file_types"), + pick: |settings_content| { + settings_content.project.all_languages.file_types.as_ref() + }, + write: |settings_content, value| { + settings_content.project.all_languages.file_types = value; + + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + ]); + + items.extend([ + SettingsPageItem::SectionHeader("Diagnostics"), + SettingsPageItem::SettingItem(SettingItem { + title: "Max Severity", + description: "Which level to use to filter out diagnostics displayed in the editor.", + field: Box::new(SettingField { + json_path: Some("diagnostics_max_severity"), + pick: |settings_content| settings_content.editor.diagnostics_max_severity.as_ref(), + write: |settings_content, value| { + 
settings_content.editor.diagnostics_max_severity = value; + }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .toolbar + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Include Warnings", + description: "Whether to show warnings or not by default.", + field: Box::new(SettingField { + json_path: Some("diagnostics.include_warnings"), + pick: |settings_content| { + settings_content.diagnostics.as_ref()?.include_warnings.as_ref() + }, + write: |settings_content, value| { + settings_content + + .diagnostics .get_or_insert_default() - .selections_menu + .include_warnings + = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Inline Diagnostics"), SettingsPageItem::SettingItem(SettingItem { - title: "Agent Review", - description: "Whether to show agent review buttons in the editor toolbar", + title: "Enabled", + description: "Whether to show diagnostics inline or not.", field: Box::new(SettingField { + json_path: Some("diagnostics.inline.enabled"), pick: |settings_content| { - if let Some(toolbar) = &settings_content.editor.toolbar { - &toolbar.agent_review - } else { - &None - } + settings_content.diagnostics.as_ref()?.inline.as_ref()?.enabled.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .toolbar + write: |settings_content, value| { + settings_content + + .diagnostics .get_or_insert_default() - .agent_review + .inline + .get_or_insert_default() + .enabled + = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Code Actions", - description: "Whether to show code action buttons in the editor toolbar", + title: "Update Debounce", + description: "The delay in milliseconds to show inline diagnostics after the last diagnostic update.", field: Box::new(SettingField { + json_path: Some("diagnostics.inline.update_debounce_ms"), pick: |settings_content| { - if let Some(toolbar) = &settings_content.editor.toolbar { - &toolbar.code_actions - } else { - &None - } + settings_content.diagnostics.as_ref()?.inline.as_ref()?.update_debounce_ms.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .toolbar + write: |settings_content, value| { + settings_content + + .diagnostics .get_or_insert_default() - .code_actions + .inline + .get_or_insert_default() + .update_debounce_ms + = value; }, }), metadata: None, files: USER, }), - ]; - items.extend(language_settings_data()); + SettingsPageItem::SettingItem(SettingItem { + title: "Padding", + description: "The amount of padding between the end of the source line and the start of the inline diagnostic.", + field: Box::new(SettingField { + json_path: Some("diagnostics.inline.padding"), + pick: |settings_content| { + settings_content.diagnostics.as_ref()?.inline.as_ref()?.padding.as_ref() + }, + write: |settings_content, value| { + settings_content + + .diagnostics + .get_or_insert_default() + .inline + .get_or_insert_default() + .padding + = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Minimum Column", + description: "The minimum column at which to display inline diagnostics.", + field: Box::new(SettingField { + json_path: Some("diagnostics.inline.min_column"), + pick: |settings_content| { + settings_content.diagnostics.as_ref()?.inline.as_ref()?.min_column.as_ref() + }, + write: |settings_content, value| { + settings_content + + .diagnostics + .get_or_insert_default() + .inline + 
.get_or_insert_default() + .min_column + = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("LSP Pull Diagnostics"), + SettingsPageItem::SettingItem(SettingItem { + title: "Enabled", + description: "Whether to pull for language server-powered diagnostics or not.", + field: Box::new(SettingField { + json_path: Some("diagnostics.lsp_pull_diagnostics.enabled"), + pick: |settings_content| { + settings_content.diagnostics.as_ref()?.lsp_pull_diagnostics.as_ref()?.enabled.as_ref() + }, + write: |settings_content, value| { + settings_content + + .diagnostics + .get_or_insert_default() + .lsp_pull_diagnostics + .get_or_insert_default() + .enabled + = value; + }, + }), + metadata: None, + files: USER, + }), + // todo(settings_ui): Needs unit + SettingsPageItem::SettingItem(SettingItem { + title: "Debounce", + description: "Minimum time to wait before pulling diagnostics from the language server(s).", + field: Box::new(SettingField { + json_path: Some("diagnostics.lsp_pull_diagnostics.debounce_ms"), + pick: |settings_content| { + settings_content.diagnostics.as_ref()?.lsp_pull_diagnostics.as_ref()?.debounce_ms.as_ref() + }, + write: |settings_content, value| { + settings_content + + .diagnostics + .get_or_insert_default() + .lsp_pull_diagnostics + .get_or_insert_default() + .debounce_ms + = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("LSP Highlights"), + SettingsPageItem::SettingItem(SettingItem { + title: "Debounce", + description: "The debounce delay before querying highlights from the language.", + field: Box::new(SettingField { + json_path: Some("lsp_highlight_debounce"), + pick: |settings_content| settings_content.editor.lsp_highlight_debounce.as_ref(), + write: |settings_content, value| { + settings_content.editor.lsp_highlight_debounce = value; + }, + }), + metadata: None, + files: USER, + }), + ]); + + // todo(settings_ui): Refresh on extension (un)/installed + // Note that `crates/json_schema_store` solves the same problem, there is probably a way to unify the two + items.push(SettingsPageItem::SectionHeader(LANGUAGES_SECTION_HEADER)); + items.extend(all_language_names(cx).into_iter().map(|language_name| { + SettingsPageItem::SubPageLink(SubPageLink { + title: language_name, + files: USER | PROJECT, + render: Arc::new(|this, window, cx| { + this.render_sub_page_items( + language_settings_data() + .iter() + .chain(non_editor_language_settings_data().iter()) + .enumerate(), + None, + window, + cx, + ) + .into_any_element() + }), + }) + })); items }, }, SettingsPage { - title: "Languages", - items: vec![ - SettingsPageItem::SectionHeader(LANGUAGES_SECTION_HEADER), - SettingsPageItem::SubPageLink(SubPageLink { - title: "JSON", - files: USER | LOCAL, - render: Arc::new(|this, window, cx| { - this.render_page_items( - language_settings_data().iter().enumerate(), - None, - window, - cx, - ) - .into_any_element() - }), - }), - SettingsPageItem::SubPageLink(SubPageLink { - title: "JSONC", - files: USER | LOCAL, - render: Arc::new(|this, window, cx| { - this.render_page_items( - language_settings_data().iter().enumerate(), - None, - window, - cx, - ) - .into_any_element() - }), - }), - SettingsPageItem::SubPageLink(SubPageLink { - title: "Rust", - files: USER | LOCAL, - render: Arc::new(|this, window, cx| { - this.render_page_items( - language_settings_data().iter().enumerate(), - None, - window, - cx, - ) - .into_any_element() - }), - }), - SettingsPageItem::SubPageLink(SubPageLink { - title: "Python", - files: 
USER | LOCAL, - render: Arc::new(|this, window, cx| { - this.render_page_items( - language_settings_data().iter().enumerate(), - None, - window, - cx, - ) - .into_any_element() - }), - }), - SettingsPageItem::SubPageLink(SubPageLink { - title: "TSX", - files: USER | LOCAL, - render: Arc::new(|this, window, cx| { - this.render_page_items( - language_settings_data().iter().enumerate(), - None, - window, - cx, - ) - .into_any_element() - }), - }), - ], - }, - SettingsPage { - title: "Workbench & Window", + title: "Search & Files", items: vec![ - SettingsPageItem::SectionHeader("Status Bar"), + SettingsPageItem::SectionHeader("Search"), SettingsPageItem::SettingItem(SettingItem { - title: "Project Panel Button", - description: "Whether to show the project panel button in the status bar", + title: "Whole Word", + description: "Search for whole words by default.", field: Box::new(SettingField { + json_path: Some("search.whole_word"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.button - } else { - &None - } + settings_content.editor.search.as_ref()?.whole_word.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .editor + .search .get_or_insert_default() - .button + .whole_word = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Active Language Button", - description: "Whether to show the active language button in the status bar", + title: "Case Sensitive", + description: "Search case-sensitively by default.", field: Box::new(SettingField { + json_path: Some("search.case_sensitive"), pick: |settings_content| { - if let Some(status_bar) = &settings_content.status_bar { - &status_bar.active_language_button - } else { - &None - } + settings_content + .editor + .search + .as_ref()? + .case_sensitive + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .status_bar + write: |settings_content, value| { + settings_content + .editor + .search .get_or_insert_default() - .active_language_button + .case_sensitive = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Cursor Position Button", - description: "Whether to show the cursor position button in the status bar", + title: "Use Smartcase Search", + description: "Whether to automatically enable case-sensitive search based on the search query.", field: Box::new(SettingField { + json_path: Some("use_smartcase_search"), pick: |settings_content| { - if let Some(status_bar) = &settings_content.status_bar { - &status_bar.cursor_position_button - } else { - &None - } + settings_content.editor.use_smartcase_search.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .status_bar - .get_or_insert_default() - .cursor_position_button + write: |settings_content, value| { + settings_content.editor.use_smartcase_search = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Terminal Button", - description: "Whether to show the terminal button in the status bar", + title: "Include Ignored", + description: "Include ignored files in search results by default.", field: Box::new(SettingField { + json_path: Some("search.include_ignored"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.button - } else { - &None - } + settings_content + .editor + .search + .as_ref()? 
+ .include_ignored + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.terminal.get_or_insert_default().button + write: |settings_content, value| { + settings_content + .editor + .search + .get_or_insert_default() + .include_ignored = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Diagnostics Button", - description: "Whether to show the project diagnostics button in the status bar", + title: "Regex", + description: "Use regex search by default.", field: Box::new(SettingField { + json_path: Some("search.regex"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - &diagnostics.button - } else { - &None - } + settings_content.editor.search.as_ref()?.regex.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.diagnostics.get_or_insert_default().button + write: |settings_content, value| { + settings_content.editor.search.get_or_insert_default().regex = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Project Search Button", - description: "Whether to show the project search button in the status bar", + title: "Search Wrap", + description: "Whether the editor search results will loop.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(search) = &settings_content.editor.search { - &search.button - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .search - .get_or_insert_default() - .button + json_path: Some("search_wrap"), + pick: |settings_content| settings_content.editor.search_wrap.as_ref(), + write: |settings_content, value| { + settings_content.editor.search_wrap = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Debugger Button", - description: "Whether to show the debugger button in the status bar", + title: "Seed Search Query From Cursor", + description: "When to populate a new search's query based on the text under the cursor.", field: Box::new(SettingField { + json_path: Some("seed_search_query_from_cursor"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.button - } else { - &None - } + settings_content + .editor + .seed_search_query_from_cursor + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.debugger.get_or_insert_default().button + write: |settings_content, value| { + settings_content.editor.seed_search_query_from_cursor = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Tab Bar"), + SettingsPageItem::SectionHeader("File Finder"), + // todo: null by default SettingsPageItem::SettingItem(SettingItem { - title: "Editor Tabs", - description: "Whether or not to show the tab bar in the editor", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(tab_bar) = &settings_content.tab_bar { - &tab_bar.show - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content.tab_bar.get_or_insert_default().show - }, - }), + title: "Include Ignored in Search", + description: "Use gitignored files when searching.", + field: Box::new( + SettingField { + json_path: Some("file_finder.include_ignored"), + pick: |settings_content| { + settings_content + .file_finder + .as_ref()? 
+ .include_ignored + .as_ref() + }, + write: |settings_content, value| { + settings_content + .file_finder + .get_or_insert_default() + .include_ignored = value; + }, + } + ), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Navigation History Buttons", - description: "Whether or not to show the navigation history buttons in the tab bar", + title: "File Icons", + description: "Show file icons in the file finder.", field: Box::new(SettingField { + json_path: Some("file_finder.file_icons"), pick: |settings_content| { - if let Some(tab_bar) = &settings_content.tab_bar { - &tab_bar.show_nav_history_buttons - } else { - &None - } + settings_content.file_finder.as_ref()?.file_icons.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .tab_bar + write: |settings_content, value| { + settings_content + .file_finder .get_or_insert_default() - .show_nav_history_buttons + .file_icons = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Title Bar"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Branch Icon", - description: "Whether to show the branch icon beside branch switcher in the titlebar", + title: "Modal Max Width", + description: "Determines how much space the file finder can take up in relation to the available window width.", field: Box::new(SettingField { + json_path: Some("file_finder.modal_max_width"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_branch_icon - } else { - &None - } + settings_content + .file_finder + .as_ref()? + .modal_max_width + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar + write: |settings_content, value| { + settings_content + .file_finder .get_or_insert_default() - .show_branch_icon + .modal_max_width = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Branch Name", - description: "Whether to show the branch name button in the titlebar", + title: "Skip Focus For Active In Search", + description: "Whether the file finder should skip focus for the active file in search results.", field: Box::new(SettingField { + json_path: Some("file_finder.skip_focus_for_active_in_search"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_branch_name - } else { - &None - } + settings_content + .file_finder + .as_ref()? 
+ .skip_focus_for_active_in_search + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar + write: |settings_content, value| { + settings_content + .file_finder .get_or_insert_default() - .show_branch_name + .skip_focus_for_active_in_search = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Project Items", - description: "Whether to show the project host and name in the titlebar", + title: "Git Status", + description: "Show the Git status in the file finder.", field: Box::new(SettingField { + json_path: Some("file_finder.git_status"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_project_items - } else { - &None - } + settings_content.file_finder.as_ref()?.git_status.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar + write: |settings_content, value| { + settings_content + .file_finder .get_or_insert_default() - .show_project_items + .git_status = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("File Scan"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Onboarding Banner", - description: "Whether to show banners announcing new features in the titlebar", + title: "File Scan Exclusions", + description: "Files or globs of files that will be excluded by Zed entirely. They will be skipped during file scans, file searches, and not be displayed in the project file tree. Takes precedence over \"File Scan Inclusions\"", + field: Box::new( + SettingField { + json_path: Some("file_scan_exclusions"), + pick: |settings_content| { + settings_content + .project + .worktree + .file_scan_exclusions + .as_ref() + }, + write: |settings_content, value| { + settings_content.project.worktree.file_scan_exclusions = value; + }, + } + .unimplemented(), + ), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "File Scan Inclusions", + description: "Files or globs of files that will be included by Zed, even when ignored by git. This is useful for files that are not tracked by git, but are still important to your project. Note that globs that are overly broad can slow down Zed's file scanning. 
\"File Scan Exclusions\" takes precedence over these inclusions", + field: Box::new( + SettingField { + json_path: Some("file_scan_inclusions"), + pick: |settings_content| { + settings_content + .project + .worktree + .file_scan_inclusions + .as_ref() + }, + write: |settings_content, value| { + settings_content.project.worktree.file_scan_inclusions = value; + }, + } + .unimplemented(), + ), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Restore File State", + description: "Restore previous file state when reopening.", field: Box::new(SettingField { + json_path: Some("restore_on_file_reopen"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_onboarding_banner - } else { - &None - } + settings_content.workspace.restore_on_file_reopen.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar - .get_or_insert_default() - .show_onboarding_banner + write: |settings_content, value| { + settings_content.workspace.restore_on_file_reopen = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show User Picture", - description: "Whether to show user picture in the titlebar", + title: "Close on File Delete", + description: "Automatically close files that have been deleted.", field: Box::new(SettingField { + json_path: Some("close_on_file_delete"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_user_picture - } else { - &None - } + settings_content.workspace.close_on_file_delete.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar - .get_or_insert_default() - .show_user_picture + write: |settings_content, value| { + settings_content.workspace.close_on_file_delete = value; }, }), metadata: None, files: USER, }), + ], + }, + SettingsPage { + title: "Window & Layout", + items: vec![ + SettingsPageItem::SectionHeader("Status Bar"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Sign In", - description: "Whether to show the sign in button in the titlebar", + title: "Project Panel Button", + description: "Show the project panel button in the status bar.", field: Box::new(SettingField { + json_path: Some("project_panel.button"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_sign_in - } else { - &None - } + settings_content.project_panel.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .show_sign_in + .button = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Menus", - description: "Whether to show the menus in the titlebar", + title: "Active Language Button", + description: "Show the active language button in the status bar.", field: Box::new(SettingField { + json_path: Some("status_bar.active_language_button"), pick: |settings_content| { - if let Some(title_bar) = &settings_content.title_bar { - &title_bar.show_menus - } else { - &None - } + settings_content + .status_bar + .as_ref()? 
+ .active_language_button + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .title_bar + write: |settings_content, value| { + settings_content + .status_bar .get_or_insert_default() - .show_menus + .active_language_button = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Tab Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Activate On Close", - description: "What to do after closing the current tab", + title: "Cursor Position Button", + description: "Show the cursor position button in the status bar.", field: Box::new(SettingField { + json_path: Some("status_bar.cursor_position_button"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.activate_on_close - } else { - &None - } + settings_content + .status_bar + .as_ref()? + .cursor_position_button + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .tabs + write: |settings_content, value| { + settings_content + .status_bar .get_or_insert_default() - .activate_on_close + .cursor_position_button = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Tab Show Diagnostics", - description: "Which files containing diagnostic errors/warnings to mark in the tabs", + title: "Terminal Button", + description: "Show the terminal button in the status bar.", field: Box::new(SettingField { + json_path: Some("terminal.button"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.show_diagnostics - } else { - &None - } + settings_content.terminal.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .tabs - .get_or_insert_default() - .show_diagnostics + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().button = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Close Button", - description: "Controls the appearance behavior of the tab's close button", + title: "Diagnostics Button", + description: "Show the project diagnostics button in the status bar.", field: Box::new(SettingField { + json_path: Some("diagnostics.button"), pick: |settings_content| { - if let Some(tabs) = &settings_content.tabs { - &tabs.show_close_button - } else { - &None - } + settings_content.diagnostics.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .tabs - .get_or_insert_default() - .show_close_button + write: |settings_content, value| { + settings_content.diagnostics.get_or_insert_default().button = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Preview Tabs"), SettingsPageItem::SettingItem(SettingItem { - title: "Preview Tabs Enabled", - description: "Whether to show opened editors as preview tabs", + title: "Project Search Button", + description: "Show the project search button in the status bar.", field: Box::new(SettingField { + json_path: Some("search.button"), pick: |settings_content| { - if let Some(preview_tabs) = &settings_content.preview_tabs { - &preview_tabs.enabled - } else { - &None - } + settings_content.editor.search.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .preview_tabs + write: |settings_content, value| { + settings_content + .editor + .search .get_or_insert_default() - .enabled + .button = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Enable Preview From File 
Finder", - description: "Whether to open tabs in preview mode when selected from the file finder", + title: "Debugger Button", + description: "Show the debugger button in the status bar.", field: Box::new(SettingField { + json_path: Some("debugger.button"), pick: |settings_content| { - if let Some(preview_tabs) = &settings_content.preview_tabs { - &preview_tabs.enable_preview_from_file_finder - } else { - &None - } + settings_content.debugger.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .preview_tabs - .get_or_insert_default() - .enable_preview_from_file_finder + write: |settings_content, value| { + settings_content.debugger.get_or_insert_default().button = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Title Bar"), SettingsPageItem::SettingItem(SettingItem { - title: "Enable Preview From Code Navigation", - description: "Whether a preview tab gets replaced when code navigation is used to navigate away from the tab", + title: "Show Branch Icon", + description: "Show the branch icon beside branch switcher in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_branch_icon"), pick: |settings_content| { - if let Some(preview_tabs) = &settings_content.preview_tabs { - &preview_tabs.enable_preview_from_code_navigation - } else { - &None - } + settings_content + .title_bar + .as_ref()? + .show_branch_icon + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .preview_tabs + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .enable_preview_from_code_navigation + .show_branch_icon = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Search Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Whole Word", - description: "Whether to search for whole words by default", + title: "Show Branch Name", + description: "Show the branch name button in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_branch_name"), pick: |settings_content| { - if let Some(search) = &settings_content.editor.search { - &search.whole_word - } else { - &None - } + settings_content + .title_bar + .as_ref()? + .show_branch_name + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .search + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .whole_word + .show_branch_name = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Case Sensitive", - description: "Whether to search case-sensitively by default", + title: "Show Project Items", + description: "Show the project host and name in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_project_items"), pick: |settings_content| { - if let Some(search) = &settings_content.editor.search { - &search.case_sensitive - } else { - &None - } + settings_content + .title_bar + .as_ref()? 
+ .show_project_items + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .search + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .case_sensitive + .show_project_items = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Include Ignored", - description: "Whether to include ignored files in search results by default", + title: "Show Onboarding Banner", + description: "Show banners announcing new features in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_onboarding_banner"), pick: |settings_content| { - if let Some(search) = &settings_content.editor.search { - &search.include_ignored - } else { - &None - } + settings_content + .title_bar + .as_ref()? + .show_onboarding_banner + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .editor - .search + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .include_ignored + .show_onboarding_banner = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Regex", - description: "Whether to use regex search by default", + title: "Show User Picture", + description: "Show user picture in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_user_picture"), pick: |settings_content| { - if let Some(search) = &settings_content.editor.search { - &search.regex - } else { - &None - } + settings_content + .title_bar + .as_ref()? + .show_user_picture + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.editor.search.get_or_insert_default().regex + write: |settings_content, value| { + settings_content + .title_bar + .get_or_insert_default() + .show_user_picture = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("File Finder"), SettingsPageItem::SettingItem(SettingItem { - title: "File Icons", - description: "Whether to show file icons in the file finder", + title: "Show Sign In", + description: "Show the sign in button in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_sign_in"), pick: |settings_content| { - if let Some(file_finder) = &settings_content.file_finder { - &file_finder.file_icons - } else { - &None - } + settings_content.title_bar.as_ref()?.show_sign_in.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .file_finder + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .file_icons + .show_sign_in = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Modal Max Width", - description: "Determines how much space the file finder can take up in relation to the available window width", + title: "Show Menus", + description: "Show the menus in the titlebar.", field: Box::new(SettingField { + json_path: Some("title_bar.show_menus"), pick: |settings_content| { - if let Some(file_finder) = &settings_content.file_finder { - &file_finder.modal_max_width - } else { - &None - } + settings_content.title_bar.as_ref()?.show_menus.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .file_finder + write: |settings_content, value| { + settings_content + .title_bar .get_or_insert_default() - .modal_max_width + .show_menus = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Tab Bar"), SettingsPageItem::SettingItem(SettingItem { - title: 
"Skip Focus For Active In Search", - description: "Whether the file finder should skip focus for the active file in search results", + title: "Show Tab Bar", + description: "Show the tab bar in the editor.", + field: Box::new(SettingField { + json_path: Some("tab_bar.show"), + pick: |settings_content| settings_content.tab_bar.as_ref()?.show.as_ref(), + write: |settings_content, value| { + settings_content.tab_bar.get_or_insert_default().show = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Git Status In Tabs", + description: "Show the Git file status on a tab item.", field: Box::new(SettingField { + json_path: Some("tabs.git_status"), pick: |settings_content| { - if let Some(file_finder) = &settings_content.file_finder { - &file_finder.skip_focus_for_active_in_search - } else { - &None - } + settings_content.tabs.as_ref()?.git_status.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .file_finder - .get_or_insert_default() - .skip_focus_for_active_in_search + write: |settings_content, value| { + settings_content.tabs.get_or_insert_default().git_status = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Git Status", - description: "Whether to show the git status in the file finder", + title: "Show File Icons In Tabs", + description: "Show the file icon for a tab.", field: Box::new(SettingField { + json_path: Some("tabs.file_icons"), pick: |settings_content| { - if let Some(file_finder) = &settings_content.file_finder { - &file_finder.git_status - } else { - &None - } + settings_content.tabs.as_ref()?.file_icons.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .file_finder - .get_or_insert_default() - .git_status + write: |settings_content, value| { + settings_content.tabs.get_or_insert_default().file_icons = value; }, }), metadata: None, files: USER, }), - // todo: null by default SettingsPageItem::SettingItem(SettingItem { - title: "Include Ignored", - description: "Whether to use gitignored files when searching", + title: "Tab Close Position", + description: "Position of the close button in a tab.", + field: Box::new(SettingField { + json_path: Some("tabs.close_position"), + pick: |settings_content| { + settings_content.tabs.as_ref()?.close_position.as_ref() + }, + write: |settings_content, value| { + settings_content.tabs.get_or_insert_default().close_position = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + files: USER, + title: "Maximum Tabs", + description: "Maximum open tabs in a pane. 
Will not close an unsaved tab.", + // todo(settings_ui): The default for this value is null and it's use in code + // is complex, so I'm going to come back to this later field: Box::new( SettingField { - pick: |settings_content| { - if let Some(file_finder) = &settings_content.file_finder { - &file_finder.include_ignored - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .file_finder - .get_or_insert_default() - .include_ignored + json_path: Some("max_tabs"), + pick: |settings_content| settings_content.workspace.max_tabs.as_ref(), + write: |settings_content, value| { + settings_content.workspace.max_tabs = value; }, } .unimplemented(), ), metadata: None, - files: USER, }), - ], - }, - SettingsPage { - title: "Panels", - items: vec![ - SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Project Panel Dock", - description: "Where to dock the project panel", + title: "Show Navigation History Buttons", + description: "Show the navigation history buttons in the tab bar.", field: Box::new(SettingField { + json_path: Some("tab_bar.show_nav_history_buttons"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.dock - } else { - &None - } + settings_content + .tab_bar + .as_ref()? + .show_nav_history_buttons + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.project_panel.get_or_insert_default().dock + write: |settings_content, value| { + settings_content + .tab_bar + .get_or_insert_default() + .show_nav_history_buttons = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Tab Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Project Panel Default Width", - description: "Default width of the project panel in pixels", + title: "Activate On Close", + description: "What to do after closing the current tab.", field: Box::new(SettingField { + json_path: Some("tabs.activate_on_close"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.default_width - } else { - &None - } + settings_content.tabs.as_ref()?.activate_on_close.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .tabs .get_or_insert_default() - .default_width + .activate_on_close = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Hide .gitignore", - description: "Whether to hide the gitignore entries in the project panel", + title: "Tab Show Diagnostics", + description: "Which files containing diagnostic errors/warnings to mark in the tabs.", field: Box::new(SettingField { + json_path: Some("tabs.show_diagnostics"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.hide_gitignore - } else { - &None - } + settings_content.tabs.as_ref()?.show_diagnostics.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .tabs .get_or_insert_default() - .hide_gitignore + .show_diagnostics = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Entry Spacing", - description: "Spacing between worktree entries in the project panel", + title: "Show Close Button", + description: "Controls the appearance behavior of the tab's close button.", field: 
Box::new(SettingField { + json_path: Some("tabs.show_close_button"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.entry_spacing - } else { - &None - } + settings_content.tabs.as_ref()?.show_close_button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .tabs .get_or_insert_default() - .entry_spacing + .show_close_button = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Preview Tabs"), SettingsPageItem::SettingItem(SettingItem { - title: "File Icons", - description: "Whether to show file icons in the project panel", + title: "Preview Tabs Enabled", + description: "Show opened editors as Preview tabs.", field: Box::new(SettingField { + json_path: Some("preview_tabs.enabled"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.file_icons - } else { - &None - } + settings_content.preview_tabs.as_ref()?.enabled.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .preview_tabs .get_or_insert_default() - .file_icons + .enabled = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Folder Icons", - description: "Whether to show folder icons or chevrons for directories in the project panel", + title: "Enable Preview From File Finder", + description: "Whether to open tabs in Preview mode when selected from the file finder.", field: Box::new(SettingField { + json_path: Some("preview_tabs.enable_preview_from_file_finder"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.folder_icons - } else { - &None - } + settings_content + .preview_tabs + .as_ref()? + .enable_preview_from_file_finder + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .preview_tabs .get_or_insert_default() - .folder_icons + .enable_preview_from_file_finder = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Git Status", - description: "Whether to show the git status in the project panel", + title: "Enable Preview From Code Navigation", + description: "Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.", field: Box::new(SettingField { + json_path: Some("preview_tabs.enable_preview_from_code_navigation"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.git_status - } else { - &None - } + settings_content + .preview_tabs + .as_ref()? 
+ .enable_preview_from_code_navigation + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .preview_tabs .get_or_insert_default() - .git_status + .enable_preview_from_code_navigation = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Layout"), SettingsPageItem::SettingItem(SettingItem { - title: "Indent Size", - description: "Amount of indentation for nested items", + title: "Bottom Dock Layout", + description: "Layout mode for the bottom dock.", field: Box::new(SettingField { + json_path: Some("bottom_dock_layout"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.indent_size - } else { - &None - } + settings_content.workspace.bottom_dock_layout.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .indent_size + write: |settings_content, value| { + settings_content.workspace.bottom_dock_layout = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Auto Reveal Entries", - description: "Whether to reveal it in the project panel automatically when a corresponding project entry becomes active", + files: USER, + title: "Centered Layout Left Padding", + description: "Left padding for centered layout.", field: Box::new(SettingField { + json_path: Some("centered_layout.left_padding"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.auto_reveal_entries - } else { - &None - } + settings_content + .workspace + .centered_layout + .as_ref()? + .left_padding + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .workspace + .centered_layout .get_or_insert_default() - .auto_reveal_entries + .left_padding = value; }, }), metadata: None, - files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Starts Open", - description: "Whether the project panel should open on startup", + files: USER, + title: "Centered Layout Right Padding", + description: "Right padding for centered layout.", field: Box::new(SettingField { + json_path: Some("centered_layout.right_padding"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.starts_open - } else { - &None - } + settings_content + .workspace + .centered_layout + .as_ref()? + .right_padding + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .workspace + .centered_layout .get_or_insert_default() - .starts_open + .right_padding = value; }, }), metadata: None, - files: USER, }), + SettingsPageItem::SectionHeader("Window"), + // todo(settings_ui): Should we filter by platform.as_ref()? 
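// Note on the `json_path` values used by these fields: they appear to mirror key
// paths in the user's settings.json, so a path such as "preview_tabs.enabled"
// (added above) would presumably line up with a nested key like:
//
//     { "preview_tabs": { "enabled": true } }
//
// The code that consumes `json_path` is not shown in this hunk, so treat this
// mapping as an inference from the naming rather than documented behavior.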
SettingsPageItem::SettingItem(SettingItem { - title: "Auto Fold Directories", - description: "Whether to fold directories automatically and show compact folders when a directory has only one subdirectory inside", + title: "Use System Window Tabs", + description: "(macOS only) whether to allow Windows to tab together.", field: Box::new(SettingField { + json_path: Some("use_system_window_tabs"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.auto_fold_dirs - } else { - &None - } + settings_content.workspace.use_system_window_tabs.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .auto_fold_dirs + write: |settings_content, value| { + settings_content.workspace.use_system_window_tabs = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Pane Modifiers"), SettingsPageItem::SettingItem(SettingItem { - title: "Scrollbar Show", - description: "When to show the scrollbar in the project panel", + title: "Inactive Opacity", + description: "Opacity of inactive panels (0.0 - 1.0).", field: Box::new(SettingField { + json_path: Some("active_pane_modifiers.inactive_opacity"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel - && let Some(scrollbar) = &project_panel.scrollbar - && scrollbar.show.is_some() - { - &scrollbar.show - } else if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.show - } else { - &None - } + settings_content + .workspace + .active_pane_modifiers + .as_ref()? + .inactive_opacity + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .scrollbar + write: |settings_content, value| { + settings_content + .workspace + .active_pane_modifiers .get_or_insert_default() - .show + .inactive_opacity = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Diagnostics", - description: "Which files containing diagnostic errors/warnings to mark in the project panel", + title: "Border Size", + description: "Size of the border surrounding the active pane.", field: Box::new(SettingField { + json_path: Some("active_pane_modifiers.border_size"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.show_diagnostics - } else { - &None - } + settings_content + .workspace + .active_pane_modifiers + .as_ref()? 
+ .border_size + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel + write: |settings_content, value| { + settings_content + .workspace + .active_pane_modifiers .get_or_insert_default() - .show_diagnostics + .border_size = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Sticky Scroll", - description: "Whether to stick parent directories at top of the project panel", + title: "Zoomed Padding", + description: "Show padding for zoomed panes.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.sticky_scroll - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .sticky_scroll + json_path: Some("zoomed_padding"), + pick: |settings_content| settings_content.workspace.zoomed_padding.as_ref(), + write: |settings_content, value| { + settings_content.workspace.zoomed_padding = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Pane Split Direction"), SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Indent Guides Show", - description: "When to show indent guides in the project panel", - field: Box::new( - SettingField { - pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - if let Some(indent_guides) = &project_panel.indent_guides { - &indent_guides.show - } else { - &None - } - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .indent_guides - .get_or_insert_default() - .show - }, - } - .unimplemented(), - ), - metadata: None, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Drag and Drop", - description: "Whether to enable drag-and-drop operations in the project panel", + title: "Vertical Split Direction", + description: "Direction to split vertically.", field: Box::new(SettingField { + json_path: Some("pane_split_direction_vertical"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.drag_and_drop - } else { - &None - } + settings_content + .workspace + .pane_split_direction_vertical + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .drag_and_drop + write: |settings_content, value| { + settings_content.workspace.pane_split_direction_vertical = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Hide Root", - description: "Whether to hide the root entry when only one folder is open in the window", + title: "Horizontal Split Direction", + description: "Direction to split horizontally.", field: Box::new(SettingField { + json_path: Some("pane_split_direction_horizontal"), pick: |settings_content| { - if let Some(project_panel) = &settings_content.project_panel { - &project_panel.hide_root - } else { - &None - } + settings_content + .workspace + .pane_split_direction_horizontal + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .project_panel - .get_or_insert_default() - .hide_root + write: |settings_content, value| { + settings_content.workspace.pane_split_direction_horizontal = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Terminal Panel"), + ], + }, + SettingsPage { + title: "Panels", + items: vec![ + 
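// A minimal, self-contained sketch of the pick/write shape shared by the items
// below, using simplified stand-in types (the real `SettingField` and
// `SettingsContent` are defined elsewhere in the crate and are richer than this):
//
//     #[derive(Default)]
//     struct ProjectPanel { hide_gitignore: Option<bool> }
//     #[derive(Default)]
//     struct SettingsContent { project_panel: Option<ProjectPanel> }
//
//     struct SettingField<T: 'static> {
//         json_path: Option<&'static str>,
//         pick: fn(&SettingsContent) -> Option<&T>,
//         write: fn(&mut SettingsContent, Option<T>),
//     }
//
//     let hide_gitignore = SettingField {
//         json_path: Some("project_panel.hide_gitignore"),
//         // The `as_ref()?` chain returns None as soon as any intermediate
//         // section of the settings tree is unset.
//         pick: |content| content.project_panel.as_ref()?.hide_gitignore.as_ref(),
//         // `get_or_insert_default()` materializes missing sections before
//         // assigning the new value.
//         write: |content, value| {
//             content.project_panel.get_or_insert_default().hide_gitignore = value;
//         },
//     };
//
// Whether `write` receives `Option<T>` or `T` is an assumption; the closures in
// this change only show that the picked type and the written type agree.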
SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Terminal Dock", - description: "Where to dock the terminal panel", + title: "Project Panel Dock", + description: "Where to dock the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.dock"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.dock - } else { - &None - } + settings_content.project_panel.as_ref()?.dock.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.terminal.get_or_insert_default().dock + write: |settings_content, value| { + settings_content.project_panel.get_or_insert_default().dock = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Outline Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Outline Panel Button", - description: "Whether to show the outline panel button in the status bar", + title: "Project Panel Default Width", + description: "Default width of the project panel in pixels.", field: Box::new(SettingField { + json_path: Some("project_panel.default_width"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.button - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .default_width + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .button + .default_width = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Outline Panel Dock", - description: "Where to dock the outline panel", + title: "Hide .gitignore", + description: "Whether to hide the gitignore entries in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.hide_gitignore"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.dock - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .hide_gitignore + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.outline_panel.get_or_insert_default().dock + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .hide_gitignore = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Outline Panel Default Width", - description: "Default width of the outline panel in pixels", + title: "Entry Spacing", + description: "Spacing between worktree entries in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.entry_spacing"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.default_width - } else { - &None - } + settings_content + .project_panel + .as_ref()? 
+ .entry_spacing + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .default_width + .entry_spacing = value; }, }), metadata: None, @@ -2787,20 +3410,17 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "File Icons", - description: "Whether to show file icons in the outline panel", + description: "Show file icons in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.file_icons"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.file_icons - } else { - &None - } + settings_content.project_panel.as_ref()?.file_icons.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .file_icons + .file_icons = value; }, }), metadata: None, @@ -2808,20 +3428,21 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Folder Icons", - description: "Whether to show folder icons or chevrons for directories in the outline panel", + description: "Whether to show folder icons or chevrons for directories in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.folder_icons"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.folder_icons - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .folder_icons + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .folder_icons + .folder_icons = value; }, }), metadata: None, @@ -2829,20 +3450,17 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Git Status", - description: "Whether to show the git status in the outline panel", + description: "Show the Git status in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.git_status"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.git_status - } else { - &None - } + settings_content.project_panel.as_ref()?.git_status.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .git_status + .git_status = value; }, }), metadata: None, @@ -2850,20 +3468,21 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Indent Size", - description: "Amount of indentation for nested items", + description: "Amount of indentation for nested items.", field: Box::new(SettingField { + json_path: Some("project_panel.indent_size"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.indent_size - } else { - &None - } + settings_content + .project_panel + .as_ref()? 
+ .indent_size + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .indent_size + .indent_size = value; }, }), metadata: None, @@ -2871,1045 +3490,936 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Auto Reveal Entries", - description: "Whether to reveal when a corresponding outline entry becomes active", + description: "Whether to reveal entries in the project panel automatically when a corresponding project entry becomes active.", field: Box::new(SettingField { + json_path: Some("project_panel.auto_reveal_entries"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.auto_reveal_entries - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .auto_reveal_entries + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .auto_reveal_entries + .auto_reveal_entries = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Auto Fold Directories", - description: "Whether to fold directories automatically when a directory has only one directory inside", + title: "Starts Open", + description: "Whether the project panel should open on startup.", field: Box::new(SettingField { + json_path: Some("project_panel.starts_open"), pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - &outline_panel.auto_fold_dirs - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .starts_open + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .auto_fold_dirs + .starts_open = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - files: USER, - title: "Indent Guides Show", - description: "When to show indent guides in the outline panel", - field: Box::new( - SettingField { - pick: |settings_content| { - if let Some(outline_panel) = &settings_content.outline_panel { - if let Some(indent_guides) = &outline_panel.indent_guides { - &indent_guides.show - } else { - &None - } - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .outline_panel - .get_or_insert_default() - .indent_guides - .get_or_insert_default() - .show - }, - } - .unimplemented(), - ), - metadata: None, - }), - SettingsPageItem::SectionHeader("Git Panel"), - SettingsPageItem::SettingItem(SettingItem { - title: "Git Panel Button", - description: "Whether to show the Git panel button in the status bar", + title: "Auto Fold Directories", + description: "Whether to fold directories automatically and show compact folders when a directory has only one subdirectory inside.", field: Box::new(SettingField { + json_path: Some("project_panel.auto_fold_dirs"), pick: |settings_content| { - if let Some(git_panel) = &settings_content.git_panel { - &git_panel.button - } else { - &None - } + settings_content + .project_panel + .as_ref()? 
+ .auto_fold_dirs + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.git_panel.get_or_insert_default().button + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .auto_fold_dirs = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Git Panel Dock", - description: "Where to dock the Git panel", + title: "Show Scrollbar", + description: "Show the scrollbar in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.scrollbar.show"), pick: |settings_content| { - if let Some(git_panel) = &settings_content.git_panel { - &git_panel.dock - } else { - &None - } + show_scrollbar_or_editor(settings_content, |settings_content| { + settings_content + .project_panel + .as_ref()? + .scrollbar + .as_ref()? + .show + .as_ref() + }) }, - pick_mut: |settings_content| { - &mut settings_content.git_panel.get_or_insert_default().dock + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .scrollbar + .get_or_insert_default() + .show = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Git Panel Default Width", - description: "Default width of the Git panel in pixels", + title: "Show Diagnostics", + description: "Which files containing diagnostic errors/warnings to mark in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.show_diagnostics"), pick: |settings_content| { - if let Some(git_panel) = &settings_content.git_panel { - &git_panel.default_width - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .show_diagnostics + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .default_width + .show_diagnostics = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Debugger Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Debugger Panel Dock", - description: "The dock position of the debug panel", + title: "Sticky Scroll", + description: "Whether to stick parent directories at top of the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.sticky_scroll"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.dock - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .sticky_scroll + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.debugger.get_or_insert_default().dock + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .sticky_scroll = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Notification Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Notification Panel Button", - description: "Whether to show the notification panel button in the status bar", + files: USER, + title: "Show Indent Guides", + description: "Show indent guides in the project panel.", + field: Box::new( + SettingField { + json_path: Some("project_panel.indent_guides.show"), + pick: |settings_content| { + settings_content + .project_panel + .as_ref()? + .indent_guides + .as_ref()? 
+ .show + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .indent_guides + .get_or_insert_default() + .show = value; + }, + } + ), + metadata: None, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Drag and Drop", + description: "Whether to enable drag-and-drop operations in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.drag_and_drop"), pick: |settings_content| { - if let Some(notification_panel) = &settings_content.notification_panel { - ¬ification_panel.button - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .drag_and_drop + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .notification_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .button + .drag_and_drop = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Notification Panel Dock", - description: "Where to dock the notification panel", + title: "Hide Root", + description: "Whether to hide the root entry when only one folder is open in the window.", field: Box::new(SettingField { + json_path: Some("project_panel.drag_and_drop"), pick: |settings_content| { - if let Some(notification_panel) = &settings_content.notification_panel { - ¬ification_panel.dock - } else { - &None - } + settings_content.project_panel.as_ref()?.hide_root.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .notification_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .dock + .hide_root = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Notification Panel Default Width", - description: "Default width of the notification panel in pixels", + title: "Hide Hidden", + description: "Whether to hide the hidden entries in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.hide_hidden"), pick: |settings_content| { - if let Some(notification_panel) = &settings_content.notification_panel { - ¬ification_panel.default_width - } else { - &None - } + settings_content + .project_panel + .as_ref()? + .hide_hidden + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .notification_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .default_width + .hide_hidden = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Collaboration Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Collaboration Panel Button", - description: "Whether to show the collaboration panel button in the status bar", + title: "Open File on Paste", + description: "Whether to automatically open files when pasting them in the project panel.", field: Box::new(SettingField { + json_path: Some("project_panel.open_file_on_paste"), pick: |settings_content| { - if let Some(collaboration_panel) = &settings_content.collaboration_panel - { - &collaboration_panel.button - } else { - &None - } + settings_content + .project_panel + .as_ref()? 
+ .open_file_on_paste + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .collaboration_panel + write: |settings_content, value| { + settings_content + .project_panel .get_or_insert_default() - .button + .open_file_on_paste = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Terminal Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Collaboration Panel Dock", - description: "Where to dock the collaboration panel", + title: "Terminal Dock", + description: "Where to dock the terminal panel.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(collaboration_panel) = &settings_content.collaboration_panel - { - &collaboration_panel.dock - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .collaboration_panel - .get_or_insert_default() - .dock + json_path: Some("terminal.dock"), + pick: |settings_content| settings_content.terminal.as_ref()?.dock.as_ref(), + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().dock = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Outline Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Collaboration Panel Default Width", - description: "Default width of the collaboration panel in pixels", + title: "Outline Panel Button", + description: "Show the outline panel button in the status bar.", field: Box::new(SettingField { + json_path: Some("outline_panel.button"), pick: |settings_content| { - if let Some(collaboration_panel) = &settings_content.collaboration_panel - { - &collaboration_panel.default_width - } else { - &None - } + settings_content.outline_panel.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .collaboration_panel + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .default_width + .button = value; }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "Version Control", - items: vec![ - SettingsPageItem::SectionHeader("Git"), SettingsPageItem::SettingItem(SettingItem { - title: "Git Gutter", - description: "Control whether git status is shown in the editor's gutter", + title: "Outline Panel Dock", + description: "Where to dock the outline panel.", field: Box::new(SettingField { + json_path: Some("outline_panel.dock"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - &git.git_gutter - } else { - &None - } + settings_content.outline_panel.as_ref()?.dock.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.git.get_or_insert_default().git_gutter + write: |settings_content, value| { + settings_content.outline_panel.get_or_insert_default().dock = value; }, }), metadata: None, files: USER, }), - // todo(settings_ui): Figure out the right default for this value in default.json SettingsPageItem::SettingItem(SettingItem { - title: "Gutter Debounce", - description: "Debounce threshold in milliseconds after which changes are reflected in the git gutter", + title: "Outline Panel Default Width", + description: "Default width of the outline panel in pixels.", field: Box::new(SettingField { + json_path: Some("outline_panel.default_width"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - &git.gutter_debounce - } else { - &None - } + settings_content + .outline_panel + .as_ref()? 
+ .default_width + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.git.get_or_insert_default().gutter_debounce + write: |settings_content, value| { + settings_content + .outline_panel + .get_or_insert_default() + .default_width = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Git Blame", - description: "Whether or not to show git blame data inline in the currently focused line", + title: "File Icons", + description: "Show file icons in the outline panel.", field: Box::new(SettingField { + json_path: Some("outline_panel.file_icons"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(inline_blame) = &git.inline_blame { - &inline_blame.enabled - } else { - &None - } - } else { - &None - } + settings_content.outline_panel.as_ref()?.file_icons.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .inline_blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .enabled + .file_icons = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Git Blame Delay", - description: "The delay after which the inline blame information is shown", + title: "Folder Icons", + description: "Whether to show folder icons or chevrons for directories in the outline panel.", field: Box::new(SettingField { + json_path: Some("outline_panel.folder_icons"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(inline_blame) = &git.inline_blame { - &inline_blame.delay_ms - } else { - &None - } - } else { - &None - } + settings_content + .outline_panel + .as_ref()? + .folder_icons + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .inline_blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .delay_ms + .folder_icons = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Git Blame Padding", - description: "Padding between the end of the source line and the start of the inline blame in columns", + title: "Git Status", + description: "Show the Git status in the outline panel.", field: Box::new(SettingField { + json_path: Some("outline_panel.git_status"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(inline_blame) = &git.inline_blame { - &inline_blame.padding - } else { - &None - } - } else { - &None - } + settings_content.outline_panel.as_ref()?.git_status.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .inline_blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .padding + .git_status = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Git Blame Min Column", - description: "The minimum column number to show the inline blame information at", + title: "Indent Size", + description: "Amount of indentation for nested items.", field: Box::new(SettingField { + json_path: Some("outline_panel.indent_size"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(inline_blame) = &git.inline_blame { - &inline_blame.min_column - } else { - &None - } - } else { - &None - } + settings_content + .outline_panel + .as_ref()? 
+ .indent_size + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .inline_blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .min_column + .indent_size = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Commit Summary", - description: "Whether to show commit summary as part of the inline blame", + title: "Auto Reveal Entries", + description: "Whether to reveal when a corresponding outline entry becomes active.", field: Box::new(SettingField { + json_path: Some("outline_panel.auto_reveal_entries"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(inline_blame) = &git.inline_blame { - &inline_blame.show_commit_summary - } else { - &None - } - } else { - &None - } + settings_content + .outline_panel + .as_ref()? + .auto_reveal_entries + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .inline_blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .show_commit_summary + .auto_reveal_entries = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Avatar", - description: "Whether to show the avatar of the author of the commit", + title: "Auto Fold Directories", + description: "Whether to fold directories automatically when a directory contains only one subdirectory.", field: Box::new(SettingField { + json_path: Some("outline_panel.auto_fold_dirs"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(blame) = &git.blame { - &blame.show_avatar - } else { - &None - } - } else { - &None - } + settings_content + .outline_panel + .as_ref()? + .auto_fold_dirs + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .blame + write: |settings_content, value| { + settings_content + .outline_panel .get_or_insert_default() - .show_avatar + .auto_fold_dirs = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Author Name In Branch Picker", - description: "Whether to show author name as part of the commit information in branch picker", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(git) = &settings_content.git { - if let Some(branch_picker) = &git.branch_picker { - &branch_picker.show_author_name - } else { - &None - } - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .git - .get_or_insert_default() - .branch_picker - .get_or_insert_default() - .show_author_name - }, - }), - metadata: None, files: USER, + title: "Show Indent Guides", + description: "When to show indent guides in the outline panel.", + field: Box::new( + SettingField { + json_path: Some("outline_panel.indent_guides.show"), + pick: |settings_content| { + settings_content + .outline_panel + .as_ref()? + .indent_guides + .as_ref()? 
+ .show + .as_ref() + }, + write: |settings_content, value| { + settings_content + .outline_panel + .get_or_insert_default() + .indent_guides + .get_or_insert_default() + .show = value; + }, + } + ), + metadata: None, }), + SettingsPageItem::SectionHeader("Git Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Hunk Style", - description: "How git hunks are displayed visually in the editor", + title: "Git Panel Button", + description: "Show the Git panel button in the status bar.", field: Box::new(SettingField { + json_path: Some("git_panel.button"), pick: |settings_content| { - if let Some(git) = &settings_content.git { - &git.hunk_style - } else { - &None - } + settings_content.git_panel.as_ref()?.button.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.git.get_or_insert_default().hunk_style + write: |settings_content, value| { + settings_content.git_panel.get_or_insert_default().button = value; }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "System & Network", - items: vec![ - SettingsPageItem::SectionHeader("Network"), - // todo(settings_ui): Proxy needs a default - SettingsPageItem::SettingItem(SettingItem { - title: "Proxy", - description: "The proxy to use for network requests", - field: Box::new( - SettingField { - pick: |settings_content| &settings_content.proxy, - pick_mut: |settings_content| &mut settings_content.proxy, - } - .unimplemented(), - ), - metadata: Some(Box::new(SettingsFieldMetadata { - placeholder: Some("socks5h://localhost:10808"), - })), - files: USER, - }), SettingsPageItem::SettingItem(SettingItem { - title: "Server URL", - description: "The URL of the Zed server to connect to", + title: "Git Panel Dock", + description: "Where to dock the Git panel.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.server_url, - pick_mut: |settings_content| &mut settings_content.server_url, + json_path: Some("git_panel.dock"), + pick: |settings_content| settings_content.git_panel.as_ref()?.dock.as_ref(), + write: |settings_content, value| { + settings_content.git_panel.get_or_insert_default().dock = value; + }, }), - metadata: Some(Box::new(SettingsFieldMetadata { - placeholder: Some("https://zed.dev"), - })), + metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("System"), SettingsPageItem::SettingItem(SettingItem { - title: "Auto Update", - description: "Whether or not to automatically check for updates", + title: "Git Panel Default Width", + description: "Default width of the Git panel in pixels.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.auto_update, - pick_mut: |settings_content| &mut settings_content.auto_update, + json_path: Some("git_panel.default_width"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.default_width.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .default_width = value; + }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "Diagnostics & Errors", - items: vec![ - SettingsPageItem::SectionHeader("Filtering"), SettingsPageItem::SettingItem(SettingItem { - title: "Max Severity", - description: "Which level to use to filter out diagnostics displayed in the editor", + title: "Git Panel Status Style", + description: "How entry statuses are displayed.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.editor.diagnostics_max_severity, - pick_mut: |settings_content| { - &mut 
settings_content.editor.diagnostics_max_severity + json_path: Some("git_panel.status_style"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.status_style.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .status_style = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Include Warnings", - description: "Whether to show warnings or not by default", + title: "Fallback Branch Name", + description: "Default branch name will be when init.defaultbranch is not set in Git.", field: Box::new(SettingField { + json_path: Some("git_panel.fallback_branch_name"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - &diagnostics.include_warnings - } else { - &None - } + settings_content + .git_panel + .as_ref()? + .fallback_branch_name + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics + write: |settings_content, value| { + settings_content + .git_panel .get_or_insert_default() - .include_warnings + .fallback_branch_name = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Inline"), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Diagnostics Enabled", - description: "Whether to show diagnostics inline or not", + title: "Sort By Path", + description: "Enable to sort entries in the panel by path, disable to sort by status.", field: Box::new(SettingField { + json_path: Some("git_panel.sort_by_path"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(inline) = &diagnostics.inline { - &inline.enabled - } else { - &None - } - } else { - &None - } + settings_content.git_panel.as_ref()?.sort_by_path.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics - .get_or_insert_default() - .inline + write: |settings_content, value| { + settings_content + .git_panel .get_or_insert_default() - .enabled + .sort_by_path = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Update Debounce", - description: "The delay in milliseconds to show inline diagnostics after the last diagnostic update", + title: "Collapse Untracked Diff", + description: "Whether to collapse untracked files in the diff panel.", field: Box::new(SettingField { + json_path: Some("git_panel.collapse_untracked_diff"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(inline) = &diagnostics.inline { - &inline.update_debounce_ms - } else { - &None - } - } else { - &None - } + settings_content + .git_panel + .as_ref()? 
+ .collapse_untracked_diff + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics - .get_or_insert_default() - .inline + write: |settings_content, value| { + settings_content + .git_panel .get_or_insert_default() - .update_debounce_ms + .collapse_untracked_diff = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Padding", - description: "The amount of padding between the end of the source line and the start of the inline diagnostic", + title: "Scroll Bar", + description: "How and when the scrollbar should be displayed.", field: Box::new(SettingField { + json_path: Some("git_panel.scrollbar.show"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(inline) = &diagnostics.inline { - &inline.padding - } else { - &None - } - } else { - &None - } + show_scrollbar_or_editor(settings_content, |settings_content| { + settings_content + .git_panel + .as_ref()? + .scrollbar + .as_ref()? + .show + .as_ref() + }) }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics + write: |settings_content, value| { + settings_content + .git_panel .get_or_insert_default() - .inline + .scrollbar .get_or_insert_default() - .padding + .show = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Debugger Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Inline Min Column", - description: "The minimum column to display inline diagnostics", + title: "Debugger Panel Dock", + description: "The dock position of the debug panel.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(inline) = &diagnostics.inline { - &inline.min_column - } else { - &None - } - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics - .get_or_insert_default() - .inline - .get_or_insert_default() - .min_column + json_path: Some("debugger.dock"), + pick: |settings_content| settings_content.debugger.as_ref()?.dock.as_ref(), + write: |settings_content, value| { + settings_content.debugger.get_or_insert_default().dock = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Performance"), + SettingsPageItem::SectionHeader("Notification Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "LSP Pull Diagnostics Enabled", - description: "Whether to pull for language server-powered diagnostics or not", + title: "Notification Panel Button", + description: "Show the notification panel button in the status bar.", field: Box::new(SettingField { + json_path: Some("notification_panel.button"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(lsp_pull) = &diagnostics.lsp_pull_diagnostics { - &lsp_pull.enabled - } else { - &None - } - } else { - &None - } + settings_content + .notification_panel + .as_ref()? 
+ .button + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics - .get_or_insert_default() - .lsp_pull_diagnostics + write: |settings_content, value| { + settings_content + .notification_panel .get_or_insert_default() - .enabled + .button = value; }, }), metadata: None, files: USER, }), - // todo(settings_ui): Needs unit SettingsPageItem::SettingItem(SettingItem { - title: "LSP Pull Debounce", - description: "Minimum time to wait before pulling diagnostics from the language server(s)", + title: "Notification Panel Dock", + description: "Where to dock the notification panel.", field: Box::new(SettingField { + json_path: Some("notification_panel.dock"), pick: |settings_content| { - if let Some(diagnostics) = &settings_content.diagnostics { - if let Some(lsp_pull) = &diagnostics.lsp_pull_diagnostics { - &lsp_pull.debounce_ms - } else { - &None - } - } else { - &None - } + settings_content.notification_panel.as_ref()?.dock.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .diagnostics - .get_or_insert_default() - .lsp_pull_diagnostics + write: |settings_content, value| { + settings_content + .notification_panel .get_or_insert_default() - .debounce_ms + .dock = value; }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "Debugger", - items: vec![ - SettingsPageItem::SectionHeader("General"), SettingsPageItem::SettingItem(SettingItem { - title: "Stepping Granularity", - description: "Determines the stepping granularity for debug operations", + title: "Notification Panel Default Width", + description: "Default width of the notification panel in pixels.", field: Box::new(SettingField { + json_path: Some("notification_panel.default_width"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.stepping_granularity - } else { - &None - } + settings_content + .notification_panel + .as_ref()? + .default_width + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .debugger + write: |settings_content, value| { + settings_content + .notification_panel .get_or_insert_default() - .stepping_granularity + .default_width = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Collaboration Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Save Breakpoints", - description: "Whether breakpoints should be reused across Zed sessions", + title: "Collaboration Panel Button", + description: "Show the collaboration panel button in the status bar.", field: Box::new(SettingField { + json_path: Some("collaboration_panel.button"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.save_breakpoints - } else { - &None - } + settings_content + .collaboration_panel + .as_ref()? 
+ .button + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .debugger + write: |settings_content, value| { + settings_content + .collaboration_panel .get_or_insert_default() - .save_breakpoints + .button = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Timeout", - description: "Time in milliseconds until timeout error when connecting to a TCP debug adapter", + title: "Collaboration Panel Dock", + description: "Where to dock the collaboration panel.", field: Box::new(SettingField { + json_path: Some("collaboration_panel.dock"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.timeout - } else { - &None - } + settings_content.collaboration_panel.as_ref()?.dock.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.debugger.get_or_insert_default().timeout + write: |settings_content, value| { + settings_content + .collaboration_panel + .get_or_insert_default() + .dock = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Log DAP Communications", - description: "Whether to log messages between active debug adapters and Zed", + title: "Collaboration Panel Default Width", + description: "Default width of the collaboration panel in pixels.", field: Box::new(SettingField { + json_path: Some("collaboration_panel.dock"), pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.log_dap_communications - } else { - &None - } + settings_content + .collaboration_panel + .as_ref()? + .default_width + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .debugger + write: |settings_content, value| { + settings_content + .collaboration_panel .get_or_insert_default() - .log_dap_communications + .default_width = value; }, }), metadata: None, files: USER, }), + SettingsPageItem::SectionHeader("Agent Panel"), SettingsPageItem::SettingItem(SettingItem { - title: "Format DAP Log Messages", - description: "Whether to format DAP messages when adding them to debug adapter logger", + title: "Agent Panel Button", + description: "Whether to show the agent panel button in the status bar.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(debugger) = &settings_content.debugger { - &debugger.format_dap_log_messages - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .debugger - .get_or_insert_default() - .format_dap_log_messages + json_path: Some("agent.button"), + pick: |settings_content| settings_content.agent.as_ref()?.button.as_ref(), + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().button = value; }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "Collaboration", - items: vec![ - SettingsPageItem::SectionHeader("Calls"), SettingsPageItem::SettingItem(SettingItem { - title: "Mute On Join", - description: "Whether the microphone should be muted when joining a channel or a call", + title: "Agent Panel Dock", + description: "Where to dock the agent panel.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(calls) = &settings_content.calls { - &calls.mute_on_join - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content.calls.get_or_insert_default().mute_on_join + json_path: Some("agent.dock"), + pick: |settings_content| settings_content.agent.as_ref()?.dock.as_ref(), + write: |settings_content, value| { + 
settings_content.agent.get_or_insert_default().dock = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Share On Join", - description: "Whether your current project should be shared when joining an empty channel", + title: "Agent Panel Default Width", + description: "Default width when the agent panel is docked to the left or right.", field: Box::new(SettingField { + json_path: Some("agent.default_width"), pick: |settings_content| { - if let Some(calls) = &settings_content.calls { - &calls.share_on_join - } else { - &None - } + settings_content.agent.as_ref()?.default_width.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.calls.get_or_insert_default().share_on_join + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().default_width = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Experimental"), SettingsPageItem::SettingItem(SettingItem { - title: "Rodio Audio", - description: "Opt into the new audio system", + title: "Agent Panel Default Height", + description: "Default height when the agent panel is docked to the bottom.", field: Box::new(SettingField { + json_path: Some("agent.default_height"), pick: |settings_content| { - if let Some(audio) = &settings_content.audio { - &audio.rodio_audio - } else { - &None - } + settings_content.agent.as_ref()?.default_height.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.audio.get_or_insert_default().rodio_audio + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .default_height = value; }, }), metadata: None, files: USER, }), + ], + }, + SettingsPage { + title: "Debugger", + items: vec![ + SettingsPageItem::SectionHeader("General"), SettingsPageItem::SettingItem(SettingItem { - title: "Auto Microphone Volume", - description: "Automatically adjust microphone volume (requires Rodio Audio)", + title: "Stepping Granularity", + description: "Determines the stepping granularity for debug operations.", field: Box::new(SettingField { + json_path: Some("debugger.stepping_granularity"), pick: |settings_content| { - if let Some(audio) = &settings_content.audio { - &audio.auto_microphone_volume - } else { - &None - } + settings_content + .debugger + .as_ref()? + .stepping_granularity + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .audio + write: |settings_content, value| { + settings_content + .debugger .get_or_insert_default() - .auto_microphone_volume + .stepping_granularity = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Auto Speaker Volume", - description: "Automatically adjust volume of other call members (requires Rodio Audio)", + title: "Save Breakpoints", + description: "Whether breakpoints should be reused across Zed sessions.", field: Box::new(SettingField { + json_path: Some("debugger.save_breakpoints"), pick: |settings_content| { - if let Some(audio) = &settings_content.audio { - &audio.auto_speaker_volume - } else { - &None - } + settings_content + .debugger + .as_ref()?
+ .save_breakpoints + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .audio + write: |settings_content, value| { + settings_content + .debugger .get_or_insert_default() - .auto_speaker_volume + .save_breakpoints = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Denoise", - description: "Remove background noises (requires Rodio Audio)", + title: "Timeout", + description: "Time in milliseconds until timeout error when connecting to a TCP debug adapter.", field: Box::new(SettingField { + json_path: Some("debugger.timeout"), pick: |settings_content| { - if let Some(audio) = &settings_content.audio { - &audio.denoise - } else { - &None - } + settings_content.debugger.as_ref()?.timeout.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content.audio.get_or_insert_default().denoise + write: |settings_content, value| { + settings_content.debugger.get_or_insert_default().timeout = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Legacy Audio Compatible", - description: "Use audio parameters compatible with previous versions (requires Rodio Audio)", + title: "Log DAP Communications", + description: "Whether to log messages between active debug adapters and Zed.", field: Box::new(SettingField { + json_path: Some("debugger.log_dap_communications"), pick: |settings_content| { - if let Some(audio) = &settings_content.audio { - &audio.legacy_audio_compatible - } else { - &None - } + settings_content + .debugger + .as_ref()? + .log_dap_communications + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .audio + write: |settings_content, value| { + settings_content + .debugger .get_or_insert_default() - .legacy_audio_compatible + .log_dap_communications = value; }, }), metadata: None, files: USER, }), - ], - }, - SettingsPage { - title: "AI", - items: vec![ - SettingsPageItem::SectionHeader("General"), SettingsPageItem::SettingItem(SettingItem { - title: "Disable AI", - description: "Whether to disable all AI features in Zed", + title: "Format DAP Log Messages", + description: "Whether to format DAP messages when adding them to debug adapter logger.", field: Box::new(SettingField { - pick: |settings_content| &settings_content.disable_ai, - pick_mut: |settings_content| &mut settings_content.disable_ai, + json_path: Some("debugger.format_dap_log_messages"), + pick: |settings_content| { + settings_content + .debugger + .as_ref()? + .format_dap_log_messages + .as_ref() + }, + write: |settings_content, value| { + settings_content + .debugger + .get_or_insert_default() + .format_dap_log_messages = value; + }, }), metadata: None, files: USER, @@ -3920,122 +4430,341 @@ pub(crate) fn settings_data() -> Vec { title: "Terminal", items: vec![ SettingsPageItem::SectionHeader("Environment"), - SettingsPageItem::SettingItem(SettingItem { - title: "Shell", - description: "What shell to use when opening a terminal", - field: Box::new( - SettingField { + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER | PROJECT, + title: "Shell", + description: "What shell to use when opening a terminal.", + field: Box::new(SettingField { + json_path: Some("terminal.shell$"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.project.shell - } else { - &None - } + Some(&dynamic_variants::()[ + settings_content + .terminal + .as_ref()? + .project + .shell + .as_ref()? 
+ .discriminant() as usize]) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content .terminal .get_or_insert_default() .project .shell + .get_or_insert_with(|| settings::Shell::default()); + *settings_value = match value { + settings::ShellDiscriminants::System => { + settings::Shell::System + }, + settings::ShellDiscriminants::Program => { + let program = match settings_value { + settings::Shell::Program(p) => p.clone(), + settings::Shell::WithArguments { program, .. } => program.clone(), + _ => String::from("sh"), + }; + settings::Shell::Program(program) + }, + settings::ShellDiscriminants::WithArguments => { + let (program, args, title_override) = match settings_value { + settings::Shell::Program(p) => (p.clone(), vec![], None), + settings::Shell::WithArguments { program, args, title_override } => { + (program.clone(), args.clone(), title_override.clone()) + }, + _ => (String::from("sh"), vec![], None), + }; + settings::Shell::WithArguments { + program, + args, + title_override, + } + }, + }; }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.terminal.as_ref()?.project.shell.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::ShellDiscriminants::System => vec![], + settings::ShellDiscriminants::Program => vec![ + SettingItem { + files: USER | PROJECT, + title: "Program", + description: "The shell program to use.", + field: Box::new(SettingField { + json_path: Some("terminal.shell.program"), + pick: |settings_content| { + match settings_content.terminal.as_ref()?.project.shell.as_ref() { + Some(settings::Shell::Program(program)) => Some(program), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .terminal + .get_or_insert_default() + .project + .shell.as_mut() { + Some(settings::Shell::Program(program)) => *program = value, + _ => return + } + }, + }), + metadata: None, + } + ], + settings::ShellDiscriminants::WithArguments => vec![ + SettingItem { + files: USER | PROJECT, + title: "Program", + description: "The shell program to run.", + field: Box::new(SettingField { + json_path: Some("terminal.shell.program"), + pick: |settings_content| { + match settings_content.terminal.as_ref()?.project.shell.as_ref() { + Some(settings::Shell::WithArguments { program, .. }) => Some(program), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .terminal + .get_or_insert_default() + .project + .shell.as_mut() { + Some(settings::Shell::WithArguments { program, .. }) => *program = value, + _ => return + } + }, + }), + metadata: None, + }, + SettingItem { + files: USER | PROJECT, + title: "Arguments", + description: "The arguments to pass to the shell program.", + field: Box::new( + SettingField { + json_path: Some("terminal.shell.args"), + pick: |settings_content| { + match settings_content.terminal.as_ref()?.project.shell.as_ref() { + Some(settings::Shell::WithArguments { args, .. }) => Some(args), + _ => None + } + }, + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + match settings_content + .terminal + .get_or_insert_default() + .project + .shell.as_mut() { + Some(settings::Shell::WithArguments { args, .. 
}) => *args = value, + _ => return + } + }, + } + .unimplemented(), + ), + metadata: None, + }, + SettingItem { + files: USER | PROJECT, + title: "Title Override", + description: "An optional string to override the title of the terminal tab.", + field: Box::new(SettingField { + json_path: Some("terminal.shell.title_override"), + pick: |settings_content| { + match settings_content.terminal.as_ref()?.project.shell.as_ref() { + Some(settings::Shell::WithArguments { title_override, .. }) => title_override.as_ref().or(DEFAULT_EMPTY_SHARED_STRING), + _ => None + } + }, + write: |settings_content, value| { + match settings_content + .terminal + .get_or_insert_default() + .project + .shell.as_mut() { + Some(settings::Shell::WithArguments { title_override, .. }) => *title_override = value.filter(|s| !s.is_empty()), + _ => return + } + }, + }), + metadata: None, + } + ], } - .unimplemented(), - ), - metadata: None, - files: USER | LOCAL, + }).collect(), }), - SettingsPageItem::SettingItem(SettingItem { - title: "Working Directory", - description: "What working directory to use when launching the terminal", - field: Box::new( - SettingField { + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { + files: USER | PROJECT, + title: "Working Directory", + description: "What working directory to use when launching the terminal.", + field: Box::new(SettingField { + json_path: Some("terminal.working_directory$"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.project.working_directory - } else { - &None - } + Some(&dynamic_variants::()[ + settings_content + .terminal + .as_ref()? + .project + .working_directory + .as_ref()? + .discriminant() as usize]) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + let Some(value) = value else { + return; + }; + let settings_value = settings_content .terminal .get_or_insert_default() .project .working_directory + .get_or_insert_with(|| settings::WorkingDirectory::CurrentProjectDirectory); + *settings_value = match value { + settings::WorkingDirectoryDiscriminants::CurrentProjectDirectory => { + settings::WorkingDirectory::CurrentProjectDirectory + }, + settings::WorkingDirectoryDiscriminants::FirstProjectDirectory => { + settings::WorkingDirectory::FirstProjectDirectory + }, + settings::WorkingDirectoryDiscriminants::AlwaysHome => { + settings::WorkingDirectory::AlwaysHome + }, + settings::WorkingDirectoryDiscriminants::Always => { + let directory = match settings_value { + settings::WorkingDirectory::Always { .. 
} => return, + _ => String::new(), + }; + settings::WorkingDirectory::Always { directory } + }, + }; }, + }), + metadata: None, + }, + pick_discriminant: |settings_content| { + Some(settings_content.terminal.as_ref()?.project.working_directory.as_ref()?.discriminant() as usize) + }, + fields: dynamic_variants::().into_iter().map(|variant| { + match variant { + settings::WorkingDirectoryDiscriminants::CurrentProjectDirectory => vec![], + settings::WorkingDirectoryDiscriminants::FirstProjectDirectory => vec![], + settings::WorkingDirectoryDiscriminants::AlwaysHome => vec![], + settings::WorkingDirectoryDiscriminants::Always => vec![ + SettingItem { + files: USER | PROJECT, + title: "Directory", + description: "The directory path to use (will be shell expanded).", + field: Box::new(SettingField { + json_path: Some("terminal.working_directory.always"), + pick: |settings_content| { + match settings_content.terminal.as_ref()?.project.working_directory.as_ref() { + Some(settings::WorkingDirectory::Always { directory }) => Some(directory), + _ => None + } + }, + write: |settings_content, value| { + let value = value.unwrap_or_default(); + match settings_content + .terminal + .get_or_insert_default() + .project + .working_directory.as_mut() { + Some(settings::WorkingDirectory::Always { directory }) => *directory = value, + _ => return + } + }, + }), + metadata: None, + } + ], } - .unimplemented(), - ), - metadata: None, - files: USER | LOCAL, + }).collect(), }), SettingsPageItem::SettingItem(SettingItem { title: "Environment Variables", - description: "Key-value pairs to add to the terminal's environment", + description: "Key-value pairs to add to the terminal's environment.", field: Box::new( SettingField { + json_path: Some("terminal.env"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.project.env - } else { - &None - } + settings_content.terminal.as_ref()?.project.env.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() .project - .env + .env = value; }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Detect Virtual Environment", - description: "Activates the python virtual environment, if one is found, in the terminal's working directory", + description: "Activates the Python virtual environment, if one is found, in the terminal's working directory.", field: Box::new( SettingField { + json_path: Some("terminal.detect_venv"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.project.detect_venv - } else { - &None - } + settings_content + .terminal + .as_ref()? + .project + .detect_venv + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() .project - .detect_venv + .detect_venv = value; }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Font"), SettingsPageItem::SettingItem(SettingItem { title: "Font Size", - description: "Font size for terminal text. If not set, defaults to buffer font size", + description: "Font size for terminal text. 
If not set, defaults to buffer font size.", field: Box::new(SettingField { + json_path: Some("terminal.font_size"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.font_size - } else if settings_content.theme.buffer_font_size.is_some() { - &settings_content.theme.buffer_font_size - } else { - &None - } + settings_content + .terminal + .as_ref() + .and_then(|terminal| terminal.font_size.as_ref()) + .or(settings_content.theme.buffer_font_size.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content.terminal.get_or_insert_default().font_size + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().font_size = value; }, }), metadata: None, @@ -4043,24 +4772,21 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Font Family", - description: "Font family for terminal text. If not set, defaults to buffer font family", + description: "Font family for terminal text. If not set, defaults to buffer font family.", field: Box::new(SettingField { + json_path: Some("terminal.font_family"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal - && terminal.font_family.is_some() - { - &terminal.font_family - } else if settings_content.theme.buffer_font_family.is_some() { - &settings_content.theme.buffer_font_family - } else { - &None - } + settings_content + .terminal + .as_ref() + .and_then(|terminal| terminal.font_family.as_ref()) + .or(settings_content.theme.buffer_font_family.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() - .font_family + .font_family = value; }, }), metadata: None, @@ -4068,21 +4794,22 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Font Fallbacks", - description: "Font fallbacks for terminal text. If not set, defaults to buffer font fallbacks", + description: "Font fallbacks for terminal text. 
If not set, defaults to buffer font fallbacks.", field: Box::new( SettingField { + json_path: Some("terminal.font_fallbacks"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.font_fallbacks - } else { - &None - } + settings_content + .terminal + .as_ref() + .and_then(|terminal| terminal.font_fallbacks.as_ref()) + .or(settings_content.theme.buffer_font_fallbacks.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() - .font_fallbacks + .font_fallbacks = value; }, } .unimplemented(), @@ -4092,20 +4819,17 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Font Weight", - description: "Font weight for terminal text in CSS weight units (100-900)", + description: "Font weight for terminal text in CSS weight units (100-900).", field: Box::new(SettingField { + json_path: Some("terminal.font_weight"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.font_weight - } else { - &None - } + settings_content.terminal.as_ref()?.font_weight.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() - .font_weight + .font_weight = value; }, }), metadata: None, @@ -4113,21 +4837,22 @@ pub(crate) fn settings_data() -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Font Features", - description: "Font features for terminal text", + description: "Font features for terminal text.", field: Box::new( SettingField { + json_path: Some("terminal.font_features"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.font_features - } else { - &None - } + settings_content + .terminal + .as_ref() + .and_then(|terminal| terminal.font_features.as_ref()) + .or(settings_content.theme.buffer_font_features.as_ref()) }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() - .font_features + .font_features = value; }, } .unimplemented(), @@ -4138,21 +4863,18 @@ pub(crate) fn settings_data() -> Vec { SettingsPageItem::SectionHeader("Display Settings"), SettingsPageItem::SettingItem(SettingItem { title: "Line Height", - description: "Line height for terminal text", + description: "Line height for terminal text.", field: Box::new( SettingField { + json_path: Some("terminal.line_height"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.line_height - } else { - &None - } + settings_content.terminal.as_ref()?.line_height.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content + write: |settings_content, value| { + settings_content .terminal .get_or_insert_default() - .line_height + .line_height = value; }, } .unimplemented(), @@ -4161,267 +4883,823 @@ pub(crate) fn settings_data() -> Vec { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Cursor Shape", - description: "Default cursor shape for the terminal (bar, block, underline, or hollow)", + title: "Cursor Shape", + description: "Default cursor shape for the terminal (bar, block, underline, or hollow).", + field: Box::new(SettingField { + json_path: Some("terminal.cursor_shape"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.cursor_shape.as_ref() + }, + write: |settings_content, value| { + 
settings_content + .terminal + .get_or_insert_default() + .cursor_shape = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Cursor Blinking", + description: "Sets the cursor blinking behavior in the terminal.", + field: Box::new(SettingField { + json_path: Some("terminal.blinking"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.blinking.as_ref() + }, + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().blinking = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Alternate Scroll", + description: "Whether alternate scroll mode is active by default (converts mouse scroll to arrow keys in apps like Vim).", + field: Box::new(SettingField { + json_path: Some("terminal.alternate_scroll"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? + .alternate_scroll + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .alternate_scroll = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Minimum Contrast", + description: "The minimum APCA perceptual contrast between foreground and background colors (0-106).", + field: Box::new(SettingField { + json_path: Some("terminal.minimum_contrast"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? + .minimum_contrast + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .minimum_contrast = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Behavior Settings"), + SettingsPageItem::SettingItem(SettingItem { + title: "Option As Meta", + description: "Whether the option key behaves as the meta key.", + field: Box::new(SettingField { + json_path: Some("terminal.option_as_meta"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.option_as_meta.as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .option_as_meta = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Copy On Select", + description: "Whether selecting text in the terminal automatically copies to the system clipboard.", + field: Box::new(SettingField { + json_path: Some("terminal.copy_on_select"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.copy_on_select.as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .copy_on_select = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Keep Selection On Copy", + description: "Whether to keep the text selection after copying it to the clipboard.", + field: Box::new(SettingField { + json_path: Some("terminal.keep_selection_on_copy"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? 
+ .keep_selection_on_copy + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .keep_selection_on_copy = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Layout Settings"), + SettingsPageItem::SettingItem(SettingItem { + title: "Default Width", + description: "Default width when the terminal is docked to the left or right (in pixels).", + field: Box::new(SettingField { + json_path: Some("terminal.default_width"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.default_width.as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .default_width = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Default Height", + description: "Default height when the terminal is docked to the bottom (in pixels).", + field: Box::new(SettingField { + json_path: Some("terminal.default_height"), + pick: |settings_content| { + settings_content.terminal.as_ref()?.default_height.as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .default_height = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Advanced Settings"), + SettingsPageItem::SettingItem(SettingItem { + title: "Max Scroll History Lines", + description: "Maximum number of lines to keep in scrollback history (max: 100,000; 0 disables scrolling).", + field: Box::new(SettingField { + json_path: Some("terminal.max_scroll_history_lines"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? + .max_scroll_history_lines + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .max_scroll_history_lines = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Toolbar"), + SettingsPageItem::SettingItem(SettingItem { + title: "Breadcrumbs", + description: "Display the terminal title in breadcrumbs inside the terminal pane.", + field: Box::new(SettingField { + json_path: Some("terminal.toolbar.breadcrumbs"), + pick: |settings_content| { + settings_content + .terminal + .as_ref()? + .toolbar + .as_ref()? + .breadcrumbs + .as_ref() + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .toolbar + .get_or_insert_default() + .breadcrumbs = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Scrollbar"), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Scrollbar", + description: "When to show the scrollbar in the terminal.", + field: Box::new(SettingField { + json_path: Some("terminal.scrollbar.show"), + pick: |settings_content| { + show_scrollbar_or_editor(settings_content, |settings_content| { + settings_content + .terminal + .as_ref()? + .scrollbar + .as_ref()? 
+ .show + .as_ref() + }) + }, + write: |settings_content, value| { + settings_content + .terminal + .get_or_insert_default() + .scrollbar + .get_or_insert_default() + .show = value; + }, + }), + metadata: None, + files: USER, + }), + ], + }, + SettingsPage { + title: "Version Control", + items: vec![ + SettingsPageItem::SectionHeader("Git Gutter"), + SettingsPageItem::SettingItem(SettingItem { + title: "Visibility", + description: "Control whether Git status is shown in the editor's gutter.", + field: Box::new(SettingField { + json_path: Some("git.git_gutter"), + pick: |settings_content| settings_content.git.as_ref()?.git_gutter.as_ref(), + write: |settings_content, value| { + settings_content.git.get_or_insert_default().git_gutter = value; + }, + }), + metadata: None, + files: USER, + }), + // todo(settings_ui): Figure out the right default for this value in default.json + SettingsPageItem::SettingItem(SettingItem { + title: "Debounce", + description: "Debounce threshold in milliseconds after which changes are reflected in the Git gutter.", + field: Box::new(SettingField { + json_path: Some("git.gutter_debounce"), + pick: |settings_content| { + settings_content.git.as_ref()?.gutter_debounce.as_ref() + }, + write: |settings_content, value| { + settings_content.git.get_or_insert_default().gutter_debounce = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Inline Git Blame"), + SettingsPageItem::SettingItem(SettingItem { + title: "Enabled", + description: "Whether or not to show Git blame data inline in the currently focused line.", + field: Box::new(SettingField { + json_path: Some("git.inline_blame.enabled"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .inline_blame + .as_ref()? + .enabled + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .enabled = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Delay", + description: "The delay after which the inline blame information is shown.", + field: Box::new(SettingField { + json_path: Some("git.inline_blame.delay_ms"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .inline_blame + .as_ref()? + .delay_ms + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .delay_ms = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Padding", + description: "Padding between the end of the source line and the start of the inline blame in columns.", + field: Box::new(SettingField { + json_path: Some("git.inline_blame.padding"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .inline_blame + .as_ref()? + .padding + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .padding = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Minimum Column", + description: "The minimum column number at which to show the inline blame information.", + field: Box::new(SettingField { + json_path: Some("git.inline_blame.min_column"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .inline_blame + .as_ref()? 
+ .min_column + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .min_column = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Commit Summary", + description: "Show commit summary as part of the inline blame.", + field: Box::new(SettingField { + json_path: Some("git.inline_blame.show_commit_summary"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .inline_blame + .as_ref()? + .show_commit_summary + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .inline_blame + .get_or_insert_default() + .show_commit_summary = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Git Blame View"), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Avatar", + description: "Show the avatar of the author of the commit.", + field: Box::new(SettingField { + json_path: Some("git.blame.show_avatar"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .blame + .as_ref()? + .show_avatar + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .blame + .get_or_insert_default() + .show_avatar = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Branch Picker"), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Author Name", + description: "Show author name as part of the commit information in branch picker.", + field: Box::new(SettingField { + json_path: Some("git.branch_picker.show_author_name"), + pick: |settings_content| { + settings_content + .git + .as_ref()? + .branch_picker + .as_ref()? 
+ .show_author_name + .as_ref() + }, + write: |settings_content, value| { + settings_content + .git + .get_or_insert_default() + .branch_picker + .get_or_insert_default() + .show_author_name = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Git Hunks"), + SettingsPageItem::SettingItem(SettingItem { + title: "Hunk Style", + description: "How Git hunks are displayed visually in the editor.", + field: Box::new(SettingField { + json_path: Some("git.hunk_style"), + pick: |settings_content| settings_content.git.as_ref()?.hunk_style.as_ref(), + write: |settings_content, value| { + settings_content.git.get_or_insert_default().hunk_style = value; + }, + }), + metadata: None, + files: USER, + }), + ], + }, + SettingsPage { + title: "Collaboration", + items: vec![ + SettingsPageItem::SectionHeader("Calls"), + SettingsPageItem::SettingItem(SettingItem { + title: "Mute On Join", + description: "Whether the microphone should be muted when joining a channel or a call.", + field: Box::new(SettingField { + json_path: Some("calls.mute_on_join"), + pick: |settings_content| { + settings_content.calls.as_ref()?.mute_on_join.as_ref() + }, + write: |settings_content, value| { + settings_content.calls.get_or_insert_default().mute_on_join = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Share On Join", + description: "Whether your current project should be shared when joining an empty channel.", + field: Box::new(SettingField { + json_path: Some("calls.share_on_join"), + pick: |settings_content| { + settings_content.calls.as_ref()?.share_on_join.as_ref() + }, + write: |settings_content, value| { + settings_content.calls.get_or_insert_default().share_on_join = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Experimental"), + SettingsPageItem::SettingItem(SettingItem { + title: "Rodio Audio", + description: "Opt into the new audio system.", + field: Box::new(SettingField { + json_path: Some("audio.experimental.rodio_audio"), + pick: |settings_content| { + settings_content.audio.as_ref()?.rodio_audio.as_ref() + }, + write: |settings_content, value| { + settings_content.audio.get_or_insert_default().rodio_audio = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Auto Microphone Volume", + description: "Automatically adjust microphone volume (requires rodio audio).", field: Box::new(SettingField { + json_path: Some("audio.experimental.auto_microphone_volume"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.cursor_shape - } else { - &None - } + settings_content + .audio + .as_ref()? 
+ .auto_microphone_volume + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + write: |settings_content, value| { + settings_content + .audio .get_or_insert_default() - .cursor_shape + .auto_microphone_volume = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Cursor Blinking", - description: "Sets the cursor blinking behavior in the terminal", + title: "Auto Speaker Volume", + description: "Automatically adjust volume of other call members (requires rodio audio).", field: Box::new(SettingField { + json_path: Some("audio.experimental.auto_speaker_volume"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.blinking - } else { - &None - } + settings_content + .audio + .as_ref()? + .auto_speaker_volume + .as_ref() + }, + write: |settings_content, value| { + settings_content + .audio + .get_or_insert_default() + .auto_speaker_volume = value; }, - pick_mut: |settings_content| { - &mut settings_content.terminal.get_or_insert_default().blinking + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Denoise", + description: "Remove background noises (requires rodio audio).", + field: Box::new(SettingField { + json_path: Some("audio.experimental.denoise"), + pick: |settings_content| settings_content.audio.as_ref()?.denoise.as_ref(), + write: |settings_content, value| { + settings_content.audio.get_or_insert_default().denoise = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Alternate Scroll", - description: "Whether Alternate Scroll mode is active by default (converts mouse scroll to arrow keys in apps like vim)", + title: "Legacy Audio Compatible", + description: "Use audio parameters compatible with previous versions (requires rodio audio).", field: Box::new(SettingField { + json_path: Some("audio.experimental.legacy_audio_compatible"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.alternate_scroll - } else { - &None - } + settings_content + .audio + .as_ref()? + .legacy_audio_compatible + .as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + write: |settings_content, value| { + settings_content + .audio .get_or_insert_default() - .alternate_scroll + .legacy_audio_compatible = value; }, }), metadata: None, files: USER, }), + ], + }, + SettingsPage { + title: "AI", + items: vec![ + SettingsPageItem::SectionHeader("General"), SettingsPageItem::SettingItem(SettingItem { - title: "Minimum Contrast", - description: "The minimum APCA perceptual contrast between foreground and background colors (0-106)", + title: "Disable AI", + description: "Whether to disable all AI features in Zed.", field: Box::new(SettingField { - pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.minimum_contrast - } else { - &None - } + json_path: Some("disable_ai"), + pick: |settings_content| settings_content.disable_ai.as_ref(), + write: |settings_content, value| { + settings_content.disable_ai = value; }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("Agent Configuration"), + SettingsPageItem::SettingItem(SettingItem { + title: "Always Allow Tool Actions", + description: "When enabled, the agent can run potentially destructive actions without asking for your confirmation. 
This setting has no effect on external agents.", + field: Box::new(SettingField { + json_path: Some("agent.always_allow_tool_actions"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .always_allow_tool_actions + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .minimum_contrast + .always_allow_tool_actions = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Behavior Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Option As Meta", - description: "Whether the option key behaves as the meta key", + title: "Single File Review", + description: "When enabled, agent edits will also be displayed in single-file buffers for review.", field: Box::new(SettingField { + json_path: Some("agent.single_file_review"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.option_as_meta - } else { - &None - } + settings_content.agent.as_ref()?.single_file_review.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .option_as_meta + .single_file_review = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Copy On Select", - description: "Whether selecting text in the terminal automatically copies to the system clipboard", + title: "Enable Feedback", + description: "Show voting thumbs up/down icon buttons for feedback on agent edits.", field: Box::new(SettingField { + json_path: Some("agent.enable_feedback"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.copy_on_select - } else { - &None - } + settings_content.agent.as_ref()?.enable_feedback.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .copy_on_select + .enable_feedback = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Keep Selection On Copy", - description: "Whether to keep the text selection after copying it to the clipboard", + title: "Notify When Agent Waiting", + description: "Where to show notifications when the agent has completed its response or needs confirmation before running a tool action.", field: Box::new(SettingField { + json_path: Some("agent.notify_when_agent_waiting"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.keep_selection_on_copy - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + settings_content + .agent + .as_ref()? 
+ .notify_when_agent_waiting + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .keep_selection_on_copy + .notify_when_agent_waiting = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Layout Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Default Width", - description: "Default width when the terminal is docked to the left or right (in pixels)", + title: "Play Sound When Agent Done", + description: "Whether to play a sound when the agent has either completed its response, or needs user input.", field: Box::new(SettingField { + json_path: Some("agent.play_sound_when_agent_done"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.default_width - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + settings_content + .agent + .as_ref()? + .play_sound_when_agent_done + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .default_width + .play_sound_when_agent_done = value; }, }), metadata: None, files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Default Height", - description: "Default height when the terminal is docked to the bottom (in pixels)", + title: "Expand Edit Card", + description: "Whether to have edit cards in the agent panel expanded, showing a Preview of the diff.", field: Box::new(SettingField { + json_path: Some("agent.expand_edit_card"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.default_height - } else { - &None - } + settings_content.agent.as_ref()?.expand_edit_card.as_ref() }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .default_height + .expand_edit_card = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Advanced Settings"), SettingsPageItem::SettingItem(SettingItem { - title: "Max Scroll History Lines", - description: "Maximum number of lines to keep in scrollback history (max: 100,000; 0 disables scrolling)", + title: "Expand Terminal Card", + description: "Whether to have terminal cards in the agent panel expanded, showing the whole command output.", field: Box::new(SettingField { + json_path: Some("agent.expand_terminal_card"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - &terminal.max_scroll_history_lines - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + settings_content + .agent + .as_ref()? 
+ .expand_terminal_card + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .max_scroll_history_lines + .expand_terminal_card = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Toolbar"), SettingsPageItem::SettingItem(SettingItem { - title: "Breadcrumbs", - description: "Whether to display the terminal title in breadcrumbs inside the terminal pane", + title: "Use Modifier To Send", + description: "Whether to always use cmd-enter (or ctrl-enter on Linux or Windows) to send messages.", field: Box::new(SettingField { + json_path: Some("agent.use_modifier_to_send"), pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal { - if let Some(toolbar) = &terminal.toolbar { - &toolbar.breadcrumbs - } else { - &None - } - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .terminal + settings_content + .agent + .as_ref()? + .use_modifier_to_send + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .toolbar - .get_or_insert_default() - .breadcrumbs + .use_modifier_to_send = value; }, }), metadata: None, files: USER, }), - SettingsPageItem::SectionHeader("Scrollbar"), SettingsPageItem::SettingItem(SettingItem { - title: "Show Scrollbar", - description: "When to show the scrollbar in the terminal", - field: Box::new(SettingField { - pick: |settings_content| { - if let Some(terminal) = &settings_content.terminal - && let Some(scrollbar) = &terminal.scrollbar - && scrollbar.show.is_some() - { - &scrollbar.show - } else if let Some(scrollbar) = &settings_content.editor.scrollbar { - &scrollbar.show - } else { - &None - } - }, - pick_mut: |settings_content| { - &mut settings_content - .terminal - .get_or_insert_default() - .scrollbar + title: "Message Editor Min Lines", + description: "Minimum number of lines to display in the agent message editor.", + field: Box::new(SettingField { + json_path: Some("agent.message_editor_min_lines"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? 
+ .message_editor_min_lines + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent .get_or_insert_default() - .show + .message_editor_min_lines = value; }, }), metadata: None, @@ -4429,1230 +5707,1426 @@ pub(crate) fn settings_data() -> Vec { }), ], }, + SettingsPage { + title: "Network", + items: vec![ + SettingsPageItem::SectionHeader("Network"), + // todo(settings_ui): Proxy needs a default + SettingsPageItem::SettingItem(SettingItem { + title: "Proxy", + description: "The proxy to use for network requests.", + field: Box::new( + SettingField { + json_path: Some("proxy"), + pick: |settings_content| settings_content.proxy.as_ref(), + write: |settings_content, value| { + settings_content.proxy = value; + }, + } + .unimplemented(), + ), + metadata: Some(Box::new(SettingsFieldMetadata { + placeholder: Some("socks5h://localhost:10808"), + ..Default::default() + })), + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Server URL", + description: "The URL of the Zed server to connect to.", + field: Box::new(SettingField { + json_path: Some("server_url"), + pick: |settings_content| settings_content.server_url.as_ref(), + write: |settings_content, value| { + settings_content.server_url = value; + }, + }), + metadata: Some(Box::new(SettingsFieldMetadata { + placeholder: Some("https://zed.dev"), + ..Default::default() + })), + files: USER, + }), + ], + }, ] } const LANGUAGES_SECTION_HEADER: &'static str = "Languages"; -fn language_settings_data() -> Vec { - fn current_language() -> Option { - sub_page_stack().iter().find_map(|page| { - (page.section_header == LANGUAGES_SECTION_HEADER) - .then(|| SharedString::new_static(page.link.title)) - }) - } +fn current_language() -> Option { + sub_page_stack().iter().find_map(|page| { + (page.section_header == LANGUAGES_SECTION_HEADER).then(|| page.link.title.clone()) + }) +} - fn language_settings_field( - settings_content: &SettingsContent, - get: fn(&LanguageSettingsContent) -> &Option, - ) -> &Option { - let all_languages = &settings_content.project.all_languages; - if let Some(current_language_name) = current_language() { - if let Some(current_language) = all_languages.languages.0.get(¤t_language_name) { - let value = get(current_language); - if value.is_some() { - return value; - } +fn language_settings_field( + settings_content: &SettingsContent, + get: fn(&LanguageSettingsContent) -> Option<&T>, +) -> Option<&T> { + let all_languages = &settings_content.project.all_languages; + if let Some(current_language_name) = current_language() { + if let Some(current_language) = all_languages.languages.0.get(¤t_language_name) { + let value = get(current_language); + if value.is_some() { + return value; } } - let default_value = get(&all_languages.defaults); - return default_value; } + let default_value = get(&all_languages.defaults); + return default_value; +} - fn language_settings_field_mut( - settings_content: &mut SettingsContent, - get: fn(&mut LanguageSettingsContent) -> &mut Option, - ) -> &mut Option { - let all_languages = &mut settings_content.project.all_languages; - let language_content = if let Some(current_language) = current_language() { - all_languages - .languages - .0 - .entry(current_language) - .or_default() - } else { - &mut all_languages.defaults - }; - return get(language_content); - } +fn language_settings_field_mut( + settings_content: &mut SettingsContent, + value: Option, + write: fn(&mut LanguageSettingsContent, Option), +) { + let all_languages = &mut 
settings_content.project.all_languages; + let language_content = if let Some(current_language) = current_language() { + all_languages + .languages + .0 + .entry(current_language) + .or_default() + } else { + &mut all_languages.defaults + }; + write(language_content, value); +} - vec![ +fn language_settings_data() -> Vec { + let mut items = vec![ SettingsPageItem::SectionHeader("Indentation"), SettingsPageItem::SettingItem(SettingItem { title: "Tab Size", - description: "How many columns a tab should occupy", + description: "How many columns a tab should occupy.", field: Box::new(SettingField { + json_path: Some("languages.$(language).tab_size"), // TODO(cameron): not JQ syntax because not URL-safe pick: |settings_content| { - language_settings_field(settings_content, |language| &language.tab_size) + language_settings_field(settings_content, |language| language.tab_size.as_ref()) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| &mut language.tab_size) + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.tab_size = value; + }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Hard Tabs", - description: "Whether to indent lines using tab characters, as opposed to multiple spaces", + description: "Whether to indent lines using tab characters, as opposed to multiple spaces.", field: Box::new(SettingField { + json_path: Some("languages.$(language).hard_tabs"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.hard_tabs) + language_settings_field(settings_content, |language| { + language.hard_tabs.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.hard_tabs + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.hard_tabs = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Auto Indent", - description: "Whether indentation should be adjusted based on the context whilst typing", + description: "Whether indentation should be adjusted based on the context whilst typing.", field: Box::new(SettingField { + json_path: Some("languages.$(language).auto_indent"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.auto_indent) + language_settings_field(settings_content, |language| { + language.auto_indent.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.auto_indent + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.auto_indent = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Auto Indent On Paste", - description: "Whether indentation of pasted content should be adjusted based on the context", + description: "Whether indentation of pasted content should be adjusted based on the context.", field: Box::new(SettingField { + json_path: Some("languages.$(language).auto_indent_on_paste"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.auto_indent_on_paste + language.auto_indent_on_paste.as_ref() }) }, - pick_mut: |settings_content| { - 
language_settings_field_mut(settings_content, |language| { - &mut language.auto_indent_on_paste + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.auto_indent_on_paste = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Wrapping"), SettingsPageItem::SettingItem(SettingItem { title: "Soft Wrap", - description: "How to soft-wrap long lines of text", + description: "How to soft-wrap long lines of text.", field: Box::new(SettingField { + json_path: Some("languages.$(language).soft_wrap"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.soft_wrap) + language_settings_field(settings_content, |language| { + language.soft_wrap.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.soft_wrap + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.soft_wrap = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Show Wrap Guides", - description: "Whether to show wrap guides in the editor", + description: "Show wrap guides in the editor.", field: Box::new(SettingField { + json_path: Some("languages.$(language).show_wrap_guides"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.show_wrap_guides) + language_settings_field(settings_content, |language| { + language.show_wrap_guides.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.show_wrap_guides + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_wrap_guides = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Preferred Line Length", - description: "The column at which to soft-wrap lines, for buffers where soft-wrap is enabled", + description: "The column at which to soft-wrap lines, for buffers where soft-wrap is enabled.", field: Box::new(SettingField { + json_path: Some("languages.$(language).preferred_line_length"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.preferred_line_length + language.preferred_line_length.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.preferred_line_length + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.preferred_line_length = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Wrap Guides", - description: "Character counts at which to show wrap guides in the editor", + description: "Character counts at which to show wrap guides in the editor.", field: Box::new( SettingField { + json_path: Some("languages.$(language).wrap_guides"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.wrap_guides) + language_settings_field(settings_content, |language| { + language.wrap_guides.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.wrap_guides + write: 
|settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.wrap_guides = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Allow Rewrap", - description: "Controls where the `editor::Rewrap` action is allowed for this language", + description: "Controls where the `editor::Rewrap` action is allowed for this language.", field: Box::new(SettingField { + json_path: Some("languages.$(language).allow_rewrap"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.allow_rewrap) + language_settings_field(settings_content, |language| { + language.allow_rewrap.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.allow_rewrap + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.allow_rewrap = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Indent Guides"), SettingsPageItem::SettingItem(SettingItem { title: "Enabled", - description: "Whether to display indent guides in the editor", + description: "Display indent guides in the editor.", field: Box::new(SettingField { + json_path: Some("languages.$(language).indent_guides.enabled"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(indent_guides) = &language.indent_guides { - &indent_guides.enabled - } else { - &None - } + language + .indent_guides + .as_ref() + .and_then(|indent_guides| indent_guides.enabled.as_ref()) }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.indent_guides.get_or_insert_default().enabled + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.indent_guides.get_or_insert_default().enabled = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Line Width", - description: "The width of the indent guides in pixels, between 1 and 10", + description: "The width of the indent guides in pixels, between 1 and 10.", field: Box::new(SettingField { + json_path: Some("languages.$(language).indent_guides.line_width"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(indent_guides) = &language.indent_guides { - &indent_guides.line_width - } else { - &None - } + language + .indent_guides + .as_ref() + .and_then(|indent_guides| indent_guides.line_width.as_ref()) }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.indent_guides.get_or_insert_default().line_width + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.indent_guides.get_or_insert_default().line_width = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Active Line Width", - description: "The width of the active indent guide in pixels, between 1 and 10", + description: "The width of the active indent guide in pixels, between 1 and 10.", field: Box::new(SettingField { + json_path: Some("languages.$(language).indent_guides.active_line_width"), pick: 
|settings_content| { language_settings_field(settings_content, |language| { - if let Some(indent_guides) = &language.indent_guides { - &indent_guides.active_line_width - } else { - &None - } + language + .indent_guides + .as_ref() + .and_then(|indent_guides| indent_guides.active_line_width.as_ref()) }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language .indent_guides .get_or_insert_default() - .active_line_width + .active_line_width = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Coloring", - description: "Determines how indent guides are colored", + description: "Determines how indent guides are colored.", field: Box::new(SettingField { + json_path: Some("languages.$(language).indent_guides.coloring"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(indent_guides) = &language.indent_guides { - &indent_guides.coloring - } else { - &None - } + language + .indent_guides + .as_ref() + .and_then(|indent_guides| indent_guides.coloring.as_ref()) }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.indent_guides.get_or_insert_default().coloring + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.indent_guides.get_or_insert_default().coloring = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Background Coloring", - description: "Determines how indent guide backgrounds are colored", + description: "Determines how indent guide backgrounds are colored.", field: Box::new(SettingField { + json_path: Some("languages.$(language).indent_guides.background_coloring"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(indent_guides) = &language.indent_guides { - &indent_guides.background_coloring - } else { - &None - } + language + .indent_guides + .as_ref() + .and_then(|indent_guides| indent_guides.background_coloring.as_ref()) }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language .indent_guides .get_or_insert_default() - .background_coloring + .background_coloring = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Formatting"), SettingsPageItem::SettingItem(SettingItem { title: "Format On Save", - description: "Whether or not to perform a buffer format before saving", + description: "Whether or not to perform a buffer format before saving.", field: Box::new( // TODO(settings_ui): this setting should just be a bool SettingField { + json_path: Some("languages.$(language).format_on_save"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.format_on_save + language.format_on_save.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.format_on_save + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + 
language.format_on_save = value; }) }, }, ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Remove Trailing Whitespace On Save", - description: "Whether or not to remove any trailing whitespace from lines of a buffer before saving it", + description: "Whether or not to remove any trailing whitespace from lines of a buffer before saving it.", field: Box::new(SettingField { + json_path: Some("languages.$(language).remove_trailing_whitespace_on_save"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.remove_trailing_whitespace_on_save + language.remove_trailing_whitespace_on_save.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.remove_trailing_whitespace_on_save + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.remove_trailing_whitespace_on_save = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Ensure Final Newline On Save", - description: "Whether or not to ensure there's a single newline at the end of a buffer when saving it", + description: "Whether or not to ensure there's a single newline at the end of a buffer when saving it.", field: Box::new(SettingField { + json_path: Some("languages.$(language).ensure_final_newline_on_save"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.ensure_final_newline_on_save + language.ensure_final_newline_on_save.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.ensure_final_newline_on_save + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.ensure_final_newline_on_save = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Formatter", - description: "How to perform a buffer format", + description: "How to perform a buffer format.", field: Box::new( SettingField { + json_path: Some("languages.$(language).formatter"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.formatter) + language_settings_field(settings_content, |language| { + language.formatter.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.formatter + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.formatter = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Use On Type Format", description: "Whether to use additional LSP queries to format (and amend) the code after every \"trigger\" symbol input, defined by LSP server capabilities", field: Box::new(SettingField { + json_path: Some("languages.$(language).use_on_type_format"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.use_on_type_format - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.use_on_type_format - }) - }, - }), - metadata: None, - files: USER | LOCAL, - }), - 
SettingsPageItem::SectionHeader("Prettier"), - SettingsPageItem::SettingItem(SettingItem { - title: "Allowed", - description: "Enables or disables formatting with Prettier for a given language", - field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - if let Some(prettier) = &language.prettier { - &prettier.allowed - } else { - &None - } - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.prettier.get_or_insert_default().allowed - }) - }, - }), - metadata: None, - files: USER | LOCAL, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Parser", - description: "Forces Prettier integration to use a specific parser name when formatting files with the language", - field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - if let Some(prettier) = &language.prettier { - &prettier.parser - } else { - &None - } + language.use_on_type_format.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.prettier.get_or_insert_default().parser + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.use_on_type_format = value; }) }, }), metadata: None, - files: USER | LOCAL, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Plugins", - description: "Forces Prettier integration to use specific plugins when formatting files with the language", - field: Box::new( - SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - if let Some(prettier) = &language.prettier { - &prettier.plugins - } else { - &None - } - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.prettier.get_or_insert_default().plugins - }) - }, - } - .unimplemented(), - ), - metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Options", - description: "Default Prettier options, in the format as in package.json section for Prettier", + title: "Code Actions On Format", + description: "Additional code actions to run when formatting.", field: Box::new( SettingField { + json_path: Some("languages.$(language).code_actions_on_format"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(prettier) = &language.prettier { - &prettier.options - } else { - &None - } + language.code_actions_on_format.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.prettier.get_or_insert_default().options + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.code_actions_on_format = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Autoclose"), SettingsPageItem::SettingItem(SettingItem { title: "Use Autoclose", - description: "Whether to automatically type closing characters for you. 
For example, when you type (, Zed will automatically add a closing ) at the correct position", - field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| &language.use_autoclose) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.use_autoclose - }) - }, - }), - metadata: None, - files: USER | LOCAL, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Use Auto Surround", - description: "Whether to automatically surround text with characters for you. For example, when you select text and type (, Zed will automatically surround text with ()", - field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - &language.use_auto_surround - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.use_auto_surround - }) - }, - }), - metadata: None, - files: USER | LOCAL, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Always Treat Brackets As Autoclosed", - description: "Controls whether the closing characters are always skipped over and auto-removed no matter how they were inserted", - field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - &language.always_treat_brackets_as_autoclosed - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.always_treat_brackets_as_autoclosed - }) - }, - }), - metadata: None, - files: USER | LOCAL, - }), - SettingsPageItem::SettingItem(SettingItem { - title: "Jsx Tag Auto Close", - description: "Whether to automatically close JSX tags", + description: "Whether to automatically type closing characters for you. For example, when you type '(', Zed will automatically add a closing ')' at the correct position.", field: Box::new(SettingField { - // TODO(settings_ui): this setting should just be a bool + json_path: Some("languages.$(language).use_autoclose"), pick: |settings_content| { language_settings_field(settings_content, |language| { - match language.jsx_tag_auto_close.as_ref() { - Some(s) => &s.enabled, - None => &None, - } + language.use_autoclose.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.jsx_tag_auto_close.get_or_insert_default().enabled + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.use_autoclose = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("LSP"), SettingsPageItem::SettingItem(SettingItem { - title: "Enable Language Server", - description: "Whether to use language servers to provide code intelligence", + title: "Use Auto Surround", + description: "Whether to automatically surround text with characters for you. 
For example, when you select text and type '(', Zed will automatically surround text with ().", field: Box::new(SettingField { + json_path: Some("languages.$(language).use_auto_surround"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.enable_language_server + language.use_auto_surround.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.enable_language_server + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.use_auto_surround = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Language Servers", - description: "The list of language servers to use (or disable) for this language", - field: Box::new( - SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - &language.language_servers - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.language_servers - }) - }, - } - .unimplemented(), - ), + title: "Always Treat Brackets As Autoclosed", + description: "Controls whether the closing characters are always skipped over and auto-removed no matter how they were inserted.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).always_treat_brackets_as_autoclosed"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.always_treat_brackets_as_autoclosed.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.always_treat_brackets_as_autoclosed = value; + }) + }, + }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Linked Edits", - description: "Whether to perform linked edits of associated ranges, if the LS supports it. 
For example, when editing opening tag, the contents of the closing tag will be edited as well", + title: "Jsx Tag Auto Close", + description: "Whether to automatically close JSX tags.", field: Box::new(SettingField { + json_path: Some("languages.$(language).jsx_tag_auto_close"), + // TODO(settings_ui): this setting should just be a bool pick: |settings_content| { - language_settings_field(settings_content, |language| &language.linked_edits) + language_settings_field(settings_content, |language| { + language.jsx_tag_auto_close.as_ref()?.enabled.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.linked_edits + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.jsx_tag_auto_close.get_or_insert_default().enabled = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Edit Predictions"), SettingsPageItem::SettingItem(SettingItem { title: "Show Edit Predictions", - description: "Controls whether edit predictions are shown immediately (true) or manually by triggering `editor::ShowEditPrediction` (false)", + description: "Controls whether edit predictions are shown immediately (true) or manually by triggering `editor::ShowEditPrediction` (false).", field: Box::new(SettingField { + json_path: Some("languages.$(language).show_edit_predictions"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.show_edit_predictions + language.show_edit_predictions.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.show_edit_predictions + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_edit_predictions = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Edit Predictions Disabled In", - description: "Controls whether edit predictions are shown in the given language scopes", + description: "Controls whether edit predictions are shown in the given language scopes.", field: Box::new( SettingField { + json_path: Some("languages.$(language).edit_predictions_disabled_in"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.edit_predictions_disabled_in + language.edit_predictions_disabled_in.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.edit_predictions_disabled_in + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.edit_predictions_disabled_in = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Whitespace"), SettingsPageItem::SettingItem(SettingItem { title: "Show Whitespaces", - description: "Whether to show tabs and spaces in the editor", + description: "Whether to show tabs and spaces in the editor.", field: Box::new(SettingField { + json_path: Some("languages.$(language).show_whitespaces"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.show_whitespaces) + language_settings_field(settings_content, |language| { + language.show_whitespaces.as_ref() + }) }, - pick_mut: |settings_content| { - 
language_settings_field_mut(settings_content, |language| { - &mut language.show_whitespaces + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_whitespaces = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Space Whitespace Indicator", description: "Visible character used to render space characters when show_whitespaces is enabled (default: \"•\")", field: Box::new( SettingField { + json_path: Some("languages.$(language).whitespace_map.space"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(whitespace_map) = &language.whitespace_map { - &whitespace_map.space - } else { - &None - } + language.whitespace_map.as_ref()?.space.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.whitespace_map.get_or_insert_default().space + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.whitespace_map.get_or_insert_default().space = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Tab Whitespace Indicator", description: "Visible character used to render tab characters when show_whitespaces is enabled (default: \"→\")", field: Box::new( SettingField { + json_path: Some("languages.$(language).whitespace_map.tab"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(whitespace_map) = &language.whitespace_map { - &whitespace_map.tab - } else { - &None - } + language.whitespace_map.as_ref()?.tab.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.whitespace_map.get_or_insert_default().tab + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.whitespace_map.get_or_insert_default().tab = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SectionHeader("Completions"), SettingsPageItem::SettingItem(SettingItem { title: "Show Completions On Input", - description: "Whether to pop the completions menu while typing in an editor without explicitly requesting it", + description: "Whether to pop the completions menu while typing in an editor without explicitly requesting it.", field: Box::new(SettingField { + json_path: Some("languages.$(language).show_completions_on_input"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.show_completions_on_input + language.show_completions_on_input.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.show_completions_on_input + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_completions_on_input = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Show Completion Documentation", - description: "Whether to display inline and alongside documentation for items in the completions menu", + description: "Whether to display inline and alongside documentation for items in the completions menu.", field: 
Box::new(SettingField { + json_path: Some("languages.$(language).show_completion_documentation"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.show_completion_documentation + language.show_completion_documentation.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.show_completion_documentation + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.show_completion_documentation = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Words", - description: "Controls how words are completed", + description: "Controls how words are completed.", field: Box::new(SettingField { + json_path: Some("languages.$(language).completions.words"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(completions) = &language.completions { - &completions.words - } else { - &None - } + language.completions.as_ref()?.words.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.completions.get_or_insert_default().words + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.completions.get_or_insert_default().words = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { title: "Words Min Length", - description: "How many characters has to be in the completions query to automatically show the words-based completions", + description: "How many characters have to be in the completions query to automatically show the words-based completions.", field: Box::new(SettingField { + json_path: Some("languages.$(language).completions.words_min_length"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(completions) = &language.completions { - &completions.words_min_length - } else { - &None - } + language.completions.as_ref()?.words_min_length.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language .completions .get_or_insert_default() - .words_min_length + .words_min_length = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), + SettingsPageItem::SectionHeader("Inlay Hints"), SettingsPageItem::SettingItem(SettingItem { - title: "Lsp", - description: "Whether to fetch LSP completions or not", + title: "Enabled", + description: "Global switch to toggle hints on and off.", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.enabled"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(completions) = &language.completions { - &completions.lsp - } else { - &None - } + language.inlay_hints.as_ref()?.enabled.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.completions.get_or_insert_default().lsp + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.inlay_hints.get_or_insert_default().enabled = value; }) }, }), metadata: None, 
- files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Lsp Fetch Timeout Ms", - description: "When fetching LSP completions, determines how long to wait for a response of a particular server (set to 0 to wait indefinitely)", + title: "Show Value Hints", + description: "Global switch to toggle inline values on and off when debugging.", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.show_value_hints"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(completions) = &language.completions { - &completions.lsp_fetch_timeout_ms - } else { - &None - } + language.inlay_hints.as_ref()?.show_value_hints.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language - .completions + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language + .inlay_hints .get_or_insert_default() - .lsp_fetch_timeout_ms + .show_value_hints = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Lsp Insert Mode", - description: "Controls how LSP completions are inserted", + title: "Show Type Hints", + description: "Whether type hints should be shown.", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.show_type_hints"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(completions) = &language.completions { - &completions.lsp_insert_mode - } else { - &None - } + language.inlay_hints.as_ref()?.show_type_hints.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.completions.get_or_insert_default().lsp_insert_mode + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.inlay_hints.get_or_insert_default().show_type_hints = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("Inlay Hints"), SettingsPageItem::SettingItem(SettingItem { - title: "Enabled", - description: "Global switch to toggle hints on and off", + title: "Show Parameter Hints", + description: "Whether parameter hints should be shown.", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.show_parameter_hints"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.enabled - } else { - &None - } + language.inlay_hints.as_ref()?.show_parameter_hints.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.inlay_hints.get_or_insert_default().enabled + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language + .inlay_hints + .get_or_insert_default() + .show_parameter_hints = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Value Hints", - description: "Global switch to toggle inline values on and off when debugging", + title: "Show Other Hints", + description: "Whether other hints should be shown.", field: Box::new(SettingField { + json_path: 
Some("languages.$(language).inlay_hints.show_other_hints"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.show_value_hints - } else { - &None - } + language.inlay_hints.as_ref()?.show_other_hints.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language .inlay_hints .get_or_insert_default() - .show_value_hints + .show_other_hints = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Type Hints", - description: "Whether type hints should be shown", + title: "Show Background", + description: "Show a background for inlay hints.", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.show_background"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.show_type_hints - } else { - &None - } + language.inlay_hints.as_ref()?.show_background.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.inlay_hints.get_or_insert_default().show_type_hints + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.inlay_hints.get_or_insert_default().show_background = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Parameter Hints", - description: "Whether parameter hints should be shown", + title: "Edit Debounce Ms", + description: "Whether or not to debounce inlay hints updates after buffer edits (set to 0 to disable debouncing).", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.edit_debounce_ms"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.show_parameter_hints - } else { - &None - } + language.inlay_hints.as_ref()?.edit_debounce_ms.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language .inlay_hints .get_or_insert_default() - .show_parameter_hints + .edit_debounce_ms = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Other Hints", - description: "Whether other hints should be shown", + title: "Scroll Debounce Ms", + description: "Whether or not to debounce inlay hints updates after buffer scrolls (set to 0 to disable debouncing).", field: Box::new(SettingField { + json_path: Some("languages.$(language).inlay_hints.scroll_debounce_ms"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.show_other_hints - } else { - &None - } + language.inlay_hints.as_ref()?.scroll_debounce_ms.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language + write: |settings_content, value| { + 
language_settings_field_mut(settings_content, value, |language, value| { + language .inlay_hints .get_or_insert_default() - .show_other_hints + .scroll_debounce_ms = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Show Background", - description: "Whether to show a background for inlay hints", + title: "Toggle On Modifiers Press", + description: "Toggles inlay hints (hides or shows) when the user presses the modifiers specified.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).inlay_hints.toggle_on_modifiers_press"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language + .inlay_hints + .as_ref()? + .toggle_on_modifiers_press + .as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language + .inlay_hints + .get_or_insert_default() + .toggle_on_modifiers_press = value; + }) + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + ]; + if current_language().is_none() { + items.push(SettingsPageItem::SettingItem(SettingItem { + title: "LSP Document Colors", + description: "How to render LSP color previews in the editor.", field: Box::new(SettingField { - pick: |settings_content| { - language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.show_background - } else { - &None - } - }) - }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.inlay_hints.get_or_insert_default().show_background - }) + json_path: Some("lsp_document_colors"), + pick: |settings_content| settings_content.editor.lsp_document_colors.as_ref(), + write: |settings_content, value| { + settings_content.editor.lsp_document_colors = value; }, }), metadata: None, - files: USER | LOCAL, - }), + files: USER, + })) + } + items.extend([ + SettingsPageItem::SectionHeader("Tasks"), SettingsPageItem::SettingItem(SettingItem { - title: "Edit Debounce Ms", - description: "Whether or not to debounce inlay hints updates after buffer edits (set to 0 to disable debouncing)", + title: "Enabled", + description: "Whether tasks are enabled for this language.", field: Box::new(SettingField { + json_path: Some("languages.$(language).tasks.enabled"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.edit_debounce_ms - } else { - &None - } + language.tasks.as_ref()?.enabled.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language - .inlay_hints - .get_or_insert_default() - .edit_debounce_ms + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.tasks.get_or_insert_default().enabled = value; + }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Scroll Debounce Ms", - description: "Whether or not to debounce inlay hints updates after buffer scrolls (set to 0 to disable debouncing)", + title: "Variables", + description: "Extra task variables to set for a particular language.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).tasks.variables"), + pick: |settings_content| { + language_settings_field(settings_content, |language| 
{ + language.tasks.as_ref()?.variables.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.tasks.get_or_insert_default().variables = value; + + }) + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Prefer LSP", + description: "Use LSP tasks over Zed language extension tasks.", field: Box::new(SettingField { + json_path: Some("languages.$(language).tasks.prefer_lsp"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.scroll_debounce_ms - } else { - &None - } + language.tasks.as_ref()?.prefer_lsp.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language - .inlay_hints - .get_or_insert_default() - .scroll_debounce_ms + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.tasks.get_or_insert_default().prefer_lsp = value; + }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), + SettingsPageItem::SectionHeader("Miscellaneous"), SettingsPageItem::SettingItem(SettingItem { - title: "Toggle On Modifiers Press", - description: "Toggles inlay hints (hides or shows) when the user presses the modifiers specified", + title: "Debuggers", + description: "Preferred debuggers for this language.", field: Box::new( SettingField { + json_path: Some("languages.$(language).debuggers"), pick: |settings_content| { - language_settings_field(settings_content, |language| { - if let Some(inlay_hints) = &language.inlay_hints { - &inlay_hints.toggle_on_modifiers_press - } else { - &None - } - }) + language_settings_field(settings_content, |language| language.debuggers.as_ref()) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language - .inlay_hints - .get_or_insert_default() - .toggle_on_modifiers_press + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.debuggers = value; + }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("Tasks"), SettingsPageItem::SettingItem(SettingItem { - title: "Enabled", - description: "Whether tasks are enabled for this language", + title: "Middle Click Paste", + description: "Enable middle-click paste on Linux.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).editor.middle_click_paste"), + pick: |settings_content| settings_content.editor.middle_click_paste.as_ref(), + write: |settings_content, value| {settings_content.editor.middle_click_paste = value;}, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Extend Comment On Newline", + description: "Whether to start a new line with a comment when a previous line is a comment as well.", field: Box::new(SettingField { + json_path: Some("languages.$(language).extend_comment_on_newline"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(tasks) = &language.tasks { - &tasks.enabled - } else { - &None - } + language.extend_comment_on_newline.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + 
language.extend_comment_on_newline = value; + + }) + }, + }), + metadata: None, + files: USER | PROJECT, + }), + ]); + + if current_language().is_none() { + items.extend([ + SettingsPageItem::SettingItem(SettingItem { + title: "Image Viewer", + description: "The unit for image file sizes.", + field: Box::new(SettingField { + json_path: Some("image_viewer.unit"), + pick: |settings_content| { + settings_content.image_viewer.as_ref().and_then(|image_viewer| image_viewer.unit.as_ref()) + }, + write: |settings_content, value| { + settings_content.image_viewer.get_or_insert_default().unit = value; + + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Auto Replace Emoji Shortcode", + description: "Whether to automatically replace emoji shortcodes with emoji characters.", + field: Box::new(SettingField { + json_path: Some("message_editor.auto_replace_emoji_shortcode"), + pick: |settings_content| { + settings_content.message_editor.as_ref().and_then(|message_editor| message_editor.auto_replace_emoji_shortcode.as_ref()) + }, + write: |settings_content, value| { + settings_content.message_editor.get_or_insert_default().auto_replace_emoji_shortcode = value; + + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Drop Target Size", + description: "Relative size of the drop target in the editor that will open a dropped file as a split pane.", + field: Box::new(SettingField { + json_path: Some("drop_target_size"), + pick: |settings_content| { + settings_content.workspace.drop_target_size.as_ref() + }, + write: |settings_content, value| { + settings_content.workspace.drop_target_size = value; + + }, + }), + metadata: None, + files: USER, + }), + ]); + } + items +} + +/// LanguageSettings items that should be included in the "Languages & Tools" page, +/// not the "Editor" page +fn non_editor_language_settings_data() -> Vec { + vec![ + SettingsPageItem::SectionHeader("LSP"), + SettingsPageItem::SettingItem(SettingItem { + title: "Enable Language Server", + description: "Whether to use language servers to provide code intelligence.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).enable_language_server"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.enable_language_server.as_ref() + }) + }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.tasks.get_or_insert_default().enabled + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.enable_language_server = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Variables", - description: "Extra task variables to set for a particular language", + title: "Language Servers", + description: "The list of language servers to use (or disable) for this language.", field: Box::new( SettingField { + json_path: Some("languages.$(language).language_servers"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(tasks) = &language.tasks { - &tasks.variables - } else { - &None - } + language.language_servers.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.tasks.get_or_insert_default().variables + write: |settings_content, value| { + 
language_settings_field_mut(settings_content, value, |language, value| { + language.language_servers = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Prefer Lsp", - description: "Use LSP tasks over Zed language extension ones", + title: "Linked Edits", + description: "Whether to perform linked edits of associated ranges, if the LS supports it. For example, when editing opening tag, the contents of the closing tag will be edited as well.", field: Box::new(SettingField { + json_path: Some("languages.$(language).linked_edits"), pick: |settings_content| { language_settings_field(settings_content, |language| { - if let Some(tasks) = &language.tasks { - &tasks.prefer_lsp - } else { - &None - } + language.linked_edits.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.tasks.get_or_insert_default().prefer_lsp + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.linked_edits = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, }), - SettingsPageItem::SectionHeader("Miscellaneous"), + SettingsPageItem::SettingItem(SettingItem { + title: "Go To Definition Fallback", + description: "Whether to follow-up empty Go to definition responses from the language server.", + field: Box::new(SettingField { + json_path: Some("go_to_definition_fallback"), + pick: |settings_content| settings_content.editor.go_to_definition_fallback.as_ref(), + write: |settings_content, value| { + settings_content.editor.go_to_definition_fallback = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SectionHeader("LSP Completions"), + SettingsPageItem::SettingItem(SettingItem { + title: "Enabled", + description: "Whether to fetch LSP completions or not.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).completions.lsp"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.completions.as_ref()?.lsp.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.completions.get_or_insert_default().lsp = value; + }) + }, + }), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Fetch Timeout (milliseconds)", + description: "When fetching LSP completions, determines how long to wait for a response of a particular server (set to 0 to wait indefinitely).", + field: Box::new(SettingField { + json_path: Some("languages.$(language).completions.lsp_fetch_timeout_ms"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.completions.as_ref()?.lsp_fetch_timeout_ms.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language + .completions + .get_or_insert_default() + .lsp_fetch_timeout_ms = value; + }) + }, + }), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Insert Mode", + description: "Controls how LSP completions are inserted.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).completions.lsp_insert_mode"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + 
language.completions.as_ref()?.lsp_insert_mode.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.completions.get_or_insert_default().lsp_insert_mode = value; + }) + }, + }), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SectionHeader("Debuggers"), SettingsPageItem::SettingItem(SettingItem { title: "Debuggers", - description: "Preferred debuggers for this language", + description: "Preferred debuggers for this language.", field: Box::new( SettingField { + json_path: Some("languages.$(language).debuggers"), pick: |settings_content| { - language_settings_field(settings_content, |language| &language.debuggers) + language_settings_field(settings_content, |language| { + language.debuggers.as_ref() + }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.debuggers + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.debuggers = value; }) }, } .unimplemented(), ), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, + }), + SettingsPageItem::SectionHeader("Prettier"), + SettingsPageItem::SettingItem(SettingItem { + title: "Allowed", + description: "Enables or disables formatting with Prettier for a given language.", + field: Box::new(SettingField { + json_path: Some("languages.$(language).prettier.allowed"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.prettier.as_ref()?.allowed.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.prettier.get_or_insert_default().allowed = value; + }) + }, + }), + metadata: None, + files: USER | PROJECT, }), SettingsPageItem::SettingItem(SettingItem { - title: "Extend Comment On Newline", - description: "Whether to start a new line with a comment when a previous line is a comment as well", + title: "Parser", + description: "Forces Prettier integration to use a specific parser name when formatting files with the language.", field: Box::new(SettingField { + json_path: Some("languages.$(language).prettier.parser"), pick: |settings_content| { language_settings_field(settings_content, |language| { - &language.extend_comment_on_newline + language.prettier.as_ref()?.parser.as_ref() }) }, - pick_mut: |settings_content| { - language_settings_field_mut(settings_content, |language| { - &mut language.extend_comment_on_newline + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.prettier.get_or_insert_default().parser = value; }) }, }), metadata: None, - files: USER | LOCAL, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Plugins", + description: "Forces Prettier integration to use specific plugins when formatting files with the language.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).prettier.plugins"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.prettier.as_ref()?.plugins.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.prettier.get_or_insert_default().plugins = value; + }) + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, + }), + SettingsPageItem::SettingItem(SettingItem { + 
title: "Options", + description: "Default Prettier options, in the format as in package.json section for Prettier.", + field: Box::new( + SettingField { + json_path: Some("languages.$(language).prettier"), + pick: |settings_content| { + language_settings_field(settings_content, |language| { + language.prettier.as_ref()?.options.as_ref() + }) + }, + write: |settings_content, value| { + language_settings_field_mut(settings_content, value, |language, value| { + language.prettier.get_or_insert_default().options = value; + }) + }, + } + .unimplemented(), + ), + metadata: None, + files: USER | PROJECT, }), ] } + +fn show_scrollbar_or_editor( + settings_content: &SettingsContent, + show: fn(&SettingsContent) -> Option<&settings::ShowScrollbar>, +) -> Option<&settings::ShowScrollbar> { + show(settings_content).or(settings_content + .editor + .scrollbar + .as_ref() + .and_then(|scrollbar| scrollbar.show.as_ref())) +} + +fn dynamic_variants() -> &'static [T::Discriminant] +where + T: strum::IntoDiscriminant, + T::Discriminant: strum::VariantArray, +{ + <::Discriminant as strum::VariantArray>::VARIANTS +} diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 0ad5cb647e58f70242c593d5cd85fe18bc22a820..c43b8095a435eb40c25694deda2cf247d7992ca5 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1,4 +1,3 @@ -//! # settings_ui mod components; mod page_data; @@ -7,19 +6,17 @@ use editor::{Editor, EditorEvent}; use feature_flags::FeatureFlag; use fuzzy::StringMatchCandidate; use gpui::{ - Action, App, Div, Entity, FocusHandle, Focusable, FontWeight, Global, ReadGlobal as _, - ScrollHandle, Stateful, Subscription, Task, TitlebarOptions, UniformListScrollHandle, Window, - WindowBounds, WindowHandle, WindowOptions, actions, div, point, prelude::*, px, size, - uniform_list, + Action, App, DEFAULT_ADDITIONAL_WINDOW_SIZE, Div, Entity, FocusHandle, Focusable, Global, + ListState, ReadGlobal as _, ScrollHandle, Stateful, Subscription, Task, TitlebarOptions, + UniformListScrollHandle, Window, WindowBounds, WindowHandle, WindowOptions, actions, div, list, + point, prelude::*, px, uniform_list, }; use heck::ToTitleCase as _; -use project::WorktreeId; +use project::{Project, WorktreeId}; +use release_channel::ReleaseChannel; use schemars::JsonSchema; use serde::Deserialize; -use settings::{ - BottomDockLayout, CloseWindowWhenNoItems, CodeFade, CursorShape, OnLastWindowClosed, - RestoreOnStartupBehavior, SaturatingBool, SettingsContent, SettingsStore, -}; +use settings::{Settings, SettingsContent, SettingsStore}; use std::{ any::{Any, TypeId, type_name}, cell::RefCell, @@ -31,16 +28,16 @@ use std::{ }; use title_bar::platform_title_bar::PlatformTitleBar; use ui::{ - ContextMenu, Divider, DividerColor, DropdownMenu, DropdownStyle, IconButtonShape, KeyBinding, - KeybindingHint, PopoverMenu, Switch, SwitchColor, Tooltip, TreeViewItem, WithScrollbar, - prelude::*, + Banner, ContextMenu, Divider, DividerColor, DropdownMenu, DropdownStyle, IconButtonShape, + KeyBinding, KeybindingHint, PopoverMenu, Switch, SwitchColor, Tooltip, TreeViewItem, + WithScrollbar, prelude::*, }; use ui_input::{NumberField, NumberFieldType}; use util::{ResultExt as _, paths::PathStyle, rel_path::RelPath}; -use workspace::{OpenOptions, OpenVisible, Workspace, client_side_decorations}; -use zed_actions::OpenSettings; +use workspace::{AppState, OpenOptions, OpenVisible, Workspace, client_side_decorations}; +use zed_actions::{OpenSettings, OpenSettingsAt}; -use 
crate::components::SettingsEditor; +use crate::components::{SettingsInputField, font_picker, icon_theme_picker, theme_picker}; const NAVBAR_CONTAINER_TAB_INDEX: isize = 0; const NAVBAR_GROUP_TAB_INDEX: isize = 1; @@ -75,7 +72,11 @@ actions!( /// Focuses the first navigation entry. FocusFirstNavEntry, /// Focuses the last navigation entry. - FocusLastNavEntry + FocusLastNavEntry, + /// Focuses and opens the next navigation entry without moving focus to content. + FocusNextNavEntry, + /// Focuses and opens the previous navigation entry without moving focus to content. + FocusPreviousNavEntry ] ); @@ -84,8 +85,27 @@ struct FocusFile(pub u32); struct SettingField { - pick: fn(&SettingsContent) -> &Option, - pick_mut: fn(&mut SettingsContent) -> &mut Option, + pick: fn(&SettingsContent) -> Option<&T>, + write: fn(&mut SettingsContent, Option), + + /// A json-path-like string that identifies, more or less uniquely, + /// where in the JSON the setting is defined. + /// + /// The syntax is `jq`-like, but modified slightly to be URL-safe (and + /// without the leading dot), e.g. `foo.bar`. + /// + /// They are URL-safe (this is important since links are the main use-case + /// for these paths). + /// + /// There are a couple of special cases: + /// - discriminants are represented with a trailing `$`, for example + /// `terminal.working_directory$`. This is to distinguish the discriminant + /// setting (i.e. the setting that changes whether the value is a string or + /// an object) from the setting in the case that it is a string. + /// - language-specific settings begin with `languages.$(language)`. Links + /// targeting these settings should take the form `languages/Rust/...`, for + /// example, but are not currently supported. + json_path: Option<&'static str>, } impl Clone for SettingField { @@ -98,17 +118,25 @@ impl Clone for SettingField { impl Copy for SettingField {} /// Helper for unimplemented settings, used in combination with `SettingField::unimplemented` -/// to keep the setting around in the UI with valid pick and pick_mut implementations, but don't actually try to render it. +/// to keep the setting around in the UI with valid pick and write implementations, without actually trying to render it. /// TODO(settings_ui): In non-dev builds (`#[cfg(not(debug_assertions))]`) make this render as edit-in-json +#[derive(Clone, Copy)] struct UnimplementedSettingField; +impl PartialEq for UnimplementedSettingField { + fn eq(&self, _other: &Self) -> bool { + true + } +} + impl SettingField { /// Helper for settings with types that are not yet implemented. 
#[allow(unused)] fn unimplemented(self) -> SettingField { SettingField { - pick: |_| &Some(UnimplementedSettingField), - pick_mut: |_| unreachable!(), + pick: |_| Some(&UnimplementedSettingField), + write: |_, _| unreachable!(), + json_path: None, } } } @@ -119,9 +147,17 @@ trait AnySettingField { fn type_id(&self) -> TypeId; // Returns the file this value was set in and true, or File::Default and false to indicate it was not found in any file (missing default) fn file_set_in(&self, file: SettingsUiFile, cx: &App) -> (settings::SettingsFile, bool); + fn reset_to_default_fn( + &self, + current_file: &SettingsUiFile, + file_set_in: &settings::SettingsFile, + cx: &App, + ) -> Option>; + + fn json_path(&self) -> Option<&'static str>; } -impl AnySettingField for SettingField { +impl AnySettingField for SettingField { fn as_any(&self) -> &dyn Any { self } @@ -140,6 +176,54 @@ impl AnySettingField for SettingField { .get_value_from_file(file.to_settings(), self.pick); return (file, value.is_some()); } + + fn reset_to_default_fn( + &self, + current_file: &SettingsUiFile, + file_set_in: &settings::SettingsFile, + cx: &App, + ) -> Option> { + if file_set_in == &settings::SettingsFile::Default { + return None; + } + if file_set_in != ¤t_file.to_settings() { + return None; + } + let this = *self; + let store = SettingsStore::global(cx); + let default_value = (this.pick)(store.raw_default_settings()); + let is_default = store + .get_content_for_file(file_set_in.clone()) + .map_or(None, this.pick) + == default_value; + if is_default { + return None; + } + let current_file = current_file.clone(); + + return Some(Box::new(move |cx| { + let store = SettingsStore::global(cx); + let default_value = (this.pick)(store.raw_default_settings()); + let is_set_somewhere_other_than_default = store + .get_value_up_to_file(current_file.to_settings(), this.pick) + .0 + != settings::SettingsFile::Default; + let value_to_set = if is_set_somewhere_other_than_default { + default_value.cloned() + } else { + None + }; + update_settings_file(current_file.clone(), cx, move |settings, _| { + (this.write)(settings, value_to_set); + }) + // todo(settings_ui): Don't log err + .log_err(); + })); + } + + fn json_path(&self) -> Option<&'static str> { + self.json_path + } } #[derive(Default, Clone)] @@ -272,8 +356,10 @@ impl Focusable for NonFocusableHandle { } } +#[derive(Default)] struct SettingsFieldMetadata { placeholder: Option<&'static str>, + should_do_titlecase: Option, } pub struct SettingsUiFeatureFlag; @@ -285,13 +371,26 @@ impl FeatureFlag for SettingsUiFeatureFlag { pub fn init(cx: &mut App) { init_renderers(cx); + cx.observe_new(|workspace: &mut workspace::Workspace, _, _| { + workspace.register_action( + |workspace, OpenSettingsAt { path }: &OpenSettingsAt, window, cx| { + let window_handle = window + .window_handle() + .downcast::() + .expect("Workspaces are root Windows"); + open_settings_editor(workspace, Some(&path), window_handle, cx); + }, + ); + }) + .detach(); + cx.observe_new(|workspace: &mut workspace::Workspace, _, _| { workspace.register_action(|workspace, _: &OpenSettings, window, cx| { let window_handle = window .window_handle() .downcast::() .expect("Workspaces are root Windows"); - open_settings_editor(workspace, window_handle, cx); + open_settings_editor(workspace, None, window_handle, cx); }); }) .detach(); @@ -311,19 +410,14 @@ fn init_renderers(cx: &mut App) { }) .add_basic_renderer::(render_toggle_button) .add_basic_renderer::(render_text_field) - .add_basic_renderer::(render_toggle_button) - 
.add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_text_field) + .add_basic_renderer::(render_toggle_button) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_font_picker) - // todo(settings_ui): This needs custom ui - // .add_renderer::(|settings_field, file, _, window, cx| { - // // todo(settings_ui): Do we want to expose the custom variant of buffer line height? - // // right now there's a manual impl of strum::VariantArray - // render_dropdown(*settings_field, file, window, cx) - // }) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -363,8 +457,11 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_number_field) .add_basic_renderer::>(render_number_field) .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_number_field) .add_basic_renderer::(render_number_field) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -372,17 +469,88 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown); - // .add_renderer::(|settings_field, file, _, window, cx| { - // render_dropdown(*settings_field, file, window, cx) - // }); + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_theme_picker) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_icon_theme_picker) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) + // please semicolon stay on next line + ; } pub fn open_settings_editor( _workspace: &mut Workspace, + path: Option<&str>, workspace_handle: WindowHandle, cx: &mut App, ) { + /// Assumes a settings GUI window is already open + fn open_path( + path: &str, + settings_window: &mut SettingsWindow, + window: &mut Window, + cx: &mut Context, + ) { + if path.starts_with("languages.$(language)") { + log::error!("language-specific settings links are not currently supported"); + return; + } + + settings_window.current_file = SettingsUiFile::User; + settings_window.build_ui(window, cx); + + let mut item_info = None; + 'search: for (nav_entry_index, entry) in settings_window.navbar_entries.iter().enumerate() { + if entry.is_root { + 
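+                // Root navbar entries are whole pages; only section entries carry an item index to search under.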
continue; + } + let page_index = entry.page_index; + let header_index = entry + .item_index + .expect("non-root entries should have an item index"); + for item_index in header_index + 1..settings_window.pages[page_index].items.len() { + let item = &settings_window.pages[page_index].items[item_index]; + if let SettingsPageItem::SectionHeader(_) = item { + break; + } + if let SettingsPageItem::SettingItem(item) = item { + if item.field.json_path() == Some(path) { + if !item.files.contains(USER) { + log::error!("Found item {}, but it is not a user setting", path); + return; + } + item_info = Some((item_index, nav_entry_index)); + break 'search; + } + } + } + } + let Some((item_index, navbar_entry_index)) = item_info else { + log::error!("Failed to find item for {}", path); + return; + }; + + settings_window.open_navbar_entry_page(navbar_entry_index); + window.focus(&settings_window.focus_handle_for_content_element(item_index, cx)); + settings_window.scroll_to_content_item(item_index, window, cx); + } + let existing_window = cx .windows() .into_iter() @@ -390,9 +558,12 @@ pub fn open_settings_editor( if let Some(existing_window) = existing_window { existing_window - .update(cx, |settings_window, window, _| { + .update(cx, |settings_window, window, cx| { settings_window.original_window = Some(workspace_handle); window.activate_window(); + if let Some(path) = path { + open_path(path, settings_window, window, cx); + } }) .ok(); return; @@ -400,11 +571,26 @@ pub fn open_settings_editor( // We have to defer this to get the workspace off the stack. + let path = path.map(ToOwned::to_owned); cx.defer(move |cx| { + let current_rem_size: f32 = theme::ThemeSettings::get_global(cx).ui_font_size(cx).into(); + + let default_bounds = DEFAULT_ADDITIONAL_WINDOW_SIZE; + let default_rem_size = 16.0; + let scale_factor = current_rem_size / default_rem_size; + let scaled_bounds: gpui::Size = default_bounds.map(|axis| axis * scale_factor); + + let app_id = ReleaseChannel::global(cx).app_id(); + let window_decorations = match std::env::var("ZED_WINDOW_DECORATIONS") { + Ok(val) if val == "server" => gpui::WindowDecorations::Server, + Ok(val) if val == "client" => gpui::WindowDecorations::Client, + _ => gpui::WindowDecorations::Client, + }; + cx.open_window( WindowOptions { titlebar: Some(TitlebarOptions { - title: Some("Settings Window".into()), + title: Some("Zed — Settings".into()), appears_transparent: true, traffic_light_position: Some(point(px(12.0), px(12.0))), }), @@ -413,11 +599,23 @@ pub fn open_settings_editor( is_movable: true, kind: gpui::WindowKind::Floating, window_background: cx.theme().window_background_appearance(), - window_min_size: Some(size(px(900.), px(750.))), // 4:3 Aspect Ratio - window_bounds: Some(WindowBounds::centered(size(px(900.), px(750.)), cx)), + app_id: Some(app_id.to_owned()), + window_decorations: Some(window_decorations), + window_min_size: Some(scaled_bounds), + window_bounds: Some(WindowBounds::centered(scaled_bounds, cx)), ..Default::default() }, - |window, cx| cx.new(|cx| SettingsWindow::new(Some(workspace_handle), window, cx)), + |window, cx| { + let settings_window = + cx.new(|cx| SettingsWindow::new(Some(workspace_handle), window, cx)); + settings_window.update(cx, |settings_window, cx| { + if let Some(path) = path { + open_path(&path, settings_window, window, cx); + } + }); + + settings_window + }, ) .log_err(); }); @@ -427,7 +625,7 @@ pub fn open_settings_editor( /// If this is empty the selected page is rendered, /// otherwise the last sub page gets rendered. 
/// -/// Global so that `pick` and `pick_mut` callbacks can access it +/// Global so that `pick` and `write` callbacks can access it /// and use it to dynamically render sub pages (e.g. for language settings) static SUB_PAGE_STACK: LazyLock>> = LazyLock::new(|| RwLock::new(Vec::new())); @@ -459,15 +657,17 @@ pub struct SettingsWindow { /// [page_index][page_item_index] will be false /// when the item is filtered out either by searches /// or by the current file + navbar_focus_subscriptions: Vec, filter_table: Vec>, has_query: bool, content_handles: Vec>>, - page_scroll_handle: ScrollHandle, + sub_page_scroll_handle: ScrollHandle, focus_handle: FocusHandle, navbar_focus_handle: Entity, content_focus_handle: Entity, files_focus_handle: FocusHandle, search_index: Option>, + list_state: ListState, } struct SearchIndex { @@ -507,6 +707,7 @@ enum SettingsPageItem { SectionHeader(&'static str), SettingItem(SettingItem), SubPageLink(SubPageLink), + DynamicItem(DynamicItem), } impl std::fmt::Debug for SettingsPageItem { @@ -519,6 +720,9 @@ impl std::fmt::Debug for SettingsPageItem { SettingsPageItem::SubPageLink(sub_page_link) => { write!(f, "SubPageLink({})", sub_page_link.title) } + SettingsPageItem::DynamicItem(dynamic_item) => { + write!(f, "DynamicItem({})", dynamic_item.discriminant.title) + } } } } @@ -527,29 +731,30 @@ impl SettingsPageItem { fn render( &self, settings_window: &SettingsWindow, - section_header: &'static str, + item_index: usize, is_last: bool, window: &mut Window, cx: &mut Context, ) -> AnyElement { let file = settings_window.current_file.clone(); - match self { - SettingsPageItem::SectionHeader(header) => v_flex() - .w_full() - .gap_1p5() - .child( - Label::new(SharedString::new_static(header)) - .size(LabelSize::Small) - .color(Color::Muted) - .buffer_font(cx), - ) - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - .into_any_element(), - SettingsPageItem::SettingItem(setting_item) => { + + let border_variant = cx.theme().colors().border_variant; + let apply_padding = |element: Stateful
<Div>| -> Stateful<Div>
{ + let element = element.pt_4(); + if is_last { + element.pb_10() + } else { + element.pb_4().border_b_1().border_color(border_variant) + } + }; + + let mut render_setting_item_inner = + |setting_item: &SettingItem, padding: bool, cx: &mut Context| { let renderer = cx.default_global::().clone(); let (_, found) = setting_item.field.file_set_in(file.clone(), cx); let renderers = renderer.renderers.borrow(); + let field_renderer = renderers.get(&AnySettingField::type_id(setting_item.field.as_ref())); let field_renderer_or_warning = @@ -562,18 +767,20 @@ impl SettingsPageItem { }); let field = match field_renderer_or_warning { - Ok(field_renderer) => field_renderer( - settings_window, - setting_item, - file, - setting_item.metadata.as_deref(), - window, - cx, - ), + Ok(field_renderer) => window.with_id(item_index, |window| { + field_renderer( + settings_window, + setting_item, + file.clone(), + setting_item.metadata.as_deref(), + window, + cx, + ) + }), Err(warning) => render_settings_item( settings_window, setting_item, - file, + file.clone(), Button::new("error-warning", warning) .style(ButtonStyle::Outlined) .size(ButtonSize::Medium) @@ -588,54 +795,129 @@ impl SettingsPageItem { ), }; - field - .pt_4() - .map(|this| { - if is_last { - this.pb_10() - } else { - this.pb_4() - .border_b_1() - .border_color(cx.theme().colors().border_variant) - } - }) - .into_any_element() + let field = if padding { + field.map(apply_padding) + } else { + field + }; + + (field, field_renderer_or_warning.is_ok()) + }; + + match self { + SettingsPageItem::SectionHeader(header) => v_flex() + .w_full() + .gap_1p5() + .child( + Label::new(SharedString::new_static(header)) + .size(LabelSize::Small) + .color(Color::Muted) + .buffer_font(cx), + ) + .child(Divider::horizontal().color(DividerColor::BorderFaded)) + .into_any_element(), + SettingsPageItem::SettingItem(setting_item) => { + let (field_with_padding, _) = render_setting_item_inner(setting_item, true, cx); + field_with_padding.into_any_element() } SettingsPageItem::SubPageLink(sub_page_link) => h_flex() - .id(sub_page_link.title) + .id(sub_page_link.title.clone()) .w_full() .min_w_0() - .gap_2() .justify_between() - .pt_4() - .when(!is_last, |this| { - this.pb_4() - .border_b_1() - .border_color(cx.theme().colors().border_variant) - }) + .map(apply_padding) .child( v_flex() .w_full() .max_w_1_2() - .child(Label::new(SharedString::new_static(sub_page_link.title))), + .child(Label::new(sub_page_link.title.clone())), ) .child( - Button::new(("sub-page".into(), sub_page_link.title), "Configure") - .icon(IconName::ChevronRight) - .tab_index(0_isize) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .style(ButtonStyle::Outlined) - .size(ButtonSize::Medium), + Button::new( + ("sub-page".into(), sub_page_link.title.clone()), + "Configure", + ) + .icon(IconName::ChevronRight) + .tab_index(0_isize) + .icon_position(IconPosition::End) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .style(ButtonStyle::OutlinedGhost) + .size(ButtonSize::Medium) + .on_click({ + let sub_page_link = sub_page_link.clone(); + cx.listener(move |this, _, _, cx| { + let mut section_index = item_index; + let current_page = this.current_page(); + + while !matches!( + current_page.items[section_index], + SettingsPageItem::SectionHeader(_) + ) { + section_index -= 1; + } + + let SettingsPageItem::SectionHeader(header) = + current_page.items[section_index] + else { + unreachable!("All items always have a section header above them") + }; + + 
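+                            // Record the enclosing section header with the sub-page so the breadcrumbs can render `Page / Section / Sub-page`.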
this.push_sub_page(sub_page_link.clone(), header, cx) + }) + }), ) - .on_click({ - let sub_page_link = sub_page_link.clone(); - cx.listener(move |this, _, _, cx| { - this.push_sub_page(sub_page_link.clone(), section_header, cx) - }) - }) .into_any_element(), + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: discriminant_setting_item, + pick_discriminant, + fields, + }) => { + let file = file.to_settings(); + let discriminant = SettingsStore::global(cx) + .get_value_from_file(file, *pick_discriminant) + .1; + + let (discriminant_element, rendered_ok) = + render_setting_item_inner(discriminant_setting_item, true, cx); + + let has_sub_fields = + rendered_ok && discriminant.map(|d| !fields[d].is_empty()).unwrap_or(false); + + let discriminant_element = if has_sub_fields { + discriminant_element.pb_4().border_b_0() + } else { + discriminant_element + }; + + let mut content = v_flex().id("dynamic-item").child(discriminant_element); + + if rendered_ok { + let discriminant = + discriminant.expect("This should be Some if rendered_ok is true"); + let sub_fields = &fields[discriminant]; + let sub_field_count = sub_fields.len(); + + for (index, field) in sub_fields.iter().enumerate() { + let is_last_sub_field = index == sub_field_count - 1; + let (raw_field, _) = render_setting_item_inner(field, false, cx); + + content = content.child( + raw_field + .p_4() + .border_x_1() + .border_t_1() + .when(is_last_sub_field, |this| this.border_b_1()) + .when(is_last_sub_field && is_last, |this| this.mb_8()) + .border_dashed() + .border_color(cx.theme().colors().border_variant) + .bg(cx.theme().colors().element_background.opacity(0.2)), + ); + } + } + + return content.into_any_element(); + } } } } @@ -649,12 +931,11 @@ fn render_settings_item( cx: &mut Context<'_, SettingsWindow>, ) -> Stateful
{ let (found_in_file, _) = setting_item.field.file_set_in(file.clone(), cx); - let file_set_in = SettingsUiFile::from_settings(found_in_file); + let file_set_in = SettingsUiFile::from_settings(found_in_file.clone()); h_flex() .id(setting_item.title) .min_w_0() - .gap_2() .justify_between() .child( v_flex() @@ -664,12 +945,30 @@ fn render_settings_item( .w_full() .gap_1() .child(Label::new(SharedString::new_static(setting_item.title))) + .when_some( + setting_item + .field + .reset_to_default_fn(&file, &found_in_file, cx), + |this, reset_to_default| { + this.child( + IconButton::new("reset-to-default-btn", IconName::Undo) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Reset to Default")) + .on_click({ + move |_, _, cx| { + reset_to_default(cx); + } + }), + ) + }, + ) .when_some( file_set_in.filter(|file_set_in| file_set_in != &file), |this, file_set_in| { this.child( Label::new(format!( - "— set in {}", + "— Modified in {}", settings_window .display_name(&file_set_in) .expect("File name should exist") @@ -697,6 +996,18 @@ struct SettingItem { files: FileMask, } +struct DynamicItem { + discriminant: SettingItem, + pick_discriminant: fn(&SettingsContent) -> Option, + fields: Vec>, +} + +impl PartialEq for DynamicItem { + fn eq(&self, other: &Self) -> bool { + self.discriminant == other.discriminant && self.fields == other.fields + } +} + #[derive(PartialEq, Eq, Clone, Copy)] struct FileMask(u8); @@ -708,7 +1019,7 @@ impl std::fmt::Debug for FileMask { if self.contains(USER) { items.push("USER"); } - if self.contains(LOCAL) { + if self.contains(PROJECT) { items.push("LOCAL"); } if self.contains(SERVER) { @@ -720,7 +1031,7 @@ impl std::fmt::Debug for FileMask { } const USER: FileMask = FileMask(1 << 0); -const LOCAL: FileMask = FileMask(1 << 2); +const PROJECT: FileMask = FileMask(1 << 2); const SERVER: FileMask = FileMask(1 << 3); impl std::ops::BitAnd for FileMask { @@ -759,7 +1070,7 @@ impl PartialEq for SettingItem { #[derive(Clone)] struct SubPageLink { - title: &'static str, + title: SharedString, files: FileMask, render: Arc< dyn Fn(&mut SettingsWindow, &mut Window, &mut Context) -> AnyElement @@ -775,6 +1086,20 @@ impl PartialEq for SubPageLink { } } +fn all_language_names(cx: &App) -> Vec { + workspace::AppState::global(cx) + .upgrade() + .map_or(vec![], |state| { + state + .languages + .language_names() + .into_iter() + .filter(|name| name.as_ref() != "Zed Keybind Context") + .map(Into::into) + .collect() + }) +} + #[allow(unused)] #[derive(Clone, PartialEq)] enum SettingsUiFile { @@ -816,14 +1141,14 @@ impl SettingsUiFile { fn mask(&self) -> FileMask { match self { SettingsUiFile::User => USER, - SettingsUiFile::Project(_) => LOCAL, + SettingsUiFile::Project(_) => PROJECT, SettingsUiFile::Server(_) => SERVER, } } } impl SettingsWindow { - pub fn new( + fn new( original_window: Option>, window: &mut Window, cx: &mut Context, @@ -860,17 +1185,77 @@ impl SettingsWindow { }) .detach(); + cx.on_window_closed(|cx| { + if let Some(existing_window) = cx + .windows() + .into_iter() + .find_map(|window| window.downcast::()) + && cx.windows().len() == 1 + { + cx.update_window(*existing_window, |_, window, _| { + window.remove_window(); + }) + .ok(); + } + }) + .detach(); + + if let Some(app_state) = AppState::global(cx).upgrade() { + for project in app_state + .workspace_store + .read(cx) + .workspaces() + .iter() + .filter_map(|space| { + space + .read(cx) + .ok() + .map(|workspace| workspace.project().clone()) + }) + .collect::>() + { + 
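+                // Re-fetch the settings window's file list whenever a tracked project's worktrees change (see `handle_project_event`).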
cx.subscribe_in(&project, window, Self::handle_project_event) + .detach(); + } + } else { + log::error!("App state doesn't exist when creating a new settings window"); + } + + let this_weak = cx.weak_entity(); + cx.observe_new::({ + move |_, window, cx| { + let project = cx.entity(); + let Some(window) = window else { + return; + }; + + this_weak + .update(cx, |_, cx| { + cx.subscribe_in(&project, window, Self::handle_project_event) + .detach(); + }) + .ok(); + } + }) + .detach(); + let title_bar = if !cfg!(target_os = "macos") { Some(cx.new(|cx| PlatformTitleBar::new("settings-title-bar", cx))) } else { None }; + // high overdraw value so the list scrollbar len doesn't change too much + let list_state = gpui::ListState::new(0, gpui::ListAlignment::Top, px(0.0)).measure_all(); + list_state.set_scroll_handler(|_, _, _| {}); + let mut this = Self { title_bar, original_window, + worktree_root_dirs: HashMap::default(), files: vec![], + current_file: current_file, pages: vec![], navbar_entries: vec![], @@ -881,7 +1266,7 @@ impl SettingsWindow { filter_table: vec![], has_query: false, content_handles: vec![], - page_scroll_handle: ScrollHandle::new(), + sub_page_scroll_handle: ScrollHandle::new(), focus_handle: cx.focus_handle(), navbar_focus_handle: NonFocusableHandle::new( NAVBAR_CONTAINER_TAB_INDEX, @@ -889,6 +1274,7 @@ impl SettingsWindow { window, cx, ), + navbar_focus_subscriptions: vec![], content_focus_handle: NonFocusableHandle::new( CONTENT_CONTAINER_TAB_INDEX, false, @@ -900,6 +1286,7 @@ impl SettingsWindow { .tab_index(HEADER_CONTAINER_TAB_INDEX) .tab_stop(false), search_index: None, + list_state, }; this.fetch_files(window, cx); @@ -913,6 +1300,23 @@ impl SettingsWindow { this } + fn handle_project_event( + &mut self, + _: &Entity, + event: &project::Event, + window: &mut Window, + cx: &mut Context, + ) { + match event { + project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded(_) => { + cx.defer_in(window, |this, window, cx| { + this.fetch_files(window, cx); + }); + } + _ => {} + } + } + fn toggle_navbar_entry(&mut self, nav_entry_index: usize) { // We can only toggle root entries if !self.navbar_entries[nav_entry_index].is_root { @@ -921,20 +1325,13 @@ impl SettingsWindow { let expanded = &mut self.navbar_entries[nav_entry_index].expanded; *expanded = !*expanded; - let expanded = *expanded; - - let toggle_page_index = self.page_index_from_navbar_index(nav_entry_index); - let selected_page_index = self.page_index_from_navbar_index(self.navbar_entry); - // if currently selected page is a child of the parent page we are folding, - // set the current page to the parent page - if !expanded && selected_page_index == toggle_page_index { - self.navbar_entry = nav_entry_index; - // note: not opening page. 
Toggling does not change content just selected page - } + self.navbar_entry = nav_entry_index; + self.reset_list_state(); } fn build_navbar(&mut self, cx: &App) { - let mut navbar_entries = Vec::with_capacity(self.navbar_entries.len()); + let mut navbar_entries = Vec::new(); + for (page_index, page) in self.pages.iter().enumerate() { navbar_entries.push(NavBarEntry { title: page.title, @@ -963,6 +1360,30 @@ impl SettingsWindow { self.navbar_entries = navbar_entries; } + fn setup_navbar_focus_subscriptions( + &mut self, + window: &mut Window, + cx: &mut Context, + ) { + let mut focus_subscriptions = Vec::new(); + + for entry_index in 0..self.navbar_entries.len() { + let focus_handle = self.navbar_entries[entry_index].focus_handle.clone(); + + let subscription = cx.on_focus( + &focus_handle, + window, + move |this: &mut SettingsWindow, + window: &mut Window, + cx: &mut Context| { + this.open_and_scroll_to_navbar_entry(entry_index, None, false, window, cx); + }, + ); + focus_subscriptions.push(subscription); + } + self.navbar_focus_subscriptions = focus_subscriptions; + } + fn visible_navbar_entries(&self) -> impl Iterator { let mut index = 0; let entries = &self.navbar_entries; @@ -1017,15 +1438,13 @@ impl SettingsWindow { header_index = index; any_found_since_last_header = false; } - SettingsPageItem::SettingItem(setting_item) => { - if !setting_item.files.contains(current_file) { - page_filter[index] = false; - } else { - any_found_since_last_header = true; - } - } - SettingsPageItem::SubPageLink(sub_page_link) => { - if !sub_page_link.files.contains(current_file) { + SettingsPageItem::SettingItem(SettingItem { files, .. }) + | SettingsPageItem::SubPageLink(SubPageLink { files, .. }) + | SettingsPageItem::DynamicItem(DynamicItem { + discriminant: SettingItem { files, .. }, + .. + }) => { + if !files.contains(current_file) { page_filter[index] = false; } else { any_found_since_last_header = true; @@ -1050,6 +1469,7 @@ impl SettingsWindow { } self.has_query = false; self.filter_matches_to_file(); + self.reset_list_state(); cx.notify(); return; } @@ -1079,6 +1499,7 @@ impl SettingsWindow { this.has_query = true; this.filter_matches_to_file(); this.open_first_nav_page(); + this.reset_list_state(); cx.notify(); } @@ -1193,7 +1614,10 @@ impl SettingsWindow { for (item_index, item) in page.items.iter().enumerate() { let key_index = key_lut.len(); match item { - SettingsPageItem::SettingItem(item) => { + SettingsPageItem::DynamicItem(DynamicItem { + discriminant: item, .. 
+ }) + | SettingsPageItem::SettingItem(item) => { documents.push(bm25::Document { id: key_index, contents: [page.title, header_str, item.title, item.description] @@ -1214,12 +1638,13 @@ impl SettingsWindow { SettingsPageItem::SubPageLink(sub_page_link) => { documents.push(bm25::Document { id: key_index, - contents: [page.title, header_str, sub_page_link.title].join("\n"), + contents: [page.title, header_str, sub_page_link.title.as_ref()] + .join("\n"), }); push_candidates( &mut fuzzy_match_candidates, key_index, - sub_page_link.title, + sub_page_link.title.as_ref(), ); } } @@ -1254,15 +1679,29 @@ impl SettingsWindow { .collect::>(); } + fn reset_list_state(&mut self) { + // plus one for the title + let mut visible_items_count = self.visible_page_items().count(); + + if visible_items_count > 0 { + // show page title if page is non empty + visible_items_count += 1; + } + + self.list_state.reset(visible_items_count); + } + fn build_ui(&mut self, window: &mut Window, cx: &mut Context) { if self.pages.is_empty() { - self.pages = page_data::settings_data(); + self.pages = page_data::settings_data(cx); self.build_navbar(cx); + self.setup_navbar_focus_subscriptions(window, cx); self.build_content_handles(window, cx); } sub_page_stack_mut().clear(); // PERF: doesn't have to be rebuilt, can just be filled with true. pages is constant once it is built self.build_filter_table(); + self.reset_list_state(); self.update_matches(cx); cx.notify(); @@ -1311,7 +1750,41 @@ impl SettingsWindow { .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true)); ui_files.push((settings_ui_file, focus_handle)); } + ui_files.reverse(); + + let mut missing_worktrees = Vec::new(); + + for worktree in all_projects(cx) + .flat_map(|project| project.read(cx).worktrees(cx)) + .filter(|tree| !self.worktree_root_dirs.contains_key(&tree.read(cx).id())) + { + let worktree = worktree.read(cx); + let worktree_id = worktree.id(); + let Some(directory_name) = worktree.root_dir().and_then(|file| { + file.file_name() + .map(|os_string| os_string.to_string_lossy().to_string()) + }) else { + continue; + }; + + missing_worktrees.push((worktree_id, directory_name.clone())); + let path = RelPath::empty().to_owned().into_arc(); + + let settings_ui_file = SettingsUiFile::Project((worktree_id, path)); + + let focus_handle = prev_files + .iter() + .find_map(|(prev_file, handle)| { + (prev_file == &settings_ui_file).then(|| handle.clone()) + }) + .unwrap_or_else(|| cx.focus_handle().tab_index(0).tab_stop(true)); + + ui_files.push((settings_ui_file, focus_handle)); + } + + self.worktree_root_dirs.extend(missing_worktrees); + self.files = ui_files; let current_file_still_exists = self .files @@ -1326,7 +1799,17 @@ impl SettingsWindow { if !self.is_nav_entry_visible(navbar_entry) { self.open_first_nav_page(); } + + let is_new_page = self.navbar_entries[self.navbar_entry].page_index + != self.navbar_entries[navbar_entry].page_index; self.navbar_entry = navbar_entry; + + // We only need to reset visible items when updating matches + // and selecting a new page + if is_new_page { + self.reset_list_state(); + } + sub_page_stack_mut().clear(); } @@ -1344,6 +1827,7 @@ impl SettingsWindow { self.build_ui(window, cx); return; } + if self.files[ix].0 == self.current_file { return; } @@ -1355,7 +1839,7 @@ impl SettingsWindow { .visible_navbar_entries() .any(|(index, _)| index == self.navbar_entry) { - self.open_and_scroll_to_navbar_entry(self.navbar_entry, window, cx); + self.open_and_scroll_to_navbar_entry(self.navbar_entry, None, true, window, cx); 
} else { self.open_first_nav_page(); }; @@ -1363,45 +1847,123 @@ impl SettingsWindow { fn render_files_header( &self, - _window: &mut Window, + window: &mut Window, cx: &mut Context, ) -> impl IntoElement { + static OVERFLOW_LIMIT: usize = 1; + + let file_button = + |ix, file: &SettingsUiFile, focus_handle, cx: &mut Context| { + Button::new( + ix, + self.display_name(&file) + .expect("Files should always have a name"), + ) + .toggle_state(file == &self.current_file) + .selected_style(ButtonStyle::Tinted(ui::TintColor::Accent)) + .track_focus(focus_handle) + .on_click(cx.listener({ + let focus_handle = focus_handle.clone(); + move |this, _: &gpui::ClickEvent, window, cx| { + this.change_file(ix, window, cx); + focus_handle.focus(window); + } + })) + }; + + let this = cx.entity(); + h_flex() .w_full() .pb_4() .gap_1() .justify_between() - .tab_group() .track_focus(&self.files_focus_handle) + .tab_group() .tab_index(HEADER_GROUP_TAB_INDEX) .child( h_flex() - .id("file_buttons_container") - .w_64() // Temporary fix until long-term solution is a fixed set of buttons representing a file location (User, Project, and Remote) .gap_1() - .overflow_x_scroll() .children( - self.files + self.files.iter().enumerate().take(OVERFLOW_LIMIT).map( + |(ix, (file, focus_handle))| file_button(ix, file, focus_handle, cx), + ), + ) + .when(self.files.len() > OVERFLOW_LIMIT, |div| { + let selected_file_ix = self + .files .iter() .enumerate() - .map(|(ix, (file, focus_handle))| { - Button::new( - ix, - self.display_name(&file) - .expect("Files should always have a name"), + .skip(OVERFLOW_LIMIT) + .find_map(|(ix, (file, _))| { + if file == &self.current_file { + Some(ix) + } else { + None + } + }) + .unwrap_or(OVERFLOW_LIMIT); + + let (file, focus_handle) = &self.files[selected_file_ix]; + + div.child(file_button(selected_file_ix, file, focus_handle, cx)) + .when(self.files.len() > OVERFLOW_LIMIT + 1, |div| { + div.child( + DropdownMenu::new( + "more-files", + format!("+{}", self.files.len() - (OVERFLOW_LIMIT + 1)), + ContextMenu::build(window, cx, move |mut menu, _, _| { + for (mut ix, (file, focus_handle)) in self + .files + .iter() + .enumerate() + .skip(OVERFLOW_LIMIT + 1) + { + let (display_name, focus_handle) = + if selected_file_ix == ix { + ix = OVERFLOW_LIMIT; + ( + self.display_name(&self.files[ix].0), + self.files[ix].1.clone(), + ) + } else { + ( + self.display_name(&file), + focus_handle.clone(), + ) + }; + + menu = menu.entry( + display_name + .expect("Files should always have a name"), + None, + { + let this = this.clone(); + move |window, cx| { + this.update(cx, |this, cx| { + this.change_file(ix, window, cx); + }); + focus_handle.focus(window); + } + }, + ); + } + + menu + }), + ) + .style(DropdownStyle::Subtle) + .trigger_tooltip(Tooltip::text("View Other Projects")) + .trigger_icon(IconName::ChevronDown) + .attach(gpui::Corner::BottomLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + .tab_index(0), ) - .toggle_state(file == &self.current_file) - .selected_style(ButtonStyle::Tinted(ui::TintColor::Accent)) - .track_focus(focus_handle) - .on_click(cx.listener({ - let focus_handle = focus_handle.clone(); - move |this, _: &gpui::ClickEvent, window, cx| { - this.change_file(ix, window, cx); - focus_handle.focus(window); - } - })) - }), - ), + }) + }), ) .child( Button::new("edit-in-json", "Edit in settings.json") @@ -1485,6 +2047,9 @@ impl SettingsWindow { .read(cx) .handle .contains_focused(window, cx) + || self + .visible_navbar_entries() + .any(|(_, entry)| 
entry.focus_handle.is_focused(window)) { "Focus Content" } else { @@ -1492,12 +2057,6 @@ impl SettingsWindow { }; v_flex() - .w_64() - .p_2p5() - .when(cfg!(target_os = "macos"), |c| c.pt_10()) - .h_full() - .flex_none() - .border_r_1() .key_context("NavigationMenu") .on_action(cx.listener(|this, _: &CollapseNavEntry, window, cx| { let Some(focused_entry) = this.focused_nav_entry(window, cx) else { @@ -1571,6 +2130,56 @@ impl SettingsWindow { this.focus_and_scroll_to_nav_entry(last_entry_index, window, cx); } })) + .on_action(cx.listener(|this, _: &FocusNextNavEntry, window, cx| { + let entry_index = this + .focused_nav_entry(window, cx) + .unwrap_or(this.navbar_entry); + let mut next_index = None; + for (index, _) in this.visible_navbar_entries() { + if index > entry_index { + next_index = Some(index); + break; + } + } + let Some(next_entry_index) = next_index else { + return; + }; + this.open_and_scroll_to_navbar_entry( + next_entry_index, + Some(gpui::ScrollStrategy::Bottom), + false, + window, + cx, + ); + })) + .on_action(cx.listener(|this, _: &FocusPreviousNavEntry, window, cx| { + let entry_index = this + .focused_nav_entry(window, cx) + .unwrap_or(this.navbar_entry); + let mut prev_index = None; + for (index, _) in this.visible_navbar_entries() { + if index >= entry_index { + break; + } + prev_index = Some(index); + } + let Some(prev_entry_index) = prev_index else { + return; + }; + this.open_and_scroll_to_navbar_entry( + prev_entry_index, + Some(gpui::ScrollStrategy::Top), + false, + window, + cx, + ); + })) + .w_56() + .h_full() + .p_2p5() + .when(cfg!(target_os = "macos"), |this| this.pt_10()) + .flex_none() + .border_r_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().panel_background) .child(self.render_search(window, cx)) @@ -1589,21 +2198,22 @@ impl SettingsWindow { this.visible_navbar_entries() .skip(range.start.saturating_sub(1)) .take(range.len()) - .map(|(ix, entry)| { + .map(|(entry_index, entry)| { TreeViewItem::new( - ("settings-ui-navbar-entry", ix), + ("settings-ui-navbar-entry", entry_index), entry.title, ) .track_focus(&entry.focus_handle) .root_item(entry.is_root) - .toggle_state(this.is_navbar_entry_selected(ix)) + .toggle_state(this.is_navbar_entry_selected(entry_index)) .when(entry.is_root, |item| { item.expanded(entry.expanded || this.has_query) .on_toggle(cx.listener( move |this, _, window, cx| { - this.toggle_navbar_entry(ix); + this.toggle_navbar_entry(entry_index); window.focus( - &this.navbar_entries[ix].focus_handle, + &this.navbar_entries[entry_index] + .focus_handle, ); cx.notify(); }, @@ -1612,7 +2222,11 @@ impl SettingsWindow { .on_click( cx.listener(move |this, _, window, cx| { this.open_and_scroll_to_navbar_entry( - ix, window, cx, + entry_index, + None, + true, + window, + cx, ); }), ) @@ -1634,14 +2248,16 @@ impl SettingsWindow { .flex_shrink_0() .border_t_1() .border_color(cx.theme().colors().border_variant) - .children( - KeyBinding::for_action(&ToggleFocusNav, window, cx).map(|this| { - KeybindingHint::new( - this, - cx.theme().colors().surface_background.opacity(0.5), - ) - .suffix(focus_keybind_label) - }), + .child( + KeybindingHint::new( + KeyBinding::for_action_in( + &ToggleFocusNav, + &self.navbar_focus_handle.focus_handle(cx), + cx, + ), + cx.theme().colors().surface_background.opacity(0.5), + ) + .suffix(focus_keybind_label), ), ) } @@ -1649,34 +2265,60 @@ impl SettingsWindow { fn open_and_scroll_to_navbar_entry( &mut self, navbar_entry_index: usize, + scroll_strategy: Option, + focus_content: bool, window: &mut 
Window, cx: &mut Context, ) { self.open_navbar_entry_page(navbar_entry_index); cx.notify(); + let mut handle_to_focus = None; + if self.navbar_entries[navbar_entry_index].is_root || !self.is_nav_entry_visible(navbar_entry_index) { - let Some(first_item_index) = self.visible_page_items().next().map(|(index, _)| index) - else { - return; - }; - self.focus_content_element(first_item_index, window, cx); - self.page_scroll_handle.set_offset(point(px(0.), px(0.))); + self.sub_page_scroll_handle + .set_offset(point(px(0.), px(0.))); + if focus_content { + let Some(first_item_index) = + self.visible_page_items().next().map(|(index, _)| index) + else { + return; + }; + handle_to_focus = Some(self.focus_handle_for_content_element(first_item_index, cx)); + } else if !self.is_nav_entry_visible(navbar_entry_index) { + let Some(first_visible_nav_entry_index) = + self.visible_navbar_entries().next().map(|(index, _)| index) + else { + return; + }; + self.focus_and_scroll_to_nav_entry(first_visible_nav_entry_index, window, cx); + } else { + handle_to_focus = + Some(self.navbar_entries[navbar_entry_index].focus_handle.clone()); + } } else { let entry_item_index = self.navbar_entries[navbar_entry_index] .item_index .expect("Non-root items should have an item index"); - let Some(selected_item_index) = self - .visible_page_items() - .position(|(index, _)| index == entry_item_index) - else { - return; - }; - self.page_scroll_handle - .scroll_to_top_of_item(selected_item_index); - self.focus_content_element(entry_item_index, window, cx); + self.scroll_to_content_item(entry_item_index, window, cx); + if focus_content { + handle_to_focus = Some(self.focus_handle_for_content_element(entry_item_index, cx)); + } else { + handle_to_focus = + Some(self.navbar_entries[navbar_entry_index].focus_handle.clone()); + } + } + + if let Some(scroll_strategy) = scroll_strategy + && let Some(logical_entry_index) = self + .visible_navbar_entries() + .into_iter() + .position(|(index, _)| index == navbar_entry_index) + { + self.navbar_scroll_handle + .scroll_to_item(logical_entry_index + 1, scroll_strategy); } // Page scroll handle updates the active item index @@ -1684,7 +2326,11 @@ impl SettingsWindow { // The call after that updates the offset of the scroll handle. So to // ensure the scroll handle doesn't lag behind we need to render three frames // back to back. 
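+        // Focusing the target element is likewise deferred to the first of those frames, so its focus handle is already in the element tree by the time it is focused.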
- cx.on_next_frame(window, |_, window, cx| { + cx.on_next_frame(window, move |_, window, cx| { + if let Some(handle) = handle_to_focus.as_ref() { + window.focus(handle); + } + cx.on_next_frame(window, |_, _, cx| { cx.notify(); }); @@ -1693,11 +2339,48 @@ impl SettingsWindow { cx.notify(); } + fn scroll_to_content_item( + &self, + content_item_index: usize, + _window: &mut Window, + cx: &mut Context, + ) { + let index = self + .visible_page_items() + .position(|(index, _)| index == content_item_index) + .unwrap_or(0); + if index == 0 { + self.sub_page_scroll_handle + .set_offset(point(px(0.), px(0.))); + self.list_state.scroll_to(gpui::ListOffset { + item_ix: 0, + offset_in_item: px(0.), + }); + return; + } + self.list_state.scroll_to(gpui::ListOffset { + item_ix: index + 1, + offset_in_item: px(0.), + }); + cx.notify(); + } + fn is_nav_entry_visible(&self, nav_entry_index: usize) -> bool { self.visible_navbar_entries() .any(|(index, _)| index == nav_entry_index) } + fn focus_and_scroll_to_first_visible_nav_entry( + &self, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(nav_entry_index) = self.visible_navbar_entries().next().map(|(index, _)| index) + { + self.focus_and_scroll_to_nav_entry(nav_entry_index, window, cx); + } + } + fn focus_and_scroll_to_nav_entry( &self, nav_entry_index: usize, @@ -1730,11 +2413,11 @@ impl SettingsWindow { fn render_sub_page_breadcrumbs(&self) -> impl IntoElement { let mut items = vec![]; - items.push(self.current_page().title); + items.push(self.current_page().title.into()); items.extend( sub_page_stack() .iter() - .flat_map(|page| [page.section_header, page.link.title]), + .flat_map(|page| [page.section_header.into(), page.link.title.clone()]), ); let last = items.pop().unwrap(); @@ -1743,13 +2426,108 @@ impl SettingsWindow { .children( items .into_iter() - .flat_map(|item| [item, "/"]) + .flat_map(|item| [item, "/".into()]) .map(|item| Label::new(item).color(Color::Muted)), ) .child(Label::new(last)) } - fn render_page_items<'a, Items: Iterator>( + fn render_empty_state(&self, search_query: SharedString) -> impl IntoElement { + v_flex() + .size_full() + .items_center() + .justify_center() + .gap_1() + .child(Label::new("No Results")) + .child( + Label::new(search_query) + .size(LabelSize::Small) + .color(Color::Muted), + ) + } + + fn render_page_items( + &mut self, + page_index: usize, + _window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement { + let mut page_content = v_flex().id("settings-ui-page").size_full(); + + let has_active_search = !self.search_bar.read(cx).is_empty(cx); + let has_no_results = self.visible_page_items().next().is_none() && has_active_search; + + if has_no_results { + let search_query = self.search_bar.read(cx).text(cx); + page_content = page_content.child( + self.render_empty_state(format!("No settings match \"{}\"", search_query).into()), + ) + } else { + let last_non_header_index = self + .visible_page_items() + .filter_map(|(index, item)| { + (!matches!(item, SettingsPageItem::SectionHeader(_))).then_some(index) + }) + .last(); + + let root_nav_label = self + .navbar_entries + .iter() + .find(|entry| entry.is_root && entry.page_index == self.current_page_index()) + .map(|entry| entry.title); + + let list_content = list( + self.list_state.clone(), + cx.processor(move |this, index, window, cx| { + if index == 0 { + return div() + .when(sub_page_stack().is_empty(), |this| { + this.when_some(root_nav_label, |this, title| { + this.child( + Label::new(title).size(LabelSize::Large).mt_2().mb_3(), + ) + }) + }) + 
.into_any_element(); + } + + let mut visible_items = this.visible_page_items(); + let Some((actual_item_index, item)) = visible_items.nth(index - 1) else { + return gpui::Empty.into_any_element(); + }; + + let no_bottom_border = visible_items + .next() + .map(|(_, item)| matches!(item, SettingsPageItem::SectionHeader(_))) + .unwrap_or(false); + + let is_last = Some(actual_item_index) == last_non_header_index; + + let item_focus_handle = + this.content_handles[page_index][actual_item_index].focus_handle(cx); + + v_flex() + .id(("settings-page-item", actual_item_index)) + .w_full() + .min_w_0() + .track_focus(&item_focus_handle) + .child(item.render( + this, + actual_item_index, + no_bottom_border || is_last, + window, + cx, + )) + .into_any_element() + }), + ); + + page_content = page_content.child(list_content.size_full()) + } + page_content + } + + fn render_sub_page_items<'a, Items: Iterator>( &self, items: Items, page_index: Option, @@ -1760,7 +2538,7 @@ impl SettingsWindow { .id("settings-ui-page") .size_full() .overflow_y_scroll() - .track_scroll(&self.page_scroll_handle); + .track_scroll(&self.sub_page_scroll_handle); let items: Vec<_> = items.collect(); let items_len = items.len(); @@ -1772,18 +2550,7 @@ impl SettingsWindow { if has_no_results { let search_query = self.search_bar.read(cx).text(cx); page_content = page_content.child( - v_flex() - .size_full() - .items_center() - .justify_center() - .gap_1() - .child(div().child("No Results")) - .child( - div() - .text_sm() - .text_color(cx.theme().colors().text_muted) - .child(format!("No settings match \"{}\"", search_query)), - ), + self.render_empty_state(format!("No settings match \"{}\"", search_query).into()), ) } else { let last_non_header_index = items @@ -1830,7 +2597,7 @@ impl SettingsWindow { }) .child(item.render( self, - section_header.expect("All items rendered after a section header"), + actual_item_index, no_bottom_border || is_last, window, cx, @@ -1853,12 +2620,7 @@ impl SettingsWindow { page_header = self.render_files_header(window, cx).into_any_element(); page_content = self - .render_page_items( - self.visible_page_items(), - Some(self.current_page_index()), - window, - cx, - ) + .render_page_items(self.current_page_index(), window, cx) .into_any_element(); } else { page_header = h_flex() @@ -1880,15 +2642,112 @@ impl SettingsWindow { page_content = (active_page_render_fn)(self, window, cx); } + let mut warning_banner = gpui::Empty.into_any_element(); + if let Some(error) = + SettingsStore::global(cx).error_for_file(self.current_file.to_settings()) + { + warning_banner = v_flex() + .pb_4() + .child( + Banner::new() + .severity(Severity::Warning) + .child( + v_flex() + .my_0p5() + .gap_0p5() + .child(Label::new("Your settings file is in an invalid state.")) + .child( + Label::new(error).size(LabelSize::Small).color(Color::Muted), + ), + ) + .action_slot( + div().pr_1().child( + Button::new("fix-in-json", "Fix in settings.json") + .tab_index(0_isize) + .style(ButtonStyle::Tinted(ui::TintColor::Warning)) + .on_click(cx.listener(|this, _, _, cx| { + this.open_current_settings_file(cx); + })), + ), + ), + ) + .into_any_element() + } + return v_flex() - .size_full() + .id("Settings-ui-page") + .on_action(cx.listener(|this, _: &menu::SelectNext, window, cx| { + if !sub_page_stack().is_empty() { + window.focus_next(); + return; + } + for (logical_index, (actual_index, _)) in this.visible_page_items().enumerate() { + let handle = this.content_handles[this.current_page_index()][actual_index] + .focus_handle(cx); + let mut 
offset = 1; // for page header + + if let Some((_, next_item)) = this.visible_page_items().nth(logical_index + 1) + && matches!(next_item, SettingsPageItem::SectionHeader(_)) + { + offset += 1; + } + if handle.contains_focused(window, cx) { + let next_logical_index = logical_index + offset + 1; + this.list_state.scroll_to_reveal_item(next_logical_index); + // We need to render the next item to ensure its focus handle is in the element tree + cx.on_next_frame(window, |_, window, cx| { + window.focus_next(); + cx.notify(); + }); + cx.notify(); + return; + } + } + window.focus_next(); + })) + .on_action(cx.listener(|this, _: &menu::SelectPrevious, window, cx| { + if !sub_page_stack().is_empty() { + window.focus_prev(); + return; + } + let mut prev_was_header = false; + for (logical_index, (actual_index, item)) in this.visible_page_items().enumerate() { + let is_header = matches!(item, SettingsPageItem::SectionHeader(_)); + let handle = this.content_handles[this.current_page_index()][actual_index] + .focus_handle(cx); + let mut offset = 1; // for page header + + if prev_was_header { + offset -= 1; + } + if handle.contains_focused(window, cx) { + let next_logical_index = logical_index + offset - 1; + this.list_state.scroll_to_reveal_item(next_logical_index); + // We need to render the next item to ensure its focus handle is in the element tree + cx.on_next_frame(window, |_, window, cx| { + window.focus_prev(); + cx.notify(); + }); + cx.notify(); + return; + } + prev_was_header = is_header; + } + window.focus_prev(); + })) + .when(sub_page_stack().is_empty(), |this| { + this.vertical_scrollbar_for(self.list_state.clone(), window, cx) + }) + .when(!sub_page_stack().is_empty(), |this| { + this.vertical_scrollbar_for(self.sub_page_scroll_handle.clone(), window, cx) + }) + .track_focus(&self.content_focus_handle.focus_handle(cx)) + .flex_1() .pt_6() - .pb_8() .px_8() .bg(cx.theme().colors().editor_background) + .child(warning_banner) .child(page_header) - .vertical_scrollbar_for(self.page_scroll_handle.clone(), window, cx) - .track_focus(&self.content_focus_handle.focus_handle(cx)) .child( div() .size_full() @@ -1898,6 +2757,9 @@ ); } + /// This function will create a new settings file if one doesn't exist and the + /// current file is a project settings file with a valid worktree id. We do this + /// because the settings UI allows initializing project settings. fn open_current_settings_file(&mut self, cx: &mut Context) { match &self.current_file { SettingsUiFile::User => { @@ -1942,58 +2804,83 @@ .ok(); } SettingsUiFile::Project((worktree_id, path)) => { - let mut corresponding_workspace: Option> = None; let settings_path = path.join(paths::local_settings_file_relative_path()); let Some(app_state) = workspace::AppState::global(cx).upgrade() else { return; }; - for workspace in app_state.workspace_store.read(cx).workspaces() { - let contains_settings_file = workspace - .read_with(cx, |workspace, cx| { - workspace.project().read(cx).contains_local_settings_file( - *worktree_id, - settings_path.as_ref(), - cx, - ) - }) - .ok(); - if Some(true) == contains_settings_file { - corresponding_workspace = Some(*workspace); - break; - } - } - - let Some(corresponding_workspace) = corresponding_workspace else { + let Some((worktree, corresponding_workspace)) = app_state + .workspace_store + .read(cx) + .workspaces() + .iter() + .find_map(|workspace| { + workspace + .read_with(cx, |workspace, cx| { + workspace + .project() + .read(cx) + .worktree_for_id(*worktree_id, cx) + 
}) + .ok() + .flatten() + .zip(Some(*workspace)) + }) + else { log::error!( - "No corresponding workspace found for settings file {}", - settings_path.as_std_path().display() + "No corresponding workspace contains worktree id: {}", + worktree_id ); return; }; + let create_task = if worktree.read(cx).entry_for_path(&settings_path).is_some() { + None + } else { + Some(worktree.update(cx, |tree, cx| { + tree.create_entry( + settings_path.clone(), + false, + Some("{\n\n}".as_bytes().to_vec()), + cx, + ) + })) + }; + + let worktree_id = *worktree_id; + // TODO: move zed::open_local_file() APIs to this crate, and // re-implement the "initial_contents" behavior corresponding_workspace - .update(cx, |workspace, window, cx| { - let open_task = workspace.open_path( - (*worktree_id, settings_path.clone()), - None, - true, - window, - cx, - ); - + .update(cx, |_, window, cx| { cx.spawn_in(window, async move |workspace, cx| { - if open_task.await.log_err().is_some() { - workspace - .update_in(cx, |_, window, cx| { - window.activate_window(); - cx.notify(); - }) - .ok(); - } + if let Some(create_task) = create_task { + create_task.await.ok()?; + }; + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path( + (worktree_id, settings_path.clone()), + None, + true, + window, + cx, + ) + }) + .ok()? + .await + .log_err()?; + + workspace + .update_in(cx, |_, window, cx| { + window.activate_window(); + cx.notify(); + }) + .ok(); + + Some(()) }) .detach(); }) @@ -2068,12 +2955,13 @@ impl SettingsWindow { 0 } - fn focus_content_element(&self, item_index: usize, window: &mut Window, cx: &mut App) { - if !sub_page_stack().is_empty() { - return; - } + fn focus_handle_for_content_element( + &self, + actual_item_index: usize, + cx: &Context, + ) -> FocusHandle { let page_index = self.current_page_index(); - window.focus(&self.content_handles[page_index][item_index].focus_handle(cx)); + self.content_handles[page_index][actual_item_index].focus_handle(cx) } fn focused_nav_entry(&self, window: &Window, cx: &App) -> Option { @@ -2132,7 +3020,13 @@ impl Render for SettingsWindow { .focus_handle(cx) .contains_focused(window, cx) { - this.open_and_scroll_to_navbar_entry(this.navbar_entry, window, cx); + this.open_and_scroll_to_navbar_entry( + this.navbar_entry, + None, + true, + window, + cx, + ); } else { this.focus_and_scroll_to_nav_entry(this.navbar_entry, window, cx); } @@ -2153,9 +3047,17 @@ impl Render for SettingsWindow { let prev_index = this.focused_file_index(window, cx).saturating_sub(1); this.focus_file_at_index(prev_index, window); })) - .on_action(|_: &menu::SelectNext, window, _| { - window.focus_next(); - }) + .on_action(cx.listener(|this, _: &menu::SelectNext, window, cx| { + if this + .search_bar + .focus_handle(cx) + .contains_focused(window, cx) + { + this.focus_and_scroll_to_first_visible_nav_entry(window, cx); + } else { + window.focus_next(); + } + })) .on_action(|_: &menu::SelectPrevious, window, _| { window.focus_prev(); }) @@ -2166,6 +3068,9 @@ impl Render for SettingsWindow { .font(ui_font) .bg(cx.theme().colors().background) .text_color(cx.theme().colors().text) + .when(!cfg!(target_os = "macos"), |this| { + this.border_t_1().border_color(cx.theme().colors().border) + }) .child(self.render_nav(window, cx)) .child(self.render_page(window, cx)), ), @@ -2198,20 +3103,40 @@ fn update_settings_file( match file { SettingsUiFile::Project((worktree_id, rel_path)) => { let rel_path = rel_path.join(paths::local_settings_file_relative_path()); - let project = 
all_projects(cx).find(|project| { - project.read_with(cx, |project, cx| { - project.contains_local_settings_file(worktree_id, &rel_path, cx) - }) - }); - let Some(project) = project else { - anyhow::bail!( - "Could not find worktree containing settings file: {}", - &rel_path.display(PathStyle::local()) - ); + let Some((worktree, project)) = all_projects(cx).find_map(|project| { + project + .read(cx) + .worktree_for_id(worktree_id, cx) + .zip(Some(project)) + }) else { + anyhow::bail!("Could not find project with worktree id: {}", worktree_id); }; + project.update(cx, |project, cx| { - project.update_local_settings_file(worktree_id, rel_path, cx, update); + let task = if project.contains_local_settings_file(worktree_id, &rel_path, cx) { + None + } else { + Some(worktree.update(cx, |worktree, cx| { + worktree.create_entry(rel_path.clone(), false, None, cx) + })) + }; + + cx.spawn(async move |project, cx| { + if let Some(task) = task + && task.await.is_err() + { + return; + }; + + project + .update(cx, |project, cx| { + project.update_local_settings_file(worktree_id, rel_path, cx, update); + }) + .ok(); + }) + .detach(); }); + return Ok(()); } SettingsUiFile::User => { @@ -2234,7 +3159,7 @@ fn render_text_field + Into + AsRef + Clone>( SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); let initial_text = initial_text.filter(|s| !s.as_ref().is_empty()); - SettingsEditor::new() + SettingsInputField::new() .tab_index(0) .when_some(initial_text, |editor, text| { editor.with_initial_text(text.as_ref().to_string()) @@ -2246,7 +3171,7 @@ fn render_text_field + Into + AsRef + Clone>( .on_confirm({ move |new_text, cx| { update_settings_file(file.clone(), cx, move |settings, _cx| { - *(field.pick_mut)(settings) = new_text.map(Into::into); + (field.write)(settings, new_text.map(Into::into)); }) .log_err(); // todo(settings_ui) don't log err } @@ -2270,66 +3195,17 @@ fn render_toggle_button + From + Copy>( }; Switch::new("toggle_button", toggle_state) - .color(ui::SwitchColor::Accent) + .tab_index(0_isize) + .color(SwitchColor::Accent) .on_click({ move |state, _window, cx| { let state = *state == ui::ToggleState::Selected; update_settings_file(file.clone(), cx, move |settings, _cx| { - *(field.pick_mut)(settings) = Some(state.into()); + (field.write)(settings, Some(state.into())); }) .log_err(); // todo(settings_ui) don't log err } }) - .tab_index(0_isize) - .color(SwitchColor::Accent) - .into_any_element() -} - -fn render_font_picker( - field: SettingField, - file: SettingsUiFile, - _metadata: Option<&SettingsFieldMetadata>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let current_value = SettingsStore::global(cx) - .get_value_from_file(file.to_settings(), field.pick) - .1 - .cloned() - .unwrap_or_else(|| SharedString::default().into()); - - let font_picker = cx.new(|cx| { - ui_input::font_picker( - current_value.clone().into(), - move |font_name, cx| { - update_settings_file(file.clone(), cx, move |settings, _cx| { - *(field.pick_mut)(settings) = Some(font_name.into()); - }) - .log_err(); // todo(settings_ui) don't log err - }, - window, - cx, - ) - }); - - PopoverMenu::new("font-picker") - .menu(move |_window, _cx| Some(font_picker.clone())) - .trigger( - Button::new("font-family-button", current_value) - .tab_index(0_isize) - .style(ButtonStyle::Outlined) - .size(ButtonSize::Medium) - .icon(IconName::ChevronUpDown) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End), - ) - .anchor(gpui::Corner::TopLeft) - 
.offset(gpui::Point { - x: px(0.0), - y: px(2.0), - }) - .with_handle(ui::PopoverMenuHandle::default()) .into_any_element() } @@ -2347,7 +3223,7 @@ fn render_number_field( move |value, _window, cx| { let value = *value; update_settings_file(file.clone(), cx, move |settings, _cx| { - *(field.pick_mut)(settings) = Some(value); + (field.write)(settings, Some(value)); }) .log_err(); // todo(settings_ui) don't log err } @@ -2358,7 +3234,7 @@ fn render_number_field( fn render_dropdown( field: SettingField, file: SettingsUiFile, - _metadata: Option<&SettingsFieldMetadata>, + metadata: Option<&SettingsFieldMetadata>, window: &mut Window, cx: &mut App, ) -> AnyElement @@ -2367,6 +3243,9 @@ where { let variants = || -> &'static [T] { ::VARIANTS }; let labels = || -> &'static [&'static str] { ::VARIANTS }; + let should_do_titlecase = metadata + .and_then(|metadata| metadata.should_do_titlecase) + .unwrap_or(true); let (_, current_value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); @@ -2377,41 +3256,198 @@ where DropdownMenu::new( "dropdown", - current_value_label.to_title_case(), - ContextMenu::build(window, cx, move |mut menu, _, _| { - for (&value, &label) in std::iter::zip(variants(), labels()) { - let file = file.clone(); - menu = menu.toggleable_entry( - label.to_title_case(), - value == current_value, - IconPosition::End, - None, - move |_, cx| { - if value == current_value { - return; - } - update_settings_file(file.clone(), cx, move |settings, _cx| { - *(field.pick_mut)(settings) = Some(value); - }) - .log_err(); // todo(settings_ui) don't log err - }, - ); - } - menu + if should_do_titlecase { + current_value_label.to_title_case() + } else { + current_value_label.to_string() + }, + window.use_state(cx, |window, cx| { + ContextMenu::new(window, cx, move |mut menu, _, _| { + for (&value, &label) in std::iter::zip(variants(), labels()) { + let file = file.clone(); + menu = menu.toggleable_entry( + if should_do_titlecase { + label.to_title_case() + } else { + label.to_string() + }, + value == current_value, + IconPosition::End, + None, + move |_, cx| { + if value == current_value { + return; + } + update_settings_file(file.clone(), cx, move |settings, _cx| { + (field.write)(settings, Some(value)); + }) + .log_err(); // todo(settings_ui) don't log err + }, + ); + } + menu + }) }), ) + .tab_index(0) .trigger_size(ButtonSize::Medium) .style(DropdownStyle::Outlined) .offset(gpui::Point { x: px(0.0), y: px(2.0), }) - .tab_index(0) .into_any_element() } +fn render_picker_trigger_button(id: SharedString, label: SharedString) -> Button { + Button::new(id, label) + .tab_index(0_isize) + .style(ButtonStyle::Outlined) + .size(ButtonSize::Medium) + .icon(IconName::ChevronUpDown) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .icon_position(IconPosition::End) +} + +fn render_font_picker( + field: SettingField, + file: SettingsUiFile, + _metadata: Option<&SettingsFieldMetadata>, + _window: &mut Window, + cx: &mut App, +) -> AnyElement { + let current_value = SettingsStore::global(cx) + .get_value_from_file(file.to_settings(), field.pick) + .1 + .cloned() + .unwrap_or_else(|| SharedString::default().into()); + + PopoverMenu::new("font-picker") + .trigger(render_picker_trigger_button( + "font_family_picker_trigger".into(), + current_value.clone().into(), + )) + .menu(move |window, cx| { + let file = file.clone(); + let current_value = current_value.clone(); + + Some(cx.new(move |cx| { + font_picker( + current_value.clone().into(), + move |font_name, cx| { + 
update_settings_file(file.clone(), cx, move |settings, _cx| { + (field.write)(settings, Some(font_name.into())); + }) + .log_err(); // todo(settings_ui) don't log err + }, + window, + cx, + ) + })) + }) + .anchor(gpui::Corner::TopLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + .with_handle(ui::PopoverMenuHandle::default()) + .into_any_element() +} + +fn render_theme_picker( + field: SettingField, + file: SettingsUiFile, + _metadata: Option<&SettingsFieldMetadata>, + _window: &mut Window, + cx: &mut App, +) -> AnyElement { + let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); + let current_value = value + .cloned() + .map(|theme_name| theme_name.0.into()) + .unwrap_or_else(|| cx.theme().name.clone()); + + PopoverMenu::new("theme-picker") + .trigger(render_picker_trigger_button( + "theme_picker_trigger".into(), + current_value.clone(), + )) + .menu(move |window, cx| { + Some(cx.new(|cx| { + let file = file.clone(); + let current_value = current_value.clone(); + theme_picker( + current_value, + move |theme_name, cx| { + update_settings_file(file.clone(), cx, move |settings, _cx| { + (field.write)(settings, Some(settings::ThemeName(theme_name.into()))); + }) + .log_err(); // todo(settings_ui) don't log err + }, + window, + cx, + ) + })) + }) + .anchor(gpui::Corner::TopLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + .with_handle(ui::PopoverMenuHandle::default()) + .into_any_element() +} + +fn render_icon_theme_picker( + field: SettingField, + file: SettingsUiFile, + _metadata: Option<&SettingsFieldMetadata>, + _window: &mut Window, + cx: &mut App, +) -> AnyElement { + let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); + let current_value = value + .cloned() + .map(|theme_name| theme_name.0.into()) + .unwrap_or_else(|| cx.theme().name.clone()); + + PopoverMenu::new("icon-theme-picker") + .trigger(render_picker_trigger_button( + "icon_theme_picker_trigger".into(), + current_value.clone(), + )) + .menu(move |window, cx| { + Some(cx.new(|cx| { + let file = file.clone(); + let current_value = current_value.clone(); + icon_theme_picker( + current_value, + move |theme_name, cx| { + update_settings_file(file.clone(), cx, move |settings, _cx| { + (field.write)( + settings, + Some(settings::IconThemeName(theme_name.into())), + ); + }) + .log_err(); // todo(settings_ui) don't log err + }, + window, + cx, + ) + })) + }) + .anchor(gpui::Corner::TopLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + .with_handle(ui::PopoverMenuHandle::default()) + .into_any_element() +} + #[cfg(test)] -mod test { +pub mod test { use super::*; @@ -2432,7 +3468,7 @@ mod test { } } - fn register_settings(cx: &mut App) { + pub fn register_settings(cx: &mut App) { settings::init(cx); theme::init(theme::LoadThemes::JustBase, cx); workspace::init_settings(cx); @@ -2507,11 +3543,12 @@ mod test { navbar_entry: selected_idx.expect("Must have a selected navbar entry"), navbar_entries: Vec::default(), navbar_scroll_handle: UniformListScrollHandle::default(), + navbar_focus_subscriptions: vec![], filter_table: vec![], has_query: false, content_handles: vec![], search_task: None, - page_scroll_handle: ScrollHandle::new(), + sub_page_scroll_handle: ScrollHandle::new(), focus_handle: cx.focus_handle(), navbar_focus_handle: NonFocusableHandle::new( NAVBAR_CONTAINER_TAB_INDEX, @@ -2527,6 +3564,7 @@ mod test { ), files_focus_handle: cx.focus_handle(), search_index: None, + list_state: ListState::new(0, 
gpui::ListAlignment::Top, px(0.0)), }; settings_window.build_filter_table(); @@ -2683,11 +3721,11 @@ mod test { ", toggle_page: "General Page", after: r" - > General Page + > General Page* v Project - Worktree Settings Content v AI - - General* + - General > Appearance & Behavior " ); @@ -2706,13 +3744,13 @@ mod test { ", toggle_page: "General Page", after: r" - v General Page + v General Page* - General - Privacy v Project - Worktree Settings Content v AI - - General* + - General > Appearance & Behavior " ); diff --git a/crates/snippet/Cargo.toml b/crates/snippet/Cargo.toml index f4c2d9a87465be8c319e373e6dfed9399b1ba4a4..2dde5c2d005ba699e644589219be94123edfb3b9 100644 --- a/crates/snippet/Cargo.toml +++ b/crates/snippet/Cargo.toml @@ -15,4 +15,3 @@ doctest = false [dependencies] anyhow.workspace = true smallvec.workspace = true -workspace-hack.workspace = true diff --git a/crates/snippet_provider/Cargo.toml b/crates/snippet_provider/Cargo.toml index af7ffcf30ef71a21a6cdfd2efaf1ce3cf763016b..d71439118e90213335213e1365c766eb760bff44 100644 --- a/crates/snippet_provider/Cargo.toml +++ b/crates/snippet_provider/Cargo.toml @@ -23,7 +23,6 @@ serde_json_lenient.workspace = true snippet.workspace = true util.workspace = true schemars.workspace = true -workspace-hack.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/snippets_ui/Cargo.toml b/crates/snippets_ui/Cargo.toml index 102374fc73cf8db4bd04c1db05b2b04a6ef38526..3139a41dada1c42f94de41d37e69be68a8de49a5 100644 --- a/crates/snippets_ui/Cargo.toml +++ b/crates/snippets_ui/Cargo.toml @@ -22,5 +22,4 @@ picker.workspace = true settings.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true diff --git a/crates/sqlez/Cargo.toml b/crates/sqlez/Cargo.toml index 6eb75aa171979283325d22300f95d584cee2cffb..5f4a0bef67efe3cf021d9d113922ca14f269fe85 100644 --- a/crates/sqlez/Cargo.toml +++ b/crates/sqlez/Cargo.toml @@ -21,4 +21,3 @@ sqlformat.workspace = true thread_local = "1.1.4" util.workspace = true uuid.workspace = true -workspace-hack.workspace = true diff --git a/crates/sqlez_macros/Cargo.toml b/crates/sqlez_macros/Cargo.toml index dca7921450547e0c603e5485388173afa0a11a4d..cff96d0b8949757761421c9003250343297bd14c 100644 --- a/crates/sqlez_macros/Cargo.toml +++ b/crates/sqlez_macros/Cargo.toml @@ -17,4 +17,3 @@ doctest = false sqlez.workspace = true sqlformat.workspace = true syn.workspace = true -workspace-hack.workspace = true diff --git a/crates/story/Cargo.toml b/crates/story/Cargo.toml index a9db0b66b04f174c0e0fa10cdd4d485c0f571346..798461402de00c102af9325c091eb9edfdf89b09 100644 --- a/crates/story/Cargo.toml +++ b/crates/story/Cargo.toml @@ -15,4 +15,3 @@ workspace = true gpui.workspace = true itertools.workspace = true smallvec.workspace = true -workspace-hack.workspace = true diff --git a/crates/storybook/Cargo.toml b/crates/storybook/Cargo.toml index f545cb63dad18e31ce3b22019c6a918cfb2d059a..638d070cba14cb871d33d53a0df0acb19ecb3840 100644 --- a/crates/storybook/Cargo.toml +++ b/crates/storybook/Cargo.toml @@ -37,7 +37,6 @@ theme.workspace = true title_bar = { workspace = true, features = ["stories"] } ui = { workspace = true, features = ["stories"] } workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/streaming_diff/Cargo.toml b/crates/streaming_diff/Cargo.toml index 
3774925289b9480c7e1a7362f15c97d47950feb9..b3645a182c3abf52c6ee2f2c23feaedeacf8574a 100644 --- a/crates/streaming_diff/Cargo.toml +++ b/crates/streaming_diff/Cargo.toml @@ -14,7 +14,6 @@ path = "src/streaming_diff.rs" [dependencies] ordered-float.workspace = true rope.workspace = true -workspace-hack.workspace = true [dev-dependencies] rand.workspace = true diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index 2e95bc8a5f60b7431ed5d75d57c3f2e2afbe2724..81916c842225085ceec4721dbd8d212608f6bcb9 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-sum-tree" +name = "sum_tree" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A sum tree data structure, a concurrency-friendly B-tree" @@ -17,7 +17,6 @@ doctest = false arrayvec = "0.7.1" rayon.workspace = true log.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 6df1d3da41556dc7eb93f7460b960ccddbe52de6..7418224c86f51a52a8a621da0f2a0c53dcfcf404 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -388,6 +388,7 @@ where T: Item, D: Dimension<'a, T::Summary>, { + /// Returns whether we found the item you were seeking for. #[track_caller] pub fn seek(&mut self, pos: &Target, bias: Bias) -> bool where @@ -397,6 +398,7 @@ where self.seek_internal(pos, bias, &mut ()) } + /// Returns whether we found the item you were seeking for. #[track_caller] pub fn seek_forward(&mut self, pos: &Target, bias: Bias) -> bool where @@ -437,7 +439,7 @@ where summary.0 } - /// Returns whether we found the item you were seeking for + /// Returns whether we found the item you were seeking for. #[track_caller] fn seek_internal( &mut self, diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index bfd2423c9230ea4246509e164d7a03f4890cbf4a..ab0e9d03c4594b89159893c7a671e8a9e3928b3f 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -82,6 +82,11 @@ pub trait Dimension<'a, S: Summary>: Clone { fn zero(cx: S::Context<'_>) -> Self; fn add_summary(&mut self, summary: &'a S, cx: S::Context<'_>); + #[must_use] + fn with_added_summary(mut self, summary: &'a S, cx: S::Context<'_>) -> Self { + self.add_summary(summary, cx); + self + } fn from_summary(summary: &'a S, cx: S::Context<'_>) -> Self { let mut dimension = Self::zero(cx); @@ -371,12 +376,122 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, 'b, S>( + /// A more efficient version of `Cursor::new()` + `Cursor::seek()` + `Cursor::item()`. + /// + /// Only returns the item that exactly has the target match. 
+ pub fn find_exact<'a, 'slf, D, Target>( + &'slf self, + cx: ::Context<'a>, + target: &Target, + bias: Bias, + ) -> (D, D, Option<&'slf T>) + where + D: Dimension<'slf, T::Summary>, + Target: SeekTarget<'slf, T::Summary, D>, + { + let tree_end = D::zero(cx).with_added_summary(self.summary(), cx); + let comparison = target.cmp(&tree_end, cx); + if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == Bias::Right) + { + return (tree_end.clone(), tree_end, None); + } + + let mut pos = D::zero(cx); + return match Self::find_recurse::<_, _, true>(cx, target, bias, &mut pos, self) { + Some((item, end)) => (pos, end, Some(item)), + None => (pos.clone(), pos, None), + }; + } + + /// A more efficient version of `Cursor::new()` + `Cursor::seek()` + `Cursor::item()` + pub fn find<'a, 'slf, D, Target>( + &'slf self, + cx: ::Context<'a>, + target: &Target, + bias: Bias, + ) -> (D, D, Option<&'slf T>) + where + D: Dimension<'slf, T::Summary>, + Target: SeekTarget<'slf, T::Summary, D>, + { + let tree_end = D::zero(cx).with_added_summary(self.summary(), cx); + let comparison = target.cmp(&tree_end, cx); + if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == Bias::Right) + { + return (tree_end.clone(), tree_end, None); + } + + let mut pos = D::zero(cx); + return match Self::find_recurse::<_, _, false>(cx, target, bias, &mut pos, self) { + Some((item, end)) => (pos, end, Some(item)), + None => (pos.clone(), pos, None), + }; + } + + fn find_recurse<'tree, 'a, D, Target, const EXACT: bool>( + cx: ::Context<'a>, + target: &Target, + bias: Bias, + position: &mut D, + this: &'tree SumTree, + ) -> Option<(&'tree T, D)> + where + D: Dimension<'tree, T::Summary>, + Target: SeekTarget<'tree, T::Summary, D>, + { + match &*this.0 { + Node::Internal { + child_summaries, + child_trees, + .. + } => { + for (child_tree, child_summary) in child_trees.iter().zip(child_summaries) { + let child_end = position.clone().with_added_summary(child_summary, cx); + + let comparison = target.cmp(&child_end, cx); + let target_in_child = comparison == Ordering::Less + || (comparison == Ordering::Equal && bias == Bias::Left); + if target_in_child { + return Self::find_recurse::( + cx, target, bias, position, child_tree, + ); + } + *position = child_end; + } + } + Node::Leaf { + items, + item_summaries, + .. 
+ } => { + for (item, item_summary) in items.iter().zip(item_summaries) { + let mut child_end = position.clone(); + child_end.add_summary(item_summary, cx); + + let comparison = target.cmp(&child_end, cx); + let entry_found = if EXACT { + comparison == Ordering::Equal + } else { + comparison == Ordering::Less + || (comparison == Ordering::Equal && bias == Bias::Left) + }; + if entry_found { + return Some((item, child_end)); + } + + *position = child_end; + } + } + } + None + } + + pub fn cursor<'a, 'b, D>( &'a self, cx: ::Context<'b>, - ) -> Cursor<'a, 'b, T, S> + ) -> Cursor<'a, 'b, T, D> where - S: Dimension<'a, T::Summary>, + D: Dimension<'a, T::Summary>, { Cursor::new(self, cx) } @@ -787,9 +902,8 @@ impl SumTree { key: &T::Key, cx: ::Context<'a>, ) -> Option<&'a T> { - let mut cursor = self.cursor::(cx); - if cursor.seek(key, Bias::Left) { - cursor.item() + if let (_, _, Some(item)) = self.find_exact::(cx, key, Bias::Left) { + Some(item) } else { None } diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 62630407083fc34d74fef0ebf5e17a44ce40f68e..3e56194dddd9910f819e91c209f6701b55efdd02 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -54,9 +54,10 @@ impl TreeMap { } pub fn get(&self, key: &K) -> Option<&V> { - let mut cursor = self.0.cursor::>(()); - cursor.seek(&MapKeyRef(Some(key)), Bias::Left); - if let Some(item) = cursor.item() { + let (.., item) = self + .0 + .find::, _>((), &MapKeyRef(Some(key)), Bias::Left); + if let Some(item) = item { if Some(key) == item.key().0.as_ref() { Some(&item.value) } else { diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml index 1ee8ca4ffc094210dd1edf231d9160556829745a..5b86367f35d508579ac6ba999fc8c9236e7fd66a 100644 --- a/crates/supermaven/Cargo.toml +++ b/crates/supermaven/Cargo.toml @@ -31,7 +31,6 @@ text.workspace = true ui.workspace = true unicode-segmentation.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/supermaven_api/Cargo.toml b/crates/supermaven_api/Cargo.toml index 6b6823095d5b4958856f552e14305c3d4f53e630..28868a9a7433f995e99b861cf7f6e9aeeb28942f 100644 --- a/crates/supermaven_api/Cargo.toml +++ b/crates/supermaven_api/Cargo.toml @@ -21,4 +21,3 @@ serde.workspace = true serde_json.workspace = true smol.workspace = true util.workspace = true -workspace-hack.workspace = true diff --git a/crates/svg_preview/Cargo.toml b/crates/svg_preview/Cargo.toml index 63e9e41bbe4d40945f854319a0adc88388f485cd..f64e60afe282da0da6780cc45097c751a8e7e8c1 100644 --- a/crates/svg_preview/Cargo.toml +++ b/crates/svg_preview/Cargo.toml @@ -18,4 +18,3 @@ gpui.workspace = true multi_buffer.workspace = true ui.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/system_specs/Cargo.toml b/crates/system_specs/Cargo.toml index 8ef1b581ae21632c4894b132c9c52f617e016e7f..86ac3c09116a00d8061f88fb52c5fe884a1a3fe4 100644 --- a/crates/system_specs/Cargo.toml +++ b/crates/system_specs/Cargo.toml @@ -22,7 +22,6 @@ human_bytes.workspace = true release_channel.workspace = true serde.workspace = true sysinfo.workspace = true -workspace-hack.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] pciid-parser.workspace = true diff --git a/crates/system_specs/src/system_specs.rs b/crates/system_specs/src/system_specs.rs index 
38f6ff4f0f57d27e55200c0855cdd212bac61e9e..5c2b0a7fed82c53bbaab5f0ee9a8cc74462870d8 100644 --- a/crates/system_specs/src/system_specs.rs +++ b/crates/system_specs/src/system_specs.rs @@ -36,7 +36,7 @@ impl SystemSpecs { let release_channel = ReleaseChannel::global(cx); let os_name = telemetry::os_name(); let system = System::new_with_specifics( - RefreshKind::new().with_memory(MemoryRefreshKind::everything()), + RefreshKind::nothing().with_memory(MemoryRefreshKind::everything()), ); let memory = system.total_memory(); let architecture = env::consts::ARCH; @@ -79,7 +79,7 @@ impl SystemSpecs { let os_name = telemetry::os_name(); let os_version = telemetry::os_version(); let system = System::new_with_specifics( - RefreshKind::new().with_memory(MemoryRefreshKind::everything()), + RefreshKind::nothing().with_memory(MemoryRefreshKind::everything()), ); let memory = system.total_memory(); let architecture = env::consts::ARCH; diff --git a/crates/tab_switcher/Cargo.toml b/crates/tab_switcher/Cargo.toml index d578c76f349d0f7b27764974ab2d1a9c84529dd6..36e4ba77342796ae5967e81cd34e01b8d41aecf6 100644 --- a/crates/tab_switcher/Cargo.toml +++ b/crates/tab_switcher/Cargo.toml @@ -27,7 +27,6 @@ smol.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [dev-dependencies] anyhow.workspace = true diff --git a/crates/task/Cargo.toml b/crates/task/Cargo.toml index dceaa63616decf21ec6ee6aaf99a3b2d6e2f715a..b3cb63bf006a972e08fc0ee56e01b6e31c6beee6 100644 --- a/crates/task/Cargo.toml +++ b/crates/task/Cargo.toml @@ -34,7 +34,6 @@ serde_json_lenient.workspace = true sha2.workspace = true shellexpand.workspace = true util.workspace = true -workspace-hack.workspace = true zed_actions.workspace = true [dev-dependencies] diff --git a/crates/task/src/task.rs b/crates/task/src/task.rs index 9f7a10f2c5cace3a8449cf366fd08755f039cd5d..280bf5ccdb91271d7ff738654d507573c9d667d4 100644 --- a/crates/task/src/task.rs +++ b/crates/task/src/task.rs @@ -3,31 +3,30 @@ mod adapter_schema; mod debug_format; mod serde_helpers; -mod shell_builder; pub mod static_source; mod task_template; mod vscode_debug_format; mod vscode_format; +use anyhow::Context as _; use collections::{HashMap, HashSet, hash_map}; use gpui::SharedString; -use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::path::PathBuf; use std::str::FromStr; -use util::get_system_shell; pub use adapter_schema::{AdapterSchema, AdapterSchemas}; pub use debug_format::{ AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest, Request, TcpArgumentsTemplate, ZedDebugConfig, }; -pub use shell_builder::{ShellBuilder, ShellKind}; pub use task_template::{ DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates, substitute_variables_in_map, substitute_variables_in_str, }; +pub use util::shell::{Shell, ShellKind}; +pub use util::shell_builder::ShellBuilder; pub use vscode_debug_format::VsCodeDebugTaskFile; pub use vscode_format::VsCodeTaskFile; pub use zed_actions::RevealTarget; @@ -317,49 +316,32 @@ pub struct TaskContext { #[derive(Clone, Debug)] pub struct RunnableTag(pub SharedString); -/// Shell configuration to open the terminal with. -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)] -#[serde(rename_all = "snake_case")] -pub enum Shell { - /// Use the system's default terminal configuration in /etc/passwd - #[default] - System, - /// Use a specific program with no arguments. 
- Program(String), - /// Use a specific program with arguments. - WithArguments { - /// The program to run. - program: String, - /// The arguments to pass to the program. - args: Vec, - /// An optional string to override the title of the terminal tab - title_override: Option, - }, +pub fn shell_from_proto(proto: proto::Shell) -> anyhow::Result { + let shell_type = proto.shell_type.context("invalid shell type")?; + let shell = match shell_type { + proto::shell::ShellType::System(_) => Shell::System, + proto::shell::ShellType::Program(program) => Shell::Program(program), + proto::shell::ShellType::WithArguments(program) => Shell::WithArguments { + program: program.program, + args: program.args, + title_override: None, + }, + }; + Ok(shell) } -impl Shell { - pub fn program(&self) -> String { - match self { - Shell::Program(program) => program.clone(), - Shell::WithArguments { program, .. } => program.clone(), - Shell::System => get_system_shell(), - } - } - - pub fn program_and_args(&self) -> (String, &[String]) { - match self { - Shell::Program(program) => (program.clone(), &[]), - Shell::WithArguments { program, args, .. } => (program.clone(), args), - Shell::System => (get_system_shell(), &[]), - } - } - - pub fn shell_kind(&self) -> ShellKind { - match self { - Shell::Program(program) => ShellKind::new(program), - Shell::WithArguments { program, .. } => ShellKind::new(program), - Shell::System => ShellKind::system(), - } +pub fn shell_to_proto(shell: Shell) -> proto::Shell { + let shell_type = match shell { + Shell::System => proto::shell::ShellType::System(proto::System {}), + Shell::Program(program) => proto::shell::ShellType::Program(program), + Shell::WithArguments { + program, + args, + title_override: _, + } => proto::shell::ShellType::WithArguments(proto::shell::WithArguments { program, args }), + }; + proto::Shell { + shell_type: Some(shell_type), } } diff --git a/crates/tasks_ui/Cargo.toml b/crates/tasks_ui/Cargo.toml index 77b143471871eb7d0578aefd7dfcbe968a0d9c5f..2f75a0b57c68161787634e42de7cdfd8d8d7b7a9 100644 --- a/crates/tasks_ui/Cargo.toml +++ b/crates/tasks_ui/Cargo.toml @@ -29,7 +29,6 @@ util.workspace = true workspace.workspace = true language.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 0563cd517225ac5781e34575cacbda54b303fe08..f82321feeb245b4ee3b6d56627387c8594d5db8e 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -664,10 +664,10 @@ impl PickerDelegate for TasksModalDelegate { .child( left_button .map(|(label, action)| { - let keybind = KeyBinding::for_action(&*action, window, cx); + let keybind = KeyBinding::for_action(&*action, cx); Button::new("edit-current-task", label) - .when_some(keybind, |this, keybind| this.key_binding(keybind)) + .key_binding(keybind) .on_click(move |_, window, cx| { window.dispatch_action(action.boxed_clone(), cx); }) @@ -682,7 +682,7 @@ impl PickerDelegate for TasksModalDelegate { secondary: current_modifiers.secondary(), } .boxed_clone(); - this.children(KeyBinding::for_action(&*action, window, cx).map(|keybind| { + this.child({ let spawn_oneshot_label = if current_modifiers.secondary() { "Spawn Oneshot Without History" } else { @@ -690,44 +690,35 @@ impl PickerDelegate for TasksModalDelegate { }; Button::new("spawn-onehshot", spawn_oneshot_label) - .key_binding(keybind) + .key_binding(KeyBinding::for_action(&*action, 
cx)) .on_click(move |_, window, cx| { window.dispatch_action(action.boxed_clone(), cx) }) - })) + }) } else if current_modifiers.secondary() { - this.children( - KeyBinding::for_action(&menu::SecondaryConfirm, window, cx).map( - |keybind| { - let label = if is_recent_selected { - "Rerun Without History" - } else { - "Spawn Without History" - }; - Button::new("spawn", label).key_binding(keybind).on_click( - move |_, window, cx| { - window.dispatch_action( - menu::SecondaryConfirm.boxed_clone(), - cx, - ) - }, - ) - }, - ), - ) + this.child({ + let label = if is_recent_selected { + "Rerun Without History" + } else { + "Spawn Without History" + }; + Button::new("spawn", label) + .key_binding(KeyBinding::for_action(&menu::SecondaryConfirm, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx) + }) + }) } else { - this.children(KeyBinding::for_action(&menu::Confirm, window, cx).map( - |keybind| { - let run_entry_label = - if is_recent_selected { "Rerun" } else { "Spawn" }; - - Button::new("spawn", run_entry_label) - .key_binding(keybind) - .on_click(|_, window, cx| { - window.dispatch_action(menu::Confirm.boxed_clone(), cx); - }) - }, - )) + this.child({ + let run_entry_label = + if is_recent_selected { "Rerun" } else { "Spawn" }; + + Button::new("spawn", run_entry_label) + .key_binding(KeyBinding::for_action(&menu::Confirm, cx)) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Confirm.boxed_clone(), cx); + }) + }) } }) .into_any_element(), diff --git a/crates/telemetry/Cargo.toml b/crates/telemetry/Cargo.toml index 680f12e5732b3e09261252d8463f0cb1f86b2195..ed166ea4c711df2779e067cb94b5e5a1f8869f25 100644 --- a/crates/telemetry/Cargo.toml +++ b/crates/telemetry/Cargo.toml @@ -16,4 +16,3 @@ serde.workspace = true serde_json.workspace = true telemetry_events.workspace = true futures.workspace = true -workspace-hack.workspace = true diff --git a/crates/telemetry_events/Cargo.toml b/crates/telemetry_events/Cargo.toml index d2bdcf20d73dc534c3695703c49dda856228d143..87a02baf06549748e7ac5ccf6ee6ae396681f87c 100644 --- a/crates/telemetry_events/Cargo.toml +++ b/crates/telemetry_events/Cargo.toml @@ -15,4 +15,3 @@ path = "src/telemetry_events.rs" semantic_version.workspace = true serde.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/crates/terminal/Cargo.toml b/crates/terminal/Cargo.toml index 3c08c1b8617b0ae9c9ca1ec02a25243070e6f4db..0dc7338e04b79e2a50effbea180dccf1587c66b1 100644 --- a/crates/terminal/Cargo.toml +++ b/crates/terminal/Cargo.toml @@ -39,7 +39,6 @@ thiserror.workspace = true util.workspace = true regex.workspace = true urlencoding.workspace = true -workspace-hack.workspace = true itertools.workspace = true [target.'cfg(windows)'.dependencies] diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index a1a559051abdd20a1f0e8386fa0c71f683f1f40c..f8e8a122f845253175ba6f176c12408056163fdb 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -89,11 +89,11 @@ pub struct PtyProcessInfo { impl PtyProcessInfo { pub fn new(pty: &Pty) -> PtyProcessInfo { - let process_refresh_kind = ProcessRefreshKind::new() + let process_refresh_kind = ProcessRefreshKind::nothing() .with_cmd(UpdateKind::Always) .with_cwd(UpdateKind::Always) .with_exe(UpdateKind::Always); - let refresh_kind = RefreshKind::new().with_processes(process_refresh_kind); + let refresh_kind = RefreshKind::nothing().with_processes(process_refresh_kind); let system = 
System::new_with_specifics(refresh_kind); PtyProcessInfo { @@ -112,6 +112,7 @@ impl PtyProcessInfo { let pid = self.pid_getter.pid()?; if self.system.refresh_processes_specifics( sysinfo::ProcessesToUpdate::Some(&[pid]), + true, self.refresh_kind, ) == 1 { diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 0d1073b41bc19e01ac03de24b40e93a13488baca..7da7ba78d90848ed3349c840eff13b13e5b23c34 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -67,7 +67,7 @@ use thiserror::Error; use gpui::{ App, AppContext as _, Bounds, ClipboardItem, Context, EventEmitter, Hsla, Keystroke, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Point, Rgba, - ScrollWheelEvent, SharedString, Size, Task, TouchPhase, Window, actions, black, px, + ScrollWheelEvent, Size, Task, TouchPhase, Window, actions, black, px, }; use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str}; @@ -277,7 +277,7 @@ pub struct TerminalError { pub directory: Option, pub program: Option, pub args: Option>, - pub title_override: Option, + pub title_override: Option, pub source: std::io::Error, } @@ -446,14 +446,14 @@ impl TerminalBuilder { struct ShellParams { program: String, args: Option>, - title_override: Option, + title_override: Option, } impl ShellParams { fn new( program: String, args: Option>, - title_override: Option, + title_override: Option, ) -> Self { log::info!("Using {program} as shell"); Self { @@ -466,16 +466,15 @@ impl TerminalBuilder { let shell_params = match shell.clone() { Shell::System => { - #[cfg(target_os = "windows")] - { + if cfg!(windows) { Some(ShellParams::new( util::shell::get_windows_system_shell(), None, None, )) + } else { + None } - #[cfg(not(target_os = "windows"))] - None } Shell::Program(program) => Some(ShellParams::new(program, None, None)), Shell::WithArguments { @@ -495,7 +494,12 @@ impl TerminalBuilder { .unwrap_or(params.program.clone()) }); - let shell_kind = shell.shell_kind(); + // Note: when remoting, this shell_kind will scrutinize `ssh` or + // `wsl.exe` as a shell and fall back to posix or powershell based on + // the compilation target. This is fine right now due to the restricted + // way we use the return value, but would become incorrect if we + // supported remoting into windows. + let shell_kind = shell.shell_kind(cfg!(windows)); let pty_options = { let alac_shell = shell_params.as_ref().map(|params| { @@ -510,10 +514,8 @@ impl TerminalBuilder { working_directory: working_directory.clone(), drain_on_exit: true, env: env.clone().into_iter().collect(), - // We do not want to escape arguments if we are using CMD as our shell. - // If we do we end up with too many quotes/escaped quotes for CMD to handle. 
#[cfg(windows)] - escape_args: shell_kind != util::shell::ShellKind::Cmd, + escape_args: shell_kind.tty_escape_args(), } }; @@ -819,7 +821,7 @@ pub struct Terminal { pub last_content: TerminalContent, pub selection_head: Option, pub breadcrumb_text: String, - title_override: Option, + title_override: Option, scroll_px: Pixels, next_link_id: usize, selection_phase: SelectionPhase, @@ -2183,21 +2185,13 @@ fn task_summary(task: &TaskState, error_code: Option) -> (bool, String, Str .full_label .replace("\r\n", "\r") .replace('\n', "\r"); - let (success, task_line) = match error_code { - Some(0) => ( - true, - format!("{TASK_DELIMITER}Task `{escaped_full_label}` finished successfully"), - ), - Some(error_code) => ( - false, - format!( - "{TASK_DELIMITER}Task `{escaped_full_label}` finished with non-zero error code: {error_code}" - ), - ), - None => ( - false, - format!("{TASK_DELIMITER}Task `{escaped_full_label}` finished"), + let success = error_code == Some(0); + let task_line = match error_code { + Some(0) => format!("{TASK_DELIMITER}Task `{escaped_full_label}` finished successfully"), + Some(error_code) => format!( + "{TASK_DELIMITER}Task `{escaped_full_label}` finished with non-zero error code: {error_code}" ), + None => format!("{TASK_DELIMITER}Task `{escaped_full_label}` finished"), }; let escaped_command_label = task .spawned_task @@ -2391,8 +2385,8 @@ mod tests { cx.executor().allow_parking(); let (completion_tx, completion_rx) = smol::channel::unbounded(); - let (program, args) = - ShellBuilder::new(&Shell::System).build(Some("echo".to_owned()), &["hello".to_owned()]); + let (program, args) = ShellBuilder::new(&Shell::System, false) + .build(Some("echo".to_owned()), &["hello".to_owned()]); let terminal = cx.new(|cx| { TerminalBuilder::new( None, @@ -2510,7 +2504,7 @@ mod tests { cx.executor().allow_parking(); let (completion_tx, completion_rx) = smol::channel::unbounded(); - let (program, args) = ShellBuilder::new(&Shell::System) + let (program, args) = ShellBuilder::new(&Shell::System, false) .build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]); let terminal = cx.new(|cx| { TerminalBuilder::new( diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 27cccea126fecd7d015b21cec6d18809b756bdf8..b8576a1de308d8bf3bd098907018b94cb73eefa0 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -8,9 +8,8 @@ use serde::{Deserialize, Serialize}; pub use settings::AlternateScroll; use settings::{ - CursorShapeContent, SettingsContent, ShowScrollbar, TerminalBlink, TerminalDockPosition, - TerminalLineHeight, TerminalSettingsContent, VenvSettings, WorkingDirectory, - merge_from::MergeFrom, + ShowScrollbar, TerminalBlink, TerminalDockPosition, TerminalLineHeight, VenvSettings, + WorkingDirectory, merge_from::MergeFrom, }; use task::Shell; use theme::FontFamilyName; @@ -67,7 +66,7 @@ fn settings_shell_to_task_shell(shell: settings::Shell) -> Shell { } => Shell::WithArguments { program, args, - title_override, + title_override: title_override.map(Into::into), }, } } @@ -116,81 +115,6 @@ impl settings::Settings for TerminalSettings { minimum_contrast: user_content.minimum_contrast.unwrap(), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, content: &mut SettingsContent) { - let mut default = TerminalSettingsContent::default(); - let current = content.terminal.as_mut().unwrap_or(&mut default); - let name = |s| format!("terminal.integrated.{s}"); - - 
vscode.f32_setting(&name("fontSize"), &mut current.font_size); - vscode.font_family_setting( - &name("fontFamily"), - &mut current.font_family, - &mut current.font_fallbacks, - ); - vscode.bool_setting(&name("copyOnSelection"), &mut current.copy_on_select); - vscode.bool_setting("macOptionIsMeta", &mut current.option_as_meta); - vscode.usize_setting("scrollback", &mut current.max_scroll_history_lines); - match vscode.read_bool(&name("cursorBlinking")) { - Some(true) => current.blinking = Some(TerminalBlink::On), - Some(false) => current.blinking = Some(TerminalBlink::Off), - None => {} - } - vscode.enum_setting( - &name("cursorStyle"), - &mut current.cursor_shape, - |s| match s { - "block" => Some(CursorShapeContent::Block), - "line" => Some(CursorShapeContent::Bar), - "underline" => Some(CursorShapeContent::Underline), - _ => None, - }, - ); - // they also have "none" and "outline" as options but just for the "Inactive" variant - if let Some(height) = vscode - .read_value(&name("lineHeight")) - .and_then(|v| v.as_f64()) - { - current.line_height = Some(TerminalLineHeight::Custom(height as f32)) - } - - #[cfg(target_os = "windows")] - let platform = "windows"; - #[cfg(target_os = "linux")] - let platform = "linux"; - #[cfg(target_os = "macos")] - let platform = "osx"; - #[cfg(target_os = "freebsd")] - let platform = "freebsd"; - - // TODO: handle arguments - let shell_name = format!("{platform}Exec"); - if let Some(s) = vscode.read_string(&name(&shell_name)) { - current.project.shell = Some(settings::Shell::Program(s.to_owned())) - } - - if let Some(env) = vscode - .read_value(&name(&format!("env.{platform}"))) - .and_then(|v| v.as_object()) - { - for (k, v) in env { - if v.is_null() - && let Some(zed_env) = current.project.env.as_mut() - { - zed_env.remove(k); - } - let Some(v) = v.as_str() else { continue }; - if let Some(zed_env) = current.project.env.as_mut() { - zed_env.insert(k.clone(), v.to_owned()); - } else { - current.project.env = Some([(k.clone(), v.to_owned())].into_iter().collect()) - } - } - } - if content.terminal.is_none() && default != TerminalSettingsContent::default() { - content.terminal = Some(default) - } - } } #[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index 85ee506d69444fa7d58b536acac3a00088e3f047..1800562e2fd262d040ef957b402cc650681956a5 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -46,7 +46,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 7952eb51e8ce2abe53c9455bcc8225f052ddbfca..5ba6c6c503d1b47b3a40149e3a6becf44565e8cc 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -22,20 +22,19 @@ use settings::{Settings, TerminalDockPosition}; use task::{RevealStrategy, RevealTarget, Shell, ShellBuilder, SpawnInTerminal, TaskId}; use terminal::{Terminal, terminal_settings::TerminalSettings}; use ui::{ - ButtonCommon, Clickable, ContextMenu, FluentBuilder, PopoverMenu, Toggleable, Tooltip, - prelude::*, + ButtonLike, Clickable, ContextMenu, FluentBuilder, PopoverMenu, SplitButton, Toggleable, + Tooltip, prelude::*, }; use util::{ResultExt, TryFutureExt}; use workspace::{ 
ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight, ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane, - MoveItemToPaneInDirection, NewTerminal, Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, - SplitRight, SplitUp, SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, - Workspace, + MoveItemToPaneInDirection, MovePaneDown, MovePaneLeft, MovePaneRight, MovePaneUp, NewTerminal, + Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitRight, SplitUp, SwapPaneDown, + SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, Workspace, dock::{DockPosition, Panel, PanelEvent, PanelHandle}, item::SerializableItem, move_active_item, move_item, pane, - ui::IconName, }; use anyhow::{Result, anyhow}; @@ -211,11 +210,10 @@ impl TerminalPanel { .on_click(cx.listener(|pane, _, window, cx| { pane.toggle_zoom(&workspace::ToggleZoom, window, cx); })) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( if zoomed { "Zoom Out" } else { "Zoom In" }, &ToggleZoom, - window, cx, ) }) @@ -526,23 +524,18 @@ impl TerminalPanel { window: &mut Window, cx: &mut Context, ) -> Task>> { - let remote_client = self - .workspace - .update(cx, |workspace, cx| { - let project = workspace.project().read(cx); - if project.is_via_collab() { - Err(anyhow!("cannot spawn tasks as a guest")) - } else { - Ok(project.remote_client()) - } - }) - .flatten(); - - let remote_client = match remote_client { - Ok(remote_client) => remote_client, - Err(e) => return Task::ready(Err(e)), + let Some(workspace) = self.workspace.upgrade() else { + return Task::ready(Err(anyhow!("failed to read workspace"))); }; + let project = workspace.read(cx).project().read(cx); + + if project.is_via_collab() { + return Task::ready(Err(anyhow!("cannot spawn tasks as a guest"))); + } + + let remote_client = project.remote_client(); + let is_windows = project.path_style(cx).is_windows(); let remote_shell = remote_client .as_ref() .and_then(|remote_client| remote_client.read(cx).shell()); @@ -555,7 +548,7 @@ impl TerminalPanel { task.shell.clone() }; - let builder = ShellBuilder::new(&shell); + let builder = ShellBuilder::new(&shell, is_windows); let command_label = builder.command_label(task.command.as_deref().unwrap_or("")); let (command, args) = builder.build(task.command.clone(), &task.args); @@ -818,6 +811,7 @@ impl TerminalPanel { cx: &mut Context, ) -> Task>> { let workspace = self.workspace.clone(); + cx.spawn_in(window, async move |terminal_panel, cx| { if workspace.update(cx, |workspace, cx| !is_enabled_in_workspace(workspace, cx))? { anyhow::bail!("terminal not yet supported for collaborative projects"); @@ -829,43 +823,59 @@ impl TerminalPanel { let project = workspace.read_with(cx, |workspace, _| workspace.project().clone())?; let terminal = project .update(cx, |project, cx| project.create_terminal_shell(cwd, cx))? 
- .await?; - let result = workspace.update_in(cx, |workspace, window, cx| { - let terminal_view = Box::new(cx.new(|cx| { - TerminalView::new( - terminal.clone(), - workspace.weak_handle(), - workspace.database_id(), - workspace.project().downgrade(), - window, - cx, - ) - })); + .await; - match reveal_strategy { - RevealStrategy::Always => { - workspace.focus_panel::(window, cx); - } - RevealStrategy::NoFocus => { - workspace.open_panel::(window, cx); - } - RevealStrategy::Never => {} - } + match terminal { + Ok(terminal) => { + let result = workspace.update_in(cx, |workspace, window, cx| { + let terminal_view = Box::new(cx.new(|cx| { + TerminalView::new( + terminal.clone(), + workspace.weak_handle(), + workspace.database_id(), + workspace.project().downgrade(), + window, + cx, + ) + })); - pane.update(cx, |pane, cx| { - let focus = pane.has_focus(window, cx) - || matches!(reveal_strategy, RevealStrategy::Always); - pane.add_item(terminal_view, true, focus, None, window, cx); - }); + match reveal_strategy { + RevealStrategy::Always => { + workspace.focus_panel::(window, cx); + } + RevealStrategy::NoFocus => { + workspace.open_panel::(window, cx); + } + RevealStrategy::Never => {} + } - Ok(terminal.downgrade()) - })?; - terminal_panel.update(cx, |terminal_panel, cx| { - terminal_panel.pending_terminals_to_add = - terminal_panel.pending_terminals_to_add.saturating_sub(1); - terminal_panel.serialize(cx) - })?; - result + pane.update(cx, |pane, cx| { + let focus = pane.has_focus(window, cx) + || matches!(reveal_strategy, RevealStrategy::Always); + pane.add_item(terminal_view, true, focus, None, window, cx); + }); + + Ok(terminal.downgrade()) + })?; + terminal_panel.update(cx, |terminal_panel, cx| { + terminal_panel.pending_terminals_to_add = + terminal_panel.pending_terminals_to_add.saturating_sub(1); + terminal_panel.serialize(cx) + })?; + result + } + Err(error) => { + pane.update_in(cx, |pane, window, cx| { + let focus = pane.has_focus(window, cx); + let failed_to_spawn = cx.new(|cx| FailedToSpawnTerminal { + error: error.to_string(), + focus_handle: cx.focus_handle(), + }); + pane.add_item(Box::new(failed_to_spawn), true, focus, None, window, cx); + })?; + Err(error) + } + } }) } @@ -1060,6 +1070,16 @@ impl TerminalPanel { cx.notify(); } } + + fn move_pane_to_border(&mut self, direction: SplitDirection, cx: &mut Context) { + if self + .center + .move_to_border(&self.active_pane, direction) + .unwrap() + { + cx.notify(); + } + } } fn is_enabled_in_workspace(workspace: &Workspace, cx: &App) -> bool { @@ -1082,6 +1102,7 @@ pub fn new_terminal_pane( Default::default(), None, NewTerminal.boxed_clone(), + false, window, cx, ); @@ -1283,6 +1304,82 @@ fn add_paths_to_terminal( } } +struct FailedToSpawnTerminal { + error: String, + focus_handle: FocusHandle, +} + +impl Focusable for FailedToSpawnTerminal { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for FailedToSpawnTerminal { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let popover_menu = PopoverMenu::new("settings-popover") + .trigger( + IconButton::new("icon-button-popover", IconName::ChevronDown) + .icon_size(IconSize::XSmall), + ) + .menu(move |window, cx| { + Some(ContextMenu::build(window, cx, |context_menu, _, _| { + context_menu + .action("Open Settings", zed_actions::OpenSettings.boxed_clone()) + .action( + "Edit settings.json", + zed_actions::OpenSettingsFile.boxed_clone(), + ) + })) + }) + .anchor(Corner::TopRight) + .offset(gpui::Point { + x: 
px(0.0), + y: px(2.0), + }); + + v_flex() + .track_focus(&self.focus_handle) + .size_full() + .p_4() + .items_center() + .justify_center() + .bg(cx.theme().colors().editor_background) + .child( + v_flex() + .max_w_112() + .items_center() + .justify_center() + .text_center() + .child(Label::new("Failed to spawn terminal")) + .child( + Label::new(self.error.to_string()) + .size(LabelSize::Small) + .color(Color::Muted) + .mb_4(), + ) + .child(SplitButton::new( + ButtonLike::new("open-settings-ui") + .child(Label::new("Edit Settings").size(LabelSize::Small)) + .on_click(|_, window, cx| { + window.dispatch_action(zed_actions::OpenSettings.boxed_clone(), cx); + }), + popover_menu.into_any_element(), + )), + ) + } +} + +impl EventEmitter<()> for FailedToSpawnTerminal {} + +impl workspace::Item for FailedToSpawnTerminal { + type Event = (); + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { + SharedString::new_static("Failed to spawn terminal") + } +} + impl EventEmitter for TerminalPanel {} impl Render for TerminalPanel { @@ -1409,6 +1506,18 @@ impl Render for TerminalPanel { .on_action(cx.listener(|terminal_panel, _: &SwapPaneDown, _, cx| { terminal_panel.swap_pane_in_direction(SplitDirection::Down, cx); })) + .on_action(cx.listener(|terminal_panel, _: &MovePaneLeft, _, cx| { + terminal_panel.move_pane_to_border(SplitDirection::Left, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &MovePaneRight, _, cx| { + terminal_panel.move_pane_to_border(SplitDirection::Right, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &MovePaneUp, _, cx| { + terminal_panel.move_pane_to_border(SplitDirection::Up, cx); + })) + .on_action(cx.listener(|terminal_panel, _: &MovePaneDown, _, cx| { + terminal_panel.move_pane_to_border(SplitDirection::Down, cx); + })) .on_action( cx.listener(|terminal_panel, action: &MoveItemToPane, window, cx| { let Some(&target_pane) = @@ -1555,6 +1664,10 @@ impl Panel for TerminalPanel { "TerminalPanel" } + fn panel_key() -> &'static str { + TERMINAL_PANEL_KEY + } + fn icon(&self, _window: &Window, cx: &App) -> Option { if (self.is_enabled(cx) || !self.has_no_terminals(cx)) && TerminalSettings::get_global(cx).button @@ -1625,22 +1738,18 @@ impl Render for InlineAssistTabBarButton { .on_click(cx.listener(|_, _, window, cx| { window.dispatch_action(InlineAssist::default().boxed_clone(), cx); })) - .tooltip(move |window, cx| { - Tooltip::for_action_in( - "Inline Assist", - &InlineAssist::default(), - &focus_handle, - window, - cx, - ) + .tooltip(move |_window, cx| { + Tooltip::for_action_in("Inline Assist", &InlineAssist::default(), &focus_handle, cx) }) } } #[cfg(test)] mod tests { + use std::num::NonZero; + use super::*; - use gpui::TestAppContext; + use gpui::{TestAppContext, UpdateGlobal as _}; use pretty_assertions::assert_eq; use project::FakeFs; use settings::SettingsStore; @@ -1695,6 +1804,46 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_bypass_max_tabs_limit(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let workspace = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); + + let (window_handle, terminal_panel) = workspace + .update(cx, |workspace, window, cx| { + let window_handle = window.window_handle(); + let terminal_panel = cx.new(|cx| TerminalPanel::new(workspace, window, cx)); + (window_handle, terminal_panel) + }) + .unwrap(); + + set_max_tabs(cx, Some(3)); + + for _ in 0..5 { + let task = window_handle + .update(cx, 
|_, window, cx| { + terminal_panel.update(cx, |panel, cx| { + panel.add_terminal_shell(None, RevealStrategy::Always, window, cx) + }) + }) + .unwrap(); + task.await.unwrap(); + } + + cx.run_until_parked(); + + let item_count = + terminal_panel.read_with(cx, |panel, cx| panel.active_pane.read(cx).items_len()); + + assert_eq!( + item_count, 5, + "Terminal panel should bypass max_tabs limit and have all 5 terminals" + ); + } + // A complex Unix command won't be properly parsed by the Windows terminal hence omit the test there. #[cfg(unix)] #[gpui::test] @@ -1758,6 +1907,65 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn renders_error_if_default_shell_fails(cx: &mut TestAppContext) { + init_test(cx); + + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.terminal.get_or_insert_default().project.shell = + Some(settings::Shell::Program("asdf".to_owned())); + }); + }); + }); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let workspace = cx.add_window(|window, cx| Workspace::test_new(project, window, cx)); + + let (window_handle, terminal_panel) = workspace + .update(cx, |workspace, window, cx| { + let window_handle = window.window_handle(); + let terminal_panel = cx.new(|cx| TerminalPanel::new(workspace, window, cx)); + (window_handle, terminal_panel) + }) + .unwrap(); + + window_handle + .update(cx, |_, window, cx| { + terminal_panel.update(cx, |terminal_panel, cx| { + terminal_panel.add_terminal_shell(None, RevealStrategy::Always, window, cx) + }) + }) + .unwrap() + .await + .unwrap_err(); + + window_handle + .update(cx, |_, _, cx| { + terminal_panel.update(cx, |terminal_panel, cx| { + assert!( + terminal_panel + .active_pane + .read(cx) + .items() + .any(|item| item.downcast::().is_some()), + "should spawn `FailedToSpawnTerminal` pane" + ); + }) + }) + .unwrap(); + } + + fn set_max_tabs(cx: &mut TestAppContext, value: Option) { + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.workspace.max_tabs = value.map(|v| NonZero::new(v).unwrap()) + }); + }); + } + pub fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 0f4f745b877bd6871fadd78c2c6136a268e51ded..ddcf1094dc13810cf04c90e421cdde56b4bf7b4b 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -840,9 +840,7 @@ impl TerminalView { .size(ButtonSize::Compact) .icon_color(Color::Default) .shape(ui::IconButtonShape::Square) - .tooltip(move |window, cx| { - Tooltip::for_action("Rerun task", &RerunTask, window, cx) - }) + .tooltip(move |_window, cx| Tooltip::for_action("Rerun task", &RerunTask, cx)) .on_click(move |_, window, cx| { window.dispatch_action(Box::new(terminal_rerun_override(&task_id)), cx); }), @@ -1211,6 +1209,10 @@ impl Item for TerminalView { None } + fn buffer_kind(&self, _: &App) -> workspace::item::ItemBufferKind { + workspace::item::ItemBufferKind::Singleton + } + fn clone_on_split( &self, workspace_id: Option, diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index e6c7d814948ea2657b2d7ef1786bd106fa4ea78a..a58f2e20cc781f5d688b9fb1ceef8a17c48e6cb8 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -23,12 +23,10 @@ log.workspace = true parking_lot.workspace = true postage.workspace = true rand = { workspace = true, optional = 
true } -regex.workspace = true rope.workspace = true smallvec.workspace = true sum_tree.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index a05da1243faa05f33708fe6858fc9dada3c0a1e0..cf2febdfc505b426fd8d224a2dc29f18d22cd1a8 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -6,7 +6,7 @@ use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::{Bias, Dimensions}; /// A timestamped position in a buffer -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)] +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { pub timestamp: clock::Lamport, /// The byte offset in the buffer @@ -45,19 +45,19 @@ impl Anchor { .then_with(|| self.bias.cmp(&other.bias)) } - pub fn min(&self, other: &Self, buffer: &BufferSnapshot) -> Self { + pub fn min<'a>(&'a self, other: &'a Self, buffer: &BufferSnapshot) -> &'a Self { if self.cmp(other, buffer).is_le() { - *self + self } else { - *other + other } } - pub fn max(&self, other: &Self, buffer: &BufferSnapshot) -> Self { + pub fn max<'a>(&'a self, other: &'a Self, buffer: &BufferSnapshot) -> &'a Self { if self.cmp(other, buffer).is_ge() { - *self + self } else { - *other + other } } @@ -99,13 +99,14 @@ impl Anchor { let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else { return false; }; - let mut fragment_cursor = buffer + let (.., item) = buffer .fragments - .cursor::, usize>>(&None); - fragment_cursor.seek(&Some(fragment_id), Bias::Left); - fragment_cursor - .item() - .is_some_and(|fragment| fragment.visible) + .find::, usize>, _>( + &None, + &Some(fragment_id), + Bias::Left, + ); + item.is_some_and(|fragment| fragment.visible) } } } diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 6604817edfe2dcc243ba837a770b361bd505a7ef..f87af381ff314f91469a9b5d438e667fe6ea190f 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -1,3 +1,4 @@ +use clock::Lamport; use std::{fmt::Debug, ops::Add}; use sum_tree::{ContextLessSummary, Dimension, Edit, Item, KeyedItem, SumTree}; @@ -11,10 +12,10 @@ struct OperationItem(T); #[derive(Clone, Debug)] pub struct OperationQueue(SumTree>); -#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] pub struct OperationKey(clock::Lamport); -#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +#[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct OperationSummary { pub key: OperationKey, pub len: usize, @@ -69,7 +70,10 @@ impl OperationQueue { impl ContextLessSummary for OperationSummary { fn zero() -> Self { - Default::default() + OperationSummary { + key: OperationKey::new(Lamport::MIN), + len: 0, + } } fn add_summary(&mut self, other: &Self) { @@ -93,7 +97,7 @@ impl Add<&Self> for OperationSummary { impl Dimension<'_, OperationSummary> for OperationKey { fn zero(_cx: ()) -> Self { - Default::default() + OperationKey::new(Lamport::MIN) } fn add_summary(&mut self, summary: &OperationSummary, _: ()) { @@ -123,11 +127,13 @@ impl KeyedItem for OperationItem { #[cfg(test)] mod tests { + use clock::ReplicaId; + use super::*; #[test] fn test_len() { - let mut clock = clock::Lamport::new(0); + let mut clock = clock::Lamport::new(ReplicaId::LOCAL); let mut queue = OperationQueue::new(); assert_eq!(queue.len(), 0); diff --git a/crates/text/src/tests.rs 
b/crates/text/src/tests.rs index 4298e704ab5f8fbe57af363379395ef23624cfcf..c9e04e407ffdb8ffde6b139e01d78822e54e1a4b 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -16,7 +16,7 @@ fn init_logger() { #[test] fn test_edit() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "abc"); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc"); assert_eq!(buffer.text(), "abc"); buffer.edit([(3..3, "def")]); assert_eq!(buffer.text(), "abcdef"); @@ -40,7 +40,11 @@ fn test_random_edits(mut rng: StdRng) { let mut reference_string = RandomCharIter::new(&mut rng) .take(reference_string_len) .collect::(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), reference_string.clone()); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + reference_string.clone(), + ); LineEnding::normalize(&mut reference_string); buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200))); @@ -176,7 +180,11 @@ fn test_line_endings() { LineEnding::Windows ); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "one\r\ntwo\rthree"); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "one\r\ntwo\rthree", + ); assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.line_ending(), LineEnding::Windows); buffer.check_invariants(); @@ -190,7 +198,7 @@ fn test_line_endings() { #[test] fn test_line_len() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); buffer.edit([(0..0, "abcd\nefg\nhij")]); buffer.edit([(12..12, "kl\nmno")]); buffer.edit([(18..18, "\npqrs\n")]); @@ -207,7 +215,7 @@ fn test_line_len() { #[test] fn test_common_prefix_at_position() { let text = "a = str; b = δα"; - let buffer = Buffer::new(0, BufferId::new(1).unwrap(), text); + let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); let offset1 = offset_after(text, "str"); let offset2 = offset_after(text, "δα"); @@ -256,7 +264,7 @@ fn test_common_prefix_at_position() { #[test] fn test_text_summary_for_range() { let buffer = Buffer::new( - 0, + ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ab\nefg\nhklm\nnopqrs\ntuvwxyz", ); @@ -348,7 +356,7 @@ fn test_text_summary_for_range() { #[test] fn test_chars_at() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); buffer.edit([(0..0, "abcd\nefgh\nij")]); buffer.edit([(12..12, "kl\nmno")]); buffer.edit([(18..18, "\npqrs")]); @@ -370,7 +378,7 @@ fn test_chars_at() { assert_eq!(chars.collect::(), "PQrs"); // Regression test: - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]); buffer.edit([(60..60, "\n")]); @@ -380,7 +388,7 @@ fn test_chars_at() { #[test] fn test_anchors() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); buffer.edit([(0..0, "abc")]); let left_anchor = buffer.anchor_before(2); let right_anchor = buffer.anchor_after(2); @@ -498,7 +506,7 @@ fn test_anchors() { #[test] fn test_anchors_at_start_and_end() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new(ReplicaId::LOCAL, 
BufferId::new(1).unwrap(), ""); let before_start_anchor = buffer.anchor_before(0); let after_end_anchor = buffer.anchor_after(0); @@ -521,7 +529,7 @@ fn test_anchors_at_start_and_end() { #[test] fn test_undo_redo() { - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "1234"); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234"); // Set group interval to zero so as to not group edits in the undo stack. buffer.set_group_interval(Duration::from_secs(0)); @@ -558,7 +566,7 @@ fn test_undo_redo() { #[test] fn test_history() { let mut now = Instant::now(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "123456"); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); buffer.set_group_interval(Duration::from_millis(300)); let transaction_1 = buffer.start_transaction_at(now).unwrap(); @@ -625,7 +633,7 @@ fn test_history() { #[test] fn test_finalize_last_transaction() { let now = Instant::now(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "123456"); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); buffer.history.group_interval = Duration::from_millis(1); buffer.start_transaction_at(now); @@ -661,7 +669,7 @@ fn test_finalize_last_transaction() { #[test] fn test_edited_ranges_for_transaction() { let now = Instant::now(); - let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), "1234567"); + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234567"); buffer.start_transaction_at(now); buffer.edit([(2..4, "cd")]); @@ -700,9 +708,9 @@ fn test_edited_ranges_for_transaction() { fn test_concurrent_edits() { let text = "abcdef"; - let mut buffer1 = Buffer::new(1, BufferId::new(1).unwrap(), text); - let mut buffer2 = Buffer::new(2, BufferId::new(1).unwrap(), text); - let mut buffer3 = Buffer::new(3, BufferId::new(1).unwrap(), text); + let mut buffer1 = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), text); + let mut buffer2 = Buffer::new(ReplicaId::new(2), BufferId::new(1).unwrap(), text); + let mut buffer3 = Buffer::new(ReplicaId::new(3), BufferId::new(1).unwrap(), text); let buf1_op = buffer1.edit([(1..2, "12")]); assert_eq!(buffer1.text(), "a12cdef"); @@ -741,11 +749,15 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let mut network = Network::new(rng.clone()); for i in 0..peers { - let mut buffer = Buffer::new(i as ReplicaId, BufferId::new(1).unwrap(), base_text.clone()); + let mut buffer = Buffer::new( + ReplicaId::new(i as u16), + BufferId::new(1).unwrap(), + base_text.clone(), + ); buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200)); buffers.push(buffer); - replica_ids.push(i as u16); - network.add_peer(i as u16); + replica_ids.push(ReplicaId::new(i as u16)); + network.add_peer(ReplicaId::new(i as u16)); } log::info!("initial text: {:?}", base_text); @@ -759,7 +771,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { 0..=50 if mutation_count != 0 => { let op = buffer.randomly_edit(&mut rng, 5).1; network.broadcast(buffer.replica_id, vec![op]); - log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); + log::info!("buffer {:?} text: {:?}", buffer.replica_id, buffer.text()); mutation_count -= 1; } 51..=70 if mutation_count != 0 => { @@ -771,7 +783,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let ops = network.receive(replica_id); if !ops.is_empty() { log::info!( - "peer {} applying {} ops from the network.", + "peer {:?} applying {} ops from the network.", 
replica_id, ops.len() ); @@ -792,7 +804,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { assert_eq!( buffer.text(), first_buffer.text(), - "Replica {} text != Replica 0 text", + "Replica {:?} text != Replica 0 text", buffer.replica_id ); buffer.check_invariants(); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index d61038d746fa1bebbf0b92a99b0a59f650bc5704..9a81fc8e941ab4d3a0e16f817fc90fbb608ea84a 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -12,7 +12,7 @@ mod undo_map; pub use anchor::*; use anyhow::{Context as _, Result}; -use clock::LOCAL_BRANCH_REPLICA_ID; +use clock::Lamport; pub use clock::ReplicaId; use collections::{HashMap, HashSet}; use locator::Locator; @@ -20,11 +20,9 @@ use operation_queue::OperationQueue; pub use patch::Patch; use postage::{oneshot, prelude::*}; -use regex::Regex; pub use rope::*; pub use selection::*; use std::{ - borrow::Cow, cmp::{self, Ordering, Reverse}, fmt::Display, future::Future, @@ -32,7 +30,7 @@ use std::{ num::NonZeroU64, ops::{self, Deref, Range, Sub}, str, - sync::{Arc, LazyLock}, + sync::Arc, time::{Duration, Instant}, }; pub use subscription::*; @@ -43,9 +41,6 @@ use undo_map::UndoMap; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; -static LINE_SEPARATORS_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); - pub type TransactionId = clock::Lamport; pub struct Buffer { @@ -573,7 +568,7 @@ struct InsertionFragment { fragment_id: Locator, } -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] struct InsertionFragmentKey { timestamp: clock::Lamport, split_offset: usize, @@ -709,7 +704,7 @@ impl FromIterator for LineIndent { } impl Buffer { - pub fn new(replica_id: u16, remote_id: BufferId, base_text: impl Into) -> Buffer { + pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into) -> Buffer { let mut base_text = base_text.into(); let line_ending = LineEnding::detect(&base_text); LineEnding::normalize(&mut base_text); @@ -717,7 +712,7 @@ impl Buffer { } pub fn new_normalized( - replica_id: u16, + replica_id: ReplicaId, remote_id: BufferId, line_ending: LineEnding, normalized: Rope, @@ -731,10 +726,7 @@ impl Buffer { let visible_text = history.base_text.clone(); if !visible_text.is_empty() { - let insertion_timestamp = clock::Lamport { - replica_id: 0, - value: 1, - }; + let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL); lamport_clock.observe(insertion_timestamp); version.observe(insertion_timestamp); let fragment_id = Locator::between(&Locator::min(), &Locator::max()); @@ -788,7 +780,7 @@ impl Buffer { history: History::new(self.base_text().clone()), deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), - lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID), + lamport_clock: clock::Lamport::new(ReplicaId::LOCAL_BRANCH), subscriptions: Default::default(), edit_id_resolvers: Default::default(), wait_for_version_txs: Default::default(), @@ -1254,7 +1246,7 @@ impl Buffer { for edit_id in edit_ids { let insertion_slice = InsertionSlice { edit_id: *edit_id, - insertion_id: clock::Lamport::default(), + insertion_id: clock::Lamport::MIN, range: 0..0, }; let slices = self @@ -1858,7 +1850,7 @@ impl Buffer { T: rand::Rng, { let mut edits = self.get_random_edits(rng, edit_count); - log::info!("mutating buffer {} with {:?}", self.replica_id, edits); + log::info!("mutating buffer {:?} with 
{:?}", self.replica_id, edits); let op = self.edit(edits.iter().cloned()); if let Operation::Edit(edit) = &op { @@ -1881,7 +1873,7 @@ impl Buffer { if let Some(entry) = self.history.undo_stack.choose(rng) { let transaction = entry.transaction.clone(); log::info!( - "undoing buffer {} transaction {:?}", + "undoing buffer {:?} transaction {:?}", self.replica_id, transaction ); @@ -2022,10 +2014,24 @@ impl BufferSnapshot { start..position } + /// Returns the buffer's text as a String. + /// + /// Note: This always uses `\n` as the line separator, regardless of the buffer's + /// actual line ending setting. For LSP communication or other cases where you need + /// to preserve the original line endings, use [`Self::text_with_original_line_endings`] instead. pub fn text(&self) -> String { self.visible_text.to_string() } + /// Returns the buffer's text with line same endings as in buffer's file. + /// + /// Unlike [`Self::text`] which always uses `\n`, this method formats the text using + /// the buffer's actual line ending setting (Unix `\n` or Windows `\r\n`). + pub fn text_with_original_line_endings(&self) -> String { + self.visible_text + .to_string_with_line_ending(self.line_ending) + } + pub fn line_ending(&self) -> LineEnding { self.line_ending } @@ -2054,6 +2060,14 @@ impl BufferSnapshot { self.visible_text.point_to_offset(point) } + pub fn point_to_offset_utf16(&self, point: Point) -> OffsetUtf16 { + self.visible_text.point_to_offset_utf16(point) + } + + pub fn point_utf16_to_offset_utf16(&self, point: PointUtf16) -> OffsetUtf16 { + self.visible_text.point_utf16_to_offset_utf16(point) + } + pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { self.visible_text.point_utf16_to_offset(point) } @@ -2086,6 +2100,10 @@ impl BufferSnapshot { self.visible_text.point_to_point_utf16(point) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + self.visible_text.point_utf16_to_point(point) + } + pub fn version(&self) -> &clock::Global { &self.version } @@ -2117,6 +2135,10 @@ impl BufferSnapshot { self.visible_text.reversed_bytes_in_range(start..end) } + /// Returns the text in the given range. + /// + /// Note: This always uses `\n` as the line separator, regardless of the buffer's + /// actual line ending setting. 
pub fn text_for_range(&self, range: Range) -> Chunks<'_> { let start = range.start.to_offset(self); let end = range.end.to_offset(self); @@ -2326,12 +2348,15 @@ impl BufferSnapshot { ); }; - let mut fragment_cursor = self + let (start, _, item) = self .fragments - .cursor::, usize>>(&None); - fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left); - let fragment = fragment_cursor.item().unwrap(); - let mut fragment_offset = fragment_cursor.start().1; + .find::, usize>, _>( + &None, + &Some(&insertion.fragment_id), + Bias::Left, + ); + let fragment = item.unwrap(); + let mut fragment_offset = start.1; if fragment.visible { fragment_offset += anchor.offset - insertion.split_offset; } @@ -2400,21 +2425,13 @@ impl BufferSnapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::MAX } else { - if !self.visible_text.is_char_boundary(offset) { - // find the character - let char_start = self.visible_text.floor_char_boundary(offset); - // `char_start` must be less than len and a char boundary - let ch = self.visible_text.chars_at(char_start).next().unwrap(); - let char_range = char_start..char_start + ch.len_utf8(); - panic!( - "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})", - offset, ch, char_range, - ); + if offset > self.visible_text.len() { + panic!("offset {} is out of bounds", offset) } - let mut fragment_cursor = self.fragments.cursor::(&None); - fragment_cursor.seek(&offset, bias); - let fragment = fragment_cursor.item().unwrap(); - let overshoot = offset - *fragment_cursor.start(); + self.visible_text.assert_char_boundary(offset); + let (start, _, item) = self.fragments.find::(&None, &offset, bias); + let fragment = item.unwrap(); + let overshoot = offset - start; Anchor { timestamp: fragment.timestamp, offset: fragment.insertion_offset + overshoot, @@ -2495,15 +2512,17 @@ impl BufferSnapshot { cursor.next(); Some(cursor) }; - let mut cursor = self - .fragments - .cursor::, FragmentTextSummary>>(&None); - let start_fragment_id = self.fragment_id_for_anchor(&range.start); - cursor.seek(&Some(start_fragment_id), Bias::Left); - let mut visible_start = cursor.start().1.visible; - let mut deleted_start = cursor.start().1.deleted; - if let Some(fragment) = cursor.item() { + let (start, _, item) = self + .fragments + .find::, FragmentTextSummary>, _>( + &None, + &Some(start_fragment_id), + Bias::Left, + ); + let mut visible_start = start.1.visible; + let mut deleted_start = start.1.deleted; + if let Some(fragment) = item { let overshoot = range.start.offset - fragment.insertion_offset; if fragment.visible { visible_start += overshoot; @@ -2916,7 +2935,10 @@ impl InsertionFragment { impl sum_tree::ContextLessSummary for InsertionFragmentKey { fn zero() -> Self { - Default::default() + InsertionFragmentKey { + timestamp: Lamport::MIN, + split_offset: 0, + } } fn add_summary(&mut self, summary: &Self) { @@ -3237,70 +3259,6 @@ impl FromAnchor for usize { } } -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum LineEnding { - Unix, - Windows, -} - -impl Default for LineEnding { - fn default() -> Self { - #[cfg(unix)] - return Self::Unix; - - #[cfg(not(unix))] - return Self::Windows; - } -} - -impl LineEnding { - pub fn as_str(&self) -> &'static str { - match self { - LineEnding::Unix => "\n", - LineEnding::Windows => "\r\n", - } - } - - pub fn detect(text: &str) -> Self { - let mut max_ix = cmp::min(text.len(), 1000); - while !text.is_char_boundary(max_ix) { - max_ix -= 1; - } - - if let Some(ix) = text[..max_ix].find(['\n']) { - if ix > 0 && 
text.as_bytes()[ix - 1] == b'\r' { - Self::Windows - } else { - Self::Unix - } - } else { - Self::default() - } - } - - pub fn normalize(text: &mut String) { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") { - *text = replaced; - } - } - - pub fn normalize_arc(text: Arc) -> Arc { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { - replaced.into() - } else { - text - } - } - - pub fn normalize_cow(text: Cow) -> Cow { - if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") { - replaced.into() - } else { - text - } - } -} - #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index 60b22a9edba70b65d30c60a0b9ca15b8f286cc85..2c2eba8de62ace68b5953b832a2a29be2317175e 100644 --- a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -1,4 +1,5 @@ use crate::UndoOperation; +use clock::Lamport; use std::cmp; use sum_tree::{Bias, SumTree}; @@ -24,7 +25,7 @@ impl sum_tree::KeyedItem for UndoMapEntry { } } -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] struct UndoMapKey { edit_id: clock::Lamport, undo_id: clock::Lamport, @@ -32,7 +33,10 @@ struct UndoMapKey { impl sum_tree::ContextLessSummary for UndoMapKey { fn zero() -> Self { - Default::default() + UndoMapKey { + edit_id: Lamport::MIN, + undo_id: Lamport::MIN, + } } fn add_summary(&mut self, summary: &Self) { @@ -69,7 +73,7 @@ impl UndoMap { cursor.seek( &UndoMapKey { edit_id, - undo_id: Default::default(), + undo_id: Lamport::MIN, }, Bias::Left, ); @@ -93,7 +97,7 @@ impl UndoMap { cursor.seek( &UndoMapKey { edit_id, - undo_id: Default::default(), + undo_id: Lamport::MIN, }, Bias::Left, ); diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index 306733bf3496ae0c122b73fbd109eb46f3662b8a..ef193c500d461201e8746ad3ec0f33b01e423b18 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -36,7 +36,6 @@ strum.workspace = true thiserror.workspace = true util.workspace = true uuid.workspace = true -workspace-hack.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 051b7acf102597b6f11581afdd45611b9a4b76e3..a9cd163b8c634f6c3fd8061164b72f8b54127c81 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -85,7 +85,7 @@ impl ThemeColors { panel_indent_guide_hover: neutral().light_alpha().step_6(), panel_indent_guide_active: neutral().light_alpha().step_6(), panel_overlay_background: neutral().light().step_2(), - panel_overlay_hover: neutral().light_alpha().step_4(), + panel_overlay_hover: neutral().light().step_4(), pane_focused_border: blue().light().step_5(), pane_group_border: neutral().light().step_6(), scrollbar_thumb_background: neutral().light_alpha().step_3(), @@ -154,6 +154,15 @@ impl ThemeColors { version_control_ignored: gray().light().step_12(), version_control_conflict_marker_ours: green().light().step_10().alpha(0.5), version_control_conflict_marker_theirs: blue().light().step_10().alpha(0.5), + vim_normal_background: system.transparent, + vim_insert_background: system.transparent, + vim_replace_background: system.transparent, + vim_visual_background: system.transparent, + vim_visual_line_background: system.transparent, + vim_visual_block_background: system.transparent, + vim_helix_normal_background: 
system.transparent, + vim_helix_select_background: system.transparent, + vim_mode_text: system.transparent, } } @@ -211,7 +220,7 @@ impl ThemeColors { panel_indent_guide_hover: neutral().dark_alpha().step_6(), panel_indent_guide_active: neutral().dark_alpha().step_6(), panel_overlay_background: neutral().dark().step_2(), - panel_overlay_hover: neutral().dark_alpha().step_4(), + panel_overlay_hover: neutral().dark().step_4(), pane_focused_border: blue().dark().step_5(), pane_group_border: neutral().dark().step_6(), scrollbar_thumb_background: neutral().dark_alpha().step_3(), @@ -280,6 +289,15 @@ impl ThemeColors { version_control_ignored: gray().dark().step_12(), version_control_conflict_marker_ours: green().dark().step_10().alpha(0.5), version_control_conflict_marker_theirs: blue().dark().step_10().alpha(0.5), + vim_normal_background: system.transparent, + vim_insert_background: system.transparent, + vim_replace_background: system.transparent, + vim_visual_background: system.transparent, + vim_visual_line_background: system.transparent, + vim_visual_block_background: system.transparent, + vim_helix_normal_background: system.transparent, + vim_helix_select_background: system.transparent, + vim_mode_text: system.transparent, } } } diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index 4fb8069bc16d1967dfe10b2e6a577b990d942db7..ae120165f23095266cf92fd33a1cd1ccb88fe309 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -233,6 +233,16 @@ pub(crate) fn zed_default_dark() -> Theme { version_control_ignored: crate::gray().light().step_12(), version_control_conflict_marker_ours: crate::green().light().step_12().alpha(0.5), version_control_conflict_marker_theirs: crate::blue().light().step_12().alpha(0.5), + + vim_normal_background: SystemColors::default().transparent, + vim_insert_background: SystemColors::default().transparent, + vim_replace_background: SystemColors::default().transparent, + vim_visual_background: SystemColors::default().transparent, + vim_visual_line_background: SystemColors::default().transparent, + vim_visual_block_background: SystemColors::default().transparent, + vim_helix_normal_background: SystemColors::default().transparent, + vim_helix_select_background: SystemColors::default().transparent, + vim_mode_text: SystemColors::default().transparent, }, status: StatusColors { conflict: yellow, diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 513dedfe428e68ff708302e5a23ff64ad6d66d0a..c3e7f3cfbc25cc04f05cd939f74154a732f16f58 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -152,7 +152,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ), ("java", &["java"]), ("javascript", &["cjs", "js", "mjs"]), - ("json", &["json"]), + ("json", &["json", "jsonc"]), ("julia", &["jl"]), ("kdl", &["kdl"]), ("kotlin", &["kt"]), @@ -199,9 +199,9 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ( "storage", &[ - "accdb", "csv", "dat", "db", "dbf", "dll", "fmp", "fp7", "frm", "gdb", "ib", "jsonc", - "ldf", "mdb", "mdf", "myd", "myi", "pdb", "RData", "rdata", "sav", "sdf", "sql", - "sqlite", "tsv", + "accdb", "csv", "dat", "db", "dbf", "dll", "fmp", "fp7", "frm", "gdb", "ib", "ldf", + "mdb", "mdf", "myd", "myi", "pdb", "RData", "rdata", "sav", "sdf", "sql", "sqlite", + "tsv", ], ), ( diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 
2d7e1ff9d823eae0d48b375592c6d1f91318f472..c4ed624bf642e0820fd9187224f96e2acfa92018 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -756,6 +756,42 @@ pub fn theme_colors_refinement( .as_ref() .or(this.version_control_conflict_theirs_background.as_ref()) .and_then(|color| try_parse_color(color).ok()), + vim_normal_background: this + .vim_normal_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_insert_background: this + .vim_insert_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_replace_background: this + .vim_replace_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_background: this + .vim_visual_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_line_background: this + .vim_visual_line_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_visual_block_background: this + .vim_visual_block_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_helix_normal_background: this + .vim_helix_normal_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_helix_select_background: this + .vim_helix_select_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + vim_mode_text: this + .vim_mode_text + .as_ref() + .and_then(|color| try_parse_color(color).ok()), } } diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 9f753d5a034466631d2324e52fbad7bd858e8c5c..3ac0f410efbdb4418236959e06d1b6772f7e3684 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -727,15 +727,4 @@ impl settings::Settings for ThemeSettings { unnecessary_code_fade: content.unnecessary_code_fade.unwrap().0.clamp(0.0, 0.9), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - vscode.from_f32_setting("editor.fontWeight", &mut current.theme.buffer_font_weight); - vscode.from_f32_setting("editor.fontSize", &mut current.theme.buffer_font_size); - vscode.font_family_setting( - "editor.fontFamily", - &mut current.theme.buffer_font_family, - &mut current.theme.buffer_font_fallbacks, - ) - // TODO: possibly map editor.fontLigatures to buffer_font_features? - } } diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 198ad97adb5d964a1d8f62c5bde99d1d5be5adf7..179d02b91684410bb641893e87759bd30cc73b36 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -162,6 +162,25 @@ pub struct ThemeColors { /// The border color of the minimap thumb. pub minimap_thumb_border: Hsla, + /// Background color for Vim Normal mode indicator. + pub vim_normal_background: Hsla, + /// Background color for Vim Insert mode indicator. + pub vim_insert_background: Hsla, + /// Background color for Vim Replace mode indicator. + pub vim_replace_background: Hsla, + /// Background color for Vim Visual mode indicator. + pub vim_visual_background: Hsla, + /// Background color for Vim Visual Line mode indicator. + pub vim_visual_line_background: Hsla, + /// Background color for Vim Visual Block mode indicator. + pub vim_visual_block_background: Hsla, + /// Background color for Vim Helix Normal mode indicator. + pub vim_helix_normal_background: Hsla, + /// Background color for Vim Helix Select mode indicator. + pub vim_helix_select_background: Hsla, + /// Text color for Vim mode indicator label. 
+ pub vim_mode_text: Hsla, + // === // Editor // === diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index c18719efe0d2665928bb0c6003cc69a85da49b83..fab173484d999711590e6600a92e6ac870bee54e 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -408,8 +408,8 @@ impl Theme { /// Asynchronously reads the user theme from the specified path. pub async fn read_user_theme(theme_path: &Path, fs: Arc) -> Result { - let reader = fs.open_sync(theme_path).await?; - let theme_family: ThemeFamilyContent = serde_json_lenient::from_reader(reader)?; + let bytes = fs.load_bytes(theme_path).await?; + let theme_family: ThemeFamilyContent = serde_json_lenient::from_slice(&bytes)?; for theme in &theme_family.themes { if theme @@ -433,8 +433,8 @@ pub async fn read_icon_theme( icon_theme_path: &Path, fs: Arc, ) -> Result { - let reader = fs.open_sync(icon_theme_path).await?; - let icon_theme_family: IconThemeFamilyContent = serde_json_lenient::from_reader(reader)?; + let bytes = fs.load_bytes(icon_theme_path).await?; + let icon_theme_family: IconThemeFamilyContent = serde_json_lenient::from_slice(&bytes)?; Ok(icon_theme_family) } diff --git a/crates/theme_extension/Cargo.toml b/crates/theme_extension/Cargo.toml index 718c35d4e268d3f23be771396cce791eeb2b3741..d94e15914b2dfbc8250641e8957366c27c2616a4 100644 --- a/crates/theme_extension/Cargo.toml +++ b/crates/theme_extension/Cargo.toml @@ -17,4 +17,3 @@ extension.workspace = true fs.workspace = true gpui.workspace = true theme.workspace = true -workspace-hack.workspace = true diff --git a/crates/theme_importer/Cargo.toml b/crates/theme_importer/Cargo.toml index 2fef5a62498d9ac0abfef3913edbd1dc711e5e64..a91ffc44544f898be35c4514910a6081b10b4a26 100644 --- a/crates/theme_importer/Cargo.toml +++ b/crates/theme_importer/Cargo.toml @@ -23,4 +23,3 @@ simplelog.workspace= true strum = { workspace = true, features = ["derive"] } theme.workspace = true vscode_theme = "0.2.0" -workspace-hack.workspace = true diff --git a/crates/theme_importer/src/main.rs b/crates/theme_importer/src/main.rs index 0ea6bbc4bcfba6196031b705f92771c753b9dc50..24291fc5115f235972e1aade63e81414d3879f2f 100644 --- a/crates/theme_importer/src/main.rs +++ b/crates/theme_importer/src/main.rs @@ -2,7 +2,7 @@ mod color; mod vscode; use std::fs::File; -use std::io::Write; +use std::io::{Read, Write}; use std::path::PathBuf; use anyhow::{Context as _, Result}; @@ -89,15 +89,16 @@ fn main() -> Result<()> { let theme_file_path = args.theme_path; - let theme_file = match File::open(&theme_file_path) { - Ok(file) => file, + let mut buffer = Vec::new(); + match File::open(&theme_file_path).and_then(|mut file| file.read_to_end(&mut buffer)) { + Ok(_) => {} Err(err) => { log::info!("Failed to open file at path: {:?}", theme_file_path); return Err(err)?; } }; - let vscode_theme: VsCodeTheme = serde_json_lenient::from_reader(theme_file) + let vscode_theme: VsCodeTheme = serde_json_lenient::from_slice(&buffer) .context(format!("failed to parse theme {theme_file_path:?}"))?; let theme_metadata = ThemeMetadata { diff --git a/crates/theme_selector/Cargo.toml b/crates/theme_selector/Cargo.toml index 8ec3e5b63f5341dcacfb1d60728844bcd9e89a26..1a563e81f202b484c846ed620aee3edd122fc80b 100644 --- a/crates/theme_selector/Cargo.toml +++ b/crates/theme_selector/Cargo.toml @@ -26,6 +26,5 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/theme_selector/src/theme_selector.rs 
b/crates/theme_selector/src/theme_selector.rs index d3e21d5bd51613ef496fa9dbea52502688fc1f16..38e7fc33f7b14f198679d0dd541c39cd444a71a3 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -7,7 +7,7 @@ use gpui::{ Window, actions, }; use picker::{Picker, PickerDelegate}; -use settings::{SettingsStore, update_settings_file}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::sync::Arc; use theme::{Appearance, Theme, ThemeMeta, ThemeRegistry, ThemeSettings}; use ui::{ListItem, ListItemSpacing, prelude::*, v_flex}; @@ -231,12 +231,11 @@ impl PickerDelegate for ThemeSelectorDelegate { ) { self.selection_completed = true; - let theme_name = cx.theme().name.clone(); + let appearance = Appearance::from(window.appearance()); + let theme_name = ThemeSettings::get_global(cx).theme.name(appearance).0; telemetry::event!("Settings Changed", setting = "theme", value = theme_name); - let appearance = Appearance::from(window.appearance()); - update_settings_file(self.fs.clone(), cx, move |settings, _| { theme::set_theme(settings, theme_name.to_string(), appearance); }); diff --git a/crates/time_format/Cargo.toml b/crates/time_format/Cargo.toml index 5175a26a7803f81b928fba22a820e516515d3f34..b598d19887e128a0c5951c1d1bd5ec42f27f975b 100644 --- a/crates/time_format/Cargo.toml +++ b/crates/time_format/Cargo.toml @@ -15,7 +15,6 @@ doctest = false [dependencies] sys-locale.workspace = true time.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index 127fad3d8bdbf0348b946288007c81b952d14b58..829dea3a55ba9fee7f2ede503139e1348dabc57f 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -50,7 +50,6 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index b5a51976a01179d3a70bd6d087533866a6c2814b..5dd08ee3f9e132666520433db92279df559abdb0 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -403,14 +403,13 @@ impl TitleBar { IconName::Mic }, ) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if is_muted { if is_deafened { Tooltip::with_meta( "Unmute Microphone", None, "Audio will be unmuted", - window, cx, ) } else { @@ -444,12 +443,12 @@ impl TitleBar { .selected_style(ButtonStyle::Tinted(TintColor::Error)) .icon_size(IconSize::Small) .toggle_state(is_deafened) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if is_deafened { let label = "Unmute Audio"; if !muted_by_user { - Tooltip::with_meta(label, None, "Microphone will be unmuted", window, cx) + Tooltip::with_meta(label, None, "Microphone will be unmuted", cx) } else { Tooltip::simple(label, cx) } @@ -457,7 +456,7 @@ impl TitleBar { let label = "Mute Audio"; if !muted_by_user { - Tooltip::with_meta(label, None, "Microphone will be muted", window, cx) + Tooltip::with_meta(label, None, "Microphone will be muted", cx) } else { Tooltip::simple(label, cx) } diff --git a/crates/title_bar/src/onboarding_banner.rs b/crates/title_bar/src/onboarding_banner.rs index 6adc5769498ee19a7139c3fd02bd586e32185778..750ef0a6cdc56d1e9ea87ab12807584a4e0e4bd2 100644 --- a/crates/title_bar/src/onboarding_banner.rs +++ 
b/crates/title_bar/src/onboarding_banner.rs @@ -154,12 +154,11 @@ impl Render for OnboardingBanner { telemetry::event!("Banner Dismissed", source = this.source); this.dismiss(cx) })) - .tooltip(|window, cx| { + .tooltip(|_window, cx| { Tooltip::with_meta( "Close Announcement Banner", None, "It won't show again for this feature", - window, cx, ) }), diff --git a/crates/title_bar/src/platform_title_bar.rs b/crates/title_bar/src/platform_title_bar.rs index c816f0930ca48bdb7f66233a95dfd7b08c1adf09..fd03e764629454411c9726ef7dcf055d54582d7e 100644 --- a/crates/title_bar/src/platform_title_bar.rs +++ b/crates/title_bar/src/platform_title_bar.rs @@ -97,6 +97,7 @@ impl Render for PlatformTitleBar { }) // this border is to avoid a transparent gap in the rounded corners .mt(px(-1.)) + .mb(px(-1.)) .border(px(1.)) .border_color(titlebar_color), }) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index be3ee9c4415465de74e42aec0c889ea3e6931d85..18a4592edb153dd204bf8df72b1d37fbc81567d5 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -30,10 +30,7 @@ use gpui::{ Subscription, WeakEntity, Window, actions, div, }; use onboarding_banner::OnboardingBanner; -use project::{ - Project, WorktreeSettings, - git_store::{GitStoreEvent, RepositoryEvent}, -}; +use project::{Project, WorktreeSettings, git_store::GitStoreEvent}; use remote::RemoteConnectionOptions; use settings::{Settings, SettingsLocation}; use std::sync::Arc; @@ -189,10 +186,18 @@ impl Render for TitleBar { let status = &*status.borrow(); let user = self.user_store.read(cx).current_user(); + let signed_in = user.is_some(); + children.push( h_flex() + .map(|this| { + if signed_in { + this.pr_1p5() + } else { + this.pr_1() + } + }) .gap_1() - .pr_1() .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) .children(self.render_call_controls(window, cx)) .children(self.render_connection_status(status, cx)) @@ -279,9 +284,7 @@ impl TitleBar { subscriptions.push( cx.subscribe(&git_store, move |_, _, event, cx| match event { GitStoreEvent::ActiveRepositoryChanged(_) - | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated { .. 
}, _) - | GitStoreEvent::RepositoryAdded(_) - | GitStoreEvent::RepositoryRemoved(_) => { + | GitStoreEvent::RepositoryUpdated(_, _, true) => { cx.notify(); } _ => {} @@ -371,7 +374,7 @@ impl TitleBar { ) .child(Label::new(nickname).size(LabelSize::Small).truncate()), ) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Remote Project", Some(&OpenRemote { @@ -379,7 +382,6 @@ impl TitleBar { create_new_window: false, }), meta.clone(), - window, cx, ) }) @@ -473,13 +475,12 @@ impl TitleBar { .when(!is_project_selected, |b| b.color(Color::Muted)) .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( "Recent Projects", &zed_actions::OpenRecent { create_new_window: false, }, - window, cx, ) }) @@ -519,12 +520,11 @@ impl TitleBar { .color(Color::Muted) .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( "Recent Branches", Some(&zed_actions::git::Branch), "Local branches only", - window, cx, ) }) @@ -753,26 +753,10 @@ impl TitleBar { .into() }) .map(|this| { - if is_signed_in { + if is_signed_in && TitleBarSettings::get_global(cx).show_user_picture { this.trigger_with_tooltip( ButtonLike::new("user-menu") - .child( - h_flex() - .gap_0p5() - .children( - TitleBarSettings::get_global(cx) - .show_user_picture - .then(|| user_avatar.clone()) - .flatten() - .map(|avatar| Avatar::new(avatar)), - ) - .child( - Icon::new(IconName::ChevronDown) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - .style(ButtonStyle::Subtle), + .children(user_avatar.clone().map(|avatar| Avatar::new(avatar))), Tooltip::text("Toggle User Menu"), ) } else { diff --git a/crates/toolchain_selector/Cargo.toml b/crates/toolchain_selector/Cargo.toml index a17f82564093e2ae17f95ec82559f308b910b2dd..94a655b7270c8e084a7b74b7711bb62c0a6a18aa 100644 --- a/crates/toolchain_selector/Cargo.toml +++ b/crates/toolchain_selector/Cargo.toml @@ -20,7 +20,6 @@ project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -workspace-hack.workspace = true [lints] workspace = true diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index 1c6cbe5235e22a15ffb5a6a7e1f6a3f3e15deb43..122aa9f22b74c33dd8f148f2bf3b65f04da478a9 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -2,12 +2,12 @@ use std::sync::Arc; use editor::Editor; use gpui::{ - AsyncWindowContext, Context, Entity, IntoElement, ParentElement, Render, Subscription, Task, - WeakEntity, Window, div, + AsyncWindowContext, Context, Entity, IntoElement, ParentElement, Render, Styled, Subscription, + Task, WeakEntity, Window, div, }; use language::{Buffer, BufferEvent, LanguageName, Toolchain, ToolchainScope}; use project::{Project, ProjectPath, Toolchains, WorktreeId, toolchain_store::ToolchainStoreEvent}; -use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, SharedString, Tooltip}; +use ui::{Button, ButtonCommon, Clickable, LabelSize, SharedString, Tooltip}; use util::{maybe, rel_path::RelPath}; use workspace::{StatusItemView, Workspace, item::ItemHandle}; @@ -230,21 +230,22 @@ impl ActiveToolchain { impl Render for ActiveToolchain { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| { - let term = 
self.term.clone(); - el.child( - Button::new("change-toolchain", active_toolchain.name.clone()) - .label_size(LabelSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - if let Some(workspace) = this.workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - ToolchainSelector::toggle(workspace, window, cx) - }); - } - })) - .tooltip(Tooltip::text(format!("Select {}", &term))), - ) - }) + let Some(active_toolchain) = self.active_toolchain.as_ref() else { + return div().hidden(); + }; + + div().child( + Button::new("change-toolchain", active_toolchain.name.clone()) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + ToolchainSelector::toggle(workspace, window, cx) + }); + } + })) + .tooltip(Tooltip::text(format!("Select {}", &self.term))), + ) } } diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index 3ebf7670d34851949a55a85ba04cabb4f115bfbd..c017483a32325d13e85a5db34566a3b0bf6e15a5 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -128,66 +128,67 @@ impl AddToolchainState { ) -> (OpenPathDelegate, oneshot::Receiver>>) { let (tx, rx) = oneshot::channel(); let weak = cx.weak_entity(); - let lister = OpenPathDelegate::new( - tx, - DirectoryLister::Project(project), - false, - PathStyle::local(), - ) - .show_hidden() - .with_footer(Arc::new(move |_, cx| { - let error = weak - .read_with(cx, |this, _| { - if let AddState::Path { error, .. } = &this.state { - error.clone() - } else { - None - } - }) - .ok() - .flatten(); - let is_loading = weak - .read_with(cx, |this, _| { - matches!( - this.state, - AddState::Path { - input_state: PathInputState::Resolving(_), - .. - } - ) - }) - .unwrap_or_default(); - Some( - v_flex() - .child(Divider::horizontal()) - .child( - h_flex() - .p_1() - .justify_between() - .gap_2() - .child(Label::new("Select Toolchain Path").color(Color::Muted).map( - |this| { - if is_loading { - this.with_animation( - "select-toolchain-label", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 0.8)), - |label, delta| label.alpha(delta), - ) - .into_any() - } else { - this.into_any_element() - } - }, - )) - .when_some(error, |this, error| { - this.child(Label::new(error).color(Color::Error)) - }), + let path_style = project.read(cx).path_style(cx); + let lister = + OpenPathDelegate::new(tx, DirectoryLister::Project(project), false, path_style) + .show_hidden() + .with_footer(Arc::new(move |_, cx| { + let error = weak + .read_with(cx, |this, _| { + if let AddState::Path { error, .. } = &this.state { + error.clone() + } else { + None + } + }) + .ok() + .flatten(); + let is_loading = weak + .read_with(cx, |this, _| { + matches!( + this.state, + AddState::Path { + input_state: PathInputState::Resolving(_), + .. 
+ } + ) + }) + .unwrap_or_default(); + Some( + v_flex() + .child(Divider::horizontal()) + .child( + h_flex() + .p_1() + .justify_between() + .gap_2() + .child( + Label::new("Select Toolchain Path") + .color(Color::Muted) + .map(|this| { + if is_loading { + this.with_animation( + "select-toolchain-label", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between( + 0.4, 0.8, + )), + |label, delta| label.alpha(delta), + ) + .into_any() + } else { + this.into_any_element() + } + }), + ) + .when_some(error, |this, error| { + this.child(Label::new(error).color(Color::Error)) + }), + ) + .into_any(), ) - .into_any(), - ) - })); + })); (lister, rx) } @@ -489,7 +490,6 @@ impl Render for AddToolchainState { .key_binding(KeyBinding::for_action_in( &menu::Confirm, &handle, - window, cx, )) .on_click(cx.listener(|this, _, window, cx| { @@ -867,12 +867,16 @@ impl ToolchainSelectorDelegate { add_toolchain_text: Arc::from("Add Toolchain"), } } - fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString { + fn relativize_path( + path: SharedString, + worktree_root: &Path, + path_style: PathStyle, + ) -> SharedString { Path::new(&path.as_ref()) .strip_prefix(&worktree_root) .ok() - .map(|suffix| Path::new(".").join(suffix)) - .and_then(|path| path.to_str().map(String::from).map(SharedString::from)) + .and_then(|suffix| suffix.to_str()) + .map(|suffix| format!(".{}{suffix}", path_style.separator()).into()) .unwrap_or(path) } } @@ -954,14 +958,18 @@ impl PickerDelegate for ToolchainSelectorDelegate { let background = cx.background_executor().clone(); let candidates = self.candidates.clone(); let worktree_root_path = self.worktree_abs_path_root.clone(); + let path_style = self.project.read(cx).path_style(cx); cx.spawn_in(window, async move |this, cx| { let matches = if query.is_empty() { candidates .into_iter() .enumerate() .map(|(index, (candidate, _))| { - let path = - Self::relativize_path(candidate.path.clone(), &worktree_root_path); + let path = Self::relativize_path( + candidate.path.clone(), + &worktree_root_path, + path_style, + ); let string = format!("{}{}", candidate.name, path); StringMatch { candidate_id: index, @@ -976,8 +984,11 @@ impl PickerDelegate for ToolchainSelectorDelegate { .into_iter() .enumerate() .map(|(candidate_id, (toolchain, _))| { - let path = - Self::relativize_path(toolchain.path.clone(), &worktree_root_path); + let path = Self::relativize_path( + toolchain.path.clone(), + &worktree_root_path, + path_style, + ); let string = format!("{}{}", toolchain.name, path); StringMatchCandidate::new(candidate_id, &string) }) @@ -1017,7 +1028,12 @@ impl PickerDelegate for ToolchainSelectorDelegate { let (toolchain, scope) = &self.candidates.get(mat.candidate_id)?; let label = toolchain.name.clone(); - let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root); + let path_style = self.project.read(cx).path_style(cx); + let path = Self::relativize_path( + toolchain.path.clone(), + &self.worktree_abs_path_root, + path_style, + ); let (name_highlights, mut path_highlights) = mat .positions .iter() @@ -1100,7 +1116,6 @@ impl PickerDelegate for ToolchainSelectorDelegate { .key_binding(KeyBinding::for_action_in( &AddToolchain, &self.focus_handle, - _window, cx, )) .on_click(|_, window, cx| { @@ -1112,7 +1127,6 @@ impl PickerDelegate for ToolchainSelectorDelegate { .key_binding(KeyBinding::for_action_in( &menu::Confirm, &self.focus_handle, - _window, cx, )) .on_click(|_, window, cx| { diff --git a/crates/ui/Cargo.toml 
b/crates/ui/Cargo.toml index 985a2bcdc7dadf3b28241b3d59744e48c654e76e..5eb58bf1da1f25cc273a9fc5d7c08b920d3471e9 100644 --- a/crates/ui/Cargo.toml +++ b/crates/ui/Cargo.toml @@ -30,7 +30,6 @@ strum.workspace = true theme.workspace = true ui_macros.workspace = true util.workspace = true -workspace-hack.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 0c8893e2cccb64243f373126429a823b919bca42..4ce7aeed0d80b579207b2546ee5ac35d0ac8865d 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -640,6 +640,11 @@ impl RenderOnce for ButtonLike { .filter(|_| self.selected) .unwrap_or(self.style); + let is_outlined = matches!( + self.style, + ButtonStyle::Outlined | ButtonStyle::OutlinedGhost + ); + self.base .h_flex() .id(self.id.clone()) @@ -654,13 +659,7 @@ impl RenderOnce for ButtonLike { .when_some(self.width, |this, width| { this.w(width).justify_center().text_center() }) - .when( - matches!( - self.style, - ButtonStyle::Outlined | ButtonStyle::OutlinedGhost - ), - |this| this.border_1(), - ) + .when(is_outlined, |this| this.border_1()) .when_some(self.rounding, |this, rounding| { this.when(rounding.top_left, |this| this.rounded_tl_sm()) .when(rounding.top_right, |this| this.rounded_tr_sm()) @@ -688,13 +687,16 @@ impl RenderOnce for ButtonLike { let hovered_style = style.hovered(self.layer, cx); let focus_color = |refinement: StyleRefinement| refinement.bg(hovered_style.background); + this.cursor(self.cursor_style) .hover(focus_color) .map(|this| { - if matches!(self.style, ButtonStyle::Outlined) { - this.focus(|s| s.border_color(cx.theme().colors().border_focused)) + if is_outlined { + this.focus_visible(|s| { + s.border_color(cx.theme().colors().border_focused) + }) } else { - this.focus(focus_color) + this.focus_visible(focus_color) } }) .active(|active| active.bg(style.active(cx).background)) diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 7b61789b3c87d54ff231e1d635266d6502fb944f..8db7a9da07992ae6ba6a3a9f4fcec5ff4f9d5344 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -47,6 +47,7 @@ pub struct ContextMenuEntry { toggle: Option<(IconPosition, bool)>, label: SharedString, icon: Option, + custom_icon_path: Option, icon_position: IconPosition, icon_size: IconSize, icon_color: Option, @@ -66,6 +67,7 @@ impl ContextMenuEntry { toggle: None, label: label.into(), icon: None, + custom_icon_path: None, icon_position: IconPosition::Start, icon_size: IconSize::Small, icon_color: None, @@ -90,6 +92,12 @@ impl ContextMenuEntry { self } + pub fn custom_icon_path(mut self, path: impl Into) -> Self { + self.custom_icon_path = Some(path.into()); + self.icon = None; // Clear IconName if custom path is set + self + } + pub fn icon_position(mut self, position: IconPosition) -> Self { self.icon_position = position; self @@ -206,39 +214,46 @@ impl EventEmitter for ContextMenu {} impl FluentBuilder for ContextMenu {} impl ContextMenu { + pub fn new( + window: &mut Window, + cx: &mut Context, + f: impl FnOnce(Self, &mut Window, &mut Context) -> Self, + ) -> Self { + let focus_handle = cx.focus_handle(); + let _on_blur_subscription = cx.on_blur( + &focus_handle, + window, + |this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx), + ); + window.refresh(); + + f( + Self { + builder: None, + 
items: Default::default(), + focus_handle, + action_context: None, + selected_index: None, + delayed: false, + clicked: false, + key_context: "menu".into(), + _on_blur_subscription, + keep_open_on_confirm: false, + documentation_aside: None, + fixed_width: None, + end_slot_action: None, + }, + window, + cx, + ) + } + pub fn build( window: &mut Window, cx: &mut App, f: impl FnOnce(Self, &mut Window, &mut Context) -> Self, ) -> Entity { - cx.new(|cx| { - let focus_handle = cx.focus_handle(); - let _on_blur_subscription = cx.on_blur( - &focus_handle, - window, - |this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx), - ); - window.refresh(); - f( - Self { - builder: None, - items: Default::default(), - focus_handle, - action_context: None, - selected_index: None, - delayed: false, - clicked: false, - key_context: "menu".into(), - _on_blur_subscription, - keep_open_on_confirm: false, - documentation_aside: None, - fixed_width: None, - end_slot_action: None, - }, - window, - cx, - ) - }) + cx.new(|cx| Self::new(window, cx, f)) } /// Builds a [`ContextMenu`] that will stay open when making changes instead of closing after each confirmation. @@ -380,6 +395,7 @@ impl ContextMenu { label: label.into(), handler: Rc::new(move |_, window, cx| handler(window, cx)), icon: None, + custom_icon_path: None, icon_position: IconPosition::End, icon_size: IconSize::Small, icon_color: None, @@ -408,6 +424,7 @@ impl ContextMenu { label: label.into(), handler: Rc::new(move |_, window, cx| handler(window, cx)), icon: None, + custom_icon_path: None, icon_position: IconPosition::End, icon_size: IconSize::Small, icon_color: None, @@ -436,6 +453,7 @@ impl ContextMenu { label: label.into(), handler: Rc::new(move |_, window, cx| handler(window, cx)), icon: None, + custom_icon_path: None, icon_position: IconPosition::End, icon_size: IconSize::Small, icon_color: None, @@ -463,6 +481,7 @@ impl ContextMenu { label: label.into(), handler: Rc::new(move |_, window, cx| handler(window, cx)), icon: None, + custom_icon_path: None, icon_position: position, icon_size: IconSize::Small, icon_color: None, @@ -521,6 +540,7 @@ impl ContextMenu { window.dispatch_action(action.boxed_clone(), cx); }), icon: None, + custom_icon_path: None, icon_position: IconPosition::End, icon_size: IconSize::Small, icon_color: None, @@ -551,6 +571,7 @@ impl ContextMenu { window.dispatch_action(action.boxed_clone(), cx); }), icon: None, + custom_icon_path: None, icon_size: IconSize::Small, icon_position: IconPosition::End, icon_color: None, @@ -571,6 +592,7 @@ impl ContextMenu { action: Some(action.boxed_clone()), handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)), icon: Some(IconName::ArrowUpRight), + custom_icon_path: None, icon_size: IconSize::XSmall, icon_position: IconPosition::End, icon_color: None, @@ -834,9 +856,9 @@ impl ContextMenu { .disabled(true) .child(Label::new(label.clone())) .into_any_element(), - ContextMenuItem::Entry(entry) => self - .render_menu_entry(ix, entry, window, cx) - .into_any_element(), + ContextMenuItem::Entry(entry) => { + self.render_menu_entry(ix, entry, cx).into_any_element() + } ContextMenuItem::CustomEntry { entry_render, handler, @@ -883,7 +905,6 @@ impl ContextMenu { &self, ix: usize, entry: &ContextMenuEntry, - window: &mut Window, cx: &mut Context, ) -> impl IntoElement { let ContextMenuEntry { @@ -891,6 +912,7 @@ impl ContextMenu { label, handler, icon, + custom_icon_path, icon_position, icon_size, icon_color, @@ -921,7 +943,29 @@ impl ContextMenu { 
Color::Default }; - let label_element = if let Some(icon_name) = icon { + let label_element = if let Some(custom_path) = custom_icon_path { + h_flex() + .gap_1p5() + .when( + *icon_position == IconPosition::Start && toggle.is_none(), + |flex| { + flex.child( + Icon::from_path(custom_path.clone()) + .size(*icon_size) + .color(icon_color), + ) + }, + ) + .child(Label::new(label.clone()).color(label_color).truncate()) + .when(*icon_position == IconPosition::End, |flex| { + flex.child( + Icon::from_path(custom_path.clone()) + .size(*icon_size) + .color(icon_color), + ) + }) + .into_any_element() + } else if let Some(icon_name) = icon { h_flex() .gap_1p5() .when( @@ -980,18 +1024,18 @@ impl ContextMenu { .justify_between() .child(label_element) .debug_selector(|| format!("MENU_ITEM-{}", label)) - .children(action.as_ref().and_then(|action| { - self.action_context + .children(action.as_ref().map(|action| { + let binding = self + .action_context .as_ref() - .and_then(|focus| { - KeyBinding::for_action_in(&**action, focus, window, cx) - }) - .or_else(|| KeyBinding::for_action(&**action, window, cx)) - .map(|binding| { - div().ml_4().child(binding.disabled(*disabled)).when( - *disabled && documentation_aside.is_some(), - |parent| parent.invisible(), - ) + .map(|focus| KeyBinding::for_action_in(&**action, focus, cx)) + .unwrap_or_else(|| KeyBinding::for_action(&**action, cx)); + + div() + .ml_4() + .child(binding.disabled(*disabled)) + .when(*disabled && documentation_aside.is_some(), |parent| { + parent.invisible() }) })) .when(*disabled && documentation_aside.is_some(), |parent| { @@ -1016,7 +1060,7 @@ impl ContextMenu { let action_context = self.action_context.clone(); let title = title.clone(); let action = action.boxed_clone(); - move |window, cx| { + move |_window, cx| { action_context .as_ref() .map(|focus| { @@ -1024,17 +1068,11 @@ impl ContextMenu { title.clone(), &*action, focus, - window, cx, ) }) .unwrap_or_else(|| { - Tooltip::for_action( - title.clone(), - &*action, - window, - cx, - ) + Tooltip::for_action(title.clone(), &*action, cx) }) } }) diff --git a/crates/ui/src/components/dropdown_menu.rs b/crates/ui/src/components/dropdown_menu.rs index 8a1abc312748bdd1fdb087973708d58579ffbc1d..5b5de7a257ad22f099b8c2c4cc81aa83d2841976 100644 --- a/crates/ui/src/components/dropdown_menu.rs +++ b/crates/ui/src/components/dropdown_menu.rs @@ -1,6 +1,6 @@ -use gpui::{Corner, Entity, Pixels, Point}; +use gpui::{AnyView, Corner, Entity, Pixels, Point}; -use crate::{ContextMenu, PopoverMenu, prelude::*}; +use crate::{ButtonLike, ContextMenu, PopoverMenu, prelude::*}; use super::PopoverMenuHandle; @@ -9,6 +9,7 @@ pub enum DropdownStyle { #[default] Solid, Outlined, + Subtle, Ghost, } @@ -22,6 +23,8 @@ pub struct DropdownMenu { id: ElementId, label: LabelKind, trigger_size: ButtonSize, + trigger_tooltip: Option AnyView + 'static>>, + trigger_icon: Option, style: DropdownStyle, menu: Entity, full_width: bool, @@ -30,6 +33,7 @@ pub struct DropdownMenu { attach: Option, offset: Option>, tab_index: Option, + chevron: bool, } impl DropdownMenu { @@ -42,6 +46,8 @@ impl DropdownMenu { id: id.into(), label: LabelKind::Text(label.into()), trigger_size: ButtonSize::Default, + trigger_tooltip: None, + trigger_icon: Some(IconName::ChevronUpDown), style: DropdownStyle::default(), menu, full_width: false, @@ -50,6 +56,7 @@ impl DropdownMenu { attach: None, offset: None, tab_index: None, + chevron: true, } } @@ -62,6 +69,8 @@ impl DropdownMenu { id: id.into(), label: LabelKind::Element(label), trigger_size: 
ButtonSize::Default, + trigger_tooltip: None, + trigger_icon: Some(IconName::ChevronUpDown), style: DropdownStyle::default(), menu, full_width: false, @@ -70,16 +79,30 @@ impl DropdownMenu { attach: None, offset: None, tab_index: None, + chevron: true, } } + pub fn style(mut self, style: DropdownStyle) -> Self { + self.style = style; + self + } + pub fn trigger_size(mut self, size: ButtonSize) -> Self { self.trigger_size = size; self } - pub fn style(mut self, style: DropdownStyle) -> Self { - self.style = style; + pub fn trigger_tooltip( + mut self, + tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static, + ) -> Self { + self.trigger_tooltip = Some(Box::new(tooltip)); + self + } + + pub fn trigger_icon(mut self, icon: IconName) -> Self { + self.trigger_icon = Some(icon); self } @@ -109,6 +132,11 @@ impl DropdownMenu { self.tab_index = Some(arg); self } + + pub fn no_chevron(mut self) -> Self { + self.chevron = false; + self + } } impl Disableable for DropdownMenu { @@ -122,6 +150,7 @@ impl RenderOnce for DropdownMenu { fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { let button_style = match self.style { DropdownStyle::Solid => ButtonStyle::Filled, + DropdownStyle::Subtle => ButtonStyle::Subtle, DropdownStyle::Outlined => ButtonStyle::Outlined, DropdownStyle::Ghost => ButtonStyle::Transparent, }; @@ -129,32 +158,62 @@ impl RenderOnce for DropdownMenu { let full_width = self.full_width; let trigger_size = self.trigger_size; - let button = match self.label { - LabelKind::Text(text) => Button::new(self.id.clone(), text) - .style(button_style) - .icon(IconName::ChevronUpDown) - .icon_position(IconPosition::End) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .when(full_width, |this| this.full_width()) - .size(trigger_size) - .disabled(self.disabled), - LabelKind::Element(_element) => Button::new(self.id.clone(), "") - .style(button_style) - .icon(IconName::ChevronUpDown) - .icon_position(IconPosition::End) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .when(full_width, |this| this.full_width()) - .size(trigger_size) - .disabled(self.disabled), - } - .when_some(self.tab_index, |this, tab_index| this.tab_index(tab_index)); + let (text_button, element_button) = match self.label { + LabelKind::Text(text) => ( + Some( + Button::new(self.id.clone(), text) + .style(button_style) + .when(self.chevron, |this| { + this.icon(self.trigger_icon) + .icon_position(IconPosition::End) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + }) + .when(full_width, |this| this.full_width()) + .size(trigger_size) + .disabled(self.disabled) + .when_some(self.tab_index, |this, tab_index| this.tab_index(tab_index)), + ), + None, + ), + LabelKind::Element(element) => ( + None, + Some( + ButtonLike::new(self.id.clone()) + .child(element) + .style(button_style) + .when(self.chevron, |this| { + this.child( + Icon::new(IconName::ChevronUpDown) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + }) + .when(full_width, |this| this.full_width()) + .size(trigger_size) + .disabled(self.disabled) + .when_some(self.tab_index, |this, tab_index| this.tab_index(tab_index)), + ), + ), + }; - PopoverMenu::new((self.id.clone(), "popover")) + let mut popover = PopoverMenu::new((self.id.clone(), "popover")) .full_width(self.full_width) - .menu(move |_window, _cx| Some(self.menu.clone())) - .trigger(button) + .menu(move |_window, _cx| Some(self.menu.clone())); + + popover = match (text_button, element_button, self.trigger_tooltip) { + (Some(text_button), None, 
Some(tooltip)) => { + popover.trigger_with_tooltip(text_button, tooltip) + } + (Some(text_button), None, None) => popover.trigger(text_button), + (None, Some(element_button), Some(tooltip)) => { + popover.trigger_with_tooltip(element_button, tooltip) + } + (None, Some(element_button), None) => popover.trigger(element_button), + _ => popover, + }; + + popover .attach(match self.attach { Some(attach) => attach, None => Corner::BottomRight, diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 8f7ef41108afd22a7f932e8ab6ed1b74078244ec..d7fadbd962a97c83d31438f43e800f9a4ff8c777 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -279,7 +279,7 @@ impl Component for Icon { ) } - fn preview(_window: &mut Window, _cx: &mut App) -> Option { + fn preview(_window: &mut Window, cx: &mut App) -> Option { Some( v_flex() .gap_6() @@ -314,6 +314,30 @@ impl Component for Icon { ), ], ), + example_group_with_title( + "All Icons", + vec![single_example( + "All Icons", + h_flex() + .image_cache(gpui::retain_all("all icons")) + .flex_wrap() + .gap_2() + .children(::iter().map( + |icon_name| { + h_flex() + .gap_1() + .border_1() + .rounded_md() + .px_2() + .py_1() + .border_color(Color::Muted.color(cx)) + .child(SharedString::new_static(icon_name.into())) + .child(Icon::new(icon_name).into_any_element()) + }, + )) + .into_any_element(), + )], + ), ]) .into_any_element(), ) diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index f8ac85528ec3317bb003d3f8763f8c57a7d4bba2..bf52d7be8c7e91b230eac295dff03f2679a004af 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -1,3 +1,5 @@ +use std::rc::Rc; + use crate::PlatformStyle; use crate::{Icon, IconName, IconSize, h_flex, prelude::*}; use gpui::{ @@ -5,23 +7,49 @@ use gpui::{ Modifiers, Window, relative, }; use itertools::Itertools; +use settings::KeybindSource; + +#[derive(Debug)] +enum Source { + Action { + action: Box, + focus_handle: Option, + }, + Keystrokes { + /// A keybinding consists of a set of keystrokes, + /// where each keystroke is a key and a set of modifier keys. + /// More than one keystroke produces a chord. + /// + /// This should always contain at least one keystroke. + keystrokes: Rc<[KeybindingKeystroke]>, + }, +} -#[derive(Debug, IntoElement, Clone, RegisterComponent)] -pub struct KeyBinding { - /// A keybinding consists of a set of keystrokes, - /// where each keystroke is a key and a set of modifier keys. - /// More than one keystroke produces a chord. - /// - /// This should always contain at least one keystroke. - pub keystrokes: Vec, +impl Clone for Source { + fn clone(&self) -> Self { + match self { + Source::Action { + action, + focus_handle, + } => Source::Action { + action: action.boxed_clone(), + focus_handle: focus_handle.clone(), + }, + Source::Keystrokes { keystrokes } => Source::Keystrokes { + keystrokes: keystrokes.clone(), + }, + } + } +} +#[derive(Clone, Debug, IntoElement, RegisterComponent)] +pub struct KeyBinding { + source: Source, + size: Option, /// The [`PlatformStyle`] to use when displaying this keybinding. platform_style: PlatformStyle, - size: Option, - /// Determines whether the keybinding is meant for vim mode. vim_mode: bool, - /// Indicates whether the keybinding is currently disabled. disabled: bool, } @@ -32,23 +60,13 @@ impl Global for VimStyle {} impl KeyBinding { /// Returns the highest precedence keybinding for an action. 
This is the last binding added to /// the keymap. User bindings are added after built-in bindings so that they take precedence. - pub fn for_action(action: &dyn Action, window: &mut Window, cx: &App) -> Option<Self> { - if let Some(focused) = window.focused(cx) { - return Self::for_action_in(action, &focused, window, cx); - } - let key_binding = window.highest_precedence_binding_for_action(action)?; - Some(Self::new_from_gpui(key_binding, cx)) + pub fn for_action(action: &dyn Action, cx: &App) -> Self { + Self::new(action, None, cx) } /// Like `for_action`, but lets you specify the context from which keybindings are matched. - pub fn for_action_in( - action: &dyn Action, - focus: &FocusHandle, - window: &Window, - cx: &App, - ) -> Option<Self> { - let key_binding = window.highest_precedence_binding_for_action_in(action, focus)?; - Some(Self::new_from_gpui(key_binding, cx)) + pub fn for_action_in(action: &dyn Action, focus: &FocusHandle, cx: &App) -> Self { + Self::new(action, Some(focus.clone()), cx) } pub fn set_vim_mode(cx: &mut App, enabled: bool) { @@ -59,18 +77,27 @@ impl KeyBinding { cx.try_global::<VimStyle>().is_some_and(|g| g.0) } - pub fn new(keystrokes: Vec<KeybindingKeystroke>, cx: &App) -> Self { + pub fn new(action: &dyn Action, focus_handle: Option<FocusHandle>, cx: &App) -> Self { Self { - keystrokes, - platform_style: PlatformStyle::platform(), + source: Source::Action { + action: action.boxed_clone(), + focus_handle, + }, size: None, vim_mode: KeyBinding::is_vim_mode(cx), + platform_style: PlatformStyle::platform(), disabled: false, } } - pub fn new_from_gpui(key_binding: gpui::KeyBinding, cx: &App) -> Self { - Self::new(key_binding.keystrokes().to_vec(), cx) + pub fn from_keystrokes(keystrokes: Rc<[KeybindingKeystroke]>, source: KeybindSource) -> Self { + Self { + source: Source::Keystrokes { keystrokes }, + size: None, + vim_mode: source == KeybindSource::Vim, + platform_style: PlatformStyle::platform(), + disabled: false, + } } /// Sets the [`PlatformStyle`] for this [`KeyBinding`]. 
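To make the reworked constructors above concrete, here is a minimal sketch of what call sites look like after this change. The enclosing helper function and the `menu::Confirm` action are assumptions for illustration; they mirror usages that appear elsewhere in this patch.

```rust
use gpui::{App, FocusHandle, KeybindingKeystroke, Keystroke};
use settings::KeybindSource;
use ui::KeyBinding;

// Hypothetical call site; `menu::Confirm` is the action used elsewhere in this patch.
fn keybinding_examples(focus_handle: &FocusHandle, cx: &App) -> Vec<KeyBinding> {
    // No `&mut Window` and no `Option` any more: the constructor only records the
    // action, and the actual binding lookup is deferred to `render`.
    let global = KeyBinding::for_action(&menu::Confirm, cx);

    // Same, but scoped to a specific focus handle.
    let scoped = KeyBinding::for_action_in(&menu::Confirm, focus_handle, cx);

    // Literal keystrokes now go through `from_keystrokes`, tagged with their source.
    let literal = KeyBinding::from_keystrokes(
        vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-s").unwrap())].into(),
        KeybindSource::Base,
    );

    vec![global, scoped, literal]
}
```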
@@ -91,11 +118,6 @@ impl KeyBinding { self.disabled = disabled; self } - - pub fn vim_mode(mut self, enabled: bool) -> Self { - self.vim_mode = enabled; - self - } } fn render_key( @@ -115,36 +137,54 @@ fn render_key( } impl RenderOnce for KeyBinding { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let color = self.disabled.then_some(Color::Disabled); - - h_flex() - .debug_selector(|| { - format!( - "KEY_BINDING-{}", - self.keystrokes - .iter() - .map(|k| k.key().to_string()) - .collect::>() - .join(" ") - ) - }) - .gap(DynamicSpacing::Base04.rems(cx)) - .flex_none() - .children(self.keystrokes.iter().map(|keystroke| { - h_flex() - .flex_none() - .py_0p5() - .rounded_xs() - .text_color(cx.theme().colors().text_muted) - .children(render_keybinding_keystroke( - keystroke, - color, - self.size, - self.platform_style, - self.vim_mode, - )) - })) + fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { + let render_keybinding = |keystrokes: &[KeybindingKeystroke]| { + let color = self.disabled.then_some(Color::Disabled); + + h_flex() + .debug_selector(|| { + format!( + "KEY_BINDING-{}", + keystrokes + .iter() + .map(|k| k.key().to_string()) + .collect::>() + .join(" ") + ) + }) + .gap(DynamicSpacing::Base04.rems(cx)) + .flex_none() + .children(keystrokes.iter().map(|keystroke| { + h_flex() + .flex_none() + .py_0p5() + .rounded_xs() + .text_color(cx.theme().colors().text_muted) + .children(render_keybinding_keystroke( + keystroke, + color, + self.size, + PlatformStyle::platform(), + self.vim_mode, + )) + })) + .into_any_element() + }; + + match self.source { + Source::Action { + action, + focus_handle, + } => focus_handle + .or_else(|| window.focused(cx)) + .and_then(|focus| { + window.highest_precedence_binding_for_action_in(action.as_ref(), &focus) + }) + .or_else(|| window.highest_precedence_binding_for_action(action.as_ref())) + .map(|binding| render_keybinding(binding.keystrokes())), + Source::Keystrokes { keystrokes } => Some(render_keybinding(keystrokes.as_ref())), + } + .unwrap_or_else(|| gpui::Empty.into_any_element()) } } @@ -517,79 +557,79 @@ impl Component for KeyBinding { ) } - fn preview(_window: &mut Window, cx: &mut App) -> Option { - Some( - v_flex() - .gap_6() - .children(vec![ - example_group_with_title( - "Basic Usage", - vec![ - single_example( - "Default", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None), - cx, - ) - .into_any_element(), - ), - single_example( - "Mac Style", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("cmd-s", gpui::NoAction, None), - cx, - ) - .platform_style(PlatformStyle::Mac) - .into_any_element(), - ), - single_example( - "Windows Style", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None), - cx, - ) - .platform_style(PlatformStyle::Windows) - .into_any_element(), - ), - ], - ), - example_group_with_title( - "Vim Mode", - vec![single_example( - "Vim Mode Enabled", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("dd", gpui::NoAction, None), - cx, - ) - .vim_mode(true) - .into_any_element(), - )], - ), - example_group_with_title( - "Complex Bindings", - vec![ - single_example( - "Multiple Keys", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("ctrl-k ctrl-b", gpui::NoAction, None), - cx, - ) - .into_any_element(), - ), - single_example( - "With Shift", - KeyBinding::new_from_gpui( - gpui::KeyBinding::new("shift-cmd-p", gpui::NoAction, None), - cx, - ) - .into_any_element(), - ), - ], - ), - ]) - .into_any_element(), - ) 
- } + // fn preview(_window: &mut Window, cx: &mut App) -> Option { + // Some( + // v_flex() + // .gap_6() + // .children(vec![ + // example_group_with_title( + // "Basic Usage", + // vec![ + // single_example( + // "Default", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None), + // cx, + // ) + // .into_any_element(), + // ), + // single_example( + // "Mac Style", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("cmd-s", gpui::NoAction, None), + // cx, + // ) + // .platform_style(PlatformStyle::Mac) + // .into_any_element(), + // ), + // single_example( + // "Windows Style", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None), + // cx, + // ) + // .platform_style(PlatformStyle::Windows) + // .into_any_element(), + // ), + // ], + // ), + // example_group_with_title( + // "Vim Mode", + // vec![single_example( + // "Vim Mode Enabled", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("dd", gpui::NoAction, None), + // cx, + // ) + // .vim_mode(true) + // .into_any_element(), + // )], + // ), + // example_group_with_title( + // "Complex Bindings", + // vec![ + // single_example( + // "Multiple Keys", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("ctrl-k ctrl-b", gpui::NoAction, None), + // cx, + // ) + // .into_any_element(), + // ), + // single_example( + // "With Shift", + // KeyBinding::new_from_gpui( + // gpui::KeyBinding::new("shift-cmd-p", gpui::NoAction, None), + // cx, + // ) + // .into_any_element(), + // ), + // ], + // ), + // ]) + // .into_any_element(), + // ) + // } } #[cfg(test)] diff --git a/crates/ui/src/components/keybinding_hint.rs b/crates/ui/src/components/keybinding_hint.rs index 58f2793ea0ee29b55eace9e7fe9e53c606ca0a43..c998e29f0ed6f5bccab976b11080320d4d65a7dd 100644 --- a/crates/ui/src/components/keybinding_hint.rs +++ b/crates/ui/src/components/keybinding_hint.rs @@ -14,10 +14,11 @@ use theme::Appearance; /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; +/// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( -/// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-s").unwrap())], cx), +/// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-s").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ) /// .prefix("Save:") @@ -45,10 +46,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())], cx), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ); /// # } @@ -74,11 +76,12 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::with_prefix( /// "Copy:", - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())], cx), + /// 
KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-c").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ); /// # } @@ -108,10 +111,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::with_suffix( - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-v").unwrap())], cx), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-v").unwrap())].into(), KeybindSource::Base), /// "Paste", /// Hsla::black() /// ); @@ -141,10 +145,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-x").unwrap())], cx), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-x").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ) /// .prefix("Cut:"); @@ -165,10 +170,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-f").unwrap())], cx), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-f").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ) /// .suffix("Find"); @@ -189,10 +195,11 @@ impl KeybindingHint { /// use gpui::{App, Hsla, KeybindingKeystroke, Keystroke}; /// use ui::prelude::*; /// use ui::{KeyBinding, KeybindingHint}; + /// use settings::KeybindSource; /// /// # fn example(cx: &App) { /// let hint = KeybindingHint::new( - /// KeyBinding::new(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-z").unwrap())], cx), + /// KeyBinding::from_keystrokes(vec![KeybindingKeystroke::from_keystroke(Keystroke::parse("ctrl-z").unwrap())].into(), KeybindSource::Base), /// Hsla::black() /// ) /// .size(Pixels::from(16.0)); @@ -265,10 +272,8 @@ impl Component for KeybindingHint { Some("Displays a keyboard shortcut hint with optional prefix and suffix text") } - fn preview(window: &mut Window, cx: &mut App) -> Option { - let enter_fallback = gpui::KeyBinding::new("enter", menu::Confirm, None); - let enter = KeyBinding::for_action(&menu::Confirm, window, cx) - .unwrap_or(KeyBinding::new_from_gpui(enter_fallback, cx)); + fn preview(_window: &mut Window, cx: &mut App) -> Option { + let enter = KeyBinding::for_action(&menu::Confirm, cx); let bg_color = cx.theme().colors().surface_background; diff --git a/crates/ui/src/components/label/highlighted_label.rs b/crates/ui/src/components/label/highlighted_label.rs index a3cbc33553f701b1c744b04d4e0481a2bd9de129..840bba7b173fe31a3472d758c64b0b1ef984da2c 100644 --- a/crates/ui/src/components/label/highlighted_label.rs +++ b/crates/ui/src/components/label/highlighted_label.rs @@ -15,9 +15,16 @@ impl HighlightedLabel { /// Constructs a label with the given characters highlighted. /// Characters are identified by UTF-8 byte position. 
pub fn new(label: impl Into, highlight_indices: Vec) -> Self { + let label = label.into(); + for &run in &highlight_indices { + assert!( + label.is_char_boundary(run), + "highlight index {run} is not a valid UTF-8 boundary" + ); + } Self { base: LabelLike::new(), - label: label.into(), + label, highlight_indices, } } diff --git a/crates/ui/src/components/stories/keybinding.rs b/crates/ui/src/components/stories/keybinding.rs index 594f70b6ab0fbafc5e997785c44c494b71320d72..5840a11cf702f7a47aed06791ab47f12e2418d9c 100644 --- a/crates/ui/src/components/stories/keybinding.rs +++ b/crates/ui/src/components/stories/keybinding.rs @@ -1,6 +1,7 @@ use gpui::NoAction; use gpui::Render; use itertools::Itertools; +use settings::KeybindSource; use story::Story; use crate::{KeyBinding, prelude::*}; @@ -15,19 +16,36 @@ impl Render for KeybindingStory { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let all_modifier_permutations = ["ctrl", "alt", "cmd", "shift"].into_iter().permutations(2); + const SOURCE: KeybindSource = KeybindSource::Base; + Story::container(cx) .child(Story::title_for::(cx)) .child(Story::label("Single Key", cx)) - .child(KeyBinding::new_from_gpui(binding("Z"), cx)) + .child(KeyBinding::from_keystrokes( + binding("Z").keystrokes().into(), + SOURCE, + )) .child(Story::label("Single Key with Modifier", cx)) .child( div() .flex() .gap_3() - .child(KeyBinding::new_from_gpui(binding("ctrl-c"), cx)) - .child(KeyBinding::new_from_gpui(binding("alt-c"), cx)) - .child(KeyBinding::new_from_gpui(binding("cmd-c"), cx)) - .child(KeyBinding::new_from_gpui(binding("shift-c"), cx)), + .child(KeyBinding::from_keystrokes( + binding("ctrl-c").keystrokes().into(), + SOURCE, + )) + .child(KeyBinding::from_keystrokes( + binding("alt-c").keystrokes().into(), + SOURCE, + )) + .child(KeyBinding::from_keystrokes( + binding("cmd-c").keystrokes().into(), + SOURCE, + )) + .child(KeyBinding::from_keystrokes( + binding("shift-c").keystrokes().into(), + SOURCE, + )), ) .child(Story::label("Single Key with Modifier (Permuted)", cx)) .child( @@ -41,58 +59,77 @@ impl Render for KeybindingStory { .gap_4() .py_3() .children(chunk.map(|permutation| { - KeyBinding::new_from_gpui( - binding(&(permutation.join("-") + "-x")), - cx, + KeyBinding::from_keystrokes( + binding(&(permutation.join("-") + "-x")) + .keystrokes() + .into(), + SOURCE, ) })) }), ), ) .child(Story::label("Single Key with All Modifiers", cx)) - .child(KeyBinding::new_from_gpui( - binding("ctrl-alt-cmd-shift-z"), - cx, + .child(KeyBinding::from_keystrokes( + binding("ctrl-alt-cmd-shift-z").keystrokes().into(), + SOURCE, )) .child(Story::label("Chord", cx)) - .child(KeyBinding::new_from_gpui(binding("a z"), cx)) + .child(KeyBinding::from_keystrokes( + binding("a z").keystrokes().into(), + SOURCE, + )) .child(Story::label("Chord with Modifier", cx)) - .child(KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx)) - .child(KeyBinding::new_from_gpui(binding("fn-s"), cx)) + .child(KeyBinding::from_keystrokes( + binding("ctrl-a shift-z").keystrokes().into(), + SOURCE, + )) + .child(KeyBinding::from_keystrokes( + binding("fn-s").keystrokes().into(), + SOURCE, + )) .child(Story::label("Single Key with All Modifiers (Linux)", cx)) .child( - KeyBinding::new_from_gpui(binding("ctrl-alt-cmd-shift-z"), cx) - .platform_style(PlatformStyle::Linux), + KeyBinding::from_keystrokes( + binding("ctrl-alt-cmd-shift-z").keystrokes().into(), + SOURCE, + ) + .platform_style(PlatformStyle::Linux), ) .child(Story::label("Chord (Linux)", cx)) 
.child( - KeyBinding::new_from_gpui(binding("a z"), cx).platform_style(PlatformStyle::Linux), + KeyBinding::from_keystrokes(binding("a z").keystrokes().into(), SOURCE) + .platform_style(PlatformStyle::Linux), ) .child(Story::label("Chord with Modifier (Linux)", cx)) .child( - KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx) + KeyBinding::from_keystrokes(binding("ctrl-a shift-z").keystrokes().into(), SOURCE) .platform_style(PlatformStyle::Linux), ) .child( - KeyBinding::new_from_gpui(binding("fn-s"), cx).platform_style(PlatformStyle::Linux), + KeyBinding::from_keystrokes(binding("fn-s").keystrokes().into(), SOURCE) + .platform_style(PlatformStyle::Linux), ) .child(Story::label("Single Key with All Modifiers (Windows)", cx)) .child( - KeyBinding::new_from_gpui(binding("ctrl-alt-cmd-shift-z"), cx) - .platform_style(PlatformStyle::Windows), + KeyBinding::from_keystrokes( + binding("ctrl-alt-cmd-shift-z").keystrokes().into(), + SOURCE, + ) + .platform_style(PlatformStyle::Windows), ) .child(Story::label("Chord (Windows)", cx)) .child( - KeyBinding::new_from_gpui(binding("a z"), cx) + KeyBinding::from_keystrokes(binding("a z").keystrokes().into(), SOURCE) .platform_style(PlatformStyle::Windows), ) .child(Story::label("Chord with Modifier (Windows)", cx)) .child( - KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx) + KeyBinding::from_keystrokes(binding("ctrl-a shift-z").keystrokes().into(), SOURCE) .platform_style(PlatformStyle::Windows), ) .child( - KeyBinding::new_from_gpui(binding("fn-s"), cx) + KeyBinding::from_keystrokes(binding("fn-s").keystrokes().into(), SOURCE) .platform_style(PlatformStyle::Windows), ) } diff --git a/crates/ui/src/components/toggle.rs b/crates/ui/src/components/toggle.rs index 8d582c11e77f4469bb959ec656c9d6800603a72e..ab66b71996d6c7b64d0d3867ab73bd9727816316 100644 --- a/crates/ui/src/components/toggle.rs +++ b/crates/ui/src/components/toggle.rs @@ -514,7 +514,7 @@ impl RenderOnce for Switch { self.tab_index.filter(|_| !self.disabled), |this, tab_index| { this.tab_index(tab_index) - .focus(|mut style| { + .focus_visible(|mut style| { style.border_color = Some(cx.theme().colors().border_focused); style }) diff --git a/crates/ui/src/components/tooltip.rs b/crates/ui/src/components/tooltip.rs index 4bfb7d2fc3e38ba5af2d1734d28de75a51096811..8b4ff3f73163f38e19da80462e687db3d88efc6f 100644 --- a/crates/ui/src/components/tooltip.rs +++ b/crates/ui/src/components/tooltip.rs @@ -64,11 +64,11 @@ impl Tooltip { ) -> impl Fn(&mut Window, &mut App) -> AnyView + use { let title = title.into(); let action = action.boxed_clone(); - move |window, cx| { + move |_, cx| { cx.new(|cx| Self { title: Title::Str(title.clone()), meta: None, - key_binding: KeyBinding::for_action(action.as_ref(), window, cx), + key_binding: Some(KeyBinding::for_action(action.as_ref(), cx)), }) .into() } @@ -82,11 +82,15 @@ impl Tooltip { let title = title.into(); let action = action.boxed_clone(); let focus_handle = focus_handle.clone(); - move |window, cx| { + move |_, cx| { cx.new(|cx| Self { title: Title::Str(title.clone()), meta: None, - key_binding: KeyBinding::for_action_in(action.as_ref(), &focus_handle, window, cx), + key_binding: Some(KeyBinding::for_action_in( + action.as_ref(), + &focus_handle, + cx, + )), }) .into() } @@ -95,13 +99,12 @@ impl Tooltip { pub fn for_action( title: impl Into, action: &dyn Action, - window: &mut Window, cx: &mut App, ) -> AnyView { cx.new(|cx| Self { title: Title::Str(title.into()), meta: None, - key_binding: KeyBinding::for_action(action, window, cx), + 
key_binding: Some(KeyBinding::for_action(action, cx)), }) .into() } @@ -110,13 +113,12 @@ impl Tooltip { title: impl Into, action: &dyn Action, focus_handle: &FocusHandle, - window: &mut Window, cx: &mut App, ) -> AnyView { cx.new(|cx| Self { title: title.into().into(), meta: None, - key_binding: KeyBinding::for_action_in(action, focus_handle, window, cx), + key_binding: Some(KeyBinding::for_action_in(action, focus_handle, cx)), }) .into() } @@ -125,13 +127,12 @@ impl Tooltip { title: impl Into, action: Option<&dyn Action>, meta: impl Into, - window: &mut Window, cx: &mut App, ) -> AnyView { cx.new(|cx| Self { title: title.into().into(), meta: Some(meta.into()), - key_binding: action.and_then(|action| KeyBinding::for_action(action, window, cx)), + key_binding: action.map(|action| KeyBinding::for_action(action, cx)), }) .into() } @@ -141,14 +142,12 @@ impl Tooltip { action: Option<&dyn Action>, meta: impl Into, focus_handle: &FocusHandle, - window: &mut Window, cx: &mut App, ) -> AnyView { cx.new(|cx| Self { title: title.into().into(), meta: Some(meta.into()), - key_binding: action - .and_then(|action| KeyBinding::for_action_in(action, focus_handle, window, cx)), + key_binding: action.map(|action| KeyBinding::for_action_in(action, focus_handle, cx)), }) .into() } diff --git a/crates/ui/src/components/tree_view_item.rs b/crates/ui/src/components/tree_view_item.rs index 8647b13a65dee64fd825c814303815241547cd75..c96800223d9328779a2e71194a31315e1d57c175 100644 --- a/crates/ui/src/components/tree_view_item.rs +++ b/crates/ui/src/components/tree_view_item.rs @@ -159,7 +159,7 @@ impl RenderOnce for TreeViewItem { .rounded_sm() .border_1() .border_color(transparent_border) - .focus(|s| s.border_color(focused_border)) + .focus_visible(|s| s.border_color(focused_border)) .when(self.selected, |this| { this.border_color(selected_border).bg(selected_bg) }) diff --git a/crates/ui_input/Cargo.toml b/crates/ui_input/Cargo.toml index 0f107e42c382d55c2e2d6725336bc3af569a222d..4e7b08241dff8e3e5c00052826485c309449d205 100644 --- a/crates/ui_input/Cargo.toml +++ b/crates/ui_input/Cargo.toml @@ -14,14 +14,11 @@ path = "src/ui_input.rs" [dependencies] component.workspace = true editor.workspace = true -fuzzy.workspace = true gpui.workspace = true menu.workspace = true -picker.workspace = true settings.workspace = true theme.workspace = true ui.workspace = true -workspace-hack.workspace = true [features] default = [] diff --git a/crates/ui_input/src/input_field.rs b/crates/ui_input/src/input_field.rs new file mode 100644 index 0000000000000000000000000000000000000000..82f7f0261facef8a7c6a422b2ff4ed335229aeb3 --- /dev/null +++ b/crates/ui_input/src/input_field.rs @@ -0,0 +1,222 @@ +use component::{example_group, single_example}; +use editor::{Editor, EditorElement, EditorStyle}; +use gpui::{App, Entity, FocusHandle, Focusable, FontStyle, Hsla, Length, TextStyle}; +use settings::Settings; +use std::sync::Arc; +use theme::ThemeSettings; +use ui::prelude::*; + +pub struct InputFieldStyle { + text_color: Hsla, + background_color: Hsla, + border_color: Hsla, +} + +/// An Input Field component that can be used to create text fields like search inputs, form fields, etc. +/// +/// It wraps a single line [`Editor`] and allows for common field properties like labels, placeholders, icons, etc. +#[derive(RegisterComponent)] +pub struct InputField { + /// An optional label for the text field. + /// + /// Its position is determined by the [`FieldLabelLayout`]. + label: Option, + /// The size of the label text. 
+ label_size: LabelSize, + /// The placeholder text for the text field. + placeholder: SharedString, + /// Exposes the underlying [`Entity`] to allow for customizing the editor beyond the provided API. + /// + /// This likely will only be public in the short term, ideally the API will be expanded to cover necessary use cases. + pub editor: Entity, + /// An optional icon that is displayed at the start of the text field. + /// + /// For example, a magnifying glass icon in a search field. + start_icon: Option, + /// Whether the text field is disabled. + disabled: bool, + /// The minimum width of for the input + min_width: Length, +} + +impl Focusable for InputField { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.editor.focus_handle(cx) + } +} + +impl InputField { + pub fn new(window: &mut Window, cx: &mut App, placeholder: impl Into) -> Self { + let placeholder_text = placeholder.into(); + + let editor = cx.new(|cx| { + let mut input = Editor::single_line(window, cx); + input.set_placeholder_text(&placeholder_text, window, cx); + input + }); + + Self { + label: None, + label_size: LabelSize::Small, + placeholder: placeholder_text, + editor, + start_icon: None, + disabled: false, + min_width: px(192.).into(), + } + } + + pub fn start_icon(mut self, icon: IconName) -> Self { + self.start_icon = Some(icon); + self + } + + pub fn label(mut self, label: impl Into) -> Self { + self.label = Some(label.into()); + self + } + + pub fn label_size(mut self, size: LabelSize) -> Self { + self.label_size = size; + self + } + + pub fn label_min_width(mut self, width: impl Into) -> Self { + self.min_width = width.into(); + self + } + + pub fn set_disabled(&mut self, disabled: bool, cx: &mut Context) { + self.disabled = disabled; + self.editor + .update(cx, |editor, _| editor.set_read_only(disabled)) + } + + pub fn is_empty(&self, cx: &App) -> bool { + self.editor().read(cx).text(cx).trim().is_empty() + } + + pub fn editor(&self) -> &Entity { + &self.editor + } + + pub fn text(&self, cx: &App) -> String { + self.editor().read(cx).text(cx) + } + + pub fn set_text(&self, text: impl Into>, window: &mut Window, cx: &mut App) { + self.editor() + .update(cx, |editor, cx| editor.set_text(text, window, cx)) + } +} + +impl Render for InputField { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let theme_color = cx.theme().colors(); + + let mut style = InputFieldStyle { + text_color: theme_color.text, + background_color: theme_color.editor_background, + border_color: theme_color.border_variant, + }; + + if self.disabled { + style.text_color = theme_color.text_disabled; + style.background_color = theme_color.editor_background; + style.border_color = theme_color.border_disabled; + } + + // if self.error_message.is_some() { + // style.text_color = cx.theme().status().error; + // style.border_color = cx.theme().status().error_border + // } + + let text_style = TextStyle { + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_size: rems(0.875).into(), + font_weight: settings.buffer_font.weight, + font_style: FontStyle::Normal, + line_height: relative(1.2), + color: style.text_color, + ..Default::default() + }; + + let editor_style = EditorStyle { + background: theme_color.ghost_element_background, + local_player: cx.theme().players().local(), + syntax: cx.theme().syntax().clone(), + text: text_style, + ..Default::default() + }; + + v_flex() + .id(self.placeholder.clone()) + 
.w_full() + .gap_1() + .when_some(self.label.clone(), |this, label| { + this.child( + Label::new(label) + .size(self.label_size) + .color(if self.disabled { + Color::Disabled + } else { + Color::Default + }), + ) + }) + .child( + h_flex() + .min_w(self.min_width) + .min_h_8() + .w_full() + .px_2() + .py_1p5() + .flex_grow() + .text_color(style.text_color) + .rounded_md() + .bg(style.background_color) + .border_1() + .border_color(style.border_color) + .when_some(self.start_icon, |this, icon| { + this.gap_1() + .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) + }) + .child(EditorElement::new(&self.editor, editor_style)), + ) + } +} + +impl Component for InputField { + fn scope() -> ComponentScope { + ComponentScope::Input + } + + fn preview(window: &mut Window, cx: &mut App) -> Option { + let input_small = + cx.new(|cx| InputField::new(window, cx, "placeholder").label("Small Label")); + + let input_regular = cx.new(|cx| { + InputField::new(window, cx, "placeholder") + .label("Regular Label") + .label_size(LabelSize::Default) + }); + + Some( + v_flex() + .gap_6() + .children(vec![example_group(vec![ + single_example( + "Small Label (Default)", + div().child(input_small).into_any_element(), + ), + single_example( + "Regular Label", + div().child(input_regular).into_any_element(), + ), + ])]) + .into_any_element(), + ) + } +} diff --git a/crates/ui_input/src/number_field.rs b/crates/ui_input/src/number_field.rs index b3f50584d69d9adc965028400c26fa68074b9b84..f6dc3349cddded1453a5c49270507783fd27ecd2 100644 --- a/crates/ui_input/src/number_field.rs +++ b/crates/ui_input/src/number_field.rs @@ -8,7 +8,7 @@ use std::{ use editor::{Editor, EditorStyle}; use gpui::{ClickEvent, Entity, FocusHandle, Focusable, FontWeight, Modifiers}; -use settings::{CodeFade, MinimumContrast}; +use settings::{CenteredPaddingSettings, CodeFade, DelayMs, InactiveOpacity, MinimumContrast}; use ui::prelude::*; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] @@ -31,78 +31,55 @@ pub trait NumberFieldType: Display + Copy + Clone + Sized + PartialOrd + FromStr fn saturating_sub(self, rhs: Self) -> Self; } -impl NumberFieldType for gpui::FontWeight { - fn default_step() -> Self { - FontWeight(10.0) - } - fn large_step() -> Self { - FontWeight(50.0) - } - fn small_step() -> Self { - FontWeight(5.0) - } - fn min_value() -> Self { - gpui::FontWeight::THIN - } - fn max_value() -> Self { - gpui::FontWeight::BLACK - } - fn saturating_add(self, rhs: Self) -> Self { - FontWeight((self.0 + rhs.0).min(Self::max_value().0)) - } - fn saturating_sub(self, rhs: Self) -> Self { - FontWeight((self.0 - rhs.0).max(Self::min_value().0)) - } -} +macro_rules! 
impl_newtype_numeric_stepper { + ($type:ident, $default:expr, $large:expr, $small:expr, $min:expr, $max:expr) => { + impl NumberFieldType for $type { + fn default_step() -> Self { + $default.into() + } -impl NumberFieldType for settings::CodeFade { - fn default_step() -> Self { - CodeFade(0.10) - } - fn large_step() -> Self { - CodeFade(0.20) - } - fn small_step() -> Self { - CodeFade(0.05) - } - fn min_value() -> Self { - CodeFade(0.0) - } - fn max_value() -> Self { - CodeFade(0.9) - } - fn saturating_add(self, rhs: Self) -> Self { - CodeFade((self.0 + rhs.0).min(Self::max_value().0)) - } - fn saturating_sub(self, rhs: Self) -> Self { - CodeFade((self.0 - rhs.0).max(Self::min_value().0)) - } -} + fn large_step() -> Self { + $large.into() + } -impl NumberFieldType for settings::MinimumContrast { - fn default_step() -> Self { - MinimumContrast(1.0) - } - fn large_step() -> Self { - MinimumContrast(10.0) - } - fn small_step() -> Self { - MinimumContrast(0.5) - } - fn min_value() -> Self { - MinimumContrast(0.0) - } - fn max_value() -> Self { - MinimumContrast(106.0) - } - fn saturating_add(self, rhs: Self) -> Self { - MinimumContrast((self.0 + rhs.0).min(Self::max_value().0)) - } - fn saturating_sub(self, rhs: Self) -> Self { - MinimumContrast((self.0 - rhs.0).max(Self::min_value().0)) - } + fn small_step() -> Self { + $small.into() + } + + fn min_value() -> Self { + $min.into() + } + + fn max_value() -> Self { + $max.into() + } + + fn saturating_add(self, rhs: Self) -> Self { + $type((self.0 + rhs.0).min(Self::max_value().0)) + } + + fn saturating_sub(self, rhs: Self) -> Self { + $type((self.0 - rhs.0).max(Self::min_value().0)) + } + } + }; } +#[rustfmt::skip] +impl_newtype_numeric_stepper!(FontWeight, 50., 100., 10., FontWeight::THIN, FontWeight::BLACK); +impl_newtype_numeric_stepper!(CodeFade, 0.1, 0.2, 0.05, 0.0, 0.9); +impl_newtype_numeric_stepper!(InactiveOpacity, 0.1, 0.2, 0.05, 0.0, 1.0); +impl_newtype_numeric_stepper!(MinimumContrast, 1., 10., 0.5, 0.0, 106.0); +impl_newtype_numeric_stepper!(DelayMs, 100, 500, 10, 0, 2000); +impl_newtype_numeric_stepper!( + CenteredPaddingSettings, + 0.05, + 0.2, + 0.1, + CenteredPaddingSettings::MIN_PADDING, + CenteredPaddingSettings::MAX_PADDING +); + macro_rules! impl_numeric_stepper_int { ($type:ident) => { impl NumberFieldType for $type { @@ -361,7 +338,7 @@ impl RenderOnce for NumberField { .border_color(border_color) .bg(bg_color) .hover(|s| s.bg(hover_bg_color)) - .focus(|s| s.border_color(focus_border_color).bg(hover_bg_color)) + .focus_visible(|s| s.border_color(focus_border_color).bg(hover_bg_color)) .child(Icon::new(icon).size(IconSize::Small)) }; @@ -392,7 +369,6 @@ impl RenderOnce for NumberField { let new_value = value.saturating_sub(step); let new_value = if new_value < min { min } else { new_value }; on_change(&new_value, window, cx); - window.focus_prev(); } }; diff --git a/crates/ui_input/src/ui_input.rs b/crates/ui_input/src/ui_input.rs index 56f0626f0a502c2bbf5471491441f69e7820e86e..ddc0e659a2c34ffe53424bff24480c3f1b5875fb 100644 --- a/crates/ui_input/src/ui_input.rs +++ b/crates/ui_input/src/ui_input.rs @@ -1,233 +1,9 @@ -//! # UI – Text Field -//! -//! This crate provides a text field component that can be used to create text fields like search inputs, form fields, etc. +//! This crate provides UI components that can be used for form-like scenarios, such as a input and number field. //! //! It can't be located in the `ui` crate because it depends on `editor`. //! 
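To ground the crate description above, a minimal sketch of constructing the `InputField` component introduced earlier in this patch (the renamed `SingleLineInput`). The surrounding window/app context and the `IconName::MagnifyingGlass` variant are assumptions for illustration.

```rust
use gpui::{App, Entity, Window};
use ui::prelude::*;
use ui_input::InputField;

// Hypothetical helper building a search-style field with the builders defined above.
fn build_search_field(window: &mut Window, cx: &mut App) -> Entity<InputField> {
    cx.new(|cx| {
        InputField::new(window, cx, "Search files…")
            .label("Search")
            .label_size(LabelSize::Small)
            .start_icon(IconName::MagnifyingGlass)
    })
}
```

The value can then be read back through `text(cx)`, or checked with `is_empty(cx)`, on the returned entity.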
-mod font_picker; +mod input_field; mod number_field; -use component::{example_group, single_example}; -use editor::{Editor, EditorElement, EditorStyle}; -pub use font_picker::*; -use gpui::{App, Entity, FocusHandle, Focusable, FontStyle, Hsla, Length, TextStyle}; +pub use input_field::*; pub use number_field::*; -use settings::Settings; -use std::sync::Arc; -use theme::ThemeSettings; -use ui::prelude::*; - -pub struct SingleLineInputStyle { - text_color: Hsla, - background_color: Hsla, - border_color: Hsla, -} - -/// A Text Field that can be used to create text fields like search inputs, form fields, etc. -/// -/// It wraps a single line [`Editor`] and allows for common field properties like labels, placeholders, icons, etc. -#[derive(RegisterComponent)] -pub struct SingleLineInput { - /// An optional label for the text field. - /// - /// Its position is determined by the [`FieldLabelLayout`]. - label: Option, - /// The size of the label text. - label_size: LabelSize, - /// The placeholder text for the text field. - placeholder: SharedString, - /// Exposes the underlying [`Entity`] to allow for customizing the editor beyond the provided API. - /// - /// This likely will only be public in the short term, ideally the API will be expanded to cover necessary use cases. - pub editor: Entity, - /// An optional icon that is displayed at the start of the text field. - /// - /// For example, a magnifying glass icon in a search field. - start_icon: Option, - /// Whether the text field is disabled. - disabled: bool, - /// The minimum width of for the input - min_width: Length, -} - -impl Focusable for SingleLineInput { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.editor.focus_handle(cx) - } -} - -impl SingleLineInput { - pub fn new(window: &mut Window, cx: &mut App, placeholder: impl Into) -> Self { - let placeholder_text = placeholder.into(); - - let editor = cx.new(|cx| { - let mut input = Editor::single_line(window, cx); - input.set_placeholder_text(&placeholder_text, window, cx); - input - }); - - Self { - label: None, - label_size: LabelSize::Small, - placeholder: placeholder_text, - editor, - start_icon: None, - disabled: false, - min_width: px(192.).into(), - } - } - - pub fn start_icon(mut self, icon: IconName) -> Self { - self.start_icon = Some(icon); - self - } - - pub fn label(mut self, label: impl Into) -> Self { - self.label = Some(label.into()); - self - } - - pub fn label_size(mut self, size: LabelSize) -> Self { - self.label_size = size; - self - } - - pub fn label_min_width(mut self, width: impl Into) -> Self { - self.min_width = width.into(); - self - } - - pub fn set_disabled(&mut self, disabled: bool, cx: &mut Context) { - self.disabled = disabled; - self.editor - .update(cx, |editor, _| editor.set_read_only(disabled)) - } - - pub fn is_empty(&self, cx: &App) -> bool { - self.editor().read(cx).text(cx).trim().is_empty() - } - - pub fn editor(&self) -> &Entity { - &self.editor - } - - pub fn text(&self, cx: &App) -> String { - self.editor().read(cx).text(cx) - } - - pub fn set_text(&self, text: impl Into>, window: &mut Window, cx: &mut App) { - self.editor() - .update(cx, |editor, cx| editor.set_text(text, window, cx)) - } -} - -impl Render for SingleLineInput { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - let theme_color = cx.theme().colors(); - - let mut style = SingleLineInputStyle { - text_color: theme_color.text, - background_color: theme_color.editor_background, - border_color: 
theme_color.border_variant, - }; - - if self.disabled { - style.text_color = theme_color.text_disabled; - style.background_color = theme_color.editor_background; - style.border_color = theme_color.border_disabled; - } - - // if self.error_message.is_some() { - // style.text_color = cx.theme().status().error; - // style.border_color = cx.theme().status().error_border - // } - - let text_style = TextStyle { - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_size: rems(0.875).into(), - font_weight: settings.buffer_font.weight, - font_style: FontStyle::Normal, - line_height: relative(1.2), - color: style.text_color, - ..Default::default() - }; - - let editor_style = EditorStyle { - background: theme_color.ghost_element_background, - local_player: cx.theme().players().local(), - syntax: cx.theme().syntax().clone(), - text: text_style, - ..Default::default() - }; - - v_flex() - .id(self.placeholder.clone()) - .w_full() - .gap_1() - .when_some(self.label.clone(), |this, label| { - this.child( - Label::new(label) - .size(self.label_size) - .color(if self.disabled { - Color::Disabled - } else { - Color::Default - }), - ) - }) - .child( - h_flex() - .min_w(self.min_width) - .min_h_8() - .w_full() - .px_2() - .py_1p5() - .flex_grow() - .text_color(style.text_color) - .rounded_md() - .bg(style.background_color) - .border_1() - .border_color(style.border_color) - .when_some(self.start_icon, |this, icon| { - this.gap_1() - .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) - }) - .child(EditorElement::new(&self.editor, editor_style)), - ) - } -} - -impl Component for SingleLineInput { - fn scope() -> ComponentScope { - ComponentScope::Input - } - - fn preview(window: &mut Window, cx: &mut App) -> Option { - let input_small = - cx.new(|cx| SingleLineInput::new(window, cx, "placeholder").label("Small Label")); - - let input_regular = cx.new(|cx| { - SingleLineInput::new(window, cx, "placeholder") - .label("Regular Label") - .label_size(LabelSize::Default) - }); - - Some( - v_flex() - .gap_6() - .children(vec![example_group(vec![ - single_example( - "Small Label (Default)", - div().child(input_small).into_any_element(), - ), - single_example( - "Regular Label", - div().child(input_regular).into_any_element(), - ), - ])]) - .into_any_element(), - ) - } -} diff --git a/crates/ui_macros/Cargo.toml b/crates/ui_macros/Cargo.toml index 830b9dca8d5c42ec54db3c2aa323ede7d71aa5c9..74bd2186a7576fba067bf321972e4228c5292dec 100644 --- a/crates/ui_macros/Cargo.toml +++ b/crates/ui_macros/Cargo.toml @@ -15,7 +15,6 @@ proc-macro = true [dependencies] quote.workspace = true syn.workspace = true -workspace-hack.workspace = true [dev-dependencies] component.workspace = true diff --git a/crates/ui_prompt/Cargo.toml b/crates/ui_prompt/Cargo.toml index eefc71d257517c44a127ea75cb28ca28f7533705..55a98288433a7b31507310e20c4209a9d419e45f 100644 --- a/crates/ui_prompt/Cargo.toml +++ b/crates/ui_prompt/Cargo.toml @@ -22,4 +22,3 @@ settings.workspace = true theme.workspace = true ui.workspace = true workspace.workspace = true -workspace-hack.workspace = true diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index da500edd1bd6bfcb608804468fdc56c56c35395f..d7c5aae569ec0542c263d704e257ed6114bbe245 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "zed-util" +name = "util" version = "0.1.0" edition.workspace = true -publish = true +publish = false license = "Apache-2.0" description = "A collection of utility 
structs and functions used by Zed and GPUI" @@ -45,7 +45,6 @@ unicase.workspace = true util_macros = { workspace = true, optional = true } walkdir.workspace = true which.workspace = true -workspace-hack.workspace = true [target.'cfg(unix)'.dependencies] command-fds = "0.3.1" diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index d31828eb568978fdcddbf1030badb5911c730004..20187bf7376861ebd03e02f7fb006428c1c51ec4 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -15,7 +15,7 @@ use std::{ sync::LazyLock, }; -use crate::rel_path::RelPath; +use crate::{rel_path::RelPath, shell::ShellKind}; static HOME_DIR: OnceLock = OnceLock::new(); @@ -84,9 +84,7 @@ pub trait PathExt { fn multiple_extensions(&self) -> Option; /// Try to make a shell-safe representation of the path. - /// - /// For Unix, the path is escaped to be safe for POSIX shells - fn try_shell_safe(&self) -> anyhow::Result; + fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result; } impl> PathExt for T { @@ -164,24 +162,16 @@ impl> PathExt for T { Some(parts.into_iter().join(".")) } - fn try_shell_safe(&self) -> anyhow::Result { - #[cfg(target_os = "windows")] - { - Ok(self.as_ref().to_string_lossy().to_string()) - } - - #[cfg(not(target_os = "windows"))] - { - let path_str = self - .as_ref() - .to_str() - .with_context(|| "Path contains invalid UTF-8")?; - - // As of writing, this can only be fail if the path contains a null byte, which shouldn't be possible - // but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other - // errors are introduced in the future :( - Ok(shlex::try_quote(path_str)?.into_owned()) - } + fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result { + let path_str = self + .as_ref() + .to_str() + .with_context(|| "Path contains invalid UTF-8")?; + shell_kind + .try_quote(path_str) + .as_deref() + .map(ToOwned::to_owned) + .context("Failed to quote path") } } @@ -558,7 +548,7 @@ impl PathWithPosition { /// # Examples /// /// ``` - /// # use zed_util::paths::PathWithPosition; + /// # use util::paths::PathWithPosition; /// # use std::path::PathBuf; /// assert_eq!(PathWithPosition::parse_str("test_file"), PathWithPosition { /// path: PathBuf::from("test_file"), @@ -589,7 +579,7 @@ impl PathWithPosition { /// /// # Expected parsing results when encounter ill-formatted inputs. /// ``` - /// # use zed_util::paths::PathWithPosition; + /// # use util::paths::PathWithPosition; /// # use std::path::PathBuf; /// assert_eq!(PathWithPosition::parse_str("test_file.rs:a"), PathWithPosition { /// path: PathBuf::from("test_file.rs:a"), @@ -934,7 +924,7 @@ where /// 2. When encountering digits, treating consecutive digits as a single number /// 3. Comparing numbers by their numeric value rather than lexicographically /// 4. 
For non-numeric characters, using case-sensitive comparison with lowercase priority -fn natural_sort(a: &str, b: &str) -> Ordering { +pub fn natural_sort(a: &str, b: &str) -> Ordering { let mut a_iter = a.chars().peekable(); let mut b_iter = b.chars().peekable(); diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index f61a2ee50f459e088371b61f0d8a584633d963a0..7ab214d5105fb81c930954a1aaf9c4aa6fb865c5 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -1,6 +1,54 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; use std::{borrow::Cow, fmt, path::Path, sync::LazyLock}; -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] +/// Shell configuration to open the terminal with. +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)] +#[serde(rename_all = "snake_case")] +pub enum Shell { + /// Use the system's default terminal configuration in /etc/passwd + #[default] + System, + /// Use a specific program with no arguments. + Program(String), + /// Use a specific program with arguments. + WithArguments { + /// The program to run. + program: String, + /// The arguments to pass to the program. + args: Vec, + /// An optional string to override the title of the terminal tab + title_override: Option, + }, +} + +impl Shell { + pub fn program(&self) -> String { + match self { + Shell::Program(program) => program.clone(), + Shell::WithArguments { program, .. } => program.clone(), + Shell::System => get_system_shell(), + } + } + + pub fn program_and_args(&self) -> (String, &[String]) { + match self { + Shell::Program(program) => (program.clone(), &[]), + Shell::WithArguments { program, args, .. } => (program.clone(), args), + Shell::System => (get_system_shell(), &[]), + } + } + + pub fn shell_kind(&self, is_windows: bool) -> ShellKind { + match self { + Shell::Program(program) => ShellKind::new(program, is_windows), + Shell::WithArguments { program, .. 
} => ShellKind::new(program, is_windows), + Shell::System => ShellKind::system(), + } + } +} + +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum ShellKind { #[default] Posix, @@ -45,6 +93,7 @@ pub fn get_windows_git_bash() -> Option { let git = which::which("git").ok()?; let git_bash = git.parent()?.parent()?.join("bin").join("bash.exe"); if git_bash.is_file() { + log::info!("Found git-bash at {}", git_bash.display()); Some(git_bash.to_string_lossy().to_string()) } else { None @@ -173,44 +222,30 @@ impl fmt::Display for ShellKind { impl ShellKind { pub fn system() -> Self { - Self::new(&get_system_shell()) + Self::new(&get_system_shell(), cfg!(windows)) } - pub fn new(program: impl AsRef) -> Self { + pub fn new(program: impl AsRef, is_windows: bool) -> Self { let program = program.as_ref(); - let Some(program) = program.file_stem().and_then(|s| s.to_str()) else { - return if cfg!(windows) { - ShellKind::PowerShell - } else { - ShellKind::Posix - }; - }; - if program == "powershell" || program == "pwsh" { - ShellKind::PowerShell - } else if program == "cmd" { - ShellKind::Cmd - } else if program == "nu" { - ShellKind::Nushell - } else if program == "fish" { - ShellKind::Fish - } else if program == "csh" { - ShellKind::Csh - } else if program == "tcsh" { - ShellKind::Tcsh - } else if program == "rc" { - ShellKind::Rc - } else if program == "xonsh" { - ShellKind::Xonsh - } else if program == "sh" || program == "bash" { - ShellKind::Posix - } else { - if cfg!(windows) { - ShellKind::PowerShell - } else { - // Some other shell detected, the user might install and use a - // unix-like shell. - ShellKind::Posix - } + let program = program + .file_stem() + .unwrap_or_else(|| program.as_os_str()) + .to_string_lossy(); + + match &*program { + "powershell" | "pwsh" => ShellKind::PowerShell, + "cmd" => ShellKind::Cmd, + "nu" => ShellKind::Nushell, + "fish" => ShellKind::Fish, + "csh" => ShellKind::Csh, + "tcsh" => ShellKind::Tcsh, + "rc" => ShellKind::Rc, + "xonsh" => ShellKind::Xonsh, + "sh" | "bash" | "zsh" => ShellKind::Posix, + _ if is_windows => ShellKind::PowerShell, + // Some other shell detected, the user might install and use a + // unix-like shell. + _ => ShellKind::Posix, } } @@ -363,14 +398,27 @@ impl ShellKind { match self { ShellKind::PowerShell => Some('&'), ShellKind::Nushell => Some('^'), - _ => None, + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::Cmd + | ShellKind::Xonsh => None, } } pub const fn sequential_commands_separator(&self) -> char { match self { ShellKind::Cmd => '&', - _ => ';', + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::PowerShell + | ShellKind::Nushell + | ShellKind::Xonsh => ';', } } @@ -378,29 +426,103 @@ impl ShellKind { shlex::try_quote(arg).ok().map(|arg| match self { // If we are running in PowerShell, we want to take extra care when escaping strings. // In particular, we want to escape strings with a backtick (`) rather than a backslash (\). 
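As an aside on the `ShellKind::new` rewrite above: detection is now driven by an explicit `is_windows` flag rather than `cfg!(windows)`, which is why callers such as `ShellBuilder::new` now pass the platform through. A minimal sketch of the resulting behaviour, assuming the `util::shell` paths used in this patch:

```rust
use util::shell::{Shell, ShellKind};

fn shell_kind_examples() {
    // Known programs map directly to a kind; `zsh` is now treated as Posix.
    assert_eq!(ShellKind::new("zsh", false), ShellKind::Posix);
    assert_eq!(ShellKind::new("nu", false), ShellKind::Nushell);

    // Only the file stem matters, so full paths work as well.
    assert_eq!(ShellKind::new("/usr/bin/fish", false), ShellKind::Fish);

    // Unknown programs fall back per platform: PowerShell on Windows, Posix elsewhere.
    assert_eq!(ShellKind::new("mystery-shell", true), ShellKind::PowerShell);
    assert_eq!(ShellKind::new("mystery-shell", false), ShellKind::Posix);

    // The new `Shell` enum forwards to the same detection.
    let shell = Shell::Program("pwsh".into());
    assert_eq!(shell.shell_kind(true), ShellKind::PowerShell);
}
```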
- // TODO double escaping backslashes is not necessary in PowerShell and probably CMD - ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"")), - _ => arg, + ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"").replace("\\\\", "\\")), + ShellKind::Cmd => Cow::Owned(arg.replace("\\\\", "\\")), + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::Nushell + | ShellKind::Xonsh => arg, }) } + pub fn split(&self, input: &str) -> Option> { + shlex::split(input) + } + pub const fn activate_keyword(&self) -> &'static str { match self { ShellKind::Cmd => "", ShellKind::Nushell => "overlay use", ShellKind::PowerShell => ".", - ShellKind::Fish => "source", - ShellKind::Csh => "source", - ShellKind::Tcsh => "source", - ShellKind::Posix | ShellKind::Rc => "source", - ShellKind::Xonsh => "source", + ShellKind::Fish + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Posix + | ShellKind::Rc + | ShellKind::Xonsh => "source", } } pub const fn clear_screen_command(&self) -> &'static str { match self { ShellKind::Cmd => "cls", - _ => "clear", + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::PowerShell + | ShellKind::Nushell + | ShellKind::Xonsh => "clear", + } + } + + #[cfg(windows)] + /// We do not want to escape arguments if we are using CMD as our shell. + /// If we do we end up with too many quotes/escaped quotes for CMD to handle. + pub const fn tty_escape_args(&self) -> bool { + match self { + ShellKind::Cmd => false, + ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::PowerShell + | ShellKind::Nushell + | ShellKind::Xonsh => true, } } } + +#[cfg(test)] +mod tests { + use super::*; + + // Examples + // WSL + // wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "echo hello" + // wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "\"echo hello\"" | grep hello" + // wsl.exe --distribution NixOS --cd ~ env RUST_LOG=info,remote=debug .zed_wsl_server/zed-remote-server-dev-build proxy --identifier dev-workspace-53 + // PowerShell from Nushell + // nu -c overlay use "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\activate.nu"; ^"C:\Program Files\PowerShell\7\pwsh.exe" -C "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\python.exe -m pytest \"test_foo.py::test_foo\"" + // PowerShell from CMD + // cmd /C \" \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\activate.bat\"& \"C:\\\\Program Files\\\\PowerShell\\\\7\\\\pwsh.exe\" -C \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"\" + + #[test] + fn test_try_quote_powershell() { + let shell_kind = ShellKind::PowerShell; + assert_eq!( + shell_kind + .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"") + .unwrap() + .into_owned(), + "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest `\"test_foo.py::test_foo`\"\"".to_string() + ); + } + + #[test] + fn test_try_quote_cmd() { + let shell_kind = ShellKind::Cmd; + assert_eq!( + shell_kind + .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"") + .unwrap() + .into_owned(), + "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string() + 
); + } +} diff --git a/crates/task/src/shell_builder.rs b/crates/util/src/shell_builder.rs similarity index 93% rename from crates/task/src/shell_builder.rs rename to crates/util/src/shell_builder.rs index d6091997b27e89ff43b421e4ae5e58470510636f..7e52b67b35f6f3d21ea5e3ad5a0632cd46344125 100644 --- a/crates/task/src/shell_builder.rs +++ b/crates/util/src/shell_builder.rs @@ -1,8 +1,5 @@ -use util::shell::get_system_shell; - -use crate::Shell; - -pub use util::shell::ShellKind; +use crate::shell::get_system_shell; +use crate::shell::{Shell, ShellKind}; /// ShellBuilder is used to turn a user-requested task into a /// program that can be executed by the shell. @@ -18,14 +15,14 @@ pub struct ShellBuilder { impl ShellBuilder { /// Create a new ShellBuilder as configured. - pub fn new(shell: &Shell) -> Self { + pub fn new(shell: &Shell, is_windows: bool) -> Self { let (program, args) = match shell { Shell::System => (get_system_shell(), Vec::new()), Shell::Program(shell) => (shell.clone(), Vec::new()), Shell::WithArguments { program, args, .. } => (program.clone(), args.clone()), }; - let kind = ShellKind::new(&program); + let kind = ShellKind::new(&program, is_windows); Self { program, args, @@ -125,7 +122,7 @@ mod test { #[test] fn test_nu_shell_variable_substitution() { let shell = Shell::Program("nu".to_owned()); - let shell_builder = ShellBuilder::new(&shell); + let shell_builder = ShellBuilder::new(&shell, false); let (program, args) = shell_builder.build( Some("echo".into()), @@ -153,7 +150,7 @@ mod test { #[test] fn redirect_stdin_to_dev_null_precedence() { let shell = Shell::Program("nu".to_owned()); - let shell_builder = ShellBuilder::new(&shell); + let shell_builder = ShellBuilder::new(&shell, false); let (program, args) = shell_builder .redirect_stdin_to_dev_null() @@ -166,7 +163,7 @@ mod test { #[test] fn redirect_stdin_to_dev_null_fish() { let shell = Shell::Program("fish".to_owned()); - let shell_builder = ShellBuilder::new(&shell); + let shell_builder = ShellBuilder::new(&shell, false); let (program, args) = shell_builder .redirect_stdin_to_dev_null() diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index 9d097d91658d69e420599856ef67efe22f685d4d..b3c9e3bef390b945314ba79fcc34ff2669a349a6 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -35,8 +35,8 @@ async fn capture_unix( use std::os::unix::process::CommandExt; use std::process::Stdio; - let zed_path = super::get_shell_safe_zed_path()?; - let shell_kind = ShellKind::new(shell_path); + let shell_kind = ShellKind::new(shell_path, false); + let zed_path = super::get_shell_safe_zed_path(shell_kind)?; let mut command_string = String::new(); let mut command = std::process::Command::new(shell_path); @@ -135,7 +135,7 @@ async fn capture_windows( let zed_path = std::env::current_exe().context("Failed to determine current zed executable path.")?; - let shell_kind = ShellKind::new(shell_path); + let shell_kind = ShellKind::new(shell_path, true); let env_output = match shell_kind { ShellKind::Posix | ShellKind::Csh @@ -177,8 +177,12 @@ async fn capture_windows( .args([ "-c", &format!( - "cd '{}'; {} --printenv", + "cd '{}'; {}{} --printenv", directory.display(), + shell_kind + .command_prefix() + .map(|prefix| prefix.to_string()) + .unwrap_or_default(), zed_path.display() ), ]) diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index f2efc4532a594eb156f742483f804906314a7d73..3a78ef3d41e557d33d5af77021464ee1dcadf5e4 100644 --- a/crates/util/src/util.rs +++ 
@@ -9,6 +9,7 @@ pub mod rel_path;
 pub mod schemars;
 pub mod serde;
 pub mod shell;
+pub mod shell_builder;
 pub mod shell_env;
 pub mod size;
 #[cfg(any(test, feature = "test-support"))]
@@ -279,7 +280,11 @@ fn load_shell_from_passwd() -> Result<()> {
     );
 
     let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() };
-    if env::var("SHELL").map_or(true, |shell_env| shell_env != shell) {
+    let should_set_shell = env::var("SHELL").map_or(true, |shell_env| {
+        shell_env != shell && !std::path::Path::new(&shell_env).exists()
+    });
+
+    if should_set_shell {
         log::info!(
             "updating SHELL environment variable to value from passwd entry: {:?}",
             shell,
@@ -291,12 +296,12 @@ fn load_shell_from_passwd() -> Result<()> {
 }
 
 /// Returns a shell escaped path for the current zed executable
-pub fn get_shell_safe_zed_path() -> anyhow::Result<String> {
+pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result<String> {
     let zed_path =
         std::env::current_exe().context("Failed to determine current zed executable path.")?;
     zed_path
-        .try_shell_safe()
+        .try_shell_safe(shell_kind)
         .context("Failed to shell-escape Zed executable path.")
 }
 
@@ -349,7 +354,10 @@ pub async fn load_login_shell_environment() -> Result<()> {
     // into shell's `cd` command (and hooks) to manipulate env.
     // We do this so that we get the env a user would have when spawning a shell
     // in home directory.
-    for (name, value) in shell_env::capture(get_system_shell(), &[], paths::home_dir()).await? {
+    for (name, value) in shell_env::capture(get_system_shell(), &[], paths::home_dir())
+        .await
+        .with_context(|| format!("capturing environment with {:?}", get_system_shell()))?
+    {
         unsafe { env::set_var(&name, &value) };
     }
 
@@ -623,7 +631,7 @@ where
 }
 
 pub fn log_err(error: &E) {
-    log_error_with_caller(*Location::caller(), error, log::Level::Warn);
+    log_error_with_caller(*Location::caller(), error, log::Level::Error);
 }
 
 pub trait TryFutureExt {
@@ -923,7 +931,7 @@ impl PartialOrd for NumericPrefixWithSuffix<'_> {
 /// # Examples
 ///
 /// ```
-/// use zed_util::capitalize;
+/// use util::capitalize;
 ///
 /// assert_eq!(capitalize("hello"), "Hello");
 /// assert_eq!(capitalize("WORLD"), "WORLD");
diff --git a/crates/util_macros/Cargo.toml b/crates/util_macros/Cargo.toml
index b1c0334c870f7fee799ddc8b208ba9ad8d48fc53..f72955b3aeec58369fd8a24962524c144fdf3bc5 100644
--- a/crates/util_macros/Cargo.toml
+++ b/crates/util_macros/Cargo.toml
@@ -1,8 +1,8 @@
 [package]
-name = "zed-util-macros"
+name = "util_macros"
 version = "0.1.0"
 edition.workspace = true
-publish = true
+publish = false
 license = "Apache-2.0"
 description = "Utility macros for Zed"
 
@@ -18,7 +18,6 @@ doctest = false
 quote.workspace = true
 syn.workspace = true
 perf.workspace = true
-workspace-hack.workspace = true
 
 [features]
 perf-enabled = []
diff --git a/crates/util_macros/src/util_macros.rs b/crates/util_macros/src/util_macros.rs
index 2cdc7f46f5f0f015498b90e736a7cbe7863f2784..4973e41de2837645d29378ba2572430289c8ac86 100644
--- a/crates/util_macros/src/util_macros.rs
+++ b/crates/util_macros/src/util_macros.rs
@@ -12,7 +12,7 @@ use syn::{ItemFn, LitStr, parse_macro_input, parse_quote};
 ///
 /// # Example
 /// ```rust
-/// use zed_util_macros::path;
+/// use util_macros::path;
 ///
 /// let path = path!("/Users/user/file.txt");
 /// #[cfg(target_os = "windows")]
@@ -43,7 +43,7 @@ pub fn path(input: TokenStream) -> TokenStream {
 ///
 /// # Example
 /// ```rust
-/// use zed_util_macros::uri;
+/// use util_macros::uri;
 ///
 /// let uri =
uri!("file:///path/to/file"); /// #[cfg(target_os = "windows")] @@ -69,7 +69,7 @@ pub fn uri(input: TokenStream) -> TokenStream { /// /// # Example /// ```rust -/// use zed_util_macros::line_endings; +/// use util_macros::line_endings; /// /// let text = line_endings!("Hello\nWorld"); /// #[cfg(target_os = "windows")] @@ -156,7 +156,7 @@ impl PerfArgs { /// /// # Examples /// ```rust -/// use zed_util_macros::perf; +/// use util_macros::perf; /// /// #[perf] /// fn generic_test() { @@ -172,7 +172,7 @@ impl PerfArgs { /// This also works with `#[gpui::test]`s, though in most cases it shouldn't /// be used with automatic iterations. /// ```rust,ignore -/// use zed_util_macros::perf; +/// use util_macros::perf; /// /// #[perf(iterations = 1, critical)] /// #[gpui::test] diff --git a/crates/vercel/Cargo.toml b/crates/vercel/Cargo.toml index 60fa1a2390b2ea4e1169765e55f62a36d3d281bf..98b26c91041ab59dfa479d6b619b1891b8d1397d 100644 --- a/crates/vercel/Cargo.toml +++ b/crates/vercel/Cargo.toml @@ -20,4 +20,3 @@ anyhow.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index ad84eecd91ddfc4b300b437936aba0ac21b4e41c..9d6381f8e6aa9afdc8b6ce5fa81bbcf47cca21f5 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -26,6 +26,7 @@ db.workspace = true editor.workspace = true env_logger.workspace = true futures.workspace = true +fuzzy.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true @@ -52,7 +53,6 @@ util_macros.workspace = true vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [dev-dependencies] assets.workspace = true diff --git a/crates/vim/src/change_list.rs b/crates/vim/src/change_list.rs index c92ce4720e8ccd0454a83409d76789334192745f..a921d182e6ebd0ef96ef0b8d1cce75ed6d532d96 100644 --- a/crates/vim/src/change_list.rs +++ b/crates/vim/src/change_list.rs @@ -50,7 +50,8 @@ impl Vim { pub(crate) fn push_to_change_list(&mut self, window: &mut Window, cx: &mut Context) { let Some((new_positions, buffer)) = self.update_editor(cx, |vim, editor, cx| { - let (map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); let buffer = editor.buffer().clone(); let pop_state = editor @@ -59,7 +60,7 @@ impl Vim { .map(|previous| { previous.len() == selections.len() && previous.iter().enumerate().all(|(ix, p)| { - p.to_display_point(&map).row() == selections[ix].head().row() + p.to_display_point(&display_map).row() == selections[ix].head().row() }) }) .unwrap_or(false); @@ -68,11 +69,11 @@ impl Vim { .into_iter() .map(|s| { let point = if vim.mode == Mode::Insert { - movement::saturating_left(&map, s.head()) + movement::saturating_left(&display_map, s.head()) } else { s.head() }; - map.display_point_to_anchor(point, Bias::Left) + display_map.display_point_to_anchor(point, Bias::Left) }) .collect::>(); diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index ef19d41ed88f7f6a9dfc64521041a41d2238da31..9dc4ec999a47e6a0e8ab802761cab474ef81499b 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1,13 +1,15 @@ use anyhow::{Result, anyhow}; use collections::{HashMap, HashSet}; -use command_palette_hooks::CommandInterceptResult; +use command_palette_hooks::{CommandInterceptItem, 
CommandInterceptResult}; use editor::{ Bias, Editor, EditorSettings, SelectionEffects, ToPoint, actions::{SortLinesCaseInsensitive, SortLinesCaseSensitive}, display_map::ToDisplayPoint, }; use futures::AsyncWriteExt as _; -use gpui::{Action, App, AppContext as _, Context, Global, Keystroke, Task, Window, actions}; +use gpui::{ + Action, App, AppContext as _, Context, Global, Keystroke, Task, WeakEntity, Window, actions, +}; use itertools::Itertools; use language::Point; use multi_buffer::MultiBufferRow; @@ -20,7 +22,7 @@ use settings::{Settings, SettingsStore}; use std::{ iter::Peekable, ops::{Deref, Range}, - path::Path, + path::{Path, PathBuf}, process::Stdio, str::Chars, sync::OnceLock, @@ -28,8 +30,12 @@ use std::{ }; use task::{HideStrategy, RevealStrategy, SpawnInTerminal, TaskId}; use ui::ActiveTheme; -use util::{ResultExt, rel_path::RelPath}; -use workspace::{Item, SaveIntent, notifications::NotifyResultExt}; +use util::{ + ResultExt, + paths::PathStyle, + rel_path::{RelPath, RelPathBuf}, +}; +use workspace::{Item, SaveIntent, Workspace, notifications::NotifyResultExt}; use workspace::{SplitDirection, notifications::DetachAndPromptErr}; use zed_actions::{OpenDocs, RevealTarget}; @@ -85,7 +91,7 @@ pub enum VimOption { } impl VimOption { - fn possible_commands(query: &str) -> Vec { + fn possible_commands(query: &str) -> Vec { let mut prefix_of_options = Vec::new(); let mut options = query.split(" ").collect::>(); let prefix = options.pop().unwrap_or_default(); @@ -102,7 +108,7 @@ impl VimOption { let mut options = prefix_of_options.clone(); options.push(possible); - CommandInterceptResult { + CommandInterceptItem { string: format!( ":set {}", options.iter().map(|opt| opt.to_string()).join(" ") @@ -600,7 +606,9 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { let result = vim.update_editor(cx, |vim, editor, cx| { let snapshot = editor.snapshot(window, cx); let buffer_row = action.range.head().buffer_row(vim, editor, window, cx)?; - let current = editor.selections.newest::(cx); + let current = editor + .selections + .newest::(&editor.display_snapshot(cx)); let target = snapshot .buffer_snapshot() .clip_point(Point::new(buffer_row.0, current.head().column), Bias::Left); @@ -725,6 +733,13 @@ struct VimCommand { >, >, has_count: bool, + has_filename: bool, +} + +struct ParsedQuery { + args: String, + has_bang: bool, + has_space: bool, } impl VimCommand { @@ -760,6 +775,15 @@ impl VimCommand { self } + fn filename( + mut self, + f: impl Fn(Box, String) -> Option> + Send + Sync + 'static, + ) -> Self { + self.args = Some(Box::new(f)); + self.has_filename = true; + self + } + fn range( mut self, f: impl Fn(Box, &CommandRange) -> Option> + Send + Sync + 'static, @@ -773,14 +797,80 @@ impl VimCommand { self } - fn parse( - &self, - query: &str, - range: &Option, - cx: &App, - ) -> Option> { + fn generate_filename_completions( + parsed_query: &ParsedQuery, + workspace: WeakEntity, + cx: &mut App, + ) -> Task> { + let ParsedQuery { + args, + has_bang: _, + has_space: _, + } = parsed_query; + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Vec::new()); + }; + + let (task, args_path) = workspace.update(cx, |workspace, cx| { + let prefix = workspace + .project() + .read(cx) + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().to_path_buf()) + .next() + .or_else(std::env::home_dir) + .unwrap_or_else(|| PathBuf::from("")); + + let rel_path = match RelPath::new(Path::new(&args), PathStyle::local()) { + Ok(path) => path.to_rel_path_buf(), + Err(_) => 
{ + return (Task::ready(Ok(Vec::new())), RelPathBuf::new()); + } + }; + + let rel_path = if args.ends_with(PathStyle::local().separator()) { + rel_path + } else { + rel_path + .parent() + .map(|rel_path| rel_path.to_rel_path_buf()) + .unwrap_or(RelPathBuf::new()) + }; + + let task = workspace.project().update(cx, |project, cx| { + let path = prefix + .join(rel_path.as_std_path()) + .to_string_lossy() + .to_string(); + project.list_directory(path, cx) + }); + + (task, rel_path) + }); + + cx.background_spawn(async move { + let directories = task.await.unwrap_or_default(); + directories + .iter() + .map(|dir| { + let path = RelPath::new(dir.path.as_path(), PathStyle::local()) + .map(|cow| cow.into_owned()) + .unwrap_or(RelPathBuf::new()); + let mut path_string = args_path + .join(&path) + .display(PathStyle::local()) + .to_string(); + if dir.is_dir { + path_string.push_str(PathStyle::local().separator()); + } + path_string + }) + .collect() + }) + } + + fn get_parsed_query(&self, query: String) -> Option { let rest = query - .to_string() .strip_prefix(self.prefix)? .to_string() .chars() @@ -789,6 +879,7 @@ impl VimCommand { .filter_map(|e| e.left()) .collect::(); let has_bang = rest.starts_with('!'); + let has_space = rest.starts_with("! ") || rest.starts_with(' '); let args = if has_bang { rest.strip_prefix('!')?.trim().to_string() } else if rest.is_empty() { @@ -796,7 +887,24 @@ impl VimCommand { } else { rest.strip_prefix(' ')?.trim().to_string() }; + Some(ParsedQuery { + args, + has_bang, + has_space, + }) + } + fn parse( + &self, + query: &str, + range: &Option, + cx: &App, + ) -> Option> { + let ParsedQuery { + args, + has_bang, + has_space: _, + } = self.get_parsed_query(query.to_string())?; let action = if has_bang && self.bang_action.is_some() { self.bang_action.as_ref().unwrap().boxed_clone() } else if let Some(action) = self.action.as_ref() { @@ -1056,18 +1164,43 @@ fn generate_commands(_: &App) -> Vec { .bang(workspace::Save { save_intent: Some(SaveIntent::Overwrite), }) - .args(|action, args| { + .filename(|action, filename| { Some( VimSave { save_intent: action .as_any() .downcast_ref::() .and_then(|action| action.save_intent), - filename: args, + filename, } .boxed_clone(), ) }), + VimCommand::new(("e", "dit"), editor::actions::ReloadFile) + .bang(editor::actions::ReloadFile) + .filename(|_, filename| Some(VimEdit { filename }.boxed_clone())), + VimCommand::new(("sp", "lit"), workspace::SplitHorizontal).filename(|_, filename| { + Some( + VimSplit { + vertical: false, + filename, + } + .boxed_clone(), + ) + }), + VimCommand::new(("vs", "plit"), workspace::SplitVertical).filename(|_, filename| { + Some( + VimSplit { + vertical: true, + filename, + } + .boxed_clone(), + ) + }), + VimCommand::new(("tabe", "dit"), workspace::NewFile) + .filename(|_action, filename| Some(VimEdit { filename }.boxed_clone())), + VimCommand::new(("tabnew", ""), workspace::NewFile) + .filename(|_action, filename| Some(VimEdit { filename }.boxed_clone())), VimCommand::new( ("q", "uit"), workspace::CloseActiveItem { @@ -1164,24 +1297,6 @@ fn generate_commands(_: &App) -> Vec { save_intent: Some(SaveIntent::Overwrite), }), VimCommand::new(("cq", "uit"), zed_actions::Quit), - VimCommand::new(("sp", "lit"), workspace::SplitHorizontal).args(|_, args| { - Some( - VimSplit { - vertical: false, - filename: args, - } - .boxed_clone(), - ) - }), - VimCommand::new(("vs", "plit"), workspace::SplitVertical).args(|_, args| { - Some( - VimSplit { - vertical: true, - filename: args, - } - .boxed_clone(), - ) - }), 
VimCommand::new( ("bd", "elete"), workspace::CloseActiveItem { @@ -1224,10 +1339,6 @@ fn generate_commands(_: &App) -> Vec { VimCommand::str(("ls", ""), "tab_switcher::ToggleAll"), VimCommand::new(("new", ""), workspace::NewFileSplitHorizontal), VimCommand::new(("vne", "w"), workspace::NewFileSplitVertical), - VimCommand::new(("tabe", "dit"), workspace::NewFile) - .args(|_action, args| Some(VimEdit { filename: args }.boxed_clone())), - VimCommand::new(("tabnew", ""), workspace::NewFile) - .args(|_action, args| Some(VimEdit { filename: args }.boxed_clone())), VimCommand::new(("tabn", "ext"), workspace::ActivateNextItem).count(), VimCommand::new(("tabp", "revious"), workspace::ActivatePreviousItem).count(), VimCommand::new(("tabN", "ext"), workspace::ActivatePreviousItem).count(), @@ -1327,9 +1438,6 @@ fn generate_commands(_: &App) -> Vec { VimCommand::new(("$", ""), EndOfDocument), VimCommand::new(("%", ""), EndOfDocument), VimCommand::new(("0", ""), StartOfDocument), - VimCommand::new(("e", "dit"), editor::actions::ReloadFile) - .bang(editor::actions::ReloadFile) - .args(|_, args| Some(VimEdit { filename: args }.boxed_clone())), VimCommand::new(("ex", ""), editor::actions::ReloadFile).bang(editor::actions::ReloadFile), VimCommand::new(("cpp", "link"), editor::actions::CopyPermalinkToLine).range(act_on_range), VimCommand::str(("opt", "ions"), "zed::OpenDefaultSettings"), @@ -1383,18 +1491,30 @@ fn wrap_count(action: Box, range: &CommandRange) -> Option Vec { - // NOTE: We also need to support passing arguments to commands like :w - // (ideally with filename autocompletion). +pub fn command_interceptor( + mut input: &str, + workspace: WeakEntity, + cx: &mut App, +) -> Task { while input.starts_with(':') { input = &input[1..]; } let (range, query) = VimCommand::parse_range(input); let range_prefix = input[0..(input.len() - query.len())].to_string(); - let query = query.as_str().trim(); + let has_trailing_space = query.ends_with(" "); + let mut query = query.as_str().trim(); + + let on_matching_lines = (query.starts_with('g') || query.starts_with('v')) + .then(|| { + let (pattern, range, search, invert) = OnMatchingLines::parse(query, &range)?; + let start_idx = query.len() - pattern.len(); + query = query[start_idx..].trim(); + Some((range, search, invert)) + }) + .flatten(); - let action = if range.is_some() && query.is_empty() { + let mut action = if range.is_some() && query.is_empty() { Some( GoToLine { range: range.clone().unwrap(), @@ -1418,7 +1538,10 @@ pub fn command_interceptor(mut input: &str, cx: &App) -> Vec Vec = positions.iter().map(|&pos| pos + offset).collect(); + positions.splice(0..0, no_args_positions.clone()); + let string = format!("{display_string} {string}"); + let action = match cx + .update(|cx| commands(cx).get(cmd_idx)?.parse(&string[1..], &range, cx)) + { + Ok(Some(action)) => action, + _ => continue, + }; + results.push(CommandInterceptItem { + action, + string, + positions, + }); + } + CommandInterceptResult { + results, + exclusive: true, + } + }) + } else { + Task::ready(CommandInterceptResult { + results, + exclusive: false, + }) } - Vec::default() } fn generate_positions(string: &str, query: &str) -> Vec { @@ -1530,19 +1733,40 @@ impl OnMatchingLines { // but we do flip \( and \) to ( and ) (and vice-versa) in the pattern, // and convert \0..\9 to $0..$9 in the replacement so that common idioms work. 
pub(crate) fn parse( - mut chars: Peekable, - invert: bool, - range: CommandRange, - cx: &App, - ) -> Option { - let delimiter = chars.next().filter(|c| { + query: &str, + range: &Option, + ) -> Option<(String, CommandRange, String, bool)> { + let mut global = "global".chars().peekable(); + let mut query_chars = query.chars().peekable(); + let mut invert = false; + if query_chars.peek() == Some(&'v') { + invert = true; + query_chars.next(); + } + while global + .peek() + .is_some_and(|char| Some(char) == query_chars.peek()) + { + global.next(); + query_chars.next(); + } + if !invert && query_chars.peek() == Some(&'!') { + invert = true; + query_chars.next(); + } + let range = range.clone().unwrap_or(CommandRange { + start: Position::Line { row: 0, offset: 0 }, + end: Some(Position::LastLine { offset: 0 }), + }); + + let delimiter = query_chars.next().filter(|c| { !c.is_alphanumeric() && *c != '"' && *c != '|' && *c != '\'' && *c != '!' })?; let mut search = String::new(); let mut escaped = false; - for c in chars.by_ref() { + for c in query_chars.by_ref() { if escaped { escaped = false; // unescape escaped parens @@ -1563,21 +1787,7 @@ impl OnMatchingLines { } } - let command: String = chars.collect(); - - let action = WrappedAction( - command_interceptor(&command, cx) - .first()? - .action - .boxed_clone(), - ); - - Some(Self { - range, - search, - invert, - action, - }) + Some((query_chars.collect::(), range, search, invert)) } pub fn run(&self, vim: &mut Vim, window: &mut Window, cx: &mut Context) { @@ -1695,7 +1905,9 @@ impl OnMatchingLines { }); window.dispatch_action(action, cx); cx.defer_in(window, move |editor, window, cx| { - let newest = editor.selections.newest::(cx); + let newest = editor + .selections + .newest::(&editor.display_snapshot(cx)); editor.change_selections( SelectionEffects::no_scroll(), window, @@ -1792,7 +2004,9 @@ impl Vim { }; let command = self.update_editor(cx, |_, editor, cx| { let snapshot = editor.snapshot(window, cx); - let start = editor.selections.newest_display(cx); + let start = editor + .selections + .newest_display(&editor.display_snapshot(cx)); let text_layout_details = editor.text_layout_details(window); let (mut range, _) = motion .range( @@ -1839,7 +2053,9 @@ impl Vim { }; let command = self.update_editor(cx, |_, editor, cx| { let snapshot = editor.snapshot(window, cx); - let start = editor.selections.newest_display(cx); + let start = editor + .selections + .newest_display(&editor.display_snapshot(cx)); let range = object .range(&snapshot, start.clone(), around, None) .unwrap_or(start.range()); @@ -1948,7 +2164,11 @@ impl ShellExec { Point::new(range.start.0, 0) ..snapshot.clip_point(Point::new(range.end.0 + 1, 0), Bias::Right) } else { - let mut end = editor.selections.newest::(cx).range().end; + let mut end = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .range() + .end; end = snapshot.clip_point(Point::new(end.row + 1, 0), Bias::Right); needs_newline_prefix = end == snapshot.max_point(); end..end @@ -2184,7 +2404,8 @@ mod test { assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "oops\n"); assert!(!cx.has_pending_prompt()); - cx.simulate_keystrokes(": w ! 
enter"); + cx.simulate_keystrokes(": w !"); + cx.simulate_keystrokes("enter"); assert!(!cx.has_pending_prompt()); assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@@\n"); } @@ -2342,7 +2563,7 @@ mod test { } #[gpui::test] - async fn test_w_command(cx: &mut TestAppContext) { + async fn test_command_write_filename(cx: &mut TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; cx.workspace(|workspace, _, cx| { diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index d2c270e5239204b169c55483fdd6ac3185dd4529..6788a186fb45222f7b09fe756862e6cb337c6d90 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -1,4 +1,5 @@ mod boundary; +mod duplicate; mod object; mod paste; mod select; @@ -17,7 +18,7 @@ use text::{Bias, SelectionGoal}; use workspace::searchable; use workspace::searchable::FilteredSearchRange; -use crate::motion; +use crate::motion::{self, MotionKind}; use crate::state::SearchState; use crate::{ Vim, @@ -40,6 +41,17 @@ actions!( HelixSelectLine, /// Select all matches of a given pattern within the current selection. HelixSelectRegex, + /// Removes all but the one selection that was created last. + /// `Newest` can eventually be `Primary`. + HelixKeepNewestSelection, + /// Copies all selections below. + HelixDuplicateBelow, + /// Copies all selections above. + HelixDuplicateAbove, + /// Delete the selection and enter edit mode. + HelixSubstitute, + /// Delete the selection and enter edit mode, without yanking the selection. + HelixSubstituteNoYank, ] ); @@ -51,6 +63,17 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::helix_goto_last_modification); Vim::action(editor, cx, Vim::helix_paste); Vim::action(editor, cx, Vim::helix_select_regex); + Vim::action(editor, cx, Vim::helix_keep_newest_selection); + Vim::action(editor, cx, |vim, _: &HelixDuplicateBelow, window, cx| { + let times = Vim::take_count(cx); + vim.helix_duplicate_selections_below(times, window, cx); + }); + Vim::action(editor, cx, |vim, _: &HelixDuplicateAbove, window, cx| { + let times = Vim::take_count(cx); + vim.helix_duplicate_selections_above(times, window, cx); + }); + Vim::action(editor, cx, Vim::helix_substitute); + Vim::action(editor, cx, Vim::helix_substitute_no_yank); } impl Vim { @@ -322,7 +345,7 @@ impl Vim { self.update_editor(cx, |vim, editor, cx| { let has_selection = editor .selections - .all_adjusted(cx) + .all_adjusted(&editor.display_snapshot(cx)) .iter() .any(|selection| !selection.is_empty()); @@ -455,19 +478,20 @@ impl Vim { pub fn helix_replace(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { - let (map, selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_map); // Store selection info for positioning after edit let selection_info: Vec<_> = selections .iter() .map(|selection| { let range = selection.range(); - let start_offset = range.start.to_offset(&map, Bias::Left); - let end_offset = range.end.to_offset(&map, Bias::Left); + let start_offset = range.start.to_offset(&display_map, Bias::Left); + let end_offset = range.end.to_offset(&display_map, Bias::Left); let was_empty = range.is_empty(); let was_reversed = selection.reversed; ( - map.buffer_snapshot().anchor_before(start_offset), + display_map.buffer_snapshot().anchor_before(start_offset), end_offset - start_offset, was_empty, was_reversed, @@ 
-481,11 +505,11 @@ impl Vim { // For empty selections, extend to replace one character if range.is_empty() { - range.end = movement::saturating_right(&map, range.start); + range.end = movement::saturating_right(&display_map, range.start); } - let byte_range = range.start.to_offset(&map, Bias::Left) - ..range.end.to_offset(&map, Bias::Left); + let byte_range = range.start.to_offset(&display_map, Bias::Left) + ..range.end.to_offset(&display_map, Bias::Left); if !byte_range.is_empty() { let replacement_text = text.repeat(byte_range.len()); @@ -545,7 +569,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { editor.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); let display_map = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = editor.selections.all::(cx); + let mut selections = editor.selections.all::(&display_map); let max_point = display_map.buffer_snapshot().max_point(); let buffer_snapshot = &display_map.buffer_snapshot(); @@ -575,6 +599,71 @@ impl Vim { }); }); } + + fn helix_keep_newest_selection( + &mut self, + _: &HelixKeepNewestSelection, + window: &mut Window, + cx: &mut Context, + ) { + self.update_editor(cx, |_, editor, cx| { + let newest = editor + .selections + .newest::(&editor.display_snapshot(cx)); + editor.change_selections(Default::default(), window, cx, |s| s.select(vec![newest])); + }); + } + + fn do_helix_substitute(&mut self, yank: bool, window: &mut Window, cx: &mut Context) { + self.update_editor(cx, |vim, editor, cx| { + editor.set_clip_at_line_ends(false, cx); + editor.transact(window, cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.move_with(|map, selection| { + if selection.start == selection.end { + selection.end = movement::right(map, selection.end); + } + + // If the selection starts and ends on a newline, we exclude the last one. + if !selection.is_empty() + && selection.start.column() == 0 + && selection.end.column() == 0 + { + selection.end = movement::left(map, selection.end); + } + }) + }); + if yank { + vim.copy_selections_content(editor, MotionKind::Exclusive, window, cx); + } + let selections = editor + .selections + .all::(&editor.display_snapshot(cx)) + .into_iter(); + let edits = selections.map(|selection| (selection.start..selection.end, "")); + editor.edit(edits, cx); + }); + }); + self.switch_mode(Mode::Insert, true, window, cx); + } + + fn helix_substitute( + &mut self, + _: &HelixSubstitute, + window: &mut Window, + cx: &mut Context, + ) { + self.do_helix_substitute(true, window, cx); + } + + fn helix_substitute_no_yank( + &mut self, + _: &HelixSubstituteNoYank, + window: &mut Window, + cx: &mut Context, + ) { + self.do_helix_substitute(false, window, cx); + } } #[cfg(test)] @@ -1212,4 +1301,67 @@ mod test { cx.simulate_keystrokes("s o n e enter"); cx.assert_state("ˇone two one", Mode::HelixNormal); } + + #[gpui::test] + async fn test_helix_substitute(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state("ˇone two", Mode::HelixNormal); + cx.simulate_keystrokes("c"); + cx.assert_state("ˇne two", Mode::Insert); + + cx.set_state("«oneˇ» two", Mode::HelixNormal); + cx.simulate_keystrokes("c"); + cx.assert_state("ˇ two", Mode::Insert); + + cx.set_state( + indoc! {" + oneˇ two + three + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("x c"); + cx.assert_state( + indoc! {" + ˇ + three + "}, + Mode::Insert, + ); + + cx.set_state( + indoc! 
{" + one twoˇ + three + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("c"); + cx.assert_state( + indoc! {" + one twoˇthree + "}, + Mode::Insert, + ); + + // Helix doesn't set the cursor to the first non-blank one when + // replacing lines: it uses language-dependent indent queries instead. + cx.set_state( + indoc! {" + one two + « indented + three not indentedˇ» + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("c"); + cx.set_state( + indoc! {" + one two + ˇ + "}, + Mode::Insert, + ); + } } diff --git a/crates/vim/src/helix/duplicate.rs b/crates/vim/src/helix/duplicate.rs new file mode 100644 index 0000000000000000000000000000000000000000..1b1f10b00b6a7381f22c6ec3be674dc2c085eff6 --- /dev/null +++ b/crates/vim/src/helix/duplicate.rs @@ -0,0 +1,234 @@ +use std::ops::Range; + +use editor::{DisplayPoint, display_map::DisplaySnapshot}; +use gpui::Context; +use text::Bias; +use ui::Window; + +use crate::Vim; + +impl Vim { + /// Creates a duplicate of every selection below it in the first place that has both its start + /// and end + pub(super) fn helix_duplicate_selections_below( + &mut self, + times: Option, + window: &mut Window, + cx: &mut Context, + ) { + self.duplicate_selections( + times, + window, + cx, + |prev_point| *prev_point.row_mut() += 1, + |prev_range, map| prev_range.end.row() >= map.max_point().row(), + false, + ); + } + + /// Creates a duplicate of every selection above it in the first place that has both its start + /// and end + pub(super) fn helix_duplicate_selections_above( + &mut self, + times: Option, + window: &mut Window, + cx: &mut Context, + ) { + self.duplicate_selections( + times, + window, + cx, + |prev_point| *prev_point.row_mut() = prev_point.row().0.saturating_sub(1), + |prev_range, _| prev_range.start.row() == DisplayPoint::zero().row(), + true, + ); + } + + fn duplicate_selections( + &mut self, + times: Option, + window: &mut Window, + cx: &mut Context, + advance_search: impl Fn(&mut DisplayPoint), + end_search: impl Fn(&Range, &DisplaySnapshot) -> bool, + above: bool, + ) { + let times = times.unwrap_or(1); + self.update_editor(cx, |_, editor, cx| { + let mut selections = Vec::new(); + let map = editor.display_snapshot(cx); + let mut original_selections = editor.selections.all_display(&map); + // The order matters, because it is recorded when the selections are added. 
+ if above { + original_selections.reverse(); + } + + for origin in original_selections { + let origin = origin.tail()..origin.head(); + selections.push(display_point_range_to_offset_range(&origin, &map)); + let mut last_origin = origin; + for _ in 1..=times { + if let Some(duplicate) = find_next_valid_duplicate_space( + last_origin.clone(), + &map, + &advance_search, + &end_search, + ) { + selections.push(display_point_range_to_offset_range(&duplicate, &map)); + last_origin = duplicate; + } else { + break; + } + } + } + + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(selections); + }); + }); + } +} + +fn find_next_valid_duplicate_space( + mut origin: Range, + map: &DisplaySnapshot, + advance_search: &impl Fn(&mut DisplayPoint), + end_search: &impl Fn(&Range, &DisplaySnapshot) -> bool, +) -> Option> { + while !end_search(&origin, map) { + advance_search(&mut origin.start); + advance_search(&mut origin.end); + + if map.clip_point(origin.start, Bias::Left) == origin.start + && map.clip_point(origin.end, Bias::Right) == origin.end + { + return Some(origin); + } + } + None +} + +fn display_point_range_to_offset_range( + range: &Range, + map: &DisplaySnapshot, +) -> Range { + range.start.to_offset(map, Bias::Left)..range.end.to_offset(map, Bias::Right) +} + +#[cfg(test)] +mod tests { + use db::indoc; + + use crate::{state::Mode, test::VimTestContext}; + + #[gpui::test] + async fn test_selection_duplication(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + cx.set_state( + indoc! {" + The quick brown + fox «jumpsˇ» + over the + lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("C"); + + cx.assert_state( + indoc! {" + The quick brown + fox «jumpsˇ» + over the + lazy« dog.ˇ»"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("C"); + + cx.assert_state( + indoc! {" + The quick brown + fox «jumpsˇ» + over the + lazy« dog.ˇ»"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("alt-C"); + + cx.assert_state( + indoc! {" + The «quickˇ» brown + fox «jumpsˇ» + over the + lazy« dog.ˇ»"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes(","); + + cx.assert_state( + indoc! {" + The «quickˇ» brown + fox jumps + over the + lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_selection_duplication_backwards(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + cx.set_state( + indoc! {" + The quick brown + «ˇfox» jumps + over the + lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("C C alt-C"); + + cx.assert_state( + indoc! {" + «ˇThe» quick brown + «ˇfox» jumps + «ˇove»r the + «ˇlaz»y dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_selection_duplication_count(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + cx.set_state( + indoc! {" + The «qˇ»uick brown + fox jumps + over the + lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("9 C"); + + cx.assert_state( + indoc! 
{" + The «qˇ»uick brown + fox «jˇ»umps + over« ˇ»the + lazy« ˇ»dog."}, + Mode::HelixNormal, + ); + } +} diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs index 9b6b6e454ac1e8d3a47009fcd85db0d2da00261e..62d8c6caef99050cffa17a2e608a924aa97c3e99 100644 --- a/crates/vim/src/helix/paste.rs +++ b/crates/vim/src/helix/paste.rs @@ -44,7 +44,8 @@ impl Vim { return; }; - let (display_map, current_selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let current_selections = editor.selections.all_adjusted_display(&display_map); // The clipboard can have multiple selections, and there can // be multiple selections. Helix zips them together, so the first diff --git a/crates/vim/src/insert.rs b/crates/vim/src/insert.rs index 5b9fef402a7b4fee9ae1d8722cb2cf22f3c2fdb9..d5323f31dce38dad29831cbbfe551b6f30760ed2 100644 --- a/crates/vim/src/insert.rs +++ b/crates/vim/src/insert.rs @@ -50,17 +50,23 @@ impl Vim { if count <= 1 || Vim::globals(cx).dot_replaying { self.create_mark("^".into(), window, cx); + if HelixModeSetting::get_global(cx).0 { + self.update_editor(cx, |_, editor, cx| { + editor.dismiss_menus_and_popups(false, window, cx); + }); + self.switch_mode(Mode::HelixNormal, false, window, cx); + return; + } + self.update_editor(cx, |_, editor, cx| { editor.dismiss_menus_and_popups(false, window, cx); - if !HelixModeSetting::get_global(cx).0 { - editor.change_selections(Default::default(), window, cx, |s| { - s.move_cursors_with(|map, mut cursor, _| { - *cursor.column_mut() = cursor.column().saturating_sub(1); - (map.clip_point(cursor, Bias::Left), SelectionGoal::None) - }); + editor.change_selections(Default::default(), window, cx, |s| { + s.move_cursors_with(|map, mut cursor, _| { + *cursor.column_mut() = cursor.column().saturating_sub(1); + (map.clip_point(cursor, Bias::Left), SelectionGoal::None) }); - } + }); }); self.switch_mode(Mode::Normal, false, window, cx); @@ -84,7 +90,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let mut edits = Vec::new(); - for selection in editor.selections.all::(cx) { + for selection in editor.selections.all::(&editor.display_snapshot(cx)) { let point = selection.head(); let new_row = match direction { Direction::Next => point.row + 1, diff --git a/crates/vim/src/mode_indicator.rs b/crates/vim/src/mode_indicator.rs index da2591934284cb29628d8e0c9d225fa1ff473c7d..42d4915fc509e0f373c8d2c5a2a422b74cc84a8f 100644 --- a/crates/vim/src/mode_indicator.rs +++ b/crates/vim/src/mode_indicator.rs @@ -1,4 +1,4 @@ -use gpui::{Context, Element, Entity, Render, Subscription, WeakEntity, Window, div}; +use gpui::{Context, Element, Entity, FontWeight, Render, Subscription, WeakEntity, Window, div}; use ui::text_for_keystrokes; use workspace::{StatusItemView, item::ItemHandle, ui::prelude::*}; @@ -89,17 +89,37 @@ impl Render for ModeIndicator { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let vim = self.vim(); let Some(vim) = vim else { - return div().into_any(); + return div().hidden().into_any_element(); }; let vim_readable = vim.read(cx); - let label = if let Some(label) = vim_readable.status_label.clone() { - label + let status_label = vim_readable.status_label.clone(); + let temp_mode = vim_readable.temp_mode; + let mode = vim_readable.mode; + + let theme = cx.theme(); + let colors = theme.colors(); + let system_transparent = gpui::hsla(0.0, 0.0, 0.0, 0.0); + let vim_mode_text = colors.vim_mode_text; + let 
bg_color = match mode { + crate::state::Mode::Normal => colors.vim_normal_background, + crate::state::Mode::Insert => colors.vim_insert_background, + crate::state::Mode::Replace => colors.vim_replace_background, + crate::state::Mode::Visual => colors.vim_visual_background, + crate::state::Mode::VisualLine => colors.vim_visual_line_background, + crate::state::Mode::VisualBlock => colors.vim_visual_block_background, + crate::state::Mode::HelixNormal => colors.vim_helix_normal_background, + crate::state::Mode::HelixSelect => colors.vim_helix_select_background, + }; + + let (label, mode): (SharedString, Option) = if let Some(label) = status_label + { + (label, None) } else { - let mode = if vim_readable.temp_mode { - format!("(insert) {}", vim_readable.mode) + let mode_str = if temp_mode { + format!("(insert) {}", mode) } else { - vim_readable.mode.to_string() + mode.to_string() }; let current_operators_description = self.current_operators_description(vim.clone(), cx); @@ -107,13 +127,45 @@ impl Render for ModeIndicator { .pending_keys .as_ref() .unwrap_or(¤t_operators_description); - format!("{} -- {} --", pending, mode).into() + let mode = if bg_color != system_transparent { + mode_str.into() + } else { + format!("-- {} --", mode_str).into() + }; + (pending.into(), Some(mode)) }; - - Label::new(label) - .size(LabelSize::Small) - .line_height_style(LineHeightStyle::UiLabel) - .into_any_element() + h_flex() + .gap_1() + .when(!label.is_empty(), |el| { + el.child( + Label::new(label) + .line_height_style(LineHeightStyle::UiLabel) + .weight(FontWeight::MEDIUM), + ) + }) + .when_some(mode, |el, mode| { + el.child( + v_flex() + .when(bg_color != system_transparent, |el| el.px_2()) + // match with other icons at the bottom that use default buttons + .h(ButtonSize::Default.rems()) + .justify_center() + .rounded_sm() + .bg(bg_color) + .child( + Label::new(mode) + .size(LabelSize::Small) + .line_height_style(LineHeightStyle::UiLabel) + .weight(FontWeight::MEDIUM) + .when( + bg_color != system_transparent + && vim_mode_text != system_transparent, + |el| el.color(Color::Custom(vim_mode_text)), + ), + ), + ) + }) + .into_any() } } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 666d2573a53cbf74ed1c2edee02c8561167038c3..1a617e36c18ffa52906cac06d4b9eddb11a91f8e 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1525,29 +1525,6 @@ fn wrapping_right_single(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayP } } -/// Given a point, returns the start of the buffer row that is a given number of -/// buffer rows away from the current position. -/// -/// This moves by buffer rows instead of display rows, a distinction that is -/// important when soft wrapping is enabled. 
-pub(crate) fn start_of_relative_buffer_row( - map: &DisplaySnapshot, - point: DisplayPoint, - times: isize, -) -> DisplayPoint { - let start = map.display_point_to_fold_point(point, Bias::Left); - let target = start.row() as isize + times; - let new_row = (target.max(0) as u32).min(map.fold_snapshot().max_point().row()); - - map.clip_point( - map.fold_point_to_display_point( - map.fold_snapshot() - .clip_point(FoldPoint::new(new_row, 0), Bias::Right), - ), - Bias::Right, - ) -} - fn up_down_buffer_rows( map: &DisplaySnapshot, mut point: DisplayPoint, @@ -2127,7 +2104,7 @@ pub(crate) fn end_of_line( times: usize, ) -> DisplayPoint { if times > 1 { - point = start_of_relative_buffer_row(map, point, times as isize - 1); + point = map.start_of_relative_buffer_row(point, times as isize - 1); } if display_lines { map.clip_point( @@ -2732,17 +2709,17 @@ fn sneak_backward( } fn next_line_start(map: &DisplaySnapshot, point: DisplayPoint, times: usize) -> DisplayPoint { - let correct_line = start_of_relative_buffer_row(map, point, times as isize); + let correct_line = map.start_of_relative_buffer_row(point, times as isize); first_non_whitespace(map, false, correct_line) } fn previous_line_start(map: &DisplaySnapshot, point: DisplayPoint, times: usize) -> DisplayPoint { - let correct_line = start_of_relative_buffer_row(map, point, -(times as isize)); + let correct_line = map.start_of_relative_buffer_row(point, -(times as isize)); first_non_whitespace(map, false, correct_line) } fn go_to_column(map: &DisplaySnapshot, point: DisplayPoint, times: usize) -> DisplayPoint { - let correct_line = start_of_relative_buffer_row(map, point, 0); + let correct_line = map.start_of_relative_buffer_row(point, 0); right(map, correct_line, times.saturating_sub(1)) } @@ -2752,7 +2729,7 @@ pub(crate) fn next_line_end( times: usize, ) -> DisplayPoint { if times > 1 { - point = start_of_relative_buffer_row(map, point, times as isize - 1); + point = map.start_of_relative_buffer_row(point, times as isize - 1); } end_of_line(map, false, point, 1) } @@ -3106,7 +3083,7 @@ mod test { state::Mode, test::{NeovimBackedTestContext, VimTestContext}, }; - use editor::display_map::Inlay; + use editor::Inlay; use indoc::indoc; use language::Point; use multi_buffer::MultiBufferRow; diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 9386eab58a389b4917cdf33078ac7397ffd01796..f80f9be38edbb7fafb0864437c8de2bda4740154 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -28,7 +28,7 @@ use editor::Editor; use editor::{Anchor, SelectionEffects}; use editor::{Bias, ToPoint}; use editor::{display_map::ToDisplayPoint, movement}; -use gpui::{Action, Context, Window, actions}; +use gpui::{Context, Window, actions}; use language::{Point, SelectionGoal}; use log::error; use multi_buffer::MultiBufferRow; @@ -123,8 +123,6 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::toggle_comments); Vim::action(editor, cx, Vim::paste); Vim::action(editor, cx, Vim::show_location); - Vim::action(editor, cx, Vim::go_to_tab); - Vim::action(editor, cx, Vim::go_to_previous_tab); Vim::action(editor, cx, |vim, _: &DeleteLeft, window, cx| { vim.record_current_action(cx); @@ -659,7 +657,7 @@ impl Vim { self.switch_mode(Mode::Insert, false, window, cx); self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&editor.display_snapshot(cx)); let snapshot = 
editor.buffer().read(cx).snapshot(cx); let selection_start_rows: BTreeSet = selections @@ -681,7 +679,7 @@ impl Vim { editor.edit_with_autoindent(edits, cx); editor.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(|map, cursor, _| { - let previous_line = motion::start_of_relative_buffer_row(map, cursor, -1); + let previous_line = map.start_of_relative_buffer_row(cursor, -1); let insert_point = motion::end_of_line(map, false, previous_line, 1); (insert_point, SelectionGoal::None) }); @@ -701,7 +699,7 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { let text_layout_details = editor.text_layout_details(window); editor.transact(window, cx, |editor, window, cx| { - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&editor.display_snapshot(cx)); let snapshot = editor.buffer().read(cx).snapshot(cx); let selection_end_rows: BTreeSet = selections @@ -747,7 +745,7 @@ impl Vim { Vim::take_forced_motion(cx); self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, _, cx| { - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&editor.display_snapshot(cx)); let selection_start_rows: BTreeSet = selections .into_iter() @@ -776,9 +774,10 @@ impl Vim { Vim::take_forced_motion(cx); self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { - let selections = editor.selections.all::(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all::(&display_map); let snapshot = editor.buffer().read(cx).snapshot(cx); - let (_map, display_selections) = editor.selections.all_display(cx); + let display_selections = editor.selections.all_display(&display_map); let original_positions = display_selections .iter() .map(|s| (s.id, s.head())) @@ -939,13 +938,14 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); - let (map, display_selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let display_selections = editor.selections.all_display(&display_map); - let mut edits = Vec::new(); + let mut edits = Vec::with_capacity(display_selections.len()); for selection in &display_selections { let mut range = selection.range(); for _ in 0..count { - let new_point = movement::saturating_right(&map, range.end); + let new_point = movement::saturating_right(&display_map, range.end); if range.end == new_point { return; } @@ -953,8 +953,8 @@ impl Vim { } edits.push(( - range.start.to_offset(&map, Bias::Left) - ..range.end.to_offset(&map, Bias::Left), + range.start.to_offset(&display_map, Bias::Left) + ..range.end.to_offset(&display_map, Bias::Left), text.repeat(if is_return_char { 0 } else { count }), )); } @@ -978,16 +978,16 @@ impl Vim { pub fn save_selection_starts( &self, editor: &Editor, - cx: &mut Context, ) -> HashMap { - let (map, selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_map); selections .iter() .map(|selection| { ( selection.id, - map.display_point_to_anchor(selection.start, Bias::Right), + display_map.display_point_to_anchor(selection.start, Bias::Right), ) }) .collect::>() @@ -1014,55 +1014,8 @@ impl Vim { self.switch_mode(Mode::Insert, true, window, cx); } } - - fn go_to_tab(&mut self, _: &GoToTab, window: &mut Window, cx: &mut Context) { - let count = Vim::take_count(cx); - 
Vim::take_forced_motion(cx); - - if let Some(tab_index) = count { - // gt goes to tab (1-based). - let zero_based_index = tab_index.saturating_sub(1); - window.dispatch_action( - workspace::pane::ActivateItem(zero_based_index).boxed_clone(), - cx, - ); - } else { - // If no count is provided, go to the next tab. - window.dispatch_action(workspace::pane::ActivateNextItem.boxed_clone(), cx); - } - } - - fn go_to_previous_tab( - &mut self, - _: &GoToPreviousTab, - window: &mut Window, - cx: &mut Context, - ) { - let count = Vim::take_count(cx); - Vim::take_forced_motion(cx); - - if let Some(count) = count { - // gT with count goes back that many tabs with wraparound (not the same as gt!). - if let Some(workspace) = self.workspace(window) { - let pane = workspace.read(cx).active_pane().read(cx); - let item_count = pane.items().count(); - if item_count > 0 { - let current_index = pane.active_item_index(); - let target_index = (current_index as isize - count as isize) - .rem_euclid(item_count as isize) - as usize; - window.dispatch_action( - workspace::pane::ActivateItem(target_index).boxed_clone(), - cx, - ); - } - } - } else { - // No count provided, go to the previous tab. - window.dispatch_action(workspace::pane::ActivatePreviousItem.boxed_clone(), cx); - } - } } + #[cfg(test)] mod test { use gpui::{KeyBinding, TestAppContext, UpdateGlobal}; diff --git a/crates/vim/src/normal/convert.rs b/crates/vim/src/normal/convert.rs index 11d040850d341155bf428ebc337cc9e3f4cc42c3..0ee132a44d20723970fecbbef4cef13ff31e310c 100644 --- a/crates/vim/src/normal/convert.rs +++ b/crates/vim/src/normal/convert.rs @@ -199,7 +199,7 @@ impl Vim { let mut ranges = Vec::new(); let mut cursor_positions = Vec::new(); let snapshot = editor.buffer().read(cx).snapshot(cx); - for selection in editor.selections.all_adjusted(cx) { + for selection in editor.selections.all_adjusted(&editor.display_snapshot(cx)) { match vim.mode { Mode::Visual | Mode::VisualLine => { ranges.push(selection.start..selection.end); diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 34ac4aab1f11c547ed1335e1a9da12fe52be9b08..4b27b4dfaf911c72458c9f412d5d0d2ba4cd70b8 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -58,7 +58,7 @@ impl Vim { let mut new_anchors = Vec::new(); let snapshot = editor.buffer().read(cx).snapshot(cx); - for selection in editor.selections.all_adjusted(cx) { + for selection in editor.selections.all_adjusted(&editor.display_snapshot(cx)) { if !selection.is_empty() && (vim.mode != Mode::VisualBlock || new_anchors.is_empty()) { diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index ea9aafe1315d3d89afe9d258f4e736717ffe789f..3bb040511fdd7fa53dd97198ae02b492b0e7359d 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -50,16 +50,19 @@ impl Vim { let mut reversed = vec![]; self.update_editor(cx, |vim, editor, cx| { - let (map, selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_map); for selection in selections { - let end = movement::saturating_left(&map, selection.end); + let end = movement::saturating_left(&display_map, selection.end); ends.push( - map.buffer_snapshot() - .anchor_before(end.to_offset(&map, Bias::Left)), + display_map + .buffer_snapshot() + .anchor_before(end.to_offset(&display_map, Bias::Left)), ); starts.push( - map.buffer_snapshot() - 
.anchor_before(selection.start.to_offset(&map, Bias::Left)), + display_map + .buffer_snapshot() + .anchor_before(selection.start.to_offset(&display_map, Bias::Left)), ); reversed.push(selection.reversed) } @@ -301,19 +304,21 @@ impl Vim { name = "'"; } if matches!(name, "{" | "}" | "(" | ")") { - let (map, selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_map); let anchors = selections .into_iter() .map(|selection| { let point = match name { - "{" => movement::start_of_paragraph(&map, selection.head(), 1), - "}" => movement::end_of_paragraph(&map, selection.head(), 1), - "(" => motion::sentence_backwards(&map, selection.head(), 1), - ")" => motion::sentence_forwards(&map, selection.head(), 1), + "{" => movement::start_of_paragraph(&display_map, selection.head(), 1), + "}" => movement::end_of_paragraph(&display_map, selection.head(), 1), + "(" => motion::sentence_backwards(&display_map, selection.head(), 1), + ")" => motion::sentence_forwards(&display_map, selection.head(), 1), _ => unreachable!(), }; - map.buffer_snapshot() - .anchor_before(point.to_offset(&map, Bias::Left)) + display_map + .buffer_snapshot() + .anchor_before(point.to_offset(&display_map, Bias::Left)) }) .collect::>(); return Some(Mark::Local(anchors)); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 2a45695928ec3fe87a8f26c5161bb1f095186c53..74a28322d13b6ab0f563e6953f6b1edbfea66740 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -56,7 +56,8 @@ impl Vim { vim.copy_selections_content(editor, MotionKind::for_mode(vim.mode), window, cx); } - let (display_map, current_selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let current_selections = editor.selections.all_adjusted_display(&display_map); // unlike zed, if you have a multi-cursor selection from vim block mode, // pasting it will paste it on subsequent lines, even if you don't yet @@ -173,7 +174,7 @@ impl Vim { original_indent_columns.push(original_indent_column); } - let cursor_offset = editor.selections.last::(cx).head(); + let cursor_offset = editor.selections.last::(&display_map).head(); if editor .buffer() .read(cx) diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index edb3d7f2157aec8d23faee5fa1a069a10974360f..ff884e3b7393b39b86114338fe2af11e384e1fa0 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -363,7 +363,10 @@ mod test { point(0., 3.0) ); assert_eq!( - editor.selections.newest(cx).range(), + editor + .selections + .newest(&editor.display_snapshot(cx)) + .range(), Point::new(6, 0)..Point::new(6, 0) ) }); @@ -380,7 +383,10 @@ mod test { point(0., 3.0) ); assert_eq!( - editor.selections.newest(cx).range(), + editor + .selections + .newest(&editor.display_snapshot(cx)) + .range(), Point::new(0, 0)..Point::new(6, 1) ) }); diff --git a/crates/vim/src/normal/substitute.rs b/crates/vim/src/normal/substitute.rs index 889d48717068b0561fd21614dc9fb5d0581754dc..df8d7b4879e21491ed808de1dad78cfebc5b12ec 100644 --- a/crates/vim/src/normal/substitute.rs +++ b/crates/vim/src/normal/substitute.rs @@ -94,7 +94,10 @@ impl Vim { MotionKind::Exclusive }; vim.copy_selections_content(editor, kind, window, cx); - let selections = editor.selections.all::(cx).into_iter(); + let selections = editor + .selections + .all::(&editor.display_snapshot(cx)) + .into_iter(); let edits = 
selections.map(|selection| (selection.start..selection.end, "")); editor.edit(edits, cx); }); diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index fe8180ffff37de51a019b394fd5742278b9355e2..d5a45fca544d61735f62a8f46e849db2c009847f 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -106,7 +106,7 @@ impl Vim { true, editor .selections - .all_adjusted(cx) + .all_adjusted(&editor.display_snapshot(cx)) .iter() .map(|s| s.range()) .collect(), @@ -128,7 +128,7 @@ impl Vim { false, editor .selections - .all_adjusted(cx) + .all_adjusted(&editor.display_snapshot(cx)) .iter() .map(|s| s.range()) .collect(), diff --git a/crates/vim/src/replace.rs b/crates/vim/src/replace.rs index 40fe4f213e205569129775a2e495ec2b3bee14b6..c9a9fbdb9ee3428ce80c934a686a73a63ddee714 100644 --- a/crates/vim/src/replace.rs +++ b/crates/vim/src/replace.rs @@ -53,7 +53,7 @@ impl Vim { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); let map = editor.snapshot(window, cx); - let display_selections = editor.selections.all::(cx); + let display_selections = editor.selections.all::(&map.display_snapshot); // Handles all string that require manipulation, including inserts and replaces let edits = display_selections @@ -98,7 +98,7 @@ impl Vim { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); let map = editor.snapshot(window, cx); - let selections = editor.selections.all::(cx); + let selections = editor.selections.all::(&map.display_snapshot); let mut new_selections = vec![]; let edits: Vec<(Range, String)> = selections .into_iter() @@ -150,7 +150,9 @@ impl Vim { self.stop_recording(cx); self.update_editor(cx, |vim, editor, cx| { editor.set_clip_at_line_ends(false, cx); - let mut selection = editor.selections.newest_display(cx); + let mut selection = editor + .selections + .newest_display(&editor.display_snapshot(cx)); let snapshot = editor.snapshot(window, cx); object.expand_selection(&snapshot, &mut selection, around, None); let start = snapshot @@ -196,7 +198,9 @@ impl Vim { self.update_editor(cx, |vim, editor, cx| { editor.set_clip_at_line_ends(false, cx); let text_layout_details = editor.text_layout_details(window); - let mut selection = editor.selections.newest_display(cx); + let mut selection = editor + .selections + .newest_display(&editor.display_snapshot(cx)); let snapshot = editor.snapshot(window, cx); motion.expand_selection( &snapshot, diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 88a100fc2abb90005256548395959c596167c148..959edff63dd50fa549edcbae1bea213224b923af 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -6,7 +6,7 @@ use crate::{ToggleMarksView, ToggleRegistersView, UseSystemClipboard, Vim, VimAd use crate::{motion::Motion, object::Object}; use anyhow::Result; use collections::HashMap; -use command_palette_hooks::{CommandPaletteFilter, CommandPaletteInterceptor}; +use command_palette_hooks::{CommandPaletteFilter, GlobalCommandPaletteInterceptor}; use db::{ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, sqlez_macros::sql, @@ -718,9 +718,7 @@ impl VimGlobals { CommandPaletteFilter::update_global(cx, |filter, _| { filter.show_namespace(Vim::NAMESPACE); }); - CommandPaletteInterceptor::update_global(cx, |interceptor, _| { - interceptor.set(Box::new(command_interceptor)); - }); + GlobalCommandPaletteInterceptor::set(cx, command_interceptor); for window in cx.windows() { if let Some(workspace) = 
window.downcast::() { workspace @@ -735,9 +733,7 @@ impl VimGlobals { } else { KeyBinding::set_vim_mode(cx, false); *Vim::globals(cx) = VimGlobals::default(); - CommandPaletteInterceptor::update_global(cx, |interceptor, _| { - interceptor.clear(); - }); + GlobalCommandPaletteInterceptor::clear(cx); CommandPaletteFilter::update_global(cx, |filter, _| { filter.hide_namespace(Vim::NAMESPACE); }); @@ -867,7 +863,9 @@ impl VimGlobals { } } '%' => editor.and_then(|editor| { - let selection = editor.selections.newest::(cx); + let selection = editor + .selections + .newest::(&editor.display_snapshot(cx)); if let Some((_, buffer, _)) = editor .buffer() .read(cx) diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index e1b46f56a9e8b934e8c8e55d144b8eb325352375..bc817e2d4871a0be07e8c100b332f5630dcec711 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -45,7 +45,8 @@ impl Vim { }, }; let surround = pair.end != surround_alias((*text).as_ref()); - let (display_map, display_selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let display_selections = editor.selections.all_adjusted_display(&display_map); let mut edits = Vec::new(); let mut anchors = Vec::new(); @@ -144,7 +145,8 @@ impl Vim { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); - let (display_map, display_selections) = editor.selections.all_display(cx); + let display_map = editor.display_snapshot(cx); + let display_selections = editor.selections.all_display(&display_map); let mut edits = Vec::new(); let mut anchors = Vec::new(); @@ -256,7 +258,8 @@ impl Vim { let preserve_space = will_replace_pair.start == will_replace_pair.end || !opening; - let (display_map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); let mut edits = Vec::new(); let mut anchors = Vec::new(); @@ -382,7 +385,8 @@ impl Vim { self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { editor.set_clip_at_line_ends(false, cx); - let (display_map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); let mut anchors = Vec::new(); for selection in &selections { @@ -500,7 +504,8 @@ impl Vim { let mut min_range_size = usize::MAX; let _ = self.editor.update(cx, |editor, cx| { - let (display_map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); // Even if there's multiple cursors, we'll simply rely on // the first one to understand what bracket pair to map to. 
// I believe we could, if worth it, go one step above and diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 2f130356a70c0ee8cff1803112a1caec87c45469..93b610877a163ba0f3035e8a0483f531a3246e6c 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -13,6 +13,7 @@ use editor::{ }; use futures::StreamExt; use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px}; +use itertools::Itertools; use language::Point; pub use neovim_backed_test_context::*; use settings::SettingsStore; @@ -974,6 +975,21 @@ async fn test_jk_delay(cx: &mut gpui::TestAppContext) { cx.assert_state("jˇkhello", Mode::Normal); } +#[perf] +#[gpui::test] +async fn test_jk_max_count(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state("1\nˇ2\n3").await; + cx.simulate_shared_keystrokes("9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 j") + .await; + cx.shared_state().await.assert_eq("1\n2\nˇ3"); + + let number: String = usize::MAX.to_string().split("").join(" "); + cx.simulate_shared_keystrokes(&format!("{number} k")).await; + cx.shared_state().await.assert_eq("ˇ1\n2\n3"); +} + #[perf] #[gpui::test] async fn test_comma_w(cx: &mut gpui::TestAppContext) { @@ -2279,7 +2295,10 @@ async fn test_clipping_on_mode_change(cx: &mut gpui::TestAppContext) { let mut pixel_position = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let current_head = editor.selections.newest_display(cx).end; + let current_head = editor + .selections + .newest_display(&snapshot.display_snapshot) + .end; editor.last_bounds().unwrap().origin + editor .display_to_pixel_point(current_head, &snapshot, window) diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 4999a4b04c2005ad2371b4e82c7f23578269c7bc..7481d176109907baccf6e742d0b3f3614014dcac 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -51,7 +51,10 @@ use vim_mode_setting::HelixModeSetting; use vim_mode_setting::VimModeSetting; use workspace::{self, Pane, Workspace}; -use crate::state::ReplayableAction; +use crate::{ + normal::{GoToPreviousTab, GoToTab}, + state::ReplayableAction, +}; /// Number is used to manage vim's count. Pushing a digit /// multiplies the current value by 10 and adds the digit. @@ -409,6 +412,46 @@ pub fn init(cx: &mut App) { cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx)) }) }); + workspace.register_action(|_, _: &GoToTab, window, cx| { + let count = Vim::take_count(cx); + Vim::take_forced_motion(cx); + + if let Some(tab_index) = count { + // gt goes to tab (1-based). + let zero_based_index = tab_index.saturating_sub(1); + window.dispatch_action( + workspace::pane::ActivateItem(zero_based_index).boxed_clone(), + cx, + ); + } else { + // If no count is provided, go to the next tab. + window.dispatch_action(workspace::pane::ActivateNextItem.boxed_clone(), cx); + } + }); + + workspace.register_action(|workspace, _: &GoToPreviousTab, window, cx| { + let count = Vim::take_count(cx); + Vim::take_forced_motion(cx); + + if let Some(count) = count { + // gT with count goes back that many tabs with wraparound (not the same as gt!). 
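The `GoToTab` handler above maps a 1-based `gt` count onto a zero-based `ActivateItem` index, while `GoToPreviousTab` (continued below) wraps backwards through the tab list with `rem_euclid`. A minimal standalone sketch of the same arithmetic, with illustrative helper names rather than the real action dispatch:

```rust
/// Sketch of the `gt` / `gT` index arithmetic used by the handlers in this
/// hunk. `item_count` is the number of tabs in the pane, `current` the
/// zero-based index of the active tab. The real handlers dispatch
/// `ActivateItem` actions instead of returning indices.
fn goto_tab_index(count: usize, item_count: usize) -> usize {
    // `{count}gt` targets tab `count`, 1-based; clamp to the last tab here
    // for illustration (the real code lets `ActivateItem` handle bounds).
    count.saturating_sub(1).min(item_count.saturating_sub(1))
}

fn goto_previous_tab_index(count: usize, current: usize, item_count: usize) -> usize {
    // `{count}gT` goes back `count` tabs, wrapping around the start.
    debug_assert!(item_count > 0);
    (current as isize - count as isize).rem_euclid(item_count as isize) as usize
}

fn main() {
    // Three tabs, cursor on tab 0: `2gT` wraps around to tab 1.
    assert_eq!(goto_previous_tab_index(2, 0, 3), 1);
    // `3gT` from tab 0 wraps a full cycle back to tab 0.
    assert_eq!(goto_previous_tab_index(3, 0, 3), 0);
    // `5gt` with only three tabs clamps to the last tab in this sketch.
    assert_eq!(goto_tab_index(5, 3), 2);
    println!("ok");
}
```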
+ let pane = workspace.active_pane().read(cx); + let item_count = pane.items().count(); + if item_count > 0 { + let current_index = pane.active_item_index(); + let target_index = (current_index as isize - count as isize) + .rem_euclid(item_count as isize) + as usize; + window.dispatch_action( + workspace::pane::ActivateItem(target_index).boxed_clone(), + cx, + ); + } + } else { + // No count provided, go to the previous tab. + window.dispatch_action(workspace::pane::ActivatePreviousItem.boxed_clone(), cx); + } + }); }) .detach(); } @@ -1316,7 +1359,10 @@ impl Vim { return; }; let newest_selection_empty = editor.update(cx, |editor, cx| { - editor.selections.newest::(cx).is_empty() + editor + .selections + .newest::(&editor.display_snapshot(cx)) + .is_empty() }); let editor = editor.read(cx); let editor_mode = editor.mode(); @@ -1412,9 +1458,11 @@ impl Vim { cx: &mut Context, ) -> Option { self.update_editor(cx, |_, editor, cx| { - let selection = editor.selections.newest::(cx); + let snapshot = &editor.snapshot(window, cx); + let selection = editor + .selections + .newest::(&snapshot.display_snapshot); - let snapshot = editor.snapshot(window, cx); let snapshot = snapshot.buffer_snapshot(); let (range, kind) = snapshot.surrounding_word(selection.start, Some(CharScopeContext::Completion)); @@ -1441,9 +1489,11 @@ impl Vim { let selections = self.editor().map(|editor| { editor.update(cx, |editor, cx| { + let snapshot = editor.display_snapshot(cx); + ( - editor.selections.oldest::(cx), - editor.selections.newest::(cx), + editor.selections.oldest::(&snapshot), + editor.selections.newest::(&snapshot), ) }) }); @@ -1525,6 +1575,7 @@ impl Vim { post_count .checked_mul(10) .and_then(|post_count| post_count.checked_add(number)) + .filter(|post_count| *post_count < isize::MAX as usize) .unwrap_or(post_count), ) } else { @@ -1534,6 +1585,7 @@ impl Vim { pre_count .checked_mul(10) .and_then(|pre_count| pre_count.checked_add(number)) + .filter(|pre_count| *pre_count < isize::MAX as usize) .unwrap_or(pre_count), ) } diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index f8ef8e32586ca11800b5d3872aafaead3275bd37..59555205d9862e51c2778eec1f321338fd5e7569 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -15,10 +15,7 @@ use workspace::searchable::Direction; use crate::{ Vim, - motion::{ - Motion, MotionKind, first_non_whitespace, next_line_end, start_of_line, - start_of_relative_buffer_row, - }, + motion::{Motion, MotionKind, first_non_whitespace, next_line_end, start_of_line}, object::Object, state::{Mark, Mode, Operator}, }; @@ -369,6 +366,8 @@ impl Vim { let mut selections = Vec::new(); let mut row = tail.row(); + let going_up = tail.row() > head.row(); + let direction = if going_up { -1 } else { 1 }; loop { let laid_out_line = map.layout_row(row, &text_layout_details); @@ -399,14 +398,21 @@ impl Vim { selections.push(selection); } - if row == head.row() { + + // When dealing with soft wrapped lines, it's possible that + // `row` ends up being set to a value other than `head.row()` as + // `head.row()` might be a `DisplayPoint` mapped to a soft + // wrapped line, hence the need for `<=` and `>=` instead of + // `==`. + if going_up && row <= head.row() || !going_up && row >= head.row() { break; } - // Move to the next or previous buffer row, ensuring that - // wrapped lines are handled correctly. 
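The comment in the `visual.rs` hunk above explains why the block-selection loop now terminates on `<=` / `>=` instead of `==`: with soft wrap enabled, stepping to the start of the next buffer row can jump past `head.row()`. A self-contained sketch of that termination logic, where the closure stands in for `start_of_relative_buffer_row` and rows are simplified to plain numbers:

```rust
/// Minimal sketch of the loop-termination change: when stepping by buffer
/// rows over a soft-wrapped buffer, the next step can jump *past* the target
/// display row, so the loop must stop on "reached or passed" rather than on
/// strict equality. `next_row` is a stand-in for the real
/// `start_of_relative_buffer_row`, which may skip several display rows.
fn rows_covered(tail_row: u32, head_row: u32, next_row: impl Fn(u32, i32) -> u32) -> Vec<u32> {
    let going_up = tail_row > head_row;
    let direction = if going_up { -1 } else { 1 };
    let mut row = tail_row;
    let mut covered = Vec::new();
    loop {
        covered.push(row);
        // `row == head_row` could loop forever if the head sits on a display
        // row that the step skips over, hence the directional comparison.
        if going_up && row <= head_row || !going_up && row >= head_row {
            break;
        }
        row = next_row(row, direction);
    }
    covered
}

fn main() {
    // Pretend every step lands two display rows away, as if each buffer row
    // soft-wraps onto two display rows.
    let step = |row: u32, dir: i32| (row as i64 + 2 * dir as i64).max(0) as u32;
    // Heading down from row 1 toward row 4: row 4 is skipped (1 -> 3 -> 5),
    // but the loop still terminates because 5 >= 4.
    assert_eq!(rows_covered(1, 4, step), vec![1, 3, 5]);
    // Heading up from row 5 toward row 2: 5 -> 3 -> 1, terminates at 1 <= 2.
    assert_eq!(rows_covered(5, 2, step), vec![5, 3, 1]);
    println!("ok");
}
```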
- let direction = if tail.row() > head.row() { -1 } else { 1 }; - row = start_of_relative_buffer_row(map, DisplayPoint::new(row, 0), direction).row(); + // Find the next or previous buffer row where the `row` should + // be moved to, so that wrapped lines are skipped. + row = map + .start_of_relative_buffer_row(DisplayPoint::new(row, 0), direction) + .row(); } s.select(selections); @@ -748,7 +754,8 @@ impl Vim { self.stop_recording(cx); self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { - let (display_map, selections) = editor.selections.all_adjusted_display(cx); + let display_map = editor.display_snapshot(cx); + let selections = editor.selections.all_adjusted_display(&display_map); // Selections are biased right at the start. So we need to store // anchors that are biased left so that we can restore the selections @@ -859,7 +866,9 @@ impl Vim { }); } self.update_editor(cx, |_, editor, cx| { - let latest = editor.selections.newest::(cx); + let latest = editor + .selections + .newest::(&editor.display_snapshot(cx)); start_selection = latest.start; end_selection = latest.end; }); @@ -880,7 +889,9 @@ impl Vim { return; } self.update_editor(cx, |_, editor, cx| { - let latest = editor.selections.newest::(cx); + let latest = editor + .selections + .newest::(&editor.display_snapshot(cx)); if vim_is_normal { start_selection = latest.start; end_selection = latest.end; diff --git a/crates/vim/test_data/test_jk_max_count.json b/crates/vim/test_data/test_jk_max_count.json new file mode 100644 index 0000000000000000000000000000000000000000..83eab46a18a74cb258c47c6b94f1ec01beeadf21 --- /dev/null +++ b/crates/vim/test_data/test_jk_max_count.json @@ -0,0 +1,47 @@ +{"Put":{"state":"1\nˇ2\n3"}} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"9"} +{"Key":"j"} +{"Get":{"state":"1\n2\nˇ3","mode":"Normal"}} +{"Key":""} +{"Key":"1"} +{"Key":"8"} +{"Key":"4"} +{"Key":"4"} +{"Key":"6"} +{"Key":"7"} +{"Key":"4"} +{"Key":"4"} +{"Key":"0"} +{"Key":"7"} +{"Key":"3"} +{"Key":"7"} +{"Key":"0"} +{"Key":"9"} +{"Key":"5"} +{"Key":"5"} +{"Key":"1"} +{"Key":"6"} +{"Key":"1"} +{"Key":"5"} +{"Key":""} +{"Key":"k"} +{"Get":{"state":"ˇ1\n2\n3","mode":"Normal"}} diff --git a/crates/vim_mode_setting/Cargo.toml b/crates/vim_mode_setting/Cargo.toml index 8371cca401fa77c63cba6748dc428645340f48b6..6306d125b27a5342a61f503520692c099ab9c4f6 100644 --- a/crates/vim_mode_setting/Cargo.toml +++ b/crates/vim_mode_setting/Cargo.toml @@ -14,4 +14,3 @@ path = "src/vim_mode_setting.rs" [dependencies] gpui.workspace = true settings.workspace = true -workspace-hack.workspace = true diff --git a/crates/vim_mode_setting/src/vim_mode_setting.rs b/crates/vim_mode_setting/src/vim_mode_setting.rs index d9495c556646f9b9f12dc0b52b9530796a5ad5e3..4caa95b2b412755bd4663a024197c074cb0f1b51 100644 --- a/crates/vim_mode_setting/src/vim_mode_setting.rs +++ b/crates/vim_mode_setting/src/vim_mode_setting.rs @@ -19,10 +19,6 @@ impl Settings for VimModeSetting { fn from_settings(content: &SettingsContent) -> Self { Self(content.vim_mode.unwrap()) } - - fn import_from_vscode(_vscode: &settings::VsCodeSettings, _content: &mut SettingsContent) { - // TODO: could possibly check if any of the `vim.` keys are set? 
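Related to the `test_jk_max_count` test and its keystroke data above: the count accumulation in `vim.rs` now drops any digit that would push the count to `isize::MAX` or beyond, so later casts to `isize` (such as the tab wraparound) cannot overflow. A minimal sketch of that clamping, mirroring the `checked_mul` / `checked_add` / `filter` chain from the diff:

```rust
/// Sketch of the count clamping added in `vim.rs`: each typed digit
/// multiplies the count by 10 and adds the digit, but the result is
/// discarded once it no longer fits in an `isize`.
fn push_digit(count: usize, digit: usize) -> usize {
    count
        .checked_mul(10)
        .and_then(|count| count.checked_add(digit))
        .filter(|count| *count < isize::MAX as usize)
        // On overflow, keep the previous value instead of wrapping or panicking.
        .unwrap_or(count)
}

fn main() {
    let mut count = 0usize;
    for _ in 0..30 {
        count = push_digit(count, 9);
    }
    // Twenty-plus nines would overflow a usize; the count saturates instead.
    assert!(count < isize::MAX as usize);
    println!("count = {count}");
}
```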
- } } pub struct HelixModeSetting(pub bool); @@ -31,6 +27,4 @@ impl Settings for HelixModeSetting { fn from_settings(content: &SettingsContent) -> Self { Self(content.helix_mode.unwrap()) } - - fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut SettingsContent) {} } diff --git a/crates/watch/Cargo.toml b/crates/watch/Cargo.toml index 439a9af49f2906fc28768008a2c06d265b382584..9d77eaeddec66a08dd2e9d5056249671c9b02670 100644 --- a/crates/watch/Cargo.toml +++ b/crates/watch/Cargo.toml @@ -14,7 +14,6 @@ doctest = true [dependencies] parking_lot.workspace = true -workspace-hack.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/web_search/Cargo.toml b/crates/web_search/Cargo.toml index 4ba46faec4362ac98fffaffb6c606608c02373e8..d0e32e71f08a4b6fa9585d91dc9d5e8c459a8828 100644 --- a/crates/web_search/Cargo.toml +++ b/crates/web_search/Cargo.toml @@ -17,4 +17,3 @@ cloud_llm_client.workspace = true collections.workspace = true gpui.workspace = true serde.workspace = true -workspace-hack.workspace = true diff --git a/crates/web_search_providers/Cargo.toml b/crates/web_search_providers/Cargo.toml index f7a248d10649dc83d7d76b454e8db2d37b55cbef..ecdca5883ff541459e94170986df3b7f16036c5a 100644 --- a/crates/web_search_providers/Cargo.toml +++ b/crates/web_search_providers/Cargo.toml @@ -22,4 +22,3 @@ language_model.workspace = true serde.workspace = true serde_json.workspace = true web_search.workspace = true -workspace-hack.workspace = true diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 869aa5322eba7fdaf417606dd62ae73a0c3702b3..d5d3016ab2704392c6cc9cc4bcebf6d50701d3be 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -63,7 +63,6 @@ ui.workspace = true util.workspace = true uuid.workspace = true zed_actions.workspace = true -workspace-hack.workspace = true [target.'cfg(target_os = "windows")'.dependencies] windows.workspace = true diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index d67d3c81a9fbf67518a49c6c75a842a50ca78684..05af5d080c4c965f3d53f61b5af144a456ce0074 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -27,6 +27,7 @@ pub use proto::PanelId; pub trait Panel: Focusable + EventEmitter + Render + Sized { fn persistent_name() -> &'static str; + fn panel_key() -> &'static str; fn position(&self, window: &Window, cx: &App) -> DockPosition; fn position_is_valid(&self, position: DockPosition) -> bool; fn set_position(&mut self, position: DockPosition, window: &mut Window, cx: &mut Context); @@ -61,6 +62,7 @@ pub trait Panel: Focusable + EventEmitter + Render + Sized { pub trait PanelHandle: Send + Sync { fn panel_id(&self) -> EntityId; fn persistent_name(&self) -> &'static str; + fn panel_key(&self) -> &'static str; fn position(&self, window: &Window, cx: &App) -> DockPosition; fn position_is_valid(&self, position: DockPosition, cx: &App) -> bool; fn set_position(&self, position: DockPosition, window: &mut Window, cx: &mut App); @@ -108,6 +110,10 @@ where T::persistent_name() } + fn panel_key(&self) -> &'static str { + T::panel_key() + } + fn position(&self, window: &Window, cx: &App) -> DockPosition { self.read(cx).position(window, cx) } @@ -942,8 +948,8 @@ impl Render for PanelButtons { } }) .when(!is_active, |this| { - this.tooltip(move |window, cx| { - Tooltip::for_action(tooltip.clone(), &*action, window, cx) + this.tooltip(move |_window, cx| { + Tooltip::for_action(tooltip.clone(), &*action, cx) }) }) }), @@ -1016,6 +1022,10 @@ pub mod 
test { "TestPanel" } + fn panel_key() -> &'static str { + "TestPanel" + } + fn position(&self, _window: &Window, _: &App) -> super::DockPosition { self.position } diff --git a/crates/workspace/src/history_manager.rs b/crates/workspace/src/history_manager.rs index f68b58ff8289c68883393e7be5087322ca76d480..1b80e7c0125aac0ccf4a0f41c1b5be3c4a651e72 100644 --- a/crates/workspace/src/history_manager.rs +++ b/crates/workspace/src/history_manager.rs @@ -128,8 +128,7 @@ impl HistoryManager { impl HistoryManagerEntry { pub fn new(id: WorkspaceId, paths: &PathList) -> Self { let path = paths - .paths() - .iter() + .ordered_paths() .map(|path| path.compact()) .collect::>(); Self { id, path } diff --git a/crates/workspace/src/invalid_item_view.rs b/crates/workspace/src/invalid_item_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..eb6c8f3299838c1a01777885009fa67271b924d7 --- /dev/null +++ b/crates/workspace/src/invalid_item_view.rs @@ -0,0 +1,113 @@ +use std::{path::Path, sync::Arc}; + +use gpui::{EventEmitter, FocusHandle, Focusable}; +use ui::{ + App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement, + KeyBinding, Label, LabelCommon, LabelSize, ParentElement, Render, SharedString, Styled as _, + Window, h_flex, v_flex, +}; +use zed_actions::workspace::OpenWithSystem; + +use crate::Item; + +/// A view to display when a certain buffer/image/other item fails to open. +pub struct InvalidItemView { + /// Which path was attempted to open. + pub abs_path: Arc, + /// An error message, happened when opening the item. + pub error: SharedString, + is_local: bool, + focus_handle: FocusHandle, +} + +impl InvalidItemView { + pub fn new( + abs_path: &Path, + is_local: bool, + e: &anyhow::Error, + _: &mut Window, + cx: &mut App, + ) -> Self { + Self { + is_local, + abs_path: Arc::from(abs_path), + error: format!("{}", e.root_cause()).into(), + focus_handle: cx.focus_handle(), + } + } +} + +impl Item for InvalidItemView { + type Event = (); + + fn tab_content_text(&self, mut detail: usize, _: &App) -> SharedString { + // Ensure we always render at least the filename. 
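`InvalidItemView::tab_content_text` above derives a tab label by walking up `detail + 1` parents of the failed path and stripping that prefix, so the label always shows at least the file name and grows by one component per `detail` level. A standalone sketch of the same walk, using plain `std::path` with no Zed types:

```rust
use std::path::{Path, PathBuf};

/// Show the last `detail + 1` components of an absolute path by walking up
/// that many parents and stripping the resulting prefix. Falls back to the
/// full path once the walk runs past the filesystem root.
fn tab_label(abs_path: &Path, mut detail: usize) -> String {
    // Always render at least the file name.
    detail += 1;

    let mut prefix = abs_path;
    while detail > 0 {
        if let Some(parent) = prefix.parent() {
            prefix = parent;
            detail -= 1;
        } else {
            break;
        }
    }

    let shown = if detail > 0 {
        abs_path
    } else {
        abs_path.strip_prefix(prefix).unwrap_or(abs_path)
    };
    shown.to_string_lossy().into_owned()
}

fn main() {
    let path = PathBuf::from("/home/me/project/src/main.rs");
    assert_eq!(tab_label(&path, 0), "main.rs");
    assert_eq!(tab_label(&path, 1), "src/main.rs");
    // Asking for more components than exist shows the whole path.
    assert_eq!(tab_label(&path, 10), "/home/me/project/src/main.rs");
    println!("ok");
}
```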
+ detail += 1; + + let path = self.abs_path.as_ref(); + + let mut prefix = path; + while detail > 0 { + if let Some(parent) = prefix.parent() { + prefix = parent; + detail -= 1; + } else { + break; + } + } + + let path = if detail > 0 { + path + } else { + path.strip_prefix(prefix).unwrap_or(path) + }; + + SharedString::new(path.to_string_lossy()) + } +} + +impl EventEmitter<()> for InvalidItemView {} + +impl Focusable for InvalidItemView { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for InvalidItemView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl gpui::IntoElement { + let abs_path = self.abs_path.clone(); + v_flex() + .size_full() + .track_focus(&self.focus_handle(cx)) + .flex_none() + .justify_center() + .overflow_hidden() + .key_context("InvalidItem") + .child( + h_flex().size_full().justify_center().child( + v_flex() + .justify_center() + .gap_2() + .child(h_flex().justify_center().child("Could not open file")) + .child( + h_flex() + .justify_center() + .child(Label::new(self.error.clone()).size(LabelSize::Small)), + ) + .when(self.is_local, |contents| { + contents.child( + h_flex().justify_center().child( + Button::new("open-with-system", "Open in Default App") + .on_click(move |_, _, cx| { + cx.open_with_system(&abs_path); + }) + .style(ButtonStyle::Outlined) + .key_binding(KeyBinding::for_action(&OpenWithSystem, cx)), + ), + ) + }), + ), + ) + } +} diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index f868547dbf1da85bce8cf90c4bca266f941f78d9..ef8f4452e76c7b984b721af8b50eb744f338c150 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -1,7 +1,7 @@ use crate::{ CollaboratorId, DelayedDebouncedEditAction, FollowableViewRegistry, ItemNavHistory, SerializableItemRegistry, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, - invalid_buffer_view::InvalidBufferView, + invalid_item_view::InvalidItemView, pane::{self, Pane}, persistence::model::ItemId, searchable::SearchableItemHandle, @@ -76,40 +76,6 @@ impl Settings for ItemSettings { show_close_button: tabs.show_close_button.unwrap(), } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if let Some(b) = vscode.read_bool("workbench.editor.tabActionCloseVisibility") { - current.tabs.get_or_insert_default().show_close_button = Some(if b { - ShowCloseButton::Always - } else { - ShowCloseButton::Hidden - }) - } - if let Some(s) = vscode.read_enum("workbench.editor.tabActionLocation", |s| match s { - "right" => Some(ClosePosition::Right), - "left" => Some(ClosePosition::Left), - _ => None, - }) { - current.tabs.get_or_insert_default().close_position = Some(s) - } - if let Some(b) = vscode.read_bool("workbench.editor.focusRecentEditorAfterClose") { - current.tabs.get_or_insert_default().activate_on_close = Some(if b { - ActivateOnClose::History - } else { - ActivateOnClose::LeftNeighbour - }) - } - - if let Some(b) = vscode.read_bool("workbench.editor.showIcons") { - current.tabs.get_or_insert_default().file_icons = Some(b); - }; - if let Some(b) = vscode.read_bool("git.decorations.enabled") { - current.tabs.get_or_insert_default().git_status = Some(b); - } - } } impl Settings for PreviewTabsSettings { @@ -123,31 +89,6 @@ impl Settings for PreviewTabsSettings { .unwrap(), } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if let Some(enabled) = 
vscode.read_bool("workbench.editor.enablePreview") { - current.preview_tabs.get_or_insert_default().enabled = Some(enabled); - } - if let Some(enable_preview_from_code_navigation) = - vscode.read_bool("workbench.editor.enablePreviewFromCodeNavigation") - { - current - .preview_tabs - .get_or_insert_default() - .enable_preview_from_code_navigation = Some(enable_preview_from_code_navigation) - } - if let Some(enable_preview_from_file_finder) = - vscode.read_bool("workbench.editor.enablePreviewFromQuickOpen") - { - current - .preview_tabs - .get_or_insert_default() - .enable_preview_from_file_finder = Some(enable_preview_from_file_finder) - } - } } #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] @@ -870,7 +811,7 @@ impl ItemHandle for Entity { let autosave = item.workspace_settings(cx).autosave; if let AutosaveSetting::AfterDelay { milliseconds } = autosave { - let delay = Duration::from_millis(milliseconds); + let delay = Duration::from_millis(milliseconds.0); let item = item.clone(); pending_autosave.fire_new( delay, @@ -1117,7 +1058,7 @@ pub trait ProjectItem: Item { _e: &anyhow::Error, _window: &mut Window, _cx: &mut App, - ) -> Option + ) -> Option where Self: Sized, { diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 768b10abe4f3973ca6424b7b59b4e3bfb44cbb15..70be040df7c3718ba903565100b8548dcfc8b785 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -315,19 +315,17 @@ impl Render for LanguageServerPrompt { ) .child( IconButton::new(close_id, close_icon) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if suppress { Tooltip::for_action( "Suppress.\nClose with click.", &SuppressNotification, - window, cx, ) } else { Tooltip::for_action( "Close.\nSuppress with shift-click.", &menu::Cancel, - window, cx, ) } @@ -499,7 +497,7 @@ impl NotificationFrame { } /// Determines whether the given notification ID should be suppressible - /// Suppressed motifications will not be shown anymore + /// Suppressed notifications will not be shown anymore pub fn show_suppress_button(mut self, show: bool) -> Self { self.show_suppress_button = show; self @@ -556,23 +554,21 @@ impl RenderOnce for NotificationFrame { this.on_modifiers_changed(move |_, _, cx| cx.notify(entity)) .child( IconButton::new(close_id, close_icon) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { if suppress { Tooltip::for_action( "Suppress.\nClose with click.", &SuppressNotification, - window, cx, ) } else if show_suppress_button { Tooltip::for_action( "Close.\nSuppress with shift-click.", &menu::Cancel, - window, cx, ) } else { - Tooltip::for_action("Close", &menu::Cancel, window, cx) + Tooltip::for_action("Close", &menu::Cancel, cx) } }) .on_click({ @@ -761,8 +757,8 @@ pub mod simple_message_notification { self } - /// Determines whether the given notification ID should be supressable - /// Suppressed motifications will not be shown anymor + /// Determines whether the given notification ID should be suppressible + /// Suppressed notifications will not be shown anymor pub fn show_suppress_button(mut self, show: bool) -> Self { self.show_suppress_button = show; self diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index ebb55f4a75669d5596c0d2ddb554b0a83c12062a..283b53d49432f1de02f46c9b7701ad1344b0495e 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2,7 +2,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, 
OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, WorkspaceItemBuilder, - invalid_buffer_view::InvalidBufferView, + invalid_item_view::InvalidItemView, item::{ ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings, PreviewTabsSettings, ProjectItemKind, SaveOptions, ShowCloseButton, ShowDiagnostics, @@ -376,6 +376,7 @@ pub struct Pane { render_tab_bar: Rc) -> AnyElement>, show_tab_bar_buttons: bool, max_tabs: Option, + use_max_tabs: bool, _subscriptions: Vec, tab_bar_scroll_handle: ScrollHandle, /// This is set to true if a user scroll has occurred more recently than a system scroll @@ -473,10 +474,16 @@ impl Pane { next_timestamp: Arc, can_drop_predicate: Option bool + 'static>>, double_click_dispatch_action: Box, + use_max_tabs: bool, window: &mut Window, cx: &mut Context, ) -> Self { let focus_handle = cx.focus_handle(); + let max_tabs = if use_max_tabs { + WorkspaceSettings::get_global(cx).max_tabs + } else { + None + }; let subscriptions = vec![ cx.on_focus(&focus_handle, window, Pane::focus_in), @@ -498,7 +505,8 @@ impl Pane { zoomed: false, active_item_index: 0, preview_item_id: None, - max_tabs: WorkspaceSettings::get_global(cx).max_tabs, + max_tabs, + use_max_tabs, last_focus_handle_by_item: Default::default(), nav_history: NavHistory(Arc::new(Mutex::new(NavHistoryState { mode: NavigationMode::Normal, @@ -706,7 +714,7 @@ impl Pane { self.preview_item_id = None; } - if new_max_tabs != self.max_tabs { + if self.use_max_tabs && new_max_tabs != self.max_tabs { self.max_tabs = new_max_tabs; self.close_items_on_settings_change(window, cx); } @@ -954,6 +962,11 @@ impl Pane { if allow_preview { pane.set_preview_item_id(Some(new_item.item_id()), cx); } + + if let Some(text) = new_item.telemetry_event_text(cx) { + telemetry::event!(text); + } + pane.add_item_inner( new_item, true, @@ -979,11 +992,11 @@ impl Pane { let new_item = build_item(self, window, cx); // A special case that won't ever get a `project_entry_id` but has to be deduplicated nonetheless. 
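The `pane.rs` hunk above deduplicates `InvalidItemView`s by the absolute path they failed to open, since error views never get a `project_entry_id`. The full reuse condition is elided by the diff context, so the sketch below only illustrates the general shape: keep the first matching view, mark the rest for closing. `View` and its fields are stand-ins, not the real item handles.

```rust
use std::collections::HashSet;
use std::path::PathBuf;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct View {
    id: usize,
    abs_path: PathBuf,
}

/// Given the pane's existing error views and a freshly built one, pick an
/// existing view to reuse (if any) and collect duplicates to close.
fn dedup_error_views(existing: &[View], new_view: &View) -> (Option<View>, HashSet<View>) {
    let mut already_open = None;
    let mut to_close = HashSet::new();
    for view in existing.iter().filter(|v| v.abs_path == new_view.abs_path) {
        if already_open.is_none() {
            // Reuse the first matching view instead of opening a duplicate.
            already_open = Some(view.clone());
        } else {
            to_close.insert(view.clone());
        }
    }
    (already_open, to_close)
}

fn main() {
    let existing = vec![
        View { id: 1, abs_path: PathBuf::from("/tmp/a.txt") },
        View { id: 2, abs_path: PathBuf::from("/tmp/a.txt") },
        View { id: 3, abs_path: PathBuf::from("/tmp/b.txt") },
    ];
    let new_view = View { id: 4, abs_path: PathBuf::from("/tmp/a.txt") };
    let (reuse, close) = dedup_error_views(&existing, &new_view);
    assert_eq!(reuse.map(|v| v.id), Some(1));
    assert_eq!(close.len(), 1);
    println!("ok");
}
```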
- if let Some(invalid_buffer_view) = new_item.downcast::() { + if let Some(invalid_buffer_view) = new_item.downcast::() { let mut already_open_view = None; let mut views_to_close = HashSet::default(); for existing_error_view in self - .items_of_type::() + .items_of_type::() .filter(|item| item.read(cx).abs_path == invalid_buffer_view.read(cx).abs_path) { if already_open_view.is_none() @@ -1170,6 +1183,10 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { + if let Some(text) = item.telemetry_event_text(cx) { + telemetry::event!(text); + } + self.add_item_inner( item, activate_pane, @@ -2713,12 +2730,11 @@ impl Pane { .map(|this| { if is_active { let focus_handle = focus_handle.clone(); - this.tooltip(move |window, cx| { + this.tooltip(move |_window, cx| { Tooltip::for_action_in( end_slot_tooltip_text, end_slot_action, &focus_handle, - window, cx, ) }) @@ -3021,9 +3037,7 @@ impl Pane { .disabled(!self.can_navigate_backward()) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in("Go Back", &GoBack, &focus_handle, window, cx) - } + move |_window, cx| Tooltip::for_action_in("Go Back", &GoBack, &focus_handle, cx) }); let navigate_forward = IconButton::new("navigate_forward", IconName::ArrowRight) @@ -3039,8 +3053,8 @@ impl Pane { .disabled(!self.can_navigate_forward()) .tooltip({ let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in("Go Forward", &GoForward, &focus_handle, window, cx) + move |_window, cx| { + Tooltip::for_action_in("Go Forward", &GoForward, &focus_handle, cx) } }); @@ -3636,11 +3650,10 @@ fn default_render_tab_bar_buttons( .on_click(cx.listener(|pane, _, window, cx| { pane.toggle_zoom(&crate::ToggleZoom, window, cx); })) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::for_action( if zoomed { "Zoom Out" } else { "Zoom In" }, &ToggleZoom, - window, cx, ) }) diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 127eae6de07670c265597a6f6df3a286487a9c64..36898b127bdd749a9c1867a97bd72dfd6f4e15ea 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -79,6 +79,56 @@ impl PaneGroup { } } + /// Moves active pane to span the entire border in the given direction, + /// similar to Vim ctrl+w shift-[hjkl] motion. + /// + /// Returns: + /// - Ok(true) if it found and moved a pane + /// - Ok(false) if it found but did not move the pane + /// - Err(_) if it did not find the pane + pub fn move_to_border( + &mut self, + active_pane: &Entity, + direction: SplitDirection, + ) -> Result { + if let Some(pane) = self.find_pane_at_border(direction) + && pane == active_pane + { + return Ok(false); + } + + if !self.remove(active_pane)? 
{ + return Ok(false); + } + + if let Member::Axis(root) = &mut self.root + && direction.axis() == root.axis + { + let idx = if direction.increasing() { + root.members.len() + } else { + 0 + }; + root.insert_pane(idx, active_pane); + return Ok(true); + } + + let members = if direction.increasing() { + vec![self.root.clone(), Member::Pane(active_pane.clone())] + } else { + vec![Member::Pane(active_pane.clone()), self.root.clone()] + }; + self.root = Member::Axis(PaneAxis::new(direction.axis(), members)); + Ok(true) + } + + fn find_pane_at_border(&self, direction: SplitDirection) -> Option<&Entity> { + match &self.root { + Member::Pane(pane) => Some(pane), + Member::Axis(axis) => axis.find_pane_at_border(direction), + } + } + /// Returns: /// - Ok(true) if it found and removed a pane /// - Ok(false) if it found but did not remove the pane @@ -526,9 +576,7 @@ impl PaneAxis { if direction.increasing() { idx += 1; } - - self.members.insert(idx, Member::Pane(new_pane.clone())); - *self.flexes.lock() = vec![1.; self.members.len()]; + self.insert_pane(idx, new_pane); } else { *member = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); @@ -541,6 +589,26 @@ impl PaneAxis { anyhow::bail!("Pane not found"); } + fn insert_pane(&mut self, idx: usize, new_pane: &Entity) { + self.members.insert(idx, Member::Pane(new_pane.clone())); + *self.flexes.lock() = vec![1.; self.members.len()]; + } + + fn find_pane_at_border(&self, direction: SplitDirection) -> Option<&Entity> { + if self.axis != direction.axis() { + return None; + } + let member = if direction.increasing() { + self.members.last() + } else { + self.members.first() + }; + member.and_then(|e| match e { + Member::Pane(pane) => Some(pane), + Member::Axis(_) => None, + }) + } + fn remove(&mut self, pane_to_remove: &Entity) -> Result> { let mut found_pane = false; let mut remove_member = None; @@ -1238,7 +1306,7 @@ mod element { let overlay_opacity = WorkspaceSettings::get(None, cx) .active_pane_modifiers .inactive_opacity - .map(|val| val.clamp(0.0, 1.0)) + .map(|val| val.0.clamp(0.0, 1.0)) .and_then(|val| (val <= 1.).then_some(val)); let mut overlay_background = cx.theme().colors().editor_background; diff --git a/crates/workspace/src/path_list.rs b/crates/workspace/src/path_list.rs index 01e2ffda949faf502de087fb0077cdbc758001ab..035f9e44fcce46527faa0c1053b7a6bb09aae0c8 100644 --- a/crates/workspace/src/path_list.rs +++ b/crates/workspace/src/path_list.rs @@ -3,15 +3,22 @@ use std::{ sync::Arc, }; +use itertools::Itertools; use util::paths::SanitizedPath; /// A list of absolute paths, in a specific order. /// /// The paths are stored in lexicographic order, so that they can be compared to /// other path lists without regard to the order of the paths. +/// +/// The paths can be retrieved in the original order using `ordered_paths()`. #[derive(Default, PartialEq, Eq, Debug, Clone)] pub struct PathList { + /// The paths, in lexicographic order. paths: Arc<[PathBuf]>, + /// The order in which the paths were provided. + /// + /// See `ordered_paths()` for a way to get the paths in the original order. order: Arc<[usize]>, } @@ -42,14 +49,25 @@ impl PathList { self.paths.is_empty() } + /// Get the paths in lexicographic order. pub fn paths(&self) -> &[PathBuf] { self.paths.as_ref() } + /// Get the order in which the paths were provided. pub fn order(&self) -> &[usize] { self.order.as_ref() } + /// Get the paths in the original order. 
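The doc comments above describe `PathList` as storing paths in lexicographic order plus a permutation recording the order in which they were provided; `ordered_paths()` (below) zips the two and sorts by the recorded index to recover the original order. A small sketch of that trick using `itertools`, which `path_list.rs` already imports, with strings standing in for `PathBuf`s:

```rust
use itertools::Itertools;

/// Recover the caller-provided order from the sorted `paths` plus the
/// `order` permutation, using the same zip / sort-by-key pattern as
/// `PathList::ordered_paths`.
fn ordered<'a>(paths: &'a [&'a str], order: &'a [usize]) -> Vec<&'a str> {
    order
        .iter()
        .zip(paths.iter())
        .sorted_by_key(|(i, _)| **i)
        .map(|(_, path)| *path)
        .collect()
}

fn main() {
    // PathList::new(&["a/d", "a/c"]) stores the paths sorted as ["a/c", "a/d"]
    // with order [1, 0]: the path now at index 0 was originally at index 1.
    let paths = ["a/c", "a/d"];
    let order = [1, 0];
    assert_eq!(ordered(&paths, &order), vec!["a/d", "a/c"]);
    println!("ok");
}
```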
+ pub fn ordered_paths(&self) -> impl Iterator { + self.order + .iter() + .zip(self.paths.iter()) + .sorted_by_key(|(i, _)| **i) + .map(|(_, path)| path) + } + pub fn is_lexicographically_ordered(&self) -> bool { self.order.iter().enumerate().all(|(i, &j)| i == j) } @@ -109,15 +127,70 @@ mod tests { let list1 = PathList::new(&["a/d", "a/c"]); let list2 = PathList::new(&["a/c", "a/d"]); - assert_eq!(list1.paths(), list2.paths()); - assert_ne!(list1, list2); - assert_eq!(list1.order(), &[1, 0]); - assert_eq!(list2.order(), &[0, 1]); + assert_eq!(list1.paths(), list2.paths(), "paths differ"); + assert_eq!(list1.order(), &[1, 0], "list1 order incorrect"); + assert_eq!(list2.order(), &[0, 1], "list2 order incorrect"); let list1_deserialized = PathList::deserialize(&list1.serialize()); - assert_eq!(list1_deserialized, list1); + assert_eq!(list1_deserialized, list1, "list1 deserialization failed"); let list2_deserialized = PathList::deserialize(&list2.serialize()); - assert_eq!(list2_deserialized, list2); + assert_eq!(list2_deserialized, list2, "list2 deserialization failed"); + + assert_eq!( + list1.ordered_paths().collect_array().unwrap(), + [&PathBuf::from("a/d"), &PathBuf::from("a/c")], + "list1 ordered paths incorrect" + ); + assert_eq!( + list2.ordered_paths().collect_array().unwrap(), + [&PathBuf::from("a/c"), &PathBuf::from("a/d")], + "list2 ordered paths incorrect" + ); + } + + #[test] + fn test_path_list_ordering() { + let list = PathList::new(&["b", "a", "c"]); + assert_eq!( + list.paths(), + &[PathBuf::from("a"), PathBuf::from("b"), PathBuf::from("c")] + ); + assert_eq!(list.order(), &[1, 0, 2]); + assert!(!list.is_lexicographically_ordered()); + + let serialized = list.serialize(); + let deserialized = PathList::deserialize(&serialized); + assert_eq!(deserialized, list); + + assert_eq!( + deserialized.ordered_paths().collect_array().unwrap(), + [ + &PathBuf::from("b"), + &PathBuf::from("a"), + &PathBuf::from("c") + ] + ); + + let list = PathList::new(&["b", "c", "a"]); + assert_eq!( + list.paths(), + &[PathBuf::from("a"), PathBuf::from("b"), PathBuf::from("c")] + ); + assert_eq!(list.order(), &[2, 0, 1]); + assert!(!list.is_lexicographically_ordered()); + + let serialized = list.serialize(); + let deserialized = PathList::deserialize(&serialized); + assert_eq!(deserialized, list); + + assert_eq!( + deserialized.ordered_paths().collect_array().unwrap(), + [ + &PathBuf::from("b"), + &PathBuf::from("c"), + &PathBuf::from("a"), + ] + ); } } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index efd5116dc39d3ca615112c503ea135b7da076264..5803f193a838600d7c977931770bbc70df4fa92a 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -1347,7 +1347,39 @@ impl WorkspaceDb { continue; } - if paths.paths().iter().all(|path| path.exists()) + let has_wsl_path = if cfg!(windows) { + fn is_wsl_path(path: &PathBuf) -> bool { + use std::path::{Component, Prefix}; + + path.components() + .next() + .and_then(|component| match component { + Component::Prefix(prefix) => Some(prefix), + _ => None, + }) + .and_then(|prefix| match prefix.kind() { + Prefix::UNC(server, _) => Some(server), + Prefix::VerbatimUNC(server, _) => Some(server), + _ => None, + }) + .map(|server| { + let server_str = server.to_string_lossy(); + server_str == "wsl.localhost" || server_str == "wsl$" + }) + .unwrap_or(false) + } + + paths.paths().iter().any(|path| is_wsl_path(path)) + } else { + false + }; + + // Delete the workspace if any of the paths are WSL 
paths. + // If a local workspace points to WSL, this check will cause us to wait for the + // WSL VM and file server to boot up. This can block for many seconds. + // Supported scenarios use remote workspaces. + if !has_wsl_path + && paths.paths().iter().all(|path| path.exists()) && paths.paths().iter().any(|path| path.is_dir()) { result.push((id, SerializedWorkspaceLocation::Local, paths)); @@ -2493,7 +2525,7 @@ mod tests { let workspace_6 = SerializedWorkspace { id: WorkspaceId(6), - paths: PathList::new(&["/tmp6a", "/tmp6b", "/tmp6c"]), + paths: PathList::new(&["/tmp6c", "/tmp6b", "/tmp6a"]), location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), @@ -2534,7 +2566,7 @@ mod tests { assert_eq!(locations.len(), 1); assert_eq!( locations[0].0, - PathList::new(&["/tmp6a", "/tmp6b", "/tmp6c"]), + PathList::new(&["/tmp6c", "/tmp6b", "/tmp6a"]), ); assert_eq!(locations[0].1, Some(60)); } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 08a2f2e38dd142848f8a9c07652e147b58bee233..a37b2ebbe93efb23cad6a98f127ba1f8800a3eb3 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -3,7 +3,7 @@ use crate::{ Member, Pane, PaneAxis, SerializableItemRegistry, Workspace, WorkspaceId, item::ItemHandle, path_list::PathList, }; -use anyhow::Result; +use anyhow::{Context, Result}; use async_recursion::async_recursion; use collections::IndexSet; use db::sqlez::{ @@ -220,6 +220,7 @@ impl SerializedPaneGroup { let new_items = serialized_pane .deserialize_to(project, &pane, workspace_id, workspace.clone(), cx) .await + .context("Could not deserialize pane)") .log_err()?; if pane diff --git a/crates/workspace/src/theme_preview.rs b/crates/workspace/src/theme_preview.rs index 09a5415ca063d0aab2b2fab97abff3533e113b0b..36ea6e2e52d12fa16e15a2881afa91454dbd9856 100644 --- a/crates/workspace/src/theme_preview.rs +++ b/crates/workspace/src/theme_preview.rs @@ -317,13 +317,7 @@ impl ThemePreview { .style(ButtonStyle::Transparent) .tooltip(move |window, cx| { let name = name.clone(); - Tooltip::with_meta( - name, - None, - format!("{:?}", color), - window, - cx, - ) + Tooltip::with_meta(name, None, format!("{:?}", color), cx) }), ) })), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0be758efb7a4622361b5dfe4785e44eb16fe1d4f..65b2d63b229ea4b394825a91131fc267c85b7f8b 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1,6 +1,6 @@ pub mod dock; pub mod history_manager; -pub mod invalid_buffer_view; +pub mod invalid_item_view; pub mod item; mod modal_layer; pub mod notifications; @@ -76,11 +76,14 @@ use project::{ debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, toolchain_store::ToolchainStoreEvent, }; -use remote::{RemoteClientDelegate, RemoteConnectionOptions, remote_client::ConnectionIdentifier}; +use remote::{ + RemoteClientDelegate, RemoteConnection, RemoteConnectionOptions, + remote_client::ConnectionIdentifier, +}; use schemars::JsonSchema; use serde::Deserialize; use session::AppSession; -use settings::{Settings, SettingsLocation, update_settings_file}; +use settings::{CenteredPaddingSettings, Settings, SettingsLocation, update_settings_file}; use shared_screen::SharedScreen; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -99,7 +102,10 @@ use std::{ path::{Path, PathBuf}, process::ExitStatus, rc::Rc, - sync::{Arc, LazyLock, Weak, 
atomic::AtomicUsize}, + sync::{ + Arc, LazyLock, Weak, + atomic::{AtomicBool, AtomicUsize}, + }, time::Duration, }; use task::{DebugScenario, SpawnInTerminal, TaskContext}; @@ -299,6 +305,12 @@ pub struct MoveItemToPaneInDirection { pub clone: bool, } +/// Creates a new file in a split of the desired direction. +#[derive(Clone, Deserialize, PartialEq, JsonSchema, Action)] +#[action(namespace = workspace)] +#[serde(deny_unknown_fields)] +pub struct NewFileSplit(pub SplitDirection); + fn default_right() -> SplitDirection { SplitDirection::Right } @@ -421,6 +433,14 @@ actions!( SwapPaneUp, /// Swaps the current pane with the one below. SwapPaneDown, + /// Move the current pane to be at the far left. + MovePaneLeft, + /// Move the current pane to be at the far right. + MovePaneRight, + /// Move the current pane to be at the very top. + MovePaneUp, + /// Move the current pane to be at the very bottom. + MovePaneDown, ] ); @@ -1179,9 +1199,6 @@ struct FollowerView { } impl Workspace { - const DEFAULT_PADDING: f32 = 0.2; - const MAX_PADDING: f32 = 0.4; - pub fn new( workspace_id: Option, project: Entity, @@ -1314,6 +1331,7 @@ impl Workspace { pane_history_timestamp.clone(), None, NewFile.boxed_clone(), + true, window, cx, ); @@ -1440,7 +1458,7 @@ impl Workspace { }), cx.on_release(move |this, cx| { this.app_state.workspace_store.update(cx, move |store, _| { - store.workspaces.remove(&window_handle.clone()); + store.workspaces.remove(&window_handle); }) }), ]; @@ -1539,7 +1557,7 @@ impl Workspace { persistence::DB.workspace_for_roots(paths_to_open.as_slice()); if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) { - paths_to_open = paths.paths().to_vec(); + paths_to_open = paths.ordered_paths().cloned().collect(); if !paths.is_lexicographically_ordered() { project_handle .update(cx, |project, cx| { @@ -3218,6 +3236,7 @@ impl Workspace { self.pane_history_timestamp.clone(), None, NewFile.boxed_clone(), + true, window, cx, ); @@ -3283,10 +3302,6 @@ impl Workspace { window: &mut Window, cx: &mut App, ) { - if let Some(text) = item.telemetry_event_text(cx) { - telemetry::event!(text); - } - pane.update(cx, |pane, cx| { pane.add_item( item, @@ -3866,6 +3881,16 @@ impl Workspace { } } + pub fn move_pane_to_border(&mut self, direction: SplitDirection, cx: &mut Context) { + if self + .center + .move_to_border(&self.active_pane, direction) + .unwrap() + { + cx.notify(); + } + } + pub fn resize_pane( &mut self, axis: gpui::Axis, @@ -5674,6 +5699,18 @@ impl Workspace { .on_action(cx.listener(|workspace, _: &SwapPaneDown, _, cx| { workspace.swap_pane_in_direction(SplitDirection::Down, cx) })) + .on_action(cx.listener(|workspace, _: &MovePaneLeft, _, cx| { + workspace.move_pane_to_border(SplitDirection::Left, cx) + })) + .on_action(cx.listener(|workspace, _: &MovePaneRight, _, cx| { + workspace.move_pane_to_border(SplitDirection::Right, cx) + })) + .on_action(cx.listener(|workspace, _: &MovePaneUp, _, cx| { + workspace.move_pane_to_border(SplitDirection::Up, cx) + })) + .on_action(cx.listener(|workspace, _: &MovePaneDown, _, cx| { + workspace.move_pane_to_border(SplitDirection::Down, cx) + })) .on_action(cx.listener(|this, _: &ToggleLeftDock, window, cx| { this.toggle_dock(DockPosition::Left, window, cx); })) @@ -5864,6 +5901,11 @@ impl Workspace { }) } + pub fn hide_modal(&mut self, window: &mut Window, cx: &mut App) -> bool { + self.modal_layer + .update(cx, |modal_layer, cx| modal_layer.hide_modal(window, cx)) + } + pub fn toggle_status_toast(&mut self, entity: Entity, cx: &mut App) { 
self.toast_layer .update(cx, |toast_layer, cx| toast_layer.toggle_toast(cx, entity)) @@ -5885,8 +5927,11 @@ impl Workspace { fn adjust_padding(padding: Option) -> f32 { padding - .unwrap_or(Self::DEFAULT_PADDING) - .clamp(0.0, Self::MAX_PADDING) + .unwrap_or(CenteredPaddingSettings::default().0) + .clamp( + CenteredPaddingSettings::MIN_PADDING, + CenteredPaddingSettings::MAX_PADDING, + ) } fn render_dock( @@ -6316,6 +6361,10 @@ impl Render for DraggedDock { impl Render for Workspace { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + static FIRST_PAINT: AtomicBool = AtomicBool::new(true); + if FIRST_PAINT.swap(false, std::sync::atomic::Ordering::Relaxed) { + log::info!("Rendered first frame"); + } let mut context = KeyContext::new_with_defaults(); context.add("Workspace"); context.set("keyboard_layout", cx.keyboard_layout().name().to_string()); @@ -6333,6 +6382,24 @@ impl Render for Workspace { } } + if self.left_dock.read(cx).is_open() { + if let Some(active_panel) = self.left_dock.read(cx).active_panel() { + context.set("left_dock", active_panel.panel_key()); + } + } + + if self.right_dock.read(cx).is_open() { + if let Some(active_panel) = self.right_dock.read(cx).active_panel() { + context.set("right_dock", active_panel.panel_key()); + } + } + + if self.bottom_dock.read(cx).is_open() { + if let Some(active_panel) = self.bottom_dock.read(cx).active_panel() { + context.set("bottom_dock", active_panel.panel_key()); + } + } + let centered_layout = self.centered_layout && self.center.panes().len() == 1 && self.active_item(cx).is_some(); @@ -6348,8 +6415,12 @@ impl Render for Workspace { let paddings = if centered_layout { let settings = WorkspaceSettings::get_global(cx).centered_layout; ( - render_padding(Self::adjust_padding(settings.left_padding)), - render_padding(Self::adjust_padding(settings.right_padding)), + render_padding(Self::adjust_padding( + settings.left_padding.map(|padding| padding.0), + )), + render_padding(Self::adjust_padding( + settings.right_padding.map(|padding| padding.0), + )), ) } else { (None, None) @@ -6940,7 +7011,9 @@ actions!( zed, [ /// Opens the Zed log file. - OpenLog + OpenLog, + /// Reveals the Zed log file in the system file manager. + RevealLogInFileManager ] ); @@ -7406,22 +7479,23 @@ pub fn create_and_open_local_file( pub fn open_remote_project_with_new_connection( window: WindowHandle, - connection_options: RemoteConnectionOptions, + remote_connection: Arc, cancel_rx: oneshot::Receiver<()>, delegate: Arc, app_state: Arc, paths: Vec, cx: &mut App, -) -> Task> { +) -> Task>>>> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = - serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; + serialize_remote_project(remote_connection.connection_options(), paths.clone(), cx) + .await?; let session = match cx .update(|cx| { remote::RemoteClient::new( ConnectionIdentifier::Workspace(workspace_id.0), - connection_options, + remote_connection, cancel_rx, delegate, cx, @@ -7430,7 +7504,7 @@ pub fn open_remote_project_with_new_connection( .await? 
{ Some(result) => result, - None => return Ok(()), + None => return Ok(Vec::new()), }; let project = cx.update(|cx| { @@ -7465,7 +7539,7 @@ pub fn open_remote_project_with_existing_connection( app_state: Arc, window: WindowHandle, cx: &mut AsyncApp, -) -> Task> { +) -> Task>>>> { cx.spawn(async move |cx| { let (workspace_id, serialized_workspace) = serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?; @@ -7491,7 +7565,7 @@ async fn open_remote_project_inner( app_state: Arc, window: WindowHandle, cx: &mut AsyncApp, -) -> Result<()> { +) -> Result>>> { let toolchains = DB.toolchains(workspace_id).await?; for (toolchain, worktree_id, path) in toolchains { project @@ -7548,7 +7622,7 @@ async fn open_remote_project_inner( }); })?; - window + let items = window .update(cx, |_, window, cx| { window.activate_window(); open_items(serialized_workspace, project_paths_to_open, window, cx) @@ -7567,7 +7641,7 @@ async fn open_remote_project_inner( } })?; - Ok(()) + Ok(items.into_iter().map(|item| item?.ok()).collect()) } fn serialize_remote_project( @@ -8764,8 +8838,9 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings(cx, |settings| { - settings.workspace.autosave = - Some(AutosaveSetting::AfterDelay { milliseconds: 500 }); + settings.workspace.autosave = Some(AutosaveSetting::AfterDelay { + milliseconds: 500.into(), + }); }) }); item.is_dirty = true; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 541194b0044dd897723c89763abc7d3a2abc20f3..f061227f2cb264b1be1234364ca1e8de7a462e86 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -4,11 +4,11 @@ use crate::DockPosition; use collections::HashMap; use serde::Deserialize; pub use settings::AutosaveSetting; -use settings::Settings; pub use settings::{ BottomDockLayout, PaneSplitDirectionHorizontal, PaneSplitDirectionVertical, RestoreOnStartupBehavior, }; +use settings::{InactiveOpacity, Settings}; pub struct WorkspaceSettings { pub active_pane_modifiers: ActivePanelModifiers, @@ -50,7 +50,7 @@ pub struct ActivePanelModifiers { /// /// Default: `1.0` // TODO: make this not an option, it is never None - pub inactive_opacity: Option, + pub inactive_opacity: Option, } #[derive(Deserialize)] @@ -108,91 +108,6 @@ impl Settings for WorkspaceSettings { zoomed_padding: workspace.zoomed_padding.unwrap(), } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if vscode - .read_bool("accessibility.dimUnfocused.enabled") - .unwrap_or_default() - && let Some(opacity) = vscode - .read_value("accessibility.dimUnfocused.opacity") - .and_then(|v| v.as_f64()) - { - current - .workspace - .active_pane_modifiers - .get_or_insert_default() - .inactive_opacity = Some(opacity as f32); - } - - vscode.enum_setting( - "window.confirmBeforeClose", - &mut current.workspace.confirm_quit, - |s| match s { - "always" | "keyboardOnly" => Some(true), - "never" => Some(false), - _ => None, - }, - ); - - vscode.bool_setting( - "workbench.editor.restoreViewState", - &mut current.workspace.restore_on_file_reopen, - ); - - if let Some(b) = vscode.read_bool("window.closeWhenEmpty") { - current.workspace.when_closing_with_no_tabs = Some(if b { - settings::CloseWindowWhenNoItems::CloseWindow - } else { - settings::CloseWindowWhenNoItems::KeepWindowOpen - }); - } - - if let Some(b) = 
vscode.read_bool("files.simpleDialog.enable") { - current.workspace.use_system_path_prompts = Some(!b); - } - - if let Some(v) = vscode.read_enum("files.autoSave", |s| match s { - "off" => Some(AutosaveSetting::Off), - "afterDelay" => Some(AutosaveSetting::AfterDelay { - milliseconds: vscode - .read_value("files.autoSaveDelay") - .and_then(|v| v.as_u64()) - .unwrap_or(1000), - }), - "onFocusChange" => Some(AutosaveSetting::OnFocusChange), - "onWindowChange" => Some(AutosaveSetting::OnWindowChange), - _ => None, - }) { - current.workspace.autosave = Some(v); - } - - // workbench.editor.limit contains "enabled", "value", and "perEditorGroup" - // our semantics match if those are set to true, some N, and true respectively. - // we'll ignore "perEditorGroup" for now since we only support a global max - if let Some(n) = vscode - .read_value("workbench.editor.limit.value") - .and_then(|v| v.as_u64()) - .and_then(|n| NonZeroUsize::new(n as usize)) - && vscode - .read_bool("workbench.editor.limit.enabled") - .unwrap_or_default() - { - current.workspace.max_tabs = Some(n) - } - - if let Some(b) = vscode.read_bool("window.nativeTabs") { - current.workspace.use_system_window_tabs = Some(b); - } - - // some combination of "window.restoreWindows" and "workbench.startupEditor" might - // map to our "restore_on_startup" - - // there doesn't seem to be a way to read whether the bottom dock's "justified" - // setting is enabled in vscode. that'd be our equivalent to "bottom_dock_layout" - } } impl Settings for TabBarSettings { @@ -204,22 +119,6 @@ impl Settings for TabBarSettings { show_tab_bar_buttons: tab_bar.show_tab_bar_buttons.unwrap(), } } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if let Some(b) = vscode.read_enum("workbench.editor.showTabs", |s| match s { - "multiple" => Some(true), - "single" | "none" => Some(false), - _ => None, - }) { - current.tab_bar.get_or_insert_default().show = Some(b); - } - if Some("hidden") == vscode.read_string("workbench.editor.editorActionsLocation") { - current.tab_bar.get_or_insert_default().show_tab_bar_buttons = Some(false) - } - } } #[derive(Deserialize)] @@ -227,6 +126,7 @@ pub struct StatusBarSettings { pub show: bool, pub active_language_button: bool, pub cursor_position_button: bool, + pub line_endings_button: bool, } impl Settings for StatusBarSettings { @@ -236,15 +136,7 @@ impl Settings for StatusBarSettings { show: status_bar.show.unwrap(), active_language_button: status_bar.active_language_button.unwrap(), cursor_position_button: status_bar.cursor_position_button.unwrap(), - } - } - - fn import_from_vscode( - vscode: &settings::VsCodeSettings, - current: &mut settings::SettingsContent, - ) { - if let Some(show) = vscode.read_bool("workbench.statusBar.visible") { - current.status_bar.get_or_insert_default().show = Some(show); + line_endings_button: status_bar.line_endings_button.unwrap(), } } } diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index fdeca37b7ac73759fe9851f722985349e0a183b7..6d132fbd2cb8c7a1282bffcea6577260a15c4572 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -24,6 +24,7 @@ test-support = [ [dependencies] anyhow.workspace = true +async-lock.workspace = true clock.workspace = true collections.workspace = true fs.workspace = true @@ -46,7 +47,6 @@ smol.workspace = true sum_tree.workspace = true text.workspace = true util.workspace = true -workspace-hack.workspace = true [dev-dependencies] clock = { workspace = true, 
features = ["test-support"] } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index eb0b5f861d2181f06bcd7732851cf7d397404786..5f8253e2dfb48fa6882dabf49c64073023a2a298 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -64,7 +64,7 @@ use std::{ use sum_tree::{Bias, Dimensions, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet}; use text::{LineEnding, Rope}; use util::{ - ResultExt, debug_panic, + ResultExt, debug_panic, maybe, paths::{PathMatcher, PathStyle, SanitizedPath, home_dir}, rel_path::RelPath, }; @@ -226,7 +226,7 @@ impl Default for WorkDirectory { } } -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct LocalSnapshot { snapshot: Snapshot, global_gitignore: Option>, @@ -236,9 +236,10 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. git_repositories: TreeMap, - /// The file handle of the worktree root. `None` if the worktree is a directory. + /// The file handle of the worktree root /// (so we can find it after it's been moved) root_file_handle: Option>, + executor: BackgroundExecutor, } struct BackgroundScannerState { @@ -321,7 +322,6 @@ impl DerefMut for LocalSnapshot { } } -#[derive(Debug)] enum ScanState { Started, Updated { @@ -402,6 +402,7 @@ impl Worktree { PathStyle::local(), ), root_file_handle, + executor: cx.background_executor().clone(), }; let worktree_id = snapshot.id(); @@ -439,6 +440,10 @@ impl Worktree { entry.is_private = !share_private_files && settings.is_path_private(path); } } + entry.is_hidden = abs_path + .file_name() + .and_then(|name| name.to_str()) + .map_or(false, |name| is_path_hidden(name)); snapshot.insert_entry(entry, fs.as_ref()); } @@ -651,7 +656,7 @@ impl Worktree { pub fn replica_id(&self) -> ReplicaId { match self { - Worktree::Local(_) => 0, + Worktree::Local(_) => ReplicaId::LOCAL, Worktree::Remote(worktree) => worktree.replica_id, } } @@ -1065,7 +1070,7 @@ impl LocalWorktree { scan_requests_rx, path_prefixes_to_scan_rx, next_entry_id, - state: Mutex::new(BackgroundScannerState { + state: async_lock::Mutex::new(BackgroundScannerState { prev_snapshot: snapshot.snapshot.clone(), snapshot, scanned_dirs: Default::default(), @@ -1701,9 +1706,9 @@ impl LocalWorktree { refresh.recv().await; log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed()); let new_entry = this.read_with(cx, |this, _| { - this.entry_for_path(&path) - .cloned() - .context("reading path after update") + this.entry_for_path(&path).cloned().with_context(|| { + format!("Could not find entry in worktree for {path:?} after refresh") + }) })??; Ok(Some(new_entry)) }) @@ -2435,9 +2440,10 @@ impl LocalSnapshot { } fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry { + log::trace!("insert entry {:?}", entry.path); if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) { let abs_path = self.absolutize(&entry.path); - match smol::block_on(build_gitignore(&abs_path, fs)) { + match self.executor.block(build_gitignore(&abs_path, fs)) { Ok(ignore) => { self.ignores_by_parent_abs_path .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true)); @@ -2488,7 +2494,12 @@ impl LocalSnapshot { inodes } - fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool, fs: &dyn Fs) -> IgnoreStack { + async fn ignore_stack_for_abs_path( + &self, + abs_path: &Path, + is_dir: bool, + fs: &dyn Fs, + ) -> IgnoreStack { let mut new_ignores = Vec::new(); let mut repo_root = None; for (index, 
ancestor) in abs_path.ancestors().enumerate() { @@ -2499,9 +2510,8 @@ impl LocalSnapshot { new_ignores.push((ancestor, None)); } } - let metadata = smol::block_on(fs.metadata(&ancestor.join(DOT_GIT))) - .ok() - .flatten(); + + let metadata = fs.metadata(&ancestor.join(DOT_GIT)).await.ok().flatten(); if metadata.is_some() { repo_root = Some(Arc::from(ancestor)); break; @@ -2647,7 +2657,7 @@ impl BackgroundScannerState { .any(|p| entry.path.starts_with(p)) } - fn enqueue_scan_dir( + async fn enqueue_scan_dir( &self, abs_path: Arc, entry: &Entry, @@ -2655,7 +2665,10 @@ impl BackgroundScannerState { fs: &dyn Fs, ) { let path = entry.path.clone(); - let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true, fs); + let ignore_stack = self + .snapshot + .ignore_stack_for_abs_path(&abs_path, true, fs) + .await; let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); if !ancestor_inodes.contains(&entry.inode) { @@ -2668,6 +2681,7 @@ impl BackgroundScannerState { scan_queue: scan_job_tx.clone(), ancestor_inodes, is_external: entry.is_external, + is_hidden: entry.is_hidden, }) .unwrap(); } @@ -2692,11 +2706,17 @@ impl BackgroundScannerState { } } - fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs, watcher: &dyn Watcher) -> Entry { + async fn insert_entry( + &mut self, + mut entry: Entry, + fs: &dyn Fs, + watcher: &dyn Watcher, + ) -> Entry { self.reuse_entry_id(&mut entry); let entry = self.snapshot.insert_entry(entry, fs); if entry.path.file_name() == Some(&DOT_GIT) { - self.insert_git_repository(entry.path.clone(), fs, watcher); + self.insert_git_repository(entry.path.clone(), fs, watcher) + .await; } #[cfg(test)] @@ -2827,7 +2847,7 @@ impl BackgroundScannerState { self.snapshot.check_invariants(false); } - fn insert_git_repository( + async fn insert_git_repository( &mut self, dot_git_path: Arc, fs: &dyn Fs, @@ -2868,10 +2888,11 @@ impl BackgroundScannerState { fs, watcher, ) + .await .log_err(); } - fn insert_git_repository_for_path( + async fn insert_git_repository_for_path( &mut self, work_directory: WorkDirectory, dot_git_abs_path: Arc, @@ -2893,7 +2914,7 @@ impl BackgroundScannerState { let work_directory_abs_path = self.snapshot.work_directory_abs_path(&work_directory); let (repository_dir_abs_path, common_dir_abs_path) = - discover_git_paths(&dot_git_abs_path, fs); + discover_git_paths(&dot_git_abs_path, fs).await; watcher .add(&common_dir_abs_path) .context("failed to add common directory to watcher") @@ -3177,6 +3198,11 @@ pub struct Entry { /// exclude them from searches. pub is_ignored: bool, + /// Whether this entry is hidden or inside hidden directory. + /// + /// We only scan hidden entries once the directory is expanded. + pub is_hidden: bool, + /// Whether this entry is always included in searches. /// /// This is used for entries that are always included in searches, even @@ -3351,6 +3377,7 @@ impl Entry { size: metadata.len, canonical_path, is_ignored: false, + is_hidden: false, is_always_included: false, is_external: false, is_private: false, @@ -3531,7 +3558,7 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { } struct BackgroundScanner { - state: Mutex, + state: async_lock::Mutex, fs: Arc, fs_case_sensitive: bool, status_updates_tx: UnboundedSender, @@ -3557,69 +3584,87 @@ impl BackgroundScanner { // If the worktree root does not contain a git repository, then find // the git repository in an ancestor directory. Find any gitignore files // in ancestor directories. 
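The ancestor walk described in the comment above amounts to: for each parent directory of the worktree root, pick up any `.gitignore` found along the way and stop at the first directory that contains a `.git` entry, which is treated as the containing repository. Below is a minimal, synchronous sketch of that traversal, assuming plain `std::fs` instead of Zed's async `Fs` trait and ignoring the `IgnoreStack` bookkeeping; the function name `discover_ancestor_git_state` is illustrative, not the real API.

```rust
use std::path::{Path, PathBuf};

/// Walk from `start` up through its ancestors, collecting every `.gitignore`
/// on the way and stopping at the first directory that contains a `.git` entry.
/// Simplified stand-in for the async ancestor scan in `BackgroundScanner`.
fn discover_ancestor_git_state(start: &Path) -> (Option<PathBuf>, Vec<PathBuf>) {
    let mut gitignores = Vec::new();
    let mut repo_root = None;

    for ancestor in start.ancestors() {
        let gitignore = ancestor.join(".gitignore");
        if gitignore.is_file() {
            gitignores.push(gitignore);
        }
        // The first ancestor that owns a `.git` entry is treated as the repository root.
        if ancestor.join(".git").exists() {
            repo_root = Some(ancestor.to_path_buf());
            break;
        }
    }

    (repo_root, gitignores)
}

fn main() {
    let cwd = std::env::current_dir().expect("current dir should be readable");
    let (repo_root, gitignores) = discover_ancestor_git_state(&cwd);
    println!("repo root: {repo_root:?}");
    println!("gitignores: {gitignores:?}");
}
```

The surrounding diff converts the real helpers (`ignore_stack_for_abs_path`, `enqueue_scan_dir`, `insert_git_repository`, and friends) into `async fn`s guarded by an `async_lock::Mutex`, so the same traversal awaits `fs.metadata` instead of calling `smol::block_on`.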
- let root_abs_path = self.state.lock().snapshot.abs_path.clone(); + let root_abs_path = self.state.lock().await.snapshot.abs_path.clone(); let (ignores, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await; self.state .lock() + .await .snapshot .ignores_by_parent_abs_path .extend(ignores); - let containing_git_repository = repo.and_then(|(ancestor_dot_git, work_directory)| { - self.state - .lock() - .insert_git_repository_for_path( - work_directory, - ancestor_dot_git.clone().into(), - self.fs.as_ref(), - self.watcher.as_ref(), - ) - .log_err()?; - Some(ancestor_dot_git) - }); + let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo { + maybe!(async { + self.state + .lock() + .await + .insert_git_repository_for_path( + work_directory, + ancestor_dot_git.clone().into(), + self.fs.as_ref(), + self.watcher.as_ref(), + ) + .await + .log_err()?; + Some(ancestor_dot_git) + }) + .await + } else { + None + }; log::trace!("containing git repository: {containing_git_repository:?}"); - let mut global_gitignore_events = - if let Some(global_gitignore_path) = &paths::global_gitignore_path() { - self.state.lock().snapshot.global_gitignore = - if self.fs.is_file(&global_gitignore_path).await { - build_gitignore(global_gitignore_path, self.fs.as_ref()) - .await - .ok() - .map(Arc::new) - } else { - None - }; + let mut global_gitignore_events = if let Some(global_gitignore_path) = + &paths::global_gitignore_path() + { + let is_file = self.fs.is_file(&global_gitignore_path).await; + self.state.lock().await.snapshot.global_gitignore = if is_file { + build_gitignore(global_gitignore_path, self.fs.as_ref()) + .await + .ok() + .map(Arc::new) + } else { + None + }; + if is_file + || matches!(global_gitignore_path.parent(), Some(path) if self.fs.is_dir(path).await) + { self.fs .watch(global_gitignore_path, FS_WATCH_LATENCY) .await .0 } else { - self.state.lock().snapshot.global_gitignore = None; - Box::pin(futures::stream::empty()) - }; + Box::pin(futures::stream::pending()) + } + } else { + self.state.lock().await.snapshot.global_gitignore = None; + Box::pin(futures::stream::pending()) + }; let (scan_job_tx, scan_job_rx) = channel::unbounded(); { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; state.snapshot.scan_id += 1; if let Some(mut root_entry) = state.snapshot.root_entry().cloned() { - let ignore_stack = state.snapshot.ignore_stack_for_abs_path( - root_abs_path.as_path(), - true, - self.fs.as_ref(), - ); + let ignore_stack = state + .snapshot + .ignore_stack_for_abs_path(root_abs_path.as_path(), true, self.fs.as_ref()) + .await; if ignore_stack.is_abs_path_ignored(root_abs_path.as_path(), true) { root_entry.is_ignored = true; - state.insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); + state + .insert_entry(root_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()) + .await; } if root_entry.is_dir() { - state.enqueue_scan_dir( - root_abs_path.as_path().into(), - &root_entry, - &scan_job_tx, - self.fs.as_ref(), - ); + state + .enqueue_scan_dir( + root_abs_path.as_path().into(), + &root_entry, + &scan_job_tx, + self.fs.as_ref(), + ) + .await; } } }; @@ -3628,11 +3673,11 @@ impl BackgroundScanner { drop(scan_job_tx); self.scan_dirs(true, scan_job_rx).await; { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; state.snapshot.completed_scan_id = state.snapshot.scan_id; } - self.send_status_update(false, SmallVec::new()); + self.send_status_update(false, 
SmallVec::new()).await; // Process any any FS events that occurred while performing the initial scan. // For these events, update events cannot be as precise, because we didn't @@ -3677,7 +3722,7 @@ impl BackgroundScanner { if did_scan { let abs_path = { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; state.path_prefixes_to_scan.insert(request.path.clone()); state.snapshot.absolutize(&request.path) }; @@ -3686,7 +3731,7 @@ impl BackgroundScanner { self.process_events(vec![abs_path]).await; } } - self.send_status_update(false, request.done); + self.send_status_update(false, request.done).await; } paths = fs_events_rx.next().fuse() => { @@ -3702,7 +3747,7 @@ impl BackgroundScanner { Some([event, ..]) => { self.update_global_gitignore(&event.path).await; } - _ => {}, + _ => (), } } } @@ -3715,7 +3760,7 @@ impl BackgroundScanner { request.relative_paths.sort_unstable(); self.forcibly_load_paths(&request.relative_paths).await; - let root_path = self.state.lock().snapshot.abs_path.clone(); + let root_path = self.state.lock().await.snapshot.abs_path.clone(); let root_canonical_path = self.fs.canonicalize(root_path.as_path()).await; let root_canonical_path = match &root_canonical_path { Ok(path) => SanitizedPath::new(path), @@ -3737,7 +3782,7 @@ impl BackgroundScanner { .collect::>(); { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; let is_idle = state.snapshot.completed_scan_id == state.snapshot.scan_id; state.snapshot.scan_id += 1; if is_idle { @@ -3754,11 +3799,12 @@ impl BackgroundScanner { ) .await; - self.send_status_update(scanning, request.done) + self.send_status_update(scanning, request.done).await } async fn process_events(&self, mut abs_paths: Vec) { - let root_path = self.state.lock().snapshot.abs_path.clone(); + log::trace!("process events: {abs_paths:?}"); + let root_path = self.state.lock().await.snapshot.abs_path.clone(); let root_canonical_path = self.fs.canonicalize(root_path.as_path()).await; let root_canonical_path = match &root_canonical_path { Ok(path) => SanitizedPath::new(path), @@ -3766,6 +3812,7 @@ impl BackgroundScanner { let new_path = self .state .lock() + .await .snapshot .root_file_handle .clone() @@ -3783,7 +3830,7 @@ impl BackgroundScanner { .unbounded_send(ScanState::RootUpdated { new_path }) .ok(); } else { - log::warn!("root path could not be canonicalized: {}", err); + log::warn!("root path could not be canonicalized: {:#}", err); } return; } @@ -3798,24 +3845,31 @@ impl BackgroundScanner { let mut dot_git_abs_paths = Vec::new(); abs_paths.sort_unstable(); abs_paths.dedup_by(|a, b| a.starts_with(b)); - abs_paths.retain(|abs_path| { + { + let snapshot = &self.state.lock().await.snapshot; + abs_paths.retain(|abs_path| { let abs_path = &SanitizedPath::new(abs_path); - let snapshot = &self.state.lock().snapshot; + { let mut is_git_related = false; - let dot_git_paths = abs_path.as_path().ancestors().find_map(|ancestor| { - if smol::block_on(is_git_dir(ancestor, self.fs.as_ref())) { + let dot_git_paths = self.executor.block(maybe!(async { + let mut path = None; + for ancestor in abs_path.as_path().ancestors() { + + if is_git_dir(ancestor, self.fs.as_ref()).await { let path_in_git_dir = abs_path .as_path() .strip_prefix(ancestor) .expect("stripping off the ancestor"); - Some((ancestor.to_owned(), path_in_git_dir.to_owned())) - } else { - None + path = Some((ancestor.to_owned(), path_in_git_dir.to_owned())); + break; } - }); + } + path + + })); if let Some((dot_git_abs_path, path_in_git_dir)) = 
dot_git_paths { if skipped_files_in_dot_git @@ -3888,12 +3942,12 @@ impl BackgroundScanner { true } }); - + } if relative_paths.is_empty() && dot_git_abs_paths.is_empty() { return; } - self.state.lock().snapshot.scan_id += 1; + self.state.lock().await.snapshot.scan_id += 1; let (scan_job_tx, scan_job_rx) = channel::unbounded(); log::debug!("received fs events {:?}", relative_paths); @@ -3907,29 +3961,29 @@ impl BackgroundScanner { .await; let affected_repo_roots = if !dot_git_abs_paths.is_empty() { - self.update_git_repositories(dot_git_abs_paths) + self.update_git_repositories(dot_git_abs_paths).await } else { Vec::new() }; { - let mut ignores_to_update = self.ignores_needing_update(); + let mut ignores_to_update = self.ignores_needing_update().await; ignores_to_update.extend(affected_repo_roots); - let ignores_to_update = self.order_ignores(ignores_to_update); - let snapshot = self.state.lock().snapshot.clone(); + let ignores_to_update = self.order_ignores(ignores_to_update).await; + let snapshot = self.state.lock().await.snapshot.clone(); self.update_ignore_statuses_for_paths(scan_job_tx, snapshot, ignores_to_update) .await; self.scan_dirs(false, scan_job_rx).await; } { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; state.snapshot.completed_scan_id = state.snapshot.scan_id; for (_, entry) in mem::take(&mut state.removed_entries) { state.scanned_dirs.remove(&entry.id); } } - self.send_status_update(false, SmallVec::new()); + self.send_status_update(false, SmallVec::new()).await; } async fn update_global_gitignore(&self, abs_path: &Path) { @@ -3938,30 +3992,30 @@ impl BackgroundScanner { .log_err() .map(Arc::new); let (prev_snapshot, ignore_stack, abs_path) = { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; state.snapshot.global_gitignore = ignore; let abs_path = state.snapshot.abs_path().clone(); - let ignore_stack = - state - .snapshot - .ignore_stack_for_abs_path(&abs_path, true, self.fs.as_ref()); + let ignore_stack = state + .snapshot + .ignore_stack_for_abs_path(&abs_path, true, self.fs.as_ref()) + .await; (state.snapshot.clone(), ignore_stack, abs_path) }; let (scan_job_tx, scan_job_rx) = channel::unbounded(); self.update_ignore_statuses_for_paths( scan_job_tx, prev_snapshot, - vec![(abs_path, ignore_stack)].into_iter(), + vec![(abs_path, ignore_stack)], ) .await; self.scan_dirs(false, scan_job_rx).await; - self.send_status_update(false, SmallVec::new()); + self.send_status_update(false, SmallVec::new()).await; } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { let (scan_job_tx, scan_job_rx) = channel::unbounded(); { - let mut state = self.state.lock(); + let mut state = self.state.lock().await; let root_path = state.snapshot.abs_path.clone(); for path in paths { for ancestor in path.ancestors() { @@ -3969,12 +4023,14 @@ impl BackgroundScanner { && entry.kind == EntryKind::UnloadedDir { let abs_path = root_path.join(ancestor.as_std_path()); - state.enqueue_scan_dir( - abs_path.into(), - entry, - &scan_job_tx, - self.fs.as_ref(), - ); + state + .enqueue_scan_dir( + abs_path.into(), + entry, + &scan_job_tx, + self.fs.as_ref(), + ) + .await; state.paths_to_scan.insert(path.clone()); break; } @@ -3986,7 +4042,7 @@ impl BackgroundScanner { self.scan_dir(&job).await.log_err(); } - !mem::take(&mut self.state.lock().paths_to_scan).is_empty() + !mem::take(&mut self.state.lock().await.paths_to_scan).is_empty() } async fn scan_dirs( @@ -4034,7 +4090,7 @@ impl BackgroundScanner { ) { Ok(_) => { 
last_progress_update_count += 1; - self.send_status_update(true, SmallVec::new()); + self.send_status_update(true, SmallVec::new()).await; } Err(count) => { last_progress_update_count = count; @@ -4059,8 +4115,12 @@ impl BackgroundScanner { .await; } - fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool { - let mut state = self.state.lock(); + async fn send_status_update( + &self, + scanning: bool, + barrier: SmallVec<[barrier::Sender; 1]>, + ) -> bool { + let mut state = self.state.lock().await; if state.changed_paths.is_empty() && scanning { return true; } @@ -4089,7 +4149,7 @@ impl BackgroundScanner { let root_abs_path; let root_char_bag; { - let snapshot = &self.state.lock().snapshot; + let snapshot = &self.state.lock().await.snapshot; if self.settings.is_path_excluded(&job.path) { log::error!("skipping excluded directory {:?}", job.path); return Ok(()); @@ -4142,12 +4202,14 @@ impl BackgroundScanner { }; if child_name == DOT_GIT { - let mut state = self.state.lock(); - state.insert_git_repository( - child_path.clone(), - self.fs.as_ref(), - self.watcher.as_ref(), - ); + let mut state = self.state.lock().await; + state + .insert_git_repository( + child_path.clone(), + self.fs.as_ref(), + self.watcher.as_ref(), + ) + .await; } else if child_name == GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { Ok(ignore) => { @@ -4167,7 +4229,7 @@ impl BackgroundScanner { if self.settings.is_path_excluded(&child_path) { log::debug!("skipping excluded child entry {child_path:?}"); - self.state.lock().remove_path(&child_path); + self.state.lock().await.remove_path(&child_path); continue; } @@ -4219,6 +4281,11 @@ impl BackgroundScanner { child_entry.canonical_path = Some(canonical_path.into()); } + child_entry.is_hidden = job.is_hidden + || child_name + .to_str() + .map_or(false, |name| is_path_hidden(name)); + if child_entry.is_dir() { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); child_entry.is_always_included = self.settings.is_path_always_included(&child_path); @@ -4234,6 +4301,7 @@ impl BackgroundScanner { abs_path: child_abs_path.clone(), path: child_path, is_external: child_entry.is_external, + is_hidden: child_entry.is_hidden, ignore_stack: if child_entry.is_ignored { IgnoreStack::all() } else { @@ -4261,7 +4329,7 @@ impl BackgroundScanner { new_entries.push(child_entry); } - let mut state = self.state.lock(); + let mut state = self.state.lock().await; // Identify any subdirectories that should not be scanned. let mut job_ix = 0; @@ -4343,7 +4411,7 @@ impl BackgroundScanner { None }; - let mut state = self.state.lock(); + let mut state = self.state.lock().await; let doing_recursive_update = scan_queue_tx.is_some(); // Remove any entries for paths that no longer exist or are being recursively @@ -4351,7 +4419,6 @@ impl BackgroundScanner { // detected regardless of the order of the paths. 
for (path, metadata) in relative_paths.iter().zip(metadata.iter()) { if matches!(metadata, Ok(None)) || doing_recursive_update { - log::trace!("remove path {:?}", path); state.remove_path(path); } } @@ -4360,11 +4427,10 @@ impl BackgroundScanner { let abs_path: Arc = root_abs_path.join(path.as_std_path()).into(); match metadata { Ok(Some((metadata, canonical_path))) => { - let ignore_stack = state.snapshot.ignore_stack_for_abs_path( - &abs_path, - metadata.is_dir, - self.fs.as_ref(), - ); + let ignore_stack = state + .snapshot + .ignore_stack_for_abs_path(&abs_path, metadata.is_dir, self.fs.as_ref()) + .await; let is_external = !canonical_path.starts_with(&root_canonical_path); let mut fs_entry = Entry::new( path.clone(), @@ -4384,23 +4450,34 @@ impl BackgroundScanner { fs_entry.is_private = self.is_path_private(path); fs_entry.is_always_included = self.settings.is_path_always_included(path); + let parent_is_hidden = path + .parent() + .and_then(|parent| state.snapshot.entry_for_path(parent)) + .map_or(false, |parent_entry| parent_entry.is_hidden); + fs_entry.is_hidden = parent_is_hidden + || path.file_name().map_or(false, |name| is_path_hidden(name)); + if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) { if state.should_scan_directory(&fs_entry) || (fs_entry.path.is_empty() && abs_path.file_name() == Some(OsStr::new(DOT_GIT))) { - state.enqueue_scan_dir( - abs_path, - &fs_entry, - scan_queue_tx, - self.fs.as_ref(), - ); + state + .enqueue_scan_dir( + abs_path, + &fs_entry, + scan_queue_tx, + self.fs.as_ref(), + ) + .await; } else { fs_entry.kind = EntryKind::UnloadedDir; } } - state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); + state + .insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()) + .await; if path.is_empty() && let Some((ignores, repo)) = new_ancestor_repo.take() @@ -4415,6 +4492,7 @@ impl BackgroundScanner { self.fs.as_ref(), self.watcher.as_ref(), ) + .await .log_err(); } } @@ -4453,11 +4531,11 @@ impl BackgroundScanner { &self, scan_job_tx: Sender, prev_snapshot: LocalSnapshot, - mut ignores_to_update: impl Iterator, IgnoreStack)>, + ignores_to_update: Vec<(Arc, IgnoreStack)>, ) { let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded(); { - while let Some((parent_abs_path, ignore_stack)) = ignores_to_update.next() { + for (parent_abs_path, ignore_stack) in ignores_to_update { ignore_queue_tx .send_blocking(UpdateIgnoreStatusJob { abs_path: parent_abs_path, @@ -4498,11 +4576,11 @@ impl BackgroundScanner { .await; } - fn ignores_needing_update(&self) -> Vec> { + async fn ignores_needing_update(&self) -> Vec> { let mut ignores_to_update = Vec::new(); { - let snapshot = &mut self.state.lock().snapshot; + let snapshot = &mut self.state.lock().await.snapshot; let abs_path = snapshot.abs_path.clone(); snapshot .ignores_by_parent_abs_path @@ -4530,26 +4608,27 @@ impl BackgroundScanner { ignores_to_update } - fn order_ignores( - &self, - mut ignores: Vec>, - ) -> impl use<> + Iterator, IgnoreStack)> { + async fn order_ignores(&self, mut ignores: Vec>) -> Vec<(Arc, IgnoreStack)> { let fs = self.fs.clone(); - let snapshot = self.state.lock().snapshot.clone(); + let snapshot = self.state.lock().await.snapshot.clone(); ignores.sort_unstable(); let mut ignores_to_update = ignores.into_iter().peekable(); - std::iter::from_fn(move || { - let parent_abs_path = ignores_to_update.next()?; + + let mut result = vec![]; + while let Some(parent_abs_path) = ignores_to_update.next() { while ignores_to_update .peek() .map_or(false, |p| 
p.starts_with(&parent_abs_path)) { ignores_to_update.next().unwrap(); } - let ignore_stack = - snapshot.ignore_stack_for_abs_path(&parent_abs_path, true, fs.as_ref()); - Some((parent_abs_path, ignore_stack)) - }) + let ignore_stack = snapshot + .ignore_stack_for_abs_path(&parent_abs_path, true, fs.as_ref()) + .await; + result.push((parent_abs_path, ignore_stack)); + } + + result } async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) { @@ -4581,7 +4660,7 @@ impl BackgroundScanner { return; }; - if let Ok(Some(metadata)) = smol::block_on(self.fs.metadata(&job.abs_path.join(DOT_GIT))) + if let Ok(Some(metadata)) = self.fs.metadata(&job.abs_path.join(DOT_GIT)).await && metadata.is_dir { ignore_stack.repo_root = Some(job.abs_path.clone()); @@ -4601,14 +4680,16 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't previously scanned. if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { - let state = self.state.lock(); + let state = self.state.lock().await; if state.should_scan_directory(&entry) { - state.enqueue_scan_dir( - abs_path.clone(), - &entry, - &job.scan_queue, - self.fs.as_ref(), - ); + state + .enqueue_scan_dir( + abs_path.clone(), + &entry, + &job.scan_queue, + self.fs.as_ref(), + ) + .await; } } @@ -4632,7 +4713,7 @@ impl BackgroundScanner { } } - let state = &mut self.state.lock(); + let state = &mut self.state.lock().await; for edit in &entries_by_path_edits { if let Edit::Insert(entry) = edit && let Err(ix) = state.changed_paths.binary_search(&entry.path) @@ -4648,9 +4729,9 @@ impl BackgroundScanner { state.snapshot.entries_by_id.edit(entries_by_id_edits, ()); } - fn update_git_repositories(&self, dot_git_paths: Vec) -> Vec> { + async fn update_git_repositories(&self, dot_git_paths: Vec) -> Vec> { log::trace!("reloading repositories: {dot_git_paths:?}"); - let mut state = self.state.lock(); + let mut state = self.state.lock().await; let scan_id = state.snapshot.scan_id; let mut affected_repo_roots = Vec::new(); for dot_git_dir in dot_git_paths { @@ -4680,13 +4761,15 @@ impl BackgroundScanner { return Vec::new(); }; affected_repo_roots.push(dot_git_dir.parent().unwrap().into()); - state.insert_git_repository( - RelPath::new(relative, PathStyle::local()) - .unwrap() - .into_arc(), - self.fs.as_ref(), - self.watcher.as_ref(), - ); + state + .insert_git_repository( + RelPath::new(relative, PathStyle::local()) + .unwrap() + .into_arc(), + self.fs.as_ref(), + self.watcher.as_ref(), + ) + .await; } Some(local_repository) => { state.snapshot.git_repositories.update( @@ -4714,7 +4797,7 @@ impl BackgroundScanner { if exists_in_snapshot || matches!( - smol::block_on(self.fs.metadata(&entry.common_dir_abs_path)), + self.fs.metadata(&entry.common_dir_abs_path).await, Ok(Some(_)) ) { @@ -4945,6 +5028,10 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &RelPath) -> CharBag { result } +fn is_path_hidden(name: &str) -> bool { + name.starts_with('.') +} + #[derive(Debug)] struct ScanJob { abs_path: Arc, @@ -4953,6 +5040,7 @@ struct ScanJob { scan_queue: Sender, ancestor_inodes: TreeSet, is_external: bool, + is_hidden: bool, } struct UpdateIgnoreStatusJob { @@ -5374,6 +5462,7 @@ impl<'a> From<&'a Entry> for proto::Entry { inode: entry.inode, mtime: entry.mtime.map(|time| time.into()), is_ignored: entry.is_ignored, + is_hidden: entry.is_hidden, is_external: entry.is_external, is_fifo: entry.is_fifo, size: Some(entry.size), @@ -5412,6 +5501,7 @@ impl TryFrom<(&CharBag, &PathMatcher, proto::Entry)> for Entry 
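The worktree changes above add an `is_hidden` flag to `Entry`: a name counts as hidden when it starts with a dot (`is_path_hidden`), the flag propagates from parent directories through `ScanJob::is_hidden`, and hidden entries are only scanned once their directory is expanded. A standalone sketch of that rule, assuming plain `/`-separated string paths rather than Zed's `RelPath` and `Entry` types:

```rust
/// Mirror of `is_path_hidden` from the diff: a single component is hidden
/// when its name is dot-prefixed.
fn is_name_hidden(name: &str) -> bool {
    name.starts_with('.')
}

/// A relative path is hidden if any component is hidden, which is what the
/// scanner computes incrementally via the parent entry's `is_hidden` flag.
fn is_rel_path_hidden(rel_path: &str) -> bool {
    rel_path.split('/').any(is_name_hidden)
}

fn main() {
    assert!(is_rel_path_hidden(".github/workflows/ci.yml"));
    assert!(is_rel_path_hidden("src/.cache/data.bin"));
    assert!(!is_rel_path_hidden("src/main.rs"));
    println!("hidden-path checks passed");
}
```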
{ .canonical_path .map(|path_string| Arc::from(PathBuf::from(path_string))), is_ignored: entry.is_ignored, + is_hidden: entry.is_hidden, is_always_included, is_external: entry.is_external, is_private: false, @@ -5466,11 +5556,13 @@ fn parse_gitfile(content: &str) -> anyhow::Result<&Path> { Ok(Path::new(path.trim())) } -fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc, Arc) { +async fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc, Arc) { let mut repository_dir_abs_path = dot_git_abs_path.clone(); let mut common_dir_abs_path = dot_git_abs_path.clone(); - if let Some(path) = smol::block_on(fs.load(dot_git_abs_path)) + if let Some(path) = fs + .load(dot_git_abs_path) + .await .ok() .as_ref() .and_then(|contents| parse_gitfile(contents).log_err()) @@ -5479,17 +5571,19 @@ fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc, .parent() .unwrap_or(Path::new("")) .join(path); - if let Some(path) = smol::block_on(fs.canonicalize(&path)).log_err() { + if let Some(path) = fs.canonicalize(&path).await.log_err() { repository_dir_abs_path = Path::new(&path).into(); common_dir_abs_path = repository_dir_abs_path.clone(); - if let Some(commondir_contents) = smol::block_on(fs.load(&path.join("commondir"))).ok() - && let Some(commondir_path) = - smol::block_on(fs.canonicalize(&path.join(commondir_contents.trim()))).log_err() + + if let Some(commondir_contents) = fs.load(&path.join("commondir")).await.ok() + && let Some(commondir_path) = fs + .canonicalize(&path.join(commondir_contents.trim())) + .await + .log_err() { common_dir_abs_path = commondir_path.as_path().into(); } } }; - (repository_dir_abs_path, common_dir_abs_path) } diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index a9fcbf0909617986dd2d1d816ed513dd281f2940..9eddef8eaf43cecca949ea6f595c75795698ab38 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -1,7 +1,7 @@ use std::path::Path; use anyhow::Context as _; -use settings::{Settings, SettingsContent}; +use settings::Settings; use util::{ ResultExt, paths::{PathMatcher, PathStyle}, @@ -50,7 +50,7 @@ impl Settings for WorktreeSettings { .collect(); Self { - project_name: worktree.project_name.filter(|p| !p.is_empty()), + project_name: worktree.project_name.into_inner(), file_scan_exclusions: path_matchers(file_scan_exclusions, "file_scan_exclusions") .log_err() .unwrap_or_default(), @@ -64,31 +64,6 @@ impl Settings for WorktreeSettings { .unwrap_or_default(), } } - - fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut SettingsContent) { - if let Some(inclusions) = vscode - .read_value("files.watcherInclude") - .and_then(|v| v.as_array()) - .and_then(|v| v.iter().map(|n| n.as_str().map(str::to_owned)).collect()) - { - if let Some(old) = current.project.worktree.file_scan_inclusions.as_mut() { - old.extend(inclusions) - } else { - current.project.worktree.file_scan_inclusions = Some(inclusions) - } - } - if let Some(exclusions) = vscode - .read_value("files.watcherExclude") - .and_then(|v| v.as_array()) - .and_then(|v| v.iter().map(|n| n.as_str().map(str::to_owned)).collect()) - { - if let Some(old) = current.project.worktree.file_scan_exclusions.as_mut() { - old.extend(exclusions) - } else { - current.project.worktree.file_scan_exclusions = Some(exclusions) - } - } - } } fn path_matchers(mut values: Vec, context: &'static str) -> anyhow::Result { diff --git a/crates/worktree/src/worktree_tests.rs 
b/crates/worktree/src/worktree_tests.rs index 3c39d5c3ad70563f8f954ee9908c27cef17a752c..d89e1ef4e4df7dbef3cf51789c1f1fc8a5309eb1 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -734,7 +734,6 @@ async fn test_write_file(cx: &mut TestAppContext) { }) .await .unwrap(); - worktree.read_with(cx, |tree, _| { let tracked = tree .entry_for_path(rel_path("tracked-dir/file.txt")) @@ -1537,7 +1536,7 @@ async fn test_random_worktree_operations_during_initial_scan( assert_eq!( updated_snapshot.entries(true, 0).collect::>(), final_snapshot.entries(true, 0).collect::>(), - "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}", + "wrong updates after snapshot {i}: {updates:#?}", ); } } diff --git a/crates/worktree_benchmarks/Cargo.toml b/crates/worktree_benchmarks/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..29681573adc9da43e579342194b971770f0a8743 --- /dev/null +++ b/crates/worktree_benchmarks/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "worktree_benchmarks" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[dependencies] +fs.workspace = true +gpui = { workspace = true, features = ["windows-manifest"] } +settings.workspace = true +worktree.workspace = true + +[lints] +workspace = true diff --git a/crates/assistant_tool/LICENSE-GPL b/crates/worktree_benchmarks/LICENSE-GPL similarity index 100% rename from crates/assistant_tool/LICENSE-GPL rename to crates/worktree_benchmarks/LICENSE-GPL diff --git a/crates/worktree_benchmarks/src/main.rs b/crates/worktree_benchmarks/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..ca86687aff2d4f5f060ca620205dba5d8da6a73a --- /dev/null +++ b/crates/worktree_benchmarks/src/main.rs @@ -0,0 +1,54 @@ +use std::{ + path::Path, + sync::{Arc, atomic::AtomicUsize}, +}; + +use fs::RealFs; +use gpui::Application; +use settings::Settings; +use worktree::{Worktree, WorktreeSettings}; + +fn main() { + let Some(worktree_root_path) = std::env::args().nth(1) else { + println!( + "Missing path to worktree root\nUsage: bench_background_scan PATH_TO_WORKTREE_ROOT" + ); + return; + }; + let app = Application::headless(); + + app.run(|cx| { + settings::init(cx); + WorktreeSettings::register(cx); + let fs = Arc::new(RealFs::new(None, cx.background_executor().clone())); + + cx.spawn(async move |cx| { + let worktree = Worktree::local( + Path::new(&worktree_root_path), + true, + fs, + Arc::new(AtomicUsize::new(0)), + cx, + ) + .await + .expect("Worktree initialization to succeed"); + let did_finish_scan = worktree + .update(cx, |this, _| this.as_local().unwrap().scan_complete()) + .unwrap(); + let start = std::time::Instant::now(); + did_finish_scan.await; + let elapsed = start.elapsed(); + let (files, directories) = worktree + .read_with(cx, |this, _| (this.file_count(), this.dir_count())) + .unwrap(); + println!( + "{:?} for {directories} directories and {files} files", + elapsed + ); + cx.update(|cx| { + cx.quit(); + }) + }) + .detach(); + }) +} diff --git a/crates/x_ai/Cargo.toml b/crates/x_ai/Cargo.toml index 7ca0ca09397111404a59dff85d1ccf0659c0ea45..8ff020df8c1ccaf284157d8b46ddaa0e678b3cd7 100644 --- a/crates/x_ai/Cargo.toml +++ b/crates/x_ai/Cargo.toml @@ -20,4 +20,3 @@ anyhow.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true strum.workspace = true -workspace-hack.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 
abaeb40fa6dc1b78c93f24af21a186f1ef0bb0c3..c84fa8261fe2efdc4c8c831fcd239514c2d16526 100644
--- a/crates/zed/Cargo.toml
+++ b/crates/zed/Cargo.toml
@@ -2,7 +2,7 @@
 description = "The fast, collaborative code editor."
 edition.workspace = true
 name = "zed"
-version = "0.209.0"
+version = "0.211.0"
 publish.workspace = true
 license = "GPL-3.0-or-later"
 authors = ["Zed Team "]
@@ -21,13 +21,11 @@ path = "src/main.rs"
 [dependencies]
 acp_tools.workspace = true
 activity_indicator.workspace = true
-agent.workspace = true
 agent_settings.workspace = true
 agent_ui.workspace = true
 anyhow.workspace = true
 askpass.workspace = true
 assets.workspace = true
-assistant_tools.workspace = true
 audio.workspace = true
 auto_update.workspace = true
 auto_update_ui.workspace = true
@@ -160,7 +158,6 @@ vim_mode_setting.workspace = true
 watch.workspace = true
 web_search.workspace = true
 web_search_providers.workspace = true
-workspace-hack.workspace = true
 workspace.workspace = true
 zed_actions.workspace = true
 zed_env_vars.workspace = true
diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs
index cc05cdfd822bd41135034dbaa3c174fd0af667cb..93feb4a71d18164501955b46187a14d6757d861e 100644
--- a/crates/zed/src/main.rs
+++ b/crates/zed/src/main.rs
@@ -582,7 +582,6 @@ pub fn main() {
         false,
         cx,
     );
-    assistant_tools::init(app_state.client.http_client(), cx);
     repl::init(app_state.fs.clone(), cx);
     recent_projects::init(cx);
 
@@ -848,6 +847,18 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
                 .detach();
             });
         }
+        OpenRequestKind::Setting { setting_path } => {
+            // Concrete setting paths are supported, e.g. `zed://settings/languages/Rust/tab_size`,
+            // which maps to the settings key `languages.Rust.tab_size`
+            // (i.e. the path segments ["languages", "Rust", "tab_size"]).
+            // Template forms such as `zed://settings/languages/$(language)/tab_size` are not supported.
+            workspace::with_active_or_new_workspace(cx, |_workspace, window, cx| {
+                window.dispatch_action(
+                    Box::new(zed_actions::OpenSettingsAt { path: setting_path }),
+                    cx,
+                );
+            });
+        }
     }
 
     return;
diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs
index 58fb18ebd2b94c0f68cbe7e0de3128d5acedf491..12a1bc8d50b2916658877165c2eedef9c8ce235c 100644
--- a/crates/zed/src/zed.rs
+++ b/crates/zed/src/zed.rs
@@ -18,13 +18,13 @@ use breadcrumbs::Breadcrumbs;
 use client::zed_urls;
 use collections::VecDeque;
 use debugger_ui::debugger_panel::DebugPanel;
-use editor::ProposedChangesEditorToolbar;
 use editor::{Editor, MultiBuffer};
 use extension_host::ExtensionStore;
 use feature_flags::{FeatureFlagAppExt, PanicFeatureFlag};
 use fs::Fs;
 use futures::future::Either;
 use futures::{StreamExt, channel::mpsc, select_biased};
+use git_ui::commit_view::CommitViewToolbar;
 use git_ui::git_panel::GitPanel;
 use git_ui::project_diff::ProjectDiffToolbar;
 use gpui::{
@@ -70,10 +70,7 @@ use std::{
     sync::atomic::{self, AtomicBool},
 };
 use terminal_view::terminal_panel::{self, TerminalPanel};
-use theme::{
-    ActiveTheme, GlobalTheme, IconThemeNotFoundError, SystemAppearance, ThemeNotFoundError,
-    ThemeRegistry, ThemeSettings,
-};
+use theme::{ActiveTheme, GlobalTheme, SystemAppearance, ThemeRegistry, ThemeSettings};
 use ui::{PopoverMenuHandle, prelude::*};
 use util::markdown::MarkdownString;
 use util::rel_path::RelPath;
@@ -180,6 +177,9 @@ pub fn init(cx: &mut App) {
             open_log_file(workspace, window, cx);
         });
     });
+    cx.on_action(|_: &workspace::RevealLogInFileManager, cx| {
+        cx.reveal_path(paths::log_file().as_path());
+    });
     cx.on_action(|_: &zed_actions::OpenLicenses, cx| {
         with_active_or_new_workspace(cx, |workspace, window, cx| {
             open_bundled_file(
@@ -419,6 +419,8 @@ pub fn
initialize_workspace( let cursor_position = cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); + let line_ending_indicator = + cx.new(|_| line_ending_selector::LineEndingIndicator::default()); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(search_button, window, cx); status_bar.add_left_item(lsp_button, window, cx); @@ -427,6 +429,7 @@ pub fn initialize_workspace( status_bar.add_right_item(edit_prediction_button, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); status_bar.add_right_item(active_toolchain_language, window, cx); + status_bar.add_right_item(line_ending_indicator, window, cx); status_bar.add_right_item(vim_mode_indicator, window, cx); status_bar.add_right_item(cursor_position, window, cx); status_bar.add_right_item(image_info, window, cx); @@ -867,6 +870,24 @@ fn register_actions( } } }) + .register_action({ + let fs = app_state.fs.clone(); + move |_, action: &zed_actions::ResetAllZoom, _window, cx| { + if action.persist { + update_settings_file(fs.clone(), cx, move |settings, _| { + settings.theme.ui_font_size = None; + settings.theme.buffer_font_size = None; + settings.theme.agent_ui_font_size = None; + settings.theme.agent_buffer_font_size = None; + }); + } else { + theme::reset_ui_font_size(cx); + theme::reset_buffer_font_size(cx); + theme::reset_agent_ui_font_size(cx); + theme::reset_agent_buffer_font_size(cx); + } + } + }) .register_action(|_, _: &install_cli::RegisterZedScheme, window, cx| { cx.spawn_in(window, async move |workspace, cx| { install_cli::register_zed_scheme(cx).await?; @@ -1031,8 +1052,6 @@ fn initialize_pane( ) }); toolbar.add_item(buffer_search_bar.clone(), window, cx); - let proposed_change_bar = cx.new(|_| ProposedChangesEditorToolbar::new()); - toolbar.add_item(proposed_change_bar, window, cx); let quick_action_bar = cx.new(|cx| QuickActionBar::new(buffer_search_bar, workspace, cx)); toolbar.add_item(quick_action_bar, window, cx); @@ -1044,12 +1063,16 @@ fn initialize_pane( toolbar.add_item(lsp_log_item, window, cx); let dap_log_item = cx.new(|_| debugger_tools::DapLogToolbarItemView::new()); toolbar.add_item(dap_log_item, window, cx); + let acp_tools_item = cx.new(|_| acp_tools::AcpToolsToolbarItemView::new()); + toolbar.add_item(acp_tools_item, window, cx); let syntax_tree_item = cx.new(|_| language_tools::SyntaxTreeToolbarItemView::new()); toolbar.add_item(syntax_tree_item, window, cx); let migration_banner = cx.new(|cx| MigrationBanner::new(workspace, cx)); toolbar.add_item(migration_banner, window, cx); let project_diff_toolbar = cx.new(|cx| ProjectDiffToolbar::new(workspace, cx)); toolbar.add_item(project_diff_toolbar, window, cx); + let commit_view_toolbar = cx.new(|cx| CommitViewToolbar::new(workspace, cx)); + toolbar.add_item(commit_view_toolbar, window, cx); let agent_diff_toolbar = cx.new(AgentDiffToolbar::new); toolbar.add_item(agent_diff_toolbar, window, cx); let basedpyright_banner = cx.new(|cx| BasedPyrightBanner::new(workspace, cx)); @@ -2032,41 +2055,88 @@ pub(crate) fn eager_load_active_theme_and_icon_theme(fs: Arc, cx: &mut A let theme_settings = ThemeSettings::get_global(cx); let appearance = SystemAppearance::global(cx).0; - let theme_name = theme_settings.theme.name(appearance); - if matches!( - theme_registry.get(&theme_name.0), - Err(ThemeNotFoundError(_)) - ) && let Some(theme_path) = extension_store - .read(cx) - .path_to_extension_theme(&theme_name.0) - { - if cx - .background_executor() - .block(theme_registry.load_user_theme(&theme_path, 
fs.clone())) - .log_err() - .is_some() - { - GlobalTheme::reload_theme(cx); - } + enum LoadTarget { + Theme(PathBuf), + IconTheme((PathBuf, PathBuf)), } - let theme_settings = ThemeSettings::get_global(cx); + let theme_name = theme_settings.theme.name(appearance); let icon_theme_name = theme_settings.icon_theme.name(appearance); - if matches!( - theme_registry.get_icon_theme(&icon_theme_name.0), - Err(IconThemeNotFoundError(_)) - ) && let Some((icon_theme_path, icons_root_path)) = extension_store - .read(cx) - .path_to_extension_icon_theme(&icon_theme_name.0) - { - if cx - .background_executor() - .block(theme_registry.load_icon_theme(&icon_theme_path, &icons_root_path, fs)) - .log_err() - .is_some() - { - GlobalTheme::reload_icon_theme(cx); + let themes_to_load = [ + theme_registry + .get(&theme_name.0) + .is_err() + .then(|| { + extension_store + .read(cx) + .path_to_extension_theme(&theme_name.0) + }) + .flatten() + .map(LoadTarget::Theme), + theme_registry + .get_icon_theme(&icon_theme_name.0) + .is_err() + .then(|| { + extension_store + .read(cx) + .path_to_extension_icon_theme(&icon_theme_name.0) + }) + .flatten() + .map(LoadTarget::IconTheme), + ]; + + enum ReloadTarget { + Theme, + IconTheme, + } + + let executor = cx.background_executor(); + let reload_tasks = parking_lot::Mutex::new(Vec::with_capacity(themes_to_load.len())); + + let mut themes_to_load = themes_to_load.into_iter().flatten().peekable(); + + if themes_to_load.peek().is_none() { + return; + } + + executor.block(executor.scoped(|scope| { + for load_target in themes_to_load { + let theme_registry = &theme_registry; + let reload_tasks = &reload_tasks; + let fs = fs.clone(); + + scope.spawn(async { + match load_target { + LoadTarget::Theme(theme_path) => { + if theme_registry + .load_user_theme(&theme_path, fs) + .await + .log_err() + .is_some() + { + reload_tasks.lock().push(ReloadTarget::Theme); + } + } + LoadTarget::IconTheme((icon_theme_path, icons_root_path)) => { + if theme_registry + .load_icon_theme(&icon_theme_path, &icons_root_path, fs) + .await + .log_err() + .is_some() + { + reload_tasks.lock().push(ReloadTarget::IconTheme); + } + } + } + }); } + })); + + for reload_target in reload_tasks.into_inner() { + match reload_target { + ReloadTarget::Theme => GlobalTheme::reload_theme(cx), + ReloadTarget::IconTheme => GlobalTheme::reload_icon_theme(cx), + }; } } @@ -4517,6 +4587,7 @@ mod tests { | "workspace::ActivatePane" | "workspace::MoveItemToPane" | "workspace::MoveItemToPaneInDirection" + | "workspace::NewFileSplit" | "workspace::OpenTerminal" | "workspace::SendKeystrokes" | "agent::NewNativeAgentThreadFromSummary" @@ -4619,7 +4690,7 @@ mod tests { "keymap_editor", "keystroke_input", "language_selector", - "line_ending", + "line_ending_selector", "lsp_tool", "markdown", "menu", diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index cd18503f61be4a712caf5e4399f794b12c6bf889..ac22f972368f61fa518ac74a5ac23e593433c75b 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -2,7 +2,7 @@ use collab_ui::collab_panel; use gpui::{App, Menu, MenuItem, OsAction}; use release_channel::ReleaseChannel; use terminal_view::terminal_panel; -use zed_actions::{ToggleFocus as ToggleDebugPanel, dev}; +use zed_actions::{ToggleFocus as ToggleDebugPanel, agent::AddSelectionToThread, dev}; pub fn app_menus(cx: &mut App) -> Vec { use zed_actions::Quit; @@ -20,6 +20,10 @@ pub fn app_menus(cx: &mut App) -> Vec { "Reset Zoom", zed_actions::ResetBufferFontSize { persist: false }, ), 
+ MenuItem::action( + "Reset All Zoom", + zed_actions::ResetAllZoom { persist: false }, + ), MenuItem::separator(), MenuItem::action("Toggle Left Dock", workspace::ToggleLeftDock), MenuItem::action("Toggle Right Dock", workspace::ToggleRightDock), @@ -185,8 +189,18 @@ pub fn app_menus(cx: &mut App) -> Vec { editor::actions::SelectPreviousSyntaxNode, ), MenuItem::separator(), - MenuItem::action("Add Cursor Above", editor::actions::AddSelectionAbove), - MenuItem::action("Add Cursor Below", editor::actions::AddSelectionBelow), + MenuItem::action( + "Add Cursor Above", + editor::actions::AddSelectionAbove { + skip_soft_wrap: true, + }, + ), + MenuItem::action( + "Add Cursor Below", + editor::actions::AddSelectionBelow { + skip_soft_wrap: true, + }, + ), MenuItem::action( "Select Next Occurrence", editor::actions::SelectNext { @@ -204,6 +218,8 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Move Line Up", editor::actions::MoveLineUp), MenuItem::action("Move Line Down", editor::actions::MoveLineDown), MenuItem::action("Duplicate Selection", editor::actions::DuplicateLineDown), + MenuItem::separator(), + MenuItem::action("Add to Agent Thread", AddSelectionToThread), ], }, Menu { diff --git a/crates/zed/src/zed/component_preview.rs b/crates/zed/src/zed/component_preview.rs index 7a287cf3d83f24e7f4d42221bda420053a975860..78f8755319a87a6ada1357b8179f5b91532d37f8 100644 --- a/crates/zed/src/zed/component_preview.rs +++ b/crates/zed/src/zed/component_preview.rs @@ -17,7 +17,7 @@ use persistence::COMPONENT_PREVIEW_DB; use project::Project; use std::{iter::Iterator, ops::Range, sync::Arc}; use ui::{ButtonLike, Divider, HighlightedLabel, ListItem, ListSubHeader, Tooltip, prelude::*}; -use ui_input::SingleLineInput; +use ui_input::InputField; use workspace::{ AppState, Item, ItemId, SerializableItem, Workspace, WorkspaceId, delete_unloaded_items, item::ItemEvent, @@ -99,7 +99,7 @@ struct ComponentPreview { component_map: HashMap, components: Vec, cursor_index: usize, - filter_editor: Entity, + filter_editor: Entity, filter_text: String, focus_handle: FocusHandle, language_registry: Arc, @@ -126,8 +126,7 @@ impl ComponentPreview { let sorted_components = component_registry.sorted_components(); let selected_index = selected_index.into().unwrap_or(0); let active_page = active_page.unwrap_or(PreviewPage::AllComponents); - let filter_editor = - cx.new(|cx| SingleLineInput::new(window, cx, "Find components or usages…")); + let filter_editor = cx.new(|cx| InputField::new(window, cx, "Find components or usages…")); let component_list = ListState::new( sorted_components.len(), diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index a9bd0395347dadcb9caa706fcbcc81f58d6af944..fd16478b5a7ade4b8ef86924d2ce737cb2f62c56 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -3,6 +3,7 @@ use codestral::CodestralCompletionProvider; use collections::HashMap; use copilot::{Copilot, CopilotCompletionProvider}; use editor::Editor; +use feature_flags::FeatureFlagAppExt; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; use language_models::MistralLanguageModelProvider; @@ -11,6 +12,7 @@ use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; use ui::Window; use zeta::ZetaEditPredictionProvider; +use zeta2::Zeta2FeatureFlag; pub fn 
init(client: Arc, user_store: Entity, cx: &mut App) { let editors: Rc, AnyWindowHandle>>> = Rc::default(); @@ -217,7 +219,7 @@ fn assign_edit_prediction_provider( } if let Some(project) = editor.project() { - if std::env::var("ZED_ZETA2").is_ok() { + if cx.has_flag::() { let zeta = zeta2::Zeta::global(client, &user_store, cx); let provider = cx.new(|cx| { zeta2::ZetaEditPredictionProvider::new( diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index a8a998b6580269de150280c432c329cf59c30c22..618849b3474e60f8a3737facf7c502f6e5f1cf52 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -47,6 +47,7 @@ pub enum OpenRequestKind { AgentPanel, DockMenuAction { index: usize }, BuiltinJsonSchema { schema_path: String }, + Setting { setting_path: String }, } impl OpenRequest { @@ -93,6 +94,10 @@ impl OpenRequest { this.kind = Some(OpenRequestKind::BuiltinJsonSchema { schema_path: schema_path.to_string(), }); + } else if let Some(setting_path) = url.strip_prefix("zed://settings/") { + this.kind = Some(OpenRequestKind::Setting { + setting_path: setting_path.to_string(), + }); } else if url.starts_with("ssh://") { this.parse_ssh_file_path(&url, cx)? } else if let Some(request_path) = parse_zed_link(&url, cx) { @@ -328,6 +333,7 @@ pub async fn handle_cli_connection( wait, wsl, open_new_workspace, + reuse, env, user_data_dir: _, } => { @@ -363,6 +369,7 @@ pub async fn handle_cli_connection( paths, diff_paths, open_new_workspace, + reuse, &responses, wait, app_state.clone(), @@ -382,6 +389,7 @@ async fn open_workspaces( paths: Vec, diff_paths: Vec<[String; 2]>, open_new_workspace: Option, + reuse: bool, responses: &IpcSender, wait: bool, app_state: Arc, @@ -441,6 +449,7 @@ async fn open_workspaces( workspace_paths, diff_paths.clone(), open_new_workspace, + reuse, wait, responses, env.as_ref(), @@ -487,6 +496,7 @@ async fn open_local_workspace( workspace_paths: Vec, diff_paths: Vec<[String; 2]>, open_new_workspace: Option, + reuse: bool, wait: bool, responses: &IpcSender, env: Option<&HashMap>, @@ -497,12 +507,30 @@ async fn open_local_workspace( let paths_with_position = derive_paths_with_position(app_state.fs.as_ref(), workspace_paths).await; + + // Handle reuse flag by finding existing window to replace + let replace_window = if reuse { + cx.update(|cx| workspace::local_workspace_windows(cx).into_iter().next()) + .ok() + .flatten() + } else { + None + }; + + // For reuse, force new workspace creation but with replace_window set + let effective_open_new_workspace = if reuse { + Some(true) + } else { + open_new_workspace + }; + match open_paths_with_positions( &paths_with_position, &diff_paths, app_state.clone(), workspace::OpenOptions { - open_new_workspace, + open_new_workspace: effective_open_new_workspace, + replace_window, env: env.cloned(), ..Default::default() }, @@ -614,7 +642,9 @@ mod tests { }; use editor::Editor; use gpui::TestAppContext; + use language::LineEnding; use remote::SshConnectionOptions; + use rope::Rope; use serde_json::json; use std::sync::Arc; use util::path; @@ -780,6 +810,7 @@ mod tests { vec![], open_new_workspace, false, + false, &response_tx, None, &app_state, @@ -791,4 +822,102 @@ mod tests { assert!(!errored); } + + #[gpui::test] + async fn test_reuse_flag_functionality(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + let root_dir = if cfg!(windows) { "C:\\root" } else { "/root" }; + let file1_path = if cfg!(windows) { + "C:\\root\\file1.txt" + } else { + "/root/file1.txt" + }; + 
let file2_path = if cfg!(windows) { + "C:\\root\\file2.txt" + } else { + "/root/file2.txt" + }; + + app_state.fs.create_dir(Path::new(root_dir)).await.unwrap(); + app_state + .fs + .create_file(Path::new(file1_path), Default::default()) + .await + .unwrap(); + app_state + .fs + .save( + Path::new(file1_path), + &Rope::from("content1"), + LineEnding::Unix, + ) + .await + .unwrap(); + app_state + .fs + .create_file(Path::new(file2_path), Default::default()) + .await + .unwrap(); + app_state + .fs + .save( + Path::new(file2_path), + &Rope::from("content2"), + LineEnding::Unix, + ) + .await + .unwrap(); + + // First, open a workspace normally + let (response_tx, _response_rx) = ipc::channel::().unwrap(); + let workspace_paths = vec![file1_path.to_string()]; + + let _errored = cx + .spawn({ + let app_state = app_state.clone(); + let response_tx = response_tx.clone(); + |mut cx| async move { + open_local_workspace( + workspace_paths, + vec![], + None, + false, + false, + &response_tx, + None, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + // Now test the reuse functionality - should replace the existing workspace + let workspace_paths_reuse = vec![file1_path.to_string()]; + + let errored_reuse = cx + .spawn({ + let app_state = app_state.clone(); + let response_tx = response_tx.clone(); + |mut cx| async move { + open_local_workspace( + workspace_paths_reuse, + vec![], + None, // open_new_workspace will be overridden by reuse logic + true, // reuse = true + false, + &response_tx, + None, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored_reuse); + } } diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index c721e1e8b6e7c8d1a3caf5c9a57e0159a5a3c031..a25074d46f356bbea5de986055b93557e73a8383 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -266,8 +266,18 @@ impl Render for QuickActionBar { ) .action("Expand Selection", Box::new(SelectLargerSyntaxNode)) .action("Shrink Selection", Box::new(SelectSmallerSyntaxNode)) - .action("Add Cursor Above", Box::new(AddSelectionAbove)) - .action("Add Cursor Below", Box::new(AddSelectionBelow)) + .action( + "Add Cursor Above", + Box::new(AddSelectionAbove { + skip_soft_wrap: true, + }), + ) + .action( + "Add Cursor Below", + Box::new(AddSelectionBelow { + skip_soft_wrap: true, + }), + ) .separator() .action("Go to Symbol", Box::new(ToggleOutline)) .action("Go to Line/Column", Box::new(ToggleGoToLine)) @@ -645,8 +655,8 @@ impl RenderOnce for QuickActionBarButton { .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) .toggle_state(self.toggled) - .tooltip(move |window, cx| { - Tooltip::for_action_in(tooltip.clone(), &*action, &self.focus_handle, window, cx) + .tooltip(move |_window, cx| { + Tooltip::for_action_in(tooltip.clone(), &*action, &self.focus_handle, cx) }) .on_click(move |event, window, cx| (self.on_click)(event, window, cx)) } diff --git a/crates/zed/src/zed/quick_action_bar/preview.rs b/crates/zed/src/zed/quick_action_bar/preview.rs index fb5a75f78d834ab3943e9dfd87cc7744fc453fcd..630d243cf6971ecebda694091acbfd5ba4c049e4 100644 --- a/crates/zed/src/zed/quick_action_bar/preview.rs +++ b/crates/zed/src/zed/quick_action_bar/preview.rs @@ -68,7 +68,7 @@ impl QuickActionBar { let button = IconButton::new(button_id, IconName::Eye) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) - .tooltip(move |window, cx| { + .tooltip(move |_window, cx| { Tooltip::with_meta( tooltip_text, Some(open_action_for_tooltip), @@ -76,7 +76,6 
@@ impl QuickActionBar {
                        "{} to open in a split",
                        text_for_keystroke(&alt_click.modifiers, &alt_click.key, cx)
                    ),
-                    window,
                     cx,
                 )
             })
diff --git a/crates/zed/src/zed/quick_action_bar/repl_menu.rs b/crates/zed/src/zed/quick_action_bar/repl_menu.rs
index 82eb82de1e2807346eb3ade2ced8a7946413f0a4..5210bb718c0663d2c256f865f0fcabf41bd5708f 100644
--- a/crates/zed/src/zed/quick_action_bar/repl_menu.rs
+++ b/crates/zed/src/zed/quick_action_bar/repl_menu.rs
@@ -54,7 +54,8 @@ impl QuickActionBar {
                 .count()
                 .ne(&0)
                 .then(|| {
-                    let latest = this.selections.newest_display(cx);
+                    let snapshot = this.display_snapshot(cx);
+                    let latest = this.selections.newest_display(&snapshot);
                     !latest.is_empty()
                 })
                 .unwrap_or_default()
diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs
index 45f3cd158bb38156a0981f01e5331dc0aead91c9..f3eab154415814d60e2b06f5823d47006b1c367c 100644
--- a/crates/zed/src/zed/windows_only_instance.rs
+++ b/crates/zed/src/zed/windows_only_instance.rs
@@ -158,6 +158,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> {
         wait: false,
         wsl: args.wsl.clone(),
         open_new_workspace: None,
+        reuse: false,
         env: None,
         user_data_dir: args.user_data_dir.clone(),
     }
diff --git a/crates/zed_actions/Cargo.toml b/crates/zed_actions/Cargo.toml
index 3778d19621dc287d96adcf86239674f8d907d8ee..1a140c483fff56b8b045b381adc67cbce778be39 100644
--- a/crates/zed_actions/Cargo.toml
+++ b/crates/zed_actions/Cargo.toml
@@ -12,5 +12,4 @@ workspace = true
 gpui.workspace = true
 schemars.workspace = true
 serde.workspace = true
-workspace-hack.workspace = true
 uuid.workspace = true
diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs
index d62de329c9af63ab8c15e1703b2517ac12594195..3fc9903cdc99a8bf5fdb7c14619e3ce963b8fc46 100644
--- a/crates/zed_actions/src/lib.rs
+++ b/crates/zed_actions/src/lib.rs
@@ -30,12 +30,12 @@ pub struct OpenZedUrl {
 actions!(
     zed,
     [
+        /// Opens the settings editor.
         #[action(deprecated_aliases = ["zed_actions::OpenSettingsEditor"])]
         OpenSettings,
         /// Opens the settings JSON file.
         #[action(deprecated_aliases = ["zed_actions::OpenSettings"])]
         OpenSettingsFile,
-        /// Opens the settings editor.
         /// Opens the default keymap file.
         OpenDefaultKeymap,
         /// Opens the user keymap file.
@@ -70,6 +70,7 @@ pub enum ExtensionCategoryFilter {
     Grammars,
     LanguageServers,
     ContextServers,
+    AgentServers,
     SlashCommands,
     IndexedDocsProviders,
     Snippets,
@@ -107,6 +108,16 @@ pub struct IncreaseBufferFontSize {
     pub persist: bool,
 }
 
+/// Opens the settings editor at the given setting path.
+#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
+#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
+pub struct OpenSettingsAt {
+    /// A path to a specific setting (e.g. `theme.mode`)
+    #[serde(default)]
+    pub path: String,
+}
+
 /// Resets the buffer font size to the default value.
 #[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
 #[action(namespace = zed)]
@@ -143,6 +154,15 @@ pub struct ResetUiFontSize {
     pub persist: bool,
 }
 
+/// Resets all zoom levels (UI and buffer font sizes, including in the agent panel) to their default values.
+#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)] +#[action(namespace = zed)] +#[serde(deny_unknown_fields)] +pub struct ResetAllZoom { + #[serde(default)] + pub persist: bool, +} + pub mod dev { use gpui::actions; @@ -296,7 +316,12 @@ pub mod agent { #[action(deprecated_aliases = ["assistant::ToggleModelSelector", "assistant2::ToggleModelSelector"])] ToggleModelSelector, /// Triggers re-authentication on Gemini - ReauthenticateAgent + ReauthenticateAgent, + /// Add the current selection as context for threads in the agent panel. + #[action(deprecated_aliases = ["assistant::QuoteSelection", "agent::QuoteSelection"])] + AddSelectionToThread, + /// Resets the agent panel zoom levels (agent UI and buffer font sizes). + ResetAgentZoom, ] ); } diff --git a/crates/zed_env_vars/Cargo.toml b/crates/zed_env_vars/Cargo.toml index f56e3dd529cc7a8001d0021e96902f55034f88e2..1cf32174c351c28ec7eb16deab7b7986655d4a48 100644 --- a/crates/zed_env_vars/Cargo.toml +++ b/crates/zed_env_vars/Cargo.toml @@ -15,5 +15,4 @@ path = "src/zed_env_vars.rs" default = [] [dependencies] -workspace-hack.workspace = true gpui.workspace = true diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 09bcfa7f542ce9c01802c9cebc11dfc9a8da2542..821d3e0b9e7a5ff37302cf613f4e09b047f121f1 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -55,7 +55,6 @@ thiserror.workspace = true ui.workspace = true util.workspace = true uuid.workspace = true -workspace-hack.workspace = true workspace.workspace = true worktree.workspace = true zed_actions.workspace = true diff --git a/crates/zeta/src/rate_completion_modal.rs b/crates/zeta/src/rate_completion_modal.rs index 8028865b057f0c6c3b49efc3a5c3c640208e65aa..cc1787ab01c6dd8f6429c3ac821a485355629462 100644 --- a/crates/zeta/src/rate_completion_modal.rs +++ b/crates/zeta/src/rate_completion_modal.rs @@ -382,11 +382,7 @@ impl RateCompletionModal { ) } - fn render_active_completion( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> Option { + fn render_active_completion(&mut self, cx: &mut Context) -> Option { let active_completion = self.active_completion.as_ref()?; let completion_id = active_completion.completion.id; let focus_handle = &self.focus_handle(cx); @@ -500,7 +496,6 @@ impl RateCompletionModal { .key_binding(KeyBinding::for_action_in( &ThumbsDownActiveCompletion, focus_handle, - window, cx )) .on_click(cx.listener(move |this, _, window, cx| { @@ -521,7 +516,6 @@ impl RateCompletionModal { .key_binding(KeyBinding::for_action_in( &ThumbsUpActiveCompletion, focus_handle, - window, cx )) .on_click(cx.listener(move |this, _, window, cx| { @@ -658,7 +652,7 @@ impl Render for RateCompletionModal { ) ), ) - .children(self.render_active_completion(window, cx)) + .children(self.render_active_completion( cx)) .on_mouse_down_out(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))) } } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 1d48571d7b06f35d82934122919e75bbbd087ffa..454a1526a9e8c6a75d47bda875feb6843b454a0d 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -1581,7 +1581,7 @@ fn guess_token_count(bytes: usize) -> usize { #[cfg(test)] mod tests { use client::test::FakeServer; - use clock::FakeSystemClock; + use clock::{FakeSystemClock, ReplicaId}; use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; use gpui::TestAppContext; use http_client::FakeHttpClient; @@ -1839,7 +1839,7 @@ mod tests { let buffer = cx.new(|_cx| { Buffer::remote( language::BufferId::new(1).unwrap(), - 1, + 
ReplicaId::new(1), language::Capability::ReadWrite, "fn main() {\n println!(\"Hello\");\n}", ) diff --git a/crates/zeta2/Cargo.toml b/crates/zeta2/Cargo.toml index bce7e5987ccec635b335110a3a38298040c68e72..7ca140fa353b6404e451fdb79cccfed982b64e27 100644 --- a/crates/zeta2/Cargo.toml +++ b/crates/zeta2/Cargo.toml @@ -20,6 +20,7 @@ cloud_llm_client.workspace = true cloud_zeta2_prompt.workspace = true edit_prediction.workspace = true edit_prediction_context.workspace = true +feature_flags.workspace = true futures.workspace = true gpui.workspace = true indoc.workspace = true @@ -28,11 +29,11 @@ language_model.workspace = true log.workspace = true project.workspace = true release_channel.workspace = true +serde.workspace = true serde_json.workspace = true thiserror.workspace = true util.workspace = true uuid.workspace = true -workspace-hack.workspace = true workspace.workspace = true worktree.workspace = true diff --git a/crates/zeta2/src/prediction.rs b/crates/zeta2/src/prediction.rs index d4832993b9ecd7c40f154f2ab696c66872073d5e..a0dcd83b88142a5746c0b3c7d82bc7a64965edab 100644 --- a/crates/zeta2/src/prediction.rs +++ b/crates/zeta2/src/prediction.rs @@ -13,6 +13,12 @@ use uuid::Uuid; #[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)] pub struct EditPredictionId(Uuid); +impl Into for EditPredictionId { + fn into(self) -> Uuid { + self.0 + } +} + impl From for gpui::ElementId { fn from(value: EditPredictionId) -> Self { gpui::ElementId::Uuid(value.0) @@ -33,7 +39,7 @@ pub struct EditPrediction { pub snapshot: BufferSnapshot, pub edit_preview: EditPreview, // We keep a reference to the buffer so that we do not need to reload it from disk when applying the prediction. - _buffer: Entity, + pub buffer: Entity, } impl EditPrediction { @@ -108,7 +114,7 @@ impl EditPrediction { edits, snapshot, edit_preview, - _buffer: buffer, + buffer, }) } @@ -184,6 +190,10 @@ pub fn interpolate_edits( if edits.is_empty() { None } else { Some(edits) } } +pub fn line_range_to_point_range(range: Range) -> Range { + language::Point::new(range.start.0, 0)..language::Point::new(range.end.0, 0) +} + fn edits_from_response( edits: &[predict_edits_v3::Edit], snapshot: &TextBufferSnapshot, @@ -191,12 +201,14 @@ fn edits_from_response( edits .iter() .flat_map(|edit| { - let old_text = snapshot.text_for_range(edit.range.clone()); + let point_range = line_range_to_point_range(edit.range.clone()); + let offset = point_range.to_offset(snapshot).start; + let old_text = snapshot.text_for_range(point_range); excerpt_edits_from_response( old_text.collect::>(), &edit.content, - edit.range.start, + offset, &snapshot, ) }) @@ -252,6 +264,7 @@ mod tests { use super::*; use cloud_llm_client::predict_edits_v3; + use edit_prediction_context::Line; use gpui::{App, Entity, TestAppContext, prelude::*}; use indoc::indoc; use language::{Buffer, ToOffset as _}; @@ -278,7 +291,7 @@ mod tests { // TODO cover more cases when multi-file is supported let big_edits = vec![predict_edits_v3::Edit { path: PathBuf::from("test.txt").into(), - range: 0..old.len(), + range: Line(0)..Line(old.lines().count() as u32), content: new.into(), }]; @@ -317,7 +330,7 @@ mod tests { edits, snapshot: cx.read(|cx| buffer.read(cx).snapshot()), path: Path::new("test.txt").into(), - _buffer: buffer.clone(), + buffer: buffer.clone(), edit_preview, }; diff --git a/crates/zeta2/src/provider.rs b/crates/zeta2/src/provider.rs index db637208aa88e8e3ebe4b30dc3d5639497cd0ac0..3c0dd75cc23a6a7b18a0fba19d0eab0a4833ba9c 100644 --- a/crates/zeta2/src/provider.rs +++ 
b/crates/zeta2/src/provider.rs @@ -179,8 +179,8 @@ impl EditPredictionProvider for ZetaEditPredictionProvider { } fn accept(&mut self, cx: &mut Context) { - self.zeta.update(cx, |zeta, _cx| { - zeta.accept_current_prediction(&self.project); + self.zeta.update(cx, |zeta, cx| { + zeta.accept_current_prediction(&self.project, cx); }); self.pending_predictions.clear(); } diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index 1e8c840da9903d07740dccd1d50864ffb027db08..a5150b5448c60956b8f21488ee05aa5a501fa517 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -3,7 +3,8 @@ use chrono::TimeDelta; use client::{Client, EditPredictionUsage, UserStore}; use cloud_llm_client::predict_edits_v3::{self, PromptFormat, Signature}; use cloud_llm_client::{ - EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, ZED_VERSION_HEADER_NAME, + AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, + ZED_VERSION_HEADER_NAME, }; use cloud_zeta2_prompt::{DEFAULT_MAX_PROMPT_BYTES, PlannedPrompt}; use edit_prediction_context::{ @@ -11,18 +12,20 @@ use edit_prediction_context::{ EditPredictionExcerptOptions, EditPredictionScoreOptions, SimilarSnippetOptions, SyntaxIndex, SyntaxIndexState, }; +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; use futures::AsyncReadExt as _; use futures::channel::{mpsc, oneshot}; -use gpui::http_client::Method; +use gpui::http_client::{AsyncBody, Method}; use gpui::{ App, Entity, EntityId, Global, SemanticVersion, SharedString, Subscription, Task, WeakEntity, http_client, prelude::*, }; +use language::BufferSnapshot; use language::{Buffer, DiagnosticSet, LanguageServerId, ToOffset as _, ToPoint}; -use language::{BufferSnapshot, TextBufferSnapshot}; use language_model::{LlmApiToken, RefreshLlmTokenListener}; use project::Project; use release_channel::AppVersion; +use serde::de::DeserializeOwned; use std::collections::{HashMap, VecDeque, hash_map}; use std::path::Path; use std::str::FromStr as _; @@ -30,7 +33,6 @@ use std::sync::Arc; use std::time::{Duration, Instant}; use thiserror::Error; use util::rel_path::RelPathBuf; -use util::some_or_debug_panic; use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; mod prediction; @@ -46,6 +48,7 @@ const MAX_EVENT_COUNT: usize = 16; pub const DEFAULT_CONTEXT_OPTIONS: EditPredictionContextOptions = EditPredictionContextOptions { use_imports: true, + max_retrieved_declarations: 0, excerpt: EditPredictionExcerptOptions { max_bytes: 512, min_bytes: 128, @@ -65,6 +68,16 @@ pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions { file_indexing_parallelism: 1, }; +pub struct Zeta2FeatureFlag; + +impl FeatureFlag for Zeta2FeatureFlag { + const NAME: &'static str = "zeta2"; + + fn enabled_for_staff() -> bool { + false + } +} + #[derive(Clone)] struct ZetaGlobal(Entity); @@ -91,12 +104,12 @@ pub struct ZetaOptions { } pub struct PredictionDebugInfo { - pub context: EditPredictionContext, + pub request: predict_edits_v3::PredictEditsRequest, pub retrieval_time: TimeDelta, pub buffer: WeakEntity, pub position: language::Anchor, pub local_prompt: Result, - pub response_rx: oneshot::Receiver>, + pub response_rx: oneshot::Receiver>, } pub type RequestDebugInfo = predict_edits_v3::DebugInfo; @@ -108,30 +121,40 @@ struct ZetaProject { current_prediction: Option, } -#[derive(Clone)] +#[derive(Debug, Clone)] struct CurrentEditPrediction { pub requested_by_buffer_id: EntityId, pub prediction: EditPrediction, } impl 
CurrentEditPrediction { - fn should_replace_prediction( - &self, - old_prediction: &Self, - snapshot: &TextBufferSnapshot, - ) -> bool { - if self.requested_by_buffer_id != old_prediction.requested_by_buffer_id { + fn should_replace_prediction(&self, old_prediction: &Self, cx: &App) -> bool { + let Some(new_edits) = self + .prediction + .interpolate(&self.prediction.buffer.read(cx)) + else { + return false; + }; + + if self.prediction.buffer != old_prediction.prediction.buffer { return true; } - let Some(old_edits) = old_prediction.prediction.interpolate(snapshot) else { + let Some(old_edits) = old_prediction + .prediction + .interpolate(&old_prediction.prediction.buffer.read(cx)) + else { return true; }; - let Some(new_edits) = self.prediction.interpolate(snapshot) else { - return false; - }; - if old_edits.len() == 1 && new_edits.len() == 1 { + // This reduces the occurrence of UI thrash from replacing edits + // + // TODO: This is fairly arbitrary - should have a more general heuristic that handles multiple edits. + if self.requested_by_buffer_id == self.prediction.buffer.entity_id() + && self.requested_by_buffer_id == old_prediction.prediction.buffer.entity_id() + && old_edits.len() == 1 + && new_edits.len() == 1 + { let (old_range, old_text) = &old_edits[0]; let (new_range, new_text) = &new_edits[0]; new_range == old_range && new_text.starts_with(old_text) @@ -382,11 +405,46 @@ impl Zeta { } } - fn accept_current_prediction(&mut self, project: &Entity) { - if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { - project_state.current_prediction.take(); + fn accept_current_prediction(&mut self, project: &Entity, cx: &mut Context) { + let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { + return; }; - // TODO report accepted + + let Some(prediction) = project_state.current_prediction.take() else { + return; + }; + let request_id = prediction.prediction.id.into(); + + let client = self.client.clone(); + let llm_token = self.llm_token.clone(); + let app_version = AppVersion::global(cx); + cx.spawn(async move |this, cx| { + let url = if let Ok(predict_edits_url) = std::env::var("ZED_ACCEPT_PREDICTION_URL") { + http_client::Url::parse(&predict_edits_url)? + } else { + client + .http_client() + .build_zed_llm_url("/predict_edits/accept", &[])? + }; + + let response = cx + .background_spawn(Self::send_api_request::<()>( + move |builder| { + let req = builder.uri(url.as_ref()).body( + serde_json::to_string(&AcceptEditPredictionBody { request_id })?.into(), + ); + Ok(req?) 
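// Aside — a minimal, self-contained sketch (not part of this diff) of the pattern the
// accept call above and `send_prediction_request`/`send_api_request` below share: one
// generic helper serializes the request body and deserializes a caller-chosen response
// type, so a status-only endpoint can use `()` while prediction uses its response struct.
// `post_json` and the closure-based transport are illustrative stand-ins, not Zed APIs.
use serde::{Serialize, de::DeserializeOwned};

fn post_json<Req, Res, T>(transport: T, request: &Req) -> anyhow::Result<Res>
where
    Req: Serialize,
    Res: DeserializeOwned,
    T: Fn(String) -> anyhow::Result<String>, // request body in, response body out
{
    let body = serde_json::to_string(request)?;
    let response_body = transport(body)?;
    Ok(serde_json::from_str(&response_body)?)
}

fn main() -> anyhow::Result<()> {
    // A transport that pretends the server replied `null`, as a status-only endpoint might.
    let acked: () = post_json(|_body| Ok("null".to_string()), &serde_json::json!({ "id": 1 }))?;
    Ok(acked)
}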
+ }, + client, + llm_token, + app_version, + )) + .await; + + Self::handle_api_response(&this, response, cx)?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); } fn discard_current_prediction(&mut self, project: &Entity) { @@ -423,8 +481,7 @@ impl Zeta { .current_prediction .as_ref() .is_none_or(|old_prediction| { - new_prediction - .should_replace_prediction(&old_prediction, buffer.read(cx)) + new_prediction.should_replace_prediction(&old_prediction, cx) }) { project_state.current_prediction = Some(new_prediction); @@ -515,6 +572,9 @@ impl Zeta { if path.pop() { Some(path) } else { None } }); + // TODO data collection + let can_collect_data = cx.is_staff(); + let request_task = cx.background_spawn({ let snapshot = snapshot.clone(); let buffer = buffer.clone(); @@ -537,7 +597,7 @@ impl Zeta { &options.context, index_state.as_deref(), ) else { - return Ok(None); + return Ok((None, None)); }; let retrieval_time = chrono::Utc::now() - before_retrieval; @@ -550,25 +610,22 @@ impl Zeta { options.max_diagnostic_bytes, ); - let debug_context = debug_tx.map(|tx| (tx, context.clone())); - let request = make_cloud_request( excerpt_path, context, events, - // TODO data collection - false, + can_collect_data, diagnostic_groups, diagnostic_groups_truncated, None, - debug_context.is_some(), + debug_tx.is_some(), &worktree_snapshots, index_state.as_deref(), Some(options.max_prompt_bytes), options.prompt_format, ); - let debug_response_tx = if let Some((debug_tx, context)) = debug_context { + let debug_response_tx = if let Some(debug_tx) = &debug_tx { let (response_tx, response_rx) = oneshot::channel(); let local_prompt = PlannedPrompt::populate(&request) @@ -577,7 +634,7 @@ impl Zeta { debug_tx .unbounded_send(PredictionDebugInfo { - context, + request: request.clone(), retrieval_time, buffer: buffer.downgrade(), local_prompt, @@ -599,20 +656,21 @@ impl Zeta { anyhow::bail!("Skipping request because ZED_ZETA2_SKIP_REQUEST is set") } - let response = Self::perform_request(client, llm_token, app_version, request).await; + let response = + Self::send_prediction_request(client, llm_token, app_version, request).await; if let Some(debug_response_tx) = debug_response_tx { debug_response_tx - .send(response.as_ref().map_err(|err| err.to_string()).and_then( - |response| match some_or_debug_panic(response.0.debug_info.clone()) { - Some(debug_info) => Ok(debug_info), - None => Err("Missing debug info".to_string()), - }, - )) + .send( + response + .as_ref() + .map_err(|err| err.to_string()) + .map(|response| response.0.clone()), + ) .ok(); } - anyhow::Ok(Some(response?)) + response.map(|(res, usage)| (Some(res), usage)) } }); @@ -621,60 +679,18 @@ impl Zeta { cx.spawn({ let project = project.clone(); async move |this, cx| { - match request_task.await { - Ok(Some((response, usage))) => { - if let Some(usage) = usage { - this.update(cx, |this, cx| { - this.user_store.update(cx, |user_store, cx| { - user_store.update_edit_prediction_usage(usage, cx); - }); - }) - .ok(); - } - - let prediction = EditPrediction::from_response( - response, &snapshot, &buffer, &project, cx, - ) - .await; - - // TODO telemetry: duration, etc - Ok(prediction) - } - Ok(None) => Ok(None), - Err(err) => { - if err.is::() { - cx.update(|cx| { - this.update(cx, |this, _cx| { - this.update_required = true; - }) - .ok(); - - let error_message: SharedString = err.to_string().into(); - show_app_notification( - NotificationId::unique::(), - cx, - move |cx| { - cx.new(|cx| { - ErrorMessagePrompt::new(error_message.clone(), cx) - .with_link_button( - 
"Update Zed", - "https://zed.dev/releases", - ) - }) - }, - ); - }) - .ok(); - } + let Some(response) = Self::handle_api_response(&this, request_task.await, cx)? + else { + return Ok(None); + }; - Err(err) - } - } + // TODO telemetry: duration, etc + Ok(EditPrediction::from_response(response, &snapshot, &buffer, &project, cx).await) } }) } - async fn perform_request( + async fn send_prediction_request( client: Arc, llm_token: LlmApiToken, app_version: SemanticVersion, @@ -683,27 +699,94 @@ impl Zeta { predict_edits_v3::PredictEditsResponse, Option, )> { + let url = if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") { + http_client::Url::parse(&predict_edits_url)? + } else { + client + .http_client() + .build_zed_llm_url("/predict_edits/v3", &[])? + }; + + Self::send_api_request( + |builder| { + let req = builder + .uri(url.as_ref()) + .body(serde_json::to_string(&request)?.into()); + Ok(req?) + }, + client, + llm_token, + app_version, + ) + .await + } + + fn handle_api_response( + this: &WeakEntity, + response: Result<(T, Option)>, + cx: &mut gpui::AsyncApp, + ) -> Result { + match response { + Ok((data, usage)) => { + if let Some(usage) = usage { + this.update(cx, |this, cx| { + this.user_store.update(cx, |user_store, cx| { + user_store.update_edit_prediction_usage(usage, cx); + }); + }) + .ok(); + } + Ok(data) + } + Err(err) => { + if err.is::() { + cx.update(|cx| { + this.update(cx, |this, _cx| { + this.update_required = true; + }) + .ok(); + + let error_message: SharedString = err.to_string().into(); + show_app_notification( + NotificationId::unique::(), + cx, + move |cx| { + cx.new(|cx| { + ErrorMessagePrompt::new(error_message.clone(), cx) + .with_link_button("Update Zed", "https://zed.dev/releases") + }) + }, + ); + }) + .ok(); + } + Err(err) + } + } + } + + async fn send_api_request( + build: impl Fn(http_client::http::request::Builder) -> Result>, + client: Arc, + llm_token: LlmApiToken, + app_version: SemanticVersion, + ) -> Result<(Res, Option)> + where + Res: DeserializeOwned, + { let http_client = client.http_client(); let mut token = llm_token.acquire(&client).await?; let mut did_retry = false; loop { let request_builder = http_client::Request::builder().method(Method::POST); - let request_builder = - if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") { - request_builder.uri(predict_edits_url) - } else { - request_builder.uri( - http_client - .build_zed_llm_url("/predict_edits/v3", &[])? 
- .as_ref(), - ) - }; - let request = request_builder - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", token)) - .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) - .body(serde_json::to_string(&request)?.into())?; + + let request = build( + request_builder + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", token)) + .header(ZED_VERSION_HEADER_NAME, app_version.to_string()), + )?; let mut response = http_client.send(request).await?; @@ -738,7 +821,7 @@ impl Zeta { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; anyhow::bail!( - "error predicting edits.\nStatus: {:?}\nBody: {}", + "Request failed with status: {:?}\nBody: {}", response.status(), body ); @@ -928,7 +1011,7 @@ fn make_cloud_request( referenced_declarations.push(predict_edits_v3::ReferencedDeclaration { path: path.as_std_path().into(), text: text.into(), - range: snippet.declaration.item_range(), + range: snippet.declaration.item_line_range(), text_is_truncated, signature_range: snippet.declaration.signature_range_in_item_text(), parent_index, @@ -956,8 +1039,12 @@ fn make_cloud_request( predict_edits_v3::PredictEditsRequest { excerpt_path, excerpt: context.excerpt_text.body, + excerpt_line_range: context.excerpt.line_range, excerpt_range: context.excerpt.range, - cursor_offset: context.cursor_offset_in_excerpt, + cursor_point: predict_edits_v3::Point { + line: predict_edits_v3::Line(context.cursor_point.row), + column: context.cursor_point.column, + }, referenced_declarations, signatures, excerpt_parent, @@ -994,7 +1081,7 @@ fn add_signature( text: text.into(), text_is_truncated, parent_index, - range: parent_declaration.signature_range(), + range: parent_declaration.signature_line_range(), }); declaration_to_signature_index.insert(declaration_id, signature_index); Some(signature_index) @@ -1009,7 +1096,8 @@ mod tests { use client::UserStore; use clock::FakeSystemClock; - use cloud_llm_client::predict_edits_v3; + use cloud_llm_client::predict_edits_v3::{self, Point}; + use edit_prediction_context::Line; use futures::{ AsyncReadExt, StreamExt, channel::{mpsc, oneshot}, @@ -1069,7 +1157,7 @@ mod tests { request_id: Uuid::new_v4(), edits: vec![predict_edits_v3::Edit { path: Path::new(path!("root/1.txt")).into(), - range: 0..snapshot1.len(), + range: Line(0)..Line(snapshot1.max_point().row + 1), content: "Hello!\nHow are you?\nBye".into(), }], debug_info: None, @@ -1085,7 +1173,6 @@ mod tests { }); // Prediction for another file - let prediction_task = zeta.update(cx, |zeta, cx| { zeta.refresh_prediction(&project, &buffer1, position, cx) }); @@ -1095,14 +1182,13 @@ mod tests { request_id: Uuid::new_v4(), edits: vec![predict_edits_v3::Edit { path: Path::new(path!("root/2.txt")).into(), - range: 0..snapshot1.len(), + range: Line(0)..Line(snapshot1.max_point().row + 1), content: "Hola!\nComo estas?\nAdios".into(), }], debug_info: None, }) .unwrap(); prediction_task.await.unwrap(); - zeta.read_with(cx, |zeta, cx| { let prediction = zeta .current_prediction_for_buffer(&buffer1, &project, cx) @@ -1161,14 +1247,20 @@ mod tests { request.excerpt_path.as_ref(), Path::new(path!("root/foo.md")) ); - assert_eq!(request.cursor_offset, 10); + assert_eq!( + request.cursor_point, + Point { + line: Line(1), + column: 3 + } + ); respond_tx .send(predict_edits_v3::PredictEditsResponse { request_id: Uuid::new_v4(), edits: vec![predict_edits_v3::Edit { path: Path::new(path!("root/foo.md")).into(), - range: 
0..snapshot.len(), + range: Line(0)..Line(snapshot.max_point().row + 1), content: "Hello!\nHow are you?\nBye".into(), }], debug_info: None, @@ -1246,7 +1338,7 @@ mod tests { request_id: Uuid::new_v4(), edits: vec![predict_edits_v3::Edit { path: Path::new(path!("root/foo.md")).into(), - range: 0..snapshot.len(), + range: Line(0)..Line(snapshot.max_point().row + 1), content: "Hello!\nHow are you?\nBye".into(), }], debug_info: None, diff --git a/crates/zeta2_tools/Cargo.toml b/crates/zeta2_tools/Cargo.toml index c600b3b86e4f3f8477431275d7f85591ccb22ac7..edd1b1eb242c6c02001bec53120425f9a05e5d1d 100644 --- a/crates/zeta2_tools/Cargo.toml +++ b/crates/zeta2_tools/Cargo.toml @@ -18,6 +18,7 @@ cloud_llm_client.workspace = true collections.workspace = true edit_prediction_context.workspace = true editor.workspace = true +feature_flags.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true @@ -26,11 +27,11 @@ multi_buffer.workspace = true ordered-float.workspace = true project.workspace = true serde.workspace = true +telemetry.workspace = true text.workspace = true ui.workspace = true ui_input.workspace = true util.workspace = true -workspace-hack.workspace = true workspace.workspace = true zeta2.workspace = true diff --git a/crates/zeta2_tools/src/zeta2_tools.rs b/crates/zeta2_tools/src/zeta2_tools.rs index 69536ad46806cb271ef987cadb4e95a2061ac953..2319df2a49d04c7e73180830ecf9778380bbf025 100644 --- a/crates/zeta2_tools/src/zeta2_tools.rs +++ b/crates/zeta2_tools/src/zeta2_tools.rs @@ -1,34 +1,38 @@ -use std::{ - cmp::Reverse, collections::hash_map::Entry, path::PathBuf, str::FromStr, sync::Arc, - time::Duration, -}; +use std::{cmp::Reverse, path::PathBuf, str::FromStr, sync::Arc, time::Duration}; use chrono::TimeDelta; use client::{Client, UserStore}; -use cloud_llm_client::predict_edits_v3::{DeclarationScoreComponents, PromptFormat}; +use cloud_llm_client::predict_edits_v3::{ + self, DeclarationScoreComponents, PredictEditsRequest, PredictEditsResponse, PromptFormat, +}; use collections::HashMap; use editor::{Editor, EditorEvent, EditorMode, ExcerptRange, MultiBuffer}; -use futures::{StreamExt as _, channel::oneshot}; +use feature_flags::FeatureFlagAppExt as _; +use futures::{FutureExt, StreamExt as _, channel::oneshot, future::Shared}; use gpui::{ - CursorStyle, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, WeakEntity, - actions, prelude::*, + CursorStyle, Empty, Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, + WeakEntity, actions, prelude::*, }; use language::{Buffer, DiskState}; use ordered_float::OrderedFloat; -use project::{Project, WorktreeId}; -use ui::{ContextMenu, ContextMenuEntry, DropdownMenu, prelude::*}; -use ui_input::SingleLineInput; +use project::{Project, WorktreeId, telemetry_snapshot::TelemetrySnapshot}; +use ui::{ButtonLike, ContextMenu, ContextMenuEntry, DropdownMenu, KeyBinding, prelude::*}; +use ui_input::InputField; use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; use workspace::{Item, SplitDirection, Workspace}; -use zeta2::{DEFAULT_CONTEXT_OPTIONS, PredictionDebugInfo, Zeta, ZetaOptions}; +use zeta2::{PredictionDebugInfo, Zeta, Zeta2FeatureFlag, ZetaOptions}; -use edit_prediction_context::{DeclarationStyle, EditPredictionExcerptOptions}; +use edit_prediction_context::{EditPredictionContextOptions, EditPredictionExcerptOptions}; actions!( dev, [ /// Opens the language server protocol logs viewer. - OpenZeta2Inspector + OpenZeta2Inspector, + /// Rate prediction as positive. 
+ Zeta2RatePredictionPositive, + /// Rate prediction as negative. + Zeta2RatePredictionNegative, ] ); @@ -61,10 +65,11 @@ pub struct Zeta2Inspector { focus_handle: FocusHandle, project: Entity, last_prediction: Option, - max_excerpt_bytes_input: Entity, - min_excerpt_bytes_input: Entity, - cursor_context_ratio_input: Entity, - max_prompt_bytes_input: Entity, + max_excerpt_bytes_input: Entity, + min_excerpt_bytes_input: Entity, + cursor_context_ratio_input: Entity, + max_prompt_bytes_input: Entity, + max_retrieved_declarations: Entity, active_view: ActiveView, zeta: Entity, _active_editor_subscription: Option, @@ -85,16 +90,24 @@ struct LastPrediction { buffer: WeakEntity, position: language::Anchor, state: LastPredictionState, + request: PredictEditsRequest, + project_snapshot: Shared>>, _task: Option>, } +#[derive(Clone, Copy, PartialEq)] +enum Feedback { + Positive, + Negative, +} + enum LastPredictionState { Requested, Success { - inference_time: TimeDelta, - parsing_time: TimeDelta, - prompt_planning_time: TimeDelta, model_response_editor: Entity, + feedback_editor: Entity, + feedback: Option, + response: predict_edits_v3::PredictEditsResponse, }, Failed { message: String, @@ -125,11 +138,12 @@ impl Zeta2Inspector { focus_handle: cx.focus_handle(), project: project.clone(), last_prediction: None, - active_view: ActiveView::Context, + active_view: ActiveView::Inference, max_excerpt_bytes_input: Self::number_input("Max Excerpt Bytes", window, cx), min_excerpt_bytes_input: Self::number_input("Min Excerpt Bytes", window, cx), cursor_context_ratio_input: Self::number_input("Cursor Context Ratio", window, cx), max_prompt_bytes_input: Self::number_input("Max Prompt Bytes", window, cx), + max_retrieved_declarations: Self::number_input("Max Retrieved Definitions", window, cx), zeta: zeta.clone(), _active_editor_subscription: None, _update_state_task: Task::ready(()), @@ -167,6 +181,13 @@ impl Zeta2Inspector { self.max_prompt_bytes_input.update(cx, |input, cx| { input.set_text(options.max_prompt_bytes.to_string(), window, cx); }); + self.max_retrieved_declarations.update(cx, |input, cx| { + input.set_text( + options.context.max_retrieved_declarations.to_string(), + window, + cx, + ); + }); cx.notify(); } @@ -204,9 +225,9 @@ impl Zeta2Inspector { label: &'static str, window: &mut Window, cx: &mut Context, - ) -> Entity { + ) -> Entity { let input = cx.new(|cx| { - SingleLineInput::new(window, cx, "") + InputField::new(window, cx, "") .label(label) .label_min_width(px(64.)) }); @@ -220,7 +241,7 @@ impl Zeta2Inspector { }; fn number_input_value( - input: &Entity, + input: &Entity, cx: &App, ) -> T { input @@ -232,17 +253,24 @@ impl Zeta2Inspector { .unwrap_or_default() } - let mut context_options = DEFAULT_CONTEXT_OPTIONS.clone(); - context_options.excerpt = EditPredictionExcerptOptions { - max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx), - min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx), - target_before_cursor_over_total_bytes: number_input_value( - &this.cursor_context_ratio_input, + let zeta_options = this.zeta.read(cx).options().clone(); + + let context_options = EditPredictionContextOptions { + excerpt: EditPredictionExcerptOptions { + max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx), + min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx), + target_before_cursor_over_total_bytes: number_input_value( + &this.cursor_context_ratio_input, + cx, + ), + }, + max_retrieved_declarations: number_input_value( + 
&this.max_retrieved_declarations, cx, ), + ..zeta_options.context }; - let zeta_options = this.zeta.read(cx).options(); this.set_options( ZetaOptions { context: context_options, @@ -281,17 +309,23 @@ impl Zeta2Inspector { let language_registry = self.project.read(cx).languages().clone(); async move |this, cx| { let mut languages = HashMap::default(); - for lang_id in prediction - .context - .declarations + for ext in prediction + .request + .referenced_declarations .iter() - .map(|snippet| snippet.declaration.identifier().language_id) - .chain(prediction.context.excerpt_text.language_id) + .filter_map(|snippet| snippet.path.extension()) + .chain(prediction.request.excerpt_path.extension()) { - if let Entry::Vacant(entry) = languages.entry(lang_id) { + if !languages.contains_key(ext) { // Most snippets are gonna be the same language, // so we think it's fine to do this sequentially for now - entry.insert(language_registry.language_for_id(lang_id).await.ok()); + languages.insert( + ext.to_owned(), + language_registry + .language_for_name_or_extension(&ext.to_string_lossy()) + .await + .ok(), + ); } } @@ -314,13 +348,12 @@ impl Zeta2Inspector { let excerpt_buffer = cx.new(|cx| { let mut buffer = - Buffer::local(prediction.context.excerpt_text.body, cx); + Buffer::local(prediction.request.excerpt.clone(), cx); if let Some(language) = prediction - .context - .excerpt_text - .language_id - .as_ref() - .and_then(|id| languages.get(id)) + .request + .excerpt_path + .extension() + .and_then(|ext| languages.get(ext)) { buffer.set_language(language.clone(), cx); } @@ -334,25 +367,18 @@ impl Zeta2Inspector { cx, ); - let mut declarations = prediction.context.declarations.clone(); + let mut declarations = + prediction.request.referenced_declarations.clone(); declarations.sort_unstable_by_key(|declaration| { - Reverse(OrderedFloat( - declaration.score(DeclarationStyle::Declaration), - )) + Reverse(OrderedFloat(declaration.declaration_score)) }); for snippet in &declarations { - let path = this - .project - .read(cx) - .path_for_entry(snippet.declaration.project_entry_id(), cx); - let snippet_file = Arc::new(ExcerptMetadataFile { title: RelPath::unix(&format!( "{} (Score: {})", - path.map(|p| p.path.display(path_style).to_string()) - .unwrap_or_else(|| "".to_string()), - snippet.score(DeclarationStyle::Declaration) + snippet.path.display(), + snippet.declaration_score )) .unwrap() .into(), @@ -361,11 +387,10 @@ impl Zeta2Inspector { }); let excerpt_buffer = cx.new(|cx| { - let mut buffer = - Buffer::local(snippet.declaration.item_text().0, cx); + let mut buffer = Buffer::local(snippet.text.clone(), cx); buffer.file_updated(snippet_file, cx); - if let Some(language) = - languages.get(&snippet.declaration.identifier().language_id) + if let Some(ext) = snippet.path.extension() + && let Some(language) = languages.get(ext) { buffer.set_language(language.clone(), cx); } @@ -380,7 +405,7 @@ impl Zeta2Inspector { let excerpt_id = excerpt_ids.first().unwrap(); excerpt_score_components - .insert(*excerpt_id, snippet.components.clone()); + .insert(*excerpt_id, snippet.score_components.clone()); } multibuffer @@ -412,25 +437,91 @@ impl Zeta2Inspector { if let Some(prediction) = this.last_prediction.as_mut() { prediction.state = match response { Ok(Ok(response)) => { - prediction.prompt_editor.update( - cx, - |prompt_editor, cx| { - prompt_editor.set_text( - response.prompt, - window, + if let Some(debug_info) = &response.debug_info { + prediction.prompt_editor.update( + cx, + |prompt_editor, cx| { + 
prompt_editor.set_text( + debug_info.prompt.as_str(), + window, + cx, + ); + }, + ); + } + + let feedback_editor = cx.new(|cx| { + let buffer = cx.new(|cx| { + let mut buffer = Buffer::local("", cx); + buffer.set_language( + markdown_language.clone(), cx, ); + buffer + }); + let buffer = + cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let mut editor = Editor::new( + EditorMode::AutoHeight { + min_lines: 3, + max_lines: None, + }, + buffer, + None, + window, + cx, + ); + editor.set_placeholder_text( + "Write feedback here", + window, + cx, + ); + editor.set_show_line_numbers(false, cx); + editor.set_show_gutter(false, cx); + editor.set_show_scrollbars(false, cx); + editor + }); + + cx.subscribe_in( + &feedback_editor, + window, + |this, editor, ev, window, cx| match ev { + EditorEvent::BufferEdited => { + if let Some(last_prediction) = + this.last_prediction.as_mut() + && let LastPredictionState::Success { + feedback: feedback_state, + .. + } = &mut last_prediction.state + { + if feedback_state.take().is_some() { + editor.update(cx, |editor, cx| { + editor.set_placeholder_text( + "Write feedback here", + window, + cx, + ); + }); + cx.notify(); + } + } + } + _ => {} }, - ); + ) + .detach(); LastPredictionState::Success { - prompt_planning_time: response.prompt_planning_time, - inference_time: response.inference_time, - parsing_time: response.parsing_time, model_response_editor: cx.new(|cx| { let buffer = cx.new(|cx| { let mut buffer = Buffer::local( - response.model_response, + response + .debug_info + .as_ref() + .map(|p| p.model_response.as_str()) + .unwrap_or( + "(Debug info not available)", + ), cx, ); buffer.set_language(markdown_language, cx); @@ -452,6 +543,9 @@ impl Zeta2Inspector { editor.set_show_scrollbars(false, cx); editor }), + feedback_editor, + feedback: None, + response, } } Ok(Err(err)) => { @@ -467,6 +561,8 @@ impl Zeta2Inspector { } }); + let project_snapshot_task = TelemetrySnapshot::new(&this.project, cx); + this.last_prediction = Some(LastPrediction { context_editor, prompt_editor: cx.new(|cx| { @@ -489,6 +585,11 @@ impl Zeta2Inspector { buffer, position, state: LastPredictionState::Requested, + project_snapshot: cx + .foreground_executor() + .spawn(async move { Arc::new(project_snapshot_task.await) }) + .shared(), + request: prediction.request, _task: Some(task), }); cx.notify(); @@ -498,6 +599,103 @@ impl Zeta2Inspector { }); } + fn handle_rate_positive( + &mut self, + _action: &Zeta2RatePredictionPositive, + window: &mut Window, + cx: &mut Context, + ) { + self.handle_rate(Feedback::Positive, window, cx); + } + + fn handle_rate_negative( + &mut self, + _action: &Zeta2RatePredictionNegative, + window: &mut Window, + cx: &mut Context, + ) { + self.handle_rate(Feedback::Negative, window, cx); + } + + fn handle_rate(&mut self, kind: Feedback, window: &mut Window, cx: &mut Context) { + let Some(last_prediction) = self.last_prediction.as_mut() else { + return; + }; + if !last_prediction.request.can_collect_data { + return; + } + + let project_snapshot_task = last_prediction.project_snapshot.clone(); + + cx.spawn_in(window, async move |this, cx| { + let project_snapshot = project_snapshot_task.await; + this.update_in(cx, |this, window, cx| { + let Some(last_prediction) = this.last_prediction.as_mut() else { + return; + }; + + let LastPredictionState::Success { + feedback: feedback_state, + feedback_editor, + model_response_editor, + response, + .. 
+ } = &mut last_prediction.state + else { + return; + }; + + *feedback_state = Some(kind); + let text = feedback_editor.update(cx, |feedback_editor, cx| { + feedback_editor.set_placeholder_text( + "Submitted. Edit or submit again to change.", + window, + cx, + ); + feedback_editor.text(cx) + }); + cx.notify(); + + cx.defer_in(window, { + let model_response_editor = model_response_editor.downgrade(); + move |_, window, cx| { + if let Some(model_response_editor) = model_response_editor.upgrade() { + model_response_editor.focus_handle(cx).focus(window); + } + } + }); + + let kind = match kind { + Feedback::Positive => "positive", + Feedback::Negative => "negative", + }; + + telemetry::event!( + "Zeta2 Prediction Rated", + id = response.request_id, + kind = kind, + text = text, + request = last_prediction.request, + response = response, + project_snapshot = project_snapshot, + ); + }) + .log_err(); + }) + .detach(); + } + + fn focus_feedback(&mut self, window: &mut Window, cx: &mut Context) { + if let Some(last_prediction) = self.last_prediction.as_mut() { + if let LastPredictionState::Success { + feedback_editor, .. + } = &mut last_prediction.state + { + feedback_editor.focus_handle(cx).focus(window); + } + }; + } + fn render_options(&self, window: &mut Window, cx: &mut Context) -> Div { v_flex() .gap_2() @@ -530,6 +728,7 @@ impl Zeta2Inspector { h_flex() .gap_2() .items_end() + .child(self.max_retrieved_declarations.clone()) .child(self.max_prompt_bytes_input.clone()) .child(self.render_prompt_format_dropdown(window, cx)), ), @@ -598,8 +797,9 @@ impl Zeta2Inspector { ), ui::ToggleButtonSimple::new( "Inference", - cx.listener(|this, _, _, cx| { + cx.listener(|this, _, window, cx| { this.active_view = ActiveView::Inference; + this.focus_feedback(window, cx); cx.notify(); }), ), @@ -620,21 +820,24 @@ impl Zeta2Inspector { return None; }; - let (prompt_planning_time, inference_time, parsing_time) = match &prediction.state { - LastPredictionState::Success { - inference_time, - parsing_time, - prompt_planning_time, + let (prompt_planning_time, inference_time, parsing_time) = + if let LastPredictionState::Success { + response: + PredictEditsResponse { + debug_info: Some(debug_info), + .. + }, .. - } => ( - Some(*prompt_planning_time), - Some(*inference_time), - Some(*parsing_time), - ), - LastPredictionState::Requested | LastPredictionState::Failed { .. 
} => { + } = &prediction.state + { + ( + Some(debug_info.prompt_planning_time), + Some(debug_info.inference_time), + Some(debug_info.parsing_time), + ) + } else { (None, None, None) - } - }; + }; Some( v_flex() @@ -670,18 +873,26 @@ impl Zeta2Inspector { }) } - fn render_content(&self, cx: &mut Context) -> AnyElement { + fn render_content(&self, _: &mut Window, cx: &mut Context) -> AnyElement { + if !cx.has_flag::() { + return Self::render_message("`zeta2` feature flag is not enabled"); + } + match self.last_prediction.as_ref() { - None => v_flex() - .size_full() - .justify_center() - .items_center() - .child(Label::new("No prediction").size(LabelSize::Large)) - .into_any(), + None => Self::render_message("No prediction"), Some(prediction) => self.render_last_prediction(prediction, cx).into_any(), } } + fn render_message(message: impl Into) -> AnyElement { + v_flex() + .size_full() + .justify_center() + .items_center() + .child(Label::new(message).size(LabelSize::Large)) + .into_any() + } + fn render_last_prediction(&self, prediction: &LastPrediction, cx: &mut Context) -> Div { match &self.active_view { ActiveView::Context => div().size_full().child(prediction.context_editor.clone()), @@ -720,24 +931,105 @@ impl Zeta2Inspector { .flex_1() .gap_2() .h_full() - .p_4() - .child(ui::Headline::new("Model Response").size(ui::HeadlineSize::XSmall)) - .child(match &prediction.state { - LastPredictionState::Success { - model_response_editor, - .. - } => model_response_editor.clone().into_any_element(), - LastPredictionState::Requested => v_flex() - .p_4() + .child( + v_flex() + .flex_1() .gap_2() - .child(Label::new("Loading...").buffer_font(cx)) - .into_any(), - LastPredictionState::Failed { message } => v_flex() .p_4() - .gap_2() - .child(Label::new(message.clone()).buffer_font(cx)) - .into_any(), - }), + .child( + ui::Headline::new("Model Response") + .size(ui::HeadlineSize::XSmall), + ) + .child(match &prediction.state { + LastPredictionState::Success { + model_response_editor, + .. + } => model_response_editor.clone().into_any_element(), + LastPredictionState::Requested => v_flex() + .gap_2() + .child(Label::new("Loading...").buffer_font(cx)) + .into_any_element(), + LastPredictionState::Failed { message } => v_flex() + .gap_2() + .max_w_96() + .child(Label::new(message.clone()).buffer_font(cx)) + .into_any_element(), + }), + ) + .child(ui::divider()) + .child( + if prediction.request.can_collect_data + && let LastPredictionState::Success { + feedback_editor, + feedback: feedback_state, + .. 
+ } = &prediction.state + { + v_flex() + .key_context("Zeta2Feedback") + .on_action(cx.listener(Self::handle_rate_positive)) + .on_action(cx.listener(Self::handle_rate_negative)) + .gap_2() + .p_2() + .child(feedback_editor.clone()) + .child( + h_flex() + .justify_end() + .w_full() + .child( + ButtonLike::new("rate-positive") + .when( + *feedback_state == Some(Feedback::Positive), + |this| this.style(ButtonStyle::Filled), + ) + .child( + KeyBinding::for_action( + &Zeta2RatePredictionPositive, + cx, + ) + .size(TextSize::Small.rems(cx)), + ) + .child(ui::Icon::new(ui::IconName::ThumbsUp)) + .on_click(cx.listener( + |this, _, window, cx| { + this.handle_rate_positive( + &Zeta2RatePredictionPositive, + window, + cx, + ); + }, + )), + ) + .child( + ButtonLike::new("rate-negative") + .when( + *feedback_state == Some(Feedback::Negative), + |this| this.style(ButtonStyle::Filled), + ) + .child( + KeyBinding::for_action( + &Zeta2RatePredictionNegative, + cx, + ) + .size(TextSize::Small.rems(cx)), + ) + .child(ui::Icon::new(ui::IconName::ThumbsDown)) + .on_click(cx.listener( + |this, _, window, cx| { + this.handle_rate_negative( + &Zeta2RatePredictionNegative, + window, + cx, + ); + }, + )), + ), + ) + .into_any() + } else { + Empty.into_any_element() + }, + ), ), } } @@ -780,7 +1072,7 @@ impl Render for Zeta2Inspector { .child(ui::vertical_divider()) .children(self.render_stats()), ) - .child(self.render_content(cx)) + .child(self.render_content(window, cx)) } } diff --git a/crates/zeta_cli/Cargo.toml b/crates/zeta_cli/Cargo.toml index d81a5ae6d34fbe7cba25898fc4885baa84f1dfb2..19dafefbdcf8ed577a54e686b6b0c4ed90cf4512 100644 --- a/crates/zeta_cli/Cargo.toml +++ b/crates/zeta_cli/Cargo.toml @@ -14,6 +14,7 @@ path = "src/main.rs" [dependencies] anyhow.workspace = true +chrono.workspace = true clap.workspace = true client.workspace = true cloud_llm_client.workspace= true @@ -35,6 +36,7 @@ log.workspace = true node_runtime.workspace = true ordered-float.workspace = true paths.workspace = true +polars = { version = "0.51", features = ["lazy", "dtype-struct", "parquet"] } project.workspace = true prompt_store.workspace = true release_channel.workspace = true @@ -44,10 +46,10 @@ serde_json.workspace = true settings.workspace = true shellexpand.workspace = true smol.workspace = true +soa-rs = "0.8.1" terminal_view.workspace = true util.workspace = true watch.workspace = true -workspace-hack.workspace = true zeta.workspace = true zeta2.workspace = true zlog.workspace = true diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 38e0a13b9891a1a2af749d84d17032413b67a18f..45f6276bba7d3f9115359304fdd3ded08f3f1dbb 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -95,14 +95,17 @@ struct Zeta2Args { file_indexing_parallelism: usize, #[arg(long, default_value_t = false)] disable_imports_gathering: bool, + #[arg(long, default_value_t = u8::MAX)] + max_retrieved_definitions: u8, } #[derive(clap::ValueEnum, Default, Debug, Clone)] enum PromptFormat { - #[default] MarkedExcerpt, LabeledSections, OnlySnippets, + #[default] + NumberedLines, } impl Into for PromptFormat { @@ -111,6 +114,7 @@ impl Into for PromptFormat { Self::MarkedExcerpt => predict_edits_v3::PromptFormat::MarkedExcerpt, Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections, Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets, + Self::NumberedLines => predict_edits_v3::PromptFormat::NumLinesUniDiff, } } } @@ -299,6 +303,7 @@ impl Zeta2Args { fn to_options(&self, 
omit_excerpt_overlaps: bool) -> zeta2::ZetaOptions { zeta2::ZetaOptions { context: EditPredictionContextOptions { + max_retrieved_declarations: self.max_retrieved_definitions, use_imports: !self.disable_imports_gathering, excerpt: EditPredictionExcerptOptions { max_bytes: self.max_excerpt_bytes, diff --git a/crates/zeta_cli/src/retrieval_stats.rs b/crates/zeta_cli/src/retrieval_stats.rs index 3dbc56756d6a46b89b68cdeab54f589355cc5efe..bf1f78200ec5dd9262b6ae8937695b690155e8e2 100644 --- a/crates/zeta_cli/src/retrieval_stats.rs +++ b/crates/zeta_cli/src/retrieval_stats.rs @@ -13,8 +13,10 @@ use gpui::{AppContext, AsyncApp}; use language::OffsetRangeExt; use language::{BufferSnapshot, Point}; use ordered_float::OrderedFloat; +use polars::prelude::*; use project::{Project, ProjectEntryId, ProjectPath, Worktree}; use serde::{Deserialize, Serialize}; +use std::fs; use std::{ cmp::Reverse, collections::{HashMap, HashSet}, @@ -163,16 +165,23 @@ pub async fn retrieval_stats( } let files_hash = hasher.finish(); let file_snapshots = Arc::new(file_snapshots); + let target_cli_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../target/zeta_cli"); + fs::create_dir_all(&target_cli_dir).unwrap(); + let target_cli_dir = target_cli_dir.canonicalize().unwrap(); - let lsp_definitions_path = std::env::current_dir()?.join(format!( - "target/zeta2-lsp-definitions-{:x}.jsonl", + let lsp_cache_dir = target_cli_dir.join("cache"); + fs::create_dir_all(&lsp_cache_dir).unwrap(); + + let lsp_definitions_path = lsp_cache_dir.join(format!( + "{}-{:x}.jsonl", + worktree_path.file_stem().unwrap_or_default().display(), files_hash )); let mut lsp_definitions = HashMap::default(); let mut lsp_files = 0; - if std::fs::exists(&lsp_definitions_path)? { + if fs::exists(&lsp_definitions_path)? { log::info!( "Using cached LSP definitions from {}", lsp_definitions_path.display() @@ -246,8 +255,7 @@ pub async fn retrieval_stats( let files_len = files.len().min(file_limit.unwrap_or(usize::MAX)); let done_count = Arc::new(AtomicUsize::new(0)); - let (output_tx, mut output_rx) = mpsc::unbounded::(); - let mut output = std::fs::File::create("target/zeta-retrieval-stats.txt")?; + let (output_tx, output_rx) = mpsc::unbounded::(); let tasks = files .into_iter() @@ -271,8 +279,6 @@ pub async fn retrieval_stats( &snapshot, ); - println!("references: {}", references.len(),); - let imports = if options.context.use_imports { Imports::gather(&snapshot, Some(&project_file.parent_abs_path)) } else { @@ -309,65 +315,13 @@ pub async fn retrieval_stats( ) .await?; - // TODO: LSP returns things like locals, this filters out some of those, but potentially - // hides some retrieval issues. 
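// Aside — an illustrative sketch (not part of this diff): the per-reference outcome that
// the deleted code below computed inline can still be derived from the new per-candidate
// rows. The types here are hypothetical; the precedence mirrors the removed logic
// (gold match, then external definition, then in-excerpt, then no match).
#[derive(Debug, PartialEq)]
enum Outcome {
    Match { best_rank: usize },
    NoMatchDueToExternalLspDefinitions,
    ProbablyLocal,
    NoMatch,
}

struct CandidateRow {
    is_gold: bool,
}

fn classify(candidates: &[CandidateRow], gold_is_external: bool, gold_in_excerpt: bool) -> Outcome {
    // Candidates are assumed to be in rank order, as in `retrieved_definitions`.
    if let Some(best_rank) = candidates.iter().position(|c| c.is_gold) {
        Outcome::Match { best_rank }
    } else if gold_is_external {
        Outcome::NoMatchDueToExternalLspDefinitions
    } else if gold_in_excerpt {
        Outcome::ProbablyLocal
    } else {
        Outcome::NoMatch
    }
}

fn main() {
    let candidates = [CandidateRow { is_gold: false }, CandidateRow { is_gold: true }];
    assert_eq!(
        classify(&candidates, false, false),
        Outcome::Match { best_rank: 1 }
    );
}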
- if retrieve_result.definitions.is_empty() { - continue; - } - - let mut best_match = None; - let mut has_external_definition = false; - let mut in_excerpt = false; - for (index, retrieved_definition) in - retrieve_result.definitions.iter().enumerate() - { - for lsp_definition in &lsp_definitions { - let SourceRange { - path, - point_range, - offset_range, - } = lsp_definition; - let lsp_point_range = - SerializablePoint::into_language_point_range(point_range.clone()); - has_external_definition = has_external_definition - || path.is_absolute() - || path - .components() - .any(|component| component.as_os_str() == "node_modules"); - let is_match = path.as_path() - == retrieved_definition.path.as_std_path() - && retrieved_definition - .range - .contains_inclusive(&lsp_point_range); - if is_match { - if best_match.is_none() { - best_match = Some(index); - } - } - in_excerpt = in_excerpt - || retrieve_result.excerpt_range.as_ref().is_some_and( - |excerpt_range| excerpt_range.contains_inclusive(&offset_range), - ); - } - } - - let outcome = if let Some(best_match) = best_match { - RetrievalOutcome::Match { best_match } - } else if has_external_definition { - RetrievalOutcome::NoMatchDueToExternalLspDefinitions - } else if in_excerpt { - RetrievalOutcome::ProbablyLocal - } else { - RetrievalOutcome::NoMatch - }; - - let result = RetrievalStatsResult { - outcome, - path: path.clone(), + let result = ReferenceRetrievalResult { + cursor_path: path.clone(), identifier: reference.identifier, - point: query_point, + cursor_point: query_point, lsp_definitions, retrieved_definitions: retrieve_result.definitions, + excerpt_range: retrieve_result.excerpt_range, }; output_tx.unbounded_send(result).ok(); @@ -386,139 +340,610 @@ pub async fn retrieval_stats( drop(output_tx); - let results_task = cx.background_spawn(async move { - let mut results = Vec::new(); - while let Some(result) = output_rx.next().await { - output - .write_all(format!("{:#?}\n", result).as_bytes()) - .log_err(); - results.push(result) - } - results - }); + let df_task = cx.background_spawn(build_dataframe(output_rx)); futures::future::try_join_all(tasks).await?; - println!("Tasks completed"); - let results = results_task.await; - println!("Results received"); - - let mut references_count = 0; - - let mut included_count = 0; - let mut both_absent_count = 0; - - let mut retrieved_count = 0; - let mut top_match_count = 0; - let mut non_top_match_count = 0; - let mut ranking_involved_top_match_count = 0; - - let mut no_match_count = 0; - let mut no_match_none_retrieved = 0; - let mut no_match_wrong_retrieval = 0; - - let mut expected_no_match_count = 0; - let mut in_excerpt_count = 0; - let mut external_definition_count = 0; - - for result in results { - references_count += 1; - match &result.outcome { - RetrievalOutcome::Match { best_match } => { - included_count += 1; - retrieved_count += 1; - let multiple = result.retrieved_definitions.len() > 1; - if *best_match == 0 { - top_match_count += 1; - if multiple { - ranking_involved_top_match_count += 1; - } - } else { - non_top_match_count += 1; - } - } - RetrievalOutcome::NoMatch => { - if result.lsp_definitions.is_empty() { - included_count += 1; - both_absent_count += 1; - } else { - no_match_count += 1; - if result.retrieved_definitions.is_empty() { - no_match_none_retrieved += 1; - } else { - no_match_wrong_retrieval += 1; - } - } - } - RetrievalOutcome::NoMatchDueToExternalLspDefinitions => { - expected_no_match_count += 1; - external_definition_count += 1; + let mut df = 
df_task.await?; + + let run_id = format!( + "{}-{}", + worktree_path.file_stem().unwrap_or_default().display(), + chrono::Local::now().format("%Y%m%d_%H%M%S") + ); + let run_dir = target_cli_dir.join(run_id); + fs::create_dir(&run_dir).unwrap(); + + let parquet_path = run_dir.join("stats.parquet"); + let mut parquet_file = fs::File::create(&parquet_path)?; + + ParquetWriter::new(&mut parquet_file) + .finish(&mut df) + .unwrap(); + + let stats = SummaryStats::from_dataframe(df)?; + + let stats_path = run_dir.join("stats.txt"); + fs::write(&stats_path, format!("{}", stats))?; + + println!("{}", stats); + println!("\nWrote:"); + println!("- {}", relativize_path(&parquet_path).display()); + println!("- {}", relativize_path(&stats_path).display()); + println!("- {}", relativize_path(&lsp_definitions_path).display()); + + Ok("".to_string()) +} + +async fn build_dataframe( + mut output_rx: mpsc::UnboundedReceiver, +) -> Result { + use soa_rs::{Soa, Soars}; + + #[derive(Default, Soars)] + struct Row { + ref_id: u32, + cursor_path: String, + cursor_row: u32, + cursor_column: u32, + cursor_identifier: String, + gold_in_excerpt: bool, + gold_path: String, + gold_row: u32, + gold_column: u32, + gold_is_external: bool, + candidate_count: u32, + candidate_path: Option, + candidate_row: Option, + candidate_column: Option, + candidate_is_gold: Option, + candidate_rank: Option, + candidate_is_same_file: Option, + candidate_is_referenced_nearby: Option, + candidate_is_referenced_in_breadcrumb: Option, + candidate_reference_count: Option, + candidate_same_file_declaration_count: Option, + candidate_declaration_count: Option, + candidate_reference_line_distance: Option, + candidate_declaration_line_distance: Option, + candidate_excerpt_vs_item_jaccard: Option, + candidate_excerpt_vs_signature_jaccard: Option, + candidate_adjacent_vs_item_jaccard: Option, + candidate_adjacent_vs_signature_jaccard: Option, + candidate_excerpt_vs_item_weighted_overlap: Option, + candidate_excerpt_vs_signature_weighted_overlap: Option, + candidate_adjacent_vs_item_weighted_overlap: Option, + candidate_adjacent_vs_signature_weighted_overlap: Option, + candidate_path_import_match_count: Option, + candidate_wildcard_path_import_match_count: Option, + candidate_import_similarity: Option, + candidate_max_import_similarity: Option, + candidate_normalized_import_similarity: Option, + candidate_wildcard_import_similarity: Option, + candidate_normalized_wildcard_import_similarity: Option, + candidate_included_by_others: Option, + candidate_includes_others: Option, + } + let mut rows = Soa::::new(); + let mut next_ref_id = 0; + + while let Some(result) = output_rx.next().await { + let mut gold_is_external = false; + let mut gold_in_excerpt = false; + let cursor_path = result.cursor_path.as_unix_str(); + let cursor_row = result.cursor_point.row + 1; + let cursor_column = result.cursor_point.column + 1; + let cursor_identifier = result.identifier.name.to_string(); + let ref_id = next_ref_id; + next_ref_id += 1; + + for lsp_definition in result.lsp_definitions { + let SourceRange { + path: gold_path, + point_range: gold_point_range, + offset_range: gold_offset_range, + } = lsp_definition; + let lsp_point_range = + SerializablePoint::into_language_point_range(gold_point_range.clone()); + + gold_is_external = gold_is_external + || gold_path.is_absolute() + || gold_path + .components() + .any(|component| component.as_os_str() == "node_modules"); + + gold_in_excerpt = gold_in_excerpt + || result.excerpt_range.as_ref().is_some_and(|excerpt_range| 
{ + excerpt_range.contains_inclusive(&gold_offset_range) + }); + + let gold_row = gold_point_range.start.row; + let gold_column = gold_point_range.start.column; + let candidate_count = result.retrieved_definitions.len() as u32; + + for (candidate_rank, retrieved_definition) in + result.retrieved_definitions.iter().enumerate() + { + let candidate_is_gold = gold_path.as_path() + == retrieved_definition.path.as_std_path() + && retrieved_definition + .range + .contains_inclusive(&lsp_point_range); + + let candidate_row = retrieved_definition.range.start.row + 1; + let candidate_column = retrieved_definition.range.start.column + 1; + + let DeclarationScoreComponents { + is_same_file, + is_referenced_nearby, + is_referenced_in_breadcrumb, + reference_count, + same_file_declaration_count, + declaration_count, + reference_line_distance, + declaration_line_distance, + excerpt_vs_item_jaccard, + excerpt_vs_signature_jaccard, + adjacent_vs_item_jaccard, + adjacent_vs_signature_jaccard, + excerpt_vs_item_weighted_overlap, + excerpt_vs_signature_weighted_overlap, + adjacent_vs_item_weighted_overlap, + adjacent_vs_signature_weighted_overlap, + path_import_match_count, + wildcard_path_import_match_count, + import_similarity, + max_import_similarity, + normalized_import_similarity, + wildcard_import_similarity, + normalized_wildcard_import_similarity, + included_by_others, + includes_others, + } = retrieved_definition.components; + + rows.push(Row { + ref_id, + cursor_path: cursor_path.to_string(), + cursor_row, + cursor_column, + cursor_identifier: cursor_identifier.clone(), + gold_in_excerpt, + gold_path: gold_path.to_string_lossy().to_string(), + gold_row, + gold_column, + gold_is_external, + candidate_count, + candidate_path: Some(retrieved_definition.path.as_unix_str().to_string()), + candidate_row: Some(candidate_row), + candidate_column: Some(candidate_column), + candidate_is_gold: Some(candidate_is_gold), + candidate_rank: Some(candidate_rank as u32), + candidate_is_same_file: Some(is_same_file), + candidate_is_referenced_nearby: Some(is_referenced_nearby), + candidate_is_referenced_in_breadcrumb: Some(is_referenced_in_breadcrumb), + candidate_reference_count: Some(reference_count as u32), + candidate_same_file_declaration_count: Some(same_file_declaration_count as u32), + candidate_declaration_count: Some(declaration_count as u32), + candidate_reference_line_distance: Some(reference_line_distance), + candidate_declaration_line_distance: Some(declaration_line_distance), + candidate_excerpt_vs_item_jaccard: Some(excerpt_vs_item_jaccard), + candidate_excerpt_vs_signature_jaccard: Some(excerpt_vs_signature_jaccard), + candidate_adjacent_vs_item_jaccard: Some(adjacent_vs_item_jaccard), + candidate_adjacent_vs_signature_jaccard: Some(adjacent_vs_signature_jaccard), + candidate_excerpt_vs_item_weighted_overlap: Some( + excerpt_vs_item_weighted_overlap, + ), + candidate_excerpt_vs_signature_weighted_overlap: Some( + excerpt_vs_signature_weighted_overlap, + ), + candidate_adjacent_vs_item_weighted_overlap: Some( + adjacent_vs_item_weighted_overlap, + ), + candidate_adjacent_vs_signature_weighted_overlap: Some( + adjacent_vs_signature_weighted_overlap, + ), + candidate_path_import_match_count: Some(path_import_match_count as u32), + candidate_wildcard_path_import_match_count: Some( + wildcard_path_import_match_count as u32, + ), + candidate_import_similarity: Some(import_similarity), + candidate_max_import_similarity: Some(max_import_similarity), + candidate_normalized_import_similarity: 
Some(normalized_import_similarity), + candidate_wildcard_import_similarity: Some(wildcard_import_similarity), + candidate_normalized_wildcard_import_similarity: Some( + normalized_wildcard_import_similarity, + ), + candidate_included_by_others: Some(included_by_others as u32), + candidate_includes_others: Some(includes_others as u32), + }); } - RetrievalOutcome::ProbablyLocal => { - included_count += 1; - in_excerpt_count += 1; + + if result.retrieved_definitions.is_empty() { + rows.push(Row { + ref_id, + cursor_path: cursor_path.to_string(), + cursor_row, + cursor_column, + cursor_identifier: cursor_identifier.clone(), + gold_in_excerpt, + gold_path: gold_path.to_string_lossy().to_string(), + gold_row, + gold_column, + gold_is_external, + candidate_count, + ..Default::default() + }); } } } + let slices = rows.slices(); + + let RowSlices { + ref_id, + cursor_path, + cursor_row, + cursor_column, + cursor_identifier, + gold_in_excerpt, + gold_path, + gold_row, + gold_column, + gold_is_external, + candidate_path, + candidate_row, + candidate_column, + candidate_is_gold, + candidate_rank, + candidate_count, + candidate_is_same_file, + candidate_is_referenced_nearby, + candidate_is_referenced_in_breadcrumb, + candidate_reference_count, + candidate_same_file_declaration_count, + candidate_declaration_count, + candidate_reference_line_distance, + candidate_declaration_line_distance, + candidate_excerpt_vs_item_jaccard, + candidate_excerpt_vs_signature_jaccard, + candidate_adjacent_vs_item_jaccard, + candidate_adjacent_vs_signature_jaccard, + candidate_excerpt_vs_item_weighted_overlap, + candidate_excerpt_vs_signature_weighted_overlap, + candidate_adjacent_vs_item_weighted_overlap, + candidate_adjacent_vs_signature_weighted_overlap, + candidate_path_import_match_count, + candidate_wildcard_path_import_match_count, + candidate_import_similarity, + candidate_max_import_similarity, + candidate_normalized_import_similarity, + candidate_wildcard_import_similarity, + candidate_normalized_wildcard_import_similarity, + candidate_included_by_others, + candidate_includes_others, + } = slices; + + let df = DataFrame::new(vec![ + Series::new(PlSmallStr::from_str("ref_id"), ref_id).into(), + Series::new(PlSmallStr::from_str("cursor_path"), cursor_path).into(), + Series::new(PlSmallStr::from_str("cursor_row"), cursor_row).into(), + Series::new(PlSmallStr::from_str("cursor_column"), cursor_column).into(), + Series::new(PlSmallStr::from_str("cursor_identifier"), cursor_identifier).into(), + Series::new(PlSmallStr::from_str("gold_in_excerpt"), gold_in_excerpt).into(), + Series::new(PlSmallStr::from_str("gold_path"), gold_path).into(), + Series::new(PlSmallStr::from_str("gold_row"), gold_row).into(), + Series::new(PlSmallStr::from_str("gold_column"), gold_column).into(), + Series::new(PlSmallStr::from_str("gold_is_external"), gold_is_external).into(), + Series::new(PlSmallStr::from_str("candidate_count"), candidate_count).into(), + Series::new(PlSmallStr::from_str("candidate_path"), candidate_path).into(), + Series::new(PlSmallStr::from_str("candidate_row"), candidate_row).into(), + Series::new(PlSmallStr::from_str("candidate_column"), candidate_column).into(), + Series::new(PlSmallStr::from_str("candidate_is_gold"), candidate_is_gold).into(), + Series::new(PlSmallStr::from_str("candidate_rank"), candidate_rank).into(), + Series::new( + PlSmallStr::from_str("candidate_is_same_file"), + candidate_is_same_file, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_is_referenced_nearby"), + 
candidate_is_referenced_nearby, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_is_referenced_in_breadcrumb"), + candidate_is_referenced_in_breadcrumb, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_reference_count"), + candidate_reference_count, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_same_file_declaration_count"), + candidate_same_file_declaration_count, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_declaration_count"), + candidate_declaration_count, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_reference_line_distance"), + candidate_reference_line_distance, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_declaration_line_distance"), + candidate_declaration_line_distance, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_excerpt_vs_item_jaccard"), + candidate_excerpt_vs_item_jaccard, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_excerpt_vs_signature_jaccard"), + candidate_excerpt_vs_signature_jaccard, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_adjacent_vs_item_jaccard"), + candidate_adjacent_vs_item_jaccard, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_adjacent_vs_signature_jaccard"), + candidate_adjacent_vs_signature_jaccard, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_excerpt_vs_item_weighted_overlap"), + candidate_excerpt_vs_item_weighted_overlap, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_excerpt_vs_signature_weighted_overlap"), + candidate_excerpt_vs_signature_weighted_overlap, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_adjacent_vs_item_weighted_overlap"), + candidate_adjacent_vs_item_weighted_overlap, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_adjacent_vs_signature_weighted_overlap"), + candidate_adjacent_vs_signature_weighted_overlap, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_path_import_match_count"), + candidate_path_import_match_count, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_wildcard_path_import_match_count"), + candidate_wildcard_path_import_match_count, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_import_similarity"), + candidate_import_similarity, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_max_import_similarity"), + candidate_max_import_similarity, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_normalized_import_similarity"), + candidate_normalized_import_similarity, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_wildcard_import_similarity"), + candidate_wildcard_import_similarity, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_normalized_wildcard_import_similarity"), + candidate_normalized_wildcard_import_similarity, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_included_by_others"), + candidate_included_by_others, + ) + .into(), + Series::new( + PlSmallStr::from_str("candidate_includes_others"), + candidate_includes_others, + ) + .into(), + ])?; + + Ok(df) +} + +fn relativize_path(path: &Path) -> &Path { + path.strip_prefix(std::env::current_dir().unwrap()) + .unwrap_or(path) +} + +struct SummaryStats { + references_count: u32, + retrieved_count: u32, + top_match_count: u32, + non_top_match_count: u32, + ranking_involved_top_match_count: u32, + missing_none_retrieved: u32, + missing_wrong_retrieval: u32, + missing_external: u32, + in_excerpt_count: u32, +} - fn 
count_and_percentage(part: usize, total: usize) -> String { +impl SummaryStats { + fn from_dataframe(df: DataFrame) -> Result<Self> { + // TODO: use lazy more + let unique_refs = + df.unique::<(), ()>(Some(&["ref_id".into()]), UniqueKeepStrategy::Any, None)?; + let references_count = unique_refs.height() as u32; + + let gold_mask = df.column("candidate_is_gold")?.bool()?; + let gold_df = df.filter(&gold_mask)?; + let retrieved_count = gold_df.height() as u32; + + let top_match_mask = gold_df.column("candidate_rank")?.u32()?.equal(0); + let top_match_df = gold_df.filter(&top_match_mask)?; + let top_match_count = top_match_df.height() as u32; + + let ranking_involved_top_match_count = top_match_df + .column("candidate_count")? + .u32()? + .gt(1) + .sum() + .unwrap_or_default(); + + let non_top_match_count = (!top_match_mask).sum().unwrap_or(0); + + let not_retrieved_df = df + .lazy() + .group_by(&[col("ref_id"), col("candidate_count")]) + .agg(&[ + col("candidate_is_gold") + .fill_null(false) + .sum() + .alias("gold_count"), + col("gold_in_excerpt").sum().alias("gold_in_excerpt_count"), + col("gold_is_external") + .sum() + .alias("gold_is_external_count"), + ]) + .filter(col("gold_count").eq(lit(0))) + .collect()?; + + let in_excerpt_mask = not_retrieved_df + .column("gold_in_excerpt_count")? + .u32()? + .gt(0); + let in_excerpt_count = in_excerpt_mask.sum().unwrap_or(0); + + let missing_df = not_retrieved_df.filter(&!in_excerpt_mask)?; + + let missing_none_retrieved_mask = missing_df.column("candidate_count")?.u32()?.equal(0); + let missing_none_retrieved = missing_none_retrieved_mask.sum().unwrap_or(0); + let external_mask = missing_df.column("gold_is_external_count")?.u32()?.gt(0); + let missing_external = (missing_none_retrieved_mask & external_mask) + .sum() + .unwrap_or(0); + + let missing_wrong_retrieval = missing_df + .column("candidate_count")? + .u32()?
+ .gt(0) + .sum() + .unwrap_or(0); + + Ok(SummaryStats { + references_count, + retrieved_count, + top_match_count, + non_top_match_count, + ranking_involved_top_match_count, + missing_none_retrieved, + missing_wrong_retrieval, + missing_external, + in_excerpt_count, + }) + } + + fn count_and_percentage(part: u32, total: u32) -> String { format!("{} ({:.2}%)", part, (part as f64 / total as f64) * 100.0) } +} - println!(""); - println!("╮ references: {}", references_count); - println!( - "├─╮ included: {}", - count_and_percentage(included_count, references_count), - ); - println!( - "│ ├─╮ retrieved: {}", - count_and_percentage(retrieved_count, references_count) - ); - println!( - "│ │ ├─╮ top match : {}", - count_and_percentage(top_match_count, retrieved_count) - ); - println!( - "│ │ │ ╰─╴ involving ranking: {}", - count_and_percentage(ranking_involved_top_match_count, top_match_count) - ); - println!( - "│ │ ╰─╴ non-top match: {}", - count_and_percentage(non_top_match_count, retrieved_count) - ); - println!( - "│ ├─╴ both absent: {}", - count_and_percentage(both_absent_count, included_count) - ); - println!( - "│ ╰─╴ in excerpt: {}", - count_and_percentage(in_excerpt_count, included_count) - ); - println!( - "├─╮ no match: {}", - count_and_percentage(no_match_count, references_count) - ); - println!( - "│ ├─╴ none retrieved: {}", - count_and_percentage(no_match_none_retrieved, no_match_count) - ); - println!( - "│ ╰─╴ wrong retrieval: {}", - count_and_percentage(no_match_wrong_retrieval, no_match_count) - ); - println!( - "╰─╮ expected no match: {}", - count_and_percentage(expected_no_match_count, references_count) - ); - println!( - " ╰─╴ external definition: {}", - count_and_percentage(external_definition_count, expected_no_match_count) - ); +impl std::fmt::Display for SummaryStats { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let included = self.in_excerpt_count + self.retrieved_count; + let missing = self.references_count - included; + writeln!(f)?; + writeln!(f, "╮ references: {}", self.references_count)?; + writeln!( + f, + "├─╮ included: {}", + Self::count_and_percentage(included, self.references_count), + )?; + writeln!( + f, + "│ ├─╮ retrieved: {}", + Self::count_and_percentage(self.retrieved_count, self.references_count) + )?; + writeln!( + f, + "│ │ ├─╮ top match: {}", + Self::count_and_percentage(self.top_match_count, self.retrieved_count) + )?; + writeln!( + f, + "│ │ │ ╰─╴ involving ranking: {}", + Self::count_and_percentage(self.ranking_involved_top_match_count, self.top_match_count) + )?; + writeln!( + f, + "│ │ ╰─╴ non-top match: {}", + Self::count_and_percentage(self.non_top_match_count, self.retrieved_count) + )?; + writeln!( + f, + "│ ╰─╴ in excerpt: {}", + Self::count_and_percentage(self.in_excerpt_count, included) + )?; + writeln!( + f, + "╰─╮ missing: {}", + Self::count_and_percentage(missing, self.references_count) + )?; + writeln!( + f, + " ├─╮ none retrieved: {}", + Self::count_and_percentage(self.missing_none_retrieved, missing) + )?; + writeln!( + f, + " │ ╰─╴ external (expected): {}", + Self::count_and_percentage(self.missing_external, missing) + )?; + writeln!( + f, + " ╰─╴ wrong retrieval: {}", + Self::count_and_percentage(self.missing_wrong_retrieval, missing) + )?; + Ok(()) + } +} - println!(""); - println!("LSP definition cache at {}", lsp_definitions_path.display()); +#[derive(Debug)] +struct ReferenceRetrievalResult { + cursor_path: Arc, + cursor_point: Point, + identifier: Identifier, + excerpt_range: Option<Range<usize>>, +
lsp_definitions: Vec, + retrieved_definitions: Vec<RetrievedDefinition>, +} - Ok("".to_string()) +#[derive(Debug)] +struct RetrievedDefinition { + path: Arc<RelPath>, + range: Range<Point>, + score: f32, + #[allow(dead_code)] + retrieval_score: f32, + #[allow(dead_code)] + components: DeclarationScoreComponents, } struct RetrieveResult { @@ -828,39 +1253,3 @@ impl From for Point { } } } - -#[derive(Debug)] -struct RetrievalStatsResult { - outcome: RetrievalOutcome, - #[allow(dead_code)] - path: Arc, - #[allow(dead_code)] - identifier: Identifier, - #[allow(dead_code)] - point: Point, - #[allow(dead_code)] - lsp_definitions: Vec, - retrieved_definitions: Vec, -} - -#[derive(Debug)] -enum RetrievalOutcome { - Match { - /// Lowest index within retrieved_definitions that matches an LSP definition. - best_match: usize, - }, - ProbablyLocal, - NoMatch, - NoMatchDueToExternalLspDefinitions, -} - -#[derive(Debug)] -struct RetrievedDefinition { - path: Arc, - range: Range, - score: f32, - #[allow(dead_code)] - retrieval_score: f32, - #[allow(dead_code)] - components: DeclarationScoreComponents, -} diff --git a/crates/zlog/Cargo.toml b/crates/zlog/Cargo.toml index 4b758437d5e1608aaa68e86f90215f57b928e883..2799592c8ebebbe088c17644bcba0378052e49bc 100644 --- a/crates/zlog/Cargo.toml +++ b/crates/zlog/Cargo.toml @@ -18,7 +18,6 @@ default = [] collections.workspace = true chrono.workspace = true log.workspace = true -workspace-hack.workspace = true anyhow.workspace = true [dev-dependencies] diff --git a/crates/zlog_settings/Cargo.toml b/crates/zlog_settings/Cargo.toml index 8ec63cefe447d944b4556f1e72e481d5461391f1..39c3b6a193481a276aea61b3f11c9959fb7e0e4a 100644 --- a/crates/zlog_settings/Cargo.toml +++ b/crates/zlog_settings/Cargo.toml @@ -19,4 +19,3 @@ gpui.workspace = true collections.workspace = true settings.workspace = true zlog.workspace = true -workspace-hack.workspace = true diff --git a/crates/zlog_settings/src/zlog_settings.rs b/crates/zlog_settings/src/zlog_settings.rs index 1f695aa8ff5f8eb09d4cc0c2ae04282c469fb29c..abbce9a98c3106de0093a8586313fbda9750b12b 100644 --- a/crates/zlog_settings/src/zlog_settings.rs +++ b/crates/zlog_settings/src/zlog_settings.rs @@ -29,6 +29,4 @@ impl Settings for ZlogSettings { scopes: content.log.clone().unwrap(), } } - - fn import_from_vscode(_: &settings::VsCodeSettings, _: &mut settings::SettingsContent) {} } diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index eb542497238780d93974cbc2627ce3466e23049b..0b9fc289c540e43e9bef89b2c561c97a5c1928ef 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -8,7 +8,7 @@ - [Linux](./linux.md) - [Windows](./windows.md) - [Telemetry](./telemetry.md) -- [Workspace Persistence](./workspace-persistence.md) +- [Troubleshooting](./troubleshooting.md) - [Additional Learning Materials](./additional-learning-materials.md) # Configuration @@ -160,4 +160,5 @@ - [Using Debuggers](./development/debuggers.md) - [Glossary](./development/glossary.md) - [Release Process](./development/releases.md) +- [Release Notes](./development/release-notes.md) - [Debugging Crashes](./development/debugging-crashes.md) diff --git a/docs/src/additional-learning-materials.md b/docs/src/additional-learning-materials.md index 66ff935abf1134f4ff703ef83d03eb4772975398..9ff7b3bc5c02e207d0bbf44443d03c0523729833 100644 --- a/docs/src/additional-learning-materials.md +++ b/docs/src/additional-learning-materials.md @@ -1,3 +1,4 @@ # Additional Learning Materials - [Text Manipulation Kung Fu for the Aspiring Black Belt](https://zed.dev/blog/text-manipulation) +- [Hidden Gems: Team 
Edition Part 1](https://zed.dev/blog/hidden-gems-team-edition-part-1) diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index 17f4a620ee6a8df8e319fdf7377a7e064e0a4de3..e2aba0fe4134d038b9aed3a2dd19a7359618c139 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -8,7 +8,7 @@ Learn about all the settings you can customize in Zed's Agent Panel. If you're using [Zed's hosted LLM service](./subscription.md), it sets `claude-sonnet-4` as the default model for agentic work (agent panel, inline assistant) and `gpt-5-nano` as the default "fast" model (thread summarization, git commit messages). If you're not subscribed or want to change these defaults, you can manually edit the `default_model` object in your settings: -```json +```json [settings] { "agent": { "default_model": { @@ -27,7 +27,7 @@ You can assign distinct and specific models for the following AI-powered feature - Inline assistant model: Used for the inline assistant feature - Commit message model: Used for generating Git commit messages -```json +```json [settings] { "agent": { "default_model": { @@ -64,7 +64,7 @@ The models you specify here are always used in _addition_ to your [default model For example, the following configuration will generate two outputs for every assist. One with Claude Sonnet 4 (the default model), and one with GPT-5-mini. -```json +```json [settings] { "agent": { "default_model": { @@ -85,7 +85,7 @@ One with Claude Sonnet 4 (the default model), and one with GPT-5-mini. Specify a custom temperature for a provider and/or model: -```json +```json [settings] "model_parameters": [ // To set parameters for all requests to OpenAI models: { @@ -114,7 +114,7 @@ Note that some of these settings are also surfaced in the Agent Panel's settings Use the `default_view` setting to change the default view of the Agent Panel. You can choose between `thread` (the default) and `text_thread`: -```json +```json [settings] { "agent": { "default_view": "text_thread" @@ -126,7 +126,7 @@ You can choose between `thread` (the default) and `text_thread`: Use the `agent_font_size` setting to change the font size of rendered agent responses in the panel. -```json +```json [settings] { "agent": { "agent_font_size": 18 @@ -141,7 +141,7 @@ Use the `agent_font_size` setting to change the font size of rendered agent resp Control whether to allow the agent to run commands without asking you for permission. The default value is `false`. -```json +```json [settings] { "agent": { "always_allow_tool_actions": true @@ -154,7 +154,7 @@ The default value is `false`. Control whether to display review actions (accept & reject) in single buffers after the agent is done performing edits. The default value is `false`. -```json +```json [settings] { "agent": { "single_file_review": true @@ -169,7 +169,7 @@ When set to false, these controls are only available in the multibuffer review t Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `false`. -```json +```json [settings] { "agent": { "play_sound_when_agent_done": true @@ -182,7 +182,7 @@ The default value is `false`. Use the `message_editor_min_lines` setting to control minimum number of lines of height the agent message editor should have. It is set to `4` by default, and the max number of lines is always double of the minimum. 
-```json +```json [settings] { "agent": { "message_editor_min_lines": 4 @@ -196,7 +196,7 @@ Make a modifier (`cmd` on macOS, `ctrl` on Linux) required to send messages. This is encouraged for more thoughtful prompt crafting. The default value is `false`. -```json +```json [settings] { "agent": { "use_modifier_to_send": true @@ -209,7 +209,7 @@ The default value is `false`. Use the `expand_edit_card` setting to control whether edit cards show the full diff in the Agent Panel. It is set to `true` by default, but if set to false, the card's height is capped to a certain number of lines, requiring a click to be expanded. -```json +```json [settings] { "agent": { "expand_edit_card": false @@ -222,7 +222,7 @@ It is set to `true` by default, but if set to false, the card's height is capped Use the `expand_terminal_card` setting to control whether terminal cards show the command output in the Agent Panel. It is set to `true` by default, but if set to false, the card will be fully collapsed even while the command is running, requiring a click to be expanded. -```json +```json [settings] { "agent": { "expand_terminal_card": false @@ -235,7 +235,7 @@ It is set to `true` by default, but if set to false, the card will be fully coll Control whether to display the thumbs up/down buttons at the bottom of each agent response, allowing to give Zed feedback about the agent's performance. The default value is `true`. -```json +```json [settings] { "agent": { "enable_feedback": false diff --git a/docs/src/ai/ai-improvement.md b/docs/src/ai/ai-improvement.md index 972b5908c08c6a7549553b0ae237714283c4b937..6d7fe8fdb172afa17f494723bb16b1cc69c9336c 100644 --- a/docs/src/ai/ai-improvement.md +++ b/docs/src/ai/ai-improvement.md @@ -63,7 +63,7 @@ Zed will intentionally exclude certain files from Predictive Edits entirely, eve You can inspect this exclusion list by opening `zed: open default settings` from the command palette: -```json +```json [settings] { "edit_predictions": { // A list of globs representing files that edit predictions should be disabled for. @@ -83,7 +83,7 @@ You can inspect this exclusion list by opening `zed: open default settings` from Users may explicitly exclude additional paths and/or file extensions by adding them to [`edit_predictions.disabled_globs`](https://zed.dev/docs/configuring-zed#edit-predictions) in their Zed settings.json: -```json +```json [settings] { "edit_predictions": { "disabled_globs": ["secret_dir/*", "**/*.log"] diff --git a/docs/src/ai/configuration.md b/docs/src/ai/configuration.md index c11a0fd65c45ce46598596182fbf8fb0c147380a..e2cd9ad0201933a7ba4f1239615cf44ccdb7f3f6 100644 --- a/docs/src/ai/configuration.md +++ b/docs/src/ai/configuration.md @@ -14,7 +14,7 @@ When using AI in Zed, you can configure multiple dimensions: We want to respect users who want to use Zed without interacting with AI whatsoever. To do that, add the following key to your `settings.json`: -```json +```json [settings] { "disable_ai": true } diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 7843b08ff7d552c0c05366e60feb31dbd31a8ae2..3c653284b015f33c9457338c6932289e95c6babd 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -1,7 +1,11 @@ # Edit Prediction -Edit Prediction is Zed's native mechanism for predicting the code you want to write through AI. 
-Each keystroke sends a new request to our [open source, open dataset Zeta model](https://huggingface.co/zed-industries/zeta) and it returns with individual or multi-line suggestions that can be quickly accepted by pressing `tab`. +Edit Prediction is Zed's mechanism for predicting the code you want to write through AI. +Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`. + +The default provider is [Zeta, Zed's own open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../accounts.md#what-features-require-signing-in). + +Alternatively, you can use other providers like [GitHub Copilot](#github-copilot) (or [Enterprise](#github-copilot-enterprise)) or [Supermaven](#supermaven). ## Configuring Zeta @@ -21,9 +25,9 @@ Zed's Edit Prediction comes with two different display modes: Toggle between them via the `mode` key: -```json +```json [settings] "edit_predictions": { - "mode": "eager" | "subtle" + "mode": "eager" // or "subtle" }, ``` @@ -50,7 +54,7 @@ See the [Configuring GitHub Copilot](#github-copilot) and [Configuring Supermave By default, `tab` is used to accept edit predictions. You can use another keybinding by inserting this in your keymap: -```json +```json [keymap] { "context": "Editor && edit_prediction", "bindings": { @@ -62,7 +66,7 @@ By default, `tab` is used to accept edit predictions. You can use another keybin When there's a [conflict with the `tab` key](#edit-predictions-conflict), Zed uses a different context to accept keybindings (`edit_prediction_conflict`). If you want to use a different one, you can insert this in your keymap: -```json +```json [keymap] { "context": "Editor && edit_prediction_conflict", "bindings": { @@ -75,7 +79,7 @@ If your keybinding contains a modifier (`ctrl` in the example above), it will al You can also bind this action to keybind without a modifier. In that case, Zed will use the default modifier (`alt`) to preview the edit prediction. -```json +```json [keymap] { "context": "Editor && edit_prediction_conflict", "bindings": { @@ -88,7 +92,7 @@ You can also bind this action to keybind without a modifier. In that case, Zed w To maintain the use of the modifier key for accepting predictions when there is a language server completions menu, but allow `tab` to accept predictions regardless of cursor position, you can specify the context further with `showing_completions`: -```json +```json [keymap] { "context": "Editor && edit_prediction_conflict && !showing_completions", "bindings": { @@ -102,7 +106,7 @@ To maintain the use of the modifier key for accepting predictions when there is The keybinding example below causes `alt-tab` to always be used instead of sometimes using `tab`. You might want this in order to have just one keybinding to use for accepting edit predictions, since the behavior of `tab` varies based on context. 
-```json +```json [keymap] { "context": "Editor && edit_prediction", "bindings": { @@ -126,7 +130,7 @@ The keybinding example below causes `alt-tab` to always be used instead of somet If `"vim_mode": true` is set within `settings.json`, then additional bindings are needed after the above to return `tab` to its original behavior: -```json +```json [keymap] { "context": "(VimControl && !menu) || vim_mode == replace || vim_mode == waiting", "bindings": { @@ -145,7 +149,7 @@ If `"vim_mode": true` is set within `settings.json`, then additional bindings ar While `tab` and `alt-tab` are supported on Linux, `alt-l` is displayed instead. If your window manager does not reserve `alt-tab`, and you would prefer to use `tab` and `alt-tab`, include these bindings in `keymap.json`: -```json +```json [keymap] { "context": "Editor && edit_prediction", "bindings": { @@ -170,7 +174,7 @@ Zed requires at least one keybinding for the {#action editor::AcceptEditPredicti If you have previously bound the default keybindings to different actions in the global context, you will not be able to preview or accept edit predictions. For example: -```json +```json [keymap] [ // Your keymap { @@ -184,7 +188,7 @@ If you have previously bound the default keybindings to different actions in the To fix this, you can specify your own keybinding for accepting edit predictions: -```json +```json [keymap] [ // ... { @@ -208,7 +212,7 @@ Alternatively, if you have Zed set as your provider, consider [using Subtle Mode To not have predictions appear automatically as you type, set this within `settings.json`: -```json +```json [settings] { "show_edit_predictions": false } @@ -221,7 +225,7 @@ Still, you can trigger edit predictions manually by executing {#action editor::S To not have predictions appear automatically as you type when working with a specific language, set this within `settings.json`: -```json +```json [settings] { "language": { "python": { @@ -235,7 +239,7 @@ To not have predictions appear automatically as you type when working with a spe To disable edit predictions for specific directories or files, set this within `settings.json`: -```json +```json [settings] { "edit_predictions": { "disabled_globs": ["~/.config/zed/settings.json"] @@ -247,7 +251,7 @@ To disable edit predictions for specific directories or files, set this within ` To completely turn off edit prediction across all providers, explicitly set the settings to `none`, like so: -```json +```json [settings] "features": { "edit_prediction_provider": "none" }, @@ -257,7 +261,7 @@ To completely turn off edit prediction across all providers, explicitly set the To use GitHub Copilot as your provider, set this within `settings.json`: -```json +```json [settings] { "features": { "edit_prediction_provider": "copilot" @@ -271,7 +275,7 @@ You should be able to sign-in to GitHub Copilot by clicking on the Copilot icon If your organization uses GitHub Copilot Enterprise, you can configure Zed to use your enterprise instance by specifying the enterprise URI in your `settings.json`: -```json +```json [settings] { "edit_predictions": { "copilot": { @@ -294,7 +298,7 @@ Copilot can provide multiple completion alternatives, and these can be navigated To use Supermaven as your provider, set this within `settings.json`: -```json +```json [settings] { "features": { "edit_prediction_provider": "supermaven" diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index abe14865902ee261b157cf653b0d556cf83d7c71..054ffb08f7605cd0f690dfbddc9f83e42a1f91a1 100644 --- 
a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -3,9 +3,10 @@ Zed supports terminal-based agents through the [Agent Client Protocol (ACP)](https://agentclientprotocol.com). Currently, [Gemini CLI](https://github.com/google-gemini/gemini-cli) serves as the reference implementation. -[Claude Code](https://www.anthropic.com/claude-code) is also included by default, and you can [add custom ACP-compatible agents](#add-custom-agents) as well. +[Claude Code](https://www.anthropic.com/claude-code) and [Codex](https://developers.openai.com/codex) are also included by default, and you can [add custom ACP-compatible agents](#add-custom-agents) as well. -Zed's affordance for external agents is strictly UI-based; the billing and legal/terms arrangement is directly between you and the agent provider. Zed does not charge for use of external agents, and our [zero-data retention agreements/privacy guarantees](./ai-improvement.md) are **_only_** applicable for Zed's hosted models. +> Note that Zed's affordance for external agents is strictly UI-based; the billing and legal/terms arrangement is directly between you and the agent provider. +> Zed does not charge for use of external agents, and our [zero-data retention agreements/privacy guarantees](./ai-improvement.md) are **_only_** applicable for Zed's hosted models. ## Gemini CLI {#gemini-cli} @@ -20,7 +21,7 @@ As of [Zed Stable v0.201.5](https://zed.dev/releases/stable/0.201.5) you should If you'd like to bind this to a keyboard shortcut, you can do so by editing your `keymap.json` file via the `zed: open keymap` command to include: -```json +```json [keymap] [ { "bindings": { @@ -36,7 +37,7 @@ The first time you create a Gemini CLI thread, Zed will install [@google/gemini- By default, Zed will use this managed version of Gemini CLI even if you have it installed globally. However, you can configure it to use a version in your `PATH` by adding this to your settings: -```json +```json [settings] { "agent_servers": { "gemini": { @@ -63,7 +64,7 @@ For more information, see the [Gemini CLI docs](https://github.com/google-gemini Similar to Zed's first-party agent, you can use Gemini CLI to do anything that you need. And to give it context, you can @-mention files, recent threads, symbols, or fetch the web. -> Note that some first-party agent features don't yet work with Gemini CLI: editing past messages, resuming threads from history, checkpointing, and using the agent in SSH projects. +> Note that some first-party agent features don't yet work with Gemini CLI: editing past messages, resuming threads from history, and checkpointing. > We hope to add these features in the near future. ## Claude Code @@ -77,7 +78,7 @@ Open the agent panel with {#kb agent::ToggleFocus}, and then use the `+` button If you'd like to bind this to a keyboard shortcut, you can do so by editing your `keymap.json` file via the `zed: open keymap` command to include: -```json +```json [keymap] [ { "bindings": { @@ -97,7 +98,21 @@ To ensure you're using your billing method of choice, [open a new Claude Code th The first time you create a Claude Code thread, Zed will install [@zed-industries/claude-code-acp](https://github.com/zed-industries/claude-code-acp). This installation is only available to Zed and is kept up to date as you use the agent. -Zed will always use this managed version of Claude Code even if you have it installed globally. 
+Zed will always use this managed version of the Claude Code adapter, which includes a vendored version of the Claude Code CLI, even if you have it installed globally. + +If you want to override the executable used by the adapter, you can set the `CLAUDE_CODE_EXECUTABLE` environment variable in your settings to the path of your preferred executable. + +```json [settings] +{ + "agent_servers": { + "claude": { + "env": { + "CLAUDE_CODE_EXECUTABLE": "/path/to/alternate-claude-code-executable" + } + } + } +} +``` ### Usage @@ -111,7 +126,7 @@ However, the SDK doesn't yet expose everything needed to fully support all of th - [Subagents](https://docs.anthropic.com/en/docs/claude-code/sub-agents) are supported. - [Hooks](https://docs.anthropic.com/en/docs/claude-code/hooks-guide) are currently _not_ supported. -> Also note that some [first-party agent](./agent-panel.md) features don't yet work with Claude Code: editing past messages, resuming threads from history, checkpointing, and using the agent in SSH projects. +> Also note that some [first-party agent](./agent-panel.md) features don't yet work with Claude Code: editing past messages, resuming threads from history, and checkpointing. > We hope to add these features in the near future. #### CLAUDE.md @@ -120,11 +135,58 @@ Claude Code in Zed will automatically use any `CLAUDE.md` file found in your pro If you don't have a `CLAUDE.md` file, you can ask Claude Code to create one for you through the `init` slash command. +## Codex CLI + +You can also run [Codex CLI](https://github.com/openai/codex) directly via Zed's [agent panel](./agent-panel.md). +Under the hood, Zed runs Codex CLI and communicates with it over ACP, through [a dedicated adapter](https://github.com/zed-industries/codex-acp). + +### Getting Started + +As of Zed Stable v0.208 you should be able to use Codex directly from Zed. Open the agent panel with {#kb agent::ToggleFocus}, and then use the `+` button in the top right to start a new Codex thread. + +If you'd like to bind this to a keyboard shortcut, you can do so by editing your `keymap.json` file via the `zed: open keymap` command to include: + +```json [keymap] +[ + { + "bindings": { + "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "codex" }] + } + } +] +``` + +### Authentication + +Authentication to Zed's Codex installation is decoupled entirely from Zed's agent. That is to say, an OpenAI API key added via the [Zed Agent's settings](./llm-providers.md#openai) will _not_ be utilized by Codex for authentication and billing. + +To ensure you're using your billing method of choice, [open a new Codex thread](./agent-panel.md#new-thread). The first time, you will be prompted to authenticate with one of three methods: + +1. Login with ChatGPT - allows you to use your existing, paid ChatGPT subscription. _Note: This method isn't currently supported in remote projects_ +2. `CODEX_API_KEY` - uses an API key you have set in your environment under the variable `CODEX_API_KEY`. +3. `OPENAI_API_KEY` - uses an API key you have set in your environment under the variable `OPENAI_API_KEY`. + +If you are already logged in and want to change your authentication method, type `/logout` in the thread and authenticate again. + +#### Installation + +The first time you create a Codex thread, Zed will install [codex-acp](https://github.com/zed-industries/codex-acp). This installation is only available to Zed and is kept up to date as you use the agent. + +Zed will always use this managed version of Codex even if you have it installed globally. 
+ +### Usage + +Similar to Zed's first-party agent, you can use Codex to do anything that you need. +And to give it context, you can @-mention files, symbols, or fetch the web. + +> Note that some first-party agent features don't yet work with Codex: editing past messages, resuming threads from history, and checkpointing. +> We hope to add these features in the near future. + ## Add Custom Agents {#add-custom-agents} You can run any agent speaking ACP in Zed by changing your settings as follows: -```json +```json [settings] { "agent_servers": { "Custom Agent": { @@ -145,3 +207,10 @@ You can also specify a custom path, arguments, or environment for the builtin in When using external agents in Zed, you can access the debug view via with `dev: open acp logs` from the Command Palette. This lets you see the messages being sent and received between Zed and the agent. ![The debug view for ACP logs.](https://zed.dev/img/acp/acp-logs.webp) + +## MCP Servers + +Note that for external agents, access to MCP servers [installed from Zed](./mcp.md) may vary depending on the ACP agent implementation. + +Regarding the built-in ones, Claude Code and Codex both support it, and Gemini CLI does not yet. +In the meantime, learn how to add MCP server support to Gemini CLI through [their documentation](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#using-mcp-servers). diff --git a/docs/src/ai/inline-assistant.md b/docs/src/ai/inline-assistant.md index 41923e85da09c2eed067d40518c89088d653b7b7..d3caff6f45903c549073b97105a3310236d64478 100644 --- a/docs/src/ai/inline-assistant.md +++ b/docs/src/ai/inline-assistant.md @@ -18,7 +18,7 @@ A useful pattern here is to create a thread in the Agent Panel, and then mention To create a custom keybinding that prefills a prompt, you can add the following format in your keymap: -```json +```json [keymap] [ { "context": "Editor && mode == full", diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index aeed1be17370c28ad67d8ffb7d49fadc5a77cdce..d78078f83e950d63449e4932a97c288f9fbee6a8 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -43,7 +43,7 @@ Ensure your credentials have the following permissions set up: Your IAM policy should look similar to: -```json +```json [settings] { "Version": "2012-10-17", "Statement": [ @@ -65,7 +65,7 @@ With that done, choose one of the two authentication methods: 1. Ensure you have the AWS CLI installed and configured with a named profile 2. Open your `settings.json` (`zed: open settings`) and include the `bedrock` key under `language_models` with the following settings: - ```json + ```json [settings] { "language_models": { "bedrock": { @@ -120,7 +120,7 @@ Zed will also use the `ANTHROPIC_API_KEY` environment variable if it's defined. You can add custom models to the Anthropic provider by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "anthropic": { @@ -147,14 +147,14 @@ Custom models will be listed in the model dropdown in the Agent Panel. 
You can configure a model to use [extended thinking](https://docs.anthropic.com/en/docs/about-claude/models/extended-thinking-models) (if it supports it) by changing the mode in your model's configuration to `thinking`, for example: -```json +```json [settings] { "name": "claude-sonnet-4-latest", "display_name": "claude-sonnet-4-thinking", "max_tokens": 200000, "mode": { "type": "thinking", - "budget_tokens": 4_096 + "budget_tokens": 4096 } } ``` @@ -174,7 +174,7 @@ Zed will also use the `DEEPSEEK_API_KEY` environment variable if it's defined. The Zed agent comes pre-configured to use the latest version for common models (DeepSeek Chat, DeepSeek Reasoner). If you wish to use alternate models or customize the API endpoint, you can do so by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "deepseek": { @@ -231,7 +231,7 @@ By default, Zed will use `stable` versions of models, but you can use specific v Here is an example of a custom Google AI model you could add to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "google": { @@ -286,7 +286,7 @@ The Zed agent comes pre-configured with several Mistral models (codestral-latest All the default models support tool use. If you wish to use alternate models or customize their parameters, you can do so by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "mistral": { @@ -338,7 +338,7 @@ See [get_max_tokens in ollama.rs](https://github.com/zed-industries/zed/blob/mai Depending on your hardware or use-case you may wish to limit or increase the context length for a specific model via settings.json: -```json +```json [settings] { "language_models": { "ollama": { @@ -406,7 +406,7 @@ Zed will also use the `OPENAI_API_KEY` environment variable if it's defined. The Zed agent comes pre-configured to use the latest version for common models (GPT-5, GPT-5 mini, o4-mini, GPT-4.1, and others). To use alternate models, perhaps a preview release, or if you wish to control the request parameters, you can do so by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "openai": { @@ -446,7 +446,7 @@ Then, fill up the input fields available in the modal. To do it via your `settings.json`, add the following snippet under `language_models`: -```json +```json [settings] { "language_models": { "openai_compatible": { @@ -499,7 +499,7 @@ Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined. You can add custom models to the OpenRouter provider by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "open_router": { @@ -555,7 +555,7 @@ Supported fields (all optional): Example adding routing preferences to a model: -```json +```json [settings] { "language_models": { "open_router": { @@ -613,7 +613,7 @@ The xAI API key will be saved in your keychain. Zed will also use the `XAI_API_K The Zed agent comes pre-configured with common Grok models. If you wish to use alternate models or customize their parameters, you can do so by adding the following to your Zed `settings.json`: -```json +```json [settings] { "language_models": { "x_ai": { @@ -643,7 +643,7 @@ The Zed agent comes pre-configured with common Grok models. If you wish to use a You can use a custom API endpoint for different providers, as long as it's compatible with the provider's API structure. 
To do so, add the following to your `settings.json`: -```json +```json [settings] { "language_models": { "some-provider": { diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md index 9f79bbb9ca66a5babf0a1990a513fa7016466fbd..8fa36675ec46ed6ae1830dd32196815c34ab587f 100644 --- a/docs/src/ai/mcp.md +++ b/docs/src/ai/mcp.md @@ -11,7 +11,7 @@ Check out the [Anthropic news post](https://www.anthropic.com/news/model-context ### As Extensions One of the ways you can use MCP servers in Zed is by exposing them as an extension. -To learn how to create your own, check out the [MCP Server Extensions](../extensions/mcp-extensions.md) page for more details. +Check out the [MCP Server Extensions](../extensions/mcp-extensions.md) page to learn how to create your own. Thanks to our awesome community, many MCP servers have already been added as extensions. You can check which ones are available via any of these routes: @@ -20,7 +20,7 @@ You can check which ones are available via any of these routes: 2. in the app, open the Command Palette and run the `zed: extensions` action 3. in the app, go to the Agent Panel's top-right menu and look for the "View Server Extensions" menu item -In any case, here are some of the ones available: +In any case, here are some popular available servers: - [Context7](https://zed.dev/extensions/context7-mcp-server) - [GitHub](https://zed.dev/extensions/github-mcp-server) @@ -37,7 +37,7 @@ In any case, here are some of the ones available: Creating an extension is not the only way to use MCP servers in Zed. You can connect them by adding their commands directly to your `settings.json`, like so: -```json +```json [settings] { "context_servers": { "your-mcp-server": { @@ -57,9 +57,9 @@ From there, you can add it through the modal that appears when you click the "Ad ### Configuration Check -Regardless of how you've installed MCP servers, whether as an extension or adding them directly, most servers out there still require some sort of configuration as part of the set up process. +Regardless of how you've installed MCP servers, whether as an extension or adding them directly, most servers out there still require some sort of configuration as part of the setup process. -In the case of server extensions, after installing it, Zed will pop up a modal displaying what is required for you to properly set it up. +In the case of extensions, after installing it, Zed will pop up a modal displaying what is required for you to properly set it up. For example, the GitHub MCP extension requires you to add a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens). In the case of custom servers, make sure you check the provider documentation to determine what type of command, arguments, and environment variables need to be added to the JSON. @@ -68,18 +68,18 @@ To check if your MCP server is properly configured, go to the Agent Panel's sett If they're running correctly, the indicator will be green and its tooltip will say "Server is active". If not, other colors and tooltip messages will indicate what is happening. -### Using it in the Agent Panel +### Agent Panel Usage Once installation is complete, you can return to the Agent Panel and start prompting. Some models are better than others when it comes to picking up tools from MCP servers. Mentioning your server by name always helps the model to pick it up. 
-However, if you want to ensure a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on. +However, if you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on. As an example, [the Dagger team suggests](https://container-use.com/agent-integrations#zed) doing that with their [Container Use MCP server](https://zed.dev/extensions/mcp-server-container-use): -```json +```json [settings] "agent": { "profiles": { "container-use": { @@ -127,3 +127,10 @@ As an example, [the Dagger team suggests](https://container-use.com/agent-integr Zed's Agent Panel includes the `agent.always_allow_tool_actions` setting that, if set to `false`, will require you to give permission for any editing attempt as well as tool calls coming from MCP servers. You can change this by setting this key to `true` in either your `settings.json` or through the Agent Panel's settings view. + +### External Agents + +Note that for [external agents](./external-agents.md) connected through the [Agent Client Protocol](https://agentclientprotocol.com/), access to MCP servers installed from Zed may vary depending on the ACP agent implementation. + +Regarding the built-in ones, Claude Code and Codex both support it, and Gemini CLI does not yet. +In the meantime, learn how to add MCP server support to Gemini CLI through [their documentation](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#using-mcp-servers). 
diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md index 18ae7e4d00ef8605b292a6b40943f786e30c3c56..5b379fc75435c14ac46587f7449c7a5c54becfcf 100644 --- a/docs/src/ai/models.md +++ b/docs/src/ai/models.md @@ -21,6 +21,10 @@ We’re working hard to expand the models supported by Zed’s subscription offe | | Anthropic | Output | $15.00 | $16.50 | | | Anthropic | Input - Cache Write | $3.75 | $4.125 | | | Anthropic | Input - Cache Read | $0.30 | $0.33 | +| Claude Haiku 4.5 | Anthropic | Input | $1.00 | $1.10 | +| | Anthropic | Output | $5.00 | $5.50 | +| | Anthropic | Input - Cache Write | $1.25 | $1.375 | +| | Anthropic | Input - Cache Read | $0.10 | $0.11 | | GPT-5 | OpenAI | Input | $1.25 | $1.375 | | | OpenAI | Output | $10.00 | $11.00 | | | OpenAI | Cached Input | $0.125 | $0.1375 | @@ -40,9 +44,6 @@ We’re working hard to expand the models supported by Zed’s subscription offe | Grok 4 Fast | X.ai | Input | $0.20 | $0.22 | | | X.ai | Output | $0.50 | $0.55 | | | X.ai | Cached Input | $0.05 | $0.055 | -| Grok 4 Fast | X.ai | Input | $0.20 | $0.22 | -| | X.ai | Output | $0.50 | $0.55 | -| | X.ai | Cached Input | $0.05 | $0.055 | | Grok 4 (Non-Reasoning) | X.ai | Input | $0.20 | $0.22 | | | X.ai | Output | $0.50 | $0.55 | | | X.ai | Cached Input | $0.05 | $0.055 | @@ -65,6 +66,7 @@ A context window is the maximum span of text and code an LLM can consider at onc | Claude Opus 4.1 | Anthropic | 200k | | Claude Sonnet 4 | Anthropic | 200k | | Claude Sonnet 3.7 | Anthropic | 200k | +| Claude Haiku 4.5 | Anthropic | 200k | | GPT-5 | OpenAI | 400k | | GPT-5 mini | OpenAI | 400k | | GPT-5 nano | OpenAI | 400k | diff --git a/docs/src/ai/overview.md b/docs/src/ai/overview.md index ca06a4b1ed53d1fc87136a1d5e82da35552082aa..e1a9cb77a996e5e2d42de7f825cd100535b8dcc6 100644 --- a/docs/src/ai/overview.md +++ b/docs/src/ai/overview.md @@ -18,11 +18,11 @@ Learn how to get started using AI with Zed and all its capabilities. - [Rules](./rules.md): How to define rules for AI interactions. -- [Tools](./tools.md): Explore the tools that enable agentic capabilities. +- [Tools](./tools.md): Explore the tools that power Zed's built-in agent. -- [Model Context Protocol](./mcp.md): Learn about how to install and configure MCP servers. +- [Model Context Protocol](./mcp.md): Learn about how to configure and use MCP servers. -- [Inline Assistant](./inline-assistant.md): Discover how to use the agent to power inline transformations directly within a file or terminal. +- [Inline Assistant](./inline-assistant.md): Discover how to use AI to generate inline transformations directly within a file or terminal. ## Edit Prediction @@ -30,4 +30,4 @@ Learn how to get started using AI with Zed and all its capabilities. ## Text Threads -- [Text Threads](./text-threads.md): Learn about an alternative, text-based interface for interacting with language models. +- [Text Threads](./text-threads.md): Learn about an editor-based interface for interacting with language models. diff --git a/docs/src/ai/text-threads.md b/docs/src/ai/text-threads.md index ed439252b4d1612ea1b20269c6286e2b94685ac2..4e7e7904cf53e1e7e141b29c777a6f53796177cf 100644 --- a/docs/src/ai/text-threads.md +++ b/docs/src/ai/text-threads.md @@ -16,7 +16,7 @@ To begin, type a message in a `You` block. As you type, the remaining tokens count for the selected model is updated. -Inserting text from an editor is as simple as highlighting the text and running `agent: quote selection` ({#kb agent::QuoteSelection}); Zed will wrap it in a fenced code block if it is code. 
+Inserting text from an editor is as simple as highlighting the text and running `agent: add selection to thread` ({#kb agent::AddSelectionToThread}); Zed will wrap it in a fenced code block if it is code. ![Quoting a selection](https://zed.dev/img/assistant/quoting-a-selection.png) @@ -148,7 +148,7 @@ Usage: `/terminal []` The `/selection` command inserts the selected text in the editor into the context. This is useful for referencing specific parts of your code. -This is equivalent to the `agent: quote selection` command ({#kb agent::QuoteSelection}). +This is equivalent to the `agent: add selection to thread` command ({#kb agent::AddSelectionToThread}). Usage: `/selection` diff --git a/docs/src/completions.md b/docs/src/completions.md index d14cf61d829595ac622a1f007cbb44c8135c4cbd..ff96ede7503cd461bbd3d7b4afdedcaa2f36a2e5 100644 --- a/docs/src/completions.md +++ b/docs/src/completions.md @@ -9,7 +9,7 @@ Zed supports two sources for completions: When there is an appropriate language server available, Zed will provide completions of variable names, functions, and other symbols in the current file. You can disable these by adding the following to your Zed `settings.json` file: -```json +```json [settings] "show_completions_on_input": false ``` diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index ef5adf97a0618f0b3dbb409b029003c0b74cb125..e04d63f5d16a83c84b933d9f59db901c276b7a6d 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -28,7 +28,7 @@ Zed allows you to override global settings for individual languages. These custo Here's an example of language-specific settings: -```json +```json [settings] "languages": { "Python": { "tab_size": 4, @@ -67,7 +67,7 @@ Zed automatically detects file types based on their extensions, but you can cust To set up custom file associations, use the [`file_types`](./configuring-zed.md#file-types) setting in your `settings.json`: -```json +```json [settings] "file_types": { "C++": ["c"], "TOML": ["MyLockFile"], @@ -119,7 +119,7 @@ Some languages in Zed offer multiple language server options. You might have mul You can specify your preference using the `language_servers` setting: -```json +```json [settings] "languages": { "PHP": { "language_servers": ["intelephense", "!phpactor", "..."] @@ -145,7 +145,7 @@ Not all languages in Zed support toolchain discovery and selection, but for thos Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`: -```json +```json [settings] "lsp": { "rust-analyzer": { "initialization_options": { @@ -170,7 +170,7 @@ Suppose you want to configure the following settings for TypeScript: Here's how you would structure these settings in Zed's `settings.json`: -```json +```json [settings] "lsp": { "typescript-language-server": { "initialization_options": { @@ -198,7 +198,7 @@ Sent once during language server startup, requires server's restart to reapply c For example, rust-analyzer and clangd rely on this way of configuring only. -```json +```json [settings] "lsp": { "rust-analyzer": { "initialization_options": { @@ -213,7 +213,7 @@ For example, rust-analyzer and clangd rely on this way of configuring only. May be queried by the server multiple times. Most of the servers would rely on this way of configuring only. 
-```json +```json [settings] "lsp": { "tailwindcss-language-server": { "settings": { @@ -229,7 +229,7 @@ Apart of the LSP-related server configuration options, certain servers in Zed al Language servers are automatically downloaded or launched if found in your path, if you wish to specify an explicit alternate binary you can specify that in settings: -```json +```json [settings] "lsp": { "rust-analyzer": { "binary": { @@ -249,7 +249,7 @@ Language servers are automatically downloaded or launched if found in your path, You can toggle language server support globally or per-language: -```json +```json [settings] "languages": { "Markdown": { "enable_language_server": false @@ -267,7 +267,7 @@ Zed provides support for code formatting and linting to maintain consistent code Zed supports both built-in and external formatters. See [`formatter`](./configuring-zed.md#formatter) docs for more. You can configure formatters globally or per-language in your `settings.json`: -```json +```json [settings] "languages": { "JavaScript": { "formatter": { @@ -289,7 +289,7 @@ This example uses Prettier for JavaScript and the language server's formatter fo To disable formatting for a specific language: -```json +```json [settings] "languages": { "Markdown": { "format_on_save": "off" @@ -301,7 +301,7 @@ To disable formatting for a specific language: Linting in Zed is typically handled by language servers. Many language servers allow you to configure linting rules: -```json +```json [settings] "lsp": { "eslint": { "settings": { @@ -317,11 +317,11 @@ This configuration sets up ESLint to organize imports on save for JavaScript fil To run linter fixes automatically on save: -```json +```json [settings] "languages": { "JavaScript": { - "code_actions_on_format": { - "source.fixAll.eslint": true + "formatter": { + "code_action": "source.fixAll.eslint" } } } @@ -331,18 +331,20 @@ To run linter fixes automatically on save: Zed allows you to run both formatting and linting on save. Here's an example that uses Prettier for formatting and ESLint for linting JavaScript files: -```json +```json [settings] "languages": { "JavaScript": { - "formatter": { - "external": { - "command": "prettier", - "arguments": ["--stdin-filepath", "{buffer_path}"] + "formatter": [ + { + "code_action": "source.fixAll.eslint" + }, + { + "external": { + "command": "prettier", + "arguments": ["--stdin-filepath", "{buffer_path}"] + } } - }, - "code_actions_on_format": { - "source.fixAll.eslint": true - }, + ], "format_on_save": "on" } } @@ -362,18 +364,20 @@ Zed offers customization options for syntax highlighting and themes, allowing yo ### Customizing Syntax Highlighting -Zed uses Tree-sitter grammars for syntax highlighting. Override the default highlighting using the `experimental.theme_overrides` setting. +Zed uses Tree-sitter grammars for syntax highlighting. Override the default highlighting using the `theme_overrides` setting. This example makes comments italic and changes the color of strings: -```json -"experimental.theme_overrides": { - "syntax": { - "comment": { - "font_style": "italic" - }, - "string": { - "color": "#00AA00" +```json [settings] +"theme_overrides": { + "One Dark": { + "syntax": { + "comment": { + "font_style": "italic" + }, + "string": { + "color": "#00AA00" + } } } } @@ -386,7 +390,7 @@ Change your theme: 1. Use the theme selector ({#kb theme_selector::Toggle}) 2. 
Or set it in your `settings.json`: -```json +```json [settings] "theme": { "mode": "dark", "dark": "One Dark", @@ -408,7 +412,7 @@ To create your own theme extension, refer to the [Developing Theme Extensions](. Inlay hints provide additional information inline in your code, such as parameter names or inferred types. Configure inlay hints in your `settings.json`: -```json +```json [settings] "inlay_hints": { "enabled": true, "show_type_hints": true, diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 6acb8ca7178868c235b76a740ec79fb349fbdea1..efc4538c0e5286a053a89916c90548796ba619d0 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -35,7 +35,7 @@ Extensions that provide language servers may also provide default settings for t - Setting: `active_pane_modifiers` - Default: -```json +```json [settings] { "active_pane_modifiers": { "border_size": 0.0, @@ -74,7 +74,7 @@ Non-negative `float` values 1. Contain the bottom dock, giving the full height of the window to the left and right docks. -```json +```json [settings] { "bottom_dock_layout": "contained" } @@ -82,7 +82,7 @@ Non-negative `float` values 2. Give the bottom dock the full width of the window, truncating the left and right docks. -```json +```json [settings] { "bottom_dock_layout": "full" } @@ -90,7 +90,7 @@ Non-negative `float` values 3. Left align the bottom dock, truncating the left dock and giving the right dock the full height of the window. -```json +```json [settings] { "bottom_dock_layout": "left_aligned" } @@ -98,7 +98,7 @@ Non-negative `float` values 4. Right align the bottom dock, giving the left dock the full height of the window and truncating the right dock. -```json +```json [settings] { "bottom_dock_layout": "right_aligned" } @@ -124,25 +124,25 @@ Non-negative `float` values 1. Allow rewrap in comments only: -```json +```json [settings] { "allow_rewrap": "in_comments" } ``` -2. Allow rewrap everywhere: +2. Allow rewrap in selections only: -```json +```json [settings] { - "allow_rewrap": "everywhere" + "allow_rewrap": "in_selections" } ``` -3. Never allow rewrap: +3. Allow rewrap anywhere: -```json +```json [settings] { - "allow_rewrap": "never" + "allow_rewrap": "anywhere" } ``` @@ -192,7 +192,7 @@ ls ~/.local/share/zed/extensions/installed Define extensions which should be installed (`true`) or never installed (`false`). -```json +```json [settings] { "auto_install_extensions": { "html": true, @@ -212,7 +212,7 @@ Define extensions which should be installed (`true`) or never installed (`false` 1. To disable autosave, set it to `off`: -```json +```json [settings] { "autosave": "off" } @@ -220,7 +220,7 @@ Define extensions which should be installed (`true`) or never installed (`false` 2. To autosave when focus changes, use `on_focus_change`: -```json +```json [settings] { "autosave": "on_focus_change" } @@ -228,7 +228,7 @@ Define extensions which should be installed (`true`) or never installed (`false` 3. To autosave when the active window changes, use `on_window_change`: -```json +```json [settings] { "autosave": "on_window_change" } @@ -236,7 +236,7 @@ Define extensions which should be installed (`true`) or never installed (`false` 4. To autosave after an inactivity period, use `after_delay`: -```json +```json [settings] { "autosave": { "after_delay": { @@ -298,7 +298,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 1. 
VS Code -```json +```json [settings] { "base_keymap": "VSCode" } @@ -306,7 +306,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 2. Atom -```json +```json [settings] { "base_keymap": "Atom" } @@ -314,7 +314,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 3. JetBrains -```json +```json [settings] { "base_keymap": "JetBrains" } @@ -322,7 +322,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 4. None -```json +```json [settings] { "base_keymap": "None" } @@ -330,7 +330,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 5. Sublime Text -```json +```json [settings] { "base_keymap": "SublimeText" } @@ -338,7 +338,7 @@ Note that a save will be triggered when an unsaved tab is closed, even if this i 6. TextMate -```json +```json [settings] { "base_keymap": "TextMate" } @@ -367,7 +367,7 @@ Zed supports all OpenType features that can be enabled or disabled for a given b For example, to disable font ligatures, add the following to your settings: -```json +```json [settings] { "buffer_font_features": { "calt": false @@ -377,7 +377,7 @@ For example, to disable font ligatures, add the following to your settings: You can also set other OpenType features, like setting `cv01` to `7`: -```json +```json [settings] { "buffer_font_features": { "cv01": 7 @@ -396,7 +396,7 @@ You can also set other OpenType features, like setting `cv01` to `7`: For example, to use `Nerd Font` as a fallback, add the following to your settings: -```json +```json [settings] { "buffer_font_fallbacks": ["Nerd Font"] } @@ -438,7 +438,7 @@ A font size from `6` to `100` pixels (inclusive) - Setting: `centered_layout` - Default: -```json +```json [settings] "centered_layout": { "left_padding": 0.2, "right_padding": 0.2, @@ -484,15 +484,15 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed 1. Allow all diagnostics (default): -```json +```json [settings] { - "diagnostics_max_severity": null + "diagnostics_max_severity": "all" } ``` 2. Show only errors: -```json +```json [settings] { "diagnostics_max_severity": "error" } @@ -500,7 +500,7 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed 3. Show errors and warnings: -```json +```json [settings] { "diagnostics_max_severity": "warning" } @@ -508,15 +508,15 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed 4. Show errors, warnings, and information: -```json +```json [settings] { - "diagnostics_max_severity": "information" + "diagnostics_max_severity": "info" } ``` 5. Show all including hints: -```json +```json [settings] { "diagnostics_max_severity": "hint" } @@ -557,7 +557,7 @@ There are two options to choose from: 1. Behave as a regular buffer and select the whole word (default): -```json +```json [settings] { "double_click_in_multibuffer": "select" } @@ -565,7 +565,7 @@ There are two options to choose from: 2. Open the excerpt clicked as a new buffer in the new tab: -```json +```json [settings] { "double_click_in_multibuffer": "open" } @@ -589,7 +589,7 @@ For the case of "open", regular selection behavior can be achieved by holding `a - Setting: `edit_predictions` - Default: -```json +```json [settings] "edit_predictions": { "disabled_globs": [ "**/.env*", @@ -627,19 +627,19 @@ List of `string` values 1. Don't show edit predictions in comments: -```json +```json [settings] "disabled_in": ["comment"] ``` 2. 
Don't show edit predictions in strings and comments: -```json +```json [settings] "disabled_in": ["comment", "string"] ``` 3. Only in Go, don't show edit predictions in strings and comments: -```json +```json [settings] { "languages": { "Go": { @@ -659,25 +659,25 @@ List of `string` values 1. Don't highlight the current line: -```json +```json [settings] "current_line_highlight": "none" ``` 2. Highlight the gutter area: -```json +```json [settings] "current_line_highlight": "gutter" ``` 3. Highlight the editor area: -```json +```json [settings] "current_line_highlight": "line" ``` 4. Highlight the full line: -```json +```json [settings] "current_line_highlight": "all" ``` @@ -713,25 +713,25 @@ List of `string` values 1. A vertical bar: -```json +```json [settings] "cursor_shape": "bar" ``` 2. A block that surrounds the following character: -```json +```json [settings] "cursor_shape": "block" ``` 3. An underline / underscore that runs along the following character: -```json +```json [settings] "cursor_shape": "underline" ``` 4. An box drawn around the following character: -```json +```json [settings] "cursor_shape": "hollow" ``` @@ -741,7 +741,7 @@ List of `string` values - Setting: `gutter` - Default: -```json +```json [settings] { "gutter": { "line_numbers": true, @@ -771,19 +771,19 @@ List of `string` values 1. Never hide the mouse cursor: -```json +```json [settings] "hide_mouse": "never" ``` 2. Hide only when typing: -```json +```json [settings] "hide_mouse": "on_typing" ``` 3. Hide on both typing and cursor movement: -```json +```json [settings] "hide_mouse": "on_typing_and_movement" ``` @@ -797,25 +797,25 @@ List of `string` values 1. Place snippets at the top of the completion list: -```json +```json [settings] "snippet_sort_order": "top" ``` 2. Place snippets normally without any preference: -```json +```json [settings] "snippet_sort_order": "inline" ``` 3. Place snippets at the bottom of the completion list: -```json +```json [settings] "snippet_sort_order": "bottom" ``` 4. Do not show snippets in the completion list at all: -```json +```json [settings] "snippet_sort_order": "none" ``` @@ -825,7 +825,7 @@ List of `string` values - Setting: `scrollbar` - Default: -```json +```json [settings] "scrollbar": { "show": "auto", "cursors": true, @@ -851,7 +851,7 @@ List of `string` values 1. Show the scrollbar if there's important information or follow the system's configured behavior: -```json +```json [settings] "scrollbar": { "show": "auto" } @@ -859,7 +859,7 @@ List of `string` values 2. Match the system's configured behavior: -```json +```json [settings] "scrollbar": { "show": "system" } @@ -867,7 +867,7 @@ List of `string` values 3. Always show the scrollbar: -```json +```json [settings] "scrollbar": { "show": "always" } @@ -875,7 +875,7 @@ List of `string` values 4. Never show the scrollbar: -```json +```json [settings] "scrollbar": { "show": "never" } @@ -941,41 +941,41 @@ List of `string` values 1. Show all diagnostics: -```json +```json [settings] { - "diagnostics": "all" + "show_diagnostics": "all" } ``` 2. Do not show any diagnostics: -```json +```json [settings] { - "diagnostics": "none" + "show_diagnostics": "off" } ``` 3. Show only errors: -```json +```json [settings] { - "diagnostics": "error" + "show_diagnostics": "error" } ``` 4. Show only errors and warnings: -```json +```json [settings] { - "diagnostics": "warning" + "show_diagnostics": "warning" } ``` 5. 
Show only errors, warnings, and information: -```json +```json [settings] { - "diagnostics": "information" + "show_diagnostics": "info" } ``` @@ -985,7 +985,7 @@ List of `string` values - Setting: `axes` - Default: -```json +```json [settings] "scrollbar": { "axes": { "horizontal": true, @@ -1020,7 +1020,7 @@ List of `string` values - Setting: `minimap` - Default: -```json +```json [settings] { "minimap": { "show": "never", @@ -1041,7 +1041,7 @@ List of `string` values 1. Always show the minimap: -```json +```json [settings] { "show": "always" } @@ -1049,7 +1049,7 @@ List of `string` values 2. Show the minimap if the editor's scrollbars are visible: -```json +```json [settings] { "show": "auto" } @@ -1057,7 +1057,7 @@ List of `string` values 3. Never show the minimap: -```json +```json [settings] { "show": "never" } @@ -1073,7 +1073,7 @@ List of `string` values 1. Show the minimap thumb when hovering over the minimap: -```json +```json [settings] { "thumb": "hover" } @@ -1081,7 +1081,7 @@ List of `string` values 2. Always show the minimap thumb: -```json +```json [settings] { "thumb": "always" } @@ -1097,7 +1097,7 @@ List of `string` values 1. Display a border on all sides of the thumb: -```json +```json [settings] { "thumb_border": "full" } @@ -1105,7 +1105,7 @@ List of `string` values 2. Display a border on all sides except the left side: -```json +```json [settings] { "thumb_border": "left_open" } @@ -1113,7 +1113,7 @@ List of `string` values 3. Display a border on all sides except the right side: -```json +```json [settings] { "thumb_border": "right_open" } @@ -1121,7 +1121,7 @@ List of `string` values 4. Display a border only on the left side: -```json +```json [settings] { "thumb_border": "left_only" } @@ -1129,7 +1129,7 @@ List of `string` values 5. Display the thumb without any border: -```json +```json [settings] { "thumb_border": "none" } @@ -1145,7 +1145,7 @@ List of `string` values 1. Inherit the editor's current line highlight setting: -```json +```json [settings] { "minimap": { "current_line_highlight": null @@ -1155,7 +1155,7 @@ List of `string` values 2. Highlight the current line in the minimap: -```json +```json [settings] { "minimap": { "current_line_highlight": "line" @@ -1165,7 +1165,7 @@ List of `string` values or -```json +```json [settings] { "minimap": { "current_line_highlight": "all" @@ -1175,7 +1175,7 @@ or 3. Do not highlight the current line in the minimap: -```json +```json [settings] { "minimap": { "current_line_highlight": "gutter" @@ -1185,7 +1185,7 @@ or or -```json +```json [settings] { "minimap": { "current_line_highlight": "none" @@ -1199,7 +1199,7 @@ or - Settings: `tab_bar` - Default: -```json +```json [settings] "tab_bar": { "show": true, "show_nav_history_buttons": true, @@ -1243,7 +1243,7 @@ or - Setting: `tabs` - Default: -```json +```json [settings] "tabs": { "close_position": "right", "file_icons": false, @@ -1264,7 +1264,7 @@ or 1. Display the close button on the right: -```json +```json [settings] { "close_position": "right" } @@ -1272,7 +1272,7 @@ or 2. Display the close button on the left: -```json +```json [settings] { "close_position": "left" } @@ -1300,7 +1300,7 @@ or 1. Activate the tab that was open previously: -```json +```json [settings] { "activate_on_close": "history" } @@ -1308,7 +1308,7 @@ or 2. Activate the right neighbour tab if present: -```json +```json [settings] { "activate_on_close": "neighbour" } @@ -1316,7 +1316,7 @@ or 3. 
Activate the left neighbour tab if present: -```json +```json [settings] { "activate_on_close": "left_neighbour" } @@ -1332,7 +1332,7 @@ or 1. Show it just upon hovering the tab: -```json +```json [settings] { "show_close_button": "hover" } @@ -1340,7 +1340,7 @@ or 2. Show it persistently: -```json +```json [settings] { "show_close_button": "always" } @@ -1348,7 +1348,7 @@ or 3. Never show it, even if hovering it: -```json +```json [settings] { "show_close_button": "hidden" } @@ -1364,7 +1364,7 @@ or 1. Do not mark any files: -```json +```json [settings] { "show_diagnostics": "off" } @@ -1372,7 +1372,7 @@ or 2. Only mark files with errors: -```json +```json [settings] { "show_diagnostics": "errors" } @@ -1380,7 +1380,7 @@ or 3. Mark files with errors and warnings: -```json +```json [settings] { "show_diagnostics": "all" } @@ -1402,7 +1402,7 @@ or - Setting: `drag_and_drop_selection` - Default: -```json +```json [settings] "drag_and_drop_selection": { "enabled": true, "delay": 300 @@ -1415,7 +1415,7 @@ or - Setting: `toolbar` - Default: -```json +```json [settings] "toolbar": { "breadcrumbs": true, "quick_actions": true, @@ -1495,10 +1495,11 @@ Positive `integer` value between 1 and 32. Values outside of this range will be - Setting: `status_bar` - Default: -```json +```json [settings] "status_bar": { "active_language_button": true, - "cursor_position_button": true + "cursor_position_button": true, + "line_endings_button": false }, ``` @@ -1529,7 +1530,7 @@ Some options are passed via `initialization_options` to the language server. The For example to pass the `check` option to `rust-analyzer`, use the following configuration: -```json +```json [settings] "lsp": { "rust-analyzer": { "initialization_options": { @@ -1543,7 +1544,7 @@ For example to pass the `check` option to `rust-analyzer`, use the following con While other options may be changed at a runtime and should be placed under `settings`: -```json +```json [settings] "lsp": { "yaml-language-server": { "settings": { @@ -1561,7 +1562,7 @@ While other options may be changed at a runtime and should be placed under `sett - Setting: `global_lsp_settings` - Default: -```json +```json [settings] { "global_lsp_settings": { "button": true @@ -1589,7 +1590,7 @@ While other options may be changed at a runtime and should be placed under `sett - Setting: `features` - Default: -```json +```json [settings] { "features": { "edit_prediction_provider": "zed" @@ -1607,7 +1608,7 @@ While other options may be changed at a runtime and should be placed under `sett 1. Use Zeta as the edit prediction provider: -```json +```json [settings] { "features": { "edit_prediction_provider": "zed" @@ -1617,7 +1618,7 @@ While other options may be changed at a runtime and should be placed under `sett 2. Use Copilot as the edit prediction provider: -```json +```json [settings] { "features": { "edit_prediction_provider": "copilot" @@ -1627,7 +1628,7 @@ While other options may be changed at a runtime and should be placed under `sett 3. Use Supermaven as the edit prediction provider: -```json +```json [settings] { "features": { "edit_prediction_provider": "supermaven" @@ -1637,7 +1638,7 @@ While other options may be changed at a runtime and should be placed under `sett 4. Turn off edit predictions across all providers -```json +```json [settings] { "features": { "edit_prediction_provider": "none" @@ -1655,7 +1656,7 @@ While other options may be changed at a runtime and should be placed under `sett 1. 
`on`, enables format on save obeying `formatter` setting: -```json +```json [settings] { "format_on_save": "on" } @@ -1663,7 +1664,7 @@ While other options may be changed at a runtime and should be placed under `sett 2. `off`, disables format on save: -```json +```json [settings] { "format_on_save": "off" } @@ -1679,7 +1680,7 @@ While other options may be changed at a runtime and should be placed under `sett 1. To use the current language server, use `"language_server"`: -```json +```json [settings] { "formatter": "language_server" } @@ -1687,7 +1688,7 @@ While other options may be changed at a runtime and should be placed under `sett 2. Or to use an external command, use `"external"`. Specify the name of the formatting program to run, and an array of arguments to pass to the program. The buffer's text will be passed to the program on stdin, and the formatted output should be written to stdout. For example, the following command would strip trailing spaces using [`sed(1)`](https://linux.die.net/man/1/sed): -```json +```json [settings] { "formatter": { "external": { @@ -1702,7 +1703,7 @@ While other options may be changed at a runtime and should be placed under `sett WARNING: `{buffer_path}` should not be used to direct your formatter to read from a filename. Your formatter should only read from standard input and should not read or write files directly. -```json +```json [settings] "formatter": { "external": { "command": "prettier", @@ -1713,22 +1714,20 @@ WARNING: `{buffer_path}` should not be used to direct your formatter to read fro 4. Or to use code actions provided by the connected language servers, use `"code_actions"`: -```json +```json [settings] { - "formatter": { - "code_actions": { - // Use ESLint's --fix: - "source.fixAll.eslint": true, - // Organize imports on save: - "source.organizeImports": true - } - } + "formatter": [ + // Use ESLint's --fix: + { "code_action": "source.fixAll.eslint" }, + // Organize imports on save: + { "code_action": "source.organizeImports" } + ] } ``` 5. Or to use multiple formatters consecutively, use an array of formatters: -```json +```json [settings] { "formatter": [ { "language_server": { "name": "rust-analyzer" } }, @@ -1745,74 +1744,6 @@ WARNING: `{buffer_path}` should not be used to direct your formatter to read fro Here `rust-analyzer` will be used first to format the code, followed by a call of sed. If any of the formatters fails, the subsequent ones will still be executed. -## Code Actions On Format - -- Description: The code actions to perform with the primary language server when formatting the buffer. -- Setting: `code_actions_on_format` -- Default: `{}`, except for Go it's `{ "source.organizeImports": true }` - -**Examples** - - - -1. Organize imports on format in TypeScript and TSX buffers: - -```json -{ - "languages": { - "TypeScript": { - "code_actions_on_format": { - "source.organizeImports": true - } - }, - "TSX": { - "code_actions_on_format": { - "source.organizeImports": true - } - } - } -} -``` - -2. Run ESLint `fixAll` code action when formatting: - -```json -{ - "languages": { - "JavaScript": { - "code_actions_on_format": { - "source.fixAll.eslint": true - } - } - } -} -``` - -3. 
Run only a single ESLint rule when using `fixAll`: - -```json -{ - "languages": { - "JavaScript": { - "code_actions_on_format": { - "source.fixAll.eslint": true - } - } - }, - "lsp": { - "eslint": { - "settings": { - "codeActionOnSave": { - "rules": ["import/order"] - } - } - } - } -} -``` - ## Auto close - Description: Whether to automatically add matching closing characters when typing opening parenthesis, bracket, brace, single or double quote characters. @@ -1849,7 +1780,7 @@ The result is still `)))` and not `))))))`, which is what it would be by default - Description: Files or globs of files that will be excluded by Zed entirely. They will be skipped during file scans, file searches, and not be displayed in the project file tree. Overrides `file_scan_inclusions`. - Default: -```json +```json [settings] "file_scan_exclusions": [ "**/.git", "**/.svn", @@ -1871,7 +1802,7 @@ Note, specifying `file_scan_exclusions` in settings.json will override the defau - Description: Files or globs of files that will be included by Zed, even when ignored by git. This is useful for files that are not tracked by git, but are still important to your project. Note that globs that are overly broad can slow down Zed's file scanning. `file_scan_exclusions` takes precedence over these inclusions. - Default: -```json +```json [settings] "file_scan_inclusions": [".env*"], ``` @@ -1881,7 +1812,7 @@ Note, specifying `file_scan_exclusions` in settings.json will override the defau - Description: Configure how Zed selects a language for a file based on its filename or extension. Supports glob entries. - Default: -```json +```json [settings] "file_types": { "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json"], "Shell Script": [".env.*"] @@ -1892,7 +1823,7 @@ Note, specifying `file_scan_exclusions` in settings.json will override the defau To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files starting with `Dockerfile` as Dockerfile: -```json +```json [settings] { "file_types": { "C++": ["c"], @@ -1908,7 +1839,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files - Setting: `diagnostics` - Default: -```json +```json [settings] { "diagnostics": { "include_warnings": true, @@ -1928,7 +1859,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files - Setting: `inline` - Default: -```json +```json [settings] { "diagnostics": { "inline": { @@ -1946,7 +1877,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 1. Enable inline diagnostics. -```json +```json [settings] { "diagnostics": { "inline": { @@ -1958,7 +1889,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 2. Delay diagnostic updates until some time after the last diagnostic update. -```json +```json [settings] { "diagnostics": { "inline": { @@ -1971,7 +1902,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 3. Set padding between the end of the source line and the start of the diagnostic. -```json +```json [settings] { "diagnostics": { "inline": { @@ -1984,7 +1915,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 4. Horizontally align inline diagnostics at the given column. -```json +```json [settings] { "diagnostics": { "inline": { @@ -1997,7 +1928,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 5. Show only warning and error diagnostics. 
-```json +```json [settings] { "diagnostics": { "inline": { @@ -2014,7 +1945,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files - Setting: `git` - Default: -```json +```json [settings] { "git": { "git_gutter": "tracked_files", @@ -2039,7 +1970,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 1. Show git gutter in tracked files -```json +```json [settings] { "git": { "git_gutter": "tracked_files" @@ -2049,7 +1980,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files 2. Hide git gutter -```json +```json [settings] { "git": { "git_gutter": "hide" @@ -2069,7 +2000,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files Example: -```json +```json [settings] { "git": { "gutter_debounce": 100 @@ -2083,7 +2014,7 @@ Example: - Setting: `inline_blame` - Default: -```json +```json [settings] { "git": { "inline_blame": { @@ -2097,7 +2028,7 @@ Example: 1. Disable inline git blame: -```json +```json [settings] { "git": { "inline_blame": { @@ -2109,7 +2040,7 @@ Example: 2. Only show inline git blame after a delay (that starts after cursor stops moving): -```json +```json [settings] { "git": { "inline_blame": { @@ -2121,7 +2052,7 @@ Example: 3. Show a commit summary next to the commit date and author: -```json +```json [settings] { "git": { "inline_blame": { @@ -2133,7 +2064,7 @@ Example: 4. Use this as the minimum column at which to display inline blame information: -```json +```json [settings] { "git": { "inline_blame": { @@ -2145,7 +2076,7 @@ Example: 5. Set the padding between the end of the line and the inline blame hint, in ems: -```json +```json [settings] { "git": { "inline_blame": { @@ -2161,7 +2092,7 @@ Example: - Setting: `branch_picker` - Default: -```json +```json [settings] { "git": { "branch_picker": { @@ -2175,7 +2106,7 @@ Example: 1. Show the author name in the branch picker: -```json +```json [settings] { "git": { "branch_picker": { @@ -2191,7 +2122,7 @@ Example: - Setting: `hunk_style` - Default: -```json +```json [settings] { "git": { "hunk_style": "staged_hollow" @@ -2203,7 +2134,7 @@ Example: 1. Show the staged hunks faded out and with a border: -```json +```json [settings] { "git": { "hunk_style": "staged_hollow" @@ -2213,7 +2144,7 @@ Example: 2. Show unstaged hunks faded out and with a border: -```json +```json [settings] { "git": { "hunk_style": "unstaged_hollow" @@ -2231,7 +2162,7 @@ Example: 1. Do nothing: -```json +```json [settings] { "go_to_definition_fallback": "none" } @@ -2239,7 +2170,7 @@ Example: 2. Find references for the same symbol (default): -```json +```json [settings] { "go_to_definition_fallback": "find_all_references" } @@ -2271,7 +2202,7 @@ Example: - Setting: `indent_guides` - Default: -```json +```json [settings] { "indent_guides": { "enabled": true, @@ -2287,7 +2218,7 @@ Example: 1. Disable indent guides -```json +```json [settings] { "indent_guides": { "enabled": false @@ -2297,7 +2228,7 @@ Example: 2. Enable indent guides for a specific language. -```json +```json [settings] { "languages": { "Python": { @@ -2312,7 +2243,7 @@ Example: 3. Enable indent aware coloring ("rainbow indentation"). The colors that are used for different indentation levels are defined in the theme (theme key: `accents`). They can be customized by using theme overrides. -```json +```json [settings] { "indent_guides": { "enabled": true, @@ -2324,7 +2255,7 @@ Example: 4. Enable indent aware background coloring ("rainbow indentation"). 
The colors that are used for different indentation levels are defined in the theme (theme key: `accents`). They can be customized by using theme overrides. -```json +```json [settings] { "indent_guides": { "enabled": true, @@ -2366,7 +2297,7 @@ Example: - Setting: `icon_theme` - Default: -```json +```json [settings] "icon_theme": { "mode": "system", "dark": "Zed (Default)", @@ -2384,7 +2315,7 @@ Example: 1. Set the icon theme to dark mode -```json +```json [settings] { "mode": "dark" } @@ -2392,7 +2323,7 @@ Example: 2. Set the icon theme to light mode -```json +```json [settings] { "mode": "light" } @@ -2400,7 +2331,7 @@ Example: 3. Set the icon theme to system mode -```json +```json [settings] { "mode": "system" } @@ -2432,7 +2363,7 @@ Run the {#action icon_theme_selector::Toggle} action in the command palette to s - Setting: `image_viewer` - Default: -```json +```json [settings] { "image_viewer": { "unit": "binary" @@ -2452,7 +2383,7 @@ Run the {#action icon_theme_selector::Toggle} action in the command palette to s 1. Use binary units (KiB, MiB): -```json +```json [settings] { "image_viewer": { "unit": "binary" @@ -2462,7 +2393,7 @@ Run the {#action icon_theme_selector::Toggle} action in the command palette to s 2. Use decimal units (KB, MB): -```json +```json [settings] { "image_viewer": { "unit": "decimal" @@ -2476,7 +2407,7 @@ Run the {#action icon_theme_selector::Toggle} action in the command palette to s - Setting: `inlay_hints` - Default: -```json +```json [settings] "inlay_hints": { "enabled": false, "show_type_hints": true, @@ -2509,7 +2440,7 @@ Settings-related hint updates are not debounced. All possible config values for `toggle_on_modifiers_press` are: -```json +```json [settings] "inlay_hints": { "toggle_on_modifiers_press": { "control": true, @@ -2529,7 +2460,7 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif - Setting: `journal` - Default: -```json +```json [settings] "journal": { "path": "~", "hour_format": "hour12" @@ -2556,7 +2487,7 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif 1. 12-hour format: -```json +```json [settings] { "hour_format": "hour12" } @@ -2564,7 +2495,7 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif 2. 24-hour format: -```json +```json [settings] { "hour_format": "hour24" } @@ -2576,7 +2507,7 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif - Setting: `jsx_tag_auto_close` - Default: -```json +```json [settings] { "jsx_tag_auto_close": { "enabled": true @@ -2598,7 +2529,7 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif To override settings for a language, add an entry for that languages name to the `languages` value. Example: -```json +```json [settings] "languages": { "C": { "format_on_save": "off", @@ -2636,7 +2567,7 @@ These values take in the same options as the root-level settings with the same n - Setting: `language_models` - Default: -```json +```json [settings] { "language_models": { "anthropic": { @@ -2669,7 +2600,7 @@ Configuration for various AI model providers including API URLs and authenticati 1. Short format: -```json +```json [settings] { "line_indicator_format": "short" } @@ -2677,7 +2608,7 @@ Configuration for various AI model providers including API URLs and authenticati 2. Long format: -```json +```json [settings] { "line_indicator_format": "long" } @@ -2733,7 +2664,7 @@ Positive `integer` values or `null` for unlimited tabs 1. 
Maps to `Alt` on Linux and Windows and to `Option` on macOS: -```json +```json [settings] { "multi_cursor_modifier": "alt" } @@ -2741,7 +2672,7 @@ Positive `integer` values or `null` for unlimited tabs 2. Maps `Control` on Linux and Windows and to `Command` on macOS: -```json +```json [settings] { "multi_cursor_modifier": "cmd_or_ctrl" // alias: "cmd", "ctrl" } @@ -2753,7 +2684,7 @@ Positive `integer` values or `null` for unlimited tabs - Setting: `node` - Default: -```json +```json [settings] { "node": { "ignore_system_version": false, @@ -2794,7 +2725,7 @@ By default no proxy will be used, or Zed will attempt to retrieve proxy settings For example, to set an `http` proxy, add the following to your settings: -```json +```json [settings] { "proxy": "http://127.0.0.1:10809" } @@ -2802,7 +2733,7 @@ For example, to set an `http` proxy, add the following to your settings: Or to set a `socks5` proxy: -```json +```json [settings] { "proxy": "socks5h://localhost:10808" } @@ -2820,7 +2751,7 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en 1. Use platform default behavior: -```json +```json [settings] { "on_last_window_closed": "platform_default" } @@ -2828,7 +2759,7 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en 2. Always quit the application: -```json +```json [settings] { "on_last_window_closed": "quit_app" } @@ -2844,7 +2775,7 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en Configuration object for defining settings profiles. Example: -```json +```json [settings] { "profiles": { "presentation": { @@ -2871,7 +2802,7 @@ Configuration object for defining settings profiles. Example: - Setting: `preview_tabs` - Default: -```json +```json [settings] "preview_tabs": { "enabled": true, "enable_preview_from_file_finder": false, @@ -2929,7 +2860,7 @@ Configuration object for defining settings profiles. Example: 1. Split upward: -```json +```json [settings] { "pane_split_direction_horizontal": "up" } @@ -2937,7 +2868,7 @@ Configuration object for defining settings profiles. Example: 2. Split downward: -```json +```json [settings] { "pane_split_direction_horizontal": "down" } @@ -2953,7 +2884,7 @@ Configuration object for defining settings profiles. Example: 1. Split to the left: -```json +```json [settings] { "pane_split_direction_vertical": "left" } @@ -2961,7 +2892,7 @@ Configuration object for defining settings profiles. Example: 2. Split to the right: -```json +```json [settings] { "pane_split_direction_vertical": "right" } @@ -3071,7 +3002,7 @@ List of strings containing any combination of: 1. Restore all workspaces that were open when quitting Zed: -```json +```json [settings] { "restore_on_startup": "last_session" } @@ -3079,7 +3010,7 @@ List of strings containing any combination of: 2. Restore the workspace that was closed last: -```json +```json [settings] { "restore_on_startup": "last_workspace" } @@ -3087,7 +3018,7 @@ List of strings containing any combination of: 3. Always start with an empty editor: -```json +```json [settings] { "restore_on_startup": "none" } @@ -3103,7 +3034,7 @@ List of strings containing any combination of: 1. Scroll one page beyond the last line by one page: -```json +```json [settings] { "scroll_beyond_last_line": "one_page" } @@ -3111,7 +3042,7 @@ List of strings containing any combination of: 2. 
The editor will scroll beyond the last line by the same amount of lines as `vertical_scroll_margin`: -```json +```json [settings] { "scroll_beyond_last_line": "vertical_scroll_margin" } @@ -3119,7 +3050,7 @@ List of strings containing any combination of: 3. The editor will not scroll beyond the last line: -```json +```json [settings] { "scroll_beyond_last_line": "off" } @@ -3175,7 +3106,7 @@ Non-negative `integer` values - Setting: `search` - Default: -```json +```json [settings] "search": { "whole_word": false, "case_sensitive": false, @@ -3234,7 +3165,7 @@ Examples: - Setting: `completions` - Default: -```json +```json [settings] { "completions": { "words": "fallback", @@ -3351,12 +3282,13 @@ Positive integer values - Setting: `whitespace_map` - Default: -```json +```json [settings] { "whitespace_map": { "space": "•", "tab": "→" - }, + } +} ``` ## Soft Wrap @@ -3395,7 +3327,7 @@ Positive integer values ## Use Auto Surround -- Description: Whether to automatically surround selected text when typing opening parenthesis, bracket, brace, single or double quote characters. For example, when you select text and type (, Zed will surround the text with (). +- Description: Whether to automatically surround selected text when typing opening parenthesis, bracket, brace, single or double quote characters. For example, when you select text and type '(', Zed will surround the text with (). - Setting: `use_auto_surround` - Default: `true` @@ -3449,7 +3381,7 @@ List of `integer` column numbers - Setting: `tasks` - Default: -```json +```json [settings] { "tasks": { "variables": {}, @@ -3471,7 +3403,7 @@ List of `integer` column numbers - Setting: `telemetry` - Default: -```json +```json [settings] "telemetry": { "diagnostics": true, "metrics": true @@ -3506,7 +3438,7 @@ List of `integer` column numbers - Setting: `terminal` - Default: -```json +```json [settings] { "terminal": { "alternate_scroll": "off", @@ -3562,7 +3494,7 @@ List of `integer` column numbers 1. Default alternate scroll mode to off -```json +```json [settings] { "terminal": { "alternate_scroll": "off" @@ -3572,7 +3504,7 @@ List of `integer` column numbers 2. Default alternate scroll mode to on -```json +```json [settings] { "terminal": { "alternate_scroll": "on" @@ -3590,7 +3522,7 @@ List of `integer` column numbers 1. Never blink the cursor, ignore the terminal mode -```json +```json [settings] { "terminal": { "blinking": "off" @@ -3600,7 +3532,7 @@ List of `integer` column numbers 2. Default the cursor blink to off, but allow the terminal to turn blinking on -```json +```json [settings] { "terminal": { "blinking": "terminal_controlled" @@ -3610,7 +3542,7 @@ List of `integer` column numbers 3. Always blink the cursor, ignore the terminal mode -```json +```json [settings] { "terminal": { "blinking": "on" @@ -3630,7 +3562,7 @@ List of `integer` column numbers **Example** -```json +```json [settings] { "terminal": { "copy_on_select": true @@ -3648,7 +3580,7 @@ List of `integer` column numbers 1. A block that surrounds the following character -```json +```json [settings] { "terminal": { "cursor_shape": "block" @@ -3658,7 +3590,7 @@ List of `integer` column numbers 2. A vertical bar -```json +```json [settings] { "terminal": { "cursor_shape": "bar" @@ -3668,7 +3600,7 @@ List of `integer` column numbers 3. An underline / underscore that runs along the following character -```json +```json [settings] { "terminal": { "cursor_shape": "underline" @@ -3678,7 +3610,7 @@ List of `integer` column numbers 4. 
A box drawn around the following character -```json +```json [settings] { "terminal": { "cursor_shape": "hollow" @@ -3698,7 +3630,7 @@ List of `integer` column numbers **Example** -```json +```json [settings] { "terminal": { "keep_selection_on_copy": false @@ -3714,7 +3646,7 @@ List of `integer` column numbers **Example** -```json +```json [settings] { "terminal": { "env": { @@ -3735,7 +3667,7 @@ List of `integer` column numbers `integer` values -```json +```json [settings] { "terminal": { "font_size": 15 @@ -3753,7 +3685,7 @@ List of `integer` column numbers The name of any font family installed on the user's system -```json +```json [settings] { "terminal": { "font_family": "Berkeley Mono" @@ -3772,7 +3704,7 @@ The name of any font family installed on the user's system See Buffer Font Features -```json +```json [settings] { "terminal": { "font_features": { @@ -3793,7 +3725,7 @@ See Buffer Font Features 1. Use a line height that's `comfortable` for reading, 1.618. -```json +```json [settings] { "terminal": { "line_height": "comfortable" @@ -3803,7 +3735,7 @@ See Buffer Font Features 2. Use a `standard` line height, 1.3. This option is useful for TUIs, particularly if they use box characters. (default) -```json +```json [settings] { "terminal": { "line_height": "standard" @@ -3813,7 +3745,7 @@ See Buffer Font Features 3. Use a custom line height. -```json +```json [settings] { "terminal": { "line_height": { @@ -3839,7 +3771,7 @@ See Buffer Font Features - `75`: Minimum for body text - `90`: Preferred for body text -```json +```json [settings] { "terminal": { "minimum_contrast": 45 @@ -3857,7 +3789,7 @@ See Buffer Font Features `boolean` values -```json +```json [settings] { "terminal": { "option_as_meta": true @@ -3875,7 +3807,7 @@ See Buffer Font Features 1. Use the system's default terminal configuration (usually the `/etc/passwd` file). -```json +```json [settings] { "terminal": { "shell": "system" @@ -3885,7 +3817,7 @@ See Buffer Font Features 2. A program to launch: -```json +```json [settings] { "terminal": { "shell": { @@ -3897,7 +3829,7 @@ See Buffer Font Features 3. A program with arguments: -```json +```json [settings] { "terminal": { "shell": { @@ -3916,7 +3848,7 @@ See Buffer Font Features - Setting: `detect_venv` - Default: -```json +```json [settings] { "terminal": { "detect_venv": { @@ -3935,7 +3867,7 @@ See Buffer Font Features Disable with: -```json +```json [settings] { "terminal": { "detect_venv": "off" @@ -3949,7 +3881,7 @@ Disable with: - Setting: `toolbar` - Default: -```json +```json [settings] { "terminal": { "toolbar": { @@ -3979,7 +3911,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` `boolean` values -```json +```json [settings] { "terminal": { "button": false @@ -3997,7 +3929,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 1. Use the current file's project directory. Will Fallback to the first project directory strategy if unsuccessful -```json +```json [settings] { "terminal": { "working_directory": "current_project_directory" @@ -4007,7 +3939,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 2. Use the first project in this workspace's directory. Will fallback to using this platform's home directory. -```json +```json [settings] { "terminal": { "working_directory": "first_project_directory" @@ -4017,7 +3949,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 3. 
Always use this platform's home directory (if we can find it) -```json +```json [settings] { "terminal": { "working_directory": "always_home" @@ -4027,7 +3959,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 4. Always use a specific directory. This value will be shell expanded. If this path is not a valid directory the terminal will default to this platform's home directory. -```json +```json [settings] { "terminal": { "working_directory": { @@ -4045,7 +3977,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` - Setting: `repl` - Default: -```json +```json [settings] "repl": { // Maximum number of columns to keep in REPL's scrollback buffer. // Clamped with [20, 512] range. @@ -4068,7 +4000,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` - Setting: `theme` - Default: -```json +```json [settings] "theme": { "mode": "system", "dark": "One Dark", @@ -4086,7 +4018,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 1. Set the theme to dark mode -```json +```json [settings] { "mode": "dark" } @@ -4094,7 +4026,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 2. Set the theme to light mode -```json +```json [settings] { "mode": "light" } @@ -4102,7 +4034,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` 3. Set the theme to system mode -```json +```json [settings] { "mode": "system" } @@ -4134,7 +4066,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - Setting: `title_bar` - Default: -```json +```json [settings] "title_bar": { "show_branch_icon": false, "show_branch_name": true, @@ -4172,7 +4104,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Use platform default behavior: -```json +```json [settings] { "when_closing_with_no_tabs": "platform_default" } @@ -4180,7 +4112,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Always close the window: -```json +```json [settings] { "when_closing_with_no_tabs": "close_window" } @@ -4188,7 +4120,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 3. Never close the window: -```json +```json [settings] { "when_closing_with_no_tabs": "keep_window_open" } @@ -4200,7 +4132,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - Setting: `project_panel` - Default: -```json +```json [settings] { "project_panel": { "button": true, @@ -4223,7 +4155,9 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a "show": "always" }, "hide_root": false, - "starts_open": true + "hide_hidden": false, + "starts_open": true, + "open_file_on_paste": true } } ``` @@ -4238,7 +4172,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Default dock position to left -```json +```json [settings] { "dock": "left" } @@ -4246,7 +4180,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Default dock position to right -```json +```json [settings] { "dock": "right" } @@ -4262,7 +4196,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Comfortable entry spacing -```json +```json [settings] { "entry_spacing": "comfortable" } @@ -4270,7 +4204,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. 
Standard entry spacing -```json +```json [settings] { "entry_spacing": "standard" } @@ -4286,7 +4220,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Default enable git status -```json +```json [settings] { "git_status": true } @@ -4294,7 +4228,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Default disable git status -```json +```json [settings] { "git_status": false } @@ -4320,7 +4254,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Enable auto reveal entries -```json +```json [settings] { "auto_reveal_entries": true } @@ -4328,7 +4262,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Disable auto reveal entries -```json +```json [settings] { "auto_reveal_entries": false } @@ -4344,7 +4278,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Enable auto fold dirs -```json +```json [settings] { "auto_fold_dirs": true } @@ -4352,7 +4286,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Disable auto fold dirs -```json +```json [settings] { "auto_fold_dirs": false } @@ -4370,7 +4304,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - Setting: `indent_guides` - Default: -```json +```json [settings] "indent_guides": { "show": "always" } @@ -4380,7 +4314,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Show indent guides in the project panel -```json +```json [settings] { "indent_guides": { "show": "always" @@ -4390,7 +4324,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. Hide indent guides in the project panel -```json +```json [settings] { "indent_guides": { "show": "never" @@ -4404,7 +4338,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a - Setting: `scrollbar` - Default: -```json +```json [settings] "scrollbar": { "show": null } @@ -4414,7 +4348,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 1. Show scrollbar in the project panel -```json +```json [settings] { "scrollbar": { "show": "always" @@ -4424,7 +4358,7 @@ Run the {#action theme_selector::Toggle} action in the command palette to see a 2. 
Hide scrollbar in the project panel -```json +```json [settings] { "scrollbar": { "show": "never" @@ -4442,7 +4376,7 @@ Visit [the Configuration page](./ai/configuration.md) under the AI section to le - Setting: `collaboration_panel` - Default: -```json +```json [settings] { "collaboration_panel": { "button": true, @@ -4464,7 +4398,7 @@ Visit [the Configuration page](./ai/configuration.md) under the AI section to le - Setting: `debugger` - Default: -```json +```json [settings] { "debugger": { "stepping_granularity": "line", @@ -4483,7 +4417,7 @@ See the [debugger page](./debugger.md) for more information about debugging supp - Setting: `git_panel` - Default: -```json +```json [settings] { "git_panel": { "button": true, @@ -4517,7 +4451,7 @@ See the [debugger page](./debugger.md) for more information about debugging supp - Setting: `outline_panel` - Default: -```json +```json [settings] "outline_panel": { "button": true, "default_width": 300, @@ -4543,7 +4477,7 @@ See the [debugger page](./debugger.md) for more information about debugging supp - Setting: `calls` - Default: -```json +```json [settings] "calls": { // Join calls with the microphone live by default "mute_on_join": false, @@ -4567,7 +4501,7 @@ Float values between `0.0` and `0.9`, where: **Example** -```json +```json [settings] { "unnecessary_code_fade": 0.5 } @@ -4589,7 +4523,7 @@ The name of any font family installed on the system, `".ZedSans"` to use the Zed - Setting: `ui_font_features` - Default: -```json +```json [settings] "ui_font_features": { "calt": false } @@ -4603,7 +4537,7 @@ Zed supports all OpenType features that can be enabled or disabled for a given U For example, to disable font ligatures, add the following to your settings: -```json +```json [settings] { "ui_font_features": { "calt": false @@ -4613,7 +4547,7 @@ For example, to disable font ligatures, add the following to your settings: You can also set other OpenType features, like setting `cv01` to `7`: -```json +```json [settings] { "ui_font_features": { "cv01": 7 @@ -4632,7 +4566,7 @@ You can also set other OpenType features, like setting `cv01` to `7`: For example, to use `Nerd Font` as a fallback, add the following to your settings: -```json +```json [settings] { "ui_font_fallbacks": ["Nerd Font"] } @@ -4660,7 +4594,7 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting ## An example configuration: -```json +```json [settings] // ~/.config/zed/settings.json { "theme": "cave-light", @@ -4681,7 +4615,8 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting }, "languages": { "C": { - "format_on_save": "language_server", + "format_on_save": "on", + "formatter": "language_server", "preferred_line_length": 64, "soft_wrap": "preferred_line_length" } diff --git a/docs/src/debugger.md b/docs/src/debugger.md index eef828123345a19d0fdec07ae4dcf656212317cc..4c13f1f8e8004c4eb18af518f458e34421a7aac5 100644 --- a/docs/src/debugger.md +++ b/docs/src/debugger.md @@ -37,7 +37,7 @@ You can open the same modal by clicking the "plus" button at the top right of th For languages that don't provide preconfigured debug tasks (this includes C, C++, and some extension-supported languages), you can define debug configurations in the `.zed/debug.json` file in your project root. 
This file should be an array of configuration objects: -```json +```json [debug] [ { "adapter": "CodeLLDB", @@ -56,6 +56,16 @@ Check the documentation for your language for example configurations covering ty Zed will also load debug configurations from `.vscode/launch.json`, and show them in the new process modal if no configurations are found in `.zed/debug.json`. +#### Global debug configurations + +If you run the same launch profiles across multiple projects, you can store them once in your user configuration. Invoke {#action zed::OpenDebugTasks} from the command palette to open the global `debug.json` file; Zed creates it next to your user `settings.json` and keeps it in sync with the debugger UI. The file lives at: + +- **macOS:** `~/Library/Application Support/Zed/debug.json` +- **Linux/BSD:** `$XDG_CONFIG_HOME/zed/debug.json` (falls back to `~/.config/zed/debug.json`) +- **Windows:** `%APPDATA%\Zed\debug.json` + +Populate this file with the same array of objects you would place in `.zed/debug.json`. Any scenarios defined there are merged into every workspace, so your favorite launch presets appear automatically in the "New Debug Session" dialog. + ### Launching & Attaching Zed debugger offers two ways to debug your program; you can either _launch_ a new instance of your program or _attach_ to an existing process. @@ -68,9 +78,11 @@ Compared to launching, attaching to an existing process might seem inferior, but ## Configuration -While configuration fields are debug adapter-dependent, most adapters support the following fields: +Zed requires the `adapter` and `label` fields for all debug tasks. In addition, Zed will use the `build` field to run any necessary setup steps before the debugger starts [(see below)](#build-tasks), and can accept a `tcp_connection` field to connect to an existing process. + +All other fields are provided by the debug adapter and can contain [task variables](./tasks.md#variables). Most adapters support `request`, `program`, and `cwd`: -```json +```json [debug] [ { // The label for the debug configuration and used to identify the debug session inside the debug panel & new process modal @@ -89,13 +101,13 @@ While configuration fields are debug adapter-dependent, most adapters support th ] ``` -All configuration fields support [task variables](./tasks.md#variables). +Check your debug adapter's documentation for more information on the fields it supports. ### Build tasks -Zed also allows embedding a Zed task in a `build` field that is run before the debugger starts. This is useful for setting up the environment or running any necessary setup steps before the debugger starts. +Zed allows embedding a Zed task in the `build` field that is run before the debugger starts. This is useful for setting up the environment or running any necessary setup steps before the debugger starts. -```json +```json [debug] [ { "label": "Build Binary", @@ -112,7 +124,7 @@ Zed also allows embedding a Zed task in a `build` field that is run before the d Build tasks can also refer to the existing tasks by unsubstituted label: -```json +```json [debug] [ { "label": "Build Binary", @@ -169,7 +181,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. 2. `right` - The debug panel will be docked to the right side of the UI. 3. `bottom` - The debug panel will be docked to the bottom of the UI. -```json +```json [settings] "debugger": { "dock": "bottom" }, @@ -187,7 +199,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. 
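As a minimal sketch of the global `debug.json` described above — the adapter, label, and program path here are placeholders, reusing the same fields documented for `.zed/debug.json` — the file might look like:

```json [debug]
[
  {
    // Hypothetical global preset; any scenario valid in `.zed/debug.json` can live here.
    "adapter": "CodeLLDB",
    "label": "Launch my-tool (global preset)",
    "request": "launch",
    "program": "$ZED_WORKTREE_ROOT/target/debug/my-tool",
    "cwd": "$ZED_WORKTREE_ROOT"
  }
]
```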
The meaning of a statement is determined by the adapter and it may be considered equivalent to a line. For example 'for(int i = 0; i < 10; i++)' could be considered to have 3 statements 'int i = 0', 'i < 10', and 'i++'. -```json +```json [settings] { "debugger": { "stepping_granularity": "statement" @@ -197,7 +209,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. 2. Line - The step should allow the program to run until the current source line has executed. -```json +```json [settings] { "debugger": { "stepping_granularity": "line" @@ -207,7 +219,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. 3. Instruction - The step should allow one instruction to execute (e.g. one x86 instruction). -```json +```json [settings] { "debugger": { "stepping_granularity": "instruction" @@ -225,7 +237,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. `boolean` values -```json +```json [settings] { "debugger": { "save_breakpoints": true @@ -243,7 +255,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. `boolean` values -```json +```json [settings] { "debugger": { "show_button": true @@ -261,7 +273,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. `integer` values -```json +```json [settings] { "debugger": { "timeout": 3000 @@ -277,7 +289,7 @@ The settings for the debugger are grouped under the `debugger` key in `settings. **Options** -```json +```json [settings] { "inlay_hints": { "show_value_hints": false @@ -297,7 +309,7 @@ Inline value hints can also be toggled from the Editor Controls menu in the edit `boolean` values -```json +```json [settings] { "debugger": { "log_dap_communications": true @@ -315,7 +327,7 @@ Inline value hints can also be toggled from the Editor Controls menu in the edit `boolean` values -```json +```json [settings] { "debugger": { "format_dap_log_messages": true @@ -331,7 +343,7 @@ Inline value hints can also be toggled from the Editor Controls menu in the edit You can pass `binary`, `args`, or both. `binary` should be a path to a _debug adapter_ (like `lldb-dap`) not a _debugger_ (like `lldb` itself). The `args` setting overrides any arguments that Zed would otherwise pass to the adapter. -```json +```json [settings] { "dap": { "CodeLLDB": { diff --git a/docs/src/development/local-collaboration.md b/docs/src/development/local-collaboration.md index 87363a4269ac32ac39598efef640b80384d1f44a..393c6f0bbf797cf9aa86d297633734444bdfb328 100644 --- a/docs/src/development/local-collaboration.md +++ b/docs/src/development/local-collaboration.md @@ -106,7 +106,7 @@ cat crates/collab/seed.default.json To use a different set of admin users, you can create your own version of that json file and export the `SEED_PATH` environment variable. Note that the usernames listed in the admins list currently must correspond to valid GitHub users. -```json +```json [settings] { "admins": ["admin1", "admin2"], "channels": ["zed"] @@ -196,7 +196,7 @@ By default Zed assumes that the DATABASE_URL is a Postgres database, but you can To authenticate you must first configure the server by creating a seed.json file that contains at a minimum your github handle. This will be used to create the user on demand. 
-```json +```json [settings] { "admins": ["nathansobo"] } diff --git a/docs/src/development/release-notes.md b/docs/src/development/release-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..5005fc32d36bafb57754e45423b45fc8b7bf64d9 --- /dev/null +++ b/docs/src/development/release-notes.md @@ -0,0 +1,29 @@ +# Release Notes + +Whenever you open a pull request, the body is automatically populated based on this [pull request template](https://github.com/zed-industries/zed/blob/main/.github/pull_request_template.md). + +```md +... + +Release Notes: + +- N/A _or_ Added/Fixed/Improved ... +``` + +On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/releases) docs. + +The script outputs everything below the `Release Notes` line, including additional data such as the pull request author (if not a Zed team member) and a link to the pull request. +If you use `N/A`, the script skips your pull request entirely. + +## Guidelines for crafting your `Release Notes` line(s) + +- A `Release Notes` line should only be written if the user can see or feel the difference in Zed. +- A `Release Notes` line should be written such that a Zed user can understand what the change is. + Don't assume a user knows technical editor developer lingo; phrase your change in language they understand as a user of a text editor. +- If you want to include technical details about your pull request for other team members to see, do so above the `Release Notes` line. +- Changes to docs should be labeled as `N/A`. +- If your pull request adds/changes a setting or a keybinding, always mention that setting or keybinding. + Don't make the user dig into docs or the pull request to find this information (although it should be included in docs as well). +- For pull requests that are reverts: + - If the item being reverted **has already been shipped**, include a `Release Notes` line explaining why we reverted, as this is a breaking change. + - If the item being reverted **hasn't been shipped**, edit the original PR's `Release Notes` line to be `N/A`; otherwise, it will be included and the compiler of the release notes may not know to skip it, leading to a potentially-awkward situation where we are stating we shipped something we actually didn't. diff --git a/docs/src/development/releases.md b/docs/src/development/releases.md index 04190aeb9c403575cfd3d7ac1c4bd5e33e795680..9e2cdccfdc01c528a75c85cad5d6ac0fe9ed64e2 100644 --- a/docs/src/development/releases.md +++ b/docs/src/development/releases.md @@ -11,7 +11,7 @@ Credentials for various services used in this process can be found in 1Password. Use the `releases` Slack channel to notify the team that releases will be starting. This is mostly a formality on Wednesday's minor update releases, but can be beneficial when doing patch releases, as other devs may have landed fixes they'd like to cherry pick. ---- +### Starting the Builds 1. Checkout `main` and ensure your working copy is clean. @@ -19,44 +19,74 @@ This is mostly a formality on Wednesday's minor update releases, but can be bene 1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote. -1. Run `./script/get-stable-channel-release-notes`. - - - Follow the instructions at the end of the script and aggregate the release notes into one structure. 
+1. Run `./script/get-stable-channel-release-notes` and store output locally. 1. Run `./script/bump-zed-minor-versions`. - Push the tags and branches as instructed. -1. Run `./script/get-preview-channel-changes`. +1. Run `./script/get-preview-channel-changes` and store output locally. - - Take the script's output and build release notes by organizing each release note line into a category. - - Use a prior release for the initial outline. - - Make sure to append the `Credit` line, if present, to the end of the release note line. +> **Note:** Always prioritize the stable release. +> If you've completed aggregating stable release notes, you can move on to working on aggregating preview release notes, but once the stable build has finished, work through the rest of the stable steps to fully publish. +> Preview can be finished up after. -1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste both preview and stable release notes into each and **save**. +### Stable Release - - **Do not publish the drafts!** +1. Aggregate stable release notes. -1. Check the release assets. + - Follow the instructions at the end of the script and aggregate the release notes into one structure. - - Ensure the stable and preview release jobs have finished without error. - - Ensure each draft has the proper number of assets—releases currently have 10 assets each. - - Download the artifacts for each release draft and test that you can run them locally. +1. Once the stable release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the stable release notes into it and **save**. + + - **Do not publish the draft!** + +1. Check the stable release assets. -1. Publish the drafts. + - Ensure the stable release job has finished without error. + - Ensure the draft has the proper number of assets—releases currently have 11 assets each. + - Download the artifacts for the stable release draft and test that you can run them locally. - - Publish stable and preview drafts, one at a time. - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. +1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). + + - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. + The release will be public once the rebuild has completed. 1. Post the stable release notes to social media. - Bluesky and X posts will already be built as drafts in [Buffer](https://buffer.com). + - Double-check links. - Publish both, one at a time, ensuring both are posted to each respective platform. 1. Send the stable release notes email. - The email broadcast will already be built as a draft in [Kit](https://kit.com). + - Double-check links. + - Publish the email. + +### Preview Release + +1. Aggregate preview release notes. + + - Take the script's output and build release notes by organizing each release note line into a category. + - Use a prior release for the initial outline. + - Make sure to append the `Credit` line, if present, to the end of the release note line. + +1. Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**. + + - **Do not publish the draft!** + +1. Check the preview release assets. + + - Ensure the preview release job has finished without error. 
+ - Ensure the draft has the proper number of assets—releases currently have 11 assets each. + - Download the artifacts for the preview release draft and test that you can run them locally. + +1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). + - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. + The release will be public once the rebuild has completed. + +### Prep Content for Next Week's Stable Release 1. Build social media posts based on the popular items in preview. diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index ccbc17b708b0d4691d69eea56279cc295d2c48da..17382e0bee5b97c2ffc2d74794cf3881a3cb98a1 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -18,7 +18,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). If you can't compile Zed, make sure that you have at least the following components installed in case of a Visual Studio installation: -```json +```json [settings] { "version": "1.0", "components": [ @@ -36,7 +36,7 @@ If you can't compile Zed, make sure that you have at least the following compone Or if in case of just Build Tools, the following components: -```json +```json [settings] { "version": "1.0", "components": [ diff --git a/docs/src/diagnostics.md b/docs/src/diagnostics.md index 9603c8197cf7ef473da027a51fa0db64d0b9b8e9..47cc586008deba55b9fd9fdab8fffd829a519a0e 100644 --- a/docs/src/diagnostics.md +++ b/docs/src/diagnostics.md @@ -8,7 +8,7 @@ By default, Zed displays all diagnostics as underlined text in the editor and th Editor diagnostics could be filtered with the -```json5 +```json [settings] "diagnostics_max_severity": null ``` @@ -16,7 +16,7 @@ editor setting (possible values: `"off"`, `"error"`, `"warning"`, `"info"`, `"hi The scrollbar ones are configured with the -```json5 +```json [settings] "scrollbar": { "diagnostics": "all", } @@ -32,7 +32,7 @@ Or, `editor::GoToDiagnostic` and `editor::GoToPreviousDiagnostic` could be used Zed supports showing diagnostic as lens to the right of the code. This is disabled by default, but can either be temporarily turned on (or off) using the editor menu, or permanently, using the -```json5 +```json [settings] "diagnostics": { "inline": { "enabled": true, @@ -49,7 +49,7 @@ Project panel can have its entries coloured based on the severity of the diagnos To configure, use -```json5 +```json [settings] "project_panel": { "show_diagnostics": "all", } @@ -61,7 +61,7 @@ configuration (possible values: `"off"`, `"errors"`, `"all"` (default)) Similar to the project panel, editor tabs can be colorized with the -```json5 +```json [settings] "tabs": { "show_diagnostics": "off", } diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index b624077a7ef99c4ebf4ccd9e9c66382388392b60..2b675173ce24d42b0626f2fa821a404b14e6ef4d 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -23,11 +23,7 @@ From the extensions page, click the `Install Dev Extension` button (or the {#act If you need to troubleshoot, you can check the Zed.log ({#action zed::OpenLog}) for additional output. For debug output, close and relaunch zed with the `zed --foreground` from the command line which show more verbose INFO level logging. -If you already have a published extension with the same name installed, your dev extension will override it. 
- -After installing, the `Extensions` page will indicate that the upstream extension is "Overridden by dev extension". - -Pre-installed extensions with the same name have to be uninstalled before installing the dev extension. See [#31106](https://github.com/zed-industries/zed/issues/31106) for more. +If you already have the published version of the extension installed, the published version will be uninstalled prior to the installation of the dev extension. After successful installation, the `Extensions` page will indicate that the upstream extension is "Overridden by dev extension". ## Directory Structure of a Zed Extension @@ -115,10 +111,13 @@ git submodule update ## Extension License Requirements -As of October 1st, 2025, extension repositories must include one of the following licenses: +As of October 1st, 2025, extension repositories must include a license. +The following licenses are accepted: -- [MIT](https://opensource.org/license/mit) - [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) +- [BSD 3-Clause](https://opensource.org/license/bsd-3-clause) +- [GNU GPLv3](https://www.gnu.org/licenses/gpl-3.0.en.html) +- [MIT](https://opensource.org/license/mit) This allows us to distribute the resulting binary produced from your extension code to our users. Without a valid license, the pull request to add or update your extension in the following steps will fail CI. diff --git a/docs/src/extensions/icon-themes.md b/docs/src/extensions/icon-themes.md index a76f03d068bb04fb0262d3f9309871a3a1c352d5..697723a59677c25dd14982a1c7f7cf92d1950a70 100644 --- a/docs/src/extensions/icon-themes.md +++ b/docs/src/extensions/icon-themes.md @@ -17,7 +17,7 @@ Each icon theme file should adhere to the JSON schema specified at [`https://zed Here is an example of the structure of an icon theme: -```json +```json [icon-theme] { "$schema": "https://zed.dev/schema/icon_themes/v0.3.0.json", "name": "My Icon Theme", @@ -34,8 +34,8 @@ Here is an example of the structure of an icon theme: "stylesheets": { "collapsed": "./icons/folder-stylesheets.svg", "expanded": "./icons/folder-stylesheets-open.svg" - }, - } + } + }, "chevron_icons": { "collapsed": "./icons/chevron-right.svg", "expanded": "./icons/chevron-down.svg" diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index 2dd04c70a944a06fe15854d98826b174b95bb94b..6bd34ad5cc387bc4fb208334f206aabc047d37b8 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -20,6 +20,10 @@ As well as Zed preview: brew install --cask zed@preview ``` +### Windows + +Get the latest stable builds via [the download page](https://zed.dev/download). If you want to download our preview build, you can find it on its [releases page](https://zed.dev/releases/preview). After the first manual installation, Zed will periodically check for install updates. + ### Linux For most Linux users, the easiest way to install Zed is through our installation script: diff --git a/docs/src/git.md b/docs/src/git.md index f40040bec83226b19c17d9efdaf9241032dca7a5..d56de998c9d1438a1bd160d7e577b146a4ea4da3 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -17,6 +17,7 @@ Here's an overview of all currently supported features: - Git status in the Project Panel - Branch creating and switching - Git blame viewing +- Git stash pop, apply, drop and view ## Git Panel @@ -74,6 +75,41 @@ Zed offers two commit textareas: As soon as you commit in Zed, in the Git Panel, you'll see a bar right under the commit textarea, which will show the recently submitted commit. 
In there, you can use the "Uncommit" button, which performs the `git reset HEADˆ--soft` command. +## Stashing + +Git stash allows you to temporarily save your uncommitted changes and revert your working directory to a clean state. This is particularly useful when you need to quickly switch branches or pull updates without committing incomplete work. + +### Creating Stashes + +To stash all your current changes, use the {#action git::StashAll} action. This will save both staged and unstaged changes to a new stash entry and clean your working directory. + +### Managing Stashes + +Zed provides a comprehensive stash picker accessible via {#action git::ViewStash}. From the stash picker, you can: + +- **View stash list**: Browse all your saved stashes with their descriptions and timestamps +- **Open diffs**: See exactly what changes are stored in each stash +- **Apply stashes**: Apply stash changes to your working directory while keeping the stash entry +- **Pop stashes**: Apply stash changes and remove the stash entry from the list +- **Drop stashes**: Delete unwanted stash entries without applying them + +### Quick Stash Operations + +For faster workflows, Zed provides direct actions to work with the most recent stash: + +- **Apply latest stash**: Use {#action git::StashApply} to apply the most recent stash without removing it +- **Pop latest stash**: Use {#action git::StashPop} to apply and remove the most recent stash + +### Stash Diff View + +When viewing a specific stash in the diff view, you have additional options available through the interface: + +- Apply the current stash to your working directory +- Pop the current stash (apply and remove) +- Remove the stash without applying changes + +To open the stash diff view, select a stash from the stash picker and use the {#action stash_picker::ShowStashItem} ({#kb stash_picker::ShowStashItem}) keybinding. + ## AI Support in Git Zed currently supports LLM-powered commit message generation. @@ -83,7 +119,7 @@ You can ask AI to generate a commit message by focusing on the message editor wi You can specify your preferred model to use by providing a `commit_message_model` agent setting. See [Feature-specific models](./ai/agent-settings.md#feature-specific-models) for more information. -```json +```json [settings] { "agent": { "version": "2", @@ -151,6 +187,10 @@ When viewing files with changes, Zed displays diff hunks that can be expanded or | {#action git::Switch} | {#kb git::Switch} | | {#action git::CheckoutBranch} | {#kb git::CheckoutBranch} | | {#action git::Blame} | {#kb git::Blame} | +| {#action git::StashAll} | {#kb git::StashAll} | +| {#action git::StashPop} | {#kb git::StashPop} | +| {#action git::StashApply} | {#kb git::StashApply} | +| {#action git::ViewStash} | {#kb git::ViewStash} | | {#action editor::ToggleGitBlameInline} | {#kb editor::ToggleGitBlameInline} | | {#action editor::ExpandAllDiffHunks} | {#kb editor::ExpandAllDiffHunks} | | {#action editor::ToggleSelectedDiffHunks} | {#kb editor::ToggleSelectedDiffHunks} | diff --git a/docs/src/globs.md b/docs/src/globs.md index 4039d7c4556e24d0fb3ca30eafe8be05d13875bc..60873e6965493c0c089a329e89fdb6462999739f 100644 --- a/docs/src/globs.md +++ b/docs/src/globs.md @@ -57,7 +57,7 @@ When using the "Include" / "Exclude" filters on a Project Search each glob is wr Alternatively, if in your Zed settings you wanted a [`file_types`](./configuring-zed.md#file-types) override which only applied to a certain directory you must explicitly include the wildcard globs. 
For example, if you had a directory of template files with the `html` extension that you wanted to recognize as Jinja2 template you could use the following: -```json +```json [settings] { "file_types": { "C++": ["[cC]"], diff --git a/docs/src/icon-themes.md b/docs/src/icon-themes.md index 70dd1267aca0630050292dbea61baeabd13b0cf4..b3c449889cbbc53da216ea668cd309e9ae1bfe5b 100644 --- a/docs/src/icon-themes.md +++ b/docs/src/icon-themes.md @@ -18,7 +18,7 @@ Your selected icon theme is stored in your settings file. You can open your sett Just like with themes, Zed allows for configuring different icon themes for light and dark mode. You can set the mode to `"light"` or `"dark"` to ignore the current system mode. -```json +```json [settings] { "icon_theme": { "mode": "system", diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 306396c73cfc2fab555ce85b223c56a56dd10fa0..1ce148bcf118bce61e113074ad321f27344aec04 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -23,8 +23,12 @@ For more information, see the documentation for [Vim mode](./vim.md) and [Helix ## User keymaps -Zed reads your keymap from `~/.config/zed/keymap.json`, which you can open with the {#action zed::OpenKeymap} action from the command palette. -You can also edit your keymap through the Zed Keymap Editor, accessible via the {#action zed::OpenKeymap} action or the {#kb zed::OpenKeymap} keybinding. +Where Zed looks for your keymap: + +- macOS/Linux: `~/.config/zed/keymap.json` +- Windows: `~\AppData\Roaming\Zed\keymap.json` + +You can open the keymap with the {#action zed::OpenKeymapFile} action from the command palette, or edit it in Zed's Keymap Editor, accessible via the {#action zed::OpenKeymap} action or the {#kb zed::OpenKeymap} keybinding. The `keymap.json` file contains a JSON array of objects with `"bindings"`. If no `"context"` is set, the bindings are always active. If it is set, the binding is only active when the [context matches](#contexts). @@ -34,7 +38,7 @@ If you are using a non-QWERTY, Latin-character keyboard, you may want to set `us For example: -```json +```json [keymap] [ { "bindings": { @@ -51,7 +55,11 @@ For example: ] ``` -You can see all of Zed's default bindings in the default keymaps for [macOS](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-macos.json) or [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json). +You can see all of Zed's default bindings in the default keymaps for: + +- [macOS](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-macos.json) +- [Windows](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-windows.json) +- [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json) If you want to debug problems with custom keymaps, you can use `dev: Open Key Context View` from the command palette. Please file [an issue](https://github.com/zed-industries/zed) if you run into something you think should work but isn't. @@ -72,7 +80,7 @@ The keys can be any single Unicode codepoint that your keyboard generates (for e A few examples: -```json +```json [settings] "bindings": { "cmd-k cmd-s": "zed::OpenKeymap", // matches ⌘-k then ⌘-s "space e": "editor::Complete", // type space then e @@ -130,7 +138,7 @@ If you're using Vim mode, we have information on how [vim modes influence the co ### Actions Almost all of Zed's functionality is exposed as actions.
-Although there is no explicitly documented list, you can find most of them by searching in the command palette, by looking in the default keymaps for [macOS](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-macos.json) or [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json), or by using Zed's autocomplete in your keymap file. +Although there is no explicitly documented list, you can find most of them by searching in the command palette, by looking in the default keymaps for [macOS](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-macos.json), [Windows](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-windows.json) or [Linux](https://github.com/zed-industries/zed/blob/main/assets/keymaps/default-linux.json), or by using Zed's autocomplete in your keymap file. Most actions do not require any arguments, and so you can bind them as strings: `"ctrl-a": "language_selector::Toggle"`. Some require a single argument and must be bound as an array: `"cmd-1": ["workspace::ActivatePane", 0]`. Some actions require multiple arguments and are bound as an array of a string and an object: `"ctrl-a": ["pane::DeploySearch", { "replace_enabled": true }]`. @@ -161,7 +169,7 @@ On keyboards that support extended Latin alphabets (French AZERTY, German QWERTZ If you are defining shortcuts in your personal keymap, you can opt into the key equivalent mapping by setting `use_key_equivalents` to `true` in your keymap: -```json +```json [keymap] [ { "use_key_equivalents": true, @@ -187,7 +195,7 @@ If you'd like a given binding to do nothing in a given context, you can use want to disable it, or if you want to type the character that would be typed by the sequence, or if you want to disable multikey bindings starting with that key. -```json +```json [keymap] [ { "context": "Workspace", @@ -202,7 +210,7 @@ A `null` binding follows the same precedence rules as normal actions, so it disa This is useful for preventing Zed from falling back to a default key binding when the action you specified is conditional and propagates. For example, `buffer_search::DeployReplace` only triggers when the search bar is not in view. If the search bar is in view, it would propagate and trigger the default action set for that key binding, such as opening the right dock. To prevent this from happening: -```json +```json [keymap] [ { "context": "Workspace", @@ -223,7 +231,7 @@ This is useful for preventing Zed from falling back to a default key binding whe A common request is to be able to map from a single keystroke to a sequence. You can do this with the `workspace::SendKeystrokes` action. -```json +```json [keymap] [ { "bindings": { @@ -262,7 +270,7 @@ If you're on Linux or Windows, you might find yourself wanting to forward key co For example, `ctrl-n` creates a new tab in Zed on Linux. If you want to send `ctrl-n` to the built-in terminal when it's focused, add the following to your keymap: -```json +```json [settings] { "context": "Terminal", "bindings": { diff --git a/docs/src/languages/ansible.md b/docs/src/languages/ansible.md index 16b6cef5abffd59072140c0be19c317160f8c582..bce25ddc6c35f83517bbe26d27dc3d6b1bfa524e 100644 --- a/docs/src/languages/ansible.md +++ b/docs/src/languages/ansible.md @@ -11,7 +11,7 @@ Support for Ansible in Zed is provided via a community-maintained [Ansible exten To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. 
To change this behavior you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example: -```json +```json [settings] "file_types": { "Ansible": [ "**.ansible.yml", @@ -50,7 +50,7 @@ If your inventory file is in the YAML format, you can either: - Or configure the yaml language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)): -```json +```json [settings] "lsp": { "yaml-language-server": { "settings": { @@ -71,7 +71,7 @@ If your inventory file is in the YAML format, you can either: By default, the following default config is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server: -```json +```json [settings] { "ansible": { "ansible": { @@ -99,7 +99,7 @@ By default, the following default config is passed to the Ansible language serve When desired, any of the above default settings can be overridden under the `"lsp"` section of your Zed settings file. For example: -```json +```json [settings] "lsp": { // Note, the Zed Ansible extension prefixes all settings with `ansible` // so instead of using `ansible.ansible.path` use `ansible.path`. diff --git a/docs/src/languages/biome.md b/docs/src/languages/biome.md index 4632d56d82aa3416e1bbe124b7e7900dc3035533..f0756fe5badd6080a2437dcdf8311427d6b8f0bb 100644 --- a/docs/src/languages/biome.md +++ b/docs/src/languages/biome.md @@ -24,7 +24,7 @@ The Biome extension includes support for the following languages: By default, the `biome.json` file is required to be in the root of the workspace. -```json +```json [settings] { "$schema": "https://biomejs.dev/schemas/1.8.3/schema.json" } diff --git a/docs/src/languages/c.md b/docs/src/languages/c.md index 849ce6a662e291659d27d05deea3aa95f51b0161..7f6e0ba6b2ed24dd958c02a7606e2a569d08f8f1 100644 --- a/docs/src/languages/c.md +++ b/docs/src/languages/c.md @@ -17,7 +17,7 @@ CompileFlags: By default clang and gcc will recognize `*.C` and `*.H` (uppercase extensions) as C++ and not C and so Zed too follows this convention. If you are working with a C-only project (perhaps one with legacy uppercase pathing like `FILENAME.C`) you can override this behavior by adding this to your settings: -```json +```json [settings] { "file_types": { "C": ["C", "H"] @@ -40,7 +40,7 @@ See [Clang-Format Style Options](https://clang.llvm.org/docs/ClangFormatStyleOpt You can trigger formatting via {#kb editor::Format} or the `editor: format` action from the command palette or by adding `format_on_save` to your Zed settings: -```json +```json [settings] "languages": { "C": { "format_on_save": "on", @@ -67,9 +67,12 @@ After building your project, CMake will generate the `compile_commands.json` fil You can use CodeLLDB or GDB to debug native binaries. (Make sure that your build process passes `-g` to the C compiler, so that debug information is included in the resulting binary.) See below for examples of debug configurations that you can add to `.zed/debug.json`. 
+- [CodeLLDB configuration documentation](https://github.com/vadimcn/codelldb/blob/master/MANUAL.md#starting-a-new-debug-session) +- [GDB configuration documentation](https://sourceware.org/gdb/current/onlinedocs/gdb.html/Debugger-Adapter-Protocol.html) + ### Build and Debug Binary -```json +```json [debug] [ { "label": "Debug native binary", diff --git a/docs/src/languages/cpp.md b/docs/src/languages/cpp.md index fe2eb9c1f911bc2457862d2d94d6d489cb1b3d49..36cdc7a9580d2de41a6eb7063d694d54c7caffa4 100644 --- a/docs/src/languages/cpp.md +++ b/docs/src/languages/cpp.md @@ -13,7 +13,7 @@ By default, Zed will try to find a `clangd` in your `$PATH` and try to use that. If you want to install a pre-release `clangd` version instead you can instruct Zed to do so by setting `pre_release` to `true` in your `settings.json`: -```json +```json [settings] { "lsp": { "clangd": { @@ -27,7 +27,7 @@ If you want to install a pre-release `clangd` version instead you can instruct Z If you want to disable Zed looking for a `clangd` binary, you can set `ignore_system_version` to `true` in your `settings.json`: -```json +```json [settings] { "lsp": { "clangd": { @@ -41,7 +41,7 @@ If you want to disable Zed looking for a `clangd` binary, you can set `ignore_sy If you want to use a binary in a custom location, you can specify a `path` and optional `arguments`: -```json +```json [settings] { "lsp": { "clangd": { @@ -60,7 +60,7 @@ This `"path"` has to be an absolute path. You can pass any number of arguments to clangd. To see a full set of available options, run `clangd --help` from the command line. For example with `--function-arg-placeholders=0` completions contain only parentheses for function calls, while the default (`--function-arg-placeholders=1`) completions also contain placeholders for method parameters. -```json +```json [settings] { "lsp": { "clangd": { @@ -93,7 +93,7 @@ See [Clang-Format Style Options](https://clang.llvm.org/docs/ClangFormatStyleOpt You can trigger formatting via {#kb editor::Format} or the `editor: format` action from the command palette or by adding `format_on_save` to your Zed settings: -```json +```json [settings] "languages": { "C++": { "format_on_save": "on", @@ -135,9 +135,13 @@ After building your project, CMake will generate the `compile_commands.json` fil You can use CodeLLDB or GDB to debug native binaries. (Make sure that your build process passes `-g` to the C++ compiler, so that debug information is included in the resulting binary.) See below for examples of debug configurations that you can add to `.zed/debug.json`. 
+- [CodeLLDB configuration documentation](https://github.com/vadimcn/codelldb/blob/master/MANUAL.md#starting-a-new-debug-session) +- [GDB configuration documentation](https://sourceware.org/gdb/current/onlinedocs/gdb.html/Debugger-Adapter-Protocol.html) + - GDB needs to be at least v14.1 + ### Build and Debug Binary -```json +```json [debug] [ { "label": "Debug native binary", diff --git a/docs/src/languages/csharp.md b/docs/src/languages/csharp.md index b422e0941b5dc8cd67028c96eaad0d6249ab45c3..e7a702c19053219c37c56887954c393396e7abca 100644 --- a/docs/src/languages/csharp.md +++ b/docs/src/languages/csharp.md @@ -11,7 +11,7 @@ C# support is available through the [C# extension](https://github.com/zed-extens The `OmniSharp` binary can be configured in a Zed settings file with: -```json +```json [settings] { "lsp": { "omnisharp": { diff --git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 14760a46ad9e7cadeb3649050c5c953871782baa..20f8a1d23018ada2e0b4779447eebbed119299b8 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -22,7 +22,7 @@ dart --version If you would like to use a specific dart binary or use dart via FVM you can specify the `dart` binary in your Zed settings.jsons file: -```json +```json [settings] { "lsp": { "dart": { @@ -39,7 +39,7 @@ If you would like to use a specific dart binary or use dart via FVM you can spec Dart by-default uses a very conservative maximum line length (80). If you would like the dart LSP to permit a longer line length when auto-formatting, add the following to your Zed settings.json: -```json +```json [settings] { "lsp": { "dart": { diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index b2ef5a3565002047819110a253be292b5c592a93..a4192257765d6aa131232ff8a80a3af452a38d57 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -8,7 +8,7 @@ Deno support is available through the [Deno extension](https://github.com/zed-ex To use the Deno Language Server with TypeScript and TSX files, you will likely wish to disable the default language servers and enable deno by adding the following to your `settings.json`: -```json +```json [settings] { "lsp": { "deno": { @@ -61,7 +61,7 @@ TBD: Deno TypeScript REPL instructions [docs/repl#typescript-deno](../repl.md#ty To get completions for `deno.json` or `package.json` you can add the following to your `settings.json`: (More info here https://zed.dev/docs/languages/json) -```json +```json [settings] "lsp": { "json-language-server": { "settings": { @@ -69,7 +69,8 @@ To get completions for `deno.json` or `package.json` you can add the following t "schemas": [ { "fileMatch": [ - "deno.json" + "deno.json", + "deno.jsonc" ], "url": "https://raw.githubusercontent.com/denoland/deno/refs/heads/main/cli/schemas/config-file.v1.json" }, @@ -90,7 +91,7 @@ To get completions for `deno.json` or `package.json` you can add the following t To debug deno programs, add this to `.zed/debug.json` -```json +```json [debug] [ { "adapter": "JavaScript", @@ -110,7 +111,7 @@ To debug deno programs, add this to `.zed/debug.json` To run deno tasks like tests from the ui, add this to `.zed/tasks.json` -```json +```json [tasks] [ { "label": "deno test", diff --git a/docs/src/languages/diff.md b/docs/src/languages/diff.md index 3f8162962260a6c061e5d900d92e8b4b7cee1608..a089b975e7b420cf195a66dc114d267b6dd07a04 100644 --- a/docs/src/languages/diff.md +++ b/docs/src/languages/diff.md @@ -10,7 +10,7 @@ Zed will not attempt to format diff files and has 
[`remove_trailing_whitespace_o Zed will automatically recognize files with `patch` and `diff` extensions as Diff files. To recognize other extensions, add them to `file_types` in your Zed settings.json: -```json +```json [settings] "file_types": { "Diff": ["dif"] }, diff --git a/docs/src/languages/elixir.md b/docs/src/languages/elixir.md index b09653b7ff6972a0c9a92d496648302efa7db593..3df116492ae097f30e7da041a2643e085570f61c 100644 --- a/docs/src/languages/elixir.md +++ b/docs/src/languages/elixir.md @@ -21,7 +21,7 @@ The Elixir extension offers language server support for `expert`, `elixir-ls`, ` To switch to `expert`, add the following to your `settings.json`: -```json +```json [settings] "languages": { "Elixir": { "language_servers": ["expert", "!elixir-ls", "!next-ls", "!lexical", "..."] @@ -36,7 +36,7 @@ To switch to `expert`, add the following to your `settings.json`: To switch to `next-ls`, add the following to your `settings.json`: -```json +```json [settings] "languages": { "Elixir": { "language_servers": ["next-ls", "!expert", "!elixir-ls", "!lexical", "..."] @@ -51,7 +51,7 @@ To switch to `next-ls`, add the following to your `settings.json`: To switch to `lexical`, add the following to your `settings.json`: -```json +```json [settings] "languages": { "Elixir": { "language_servers": ["lexical", "!expert", "!elixir-ls", "!next-ls", "..."] @@ -84,11 +84,12 @@ brew install elixir-ls If you prefer to format your code with [Mix](https://hexdocs.pm/mix/Mix.html), use the following snippet in your `settings.json` file to configure it as an external formatter. Formatting will occur on file save. -```json +```json [settings] { "languages": { "Elixir": { - "format_on_save": { + "format_on_save": "on", + "formatter": { "external": { "command": "mix", "arguments": ["format", "--stdin-filename", "{buffer_path}", "-"] @@ -105,7 +106,7 @@ You can pass additional elixir-ls workspace configuration options via lsp settin The following example disables dialyzer: -```json +```json [settings] "lsp": { "elixir-ls": { "settings": { diff --git a/docs/src/languages/elm.md b/docs/src/languages/elm.md index ae9a4c1ffc53a4bd203c5f7dacb8e0bdce754606..3a18af05bb4c4008057d3fd813808c1cc3b61b70 100644 --- a/docs/src/languages/elm.md +++ b/docs/src/languages/elm.md @@ -23,7 +23,7 @@ Zed support for Elm requires installation of `elm`, `elm-format`, and `elm-revie Elm language server can be configured in your `settings.json`, e.g.: -```json +```json [settings] { "lsp": { "elm-language-server": { diff --git a/docs/src/languages/erlang.md b/docs/src/languages/erlang.md index e82e6d48c36ed627cda9031272754802380e967f..b3850fc55ee522d3bd2b8f5dda1af13b83135eb2 100644 --- a/docs/src/languages/erlang.md +++ b/docs/src/languages/erlang.md @@ -15,7 +15,7 @@ The Erlang extension offers language server support for `erlang_ls` and `erlang- To switch to `erlang-language-platform`, add the following to your `settings.json`: -```json +```json [settings] { "languages": { "Erlang": { diff --git a/docs/src/languages/fish.md b/docs/src/languages/fish.md index ad2148d807baeb73241206ab5538ddaffdc789ce..6c07d444b849ec92d73bf702e64e6f6323754e95 100644 --- a/docs/src/languages/fish.md +++ b/docs/src/languages/fish.md @@ -18,7 +18,7 @@ fish_indent --version 2. 
Configure Zed to automatically format fish code with `fish_indent`: -```json +```json [settings] "languages": { "Fish": { "formatter": { diff --git a/docs/src/languages/go.md b/docs/src/languages/go.md index 0a12616b1c7dda9eb416717aa16bfeb5f50748d4..3c4e505f8f1e64f91fc6945b2d614895a106ea20 100644 --- a/docs/src/languages/go.md +++ b/docs/src/languages/go.md @@ -41,7 +41,7 @@ If `gopls` is not found you will likely need to add `export PATH="$PATH:$HOME/go Zed sets the following initialization options for inlay hints: -```json +```json [settings] "hints": { "assignVariableTypes": true, "compositeLiteralFields": true, @@ -57,12 +57,12 @@ to make the language server send back inlay hints when Zed has them enabled in t Use -```json +```json [settings] "lsp": { "gopls": { "initialization_options": { "hints": { - .... + // .... } } } @@ -75,15 +75,17 @@ See [gopls inlayHints documentation](https://github.com/golang/tools/blob/master ## Debugging -Zed supports zero-configuration debugging of Go tests and entry points (`func main`). Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these preconfigured debug tasks. +Zed supports zero-configuration debugging of Go tests and entry points (`func main`) using Delve. Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these preconfigured debug tasks. For more control, you can add debug configurations to `.zed/debug.json`. See below for examples. +- [Delve configuration documentation](https://github.com/go-delve/delve/blob/master/Documentation/api/dap/README.md#launch-and-attach-configurations) + ### Debug Go Packages To debug a specific package, you can do so by setting the Delve mode to "debug". In this case "program" should be set to the package name. -```json +```json [debug] [ { "label": "Go (Delve)", @@ -110,7 +112,7 @@ To debug a specific package, you can do so by setting the Delve mode to "debug". To debug the tests for a package, set the Delve mode to "test". The "program" is still the package name, and you can use the "buildFlags" to do things like set tags, and the "args" to set args on the test binary. (See `go help testflags` for more information on doing that). -```json +```json [debug] [ { "label": "Run integration tests", @@ -130,7 +132,7 @@ The "program" is still the package name, and you can use the "buildFlags" to do If you need to build your application with a specific command, you can use the "exec" mode of Delve. In this case "program" should point to an executable, and the "build" command should build that. -```json +```json [debug] [ { "label": "Debug Prebuilt Unit Tests", @@ -160,7 +162,7 @@ and the "build" command should build that. You might find yourself needing to connect to an existing instance of Delve that's not necessarily running on your machine; in such case, you can use `tcp_arguments` to instrument Zed's connection to Delve. 
-```json +```json [debug] [ { "adapter": "Delve", @@ -172,7 +174,7 @@ You might find yourself needing to connect to an existing instance of Delve that "request": "launch", "mode": "exec", "stopOnEntry": false, - "tcp_connection": { "host": "123.456.789.012", "port": 53412 } + "tcp_connection": { "host": "127.0.0.1", "port": 53412 } } ] ``` diff --git a/docs/src/languages/haskell.md b/docs/src/languages/haskell.md index fec9142a5f8e8a4414452b4109ae22c003169646..901bd9ded1954931df37ea3f912573bc849317db 100644 --- a/docs/src/languages/haskell.md +++ b/docs/src/languages/haskell.md @@ -19,7 +19,7 @@ which haskell-language-server-wrapper If you need to configure haskell-language-server (hls) you can add configuration options to your Zed settings.json: -```json +```json [settings] { "lsp": { "hls": { @@ -37,7 +37,7 @@ See the official [configuring haskell-language-server](https://haskell-language- If you would like to use a specific hls binary, or perhaps use [static-ls](https://github.com/josephsumabat/static-ls) as a drop-in replacement instead, you can specify the binary path and arguments: -```json +```json [settings] { "lsp": { "hls": { diff --git a/docs/src/languages/helm.md b/docs/src/languages/helm.md index a6e3c8fa49b9056fb9d053790a3f3f656087d366..f8a6f5c5fa736e0e4de3d4d9bfc33f28bec95dbc 100644 --- a/docs/src/languages/helm.md +++ b/docs/src/languages/helm.md @@ -9,7 +9,7 @@ Support for Helm in Zed is provided by the community-maintained [Helm extension] Enable Helm language for Helm files by editing your `.zed/settings.json` and adding: -```json +```json [settings] "file_types": { "Helm": [ "**/templates/**/*.tpl", diff --git a/docs/src/languages/html.md b/docs/src/languages/html.md index 3afa34068d9f9902595e6835951d07fe31de11ca..274083adee504f68852895b3e66c4f7b78ecdfff 100644 --- a/docs/src/languages/html.md +++ b/docs/src/languages/html.md @@ -7,7 +7,7 @@ HTML support is available through the [HTML extension](https://github.com/zed-in This extension is automatically installed, but if you do not want to use it, you can add the following to your settings: -```json +```json [settings] { "auto_install_extensions": { "html": false @@ -21,7 +21,7 @@ By default Zed uses [Prettier](https://prettier.io/) for formatting HTML. 
You can disable `format_on_save` by adding the following to your Zed `settings.json`: -```json +```json [settings] "languages": { "HTML": { "format_on_save": "off", @@ -35,7 +35,7 @@ You can still trigger formatting manually with {#kb editor::Format} or by openin To use the `vscode-html-language-server` language server auto-formatting instead of Prettier, add the following to your Zed settings: -```json +```json [settings] "languages": { "HTML": { "formatter": "language_server", @@ -45,7 +45,7 @@ To use the `vscode-html-language-server` language server auto-formatting instead You can customize various [formatting options](https://code.visualstudio.com/docs/languages/html#_formatting) for `vscode-html-language-server` via your Zed `settings.json`: -```json +```json [settings] "lsp": { "vscode-html-language-server": { "settings": { diff --git a/docs/src/languages/java.md b/docs/src/languages/java.md index 31177676854884be804838ddf72f937fc9376f71..966f2352b3955e9ed2574eab34a717dc14832d3d 100644 --- a/docs/src/languages/java.md +++ b/docs/src/languages/java.md @@ -31,7 +31,7 @@ You can add these customizations to your Zed Settings by launching {#action zed: ### Zed Java Settings -```json +```json [settings] { "lsp": { "jdtls": { @@ -47,7 +47,7 @@ You can add these customizations to your Zed Settings by launching {#action zed: By default, zed will look in your `PATH` for a `jdtls` binary, if you wish to specify an explicit binary you can do so via settings: -```json +```json [settings] "lsp": { "jdtls": { "binary": { @@ -64,7 +64,7 @@ By default, zed will look in your `PATH` for a `jdtls` binary, if you wish to sp There are also many more options you can pass directly to the language server, for example: -```json +```json [settings] { "lsp": { "jdtls": { diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index c71071a9b37c74c2226796083af3ae557751da8e..45f440267ec01880437fc16e788c6b1b715efd82 100644 --- a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -15,7 +15,7 @@ See [the configuration docs](../configuring-zed.md) for more information. For example, if you have Prettier installed and on your `PATH`, you can use it to format JavaScript files by adding the following to your `settings.json`: -```json +```json [settings] { "languages": { "JavaScript": { @@ -34,7 +34,7 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t Zed supports JSX syntax highlighting out of the box. -In JSX strings, the [`tailwindcss-language-server`](./tailwindcss.md) is used provide autocompletion for Tailwind CSS classes. +In JSX strings, the [`tailwindcss-language-server`](./tailwindcss.md) is used to provide autocompletion for Tailwind CSS classes. 
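+
+Completions are offered inside ordinary JSX string attributes out of the box. If you also build class names through a helper such as twin.macro's `tw` tagged template (used here purely as an illustration; substitute whatever helper your project uses), you can likely teach the language server extra patterns via its `experimental.classRegex` setting. A minimal sketch, assuming your setup passes the option through Zed's `lsp` settings:
+
+```json [settings]
+{
+  "lsp": {
+    "tailwindcss-language-server": {
+      "settings": {
+        "experimental": {
+          // Illustrative regex: also offer class completions inside tw`...` templates.
+          "classRegex": ["tw`([^`]*)`"]
+        }
+      }
+    }
+  }
+}
+```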
## JSDoc @@ -45,7 +45,7 @@ Zed uses [tree-sitter/tree-sitter-jsdoc](https://github.com/tree-sitter/tree-sit You can configure Zed to format code using `eslint --fix` by running the ESLint code action when formatting: -```json +```json [settings] { "languages": { "JavaScript": { @@ -59,7 +59,7 @@ You can configure Zed to format code using `eslint --fix` by running the ESLint You can also only execute a single ESLint rule when using `fixAll`: -```json +```json [settings] { "languages": { "JavaScript": { @@ -88,14 +88,12 @@ You can also only execute a single ESLint rule when using `fixAll`: If you **only** want to run ESLint on save, you can configure code actions as the formatter: -```json +```json [settings] { "languages": { "JavaScript": { - "formatter": { - "code_actions": { - "source.fixAll.eslint": true - } + "code_actions_on_format": { + "source.fixAll.eslint": true } } } @@ -106,7 +104,7 @@ the formatter: You can configure ESLint's `nodePath` setting: -```json +```json [settings] { "lsp": { "eslint": { @@ -124,7 +122,7 @@ You can configure ESLint's `problems` setting. For example, here's how to set `problems.shortenToSingleLine`: -```json +```json [settings] { "lsp": { "eslint": { @@ -142,7 +140,7 @@ For example, here's how to set `problems.shortenToSingleLine`: You can configure ESLint's `rulesCustomizations` setting: -```json +```json [settings] { "lsp": { "eslint": { @@ -161,7 +159,7 @@ You can configure ESLint's `rulesCustomizations` setting: You can configure ESLint's `workingDirectory` setting: -```json +```json [settings] { "lsp": { "eslint": { @@ -177,21 +175,31 @@ You can configure ESLint's `workingDirectory` setting: ## Debugging -Zed supports debugging JavaScript code out of the box. +Zed supports debugging JavaScript code out of the box with `vscode-js-debug`. The following can be debugged without writing additional configuration: - Tasks from `package.json` -- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine) +- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine, Bun, Node) Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these predefined debug tasks. +> **Note:** Bun test is automatically detected when `@types/bun` is present in `package.json`. +> +> **Note:** Node test is automatically detected when `@types/node` is present in `package.json` (requires Node.js 20+). + As for all languages, configurations from `.vscode/launch.json` are also available for debugging in Zed. If your use-case isn't covered by any of these, you can take full control by adding debug configurations to `.zed/debug.json`. See below for example configurations. -### Debug the current file +### Configuring JavaScript debug tasks -```json +JavaScript debugging is more complicated than other languages because there are two different environments: Node.js and the browser. `vscode-js-debug` exposes a `type` field, that you can use to specify the environment, either `node` or `chrome`. + +- [vscode-js-debug configuration documentation](https://github.com/microsoft/vscode-js-debug/blob/main/OPTIONS.md) + +### Debug the current file with Node + +```json [debug] [ { "adapter": "JavaScript", @@ -204,11 +212,9 @@ If your use-case isn't covered by any of these, you can take full control by add ] ``` -This implicitly runs the current file using `node`. 
- ### Launch a web app in Chrome -```json +```json [debug] [ { "adapter": "JavaScript", diff --git a/docs/src/languages/json.md b/docs/src/languages/json.md index 94f56999d51a3e2395f481be67d267172ba07075..33acdb172e40f0d94a3517dadb70efb37a50e635 100644 --- a/docs/src/languages/json.md +++ b/docs/src/languages/json.md @@ -16,7 +16,7 @@ If you use files with the `*.jsonc` extension when using `Format Document` or ha To workaround this behavior you can add the following to your `.prettierrc` configuration file: -```json +```json [settings] { "overrides": [ { @@ -40,7 +40,7 @@ To specify a schema inline with your JSON files, add a `$schema` top level key l For example to for a `.luarc.json` for use with [lua-language-server](https://github.com/LuaLS/lua-language-server/): -```json +```json [settings] { "$schema": "https://raw.githubusercontent.com/sumneko/vscode-lua/master/setting/schema.json", "runtime.version": "Lua 5.4" @@ -53,7 +53,7 @@ You can alternatively associate JSON Schemas with file paths by via Zed LSP sett To -```json +```json [settings] "lsp": { "json-language-server": { "settings": { diff --git a/docs/src/languages/jsonnet.md b/docs/src/languages/jsonnet.md index df4e39b98dc87deff5c56c0a51bf120c81055aa4..405087766b33d3cfe126bfd98c04cff9989cb857 100644 --- a/docs/src/languages/jsonnet.md +++ b/docs/src/languages/jsonnet.md @@ -11,7 +11,7 @@ Workspace configuration options can be passed to the language server via the `ls The following example enables support for resolving [tanka](https://tanka.dev) import paths in `jsonnet-language-server`: -```json +```json [settings] { "lsp": { "jsonnet-language-server": { diff --git a/docs/src/languages/kotlin.md b/docs/src/languages/kotlin.md index 60d66f277eb62c2bdf9905687045abbca4db20b9..a81643ab7d5a20b8985a6bfb3e23d214077c5d6b 100644 --- a/docs/src/languages/kotlin.md +++ b/docs/src/languages/kotlin.md @@ -20,7 +20,7 @@ under `class Configuration` and initialization_options under `class Initializati The following example changes the JVM target from `default` (which is 1.8) to `17`: -```json +```json [settings] { "lsp": { "kotlin-language-server": { @@ -40,7 +40,7 @@ The following example changes the JVM target from `default` (which is 1.8) to To use a specific java installation, just specify the `JAVA_HOME` environment variable with: -```json +```json [settings] { "lsp": { "kotlin-language-server": { diff --git a/docs/src/languages/lua.md b/docs/src/languages/lua.md index 7e92b12b919ef2537b0fa6785a6438ef0039deda..65b709b39188753a29a50119f04b6141ad12d849 100644 --- a/docs/src/languages/lua.md +++ b/docs/src/languages/lua.md @@ -9,7 +9,7 @@ Lua support is available through the [Lua extension](https://github.com/zed-exte To configure LuaLS you can create a `.luarc.json` file in the root of your workspace. -```json +```json [settings] { "$schema": "https://raw.githubusercontent.com/LuaLS/vscode-lua/master/setting/schema.json", "runtime.version": "Lua 5.4", @@ -55,7 +55,7 @@ cd .. && git clone https://github.com/notpeter/playdate-luacats Then in your `.luarc.json`: -```json +```json [settings] { "$schema": "https://raw.githubusercontent.com/LuaLS/vscode-lua/master/setting/schema.json", "runtime.version": "Lua 5.4", @@ -90,7 +90,7 @@ To enable [Inlay Hints](../configuring-languages.md#inlay-hints) for LuaLS in Ze 1. 
Add the following to your Zed settings.json: -```json +```json [settings] "languages": { "Lua": { "inlay_hints": { @@ -111,7 +111,7 @@ To enable [Inlay Hints](../configuring-languages.md#inlay-hints) for LuaLS in Ze To enable auto-formatting with your LuaLS (provided by [CppCXY/EmmyLuaCodeStyle](https://github.com/CppCXY/EmmyLuaCodeStyle)) make sure you have `"format.enable": true,` in your .luarc.json: -```json +```json [settings] { "$schema": "https://raw.githubusercontent.com/sumneko/vscode-lua/master/setting/schema.json", "format.enable": true @@ -120,7 +120,7 @@ To enable auto-formatting with your LuaLS (provided by [CppCXY/EmmyLuaCodeStyle] Then add the following to your Zed `settings.json`: -```json +```json [settings] { "languages": { "Lua": { @@ -140,7 +140,7 @@ Alternatively to use [StyLua](https://github.com/JohnnyMorganz/StyLua) for auto- 1. Install [StyLua](https://github.com/JohnnyMorganz/StyLua): `brew install stylua` or `cargo install stylua --features lua52,lua53,lua54,luau,luajit` (feel free to remove any Lua versions you don't need). 2. Add the following to your `settings.json`: -```json +```json [settings] { "languages": { "Lua": { diff --git a/docs/src/languages/luau.md b/docs/src/languages/luau.md index 58d78855a1afdb4ab782aff888ba0bbfb637a364..b99cfc86ac1ea7eb0e0f85a3091e2fc76725fed4 100644 --- a/docs/src/languages/luau.md +++ b/docs/src/languages/luau.md @@ -27,7 +27,7 @@ cargo install stylua --features lua52,lua53,lua54,luau Then add the following to your Zed `settings.json`: -```json +```json [settings] "languages": { "Luau": { "formatter": { diff --git a/docs/src/languages/markdown.md b/docs/src/languages/markdown.md index 38a2b1c43f94b91097bcd0b1dc3301427e1b9685..36ce734f7cfbcc066bb8026568209738655a6be9 100644 --- a/docs/src/languages/markdown.md +++ b/docs/src/languages/markdown.md @@ -25,7 +25,7 @@ def fib(n): Zed supports using Prettier to automatically re-format Markdown documents. You can trigger this manually via the {#action editor::Format} action or via the {#kb editor::Format} keyboard shortcut. Alternately, you can automatically format by enabling [`format_on_save`](../configuring-zed.md#format-on-save) in your settings.json: -```json +```json [settings] "languages": { "Markdown": { "format_on_save": "on" @@ -37,7 +37,7 @@ Zed supports using Prettier to automatically re-format Markdown documents. You c By default Zed will remove trailing whitespace on save. If you rely on invisible trailing whitespace being converted to `
` in Markdown files you can disable this behavior with: -```json +```json [settings] "languages": { "Markdown": { "remove_trailing_whitespace_on_save": false diff --git a/docs/src/languages/nim.md b/docs/src/languages/nim.md index 514810183cf63d5e478bacaf4b61a85fc168cb80..03c2bc0609698eee620ba80a8c82e13f53d6764d 100644 --- a/docs/src/languages/nim.md +++ b/docs/src/languages/nim.md @@ -10,7 +10,7 @@ Report issues to: [https://github.com/foxoman/zed-nim/issues](https://github.com To use [arnetheduck/nph](https://github.com/arnetheduck/nph) as a formatter, follow the [nph installation instructions](https://github.com/arnetheduck/nph?tab=readme-ov-file#installation) and add this to your Zed `settings.json`: -```json +```json [settings] "languages": { "Nim": { "formatter": { diff --git a/docs/src/languages/php.md b/docs/src/languages/php.md index 4e94c134467c5a3484ede7a2146f2f09c172e859..b2b8dffcf1b973f769d2900c21385804fbb4394f 100644 --- a/docs/src/languages/php.md +++ b/docs/src/languages/php.md @@ -13,7 +13,7 @@ The PHP extension offers both `phpactor` and `intelephense` language server supp `phpactor` is enabled by default. -## Phpactor +### Phpactor The Zed PHP Extension can install `phpactor` automatically but requires `php` to be installed and available in your path: @@ -25,13 +25,13 @@ The Zed PHP Extension can install `phpactor` automatically but requires `php` to which php ``` -## Intelephense +### Intelephense [Intelephense](https://intelephense.com/) is a [proprietary](https://github.com/bmewburn/vscode-intelephense/blob/master/LICENSE.txt#L29) language server for PHP operating under a freemium model. Certain features require purchase of a [premium license](https://intelephense.com/). To switch to `intelephense`, add the following to your `settings.json`: -```json +```json [settings] { "languages": { "PHP": { @@ -43,7 +43,7 @@ To switch to `intelephense`, add the following to your `settings.json`: To use the premium features, you can place your [licence.txt file](https://intelephense.com/faq.html) at `~/intelephense/licence.txt` inside your home directory. Alternatively, you can pass the licence key or a path to a file containing the licence key as an initialization option for the `intelephense` language server. To do this, add the following to your `settings.json`: -```json +```json [settings] { "lsp": { "intelephense": { @@ -60,3 +60,35 @@ To use the premium features, you can place your [licence.txt file](https://intel Zed supports syntax highlighting for PHPDoc comments. - Tree-sitter: [claytonrcarter/tree-sitter-phpdoc](https://github.com/claytonrcarter/tree-sitter-phpdoc) + +## Setting up Xdebug + +Zed’s PHP extension provides a debug adapter for PHP and Xdebug. The adapter name is `Xdebug`. 
Here are a couple of ways you can use it: + +```json [debug] +[ + { + "label": "PHP: Listen to Xdebug", + "adapter": "Xdebug", + "request": "launch", + "initialize_args": { + "port": 9003 + } + }, + { + "label": "PHP: Debug this test", + "adapter": "Xdebug", + "request": "launch", + "program": "vendor/bin/phpunit", + "args": ["--filter", "$ZED_SYMBOL"] + } +] +``` + +In case you run into issues: + +- ensure that you have Xdebug installed for the version of PHP you’re running +- ensure that Xdebug is configured to run in `debug` mode +- ensure that Xdebug is actually starting a debugging session +- check that the host and port match between Xdebug and Zed +- look at the diagnostics log by using the `xdebug_info()` function on the page you’re trying to debug diff --git a/docs/src/languages/powershell.md b/docs/src/languages/powershell.md index d4d706425663c494e66ce0c18d8bf801d94cf910..195ce4ad36ddfb9ddea2e3a759bb7c4ae695d7f1 100644 --- a/docs/src/languages/powershell.md +++ b/docs/src/languages/powershell.md @@ -24,7 +24,7 @@ The Zed PowerShell extensions will attempt to download [PowerShell Editor Servic If want to use a specific binary, you can specify in your that in your Zed settings.json: -```json +```json [settings] "lsp": { "powershell-es": { "binary": { diff --git a/docs/src/languages/proto.md b/docs/src/languages/proto.md index d8feaf4c42f49d17af0135f8876a90ee01bf8679..8d9b8350faa366f3981ab945ff3ffca344fa8c70 100644 --- a/docs/src/languages/proto.md +++ b/docs/src/languages/proto.md @@ -30,7 +30,7 @@ which protols ## Configuration -```json +```json [settings] "lsp": { "protobuf-language-server": { "binary": { @@ -62,7 +62,7 @@ ColumnLimit: 120 Or you can have zed directly invoke `clang-format` by specifying it as a [formatter](https://zed.dev/docs/configuring-zed#formatter) in your settings: -```json +```json [settings] "languages": { "Proto": { "format_on_save": "on", diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md index 98eca1fcc9d43747aaf45085db5ed831f8d0b25f..204258a8e736dc68ef51e338ecb0ee8ba7e0a737 100644 --- a/docs/src/languages/python.md +++ b/docs/src/languages/python.md @@ -77,7 +77,7 @@ Other built-in language servers are: These are disabled by default, but can be enabled in your settings.
For example: -```json +```json [settings] { "languages": { "Python": { @@ -123,12 +123,12 @@ For example, in order to: You can use the following configuration: -```json +```json [settings] { "lsp": { "basedpyright": { "settings": { - "basedpyright.analysis": { + "analysis": { "diagnosticMode": "workspace", "inlayHints.callArgumentNames": false } @@ -144,7 +144,7 @@ basedpyright reads project-specific configuration from the `pyrightconfig.json` Here's an example `pyrightconfig.json` file that configures basedpyright to use the `strict` type-checking mode and not to issue diagnostics for any files in `__pycache__` directories: -```json +```json [settings] { "typeCheckingMode": "strict", "ignore": ["**/__pycache__"] @@ -194,7 +194,7 @@ Zed provides the [Ruff](https://docs.astral.sh/ruff/) formatter and linter for P You can disable format-on-save for Python files in your `settings.json`: -```json +```json [settings] { "languages": { "Python": { @@ -206,7 +206,7 @@ You can disable format-on-save for Python files in your `settings.json`: Alternatively, you can use the `black` command-line tool for Python formatting, while keeping Ruff enabled for linting: -```json +```json [settings] { "languages": { "Python": { @@ -228,7 +228,7 @@ Like basedpyright, Ruff reads options from both Zed's language server settings a Here's an example of using language server settings in Zed's `settings.json` to disable all Ruff lints in Zed (while still using Ruff as a formatter): -```json +```json [settings] { "lsp": { "ruff": { @@ -275,9 +275,11 @@ Zed uses `debugpy` under the hood, but no manual adapter configuration is requir For reusable setups, create a `.zed/debug.json` file in your project root. This gives you more control over how Zed runs and debugs your code. +- [debugpy configuration documentation](https://github.com/microsoft/debugpy/wiki/Debug-configuration-settings#launchattach-settings) + #### Debug Active File -```json +```json [debug] [ { "label": "Python Active File", @@ -309,7 +311,7 @@ requirements.txt …the following configuration can be used: -```json +```json [debug] [ { "label": "Python: Flask", diff --git a/docs/src/languages/r.md b/docs/src/languages/r.md index 226a6f866846da43a3f32668dd19e1efb3f657ce..4907d09c5e5daaa32d081ff0da618f5b26cd577b 100644 --- a/docs/src/languages/r.md +++ b/docs/src/languages/r.md @@ -72,7 +72,7 @@ You can configure the [R languageserver settings](https://github.com/REditorSupp For example to disable Lintr linting and suppress code snippet suggestions (both enabled by default): -```json +```json [settings] { "lsp": { "r_language_server": { diff --git a/docs/src/languages/ruby.md b/docs/src/languages/ruby.md index 87210def30ca967da9cdd1a7314961520278adf3..275929e7359f87c06a13b1620b1bf402b0669d42 100644 --- a/docs/src/languages/ruby.md +++ b/docs/src/languages/ruby.md @@ -46,7 +46,7 @@ For all supported Ruby language servers (`solargraph`, `ruby-lsp`, `rubocop`, `s You can skip step 1 and force using the system executable by setting `use_bundler` to `false` in your settings: -```json +```json [settings] { "lsp": { "": { @@ -66,7 +66,7 @@ You can skip step 1 and force using the system executable by setting `use_bundle To switch to `ruby-lsp`, add the following to your `settings.json`: -```json +```json [settings] { "languages": { "Ruby": { @@ -84,7 +84,7 @@ The Ruby extension also provides support for `rubocop` language server for offen To enable it, add the following to your `settings.json`: -```json +```json [settings] { "languages": { "Ruby": { @@ -96,7 +96,7 @@ 
To enable it, add the following to your `settings.json`: Or, conversely, you can disable `ruby-lsp` and enable `solargraph` and `rubocop` by adding the following to your `settings.json`: -```json +```json [settings] { "languages": { "Ruby": { @@ -110,7 +110,7 @@ Or, conversely, you can disable `ruby-lsp` and enable `solargraph` and `rubocop` Solargraph has formatting and diagnostics disabled by default. We can tell Zed to enable them by adding the following to your `settings.json`: -```json +```json [settings] { "lsp": { "solargraph": { @@ -131,7 +131,7 @@ Solargraph reads its configuration from a file called `.solargraph.yml` in the r You can pass Ruby LSP configuration to `initialization_options`, e.g. -```json +```json [settings] { "languages": { "Ruby": { @@ -152,7 +152,7 @@ You can pass Ruby LSP configuration to `initialization_options`, e.g. LSP `settings` and `initialization_options` can also be project-specific. For example to use [standardrb/standard](https://github.com/standardrb/standard) as a formatter and linter for a particular project, add this to a `.zed/settings.json` inside your project repo: -```json +```json [settings] { "lsp": { "ruby-lsp": { @@ -169,7 +169,7 @@ LSP `settings` and `initialization_options` can also be project-specific. For ex Rubocop has unsafe autocorrection disabled by default. We can tell Zed to enable it by adding the following to your `settings.json`: -```json +```json [settings] { "languages": { "Ruby": { @@ -200,7 +200,7 @@ Rubocop has unsafe autocorrection disabled by default. We can tell Zed to enable To enable Sorbet, add `\"sorbet\"` to the `language_servers` list for Ruby in your `settings.json`. You may want to disable other language servers if Sorbet is intended to be your primary LSP, or if you plan to use it alongside another LSP for specific features like type checking. -```json +```json [settings] { "languages": { "Ruby": { @@ -224,7 +224,7 @@ For all aspects of installing Sorbet, setting it up in your project, and configu To enable Steep, add `\"steep\"` to the `language_servers` list for Ruby in your `settings.json`. You may need to adjust the order or disable other LSPs depending on your desired setup. 
-```json +```json [settings] { "languages": { "Ruby": { @@ -250,7 +250,7 @@ It's possible to use the [Tailwind CSS Language Server](https://github.com/tailw In order to do that, you need to configure the language server so that it knows about where to look for CSS classes in Ruby/ERB files by adding the following to your `settings.json`: -```json +```json [settings] { "languages": { "Ruby": { @@ -260,10 +260,6 @@ In order to do that, you need to configure the language server so that it knows "lsp": { "tailwindcss-language-server": { "settings": { - "includeLanguages": { - "html+erb": "html", - "ruby": "html" - }, "experimental": { "classRegex": ["\\bclass:\\s*['\"]([^'\"]*)['\"]"] } @@ -294,7 +290,7 @@ To run tests in your Ruby project, you can set up custom tasks in your local `.z ### Minitest with Rails -```json +```json [tasks] [ { "label": "test $ZED_RELATIVE_FILE -n /$ZED_CUSTOM_RUBY_TEST_NAME/", @@ -315,7 +311,7 @@ To run tests in your Ruby project, you can set up custom tasks in your local `.z Plain minitest does not support running tests by line number, only by name, so we need to use `$ZED_CUSTOM_RUBY_TEST_NAME` instead: -```json +```json [tasks] [ { "label": "-Itest $ZED_RELATIVE_FILE -n /$ZED_CUSTOM_RUBY_TEST_NAME/", @@ -336,7 +332,7 @@ Plain minitest does not support running tests by line number, only by name, so w ### RSpec -```json +```json [tasks] [ { "label": "test $ZED_RELATIVE_FILE:$ZED_ROW", @@ -358,7 +354,7 @@ The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name #### Debug a Ruby script -```json +```json [debug] [ { "label": "Debug current file", @@ -372,7 +368,7 @@ The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name #### Debug Rails server -```json +```json [debug] [ { "label": "Debug Rails server", @@ -394,15 +390,15 @@ The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name To format ERB templates, you can use the `erb-formatter` formatter. This formatter uses the [`erb-formatter`](https://rubygems.org/gems/erb-formatter) gem to format ERB templates. 
-```jsonc +```json [settings] { "HTML+ERB": { "formatter": { "external": { "command": "erb-formatter", - "arguments": ["--stdin-filename", "{buffer_path}"], - }, - }, - }, + "arguments": ["--stdin-filename", "{buffer_path}"] + } + } + } } ``` diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 359af7737161a8dff388b0ef849183504fe29207..d696cfe41129b2d4405a40d7d82ed3790a5888b2 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -16,7 +16,7 @@ TBD: Provide explicit examples not just `....` The following configuration can be used to change the inlay hint settings for `rust-analyzer` in Rust: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -43,7 +43,7 @@ See [Inlay Hints](https://rust-analyzer.github.io/book/features.html#inlay-hints The `rust-analyzer` target directory can be set in `initialization_options`: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -67,7 +67,7 @@ By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to us If you want to install pre-release `rust-analyzer` version instead you can instruct Zed to do so by setting `pre_release` to `true` in your `settings.json`: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -81,7 +81,7 @@ If you want to install pre-release `rust-analyzer` version instead you can instr If you want to disable Zed looking for a `rust-analyzer` binary, you can set `ignore_system_version` to `true` in your `settings.json`: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -95,7 +95,7 @@ If you want to disable Zed looking for a `rust-analyzer` binary, you can set `ig If you want to use a binary in a custom location, you can specify a `path` and optional `arguments`: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -114,7 +114,7 @@ This `"path"` has to be an absolute path. If you want rust-analyzer to provide diagnostics for a target other than your current platform (e.g. for windows when running on macOS) you can use the following Zed lsp settings: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -139,7 +139,7 @@ rustup target list --installed Zed provides tasks using tree-sitter, but rust-analyzer has an LSP extension method for querying file-related tasks via LSP. This is enabled by default and can be configured as -```json +```json [settings] "lsp": { "rust-analyzer": { "enable_lsp_tasks": true, @@ -191,7 +191,7 @@ Check on save feature is responsible for returning part of the diagnostics based Consider more `rust-analyzer.cargo.` and `rust-analyzer.check.` and `rust-analyzer.diagnostics.` settings from the manual for more fine-grained configuration. 
Here's a snippet for Zed settings.json (the language server will restart automatically after the `lsp.rust-analyzer` section is edited and saved): -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -225,7 +225,7 @@ Here's a snippet for Zed settings.json (the language server will restart automat If you want rust-analyzer to analyze multiple Rust projects in the same folder that are not listed in `[members]` in the Cargo workspace, you can list them in `linkedProjects` in the local project settings: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -241,7 +241,7 @@ you can list them in `linkedProjects` in the local project settings: There's a way to get custom completion items from rust-analyzer, that will transform the code according to the snippet body: -```json +```json [settings] { "lsp": { "rust-analyzer": { @@ -294,13 +294,16 @@ There's a way to get custom completion items from rust-analyzer, that will trans ## Debugging -Zed supports debugging Rust binaries and tests out of the box. Run {#action debugger::Start} ({#kb debugger::Start}) to launch one of these preconfigured debug tasks. +Zed supports debugging Rust binaries and tests out of the box with `CodeLLDB` and `GDB`. Run {#action debugger::Start} ({#kb debugger::Start}) to launch one of these preconfigured debug tasks. For more control, you can add debug configurations to `.zed/debug.json`. See the examples below. +- [CodeLLDB configuration documentation](https://github.com/vadimcn/codelldb/blob/master/MANUAL.md#starting-a-new-debug-session) +- [GDB configuration documentation](https://sourceware.org/gdb/current/onlinedocs/gdb.html/Debugger-Adapter-Protocol.html) + ### Build binary then debug -```json +```json [debug] [ { "label": "Build & Debug native binary", @@ -321,7 +324,7 @@ For more control, you can add debug configurations to `.zed/debug.json`. See the When you use `cargo build` or `cargo test` as the build command, Zed can infer the path to the output binary. -```json +```json [debug] [ { "label": "Build & Debug native binary", diff --git a/docs/src/languages/sh.md b/docs/src/languages/sh.md index abc8f03a6c051d9484052aad3ea0e453d9fb4fdc..cf88c89bfa29fd12390640a64cb4826678cfa9f5 100644 --- a/docs/src/languages/sh.md +++ b/docs/src/languages/sh.md @@ -8,7 +8,7 @@ Shell Scripts (bash, zsh, dash, sh) are supported natively by Zed. You can configure various settings for Shell Scripts in your Zed User Settings (`~/.config/zed/settings.json`) or Zed Project Settings (`.zed/settings.json`): -```json +```json [settings] "languages": { "Shell Script": { "tab_size": 2, @@ -41,7 +41,7 @@ shfmt --version 3. Configure Zed to automatically format Shell Scripts with `shfmt` on save: -```json +```json [settings] "languages": { "Shell Script": { "format_on_save": "on", diff --git a/docs/src/languages/sql.md b/docs/src/languages/sql.md index 7993450a04c788494116bbd259af84c12d9b0dfc..fd257c9ab0924db9bbc3ffc596a5992ead0dc211 100644 --- a/docs/src/languages/sql.md +++ b/docs/src/languages/sql.md @@ -23,7 +23,7 @@ sql-formatter --version 3. Configure Zed to automatically format SQL with `sql-formatter`: -```json +```json [settings] "languages": { "SQL": { "formatter": { @@ -44,7 +44,7 @@ You can add this to Zed project settings (`.zed/settings.json`) or via your Zed Sql-formatter also allows more precise control by providing [sql-formatter configuration options](https://github.com/sql-formatter-org/sql-formatter#configuration-options). 
To provide these, create a `.sql-formatter.json` file in your project: -```json +```json [settings] { "language": "postgresql", "tabWidth": 2, @@ -55,7 +55,7 @@ Sql-formatter also allows more precise control by providing [sql-formatter confi When using a `.sql-formatter.json` file you can use a more simplified set of Zed settings since the language need not be specified inline: -```json +```json [settings] "languages": { "SQL": { "formatter": { diff --git a/docs/src/languages/svelte.md b/docs/src/languages/svelte.md index 66d0d0cb50611c765a751552ece6620251daf28c..139195987b1ebb4b78cc19e988c0bbbcf927fb27 100644 --- a/docs/src/languages/svelte.md +++ b/docs/src/languages/svelte.md @@ -9,7 +9,7 @@ Svelte support is available through the [Svelte extension](https://github.com/ze You can modify how certain styles, such as directives and modifiers, appear in attributes: -```json +```json [settings] "syntax": { // Styling for directives (e.g., `class:foo` or `on:click`) (the `on` or `class` part of the attribute). "attribute.function": { @@ -26,7 +26,7 @@ You can modify how certain styles, such as directives and modifiers, appear in a When inlay hints is enabled in Zed, to make the language server send them back, Zed sets the following initialization options: -```json +```json [settings] "inlayHints": { "parameterNames": { "enabled": "all", @@ -53,16 +53,16 @@ When inlay hints is enabled in Zed, to make the language server send them back, To override these settings, use the following: -```json +```json [settings] "lsp": { "svelte-language-server": { "initialization_options": { "configuration": { "typescript": { - ...... + // ...... }, "javascript": { - ...... + // ...... } } } diff --git a/docs/src/languages/swift.md b/docs/src/languages/swift.md index 9b056be5bc8869b18b78e9a2e64ea43db3d8ea90..1492942fe8d33e63949ad0037b12405c373a9770 100644 --- a/docs/src/languages/swift.md +++ b/docs/src/languages/swift.md @@ -18,11 +18,13 @@ Zed's name for the adapter (in the UI and `debug.json`) is `Swift`, and under th The extension tries to find an `lldb-dap` binary using `swiftly`, using `xcrun`, and by searching `$PATH`, in that order of preference. The extension doesn't attempt to download `lldb-dap` if it's not found. 
+- [lldb-dap configuration documentation](https://github.com/llvm/llvm-project/blob/main/lldb/tools/lldb-dap/README.md#configuration-settings-reference) + ### Examples #### Build and debug a Swift binary -```json +```json [debug] [ { "label": "Debug Swift", diff --git a/docs/src/languages/tailwindcss.md b/docs/src/languages/tailwindcss.md index 4409a12bf0dde643f60bb46ae2887c3aa48ca002..ff20d097e45eb311d49f3c118ed642c11d73e19f 100644 --- a/docs/src/languages/tailwindcss.md +++ b/docs/src/languages/tailwindcss.md @@ -8,18 +8,18 @@ Zed has built-in support for Tailwind CSS autocomplete, linting, and hover previ To configure the Tailwind CSS language server, refer [to the extension settings](https://github.com/tailwindlabs/tailwindcss-intellisense?tab=readme-ov-file#extension-settings) and add them to the `lsp` section of your `settings.json`: -```jsonc +```json [settings] { "lsp": { "tailwindcss-language-server": { "settings": { "classFunctions": ["cva", "cx"], "experimental": { - "classRegex": ["[cls|className]\\s\\:\\=\\s\"([^\"]*)"], - }, - }, - }, - }, + "classRegex": ["[cls|className]\\s\\:\\=\\s\"([^\"]*)"] + } + } + } + } } ``` @@ -40,7 +40,7 @@ Languages which can be used with Tailwind CSS in Zed: Zed supports Prettier out of the box, which means that if you have the [Tailwind CSS Prettier plugin](https://github.com/tailwindlabs/prettier-plugin-tailwindcss) installed, adding it to your Prettier configuration will make it work automatically: -```json +```json [settings] // .prettierrc { "plugins": ["prettier-plugin-tailwindcss"] diff --git a/docs/src/languages/terraform.md b/docs/src/languages/terraform.md index 401526f16952ab209c41ab9f09bb9e22785c4e40..c1ff03a83a4da8d08639bc64dc354671fd66f5cc 100644 --- a/docs/src/languages/terraform.md +++ b/docs/src/languages/terraform.md @@ -13,7 +13,7 @@ TBD: Add example using `rootModulePaths` to match upstream example https://githu The Terraform language server can be configured in your `settings.json`, e.g.: -```json +```json [settings] { "lsp": { "terraform-ls": { diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index 02d2672cb64fce1671ae487bffffd6d84523f7fc..a6ec5b71ecb1815aeb4ff3811eec6f9a5c57a54b 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -16,7 +16,7 @@ TBD: Document the difference between Language servers By default Zed uses [vtsls](https://github.com/yioneko/vtsls) for TypeScript, TSX, and JavaScript files. You can configure the use of [typescript-language-server](https://github.com/typescript-language-server/typescript-language-server) per language in your settings file: -```json +```json [settings] { "languages": { "TypeScript": { @@ -34,7 +34,7 @@ You can configure the use of [typescript-language-server](https://github.com/typ Prettier will also be used for TypeScript files by default. To disable this: -```json +```json [settings] { "languages": { "TypeScript": { @@ -49,7 +49,7 @@ Prettier will also be used for TypeScript files by default. To disable this: `vtsls` may run out of memory on very large projects. We default the limit to 8092 (8 GiB) vs. 
the default of 3072 but this may not be sufficient for you: -```json +```json [settings] { "lsp": { "vtsls": { @@ -70,7 +70,7 @@ Zed sets the following initialization options to make the language server send b You can override these settings in your Zed `settings.json` when using `typescript-language-server`: -```json +```json [settings] { "lsp": { "typescript-language-server": { @@ -95,7 +95,7 @@ See [typescript-language-server inlayhints documentation](https://github.com/typ When using `vtsls`: -```json +```json [settings] { "lsp": { "vtsls": { @@ -158,23 +158,33 @@ When using `vtsls`: ## Debugging -Zed supports debugging TypeScript code out of the box. +Zed supports debugging TypeScript code out of the box with `vscode-js-debug`. The following can be debugged without writing additional configuration: - Tasks from `package.json` -- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine) +- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine, Bun, Node) Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these predefined debug tasks. +> **Note:** Bun test is automatically detected when `@types/bun` is present in `package.json`. +> +> **Note:** Node test is automatically detected when `@types/node` is present in `package.json` (requires Node.js 20+). + As for all languages, configurations from `.vscode/launch.json` are also available for debugging in Zed. If your use-case isn't covered by any of these, you can take full control by adding debug configurations to `.zed/debug.json`. See below for example configurations. +### Configuring JavaScript debug tasks + +JavaScript debugging is more complicated than in other languages because there are two different environments: Node.js and the browser. `vscode-js-debug` exposes a `type` field that you can use to specify the environment, either `node` or `chrome`. + +- [vscode-js-debug configuration documentation](https://github.com/microsoft/vscode-js-debug/blob/main/OPTIONS.md) + ### Attach debugger to a server running in web browser (`npx serve`) Given an externally-run web server (e.g., with `npx serve` or `npx live-server`) one can attach to it and open it with a browser. -```json +```json [debug] [ { "label": "Launch Chrome (TypeScript)", diff --git a/docs/src/languages/xml.md b/docs/src/languages/xml.md index 4318756a101ddd1787d825b06df9425e9b2ac5b9..df3d845d6d258caa5f16432c84fb78da3586fc19 100644 --- a/docs/src/languages/xml.md +++ b/docs/src/languages/xml.md @@ -8,7 +8,7 @@ XML support is available through the [XML extension](https://github.com/sweetppr If you have additional file extensions that are not being automatically recognized as XML just add them to [file_types](../configuring-zed.md#file-types) in your Zed settings: -```json +```json [settings] "file_types": { "XML": ["rdf", "gpx", "kml"] } diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 68167e873430970f2a871065da740659965e2df1..477d197d11fa4f0ad0e62ee25e416eee7c35ee67 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -9,7 +9,7 @@ YAML support is available natively in Zed. You can configure various [yaml-language-server settings](https://github.com/redhat-developer/yaml-language-server?tab=readme-ov-file#language-server-settings) by adding them to your Zed settings.json in a `yaml-language-server` block under the `lsp` key.
For example: -```json +```json [settings] "lsp": { "yaml-language-server": { "settings": { @@ -38,7 +38,7 @@ By default, Zed uses Prettier for formatting YAML files. You can customize the formatting behavior of Prettier. For example to use single-quotes in yaml files add the following to your `.prettierrc` configuration file: -```json +```json [settings] { "overrides": [ { @@ -55,7 +55,7 @@ You can customize the formatting behavior of Prettier. For example to use single To use `yaml-language-server` instead of Prettier for YAML formatting, add the following to your Zed `settings.json`: -```json +```json [settings] "languages": { "YAML": { "formatter": "language_server" @@ -74,12 +74,12 @@ You can override any auto-detected schema via the `schemas` settings key (demons name: Issue Assignment on: issues: - types: [oppened] + types: [opened] ``` You can disable the automatic detection and retrieval of schemas from the JSON Schema if desired: -```json +```json [settings] "lsp": { "yaml-language-server": { "settings": { @@ -99,7 +99,7 @@ Yaml-language-server supports [custom tags](https://github.com/redhat-developer/ For example Amazon CloudFormation YAML uses a number of custom tags, to support these you can add the following to your settings.json: -```json +```json [settings] "lsp": { "yaml-language-server": { "settings": { diff --git a/docs/src/linux.md b/docs/src/linux.md index 1b9c061e71cbfc06c1783cbd747b0648721002da..433891a3e461f6c20d4281c72f7b9ae10a459c03 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -51,10 +51,21 @@ There are several third-party Zed packages for various Linux distributions and p See [Repology](https://repology.org/project/zed-editor/versions) for a list of Zed packages in various repositories. +### Community + When installing a third-party package please be aware that it may not be completely up to date and may be slightly different from the Zed we package (a common change is to rename the binary to `zedit` or `zeditor` to avoid conflicting with other packages). We'd love your help making Zed available for everyone. If Zed is not yet available for your package manager, and you would like to fix that, we have some notes on [how to do it](./development/linux.md#notes-for-packaging-zed). +The packages in this section provide binary installs for Zed but are not official packages within the associated distributions. These packages are maintained by community members and as such a higher level of caution should be taken when installing them. + +#### Debian + +Zed is available in [this community-maintained repository](https://debian.griffo.io/). + +Packages are built in the [zed-debian repository](https://github.com/dariogriffo/zed-debian). +Build, packaging, and installation instructions for each version are available in that repository's README. + ### Downloading manually If you'd prefer, you can install Zed by downloading our pre-built .tar.gz. This is the same artifact that our install script uses, but you can customize the location of your installation by modifying the instructions below: @@ -179,7 +190,7 @@ Make sure to export the variable if you choose to define it globally in a `.bash ##### Option B -If you are using Mesa, you can run `MESA_VK_DEVICE_SELECT=list zed --foreground` to get a list of available GPUs and then export `MESA_VK_DEVICE_SELECT=xxxx:yyyy` to choose a specific device.
+If you are using Mesa, you can run `MESA_VK_DEVICE_SELECT=list zed --foreground` to get a list of available GPUs and then export `MESA_VK_DEVICE_SELECT=xxxx:yyyy` to choose a specific device. Furthermore, you can fallback to xwayland with an additional export of `WAYLAND_DISPLAY=""`. ##### Option C diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index e597e7a6c5743f47947bba1b3a068e497ee51faa..b4d7033a3b1d2201fbf35afa096551a2e5232272 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -35,7 +35,7 @@ The remote machine must be able to run Zed's server. The following platforms sho The list of remote servers is stored in your settings file {#kb zed::OpenSettings}. You can edit this list using the Remote Projects dialog {#kb projects::OpenRemote}, which provides some robustness - for example it checks that the connection can be established before writing it to the settings file. -```json +```json [settings] { "ssh_connections": [ { @@ -48,7 +48,7 @@ The list of remote servers is stored in your settings file {#kb zed::OpenSetting Zed shells out to the `ssh` on your path, and so it will inherit any configuration you have in `~/.ssh/config` for the given host. That said, if you need to override anything you can configure the following additional options on each connection: -```json +```json [settings] { "ssh_connections": [ { @@ -66,7 +66,7 @@ Zed shells out to the `ssh` on your path, and so it will inherit any configurati There are two additional Zed-specific options per connection, `upload_binary_over_ssh` and `nickname`: -```json +```json [settings] { "ssh_connections": [ { @@ -91,7 +91,7 @@ Additionally it's worth noting that while you can pass a password on the command If you'd like to be able to connect to ports on your remote server from your local machine, you can configure port forwarding in your settings file. This is particularly useful for developing websites so you can load the site in your browser while working. -```json +```json [settings] { "ssh_connections": [ { @@ -106,7 +106,7 @@ This will cause requests from your local machine to `localhost:8080` to be forwa By default these ports are bound to localhost, so other computers in the same network as your development machine cannot access them. You can set the local_host to bind to a different interface, for example, 0.0.0.0 will bind to all local interfaces. -```json +```json [settings] { "ssh_connections": [ { @@ -125,7 +125,7 @@ By default these ports are bound to localhost, so other computers in the same ne These ports also default to the `localhost` interface on the remote host. If you need to change this, you can also set the remote host: -```json +```json [settings] { "ssh_connections": [ { diff --git a/docs/src/repl.md b/docs/src/repl.md index 92b3d81f24fd7c8c238b56f8681b0b62d0ff93c1..692093007cb066e08d52c4694444eb3e55bf2144 100644 --- a/docs/src/repl.md +++ b/docs/src/repl.md @@ -149,7 +149,7 @@ TBD: Improve Julia REPL instructions Zed automatically detects the available kernels on your system. If you need to configure a different default kernel for a language, you can assign a kernel for any supported language in your `settings.json`. 
-```json +```json [settings] { "jupyter": { "kernel_selections": { diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 6dc5355907bec47c4bd4f86353c226201bb49586..21aed43452318863b735a9b46cd5399a8bfca1c6 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -6,7 +6,7 @@ The snippets are located in `~/.config/zed/snippets` directory to which you can ## Example configuration -```json +```json [settings] { // Each snippet must have a name and body, but the prefix and description are optional. // The prefix is used to trigger the snippet, but when omitted then the name is used. @@ -44,7 +44,7 @@ The `feature_paths` option in `simple-completion-language-server` is disabled by If you want to enable it you can add the following to your `settings.json`: -```json +```json [settings] { "lsp": { "snippet-completion-server": { diff --git a/docs/src/system-requirements.md b/docs/src/system-requirements.md index 46c559c507acc27ebe30094adc99bdaf28f8e0b8..eaf9c027be5cef93c2dea0149e7e55dcdd8eb154 100644 --- a/docs/src/system-requirements.md +++ b/docs/src/system-requirements.md @@ -14,7 +14,6 @@ Zed supports the follow macOS releases: | macOS 12.x | Monterey | EOL 2024-09-16 | Supported | | macOS 11.x | Big Sur | EOL 2023-09-26 | Partially Supported | | macOS 10.15.x | Catalina | EOL 2022-09-12 | Partially Supported | -| macOS 10.14.x | Mojave | EOL 2021-10-25 | Unsupported | The macOS releases labelled "Partially Supported" (Big Sur and Catalina) do not support screen sharing via Zed Collaboration. These features use the [LiveKit SDK](https://livekit.io) which relies upon [ScreenCaptureKit.framework](https://developer.apple.com/documentation/screencapturekit/) only available on macOS 12 (Monterey) and newer. @@ -43,7 +42,20 @@ Zed requires a Vulkan 1.3 driver, and the following desktop portals: ## Windows -Not yet available as an official download. Can be built [from source](./development/windows.md). +Zed supports the following Windows releases: +| Version | Microsoft Status | Zed Status | | ------------------------- | ------------------ | ------------------- | | Windows 11 (all releases) | Supported | Supported | | Windows 10 (64-bit) | Supported | Supported | + +### Windows Hardware + +Zed supports machines with Intel or AMD 64-bit (x86_64) processors that meet the above Windows requirements: + +- Windows 11 (64-bit) +- Windows 10 (64-bit) +- Graphics: A GPU that supports DirectX 11 (most PCs from 2012+). +- Driver: Current NVIDIA/AMD/Intel driver (not the Microsoft Basic Display Adapter). ## FreeBSD diff --git a/docs/src/tasks.md b/docs/src/tasks.md index f2986c9951fe660efdc54e2bb984403cdb6da1cd..a11988d9a030036ccd3e2602a161771f8288b549 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -2,7 +2,7 @@ Zed supports ways to spawn (and rerun) commands using its integrated terminal to output the results. These commands can read a limited subset of Zed state (such as a path to the file currently being edited or selected text).
-```json +```json [tasks] [ { "label": "Example task", @@ -89,7 +89,7 @@ These variables allow you to pull information from the current editor and use it To use a variable in a task, prefix it with a dollar sign (`$`): -```json +```json [settings] { "label": "echo current file's path", "command": "echo $ZED_FILE" @@ -106,7 +106,7 @@ When working with paths containing spaces or other special characters, please en For example, instead of this (which will fail if the path has a space): -```json +```json [settings] { "label": "stat current file", "command": "stat $ZED_FILE" @@ -115,7 +115,7 @@ For example, instead of this (which will fail if the path has a space): Provide the following: -```json +```json [settings] { "label": "stat current file", "command": "stat", @@ -125,7 +125,7 @@ Provide the following: Or explicitly include escaped quotes like so: -```json +```json [settings] { "label": "stat current file", "command": "stat \"$ZED_FILE\"" @@ -137,7 +137,7 @@ Or explicitly include escaped quotes like so: Task definitions with variables which are not present at the moment the task list is determined are filtered out. For example, the following task will appear in the spawn modal only if there is a text selection: -```json +```json [settings] { "label": "selected text", "command": "echo \"$ZED_SELECTED_TEXT\"" @@ -146,7 +146,7 @@ For example, the following task will appear in the spawn modal only if there is Set default values to such variables to have such tasks always displayed: -```json +```json [settings] { "label": "selected text with default", "command": "echo \"${ZED_SELECTED_TEXT:no text selected}\"" @@ -172,7 +172,7 @@ By default, tasks capture their variables into a context once, and this "resolve This can be controlled with the `"reevaluate_context"` argument to the task: setting it to `true` will force the task to be reevaluated before each run. -```json +```json [keymap] { "context": "Workspace", "bindings": { @@ -185,7 +185,7 @@ This can be controlled with the `"reevaluate_context"` argument to the task: set You can define your own keybindings for your tasks via an additional argument to `task::Spawn`. If you wanted to bind the aforementioned `echo current file's path` task to `alt-g`, you would add the following snippet in your [`keymap.json`](./key-bindings.md) file: -```json +```json [keymap] { "context": "Workspace", "bindings": { @@ -197,7 +197,7 @@ You can define your own keybindings for your tasks via an additional argument to Note that these tasks can also have a 'target' specified to control where the spawned task should show up. 
This could be useful for launching a terminal application that you want to use in the center area: -```json +```json [tasks] // In tasks.json { "label": "start lazygit", @@ -205,7 +205,7 @@ This could be useful for launching a terminal application that you want to use i } ``` -```json +```json [keymap] // In keymap.json { "context": "Workspace", @@ -228,7 +228,7 @@ Zed supports overriding the default action for inline runnable indicators via wo To tag a task, add the runnable tag name to the `tags` field on the task template: -```json +```json [settings] { "label": "echo current file's path", "command": "echo $ZED_FILE", diff --git a/docs/src/telemetry.md b/docs/src/telemetry.md index 46c39a88aeecfb5e8172001af67bcf30b339d984..8dca8c1ee6fc9168dd384796292ffb3063027e71 100644 --- a/docs/src/telemetry.md +++ b/docs/src/telemetry.md @@ -9,7 +9,7 @@ To enable or disable some or all telemetry types, open your `settings.json` file Insert and tweak the following: -```json +```json [settings] "telemetry": { "diagnostics": false, "metrics": false diff --git a/docs/src/themes.md b/docs/src/themes.md index 363c99f065a711634e78e3b6093b9069fa7d2c7d..460d00a7627e55f21958142c230b683d92301040 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -20,7 +20,7 @@ Your selected theme is stored in your settings file. You can open your settings By default, Zed maintains two themes: one for light mode and one for dark mode. You can set the mode to `"dark"` or `"light"` to ignore the current system mode. -```json +```json [settings] { "theme": { "mode": "system", @@ -32,20 +32,22 @@ By default, Zed maintains two themes: one for light mode and one for dark mode. ## Theme Overrides -To override specific attributes of a theme, use the `experimental.theme_overrides` setting. +To override specific attributes of a theme, use the `theme_overrides` setting. This setting can be used to configure theme-specific overrides. For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics: -```json +```json [settings] { - "experimental.theme_overrides": { - "editor.background": "#333", - "syntax": { - "comment": { - "font_style": "italic" - }, - "comment.doc": { - "font_style": "italic" + "theme_overrides": { + "One Dark": { + "editor.background": "#333", + "syntax": { + "comment": { + "font_style": "italic" + }, + "comment.doc": { + "font_style": "italic" + } } } } @@ -58,7 +60,7 @@ To see a list of available theme attributes look at the JSON file for your theme ## Local Themes -Store new themes locally by placing them in the `~/.config/zed/themes` directory. +Store new themes locally by placing them in the `~/.config/zed/themes` directory (macOS and Linux) or `%USERPROFILE%\AppData\Roaming\Zed\themes\` (Windows). For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. It will be available in the theme selector the next time Zed loads. diff --git a/docs/src/troubleshooting.md b/docs/src/troubleshooting.md new file mode 100644 index 0000000000000000000000000000000000000000..4aeeda6e3da5ad0f6494a5064f17cff8be98c330 --- /dev/null +++ b/docs/src/troubleshooting.md @@ -0,0 +1,80 @@ +# Troubleshooting + +This guide covers common troubleshooting techniques for Zed. +Sometimes you'll be able to identify and resolve issues on your own using this information. 
+Other times, troubleshooting means gathering the right information—logs, profiles, or reproduction steps—to help us diagnose and fix the problem. + +> **Note**: To open the command palette, use `cmd-shift-p` on macOS or `ctrl-shift-p` on Windows / Linux. + +## Retrieve Zed and System Information + +When reporting issues or seeking help, it's useful to know your Zed version and system specifications. You can retrieve this information using the following actions from the command palette: + +- {#action zed::About}: Find your Zed version number +- {#action zed::CopySystemSpecsIntoClipboard}: Populate your clipboard with Zed version number, operating system version, and hardware specs + +## Zed Log + +Often, a good first place to look when troubleshooting any issue in Zed is the Zed log, which might contain clues about what's going wrong. +You can review the most recent 1000 lines of the log by running the {#action zed::OpenLog} action from the command palette. +If you want to view the full file, you can reveal it in your operating system's native file manager via {#action zed::RevealLogInFileManager} from the command palette. + +You'll find the Zed log in the respective location on each operating system: + +- macOS: `~/Library/Logs/Zed/Zed.log` +- Windows: `C:\Users\YOU\AppData\Local\Zed\logs\Zed.log` +- Linux: `~/.local/share/zed/logs/Zed.log` or `$XDG_DATA_HOME` + +> Note: In some cases, it might be useful to monitor the log live, such as when [developing a Zed extension](https://zed.dev/docs/extensions/developing-extensions). +> Example: `tail -f ~/Library/Logs/Zed/Zed.log` + +The log may contain enough context to help you debug the issue yourself, or you may find specific errors that are useful when filing a [GitHub issue](https://github.com/zed-industries/zed/issues/new/choose) or when talking to Zed staff in our [Discord server](https://zed.dev/community-links#forums-and-discussions). + +## Performance Issues (Profiling) + +If you're running into performance issues in Zed—such as hitches, hangs, or general unresponsiveness—having a performance profile attached to your issue will help us zero in on what is getting stuck, so we can fix it. + +### macOS + +Xcode Instruments (which comes bundled with your [Xcode](https://apps.apple.com/us/app/xcode/id497799835) download) is the standard tool for profiling on macOS. + +1. With Zed running, open Instruments +1. Select `Time Profiler` as the profiling template +1. In the `Time Profiler` configuration, set the target to the running Zed process +1. Start recording +1. If the performance issue occurs when performing a specific action in Zed, perform that action now +1. Stop recording +1. Save the trace file +1. Compress the trace file into a zip archive +1. File a [GitHub issue](https://github.com/zed-industries/zed/issues/new/choose) with the trace zip attached + + + + + +## Startup and Workspace Issues + +Zed creates local SQLite databases to persist data relating to its workspace and your projects. These databases store, for instance, the tabs and panes you have open in a project, the scroll position of each open file, the list of all projects you've opened (for the recent projects modal picker), etc. 
You can find and explore these databases in the following locations: + +- macOS: `~/Library/Application Support/Zed/db` +- Linux and FreeBSD: `~/.local/share/zed/db` (or within `XDG_DATA_HOME` or `FLATPAK_XDG_DATA_HOME`) +- Windows: `%LOCALAPPDATA%\Zed\db` + +The naming convention of these databases takes on the form of `0-`: + +- Stable: `0-stable` +- Preview: `0-preview` +- Nightly: `0-nightly` +- Dev: `0-dev` + +While rare, we've seen a few cases where workspace databases became corrupted, which prevented Zed from starting. +If you're experiencing startup issues, you can test whether it's workspace-related by temporarily moving the database from its location, then trying to start Zed again. + +> **Note**: Moving the workspace database will cause Zed to create a fresh one. +> Your recent projects, open tabs, etc. will be reset to "factory". + +If your issue persists after regenerating the database, please [file an issue](https://github.com/zed-industries/zed/issues/new/choose). + +## Language Server Issues + +If you're experiencing language-server related issues, such as stale diagnostics or issues jumping to definitions, restarting the language server via {#action editor::RestartLanguageServer} from the command palette will often resolve the issue. diff --git a/docs/src/vim.md b/docs/src/vim.md index b62ded09896d9ec7cc1f40da262b3a57bdc5d870..6af563d3555ab0bbc192b8521ce3eb0986c28988 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -39,7 +39,7 @@ If you missed this, you can toggle vim mode on or off anytime by opening the com > **Note**: This command toggles the following property in your user settings: > -> ```json +> ```json [settings] > { > "vim_mode": true > } @@ -219,7 +219,7 @@ These text objects implement the behavior of the [mini.ai](https://github.com/ec To use these text objects, you need to add bindings to your keymap. Here's an example configuration that makes them available when using text object operators (`i` and `a`) or change-surrounds (`cs`): -```json +```json [settings] { "context": "vim_operator == a || vim_operator == i || vim_operator == cs", "bindings": { @@ -237,9 +237,9 @@ To use these text objects, you need to add bindings to your keymap. Here's an ex With this configuration, you can use commands like: - `cib` - Change inside brackets using AnyBrackets behavior -- `cim` - Change inside brackets using MiniBrackets behavior +- `ciB` - Change inside brackets using MiniBrackets behavior - `ciq` - Change inside quotes using AnyQuotes behavior -- `ciM` - Change inside quotes using MiniQuotes behavior +- `ciQ` - Change inside quotes using MiniQuotes behavior ## Command palette @@ -377,7 +377,7 @@ In this section, we'll learn how to customize the key bindings of Zed's vim mode Zed's key bindings are evaluated only when the `"context"` property matches your location in the editor. For example, if you add key bindings to the `"Editor"` context, they will only work when you're editing a file. If you add key bindings to the `"Workspace"` context, they will work everywhere in Zed. Here's an example of a key binding that saves when you're editing a file: -```json +```json [settings] { "context": "Editor", "bindings": { @@ -388,12 +388,12 @@ Zed's key bindings are evaluated only when the `"context"` property matches your Contexts are nested, so when you're editing a file, the context is the `"Editor"` context, which is inside the `"Pane"` context, which is inside the `"Workspace"` context. 
That's why any key bindings you add to the `"Workspace"` context will work when you're editing a file. Here's an example: -```json +```json [keymap] // This key binding will work when you're editing a file. It comes built into Zed by default as the workspace: save command. { "context": "Workspace", "bindings": { - "ctrl-s": "file::Save" + "ctrl-s": "workspace::Save" } } ``` @@ -419,7 +419,7 @@ Vim mode adds several contexts to the `"Editor"` context: Here's a template with useful vim mode contexts to help you customize your vim mode key bindings. You can copy it and integrate it into your user keymap. -```json +```json [keymap] [ { "context": "VimControl && !menu", @@ -458,7 +458,7 @@ By default, you can navigate between the different files open in the editor with But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. -```json +```json [settings] { "context": "Dock", "bindings": { @@ -473,7 +473,7 @@ But you cannot use the same shortcuts to move between all the editor docks (the Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. -```json +```json [settings] { "context": "VimControl && !menu && vim_mode != operator", "bindings": { @@ -487,7 +487,7 @@ Subword motion, which allows you to navigate and select individual words in came Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. -```json +```json [settings] { "context": "vim_mode == visual", "bindings": { @@ -498,7 +498,7 @@ Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), b In non-modal text editors, cursor navigation typically wraps when moving past line ends. Zed, however, handles this behavior exactly like Vim by default: the cursor stops at line boundaries. If you prefer your cursor to wrap between lines, override these keybindings: -```json +```json [settings] // In VimScript, this would look like this: // set whichwrap+=<,>,[,],h,l { @@ -514,7 +514,7 @@ In non-modal text editors, cursor navigation typically wraps when moving past li The [Sneak motion](https://github.com/justinmk/vim-sneak) feature allows for quick navigation to any two-character sequence in your text. You can enable it by adding the following keybindings to your keymap. By default, the `s` key is mapped to `vim::Substitute`. Adding these bindings will override that behavior, so ensure this change aligns with your workflow preferences. -```json +```json [settings] { "context": "vim_mode == normal || vim_mode == visual", "bindings": { @@ -526,7 +526,7 @@ The [Sneak motion](https://github.com/justinmk/vim-sneak) feature allows for qui The [vim-exchange](https://github.com/tommcdo/vim-exchange) feature does not have a default binding for visual mode, as the `shift-x` binding conflicts with the default `shift-x` binding for visual mode (`vim::VisualDeleteLine`). 
To assign the default vim-exchange binding, add the following keybinding to your keymap: -```json +```json [settings] { "context": "vim_mode == visual", "bindings": { @@ -539,7 +539,7 @@ The [vim-exchange](https://github.com/tommcdo/vim-exchange) feature does not hav If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: `ctrl+v` to paste, `ctrl+f` to search, etc. You can restore them by copying this data into your keymap: -```json +```json [keymap] { "context": "Editor && !menu", "bindings": { @@ -572,7 +572,7 @@ You can change the following settings to modify vim mode's behavior: Here's an example of adding a digraph for the zombie emoji. This allows you to type `ctrl-k f z` to insert a zombie emoji. You can add as many digraphs as you like. -```json +```json [settings] { "vim": { "custom_digraphs": { @@ -584,7 +584,7 @@ Here's an example of adding a digraph for the zombie emoji. This allows you to t Here's an example of these settings changed: -```json +```json [settings] { "vim": { "default_mode": "insert", @@ -615,7 +615,7 @@ Here are a few general Zed settings that can help you fine-tune your Vim experie Here's an example of these settings changed: -```json +```json [settings] { // Disable cursor blink "cursor_blink": false, diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 002ebdd4a8c0ca420a5f4be9acb2254c37820664..b353377dd764d2506abd4cce46352df3ca47dfcb 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -10,7 +10,7 @@ Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes] You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: -```json +```json [settings] { "theme": "One Dark", "icon_theme": "Zed (Default)" @@ -19,26 +19,26 @@ You can preview/choose amongst your installed themes and icon themes with {#acti If you would like to use distinct themes for light mode/dark mode that can be set with: -```json +```json [settings] { "theme": { - "dark": "One Dark" + "dark": "One Dark", "light": "One Light", // Mode to use (dark, light) or "system" to follow the OS's light/dark mode (default) - "mode": "system", + "mode": "system" }, "icon_theme": { - "dark": "Zed (Default)" + "dark": "Zed (Default)", "light": "Zed (Default)", // Mode to use (dark, light) or "system" to follow the OS's light/dark mode (default) - "mode": "system", + "mode": "system" } } ``` ## Fonts -```json +```json [settings] // UI Font. Use ".SystemUIFont" to use the default system font (SF Pro on macOS), // or ".ZedSans" for the bundled default (currently IBM Plex) "ui_font_family": ".SystemUIFont", @@ -73,7 +73,7 @@ For example `=>` will be displayed as `→` and `!=` will be `≠`. 
This is pure To disable this behavior use: -```json +```json [settings] { "buffer_font_features": { "calt": false // Disable ligatures @@ -83,7 +83,7 @@ To disable this behavior use: ### Status Bar -```json +```json [settings] { // Whether to show full labels in line indicator or short ones // - `short`: "2 s, 15 l, 32 c" @@ -105,7 +105,7 @@ To disable this behavior use: ### Titlebar -```json +```json [settings] // Control which items are shown/hidden in the title bar "title_bar": { "show_branch_icon": false, // Show/hide branch icon beside branch switcher @@ -120,7 +120,7 @@ To disable this behavior use: ## Workspace -```json +```json [settings] { // Force usage of Zed build in path prompts (file and directory pickers) // instead of OS native pickers (false). @@ -129,10 +129,6 @@ To disable this behavior use: // instead of OS native prompts (false). On linux this is ignored (always false). "use_system_prompts": true, - // Whether to use the system provided dialogs for Open and Save As (true) or - // Zed's built-in keyboard-first pickers (false) - "use_system_path_prompts": true, - // Active pane styling settings. "active_pane_modifiers": { // Inset border size of the active pane, in pixels. @@ -152,7 +148,7 @@ To disable this behavior use: +
diff --git a/extensions/html/languages/html/injections.scm b/extensions/html/languages/html/injections.scm index 0884d8f516706ee3058636931073d9aac1c3016a..525b3efe29dca541afc8829dd41ff217f48439c3 100644 --- a/extensions/html/languages/html/injections.scm +++ b/extensions/html/languages/html/injections.scm @@ -1,3 +1,7 @@ +((comment) @injection.content + (#set! injection.language "comment") +) + (script_element (raw_text) @injection.content (#set! injection.language "javascript")) diff --git a/extensions/html/src/html.rs b/extensions/html/src/html.rs index 27fd2d1e2226a764cdcc3de29d607f3d3db8fd5d..337689ebddd427769ab985ad82512f76b601e67c 100644 --- a/extensions/html/src/html.rs +++ b/extensions/html/src/html.rs @@ -68,22 +68,24 @@ impl zed::Extension for HtmlExtension { worktree: &zed::Worktree, ) -> Result { let server_path = if let Some(path) = worktree.which(BINARY_NAME) { - path + return Ok(zed::Command { + command: path, + args: vec!["--stdio".to_string()], + env: Default::default(), + }); } else { - self.server_script_path(language_server_id)? + let server_path = self.server_script_path(language_server_id)?; + env::current_dir() + .unwrap() + .join(&server_path) + .to_string_lossy() + .to_string() }; self.cached_binary_path = Some(server_path.clone()); Ok(zed::Command { command: zed::node_binary_path()?, - args: vec![ - env::current_dir() - .unwrap() - .join(&server_path) - .to_string_lossy() - .to_string(), - "--stdio".to_string(), - ], + args: vec![server_path, "--stdio".to_string()], env: Default::default(), }) } diff --git a/extensions/slash-commands-example/README.md b/extensions/slash-commands-example/README.md index 6ff00dd2ad673bda951ba323258cfc3db2511c90..8c16a4e168a3334d3197090837eeaf21c956b3c3 100644 --- a/extensions/slash-commands-example/README.md +++ b/extensions/slash-commands-example/README.md @@ -76,8 +76,7 @@ Rebuild to see these changes reflected: ## Troubleshooting / Logs -- MacOS: `tail -f ~/Library/Logs/Zed/Zed.log` -- Linux: `tail -f ~/.local/share/zed/logs/Zed.log` +- [zed.dev docs: Troubleshooting](https://zed.dev/docs/troubleshooting) ## Documentation diff --git a/renovate.json b/renovate.json index 6e5630ad840223787447b9e9532d8afcfd86c18e..01ca7a46a1bfe63a383318aa65b5884d05ed0e4e 100644 --- a/renovate.json +++ b/renovate.json @@ -12,7 +12,7 @@ "timezone": "America/New_York", "schedule": ["after 3pm on Wednesday"], "prFooter": "Release Notes:\n\n- N/A", - "ignorePaths": ["**/node_modules/**", "tooling/workspace-hack/**"], + "ignorePaths": ["**/node_modules/**"], "packageRules": [ { "description": "Group wasmtime crates together.", diff --git a/script/bump-gpui-minor-version b/script/bump-gpui-minor-version deleted file mode 100755 index 82e44120e001e3397576de7c089c0b37e44d9ae9..0000000000000000000000000000000000000000 --- a/script/bump-gpui-minor-version +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env bash - - -# Ensure we're in a clean state on an up-to-date `main` branch. -if [[ -n $(git status --short --untracked-files=no) ]]; then - echo "can't bump versions with uncommitted changes" - exit 1 -fi -if [[ $(git rev-parse --abbrev-ref HEAD) != "main" ]]; then - echo "this command must be run on main" - exit 1 -fi -git pull -q --ff-only origin main - - -# Parse the current version -version=$(script/get-crate-version gpui) -major=$(echo $version | cut -d. -f1) -minor=$(echo $version | cut -d. 
-f2) -next_minor=$(expr $minor + 1) - -next_minor_branch_name="bump-gpui-to-v${major}.${next_minor}.0" - -git checkout -b ${next_minor_branch_name} - -script/lib/bump-version.sh gpui gpui-v "" minor true - -git checkout -q main diff --git a/script/bump-gpui-version b/script/bump-gpui-version new file mode 100755 index 0000000000000000000000000000000000000000..5112bde4506553c1984b2952ade6d8b94e56b161 --- /dev/null +++ b/script/bump-gpui-version @@ -0,0 +1,45 @@ +#!/usr/bin/env bash + +# Parse arguments +bump_type=${1:-minor} + +if [[ "$bump_type" != "minor" && "$bump_type" != "patch" ]]; then + echo "Usage: $0 [minor|patch]" + echo " minor (default): bumps the minor version (e.g., 0.1.0 -> 0.2.0)" + echo " patch: bumps the patch version (e.g., 0.1.0 -> 0.1.1)" + exit 1 +fi + +# Ensure we're in a clean state on an up-to-date `main` branch. +if [[ -n $(git status --short --untracked-files=no) ]]; then + echo "can't bump versions with uncommitted changes" + exit 1 +fi +if [[ $(git rev-parse --abbrev-ref HEAD) != "main" ]]; then + echo "this command must be run on main" + exit 1 +fi +git pull -q --ff-only origin main + + +# Parse the current version +version=$(script/get-crate-version gpui) +major=$(echo $version | cut -d. -f1) +minor=$(echo $version | cut -d. -f2) +patch=$(echo $version | cut -d. -f3) + +if [[ "$bump_type" == "minor" ]]; then + next_minor=$(expr $minor + 1) + next_version="${major}.${next_minor}.0" +else + next_patch=$(expr $patch + 1) + next_version="${major}.${minor}.${next_patch}" +fi + +branch_name="bump-gpui-to-v${next_version}" + +git checkout -b ${branch_name} + +script/lib/bump-version.sh gpui gpui-v "" $bump_type true + +git checkout -q main diff --git a/script/bundle-linux b/script/bundle-linux index ad67b7a0f75f8c3e5d22e1a12e175ed248ecaf57..e8263fe4bcc8a90073149bf3a02ff1ed481017c3 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -91,7 +91,7 @@ else if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then echo "Uploading zed debug symbols to sentry..." # note: this uploads the unstripped binary which is needed because it contains - # .eh_frame data for stack unwinindg. see https://github.com/getsentry/symbolic/issues/783 + # .eh_frame data for stack unwinding. see https://github.com/getsentry/symbolic/issues/783 sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \ "${target_dir}/${target_triple}"/release/zed \ "${target_dir}/${remote_server_triple}"/release/remote_server diff --git a/script/bundle-mac b/script/bundle-mac index 0bac0f75ee5ade49eba842257e854420f9bca82f..abcdb6cee2e6b35bcc185a40b6ad459dd98389fb 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -375,7 +375,7 @@ function upload_debug_info() { if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then echo "Uploading zed debug symbols to sentry..." # note: this uploads the unstripped binary which is needed because it contains - # .eh_frame data for stack unwinindg. see https://github.com/getsentry/symbolic/issues/783 + # .eh_frame data for stack unwinding. 
see https://github.com/getsentry/symbolic/issues/783 sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \ "target/${architecture}/${target_dir}/zed" \ "target/${architecture}/${target_dir}/remote_server" \ diff --git a/script/danger/dangerfile.ts b/script/danger/dangerfile.ts index 6ed4a27fedb0bea7882ad4bcdd1016929bdd40e3..88dc5c5e71c640a83315ac5f1b14c216763023fd 100644 --- a/script/danger/dangerfile.ts +++ b/script/danger/dangerfile.ts @@ -61,12 +61,11 @@ if (includesIssueUrl) { const PROMPT_PATHS = [ "assets/prompts/content_prompt.hbs", "assets/prompts/terminal_assistant_prompt.hbs", - "crates/agent/src/prompts/stale_files_prompt_header.txt", - "crates/agent/src/prompts/summarize_thread_detailed_prompt.txt", - "crates/agent/src/prompts/summarize_thread_prompt.txt", - "crates/assistant_tools/src/templates/create_file_prompt.hbs", - "crates/assistant_tools/src/templates/edit_file_prompt_xml.hbs", - "crates/assistant_tools/src/templates/edit_file_prompt_diff_fenced.hbs", + "crates/agent_settings/src/prompts/summarize_thread_detailed_prompt.txt", + "crates/agent_settings/src/prompts/summarize_thread_prompt.txt", + "crates/agent/src/templates/create_file_prompt.hbs", + "crates/agent/src/templates/edit_file_prompt_xml.hbs", + "crates/agent/src/templates/edit_file_prompt_diff_fenced.hbs", "crates/git_ui/src/commit_message_prompt.txt", ]; diff --git a/script/licenses/template.md.hbs b/script/licenses/template.md.hbs index cc986588fb4991b4dda2e882b0ac43c014e97bc5..f37761f2c569c3e3cbd481927df9a68865430a80 100644 --- a/script/licenses/template.md.hbs +++ b/script/licenses/template.md.hbs @@ -19,3 +19,33 @@ -------------------------------------------------------------------------------- {{/each}} + +#### MIT License + +##### Used by: + +* [Windows Terminal]( https://github.com/microsoft/terminal ) + +Copyright (c) Microsoft Corporation. All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +-------------------------------------------------------------------------------- diff --git a/script/licenses/zed-licenses.toml b/script/licenses/zed-licenses.toml index 9d13087ece08404e2cae1a44733e382521b8fdd0..4f7281a050863b26c6e012acbf116cecadcb4269 100644 --- a/script/licenses/zed-licenses.toml +++ b/script/licenses/zed-licenses.toml @@ -14,6 +14,7 @@ accepted = [ "Unicode-3.0", "OpenSSL", "Zlib", + "BSL-1.0", ] [procinfo.clarify] @@ -33,147 +34,3 @@ license = "BSD-3-Clause" [[fuchsia-cprng.clarify.files]] path = 'LICENSE' checksum = '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b' - -[pet.clarify] -license = "MIT" -[[pet.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-conda.clarify] -license = "MIT" -[[pet-conda.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-core.clarify] -license = "MIT" -[[pet-core.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-env-var-path.clarify] -license = "MIT" -[[pet-env-var-path.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-fs.clarify] -license = "MIT" -[[pet-fs.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-global-virtualenvs.clarify] -license = "MIT" -[[pet-global-virtualenvs.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-homebrew.clarify] -license = "MIT" -[[pet-homebrew.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-jsonrpc.clarify] -license = "MIT" -[[pet-jsonrpc.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-linux-global-python.clarify] -license = "MIT" -[[pet-linux-global-python.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-mac-commandlinetools.clarify] -license = "MIT" -[[pet-mac-commandlinetools.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-mac-python-org.clarify] -license = "MIT" -[[pet-mac-python-org.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-mac-xcode.clarify] -license = "MIT" -[[pet-mac-xcode.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-pipenv.clarify] -license = "MIT" -[[pet-pipenv.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-pixi.clarify] -license = "MIT" -[[pet-pixi.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-poetry.clarify] -license = "MIT" -[[pet-poetry.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-pyenv.clarify] -license = "MIT" -[[pet-pyenv.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-python-utils.clarify] -license = "MIT" 
-[[pet-python-utils.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-reporter.clarify] -license = "MIT" -[[pet-reporter.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-telemetry.clarify] -license = "MIT" -[[pet-telemetry.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-venv.clarify] -license = "MIT" -[[pet-venv.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-virtualenv.clarify] -license = "MIT" -[[pet-virtualenv.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-virtualenvwrapper.clarify] -license = "MIT" -[[pet-virtualenvwrapper.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-windows-registry.clarify] -license = "MIT" -[[pet-windows-registry.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' - -[pet-windows-store.clarify] -license = "MIT" -[[pet-windows-store.clarify.files]] -path = '../../LICENSE' -checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' diff --git a/script/new-crate b/script/new-crate index 1ac2d9262133c788969fe594b5b06480f1293fa7..52ee900b30837cbf77fa1e3145e0282fa5e19b7c 100755 --- a/script/new-crate +++ b/script/new-crate @@ -63,7 +63,6 @@ anyhow.workspace = true gpui.workspace = true ui.workspace = true util.workspace = true -workspace-hack.workspace = true # Uncomment other workspace dependencies as needed # assistant.workspace = true diff --git a/script/update-workspace-hack b/script/update-workspace-hack deleted file mode 100755 index 55f8aa9f926ace44652735f76c001f11be79b921..0000000000000000000000000000000000000000 --- a/script/update-workspace-hack +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -HAKARI_VERSION="0.9" - -cd "$(dirname "$0")/.." || exit 1 - -if ! cargo hakari --version | grep "cargo-hakari $HAKARI_VERSION" >/dev/null; then - echo "Installing cargo-hakari@^$HAKARI_VERSION..." - cargo install "cargo-hakari@^$HAKARI_VERSION" -else - echo "cargo-hakari@^$HAKARI_VERSION is already installed." -fi - -# update the workspace-hack crate -cargo hakari generate - -# make sure workspace-hack is added as a dep for all crates in the workspace -cargo hakari manage-deps diff --git a/script/update-workspace-hack.ps1 b/script/update-workspace-hack.ps1 deleted file mode 100644 index 060660724965da0c9bd05a30d5505afec6cea5e8..0000000000000000000000000000000000000000 --- a/script/update-workspace-hack.ps1 +++ /dev/null @@ -1,36 +0,0 @@ -$ErrorActionPreference = "Stop" - -$HAKARI_VERSION = "0.9" - -$scriptPath = Split-Path -Parent $MyInvocation.MyCommand.Path -Set-Location (Split-Path -Parent $scriptPath) - -$hakariInstalled = $false -try { - $versionOutput = cargo hakari --version 2>&1 - if ($versionOutput -match "cargo-hakari $HAKARI_VERSION") { - $hakariInstalled = $true - } -} -catch { - $hakariInstalled = $false -} - -if (-not $hakariInstalled) { - Write-Host "Installing cargo-hakari@^$HAKARI_VERSION..." 
- cargo install "cargo-hakari@^$HAKARI_VERSION" - if ($LASTEXITCODE -ne 0) { - throw "Failed to install cargo-hakari@^$HAKARI_VERSION" - } -} -else { - Write-Host "cargo-hakari@^$HAKARI_VERSION is already installed." -} - -# update the workspace-hack crate -cargo hakari generate -if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } - -# make sure workspace-hack is added as a dep for all crates in the workspace -cargo hakari manage-deps -if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } diff --git a/tooling/perf/Cargo.toml b/tooling/perf/Cargo.toml index bbca817a3e7fcdb2bbe599715f29604cc4b05a1c..d4acad1fdb80be1850582885a548162771619698 100644 --- a/tooling/perf/Cargo.toml +++ b/tooling/perf/Cargo.toml @@ -1,7 +1,7 @@ [package] -name = "zed-perf" +name = "perf" version = "0.1.0" -publish = true +publish = false edition.workspace = true license = "Apache-2.0" description = "A tool for measuring Zed test performance, with too many Clippy lints" @@ -30,4 +30,3 @@ disallowed_methods = { level = "allow", priority = 1} collections.workspace = true serde.workspace = true serde_json.workspace = true -workspace-hack.workspace = true diff --git a/tooling/perf/src/implementation.rs b/tooling/perf/src/implementation.rs new file mode 100644 index 0000000000000000000000000000000000000000..c151dda91f0bec64e261738ea593b233dedd9b62 --- /dev/null +++ b/tooling/perf/src/implementation.rs @@ -0,0 +1,450 @@ +//! The implementation of the this crate is kept in a separate module +//! so that it is easy to publish this crate as part of GPUI's dependencies + +use collections::HashMap; +use serde::{Deserialize, Serialize}; +use std::{num::NonZero, time::Duration}; + +pub mod consts { + //! Preset identifiers and constants so that the profiler and proc macro agree + //! on their communication protocol. + + /// The suffix on the actual test function. + pub const SUF_NORMAL: &str = "__ZED_PERF_FN"; + /// The suffix on an extra function which prints metadata about a test to stdout. + pub const SUF_MDATA: &str = "__ZED_PERF_MDATA"; + /// The env var in which we pass the iteration count to our tests. + pub const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; + /// The prefix printed on all benchmark test metadata lines, to distinguish it from + /// possible output by the test harness itself. + pub const MDATA_LINE_PREF: &str = "ZED_MDATA_"; + /// The version number for the data returned from the test metadata function. + /// Increment on non-backwards-compatible changes. + pub const MDATA_VER: u32 = 0; + /// The default weight, if none is specified. + pub const WEIGHT_DEFAULT: u8 = 50; + /// How long a test must have run to be assumed to be reliable-ish. + pub const NOISE_CUTOFF: std::time::Duration = std::time::Duration::from_millis(250); + + /// Identifier for the iteration count of a test metadata. + pub const ITER_COUNT_LINE_NAME: &str = "iter_count"; + /// Identifier for the weight of a test metadata. + pub const WEIGHT_LINE_NAME: &str = "weight"; + /// Identifier for importance in test metadata. + pub const IMPORTANCE_LINE_NAME: &str = "importance"; + /// Identifier for the test metadata version. + pub const VERSION_LINE_NAME: &str = "version"; + + /// Where to save json run information. + pub const RUNS_DIR: &str = ".perf-runs"; +} + +/// How relevant a benchmark is. +#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)] +pub enum Importance { + /// Regressions shouldn't be accepted without good reason. + Critical = 4, + /// Regressions should be paid extra attention. 
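The constants in `consts` above define the contract between the macro-generated test shims and the profiler: the requested iteration count travels through the ZED_PERF_ITER environment variable, and metadata is printed to stdout on lines prefixed with ZED_MDATA_ so the profiler can tell them apart from ordinary test output. A minimal sketch of the test-side half, assuming a simple "name: value" layout for the metadata lines (the exact separator is not pinned down in this diff):

use std::env;

fn main() {
    // The profiler hands the iteration count to the test process via ITER_ENV_VAR.
    let iters: usize = env::var("ZED_PERF_ITER")
        .ok()
        .and_then(|v| v.parse().ok())
        .unwrap_or(1);

    // Metadata lines carry the MDATA_LINE_PREF prefix; the ": " separator here
    // is an illustrative assumption only.
    println!("ZED_MDATA_{}: {}", "version", 0);
    println!("ZED_MDATA_{}: {}", "iter_count", iters);
}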
+ Important = 3, + /// No extra attention should be paid to regressions, but they might still + /// be indicative of something happening. + #[default] + Average = 2, + /// Unclear if regressions are likely to be meaningful, but still worth keeping + /// an eye on. Lowest level that's checked by default by the profiler. + Iffy = 1, + /// Regressions are likely to be spurious or don't affect core functionality. + /// Only relevant if a lot of them happen, or as supplemental evidence for a + /// higher-importance benchmark regressing. Not checked by default. + Fluff = 0, +} + +impl std::fmt::Display for Importance { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Importance::Critical => f.write_str("critical"), + Importance::Important => f.write_str("important"), + Importance::Average => f.write_str("average"), + Importance::Iffy => f.write_str("iffy"), + Importance::Fluff => f.write_str("fluff"), + } + } +} + +/// Why or when did this test fail? +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum FailKind { + /// Failed while triaging it to determine the iteration count. + Triage, + /// Failed while profiling it. + Profile, + /// Failed due to an incompatible version for the test. + VersionMismatch, + /// Could not parse metadata for a test. + BadMetadata, + /// Skipped due to filters applied on the perf run. + Skipped, +} + +impl std::fmt::Display for FailKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + FailKind::Triage => f.write_str("errored in triage"), + FailKind::Profile => f.write_str("errored while profiling"), + FailKind::VersionMismatch => f.write_str("test version mismatch"), + FailKind::BadMetadata => f.write_str("bad test metadata"), + FailKind::Skipped => f.write_str("skipped"), + } + } +} + +/// Information about a given perf test. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TestMdata { + /// A version number for when the test was generated. If this is greater + /// than the version this test handler expects, one of the following will + /// happen in an unspecified manner: + /// - The test is skipped silently. + /// - The handler exits with an error message indicating the version mismatch + /// or inability to parse the metadata. + /// + /// INVARIANT: If `version` <= `MDATA_VER`, this tool *must* be able to + /// correctly parse the output of this test. + pub version: u32, + /// How many iterations to pass this test if this is preset, or how many + /// iterations a test ended up running afterwards if determined at runtime. + pub iterations: Option>, + /// The importance of this particular test. See the docs on `Importance` for + /// details. + pub importance: Importance, + /// The weight of this particular test within its importance category. Used + /// when comparing across runs. + pub weight: u8, +} + +/// The actual timings of a test, as measured by Hyperfine. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Timings { + /// Mean runtime for `self.iter_total` runs of this test. + pub mean: Duration, + /// Standard deviation for the above. + pub stddev: Duration, +} + +impl Timings { + /// How many iterations does this test seem to do per second? + #[expect( + clippy::cast_precision_loss, + reason = "We only care about a couple sig figs anyways" + )] + #[must_use] + pub fn iters_per_sec(&self, total_iters: NonZero) -> f64 { + (1000. 
/ self.mean.as_millis() as f64) * total_iters.get() as f64
+    }
+}
+
+/// Aggregate results, meant to be used for a given importance category. Each
+/// test name corresponds to its benchmark results, iteration count, and weight.
+type CategoryInfo = HashMap<String, (Timings, NonZero<usize>, u8)>;
+
+/// Aggregate output of all tests run by this handler.
+#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+pub struct Output {
+    /// A list of test outputs. Format is `(test_name, mdata, timings)`.
+    /// The latter being `Ok(_)` indicates the test succeeded.
+    ///
+    /// INVARIANT: If the test succeeded, the second field is `Some(mdata)` and
+    /// `mdata.iterations` is `Some(_)`.
+    tests: Vec<(String, Option<TestMdata>, Result<Timings, FailKind>)>,
+}
+
+impl Output {
+    /// Instantiates an empty "output". Useful for merging.
+    #[must_use]
+    pub fn blank() -> Self {
+        Output { tests: Vec::new() }
+    }
+
+    /// Reports a success and adds it to this run's `Output`.
+    pub fn success(
+        &mut self,
+        name: impl AsRef<str>,
+        mut mdata: TestMdata,
+        iters: NonZero<usize>,
+        timings: Timings,
+    ) {
+        mdata.iterations = Some(iters);
+        self.tests
+            .push((name.as_ref().to_string(), Some(mdata), Ok(timings)));
+    }
+
+    /// Reports a failure and adds it to this run's `Output`. If this test was tried
+    /// with some number of iterations (i.e. this was not a version mismatch or skipped
+    /// test), it should be reported also.
+    ///
+    /// Using the `fail!()` macro is usually more convenient.
+    pub fn failure(
+        &mut self,
+        name: impl AsRef<str>,
+        mut mdata: Option<TestMdata>,
+        attempted_iters: Option<NonZero<usize>>,
+        kind: FailKind,
+    ) {
+        if let Some(ref mut mdata) = mdata {
+            mdata.iterations = attempted_iters;
+        }
+        self.tests
+            .push((name.as_ref().to_string(), mdata, Err(kind)));
+    }
+
+    /// True if no tests executed this run.
+    #[must_use]
+    pub fn is_empty(&self) -> bool {
+        self.tests.is_empty()
+    }
+
+    /// Sorts the runs in the output in the order that we want them printed.
+    pub fn sort(&mut self) {
+        self.tests.sort_unstable_by(|a, b| match (a, b) {
+            // Tests where we got no metadata go at the end.
+            ((_, Some(_), _), (_, None, _)) => std::cmp::Ordering::Greater,
+            ((_, None, _), (_, Some(_), _)) => std::cmp::Ordering::Less,
+            // Then sort by importance, then weight.
+            ((_, Some(a_mdata), _), (_, Some(b_mdata), _)) => {
+                let c = a_mdata.importance.cmp(&b_mdata.importance);
+                if matches!(c, std::cmp::Ordering::Equal) {
+                    a_mdata.weight.cmp(&b_mdata.weight)
+                } else {
+                    c
+                }
+            }
+            // Lastly by name.
+            ((a_name, ..), (b_name, ..)) => a_name.cmp(b_name),
+        });
+    }
+
+    /// Merges the output of two runs, appending a prefix to the results of the new run.
+    /// To be used in conjunction with `Output::blank()`, or else only some tests will have
+    /// a prefix set.
+    pub fn merge<'a>(&mut self, other: Self, pref_other: impl Into<Option<&'a str>>) {
+        let pref = if let Some(pref) = pref_other.into() {
+            "crates/".to_string() + pref + "::"
+        } else {
+            String::new()
+        };
+        self.tests = std::mem::take(&mut self.tests)
+            .into_iter()
+            .chain(
+                other
+                    .tests
+                    .into_iter()
+                    .map(|(name, md, tm)| (pref.clone() + &name, md, tm)),
+            )
+            .collect();
+    }
+
+    /// Evaluates the performance of `self` against `baseline`. The latter is taken
+    /// as the comparison point, i.e. a positive resulting `PerfReport` means that
+    /// `self` performed better.
+    ///
+    /// # Panics
+    /// `self` and `baseline` are assumed to have the iterations field on all
+    /// `TestMdata`s set to `Some(_)` if the `TestMdata` is present itself.
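As a quick check of the arithmetic in `iters_per_sec` above: a mean of 500 ms covering 100 iterations works out to (1000 / 500) * 100 = 200 iterations per second. A standalone sketch of the same formula, not using the crate's types:

use std::time::Duration;

// Mirrors the iters_per_sec formula: iterations per second, given a mean wall
// time that covers `total_iters` runs.
fn iters_per_sec(mean: Duration, total_iters: u64) -> f64 {
    (1000.0 / mean.as_millis() as f64) * total_iters as f64
}

fn main() {
    // 500 ms for 100 iterations -> 200 iterations per second.
    assert_eq!(iters_per_sec(Duration::from_millis(500), 100), 200.0);
}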
+ #[must_use] + pub fn compare_perf(self, baseline: Self) -> PerfReport { + let self_categories = self.collapse(); + let mut other_categories = baseline.collapse(); + + let deltas = self_categories + .into_iter() + .filter_map(|(cat, self_data)| { + // Only compare categories where both meow + // runs have data. / + let mut other_data = other_categories.remove(&cat)?; + let mut max = f64::MIN; + let mut min = f64::MAX; + + // Running totals for averaging out tests. + let mut r_total_numerator = 0.; + let mut r_total_denominator = 0; + // Yeah this is O(n^2), but realistically it'll hardly be a bottleneck. + for (name, (s_timings, s_iters, weight)) in self_data { + // Only use the new weights if they conflict. + let Some((o_timings, o_iters, _)) = other_data.remove(&name) else { + continue; + }; + let shift = + (o_timings.iters_per_sec(o_iters) / s_timings.iters_per_sec(s_iters)) - 1.; + if shift > max { + max = shift; + } + if shift < min { + min = shift; + } + r_total_numerator += shift * f64::from(weight); + r_total_denominator += u32::from(weight); + } + // There were no runs here! + if r_total_denominator == 0 { + None + } else { + let mean = r_total_numerator / f64::from(r_total_denominator); + // TODO: also aggregate standard deviation? That's harder to keep + // meaningful, though, since we dk which tests are correlated. + Some((cat, PerfDelta { max, mean, min })) + } + }) + .collect(); + + PerfReport { deltas } + } + + /// Collapses the `PerfReport` into a `HashMap` over `Importance`, with + /// each importance category having its tests contained. + fn collapse(self) -> HashMap { + let mut categories = HashMap::>::default(); + for entry in self.tests { + if let Some(mdata) = entry.1 + && let Ok(timings) = entry.2 + { + if let Some(handle) = categories.get_mut(&mdata.importance) { + handle.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); + } else { + let mut new = HashMap::default(); + new.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); + categories.insert(mdata.importance, new); + } + } + } + + categories + } +} + +impl std::fmt::Display for Output { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // Don't print the header for an empty run. + if self.tests.is_empty() { + return Ok(()); + } + + // We want to print important tests at the top, then alphabetical. + let mut sorted = self.clone(); + sorted.sort(); + // Markdown header for making a nice little table :> + writeln!( + f, + "| Command | Iter/sec | Mean [ms] | SD [ms] | Iterations | Importance (weight) |", + )?; + writeln!(f, "|:---|---:|---:|---:|---:|---:|")?; + for (name, metadata, timings) in &sorted.tests { + match metadata { + Some(metadata) => match timings { + // Happy path. + Ok(timings) => { + // If the test succeeded, then metadata.iterations is Some(_). + writeln!( + f, + "| {} | {:.2} | {} | {:.2} | {} | {} ({}) |", + name, + timings.iters_per_sec(metadata.iterations.unwrap()), + { + // Very small mean runtimes will give inaccurate + // results. Should probably also penalise weight. + let mean = timings.mean.as_secs_f64() * 1000.; + if mean < consts::NOISE_CUTOFF.as_secs_f64() * 1000. / 8. { + format!("{mean:.2} (unreliable)") + } else { + format!("{mean:.2}") + } + }, + timings.stddev.as_secs_f64() * 1000., + metadata.iterations.unwrap(), + metadata.importance, + metadata.weight, + )?; + } + // We have (some) metadata, but the test errored. 
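The "(unreliable)" marker in the table output above kicks in when the mean is under NOISE_CUTOFF / 8, i.e. 250 ms / 8 = 31.25 ms. A self-contained sketch of that check:

use std::time::Duration;

const NOISE_CUTOFF: Duration = Duration::from_millis(250);

// Mirrors the check in the Display impl: very short mean runtimes get flagged.
fn format_mean(mean: Duration) -> String {
    let mean_ms = mean.as_secs_f64() * 1000.;
    if mean_ms < NOISE_CUTOFF.as_secs_f64() * 1000. / 8. {
        format!("{mean_ms:.2} (unreliable)")
    } else {
        format!("{mean_ms:.2}")
    }
}

fn main() {
    assert_eq!(format_mean(Duration::from_millis(20)), "20.00 (unreliable)");
    assert_eq!(format_mean(Duration::from_millis(40)), "40.00");
}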
+ Err(err) => writeln!( + f, + "| ({}) {} | N/A | N/A | N/A | {} | {} ({}) |", + err, + name, + metadata + .iterations + .map_or_else(|| "N/A".to_owned(), |i| format!("{i}")), + metadata.importance, + metadata.weight + )?, + }, + // No metadata, couldn't even parse the test output. + None => writeln!( + f, + "| ({}) {} | N/A | N/A | N/A | N/A | N/A |", + timings.as_ref().unwrap_err(), + name + )?, + } + } + Ok(()) + } +} + +/// The difference in performance between two runs within a given importance +/// category. +struct PerfDelta { + /// The biggest improvement / least bad regression. + max: f64, + /// The weighted average change in test times. + mean: f64, + /// The worst regression / smallest improvement. + min: f64, +} + +/// Shim type for reporting all performance deltas across importance categories. +pub struct PerfReport { + /// Inner (group, diff) pairing. + deltas: HashMap, +} + +impl std::fmt::Display for PerfReport { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.deltas.is_empty() { + return write!(f, "(no matching tests)"); + } + let sorted = self.deltas.iter().collect::>(); + writeln!(f, "| Category | Max | Mean | Min |")?; + // We don't want to print too many newlines at the end, so handle newlines + // a little jankily like this. + write!(f, "|:---|---:|---:|---:|")?; + for (cat, delta) in sorted.into_iter().rev() { + const SIGN_POS: &str = "↑"; + const SIGN_NEG: &str = "↓"; + const SIGN_NEUTRAL_POS: &str = "±↑"; + const SIGN_NEUTRAL_NEG: &str = "±↓"; + + let prettify = |time: f64| { + let sign = if time > 0.05 { + SIGN_POS + } else if time > 0. { + SIGN_NEUTRAL_POS + } else if time > -0.05 { + SIGN_NEUTRAL_NEG + } else { + SIGN_NEG + }; + format!("{} {:.1}%", sign, time.abs() * 100.) + }; + + // Pretty-print these instead of just using the float display impl. + write!( + f, + "\n| {cat} | {} | {} | {} |", + prettify(delta.max), + prettify(delta.mean), + prettify(delta.min) + )?; + } + Ok(()) + } +} diff --git a/tooling/perf/src/lib.rs b/tooling/perf/src/lib.rs index 3272f179d8d1813948228ee951a46dc683c8a8a5..7933e66e799c66fa317d2e80e5393a21a6149af5 100644 --- a/tooling/perf/src/lib.rs +++ b/tooling/perf/src/lib.rs @@ -3,447 +3,5 @@ //! //! For usage documentation, see the docs on this crate's binary. -use collections::HashMap; -use serde::{Deserialize, Serialize}; -use std::{num::NonZero, time::Duration}; - -pub mod consts { - //! Preset idenitifiers and constants so that the profiler and proc macro agree - //! on their communication protocol. - - /// The suffix on the actual test function. - pub const SUF_NORMAL: &str = "__ZED_PERF_FN"; - /// The suffix on an extra function which prints metadata about a test to stdout. - pub const SUF_MDATA: &str = "__ZED_PERF_MDATA"; - /// The env var in which we pass the iteration count to our tests. - pub const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; - /// The prefix printed on all benchmark test metadata lines, to distinguish it from - /// possible output by the test harness itself. - pub const MDATA_LINE_PREF: &str = "ZED_MDATA_"; - /// The version number for the data returned from the test metadata function. - /// Increment on non-backwards-compatible changes. - pub const MDATA_VER: u32 = 0; - /// The default weight, if none is specified. - pub const WEIGHT_DEFAULT: u8 = 50; - /// How long a test must have run to be assumed to be reliable-ish. - pub const NOISE_CUTOFF: std::time::Duration = std::time::Duration::from_millis(250); - - /// Identifier for the iteration count of a test metadata. 
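A standalone sketch of the sign selection used by `prettify` in the PerfReport table above: shifts beyond plus or minus 5% get a plain arrow, anything inside that band gets the ± variant.

// Mirrors the thresholds in the new PerfReport Display impl.
fn prettify(delta: f64) -> String {
    let sign = if delta > 0.05 {
        "↑"
    } else if delta > 0. {
        "±↑"
    } else if delta > -0.05 {
        "±↓"
    } else {
        "↓"
    };
    format!("{} {:.1}%", sign, delta.abs() * 100.)
}

fn main() {
    assert_eq!(prettify(0.02), "±↑ 2.0%");
    assert_eq!(prettify(-0.10), "↓ 10.0%");
}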
- pub const ITER_COUNT_LINE_NAME: &str = "iter_count"; - /// Identifier for the weight of a test metadata. - pub const WEIGHT_LINE_NAME: &str = "weight"; - /// Identifier for importance in test metadata. - pub const IMPORTANCE_LINE_NAME: &str = "importance"; - /// Identifier for the test metadata version. - pub const VERSION_LINE_NAME: &str = "version"; - - /// Where to save json run information. - pub const RUNS_DIR: &str = ".perf-runs"; -} - -/// How relevant a benchmark is. -#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)] -pub enum Importance { - /// Regressions shouldn't be accepted without good reason. - Critical = 4, - /// Regressions should be paid extra attention. - Important = 3, - /// No extra attention should be paid to regressions, but they might still - /// be indicative of something happening. - #[default] - Average = 2, - /// Unclear if regressions are likely to be meaningful, but still worth keeping - /// an eye on. Lowest level that's checked by default by the profiler. - Iffy = 1, - /// Regressions are likely to be spurious or don't affect core functionality. - /// Only relevant if a lot of them happen, or as supplemental evidence for a - /// higher-importance benchmark regressing. Not checked by default. - Fluff = 0, -} - -impl std::fmt::Display for Importance { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Importance::Critical => f.write_str("critical"), - Importance::Important => f.write_str("important"), - Importance::Average => f.write_str("average"), - Importance::Iffy => f.write_str("iffy"), - Importance::Fluff => f.write_str("fluff"), - } - } -} - -/// Why or when did this test fail? -#[derive(Clone, Debug, Serialize, Deserialize)] -pub enum FailKind { - /// Failed while triaging it to determine the iteration count. - Triage, - /// Failed while profiling it. - Profile, - /// Failed due to an incompatible version for the test. - VersionMismatch, - /// Could not parse metadata for a test. - BadMetadata, - /// Skipped due to filters applied on the perf run. - Skipped, -} - -impl std::fmt::Display for FailKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - FailKind::Triage => f.write_str("errored in triage"), - FailKind::Profile => f.write_str("errored while profiling"), - FailKind::VersionMismatch => f.write_str("test version mismatch"), - FailKind::BadMetadata => f.write_str("bad test metadata"), - FailKind::Skipped => f.write_str("skipped"), - } - } -} - -/// Information about a given perf test. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct TestMdata { - /// A version number for when the test was generated. If this is greater - /// than the version this test handler expects, one of the following will - /// happen in an unspecified manner: - /// - The test is skipped silently. - /// - The handler exits with an error message indicating the version mismatch - /// or inability to parse the metadata. - /// - /// INVARIANT: If `version` <= `MDATA_VER`, this tool *must* be able to - /// correctly parse the output of this test. - pub version: u32, - /// How many iterations to pass this test if this is preset, or how many - /// iterations a test ended up running afterwards if determined at runtime. - pub iterations: Option>, - /// The importance of this particular test. See the docs on `Importance` for - /// details. - pub importance: Importance, - /// The weight of this particular test within its importance category. 
Used - /// when comparing across runs. - pub weight: u8, -} - -/// The actual timings of a test, as measured by Hyperfine. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Timings { - /// Mean runtime for `self.iter_total` runs of this test. - pub mean: Duration, - /// Standard deviation for the above. - pub stddev: Duration, -} - -impl Timings { - /// How many iterations does this test seem to do per second? - #[expect( - clippy::cast_precision_loss, - reason = "We only care about a couple sig figs anyways" - )] - #[must_use] - pub fn iters_per_sec(&self, total_iters: NonZero) -> f64 { - (1000. / self.mean.as_millis() as f64) * total_iters.get() as f64 - } -} - -/// Aggregate results, meant to be used for a given importance category. Each -/// test name corresponds to its benchmark results, iteration count, and weight. -type CategoryInfo = HashMap, u8)>; - -/// Aggregate output of all tests run by this handler. -#[derive(Clone, Debug, Default, Serialize, Deserialize)] -pub struct Output { - /// A list of test outputs. Format is `(test_name, mdata, timings)`. - /// The latter being `Ok(_)` indicates the test succeeded. - /// - /// INVARIANT: If the test succeeded, the second field is `Some(mdata)` and - /// `mdata.iterations` is `Some(_)`. - tests: Vec<(String, Option, Result)>, -} - -impl Output { - /// Instantiates an empty "output". Useful for merging. - #[must_use] - pub fn blank() -> Self { - Output { tests: Vec::new() } - } - - /// Reports a success and adds it to this run's `Output`. - pub fn success( - &mut self, - name: impl AsRef, - mut mdata: TestMdata, - iters: NonZero, - timings: Timings, - ) { - mdata.iterations = Some(iters); - self.tests - .push((name.as_ref().to_string(), Some(mdata), Ok(timings))); - } - - /// Reports a failure and adds it to this run's `Output`. If this test was tried - /// with some number of iterations (i.e. this was not a version mismatch or skipped - /// test), it should be reported also. - /// - /// Using the `fail!()` macro is usually more convenient. - pub fn failure( - &mut self, - name: impl AsRef, - mut mdata: Option, - attempted_iters: Option>, - kind: FailKind, - ) { - if let Some(ref mut mdata) = mdata { - mdata.iterations = attempted_iters; - } - self.tests - .push((name.as_ref().to_string(), mdata, Err(kind))); - } - - /// True if no tests executed this run. - #[must_use] - pub fn is_empty(&self) -> bool { - self.tests.is_empty() - } - - /// Sorts the runs in the output in the order that we want them printed. - pub fn sort(&mut self) { - self.tests.sort_unstable_by(|a, b| match (a, b) { - // Tests where we got no metadata go at the end. - ((_, Some(_), _), (_, None, _)) => std::cmp::Ordering::Greater, - ((_, None, _), (_, Some(_), _)) => std::cmp::Ordering::Less, - // Then sort by importance, then weight. - ((_, Some(a_mdata), _), (_, Some(b_mdata), _)) => { - let c = a_mdata.importance.cmp(&b_mdata.importance); - if matches!(c, std::cmp::Ordering::Equal) { - a_mdata.weight.cmp(&b_mdata.weight) - } else { - c - } - } - // Lastly by name. - ((a_name, ..), (b_name, ..)) => a_name.cmp(b_name), - }); - } - - /// Merges the output of two runs, appending a prefix to the results of the new run. - /// To be used in conjunction with `Output::blank()`, or else only some tests will have - /// a prefix set. 
- pub fn merge<'a>(&mut self, other: Self, pref_other: impl Into>) { - let pref = if let Some(pref) = pref_other.into() { - "crates/".to_string() + pref + "::" - } else { - String::new() - }; - self.tests = std::mem::take(&mut self.tests) - .into_iter() - .chain( - other - .tests - .into_iter() - .map(|(name, md, tm)| (pref.clone() + &name, md, tm)), - ) - .collect(); - } - - /// Evaluates the performance of `self` against `baseline`. The latter is taken - /// as the comparison point, i.e. a positive resulting `PerfReport` means that - /// `self` performed better. - /// - /// # Panics - /// `self` and `baseline` are assumed to have the iterations field on all - /// `TestMdata`s set to `Some(_)` if the `TestMdata` is present itself. - #[must_use] - pub fn compare_perf(self, baseline: Self) -> PerfReport { - let self_categories = self.collapse(); - let mut other_categories = baseline.collapse(); - - let deltas = self_categories - .into_iter() - .filter_map(|(cat, self_data)| { - // Only compare categories where both meow - // runs have data. / - let mut other_data = other_categories.remove(&cat)?; - let mut max = f64::MIN; - let mut min = f64::MAX; - - // Running totals for averaging out tests. - let mut r_total_numerator = 0.; - let mut r_total_denominator = 0; - // Yeah this is O(n^2), but realistically it'll hardly be a bottleneck. - for (name, (s_timings, s_iters, weight)) in self_data { - // Only use the new weights if they conflict. - let Some((o_timings, o_iters, _)) = other_data.remove(&name) else { - continue; - }; - let shift = - (o_timings.iters_per_sec(o_iters) / s_timings.iters_per_sec(s_iters)) - 1.; - if shift > max { - max = shift; - } - if shift < min { - min = shift; - } - r_total_numerator += shift * f64::from(weight); - r_total_denominator += u32::from(weight); - } - // There were no runs here! - if r_total_denominator == 0 { - None - } else { - let mean = r_total_numerator / f64::from(r_total_denominator); - // TODO: also aggregate standard deviation? That's harder to keep - // meaningful, though, since we dk which tests are correlated. - Some((cat, PerfDelta { max, mean, min })) - } - }) - .collect(); - - PerfReport { deltas } - } - - /// Collapses the `PerfReport` into a `HashMap` over `Importance`, with - /// each importance category having its tests contained. - fn collapse(self) -> HashMap { - let mut categories = HashMap::>::default(); - for entry in self.tests { - if let Some(mdata) = entry.1 - && let Ok(timings) = entry.2 - { - if let Some(handle) = categories.get_mut(&mdata.importance) { - handle.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); - } else { - let mut new = HashMap::default(); - new.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); - categories.insert(mdata.importance, new); - } - } - } - - categories - } -} - -impl std::fmt::Display for Output { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - // Don't print the header for an empty run. - if self.tests.is_empty() { - return Ok(()); - } - - // We want to print important tests at the top, then alphabetical. - let mut sorted = self.clone(); - sorted.sort(); - // Markdown header for making a nice little table :> - writeln!( - f, - "| Command | Iter/sec | Mean [ms] | SD [ms] | Iterations | Importance (weight) |", - )?; - writeln!(f, "|:---|---:|---:|---:|---:|---:|")?; - for (name, metadata, timings) in &sorted.tests { - match metadata { - Some(metadata) => match timings { - // Happy path. 
- Ok(timings) => { - // If the test succeeded, then metadata.iterations is Some(_). - writeln!( - f, - "| {} | {:.2} | {} | {:.2} | {} | {} ({}) |", - name, - timings.iters_per_sec(metadata.iterations.unwrap()), - { - // Very small mean runtimes will give inaccurate - // results. Should probably also penalise weight. - let mean = timings.mean.as_secs_f64() * 1000.; - if mean < consts::NOISE_CUTOFF.as_secs_f64() * 1000. / 8. { - format!("{mean:.2} (unreliable)") - } else { - format!("{mean:.2}") - } - }, - timings.stddev.as_secs_f64() * 1000., - metadata.iterations.unwrap(), - metadata.importance, - metadata.weight, - )?; - } - // We have (some) metadata, but the test errored. - Err(err) => writeln!( - f, - "| ({}) {} | N/A | N/A | N/A | {} | {} ({}) |", - err, - name, - metadata - .iterations - .map_or_else(|| "N/A".to_owned(), |i| format!("{i}")), - metadata.importance, - metadata.weight - )?, - }, - // No metadata, couldn't even parse the test output. - None => writeln!( - f, - "| ({}) {} | N/A | N/A | N/A | N/A | N/A |", - timings.as_ref().unwrap_err(), - name - )?, - } - } - Ok(()) - } -} - -/// The difference in performance between two runs within a given importance -/// category. -struct PerfDelta { - /// The biggest improvement / least bad regression. - max: f64, - /// The weighted average change in test times. - mean: f64, - /// The worst regression / smallest improvement. - min: f64, -} - -/// Shim type for reporting all performance deltas across importance categories. -pub struct PerfReport { - /// Inner (group, diff) pairing. - deltas: HashMap, -} - -impl std::fmt::Display for PerfReport { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if self.deltas.is_empty() { - return write!(f, "(no matching tests)"); - } - let sorted = self.deltas.iter().collect::>(); - writeln!(f, "| Category | Max | Mean | Min |")?; - // We don't want to print too many newlines at the end, so handle newlines - // a little jankily like this. - write!(f, "|:---|---:|---:|---:|")?; - for (cat, delta) in sorted.into_iter().rev() { - const SIGN_POS: &str = "↑"; - const SIGN_NEG: &str = "↓"; - const SIGN_NEUTRAL: &str = "±"; - - let prettify = |time: f64| { - let sign = if time > 0.05 { - SIGN_POS - } else if time < 0.05 && time > -0.05 { - SIGN_NEUTRAL - } else { - SIGN_NEG - }; - format!("{} {:.1}%", sign, time.abs() * 100.) - }; - - // Pretty-print these instead of just using the float display impl. - write!( - f, - "\n| {cat} | {} | {} | {} |", - prettify(delta.max), - prettify(delta.mean), - prettify(delta.min) - )?; - } - Ok(()) - } -} +mod implementation; +pub use implementation::*; diff --git a/tooling/perf/src/main.rs b/tooling/perf/src/main.rs index 910b172958a0d9ade05c907b38794a6afd993f03..1e6ddedf11e2c5f265d3d4dd93785afbf7f565d2 100644 --- a/tooling/perf/src/main.rs +++ b/tooling/perf/src/main.rs @@ -46,11 +46,13 @@ //! This should probably not be called manually unless you're working on the profiler //! itself; use the `cargo perf-test` alias (after building this crate) instead. 
-use zed_perf::{FailKind, Importance, Output, TestMdata, Timings, consts}; +mod implementation; + +use implementation::{FailKind, Importance, Output, TestMdata, Timings, consts}; use std::{ fs::OpenOptions, - io::Write, + io::{Read, Write}, num::NonZero, path::{Path, PathBuf}, process::{Command, Stdio}, @@ -226,8 +228,8 @@ fn compare_profiles(args: &[String]) { a.strip_prefix("--save=") .expect("FATAL: save param formatted incorrectly"), ); + ident_idx = 1; } - ident_idx = 1; }); let ident_new = args .get(ident_idx) @@ -264,8 +266,14 @@ fn compare_profiles(args: &[String]) { let prefix = elems.next().unwrap(); assert_eq!("json", elems.next().unwrap()); assert!(elems.next().is_none()); - let handle = OpenOptions::new().read(true).open(entry.path()).unwrap(); - let o_other: Output = serde_json::from_reader(handle).unwrap(); + let mut buffer = Vec::new(); + let _ = OpenOptions::new() + .read(true) + .open(entry.path()) + .unwrap() + .read_to_end(&mut buffer) + .unwrap(); + let o_other: Output = serde_json::from_slice(&buffer).unwrap(); output.merge(o_other, prefix); }; diff --git a/tooling/workspace-hack/.gitattributes b/tooling/workspace-hack/.gitattributes deleted file mode 100644 index 3e9dba4b64b5c01d9eeff4542140973313165470..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/.gitattributes +++ /dev/null @@ -1,4 +0,0 @@ -# Avoid putting conflict markers in the generated Cargo.toml file, since their presence breaks -# Cargo. -# Also do not check out the file as CRLF on Windows, as that's what hakari needs. -Cargo.toml merge=binary -crlf diff --git a/tooling/workspace-hack/.ignore b/tooling/workspace-hack/.ignore deleted file mode 100644 index 0eded960b456938b5f0937756a07b257451f9a26..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/.ignore +++ /dev/null @@ -1,2 +0,0 @@ -# prevent cargo-machete from analyzing this crate -Cargo.toml diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml deleted file mode 100644 index 6434f69723d7c525a0fdbae9cb71d3073874c913..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/Cargo.toml +++ /dev/null @@ -1,738 +0,0 @@ -# This file is generated by `cargo hakari`. -# To regenerate, run: -# cargo install cargo-hakari -# cargo hakari generate - -[package] -name = "workspace-hack" -version = "0.1.0" -description = "workspace-hack package, managed by hakari" -edition.workspace = true -publish.workspace = true - -# The parts of the file between the BEGIN HAKARI SECTION and END HAKARI SECTION comments -# are managed by hakari. 
- -### BEGIN HAKARI SECTION -[dependencies] -ahash = { version = "0.8", features = ["serde"] } -aho-corasick = { version = "1" } -anstream = { version = "0.6" } -arrayvec = { version = "0.7", features = ["serde"] } -async-compression = { version = "0.4", default-features = false, features = ["deflate", "deflate64", "futures-io", "gzip"] } -async-std = { version = "1", features = ["attributes", "unstable"] } -async-tungstenite = { version = "0.29", features = ["tokio-rustls-manual-roots"] } -aws-config = { version = "1", features = ["behavior-version-latest"] } -aws-credential-types = { version = "1", default-features = false, features = ["hardcoded-credentials", "test-util"] } -aws-runtime = { version = "1", default-features = false, features = ["event-stream", "http-02x", "sigv4a"] } -aws-sigv4 = { version = "1", features = ["http0-compat", "sign-eventstream", "sigv4a"] } -aws-smithy-async = { version = "1", default-features = false, features = ["rt-tokio"] } -aws-smithy-http = { version = "0.62", default-features = false, features = ["event-stream"] } -aws-smithy-runtime = { version = "1", default-features = false, features = ["client", "default-https-client", "rt-tokio", "tls-rustls"] } -aws-smithy-runtime-api = { version = "1", features = ["client", "http-02x", "http-auth", "test-util"] } -aws-smithy-types = { version = "1", default-features = false, features = ["byte-stream-poll-next", "http-body-0-4-x", "http-body-1-x", "rt-tokio", "test-util"] } -base64 = { version = "0.22" } -base64ct = { version = "1", default-features = false, features = ["std"] } -bigdecimal = { version = "0.4", features = ["serde"] } -bit-set = { version = "0.8", default-features = false, features = ["std"] } -bit-vec = { version = "0.8", default-features = false, features = ["std"] } -bitflags = { version = "2", default-features = false, features = ["serde", "std"] } -bstr = { version = "1" } -bytemuck = { version = "1", default-features = false, features = ["aarch64_simd", "derive", "extern_crate_alloc"] } -byteorder = { version = "1" } -bytes = { version = "1" } -chrono = { version = "0.4", features = ["serde"] } -clap = { version = "4", features = ["cargo", "derive", "string", "wrap_help"] } -clap_builder = { version = "4", default-features = false, features = ["cargo", "color", "std", "string", "suggestions", "usage", "wrap_help"] } -concurrent-queue = { version = "2" } -cranelift-codegen = { version = "0.116", default-features = false, features = ["host-arch", "incremental-cache", "std", "timing", "unwind"] } -crossbeam-channel = { version = "0.5" } -crossbeam-epoch = { version = "0.9" } -crossbeam-utils = { version = "0.8" } -deranged = { version = "0.4", default-features = false, features = ["powerfmt", "serde", "std"] } -digest = { version = "0.10", features = ["mac", "oid", "std"] } -either = { version = "1", features = ["serde", "use_std"] } -euclid = { version = "0.22" } -event-listener = { version = "5" } -event-listener-strategy = { version = "0.5" } -form_urlencoded = { version = "1" } -futures = { version = "0.3", features = ["io-compat"] } -futures-channel = { version = "0.3", features = ["sink"] } -futures-core = { version = "0.3" } -futures-executor = { version = "0.3" } -futures-io = { version = "0.3" } -futures-sink = { version = "0.3" } -futures-task = { version = "0.3", default-features = false, features = ["std"] } -futures-util = { version = "0.3", features = ["channel", "io-compat", "sink"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = 
false, features = ["std"] } -half = { version = "2", features = ["bytemuck", "num-traits", "rand_distr", "use-intrinsics"] } -handlebars = { version = "4", features = ["rust-embed"] } -hashbrown-3575ec1268b04181 = { package = "hashbrown", version = "0.15", features = ["serde"] } -hashbrown-582f2526e08bb6a0 = { package = "hashbrown", version = "0.14", features = ["raw"] } -hmac = { version = "0.12", default-features = false, features = ["reset"] } -hyper = { version = "0.14", features = ["client", "http1", "http2", "runtime", "server", "stream"] } -idna = { version = "1" } -indexmap = { version = "2", features = ["serde"] } -itertools-5ef9efb8ec2df382 = { package = "itertools", version = "0.12" } -lazy_static = { version = "1", default-features = false, features = ["spin_no_std"] } -libc = { version = "0.2", features = ["extra_traits"] } -libsqlite3-sys = { version = "0.30", features = ["bundled", "unlock_notify"] } -log = { version = "0.4", default-features = false, features = ["kv_unstable_serde"] } -lyon = { version = "1", default-features = false, features = ["extra"] } -lyon_path = { version = "1" } -md-5 = { version = "0.10" } -memchr = { version = "2" } -memmap2 = { version = "0.9", default-features = false, features = ["stable_deref_trait"] } -mime_guess = { version = "2" } -miniz_oxide = { version = "0.8", features = ["simd"] } -nom = { version = "7" } -num-bigint = { version = "0.4" } -num-integer = { version = "0.1", features = ["i128"] } -num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } -num-rational = { version = "0.4", features = ["num-bigint-std"] } -num-traits = { version = "0.2", features = ["i128", "libm"] } -once_cell = { version = "1" } -percent-encoding = { version = "2" } -phf = { version = "0.11", features = ["macros"] } -phf_shared = { version = "0.11" } -prost-274715c4dabd11b0 = { package = "prost", version = "0.9" } -prost-types = { version = "0.9" } -rand-c38e5c1d305a1b54 = { package = "rand", version = "0.8", features = ["small_rng"] } -rand_chacha = { version = "0.3", default-features = false, features = ["std"] } -rand_core = { version = "0.6", default-features = false, features = ["std"] } -rand_distr = { version = "0.5" } -regalloc2 = { version = "0.11", features = ["checker", "enable-serde"] } -regex = { version = "1" } -regex-automata = { version = "0.4" } -regex-syntax = { version = "0.8" } -rust_decimal = { version = "1", default-features = false, features = ["maths", "serde", "std"] } -rustc-hash = { version = "1" } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net"] } -rustls = { version = "0.23", features = ["ring"] } -rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] } -sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] } -sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] } -semver = { version = "1", features = ["serde"] } -serde = { version = "1", features = ["alloc", "derive", "rc"] } -serde_core = { version = "1", default-features = false, features = ["alloc", "rc", "result", "std"] } -serde_json = { version = "1", features = ["alloc", "preserve_order", "raw_value", "unbounded_depth"] } -simd-adler32 = { version = "0.3" } -smallvec = { version = "1", default-features = false, features = ["const_new", "serde", 
"union"] } -spin = { version = "0.9" } -sqlx = { version = "0.8", features = ["bigdecimal", "chrono", "postgres", "runtime-tokio-rustls", "rust_decimal", "sqlite", "time", "uuid"] } -sqlx-postgres = { version = "0.8", default-features = false, features = ["any", "bigdecimal", "chrono", "json", "migrate", "offline", "rust_decimal", "time", "uuid"] } -sqlx-sqlite = { version = "0.8", default-features = false, features = ["any", "bundled", "chrono", "json", "migrate", "offline", "time", "uuid"] } -stable_deref_trait = { version = "1" } -strum = { version = "0.26", features = ["derive"] } -subtle = { version = "2" } -thiserror = { version = "2" } -time = { version = "0.3", features = ["local-offset", "macros", "serde-well-known"] } -tokio = { version = "1", features = ["full"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["tls12"] } -tokio-util = { version = "0.7", features = ["codec", "compat", "io"] } -toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } -toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] } -tracing = { version = "0.1", features = ["log"] } -tracing-core = { version = "0.1" } -tungstenite = { version = "0.26", default-features = false, features = ["__rustls-tls", "handshake"] } -unicode-properties = { version = "0.1" } -url = { version = "2", features = ["serde"] } -uuid = { version = "1", features = ["serde", "v4", "v5", "v7"] } -wasmparser = { version = "0.221" } -wasmtime = { version = "29", default-features = false, features = ["async", "component-model", "cranelift", "demangle", "gc-drc", "incremental-cache", "parallel-compilation"] } -wasmtime-cranelift = { version = "29", default-features = false, features = ["component-model", "gc-drc", "incremental-cache"] } -wasmtime-environ = { version = "29", default-features = false, features = ["compile", "component-model", "demangle", "gc-drc"] } - -[build-dependencies] -ahash = { version = "0.8", features = ["serde"] } -aho-corasick = { version = "1" } -anstream = { version = "0.6" } -arrayvec = { version = "0.7", features = ["serde"] } -async-compression = { version = "0.4", default-features = false, features = ["deflate", "deflate64", "futures-io", "gzip"] } -async-std = { version = "1", features = ["attributes", "unstable"] } -async-tungstenite = { version = "0.29", features = ["tokio-rustls-manual-roots"] } -aws-config = { version = "1", features = ["behavior-version-latest"] } -aws-credential-types = { version = "1", default-features = false, features = ["hardcoded-credentials", "test-util"] } -aws-runtime = { version = "1", default-features = false, features = ["event-stream", "http-02x", "sigv4a"] } -aws-sigv4 = { version = "1", features = ["http0-compat", "sign-eventstream", "sigv4a"] } -aws-smithy-async = { version = "1", default-features = false, features = ["rt-tokio"] } -aws-smithy-http = { version = "0.62", default-features = false, features = ["event-stream"] } -aws-smithy-runtime = { version = "1", default-features = false, features = ["client", "default-https-client", "rt-tokio", "tls-rustls"] } -aws-smithy-runtime-api = { version = "1", features = ["client", "http-02x", "http-auth", "test-util"] } -aws-smithy-types = { version = "1", default-features = false, features = ["byte-stream-poll-next", "http-body-0-4-x", "http-body-1-x", "rt-tokio", "test-util"] } -base64 = { version = "0.22" } -base64ct = { version = "1", default-features = false, features = ["std"] } -bigdecimal = { version = "0.4", 
features = ["serde"] } -bit-set = { version = "0.8", default-features = false, features = ["std"] } -bit-vec = { version = "0.8", default-features = false, features = ["std"] } -bitflags = { version = "2", default-features = false, features = ["serde", "std"] } -bstr = { version = "1" } -bytemuck = { version = "1", default-features = false, features = ["aarch64_simd", "derive", "extern_crate_alloc"] } -byteorder = { version = "1" } -bytes = { version = "1" } -cc = { version = "1", default-features = false, features = ["parallel"] } -chrono = { version = "0.4", features = ["serde"] } -clap = { version = "4", features = ["cargo", "derive", "string", "wrap_help"] } -clap_builder = { version = "4", default-features = false, features = ["cargo", "color", "std", "string", "suggestions", "usage", "wrap_help"] } -concurrent-queue = { version = "2" } -cranelift-codegen = { version = "0.116", default-features = false, features = ["host-arch", "incremental-cache", "std", "timing", "unwind"] } -crossbeam-channel = { version = "0.5" } -crossbeam-epoch = { version = "0.9" } -crossbeam-utils = { version = "0.8" } -deranged = { version = "0.4", default-features = false, features = ["powerfmt", "serde", "std"] } -digest = { version = "0.10", features = ["mac", "oid", "std"] } -either = { version = "1", features = ["serde", "use_std"] } -euclid = { version = "0.22" } -event-listener = { version = "5" } -event-listener-strategy = { version = "0.5" } -form_urlencoded = { version = "1" } -futures = { version = "0.3", features = ["io-compat"] } -futures-channel = { version = "0.3", features = ["sink"] } -futures-core = { version = "0.3" } -futures-executor = { version = "0.3" } -futures-io = { version = "0.3" } -futures-sink = { version = "0.3" } -futures-task = { version = "0.3", default-features = false, features = ["std"] } -futures-util = { version = "0.3", features = ["channel", "io-compat", "sink"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["std"] } -half = { version = "2", features = ["bytemuck", "num-traits", "rand_distr", "use-intrinsics"] } -handlebars = { version = "4", features = ["rust-embed"] } -hashbrown-3575ec1268b04181 = { package = "hashbrown", version = "0.15", features = ["serde"] } -hashbrown-582f2526e08bb6a0 = { package = "hashbrown", version = "0.14", features = ["raw"] } -heck = { version = "0.4", features = ["unicode"] } -hmac = { version = "0.12", default-features = false, features = ["reset"] } -hyper = { version = "0.14", features = ["client", "http1", "http2", "runtime", "server", "stream"] } -idna = { version = "1" } -indexmap = { version = "2", features = ["serde"] } -itertools-594e8ee84c453af0 = { package = "itertools", version = "0.13" } -itertools-5ef9efb8ec2df382 = { package = "itertools", version = "0.12" } -lazy_static = { version = "1", default-features = false, features = ["spin_no_std"] } -libc = { version = "0.2", features = ["extra_traits"] } -libsqlite3-sys = { version = "0.30", features = ["bundled", "unlock_notify"] } -log = { version = "0.4", default-features = false, features = ["kv_unstable_serde"] } -lyon = { version = "1", default-features = false, features = ["extra"] } -lyon_path = { version = "1" } -md-5 = { version = "0.10" } -memchr = { version = "2" } -memmap2 = { version = "0.9", default-features = false, features = ["stable_deref_trait"] } -mime_guess = { version = "2" } -miniz_oxide = { version = "0.8", features = ["simd"] } -nom = { version = "7" } -num-bigint = { version = "0.4" } 
-num-integer = { version = "0.1", features = ["i128"] } -num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } -num-rational = { version = "0.4", features = ["num-bigint-std"] } -num-traits = { version = "0.2", features = ["i128", "libm"] } -once_cell = { version = "1" } -percent-encoding = { version = "2" } -phf = { version = "0.11", features = ["macros"] } -phf_shared = { version = "0.11" } -prettyplease = { version = "0.2", default-features = false, features = ["verbatim"] } -proc-macro2 = { version = "1" } -prost-274715c4dabd11b0 = { package = "prost", version = "0.9" } -prost-types = { version = "0.9" } -quote = { version = "1" } -rand-c38e5c1d305a1b54 = { package = "rand", version = "0.8", features = ["small_rng"] } -rand_chacha = { version = "0.3", default-features = false, features = ["std"] } -rand_core = { version = "0.6", default-features = false, features = ["std"] } -rand_distr = { version = "0.5" } -regalloc2 = { version = "0.11", features = ["checker", "enable-serde"] } -regex = { version = "1" } -regex-automata = { version = "0.4" } -regex-syntax = { version = "0.8" } -rust_decimal = { version = "1", default-features = false, features = ["maths", "serde", "std"] } -rustc-hash = { version = "1" } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net"] } -rustls = { version = "0.23", features = ["ring"] } -rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] } -sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] } -sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] } -semver = { version = "1", features = ["serde"] } -serde = { version = "1", features = ["alloc", "derive", "rc"] } -serde_core = { version = "1", default-features = false, features = ["alloc", "rc", "result", "std"] } -serde_json = { version = "1", features = ["alloc", "preserve_order", "raw_value", "unbounded_depth"] } -simd-adler32 = { version = "0.3" } -smallvec = { version = "1", default-features = false, features = ["const_new", "serde", "union"] } -spin = { version = "0.9" } -sqlx = { version = "0.8", features = ["bigdecimal", "chrono", "postgres", "runtime-tokio-rustls", "rust_decimal", "sqlite", "time", "uuid"] } -sqlx-macros = { version = "0.8", features = ["_rt-tokio", "_tls-rustls-ring-webpki", "bigdecimal", "chrono", "derive", "json", "macros", "migrate", "postgres", "rust_decimal", "sqlite", "time", "uuid"] } -sqlx-macros-core = { version = "0.8", features = ["_rt-tokio", "_tls-rustls-ring-webpki", "bigdecimal", "chrono", "derive", "json", "macros", "migrate", "postgres", "rust_decimal", "sqlite", "time", "uuid"] } -sqlx-postgres = { version = "0.8", default-features = false, features = ["any", "bigdecimal", "chrono", "json", "migrate", "offline", "rust_decimal", "time", "uuid"] } -sqlx-sqlite = { version = "0.8", default-features = false, features = ["any", "bundled", "chrono", "json", "migrate", "offline", "time", "uuid"] } -stable_deref_trait = { version = "1" } -strum = { version = "0.26", features = ["derive"] } -subtle = { version = "2" } -syn-dff4ba8e3ae991db = { package = "syn", version = "1", features = ["extra-traits", "full"] } -syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } -thiserror = { 
version = "2" } -time = { version = "0.3", features = ["local-offset", "macros", "serde-well-known"] } -time-macros = { version = "0.2", default-features = false, features = ["formatting", "parsing", "serde"] } -tokio = { version = "1", features = ["full"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["tls12"] } -tokio-util = { version = "0.7", features = ["codec", "compat", "io"] } -toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } -toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] } -tracing = { version = "0.1", features = ["log"] } -tracing-core = { version = "0.1" } -tungstenite = { version = "0.26", default-features = false, features = ["__rustls-tls", "handshake"] } -unicode-properties = { version = "0.1" } -url = { version = "2", features = ["serde"] } -uuid = { version = "1", features = ["serde", "v4", "v5", "v7"] } -wasmparser = { version = "0.221" } -wasmtime = { version = "29", default-features = false, features = ["async", "component-model", "cranelift", "demangle", "gc-drc", "incremental-cache", "parallel-compilation"] } -wasmtime-cranelift = { version = "29", default-features = false, features = ["component-model", "gc-drc", "incremental-cache"] } -wasmtime-environ = { version = "29", default-features = false, features = ["compile", "component-model", "demangle", "gc-drc"] } - -[target.x86_64-apple-darwin.dependencies] -codespan-reporting = { version = "0.12" } -core-foundation = { version = "0.9" } -core-foundation-sys = { version = "0.8" } -flate2 = { version = "1" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -naga = { version = "25", features = ["msl-out", "wgsl-in"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -objc2 = { version = "0.6" } -objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } -objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } -objc2-metal = { version = "0.3" } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } -scopeguard = { version = "1" 
} -security-framework = { version = "3", features = ["OSX_10_14"] } -security-framework-sys = { version = "2", features = ["OSX_10_14"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } - -[target.x86_64-apple-darwin.build-dependencies] -codespan-reporting = { version = "0.12" } -core-foundation = { version = "0.9" } -core-foundation-sys = { version = "0.8" } -flate2 = { version = "1" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -naga = { version = "25", features = ["msl-out", "wgsl-in"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -objc2 = { version = "0.6" } -objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } -objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } -objc2-metal = { version = "0.3" } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } -scopeguard = { version = "1" } -security-framework = { version = "3", features = ["OSX_10_14"] } -security-framework-sys = { version = "2", features = ["OSX_10_14"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } - -[target.aarch64-apple-darwin.dependencies] -codespan-reporting = { version = "0.12" } -core-foundation = { version = "0.9" } -core-foundation-sys = { version = "0.8" } -flate2 = { version = "1" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } 
-getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -naga = { version = "25", features = ["msl-out", "wgsl-in"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -objc2 = { version = "0.6" } -objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } -objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } -objc2-metal = { version = "0.3" } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } -scopeguard = { version = "1" } -security-framework = { version = "3", features = ["OSX_10_14"] } -security-framework-sys = { version = "2", features = ["OSX_10_14"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } - -[target.aarch64-apple-darwin.build-dependencies] -codespan-reporting = { version = "0.12" } -core-foundation = { version = "0.9" } -core-foundation-sys = { version = "0.8" } -flate2 = { version = "1" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -naga = { version = "25", features = ["msl-out", "wgsl-in"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -objc2 = { version = "0.6" } -objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } -objc2-foundation = { 
version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } -objc2-metal = { version = "0.3" } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } -scopeguard = { version = "1" } -security-framework = { version = "3", features = ["OSX_10_14"] } -security-framework-sys = { version = "2", features = ["OSX_10_14"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } - -[target.x86_64-unknown-linux-gnu.dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } -linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version = "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version 
= "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -quote = { version = "1" } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -[target.x86_64-unknown-linux-gnu.build-dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } 
-linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version = "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version = "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zbus_macros = { version = "5", features = ["gvariant"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -[target.aarch64-unknown-linux-gnu.dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = 
["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } -linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version = "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version = "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -quote = { version = "1" } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -[target.aarch64-unknown-linux-gnu.build-dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { 
version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } -linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version = "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version = "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, 
features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zbus_macros = { version = "5", features = ["gvariant"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -[target.x86_64-pc-windows-msvc.dependencies] -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "fs", "net"] } -scopeguard = { version = "1" } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } -windows-core = { version = "0.61" } -windows-numerics = { version = "0.2" } -windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } -windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Wdk_System_IO", "Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_WindowsProgramming"] } 
-windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } -windows-sys-d4189bed749088b6 = { package = "windows-sys", version = "0.61", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_IO", "Win32_System_LibraryLoader", "Win32_System_Threading", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } - -[target.x86_64-pc-windows-msvc.build-dependencies] -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "fs", "net"] } -scopeguard = { version = "1" } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } -windows-core = { version = "0.61" } -windows-numerics = { version = "0.2" } -windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", 
"Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } -windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Wdk_System_IO", "Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_WindowsProgramming"] } -windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } -windows-sys-d4189bed749088b6 = { package = "windows-sys", version = "0.61", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_IO", "Win32_System_LibraryLoader", "Win32_System_Threading", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } - -[target.x86_64-unknown-linux-musl.dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } -linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version = "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version = "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } 
-nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -quote = { version = "1" } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -[target.x86_64-unknown-linux-musl.build-dependencies] -aes = { version = "0.8", default-features = false, features = ["zeroize"] } -ahash = { version = "0.8", default-features = false, features = ["compile-time-rng"] } -ashpd = { version = "0.11", default-features = false, features = ["async-std", "wayland"] } -bytemuck = { version = "1", default-features = false, features = ["min_const_generics"] } -cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } -codespan-reporting = { version = "0.12" } -crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -flate2 = { version = "1" } -flume = { version = "0.11" } -foldhash = { version = "0.1", default-features = false, features = ["std"] } -getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } -getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } -gimli = { version = "0.31", default-features = false, features = ["read", "std", "write"] } -hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } -inout = { version = "0.1", default-features = false, features = ["block-padding"] } -linux-raw-sys-274715c4dabd11b0 = { package = "linux-raw-sys", version = "0.9", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "xdp"] } -linux-raw-sys-9fbad63c4bcf4a8f = { package = "linux-raw-sys", version 
= "0.4", default-features = false, features = ["elf", "errno", "general", "if_ether", "ioctl", "net", "netlink", "no_std", "prctl", "system", "xdp"] } -livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -mio = { version = "1", features = ["net", "os-ext"] } -naga = { version = "25", features = ["spv-out", "wgsl-in"] } -nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "mman", "ptrace", "signal", "term", "user"] } -nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } -num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } -object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } -proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } -prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } -rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } -ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } -scopeguard = { version = "1" } -smallvec = { version = "1", default-features = false, features = ["write"] } -sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } -tokio-socks = { version = "0.5", features = ["futures-io"] } -tokio-stream = { version = "0.1", features = ["fs"] } -tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } -wayland-backend = { version = "0.3", default-features = false, features = ["client_system", "dlopen"] } -wayland-sys = { version = "0.31", default-features = false, features = ["client", "dlopen"] } -zbus_macros = { version = "5", features = ["gvariant"] } -zeroize = { version = "1", features = ["zeroize_derive"] } -zvariant = { version = "5", features = ["enumflags2", "gvariant", "url"] } - -### END HAKARI SECTION diff --git a/tooling/workspace-hack/LICENSE-GPL b/tooling/workspace-hack/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/tooling/workspace-hack/build.rs b/tooling/workspace-hack/build.rs deleted file mode 100644 index 92518ef04cb3b9a0f5463a0c387e4f9a7ad39f93..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/build.rs +++ /dev/null @@ -1,2 +0,0 @@ -// A build script is required for cargo to consider build dependencies. -fn main() {} diff --git a/tooling/workspace-hack/src/lib.rs b/tooling/workspace-hack/src/lib.rs deleted file mode 100644 index 22489f632bdc1d52c2de57686c95b5081fce706b..0000000000000000000000000000000000000000 --- a/tooling/workspace-hack/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ -// This is a stub lib.rs. 
diff --git a/tooling/xtask/Cargo.toml b/tooling/xtask/Cargo.toml
index 8f968e0ca6eb81b6bebaec5c17bfac2baf3d5c79..aa06e6164683edd3bb011136a127b9fb99215e52 100644
--- a/tooling/xtask/Cargo.toml
+++ b/tooling/xtask/Cargo.toml
@@ -16,4 +16,3 @@ clap = { workspace = true, features = ["derive"] }
 toml.workspace = true
 indoc.workspace = true
 toml_edit.workspace = true
-workspace-hack.workspace = true
diff --git a/tooling/xtask/src/tasks/package_conformity.rs b/tooling/xtask/src/tasks/package_conformity.rs
index c8bed4bb35185430d7942d4e2a6b704b6fcddff3..e1fd15112fda6f1192cfc65a3d92b89b2e88777a 100644
--- a/tooling/xtask/src/tasks/package_conformity.rs
+++ b/tooling/xtask/src/tasks/package_conformity.rs
@@ -38,11 +38,6 @@ pub fn run_package_conformity(_args: PackageConformityArgs) -> Result<()> {
             continue;
         }

-        // Ignore `workspace-hack`, as it produces a lot of false positives.
-        if package.name == "workspace-hack" {
-            continue;
-        }
-
         for dependencies in [
             &cargo_toml.dependencies,
             &cargo_toml.dev_dependencies,
diff --git a/tooling/xtask/src/tasks/publish_gpui.rs b/tooling/xtask/src/tasks/publish_gpui.rs
index 1e0ea4ef8e0dfdc10c57982661895a0d6b36d2f0..2740f75a4840719021b11f9b3d005b16907026d2 100644
--- a/tooling/xtask/src/tasks/publish_gpui.rs
+++ b/tooling/xtask/src/tasks/publish_gpui.rs
@@ -7,13 +7,13 @@ use clap::Parser;

 #[derive(Parser)]
 pub struct PublishGpuiArgs {
-    /// Optional pre-release identifier to append to the version (e.g., alpha, test.1). Always bumps the minor version.
-    #[arg(long)]
-    pre_release: Option<String>,
-
     /// Perform a dry-run and wait for user confirmation before each publish
     #[arg(long)]
     dry_run: bool,
+
+    /// Skip to a specific package (by package name or crate name) and start from there
+    #[arg(long)]
+    skip_to: Option<String>,
 }

 pub fn run_publish_gpui(args: PublishGpuiArgs) -> Result<()> {
@@ -24,12 +24,16 @@ pub fn run_publish_gpui(args: PublishGpuiArgs) -> Result<()> {
     let start_time = std::time::Instant::now();
     check_workspace_root()?;
-    ensure_cargo_set_version()?;
-    check_git_clean()?;
+
+    if args.skip_to.is_none() {
+        check_git_clean()?;
+    } else {
+        println!("Skipping git clean check due to --skip-to flag");
+    }

     let version = read_gpui_version()?;
     println!("Updating GPUI to version: {}", version);
-    publish_dependencies(&version, args.dry_run)?;
+    publish_dependencies(&version, args.dry_run, args.skip_to.as_deref())?;
     publish_gpui(&version, args.dry_run)?;
     println!("GPUI published in {}s", start_time.elapsed().as_secs_f32());
     Ok(())
 }
@@ -52,62 +56,106 @@ fn read_gpui_version() -> Result<String> {
     Ok(version.to_string())
 }

-fn publish_dependencies(new_version: &str, dry_run: bool) -> Result<()> {
+fn publish_dependencies(new_version: &str, dry_run: bool, skip_to: Option<&str>) -> Result<()> {
     let gpui_dependencies = vec![
-        ("zed-collections", "collections"),
-        ("zed-perf", "perf"),
-        ("zed-util-macros", "util_macros"),
-        ("zed-util", "util"),
-        ("gpui-macros", "gpui_macros"),
-        ("zed-http-client", "http_client"),
-        ("zed-derive-refineable", "derive_refineable"),
-        ("zed-refineable", "refineable"),
-        ("zed-semantic-version", "semantic_version"),
-        ("zed-sum-tree", "sum_tree"),
-        ("zed-media", "media"),
+        ("collections", "gpui_collections", "crates"),
+        ("perf", "gpui_perf", "tooling"),
+        ("util_macros", "gpui_util_macros", "crates"),
+        ("util", "gpui_util", "crates"),
+        ("gpui_macros", "gpui-macros", "crates"),
+        ("http_client", "gpui_http_client", "crates"),
+        (
+            "derive_refineable",
+            "gpui_derive_refineable",
+            "crates/refineable",
+        ),
+        ("refineable", "gpui_refineable", "crates"),
+        ("semantic_version", "gpui_semantic_version", "crates"),
+        ("sum_tree", "gpui_sum_tree", "crates"),
+        ("media", "gpui_media", "crates"),
     ];

-    for (crate_name, package_name) in gpui_dependencies {
+    let mut should_skip = skip_to.is_some();
+    let skip_target = skip_to.unwrap_or("");
+
+    for (package_name, crate_name, package_dir) in gpui_dependencies {
+        if should_skip {
+            if package_name == skip_target || crate_name == skip_target {
+                println!("Found skip target: {} ({})", crate_name, package_name);
+                should_skip = false;
+            } else {
+                println!("Skipping: {} ({})", crate_name, package_name);
+                continue;
+            }
+        }
+
         println!(
             "Publishing dependency: {} (package: {})",
             crate_name, package_name
         );

-        update_crate_version(crate_name, new_version)?;
-        update_workspace_dependency_version(package_name, new_version)?;
+        update_crate_cargo_toml(package_name, crate_name, package_dir, new_version)?;
+        update_workspace_dependency_version(package_name, crate_name, new_version)?;
         publish_crate(crate_name, dry_run)?;
+    }

-        // println!("Waiting 60s for the rate limit...");
-        // thread::sleep(Duration::from_secs(60));
+    if should_skip {
+        bail!(
+            "Could not find package or crate named '{}' to skip to",
+            skip_target
+        );
     }

     Ok(())
 }

 fn publish_gpui(new_version: &str, dry_run: bool) -> Result<()> {
-    update_crate_version("gpui", new_version)?;
+    update_crate_cargo_toml("gpui", "gpui", "crates", new_version)?;
     publish_crate("gpui", dry_run)?;
     Ok(())
 }

-fn update_crate_version(package_name: &str, new_version: &str) -> Result<()> {
-    let output = run_command(
-        Command::new("cargo")
-            .arg("set-version")
-            .arg("--package")
-            .arg(package_name)
-            .arg(new_version),
-    )?;
+fn update_crate_cargo_toml(
+    package_name: &str,
+    crate_name: &str,
+    package_dir: &str,
+    new_version: &str,
+) -> Result<()> {
+    let cargo_toml_path = format!("{}/{}/Cargo.toml", package_dir, package_name);
+    let contents = std::fs::read_to_string(&cargo_toml_path)
+        .context(format!("Failed to read {}", cargo_toml_path))?;

-    if !output.status.success() {
-        bail!("Failed to set version for package {}", package_name);
-    }
+    let updated = update_crate_package_fields(&contents, crate_name, new_version)?;
+
+    std::fs::write(&cargo_toml_path, updated)
+        .context(format!("Failed to write {}", cargo_toml_path))?;

     Ok(())
 }

+fn update_crate_package_fields(
+    toml_contents: &str,
+    crate_name: &str,
+    new_version: &str,
+) -> Result<String> {
+    let mut doc = toml_contents
+        .parse::<toml_edit::DocumentMut>()
+        .context("Failed to parse TOML")?;
+
+    let package = doc
+        .get_mut("package")
+        .and_then(|p| p.as_table_like_mut())
+        .context("Failed to find [package] section")?;
+
+    package.insert("name", toml_edit::value(crate_name));
+    package.insert("version", toml_edit::value(new_version));
+    package.insert("publish", toml_edit::value(true));
+
+    Ok(doc.to_string())
+}
+
 fn publish_crate(crate_name: &str, dry_run: bool) -> Result<()> {
     let publish_crate_impl = |crate_name, dry_run| {
         let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
@@ -142,29 +190,34 @@ fn publish_crate(crate_name: &str, dry_run: bool) -> Result<()> {
     Ok(())
 }

-fn update_workspace_dependency_version(package_name: &str, new_version: &str) -> Result<()> {
+fn update_workspace_dependency_version(
+    package_name: &str,
+    crate_name: &str,
+    new_version: &str,
+) -> Result<()> {
     let workspace_cargo_toml_path = "Cargo.toml";
     let contents = std::fs::read_to_string(workspace_cargo_toml_path)
         .context("Failed to read workspace Cargo.toml")?;

-    let updated = update_dependency_version_in_toml(&contents, package_name, new_version)?;
+    let mut doc = contents
+        .parse::<toml_edit::DocumentMut>()
+        .context("Failed to parse TOML")?;
+
+    update_dependency_version_in_doc(&mut doc, package_name, crate_name, new_version)?;
+    update_profile_override_in_doc(&mut doc, package_name, crate_name)?;

-    std::fs::write(workspace_cargo_toml_path, updated)
+    std::fs::write(workspace_cargo_toml_path, doc.to_string())
         .context("Failed to write workspace Cargo.toml")?;

     Ok(())
 }

-fn update_dependency_version_in_toml(
-    toml_contents: &str,
+fn update_dependency_version_in_doc(
+    doc: &mut toml_edit::DocumentMut,
     package_name: &str,
+    crate_name: &str,
     new_version: &str,
-) -> Result<String> {
-    let mut doc = toml_contents
-        .parse::<toml_edit::DocumentMut>()
-        .context("Failed to parse TOML")?;
-
-    // Navigate to workspace.dependencies.
+) -> Result<()> {
     let dependency = doc
         .get_mut("workspace")
         .and_then(|w| w.get_mut("dependencies"))
@@ -174,21 +227,35 @@ fn update_dependency_version_in_toml(
             package_name
         ))?;

-    // Update the version field if it exists
     if let Some(dep_table) = dependency.as_table_like_mut() {
-        if dep_table.contains_key("version") {
-            dep_table.insert("version", toml_edit::value(new_version));
-        } else {
-            bail!(
-                "No version field found for {} in workspace dependencies",
-                package_name
-            );
-        }
+        dep_table.insert("version", toml_edit::value(new_version));
+        dep_table.insert("package", toml_edit::value(crate_name));
     } else {
         bail!("{} is not a table in workspace dependencies", package_name);
     }

-    Ok(doc.to_string())
+    Ok(())
+}
+
+fn update_profile_override_in_doc(
+    doc: &mut toml_edit::DocumentMut,
+    package_name: &str,
+    crate_name: &str,
+) -> Result<()> {
+    if let Some(profile_dev_package) = doc
+        .get_mut("profile")
+        .and_then(|p| p.get_mut("dev"))
+        .and_then(|d| d.get_mut("package"))
+        .and_then(|p| p.as_table_like_mut())
+    {
+        if let Some(old_entry) = profile_dev_package.get(package_name) {
+            let old_entry_clone = old_entry.clone();
+            profile_dev_package.remove(package_name);
+            profile_dev_package.insert(crate_name, old_entry_clone);
+        }
+    }
+
+    Ok(())
 }

 fn check_workspace_root() -> Result<()> {
@@ -215,27 +282,6 @@ fn check_workspace_root() -> Result<()> {
     Ok(())
 }

-fn ensure_cargo_set_version() -> Result<()> {
-    let output = run_command(
-        Command::new("which")
-            .arg("cargo-set-version")
-            .stdout(Stdio::piped()),
-    )
-    .context("Failed to check for cargo-set-version")?;
-
-    if !output.status.success() {
-        println!("cargo-set-version not found. Installing cargo-edit...");
-
-        let install_output = run_command(Command::new("cargo").arg("install").arg("cargo-edit"))?;
-
-        if !install_output.status.success() {
-            bail!("Failed to install cargo-edit");
-        }
-    }
-
-    Ok(())
-}
-
 fn check_git_clean() -> Result<()> {
     let output = run_command(
         Command::new("git")
@@ -281,6 +327,10 @@ fn run_command(command: &mut Command) -> Result<Output> {
         .wait_with_output()
         .context("failed to wait for child process")?;

+    if !output.status.success() {
+        bail!("Command failed with status {}", output.status);
+    }
+
     Ok(output)
 }

@@ -298,12 +348,17 @@ mod tests {

             [workspace.dependencies]
             # here's a comment
-            collections = { path = "crates/collections", package = "zed-collections", version = "0.1.0" }
+            collections = { path = "crates/collections" }
             util = { path = "crates/util", package = "zed-util", version = "0.1.0" }
         "#};

-        let result = update_dependency_version_in_toml(input, "collections", "0.2.0").unwrap();
+        let mut doc = input.parse::<toml_edit::DocumentMut>().unwrap();
+
+        update_dependency_version_in_doc(&mut doc, "collections", "gpui_collections", "0.2.0")
+            .unwrap();
+
+        let result = doc.to_string();

         let output = indoc! {r#"
             [workspace]
@@ -311,11 +366,77 @@ mod tests {

             [workspace.dependencies]
             # here's a comment
-            collections = { path = "crates/collections", package = "zed-collections", version = "0.2.0" }
+            collections = { path = "crates/collections" , version = "0.2.0", package = "gpui_collections" }
             util = { path = "crates/util", package = "zed-util", version = "0.1.0" }
         "#};

         assert_eq!(result, output);
     }
+
+    #[test]
+    fn test_update_crate_package_fields() {
+        let input = indoc! {r#"
+            [package]
+            name = "collections"
+            version = "0.1.0"
+            edition = "2021"
+            publish = false
+            # some comment about the license
+            license = "GPL-3.0-or-later"
+
+            [dependencies]
+            serde = "1.0"
+        "#};
+
+        let result = update_crate_package_fields(input, "gpui_collections", "0.2.0").unwrap();
+
+        let output = indoc! {r#"
+            [package]
+            name = "gpui_collections"
+            version = "0.2.0"
+            edition = "2021"
+            publish = true
+            # some comment about the license
+            license = "GPL-3.0-or-later"
+
+            [dependencies]
+            serde = "1.0"
+        "#};
+
+        assert_eq!(result, output);
+    }
+
+    #[test]
+    fn test_update_profile_override_in_toml() {
+        let input = indoc! {r#"
+            [profile.dev]
+            split-debuginfo = "unpacked"
+
+            [profile.dev.package]
+            taffy = { opt-level = 3 }
+            collections = { codegen-units = 256 }
+            refineable = { codegen-units = 256 }
+            util = { codegen-units = 256 }
+        "#};
+
+        let mut doc = input.parse::<toml_edit::DocumentMut>().unwrap();
+
+        update_profile_override_in_doc(&mut doc, "collections", "gpui_collections").unwrap();
+
+        let result = doc.to_string();
+
+        let output = indoc! {r#"
+            [profile.dev]
+            split-debuginfo = "unpacked"
+
+            [profile.dev.package]
+            taffy = { opt-level = 3 }
+            refineable = { codegen-units = 256 }
+            util = { codegen-units = 256 }
+            gpui_collections = { codegen-units = 256 }
+        "#};
+
+        assert_eq!(result, output);
+    }
 }