diff --git a/.github/workflows/assign-reviewers.yml b/.github/workflows/assign-reviewers.yml new file mode 100644 index 0000000000000000000000000000000000000000..a77f1812d06330b4635fe173583f0f1ce93e4e17 --- /dev/null +++ b/.github/workflows/assign-reviewers.yml @@ -0,0 +1,81 @@ +# Assign Reviewers — Smart team assignment based on diff weight +# +# Triggers on PR open and ready_for_review events. Checks out the coordinator +# repo (zed-industries/codeowner-coordinator) to access the assignment script and rules, +# then assigns the 1-2 most relevant teams as reviewers. +# +# NOTE: This file is stored in the codeowner-coordinator repo but must be deployed to +# the zed repo at .github/workflows/assign-reviewers.yml. See INSTALL.md. +# +# AUTH NOTE: Uses a GitHub App (COORDINATOR_APP_ID + COORDINATOR_APP_PRIVATE_KEY) +# for all API operations: cloning the private coordinator repo, requesting team +# reviewers, and setting PR assignees. GITHUB_TOKEN is not used. + +name: Assign Reviewers + +on: + pull_request: + types: [opened, ready_for_review] + +# GITHUB_TOKEN is not used — all operations use the GitHub App token. +# Declare minimal permissions so the default token has no write access. +permissions: {} + +# Only run for PRs from within the org (not forks) — fork PRs don't have +# write access to request team reviewers. 
+jobs: + assign-reviewers: + if: >- + github.event.pull_request.head.repo.full_name == github.repository && + github.event.pull_request.draft == false && + contains(fromJSON('["MEMBER", "OWNER"]'), github.event.pull_request.author_association) + runs-on: ubuntu-latest + steps: + - name: Generate app token + id: app-token + uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1 + with: + app-id: ${{ vars.COORDINATOR_APP_ID }} + private-key: ${{ secrets.COORDINATOR_APP_PRIVATE_KEY }} + repositories: codeowner-coordinator,zed + + - name: Checkout coordinator repo + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1 + with: + repository: zed-industries/codeowner-coordinator + ref: main + path: codeowner-coordinator + token: ${{ steps.app-token.outputs.token }} + persist-credentials: false + + - name: Setup Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.11" + + - name: Install dependencies + run: pip install pyyaml==6.0.3 + + - name: Assign reviewers + env: + GH_TOKEN: ${{ steps.app-token.outputs.token }} + PR_URL: ${{ github.event.pull_request.html_url }} + TARGET_REPO: ${{ github.repository }} + run: | + cd codeowner-coordinator + python .github/scripts/assign-reviewers.py \ + --pr "$PR_URL" \ + --apply \ + --rules-file team-membership-rules.yml \ + --repo "$TARGET_REPO" \ + --org zed-industries \ + --min-association member \ + 2>&1 | tee /tmp/assign-reviewers-output.txt + + - name: Upload output + if: always() + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: assign-reviewers-output + path: /tmp/assign-reviewers-output.txt + retention-days: 30 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 740b33dd55790bd3cabfc75146d71854eca6375d..e7e7629825b5f487a3b00af525d36458eb91956c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -26,6 +26,8 @@ If you're looking for concrete ideas: - [Triaged bugs 
with confirmed steps to reproduce](https://github.com/zed-industries/zed/issues?q=is%3Aissue%20state%3Aopen%20type%3ABug%20label%3Astate%3Areproducible). - [Area labels](https://github.com/zed-industries/zed/labels?q=area%3A*) to browse bugs in a specific part of the product you care about (after clicking on an area label, add type:Bug to the search). +If you're thinking about proposing or building a larger feature, read the [Zed Feature Process](./docs/src/development/feature-process.md) for how we think about feature design — what context to provide, what integration points to consider, and how to put together a strong proposal. + ## Sending changes The Zed culture values working code and synchronous conversations over long diff --git a/Cargo.lock b/Cargo.lock index f5fe136c8f62fb14b5ebd1e29b636e82a3193c38..964f5f764c48aaf535f5e2d9368b0816abf837d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -38,7 +38,6 @@ dependencies = [ "sum_tree", "task", "telemetry", - "tempfile", "terminal", "text", "ui", @@ -47,7 +46,6 @@ dependencies = [ "util", "uuid", "watch", - "zlog", ] [[package]] @@ -81,7 +79,6 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", - "indoc", "language", "log", "pretty_assertions", @@ -110,7 +107,6 @@ dependencies = [ "language", "project", "proto", - "release_channel", "smallvec", "ui", "util", @@ -216,11 +212,9 @@ dependencies = [ "task", "telemetry", "tempfile", - "terminal", "text", "theme", "thiserror 2.0.17", - "tree-sitter-rust", "ui", "unindent", "url", @@ -228,7 +222,6 @@ dependencies = [ "uuid", "watch", "web_search", - "worktree", "zed_env_vars", "zlog", "zstd", @@ -287,7 +280,6 @@ dependencies = [ "gpui_tokio", "http_client", "indoc", - "language", "language_model", "libc", "log", @@ -321,7 +313,6 @@ dependencies = [ "gpui", "language_model", "log", - "paths", "project", "regex", "schemars", @@ -354,7 +345,6 @@ dependencies = [ "buffer_diff", "chrono", "client", - "clock", "cloud_api_types", "cloud_llm_client", "collections", @@ -400,9 
+390,7 @@ dependencies = [ "prompt_store", "proto", "rand 0.9.2", - "recent_projects", "release_channel", - "remote_connection", "reqwest_client", "rope", "rules_library", @@ -417,14 +405,12 @@ dependencies = [ "streaming_diff", "task", "telemetry", - "tempfile", "terminal", "terminal_view", "text", "theme", "time", "time_format", - "title_bar", "tree-sitter-md", "ui", "ui_input", @@ -673,17 +659,13 @@ dependencies = [ "anyhow", "chrono", "futures 0.3.31", - "gpui", - "gpui_tokio", "http_client", - "reqwest_client", "schemars", "serde", "serde_json", "settings", "strum 0.27.2", "thiserror 2.0.17", - "tokio", ] [[package]] @@ -733,7 +715,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -895,7 +877,6 @@ dependencies = [ "futures 0.3.31", "fuzzy", "gpui", - "indoc", "itertools 0.14.0", "language", "language_model", @@ -1130,7 +1111,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1198,7 +1179,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -1228,7 +1209,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2067,7 +2048,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2085,7 +2066,7 @@ dependencies = [ "regex", "rustc-hash 2.1.1", "shlex", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2220,7 +2201,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2249,7 +2230,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 
2.0.117", ] [[package]] @@ -2322,7 +2303,6 @@ dependencies = [ "pretty_assertions", "rand 0.9.2", "rope", - "serde_json", "settings", "sum_tree", "text", @@ -2399,7 +2379,7 @@ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2484,7 +2464,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -2506,7 +2486,6 @@ dependencies = [ "futures 0.3.31", "gpui", "gpui_tokio", - "http_client", "language", "livekit_client", "log", @@ -2738,7 +2717,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.106", + "syn 2.0.117", "tempfile", "toml 0.8.23", ] @@ -2967,7 +2946,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -3101,8 +3080,6 @@ name = "cloud_llm_client" version = "0.1.0" dependencies = [ "anyhow", - "indoc", - "pretty_assertions", "serde", "serde_json", "strum 0.27.2", @@ -3227,6 +3204,7 @@ dependencies = [ "serde", "serde_json", "text", + "zeta_prompt", ] [[package]] @@ -3234,15 +3212,11 @@ name = "collab" version = "0.44.0" dependencies = [ "agent", - "agent-client-protocol", - "agent_settings", - "agent_ui", "anyhow", "assistant_slash_command", "assistant_text_thread", "async-trait", "async-tungstenite", - "audio", "aws-config", "aws-sdk-kinesis", "aws-sdk-s3", @@ -3258,10 +3232,8 @@ dependencies = [ "collab_ui", "collections", "command_palette_hooks", - "context_server", "ctor", "dap", - "dap-types", "dap_adapters", "dashmap", "debugger_ui", @@ -3278,7 +3250,6 @@ dependencies = [ "gpui_tokio", "hex", "http_client", - "hyper 0.14.32", "indoc", "language", "language_model", @@ -3320,7 +3291,6 @@ dependencies = [ "text", "theme", "time", - "title_bar", "tokio", "toml 0.8.23", "tower 0.4.13", @@ -3351,12 +3321,10 @@ dependencies = [ "futures 0.3.31", "fuzzy", "gpui", - "http_client", "log", "menu", "notifications", "picker", - 
"pretty_assertions", "project", "release_channel", "rpc", @@ -3369,7 +3337,6 @@ dependencies = [ "time", "time_format", "title_bar", - "tree-sitter-md", "ui", "util", "workspace", @@ -3423,10 +3390,8 @@ dependencies = [ "client", "collections", "command_palette_hooks", - "ctor", "db", "editor", - "env_logger 0.11.8", "fuzzy", "go_to_line", "gpui", @@ -3437,7 +3402,6 @@ dependencies = [ "postage", "project", "serde", - "serde_json", "settings", "telemetry", "theme", @@ -3645,24 +3609,29 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "convert_case" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "affbf0190ed2caf063e3def54ff444b449371d55c58e513a95ab98eca50adb49" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "copilot" version = "0.1.0" dependencies = [ "anyhow", "async-std", - "client", - "clock", "collections", "command_palette_hooks", "copilot_chat", - "ctor", "edit_prediction_types", "editor", "fs", "futures 0.3.31", "gpui", - "http_client", "icons", "indoc", "language", @@ -3689,6 +3658,7 @@ dependencies = [ name = "copilot_chat" version = "0.1.0" dependencies = [ + "anthropic", "anyhow", "collections", "dirs 4.0.0", @@ -4356,7 +4326,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4433,7 +4403,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4447,7 +4417,7 @@ dependencies = [ "indexmap", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4465,7 +4435,7 @@ dependencies = [ "indexmap", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4499,8 +4469,6 @@ dependencies = [ "smol", "task", "telemetry", - "tree-sitter", - "tree-sitter-go", "util", "zlog", ] @@ -4562,7 +4530,7 @@ dependencies = [ 
"proc-macro2", "quote", "strsim", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4573,7 +4541,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4805,7 +4773,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4827,7 +4795,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.106", + "syn 2.0.117", "unicode-xid", ] @@ -4837,7 +4805,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4849,7 +4817,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -4871,7 +4839,6 @@ dependencies = [ "serde_json", "settings", "smol", - "theme", "ui", "util", "workspace", @@ -4883,7 +4850,6 @@ name = "diagnostics" version = "0.1.0" dependencies = [ "anyhow", - "client", "collections", "component", "ctor", @@ -5040,7 +5006,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5103,7 +5069,7 @@ dependencies = [ "proc-macro2", "quote", "strum 0.27.2", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5276,7 +5242,6 @@ dependencies = [ "thiserror 2.0.17", "time", "toml 0.8.23", - "tree-sitter-rust", "ui", "util", "uuid", @@ -5374,7 +5339,6 @@ dependencies = [ "tree-sitter", "util", "zeta_prompt", - "zlog", ] [[package]] @@ -5395,7 +5359,6 @@ dependencies = [ "anyhow", "buffer_diff", "client", - "clock", "cloud_llm_client", "codestral", "collections", @@ -5412,18 +5375,12 @@ dependencies = [ "gpui", "indoc", "language", - "language_model", - "lsp", "markdown", "menu", "multi_buffer", "paths", - "pretty_assertions", "project", "regex", - "release_channel", - "semver", - 
"serde_json", "settings", "telemetry", "text", @@ -5434,7 +5391,6 @@ dependencies = [ "workspace", "zed_actions", "zeta_prompt", - "zlog", ] [[package]] @@ -5463,7 +5419,6 @@ dependencies = [ "fuzzy", "git", "gpui", - "http_client", "indoc", "itertools 0.14.0", "language", @@ -5478,6 +5433,8 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", + "proptest", + "proptest-derive", "rand 0.9.2", "regex", "release_channel", @@ -5494,7 +5451,6 @@ dependencies = [ "sum_tree", "task", "telemetry", - "tempfile", "text", "theme", "time", @@ -5654,7 +5610,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5675,7 +5631,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5740,7 +5696,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -5791,6 +5747,15 @@ dependencies = [ "libc", ] +[[package]] +name = "error-graph" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b920e777967421aa5f9bf34f842c0ab6ba19b3bdb4a082946093860f5858879" +dependencies = [ + "serde", +] + [[package]] name = "etagere" version = "0.2.15" @@ -5894,6 +5859,47 @@ dependencies = [ "watch", ] +[[package]] +name = "eval_cli" +version = "0.1.0" +dependencies = [ + "acp_thread", + "agent", + "agent-client-protocol", + "agent_ui", + "anyhow", + "clap", + "client", + "ctrlc", + "debug_adapter_extension", + "env_logger 0.11.8", + "extension", + "feature_flags", + "fs", + "futures 0.3.31", + "gpui", + "gpui_platform", + "gpui_tokio", + "language", + "language_extension", + "language_model", + "language_models", + "languages", + "node_runtime", + "paths", + "project", + "prompt_store", + "release_channel", + "reqwest_client", + "serde", + "serde_json", + 
"settings", + "shellexpand 2.1.2", + "terminal_view", + "util", + "watch", +] + [[package]] name = "eval_utils" version = "0.1.0" @@ -6022,7 +6028,9 @@ dependencies = [ "serde", "serde_json", "serde_json_lenient", + "settings_content", "snippet_provider", + "task", "theme", "tokio", "toml 0.8.23", @@ -6059,7 +6067,6 @@ dependencies = [ "parking_lot", "paths", "project", - "rand 0.9.2", "release_channel", "remote", "reqwest_client", @@ -6119,6 +6126,12 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "failspot" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c942e64b20ecd39933d5ff938ca4fdb6ef0d298cc3855b231179a5ef0b24948d" + [[package]] name = "fallible-iterator" version = "0.3.0" @@ -6174,7 +6187,7 @@ checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6209,7 +6222,6 @@ dependencies = [ name = "feedback" version = "0.1.0" dependencies = [ - "editor", "gpui", "system_specs", "urlencoding", @@ -6240,7 +6252,6 @@ dependencies = [ "futures 0.3.31", "fuzzy", "gpui", - "language", "menu", "open_path_prompt", "picker", @@ -6473,7 +6484,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -6742,7 +6753,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7153,7 +7164,7 @@ source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1 dependencies = [ "heck 0.5.0", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7226,7 +7237,6 @@ dependencies = [ "text", "thiserror 2.0.17", "time", - "unindent", "url", "urlencoding", "util", @@ -7263,7 +7273,6 @@ dependencies = [ "menu", "project", "rand 0.9.2", - "recent_projects", 
"serde_json", "settings", "smallvec", @@ -7314,7 +7323,6 @@ dependencies = [ "futures 0.3.31", "fuzzy", "git", - "git_hosting_providers", "gpui", "indoc", "itertools 0.14.0", @@ -7402,7 +7410,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -7483,8 +7491,6 @@ dependencies = [ "settings", "text", "theme", - "tree-sitter-rust", - "tree-sitter-typescript", "ui", "util", "workspace", @@ -7503,9 +7509,9 @@ dependencies = [ [[package]] name = "goblin" -version = "0.8.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b363a30c165f666402fe6a3024d3bec7ebc898f96a4a23bd1c99f8dbf3f4f47" +checksum = "daa0a64d21a7eb230583b4c5f4e23b7e4e57974f96620f42a7e75e08ae66d745" dependencies = [ "log", "plain", @@ -7615,8 +7621,8 @@ dependencies = [ "pin-project", "pollster 0.4.0", "postage", - "pretty_assertions", "profiling", + "proptest", "rand 0.9.2", "raw-window-handle", "refineable", @@ -7744,7 +7750,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8172,7 +8178,7 @@ dependencies = [ "markup5ever 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8630,7 +8636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "525e9ff3e1a4be2fbea1fdf0e98686a6d98b4d8f937e1bf7402245af1909e8c3" dependencies = [ "byteorder-lite", - "quick-error", + "quick-error 2.0.1", ] [[package]] @@ -8706,7 +8712,7 @@ checksum = "c727f80bfa4a6c6e2508d2f05b6f4bfce242030bd88ed15ae5331c5b5d30fba7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8801,7 +8807,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -8993,7 +8999,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = 
[ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -9415,7 +9421,6 @@ dependencies = [ "copilot_ui", "credentials_provider", "deepseek", - "editor", "extension", "extension_host", "fs", @@ -9435,7 +9440,6 @@ dependencies = [ "open_router", "partial-json-fixer", "pretty_assertions", - "project", "release_channel", "schemars", "semver", @@ -9563,7 +9567,6 @@ dependencies = [ "snippet", "task", "terminal", - "text", "theme", "toml 0.8.23", "tree-sitter", @@ -9587,7 +9590,6 @@ dependencies = [ "unindent", "url", "util", - "workspace", ] [[package]] @@ -9634,9 +9636,9 @@ checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" [[package]] name = "libc" -version = "0.2.177" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] name = "libdbus-sys" @@ -9941,7 +9943,6 @@ dependencies = [ "serde_json", "serde_urlencoded", "settings", - "sha2", "simplelog", "smallvec", "ui", @@ -10205,6 +10206,7 @@ dependencies = [ "language", "linkify", "log", + "markdown", "markup5ever_rcdom", "mermaid-rs-renderer", "pretty_assertions", @@ -10263,7 +10265,7 @@ checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -10497,9 +10499,9 @@ dependencies = [ [[package]] name = "minidump-common" -version = "0.21.2" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4d14bcca0fd3ed165a03000480aaa364c6860c34e900cb2dafdf3b95340e77" +checksum = "2e16d10087ae9e375bad7a40e8ef5504bc08e808ccc6019067ff9de42a84570f" dependencies = [ "bitflags 2.10.0", "debugid", @@ -10512,14 +10514,16 @@ dependencies = [ [[package]] name = "minidump-writer" -version = "0.8.9" +version = "0.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abcd9c8a1e6e1e9d56ce3627851f39a17ea83e17c96bc510f29d7e43d78a7d" +checksum = "0e1fc14d6ded915b8e850801465e7096f77ed60bf87e4e85878d463720d9dc4d" dependencies = [ "bitflags 2.10.0", "byteorder", "cfg-if", "crash-context", + "error-graph", + "failspot", "goblin", "libc", "log", @@ -10527,18 +10531,20 @@ dependencies = [ "memmap2", "memoffset", "minidump-common", - "nix 0.28.0", + "nix 0.29.0", "procfs-core", "scroll", + "serde", + "serde_json", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.17", ] [[package]] name = "minidumper" -version = "0.8.3" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4ebc9d1f8847ec1d078f78b35ed598e0ebefa1f242d5f83cd8d7f03960a7d1" +checksum = "10d9254e42a48098d045472a5c0cb892007a42e25342eddbf2642f6978bf381a" dependencies = [ "cfg-if", "crash-context", @@ -10548,7 +10554,7 @@ dependencies = [ "parking_lot", "polling", "scroll", - "thiserror 1.0.69", + "thiserror 2.0.17", "uds", ] @@ -10681,7 +10687,6 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", - "project", "rand 0.9.2", "rope", "serde", @@ -10732,7 +10737,7 @@ dependencies = [ [[package]] name = "naga" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "arrayvec", "bit-set", @@ -10959,12 +10964,10 @@ dependencies = [ "anyhow", "channel", "client", - "collections", "component", "db", "gpui", "rpc", - "settings", "sum_tree", "time", "ui", @@ -11130,7 +11133,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11213,7 +11216,7 @@ dependencies = [ "proc-macro-crate", 
"proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11606,7 +11609,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11635,7 +11638,7 @@ checksum = "969ccca8ffc4fb105bd131a228107d5c9dd89d9d627edf3295cbe979156f9712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11693,7 +11696,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -11715,8 +11718,6 @@ dependencies = [ "settings", "smol", "theme", - "tree-sitter-rust", - "tree-sitter-typescript", "ui", "util", "workspace", @@ -11803,7 +11804,7 @@ dependencies = [ "by_address", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12066,7 +12067,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12545,7 +12546,7 @@ dependencies = [ "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12558,7 +12559,7 @@ dependencies = [ "phf_shared 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12584,14 +12585,12 @@ name = "picker" version = "0.1.0" dependencies = [ "anyhow", - "ctor", "editor", - "env_logger 0.11.8", "gpui", "menu", "schemars", "serde", - "serde_json", + "settings", "theme", "ui", "ui_input", @@ -12622,7 +12621,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -12946,7 +12945,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13010,7 +13009,7 @@ dependencies = [ "proc-macro-error-attr2", 
"proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13030,19 +13029,20 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "version_check", "yansi", ] [[package]] name = "procfs-core" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3554923a69f4ce04c4a754260c338f505ce22642d3830e049a399fc2059a29" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" dependencies = [ "bitflags 2.10.0", "hex", + "serde", ] [[package]] @@ -13061,7 +13061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52717f9a02b6965224f95ca2a81e2e0c5c43baacd28ca057577988930b6c3d5b" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13080,8 +13080,6 @@ dependencies = [ "collections", "context_server", "dap", - "dap_adapters", - "db", "encoding_rs", "extension", "fancy-regex", @@ -13188,7 +13186,6 @@ dependencies = [ "pretty_assertions", "project", "rayon", - "remote_connection", "schemars", "search", "serde", @@ -13270,6 +13267,47 @@ dependencies = [ "uuid", ] +[[package]] +name = "proptest" +version = "1.10.0" +source = "git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.10.0", + "num-traits", + "proptest-macro", + "rand 0.9.2", + "rand_chacha 0.9.0", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "proptest-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c57924a81864dddafba92e1bf92f9bf82f97096c44489548a60e888e1547549b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "proptest-macro" +version = "0.5.0" +source = 
"git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b" +dependencies = [ + "convert_case 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "prost" version = "0.9.0" @@ -13327,7 +13365,7 @@ dependencies = [ "prost 0.12.6", "prost-types 0.12.6", "regex", - "syn 2.0.106", + "syn 2.0.117", "tempfile", ] @@ -13354,7 +13392,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -13381,11 +13419,9 @@ name = "proto" version = "0.1.0" dependencies = [ "anyhow", - "collections", "prost 0.9.0", "prost-build 0.9.0", "serde", - "typed-path", ] [[package]] @@ -13522,6 +13558,12 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quick-error" version = "2.0.1" @@ -13747,6 +13789,15 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core 0.9.3", +] + [[package]] name = "random_choice" version = "0.3.2" @@ -13821,7 +13872,7 @@ dependencies = [ "avif-serialize", "imgref", "loop9", - "quick-error", + "quick-error 2.0.1", "rav1e", "rayon", "rgb", @@ -13923,7 +13974,6 @@ dependencies = [ "anyhow", "askpass", "chrono", - "dap", "db", "dev_container", "editor", @@ -14021,7 +14071,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -14172,7 +14222,6 @@ dependencies = [ "collections", "crash-handler", "crashes", - "dap", "dap_adapters", "debug_adapter_extension", "editor", @@ -14204,7 +14253,6 
@@ dependencies = [ "paths", "pretty_assertions", "project", - "prompt_store", "proto", "rayon", "release_channel", @@ -14228,7 +14276,6 @@ dependencies = [ "uuid", "watch", "windows 0.61.3", - "workspace", "worktree", "zlog", ] @@ -14262,7 +14309,6 @@ dependencies = [ "collections", "command_palette_hooks", "editor", - "env_logger 0.11.8", "feature_flags", "file_icons", "futures 0.3.31", @@ -14390,7 +14436,6 @@ dependencies = [ "anyhow", "bytes 1.11.1", "futures 0.3.31", - "gpui", "gpui_util", "http_client", "http_client_tls", @@ -14435,20 +14480,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "rich_text" -version = "0.1.0" -dependencies = [ - "futures 0.3.31", - "gpui", - "language", - "linkify", - "pulldown-cmark 0.13.0", - "theme", - "ui", - "util", -] - [[package]] name = "ring" version = "0.17.14" @@ -14698,7 +14729,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.106", + "syn 2.0.117", "walkdir", ] @@ -14952,6 +14983,18 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error 1.2.3", + "tempfile", + "wait-timeout", +] + [[package]] name = "rustybuzz" version = "0.20.1" @@ -15071,7 +15114,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15132,7 +15175,7 @@ checksum = "1783eabc414609e28a5ba76aee5ddd52199f7107a0b24c2e9746a1ecc34a683d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15161,7 +15204,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15203,7 +15246,7 @@ dependencies = [ "proc-macro2", 
"quote", "sea-bae", - "syn 2.0.106", + "syn 2.0.117", "unicode-ident", ] @@ -15252,7 +15295,6 @@ dependencies = [ "any_vec", "anyhow", "bitflags 2.10.0", - "client", "collections", "editor", "fs", @@ -15388,7 +15430,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15399,7 +15441,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15457,7 +15499,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -15597,18 +15639,16 @@ version = "0.1.0" dependencies = [ "quote", "settings", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] name = "settings_profile_selector" version = "0.1.0" dependencies = [ - "client", "editor", "fuzzy", "gpui", - "language", "menu", "picker", "project", @@ -15627,9 +15667,7 @@ dependencies = [ "agent", "agent_settings", "anyhow", - "assets", "audio", - "client", "codestral", "component", "copilot", @@ -15647,13 +15685,11 @@ dependencies = [ "language", "log", "menu", - "node_runtime", "paths", "picker", "platform_title_bar", "pretty_assertions", "project", - "recent_projects", "regex", "release_channel", "rodio", @@ -15661,7 +15697,6 @@ dependencies = [ "search", "serde", "serde_json", - "session", "settings", "shell_command_parser", "strum 0.27.2", @@ -15672,7 +15707,6 @@ dependencies = [ "util", "workspace", "zed_actions", - "zlog", ] [[package]] @@ -15781,22 +15815,26 @@ name = "sidebar" version = "0.1.0" dependencies = [ "acp_thread", + "agent", + "agent-client-protocol", "agent_ui", + "assistant_text_thread", "chrono", "editor", "feature_flags", "fs", - "fuzzy", "gpui", - "picker", + "language_model", + "menu", "project", "recent_projects", + "serde_json", "settings", "theme", 
"ui", - "ui_input", "util", "workspace", + "zed_actions", ] [[package]] @@ -15968,7 +16006,7 @@ checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16146,7 +16184,7 @@ version = "0.1.0" dependencies = [ "sqlez", "sqlformat", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16223,7 +16261,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16246,7 +16284,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.106", + "syn 2.0.117", "tokio", "url", ] @@ -16405,7 +16443,7 @@ checksum = "172175341049678163e979d9107ca3508046d4d2a7c6682bee46ac541b17db69" dependencies = [ "proc-macro-error2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16548,7 +16586,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -16564,6 +16602,7 @@ dependencies = [ "arrayvec", "ctor", "log", + "proptest", "rand 0.9.2", "rayon", "tracing", @@ -16849,9 +16888,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -16890,7 +16929,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17061,13 +17100,11 @@ dependencies = [ name = "tab_switcher" version = "0.1.0" dependencies = [ - "anyhow", "collections", "ctor", "editor", "fuzzy", "gpui", - "language", "menu", "picker", "project", @@ -17256,7 +17293,6 @@ dependencies = [ "release_channel", "schemars", "serde", - "serde_json", "settings", "smol", "sysinfo 
0.37.2", @@ -17288,7 +17324,6 @@ dependencies = [ "assistant_slash_command", "async-recursion", "breadcrumbs", - "client", "collections", "db", "dirs 4.0.0", @@ -17301,7 +17336,6 @@ dependencies = [ "menu", "pretty_assertions", "project", - "rand 0.9.2", "regex", "schemars", "serde", @@ -17326,7 +17360,6 @@ dependencies = [ "collections", "ctor", "gpui", - "http_client", "log", "parking_lot", "postage", @@ -17440,7 +17473,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17451,7 +17484,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -17472,7 +17505,7 @@ dependencies = [ "fax", "flate2", "half", - "quick-error", + "quick-error 2.0.1", "weezl", "zune-jpeg", ] @@ -17629,15 +17662,12 @@ dependencies = [ "chrono", "client", "cloud_api_types", - "collections", "db", "feature_flags", "git_ui", "gpui", - "http_client", "notifications", "platform_title_bar", - "pretty_assertions", "project", "recent_projects", "release_channel", @@ -17651,7 +17681,6 @@ dependencies = [ "story", "telemetry", "theme", - "tree-sitter-md", "ui", "util", "windows 0.61.3", @@ -17695,7 +17724,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -18033,7 +18062,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -18128,7 +18157,7 @@ checksum = "70977707304198400eb4835a78f6a9f928bf41bba420deb8fdb175cd965d77a7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -18481,12 +18510,6 @@ dependencies = [ "utf-8", ] -[[package]] -name = "typed-path" -version = "0.11.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c462d18470a2857aa657d338af5fa67170bb48bcc80a296710ce3b0802a32566" - [[package]] name = "typeid" version = "1.0.3" @@ -18585,7 +18608,7 @@ version = "0.1.0" dependencies = [ "component", "quote", - "syn 2.0.106", + "syn 2.0.117", "ui", ] @@ -18602,6 +18625,12 @@ dependencies = [ "workspace", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicase" version = "2.8.1" @@ -18807,7 +18836,6 @@ dependencies = [ "git2", "globset", "gpui_util", - "indoc", "itertools 0.14.0", "libc", "log", @@ -18840,7 +18868,7 @@ version = "0.1.0" dependencies = [ "perf", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -18952,7 +18980,6 @@ name = "vim" version = "0.1.0" dependencies = [ "anyhow", - "assets", "async-compat", "async-trait", "collections", @@ -18992,7 +19019,6 @@ dependencies = [ "task", "text", "theme", - "title_bar", "tokio", "ui", "util", @@ -19058,6 +19084,15 @@ dependencies = [ "serde", ] +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.2.0" @@ -19187,7 +19222,7 @@ dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wasm-bindgen-shared", ] @@ -19483,7 +19518,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wasmtime-component-util", "wasmtime-wit-bindgen", "wit-parser 0.229.0", @@ -19598,7 +19633,7 @@ checksum = "d0963c1438357a3d8c0efe152b4ef5259846c1cf8b864340270744fe5b3bae5e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -19691,7 +19726,6 @@ dependencies = [ "futures 0.3.31", 
"gpui", "parking_lot", - "rand 0.9.2", "zlog", ] @@ -19923,7 +19957,7 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19952,7 +19986,7 @@ dependencies = [ [[package]] name = "wgpu-core" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "arrayvec", "bit-set", @@ -19983,7 +20017,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-apple" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "wgpu-hal", ] @@ -19991,7 +20025,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-emscripten" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "wgpu-hal", ] @@ -19999,7 +20033,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-windows-linux-android" version = "28.0.1" -source = 
"git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "wgpu-hal", ] @@ -20007,7 +20041,7 @@ dependencies = [ [[package]] name = "wgpu-hal" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "android_system_properties", "arrayvec", @@ -20054,7 +20088,7 @@ dependencies = [ [[package]] name = "wgpu-types" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" +source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" dependencies = [ "bitflags 2.10.0", "bytemuck", @@ -20136,7 +20170,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "witx", ] @@ -20148,7 +20182,7 @@ checksum = "d873bb5b59ca703b5e41562e96a4796d1af61bf4cf80bf8a7abda755a380ec1c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wiggle-generate", ] @@ -20371,7 +20405,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20382,7 +20416,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20393,7 +20427,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20404,7 +20438,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20415,7 +20449,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -20426,7 +20460,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -21064,7 +21098,7 @@ dependencies = [ "heck 0.5.0", "indexmap", "prettyplease", - "syn 2.0.106", + "syn 2.0.117", "wasm-metadata 0.227.1", "wit-bindgen-core 0.41.0", "wit-component 0.227.1", @@ -21080,7 +21114,7 @@ dependencies = [ "heck 0.5.0", "indexmap", "prettyplease", - "syn 2.0.106", + "syn 2.0.117", "wasm-metadata 0.244.0", "wit-bindgen-core 0.51.0", "wit-component 0.244.0", @@ -21095,7 +21129,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.22.0", "wit-bindgen-rust 0.22.0", ] @@ -21110,7 +21144,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.41.0", "wit-bindgen-rust 0.41.0", ] @@ -21125,7 +21159,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "wit-bindgen-core 0.51.0", "wit-bindgen-rust 0.51.0", ] @@ -21283,7 +21317,6 @@ dependencies = [ "clock", "collections", "component", - "dap", "db", "feature_flags", "fs", @@ -21336,9 +21369,7 @@ dependencies = [ "futures 0.3.31", "fuzzy", "git", - "git2", "gpui", - "http_client", "ignore", "language", "log", @@ -21656,7 +21687,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -21668,7 +21699,7 @@ 
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -21716,7 +21747,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "zbus_names", "zvariant", "zvariant_utils", @@ -21772,7 +21803,6 @@ dependencies = [ "copilot_ui", "crashes", "csv_preview", - "dap", "dap_adapters", "db", "debug_adapter_extension", @@ -21882,8 +21912,6 @@ dependencies = [ "title_bar", "toolchain_selector", "tracing", - "tree-sitter-md", - "tree-sitter-rust", "ui", "ui_prompt", "url", @@ -22114,7 +22142,7 @@ checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22134,7 +22162,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "synstructure", ] @@ -22155,7 +22183,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22212,7 +22240,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", ] [[package]] @@ -22374,7 +22402,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.117", "zvariant_utils", ] @@ -22387,6 +22415,6 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.106", + "syn 2.0.117", "winnow", ] diff --git a/Cargo.toml b/Cargo.toml index b8e57bda7e46ea45451fedd6759268235c7d71ab..b6760fa917da7e051fd60a1375be49d516fcf113 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,6 +66,7 @@ members = [ "crates/encoding_selector", "crates/etw_tracing", "crates/eval", + "crates/eval_cli", "crates/eval_utils", "crates/explorer_command_injector", 
"crates/extension", @@ -158,7 +159,6 @@ members = [ "crates/remote_server", "crates/repl", "crates/reqwest_client", - "crates/rich_text", "crates/rope", "crates/rpc", "crates/rules_library", @@ -595,7 +595,7 @@ lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410 mach2 = "0.5" markup5ever_rcdom = "0.3.0" metal = "0.33" -minidumper = "0.8" +minidumper = "0.9" moka = { version = "0.12.10", features = ["sync"] } naga = { version = "28.0", features = ["wgsl-in"] } nanoid = "0.4" @@ -649,6 +649,9 @@ postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } proc-macro2 = "1.0.93" profiling = "1" +# replace this with main when #635 is merged +proptest = { git = "https://github.com/proptest-rs/proptest", rev = "3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b", features = ["attr-macro"] } +proptest-derive = "0.8.0" prost = "0.9" prost-build = "0.9" prost-types = "0.9" @@ -779,7 +782,7 @@ wax = "0.7" which = "6.0.0" wasm-bindgen = "0.2.113" web-time = "1.1.0" -wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "9459e95113c5bd116b2cc2c87e8424b28059e17c" } +wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "465557eccfe77c840a9b4936f1408da9503372c4" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" diff --git a/assets/icons/list_collapse.svg b/assets/icons/list_collapse.svg index f18bc550b90228c2f689848b86cfc5bea3d6ff50..dbdb2aaa4537c25ba1867d4957c23819af425835 100644 --- a/assets/icons/list_collapse.svg +++ b/assets/icons/list_collapse.svg @@ -1 +1,7 @@ - + + + + + + + diff --git a/assets/icons/new_thread.svg b/assets/icons/new_thread.svg new file mode 100644 index 0000000000000000000000000000000000000000..19b8fa25ea30ed47a57a5d5f83d62f2b4b56b61e --- /dev/null +++ b/assets/icons/new_thread.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/open_folder.svg b/assets/icons/open_folder.svg new file mode 100644 index 
0000000000000000000000000000000000000000..c4aa32b29cc1048fd4ecd8b1b4d32b68ae0a8ad3 --- /dev/null +++ b/assets/icons/open_folder.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 7e01245ec62b2590a1c88fef5946b7d06463968d..cb5cef24c50f9f9ac637f3ac70adb24d37e56d61 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -258,6 +258,7 @@ "ctrl-shift-j": "agent::ToggleNavigationMenu", "ctrl-alt-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", + "ctrl-alt-shift-t": "agent::ToggleStartThreadInSelector", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl->": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", @@ -673,6 +674,9 @@ "use_key_equivalents": true, "bindings": { "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -815,7 +819,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!ContextEditor && !AcpThread > Editor && mode == full", "bindings": { "alt-enter": "editor::OpenExcerpts", "shift-enter": "editor::ExpandExcerpts", @@ -979,6 +983,7 @@ "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", + "ctrl-k ctrl-r": "git::RestoreAndNext", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 43d6419575fc698110cd5a033c01127ac6543f9a..08fb63868be875f41f6c461354b46f1081a2026f 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -297,6 +297,7 @@ "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-alt-m": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", + "cmd-alt-shift-t": "agent::ToggleStartThreadInSelector", "shift-alt-escape": "agent::ExpandMessageEditor", "cmd->": 
"agent::AddSelectionToThread", "cmd-shift-e": "project_panel::ToggleFocus", @@ -741,6 +742,9 @@ "use_key_equivalents": true, "bindings": { "cmd-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -878,7 +882,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!ContextEditor && !AcpThread > Editor && mode == full", "use_key_equivalents": true, "bindings": { "alt-enter": "editor::OpenExcerpts", @@ -1030,6 +1034,7 @@ "cmd-shift-enter": "git::Amend", "cmd-ctrl-y": "git::StageAll", "cmd-ctrl-shift-y": "git::UnstageAll", + "cmd-alt-z": "git::RestoreAndNext", }, }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 22541368cecfc6a645e2b8b7ce55a6711491a012..600025e2069978f3020afb5cb978d05a53317682 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -259,6 +259,7 @@ "shift-alt-j": "agent::ToggleNavigationMenu", "shift-alt-i": "agent::ToggleOptionsMenu", "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", + "ctrl-shift-alt-t": "agent::ToggleStartThreadInSelector", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", @@ -677,6 +678,9 @@ "use_key_equivalents": true, "bindings": { "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -817,7 +821,7 @@ }, }, { - "context": "!ContextEditor > Editor && mode == full", + "context": "!ContextEditor && !AcpThread > Editor && mode == full", "use_key_equivalents": true, "bindings": { "alt-enter": "editor::OpenExcerpts", @@ -980,6 +984,7 @@ "ctrl-shift-enter": "git::Amend", "ctrl-space": "git::StageAll", "ctrl-shift-space": "git::UnstageAll", + "ctrl-k ctrl-r": 
"git::RestoreAndNext", }, }, { diff --git a/assets/keymaps/linux/jetbrains.json b/assets/keymaps/linux/jetbrains.json index bdf3949b3f9203220978ff599e0187513d6a976f..98d5cf93106f35e488ab70a60468fa2239cb08c0 100644 --- a/assets/keymaps/linux/jetbrains.json +++ b/assets/keymaps/linux/jetbrains.json @@ -81,6 +81,13 @@ "ctrl-\\": "assistant::InlineAssist", }, }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow", + }, + }, { "context": "BufferSearchBar", "bindings": { diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index c9106e4d49671f16917b1322824c2edfcd0e7700..8612e07c4719dfdbf67762c89505cc2da0cfa000 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -79,6 +79,13 @@ "cmd-\\": "assistant::InlineAssist", }, }, + { + "context": "Editor && mode == auto_height", + "bindings": { + "shift-enter": "editor::Newline", + "ctrl-shift-enter": "editor::NewlineBelow", + }, + }, { "context": "BufferSearchBar", "bindings": { diff --git a/crates/acp_thread/Cargo.toml b/crates/acp_thread/Cargo.toml index fe47db0aab9917274d6ee7f74bb3bb4bedb3fab3..1c87abbfa21e3dc4b868203a09040b4356c0d1f9 100644 --- a/crates/acp_thread/Cargo.toml +++ b/crates/acp_thread/Cargo.toml @@ -61,7 +61,5 @@ indoc.workspace = true parking_lot.workspace = true project = { workspace = true, "features" = ["test-support"] } rand.workspace = true -tempfile.workspace = true util.workspace = true settings.workspace = true -zlog.workspace = true diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 1b9271918884dc020986577926d9578e3a6f049c..58252eaddca553eb1da4c960a829a88afb9eb497 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -952,8 +952,11 @@ struct RunningTurn { } pub struct AcpThread { + session_id: acp::SessionId, + cwd: Option, parent_session_id: Option, 
title: SharedString, + provisional_title: Option, entries: Vec, plan: Plan, project: Entity, @@ -962,7 +965,6 @@ pub struct AcpThread { turn_id: u32, running_turn: Option, connection: Rc, - session_id: acp::SessionId, token_usage: Option, prompt_capabilities: acp::PromptCapabilities, _observe_prompt_capabilities: Task>, @@ -1047,87 +1049,6 @@ pub enum TerminalProviderCommand { }, } -impl AcpThread { - pub fn on_terminal_provider_event( - &mut self, - event: TerminalProviderEvent, - cx: &mut Context, - ) { - match event { - TerminalProviderEvent::Created { - terminal_id, - label, - cwd, - output_byte_limit, - terminal, - } => { - let entity = self.register_terminal_created( - terminal_id.clone(), - label, - cwd, - output_byte_limit, - terminal, - cx, - ); - - if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) { - for data in chunks.drain(..) { - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.write_output(&data, cx); - }) - }); - } - } - - if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) { - entity.update(cx, |_term, cx| { - cx.notify(); - }); - } - - cx.notify(); - } - TerminalProviderEvent::Output { terminal_id, data } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.write_output(&data, cx); - }) - }); - } else { - self.pending_terminal_output - .entry(terminal_id) - .or_default() - .push(data); - } - } - TerminalProviderEvent::TitleChanged { terminal_id, title } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |term, cx| { - term.inner().update(cx, |inner, cx| { - inner.breadcrumb_text = title; - cx.emit(::terminal::Event::BreadcrumbsChanged); - }) - }); - } - } - TerminalProviderEvent::Exit { - terminal_id, - status, - } => { - if let Some(entity) = self.terminals.get(&terminal_id) { - entity.update(cx, |_term, cx| { - cx.notify(); - }); - } else { - 
self.pending_terminal_exit.insert(terminal_id, status); - } - } - } - } -} - #[derive(PartialEq, Eq, Debug)] pub enum ThreadStatus { Idle, @@ -1174,6 +1095,7 @@ impl AcpThread { pub fn new( parent_session_id: Option, title: impl Into, + cwd: Option, connection: Rc, project: Entity, action_log: Entity, @@ -1194,11 +1116,13 @@ impl AcpThread { Self { parent_session_id, + cwd, action_log, shared_buffers: Default::default(), entries: Default::default(), plan: Default::default(), title: title.into(), + provisional_title: None, project, running_turn: None, turn_id: 0, @@ -1253,7 +1177,9 @@ impl AcpThread { } pub fn title(&self) -> SharedString { - self.title.clone() + self.provisional_title + .clone() + .unwrap_or_else(|| self.title.clone()) } pub fn entries(&self) -> &[AgentThreadEntry] { @@ -1264,6 +1190,10 @@ impl AcpThread { &self.session_id } + pub fn cwd(&self) -> Option<&PathBuf> { + self.cwd.as_ref() + } + pub fn status(&self) -> ThreadStatus { if self.running_turn.is_some() { ThreadStatus::Generating @@ -1505,16 +1435,29 @@ impl AcpThread { } pub fn set_title(&mut self, title: SharedString, cx: &mut Context) -> Task> { + let had_provisional = self.provisional_title.take().is_some(); if title != self.title { self.title = title.clone(); cx.emit(AcpThreadEvent::TitleUpdated); if let Some(set_title) = self.connection.set_title(&self.session_id, cx) { return set_title.run(title, cx); } + } else if had_provisional { + cx.emit(AcpThreadEvent::TitleUpdated); } Task::ready(Ok(())) } + /// Sets a provisional display title without propagating back to the + /// underlying agent connection. This is used for quick preview titles + /// (e.g. first 20 chars of the user message) that should be shown + /// immediately but replaced once the LLM generates a proper title via + /// `set_title`. 
+ pub fn set_provisional_title(&mut self, title: SharedString, cx: &mut Context) { + self.provisional_title = Some(title); + cx.emit(AcpThreadEvent::TitleUpdated); + } + pub fn subagent_spawned(&mut self, session_id: acp::SessionId, cx: &mut Context) { cx.emit(AcpThreadEvent::SubagentSpawned(session_id)); } @@ -2607,6 +2550,85 @@ impl AcpThread { } } } + + pub fn on_terminal_provider_event( + &mut self, + event: TerminalProviderEvent, + cx: &mut Context, + ) { + match event { + TerminalProviderEvent::Created { + terminal_id, + label, + cwd, + output_byte_limit, + terminal, + } => { + let entity = self.register_terminal_created( + terminal_id.clone(), + label, + cwd, + output_byte_limit, + terminal, + cx, + ); + + if let Some(mut chunks) = self.pending_terminal_output.remove(&terminal_id) { + for data in chunks.drain(..) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.write_output(&data, cx); + }) + }); + } + } + + if let Some(_status) = self.pending_terminal_exit.remove(&terminal_id) { + entity.update(cx, |_term, cx| { + cx.notify(); + }); + } + + cx.notify(); + } + TerminalProviderEvent::Output { terminal_id, data } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.write_output(&data, cx); + }) + }); + } else { + self.pending_terminal_output + .entry(terminal_id) + .or_default() + .push(data); + } + } + TerminalProviderEvent::TitleChanged { terminal_id, title } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |term, cx| { + term.inner().update(cx, |inner, cx| { + inner.breadcrumb_text = title; + cx.emit(::terminal::Event::BreadcrumbsChanged); + }) + }); + } + } + TerminalProviderEvent::Exit { + terminal_id, + status, + } => { + if let Some(entity) = self.terminals.get(&terminal_id) { + entity.update(cx, |_term, cx| { + cx.notify(); + }); + } else { + self.pending_terminal_exit.insert(terminal_id, 
status); + } + } + } + } } fn markdown_for_raw_output( @@ -3916,6 +3938,7 @@ mod tests { struct FakeAgentConnection { auth_methods: Vec, sessions: Arc>>>, + set_title_calls: Rc>>, on_user_message: Option< Rc< dyn Fn( @@ -3934,6 +3957,7 @@ mod tests { auth_methods: Vec::new(), on_user_message: None, sessions: Arc::default(), + set_title_calls: Default::default(), } } @@ -3969,7 +3993,7 @@ mod tests { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + cwd: &Path, cx: &mut App, ) -> Task>> { let session_id = acp::SessionId::new( @@ -3984,6 +4008,7 @@ mod tests { AcpThread::new( None, "Test", + Some(cwd.to_path_buf()), self.clone(), project, action_log, @@ -4038,11 +4063,32 @@ mod tests { })) } + fn set_title( + &self, + _session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { + Some(Rc::new(FakeAgentSessionSetTitle { + calls: self.set_title_calls.clone(), + })) + } + fn into_any(self: Rc) -> Rc { self } } + struct FakeAgentSessionSetTitle { + calls: Rc>>, + } + + impl AgentSessionSetTitle for FakeAgentSessionSetTitle { + fn run(&self, title: SharedString, _cx: &mut App) -> Task> { + self.calls.borrow_mut().push(title); + Task::ready(Ok(())) + } + } + struct FakeAgentSessionEditor { _session_id: acp::SessionId, } @@ -4634,4 +4680,54 @@ mod tests { ); }); } + + #[gpui::test] + async fn test_provisional_title_replaced_by_real_title(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let connection = Rc::new(FakeAgentConnection::new()); + let set_title_calls = connection.set_title_calls.clone(); + + let thread = cx + .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .await + .unwrap(); + + // Initial title is the default. + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title().as_ref(), "Test"); + }); + + // Setting a provisional title updates the display title. 
+ thread.update(cx, |thread, cx| { + thread.set_provisional_title("Hello, can you help…".into(), cx); + }); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title().as_ref(), "Hello, can you help…"); + }); + + // The provisional title should NOT have propagated to the connection. + assert_eq!( + set_title_calls.borrow().len(), + 0, + "provisional title should not propagate to the connection" + ); + + // When the real title arrives via set_title, it replaces the + // provisional title and propagates to the connection. + let task = thread.update(cx, |thread, cx| { + thread.set_title("Helping with Rust question".into(), cx) + }); + task.await.expect("set_title should succeed"); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title().as_ref(), "Helping with Rust question"); + }); + assert_eq!( + set_title_calls.borrow().as_slice(), + &[SharedString::from("Helping with Rust question")], + "real title should propagate to the connection" + ); + } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 0becded53762be7c96789b0d31191fd9cbc02bfe..644986bc15eccbe7d2be32ea5ad6e422db930541 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -45,9 +45,10 @@ pub trait AgentConnection { /// Load an existing session by ID. fn load_session( self: Rc, - _session: AgentSessionInfo, + _session_id: acp::SessionId, _project: Entity, _cwd: &Path, + _title: Option, _cx: &mut App, ) -> Task>> { Task::ready(Err(anyhow::Error::msg("Loading sessions is not supported"))) @@ -71,9 +72,10 @@ pub trait AgentConnection { /// Resume an existing session by ID without replaying previous messages. fn resume_session( self: Rc, - _session: AgentSessionInfo, + _session_id: acp::SessionId, _project: Entity, _cwd: &Path, + _title: Option, _cx: &mut App, ) -> Task>> { Task::ready(Err(anyhow::Error::msg( @@ -496,6 +498,7 @@ mod test_support { //! 
- `create_test_png_base64` for generating test images use std::sync::Arc; + use std::sync::atomic::{AtomicUsize, Ordering}; use action_log::ActionLog; use collections::HashMap; @@ -618,15 +621,18 @@ mod test_support { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + cwd: &Path, cx: &mut gpui::App, ) -> Task>> { - let session_id = acp::SessionId::new(self.sessions.lock().len().to_string()); + static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0); + let session_id = + acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string()); let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread = cx.new(|cx| { AcpThread::new( None, "Test", + Some(cwd.to_path_buf()), self.clone(), project, action_log, diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml index b1a1bf824fb770b8378e596fd0c799a7cf98b13d..5227a61651012279e83a3b6e3e68b1484acb0f66 100644 --- a/crates/action_log/Cargo.toml +++ b/crates/action_log/Cargo.toml @@ -37,7 +37,7 @@ collections = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } -indoc.workspace = true + language = { workspace = true, features = ["test-support"] } log.workspace = true pretty_assertions.workspace = true diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 99ae5b5b077a14c0909737d64935220698a007c7..ce53f23365d57666e25cac434935514fc4bd7e3f 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -30,4 +30,4 @@ workspace.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } -release_channel.workspace = true + diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index 9f563cf0b1b009a496d36a6f090b0f4b476433a7..fe2089d94dc2e3fc812f6cbe39c16c5cadc1a1f5 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml 
@@ -100,9 +100,9 @@ rand.workspace = true reqwest_client.workspace = true settings = { workspace = true, "features" = ["test-support"] } tempfile.workspace = true -terminal = { workspace = true, "features" = ["test-support"] } + theme = { workspace = true, "features" = ["test-support"] } -tree-sitter-rust.workspace = true + unindent = { workspace = true } -worktree = { workspace = true, "features" = ["test-support"] } + zlog.workspace = true diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 5421538ca736028a4ea7290c09ef81036e055b81..d9ad55c7127983516dbb5fe0392ef135186b79f7 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -361,6 +361,7 @@ impl NativeAgent { let mut acp_thread = acp_thread::AcpThread::new( parent_session_id, title, + None, connection, project.clone(), action_log.clone(), @@ -1277,13 +1278,14 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn load_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, _project: Entity, _cwd: &Path, + _title: Option, cx: &mut App, ) -> Task>> { self.0 - .update(cx, |agent, cx| agent.open_thread(session.session_id, cx)) + .update(cx, |agent, cx| agent.open_thread(session_id, cx)) } fn supports_close_session(&self) -> bool { @@ -1490,16 +1492,6 @@ impl NativeAgentSessionList { } } - fn to_session_info(entry: DbThreadMetadata) -> AgentSessionInfo { - AgentSessionInfo { - session_id: entry.id, - cwd: None, - title: Some(entry.title), - updated_at: Some(entry.updated_at), - meta: None, - } - } - pub fn thread_store(&self) -> &Entity { &self.thread_store } @@ -1515,7 +1507,7 @@ impl AgentSessionList for NativeAgentSessionList { .thread_store .read(cx) .entries() - .map(Self::to_session_info) + .map(|entry| AgentSessionInfo::from(&entry)) .collect(); Task::ready(Ok(AgentSessionListResponse::new(sessions))) } @@ -1639,6 +1631,16 @@ impl NativeThreadEnvironment { agent.register_session(subagent_thread.clone(), cx) })?; + let depth = 
current_depth + 1; + + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = false, + ); + self.prompt_subagent(session_id, subagent_thread, acp_thread) } @@ -1655,6 +1657,18 @@ impl NativeThreadEnvironment { anyhow::Ok((session.thread.clone(), session.acp_thread.clone())) })??; + let depth = subagent_thread.read(cx).depth(); + + if let Some(parent_thread_entity) = self.thread.upgrade() { + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = true, + ); + } + self.prompt_subagent(session_id, subagent_thread, acp_thread) } diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 10ecb643b9a17dd6b02b47a416c526a662d12632..2c9b33e4efc4f22059e2914589ca6c635b51c0e5 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -32,11 +32,24 @@ pub struct DbThreadMetadata { #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, + pub created_at: Option>, /// The workspace folder paths this thread was created against, sorted /// lexicographically. Used for grouping threads by project in the sidebar. pub folder_paths: PathList, } +impl From<&DbThreadMetadata> for acp_thread::AgentSessionInfo { + fn from(meta: &DbThreadMetadata) -> Self { + Self { + session_id: meta.id.clone(), + cwd: None, + title: Some(meta.title.clone()), + updated_at: Some(meta.updated_at), + meta: None, + } + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct DbThread { pub title: SharedString, @@ -408,6 +421,17 @@ impl ThreadsDatabase { s().ok(); } + if let Ok(mut s) = connection.exec(indoc! {" + ALTER TABLE threads ADD COLUMN created_at TEXT; + "}) + { + if s().is_ok() { + connection.exec(indoc! 
{" + UPDATE threads SET created_at = updated_at WHERE created_at IS NULL + "})?()?; + } + } + let db = Self { executor, connection: Arc::new(Mutex::new(connection)), @@ -458,8 +482,19 @@ impl ThreadsDatabase { let data_type = DataType::Zstd; let data = compressed; - let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?) + let created_at = Utc::now().to_rfc3339(); + + let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec, String)>(indoc! {" + INSERT INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data, created_at) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9) + ON CONFLICT(id) DO UPDATE SET + parent_id = excluded.parent_id, + folder_paths = excluded.folder_paths, + folder_paths_order = excluded.folder_paths_order, + summary = excluded.summary, + updated_at = excluded.updated_at, + data_type = excluded.data_type, + data = excluded.data "})?; insert(( @@ -471,6 +506,7 @@ impl ThreadsDatabase { updated_at, data_type, data, + created_at, ))?; Ok(()) @@ -483,14 +519,14 @@ impl ThreadsDatabase { let connection = connection.lock(); let mut select = connection - .select_bound::<(), (Arc, Option>, Option, Option, String, String)>(indoc! {" - SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at FROM threads ORDER BY updated_at DESC + .select_bound::<(), (Arc, Option>, Option, Option, String, String, Option)>(indoc! 
{" + SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at FROM threads ORDER BY updated_at DESC, created_at DESC "})?; let rows = select(())?; let mut threads = Vec::new(); - for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at) in rows { + for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at) in rows { let folder_paths = folder_paths .map(|paths| { PathList::deserialize(&util::path_list::SerializedPathList { @@ -499,11 +535,18 @@ impl ThreadsDatabase { }) }) .unwrap_or_default(); + let created_at = created_at + .as_deref() + .map(DateTime::parse_from_rfc3339) + .transpose()? + .map(|dt| dt.with_timezone(&Utc)); + threads.push(DbThreadMetadata { id: acp::SessionId::new(id), parent_session_id: parent_id.map(acp::SessionId::new), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), + created_at, folder_paths, }); } @@ -652,7 +695,7 @@ mod tests { } #[gpui::test] - async fn test_list_threads_orders_by_updated_at(cx: &mut TestAppContext) { + async fn test_list_threads_orders_by_created_at(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); let older_id = session_id("thread-a"); @@ -713,6 +756,10 @@ mod tests { entries[0].updated_at, Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap() ); + assert!( + entries[0].created_at.is_some(), + "created_at should be populated" + ); } #[test] diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 2e8818b101995b374cf8172547c45b55c27c6f26..e7b67e37bf4a8b71664a78b99b757c6985794ec6 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1423,7 +1423,7 @@ impl EditAgentTest { let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); settings::init(cx); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), 
client.clone(), cx); language_models::init(user_store, client.clone(), cx); }); diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 23ebe41d3c42654cb8fcdc0266009416686858aa..d33c80a435e84359976d4d8a9edb2bdebd66e0ff 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -3167,7 +3167,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let clock = Arc::new(clock::FakeSystemClock::new()); let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), client.clone(), cx); language_models::init(user_store, client.clone(), cx); LanguageModelRegistry::test(cx); }); @@ -3605,6 +3605,113 @@ async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) { )); } +#[gpui::test] +async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( + cx: &mut TestAppContext, +) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingEchoTool::new()); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Use the streaming_echo tool"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Send a partial tool use (is_input_complete = false), simulating the LLM + // streaming input for a tool. + let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: "streaming_echo".into(), + raw_input: r#"{"text": "partial"}"#.into(), + input: json!({"text": "partial"}), + is_input_complete: false, + thought_signature: None, + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + cx.run_until_parked(); + + // Send a stream error WITHOUT ever sending is_input_complete = true. 
+ // Before the fix, this would deadlock: the tool waits for more partials + // (or cancellation), run_turn_internal waits for the tool, and the sender + // keeping the channel open lives inside RunningTurn. + fake_model.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + fake_model.end_last_completion_stream(); + + // Advance past the retry delay so run_turn_internal retries. + cx.executor().advance_clock(Duration::from_secs(5)); + cx.run_until_parked(); + + // The retry request should contain the streaming tool's error result, + // proving the tool terminated and its result was forwarded. + let completion = fake_model + .pending_completions() + .pop() + .expect("No running turn"); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the streaming_echo tool".into()], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use.clone())], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name, + is_error: true, + content: "Failed to receive tool input: tool input was not fully received" + .into(), + output: Some( + "Failed to receive tool input: tool input was not fully received" + .into() + ), + } + )], + cache: true, + reasoning_details: None, + }, + ] + ); + + // Finish the retry round so the turn completes cleanly. 
+ fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _cx| { + assert!( + thread.is_turn_complete(), + "Thread should not be stuck; the turn should have completed", + ); + }); +} + /// Filters out the stop events for asserting against in tests fn stop_events(result_events: Vec>) -> Vec { result_events @@ -3660,7 +3767,9 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { ToolRequiringPermission::NAME: true, InfiniteTool::NAME: true, CancellationAwareTool::NAME: true, - (TerminalTool::NAME): true, + StreamingEchoTool::NAME: true, + StreamingFailingEchoTool::NAME: true, + TerminalTool::NAME: true, } } } @@ -3682,7 +3791,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { cx.set_http_client(Arc::new(http_client)); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), client.clone(), cx); language_models::init(user_store, client.clone(), cx); } }; @@ -6227,3 +6336,196 @@ async fn test_queued_message_ends_turn_at_boundary(cx: &mut TestAppContext) { ); }); } + +#[gpui::test] +async fn test_streaming_tool_error_breaks_stream_loop_immediately(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingFailingEchoTool { + receive_chunks_until_failure: 1, + }); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_failing_echo tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "call_1".into(), + name: StreamingFailingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({}), + is_input_complete: false, + thought_signature: None, + }; + + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + + cx.run_until_parked(); + + let completions = fake_model.pending_completions(); + let last_completion = completions.last().unwrap(); + + assert_eq!( + last_completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the streaming_failing_echo tool".into()], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use.clone())], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name, + is_error: true, + content: "failed".into(), + output: Some("failed".into()), + } + )], + cache: true, + reasoning_details: None, + }, + ] + ); +} + +#[gpui::test] +async fn test_streaming_tool_error_waits_for_prior_tools_to_complete(cx: &mut TestAppContext) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let (complete_streaming_echo_tool_call_tx, complete_streaming_echo_tool_call_rx) = + oneshot::channel(); + + thread.update(cx, |thread, _cx| { + thread.add_tool( + StreamingEchoTool::new().with_wait_until_complete(complete_streaming_echo_tool_call_rx), + ); + thread.add_tool(StreamingFailingEchoTool { + receive_chunks_until_failure: 1, + }); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_echo tool and the streaming_failing_echo tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "call_1".into(), + name: StreamingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({ "text": "hello" }), + is_input_complete: false, + thought_signature: None, + }, + )); + let first_tool_use = LanguageModelToolUse { + id: "call_1".into(), + name: StreamingEchoTool::NAME.into(), + raw_input: "hello world".into(), + input: json!({ "text": "hello world" }), + is_input_complete: true, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + first_tool_use.clone(), + )); + let second_tool_use = LanguageModelToolUse { + name: StreamingFailingEchoTool::NAME.into(), + raw_input: "hello".into(), + input: json!({ "text": "hello" }), + is_input_complete: false, + thought_signature: None, + id: "call_2".into(), + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + second_tool_use.clone(), + )); + + cx.run_until_parked(); + + complete_streaming_echo_tool_call_tx.send(()).unwrap(); + + cx.run_until_parked(); + + let completions = fake_model.pending_completions(); + let last_completion = completions.last().unwrap(); + + assert_eq!( + last_completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: 
Role::User, + content: vec![ + "Use the streaming_echo tool and the streaming_failing_echo tool".into() + ], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![ + language_model::MessageContent::ToolUse(first_tool_use.clone()), + language_model::MessageContent::ToolUse(second_tool_use.clone()) + ], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![ + language_model::MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: second_tool_use.id.clone(), + tool_name: second_tool_use.name, + is_error: true, + content: "failed".into(), + output: Some("failed".into()), + }), + language_model::MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: first_tool_use.id.clone(), + tool_name: first_tool_use.name, + is_error: false, + content: "hello world".into(), + output: Some("hello world".into()), + }), + ], + cache: true, + reasoning_details: None, + }, + ] + ); +} diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index e0794ee322cdf2c77c37d1d22f30ec77c5642d24..f36549a6c42f9e810c7794d8ec683613b6ae6933 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -2,9 +2,130 @@ use super::*; use agent_settings::AgentSettings; use gpui::{App, SharedString, Task}; use std::future; +use std::sync::Mutex; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Duration; +/// A streaming tool that echoes its input, used to test streaming tool +/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends +/// before `is_input_complete`). +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingEchoToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingEchoTool { + wait_until_complete_rx: Mutex>>, +} + +impl StreamingEchoTool { + pub fn new() -> Self { + Self { + wait_until_complete_rx: Mutex::new(None), + } + } + + pub fn with_wait_until_complete(mut self, receiver: oneshot::Receiver<()>) -> Self { + self.wait_until_complete_rx = Mutex::new(Some(receiver)); + self + } +} + +impl AgentTool for StreamingEchoTool { + type Input = StreamingEchoToolInput; + type Output = String; + + const NAME: &'static str = "streaming_echo"; + + fn supports_input_streaming() -> bool { + true + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "Streaming Echo".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take(); + cx.spawn(async move |_cx| { + while input.recv_partial().await.is_some() {} + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + if let Some(rx) = wait_until_complete_rx { + rx.await.ok(); + } + Ok(input.text) + }) + } +} + +/// A streaming tool that echoes its input, used to test streaming tool +/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends +/// before `is_input_complete`). +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingFailingEchoToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingFailingEchoTool { + pub receive_chunks_until_failure: usize, +} + +impl AgentTool for StreamingFailingEchoTool { + type Input = StreamingFailingEchoToolInput; + + type Output = String; + + const NAME: &'static str = "streaming_failing_echo"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn supports_input_streaming() -> bool { + true + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "echo".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + for _ in 0..self.receive_chunks_until_failure { + let _ = input.recv_partial().await; + } + Err("failed".into()) + }) + } +} + /// A tool that echoes its input #[derive(JsonSchema, Serialize, Deserialize)] pub struct EchoToolInput { diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 148702e1bafeae05ac67c6127d8259581aff93dd..e61a395e71f93d49d63d378355c89e44359db835 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1846,12 +1846,37 @@ impl Thread { Ok(events) => (events.fuse(), None), Err(err) => (stream::empty().boxed().fuse(), Some(err)), }; - let mut tool_results = FuturesUnordered::new(); + let mut tool_results: FuturesUnordered> = + FuturesUnordered::new(); + let mut early_tool_results: Vec = Vec::new(); let mut cancelled = false; loop { - // Race between getting the first event and cancellation + // Race between getting the first event, tool completion, and cancellation. let first_event = futures::select! 
{ event = events.next().fuse() => event, + tool_result = futures::StreamExt::select_next_some(&mut tool_results) => { + let is_error = tool_result.is_error; + let is_still_streaming = this + .read_with(cx, |this, _cx| { + this.running_turn + .as_ref() + .and_then(|turn| turn.streaming_tool_inputs.get(&tool_result.tool_use_id)) + .map_or(false, |inputs| !inputs.has_received_final()) + }) + .unwrap_or(false); + + early_tool_results.push(tool_result); + + // Only break if the tool errored and we are still + // streaming the input of the tool. If the tool errored + // but we are no longer streaming its input (i.e. there + // are parallel tool calls) we want to continue + // processing those tool inputs. + if is_error && is_still_streaming { + break; + } + continue; + } _ = cancellation_rx.changed().fuse() => { if *cancellation_rx.borrow() { cancelled = true; @@ -1918,26 +1943,26 @@ impl Thread { // that need their own permits. drop(events); - let end_turn = tool_results.is_empty(); - while let Some(tool_result) = tool_results.next().await { - log::debug!("Tool finished {:?}", tool_result); + // Drop streaming tool input senders that never received their final input. + // This prevents deadlock when the LLM stream ends (e.g. because of an error) + // before sending a tool use with `is_input_complete: true`. 
+ this.update(cx, |this, _cx| { + if let Some(running_turn) = this.running_turn.as_mut() { + if running_turn.streaming_tool_inputs.is_empty() { + return; + } + log::warn!("Dropping partial tool inputs because the stream ended"); + running_turn.streaming_tool_inputs.drain(); + } + })?; - event_stream.update_tool_call_fields( - &tool_result.tool_use_id, - acp::ToolCallUpdateFields::new() - .status(if tool_result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - }) - .raw_output(tool_result.output.clone()), - None, - ); - this.update(cx, |this, _cx| { - this.pending_message() - .tool_results - .insert(tool_result.tool_use_id.clone(), tool_result); - })?; + let end_turn = tool_results.is_empty() && early_tool_results.is_empty(); + + for tool_result in early_tool_results { + Self::process_tool_result(this, event_stream, cx, tool_result)?; + } + while let Some(tool_result) = tool_results.next().await { + Self::process_tool_result(this, event_stream, cx, tool_result)?; } this.update(cx, |this, cx| { @@ -1991,6 +2016,33 @@ impl Thread { } } + fn process_tool_result( + this: &WeakEntity, + event_stream: &ThreadEventStream, + cx: &mut AsyncApp, + tool_result: LanguageModelToolResult, + ) -> Result<(), anyhow::Error> { + log::debug!("Tool finished {:?}", tool_result); + + event_stream.update_tool_call_fields( + &tool_result.tool_use_id, + acp::ToolCallUpdateFields::new() + .status(if tool_result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + }) + .raw_output(tool_result.output.clone()), + None, + ); + this.update(cx, |this, _cx| { + this.pending_message() + .tool_results + .insert(tool_result.tool_use_id.clone(), tool_result); + })?; + Ok(()) + } + fn handle_completion_error( &mut self, error: LanguageModelCompletionError, @@ -3019,7 +3071,7 @@ impl ToolInput { let value = self .final_rx .await - .map_err(|_| anyhow!("tool input sender was dropped before sending final input"))?; + .map_err(|_| 
anyhow!("tool input was not fully received"))?; serde_json::from_value(value).map_err(Into::into) } @@ -3059,6 +3111,10 @@ impl ToolInputSender { (sender, input) } + pub(crate) fn has_received_final(&self) -> bool { + self.final_tx.is_none() + } + pub(crate) fn send_partial(&self, value: serde_json::Value) { self.partial_tx.unbounded_send(value).ok(); } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index e26820ddacc3132d42946de3b27d25f4424fae02..dd1f650de2f59a0e681e15e7eae3fad1a49ccc41 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -2,6 +2,7 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use gpui::{App, Context, Entity, Global, Task, prelude::*}; +use std::collections::HashMap; use util::path_list::PathList; struct GlobalThreadStore(Entity); @@ -10,6 +11,7 @@ impl Global for GlobalThreadStore {} pub struct ThreadStore { threads: Vec, + threads_by_paths: HashMap>, } impl ThreadStore { @@ -22,9 +24,14 @@ impl ThreadStore { cx.global::().0.clone() } + pub fn try_global(cx: &App) -> Option> { + cx.try_global::().map(|g| g.0.clone()) + } + pub fn new(cx: &mut Context) -> Self { let this = Self { threads: Vec::new(), + threads_by_paths: HashMap::default(), }; this.reload(cx); this @@ -87,14 +94,21 @@ impl ThreadStore { let database_connection = ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { let database = database_connection.await.map_err(|err| anyhow!(err))?; - let threads = database - .list_threads() - .await? 
- .into_iter() - .filter(|thread| thread.parent_session_id.is_none()) - .collect::>(); + let all_threads = database.list_threads().await?; this.update(cx, |this, cx| { - this.threads = threads; + this.threads.clear(); + this.threads_by_paths.clear(); + for thread in all_threads { + if thread.parent_session_id.is_some() { + continue; + } + let index = this.threads.len(); + this.threads_by_paths + .entry(thread.folder_paths.clone()) + .or_default() + .push(index); + this.threads.push(thread); + } cx.notify(); }) }) @@ -110,10 +124,12 @@ impl ThreadStore { } /// Returns threads whose folder_paths match the given paths exactly. + /// Uses a cached index for O(1) lookup per path list. pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator { - self.threads - .iter() - .filter(move |thread| &thread.folder_paths == paths) + self.threads_by_paths + .get(paths) + .into_iter() + .flat_map(|indices| indices.iter().map(|&index| &self.threads[index])) } } diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index 162de68b86115056e9579d22a8623d675245cc91..9c10b2fbf127c42d842300f4af865c4297cdedb8 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -163,6 +163,17 @@ impl AgentTool for SpawnAgentTool { let send_result = subagent.send(input.message, cx).await; + let status = if send_result.is_ok() { + "completed" + } else { + "error" + }; + telemetry::event!( + "Subagent Completed", + subagent_session = session_info.session_id.to_string(), + status, + ); + session_info.message_end_index = cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 22ba5acdc4659dcb78f6873b31cbef849667daa2..f3f100b4434b4f9e5d0830d8ba525a45b28906f6 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ 
b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -108,6 +108,11 @@ pub enum StreamingEditFileMode { pub struct Edit { /// The exact text to find in the file. This will be matched using fuzzy matching /// to handle minor differences in whitespace or formatting. + /// + /// Always include complete lines. Do not start or end mid-line. + /// Be minimal with replacements: + /// - For unique lines, include only those lines + /// - For non-unique lines, include enough context to identify them pub old_text: String, /// The text to replace it with pub new_text: String, @@ -558,6 +563,7 @@ impl EditPipeline { ToolEditEvent::OldTextChunk { chunk, done: false, .. } => { + log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); self.ensure_resolving_old_text(buffer, cx); if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = &mut self.current_edit @@ -581,6 +587,7 @@ impl EditPipeline { chunk, done: true, } => { + log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); self.ensure_resolving_old_text(buffer, cx); let Some(EditPipelineEntry::ResolvingOldText { matcher }) = &mut self.current_edit else { @@ -617,6 +624,14 @@ impl EditPipeline { let old_text_in_buffer = snapshot.text_for_range(range.clone()).collect::(); + log::debug!( + "edit[{}] old_text matched at {}..{}: {:?}", + edit_index, + range.start, + range.end, + old_text_in_buffer, + ); + let text_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); self.current_edit = Some(EditPipelineEntry::StreamingNewText { streaming_diff: StreamingDiff::new(old_text_in_buffer), @@ -633,6 +648,7 @@ impl EditPipeline { ToolEditEvent::NewTextChunk { chunk, done: false, .. } => { + log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); let Some(EditPipelineEntry::StreamingNewText { streaming_diff, edit_cursor, @@ -667,6 +683,7 @@ impl EditPipeline { ToolEditEvent::NewTextChunk { chunk, done: true, .. 
} => { + log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); let Some(EditPipelineEntry::StreamingNewText { mut streaming_diff, mut edit_cursor, @@ -680,6 +697,8 @@ impl EditPipeline { let mut final_text = reindenter.push(chunk); final_text.push_str(&reindenter.finish()); + log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); + if !final_text.is_empty() { let char_ops = streaming_diff.push_new(&final_text); apply_char_operations( @@ -816,6 +835,17 @@ impl EditSession { })?; let events = self.parser.finalize_edits(&edits); self.process_events(&events, tool, event_stream, cx)?; + + if log::log_enabled!(log::Level::Debug) { + log::debug!("Got edits:"); + for edit in &edits { + log::debug!( + " old_text: '{}', new_text: '{}'", + edit.old_text.replace('\n', "\\n"), + edit.new_text.replace('\n', "\\n") + ); + } + } } } diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 4d34632a248c5db35666e93cb068c7ec6727fc48..4fb4109129ee5b8896f7a62afe49e0bcaef701ed 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -61,7 +61,7 @@ nix.workspace = true client = { workspace = true, features = ["test-support"] } env_logger.workspace = true fs.workspace = true -language.workspace = true + indoc.workspace = true acp_thread = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index c63e4fab2201671fa6448e9d58f6c925c2c91cd8..ceceb5b8ae02a0674b27e0fa18244a94f2b409de 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -385,7 +385,7 @@ impl AgentConnection for AcpConnection { cx.spawn(async move |cx| { let response = self.connection - .new_session(acp::NewSessionRequest::new(cwd).mcp_servers(mcp_servers)) + .new_session(acp::NewSessionRequest::new(cwd.clone()).mcp_servers(mcp_servers)) .await .map_err(map_acp_error)?; @@ -560,6 +560,7 @@ 
impl AgentConnection for AcpConnection { AcpThread::new( None, self.display_name.clone(), + Some(cwd), self.clone(), project, action_log, @@ -598,9 +599,10 @@ impl AgentConnection for AcpConnection { fn load_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, cwd: &Path, + title: Option, cx: &mut App, ) -> Task>> { if !self.agent_capabilities.load_session { @@ -612,25 +614,23 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = session - .title - .clone() - .unwrap_or_else(|| self.display_name.clone()); + let title = title.unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, + Some(cwd.clone()), self.clone(), project, action_log, - session.session_id.clone(), + session_id.clone(), watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()), cx, ) }); self.sessions.borrow_mut().insert( - session.session_id.clone(), + session_id.clone(), AcpSession { thread: thread.downgrade(), suppress_abort_err: false, @@ -644,21 +644,20 @@ impl AgentConnection for AcpConnection { let response = match self .connection .load_session( - acp::LoadSessionRequest::new(session.session_id.clone(), cwd) - .mcp_servers(mcp_servers), + acp::LoadSessionRequest::new(session_id.clone(), cwd).mcp_servers(mcp_servers), ) .await { Ok(response) => response, Err(err) => { - self.sessions.borrow_mut().remove(&session.session_id); + self.sessions.borrow_mut().remove(&session_id); return Err(map_acp_error(err)); } }; let (modes, models, config_options) = config_state(response.modes, response.models, response.config_options); - if let Some(session) = self.sessions.borrow_mut().get_mut(&session.session_id) { + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; 
session.config_options = config_options.map(ConfigOptions::new); @@ -670,9 +669,10 @@ impl AgentConnection for AcpConnection { fn resume_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, cwd: &Path, + title: Option, cx: &mut App, ) -> Task>> { if self @@ -689,25 +689,23 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = session - .title - .clone() - .unwrap_or_else(|| self.display_name.clone()); + let title = title.unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, + Some(cwd.clone()), self.clone(), project, action_log, - session.session_id.clone(), + session_id.clone(), watch::Receiver::constant(self.agent_capabilities.prompt_capabilities.clone()), cx, ) }); self.sessions.borrow_mut().insert( - session.session_id.clone(), + session_id.clone(), AcpSession { thread: thread.downgrade(), suppress_abort_err: false, @@ -721,21 +719,21 @@ impl AgentConnection for AcpConnection { let response = match self .connection .resume_session( - acp::ResumeSessionRequest::new(session.session_id.clone(), cwd) + acp::ResumeSessionRequest::new(session_id.clone(), cwd) .mcp_servers(mcp_servers), ) .await { Ok(response) => response, Err(err) => { - self.sessions.borrow_mut().remove(&session.session_id); + self.sessions.borrow_mut().remove(&session_id); return Err(map_acp_error(err)); } }; let (modes, models, config_options) = config_state(response.modes, response.models, response.config_options); - if let Some(session) = self.sessions.borrow_mut().get_mut(&session.session_id) { + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; session.config_options = config_options.map(ConfigOptions::new); diff --git a/crates/agent_servers/src/custom.rs 
b/crates/agent_servers/src/custom.rs index b0669d1fb69e110f0ba206a3579f16738de5e7e2..0a1830717217872868e66a8222902c49eeaabf9c 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -84,19 +84,12 @@ impl AgentServer for CustomAgentServer { let config_id = config_id.to_string(); let value_id = value_id.to_string(); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .or_insert_with(|| default_settings_for_agent(&name, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -132,19 +125,12 @@ impl AgentServer for CustomAgentServer { fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .or_insert_with(|| default_settings_for_agent(&name, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_mode, .. 
} @@ -171,19 +157,12 @@ impl AgentServer for CustomAgentServer { fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .or_insert_with(|| default_settings_for_agent(&name, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_model, .. } @@ -222,19 +201,12 @@ impl AgentServer for CustomAgentServer { cx: &App, ) { let name = self.name(); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .or_insert_with(|| default_settings_for_agent(&name, cx)); let favorite_models = match settings { settings::CustomAgentServerSettings::Custom { @@ -282,19 +254,12 @@ impl AgentServer for CustomAgentServer { let name = self.name(); let config_id = config_id.to_string(); let value_id = value_id.map(|s| s.to_string()); - update_settings_file(fs, cx, move |settings, _| { + update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() .entry(name.to_string()) - .or_insert_with(|| settings::CustomAgentServerSettings::Extension { - default_model: None, - default_mode: None, - env: 
Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }); + .or_insert_with(|| default_settings_for_agent(&name, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -332,45 +297,27 @@ impl AgentServer for CustomAgentServer { .unwrap_or_else(|| name.clone()); let default_mode = self.default_mode(cx); let default_model = self.default_model(cx); - let is_previous_built_in = - matches!(name.as_ref(), CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME); - let (default_config_options, is_registry_agent) = - cx.read_global(|settings: &SettingsStore, _| { - let agent_settings = settings - .get::(None) - .get(self.name().as_ref()); - - let is_registry = agent_settings - .map(|s| { - matches!( - s, - project::agent_server_store::CustomAgentServerSettings::Registry { .. } - ) - }) - .unwrap_or(false); - - let config_options = agent_settings - .map(|s| match s { - project::agent_server_store::CustomAgentServerSettings::Custom { - default_config_options, - .. - } - | project::agent_server_store::CustomAgentServerSettings::Extension { - default_config_options, - .. - } - | project::agent_server_store::CustomAgentServerSettings::Registry { - default_config_options, - .. - } => default_config_options.clone(), - }) - .unwrap_or_default(); - - (config_options, is_registry) - }); - - // Intermediate step to allow for previous built-ins to also be triggered if they aren't in settings yet. - let is_registry_agent = is_registry_agent || is_previous_built_in; + let is_registry_agent = is_registry_agent(&name, cx); + let default_config_options = cx.read_global(|settings: &SettingsStore, _| { + settings + .get::(None) + .get(self.name().as_ref()) + .map(|s| match s { + project::agent_server_store::CustomAgentServerSettings::Custom { + default_config_options, + .. + } + | project::agent_server_store::CustomAgentServerSettings::Extension { + default_config_options, + .. 
+ } + | project::agent_server_store::CustomAgentServerSettings::Registry { + default_config_options, + .. + } => default_config_options.clone(), + }) + .unwrap_or_default() + }); if is_registry_agent { if let Some(registry_store) = project::AgentRegistryStore::try_global(cx) { @@ -458,3 +405,222 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { ) }) } + +fn is_registry_agent(name: &str, cx: &App) -> bool { + let is_previous_built_in = matches!(name, CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME); + let is_in_registry = project::AgentRegistryStore::try_global(cx) + .map(|store| store.read(cx).agent(name).is_some()) + .unwrap_or(false); + let is_settings_registry = cx.read_global(|settings: &SettingsStore, _| { + settings + .get::(None) + .get(name) + .is_some_and(|s| { + matches!( + s, + project::agent_server_store::CustomAgentServerSettings::Registry { .. } + ) + }) + }); + is_previous_built_in || is_in_registry || is_settings_registry +} + +fn default_settings_for_agent(name: &str, cx: &App) -> settings::CustomAgentServerSettings { + if is_registry_agent(name, cx) { + settings::CustomAgentServerSettings::Registry { + default_model: None, + default_mode: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + } else { + settings::CustomAgentServerSettings::Extension { + default_model: None, + default_mode: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use collections::HashMap; + use gpui::TestAppContext; + use project::agent_registry_store::{ + AgentRegistryStore, RegistryAgent, RegistryAgentMetadata, RegistryNpxAgent, + }; + use settings::Settings as _; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + 
cx.set_global(settings_store); + }); + } + + fn init_registry_with_agents(cx: &mut TestAppContext, agent_ids: &[&str]) { + let agents: Vec = agent_ids + .iter() + .map(|id| { + let id = SharedString::from(id.to_string()); + RegistryAgent::Npx(RegistryNpxAgent { + metadata: RegistryAgentMetadata { + id: id.clone(), + name: id.clone(), + description: SharedString::from(""), + version: SharedString::from("1.0.0"), + repository: None, + icon_path: None, + }, + package: id, + args: Vec::new(), + env: HashMap::default(), + }) + }) + .collect(); + cx.update(|cx| { + AgentRegistryStore::init_test_global(cx, agents); + }); + } + + fn set_agent_server_settings( + cx: &mut TestAppContext, + entries: Vec<(&str, settings::CustomAgentServerSettings)>, + ) { + cx.update(|cx| { + AllAgentServersSettings::override_global( + project::agent_server_store::AllAgentServersSettings( + entries + .into_iter() + .map(|(name, settings)| (name.to_string(), settings.into())) + .collect(), + ), + cx, + ); + }); + } + + #[gpui::test] + fn test_previous_builtins_are_registry(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(is_registry_agent(CLAUDE_AGENT_NAME, cx)); + assert!(is_registry_agent(CODEX_NAME, cx)); + assert!(is_registry_agent(GEMINI_NAME, cx)); + }); + } + + #[gpui::test] + fn test_unknown_agent_is_not_registry(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(!is_registry_agent("my-custom-agent", cx)); + }); + } + + #[gpui::test] + fn test_agent_in_registry_store_is_registry(cx: &mut TestAppContext) { + init_test(cx); + init_registry_with_agents(cx, &["some-new-registry-agent"]); + cx.update(|cx| { + assert!(is_registry_agent("some-new-registry-agent", cx)); + assert!(!is_registry_agent("not-in-registry", cx)); + }); + } + + #[gpui::test] + fn test_agent_with_registry_settings_type_is_registry(cx: &mut TestAppContext) { + init_test(cx); + set_agent_server_settings( + cx, + vec![( + "agent-from-settings", + 
settings::CustomAgentServerSettings::Registry { + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: Vec::new(), + default_config_options: HashMap::default(), + favorite_config_option_values: HashMap::default(), + }, + )], + ); + cx.update(|cx| { + assert!(is_registry_agent("agent-from-settings", cx)); + }); + } + + #[gpui::test] + fn test_agent_with_extension_settings_type_is_not_registry(cx: &mut TestAppContext) { + init_test(cx); + set_agent_server_settings( + cx, + vec![( + "my-extension-agent", + settings::CustomAgentServerSettings::Extension { + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: Vec::new(), + default_config_options: HashMap::default(), + favorite_config_option_values: HashMap::default(), + }, + )], + ); + cx.update(|cx| { + assert!(!is_registry_agent("my-extension-agent", cx)); + }); + } + + #[gpui::test] + fn test_default_settings_for_builtin_agent(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(matches!( + default_settings_for_agent(CODEX_NAME, cx), + settings::CustomAgentServerSettings::Registry { .. } + )); + assert!(matches!( + default_settings_for_agent(CLAUDE_AGENT_NAME, cx), + settings::CustomAgentServerSettings::Registry { .. } + )); + assert!(matches!( + default_settings_for_agent(GEMINI_NAME, cx), + settings::CustomAgentServerSettings::Registry { .. } + )); + }); + } + + #[gpui::test] + fn test_default_settings_for_extension_agent(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + assert!(matches!( + default_settings_for_agent("some-extension-agent", cx), + settings::CustomAgentServerSettings::Extension { .. 
} + )); + }); + } + + #[gpui::test] + fn test_default_settings_for_agent_in_registry(cx: &mut TestAppContext) { + init_test(cx); + init_registry_with_agents(cx, &["new-registry-agent"]); + cx.update(|cx| { + assert!(matches!( + default_settings_for_agent("new-registry-agent", cx), + settings::CustomAgentServerSettings::Registry { .. } + )); + assert!(matches!( + default_settings_for_agent("not-in-registry", cx), + settings::CustomAgentServerSettings::Extension { .. } + )); + }); + } +} diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index c5754bcd7610dbf0c858058ea726a746bef37ab1..a0150d41726c94dc830be70e006f4370de919ead 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -2,6 +2,7 @@ use crate::{AgentServer, AgentServerDelegate}; use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus}; use agent_client_protocol as acp; use futures::{FutureExt, StreamExt, channel::mpsc, select}; +use gpui::AppContext; use gpui::{Entity, TestAppContext}; use indoc::indoc; use project::{FakeFs, Project}; @@ -408,7 +409,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap(); cx.set_http_client(Arc::new(http_client)); let client = client::Client::production(cx); - language_model::init(client, cx); + let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); + language_model::init(user_store, client, cx); #[cfg(test)] project::agent_server_store::AllAgentServersSettings::override_global( diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index 01f74de2f2ca5be863dbe27174e5131b9b8a657c..15f35a931dedad303c46895c487655b9ddbc7496 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -30,7 +30,7 @@ util.workspace = true [dev-dependencies] fs.workspace = true gpui = { workspace = true, features = ["test-support"] } 
-paths.workspace = true + serde_json_lenient.workspace = true serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 3e46e14b53c46a2aec3ac9552246a10ffc2aeee9..8b06417d2f5812ef2e0fb265e6afa4cfeb26eb3f 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -121,7 +121,7 @@ acp_thread = { workspace = true, features = ["test-support"] } agent = { workspace = true, features = ["test-support"] } assistant_text_thread = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } -clock.workspace = true + db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } eval_utils.workspace = true @@ -132,11 +132,9 @@ languages = { workspace = true, features = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] } pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } -remote_connection = { workspace = true, features = ["test-support"] } -title_bar = { workspace = true, features = ["test-support"] } + semver.workspace = true reqwest_client.workspace = true -tempfile.workspace = true + tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index 1c99f665ab1c8fc995d47682f92365852bbc9637..be6fcb5bd2b5eeb4d33f4aaefc31cfeb4a978564 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -172,12 +172,7 @@ impl PickerDelegate for ToolPickerDelegate { self.selected_index = ix; } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, 
_window: &mut Window, _cx: &mut Context>) -> bool { let item = &self.filtered_items[ix]; match item { PickerItem::Tool { .. } => true, diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 8fa68b0c510c086d7c6e224b24675e6f19344b82..13e62eb502de1d4bf454b47b216374a0abf2bc79 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -831,6 +831,7 @@ fn render_diff_hunk_controls( &snapshot, position, Direction::Next, + true, window, cx, ); @@ -866,6 +867,7 @@ fn render_diff_hunk_controls( &snapshot, point, Direction::Prev, + true, window, cx, ); diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 0f1cd3ebf0fdf1df939ccc6f2b0d1a40545bf082..c49b7f668ab12ad4d2b04e8ec48488f7afab3c1c 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -9,7 +9,7 @@ use std::{ time::Duration, }; -use acp_thread::{AcpThread, AgentSessionInfo, MentionUri}; +use acp_thread::{AcpThread, MentionUri, ThreadStatus}; use agent::{ContextServerRegistry, SharedThread, ThreadStore}; use agent_client_protocol as acp; use agent_servers::AgentServer; @@ -22,7 +22,7 @@ use project::{ use serde::{Deserialize, Serialize}; use settings::{LanguageModelProviderSetting, LanguageModelSelection}; -use feature_flags::{AgentGitWorktreesFeatureFlag, AgentV2FeatureFlag, FeatureFlagAppExt as _}; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _}; use zed_actions::agent::{OpenClaudeAgentOnboardingModal, ReauthenticateAgent, ReviewBranchDiff}; use crate::ManageProfiles; @@ -31,7 +31,7 @@ use crate::{ AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow, InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn, - ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, + ToggleNavigationMenu, ToggleNewThreadMenu, 
ToggleOptionsMenu, ToggleStartThreadInSelector, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, connection_view::{AcpThreadViewEvent, ThreadView}, slash_command::SlashCommandCompletionProvider, @@ -39,7 +39,8 @@ use crate::{ ui::EndTrialUpsell, }; use crate::{ - AgentInitialContent, ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, + AgentInitialContent, ExternalAgent, ExternalSourcePrompt, NewExternalAgentThread, + NewNativeAgentThreadFromSummary, }; use crate::{ ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent, @@ -52,6 +53,7 @@ use assistant_slash_command::SlashCommandWorkingSet; use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary}; use client::UserStore; use cloud_api_types::Plan; +use collections::HashMap; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; use extension::ExtensionEvents; use extension_host::ExtensionStore; @@ -72,8 +74,9 @@ use search::{BufferSearchBar, buffer_search}; use settings::{Settings, update_settings_file}; use theme::ThemeSettings; use ui::{ - Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu, - PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize, + Button, ButtonLike, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, + PopoverMenu, PopoverMenuHandle, SpinnerLabel, Tab, TintColor, Tooltip, prelude::*, + utils::WithRemSize, }; use util::ResultExt as _; use workspace::{ @@ -189,7 +192,16 @@ pub fn init(cx: &mut App) { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); panel.update(cx, |panel, cx| { - panel.external_thread(action.agent.clone(), None, None, window, cx) + panel.external_thread( + action.agent.clone(), + None, + None, + None, + None, + true, + window, + cx, + ) }); } }) @@ -207,7 +219,7 @@ pub fn init(cx: &mut App) { .register_action(|workspace, _: &OpenAgentDiff, window, cx| { let thread = workspace 
.panel::(cx) - .and_then(|panel| panel.read(cx).active_thread_view().cloned()) + .and_then(|panel| panel.read(cx).active_connection_view().cloned()) .and_then(|thread_view| { thread_view .read(cx) @@ -243,6 +255,18 @@ pub fn init(cx: &mut App) { }); } }) + .register_action(|workspace, _: &ToggleStartThreadInSelector, window, cx| { + if let Some(panel) = workspace.panel::(cx) { + workspace.focus_panel::(window, cx); + panel.update(cx, |panel, cx| { + panel.toggle_start_thread_in_selector( + &ToggleStartThreadInSelector, + window, + cx, + ); + }); + } + }) .register_action(|workspace, _: &OpenAcpOnboardingModal, window, cx| { AcpOnboardingModal::toggle(workspace, window, cx) }) @@ -320,12 +344,15 @@ pub fn init(cx: &mut App) { panel.update(cx, |panel, cx| { panel.external_thread( + None, + None, None, None, Some(AgentInitialContent::ContentBlock { blocks: content_blocks, auto_submit: true, }), + true, window, cx, ); @@ -373,7 +400,7 @@ enum WhichFontSize { } // TODO unify this with ExternalAgent -#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, PartialEq, Serialize)] pub enum AgentType { #[default] NativeAgent, @@ -383,6 +410,63 @@ pub enum AgentType { }, } +// Custom impl handles legacy variant names from before the built-in agents were moved to +// the registry: "ClaudeAgent" -> Custom { name: "claude-acp" }, "Codex" -> Custom { name: +// "codex-acp" }, "Gemini" -> Custom { name: "gemini" }. +// Can be removed at some point in the future and go back to #[derive(Deserialize)]. 
+impl<'de> Deserialize<'de> for AgentType { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = serde_json::Value::deserialize(deserializer)?; + + if let Some(s) = value.as_str() { + return match s { + "NativeAgent" => Ok(Self::NativeAgent), + "TextThread" => Ok(Self::TextThread), + "ClaudeAgent" | "ClaudeCode" => Ok(Self::Custom { + name: CLAUDE_AGENT_NAME.into(), + }), + "Codex" => Ok(Self::Custom { + name: CODEX_NAME.into(), + }), + "Gemini" => Ok(Self::Custom { + name: GEMINI_NAME.into(), + }), + other => Err(serde::de::Error::unknown_variant( + other, + &[ + "NativeAgent", + "TextThread", + "Custom", + "ClaudeAgent", + "ClaudeCode", + "Codex", + "Gemini", + ], + )), + }; + } + + if let Some(obj) = value.as_object() { + if let Some(inner) = obj.get("Custom") { + #[derive(Deserialize)] + struct CustomFields { + name: SharedString, + } + let fields: CustomFields = + serde_json::from_value(inner.clone()).map_err(serde::de::Error::custom)?; + return Ok(Self::Custom { name: fields.name }); + } + } + + Err(serde::de::Error::custom( + "expected a string variant or {\"Custom\": {\"name\": ...}}", + )) + } +} + impl AgentType { pub fn is_native(&self) -> bool { matches!(self, Self::NativeAgent) @@ -415,17 +499,10 @@ impl From for AgentType { impl StartThreadIn { fn label(&self) -> SharedString { match self { - Self::LocalProject => "Local Project".into(), + Self::LocalProject => "Current Project".into(), Self::NewWorktree => "New Worktree".into(), } } - - fn icon(&self) -> IconName { - match self { - Self::LocalProject => IconName::Screen, - Self::NewWorktree => IconName::GitBranchPlus, - } - } } #[derive(Clone, Debug)] @@ -553,7 +630,7 @@ pub struct AgentPanel { focus_handle: FocusHandle, active_view: ActiveView, previous_view: Option, - _active_view_observation: Option, + background_threads: HashMap>, new_thread_menu_handle: PopoverMenuHandle, start_thread_in_menu_handle: PopoverMenuHandle, agent_panel_menu_handle: 
PopoverMenuHandle, @@ -569,10 +646,12 @@ pub struct AgentPanel { start_thread_in: StartThreadIn, worktree_creation_status: Option, _thread_view_subscription: Option, + _active_thread_focus_subscription: Option, _worktree_creation_task: Option>, show_trust_workspace_message: bool, last_configuration_error_telemetry: Option, on_boarding_upsell_dismissed: AtomicBool, + _active_view_observation: Option, } impl AgentPanel { @@ -695,7 +774,7 @@ impl AgentPanel { } if let Some(start_thread_in) = serialized_panel.start_thread_in { let is_worktree_flag_enabled = - cx.has_flag::(); + cx.has_flag::(); let is_valid = match &start_thread_in { StartThreadIn::LocalProject => true, StartThreadIn::NewWorktree => { @@ -718,16 +797,9 @@ impl AgentPanel { if let Some(thread_info) = last_active_thread { let agent_type = thread_info.agent_type.clone(); - let session_info = AgentSessionInfo { - session_id: acp::SessionId::new(thread_info.session_id), - cwd: thread_info.cwd, - title: thread_info.title.map(SharedString::from), - updated_at: None, - meta: None, - }; panel.update(cx, |panel, cx| { panel.selected_agent = agent_type; - panel.load_agent_thread(session_info, window, cx); + panel.load_agent_thread_inner(thread_info.session_id.into(), thread_info.cwd, thread_info.title.map(SharedString::from), false, window, cx); }); } panel @@ -764,7 +836,13 @@ impl AgentPanel { window, |this, _, event, window, cx| match event { ThreadHistoryEvent::Open(thread) => { - this.load_agent_thread(thread.clone(), window, cx); + this.load_agent_thread( + thread.session_id.clone(), + thread.cwd.clone(), + thread.title.clone(), + window, + cx, + ); } }, ) @@ -877,7 +955,7 @@ impl AgentPanel { focus_handle: cx.focus_handle(), context_server_registry, previous_view: None, - _active_view_observation: None, + background_threads: HashMap::default(), new_thread_menu_handle: PopoverMenuHandle::default(), start_thread_in_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: 
PopoverMenuHandle::default(), @@ -896,10 +974,12 @@ impl AgentPanel { start_thread_in: StartThreadIn::default(), worktree_creation_status: None, _thread_view_subscription: None, + _active_thread_focus_subscription: None, _worktree_creation_task: None, show_trust_workspace_message: false, last_configuration_error_telemetry: None, on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()), + _active_view_observation: None, }; // Initial sync of agent servers from extensions @@ -951,14 +1031,19 @@ impl AgentPanel { pub fn open_thread( &mut self, - thread: AgentSessionInfo, + session_id: acp::SessionId, + cwd: Option, + title: Option, window: &mut Window, cx: &mut Context, ) { self.external_thread( Some(crate::ExternalAgent::NativeAgent), - Some(thread), + Some(session_id), + cwd, + title, None, + true, window, cx, ); @@ -985,7 +1070,7 @@ impl AgentPanel { .unwrap_or(false) } - pub(crate) fn active_thread_view(&self) -> Option<&Entity> { + pub fn active_connection_view(&self) -> Option<&Entity> { match &self.active_view { ActiveView::AgentThread { server_view, .. 
} => Some(server_view), ActiveView::Uninitialized @@ -995,7 +1080,7 @@ impl AgentPanel { } } - fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context) { + pub fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context) { self.new_agent_thread(AgentType::NativeAgent, window, cx); } @@ -1016,7 +1101,13 @@ impl AgentPanel { self.external_thread( Some(ExternalAgent::NativeAgent), None, - Some(AgentInitialContent::ThreadSummary(thread)), + None, + None, + Some(AgentInitialContent::ThreadSummary { + session_id: thread.session_id, + title: thread.title, + }), + true, window, cx, ); @@ -1068,8 +1159,11 @@ impl AgentPanel { fn external_thread( &mut self, agent_choice: Option, - resume_thread: Option, + resume_session_id: Option, + cwd: Option, + title: Option, initial_content: Option, + focus: bool, window: &mut Window, cx: &mut Context, ) { @@ -1087,62 +1181,75 @@ impl AgentPanel { let thread_store = self.thread_store.clone(); - cx.spawn_in(window, async move |this, cx| { - let ext_agent = match agent_choice { - Some(agent) => { - cx.background_spawn({ - let agent = agent.clone(); - async move { - if let Some(serialized) = - serde_json::to_string(&LastUsedExternalAgent { agent }).log_err() - { - KEY_VALUE_STORE - .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized) - .await - .log_err(); - } - } - }) - .detach(); - - agent - } - None => { - if is_via_collab { - ExternalAgent::NativeAgent - } else { - cx.background_spawn(async move { - KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) - }) - .await - .log_err() - .flatten() - .and_then(|value| { - serde_json::from_str::(&value).log_err() - }) - .map(|agent| agent.agent) - .unwrap_or(ExternalAgent::NativeAgent) + if let Some(agent) = agent_choice { + cx.background_spawn({ + let agent = agent.clone(); + async move { + if let Some(serialized) = + serde_json::to_string(&LastUsedExternalAgent { agent }).log_err() + { + KEY_VALUE_STORE + 
.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized) + .await + .log_err(); } } - }; + }) + .detach(); - let server = ext_agent.server(fs, thread_store); - this.update_in(cx, |agent_panel, window, cx| { - agent_panel.create_external_thread( - server, - resume_thread, - initial_content, - workspace, - project, - ext_agent, - window, - cx, - ); - })?; + let server = agent.server(fs, thread_store); + self.create_external_thread( + server, + resume_session_id, + cwd, + title, + initial_content, + workspace, + project, + agent, + focus, + window, + cx, + ); + } else { + cx.spawn_in(window, async move |this, cx| { + let ext_agent = if is_via_collab { + ExternalAgent::NativeAgent + } else { + cx.background_spawn(async move { + KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) + }) + .await + .log_err() + .flatten() + .and_then(|value| { + serde_json::from_str::(&value).log_err() + }) + .map(|agent| agent.agent) + .unwrap_or(ExternalAgent::NativeAgent) + }; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); + let server = ext_agent.server(fs, thread_store); + this.update_in(cx, |agent_panel, window, cx| { + agent_panel.create_external_thread( + server, + resume_session_id, + cwd, + title, + initial_content, + workspace, + project, + ext_agent, + focus, + window, + cx, + ); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } } fn deploy_rules_library( @@ -1170,7 +1277,7 @@ impl AgentPanel { } fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context) { - let Some(thread_view) = self.active_thread_view() else { + let Some(thread_view) = self.active_connection_view() else { return; }; @@ -1309,6 +1416,15 @@ impl AgentPanel { self.new_thread_menu_handle.toggle(window, cx); } + pub fn toggle_start_thread_in_selector( + &mut self, + _: &ToggleStartThreadInSelector, + window: &mut Window, + cx: &mut Context, + ) { + self.start_thread_in_menu_handle.toggle(window, cx); + } + pub fn increase_font_size( &mut self, action: 
&IncreaseBufferFontSize, @@ -1429,7 +1545,7 @@ impl AgentPanel { cx: &mut Context, ) { if let Some(workspace) = self.workspace.upgrade() - && let Some(thread_view) = self.active_thread_view() + && let Some(thread_view) = self.active_connection_view() && let Some(active_thread) = thread_view.read(cx).active_thread().cloned() { active_thread.update(cx, |thread, cx| { @@ -1549,16 +1665,8 @@ impl AgentPanel { }) .await?; - let thread_metadata = acp_thread::AgentSessionInfo { - session_id, - cwd: None, - title: Some(title), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; - this.update_in(cx, |this, window, cx| { - this.open_thread(thread_metadata, window, cx); + this.open_thread(session_id, None, Some(title), window, cx); })?; this.update_in(cx, |_, _window, cx| { @@ -1650,6 +1758,53 @@ impl AgentPanel { } } + /// Returns the primary thread views for all retained connections: the + pub fn is_background_thread(&self, session_id: &acp::SessionId) -> bool { + self.background_threads.contains_key(session_id) + } + + /// active thread plus any background threads that are still running or + /// completed but unseen. 
+ pub fn parent_threads(&self, cx: &App) -> Vec> { + let mut views = Vec::new(); + + if let Some(server_view) = self.as_active_server_view() { + if let Some(thread_view) = server_view.read(cx).parent_thread(cx) { + views.push(thread_view); + } + } + + for server_view in self.background_threads.values() { + if let Some(thread_view) = server_view.read(cx).parent_thread(cx) { + views.push(thread_view); + } + } + + views + } + + fn retain_running_thread(&mut self, old_view: ActiveView, cx: &mut Context) { + let ActiveView::AgentThread { server_view } = old_view else { + return; + }; + + let Some(thread_view) = server_view.read(cx).parent_thread(cx) else { + return; + }; + + let thread = &thread_view.read(cx).thread; + let (status, session_id) = { + let thread = thread.read(cx); + (thread.status(), thread.session_id().clone()) + }; + + if status != ThreadStatus::Generating { + return; + } + + self.background_threads.insert(session_id, server_view); + } + pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option> { match &self.active_view { ActiveView::AgentThread { server_view, .. 
} => { @@ -1688,18 +1843,21 @@ impl AgentPanel { let current_is_config = matches!(self.active_view, ActiveView::Configuration); let new_is_config = matches!(new_view, ActiveView::Configuration); - let current_is_special = current_is_history || current_is_config; - let new_is_special = new_is_history || new_is_config; + let current_is_overlay = current_is_history || current_is_config; + let new_is_overlay = new_is_history || new_is_config; - if current_is_uninitialized || (current_is_special && !new_is_special) { + if current_is_uninitialized || (current_is_overlay && !new_is_overlay) { self.active_view = new_view; - } else if !current_is_special && new_is_special { + } else if !current_is_overlay && new_is_overlay { self.previous_view = Some(std::mem::replace(&mut self.active_view, new_view)); } else { - if !new_is_special { - self.previous_view = None; + let old_view = std::mem::replace(&mut self.active_view, new_view); + if !new_is_overlay { + if let Some(previous) = self.previous_view.take() { + self.retain_running_thread(previous, cx); + } } - self.active_view = new_view; + self.retain_running_thread(old_view, cx); } // Subscribe to the active ThreadView's events (e.g. 
FirstSendRequested) @@ -1710,6 +1868,12 @@ impl AgentPanel { ActiveView::AgentThread { server_view } => { self._thread_view_subscription = Self::subscribe_to_active_thread_view(server_view, window, cx); + let focus_handle = server_view.focus_handle(cx); + self._active_thread_focus_subscription = + Some(cx.on_focus_in(&focus_handle, window, |_this, _window, cx| { + cx.emit(AgentPanelEvent::ThreadFocused); + cx.notify(); + })); Some( cx.observe_in(server_view, window, |this, server_view, window, cx| { this._thread_view_subscription = @@ -1722,6 +1886,7 @@ impl AgentPanel { } _ => { self._thread_view_subscription = None; + self._active_thread_focus_subscription = None; None } }; @@ -1783,7 +1948,13 @@ impl AgentPanel { let entry = entry.clone(); panel .update(cx, move |this, cx| { - this.load_agent_thread(entry.clone(), window, cx); + this.load_agent_thread( + entry.session_id.clone(), + entry.cwd.clone(), + entry.title.clone(), + window, + cx, + ); }) .ok(); } @@ -1860,9 +2031,7 @@ impl AgentPanel { } fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context) { - if matches!(action, StartThreadIn::NewWorktree) - && !cx.has_flag::() - { + if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::() { return; } @@ -1920,19 +2089,19 @@ impl AgentPanel { } } - pub fn new_external_thread_with_text( + pub fn new_agent_thread_with_external_source_prompt( &mut self, - initial_text: Option, + external_source_prompt: Option, window: &mut Window, cx: &mut Context, ) { self.external_thread( None, None, - initial_text.map(|text| AgentInitialContent::ContentBlock { - blocks: vec![acp::ContentBlock::Text(acp::TextContent::new(text))], - auto_submit: false, - }), + None, + None, + external_source_prompt.map(AgentInitialContent::from), + true, window, cx, ); @@ -1943,6 +2112,16 @@ impl AgentPanel { agent: AgentType, window: &mut Window, cx: &mut Context, + ) { + self.new_agent_thread_inner(agent, true, window, cx); + } + + fn new_agent_thread_inner( + &mut 
self, + agent: AgentType, + focus: bool, + window: &mut Window, + cx: &mut Context, ) { match agent { AgentType::TextThread => { @@ -1952,6 +2131,9 @@ impl AgentPanel { Some(crate::ExternalAgent::NativeAgent), None, None, + None, + None, + focus, window, cx, ), @@ -1959,6 +2141,9 @@ impl AgentPanel { Some(crate::ExternalAgent::Custom { name }), None, None, + None, + None, + focus, window, cx, ), @@ -1967,24 +2152,80 @@ impl AgentPanel { pub fn load_agent_thread( &mut self, - thread: AgentSessionInfo, + session_id: acp::SessionId, + cwd: Option, + title: Option, + window: &mut Window, + cx: &mut Context, + ) { + self.load_agent_thread_inner(session_id, cwd, title, true, window, cx); + } + + fn load_agent_thread_inner( + &mut self, + session_id: acp::SessionId, + cwd: Option, + title: Option, + focus: bool, window: &mut Window, cx: &mut Context, ) { + if let Some(server_view) = self.background_threads.remove(&session_id) { + self.set_active_view(ActiveView::AgentThread { server_view }, focus, window, cx); + return; + } + + if let ActiveView::AgentThread { server_view } = &self.active_view { + if server_view + .read(cx) + .active_thread() + .map(|t| t.read(cx).id.clone()) + == Some(session_id.clone()) + { + cx.emit(AgentPanelEvent::ActiveViewChanged); + return; + } + } + + if let Some(ActiveView::AgentThread { server_view }) = &self.previous_view { + if server_view + .read(cx) + .active_thread() + .map(|t| t.read(cx).id.clone()) + == Some(session_id.clone()) + { + let view = self.previous_view.take().unwrap(); + self.set_active_view(view, focus, window, cx); + return; + } + } + let Some(agent) = self.selected_external_agent() else { return; }; - self.external_thread(Some(agent), Some(thread), None, window, cx); + self.external_thread( + Some(agent), + Some(session_id), + cwd, + title, + None, + focus, + window, + cx, + ); } pub(crate) fn create_external_thread( &mut self, server: Rc, - resume_thread: Option, + resume_session_id: Option, + cwd: Option, + title: Option, 
initial_content: Option, workspace: WeakEntity, project: Entity, ext_agent: ExternalAgent, + focus: bool, window: &mut Window, cx: &mut Context, ) { @@ -2002,7 +2243,9 @@ impl AgentPanel { let server_view = cx.new(|cx| { crate::ConnectionView::new( server, - resume_thread, + resume_session_id, + cwd, + title, initial_content, workspace.clone(), project, @@ -2014,7 +2257,21 @@ impl AgentPanel { ) }); - self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx); + cx.observe(&server_view, |this, server_view, cx| { + let is_active = this + .as_active_server_view() + .is_some_and(|active| active.entity_id() == server_view.entity_id()); + if is_active { + cx.emit(AgentPanelEvent::ActiveViewChanged); + this.serialize(cx); + } else { + cx.emit(AgentPanelEvent::BackgroundThreadChanged); + } + cx.notify(); + }) + .detach(); + + self.set_active_view(ActiveView::AgentThread { server_view }, focus, window, cx); } fn active_thread_has_messages(&self, cx: &App) -> bool { @@ -2499,7 +2756,16 @@ impl AgentPanel { workspace.focus_panel::(window, cx); if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.external_thread(None, None, Some(initial_content), window, cx); + panel.external_thread( + None, + None, + None, + None, + Some(initial_content), + true, + window, + cx, + ); }); } }); @@ -2547,6 +2813,8 @@ fn agent_panel_dock_position(cx: &App) -> DockPosition { pub enum AgentPanelEvent { ActiveViewChanged, + ThreadFocused, + BackgroundThreadChanged, } impl EventEmitter for AgentPanel {} @@ -2606,7 +2874,7 @@ impl Panel for AgentPanel { ) { let selected_agent = self.selected_agent.clone(); - self.new_agent_thread(selected_agent, window, cx); + self.new_agent_thread_inner(selected_agent, false, window, cx); } } @@ -2989,6 +3257,7 @@ impl AgentPanel { } fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement { + let focus_handle = self.focus_handle(cx); let has_git_repo = self.project_has_git_repository(cx); 
let is_via_collab = self.project.read(cx).is_via_collab(); @@ -3007,12 +3276,11 @@ impl AgentPanel { }; let trigger_button = Button::new("thread-target-trigger", trigger_label) - .label_size(LabelSize::Small) - .color(Color::Muted) .icon(icon) .icon_size(IconSize::XSmall) .icon_position(IconPosition::End) .icon_color(Color::Muted) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .disabled(is_creating); let dock_position = AgentSettings::get_global(cx).dock; @@ -3024,22 +3292,26 @@ impl AgentPanel { }; PopoverMenu::new("thread-target-selector") - .trigger(trigger_button) - .anchor(gpui::Corner::BottomRight) - .with_handle(self.start_thread_in_menu_handle.clone()) + .trigger_with_tooltip(trigger_button, { + move |_window, cx| { + Tooltip::for_action_in( + "Start Thread In…", + &ToggleStartThreadInSelector, + &focus_handle, + cx, + ) + } + }) .menu(move |window, cx| { - let current_target = current_target; - Some(ContextMenu::build(window, cx, move |menu, _window, _cx| { - let is_local_selected = current_target == StartThreadIn::LocalProject; - let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree; + let is_local_selected = current_target == StartThreadIn::LocalProject; + let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree; + Some(ContextMenu::build(window, cx, move |menu, _window, _cx| { let new_worktree_disabled = !has_git_repo || is_via_collab; menu.header("Start Thread In…") .item( - ContextMenuEntry::new("Local Project") - .icon(StartThreadIn::LocalProject.icon()) - .icon_color(Color::Muted) + ContextMenuEntry::new("Current Project") .toggleable(IconPosition::End, is_local_selected) .handler(|window, cx| { window @@ -3048,8 +3320,6 @@ impl AgentPanel { ) .item({ let entry = ContextMenuEntry::new("New Worktree") - .icon(StartThreadIn::NewWorktree.icon()) - .icon_color(Color::Muted) .toggleable(IconPosition::End, is_new_worktree_selected) .disabled(new_worktree_disabled) .handler(|window, cx| { @@ -3075,6 
+3345,12 @@ impl AgentPanel { }) })) }) + .with_handle(self.start_thread_in_menu_handle.clone()) + .anchor(Corner::TopLeft) + .offset(gpui::Point { + x: px(1.0), + y: px(1.0), + }) } fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { @@ -3102,77 +3378,179 @@ impl AgentPanel { | ActiveView::Configuration => None, }; - let new_thread_menu = PopoverMenu::new("new_thread_menu") - .trigger_with_tooltip( - IconButton::new("new_thread_menu_btn", IconName::Plus).icon_size(IconSize::Small), - { - let focus_handle = focus_handle.clone(); - move |_window, cx| { - Tooltip::for_action_in( - "New Thread…", - &ToggleNewThreadMenu, - &focus_handle, - cx, + let new_thread_menu_builder: Rc< + dyn Fn(&mut Window, &mut App) -> Option>, + > = { + let selected_agent = self.selected_agent.clone(); + let is_agent_selected = move |agent_type: AgentType| selected_agent == agent_type; + + let workspace = self.workspace.clone(); + let is_via_collab = workspace + .update(cx, |workspace, cx| { + workspace.project().read(cx).is_via_collab() + }) + .unwrap_or_default(); + + let focus_handle = focus_handle.clone(); + let agent_server_store = agent_server_store; + + Rc::new(move |window, cx| { + telemetry::event!("New Thread Clicked"); + + let active_thread = active_thread.clone(); + Some(ContextMenu::build(window, cx, |menu, _window, cx| { + menu.context(focus_handle.clone()) + .when_some(active_thread, |this, active_thread| { + let thread = active_thread.read(cx); + + if !thread.is_empty() { + let session_id = thread.id().clone(); + this.item( + ContextMenuEntry::new("New From Summary") + .icon(IconName::ThreadFromSummary) + .icon_color(Color::Muted) + .handler(move |window, cx| { + window.dispatch_action( + Box::new(NewNativeAgentThreadFromSummary { + from_session_id: session_id.clone(), + }), + cx, + ); + }), + ) + } else { + this + } + }) + .item( + ContextMenuEntry::new("Zed Agent") + .when( + is_agent_selected(AgentType::NativeAgent) + | 
is_agent_selected(AgentType::TextThread), + |this| { + this.action(Box::new(NewExternalAgentThread { + agent: None, + })) + }, + ) + .icon(IconName::ZedAgent) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.new_agent_thread( + AgentType::NativeAgent, + window, + cx, + ); + }); + } + }); + } + } + }), ) - } - }, - ) - .anchor(Corner::TopRight) - .with_handle(self.new_thread_menu_handle.clone()) - .menu({ - let selected_agent = self.selected_agent.clone(); - let is_agent_selected = move |agent_type: AgentType| selected_agent == agent_type; + .item( + ContextMenuEntry::new("Text Thread") + .action(NewTextThread.boxed_clone()) + .icon(IconName::TextThread) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.new_agent_thread( + AgentType::TextThread, + window, + cx, + ); + }); + } + }); + } + } + }), + ) + .separator() + .header("External Agents") + .map(|mut menu| { + let agent_server_store = agent_server_store.read(cx); + let registry_store = + project::AgentRegistryStore::try_global(cx); + let registry_store_ref = + registry_store.as_ref().map(|s| s.read(cx)); + + struct AgentMenuItem { + id: ExternalAgentServerName, + display_name: SharedString, + } - let workspace = self.workspace.clone(); - let is_via_collab = workspace - .update(cx, |workspace, cx| { - workspace.project().read(cx).is_via_collab() - }) - .unwrap_or_default(); + let agent_items = agent_server_store + .external_agents() + .map(|name| { + let display_name = agent_server_store + .agent_display_name(name) + .or_else(|| { + registry_store_ref + 
.as_ref() + .and_then(|store| store.agent(name.0.as_ref())) + .map(|a| a.name().clone()) + }) + .unwrap_or_else(|| name.0.clone()); + AgentMenuItem { + id: name.clone(), + display_name, + } + }) + .sorted_unstable_by_key(|e| e.display_name.to_lowercase()) + .collect::>(); - move |window, cx| { - telemetry::event!("New Thread Clicked"); - - let active_thread = active_thread.clone(); - Some(ContextMenu::build(window, cx, |menu, _window, cx| { - menu.context(focus_handle.clone()) - .when_some(active_thread, |this, active_thread| { - let thread = active_thread.read(cx); - - if !thread.is_empty() { - let session_id = thread.id().clone(); - this.item( - ContextMenuEntry::new("New From Summary") - .icon(IconName::ThreadFromSummary) - .icon_color(Color::Muted) - .handler(move |window, cx| { - window.dispatch_action( - Box::new(NewNativeAgentThreadFromSummary { - from_session_id: session_id.clone(), - }), - cx, - ); - }), - ) + for item in &agent_items { + let mut entry = + ContextMenuEntry::new(item.display_name.clone()); + + let icon_path = agent_server_store + .agent_icon(&item.id) + .or_else(|| { + registry_store_ref + .as_ref() + .and_then(|store| store.agent(item.id.0.as_str())) + .and_then(|a| a.icon_path().cloned()) + }); + + if let Some(icon_path) = icon_path { + entry = entry.custom_icon_svg(icon_path); } else { - this + entry = entry.icon(IconName::Sparkle); } - }) - .item( - ContextMenuEntry::new("Zed Agent") + + entry = entry .when( - is_agent_selected(AgentType::NativeAgent) - | is_agent_selected(AgentType::TextThread), + is_agent_selected(AgentType::Custom { + name: item.id.0.clone(), + }), |this| { - this.action(Box::new(NewExternalAgentThread { - agent: None, - })) + this.action(Box::new( + NewExternalAgentThread { agent: None }, + )) }, ) - .icon(IconName::ZedAgent) .icon_color(Color::Muted) + .disabled(is_via_collab) .handler({ let workspace = workspace.clone(); + let agent_id = item.id.clone(); move |window, cx| { if let Some(workspace) = 
workspace.upgrade() { workspace.update(cx, |workspace, cx| { @@ -3181,7 +3559,9 @@ impl AgentPanel { { panel.update(cx, |panel, cx| { panel.new_agent_thread( - AgentType::NativeAgent, + AgentType::Custom { + name: agent_id.0.clone(), + }, window, cx, ); @@ -3190,16 +3570,84 @@ impl AgentPanel { }); } } - }), - ) - .item( - ContextMenuEntry::new("Text Thread") - .action(NewTextThread.boxed_clone()) - .icon(IconName::TextThread) + }); + + menu = menu.item(entry); + } + + menu + }) + .separator() + .map(|mut menu| { + let agent_server_store = agent_server_store.read(cx); + let registry_store = + project::AgentRegistryStore::try_global(cx); + let registry_store_ref = + registry_store.as_ref().map(|s| s.read(cx)); + + let previous_built_in_ids: &[ExternalAgentServerName] = + &[CLAUDE_AGENT_NAME.into(), CODEX_NAME.into(), GEMINI_NAME.into()]; + + let promoted_items = previous_built_in_ids + .iter() + .filter(|id| { + !agent_server_store.external_agents.contains_key(*id) + }) + .filter_map(|name| { + let display_name = registry_store_ref + .as_ref() + .and_then(|store| store.agent(name.0.as_ref())) + .map(|a| a.name().clone())?; + Some((name.clone(), display_name)) + }) + .sorted_unstable_by_key(|(_, display_name)| display_name.to_lowercase()) + .collect::>(); + + for (agent_id, display_name) in &promoted_items { + let mut entry = + ContextMenuEntry::new(display_name.clone()); + + let icon_path = registry_store_ref + .as_ref() + .and_then(|store| store.agent(agent_id.0.as_str())) + .and_then(|a| a.icon_path().cloned()); + + if let Some(icon_path) = icon_path { + entry = entry.custom_icon_svg(icon_path); + } else { + entry = entry.icon(IconName::Sparkle); + } + + entry = entry .icon_color(Color::Muted) + .disabled(is_via_collab) .handler({ let workspace = workspace.clone(); + let agent_id = agent_id.clone(); move |window, cx| { + let fs = ::global(cx); + let agent_id_string = + agent_id.to_string(); + settings::update_settings_file( + fs, + cx, + move |settings, _| { + let 
agent_servers = settings + .agent_servers + .get_or_insert_default(); + agent_servers.entry(agent_id_string).or_insert_with(|| { + settings::CustomAgentServerSettings::Registry { + default_mode: None, + default_model: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + }); + }, + ); + if let Some(workspace) = workspace.upgrade() { workspace.update(cx, |workspace, cx| { if let Some(panel) = @@ -3207,7 +3655,9 @@ impl AgentPanel { { panel.update(cx, |panel, cx| { panel.new_agent_thread( - AgentType::TextThread, + AgentType::Custom { + name: agent_id.0.clone(), + }, window, cx, ); @@ -3216,223 +3666,39 @@ impl AgentPanel { }); } } - }), - ) - .separator() - .header("External Agents") - .map(|mut menu| { - let agent_server_store = agent_server_store.read(cx); - let registry_store = - project::AgentRegistryStore::try_global(cx); - let registry_store_ref = - registry_store.as_ref().map(|s| s.read(cx)); - - struct AgentMenuItem { - id: ExternalAgentServerName, - display_name: SharedString, - } - - let agent_items = agent_server_store - .external_agents() - .map(|name| { - let display_name = agent_server_store - .agent_display_name(name) - .or_else(|| { - registry_store_ref - .as_ref() - .and_then(|store| store.agent(name.0.as_ref())) - .map(|a| a.name().clone()) - }) - .unwrap_or_else(|| name.0.clone()); - AgentMenuItem { - id: name.clone(), - display_name, - } - }) - .sorted_unstable_by_key(|e| e.display_name.to_lowercase()) - .collect::>(); - - for item in &agent_items { - let mut entry = - ContextMenuEntry::new(item.display_name.clone()); - - let icon_path = agent_server_store - .agent_icon(&item.id) - .or_else(|| { - registry_store_ref - .as_ref() - .and_then(|store| store.agent(item.id.0.as_str())) - .and_then(|a| a.icon_path().cloned()) - }); + }); - if let Some(icon_path) = icon_path { - entry = entry.custom_icon_svg(icon_path); - } else { - entry = 
entry.icon(IconName::Sparkle); - } + menu = menu.item(entry); + } - entry = entry - .when( - is_agent_selected(AgentType::Custom { - name: item.id.0.clone(), - }), - |this| { - this.action(Box::new( - NewExternalAgentThread { agent: None }, - )) - }, + menu + }) + .item( + ContextMenuEntry::new("Add More Agents") + .icon(IconName::Plus) + .icon_color(Color::Muted) + .handler({ + move |window, cx| { + window.dispatch_action( + Box::new(zed_actions::AcpRegistry), + cx, ) - .icon_color(Color::Muted) - .disabled(is_via_collab) - .handler({ - let workspace = workspace.clone(); - let agent_id = item.id.clone(); - move |window, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::Custom { - name: agent_id.0.clone(), - }, - window, - cx, - ); - }); - } - }); - } - } - }); - - menu = menu.item(entry); - } - - menu - }) - .separator() - .map(|mut menu| { - let agent_server_store = agent_server_store.read(cx); - let registry_store = - project::AgentRegistryStore::try_global(cx); - let registry_store_ref = - registry_store.as_ref().map(|s| s.read(cx)); - - let previous_built_in_ids: &[ExternalAgentServerName] = - &[CLAUDE_AGENT_NAME.into(), CODEX_NAME.into(), GEMINI_NAME.into()]; - - let promoted_items = previous_built_in_ids - .iter() - .filter(|id| { - !agent_server_store.external_agents.contains_key(*id) - }) - .map(|name| { - let display_name = registry_store_ref - .as_ref() - .and_then(|store| store.agent(name.0.as_ref())) - .map(|a| a.name().clone()) - .unwrap_or_else(|| name.0.clone()); - (name.clone(), display_name) - }) - .sorted_unstable_by_key(|(_, display_name)| display_name.to_lowercase()) - .collect::>(); - - for (agent_id, display_name) in &promoted_items { - let mut entry = - ContextMenuEntry::new(display_name.clone()); - - let icon_path = registry_store_ref - .as_ref() - .and_then(|store| 
store.agent(agent_id.0.as_str())) - .and_then(|a| a.icon_path().cloned()); - - if let Some(icon_path) = icon_path { - entry = entry.custom_icon_svg(icon_path); - } else { - entry = entry.icon(IconName::Sparkle); } - - entry = entry - .icon_color(Color::Muted) - .disabled(is_via_collab) - .handler({ - let workspace = workspace.clone(); - let agent_id = agent_id.clone(); - move |window, cx| { - let fs = ::global(cx); - let agent_id_string = - agent_id.to_string(); - settings::update_settings_file( - fs, - cx, - move |settings, _| { - let agent_servers = settings - .agent_servers - .get_or_insert_default(); - agent_servers.entry(agent_id_string).or_insert_with(|| { - settings::CustomAgentServerSettings::Registry { - default_mode: None, - default_model: None, - env: Default::default(), - favorite_models: Vec::new(), - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - } - }); - }, - ); - - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::Custom { - name: agent_id.0.clone(), - }, - window, - cx, - ); - }); - } - }); - } - } - }); - - menu = menu.item(entry); - } - - menu - }) - .item( - ContextMenuEntry::new("Add More Agents") - .icon(IconName::Plus) - .icon_color(Color::Muted) - .handler({ - move |window, cx| { - window.dispatch_action( - Box::new(zed_actions::AcpRegistry), - cx, - ) - } - }), - ) - })) - } - }); + }), + ) + })) + }) + }; let is_thread_loading = self - .active_thread_view() + .active_connection_view() .map(|thread| thread.read(cx).is_loading()) .unwrap_or(false); let has_custom_icon = selected_agent_custom_icon.is_some(); + let selected_agent_custom_icon_for_button = selected_agent_custom_icon.clone(); + let selected_agent_builtin_icon = self.selected_agent.icon(); + let selected_agent_label_for_tooltip = selected_agent_label.clone(); let 
selected_agent = div() .id("selected_agent_icon") @@ -3446,7 +3712,12 @@ impl AgentPanel { }) }) .tooltip(move |_, cx| { - Tooltip::with_meta(selected_agent_label.clone(), None, "Selected Agent", cx) + Tooltip::with_meta( + selected_agent_label_for_tooltip.clone(), + None, + "Selected Agent", + cx, + ) }); let selected_agent = if is_thread_loading { @@ -3465,52 +3736,168 @@ impl AgentPanel { let show_history_menu = self.history_kind_for_selected_agent(cx).is_some(); let has_v2_flag = cx.has_flag::(); + let is_empty_state = !self.active_thread_has_messages(cx); - h_flex() - .id("agent-panel-toolbar") - .h(Tab::container_height(cx)) - .max_w_full() - .flex_none() - .justify_between() - .gap_2() - .bg(cx.theme().colors().tab_bar_background) - .border_b_1() - .border_color(cx.theme().colors().border) - .child( - h_flex() - .size_full() - .gap(DynamicSpacing::Base04.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) - .child(match &self.active_view { - ActiveView::History { .. } | ActiveView::Configuration => { - self.render_toolbar_back_button(cx).into_any_element() - } - _ => selected_agent.into_any_element(), - }) - .child(self.render_title_view(window, cx)), - ) - .child( - h_flex() - .flex_none() - .gap(DynamicSpacing::Base02.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) - .pr(DynamicSpacing::Base06.rems(cx)) - .when( - has_v2_flag - && cx.has_flag::() - && !self.active_thread_has_messages(cx), - |this| this.child(self.render_start_thread_in_selector(cx)), - ) - .child(new_thread_menu) - .when(show_history_menu, |this| { - this.child(self.render_recent_entries_menu( - IconName::MenuAltTemp, - Corner::TopRight, + let is_in_history_or_config = matches!( + &self.active_view, + ActiveView::History { .. 
} | ActiveView::Configuration + ); + + let use_v2_empty_toolbar = has_v2_flag && is_empty_state && !is_in_history_or_config; + + if use_v2_empty_toolbar { + let (chevron_icon, icon_color, label_color) = + if self.new_thread_menu_handle.is_deployed() { + (IconName::ChevronUp, Color::Accent, Color::Accent) + } else { + (IconName::ChevronDown, Color::Muted, Color::Default) + }; + + let agent_icon_element: AnyElement = + if let Some(icon_path) = selected_agent_custom_icon_for_button { + Icon::from_external_svg(icon_path) + .size(IconSize::Small) + .color(icon_color) + .into_any_element() + } else { + let icon_name = selected_agent_builtin_icon.unwrap_or(IconName::ZedAgent); + Icon::new(icon_name) + .size(IconSize::Small) + .color(icon_color) + .into_any_element() + }; + + let agent_selector_button = ButtonLike::new("agent-selector-trigger") + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .child( + h_flex() + .gap_1() + .child(agent_icon_element) + .child(Label::new(selected_agent_label).color(label_color).ml_0p5()) + .child( + Icon::new(chevron_icon) + .color(icon_color) + .size(IconSize::XSmall), + ), + ); + + let agent_selector_menu = PopoverMenu::new("new_thread_menu") + .trigger_with_tooltip(agent_selector_button, { + move |_window, cx| { + Tooltip::for_action_in( + "New Thread\u{2026}", + &ToggleNewThreadMenu, + &focus_handle, cx, - )) - }) - .child(self.render_panel_options_menu(window, cx)), - ) + ) + } + }) + .menu({ + let builder = new_thread_menu_builder.clone(); + move |window, cx| builder(window, cx) + }) + .with_handle(self.new_thread_menu_handle.clone()) + .anchor(Corner::TopLeft) + .offset(gpui::Point { + x: px(1.0), + y: px(1.0), + }); + + h_flex() + .id("agent-panel-toolbar") + .h(Tab::container_height(cx)) + .max_w_full() + .flex_none() + .justify_between() + .gap_2() + .bg(cx.theme().colors().tab_bar_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + h_flex() + .size_full() + 
.gap(DynamicSpacing::Base04.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) + .child(agent_selector_menu) + .child(self.render_start_thread_in_selector(cx)), + ) + .child( + h_flex() + .flex_none() + .gap(DynamicSpacing::Base02.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) + .pr(DynamicSpacing::Base06.rems(cx)) + .when(show_history_menu, |this| { + this.child(self.render_recent_entries_menu( + IconName::MenuAltTemp, + Corner::TopRight, + cx, + )) + }) + .child(self.render_panel_options_menu(window, cx)), + ) + .into_any_element() + } else { + let new_thread_menu = PopoverMenu::new("new_thread_menu") + .trigger_with_tooltip( + IconButton::new("new_thread_menu_btn", IconName::Plus) + .icon_size(IconSize::Small), + { + move |_window, cx| { + Tooltip::for_action_in( + "New Thread\u{2026}", + &ToggleNewThreadMenu, + &focus_handle, + cx, + ) + } + }, + ) + .anchor(Corner::TopRight) + .with_handle(self.new_thread_menu_handle.clone()) + .menu(move |window, cx| new_thread_menu_builder(window, cx)); + + h_flex() + .id("agent-panel-toolbar") + .h(Tab::container_height(cx)) + .max_w_full() + .flex_none() + .justify_between() + .gap_2() + .bg(cx.theme().colors().tab_bar_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + h_flex() + .size_full() + .gap(DynamicSpacing::Base04.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) + .child(match &self.active_view { + ActiveView::History { .. 
} | ActiveView::Configuration => { + self.render_toolbar_back_button(cx).into_any_element() + } + _ => selected_agent.into_any_element(), + }) + .child(self.render_title_view(window, cx)), + ) + .child( + h_flex() + .flex_none() + .gap(DynamicSpacing::Base02.rems(cx)) + .pl(DynamicSpacing::Base04.rems(cx)) + .pr(DynamicSpacing::Base06.rems(cx)) + .child(new_thread_menu) + .when(show_history_menu, |this| { + this.child(self.render_recent_entries_menu( + IconName::MenuAltTemp, + Corner::TopRight, + cx, + )) + }) + .child(self.render_panel_options_menu(window, cx)), + ) + .into_any_element() + } } fn render_worktree_creation_status(&self, cx: &mut Context) -> Option { @@ -3979,12 +4366,13 @@ impl Render for AgentPanel { .on_action(cx.listener(Self::go_back)) .on_action(cx.listener(Self::toggle_navigation_menu)) .on_action(cx.listener(Self::toggle_options_menu)) + .on_action(cx.listener(Self::toggle_start_thread_in_selector)) .on_action(cx.listener(Self::increase_font_size)) .on_action(cx.listener(Self::decrease_font_size)) .on_action(cx.listener(Self::reset_font_size)) .on_action(cx.listener(Self::toggle_zoom)) .on_action(cx.listener(|this, _: &ReauthenticateAgent, window, cx| { - if let Some(thread_view) = this.active_thread_view() { + if let Some(thread_view) = this.active_connection_view() { thread_view.update(cx, |thread_view, cx| thread_view.reauthenticate(window, cx)) } })) @@ -4170,7 +4558,7 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { // Wait to create a new context until the workspace is no longer // being updated. cx.defer_in(window, move |panel, window, cx| { - if let Some(thread_view) = panel.active_thread_view() { + if let Some(thread_view) = panel.active_connection_view() { thread_view.update(cx, |thread_view, cx| { thread_view.insert_selections(window, cx); }); @@ -4208,7 +4596,7 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { // Wait to create a new context until the workspace is no longer // being updated. 
cx.defer_in(window, move |panel, window, cx| { - if let Some(thread_view) = panel.active_thread_view() { + if let Some(thread_view) = panel.active_connection_view() { thread_view.update(cx, |thread_view, cx| { thread_view.insert_terminal_text(text, window, cx); }); @@ -4237,6 +4625,15 @@ impl Dismissable for TrialEndUpsell { /// Test-only helper methods #[cfg(any(test, feature = "test-support"))] impl AgentPanel { + pub fn test_new( + workspace: &Workspace, + text_thread_store: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + Self::new(workspace, text_thread_store, None, window, cx) + } + /// Opens an external thread using an arbitrary AgentServer. /// /// This is a test-only helper that allows visual tests and integration tests @@ -4256,7 +4653,7 @@ impl AgentPanel { }; self.create_external_thread( - server, None, None, workspace, project, ext_agent, window, cx, + server, None, None, None, None, workspace, project, ext_agent, true, window, cx, ); } @@ -4265,7 +4662,7 @@ impl AgentPanel { /// This is a test-only accessor that exposes the private `active_thread_view()` /// method for test assertions. Not compiled into production builds. pub fn active_thread_view_for_tests(&self) -> Option<&Entity> { - self.active_thread_view() + self.active_connection_view() } /// Sets the start_thread_in value directly, bypassing validation. 
@@ -4328,6 +4725,8 @@ impl AgentPanel { mod tests { use super::*; use crate::connection_view::tests::{StubAgentServer, init_test}; + use crate::test_support::{active_session_id, open_thread_with_connection, send_message}; + use acp_thread::{StubAgentConnection, ThreadStatus}; use assistant_text_thread::TextThreadStore; use feature_flags::FeatureFlagAppExt; use fs::FakeFs; @@ -4448,7 +4847,7 @@ mod tests { "workspace A agent type should be restored" ); assert!( - panel.active_thread_view().is_some(), + panel.active_connection_view().is_some(), "workspace A should have its active thread restored" ); }); @@ -4468,7 +4867,7 @@ mod tests { "workspace B agent type should be restored" ); assert!( - panel.active_thread_view().is_none(), + panel.active_connection_view().is_none(), "workspace B should have no active thread" ); }); @@ -4521,6 +4920,172 @@ mod tests { cx.run_until_parked(); } + async fn setup_panel(cx: &mut TestAppContext) -> (Entity, VisualTestContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |mw, _cx| mw.workspace().clone()) + .unwrap(); + + let mut cx = VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(&mut cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)) + }); + + (panel, cx) + } + + #[gpui::test] + async fn test_running_thread_retained_when_navigating_away(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = 
StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), &mut cx); + send_message(&panel, &mut cx); + + let session_id_a = active_session_id(&panel, &cx); + + // Send a chunk to keep thread A generating (don't end the turn). + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Verify thread A is generating. + panel.read_with(&cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + assert_eq!(thread.read(cx).status(), ThreadStatus::Generating); + assert!(panel.background_threads.is_empty()); + }); + + // Open a new thread B — thread A should be retained in background. + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!( + panel.background_threads.len(), + 1, + "Running thread A should be retained in background_views" + ); + assert!( + panel.background_threads.contains_key(&session_id_a), + "Background view should be keyed by thread A's session ID" + ); + }); + } + + #[gpui::test] + async fn test_idle_thread_dropped_when_navigating_away(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Response".into()), + )]); + open_thread_with_connection(&panel, connection_a, &mut cx); + send_message(&panel, &mut cx); + + let weak_view_a = panel.read_with(&cx, |panel, _cx| { + panel.active_connection_view().unwrap().downgrade() + }); + + // Thread A should be idle (auto-completed via set_next_prompt_updates). 
+ panel.read_with(&cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + assert_eq!(thread.read(cx).status(), ThreadStatus::Idle); + }); + + // Open a new thread B — thread A should NOT be retained. + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + panel.read_with(&cx, |panel, _cx| { + assert!( + panel.background_threads.is_empty(), + "Idle thread A should not be retained in background_views" + ); + }); + + // Verify the old ConnectionView entity was dropped (no strong references remain). + assert!( + weak_view_a.upgrade().is_none(), + "Idle ConnectionView should have been dropped" + ); + } + + #[gpui::test] + async fn test_background_thread_promoted_via_load(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), &mut cx); + send_message(&panel, &mut cx); + + let session_id_a = active_session_id(&panel, &cx); + + // Keep thread A generating. + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Open thread B — thread A goes to background. + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + let session_id_b = active_session_id(&panel, &cx); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!(panel.background_threads.len(), 1); + assert!(panel.background_threads.contains_key(&session_id_a)); + }); + + // Load thread A back via load_agent_thread — should promote from background. + panel.update_in(&mut cx, |panel, window, cx| { + panel.load_agent_thread(session_id_a.clone(), None, None, window, cx); + }); + + // Thread A should now be the active view, promoted from background. 
+ let active_session = active_session_id(&panel, &cx); + assert_eq!( + active_session, session_id_a, + "Thread A should be the active thread after promotion" + ); + + panel.read_with(&cx, |panel, _cx| { + assert!( + !panel.background_threads.contains_key(&session_id_a), + "Promoted thread A should no longer be in background_views" + ); + assert!( + !panel.background_threads.contains_key(&session_id_b), + "Thread B (idle) should not have been retained in background_views" + ); + }); + } + #[gpui::test] async fn test_thread_target_local_project(cx: &mut TestAppContext) { init_test(cx); @@ -4635,10 +5200,7 @@ mod tests { async fn test_thread_target_serialization_round_trip(cx: &mut TestAppContext) { init_test(cx); cx.update(|cx| { - cx.update_flags( - true, - vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], - ); + cx.update_flags(true, vec!["agent-v2".to_string()]); agent::ThreadStore::init_global(cx); language_model::LanguageModelRegistry::test(cx); }); @@ -4725,100 +5287,6 @@ mod tests { }); } - #[gpui::test] - async fn test_thread_target_deserialization_falls_back_when_worktree_flag_disabled( - cx: &mut TestAppContext, - ) { - init_test(cx); - cx.update(|cx| { - cx.update_flags( - true, - vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], - ); - agent::ThreadStore::init_global(cx); - language_model::LanguageModelRegistry::test(cx); - }); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - json!({ - ".git": {}, - "src": { - "main.rs": "fn main() {}" - } - }), - ) - .await; - fs.set_branch_name(Path::new("/project/.git"), Some("main")); - - let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; - - let multi_workspace = - cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - - let workspace = multi_workspace - .read_with(cx, |multi_workspace, _cx| { - multi_workspace.workspace().clone() - }) - .unwrap(); - - workspace.update(cx, |workspace, _cx| { - 
workspace.set_random_database_id(); - }); - - let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); - - // Wait for the project to discover the git repository. - cx.run_until_parked(); - - let panel = workspace.update_in(cx, |workspace, window, cx| { - let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); - let panel = - cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); - workspace.add_panel(panel.clone(), window, cx); - panel - }); - - cx.run_until_parked(); - - panel.update(cx, |panel, cx| { - panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx); - }); - - panel.read_with(cx, |panel, _cx| { - assert_eq!( - *panel.start_thread_in(), - StartThreadIn::NewWorktree, - "thread target should be NewWorktree before reload" - ); - }); - - // Let serialization complete. - cx.run_until_parked(); - - // Disable worktree flag and reload panel from serialized data. - cx.update(|_, cx| { - cx.update_flags(true, vec!["agent-v2".to_string()]); - }); - - let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap()); - let async_cx = cx.update(|window, cx| window.to_async(cx)); - let loaded_panel = - AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx) - .await - .expect("panel load should succeed"); - cx.run_until_parked(); - - loaded_panel.read_with(cx, |panel, _cx| { - assert_eq!( - *panel.start_thread_in(), - StartThreadIn::LocalProject, - "thread target should fall back to LocalProject when worktree flag is disabled" - ); - }); - } - #[gpui::test] async fn test_set_active_blocked_during_worktree_creation(cx: &mut TestAppContext) { init_test(cx); @@ -4899,4 +5367,77 @@ mod tests { ); }); } + + #[test] + fn test_deserialize_legacy_agent_type_variants() { + assert_eq!( + serde_json::from_str::(r#""ClaudeAgent""#).unwrap(), + AgentType::Custom { + name: CLAUDE_AGENT_NAME.into(), + }, + ); + assert_eq!( + serde_json::from_str::(r#""ClaudeCode""#).unwrap(), + 
AgentType::Custom { + name: CLAUDE_AGENT_NAME.into(), + }, + ); + assert_eq!( + serde_json::from_str::(r#""Codex""#).unwrap(), + AgentType::Custom { + name: CODEX_NAME.into(), + }, + ); + assert_eq!( + serde_json::from_str::(r#""Gemini""#).unwrap(), + AgentType::Custom { + name: GEMINI_NAME.into(), + }, + ); + } + + #[test] + fn test_deserialize_current_agent_type_variants() { + assert_eq!( + serde_json::from_str::(r#""NativeAgent""#).unwrap(), + AgentType::NativeAgent, + ); + assert_eq!( + serde_json::from_str::(r#""TextThread""#).unwrap(), + AgentType::TextThread, + ); + assert_eq!( + serde_json::from_str::(r#"{"Custom":{"name":"my-agent"}}"#).unwrap(), + AgentType::Custom { + name: "my-agent".into(), + }, + ); + } + + #[test] + fn test_deserialize_legacy_serialized_panel() { + let json = serde_json::json!({ + "width": 300.0, + "selected_agent": "ClaudeAgent", + "last_active_thread": { + "session_id": "test-session", + "agent_type": "Codex", + }, + }); + + let panel: SerializedAgentPanel = serde_json::from_value(json).unwrap(); + assert_eq!( + panel.selected_agent, + Some(AgentType::Custom { + name: CLAUDE_AGENT_NAME.into(), + }), + ); + let thread = panel.last_active_thread.unwrap(); + assert_eq!( + thread.agent_type, + AgentType::Custom { + name: CODEX_NAME.into(), + }, + ); + } } diff --git a/crates/agent_ui/src/agent_registry_ui.rs b/crates/agent_ui/src/agent_registry_ui.rs index 44d5bb20fb77c18447afbe985695cee08a646558..d003ba958276c8c2370011d83028eda2e9121440 100644 --- a/crates/agent_ui/src/agent_registry_ui.rs +++ b/crates/agent_ui/src/agent_registry_ui.rs @@ -1,6 +1,4 @@ -use std::collections::{BTreeMap, BTreeSet}; use std::ops::Range; -use std::sync::OnceLock; use client::zed_urls; use collections::HashMap; @@ -16,7 +14,7 @@ use project::{AgentRegistryStore, RegistryAgent}; use settings::{Settings, SettingsStore, update_settings_file}; use theme::ThemeSettings; use ui::{ - Banner, ButtonStyle, ScrollableHandle, Severity, ToggleButtonGroup, 
ToggleButtonGroupSize, + ButtonStyle, ScrollableHandle, ToggleButtonGroup, ToggleButtonGroupSize, ToggleButtonGroupStyle, ToggleButtonSimple, Tooltip, WithScrollbar, prelude::*, }; use workspace::{ @@ -39,28 +37,6 @@ enum RegistryInstallStatus { InstalledExtension, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -enum BuiltInAgent { - Claude, - Codex, - Gemini, -} - -fn keywords_by_agent_feature() -> &'static BTreeMap> { - static KEYWORDS_BY_FEATURE: OnceLock>> = - OnceLock::new(); - KEYWORDS_BY_FEATURE.get_or_init(|| { - BTreeMap::from_iter([ - ( - BuiltInAgent::Claude, - vec!["claude", "claude code", "claude agent"], - ), - (BuiltInAgent::Codex, vec!["codex", "codex cli"]), - (BuiltInAgent::Gemini, vec!["gemini", "gemini cli"]), - ]) - }) -} - #[derive(IntoElement)] struct AgentRegistryCard { children: Vec, @@ -106,7 +82,6 @@ pub struct AgentRegistryPage { installed_statuses: HashMap, query_editor: Entity, filter: RegistryFilter, - upsells: BTreeSet, _subscriptions: Vec, } @@ -141,7 +116,6 @@ impl AgentRegistryPage { installed_statuses: HashMap::default(), query_editor, filter: RegistryFilter::All, - upsells: BTreeSet::new(), _subscriptions: subscriptions, }; @@ -207,7 +181,6 @@ impl AgentRegistryPage { fn filter_registry_agents(&mut self, cx: &mut Context) { self.refresh_installed_statuses(cx); - self.refresh_feature_upsells(cx); let search = self.search_query(cx).map(|search| search.to_lowercase()); let filter = self.filter; let installed_statuses = self.installed_statuses.clone(); @@ -265,83 +238,6 @@ impl AgentRegistryPage { } } - fn refresh_feature_upsells(&mut self, cx: &mut Context) { - let Some(search) = self.search_query(cx) else { - self.upsells.clear(); - return; - }; - - let search = search.to_lowercase(); - let search_terms = search - .split_whitespace() - .map(|term| term.trim()) - .collect::>(); - - for (feature, keywords) in keywords_by_agent_feature() { - if keywords - .iter() - .any(|keyword| search_terms.contains(keyword)) - { 
- self.upsells.insert(*feature); - } else { - self.upsells.remove(feature); - } - } - } - - fn render_feature_upsell_banner( - &self, - label: SharedString, - docs_url: SharedString, - ) -> impl IntoElement { - let docs_url_button = Button::new("open_docs", "View Documentation") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_position(IconPosition::End) - .icon_color(Color::Muted) - .on_click({ - move |_event, _window, cx| { - telemetry::event!( - "Documentation Viewed", - source = "Agent Registry Feature Upsell", - url = docs_url, - ); - cx.open_url(&docs_url) - } - }); - - div().pt_4().px_4().child( - Banner::new() - .severity(Severity::Success) - .child(Label::new(label).mt_0p5()) - .action_slot(docs_url_button), - ) - } - - fn render_feature_upsells(&self) -> impl IntoElement { - let mut container = v_flex(); - - for feature in &self.upsells { - let banner = match feature { - BuiltInAgent::Claude => self.render_feature_upsell_banner( - "Claude Agent support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#claude-agent".into(), - ), - BuiltInAgent::Codex => self.render_feature_upsell_banner( - "Codex CLI support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#codex-cli".into(), - ), - BuiltInAgent::Gemini => self.render_feature_upsell_banner( - "Gemini CLI support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#gemini-cli".into(), - ), - }; - container = container.child(banner); - } - - container - } - fn render_search(&self, cx: &mut Context) -> Div { let mut key_context = KeyContext::new_with_defaults(); key_context.add("BufferSearchBar"); @@ -704,14 +600,10 @@ impl Render for AgentRegistryPage { ), ), ) - .child(self.render_feature_upsells()) .child(v_flex().px_4().size_full().overflow_y_hidden().map(|this| { let count = self.filtered_registry_indices.len(); - let has_upsells = !self.upsells.is_empty(); - if count == 0 && !has_upsells { + if count == 0 { 
this.child(self.render_empty_state(cx)).into_any_element() - } else if count == 0 { - this.into_any_element() } else { let scroll_handle = &self.list; this.child( diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 5ae2d677ba6dd4622127b39938f2bf005e7fcab9..8583e8977a719987b12770eec2d77408187a4e1f 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -11,6 +11,7 @@ pub(crate) mod connection_view; mod context; mod context_server_configuration; mod entry_view_state; +mod external_source_prompt; mod favorite_models; mod inline_assistant; mod inline_prompt_editor; @@ -25,6 +26,8 @@ mod slash_command; mod slash_command_picker; mod terminal_codegen; mod terminal_inline_assistant; +#[cfg(any(test, feature = "test-support"))] +pub mod test_support; mod text_thread_editor; mod text_thread_history; mod thread_history; @@ -33,6 +36,7 @@ mod ui; use std::rc::Rc; use std::sync::Arc; +use agent_client_protocol as acp; use agent_settings::{AgentProfileId, AgentSettings}; use assistant_slash_command::SlashCommandRegistry; use client::Client; @@ -63,6 +67,7 @@ use crate::agent_registry_ui::AgentRegistryPage; pub use crate::inline_assistant::InlineAssistant; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; pub(crate) use connection_view::ConnectionView; +pub use external_source_prompt::ExternalSourcePrompt; pub(crate) use mode_selector::ModeSelector; pub(crate) use model_selector::ModelSelector; pub(crate) use model_selector_popover::ModelSelectorPopover; @@ -77,6 +82,8 @@ actions!( NewTextThread, /// Toggles the menu to create new agent threads. ToggleNewThreadMenu, + /// Toggles the selector for choosing where new threads start (current project or new worktree). + ToggleStartThreadInSelector, /// Toggles the navigation menu for switching between threads and views. ToggleNavigationMenu, /// Toggles the options menu for agent settings and preferences. 
@@ -205,13 +212,70 @@ pub struct NewNativeAgentThreadFromSummary { } // TODO unify this with AgentType -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Serialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ExternalAgent { NativeAgent, Custom { name: SharedString }, } +// Custom impl handles legacy variant names from before the built-in agents were moved to +// the registry: "claude_code" -> Custom { name: "claude-acp" }, "codex" -> Custom { name: +// "codex-acp" }, "gemini" -> Custom { name: "gemini" }. +// Can be removed at some point in the future and go back to #[derive(Deserialize)]. +impl<'de> serde::Deserialize<'de> for ExternalAgent { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use project::agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME}; + + let value = serde_json::Value::deserialize(deserializer)?; + + if let Some(s) = value.as_str() { + return match s { + "native_agent" => Ok(Self::NativeAgent), + "claude_code" | "claude_agent" => Ok(Self::Custom { + name: CLAUDE_AGENT_NAME.into(), + }), + "codex" => Ok(Self::Custom { + name: CODEX_NAME.into(), + }), + "gemini" => Ok(Self::Custom { + name: GEMINI_NAME.into(), + }), + other => Err(serde::de::Error::unknown_variant( + other, + &[ + "native_agent", + "custom", + "claude_agent", + "claude_code", + "codex", + "gemini", + ], + )), + }; + } + + if let Some(obj) = value.as_object() { + if let Some(inner) = obj.get("custom") { + #[derive(serde::Deserialize)] + struct CustomFields { + name: SharedString, + } + let fields: CustomFields = + serde_json::from_value(inner.clone()).map_err(serde::de::Error::custom)?; + return Ok(Self::Custom { name: fields.name }); + } + } + + Err(serde::de::Error::custom( + "expected a string variant or {\"custom\": {\"name\": ...}}", + )) + } +} + impl ExternalAgent { pub fn server( &self, @@ -239,11 +303,21 @@ pub enum StartThreadIn { /// Content to 
initialize new external agent with. pub enum AgentInitialContent { - ThreadSummary(acp_thread::AgentSessionInfo), + ThreadSummary { + session_id: acp::SessionId, + title: Option, + }, ContentBlock { blocks: Vec, auto_submit: bool, }, + FromExternalSource(ExternalSourcePrompt), +} + +impl From for AgentInitialContent { + fn from(prompt: ExternalSourcePrompt) -> Self { + Self::FromExternalSource(prompt) + } } /// Opens the profile management interface for configuring agent tools and settings. @@ -668,4 +742,42 @@ mod tests { ); }); } + + #[test] + fn test_deserialize_legacy_external_agent_variants() { + use project::agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME}; + + assert_eq!( + serde_json::from_str::(r#""claude_code""#).unwrap(), + ExternalAgent::Custom { + name: CLAUDE_AGENT_NAME.into(), + }, + ); + assert_eq!( + serde_json::from_str::(r#""codex""#).unwrap(), + ExternalAgent::Custom { + name: CODEX_NAME.into(), + }, + ); + assert_eq!( + serde_json::from_str::(r#""gemini""#).unwrap(), + ExternalAgent::Custom { + name: GEMINI_NAME.into(), + }, + ); + } + + #[test] + fn test_deserialize_current_external_agent_variants() { + assert_eq!( + serde_json::from_str::(r#""native_agent""#).unwrap(), + ExternalAgent::NativeAgent, + ); + assert_eq!( + serde_json::from_str::(r#"{"custom":{"name":"my-agent"}}"#).unwrap(), + ExternalAgent::Custom { + name: "my-agent".into(), + }, + ); + } } diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index 30778909b2c9a91dab0b20417e973b7e83ea6a17..40ad7bc729269d5dae3364ecf3e0de6e5ee5b0ec 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -5,7 +5,8 @@ use std::sync::Arc; use std::sync::atomic::AtomicBool; use crate::ThreadHistory; -use acp_thread::{AgentSessionInfo, MentionUri}; +use acp_thread::MentionUri; +use agent_client_protocol as acp; use anyhow::Result; use editor::{ CompletionProvider, Editor, ExcerptId, 
code_context_menus::COMPLETION_MENU_MAX_WIDTH, @@ -144,8 +145,8 @@ impl PromptContextType { pub(crate) enum Match { File(FileMatch), Symbol(SymbolMatch), - Thread(AgentSessionInfo), - RecentThread(AgentSessionInfo), + Thread(SessionMatch), + RecentThread(SessionMatch), Fetch(SharedString), Rules(RulesContextEntry), Entry(EntryMatch), @@ -165,15 +166,19 @@ impl Match { } } +#[derive(Debug, Clone)] +pub struct SessionMatch { + session_id: acp::SessionId, + title: SharedString, +} + pub struct EntryMatch { mat: Option, entry: PromptContextEntry, } -fn session_title(session: &AgentSessionInfo) -> SharedString { - session - .title - .clone() +fn session_title(title: Option) -> SharedString { + title .filter(|title| !title.is_empty()) .unwrap_or_else(|| SharedString::new_static("New Thread")) } @@ -266,7 +271,8 @@ impl PromptCompletionProvider { } fn completion_for_thread( - thread_entry: AgentSessionInfo, + session_id: acp::SessionId, + title: Option, source_range: Range, recent: bool, source: Arc, @@ -275,9 +281,9 @@ impl PromptCompletionProvider { workspace: Entity, cx: &mut App, ) -> Completion { - let title = session_title(&thread_entry); + let title = session_title(title); let uri = MentionUri::Thread { - id: thread_entry.session_id, + id: session_id, name: title.to_string(), }; @@ -841,7 +847,15 @@ impl PromptCompletionProvider { Some(PromptContextType::Thread) => { if let Some(history) = self.history.upgrade() { - let sessions = history.read(cx).sessions().to_vec(); + let sessions = history + .read(cx) + .sessions() + .iter() + .map(|session| SessionMatch { + session_id: session.session_id.clone(), + title: session_title(session.title.clone()), + }) + .collect::>(); let search_task = filter_sessions_by_query(query, cancellation_flag, sessions, cx); cx.spawn(async move |_cx| { @@ -1018,15 +1032,18 @@ impl PromptCompletionProvider { .read(cx) .sessions() .into_iter() + .map(|session| SessionMatch { + session_id: session.session_id.clone(), + title: 
session_title(session.title.clone()), + }) .filter(|session| { let uri = MentionUri::Thread { id: session.session_id.clone(), - name: session_title(session).to_string(), + name: session.title.to_string(), }; !mentions.contains(&uri) }) .take(RECENT_COUNT) - .cloned() .map(Match::RecentThread), ); return Task::ready(recent); @@ -1298,7 +1315,8 @@ impl CompletionProvider for PromptCompletio ) } Match::Thread(thread) => Some(Self::completion_for_thread( - thread, + thread.session_id, + Some(thread.title), source_range.clone(), false, source.clone(), @@ -1308,7 +1326,8 @@ impl CompletionProvider for PromptCompletio cx, )), Match::RecentThread(thread) => Some(Self::completion_for_thread( - thread, + thread.session_id, + Some(thread.title), source_range.clone(), true, source.clone(), @@ -1878,9 +1897,9 @@ pub(crate) fn search_symbols( fn filter_sessions_by_query( query: String, cancellation_flag: Arc, - sessions: Vec, + sessions: Vec, cx: &mut App, -) -> Task> { +) -> Task> { if query.is_empty() { return Task::ready(sessions); } @@ -1893,10 +1912,13 @@ fn filter_sessions_by_query( async fn filter_sessions( query: String, cancellation_flag: Arc, - sessions: Vec, + sessions: Vec, executor: BackgroundExecutor, -) -> Vec { - let titles = sessions.iter().map(session_title).collect::>(); +) -> Vec { + let titles = sessions + .iter() + .map(|session| session.title.clone()) + .collect::>(); let candidates = titles .iter() .enumerate() @@ -2338,10 +2360,14 @@ mod tests { #[gpui::test] async fn test_filter_sessions_by_query(cx: &mut TestAppContext) { - let mut alpha = AgentSessionInfo::new("session-alpha"); - alpha.title = Some("Alpha Session".into()); - let mut beta = AgentSessionInfo::new("session-beta"); - beta.title = Some("Beta Session".into()); + let alpha = SessionMatch { + session_id: acp::SessionId::new("session-alpha"), + title: "Alpha Session".into(), + }; + let beta = SessionMatch { + session_id: acp::SessionId::new("session-beta"), + title: "Beta Session".into(), + }; 
let sessions = vec![alpha.clone(), beta]; diff --git a/crates/agent_ui/src/config_options.rs b/crates/agent_ui/src/config_options.rs index 458411d4d3af3f1c85dc57a1e940515e8aabb23a..6ec2595202490ca7474717f8985b6e4f6d7ca0b9 100644 --- a/crates/agent_ui/src/config_options.rs +++ b/crates/agent_ui/src/config_options.rs @@ -493,12 +493,7 @@ impl PickerDelegate for ConfigOptionPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(ConfigOptionPickerEntry::Option(_)) => true, Some(ConfigOptionPickerEntry::Separator(_)) | None => false, diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 07e34ccd56f0bd867135fe62894a5a3ff388c85e..07841c42215795ffcccf9f7e5ca684f42a59b498 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -39,7 +39,7 @@ use prompt_store::{PromptId, PromptStore}; use rope::Point; use settings::{NotifyWhenAgentWaiting, Settings as _, SettingsStore}; use std::cell::RefCell; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Instant; use std::{collections::BTreeMap, rc::Rc, time::Duration}; @@ -399,7 +399,10 @@ impl ConnectionView { enum ServerState { Loading(Entity), - LoadError(LoadError), + LoadError { + error: LoadError, + session_id: Option, + }, Connected(ConnectedServerState), } @@ -430,6 +433,7 @@ impl AuthState { } struct LoadingView { + session_id: Option, title: SharedString, _load_task: Task<()>, _update_title_task: Task>, @@ -466,7 +470,9 @@ impl ConnectedServerState { impl ConnectionView { pub fn new( agent: Rc, - resume_thread: Option, + resume_session_id: Option, + cwd: Option, + title: Option, initial_content: Option, workspace: WeakEntity, project: Entity, @@ -510,7 +516,9 @@ impl ConnectionView { 
prompt_store, server_state: Self::initial_state( agent.clone(), - resume_thread, + resume_session_id, + cwd, + title, project, initial_content, window, @@ -536,13 +544,23 @@ impl ConnectionView { } fn reset(&mut self, window: &mut Window, cx: &mut Context) { - let resume_thread_metadata = self + let (resume_session_id, cwd, title) = self .active_thread() - .and_then(|thread| thread.read(cx).resume_thread_metadata.clone()); + .map(|thread_view| { + let thread = thread_view.read(cx).thread.read(cx); + ( + Some(thread.session_id().clone()), + thread.cwd().cloned(), + Some(thread.title()), + ) + }) + .unwrap_or((None, None, None)); let state = Self::initial_state( self.agent.clone(), - resume_thread_metadata, + resume_session_id, + cwd, + title, self.project.clone(), None, window, @@ -566,7 +584,9 @@ impl ConnectionView { fn initial_state( agent: Rc, - resume_thread: Option, + resume_session_id: Option, + cwd: Option, + title: Option, project: Entity, initial_content: Option, window: &mut Window, @@ -575,9 +595,12 @@ impl ConnectionView { if project.read(cx).is_via_collab() && agent.clone().downcast::().is_none() { - return ServerState::LoadError(LoadError::Other( - "External agents are not yet supported in shared projects.".into(), - )); + return ServerState::LoadError { + error: LoadError::Other( + "External agents are not yet supported in shared projects.".into(), + ), + session_id: resume_session_id.clone(), + }; } let mut worktrees = project.read(cx).visible_worktrees(cx).collect::>(); // Pick the first non-single-file worktree for the root directory if there are any, @@ -598,28 +621,22 @@ impl ConnectionView { } }) .collect(); - let session_cwd = resume_thread - .as_ref() - .and_then(|resume| { - resume - .cwd - .as_ref() - .filter(|cwd| { - // Validate with the normalized path (rejects `..` traversals), - // but return the original cwd to preserve its path separators. 
- // On Windows, `normalize_lexically` rebuilds the path with - // backslashes via `PathBuf::push`, which would corrupt - // forward-slash Linux paths used by WSL agents. - util::paths::normalize_lexically(cwd) - .ok() - .is_some_and(|normalized| { - worktree_roots - .iter() - .any(|root| normalized.starts_with(root.as_ref())) - }) + let session_cwd = cwd + .filter(|cwd| { + // Validate with the normalized path (rejects `..` traversals), + // but return the original cwd to preserve its path separators. + // On Windows, `normalize_lexically` rebuilds the path with + // backslashes via `PathBuf::push`, which would corrupt + // forward-slash Linux paths used by WSL agents. + util::paths::normalize_lexically(cwd) + .ok() + .is_some_and(|normalized| { + worktree_roots + .iter() + .any(|root| normalized.starts_with(root.as_ref())) }) - .map(|path| Arc::from(path.as_path())) }) + .map(|path| path.into()) .or_else(|| worktree_roots.first().cloned()) .unwrap_or_else(|| paths::home_dir().as_path().into()); @@ -633,17 +650,18 @@ impl ConnectionView { ); let connect_task = agent.connect(delegate, cx); + let load_session_id = resume_session_id.clone(); let load_task = cx.spawn_in(window, async move |this, cx| { let connection = match connect_task.await { Ok(connection) => connection, Err(err) => { this.update_in(cx, |this, window, cx| { if err.downcast_ref::().is_some() { - this.handle_load_error(err, window, cx); + this.handle_load_error(load_session_id.clone(), err, window, cx); } else if let Some(active) = this.active_thread() { active.update(cx, |active, cx| active.handle_thread_error(err, cx)); } else { - this.handle_load_error(err, window, cx); + this.handle_load_error(load_session_id.clone(), err, window, cx); } cx.notify(); }) @@ -655,17 +673,25 @@ impl ConnectionView { telemetry::event!("Agent Thread Started", agent = connection.telemetry_id()); let mut resumed_without_history = false; - let result = if let Some(resume) = resume_thread.clone() { + let result = if let 
Some(session_id) = load_session_id.clone() { cx.update(|_, cx| { if connection.supports_load_session() { - connection - .clone() - .load_session(resume, project.clone(), &session_cwd, cx) + connection.clone().load_session( + session_id, + project.clone(), + &session_cwd, + title, + cx, + ) } else if connection.supports_resume_session() { resumed_without_history = true; - connection - .clone() - .resume_session(resume, project.clone(), &session_cwd, cx) + connection.clone().resume_session( + session_id, + project.clone(), + &session_cwd, + title, + cx, + ) } else { Task::ready(Err(anyhow!(LoadError::Other( "Loading or resuming sessions is not supported by this agent.".into() @@ -721,7 +747,6 @@ impl ConnectionView { thread, conversation.clone(), resumed_without_history, - resume_thread, initial_content, window, cx, @@ -756,7 +781,7 @@ impl ConnectionView { ); } Err(err) => { - this.handle_load_error(err, window, cx); + this.handle_load_error(load_session_id.clone(), err, window, cx); } }; }) @@ -792,6 +817,7 @@ impl ConnectionView { }); LoadingView { + session_id: resume_session_id, title: "Loading…".into(), _load_task: load_task, _update_title_task: update_title_task, @@ -807,7 +833,6 @@ impl ConnectionView { thread: Entity, conversation: Entity, resumed_without_history: bool, - resume_thread: Option, initial_content: Option, window: &mut Window, cx: &mut Context, @@ -956,6 +981,18 @@ impl ConnectionView { .unwrap_or_else(|| agent_name.clone()); let agent_icon = self.agent.logo(); + let agent_icon_from_external_svg = self + .agent_server_store + .read(cx) + .agent_icon(&ExternalAgentServerName(self.agent.name())) + .or_else(|| { + project::AgentRegistryStore::try_global(cx).and_then(|store| { + store + .read(cx) + .agent(self.agent.name().as_ref()) + .and_then(|a| a.icon_path().cloned()) + }) + }); let weak = cx.weak_entity(); cx.new(|cx| { @@ -965,6 +1002,7 @@ impl ConnectionView { conversation, weak, agent_icon, + agent_icon_from_external_svg, agent_name, 
agent_display_name, self.workspace.clone(), @@ -977,7 +1015,6 @@ impl ConnectionView { prompt_capabilities, available_commands, resumed_without_history, - resume_thread, self.project.downgrade(), self.thread_store.clone(), self.history.clone(), @@ -1073,6 +1110,7 @@ impl ConnectionView { fn handle_load_error( &mut self, + session_id: Option, err: anyhow::Error, window: &mut Window, cx: &mut Context, @@ -1093,7 +1131,13 @@ impl ConnectionView { LoadError::Other(format!("{:#}", err).into()) }; self.emit_load_error_telemetry(&load_error); - self.set_server_state(ServerState::LoadError(load_error), cx); + self.set_server_state( + ServerState::LoadError { + error: load_error, + session_id, + }, + cx, + ); } fn handle_agent_servers_updated( @@ -1108,7 +1152,7 @@ impl ConnectionView { // This handles the case where a thread is restored before authentication completes. let should_retry = match &self.server_state { ServerState::Loading(_) => false, - ServerState::LoadError(_) => true, + ServerState::LoadError { .. } => true, ServerState::Connected(connected) => { connected.auth_state.is_ok() && connected.has_thread_error(cx) } @@ -1132,7 +1176,7 @@ impl ConnectionView { match &self.server_state { ServerState::Connected(_) => "New Thread".into(), ServerState::Loading(loading_view) => loading_view.read(cx).title.clone(), - ServerState::LoadError(error) => match error { + ServerState::LoadError { error, .. } => match error { LoadError::Unsupported { .. 
} => format!("Upgrade {}", self.agent.name()).into(), LoadError::FailedToInstall(_) => { format!("Failed to Install {}", self.agent.name()).into() @@ -1151,6 +1195,17 @@ impl ConnectionView { } } + // The parent ID is None if we haven't created a thread yet + pub fn parent_id(&self, cx: &App) -> Option { + match &self.server_state { + ServerState::Connected(_) => self + .parent_thread(cx) + .map(|thread| thread.read(cx).id.clone()), + ServerState::Loading(loading) => loading.read(cx).session_id.clone(), + ServerState::LoadError { session_id, .. } => session_id.clone(), + } + } + pub fn is_loading(&self) -> bool { matches!(self.server_state, ServerState::Loading { .. }) } @@ -1348,7 +1403,13 @@ impl ConnectionView { self.focus_handle.focus(window, cx) } } - self.set_server_state(ServerState::LoadError(error.clone()), cx); + self.set_server_state( + ServerState::LoadError { + error: error.clone(), + session_id: Some(thread_id), + }, + cx, + ); } AcpThreadEvent::TitleUpdated => { let title = thread.read(cx).title(); @@ -1360,6 +1421,7 @@ impl ConnectionView { } }); } + cx.notify(); } AcpThreadEvent::PromptCapabilitiesUpdated => { if let Some(active) = self.thread_view(&thread_id) { @@ -1630,9 +1692,10 @@ impl ConnectionView { let cwd = root_dir.unwrap_or_else(|| paths::home_dir().as_path().into()); let subagent_thread_task = connected.connection.clone().load_session( - AgentSessionInfo::new(subagent_id.clone()), + subagent_id.clone(), self.project.clone(), &cwd, + None, cx, ); @@ -1654,7 +1717,6 @@ impl ConnectionView { conversation, false, None, - None, window, cx, ); @@ -2556,10 +2618,10 @@ impl ConnectionView { }) } - pub fn delete_history_entry(&mut self, entry: AgentSessionInfo, cx: &mut Context) { - let task = self.history.update(cx, |history, cx| { - history.delete_session(&entry.session_id, cx) - }); + pub fn delete_history_entry(&mut self, session_id: &acp::SessionId, cx: &mut Context) { + let task = self + .history + .update(cx, |history, cx| 
history.delete_session(&session_id, cx)); task.detach_and_log_err(cx); } } @@ -2611,6 +2673,7 @@ impl ConnectionView { impl Render for ConnectionView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { self.sync_queued_message_editors(window, cx); + let v2_flag = cx.has_flag::(); v_flex() .track_focus(&self.focus_handle) @@ -2619,9 +2682,19 @@ impl Render for ConnectionView { .child(match &self.server_state { ServerState::Loading { .. } => v_flex() .flex_1() - // .child(self.render_recent_history(cx)) + .when(v2_flag, |this| { + this.size_full().items_center().justify_center().child( + Label::new("Loading…").color(Color::Muted).with_animation( + "loading-agent-label", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.3, 0.7)), + |label, delta| label.alpha(delta), + ), + ) + }) .into_any(), - ServerState::LoadError(e) => v_flex() + ServerState::LoadError { error: e, .. } => v_flex() .flex_1() .size_full() .items_center() @@ -2723,6 +2796,55 @@ pub(crate) mod tests { assert!(!weak_view.is_upgradable()); } + #[gpui::test] + async fn test_external_source_prompt_requires_manual_send(cx: &mut TestAppContext) { + init_test(cx); + + let Some(prompt) = crate::ExternalSourcePrompt::new("Write me a script") else { + panic!("expected prompt from external source to sanitize successfully"); + }; + let initial_content = AgentInitialContent::FromExternalSource(prompt); + + let (thread_view, cx) = setup_thread_view_with_initial_content( + StubAgentServer::default_response(), + initial_content, + cx, + ) + .await; + + active_thread(&thread_view, cx).read_with(cx, |view, cx| { + assert!(view.show_external_source_prompt_warning); + assert_eq!(view.thread.read(cx).entries().len(), 0); + assert_eq!(view.message_editor.read(cx).text(cx), "Write me a script"); + }); + } + + #[gpui::test] + async fn test_external_source_prompt_warning_clears_after_send(cx: &mut TestAppContext) { + init_test(cx); + + let Some(prompt) = 
crate::ExternalSourcePrompt::new("Write me a script") else { + panic!("expected prompt from external source to sanitize successfully"); + }; + let initial_content = AgentInitialContent::FromExternalSource(prompt); + + let (thread_view, cx) = setup_thread_view_with_initial_content( + StubAgentServer::default_response(), + initial_content, + cx, + ) + .await; + + active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx)); + cx.run_until_parked(); + + active_thread(&thread_view, cx).read_with(cx, |view, cx| { + assert!(!view.show_external_source_prompt_warning); + assert_eq!(view.message_editor.read(cx).text(cx), ""); + assert_eq!(view.thread.read(cx).entries().len(), 2); + }); + } + #[gpui::test] async fn test_notification_for_stop_event(cx: &mut TestAppContext) { init_test(cx); @@ -2795,6 +2917,8 @@ pub(crate) mod tests { Rc::new(StubAgentServer::default_response()), None, None, + None, + None, workspace.downgrade(), project, Some(thread_store), @@ -2878,7 +3002,6 @@ pub(crate) mod tests { async fn test_resume_without_history_adds_notice(cx: &mut TestAppContext) { init_test(cx); - let session = AgentSessionInfo::new(SessionId::new("resume-session")); let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; let (multi_workspace, cx) = @@ -2892,7 +3015,9 @@ pub(crate) mod tests { cx.new(|cx| { ConnectionView::new( Rc::new(StubAgentServer::new(ResumeOnlyAgentConnection)), - Some(session), + Some(SessionId::new("resume-session")), + None, + None, None, workspace.downgrade(), project, @@ -2936,9 +3061,6 @@ pub(crate) mod tests { let connection = CwdCapturingConnection::new(); let captured_cwd = connection.captured_cwd.clone(); - let mut session = AgentSessionInfo::new(SessionId::new("session-1")); - session.cwd = Some(PathBuf::from("/project/subdir")); - let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, 
cx))); @@ -2946,7 +3068,9 @@ pub(crate) mod tests { cx.new(|cx| { ConnectionView::new( Rc::new(StubAgentServer::new(connection)), - Some(session), + Some(SessionId::new("session-1")), + Some(PathBuf::from("/project/subdir")), + None, None, workspace.downgrade(), project, @@ -2988,9 +3112,6 @@ pub(crate) mod tests { let connection = CwdCapturingConnection::new(); let captured_cwd = connection.captured_cwd.clone(); - let mut session = AgentSessionInfo::new(SessionId::new("session-1")); - session.cwd = Some(PathBuf::from("/some/other/path")); - let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); @@ -2998,7 +3119,9 @@ pub(crate) mod tests { cx.new(|cx| { ConnectionView::new( Rc::new(StubAgentServer::new(connection)), - Some(session), + Some(SessionId::new("session-1")), + Some(PathBuf::from("/some/other/path")), + None, None, workspace.downgrade(), project, @@ -3040,9 +3163,6 @@ pub(crate) mod tests { let connection = CwdCapturingConnection::new(); let captured_cwd = connection.captured_cwd.clone(); - let mut session = AgentSessionInfo::new(SessionId::new("session-1")); - session.cwd = Some(PathBuf::from("/project/../outside")); - let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); @@ -3050,7 +3170,9 @@ pub(crate) mod tests { cx.new(|cx| { ConnectionView::new( Rc::new(StubAgentServer::new(connection)), - Some(session), + Some(SessionId::new("session-1")), + Some(PathBuf::from("/project/../outside")), + None, None, workspace.downgrade(), project, @@ -3112,7 +3234,10 @@ pub(crate) mod tests { "Tab title should show the agent name with an error prefix" ); match &view.server_state { - ServerState::LoadError(LoadError::Other(msg)) => { + ServerState::LoadError { + error: LoadError::Other(msg), + .. 
+ } => { assert!( msg.contains("Invalid gzip header"), "Error callout should contain the underlying extraction error, got: {msg}" @@ -3122,7 +3247,7 @@ pub(crate) mod tests { "Expected LoadError::Other, got: {}", match other { ServerState::Loading(_) => "Loading (stuck!)", - ServerState::LoadError(_) => "LoadError (wrong variant)", + ServerState::LoadError { .. } => "LoadError (wrong variant)", ServerState::Connected(_) => "Connected", } ), @@ -3360,6 +3485,8 @@ pub(crate) mod tests { Rc::new(agent), None, None, + None, + None, workspace1.downgrade(), project1.clone(), Some(thread_store), @@ -3532,6 +3659,29 @@ pub(crate) mod tests { Entity, Entity, &mut VisualTestContext, + ) { + setup_thread_view_with_history_and_initial_content(agent, None, cx).await + } + + async fn setup_thread_view_with_initial_content( + agent: impl AgentServer + 'static, + initial_content: AgentInitialContent, + cx: &mut TestAppContext, + ) -> (Entity, &mut VisualTestContext) { + let (thread_view, _history, cx) = + setup_thread_view_with_history_and_initial_content(agent, Some(initial_content), cx) + .await; + (thread_view, cx) + } + + async fn setup_thread_view_with_history_and_initial_content( + agent: impl AgentServer + 'static, + initial_content: Option, + cx: &mut TestAppContext, + ) -> ( + Entity, + Entity, + &mut VisualTestContext, ) { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; @@ -3548,6 +3698,8 @@ pub(crate) mod tests { Rc::new(agent), None, None, + None, + initial_content, workspace.downgrade(), project, Some(thread_store), @@ -3728,6 +3880,7 @@ pub(crate) mod tests { AcpThread::new( None, name, + None, connection, project, action_log, @@ -3830,18 +3983,14 @@ pub(crate) mod tests { fn resume_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, _cwd: &Path, + _title: Option, cx: &mut App, ) -> Task>> { - let thread = build_test_thread( - self, - project, - "ResumeOnlyAgentConnection", - 
session.session_id, - cx, - ); + let thread = + build_test_thread(self, project, "ResumeOnlyAgentConnection", session_id, cx); Task::ready(Ok(thread)) } @@ -3901,7 +4050,7 @@ pub(crate) mod tests { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + cwd: &Path, cx: &mut gpui::App, ) -> Task>> { if !*self.authenticated.lock() { @@ -3916,6 +4065,7 @@ pub(crate) mod tests { AcpThread::new( None, "AuthGatedAgent", + Some(cwd.to_path_buf()), self, project, action_log, @@ -3977,7 +4127,7 @@ pub(crate) mod tests { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + cwd: &Path, cx: &mut gpui::App, ) -> Task>> { Task::ready(Ok(cx.new(|cx| { @@ -3985,6 +4135,7 @@ pub(crate) mod tests { AcpThread::new( None, "SaboteurAgentConnection", + Some(cwd.to_path_buf()), self, project, action_log, @@ -4042,7 +4193,7 @@ pub(crate) mod tests { fn new_session( self: Rc, project: Entity, - _cwd: &Path, + cwd: &Path, cx: &mut gpui::App, ) -> Task>> { Task::ready(Ok(cx.new(|cx| { @@ -4050,6 +4201,7 @@ pub(crate) mod tests { AcpThread::new( None, "RefusalAgentConnection", + Some(cwd.to_path_buf()), self, project, action_log, @@ -4125,6 +4277,7 @@ pub(crate) mod tests { AcpThread::new( None, "CwdCapturingConnection", + Some(cwd.to_path_buf()), self.clone(), project, action_log, @@ -4147,9 +4300,10 @@ pub(crate) mod tests { fn load_session( self: Rc, - session: AgentSessionInfo, + session_id: acp::SessionId, project: Entity, cwd: &Path, + _title: Option, cx: &mut App, ) -> Task>> { *self.captured_cwd.lock() = Some(cwd.to_path_buf()); @@ -4158,10 +4312,11 @@ pub(crate) mod tests { AcpThread::new( None, "CwdCapturingConnection", + Some(cwd.to_path_buf()), self.clone(), project, action_log, - session.session_id, + session_id, watch::Receiver::constant( acp::PromptCapabilities::new() .image(true) @@ -4263,6 +4418,8 @@ pub(crate) mod tests { Rc::new(StubAgentServer::new(connection.as_ref().clone())), None, None, + None, + None, workspace.downgrade(), project.clone(), 
Some(thread_store.clone()), @@ -5972,6 +6129,7 @@ pub(crate) mod tests { AcpThread::new( parent_session_id, "Test Thread", + None, connection, project, action_log, diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 8a1a7d2ea5b0f01ba559e83051861b9d6324985f..0519362ab1194a6e21ff9b3f213112f94f4cce55 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -206,6 +206,7 @@ pub struct ThreadView { pub(crate) conversation: Entity, pub server_view: WeakEntity, pub agent_icon: IconName, + pub agent_icon_from_external_svg: Option, pub agent_name: SharedString, pub focus_handle: FocusHandle, pub workspace: WeakEntity, @@ -246,7 +247,6 @@ pub struct ThreadView { pub is_loading_contents: bool, pub new_server_version_available: Option, pub resumed_without_history: bool, - pub resume_thread_metadata: Option, pub _cancel_task: Option>, _save_task: Option>, _draft_resolve_task: Option>, @@ -262,6 +262,7 @@ pub struct ThreadView { pub project: WeakEntity, pub recent_history_entries: Vec, pub hovered_recent_history_item: Option, + pub show_external_source_prompt_warning: bool, pub show_codex_windows_warning: bool, pub history: Entity, pub _history_subscription: Subscription, @@ -293,6 +294,7 @@ impl ThreadView { conversation: Entity, server_view: WeakEntity, agent_icon: IconName, + agent_icon_from_external_svg: Option, agent_name: SharedString, agent_display_name: SharedString, workspace: WeakEntity, @@ -305,7 +307,6 @@ impl ThreadView { prompt_capabilities: Rc>, available_commands: Rc>>, resumed_without_history: bool, - resume_thread_metadata: Option, project: WeakEntity, thread_store: Option>, history: Entity, @@ -324,6 +325,7 @@ impl ThreadView { }); let mut should_auto_submit = false; + let mut show_external_source_prompt_warning = false; let message_editor = cx.new(|cx| { let mut editor = MessageEditor::new( @@ -345,8 +347,8 @@ impl 
ThreadView { ); if let Some(content) = initial_content { match content { - AgentInitialContent::ThreadSummary(entry) => { - editor.insert_thread_summary(entry, window, cx); + AgentInitialContent::ThreadSummary { session_id, title } => { + editor.insert_thread_summary(session_id, title, window, cx); } AgentInitialContent::ContentBlock { blocks, @@ -355,6 +357,18 @@ impl ThreadView { should_auto_submit = auto_submit; editor.set_message(blocks, window, cx); } + AgentInitialContent::FromExternalSource(prompt) => { + show_external_source_prompt_warning = true; + // SECURITY: Be explicit about not auto submitting prompt from external source. + should_auto_submit = false; + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + prompt.into_string(), + ))], + window, + cx, + ); + } } } else if let Some(draft) = thread.read(cx).draft_prompt() { editor.set_message(draft.to_vec(), window, cx); @@ -424,6 +438,7 @@ impl ThreadView { conversation, server_view, agent_icon, + agent_icon_from_external_svg, agent_name, workspace, entry_view_state, @@ -436,7 +451,6 @@ impl ThreadView { prompt_capabilities, available_commands, resumed_without_history, - resume_thread_metadata, _subscriptions: subscriptions, permission_dropdown_handle: PopoverMenuHandle::default(), thread_retry_status: None, @@ -477,6 +491,7 @@ impl ThreadView { project, recent_history_entries, hovered_recent_history_item: None, + show_external_source_prompt_warning, history, _history_subscription: history_subscription, show_codex_windows_warning, @@ -781,6 +796,13 @@ impl ThreadView { // sending + fn clear_external_source_prompt_warning(&mut self, cx: &mut Context) { + if self.show_external_source_prompt_warning { + self.show_external_source_prompt_warning = false; + cx.notify(); + } + } + pub fn send(&mut self, window: &mut Window, cx: &mut Context) { let thread = &self.thread; @@ -862,6 +884,7 @@ impl ThreadView { .any(|command| command.name == "logout"); if can_login && !logout_supported { 
message_editor.update(cx, |editor, cx| editor.clear(window, cx)); + self.clear_external_source_prompt_warning(cx); let connection = self.thread.read(cx).connection().clone(); window.defer(cx, { @@ -934,6 +957,7 @@ impl ThreadView { let session_id = self.thread.read(cx).session_id().clone(); let parent_session_id = self.thread.read(cx).parent_session_id().cloned(); let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id(); + let is_first_message = self.thread.read(cx).entries().is_empty(); let thread = self.thread.downgrade(); self.is_loading_contents = true; @@ -953,6 +977,7 @@ impl ThreadView { }; let generation = this.update(cx, |this, cx| { + this.clear_external_source_prompt_warning(cx); let generation = this.start_turn(cx); this.in_flight_prompt = Some(contents.clone()); generation @@ -974,6 +999,24 @@ impl ThreadView { .ok(); } }); + if is_first_message { + let text: String = contents + .iter() + .filter_map(|block| match block { + acp::ContentBlock::Text(text_content) => Some(text_content.text.as_str()), + _ => None, + }) + .collect::>() + .join(" "); + let text = text.lines().next().unwrap_or("").trim(); + if !text.is_empty() { + let title: SharedString = util::truncate_and_trailoff(text, 20).into(); + thread.update(cx, |thread, cx| { + thread.set_provisional_title(title, cx); + })?; + } + } + let turn_start_time = Instant::now(); let send = thread.update(cx, |thread, cx| { thread.action_log().update(cx, |action_log, cx| { @@ -1750,18 +1793,7 @@ impl ThreadView { }) .await?; - let thread_metadata = AgentSessionInfo { - session_id, - cwd: None, - title: Some(format!("🔗 {}", response.title).into()), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; - - this.update_in(cx, |this, window, cx| { - this.resume_thread_metadata = Some(thread_metadata); - server_view.update(cx, |server_view, cx| server_view.reset(window, cx)); - })?; + server_view.update_in(cx, |server_view, window, cx| server_view.reset(window, cx))?; this.update_in(cx, 
|this, _window, cx| { if let Some(workspace) = this.workspace.upgrade() { @@ -2675,59 +2707,91 @@ impl ThreadView { let focus_handle = self.message_editor.focus_handle(cx); let editor_bg_color = cx.theme().colors().editor_background; let editor_expanded = self.editor_expanded; + let has_messages = self.list_state.item_count() > 0; + let v2_empty_state = cx.has_flag::() && !has_messages; let (expand_icon, expand_tooltip) = if editor_expanded { (IconName::Minimize, "Minimize Message Editor") } else { (IconName::Maximize, "Expand Message Editor") }; + if v2_empty_state { + self.message_editor.update(cx, |editor, cx| { + editor.set_mode( + EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sizing_behavior: SizingBehavior::Default, + }, + cx, + ); + }); + } else { + self.message_editor.update(cx, |editor, cx| { + editor.set_mode( + EditorMode::AutoHeight { + min_lines: AgentSettings::get_global(cx).message_editor_min_lines, + max_lines: Some( + AgentSettings::get_global(cx).set_message_editor_max_lines(), + ), + }, + cx, + ); + }); + } + v_flex() .on_action(cx.listener(Self::expand_message_editor)) .p_2() .gap_2() - .border_t_1() - .border_color(cx.theme().colors().border) + .when(!v2_empty_state, |this| { + this.border_t_1().border_color(cx.theme().colors().border) + }) .bg(editor_bg_color) - .when(editor_expanded, |this| { + .when(v2_empty_state, |this| this.flex_1().size_full()) + .when(editor_expanded && !v2_empty_state, |this| { this.h(vh(0.8, window)).size_full().justify_between() }) .child( v_flex() .relative() .size_full() + .when(v2_empty_state, |this| this.flex_1()) .pt_1() .pr_2p5() .child(self.message_editor.clone()) - .child( - h_flex() - .absolute() - .top_0() - .right_0() - .opacity(0.5) - .hover(|this| this.opacity(1.0)) - .child( - IconButton::new("toggle-height", expand_icon) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip({ - move |_window, cx| { - Tooltip::for_action_in( - 
expand_tooltip, + .when(!v2_empty_state, |this| { + this.child( + h_flex() + .absolute() + .top_0() + .right_0() + .opacity(0.5) + .hover(|this| this.opacity(1.0)) + .child( + IconButton::new("toggle-height", expand_icon) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip({ + move |_window, cx| { + Tooltip::for_action_in( + expand_tooltip, + &ExpandMessageEditor, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, window, cx| { + this.expand_message_editor( &ExpandMessageEditor, - &focus_handle, + window, cx, - ) - } - }) - .on_click(cx.listener(|this, _, window, cx| { - this.expand_message_editor( - &ExpandMessageEditor, - window, - cx, - ); - })), - ), - ), + ); + })), + ), + ) + }), ) .child( h_flex() @@ -6733,6 +6797,9 @@ impl ThreadView { this.expanded_tool_calls .insert(tool_call_id.clone()); } + let expanded = + this.expanded_tool_calls.contains(&tool_call_id); + telemetry::event!("Subagent Toggled", expanded); cx.notify(); } })) @@ -6751,6 +6818,7 @@ impl ThreadView { |this, thread| { this.on_click(cx.listener( move |_this, _event, _window, cx| { + telemetry::event!("Subagent Stopped"); thread.update(cx, |thread, cx| { thread.cancel(cx).detach(); }); @@ -6778,6 +6846,7 @@ impl ThreadView { .border_t_1() .when(is_failed, |this| this.border_dashed()) .border_color(self.tool_card_border_color(cx)) + .cursor_pointer() .hover(|s| s.bg(cx.theme().colors().element_hover)) .child( Icon::new(IconName::Maximize) @@ -6786,6 +6855,7 @@ impl ThreadView { ) .tooltip(Tooltip::text("Make Subagent Full Screen")) .on_click(cx.listener(move |this, _event, window, cx| { + telemetry::event!("Subagent Maximized"); this.server_view .update(cx, |this, cx| { this.navigate_to_session(session_id.clone(), window, cx); @@ -6835,34 +6905,6 @@ impl ThreadView { .into_any_element() } - /// This will return `true` if there were no other tool calls during the same turn as the given tool call (no concurrent tool calls). 
- fn should_show_subagent_fullscreen(&self, tool_call: &ToolCall, cx: &App) -> bool { - let parent_thread = self.thread.read(cx); - - let Some(tool_call_index) = parent_thread - .entries() - .iter() - .position(|e| matches!(e, AgentThreadEntry::ToolCall(tc) if tc.id == tool_call.id)) - else { - return false; - }; - - if let Some(AgentThreadEntry::ToolCall(_)) = - parent_thread.entries().get(tool_call_index + 1) - { - return false; - } - - if let Some(AgentThreadEntry::ToolCall(_)) = parent_thread - .entries() - .get(tool_call_index.saturating_sub(1)) - { - return false; - } - - true - } - fn render_subagent_expanded_content( &self, thread_view: &Entity, @@ -6873,8 +6915,6 @@ impl ThreadView { ) -> impl IntoElement { const MAX_PREVIEW_ENTRIES: usize = 8; - let should_show_subagent_fullscreen = self.should_show_subagent_fullscreen(tool_call, cx); - let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); @@ -6908,12 +6948,10 @@ impl ThreadView { } else { 0..total_entries }; - if !should_show_subagent_fullscreen { - entry_range.start = entry_range - .end - .saturating_sub(MAX_PREVIEW_ENTRIES) - .max(entry_range.start); - }; + entry_range.start = entry_range + .end + .saturating_sub(MAX_PREVIEW_ENTRIES) + .max(entry_range.start); let start_ix = entry_range.start; let scroll_handle = self @@ -6951,9 +6989,8 @@ impl ThreadView { .track_scroll(&scroll_handle) .children(rendered_entries), ) - .when(!should_show_subagent_fullscreen, |this| { - this.h_56().child(overlay) - }) + .h_56() + .child(overlay) .into_any_element() } @@ -7458,6 +7495,26 @@ impl ThreadView { ) } + fn render_external_source_prompt_warning(&self, cx: &mut Context) -> Callout { + Callout::new() + .icon(IconName::Warning) + .severity(Severity::Warning) + .title("Review before sending") + .description("This prompt was pre-filled by an external link. 
Read it carefully before you send it.") + .dismiss_action( + IconButton::new("dismiss-external-source-prompt-warning", IconName::Close) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Dismiss Warning")) + .on_click(cx.listener({ + move |this, _, _, cx| { + this.show_external_source_prompt_warning = false; + cx.notify(); + } + })), + ) + } + fn render_new_version_callout(&self, version: &SharedString, cx: &mut Context) -> Div { let server_view = self.server_view.clone(); v_flex().w_full().justify_end().child( @@ -7644,20 +7701,25 @@ impl ThreadView { impl Render for ThreadView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let has_messages = self.list_state.item_count() > 0; + let v2_empty_state = cx.has_flag::() && !has_messages; - let conversation = v_flex().flex_1().map(|this| { - let this = this.when(self.resumed_without_history, |this| { - this.child(Self::render_resume_notice(cx)) + let conversation = v_flex() + .when(!v2_empty_state, |this| this.flex_1()) + .map(|this| { + let this = this.when(self.resumed_without_history, |this| { + this.child(Self::render_resume_notice(cx)) + }); + if has_messages { + let list_state = self.list_state.clone(); + this.child(self.render_entries(cx)) + .vertical_scrollbar_for(&list_state, window, cx) + .into_any() + } else if v2_empty_state { + this.into_any() + } else { + this.child(self.render_recent_history(cx)).into_any() + } }); - if has_messages { - let list_state = self.list_state.clone(); - this.child(self.render_entries(cx)) - .vertical_scrollbar_for(&list_state, window, cx) - .into_any() - } else { - this.child(self.render_recent_history(cx)).into_any() - } - }); v_flex() .key_context("AcpThread") @@ -7802,6 +7864,9 @@ impl Render for ThreadView { .children(self.render_subagent_titlebar(cx)) .child(conversation) .children(self.render_activity_bar(window, cx)) + .when(self.show_external_source_prompt_warning, |this| { + 
this.child(self.render_external_source_prompt_warning(cx)) + }) .when(self.show_codex_windows_warning, |this| { this.child(self.render_codex_windows_warning(cx)) }) @@ -7900,17 +7965,7 @@ pub(crate) fn open_link( MentionUri::Thread { id, name } => { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.open_thread( - AgentSessionInfo { - session_id: id, - cwd: None, - title: Some(name.into()), - updated_at: None, - meta: None, - }, - window, - cx, - ) + panel.open_thread(id, None, Some(name.into()), window, cx) }); } } diff --git a/crates/agent_ui/src/external_source_prompt.rs b/crates/agent_ui/src/external_source_prompt.rs new file mode 100644 index 0000000000000000000000000000000000000000..cf581c038e97a96ee580818634b8588daf227d2d --- /dev/null +++ b/crates/agent_ui/src/external_source_prompt.rs @@ -0,0 +1,162 @@ +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ExternalSourcePrompt(String); + +impl ExternalSourcePrompt { + pub fn new(prompt: &str) -> Option { + sanitize(prompt).map(Self) + } + + pub fn as_str(&self) -> &str { + &self.0 + } + + pub fn into_string(self) -> String { + self.0 + } +} + +fn sanitize(prompt: &str) -> Option { + let mut sanitized_prompt = String::with_capacity(prompt.len()); + let mut consecutive_newline_count = 0; + let mut characters = prompt.chars().peekable(); + + while let Some(character) = characters.next() { + let character = if character == '\r' { + if characters.peek() == Some(&'\n') { + characters.next(); + } + '\n' + } else { + character + }; + + if is_bidi_control_character(character) || is_disallowed_control_character(character) { + continue; + } + + if character == '\n' { + consecutive_newline_count += 1; + if consecutive_newline_count > 2 { + continue; + } + } else { + consecutive_newline_count = 0; + } + + sanitized_prompt.push(character); + } + + if sanitized_prompt.is_empty() { + None + } else { + Some(sanitized_prompt) + } +} + +fn is_disallowed_control_character(character: char) -> bool 
{ + character.is_control() && !matches!(character, '\n' | '\t') +} + +fn is_bidi_control_character(character: char) -> bool { + matches!( + character, + '\u{061C}' // ALM + | '\u{200E}' // LRM + | '\u{200F}' // RLM + | '\u{202A}'..='\u{202E}' // LRE, RLE, PDF, LRO, RLO + | '\u{2066}'..='\u{2069}' // LRI, RLI, FSI, PDI + ) +} + +#[cfg(test)] +mod tests { + use super::ExternalSourcePrompt; + + #[test] + fn keeps_normal_prompt_text() { + let prompt = ExternalSourcePrompt::new("Write me a script\nThanks"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("Write me a script\nThanks") + ); + } + + #[test] + fn keeps_multilingual_text() { + let prompt = + ExternalSourcePrompt::new("日本語の依頼です。\n中文提示也应该保留。\nemoji 👩‍💻"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("日本語の依頼です。\n中文提示也应该保留。\nemoji 👩‍💻") + ); + } + + #[test] + fn collapses_newline_padding() { + let prompt = ExternalSourcePrompt::new( + "Review this prompt carefully.\n\nThis paragraph should stay separated.\n\n\n\n\n\n\nWrite me a script to do fizz buzz.", + ); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some( + "Review this prompt carefully.\n\nThis paragraph should stay separated.\n\nWrite me a script to do fizz buzz." 
+ ) + ); + } + + #[test] + fn normalizes_carriage_returns() { + let prompt = ExternalSourcePrompt::new("Line one\r\nLine two\rLine three"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("Line one\nLine two\nLine three") + ); + } + + #[test] + fn strips_bidi_control_characters() { + let prompt = ExternalSourcePrompt::new("abc\u{202E}def\u{202C}ghi"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("abcdefghi") + ); + } + + #[test] + fn strips_other_control_characters() { + let prompt = ExternalSourcePrompt::new("safe\u{0000}\u{001B}\u{007F}text"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("safetext") + ); + } + + #[test] + fn keeps_tabs() { + let prompt = ExternalSourcePrompt::new("keep\tindentation"); + + assert_eq!( + prompt.as_ref().map(ExternalSourcePrompt::as_str), + Some("keep\tindentation") + ); + } + + #[test] + fn drops_empty_prompt() { + assert_eq!(ExternalSourcePrompt::new(""), None); + } + + #[test] + fn drops_prompt_with_only_removed_characters() { + assert_eq!( + ExternalSourcePrompt::new("\u{202E}\u{202C}\u{0000}\u{001B}"), + None + ); + } +} diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 9ac84addcc80c806739570ad9951209f16c31bb1..4e7eecfe07aac84269cb1d325cc5a95943578863 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -2120,7 +2120,7 @@ pub mod test { client::init(&client, cx); workspace::init(app_state.clone(), cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), client.clone(), cx); language_models::init(user_store, client.clone(), cx); cx.set_global(inline_assistant); diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index 
9205e21be1ab796fae50a26d31aca514756e2bc2..e6e72b3197b4108d7b423470bf8bb4b75cd055b7 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ b/crates/agent_ui/src/language_model_selector.rs @@ -455,12 +455,7 @@ impl PickerDelegate for LanguageModelPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(LanguageModelPickerEntry::Model(_)) => true, Some(LanguageModelPickerEntry::Separator(_)) | None => false, diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index c75d0479b7bf16229cc487544d2c87403b3da430..933e24e83c0450dcbdde27d49abebb7fda2fa119 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -10,7 +10,7 @@ use crate::{ Mention, MentionImage, MentionSet, insert_crease_for_mention, paste_images_as_context, }, }; -use acp_thread::{AgentSessionInfo, MentionUri}; +use acp_thread::MentionUri; use agent::ThreadStore; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; @@ -301,7 +301,8 @@ impl MessageEditor { pub fn insert_thread_summary( &mut self, - thread: AgentSessionInfo, + session_id: acp::SessionId, + title: Option, window: &mut Window, cx: &mut Context, ) { @@ -311,13 +312,11 @@ impl MessageEditor { let Some(workspace) = self.workspace.upgrade() else { return; }; - let thread_title = thread - .title - .clone() + let thread_title = title .filter(|title| !title.is_empty()) .unwrap_or_else(|| SharedString::new_static("New Thread")); let uri = MentionUri::Thread { - id: thread.session_id, + id: session_id, name: thread_title.to_string(), }; let content = format!("{}\n", uri.as_link()); @@ -1223,8 +1222,10 @@ impl MessageEditor { pub fn set_mode(&mut self, mode: EditorMode, cx: &mut Context) { self.editor.update(cx, |editor, cx| { - editor.set_mode(mode); - 
cx.notify() + if *editor.mode() != mode { + editor.set_mode(mode); + cx.notify() + } }); } @@ -1425,7 +1426,7 @@ impl MessageEditor { }); } - #[cfg(test)] + #[cfg(any(test, feature = "test-support"))] pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.set_text(text, window, cx); @@ -1571,7 +1572,7 @@ fn find_matching_bracket(text: &str, open: char, close: char) -> Option { mod tests { use std::{cell::RefCell, ops::Range, path::Path, rc::Rc, sync::Arc}; - use acp_thread::{AgentSessionInfo, MentionUri}; + use acp_thread::MentionUri; use agent::{ThreadStore, outline}; use agent_client_protocol as acp; use editor::{ @@ -2811,14 +2812,8 @@ mod tests { let history = cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); - // Create a thread metadata to insert as summary - let thread_metadata = AgentSessionInfo { - session_id: acp::SessionId::new("thread-123"), - cwd: None, - title: Some("Previous Conversation".into()), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; + let session_id = acp::SessionId::new("thread-123"); + let title = Some("Previous Conversation".into()); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2839,17 +2834,17 @@ mod tests { window, cx, ); - editor.insert_thread_summary(thread_metadata.clone(), window, cx); + editor.insert_thread_summary(session_id.clone(), title.clone(), window, cx); editor }) }); // Construct expected values for verification let expected_uri = MentionUri::Thread { - id: thread_metadata.session_id.clone(), - name: thread_metadata.title.as_ref().unwrap().to_string(), + id: session_id.clone(), + name: title.as_ref().unwrap().to_string(), }; - let expected_title = thread_metadata.title.as_ref().unwrap(); + let expected_title = title.as_ref().unwrap(); let expected_link = format!("[@{}]({})", expected_title, expected_uri.to_uri()); message_editor.read_with(cx, |editor, cx| { @@ -2893,14 +2888,6 @@ mod tests 
{ let history = cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); - let thread_metadata = AgentSessionInfo { - session_id: acp::SessionId::new("thread-123"), - cwd: None, - title: Some("Previous Conversation".into()), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; - let message_editor = cx.update(|window, cx| { cx.new(|cx| { let mut editor = MessageEditor::new( @@ -2920,7 +2907,12 @@ mod tests { window, cx, ); - editor.insert_thread_summary(thread_metadata, window, cx); + editor.insert_thread_summary( + acp::SessionId::new("thread-123"), + Some("Previous Conversation".into()), + window, + cx, + ); editor }) }); diff --git a/crates/agent_ui/src/model_selector.rs b/crates/agent_ui/src/model_selector.rs index 307eda507410a060f551741a998779c44b303b60..89ed3e490b33ca83cbdab25cfce77fee7cf9ccb6 100644 --- a/crates/agent_ui/src/model_selector.rs +++ b/crates/agent_ui/src/model_selector.rs @@ -212,12 +212,7 @@ impl PickerDelegate for ModelPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(ModelPickerEntry::Model(_, _)) => true, Some(ModelPickerEntry::Separator(_)) | None => false, diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 45d7232e0dff8b2ab1056b522b5994e11236d843..926549c22f88bcb0937dddf7c3ff1b32060ed297 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -443,12 +443,7 @@ impl PickerDelegate for ProfilePickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(ProfilePickerEntry::Profile(_)) => true, 
Some(ProfilePickerEntry::Header(_)) | None => false, diff --git a/crates/agent_ui/src/test_support.rs b/crates/agent_ui/src/test_support.rs new file mode 100644 index 0000000000000000000000000000000000000000..05a6b0925fb9151cc18d7096c8bf4f2674054073 --- /dev/null +++ b/crates/agent_ui/src/test_support.rs @@ -0,0 +1,98 @@ +use acp_thread::{AgentConnection, StubAgentConnection}; +use agent_client_protocol as acp; +use agent_servers::{AgentServer, AgentServerDelegate}; +use gpui::{Entity, SharedString, Task, TestAppContext, VisualTestContext}; +use settings::SettingsStore; +use std::any::Any; +use std::rc::Rc; + +use crate::AgentPanel; +use crate::agent_panel; + +pub struct StubAgentServer { + connection: C, +} + +impl StubAgentServer { + pub fn new(connection: C) -> Self { + Self { connection } + } +} + +impl StubAgentServer { + pub fn default_response() -> Self { + let conn = StubAgentConnection::new(); + conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Default response".into()), + )]); + Self::new(conn) + } +} + +impl AgentServer for StubAgentServer +where + C: 'static + AgentConnection + Send + Clone, +{ + fn logo(&self) -> ui::IconName { + ui::IconName::Ai + } + + fn name(&self) -> SharedString { + "Test".into() + } + + fn connect( + &self, + _delegate: AgentServerDelegate, + _cx: &mut gpui::App, + ) -> Task>> { + Task::ready(Ok(Rc::new(self.connection.clone()))) + } + + fn into_any(self: Rc) -> Rc { + self + } +} + +pub fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + release_channel::init("0.0.0".parse().unwrap(), cx); + agent_panel::init(cx); + }); +} + +pub fn open_thread_with_connection( + panel: &Entity, + connection: StubAgentConnection, + cx: &mut VisualTestContext, +) { + panel.update_in(cx, |panel, window, cx| { + 
panel.open_external_thread_with_server( + Rc::new(StubAgentServer::new(connection)), + window, + cx, + ); + }); + cx.run_until_parked(); +} + +pub fn send_message(panel: &Entity, cx: &mut VisualTestContext) { + let thread_view = panel.read_with(cx, |panel, cx| panel.as_active_thread_view(cx).unwrap()); + let message_editor = thread_view.read_with(cx, |view, _cx| view.message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + thread_view.update_in(cx, |view, window, cx| view.send(window, cx)); + cx.run_until_parked(); +} + +pub fn active_session_id(panel: &Entity, cx: &VisualTestContext) -> acp::SessionId { + panel.read_with(cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + thread.read(cx).session_id().clone() + }) +} diff --git a/crates/agent_ui/src/thread_history.rs b/crates/agent_ui/src/thread_history.rs index 8f8488cb94f94e036b37ef31c9c588740cd6cf02..6601616e9f2ef447beb448f2753460fa7c380fa6 100644 --- a/crates/agent_ui/src/thread_history.rs +++ b/crates/agent_ui/src/thread_history.rs @@ -948,12 +948,12 @@ impl RenderOnce for HistoryEntryElement { }) .on_click({ let thread_view = self.thread_view.clone(); - let entry = self.entry.clone(); + let session_id = self.entry.session_id.clone(); move |_event, _window, cx| { if let Some(thread_view) = thread_view.upgrade() { thread_view.update(cx, |thread_view, cx| { - thread_view.delete_history_entry(entry.clone(), cx); + thread_view.delete_history_entry(&session_id, cx); }); } } @@ -973,7 +973,13 @@ impl RenderOnce for HistoryEntryElement { { if let Some(panel) = workspace.read(cx).panel::(cx) { panel.update(cx, |panel, cx| { - panel.load_agent_thread(entry.clone(), window, cx); + panel.load_agent_thread( + entry.session_id.clone(), + entry.cwd.clone(), + entry.title.clone(), + window, + cx, + ); }); } } diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 
0a61b8e4ef2ec69714f158a72f83cc0528cc8a8f..8b813ef7e40c2afe91b98600b9d1146d4751d48b 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -269,24 +269,13 @@ fn open_thread( cx: &mut Context, ) { use crate::AgentPanel; - use acp_thread::AgentSessionInfo; let Some(panel) = workspace.panel::(cx) else { return; }; panel.update(cx, |panel, cx| { - panel.load_agent_thread( - AgentSessionInfo { - session_id: id, - cwd: None, - title: Some(name.into()), - updated_at: None, - meta: None, - }, - window, - cx, - ) + panel.load_agent_thread(id, None, Some(name.into()), window, cx) }); } diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml index f344470475a7603782d3eba9a8c461a92d7b4855..065879bc94b68abe193a1a4fc530142d7695ff49 100644 --- a/crates/anthropic/Cargo.toml +++ b/crates/anthropic/Cargo.toml @@ -27,8 +27,4 @@ settings.workspace = true strum.workspace = true thiserror.workspace = true -[dev-dependencies] -reqwest_client.workspace = true -gpui_tokio.workspace = true -gpui.workspace = true -tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } + diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 6bff2be4c15841de597309b626e768bbf79e880a..a6509c81fa1ecabac32ff9e8bb0fafdddd9e7414 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -995,7 +995,7 @@ pub enum Speed { } #[derive(Debug, Serialize, Deserialize)] -struct StreamingRequest { +pub struct StreamingRequest { #[serde(flatten)] pub base: Request, pub stream: bool, diff --git a/crates/assistant_text_thread/Cargo.toml b/crates/assistant_text_thread/Cargo.toml index 4c3563a7d26dca06282d5f3d15ec2a64c411dfba..bbb5cf4778efd5d74b880b7350a71e72562f4d70 100644 --- a/crates/assistant_text_thread/Cargo.toml +++ b/crates/assistant_text_thread/Cargo.toml @@ -55,7 +55,7 @@ zed_env_vars.workspace = true [dev-dependencies] assistant_slash_commands.workspace = true -indoc.workspace 
= true + language_model = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true rand.workspace = true diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs index f9a635a16a2eaf2a4facbd1f25bf6eb0f9fe7a87..2165cf39136a1ed7268fbf6ea670d825b2b50bcc 100644 --- a/crates/audio/src/audio.rs +++ b/crates/audio/src/audio.rs @@ -384,17 +384,29 @@ pub fn open_input_stream( Ok(stream) } -pub fn open_output_stream(device_id: Option) -> anyhow::Result { - let output_handle = if let Some(id) = device_id { - if let Some(device) = default_host().device_by_id(&id) { - DeviceSinkBuilder::from_device(device)?.open_stream() - } else { - DeviceSinkBuilder::open_default_sink() +pub fn resolve_device(device_id: Option<&DeviceId>, input: bool) -> anyhow::Result { + if let Some(id) = device_id { + if let Some(device) = default_host().device_by_id(id) { + return Ok(device); } + log::warn!("Selected audio device not found, falling back to default"); + } + if input { + default_host() + .default_input_device() + .context("no audio input device available") } else { - DeviceSinkBuilder::open_default_sink() - }; - let mut output_handle = output_handle.context("Could not open output stream")?; + default_host() + .default_output_device() + .context("no audio output device available") + } +} + +pub fn open_output_stream(device_id: Option) -> anyhow::Result { + let device = resolve_device(device_id.as_ref(), false)?; + let mut output_handle = DeviceSinkBuilder::from_device(device)? 
+ .open_stream() + .context("Could not open output stream")?; output_handle.log_on_drop(false); log::info!("Output stream: {:?}", output_handle); Ok(output_handle) diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs index 4f60a6d63aef1d2c2d7fb4761a6fc2e2eaf3d8c7..8425ed5eaa713053f44b26e199a66b76bf9b57a6 100644 --- a/crates/audio/src/audio_settings.rs +++ b/crates/audio/src/audio_settings.rs @@ -42,12 +42,8 @@ pub struct AudioSettings { /// /// You need to rejoin a call for this setting to apply pub legacy_audio_compatible: bool, - /// Requires 'rodio_audio: true' - /// /// Select specific output audio device. pub output_audio_device: Option, - /// Requires 'rodio_audio: true' - /// /// Select specific input audio device. pub input_audio_device: Option, } diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 53fac7beac2475d06f4a0f886536942308f9976c..9b9ccee3b695bebdb08706815bcb407c901e4b5f 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -212,18 +212,10 @@ pub fn init(client: Arc, cx: &mut App) { } pub fn check(_: &Check, window: &mut Window, cx: &mut App) { - if let Some(message) = option_env!("ZED_UPDATE_EXPLANATION") { - drop(window.prompt( - gpui::PromptLevel::Info, - "Zed was installed via a package manager.", - Some(message), - &["Ok"], - cx, - )); - return; - } - - if let Ok(message) = env::var("ZED_UPDATE_EXPLANATION") { + if let Some(message) = option_env!("ZED_UPDATE_EXPLANATION") + .map(ToOwned::to_owned) + .or_else(|| env::var("ZED_UPDATE_EXPLANATION").ok()) + { drop(window.prompt( gpui::PromptLevel::Info, "Zed was installed via a package manager.", @@ -388,6 +380,10 @@ impl AutoUpdater { pub fn poll(&mut self, check_type: UpdateCheckType, cx: &mut Context) { if self.pending_poll.is_some() { + if self.update_check_type == UpdateCheckType::Automatic { + self.update_check_type = check_type; + cx.notify(); + } return; } 
self.update_check_type = check_type; @@ -557,7 +553,7 @@ impl AutoUpdater { asset, metrics_id: metrics_id.as_deref(), system_id: system_id.as_deref(), - is_staff: is_staff, + is_staff, }, )?; diff --git a/crates/buffer_diff/Cargo.toml b/crates/buffer_diff/Cargo.toml index 06cb6cfa76c66c2d5a7b3b4197566cdef3e0c18c..da18728ed4da5cafc972eb80d4dd93117bcff6ed 100644 --- a/crates/buffer_diff/Cargo.toml +++ b/crates/buffer_diff/Cargo.toml @@ -34,7 +34,7 @@ ztracing.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } rand.workspace = true -serde_json.workspace = true + settings.workspace = true text = { workspace = true, features = ["test-support"] } unindent.workspace = true diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml index 2e46b58b74b826e8892d1e9da28c3cf06c99aa9b..64f741bd588d2227198fda13c0a8fbf5fdb4337c 100644 --- a/crates/call/Cargo.toml +++ b/crates/call/Cargo.toml @@ -51,5 +51,5 @@ gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } + livekit_client = { workspace = true, features = ["test-support"] } diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 8a2394372faf17281babf2cc9769648d64cd67be..1a3ce059b8116ac7438f3eb0330b47660cc863de 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -34,4 +34,7 @@ pub enum CliResponse { /// When Zed started not as an *.app but as a binary (e.g. local development), /// there's a possibility to tell it to behave "regularly". +/// +/// Note that in the main zed binary, this variable is unset after it's read for the first time, +/// therefore it should always be accessed through the `FORCE_CLI_MODE` static. 
pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE"; diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index d27bf3387a7c8406885f078eef82be694dfa5dfa..5d38569cfd86c38e5b4780621db40d1f2a3b745c 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -140,6 +140,7 @@ pub enum Event { ParticipantIndicesChanged, PrivateUserInfoUpdated, PlanUpdated, + OrganizationChanged, } #[derive(Clone, Copy)] @@ -694,8 +695,21 @@ impl UserStore { self.current_organization.clone() } - pub fn set_current_organization(&mut self, organization: Arc) { - self.current_organization.replace(organization); + pub fn set_current_organization( + &mut self, + organization: Arc, + cx: &mut Context, + ) { + let is_same_organization = self + .current_organization + .as_ref() + .is_some_and(|current| current.id == organization.id); + + if !is_same_organization { + self.current_organization.replace(organization); + cx.emit(Event::OrganizationChanged); + cx.notify(); + } } pub fn organizations(&self) -> &Vec> { diff --git a/crates/cloud_llm_client/Cargo.toml b/crates/cloud_llm_client/Cargo.toml index 0f0f2e77360dab0793f5740a24965711f4d80fda..a7b4f925a9302296e8fe25a14177a583e5f44b33 100644 --- a/crates/cloud_llm_client/Cargo.toml +++ b/crates/cloud_llm_client/Cargo.toml @@ -22,6 +22,4 @@ strum = { workspace = true, features = ["derive"] } uuid = { workspace = true, features = ["serde"] } zeta_prompt.workspace = true -[dev-dependencies] -pretty_assertions.workspace = true -indoc.workspace = true + diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml index 2addcf110a7c8194538523077d09af9d5104bd0d..0daaee8fb1420c76757ca898655e8dd1a5244d7e 100644 --- a/crates/codestral/Cargo.toml +++ b/crates/codestral/Cargo.toml @@ -22,5 +22,6 @@ log.workspace = true serde.workspace = true serde_json.workspace = true text.workspace = true +zeta_prompt.workspace = true [dev-dependencies] diff --git a/crates/codestral/src/codestral.rs 
b/crates/codestral/src/codestral.rs index 32436ecc374bef86e3e9a7587acab72741264796..3930e2e873a91618bfae456bc188bbd90ffa64b9 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -8,7 +8,7 @@ use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task}; use http_client::HttpClient; use icons::IconName; use language::{ - Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint, language_settings::all_language_settings, + Anchor, Buffer, BufferSnapshot, EditPreview, language_settings::all_language_settings, }; use language_model::{ApiKeyState, AuthenticateError, EnvVar, env_var}; use serde::{Deserialize, Serialize}; @@ -18,7 +18,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use text::{OffsetRangeExt as _, ToOffset}; +use text::ToOffset; pub const CODESTRAL_API_URL: &str = "https://codestral.mistral.ai"; pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150); @@ -259,28 +259,31 @@ impl EditPredictionDelegate for CodestralEditPredictionDelegate { } let cursor_offset = cursor_position.to_offset(&snapshot); - let cursor_point = cursor_offset.to_point(&snapshot); + const MAX_EDITABLE_TOKENS: usize = 350; const MAX_CONTEXT_TOKENS: usize = 150; - const MAX_REWRITE_TOKENS: usize = 350; - - let (_, context_range) = - cursor_excerpt::editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, - MAX_REWRITE_TOKENS, - MAX_CONTEXT_TOKENS, - ); - - let context_range = context_range.to_offset(&snapshot); - let excerpt_text = snapshot - .text_for_range(context_range.clone()) - .collect::(); - let cursor_within_excerpt = cursor_offset + + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset); + let syntax_ranges = cursor_excerpt::compute_syntax_ranges( + &snapshot, + cursor_offset, + &excerpt_offset_range, + ); + let excerpt_text: String = snapshot.text_for_range(excerpt_point_range).collect(); + let (_, 
context_range) = zeta_prompt::compute_editable_and_context_ranges( + &excerpt_text, + cursor_offset_in_excerpt, + &syntax_ranges, + MAX_EDITABLE_TOKENS, + MAX_CONTEXT_TOKENS, + ); + let context_text = &excerpt_text[context_range.clone()]; + let cursor_within_excerpt = cursor_offset_in_excerpt .saturating_sub(context_range.start) - .min(excerpt_text.len()); - let prompt = excerpt_text[..cursor_within_excerpt].to_string(); - let suffix = excerpt_text[cursor_within_excerpt..].to_string(); + .min(context_text.len()); + let prompt = context_text[..cursor_within_excerpt].to_string(); + let suffix = context_text[cursor_within_excerpt..].to_string(); let completion_text = match Self::fetch_completion( http_client, diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 5db06ef8e73d3cf276f73fbd8aa53e932e6c75b8..447c2da08e054c9964f3813ac569964173ded5c3 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -75,13 +75,13 @@ uuid.workspace = true [dev-dependencies] agent = { workspace = true, features = ["test-support"] } -agent-client-protocol.workspace = true -agent_settings.workspace = true -agent_ui = { workspace = true, features = ["test-support"] } + + + assistant_text_thread.workspace = true assistant_slash_command.workspace = true async-trait.workspace = true -audio.workspace = true + buffer_diff.workspace = true call = { workspace = true, features = ["test-support"] } channel.workspace = true @@ -90,11 +90,11 @@ collab = { workspace = true, features = ["test-support"] } collab_ui = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } command_palette_hooks.workspace = true -context_server.workspace = true + ctor.workspace = true dap = { workspace = true, features = ["test-support"] } dap_adapters = { workspace = true, features = ["test-support"] } -dap-types.workspace = true + debugger_ui = { workspace = true, features = ["test-support"] } editor = { workspace = true, features 
= ["test-support"] } extension.workspace = true @@ -105,7 +105,7 @@ git_hosting_providers.workspace = true git_ui = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } gpui_tokio.workspace = true -hyper.workspace = true + indoc.workspace = true language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } @@ -131,7 +131,7 @@ smol.workspace = true sqlx = { version = "0.8", features = ["sqlite"] } task.workspace = true theme.workspace = true -title_bar = { workspace = true, features = ["test-support"] } + unindent.workspace = true util.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml index c996e3821fee17dbea99f660304e0b76b6e9bc28..0ac413d1863dbbcdbcd81ad2bb3907f7a370c866 100644 --- a/crates/collab_ui/Cargo.toml +++ b/crates/collab_ui/Cargo.toml @@ -24,7 +24,7 @@ test-support = [ "settings/test-support", "util/test-support", "workspace/test-support", - "http_client/test-support", + "title_bar/test-support", ] @@ -67,11 +67,11 @@ collections = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } notifications = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true + project = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } -tree-sitter-md.workspace = true + util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } + workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/command_palette/Cargo.toml b/crates/command_palette/Cargo.toml index 
bd86c10a8071896f0b24ea531d354c0e46114d48..96be6cb9ee2b767bc14503cbae7e2de6838e6724 100644 --- a/crates/command_palette/Cargo.toml +++ b/crates/command_palette/Cargo.toml @@ -38,14 +38,14 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -ctor.workspace = true + db = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true + go_to_line.workspace = true gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } menu.workspace = true project = { workspace = true, features = ["test-support"] } -serde_json.workspace = true + workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index 236216a8d9a64f736c76399867f0b8766c93c16b..d625c998b034a249cb3f498ae1fdd4e0e179a4cc 100644 --- a/crates/copilot/Cargo.toml +++ b/crates/copilot/Cargo.toml @@ -52,14 +52,10 @@ workspace.workspace = true async-std = { version = "1.12.0", features = ["unstable"] } [dev-dependencies] -client = { workspace = true, features = ["test-support"] } -clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } -ctor.workspace = true editor = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } diff --git a/crates/copilot_chat/Cargo.toml b/crates/copilot_chat/Cargo.toml index 991a58ac85227ebc84fad5a6d631fe17811fabd4..79159d59cc05aecd5d4298831a33698762d9a743 100644 --- a/crates/copilot_chat/Cargo.toml +++ b/crates/copilot_chat/Cargo.toml @@ -21,6 +21,7 @@ test-support = [ ] [dependencies] 
+anthropic.workspace = true anyhow.workspace = true collections.workspace = true dirs.workspace = true diff --git a/crates/copilot_chat/src/copilot_chat.rs b/crates/copilot_chat/src/copilot_chat.rs index 6ac7167c94f0b85e6470b2a20bbf3a17fe190b43..d1f339f89a01d1ed0d17e03b8712b42232177db8 100644 --- a/crates/copilot_chat/src/copilot_chat.rs +++ b/crates/copilot_chat/src/copilot_chat.rs @@ -52,6 +52,10 @@ impl CopilotChatConfiguration { format!("{}/responses", api_endpoint) } + pub fn messages_url(&self, api_endpoint: &str) -> String { + format!("{}/v1/messages", api_endpoint) + } + pub fn models_url(&self, api_endpoint: &str) -> String { format!("{}/models", api_endpoint) } @@ -77,6 +81,30 @@ pub enum Role { System, } +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +pub enum ChatLocation { + #[default] + Panel, + Editor, + EditingSession, + Terminal, + Agent, + Other, +} + +impl ChatLocation { + pub fn to_intent_string(self) -> &'static str { + match self { + ChatLocation::Panel => "conversation-panel", + ChatLocation::Editor => "conversation-inline", + ChatLocation::EditingSession => "conversation-edits", + ChatLocation::Terminal => "conversation-terminal", + ChatLocation::Agent => "conversation-agent", + ChatLocation::Other => "conversation-other", + } + } +} + #[derive(Deserialize, Serialize, Debug, Clone, PartialEq)] pub enum ModelSupportedEndpoint { #[serde(rename = "/chat/completions")] @@ -179,6 +207,16 @@ struct ModelSupportedFeatures { parallel_tool_calls: bool, #[serde(default)] vision: bool, + #[serde(default)] + thinking: bool, + #[serde(default)] + adaptive_thinking: bool, + #[serde(default)] + max_thinking_budget: Option, + #[serde(default)] + min_thinking_budget: Option, + #[serde(default)] + reasoning_effort: Vec, } #[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] @@ -226,6 +264,10 @@ impl Model { self.capabilities.limits.max_context_window_tokens as u64 } + pub fn max_output_tokens(&self) -> usize { + 
self.capabilities.limits.max_output_tokens + } + pub fn supports_tools(&self) -> bool { self.capabilities.supports.tool_calls } @@ -256,6 +298,41 @@ impl Model { .contains(&ModelSupportedEndpoint::Responses) } + pub fn supports_messages(&self) -> bool { + self.supported_endpoints + .contains(&ModelSupportedEndpoint::Messages) + } + + pub fn supports_thinking(&self) -> bool { + self.capabilities.supports.thinking + } + + pub fn supports_adaptive_thinking(&self) -> bool { + self.capabilities.supports.adaptive_thinking + } + + pub fn can_think(&self) -> bool { + self.supports_thinking() + || self.supports_adaptive_thinking() + || self.max_thinking_budget().is_some() + } + + pub fn max_thinking_budget(&self) -> Option { + self.capabilities.supports.max_thinking_budget + } + + pub fn min_thinking_budget(&self) -> Option { + self.capabilities.supports.min_thinking_budget + } + + pub fn reasoning_effort_levels(&self) -> &[String] { + &self.capabilities.supports.reasoning_effort + } + + pub fn family(&self) -> &str { + &self.capabilities.family + } + pub fn multiplier(&self) -> f64 { self.billing.multiplier } @@ -263,7 +340,6 @@ impl Model { #[derive(Serialize, Deserialize)] pub struct Request { - pub intent: bool, pub n: usize, pub stream: bool, pub temperature: f32, @@ -273,6 +349,8 @@ pub struct Request { pub tools: Vec, #[serde(default, skip_serializing_if = "Option::is_none")] pub tool_choice: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub thinking_budget: Option, } #[derive(Serialize, Deserialize)] @@ -550,6 +628,7 @@ impl CopilotChat { pub async fn stream_completion( request: Request, + location: ChatLocation, is_user_initiated: bool, mut cx: AsyncApp, ) -> Result>> { @@ -563,12 +642,14 @@ impl CopilotChat { api_url.into(), request, is_user_initiated, + location, ) .await } pub async fn stream_response( request: responses::Request, + location: ChatLocation, is_user_initiated: bool, mut cx: AsyncApp, ) -> Result>> { @@ -582,6 +663,30 @@ impl 
CopilotChat { api_url, request, is_user_initiated, + location, + ) + .await + } + + pub async fn stream_messages( + body: String, + location: ChatLocation, + is_user_initiated: bool, + anthropic_beta: Option, + mut cx: AsyncApp, + ) -> Result>> { + let (client, oauth_token, api_endpoint, configuration) = + Self::get_auth_details(&mut cx).await?; + + let api_url = configuration.messages_url(&api_endpoint); + stream_messages( + client.clone(), + oauth_token, + api_url, + body, + is_user_initiated, + location, + anthropic_beta, ) .await } @@ -755,6 +860,7 @@ pub(crate) fn copilot_request_headers( builder: http_client::Builder, oauth_token: &str, is_user_initiated: Option, + location: Option, ) -> http_client::Builder { builder .header("Authorization", format!("Bearer {}", oauth_token)) @@ -766,12 +872,19 @@ pub(crate) fn copilot_request_headers( option_env!("CARGO_PKG_VERSION").unwrap_or("unknown") ), ) + .header("X-GitHub-Api-Version", "2025-10-01") .when_some(is_user_initiated, |builder, is_user_initiated| { builder.header( "X-Initiator", if is_user_initiated { "user" } else { "agent" }, ) }) + .when_some(location, |builder, loc| { + let interaction_type = loc.to_intent_string(); + builder + .header("X-Interaction-Type", interaction_type) + .header("OpenAI-Intent", interaction_type) + }) } async fn request_models( @@ -785,8 +898,8 @@ async fn request_models( .uri(models_url.as_ref()), &oauth_token, None, - ) - .header("x-github-api-version", "2025-05-01"); + None, + ); let request = request_builder.body(AsyncBody::empty())?; @@ -830,6 +943,7 @@ async fn stream_completion( completion_url: Arc, request: Request, is_user_initiated: bool, + location: ChatLocation, ) -> Result>> { let is_vision_request = request.messages.iter().any(|message| match message { ChatMessage::User { content } @@ -846,6 +960,7 @@ async fn stream_completion( .uri(completion_url.as_ref()), &oauth_token, Some(is_user_initiated), + Some(location), ) .when(is_vision_request, |builder| { 
builder.header("Copilot-Vision-Request", is_vision_request.to_string()) @@ -905,6 +1020,65 @@ async fn stream_completion( } } +async fn stream_messages( + client: Arc, + oauth_token: String, + api_url: String, + body: String, + is_user_initiated: bool, + location: ChatLocation, + anthropic_beta: Option, +) -> Result>> { + let mut request_builder = copilot_request_headers( + HttpRequest::builder().method(Method::POST).uri(&api_url), + &oauth_token, + Some(is_user_initiated), + Some(location), + ); + + if let Some(beta) = &anthropic_beta { + request_builder = request_builder.header("anthropic-beta", beta.as_str()); + } + + let request = request_builder.body(AsyncBody::from(body))?; + let mut response = client.send(request).await?; + + if !response.status().is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!("Failed to connect to API: {} {}", response.status(), body); + } + + let reader = BufReader::new(response.into_body()); + Ok(reader + .lines() + .filter_map(|line| async move { + match line { + Ok(line) => { + let line = line + .strip_prefix("data: ") + .or_else(|| line.strip_prefix("data:"))?; + if line.starts_with("[DONE]") || line.is_empty() { + return None; + } + match serde_json::from_str(line) { + Ok(event) => Some(Ok(event)), + Err(error) => { + log::error!( + "Failed to parse Copilot messages stream event: `{}`\nResponse: `{}`", + error, + line, + ); + Some(Err(anthropic::AnthropicError::DeserializeResponse(error))) + } + } + } + Err(error) => Some(Err(anthropic::AnthropicError::ReadResponse(error))), + } + }) + .boxed()) +} + #[cfg(test)] mod tests { use super::*; @@ -1513,6 +1687,11 @@ mod tests { tool_calls: true, parallel_tool_calls: false, vision: false, + thinking: false, + adaptive_thinking: false, + max_thinking_budget: None, + min_thinking_budget: None, + reasoning_effort: vec![], }, model_type: "chat".to_string(), tokenizer: None, diff --git a/crates/copilot_chat/src/responses.rs 
b/crates/copilot_chat/src/responses.rs index 473e583027bf77f3f7dc43d7914f6d2afff743a0..4f30ba1eb083c8a70c9a91853c7df37e65783ce3 100644 --- a/crates/copilot_chat/src/responses.rs +++ b/crates/copilot_chat/src/responses.rs @@ -1,9 +1,9 @@ use std::sync::Arc; -use super::copilot_request_headers; +use super::{ChatLocation, copilot_request_headers}; use anyhow::{Result, anyhow}; use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use serde_json::Value; pub use settings::OpenAiReasoningEffort as ReasoningEffort; @@ -24,6 +24,7 @@ pub struct Request { pub reasoning: Option, #[serde(skip_serializing_if = "Option::is_none")] pub include: Option>, + pub store: bool, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -280,6 +281,7 @@ pub async fn stream_response( api_url: String, request: Request, is_user_initiated: bool, + location: ChatLocation, ) -> Result>> { let is_vision_request = request.input.iter().any(|item| match item { ResponseInputItem::Message { @@ -295,13 +297,11 @@ pub async fn stream_response( HttpRequest::builder().method(Method::POST).uri(&api_url), &oauth_token, Some(is_user_initiated), - ); - - let request_builder = if is_vision_request { - request_builder.header("Copilot-Vision-Request", "true") - } else { - request_builder - }; + Some(location), + ) + .when(is_vision_request, |builder| { + builder.header("Copilot-Vision-Request", "true") + }); let is_streaming = request.stream; let json = serde_json::to_string(&request)?; diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 0c848d759cd444f3eb6e2a9838d3005254a25b19..60af963ee5520addedcfe9abdf41941e77922867 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -1,7 +1,7 @@ use crash_handler::{CrashEventResult, 
CrashHandler}; use futures::future::BoxFuture; use log::info; -use minidumper::{Client, LoopAction, MinidumpBinary}; +use minidumper::{Client, LoopAction, MinidumpBinary, Server, SocketName}; use parking_lot::Mutex; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; @@ -128,7 +128,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl let retry_frequency = Duration::from_millis(100); let mut maybe_client = None; while maybe_client.is_none() { - if let Ok(client) = Client::with_name(socket_name.as_path()) { + if let Ok(client) = Client::with_name(SocketName::Path(&socket_name)) { maybe_client = Some(client); info!("connected to crash handler process after {elapsed:?}"); break; @@ -446,7 +446,7 @@ fn spawn_crash_handler_windows(exe: &Path, socket_name: &Path) { } pub fn crash_server(socket: &Path) { - let Ok(mut server) = minidumper::Server::with_name(socket) else { + let Ok(mut server) = Server::with_name(SocketName::Path(socket)) else { log::info!("Couldn't create socket, there may already be a running crash server"); return; }; diff --git a/crates/dap/Cargo.toml b/crates/dap/Cargo.toml index d856ae0164ff35236f7a133361cdf28908f8b044..a1b107eb42ac44e95b84f4b5bfd1f0871cfcfc93 100644 --- a/crates/dap/Cargo.toml +++ b/crates/dap/Cargo.toml @@ -58,7 +58,6 @@ async-pipe.workspace = true gpui = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } task = { workspace = true, features = ["test-support"] } -tree-sitter.workspace = true -tree-sitter-go.workspace = true + util = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/debugger_ui/src/persistence.rs b/crates/debugger_ui/src/persistence.rs index ab68fea1154182fe266bb150d762f8be0995d733..7b0fba39e70012cdeb19408d22ce21e3b6c9621f 100644 --- a/crates/debugger_ui/src/persistence.rs +++ b/crates/debugger_ui/src/persistence.rs @@ -265,49 +265,72 @@ 
pub(crate) fn deserialize_pane_layout( pane.entity_id(), cx.subscribe_in(&pane, window, RunningState::handle_pane_event), ); + let running_state = cx.weak_entity(); + let pane_handle = pane.downgrade(); let sub_views: Vec<_> = serialized_pane .children .iter() .map(|child| match child { - DebuggerPaneItem::Frames => { - Box::new(SubView::stack_frame_list(stack_frame_list.clone(), cx)) - } + DebuggerPaneItem::Frames => Box::new(SubView::stack_frame_list( + stack_frame_list.clone(), + running_state.clone(), + pane_handle.clone(), + cx, + )), DebuggerPaneItem::Variables => Box::new(SubView::new( variable_list.focus_handle(cx), variable_list.clone().into(), DebuggerPaneItem::Variables, + running_state.clone(), + pane_handle.clone(), + cx, + )), + DebuggerPaneItem::BreakpointList => Box::new(SubView::breakpoint_list( + breakpoint_list.clone(), + running_state.clone(), + pane_handle.clone(), cx, )), - DebuggerPaneItem::BreakpointList => { - Box::new(SubView::breakpoint_list(breakpoint_list.clone(), cx)) - } DebuggerPaneItem::Modules => Box::new(SubView::new( module_list.focus_handle(cx), module_list.clone().into(), DebuggerPaneItem::Modules, + running_state.clone(), + pane_handle.clone(), cx, )), DebuggerPaneItem::LoadedSources => Box::new(SubView::new( loaded_sources.focus_handle(cx), loaded_sources.clone().into(), DebuggerPaneItem::LoadedSources, + running_state.clone(), + pane_handle.clone(), cx, )), DebuggerPaneItem::Console => { - let view = SubView::console(console.clone(), cx); + let view = SubView::console( + console.clone(), + running_state.clone(), + pane_handle.clone(), + cx, + ); Box::new(view) } DebuggerPaneItem::Terminal => Box::new(SubView::new( terminal.focus_handle(cx), terminal.clone().into(), DebuggerPaneItem::Terminal, + running_state.clone(), + pane_handle.clone(), cx, )), DebuggerPaneItem::MemoryView => Box::new(SubView::new( memory_view.focus_handle(cx), memory_view.clone().into(), DebuggerPaneItem::MemoryView, + running_state.clone(), + 
pane_handle.clone(), cx, )), }) diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 59e7226f596f1266fdeb3c5f3b60e1f97b81c850..1df442ef88fada109b6b7ad6e3bb5cf63f0ea453 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -7,7 +7,6 @@ pub mod stack_frame_list; pub mod variable_list; use std::{ any::Any, - ops::ControlFlow, path::PathBuf, sync::{Arc, LazyLock}, time::Duration, @@ -72,6 +71,7 @@ pub struct RunningState { focus_handle: FocusHandle, _remote_id: Option, workspace: WeakEntity, + project: WeakEntity, session_id: SessionId, variable_list: Entity, _subscriptions: Vec, @@ -144,6 +144,8 @@ pub(crate) struct SubView { inner: AnyView, item_focus_handle: FocusHandle, kind: DebuggerPaneItem, + running_state: WeakEntity, + host_pane: WeakEntity, show_indicator: Box bool>, actions: Option AnyElement>>, hovered: bool, @@ -154,12 +156,16 @@ impl SubView { item_focus_handle: FocusHandle, view: AnyView, kind: DebuggerPaneItem, + running_state: WeakEntity, + host_pane: WeakEntity, cx: &mut App, ) -> Entity { cx.new(|_| Self { kind, inner: view, item_focus_handle, + running_state, + host_pane, show_indicator: Box::new(|_| false), actions: None, hovered: false, @@ -168,6 +174,8 @@ impl SubView { pub(crate) fn stack_frame_list( stack_frame_list: Entity, + running_state: WeakEntity, + host_pane: WeakEntity, cx: &mut App, ) -> Entity { let weak_list = stack_frame_list.downgrade(); @@ -175,6 +183,8 @@ impl SubView { stack_frame_list.focus_handle(cx), stack_frame_list.into(), DebuggerPaneItem::Frames, + running_state, + host_pane, cx, ); @@ -189,12 +199,19 @@ impl SubView { this } - pub(crate) fn console(console: Entity, cx: &mut App) -> Entity { + pub(crate) fn console( + console: Entity, + running_state: WeakEntity, + host_pane: WeakEntity, + cx: &mut App, + ) -> Entity { let weak_console = console.downgrade(); let this = Self::new( console.focus_handle(cx), 
console.into(), DebuggerPaneItem::Console, + running_state, + host_pane, cx, ); this.update(cx, |this, _| { @@ -207,13 +224,20 @@ impl SubView { this } - pub(crate) fn breakpoint_list(list: Entity, cx: &mut App) -> Entity { + pub(crate) fn breakpoint_list( + list: Entity, + running_state: WeakEntity, + host_pane: WeakEntity, + cx: &mut App, + ) -> Entity { let weak_list = list.downgrade(); let focus_handle = list.focus_handle(cx); let this = Self::new( focus_handle, list.into(), DebuggerPaneItem::BreakpointList, + running_state, + host_pane, cx, ); @@ -239,6 +263,10 @@ impl SubView { ) { self.actions = Some(actions); } + + fn set_host_pane(&mut self, host_pane: WeakEntity) { + self.host_pane = host_pane; + } } impl Focusable for SubView { fn focus_handle(&self, _: &App) -> FocusHandle { @@ -281,6 +309,75 @@ impl Item for SubView { label.into_any_element() } + + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool { + let Some(tab) = dropped.downcast_ref::() else { + return true; + }; + let Some(this_pane) = self.host_pane.upgrade() else { + return true; + }; + let item = if tab.pane == this_pane { + active_pane.item_for_index(tab.ix) + } else { + tab.pane.read(cx).item_for_index(tab.ix) + }; + let Some(item) = item.filter(|item| item.downcast::().is_some()) else { + return true; + }; + let Some(split_direction) = active_pane.drag_split_direction() else { + return false; + }; + + let source = tab.pane.clone(); + let item_id_to_move = item.item_id(); + let weak_running = self.running_state.clone(); + + // Source pane may be the one currently updated, so defer the move. 
+ window.defer(cx, move |window, cx| { + let new_pane = weak_running.update(cx, |running, cx| { + let Some(project) = running.project.upgrade() else { + return Err(anyhow!("Debugger project has been dropped")); + }; + + let new_pane = new_debugger_pane(running.workspace.clone(), project, window, cx); + let _previous_subscription = running.pane_close_subscriptions.insert( + new_pane.entity_id(), + cx.subscribe_in(&new_pane, window, RunningState::handle_pane_event), + ); + debug_assert!(_previous_subscription.is_none()); + running + .panes + .split(&this_pane, &new_pane, split_direction, cx); + anyhow::Ok(new_pane) + }); + + match new_pane.and_then(|result| result) { + Ok(new_pane) => { + move_item( + &source, + &new_pane, + item_id_to_move, + new_pane.read(cx).active_item_index(), + true, + window, + cx, + ); + } + Err(err) => { + log::error!("{err:?}"); + } + } + }); + + true + } } impl Render for SubView { @@ -311,83 +408,18 @@ pub(crate) fn new_debugger_pane( cx: &mut Context, ) -> Entity { let weak_running = cx.weak_entity(); - let custom_drop_handle = { - let workspace = workspace.clone(); - let project = project.downgrade(); - let weak_running = weak_running.clone(); - move |pane: &mut Pane, any: &dyn Any, window: &mut Window, cx: &mut Context| { - let Some(tab) = any.downcast_ref::() else { - return ControlFlow::Break(()); - }; - let Some(project) = project.upgrade() else { - return ControlFlow::Break(()); - }; - let this_pane = cx.entity(); - let item = if tab.pane == this_pane { - pane.item_for_index(tab.ix) - } else { - tab.pane.read(cx).item_for_index(tab.ix) - }; - let Some(item) = item.filter(|item| item.downcast::().is_some()) else { - return ControlFlow::Break(()); - }; - - let source = tab.pane.clone(); - let item_id_to_move = item.item_id(); - - let Some(split_direction) = pane.drag_split_direction() else { - // If we drop into existing pane or current pane, - // regular pane drop handler will take care of it, - // using the right tab index for the 
operation. - return ControlFlow::Continue(()); - }; - - let workspace = workspace.clone(); - let weak_running = weak_running.clone(); - // Source pane may be the one currently updated, so defer the move. - window.defer(cx, move |window, cx| { - let new_pane = weak_running.update(cx, |running, cx| { - let new_pane = - new_debugger_pane(workspace.clone(), project.clone(), window, cx); - let _previous_subscription = running.pane_close_subscriptions.insert( - new_pane.entity_id(), - cx.subscribe_in(&new_pane, window, RunningState::handle_pane_event), - ); - debug_assert!(_previous_subscription.is_none()); - running - .panes - .split(&this_pane, &new_pane, split_direction, cx); - new_pane - }); - - match new_pane { - Ok(new_pane) => { - move_item( - &source, - &new_pane, - item_id_to_move, - new_pane.read(cx).active_item_index(), - true, - window, - cx, - ); - } - Err(err) => { - log::error!("{err:?}"); - } - }; - }); - - ControlFlow::Break(()) - } - }; cx.new(move |cx| { + let can_drop_predicate: Arc bool> = + Arc::new(|any, _window, _cx| { + any.downcast_ref::() + .is_some_and(|dragged_tab| dragged_tab.item.downcast::().is_some()) + }); let mut pane = Pane::new( workspace.clone(), project.clone(), Default::default(), - None, + Some(can_drop_predicate), NoAction.boxed_clone(), true, window, @@ -426,7 +458,6 @@ pub(crate) fn new_debugger_pane( }))); pane.set_can_toggle_zoom(false, cx); pane.display_nav_history_buttons(None); - pane.set_custom_drop_handle(cx, custom_drop_handle); pane.set_should_display_tab_bar(|_, _| true); pane.set_render_tab_bar_buttons(cx, |_, _, _| (None, None)); pane.set_render_tab_bar(cx, { @@ -466,8 +497,17 @@ pub(crate) fn new_debugger_pane( }) .on_drop(cx.listener( move |this, dragged_tab: &DraggedTab, window, cx| { + if dragged_tab.item.downcast::().is_none() { + return; + } this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, this.items_len(), window, cx) + this.handle_tab_drop( + dragged_tab, + this.items_len(), + false, + 
window, + cx, + ) }, )) .children(pane.items().enumerate().map(|(ix, item)| { @@ -516,8 +556,11 @@ pub(crate) fn new_debugger_pane( )) .on_drop(cx.listener( move |this, dragged_tab: &DraggedTab, window, cx| { + if dragged_tab.item.downcast::().is_none() { + return; + } this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, ix, window, cx) + this.handle_tab_drop(dragged_tab, ix, false, window, cx) }, )) .on_drag( @@ -729,6 +772,7 @@ impl RunningState { ) -> Self { let focus_handle = cx.focus_handle(); let session_id = session.read(cx).session_id(); + let weak_project = project.downgrade(); let weak_state = cx.weak_entity(); let stack_frame_list = cx.new(|cx| { StackFrameList::new( @@ -904,6 +948,7 @@ impl RunningState { memory_view, session, workspace, + project: weak_project, focus_handle, variable_list, _subscriptions, @@ -1304,48 +1349,71 @@ impl RunningState { fn create_sub_view( &self, item_kind: DebuggerPaneItem, - _pane: &Entity, + pane: &Entity, cx: &mut Context, ) -> Box { + let running_state = cx.weak_entity(); + let host_pane = pane.downgrade(); + match item_kind { - DebuggerPaneItem::Console => Box::new(SubView::console(self.console.clone(), cx)), + DebuggerPaneItem::Console => Box::new(SubView::console( + self.console.clone(), + running_state, + host_pane, + cx, + )), DebuggerPaneItem::Variables => Box::new(SubView::new( self.variable_list.focus_handle(cx), self.variable_list.clone().into(), item_kind, + running_state, + host_pane, + cx, + )), + DebuggerPaneItem::BreakpointList => Box::new(SubView::breakpoint_list( + self.breakpoint_list.clone(), + running_state, + host_pane, cx, )), - DebuggerPaneItem::BreakpointList => { - Box::new(SubView::breakpoint_list(self.breakpoint_list.clone(), cx)) - } DebuggerPaneItem::Frames => Box::new(SubView::new( self.stack_frame_list.focus_handle(cx), self.stack_frame_list.clone().into(), item_kind, + running_state, + host_pane, cx, )), DebuggerPaneItem::Modules => Box::new(SubView::new( 
self.module_list.focus_handle(cx), self.module_list.clone().into(), item_kind, + running_state, + host_pane, cx, )), DebuggerPaneItem::LoadedSources => Box::new(SubView::new( self.loaded_sources_list.focus_handle(cx), self.loaded_sources_list.clone().into(), item_kind, + running_state, + host_pane, cx, )), DebuggerPaneItem::Terminal => Box::new(SubView::new( self.debug_terminal.focus_handle(cx), self.debug_terminal.clone().into(), item_kind, + running_state, + host_pane, cx, )), DebuggerPaneItem::MemoryView => Box::new(SubView::new( self.memory_view.focus_handle(cx), self.memory_view.clone().into(), item_kind, + running_state, + host_pane, cx, )), } @@ -1454,6 +1522,13 @@ impl RunningState { ) { this.serialize_layout(window, cx); match event { + Event::AddItem { item } => { + if let Some(sub_view) = item.downcast::() { + sub_view.update(cx, |sub_view, _| { + sub_view.set_host_pane(source_pane.downgrade()); + }); + } + } Event::Remove { .. } => { let _did_find_pane = this.panes.remove(source_pane, cx).is_ok(); debug_assert!(_did_find_pane); @@ -1795,23 +1870,28 @@ impl RunningState { window: &mut Window, cx: &mut Context<'_, RunningState>, ) -> Member { + let running_state = cx.weak_entity(); + let leftmost_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx); + let leftmost_pane_handle = leftmost_pane.downgrade(); + let leftmost_frames = SubView::new( + stack_frame_list.focus_handle(cx), + stack_frame_list.clone().into(), + DebuggerPaneItem::Frames, + running_state.clone(), + leftmost_pane_handle.clone(), + cx, + ); + let leftmost_breakpoints = SubView::breakpoint_list( + breakpoints.clone(), + running_state.clone(), + leftmost_pane_handle, + cx, + ); leftmost_pane.update(cx, |this, cx| { + this.add_item(Box::new(leftmost_frames), true, false, None, window, cx); this.add_item( - Box::new(SubView::new( - this.focus_handle(cx), - stack_frame_list.clone().into(), - DebuggerPaneItem::Frames, - cx, - )), - true, - false, - None, - window, - cx, - ); 
- this.add_item( - Box::new(SubView::breakpoint_list(breakpoints.clone(), cx)), + Box::new(leftmost_breakpoints), true, false, None, @@ -1820,44 +1900,42 @@ impl RunningState { ); this.activate_item(0, false, false, window, cx); }); + let center_pane = new_debugger_pane(workspace.clone(), project.clone(), window, cx); + let center_pane_handle = center_pane.downgrade(); + let center_console = SubView::console( + console.clone(), + running_state.clone(), + center_pane_handle.clone(), + cx, + ); + let center_variables = SubView::new( + variable_list.focus_handle(cx), + variable_list.clone().into(), + DebuggerPaneItem::Variables, + running_state.clone(), + center_pane_handle, + cx, + ); center_pane.update(cx, |this, cx| { - let view = SubView::console(console.clone(), cx); + this.add_item(Box::new(center_console), true, false, None, window, cx); - this.add_item(Box::new(view), true, false, None, window, cx); - - this.add_item( - Box::new(SubView::new( - variable_list.focus_handle(cx), - variable_list.clone().into(), - DebuggerPaneItem::Variables, - cx, - )), - true, - false, - None, - window, - cx, - ); + this.add_item(Box::new(center_variables), true, false, None, window, cx); this.activate_item(0, false, false, window, cx); }); let rightmost_pane = new_debugger_pane(workspace.clone(), project, window, cx); + let rightmost_terminal = SubView::new( + debug_terminal.focus_handle(cx), + debug_terminal.clone().into(), + DebuggerPaneItem::Terminal, + running_state, + rightmost_pane.downgrade(), + cx, + ); rightmost_pane.update(cx, |this, cx| { - this.add_item( - Box::new(SubView::new( - debug_terminal.focus_handle(cx), - debug_terminal.clone().into(), - DebuggerPaneItem::Terminal, - cx, - )), - false, - false, - None, - window, - cx, - ); + this.add_item(Box::new(rightmost_terminal), false, false, None, window, cx); }); subscriptions.extend( diff --git a/crates/dev_container/Cargo.toml b/crates/dev_container/Cargo.toml index 
7b1574da69729a8ff5ddeb5523a8c249779a721b..e3a67601c3837bd9579a477576e9c837f73c1e75 100644 --- a/crates/dev_container/Cargo.toml +++ b/crates/dev_container/Cargo.toml @@ -29,7 +29,7 @@ gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } -theme.workspace = true + workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index a5328a1a6dd2e492dc4fb38a963b68a84d98cc03..09ee023d57fbb9b9f2c7d828f9b2ea25f73d23d9 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -38,7 +38,7 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } + editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } diff --git a/crates/diagnostics/src/diagnostic_renderer.rs b/crates/diagnostics/src/diagnostic_renderer.rs index 920bf4bc880c347c640d3dbf7106f3545bba3444..89cebf8fb237a032866e14c36d3097e18388e6ab 100644 --- a/crates/diagnostics/src/diagnostic_renderer.rs +++ b/crates/diagnostics/src/diagnostic_renderer.rs @@ -297,7 +297,7 @@ impl DiagnosticBlock { return; }; - for (excerpt_id, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) { + for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) { if range.context.overlaps(&diagnostic.range, &snapshot) { Self::jump_to( editor, diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 57ce6f03d2b56c9441bee763a28dcc7010f8311e..b200d01669a90c1e439338b9b01118cce8b8bb0c 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -583,7 
+583,7 @@ impl ProjectDiagnosticsEditor { RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer .excerpts_for_buffer(buffer_id, cx) .into_iter() - .map(|(_, range)| range) + .map(|(_, _, range)| range) .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b)) .collect(), } diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index 9f867584b57c8aed86f7003cca3a2b034c184476..d2a23b8b4ec3425072ffbe9d042ff89d26a56778 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -82,5 +82,5 @@ parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -tree-sitter-rust.workspace = true + zlog.workspace = true diff --git a/crates/edit_prediction/src/capture_example.rs b/crates/edit_prediction/src/capture_example.rs index 0fbece7478068d26c0c1a8accf7e93aba8c83b9c..d21df7868162d279cb18aeea3ef04d4ea9d7be7f 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -1,12 +1,9 @@ -use crate::{ - StoredEvent, cursor_excerpt::editable_and_context_ranges_for_cursor_position, - example_spec::ExampleSpec, -}; +use crate::{StoredEvent, example_spec::ExampleSpec}; use anyhow::Result; use buffer_diff::BufferDiffSnapshot; use collections::HashMap; use gpui::{App, Entity, Task}; -use language::{Buffer, ToPoint as _}; +use language::Buffer; use project::{Project, WorktreeId}; use std::{collections::hash_map, fmt::Write as _, ops::Range, path::Path, sync::Arc}; use text::{BufferSnapshot as TextBufferSnapshot, Point}; @@ -157,17 +154,34 @@ fn compute_cursor_excerpt( cursor_anchor: language::Anchor, ) -> (String, usize, Range) { use text::ToOffset as _; + use text::ToPoint as _; - let cursor_point = cursor_anchor.to_point(snapshot); - let (_editable_range, context_range) = - 
editable_and_context_ranges_for_cursor_position(cursor_point, snapshot, 100, 50); - let context_start_offset = context_range.start.to_offset(snapshot); let cursor_offset = cursor_anchor.to_offset(snapshot); - let cursor_offset_in_excerpt = cursor_offset.saturating_sub(context_start_offset); - let excerpt = snapshot - .text_for_range(context_range.clone()) - .collect::(); - (excerpt, cursor_offset_in_excerpt, context_range) + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + crate::cursor_excerpt::compute_cursor_excerpt(snapshot, cursor_offset); + let syntax_ranges = crate::cursor_excerpt::compute_syntax_ranges( + snapshot, + cursor_offset, + &excerpt_offset_range, + ); + let excerpt_text: String = snapshot.text_for_range(excerpt_point_range).collect(); + let (_, context_range) = zeta_prompt::compute_editable_and_context_ranges( + &excerpt_text, + cursor_offset_in_excerpt, + &syntax_ranges, + 100, + 50, + ); + let context_text = excerpt_text[context_range.clone()].to_string(); + let cursor_in_context = cursor_offset_in_excerpt.saturating_sub(context_range.start); + let context_buffer_start = + (excerpt_offset_range.start + context_range.start).to_point(snapshot); + let context_buffer_end = (excerpt_offset_range.start + context_range.end).to_point(snapshot); + ( + context_text, + cursor_in_context, + context_buffer_start..context_buffer_end, + ) } async fn collect_snapshots( @@ -533,8 +547,8 @@ mod tests { zlog::init_test(); let http_client = FakeHttpClient::with_404_response(); let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); - language_model::init(client.clone(), cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(user_store.clone(), client.clone(), cx); EditPredictionStore::global(&client, &user_store, cx); }) } diff --git a/crates/edit_prediction/src/cursor_excerpt.rs b/crates/edit_prediction/src/cursor_excerpt.rs index 
690e7001bd45ab3d9a995b4dfd43c2e8e297dbe9..2badcab07a90fd1c96634b4de1581758afc95deb 100644 --- a/crates/edit_prediction/src/cursor_excerpt.rs +++ b/crates/edit_prediction/src/cursor_excerpt.rs @@ -1,107 +1,140 @@ -use language::{BufferSnapshot, Point}; +use language::{BufferSnapshot, Point, ToPoint as _}; use std::ops::Range; use text::OffsetRangeExt as _; -use zeta_prompt::ExcerptRanges; -/// Computes all range variants for a cursor position: editable ranges at 150, 180, and 350 -/// token budgets, plus their corresponding context expansions. Returns the full excerpt range -/// (union of all context ranges) and the individual sub-ranges as Points. -pub fn compute_excerpt_ranges( - position: Point, +const CURSOR_EXCERPT_TOKEN_BUDGET: usize = 8192; + +/// Computes a cursor excerpt as the largest linewise symmetric region around +/// the cursor that fits within an 8192-token budget. Returns the point range, +/// byte offset range, and the cursor offset relative to the excerpt start. +pub fn compute_cursor_excerpt( snapshot: &BufferSnapshot, -) -> (Range, Range, ExcerptRanges) { - let editable_150 = compute_editable_range(snapshot, position, 150); - let editable_180 = compute_editable_range(snapshot, position, 180); - let editable_350 = compute_editable_range(snapshot, position, 350); - let editable_512 = compute_editable_range(snapshot, position, 512); - - let editable_150_context_350 = - expand_context_syntactically_then_linewise(snapshot, editable_150.clone(), 350); - let editable_180_context_350 = - expand_context_syntactically_then_linewise(snapshot, editable_180.clone(), 350); - let editable_350_context_150 = - expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 150); - let editable_350_context_512 = - expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 512); - let editable_350_context_1024 = - expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 1024); - let context_4096 = 
expand_context_syntactically_then_linewise( - snapshot, - editable_350_context_1024.clone(), - 4096 - 1024, - ); - let context_8192 = - expand_context_syntactically_then_linewise(snapshot, context_4096.clone(), 8192 - 4096); - - let full_start_row = context_8192.start.row; - let full_end_row = context_8192.end.row; - - let full_context = - Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row)); - - let full_context_offset_range = full_context.to_offset(snapshot); - - let to_offset = |range: &Range| -> Range { - let start = range.start.to_offset(snapshot); - let end = range.end.to_offset(snapshot); - (start - full_context_offset_range.start)..(end - full_context_offset_range.start) - }; - - let ranges = ExcerptRanges { - editable_150: to_offset(&editable_150), - editable_180: to_offset(&editable_180), - editable_350: to_offset(&editable_350), - editable_512: Some(to_offset(&editable_512)), - editable_150_context_350: to_offset(&editable_150_context_350), - editable_180_context_350: to_offset(&editable_180_context_350), - editable_350_context_150: to_offset(&editable_350_context_150), - editable_350_context_512: Some(to_offset(&editable_350_context_512)), - editable_350_context_1024: Some(to_offset(&editable_350_context_1024)), - context_4096: Some(to_offset(&context_4096)), - context_8192: Some(to_offset(&context_8192)), - }; - - (full_context, full_context_offset_range, ranges) + cursor_offset: usize, +) -> (Range, Range, usize) { + let cursor_point = cursor_offset.to_point(snapshot); + let cursor_row = cursor_point.row; + let (start_row, end_row, _) = + expand_symmetric_from_cursor(snapshot, cursor_row, CURSOR_EXCERPT_TOKEN_BUDGET); + + let excerpt_range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row)); + let excerpt_offset_range = excerpt_range.to_offset(snapshot); + let cursor_offset_in_excerpt = cursor_offset - excerpt_offset_range.start; + + ( + excerpt_range, + excerpt_offset_range, + 
cursor_offset_in_excerpt, + ) } -pub fn editable_and_context_ranges_for_cursor_position( - position: Point, +/// Expands symmetrically from cursor, one line at a time, alternating down then up. +/// Returns (start_row, end_row, remaining_tokens). +fn expand_symmetric_from_cursor( snapshot: &BufferSnapshot, - editable_region_token_limit: usize, - context_token_limit: usize, -) -> (Range, Range) { - let editable_range = compute_editable_range(snapshot, position, editable_region_token_limit); + cursor_row: u32, + mut token_budget: usize, +) -> (u32, u32, usize) { + let mut start_row = cursor_row; + let mut end_row = cursor_row; + + let cursor_line_tokens = line_token_count(snapshot, cursor_row); + token_budget = token_budget.saturating_sub(cursor_line_tokens); + + loop { + let can_expand_up = start_row > 0; + let can_expand_down = end_row < snapshot.max_point().row; + + if token_budget == 0 || (!can_expand_up && !can_expand_down) { + break; + } - let context_range = expand_context_syntactically_then_linewise( - snapshot, - editable_range.clone(), - context_token_limit, - ); + if can_expand_down { + let next_row = end_row + 1; + let line_tokens = line_token_count(snapshot, next_row); + if line_tokens <= token_budget { + end_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } - (editable_range, context_range) + if can_expand_up && token_budget > 0 { + let next_row = start_row - 1; + let line_tokens = line_token_count(snapshot, next_row); + if line_tokens <= token_budget { + start_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } + } + + (start_row, end_row, token_budget) +} + +/// Typical number of string bytes per token for the purposes of limiting model input. This is +/// intentionally low to err on the side of underestimating limits. 
+pub(crate) const BYTES_PER_TOKEN_GUESS: usize = 3; + +pub fn guess_token_count(bytes: usize) -> usize { + bytes / BYTES_PER_TOKEN_GUESS } -/// Computes the editable range using a three-phase approach: -/// 1. Expand symmetrically from cursor (75% of budget) -/// 2. Expand to syntax boundaries -/// 3. Continue line-wise in the least-expanded direction -fn compute_editable_range( +fn line_token_count(snapshot: &BufferSnapshot, row: u32) -> usize { + guess_token_count(snapshot.line_len(row) as usize).max(1) +} + +/// Computes the byte offset ranges of all syntax nodes containing the cursor, +/// ordered from innermost to outermost. The offsets are relative to +/// `excerpt_offset_range.start`. +pub fn compute_syntax_ranges( snapshot: &BufferSnapshot, - cursor: Point, - token_limit: usize, -) -> Range { - // Phase 1: Expand symmetrically from cursor using 75% of budget. - let initial_budget = (token_limit * 3) / 4; - let (mut start_row, mut end_row, mut remaining_tokens) = - expand_symmetric_from_cursor(snapshot, cursor.row, initial_budget); + cursor_offset: usize, + excerpt_offset_range: &Range, +) -> Vec> { + let cursor_point = cursor_offset.to_point(snapshot); + let range = cursor_point..cursor_point; + let mut current = snapshot.syntax_ancestor(range); + let mut ranges = Vec::new(); + let mut last_range: Option<(usize, usize)> = None; - // Add remaining budget from phase 1. - remaining_tokens += token_limit.saturating_sub(initial_budget); + while let Some(node) = current.take() { + let node_start = node.start_byte(); + let node_end = node.end_byte(); + let key = (node_start, node_end); - let original_start = start_row; - let original_end = end_row; + current = node.parent(); - // Phase 2: Expand to syntax boundaries that fit within budget. 
+ if last_range == Some(key) { + continue; + } + last_range = Some(key); + + let start = node_start.saturating_sub(excerpt_offset_range.start); + let end = node_end + .min(excerpt_offset_range.end) + .saturating_sub(excerpt_offset_range.start); + ranges.push(start..end); + } + + ranges +} + +/// Expands context by first trying to reach syntax boundaries, +/// then expanding line-wise only if no syntax expansion occurred. +pub fn expand_context_syntactically_then_linewise( + snapshot: &BufferSnapshot, + editable_range: Range, + context_token_limit: usize, +) -> Range { + let mut start_row = editable_range.start.row; + let mut end_row = editable_range.end.row; + let mut remaining_tokens = context_token_limit; + let mut did_syntax_expand = false; + + // Phase 1: Try to expand to containing syntax boundaries, picking the largest that fits. for (boundary_start, boundary_end) in containing_syntax_boundaries(snapshot, start_row, end_row) { let tokens_for_start = if boundary_start < start_row { @@ -125,76 +158,57 @@ fn compute_editable_range( end_row = boundary_end; } remaining_tokens = remaining_tokens.saturating_sub(total_needed); + did_syntax_expand = true; } else { break; } } - // Phase 3: Continue line-wise in the direction we expanded least during syntax phase. - let expanded_up = original_start.saturating_sub(start_row); - let expanded_down = end_row.saturating_sub(original_end); - - (start_row, end_row, _) = expand_linewise_biased( - snapshot, - start_row, - end_row, - remaining_tokens, - expanded_up <= expanded_down, // prefer_up if we expanded less upward - ); + // Phase 2: Only expand line-wise if no syntax expansion occurred. + if !did_syntax_expand { + (start_row, end_row, _) = + expand_linewise_biased(snapshot, start_row, end_row, remaining_tokens, true); + } let start = Point::new(start_row, 0); let end = Point::new(end_row, snapshot.line_len(end_row)); start..end } -/// Expands symmetrically from cursor, one line at a time, alternating down then up. 
-/// Returns (start_row, end_row, remaining_tokens). -fn expand_symmetric_from_cursor( +/// Returns an iterator of (start_row, end_row) for successively larger syntax nodes +/// containing the given row range. Smallest containing node first. +fn containing_syntax_boundaries( snapshot: &BufferSnapshot, - cursor_row: u32, - mut token_budget: usize, -) -> (u32, u32, usize) { - let mut start_row = cursor_row; - let mut end_row = cursor_row; - - // Account for the cursor's line. - let cursor_line_tokens = line_token_count(snapshot, cursor_row); - token_budget = token_budget.saturating_sub(cursor_line_tokens); + start_row: u32, + end_row: u32, +) -> impl Iterator { + let range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row)); + let mut current = snapshot.syntax_ancestor(range); + let mut last_rows: Option<(u32, u32)> = None; - loop { - let can_expand_up = start_row > 0; - let can_expand_down = end_row < snapshot.max_point().row; + std::iter::from_fn(move || { + while let Some(node) = current.take() { + let node_start_row = node.start_position().row as u32; + let node_end_row = node.end_position().row as u32; + let rows = (node_start_row, node_end_row); - if token_budget == 0 || (!can_expand_up && !can_expand_down) { - break; - } + current = node.parent(); - // Expand down first (slight forward bias for edit prediction). - if can_expand_down { - let next_row = end_row + 1; - let line_tokens = line_token_count(snapshot, next_row); - if line_tokens <= token_budget { - end_row = next_row; - token_budget = token_budget.saturating_sub(line_tokens); - } else { - break; + // Skip nodes that don't extend beyond our range. + if node_start_row >= start_row && node_end_row <= end_row { + continue; } - } - // Then expand up. 
- if can_expand_up && token_budget > 0 { - let next_row = start_row - 1; - let line_tokens = line_token_count(snapshot, next_row); - if line_tokens <= token_budget { - start_row = next_row; - token_budget = token_budget.saturating_sub(line_tokens); - } else { - break; + // Skip if same as last returned (some nodes have same span). + if last_rows == Some(rows) { + continue; } - } - } - (start_row, end_row, token_budget) + last_rows = Some(rows); + return Some(rows); + } + None + }) } /// Expands line-wise with a bias toward one direction. @@ -265,18 +279,6 @@ fn expand_linewise_biased( (start_row, end_row, remaining_tokens) } -/// Typical number of string bytes per token for the purposes of limiting model input. This is -/// intentionally low to err on the side of underestimating limits. -pub(crate) const BYTES_PER_TOKEN_GUESS: usize = 3; - -pub fn guess_token_count(bytes: usize) -> usize { - bytes / BYTES_PER_TOKEN_GUESS -} - -fn line_token_count(snapshot: &BufferSnapshot, row: u32) -> usize { - guess_token_count(snapshot.line_len(row) as usize).max(1) -} - /// Estimates token count for rows in range [start_row, end_row). fn estimate_tokens_for_rows(snapshot: &BufferSnapshot, start_row: u32, end_row: u32) -> usize { let mut tokens = 0; @@ -286,104 +288,14 @@ fn estimate_tokens_for_rows(snapshot: &BufferSnapshot, start_row: u32, end_row: tokens } -/// Returns an iterator of (start_row, end_row) for successively larger syntax nodes -/// containing the given row range. Smallest containing node first. 
-fn containing_syntax_boundaries( - snapshot: &BufferSnapshot, - start_row: u32, - end_row: u32, -) -> impl Iterator { - let range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row)); - let mut current = snapshot.syntax_ancestor(range); - let mut last_rows: Option<(u32, u32)> = None; - - std::iter::from_fn(move || { - while let Some(node) = current.take() { - let node_start_row = node.start_position().row as u32; - let node_end_row = node.end_position().row as u32; - let rows = (node_start_row, node_end_row); - - current = node.parent(); - - // Skip nodes that don't extend beyond our range. - if node_start_row >= start_row && node_end_row <= end_row { - continue; - } - - // Skip if same as last returned (some nodes have same span). - if last_rows == Some(rows) { - continue; - } - - last_rows = Some(rows); - return Some(rows); - } - None - }) -} - -/// Expands context by first trying to reach syntax boundaries, -/// then expanding line-wise only if no syntax expansion occurred. -fn expand_context_syntactically_then_linewise( - snapshot: &BufferSnapshot, - editable_range: Range, - context_token_limit: usize, -) -> Range { - let mut start_row = editable_range.start.row; - let mut end_row = editable_range.end.row; - let mut remaining_tokens = context_token_limit; - let mut did_syntax_expand = false; - - // Phase 1: Try to expand to containing syntax boundaries, picking the largest that fits. 
- for (boundary_start, boundary_end) in containing_syntax_boundaries(snapshot, start_row, end_row) - { - let tokens_for_start = if boundary_start < start_row { - estimate_tokens_for_rows(snapshot, boundary_start, start_row) - } else { - 0 - }; - let tokens_for_end = if boundary_end > end_row { - estimate_tokens_for_rows(snapshot, end_row + 1, boundary_end + 1) - } else { - 0 - }; - - let total_needed = tokens_for_start + tokens_for_end; - - if total_needed <= remaining_tokens { - if boundary_start < start_row { - start_row = boundary_start; - } - if boundary_end > end_row { - end_row = boundary_end; - } - remaining_tokens = remaining_tokens.saturating_sub(total_needed); - did_syntax_expand = true; - } else { - break; - } - } - - // Phase 2: Only expand line-wise if no syntax expansion occurred. - if !did_syntax_expand { - (start_row, end_row, _) = - expand_linewise_biased(snapshot, start_row, end_row, remaining_tokens, true); - } - - let start = Point::new(start_row, 0); - let end = Point::new(end_row, snapshot.line_len(end_row)); - start..end -} - -use language::ToOffset as _; - #[cfg(test)] mod tests { use super::*; - use gpui::{App, AppContext}; + use gpui::{App, AppContext as _}; use indoc::indoc; use language::{Buffer, rust_lang}; use util::test::{TextRangeMarker, marked_text_ranges_by}; + use zeta_prompt::compute_editable_and_context_ranges; struct TestCase { name: &'static str, @@ -400,7 +312,18 @@ mod tests { // [ ] = expected context range let test_cases = vec![ TestCase { - name: "cursor near end of function - expands to syntax boundaries", + name: "small function fits entirely in editable and context", + marked_text: indoc! {r#" + [«fn foo() { + let x = 1;ˇ + let y = 2; + }»] + "#}, + editable_token_limit: 30, + context_token_limit: 60, + }, + TestCase { + name: "cursor near end of function - editable expands to syntax boundaries", marked_text: indoc! 
{r#" [fn first() { let a = 1; @@ -413,12 +336,11 @@ mod tests { println!("{}", x + y);ˇ }»] "#}, - // 18 tokens - expands symmetrically then to syntax boundaries editable_token_limit: 18, context_token_limit: 35, }, TestCase { - name: "cursor at function start - expands to syntax boundaries", + name: "cursor at function start - editable expands to syntax boundaries", marked_text: indoc! {r#" [fn before() { « let a = 1; @@ -434,12 +356,11 @@ mod tests { let b = 2; }] "#}, - // 25 tokens - expands symmetrically then to syntax boundaries editable_token_limit: 25, context_token_limit: 50, }, TestCase { - name: "tiny budget - just lines around cursor", + name: "tiny budget - just lines around cursor, no syntax expansion", marked_text: indoc! {r#" fn outer() { [ let line1 = 1; @@ -451,22 +372,9 @@ mod tests { let line7 = 7; } "#}, - // 12 tokens (~36 bytes) = just the cursor line with tiny budget editable_token_limit: 12, context_token_limit: 24, }, - TestCase { - name: "small function fits entirely", - marked_text: indoc! {r#" - [«fn foo() { - let x = 1;ˇ - let y = 2; - }»] - "#}, - // Plenty of budget for this small function - editable_token_limit: 30, - context_token_limit: 60, - }, TestCase { name: "context extends beyond editable", marked_text: indoc! {r#" @@ -476,13 +384,11 @@ mod tests { fn fourth() { let d = 4; }» fn fifth() { let e = 5; }] "#}, - // Small editable, larger context editable_token_limit: 25, context_token_limit: 45, }, - // Tests for syntax-aware editable and context expansion TestCase { - name: "cursor in first if-statement - expands to syntax boundaries", + name: "cursor in first if-block - editable expands to syntax boundaries", marked_text: indoc! 
{r#" [«fn before() { } @@ -503,13 +409,11 @@ mod tests { fn after() { }] "#}, - // 35 tokens allows expansion to include function header and first two if blocks editable_token_limit: 35, - // 60 tokens allows context to include the whole file context_token_limit: 60, }, TestCase { - name: "cursor in middle if-statement - expands to syntax boundaries", + name: "cursor in middle if-block - editable spans surrounding blocks", marked_text: indoc! {r#" [fn before() { } @@ -530,13 +434,11 @@ mod tests { fn after() { }] "#}, - // 40 tokens allows expansion to surrounding if blocks editable_token_limit: 40, - // 60 tokens allows context to include the whole file context_token_limit: 60, }, TestCase { - name: "cursor near bottom of long function - editable expands toward syntax, context reaches function", + name: "cursor near bottom of long function - context reaches function boundary", marked_text: indoc! {r#" [fn other() { } @@ -556,11 +458,30 @@ mod tests { fn another() { }»] "#}, - // 40 tokens for editable - allows several lines plus syntax expansion editable_token_limit: 40, - // 55 tokens - enough for function but not whole file context_token_limit: 55, }, + TestCase { + name: "zero context budget - context equals editable", + marked_text: indoc! 
{r#" + fn before() { + let p = 1; + let q = 2; + [«} + + fn foo() { + let x = 1;ˇ + let y = 2; + } + »] + fn after() { + let r = 3; + let s = 4; + } + "#}, + editable_token_limit: 15, + context_token_limit: 0, + }, ]; for test_case in test_cases { @@ -580,75 +501,63 @@ mod tests { let cursor_ranges = ranges.remove(&cursor_marker).unwrap_or_default(); let expected_editable = ranges.remove(&editable_marker).unwrap_or_default(); let expected_context = ranges.remove(&context_marker).unwrap_or_default(); - assert_eq!(expected_editable.len(), 1); - assert_eq!(expected_context.len(), 1); + assert_eq!(expected_editable.len(), 1, "{}", test_case.name); + assert_eq!(expected_context.len(), 1, "{}", test_case.name); - cx.new(|cx| { + cx.new(|cx: &mut gpui::Context| { let text = text.trim_end_matches('\n'); let buffer = Buffer::local(text, cx).with_language(rust_lang(), cx); let snapshot = buffer.snapshot(); let cursor_offset = cursor_ranges[0].start; - let cursor_point = snapshot.offset_to_point(cursor_offset); - let expected_editable_start = snapshot.offset_to_point(expected_editable[0].start); - let expected_editable_end = snapshot.offset_to_point(expected_editable[0].end); - let expected_context_start = snapshot.offset_to_point(expected_context[0].start); - let expected_context_end = snapshot.offset_to_point(expected_context[0].end); - - let (actual_editable, actual_context) = - editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, - test_case.editable_token_limit, - test_case.context_token_limit, - ); - - let range_text = |start: Point, end: Point| -> String { - snapshot.text_for_range(start..end).collect() + + let (_, excerpt_offset_range, cursor_offset_in_excerpt) = + compute_cursor_excerpt(&snapshot, cursor_offset); + let excerpt_text: String = snapshot + .text_for_range(excerpt_offset_range.clone()) + .collect(); + let syntax_ranges = + compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range); + + let (actual_editable, 
actual_context) = compute_editable_and_context_ranges( + &excerpt_text, + cursor_offset_in_excerpt, + &syntax_ranges, + test_case.editable_token_limit, + test_case.context_token_limit, + ); + + let to_buffer_range = |range: Range| -> Range { + (excerpt_offset_range.start + range.start) + ..(excerpt_offset_range.start + range.end) }; - let editable_match = actual_editable.start == expected_editable_start - && actual_editable.end == expected_editable_end; - let context_match = actual_context.start == expected_context_start - && actual_context.end == expected_context_end; + let actual_editable = to_buffer_range(actual_editable); + let actual_context = to_buffer_range(actual_context); + + let expected_editable_range = expected_editable[0].clone(); + let expected_context_range = expected_context[0].clone(); + + let editable_match = actual_editable == expected_editable_range; + let context_match = actual_context == expected_context_range; if !editable_match || !context_match { + let range_text = |range: &Range| { + snapshot.text_for_range(range.clone()).collect::() + }; + println!("\n=== FAILED: {} ===", test_case.name); if !editable_match { - println!( - "\nExpected editable ({:?}..{:?}):", - expected_editable_start, expected_editable_end - ); - println!( - "---\n{}---", - range_text(expected_editable_start, expected_editable_end) - ); - println!( - "\nActual editable ({:?}..{:?}):", - actual_editable.start, actual_editable.end - ); - println!( - "---\n{}---", - range_text(actual_editable.start, actual_editable.end) - ); + println!("\nExpected editable ({:?}):", expected_editable_range); + println!("---\n{}---", range_text(&expected_editable_range)); + println!("\nActual editable ({:?}):", actual_editable); + println!("---\n{}---", range_text(&actual_editable)); } if !context_match { - println!( - "\nExpected context ({:?}..{:?}):", - expected_context_start, expected_context_end - ); - println!( - "---\n{}---", - range_text(expected_context_start, expected_context_end) 
- ); - println!( - "\nActual context ({:?}..{:?}):", - actual_context.start, actual_context.end - ); - println!( - "---\n{}---", - range_text(actual_context.start, actual_context.end) - ); + println!("\nExpected context ({:?}):", expected_context_range); + println!("---\n{}---", range_text(&expected_context_range)); + println!("\nActual context ({:?}):", actual_context); + println!("---\n{}---", range_text(&actual_context)); } panic!("Test '{}' failed - see output above", test_case.name); } diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 5c7ce045121739f341b84dd87d827878550f4048..1f692eff2c062cf703e72117c6fd39c7a4e1efbb 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -53,7 +53,6 @@ use std::sync::Arc; use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use thiserror::Error; use util::{RangeExt as _, ResultExt as _}; -use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; pub mod cursor_excerpt; pub mod example_spec; @@ -76,6 +75,7 @@ pub mod zeta; #[cfg(test)] mod edit_prediction_tests; +use crate::example_spec::ExampleSpec; use crate::license_detection::LicenseDetectionWatcher; use crate::mercury::Mercury; use crate::onboarding_modal::ZedPredictModal; @@ -498,6 +498,7 @@ impl std::ops::Deref for BufferEditPrediction<'_> { struct PendingSettledPrediction { request_id: EditPredictionId, editable_anchor_range: Range, + example: Option, enqueued_at: Instant, last_edit_at: Instant, } @@ -793,6 +794,15 @@ impl EditPredictionStore { &self.available_experiments } + pub fn active_experiment(&self) -> Option<&str> { + self.preferred_experiment.as_deref().or_else(|| { + self.shown_predictions + .iter() + .find_map(|p| p.model_version.as_ref()) + .and_then(|model_version| model_version.strip_prefix("zeta2:")) + }) + } + pub fn refresh_available_experiments(&mut self, cx: &mut Context) { let client = 
self.client.clone(); let llm_token = self.llm_token.clone(); @@ -1572,6 +1582,7 @@ impl EditPredictionStore { EDIT_PREDICTION_SETTLED_EVENT, request_id = pending_prediction.request_id.0.clone(), settled_editable_region, + example = pending_prediction.example.take(), ); return false; @@ -1600,22 +1611,25 @@ impl EditPredictionStore { edited_buffer: &Entity, edited_buffer_snapshot: &BufferSnapshot, editable_offset_range: Range, + example: Option, cx: &mut Context, ) { - let project_state = self.get_or_init_project(project, cx); + let this = &mut *self; + let project_state = this.get_or_init_project(project, cx); if let Some(buffer) = project_state .registered_buffers .get_mut(&edited_buffer.entity_id()) { let now = cx.background_executor().now(); buffer.pending_predictions.push(PendingSettledPrediction { - request_id, + request_id: request_id, editable_anchor_range: edited_buffer_snapshot .anchor_range_around(editable_offset_range), + example, enqueued_at: now, last_edit_at: now, }); - self.settled_predictions_tx.unbounded_send(now).ok(); + this.settled_predictions_tx.unbounded_send(now).ok(); } } @@ -1801,6 +1815,9 @@ impl EditPredictionStore { // Prefer predictions from buffer if project_state.current_prediction.is_some() { + log::debug!( + "edit_prediction: diagnostic refresh skipped, current prediction already exists" + ); return; } @@ -1989,31 +2006,49 @@ impl EditPredictionStore { let project_state = self.get_or_init_project(&project, cx); let pending_prediction_id = project_state.next_pending_prediction_id; project_state.next_pending_prediction_id += 1; - let last_request = *select_throttle(project_state, request_trigger); + let throttle_at_enqueue = *select_throttle(project_state, request_trigger); let task = cx.spawn(async move |this, cx| { - if let Some(timeout) = last_request.and_then(|(last_entity, last_timestamp)| { - if throttle_entity != last_entity { - return None; - } - (last_timestamp + throttle_timeout).checked_duration_since(Instant::now()) - }) { 
+ let throttle_wait = this + .update(cx, |this, cx| { + let project_state = this.get_or_init_project(&project, cx); + let throttle = *select_throttle(project_state, request_trigger); + + throttle.and_then(|(last_entity, last_timestamp)| { + if throttle_entity != last_entity { + return None; + } + (last_timestamp + throttle_timeout).checked_duration_since(Instant::now()) + }) + }) + .ok() + .flatten(); + + if let Some(timeout) = throttle_wait { cx.background_executor().timer(timeout).await; } // If this task was cancelled before the throttle timeout expired, - // do not perform a request. + // do not perform a request. Also skip if another task already + // proceeded since we were enqueued (duplicate). let mut is_cancelled = true; this.update(cx, |this, cx| { let project_state = this.get_or_init_project(&project, cx); let was_cancelled = project_state .cancelled_predictions .remove(&pending_prediction_id); - if !was_cancelled { - let new_refresh = (throttle_entity, Instant::now()); - *select_throttle(project_state, request_trigger) = Some(new_refresh); - is_cancelled = false; + if was_cancelled { + return; + } + + // Another request has been already sent since this was enqueued + if *select_throttle(project_state, request_trigger) != throttle_at_enqueue { + return; } + + let new_refresh = (throttle_entity, Instant::now()); + *select_throttle(project_state, request_trigger) = Some(new_refresh); + is_cancelled = false; }) .ok(); if is_cancelled { @@ -2205,14 +2240,16 @@ impl EditPredictionStore { && self.is_data_collection_enabled(cx) && matches!(self.edit_prediction_model, EditPredictionModel::Zeta); + let recent_paths = project_state.recent_paths.clone(); + let inputs = EditPredictionModelInput { project: project.clone(), - buffer: active_buffer.clone(), - snapshot: snapshot, + buffer: active_buffer, + snapshot, position, events, related_files, - recent_paths: project_state.recent_paths.clone(), + recent_paths, trigger, diagnostic_search_range: 
diagnostic_search_range, debug_tx, @@ -2221,21 +2258,12 @@ impl EditPredictionStore { is_open_source, }; - if can_collect_data && rand::random_ratio(1, 1000) { - if let Some(task) = capture_example( - project.clone(), - active_buffer, - position, - stored_events, - false, - cx, - ) { - task.detach(); - } - } + let capture_data = (can_collect_data && rand::random_ratio(1, 1000)).then(|| stored_events); let task = match self.edit_prediction_model { - EditPredictionModel::Zeta => zeta::request_prediction_with_zeta(self, inputs, cx), + EditPredictionModel::Zeta => { + zeta::request_prediction_with_zeta(self, inputs, capture_data, cx) + } EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx), EditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(inputs, cx), EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), @@ -2244,7 +2272,13 @@ impl EditPredictionStore { cx.spawn(async move |this, cx| { let prediction = task.await?; - if prediction.is_none() && allow_jump && has_events { + // Only fall back to diagnostics-based prediction if we got a + // the model had nothing to suggest for the buffer + if prediction.is_none() + && allow_jump + && has_events + && !matches!(trigger, PredictEditsRequestTrigger::Diagnostics) + { this.update(cx, |this, cx| { this.refresh_prediction_from_diagnostics( project, @@ -2435,49 +2469,6 @@ impl EditPredictionStore { .await } - fn handle_api_response( - this: &WeakEntity, - response: Result<(T, Option)>, - cx: &mut gpui::AsyncApp, - ) -> Result { - match response { - Ok((data, usage)) => { - if let Some(usage) = usage { - this.update(cx, |this, cx| { - this.user_store.update(cx, |user_store, cx| { - user_store.update_edit_prediction_usage(usage, cx); - }); - }) - .ok(); - } - Ok(data) - } - Err(err) => { - if err.is::() { - cx.update(|cx| { - this.update(cx, |this, _cx| { - this.update_required = true; - }) - .ok(); - - let error_message: SharedString = 
err.to_string().into(); - show_app_notification( - NotificationId::unique::(), - cx, - move |cx| { - cx.new(|cx| { - ErrorMessagePrompt::new(error_message.clone(), cx) - .with_link_button("Update Zed", "https://zed.dev/releases") - }) - }, - ); - }); - } - Err(err) - } - } - } - async fn send_api_request( build: impl Fn(http_client::http::request::Builder) -> Result>, client: Arc, @@ -2765,23 +2756,6 @@ fn merge_trailing_events_if_needed( } } -pub(crate) fn filter_redundant_excerpts( - mut related_files: Vec, - cursor_path: &Path, - cursor_row_range: Range, -) -> Vec { - for file in &mut related_files { - if file.path.as_ref() == cursor_path { - file.excerpts.retain(|excerpt| { - excerpt.row_range.start < cursor_row_range.start - || excerpt.row_range.end > cursor_row_range.end - }); - } - } - related_files.retain(|file| !file.excerpts.is_empty()); - related_files -} - #[derive(Error, Debug)] #[error( "You must update to Zed version {minimum_version} or higher to continue using edit predictions." 
diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index b34ff6fce71fe7afcaff68121510f48f6f8f98c4..ad237e6f8fb31708dbabc6e8332ce0c164877004 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -17,7 +17,10 @@ use gpui::{ http_client::{FakeHttpClient, Response}, }; use indoc::indoc; -use language::{Anchor, Buffer, CursorShape, Operation, Point, Selection, SelectionGoal}; +use language::{ + Anchor, Buffer, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity, + Operation, Point, Selection, SelectionGoal, +}; use lsp::LanguageServerId; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_matches}; @@ -25,7 +28,10 @@ use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; use std::{path::Path, sync::Arc, time::Duration}; -use util::path; +use util::{ + path, + test::{TextRangeMarker, marked_text_ranges_by}, +}; use uuid::Uuid; use zeta_prompt::ZetaPromptInput; @@ -1486,6 +1492,52 @@ async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) { cx.run_until_parked(); } +#[gpui::test] +async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + // Enqueue two refresh calls in the same synchronous frame (no yielding). 
+ // Both `cx.spawn` tasks are created before either executes, so they both + // capture the same `proceed_count_at_enqueue`. Only the first task should + // pass the deduplication gate; the second should be skipped. + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + // Let both spawned tasks run to completion (including any throttle waits). + cx.run_until_parked(); + + // Exactly one prediction request should have been sent. + let (request, respond_tx) = requests.predict.next().await.unwrap(); + respond_tx + .send(model_response(&request, SIMPLE_DIFF)) + .unwrap(); + cx.run_until_parked(); + + // No second request should be pending. + assert_no_predict_request_ready(&mut requests.predict); +} + #[gpui::test] async fn test_rejections_flushing(cx: &mut TestAppContext) { let (ep_store, mut requests) = init_test_with_fake_client(cx); @@ -1610,97 +1662,172 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { assert_eq!(reject_request.rejections[1].request_id, "retry-2"); } -// Skipped until we start including diagnostics in prompt -// #[gpui::test] -// async fn test_request_diagnostics(cx: &mut TestAppContext) { -// let (ep_store, mut req_rx) = init_test_with_fake_client(cx); -// let fs = FakeFs::new(cx.executor()); -// fs.insert_tree( -// "/root", -// json!({ -// "foo.md": "Hello!\nBye" -// }), -// ) -// .await; -// let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; - -// let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap(); -// let diagnostic = lsp::Diagnostic { -// range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)), -// severity: Some(lsp::DiagnosticSeverity::ERROR), -// message: "\"Hello\" deprecated. 
Use \"Hi\" instead".to_string(), -// ..Default::default() -// }; - -// project.update(cx, |project, cx| { -// project.lsp_store().update(cx, |lsp_store, cx| { -// // Create some diagnostics -// lsp_store -// .update_diagnostics( -// LanguageServerId(0), -// lsp::PublishDiagnosticsParams { -// uri: path_to_buffer_uri.clone(), -// diagnostics: vec![diagnostic], -// version: None, -// }, -// None, -// language::DiagnosticSourceKind::Pushed, -// &[], -// cx, -// ) -// .unwrap(); -// }); -// }); - -// let buffer = project -// .update(cx, |project, cx| { -// let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); -// project.open_buffer(path, cx) -// }) -// .await -// .unwrap(); - -// let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); -// let position = snapshot.anchor_before(language::Point::new(0, 0)); - -// let _prediction_task = ep_store.update(cx, |ep_store, cx| { -// ep_store.request_prediction(&project, &buffer, position, cx) -// }); - -// let (request, _respond_tx) = req_rx.next().await.unwrap(); - -// assert_eq!(request.diagnostic_groups.len(), 1); -// let value = serde_json::from_str::(request.diagnostic_groups[0].0.get()) -// .unwrap(); -// // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3 -// assert_eq!( -// value, -// json!({ -// "entries": [{ -// "range": { -// "start": 8, -// "end": 10 -// }, -// "diagnostic": { -// "source": null, -// "code": null, -// "code_description": null, -// "severity": 1, -// "message": "\"Hello\" deprecated. 
Use \"Hi\" instead", -// "markdown": null, -// "group_id": 0, -// "is_primary": true, -// "is_disk_based": false, -// "is_unnecessary": false, -// "source_kind": "Pushed", -// "data": null, -// "underline": true -// } -// }], -// "primary_ix": 0 -// }) -// ); -// } +#[gpui::test] +fn test_active_buffer_diagnostics_fetching(cx: &mut TestAppContext) { + let diagnostic_marker: TextRangeMarker = ('«', '»').into(); + let search_range_marker: TextRangeMarker = ('[', ']').into(); + + let (text, mut ranges) = marked_text_ranges_by( + indoc! {r#" + fn alpha() { + let «first_value» = 1; + } + + [fn beta() { + let «second_value» = 2; + let third_value = second_value + missing_symbol; + }ˇ] + + fn gamma() { + let «fourth_value» = missing_other_symbol; + } + "#}, + vec![diagnostic_marker.clone(), search_range_marker.clone()], + ); + + let diagnostic_ranges = ranges.remove(&diagnostic_marker).unwrap_or_default(); + let search_ranges = ranges.remove(&search_range_marker).unwrap_or_default(); + + let buffer = cx.new(|cx| Buffer::local(&text, cx)); + + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + let diagnostics = DiagnosticSet::new( + diagnostic_ranges + .iter() + .enumerate() + .map(|(index, range)| DiagnosticEntry { + range: snapshot.offset_to_point_utf16(range.start) + ..snapshot.offset_to_point_utf16(range.end), + diagnostic: Diagnostic { + severity: match index { + 0 => DiagnosticSeverity::WARNING, + 1 => DiagnosticSeverity::ERROR, + _ => DiagnosticSeverity::HINT, + }, + message: match index { + 0 => "first warning".to_string(), + 1 => "second error".to_string(), + _ => "third hint".to_string(), + }, + group_id: index + 1, + is_primary: true, + source_kind: language::DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }), + &snapshot, + ); + buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx); + }); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let search_range = 
snapshot.offset_to_point(search_ranges[0].start) + ..snapshot.offset_to_point(search_ranges[0].end); + + let active_buffer_diagnostics = zeta::active_buffer_diagnostics(&snapshot, search_range, 100); + + assert_eq!( + active_buffer_diagnostics, + vec![zeta_prompt::ActiveBufferDiagnostic { + severity: Some(1), + message: "second error".to_string(), + snippet: text, + snippet_buffer_row_range: 5..5, + diagnostic_range_in_snippet: 61..73, + }] + ); + + let buffer = cx.new(|cx| { + Buffer::local( + indoc! {" + one + two + three + four + five + "}, + cx, + ) + }); + + buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(); + let diagnostics = DiagnosticSet::new( + vec![ + DiagnosticEntry { + range: text::PointUtf16::new(0, 0)..text::PointUtf16::new(0, 3), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "row zero".to_string(), + group_id: 1, + is_primary: true, + source_kind: language::DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }, + DiagnosticEntry { + range: text::PointUtf16::new(2, 0)..text::PointUtf16::new(2, 5), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "row two".to_string(), + group_id: 2, + is_primary: true, + source_kind: language::DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }, + DiagnosticEntry { + range: text::PointUtf16::new(4, 0)..text::PointUtf16::new(4, 4), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::INFORMATION, + message: "row four".to_string(), + group_id: 3, + is_primary: true, + source_kind: language::DiagnosticSourceKind::Pushed, + ..Diagnostic::default() + }, + }, + ], + &snapshot, + ); + buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx); + }); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + let active_buffer_diagnostics = + zeta::active_buffer_diagnostics(&snapshot, Point::new(2, 0)..Point::new(4, 0), 100); + + assert_eq!( + active_buffer_diagnostics + .iter() + 
.map(|diagnostic| ( + diagnostic.severity, + diagnostic.message.clone(), + diagnostic.snippet.clone(), + diagnostic.snippet_buffer_row_range.clone(), + diagnostic.diagnostic_range_in_snippet.clone(), + )) + .collect::>(), + vec![ + ( + Some(2), + "row two".to_string(), + "one\ntwo\nthree\nfour\nfive\n".to_string(), + 2..2, + 8..13, + ), + ( + Some(3), + "row four".to_string(), + "one\ntwo\nthree\nfour\nfive\n".to_string(), + 4..4, + 19..23, + ), + ] + ); +} // Generate a model response that would apply the given diff to the active file. fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response { @@ -1804,9 +1931,8 @@ fn init_test_with_fake_client( let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); client.cloud_client().set_credentials(1, "test".into()); - language_model::init(client.clone(), cx); - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + language_model::init(user_store.clone(), client.clone(), cx); let ep_store = EditPredictionStore::global(&client, &user_store, cx); ( @@ -1840,11 +1966,13 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { inputs: ZetaPromptInput { events: Default::default(), related_files: Default::default(), + active_buffer_diagnostics: vec![], cursor_path: Path::new("").into(), cursor_excerpt: "".into(), cursor_offset_in_excerpt: 0, excerpt_start_row: None, excerpt_ranges: Default::default(), + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, @@ -2172,8 +2300,9 @@ async fn make_test_ep_store( }); let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx))); cx.update(|cx| { - RefreshLlmTokenListener::register(client.clone(), cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); }); let _server = FakeServer::for_client(42, 
&client, cx).await; @@ -2255,8 +2384,9 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut let client = cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); + let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx))); cx.update(|cx| { - language_model::RefreshLlmTokenListener::register(client.clone(), cx); + language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); }); let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx)); @@ -2604,8 +2734,8 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { .await .unwrap(); - let settled_events: Arc>> = - Arc::new(Mutex::new(Vec::new())); + type SettledEventRecord = (EditPredictionId, String); + let settled_events: Arc>> = Arc::new(Mutex::new(Vec::new())); ep_store.update(cx, |ep_store, cx| { ep_store.register_buffer(&buffer, &project, cx); @@ -2628,13 +2758,15 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { // Region A: first 10 lines of the buffer. 
let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0)); + ep_store.update(cx, |ep_store, cx| { ep_store.enqueue_settled_prediction( EditPredictionId("prediction-a".into()), &project, &buffer, &snapshot_a, - editable_region_a, + editable_region_a.clone(), + None, cx, ); }); @@ -2689,13 +2821,15 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0)); + ep_store.update(cx, |ep_store, cx| { ep_store.enqueue_settled_prediction( EditPredictionId("prediction-b".into()), &project, &buffer, &snapshot_b2, - editable_region_b, + editable_region_b.clone(), + None, cx, ); }); @@ -2721,7 +2855,7 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { assert_eq!( events.len(), 1, - "only prediction A should have settled, got: {events:?}" + "prediction and capture_sample for A should have settled, got: {events:?}" ); assert_eq!(events[0].0, EditPredictionId("prediction-a".into())); } @@ -2738,7 +2872,7 @@ async fn test_edit_prediction_settled(cx: &mut TestAppContext) { assert_eq!( events.len(), 2, - "both predictions should have settled, got: {events:?}" + "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}" ); assert_eq!(events[1].0, EditPredictionId("prediction-b".into())); } diff --git a/crates/edit_prediction/src/fim.rs b/crates/edit_prediction/src/fim.rs index 02053aae7154acdfa22a01a4f84d6b732a9ca696..8de58b9b2e52502519a362d9502ddc1b3cdffde4 100644 --- a/crates/edit_prediction/src/fim.rs +++ b/crates/edit_prediction/src/fim.rs @@ -6,12 +6,12 @@ use crate::{ use anyhow::{Context as _, Result, anyhow}; use gpui::{App, AppContext as _, Entity, Task}; use language::{ - Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, ToOffset, ToPoint as _, + Anchor, Buffer, BufferSnapshot, ToOffset, ToPoint as _, 
language_settings::all_language_settings, }; use settings::EditPredictionPromptFormat; use std::{path::Path, sync::Arc, time::Instant}; -use zeta_prompt::ZetaPromptInput; +use zeta_prompt::{ZetaPromptInput, compute_editable_and_context_ranges}; const FIM_CONTEXT_TOKENS: usize = 512; @@ -62,34 +62,43 @@ pub fn request_prediction( let api_key = load_open_ai_compatible_api_key_if_needed(provider, cx); let result = cx.background_spawn(async move { - let (excerpt_range, _) = cursor_excerpt::editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, + let cursor_offset = cursor_point.to_offset(&snapshot); + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset); + let cursor_excerpt: Arc = snapshot + .text_for_range(excerpt_point_range.clone()) + .collect::() + .into(); + let syntax_ranges = + cursor_excerpt::compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range); + let (editable_range, _) = compute_editable_and_context_ranges( + &cursor_excerpt, + cursor_offset_in_excerpt, + &syntax_ranges, FIM_CONTEXT_TOKENS, 0, ); - let excerpt_offset_range = excerpt_range.to_offset(&snapshot); - let cursor_offset = cursor_point.to_offset(&snapshot); let inputs = ZetaPromptInput { events, - related_files: Vec::new(), + related_files: Some(Vec::new()), + active_buffer_diagnostics: Vec::new(), cursor_offset_in_excerpt: cursor_offset - excerpt_offset_range.start, cursor_path: full_path.clone(), - excerpt_start_row: Some(excerpt_range.start.row), - cursor_excerpt: snapshot - .text_for_range(excerpt_range) - .collect::() - .into(), + excerpt_start_row: Some(excerpt_point_range.start.row), + cursor_excerpt, excerpt_ranges: Default::default(), + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, repo_url: None, }; - let prefix = inputs.cursor_excerpt[..inputs.cursor_offset_in_excerpt].to_string(); - let suffix = 
inputs.cursor_excerpt[inputs.cursor_offset_in_excerpt..].to_string(); + let editable_text = &inputs.cursor_excerpt[editable_range.clone()]; + let cursor_in_editable = cursor_offset_in_excerpt.saturating_sub(editable_range.start); + let prefix = editable_text[..cursor_in_editable].to_string(); + let suffix = editable_text[cursor_in_editable..].to_string(); let prompt = format_fim_prompt(prompt_format, &prefix, &suffix); let stop_tokens = get_fim_stop_tokens(); diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index f61219e2f71d5efbb2fb67250b58b0a5a090e9a8..0a952f0869b46f626c231e11f8a61370c50490fa 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -10,17 +10,14 @@ use gpui::{ App, AppContext as _, Entity, Global, SharedString, Task, http_client::{self, AsyncBody, HttpClient, Method}, }; -use language::{OffsetRangeExt as _, ToOffset, ToPoint as _}; +use language::{ToOffset, ToPoint as _}; use language_model::{ApiKeyState, EnvVar, env_var}; use release_channel::AppVersion; use serde::Serialize; use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant}; - -use zeta_prompt::{ExcerptRanges, ZetaPromptInput}; +use zeta_prompt::ZetaPromptInput; const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions"; -const MAX_REWRITE_TOKENS: usize = 150; -const MAX_CONTEXT_TOKENS: usize = 350; pub struct Mercury { pub api_token: Entity, @@ -64,52 +61,47 @@ impl Mercury { let active_buffer = buffer.clone(); let result = cx.background_spawn(async move { - let (editable_range, context_range) = - crate::cursor_excerpt::editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, - MAX_CONTEXT_TOKENS, - MAX_REWRITE_TOKENS, - ); + let cursor_offset = cursor_point.to_offset(&snapshot); + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + crate::cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset); - let related_files = 
crate::filter_redundant_excerpts( + let related_files = zeta_prompt::filter_redundant_excerpts( related_files, full_path.as_ref(), - context_range.start.row..context_range.end.row, + excerpt_point_range.start.row..excerpt_point_range.end.row, ); - let context_offset_range = context_range.to_offset(&snapshot); - let context_start_row = context_range.start.row; - - let editable_offset_range = editable_range.to_offset(&snapshot); + let cursor_excerpt: Arc = snapshot + .text_for_range(excerpt_point_range.clone()) + .collect::() + .into(); + let syntax_ranges = crate::cursor_excerpt::compute_syntax_ranges( + &snapshot, + cursor_offset, + &excerpt_offset_range, + ); + let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges( + &cursor_excerpt, + cursor_offset_in_excerpt, + &syntax_ranges, + ); - let editable_range_in_excerpt = (editable_offset_range.start - - context_offset_range.start) - ..(editable_offset_range.end - context_offset_range.start); - let context_range_in_excerpt = - 0..(context_offset_range.end - context_offset_range.start); + let editable_offset_range = (excerpt_offset_range.start + + excerpt_ranges.editable_350.start) + ..(excerpt_offset_range.start + excerpt_ranges.editable_350.end); let inputs = zeta_prompt::ZetaPromptInput { events, - related_files, + related_files: Some(related_files), cursor_offset_in_excerpt: cursor_point.to_offset(&snapshot) - - context_offset_range.start, + - excerpt_offset_range.start, cursor_path: full_path.clone(), - cursor_excerpt: snapshot - .text_for_range(context_range) - .collect::() - .into(), + cursor_excerpt, experiment: None, - excerpt_start_row: Some(context_start_row), - excerpt_ranges: ExcerptRanges { - editable_150: editable_range_in_excerpt.clone(), - editable_180: editable_range_in_excerpt.clone(), - editable_350: editable_range_in_excerpt.clone(), - editable_150_context_350: context_range_in_excerpt.clone(), - editable_180_context_350: context_range_in_excerpt.clone(), - editable_350_context_150: 
context_range_in_excerpt.clone(), - ..Default::default() - }, + excerpt_start_row: Some(excerpt_point_range.start.row), + excerpt_ranges, + syntax_ranges: Some(syntax_ranges), + active_buffer_diagnostics: vec![], in_open_source_repo: false, can_collect_data: false, repo_url: None, @@ -260,7 +252,7 @@ fn build_prompt(inputs: &ZetaPromptInput) -> String { &mut prompt, RECENTLY_VIEWED_SNIPPETS_START..RECENTLY_VIEWED_SNIPPETS_END, |prompt| { - for related_file in inputs.related_files.iter() { + for related_file in inputs.related_files.as_deref().unwrap_or_default().iter() { for related_excerpt in &related_file.excerpts { push_delimited( prompt, diff --git a/crates/edit_prediction/src/prediction.rs b/crates/edit_prediction/src/prediction.rs index 263409043b397e2df1ac32514a0ce76656fbefe1..0db47b0ec93b69ceebeee1989d8196642385bdd0 100644 --- a/crates/edit_prediction/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -156,12 +156,14 @@ mod tests { model_version: None, inputs: ZetaPromptInput { events: vec![], - related_files: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], cursor_path: Path::new("path.txt").into(), cursor_offset_in_excerpt: 0, cursor_excerpt: "".into(), excerpt_start_row: None, excerpt_ranges: Default::default(), + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, diff --git a/crates/edit_prediction/src/sweep_ai.rs b/crates/edit_prediction/src/sweep_ai.rs index d8ce180801aa8902bfff79044cabaae7570ed05f..99ddd9b86d238c2e56331f52f9fad51438ee1f71 100644 --- a/crates/edit_prediction/src/sweep_ai.rs +++ b/crates/edit_prediction/src/sweep_ai.rs @@ -212,7 +212,8 @@ impl SweepAi { let ep_inputs = zeta_prompt::ZetaPromptInput { events: inputs.events, - related_files: inputs.related_files.clone(), + related_files: Some(inputs.related_files.clone()), + active_buffer_diagnostics: vec![], cursor_path: full_path.clone(), cursor_excerpt: request_body.file_contents.clone().into(), 
cursor_offset_in_excerpt: request_body.cursor_position, @@ -226,6 +227,7 @@ impl SweepAi { editable_350_context_150: 0..inputs.snapshot.len(), ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 3397d31276efcc7e1d68336f87ccf3e035f51f3a..fa93e681b66cb44a554f725d4a1c6dee11f0b1f1 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -1,24 +1,31 @@ -use crate::cursor_excerpt::compute_excerpt_ranges; -use crate::prediction::EditPredictionResult; use crate::{ CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, - EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, + EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent, + ZedUpdateRequiredError, + cursor_excerpt::{self, compute_cursor_excerpt, compute_syntax_ranges}, + prediction::EditPredictionResult, }; use anyhow::Result; -use cloud_llm_client::predict_edits_v3::RawCompletionRequest; -use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason}; +use cloud_llm_client::{ + AcceptEditPredictionBody, EditPredictionRejectReason, predict_edits_v3::RawCompletionRequest, +}; use edit_prediction_types::PredictedCursorPosition; -use gpui::{App, AppContext as _, Task, prelude::*}; -use language::language_settings::all_language_settings; -use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff}; +use gpui::{App, AppContext as _, Entity, Task, WeakEntity, prelude::*}; +use language::{ + Buffer, BufferSnapshot, DiagnosticSeverity, OffsetRangeExt as _, ToOffset as _, + language_settings::all_language_settings, text_diff, +}; use release_channel::AppVersion; use settings::EditPredictionPromptFormat; -use text::{Anchor, Bias}; +use text::{Anchor, Bias, Point}; +use ui::SharedString; +use 
workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; +use zeta_prompt::{ParsedOutput, ZetaPromptInput}; use std::{env, ops::Range, path::Path, sync::Arc, time::Instant}; use zeta_prompt::{ - CURSOR_MARKER, ZetaFormat, clean_zeta2_model_output, format_zeta_prompt, get_prefill, - output_with_context_for_format, prompt_input_contains_special_tokens, + CURSOR_MARKER, ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output, + prompt_input_contains_special_tokens, stop_tokens_for_format, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; @@ -37,10 +44,12 @@ pub fn request_prediction_with_zeta( debug_tx, trigger, project, + diagnostic_search_range, can_collect_data, is_open_source, .. }: EditPredictionModelInput, + capture_data: Option>, cx: &mut Context, ) -> Task>> { let settings = &all_language_settings(None, cx).edit_predictions; @@ -85,6 +94,17 @@ pub fn request_prediction_with_zeta( .map(|organization| organization.id.clone()); let app_version = AppVersion::global(cx); + struct Prediction { + prompt_input: ZetaPromptInput, + buffer: Entity, + snapshot: BufferSnapshot, + edits: Vec<(Range, Arc)>, + cursor_position: Option, + received_response_at: Instant, + editable_range_in_buffer: Range, + model_version: Option, + } + let request_task = cx.background_spawn({ async move { let zeta_version = raw_config @@ -93,11 +113,11 @@ pub fn request_prediction_with_zeta( .unwrap_or(ZetaFormat::default()); let cursor_offset = position.to_offset(&snapshot); - let editable_range_in_excerpt: Range; let (full_context_offset_range, prompt_input) = zeta2_prompt_input( &snapshot, related_files, events, + diagnostic_search_range, excerpt_path, cursor_offset, preferred_experiment, @@ -107,7 +127,7 @@ pub fn request_prediction_with_zeta( ); if prompt_input_contains_special_tokens(&prompt_input, zeta_version) { - return Ok((None, None)); + return Err(anyhow::anyhow!("prompt contains special tokens")); } if let Some(debug_tx) = &debug_tx { @@ 
-125,19 +145,19 @@ pub fn request_prediction_with_zeta( log::trace!("Sending edit prediction request"); - let (request_id, output_text, model_version, usage) = + let (request_id, output, model_version, usage) = if let Some(custom_settings) = &custom_server_settings { let max_tokens = custom_settings.max_output_tokens * 4; match custom_settings.prompt_format { EditPredictionPromptFormat::Zeta => { let ranges = &prompt_input.excerpt_ranges; + let editable_range_in_excerpt = ranges.editable_350.clone(); let prompt = zeta1::format_zeta1_from_input( &prompt_input, - ranges.editable_350.clone(), + editable_range_in_excerpt.clone(), ranges.editable_350_context_150.clone(), ); - editable_range_in_excerpt = ranges.editable_350.clone(); let stop_tokens = vec![ EDITABLE_REGION_END_MARKER.to_string(), format!("{EDITABLE_REGION_END_MARKER}\n"), @@ -158,26 +178,27 @@ pub fn request_prediction_with_zeta( let request_id = EditPredictionId(request_id.into()); let output_text = zeta1::clean_zeta1_model_output(&response_text); + let parsed_output = output_text.map(|text| ParsedOutput { + new_editable_region: text, + range_in_excerpt: editable_range_in_excerpt, + }); - (request_id, output_text, None, None) + (request_id, parsed_output, None, None) } EditPredictionPromptFormat::Zeta2 => { let prompt = format_zeta_prompt(&prompt_input, zeta_version); let prefill = get_prefill(&prompt_input, zeta_version); let prompt = format!("{prompt}{prefill}"); - editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( - zeta_version, - &prompt_input.excerpt_ranges, - ) - .0; - let (response_text, request_id) = send_custom_server_request( provider, custom_settings, prompt, max_tokens, - vec![], + stop_tokens_for_format(zeta_version) + .iter() + .map(|token| token.to_string()) + .collect(), open_ai_compatible_api_key.clone(), &http_client, ) @@ -188,7 +209,11 @@ pub fn request_prediction_with_zeta( None } else { let output = format!("{prefill}{response_text}"); - 
Some(clean_zeta2_model_output(&output, zeta_version).to_string()) + Some(parse_zeta2_model_output( + &output, + zeta_version, + &prompt_input, + )?) }; (request_id, output_text, None, None) @@ -207,17 +232,14 @@ pub fn request_prediction_with_zeta( model: config.model_id.clone().unwrap_or_default(), prompt, temperature: None, - stop: vec![], + stop: stop_tokens_for_format(config.format) + .iter() + .map(|token| std::borrow::Cow::Borrowed(*token)) + .collect(), max_tokens: Some(2048), environment, }; - editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( - config.format, - &prompt_input.excerpt_ranges, - ) - .1; - let (mut response, usage) = EditPredictionStore::send_raw_llm_request( request, client, @@ -229,13 +251,19 @@ pub fn request_prediction_with_zeta( .await?; let request_id = EditPredictionId(response.id.clone().into()); - let output_text = response.choices.pop().map(|choice| { + let output = if let Some(choice) = response.choices.pop() { let response = &choice.text; let output = format!("{prefill}{response}"); - clean_zeta2_model_output(&output, config.format).to_string() - }); + Some(parse_zeta2_model_output( + &output, + config.format, + &prompt_input, + )?) 
+ } else { + None + }; - (request_id, output_text, None, usage) + (request_id, output, None, usage) } else { // Use V3 endpoint - server handles model/version selection and suffix stripping let (response, usage) = EditPredictionStore::send_v3_request( @@ -249,23 +277,26 @@ pub fn request_prediction_with_zeta( .await?; let request_id = EditPredictionId(response.request_id.into()); - let output_text = if response.output.is_empty() { - None - } else { - Some(response.output) - }; - editable_range_in_excerpt = response.editable_range; + let output_text = Some(response.output).filter(|s| !s.is_empty()); let model_version = response.model_version; + let parsed_output = ParsedOutput { + new_editable_region: output_text.unwrap_or_default(), + range_in_excerpt: response.editable_range, + }; - (request_id, output_text, model_version, usage) + (request_id, Some(parsed_output), model_version, usage) }; let received_response_at = Instant::now(); log::trace!("Got edit prediction response"); - let Some(mut output_text) = output_text else { - return Ok((Some((request_id, None, model_version)), usage)); + let Some(ParsedOutput { + new_editable_region: mut output_text, + range_in_excerpt: editable_range_in_excerpt, + }) = output + else { + return Ok(((request_id, None), None)); }; let editable_range_in_buffer = editable_range_in_excerpt.start @@ -276,17 +307,6 @@ pub fn request_prediction_with_zeta( .text_for_range(editable_range_in_buffer.clone()) .collect::(); - // For the hashline format, the model may return <|set|>/<|insert|> - // edit commands instead of a full replacement. Apply them against - // the original editable region to produce the full replacement text. - // This must happen before cursor marker stripping because the cursor - // marker is embedded inside edit command content. - if let Some(rewritten_output) = - output_with_context_for_format(zeta_version, &old_text, &output_text)? 
- { - output_text = rewritten_output; - } - // Client-side cursor marker processing (applies to both raw and v3 responses) let cursor_offset_in_output = output_text.find(CURSOR_MARKER); if let Some(offset) = cursor_offset_in_output { @@ -322,40 +342,37 @@ pub fn request_prediction_with_zeta( ); anyhow::Ok(( - Some(( + ( request_id, - Some(( + Some(Prediction { prompt_input, buffer, - snapshot.clone(), + snapshot: snapshot.clone(), edits, cursor_position, received_response_at, editable_range_in_buffer, - )), - model_version, - )), + model_version, + }), + ), usage, )) } }); cx.spawn(async move |this, cx| { - let Some((id, prediction, model_version)) = - EditPredictionStore::handle_api_response(&this, request_task.await, cx)? - else { - return Ok(None); - }; + let (id, prediction) = handle_api_response(&this, request_task.await, cx)?; - let Some(( - inputs, - edited_buffer, - edited_buffer_snapshot, + let Some(Prediction { + prompt_input: inputs, + buffer: edited_buffer, + snapshot: edited_buffer_snapshot, edits, cursor_position, received_response_at, editable_range_in_buffer, - )) = prediction + model_version, + }) = prediction else { return Ok(Some(EditPredictionResult { id, @@ -364,17 +381,44 @@ pub fn request_prediction_with_zeta( }; if can_collect_data { - this.update(cx, |this, cx| { - this.enqueue_settled_prediction( - id.clone(), - &project, - &edited_buffer, - &edited_buffer_snapshot, - editable_range_in_buffer, - cx, - ); + let weak_this = this.clone(); + let id = id.clone(); + let edited_buffer = edited_buffer.clone(); + let edited_buffer_snapshot = edited_buffer_snapshot.clone(); + let example_task = capture_data.and_then(|stored_events| { + cx.update(|cx| { + crate::capture_example( + project.clone(), + edited_buffer.clone(), + position, + stored_events, + false, + cx, + ) + }) + }); + cx.spawn(async move |cx| { + let example_spec = if let Some(task) = example_task { + task.await.ok() + } else { + None + }; + + weak_this + .update(cx, |this, cx| { + 
this.enqueue_settled_prediction( + id.clone(), + &project, + &edited_buffer, + &edited_buffer_snapshot, + editable_range_in_buffer, + example_spec, + cx, + ); + }) + .ok(); }) - .ok(); + .detach(); } Ok(Some( @@ -395,10 +439,93 @@ pub fn request_prediction_with_zeta( }) } +fn handle_api_response( + this: &WeakEntity, + response: Result<(T, Option)>, + cx: &mut gpui::AsyncApp, +) -> Result { + match response { + Ok((data, usage)) => { + if let Some(usage) = usage { + this.update(cx, |this, cx| { + this.user_store.update(cx, |user_store, cx| { + user_store.update_edit_prediction_usage(usage, cx); + }); + }) + .ok(); + } + Ok(data) + } + Err(err) => { + if err.is::() { + cx.update(|cx| { + this.update(cx, |this, _cx| { + this.update_required = true; + }) + .ok(); + + let error_message: SharedString = err.to_string().into(); + show_app_notification( + NotificationId::unique::(), + cx, + move |cx| { + cx.new(|cx| { + ErrorMessagePrompt::new(error_message.clone(), cx) + .with_link_button("Update Zed", "https://zed.dev/releases") + }) + }, + ); + }); + } + Err(err) + } + } +} + +pub(crate) fn active_buffer_diagnostics( + snapshot: &language::BufferSnapshot, + diagnostic_search_range: Range, + additional_context_token_count: usize, +) -> Vec { + snapshot + .diagnostics_in_range::(diagnostic_search_range, false) + .map(|entry| { + let severity = match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => Some(1), + DiagnosticSeverity::WARNING => Some(2), + DiagnosticSeverity::INFORMATION => Some(3), + DiagnosticSeverity::HINT => Some(4), + _ => None, + }; + let diagnostic_point_range = entry.range.clone(); + let snippet_point_range = cursor_excerpt::expand_context_syntactically_then_linewise( + snapshot, + diagnostic_point_range.clone(), + additional_context_token_count, + ); + let snippet = snapshot + .text_for_range(snippet_point_range.clone()) + .collect::(); + let snippet_start_offset = snippet_point_range.start.to_offset(snapshot); + let diagnostic_offset_range = 
diagnostic_point_range.to_offset(snapshot); + zeta_prompt::ActiveBufferDiagnostic { + severity, + message: entry.diagnostic.message.clone(), + snippet, + snippet_buffer_row_range: diagnostic_point_range.start.row + ..diagnostic_point_range.end.row, + diagnostic_range_in_snippet: diagnostic_offset_range.start - snippet_start_offset + ..diagnostic_offset_range.end - snippet_start_offset, + } + }) + .collect() +} + pub fn zeta2_prompt_input( snapshot: &language::BufferSnapshot, related_files: Vec, events: Vec>, + diagnostic_search_range: Range, excerpt_path: Arc, cursor_offset: usize, preferred_experiment: Option, @@ -406,39 +533,39 @@ pub fn zeta2_prompt_input( can_collect_data: bool, repo_url: Option, ) -> (Range, zeta_prompt::ZetaPromptInput) { - let cursor_point = cursor_offset.to_point(snapshot); - - let (full_context, full_context_offset_range, excerpt_ranges) = - compute_excerpt_ranges(cursor_point, snapshot); - - let related_files = crate::filter_redundant_excerpts( - related_files, - excerpt_path.as_ref(), - full_context.start.row..full_context.end.row, + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + compute_cursor_excerpt(snapshot, cursor_offset); + + let cursor_excerpt: Arc = snapshot + .text_for_range(excerpt_point_range.clone()) + .collect::() + .into(); + let syntax_ranges = compute_syntax_ranges(snapshot, cursor_offset, &excerpt_offset_range); + let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges( + &cursor_excerpt, + cursor_offset_in_excerpt, + &syntax_ranges, ); - let full_context_start_offset = full_context_offset_range.start; - let full_context_start_row = full_context.start.row; - - let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset; + let active_buffer_diagnostics = + active_buffer_diagnostics(snapshot, diagnostic_search_range, 100); let prompt_input = zeta_prompt::ZetaPromptInput { cursor_path: excerpt_path, - cursor_excerpt: snapshot - .text_for_range(full_context) - .collect::() 
- .into(), + cursor_excerpt, cursor_offset_in_excerpt, - excerpt_start_row: Some(full_context_start_row), + excerpt_start_row: Some(excerpt_point_range.start.row), events, - related_files, + related_files: Some(related_files), + active_buffer_diagnostics, excerpt_ranges, + syntax_ranges: Some(syntax_ranges), experiment: preferred_experiment, in_open_source_repo: is_open_source, can_collect_data, repo_url, }; - (full_context_offset_range, prompt_input) + (excerpt_offset_range, prompt_input) } pub(crate) fn edit_prediction_accepted( diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index f36eaf2799166d6fbd2b7b212003a1a0644b82c4..324c297ba4c75d10a24b53c7961bd35e1f42e2cd 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -95,7 +95,7 @@ pub fn zeta2_output_for_patch( cursor_offset: Option, version: ZetaFormat, ) -> Result { - let (context, editable_range, _) = resolve_cursor_region(input, version); + let (context, editable_range, _, _) = resolve_cursor_region(input, version); let mut old_editable_region = context[editable_range].to_string(); if !old_editable_region.ends_with_newline() { @@ -259,7 +259,10 @@ impl TeacherPrompt { } pub fn format_context(example: &Example) -> String { - let related_files = example.prompt_inputs.as_ref().map(|pi| &pi.related_files); + let related_files = example + .prompt_inputs + .as_ref() + .and_then(|pi| pi.related_files.as_deref()); let Some(related_files) = related_files else { return "(No context)".to_string(); }; diff --git a/crates/edit_prediction_cli/src/headless.rs b/crates/edit_prediction_cli/src/headless.rs index f78903b705a4718e31b59e56d3aa281004395d64..eb2895b06f2ea34bb96b1d16ef0bbd075b78aaf5 100644 --- a/crates/edit_prediction_cli/src/headless.rs +++ b/crates/edit_prediction_cli/src/headless.rs @@ -105,7 +105,7 @@ pub fn init(cx: &mut App) -> EpAppState { 
debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), client.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs index df458770519be5accd72f33a56893bb13c9b88a9..d9138482767b2c49bb21bf7ed7c349ec6c9af3ff 100644 --- a/crates/edit_prediction_cli/src/load_project.rs +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -7,12 +7,12 @@ use crate::{ use anyhow::{Context as _, Result}; use edit_prediction::{ EditPredictionStore, - cursor_excerpt::compute_excerpt_ranges, + cursor_excerpt::{compute_cursor_excerpt, compute_syntax_ranges}, udiff::{OpenedBuffers, refresh_worktree_entries, strip_diff_path_prefix}, }; use futures::AsyncWriteExt as _; use gpui::{AsyncApp, Entity}; -use language::{Anchor, Buffer, LanguageNotFound, ToOffset, ToPoint}; +use language::{Anchor, Buffer, LanguageNotFound, ToOffset}; use project::{Project, ProjectPath, buffer_store::BufferStoreEvent}; use std::{fs, path::PathBuf, sync::Arc}; use zeta_prompt::ZetaPromptInput; @@ -71,37 +71,41 @@ pub async fn run_load_project( let existing_related_files = example .prompt_inputs .take() - .map(|inputs| inputs.related_files) - .unwrap_or_default(); + .and_then(|inputs| inputs.related_files); let (prompt_inputs, language_name) = buffer.read_with(&cx, |buffer, _cx| { let snapshot = buffer.snapshot(); - let cursor_point = cursor_position.to_point(&snapshot); let cursor_offset = cursor_position.to_offset(&snapshot); let language_name = buffer .language() .map(|l| l.name().to_string()) .unwrap_or_else(|| "Unknown".to_string()); - let (full_context_point_range, full_context_offset_range, excerpt_ranges) = - 
compute_excerpt_ranges(cursor_point, &snapshot); + let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) = + compute_cursor_excerpt(&snapshot, cursor_offset); let cursor_excerpt: Arc = buffer - .text_for_range(full_context_offset_range.clone()) + .text_for_range(excerpt_offset_range.clone()) .collect::() .into(); - let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start; - let excerpt_start_row = Some(full_context_point_range.start.row); + let syntax_ranges = compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range); + let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges( + &cursor_excerpt, + cursor_offset_in_excerpt, + &syntax_ranges, + ); ( ZetaPromptInput { cursor_path: example.spec.cursor_path.clone(), cursor_excerpt, cursor_offset_in_excerpt, - excerpt_start_row, + excerpt_start_row: Some(excerpt_point_range.start.row), events, related_files: existing_related_files, + active_buffer_diagnostics: vec![], excerpt_ranges, + syntax_ranges: Some(syntax_ranges), in_open_source_repo: false, can_collect_data: false, experiment: None, diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 8bb4b2a8e2f50d448fc314a70e2fc94cfa2c3d71..afe25c5badcfff03babd5e951ae66839ce0f790b 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -738,6 +738,21 @@ async fn load_examples( examples.append(&mut requested_examples); } + if !captured_after_timestamps.is_empty() { + captured_after_timestamps.sort(); + + let mut captured_examples = pull_examples::fetch_captured_examples_after( + http_client.clone(), + &captured_after_timestamps, + max_rows_per_timestamp, + remaining_offset, + background_executor.clone(), + Some(MIN_CAPTURE_VERSION), + ) + .await?; + examples.append(&mut captured_examples); + } + if !settled_after_timestamps.is_empty() { settled_after_timestamps.sort(); diff --git a/crates/edit_prediction_cli/src/parse_output.rs 
b/crates/edit_prediction_cli/src/parse_output.rs index 2c066b8b32b3eaab54ad6e3b3bcb0796ff27f950..94058efd92ca4a166ba4976819963ef5d3286f5d 100644 --- a/crates/edit_prediction_cli/src/parse_output.rs +++ b/crates/edit_prediction_cli/src/parse_output.rs @@ -6,11 +6,7 @@ use crate::{ }; use anyhow::{Context as _, Result}; use edit_prediction::example_spec::encode_cursor_in_patch; -use zeta_prompt::{ - CURSOR_MARKER, ZetaFormat, clean_extracted_region_for_format, - current_region_markers_for_format, output_end_marker_for_format, - output_with_context_for_format, -}; +use zeta_prompt::{CURSOR_MARKER, ZetaFormat, parse_zeta2_model_output}; pub fn run_parse_output(example: &mut Example) -> Result<()> { example @@ -54,43 +50,23 @@ pub fn parse_prediction_output( } } -fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result { - let (current_marker, end_marker) = current_region_markers_for_format(format); - - let start = prompt.find(current_marker).with_context(|| { - format!( - "missing current marker '{}' in prompt", - current_marker.trim() - ) - })? + current_marker.len(); - - let end = prompt[start..] - .find(end_marker) - .with_context(|| format!("missing end marker '{}' in prompt", end_marker.trim()))? 
- + start; - - let region = &prompt[start..end]; - let region = region.replace(CURSOR_MARKER, ""); - Ok(clean_extracted_region_for_format(format, ®ion)) -} - fn parse_zeta2_output( example: &Example, actual_output: &str, format: ZetaFormat, ) -> Result<(String, Option)> { - let prompt = &example.prompt.as_ref().context("prompt required")?.input; let prompt_inputs = example .prompt_inputs .as_ref() .context("prompt_inputs required")?; - let old_text = extract_zeta2_current_region(prompt, format)?; + let parsed = parse_zeta2_model_output(actual_output, format, prompt_inputs)?; + let range_in_excerpt = parsed.range_in_excerpt; + + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let old_text = excerpt[range_in_excerpt.clone()].to_string(); + let mut new_text = parsed.new_editable_region; - let mut new_text = actual_output.to_string(); - if let Some(transformed) = output_with_context_for_format(format, &old_text, &new_text)? { - new_text = transformed; - } let cursor_offset = if let Some(offset) = new_text.find(CURSOR_MARKER) { new_text.replace_range(offset..offset + CURSOR_MARKER.len(), ""); Some(offset) @@ -98,14 +74,8 @@ fn parse_zeta2_output( None }; - if let Some(marker) = output_end_marker_for_format(format) { - new_text = new_text - .strip_suffix(marker) - .unwrap_or(&new_text) - .to_string(); - } - - let mut old_text_normalized = old_text.clone(); + // Normalize trailing newlines for diff generation + let mut old_text_normalized = old_text; if !new_text.is_empty() && !new_text.ends_with('\n') { new_text.push('\n'); } @@ -113,22 +83,10 @@ fn parse_zeta2_output( old_text_normalized.push('\n'); } - let old_text_trimmed = old_text.trim_end_matches('\n'); - let excerpt = prompt_inputs.cursor_excerpt.as_ref(); - let (editable_region_offset, _) = excerpt - .match_indices(old_text_trimmed) - .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset_in_excerpt)) - .with_context(|| { - format!( - "could not find editable region in content.\nLooking 
for:\n{}\n\nIn content:\n{}", - old_text_trimmed, excerpt - ) - })?; - + let editable_region_offset = range_in_excerpt.start; let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); - - // Use full context so cursor offset (relative to editable region start) aligns with diff content let editable_region_lines = old_text_normalized.lines().count() as u32; + let diff = language::unified_diff_with_context( &old_text_normalized, &new_text, @@ -157,95 +115,3 @@ fn parse_zeta2_output( Ok((formatted_diff, actual_cursor)) } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_extract_zeta2_current_region_v0113() { - let prompt = indoc::indoc! {" - <|file_sep|>src/main.rs - <|fim_prefix|> - fn main() { - <|fim_middle|>current - println!(\"hello\"); - <|fim_suffix|> - } - <|fim_middle|>updated - "}; - - let region = extract_zeta2_current_region(prompt, ZetaFormat::V0113Ordered).unwrap(); - assert_eq!(region, "println!(\"hello\");\n"); - } - - #[test] - fn test_extract_zeta2_current_region_v0112() { - let prompt = indoc::indoc! {" - <|file_sep|>src/main.rs - <|fim_prefix|> - fn main() { - <|fim_suffix|> - } - <|fim_middle|>current - println!(\"hello\"); - <|fim_middle|>updated - "}; - - let region = extract_zeta2_current_region(prompt, ZetaFormat::V0112MiddleAtEnd).unwrap(); - assert_eq!(region, "println!(\"hello\");\n"); - } - - #[test] - fn test_extract_zeta2_current_region_with_cursor_marker() { - let prompt = indoc::indoc! {" - <|file_sep|>src/main.rs - <|fim_prefix|> - fn main() { - <|fim_middle|>current - print<|user_cursor|>ln!(\"hello\"); - <|fim_suffix|> - } - <|fim_middle|>updated - "}; - - let region = extract_zeta2_current_region(prompt, ZetaFormat::V0113Ordered).unwrap(); - assert_eq!(region, "println!(\"hello\");\n"); - } - - #[test] - fn test_extract_zeta2_current_region_v0120_git_merge_markers() { - let prompt = indoc::indoc! 
{" - <|file_sep|>src/main.rs - <|fim_prefix|> - fn main() { - <|fim_suffix|> - } - <|fim_middle|><<<<<<< CURRENT - println!(\"hello\"); - ======= - "}; - - let region = - extract_zeta2_current_region(prompt, ZetaFormat::V0120GitMergeMarkers).unwrap(); - assert_eq!(region, "println!(\"hello\");\n"); - } - - #[test] - fn test_extract_zeta2_current_region_v0120_with_cursor_marker() { - let prompt = indoc::indoc! {" - <|file_sep|>src/main.rs - <|fim_prefix|> - fn main() { - <|fim_suffix|> - } - <|fim_middle|><<<<<<< CURRENT - print<|user_cursor|>ln!(\"hello\"); - ======= - "}; - - let region = - extract_zeta2_current_region(prompt, ZetaFormat::V0120GitMergeMarkers).unwrap(); - assert_eq!(region, "println!(\"hello\");\n"); - } -} diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index cccd351dcdeda0dbf059d851a44b02bc1e558654..15591ae03ccd7b0d537b437c1da2c0898e7e9446 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -565,6 +565,101 @@ pub async fn fetch_requested_examples_after( Ok(all_examples) } +pub async fn fetch_captured_examples_after( + http_client: Arc, + after_timestamps: &[String], + max_rows_per_timestamp: usize, + offset: usize, + background_executor: BackgroundExecutor, + min_capture_version: Option, +) -> Result> { + if after_timestamps.is_empty() { + return Ok(Vec::new()); + } + + let progress = Progress::global(); + + let mut all_examples = Vec::new(); + + for after_date in after_timestamps.iter() { + let step_progress_name = format!("captured>{after_date}"); + let step_progress = progress.start(Step::PullExamples, &step_progress_name); + step_progress.set_substatus("querying"); + + let min_minor_str = min_capture_version.map(|version| version.minor.to_string()); + let min_patch_str = min_capture_version.map(|version| version.patch.to_string()); + let min_minor_str_ref = min_minor_str.as_deref(); + let min_patch_str_ref = 
min_patch_str.as_deref(); + + let statement = indoc! {r#" + SELECT + settled.event_properties:request_id::string AS request_id, + settled.device_id::string AS device_id, + settled.time::string AS time, + req.event_properties:input AS input, + settled.event_properties:settled_editable_region::string AS settled_editable_region, + settled.event_properties:example AS example, + req.event_properties:zed_version::string AS zed_version + FROM events settled + INNER JOIN events req + ON settled.event_properties:request_id::string = req.event_properties:request_id::string + WHERE settled.event_type = ? + AND req.event_type = ? + AND req.event_properties:version = 'V3' + AND req.event_properties:input:can_collect_data = true + AND settled.event_properties:example IS NOT NULL + AND TYPEOF(settled.event_properties:example) != 'NULL_VALUE' + AND settled.time > TRY_TO_TIMESTAMP_NTZ(?) + AND (? IS NULL OR ( + TRY_CAST(SPLIT_PART(req.event_properties:zed_version::string, '.', 2) AS INTEGER) > ? + OR ( + TRY_CAST(SPLIT_PART(req.event_properties:zed_version::string, '.', 2) AS INTEGER) = ? + AND TRY_CAST(SPLIT_PART(SPLIT_PART(req.event_properties:zed_version::string, '.', 3), '+', 1) AS INTEGER) >= ? + ) + )) + ORDER BY settled.time ASC + LIMIT ? + OFFSET ? 
+ "#}; + + let bindings = json!({ + "1": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT }, + "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "3": { "type": "TEXT", "value": after_date }, + "4": { "type": "FIXED", "value": min_minor_str_ref }, + "5": { "type": "FIXED", "value": min_minor_str_ref }, + "6": { "type": "FIXED", "value": min_minor_str_ref }, + "7": { "type": "FIXED", "value": min_patch_str_ref }, + "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "9": { "type": "FIXED", "value": offset.to_string() } + }); + + let examples = fetch_examples_with_query( + http_client.clone(), + &step_progress, + background_executor.clone(), + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, + &[ + "request_id", + "device_id", + "time", + "input", + "settled_editable_region", + "example", + "zed_version", + ], + captured_examples_from_response, + ) + .await?; + + all_examples.extend(examples); + } + + Ok(all_examples) +} + pub async fn fetch_settled_examples_after( http_client: Arc, after_timestamps: &[String], @@ -1018,7 +1113,7 @@ fn settled_examples_from_response<'a>( } }; - let parse_json_value = |_: &str, raw: Option<&JsonValue>| -> Option { + let parse_json_value = |raw: Option<&JsonValue>| -> Option { let value = raw?; match value { JsonValue::String(s) => serde_json::from_str::(s).ok(), @@ -1030,7 +1125,7 @@ fn settled_examples_from_response<'a>( let device_id = get_string("device_id"); let time = get_string("time"); let input_raw = get_value("input"); - let input_json = parse_json_value("input", input_raw.as_ref()); + let input_json = parse_json_value(input_raw.as_ref()); let input: Option = input_json .as_ref() .and_then(|parsed| serde_json::from_value(parsed.clone()).ok()); @@ -1104,6 +1199,133 @@ fn settled_examples_from_response<'a>( Ok(Box::new(iter)) } +fn captured_examples_from_response<'a>( + response: &'a SnowflakeStatementResponse, + column_indices: &'a std::collections::HashMap, +) -> 
Result + 'a>> { + if let Some(code) = &response.code { + if code != SNOWFLAKE_SUCCESS_CODE { + anyhow::bail!( + "snowflake sql api returned error code={code} message={}", + response.message.as_deref().unwrap_or("") + ); + } + } + + let iter = response + .data + .iter() + .enumerate() + .filter_map(move |(row_index, data_row)| { + let get_value = |name: &str| -> Option { + let index = column_indices.get(name).copied()?; + let value = data_row.get(index)?; + if value.is_null() { + None + } else { + Some(value.clone()) + } + }; + + let get_string = |name: &str| -> Option { + match get_value(name)? { + JsonValue::String(s) => Some(s), + other => Some(other.to_string()), + } + }; + + let parse_json_value = |raw: Option<&JsonValue>| -> Option { + let value = raw?; + match value { + JsonValue::String(s) => serde_json::from_str::(s).ok(), + other => Some(other.clone()), + } + }; + + let request_id = get_string("request_id"); + let device_id = get_string("device_id"); + let time = get_string("time"); + let input_raw = get_value("input"); + let input_json = parse_json_value(input_raw.as_ref()); + let input: Option = input_json + .as_ref() + .and_then(|parsed| serde_json::from_value(parsed.clone()).ok()); + let example_raw = get_value("example"); + let example_json = parse_json_value(example_raw.as_ref()); + let example_spec: Option = example_json.as_ref().and_then(|parsed| { + serde_json::from_value(parsed.clone()) + .or_else(|_| { + parsed + .as_str() + .and_then(|markdown| ExampleSpec::from_markdown(markdown).ok()) + .ok_or_else(|| { + serde_json::Error::io(std::io::Error::other("not markdown")) + }) + }) + .ok() + }); + let has_example_spec = example_spec.is_some(); + let settled_editable_region = get_string("settled_editable_region"); + let zed_version = get_string("zed_version"); + + match ( + request_id.clone(), + device_id.clone(), + time.clone(), + input.clone(), + example_spec, + settled_editable_region.clone(), + ) { + ( + Some(request_id), + Some(device_id), + 
Some(time), + Some(input), + Some(example_spec), + Some(settled_editable_region), + ) => Some(build_captured_example( + request_id, + device_id, + time, + input, + example_spec, + settled_editable_region, + zed_version, + )), + _ => { + let mut missing_fields = Vec::new(); + + if request_id.is_none() { + missing_fields.push("request_id"); + } + if device_id.is_none() { + missing_fields.push("device_id"); + } + if time.is_none() { + missing_fields.push("time"); + } + if input_raw.is_none() || input_json.is_none() || input.is_none() { + missing_fields.push("input"); + } + if example_raw.is_none() || !has_example_spec { + missing_fields.push("example"); + } + if settled_editable_region.is_none() { + missing_fields.push("settled_editable_region"); + } + + log::warn!( + "skipping captured row {row_index}: [{}]", + missing_fields.join(", "), + ); + None + } + } + }); + + Ok(Box::new(iter)) +} + fn build_settled_example( request_id: String, device_id: String, @@ -1160,6 +1382,43 @@ fn build_settled_example( example } +fn build_captured_example( + request_id: String, + device_id: String, + time: String, + input: ZetaPromptInput, + mut example_spec: ExampleSpec, + settled_editable_region: String, + zed_version: Option, +) -> Example { + let expected_patch = build_output_patch( + &input.cursor_path, + input.cursor_excerpt.as_ref(), + &input.excerpt_ranges.editable_350, + settled_editable_region.as_str(), + ); + + example_spec.expected_patches = vec![expected_patch]; + example_spec.telemetry = Some(TelemetrySource { + request_id, + device_id, + time, + rejection_reason: String::new(), + was_shown: false, + }); + + Example { + spec: example_spec, + zed_version, + prompt_inputs: Some(input), + prompt: None, + predictions: Vec::new(), + score: Vec::new(), + qa: Vec::new(), + state: None, + } +} + fn rejected_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, diff --git a/crates/edit_prediction_cli/src/repair.rs 
b/crates/edit_prediction_cli/src/repair.rs index b6ad41d553dabf1e49f261f4cc745395fdb1d1f6..9d891314bc62a44e730b584cea3423df665dc381 100644 --- a/crates/edit_prediction_cli/src/repair.rs +++ b/crates/edit_prediction_cli/src/repair.rs @@ -227,16 +227,17 @@ pub fn needs_repair(example: &Example, confidence_threshold: u8) -> bool { /// Handles the `KEEP_PREVIOUS` sentinel by copying the teacher's prediction, /// and delegates normal output to `TeacherPrompt::parse`. pub fn parse(example: &Example, actual_output: &str) -> Result<(String, Option)> { - if let Some(last_codeblock) = extract_last_codeblock(actual_output) { - if last_codeblock.trim() == KEEP_PREVIOUS { - let original = example - .predictions - .first() - .context("no original prediction to keep")?; - let patch = original.actual_patch.clone().unwrap_or_default(); - let cursor = original.actual_cursor.clone(); - return Ok((patch, cursor)); - } + let last_codeblock = + extract_last_codeblock(actual_output).unwrap_or_else(|| actual_output.to_string()); + + if last_codeblock.contains(KEEP_PREVIOUS) { + let original = example + .predictions + .first() + .context("no original prediction to keep")?; + let patch = original.actual_patch.clone().unwrap_or_default(); + let cursor = original.actual_cursor.clone(); + return Ok((patch, cursor)); } TeacherPrompt::parse(example, actual_output) diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs index a5fb00b39a67a15a7afcced897b4d109f1f3406f..f02509ceb061db078d2a9a98b4322cf246b87594 100644 --- a/crates/edit_prediction_cli/src/retrieve_context.rs +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -20,18 +20,13 @@ pub async fn run_context_retrieval( example_progress: &ExampleProgress, mut cx: AsyncApp, ) -> anyhow::Result<()> { - if example.prompt_inputs.is_some() { - if example.spec.repository_url.is_empty() { - return Ok(()); - } - - if example - .prompt_inputs - .as_ref() - .is_some_and(|inputs| 
!inputs.related_files.is_empty()) - { - return Ok(()); - } + if example + .prompt_inputs + .as_ref() + .is_some_and(|inputs| inputs.related_files.is_some()) + || example.spec.repository_url.is_empty() + { + return Ok(()); } run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?; @@ -72,7 +67,7 @@ pub async fn run_context_retrieval( step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal); if let Some(prompt_inputs) = example.prompt_inputs.as_mut() { - prompt_inputs.related_files = context_files; + prompt_inputs.related_files = Some(context_files); } Ok(()) } diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index cb955dbdf7dd2375395e8c0ecd52df849e33fb38..60661cea04beae4aba4713ac86b51fab42c91979 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -668,7 +668,8 @@ mod tests { cursor_offset_in_excerpt: 0, excerpt_start_row, events, - related_files: Vec::new(), + related_files: Some(Vec::new()), + active_buffer_diagnostics: Vec::new(), excerpt_ranges: ExcerptRanges { editable_150: 0..content.len(), editable_180: 0..content.len(), @@ -678,6 +679,7 @@ mod tests { editable_350_context_150: 0..content.len(), ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, diff --git a/crates/edit_prediction_context/Cargo.toml b/crates/edit_prediction_context/Cargo.toml index e1c1aed4e35f518258edcec8acd59dd9fcac7338..3a63f16610a6b60d2e5a3d415d87698070e7b3f4 100644 --- a/crates/edit_prediction_context/Cargo.toml +++ b/crates/edit_prediction_context/Cargo.toml @@ -42,4 +42,4 @@ serde_json.workspace = true settings = {workspace= true, features = ["test-support"]} text = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } -zlog.workspace = true + diff --git 
a/crates/edit_prediction_ui/Cargo.toml b/crates/edit_prediction_ui/Cargo.toml index 05afbabd2045e9bca591b6c2edba846e95953a4f..b6b6473bafa0222a670e1c541e03d255ee0d2d5a 100644 --- a/crates/edit_prediction_ui/Cargo.toml +++ b/crates/edit_prediction_ui/Cargo.toml @@ -50,18 +50,12 @@ zed_actions.workspace = true zeta_prompt.workspace = true [dev-dependencies] -clock.workspace = true copilot = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } futures.workspace = true indoc.workspace = true -language_model.workspace = true -lsp = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -release_channel.workspace = true -semver.workspace = true -serde_json.workspace = true theme = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -zlog.workspace = true + + diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index b00a229164d480d38312ca97cac31a23010f8b69..dac4c812f8ac1377423f7044c1c250b5a5333f64 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -1195,9 +1195,13 @@ impl EditPredictionButton { if cx.is_staff() { if let Some(store) = EditPredictionStore::try_global(cx) { + store.update(cx, |store, cx| { + store.refresh_available_experiments(cx); + }); let store = store.read(cx); let experiments = store.available_experiments().to_vec(); let preferred = store.preferred_experiment().map(|s| s.to_owned()); + let active = store.active_experiment().map(|s| s.to_owned()); let preferred_for_submenu = preferred.clone(); menu = menu @@ -1219,7 +1223,8 @@ impl EditPredictionButton { }, ); for experiment in &experiments { - let is_selected = preferred.as_deref() == Some(experiment.as_str()); + let is_selected = 
active.as_deref() == Some(experiment.as_str()) + || preferred.as_deref() == Some(experiment.as_str()); let experiment_name = experiment.clone(); menu = menu.toggleable_entry( experiment.clone(), diff --git a/crates/edit_prediction_ui/src/rate_prediction_modal.rs b/crates/edit_prediction_ui/src/rate_prediction_modal.rs index d07dbe9bad72c2252ee2e33c8a014778d1331e96..1c4328d8a1d301b7cc01aa520c166bda4b40e32d 100644 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs @@ -402,7 +402,13 @@ impl RatePredictionsModal { write!(&mut formatted_inputs, "## Related files\n\n").unwrap(); - for included_file in prediction.inputs.related_files.iter() { + for included_file in prediction + .inputs + .related_files + .as_deref() + .unwrap_or_default() + .iter() + { write!( &mut formatted_inputs, "### {}\n\n", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index b200d25f6d9ca90e862091b1b999613b0f5e2723..22a9b8effbe52caa67812619d254076493210e68 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -26,6 +26,7 @@ test-support = [ "tree-sitter-rust", "tree-sitter-typescript", "tree-sitter-html", + "proptest", "unindent", ] @@ -63,6 +64,8 @@ ordered-float.workspace = true parking_lot.workspace = true pretty_assertions.workspace = true project.workspace = true +proptest = { workspace = true, optional = true } +proptest-derive = { workspace = true, optional = true } rand.workspace = true regex.workspace = true rpc.workspace = true @@ -110,11 +113,13 @@ lsp = { workspace = true, features = ["test-support"] } markdown = { workspace = true, features = ["test-support"] } multi_buffer = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } +proptest.workspace = true +proptest-derive.workspace = true release_channel.workspace = true rand.workspace = true semver.workspace = true settings = { workspace = true, features = ["test-support"] } 
-tempfile.workspace = true + text = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } tree-sitter-c.workspace = true @@ -128,7 +133,7 @@ unicode-width.workspace = true unindent.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } + zlog.workspace = true diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 00a48a9ab3d249850b9749d64267d8274e7eaa79..b11832faa3f9bb8294c6ea054a335292b1422b02 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -107,7 +107,7 @@ use project::{InlayId, lsp_store::LspFoldingRange, lsp_store::TokenType}; use serde::Deserialize; use smallvec::SmallVec; use sum_tree::{Bias, TreeMap}; -use text::{BufferId, LineIndent, Patch, ToOffset as _}; +use text::{BufferId, LineIndent, Patch}; use ui::{SharedString, px}; use unicode_segmentation::UnicodeSegmentation; use ztracing::instrument; @@ -1977,57 +1977,11 @@ impl DisplaySnapshot { /// Returned ranges are 0-based relative to `buffer_range.start`. 
pub(super) fn combined_highlights( &self, - buffer_id: BufferId, - buffer_range: Range, + multibuffer_range: Range, syntax_theme: &theme::SyntaxTheme, ) -> Vec<(Range, HighlightStyle)> { let multibuffer = self.buffer_snapshot(); - let multibuffer_range = multibuffer - .excerpts() - .find_map(|(excerpt_id, buffer, range)| { - if buffer.remote_id() != buffer_id { - return None; - } - let context_start = range.context.start.to_offset(buffer); - let context_end = range.context.end.to_offset(buffer); - if buffer_range.start < context_start || buffer_range.end > context_end { - return None; - } - let start_anchor = buffer.anchor_before(buffer_range.start); - let end_anchor = buffer.anchor_after(buffer_range.end); - let mb_range = - multibuffer.anchor_range_in_excerpt(excerpt_id, start_anchor..end_anchor)?; - Some(mb_range.start.to_offset(multibuffer)..mb_range.end.to_offset(multibuffer)) - }); - - let Some(multibuffer_range) = multibuffer_range else { - // Range is outside all excerpts (e.g. symbol name not in a - // multi-buffer excerpt). Fall back to buffer-level syntax highlights. 
- let buffer_snapshot = multibuffer.excerpts().find_map(|(_, buffer, _)| { - (buffer.remote_id() == buffer_id).then(|| buffer.clone()) - }); - let Some(buffer_snapshot) = buffer_snapshot else { - return Vec::new(); - }; - let mut highlights = Vec::new(); - let mut offset = 0usize; - for chunk in buffer_snapshot.chunks(buffer_range, true) { - let chunk_len = chunk.text.len(); - if chunk_len == 0 { - continue; - } - if let Some(style) = chunk - .syntax_highlight_id - .and_then(|id| id.style(syntax_theme)) - { - highlights.push((offset..offset + chunk_len, style)); - } - offset += chunk_len; - } - return highlights; - }; - let chunks = custom_highlights::CustomHighlightsChunks::new( multibuffer_range, true, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 2673baae84ab74b2852004320cf1d94c5ed1ed42..d45165660d92170ecc176ebd8e038b890933bd57 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1091,23 +1091,29 @@ impl BlockMap { }; let rows_before_block; - match block_placement { - BlockPlacement::Above(position) => { - rows_before_block = position - new_transforms.summary().input_rows; + let input_rows = new_transforms.summary().input_rows; + match &block_placement { + &BlockPlacement::Above(position) => { + let Some(delta) = position.checked_sub(input_rows) else { + continue; + }; + rows_before_block = delta; just_processed_folded_buffer = false; } - BlockPlacement::Near(position) | BlockPlacement::Below(position) => { + &BlockPlacement::Near(position) | &BlockPlacement::Below(position) => { if just_processed_folded_buffer { continue; } - if position + RowDelta(1) < new_transforms.summary().input_rows { + let Some(delta) = (position + RowDelta(1)).checked_sub(input_rows) else { continue; - } - rows_before_block = - (position + RowDelta(1)) - new_transforms.summary().input_rows; + }; + rows_before_block = delta; } - BlockPlacement::Replace(ref range) => { - 
rows_before_block = *range.start() - new_transforms.summary().input_rows; + BlockPlacement::Replace(range) => { + let Some(delta) = range.start().checked_sub(input_rows) else { + continue; + }; + rows_before_block = delta; summary.input_rows = WrapRow(1) + (*range.end() - *range.start()); just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. }); } diff --git a/crates/editor/src/display_map/dimensions.rs b/crates/editor/src/display_map/dimensions.rs index fd8efa6ca539d7eee8d59962ad7541d2bbc4fc4b..0bee934f8f87f1ad490cc74e60bb40bf86d8cdc8 100644 --- a/crates/editor/src/display_map/dimensions.rs +++ b/crates/editor/src/display_map/dimensions.rs @@ -41,6 +41,10 @@ macro_rules! impl_for_row_types { pub fn saturating_sub(self, other: $row_delta) -> $row { $row(self.0.saturating_sub(other.0)) } + + pub fn checked_sub(self, other: $row) -> Option<$row_delta> { + self.0.checked_sub(other.0).map($row_delta) + } } impl ::std::ops::Add for $row { diff --git a/crates/editor/src/document_symbols.rs b/crates/editor/src/document_symbols.rs index 927ef34690477ba436bf70a66b3f9f45b8864587..b73c1abbfb9bfec86093eed72082232275388faf 100644 --- a/crates/editor/src/document_symbols.rs +++ b/crates/editor/src/document_symbols.rs @@ -1,4 +1,4 @@ -use std::{cmp, ops::Range}; +use std::ops::Range; use collections::HashMap; use futures::FutureExt; @@ -6,10 +6,15 @@ use futures::future::join_all; use gpui::{App, Context, HighlightStyle, Task}; use itertools::Itertools as _; use language::language_settings::language_settings; -use language::{Buffer, BufferSnapshot, OutlineItem}; -use multi_buffer::{Anchor, MultiBufferSnapshot}; -use text::{Bias, BufferId, OffsetRangeExt as _, ToOffset as _}; +use language::{Buffer, OutlineItem}; +use multi_buffer::{ + Anchor, AnchorRangeExt as _, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, + ToOffset as _, +}; +use text::BufferId; use theme::{ActiveTheme as _, SyntaxTheme}; +use unicode_segmentation::UnicodeSegmentation as _; +use 
util::maybe; use crate::display_map::DisplaySnapshot; use crate::{Editor, LSP_REQUEST_DEBOUNCE_TIMEOUT}; @@ -77,6 +82,9 @@ impl Editor { let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?; let excerpt_id = excerpt.id(); let buffer_id = excerpt.buffer_id(); + if Some(buffer_id) != cursor.text_anchor.buffer_id { + return None; + } let buffer = self.buffer.read(cx).buffer(buffer_id)?; let buffer_snapshot = buffer.read(cx).snapshot(); let cursor_text_anchor = cursor.text_anchor; @@ -212,16 +220,13 @@ impl Editor { let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); let mut highlighted_results = results; - for (buffer_id, items) in &mut highlighted_results { - if let Some(buffer) = editor.buffer.read(cx).buffer(*buffer_id) { - let snapshot = buffer.read(cx).snapshot(); - apply_highlights( - items, - *buffer_id, - &snapshot, - &display_snapshot, - &syntax, - ); + for items in highlighted_results.values_mut() { + for item in items { + if let Some(highlights) = + highlights_from_buffer(&display_snapshot, &item, &syntax) + { + item.highlight_ranges = highlights; + } } } editor.lsp_document_symbols.extend(highlighted_results); @@ -239,34 +244,6 @@ fn lsp_symbols_enabled(buffer: &Buffer, cx: &App) -> bool { .lsp_enabled() } -/// Applies combined syntax + semantic token highlights to LSP document symbol -/// outline items that were built without highlights by the project layer. 
-fn apply_highlights( - items: &mut [OutlineItem], - buffer_id: BufferId, - buffer_snapshot: &BufferSnapshot, - display_snapshot: &DisplaySnapshot, - syntax_theme: &SyntaxTheme, -) { - for item in items { - let symbol_range = item.range.to_offset(buffer_snapshot); - let selection_start = item.source_range_for_text.start.to_offset(buffer_snapshot); - - if let Some(highlights) = highlights_from_buffer( - &item.text, - 0, - buffer_id, - buffer_snapshot, - display_snapshot, - symbol_range, - selection_start, - syntax_theme, - ) { - item.highlight_ranges = highlights; - } - } -} - /// Finds where the symbol name appears in the buffer and returns combined /// (tree-sitter + semantic token) highlights for those positions. /// @@ -275,117 +252,78 @@ fn apply_highlights( /// to word-by-word matching for cases like `impl Trait for Type` /// where the LSP name doesn't appear verbatim in the buffer. fn highlights_from_buffer( - name: &str, - name_offset_in_text: usize, - buffer_id: BufferId, - buffer_snapshot: &BufferSnapshot, display_snapshot: &DisplaySnapshot, - symbol_range: Range, - selection_start_offset: usize, + item: &OutlineItem, syntax_theme: &SyntaxTheme, ) -> Option, HighlightStyle)>> { - if name.is_empty() { + let outline_text = &item.text; + if outline_text.is_empty() { return None; } - let range_start_offset = symbol_range.start; - let range_end_offset = symbol_range.end; - - // Try to find the name verbatim in the buffer near the selection range. 
- let search_start = buffer_snapshot.clip_offset( - selection_start_offset - .saturating_sub(name.len()) - .max(range_start_offset), - Bias::Right, - ); - let search_end = buffer_snapshot.clip_offset( - cmp::min(selection_start_offset + name.len() * 2, range_end_offset), - Bias::Left, - ); - - if search_start < search_end { - let buffer_text: String = buffer_snapshot - .text_for_range(search_start..search_end) - .collect(); - if let Some(found_at) = buffer_text.find(name) { - let name_start_offset = search_start + found_at; - let name_end_offset = name_start_offset + name.len(); - let result = highlights_for_buffer_range( - name_offset_in_text, - name_start_offset..name_end_offset, - buffer_id, - display_snapshot, - syntax_theme, + let multi_buffer_snapshot = display_snapshot.buffer(); + let multi_buffer_source_range_anchors = + multi_buffer_snapshot.text_anchors_to_visible_anchors([ + item.source_range_for_text.start, + item.source_range_for_text.end, + ]); + let Some(anchor_range) = maybe!({ + Some( + (*multi_buffer_source_range_anchors.get(0)?)? 
+ ..(*multi_buffer_source_range_anchors.get(1)?)?, + ) + }) else { + return None; + }; + + let selection_point_range = anchor_range.to_point(multi_buffer_snapshot); + let mut search_start = selection_point_range.start; + search_start.column = 0; + let search_start_offset = search_start.to_offset(&multi_buffer_snapshot); + let mut search_end = selection_point_range.end; + search_end.column = multi_buffer_snapshot.line_len(MultiBufferRow(search_end.row)); + + let search_text = multi_buffer_snapshot + .text_for_range(search_start..search_end) + .collect::(); + + let mut outline_text_highlights = Vec::new(); + match search_text.find(outline_text) { + Some(start_index) => { + let multibuffer_start = search_start_offset + MultiBufferOffset(start_index); + let multibuffer_end = multibuffer_start + MultiBufferOffset(outline_text.len()); + outline_text_highlights.extend( + display_snapshot + .combined_highlights(multibuffer_start..multibuffer_end, syntax_theme), ); - if result.is_some() { - return result; - } } - } - - // Fallback: match word-by-word. Split the name on whitespace and find - // each word sequentially in the buffer's symbol range. - let range_start_offset = buffer_snapshot.clip_offset(range_start_offset, Bias::Right); - let range_end_offset = buffer_snapshot.clip_offset(range_end_offset, Bias::Left); - - let mut highlights = Vec::new(); - let mut got_any = false; - let buffer_text: String = buffer_snapshot - .text_for_range(range_start_offset..range_end_offset) - .collect(); - let mut buf_search_from = 0usize; - let mut name_search_from = 0usize; - for word in name.split_whitespace() { - let name_word_start = name[name_search_from..] 
- .find(word) - .map(|pos| name_search_from + pos) - .unwrap_or(name_search_from); - if let Some(found_in_buf) = buffer_text[buf_search_from..].find(word) { - let buf_word_start = range_start_offset + buf_search_from + found_in_buf; - let buf_word_end = buf_word_start + word.len(); - let text_cursor = name_offset_in_text + name_word_start; - if let Some(mut word_highlights) = highlights_for_buffer_range( - text_cursor, - buf_word_start..buf_word_end, - buffer_id, - display_snapshot, - syntax_theme, - ) { - got_any = true; - highlights.append(&mut word_highlights); + None => { + for (outline_text_word_start, outline_word) in outline_text.split_word_bound_indices() { + if let Some(start_index) = search_text.find(outline_word) { + let multibuffer_start = search_start_offset + MultiBufferOffset(start_index); + let multibuffer_end = multibuffer_start + MultiBufferOffset(outline_word.len()); + outline_text_highlights.extend( + display_snapshot + .combined_highlights(multibuffer_start..multibuffer_end, syntax_theme) + .into_iter() + .map(|(range_in_word, style)| { + ( + outline_text_word_start + range_in_word.start + ..outline_text_word_start + range_in_word.end, + style, + ) + }), + ); + } } - buf_search_from = buf_search_from + found_in_buf + word.len(); } - name_search_from = name_word_start + word.len(); } - got_any.then_some(highlights) -} - -/// Gets combined (tree-sitter + semantic token) highlights for a buffer byte -/// range via the editor's display snapshot, then shifts the returned ranges -/// so they start at `text_cursor_start` (the position in the outline item text). 
-fn highlights_for_buffer_range( - text_cursor_start: usize, - buffer_range: Range, - buffer_id: BufferId, - display_snapshot: &DisplaySnapshot, - syntax_theme: &SyntaxTheme, -) -> Option, HighlightStyle)>> { - let raw = display_snapshot.combined_highlights(buffer_id, buffer_range, syntax_theme); - if raw.is_empty() { - return None; + if outline_text_highlights.is_empty() { + None + } else { + Some(outline_text_highlights) } - Some( - raw.into_iter() - .map(|(range, style)| { - ( - range.start + text_cursor_start..range.end + text_cursor_start, - style, - ) - }) - .collect(), - ) } #[cfg(test)] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3b18c9a447d8fb4569bbf331f1ba8e4602a555b9..40cfb8caf01a0343cb27104d7b23a24e999e9334 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1233,6 +1233,7 @@ pub struct Editor { autoindent_mode: Option, workspace: Option<(WeakEntity, Option)>, input_enabled: bool, + expects_character_input: bool, use_modal_editing: bool, read_only: bool, leader_id: Option, @@ -2469,6 +2470,7 @@ impl Editor { collapse_matches: false, workspace: None, input_enabled: !is_minimap, + expects_character_input: !is_minimap, use_modal_editing: full_mode, read_only: is_minimap, use_autoclose: true, @@ -3365,6 +3367,10 @@ impl Editor { self.input_enabled = input_enabled; } + pub fn set_expects_character_input(&mut self, expects_character_input: bool) { + self.expects_character_input = expects_character_input; + } + pub fn set_edit_predictions_hidden_for_vim_mode( &mut self, hidden: bool, @@ -5088,6 +5094,10 @@ impl Editor { } pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = { @@ -5309,6 +5319,10 @@ impl Editor { } pub fn newline_above(&mut self, _: &NewlineAbove, 
window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffer = self.buffer.read(cx); @@ -5376,6 +5390,10 @@ impl Editor { } pub fn newline_below(&mut self, _: &NewlineBelow, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_edits: HashMap, Vec)> = HashMap::default(); @@ -7482,7 +7500,8 @@ impl Editor { let mut read_ranges = Vec::new(); for highlight in highlights { let buffer_id = cursor_buffer.read(cx).remote_id(); - for (excerpt_id, excerpt_range) in buffer.excerpts_for_buffer(buffer_id, cx) + for (excerpt_id, _, excerpt_range) in + buffer.excerpts_for_buffer(buffer_id, cx) { let start = highlight .range @@ -11665,6 +11684,43 @@ impl Editor { self.restore_hunks_in_ranges(selections, window, cx); } + /// Restores the diff hunks in the editor's selections and moves the cursor + /// to the next diff hunk. Wraps around to the beginning of the buffer if + /// not all diff hunks are expanded. 
+ pub fn restore_and_next( + &mut self, + _: &::git::RestoreAndNext, + window: &mut Window, + cx: &mut Context, + ) { + let selections = self + .selections + .all(&self.display_snapshot(cx)) + .into_iter() + .map(|selection| selection.range()) + .collect(); + + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + self.restore_hunks_in_ranges(selections, window, cx); + + let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); + let wrap_around = !all_diff_hunks_expanded; + let snapshot = self.snapshot(window, cx); + let position = self + .selections + .newest::(&snapshot.display_snapshot) + .head(); + + self.go_to_hunk_before_or_after_position( + &snapshot, + position, + Direction::Next, + wrap_around, + window, + cx, + ); + } + pub fn restore_hunks_in_ranges( &mut self, ranges: Vec>, @@ -17717,6 +17773,7 @@ impl Editor { &snapshot, selection.head(), Direction::Next, + true, window, cx, ); @@ -17727,14 +17784,15 @@ impl Editor { snapshot: &EditorSnapshot, position: Point, direction: Direction, + wrap_around: bool, window: &mut Window, cx: &mut Context, ) { let row = if direction == Direction::Next { - self.hunk_after_position(snapshot, position) + self.hunk_after_position(snapshot, position, wrap_around) .map(|hunk| hunk.row_range.start) } else { - self.hunk_before_position(snapshot, position) + self.hunk_before_position(snapshot, position, wrap_around) }; if let Some(row) = row { @@ -17752,17 +17810,23 @@ impl Editor { &mut self, snapshot: &EditorSnapshot, position: Point, + wrap_around: bool, ) -> Option { - snapshot + let result = snapshot .buffer_snapshot() .diff_hunks_in_range(position..snapshot.buffer_snapshot().max_point()) - .find(|hunk| hunk.row_range.start.0 > position.row) - .or_else(|| { + .find(|hunk| hunk.row_range.start.0 > position.row); + + if wrap_around { + result.or_else(|| { snapshot .buffer_snapshot() .diff_hunks_in_range(Point::zero()..position) .find(|hunk| hunk.row_range.end.0 < position.row) }) + } else 
{ + result + } } fn go_to_prev_hunk( @@ -17778,6 +17842,7 @@ impl Editor { &snapshot, selection.head(), Direction::Prev, + true, window, cx, ); @@ -17787,11 +17852,15 @@ impl Editor { &mut self, snapshot: &EditorSnapshot, position: Point, + wrap_around: bool, ) -> Option { - snapshot - .buffer_snapshot() - .diff_hunk_before(position) - .or_else(|| snapshot.buffer_snapshot().diff_hunk_before(Point::MAX)) + let result = snapshot.buffer_snapshot().diff_hunk_before(position); + + if wrap_around { + result.or_else(|| snapshot.buffer_snapshot().diff_hunk_before(Point::MAX)) + } else { + result + } } fn go_to_next_change( @@ -20471,7 +20540,7 @@ impl Editor { let mut all_folded_excerpt_ids = Vec::new(); for buffer_id in &ids_to_fold { let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(*buffer_id, cx); - all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _)| id)); + all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _, _)| id)); } self.display_map.update(cx, |display_map, cx| { @@ -20501,7 +20570,7 @@ impl Editor { display_map.unfold_buffers([buffer_id], cx); }); cx.emit(EditorEvent::BufferFoldToggled { - ids: unfolded_excerpts.iter().map(|&(id, _)| id).collect(), + ids: unfolded_excerpts.iter().map(|&(id, _, _)| id).collect(), folded: false, }); cx.notify(); @@ -20775,38 +20844,23 @@ impl Editor { } self.stage_or_unstage_diff_hunks(stage, ranges, cx); + + let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); + let wrap_around = !all_diff_hunks_expanded; let snapshot = self.snapshot(window, cx); let position = self .selections .newest::(&snapshot.display_snapshot) .head(); - let mut row = snapshot - .buffer_snapshot() - .diff_hunks_in_range(position..snapshot.buffer_snapshot().max_point()) - .find(|hunk| hunk.row_range.start.0 > position.row) - .map(|hunk| hunk.row_range.start); - let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded(); - // Outside of the project diff editor, 
wrap around to the beginning. - if !all_diff_hunks_expanded { - row = row.or_else(|| { - snapshot - .buffer_snapshot() - .diff_hunks_in_range(Point::zero()..position) - .find(|hunk| hunk.row_range.end.0 < position.row) - .map(|hunk| hunk.row_range.start) - }); - } - - if let Some(row) = row { - let destination = Point::new(row.0, 0); - let autoscroll = Autoscroll::center(); - - self.unfold_ranges(&[destination..destination], false, false, cx); - self.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| { - s.select_ranges([destination..destination]); - }); - } + self.go_to_hunk_before_or_after_position( + &snapshot, + position, + Direction::Next, + wrap_around, + window, + cx, + ); } pub(crate) fn do_stage_or_unstage( @@ -22888,7 +22942,7 @@ impl Editor { .snapshot(); let mut handled = false; - for (id, ExcerptRange { context, .. }) in + for (id, _, ExcerptRange { context, .. }) in self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx) { if context.start.cmp(&position, &snapshot).is_ge() @@ -28397,7 +28451,7 @@ impl EntityInputHandler for Editor { } fn accepts_text_input(&self, _window: &mut Window, _cx: &mut Context) -> bool { - self.input_enabled + self.expects_character_input } } @@ -29231,6 +29285,7 @@ fn render_diff_hunk_controls( &snapshot, position, Direction::Next, + true, window, cx, ); @@ -29266,6 +29321,7 @@ fn render_diff_hunk_controls( &snapshot, point, Direction::Prev, + true, window, cx, ); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 199cb0d3785a048f6390070d67546394bd89ff68..d3da58733dd0a24622a6dcde87f638069e206cf4 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -76,6 +76,9 @@ fn display_ranges(editor: &Editor, cx: &mut Context<'_, Editor>) -> Vec( + move |params, _| async move { + assert_eq!( + params.text_document.uri, + lsp::Uri::from_file_path(path!("/file.rs")).unwrap() + ); + Ok(Some(vec![lsp::TextEdit::new( + 
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), + ", ".to_string(), + )])) + }, + ); + + let save = editor + .update_in(cx, |editor, window, cx| { + editor.save( + SaveOptions { + format: true, + autosave: false, + }, + project.clone(), + window, + cx, + ) + }) + .unwrap(); + save.await; + + assert_eq!( + editor.update(cx, |editor, cx| editor.text(cx)), + "one, two\nthree\n" + ); +} + #[gpui::test] async fn test_redo_after_noop_format(cx: &mut TestAppContext) { init_test(cx, |settings| { @@ -33424,3 +33557,66 @@ comment */ˇ»;"#}, assert_text_with_selections(editor, indoc! {r#"let arr = [«1, 2, 3]ˇ»;"#}, cx); }); } + +#[gpui::test] +async fn test_restore_and_next(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let diff_base = r#" + one + two + three + four + five + "# + .unindent(); + + cx.set_state( + &r#" + ONE + two + ˇTHREE + four + FIVE + "# + .unindent(), + ); + cx.set_head_text(&diff_base); + + cx.update_editor(|editor, window, cx| { + editor.set_expand_all_diff_hunks(cx); + editor.restore_and_next(&Default::default(), window, cx); + }); + cx.run_until_parked(); + + cx.assert_state_with_diff( + r#" + - one + + ONE + two + three + four + - ˇfive + + FIVE + "# + .unindent(), + ); + + cx.update_editor(|editor, window, cx| { + editor.restore_and_next(&Default::default(), window, cx); + }); + cx.run_until_parked(); + + cx.assert_state_with_diff( + r#" + - one + + ONE + two + three + four + ˇfive + "# + .unindent(), + ); +} diff --git a/crates/editor/src/editor_tests/property_test.rs b/crates/editor/src/editor_tests/property_test.rs new file mode 100644 index 0000000000000000000000000000000000000000..f2d966913b4e82931cfbf115ed450415dbd0041f --- /dev/null +++ b/crates/editor/src/editor_tests/property_test.rs @@ -0,0 +1,85 @@ +use proptest::prelude::*; + +use super::*; + +#[derive(Debug, Clone, proptest_derive::Arbitrary)] +pub enum Direction { + Up, + Down, + Left, + Right, +} + 
+#[derive(Debug, Clone, proptest_derive::Arbitrary)] +pub enum TestAction { + #[proptest(weight = 4)] + Type(String), + Backspace { + #[proptest(strategy = "1usize..100")] + count: usize, + }, + Move { + #[proptest(strategy = "1usize..100")] + count: usize, + direction: Direction, + }, +} + +impl Editor { + pub fn apply_test_action( + &mut self, + action: &TestAction, + window: &mut Window, + cx: &mut Context, + ) { + match action { + TestAction::Type(text) => self.insert(&text, window, cx), + TestAction::Backspace { count } => { + for _ in 0..*count { + self.delete(&Default::default(), window, cx); + } + } + TestAction::Move { count, direction } => { + for _ in 0..*count { + match direction { + Direction::Up => self.move_up(&Default::default(), window, cx), + Direction::Down => self.move_down(&Default::default(), window, cx), + Direction::Left => self.move_left(&Default::default(), window, cx), + Direction::Right => self.move_right(&Default::default(), window, cx), + } + } + } + } + } +} + +fn test_actions() -> impl Strategy> { + proptest::collection::vec(any::(), 1..10) +} + +#[gpui::property_test(config = ProptestConfig {cases: 100, ..Default::default()})] +fn editor_property_test( + cx: &mut TestAppContext, + #[strategy = test_actions()] actions: Vec, +) { + init_test(cx, |_| {}); + + let group_interval = Duration::from_millis(1); + + let buffer = cx.new(|cx| { + let mut buf = language::Buffer::local("123456", cx); + buf.set_group_interval(group_interval); + buf + }); + + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let editor = cx.add_window(|window, cx| build_editor(buffer.clone(), window, cx)); + + editor + .update(cx, |editor, window, cx| { + for action in actions { + editor.apply_test_action(&action, window, cx); + } + }) + .unwrap(); +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7b5530c6fc36828b22f7f78a6482c1d6e04fc166..b7207fce71bc71c5bdd5962ca3328030935238ca 100644 --- a/crates/editor/src/element.rs 
+++ b/crates/editor/src/element.rs @@ -637,6 +637,7 @@ impl EditorElement { register_action(editor, window, Editor::accept_edit_prediction); register_action(editor, window, Editor::restore_file); register_action(editor, window, Editor::git_restore); + register_action(editor, window, Editor::restore_and_next); register_action(editor, window, Editor::apply_all_diff_hunks); register_action(editor, window, Editor::apply_selected_diff_hunks); register_action(editor, window, Editor::open_active_item_in_terminal); @@ -4157,6 +4158,7 @@ impl EditorElement { div() .id(block_id) + .cursor(CursorStyle::Arrow) .w_full() .h((block_height as f32) * line_height) .flex() @@ -11123,12 +11125,6 @@ impl Element for EditorElement { self.paint_mouse_listeners(layout, window, cx); self.paint_background(layout, window, cx); - if !layout.spacer_blocks.is_empty() { - window.with_element_namespace("blocks", |window| { - self.paint_spacer_blocks(layout, window, cx); - }); - } - self.paint_indent_guides(layout, window, cx); if layout.gutter_hitbox.size.width > Pixels::ZERO { @@ -11138,6 +11134,12 @@ impl Element for EditorElement { self.paint_text(layout, window, cx); + if !layout.spacer_blocks.is_empty() { + window.with_element_namespace("blocks", |window| { + self.paint_spacer_blocks(layout, window, cx); + }); + } + if layout.gutter_hitbox.size.width > Pixels::ZERO { self.paint_gutter_highlights(layout, window, cx); self.paint_gutter_indicators(layout, window, cx); diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index d4877a5f1986685bea37f243edf4ac8bbdfdf9f5..659a383d6b20129909b4c3f2d7bdbfbe5e580f4e 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -119,7 +119,7 @@ impl Editor { cx: &mut Context, ) { let hovered_link_modifier = Editor::is_cmd_or_ctrl_pressed(&modifiers, cx); - if !hovered_link_modifier || self.has_pending_selection() { + if !hovered_link_modifier || self.has_pending_selection() || 
self.mouse_cursor_hidden { self.hide_hovered_link(cx); return; } @@ -782,7 +782,7 @@ fn surrounding_filename( mod tests { use super::*; use crate::{ - DisplayPoint, + DisplayPoint, HideMouseCursorOrigin, display_map::ToDisplayPoint, editor_tests::init_test, inlays::inlay_hints::tests::{cached_hint_labels, visible_hint_labels}, @@ -1362,6 +1362,82 @@ mod tests { ); } + #[gpui::test] + async fn test_hover_preconditions(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + macro_rules! assert_no_highlight { + ($cx:expr) => { + // No highlight + $cx.update_editor(|editor, window, cx| { + assert!( + editor + .snapshot(window, cx) + .text_highlight_ranges(HighlightKey::HoveredLinkState) + .unwrap_or_default() + .1 + .is_empty() + ); + }); + }; + } + + // No link + cx.set_state(indoc! {" + Let's test a [complex](https://zed.dev/channel/) caseˇ. + "}); + assert_no_highlight!(cx); + + // No modifier + let screen_coord = cx.pixel_position(indoc! {" + Let's test a [complex](https://zed.dev/channel/ˇ) case. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::none()); + assert_no_highlight!(cx); + + // Modifier active + let screen_coord = cx.pixel_position(indoc! {" + Let's test a [complex](https://zed.dev/channeˇl/) case. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights( + HighlightKey::HoveredLinkState, + indoc! {" + Let's test a [complex](«https://zed.dev/channel/ˇ») case. + "}, + ); + + // Cursor hidden with secondary key + let screen_coord = cx.pixel_position(indoc! {" + Let's test a [complex](https://zed.dev/ˇchannel/) case. 
+ "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::none()); + cx.update_editor(|editor, _, cx| { + editor.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + }); + cx.simulate_modifiers_change(Modifiers::secondary_key()); + assert_no_highlight!(cx); + + // Cursor active again + let screen_coord = cx.pixel_position(indoc! {" + Let's test a [complex](https://ˇzed.dev/channel/) case. + "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights( + HighlightKey::HoveredLinkState, + indoc! {" + Let's test a [complex](«https://zed.dev/channel/ˇ») case. + "}, + ); + } + #[gpui::test] async fn test_urls_at_beginning_of_buffer(cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index cff98f474487b52e55ab3f53bff250de24cf2d80..4e5f8ebf2793f6807e0a9108e12c276a7ab45427 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -1165,8 +1165,8 @@ impl SplittableEditor { let lhs_ranges: Vec> = rhs_multibuffer .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx) .into_iter() - .filter(|(id, _)| rhs_excerpt_ids.contains(id)) - .map(|(_, excerpt_range)| { + .filter(|(id, _, _)| rhs_excerpt_ids.contains(id)) + .map(|(_, _, excerpt_range)| { let to_base_text = |range: Range| { let start = diff_snapshot .buffer_point_to_base_text_range( @@ -1857,6 +1857,21 @@ impl Item for SplittableEditor { fn pixel_position_of_cursor(&self, cx: &App) -> Option> { self.focused_editor().read(cx).pixel_position_of_cursor(cx) } + + fn act_as_type<'a>( + &'a self, + type_id: std::any::TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == std::any::TypeId::of::() { + Some(self_handle.clone().into()) + } else if type_id == std::any::TypeId::of::() { + Some(self.rhs_editor.clone().into()) + } else { + None + } + } } impl SearchableItem for SplittableEditor { @@ -2064,7 +2079,7 @@ impl Render for SplittableEditor 
{ #[cfg(test)] mod tests { - use std::sync::Arc; + use std::{any::TypeId, sync::Arc}; use buffer_diff::BufferDiff; use collections::{HashMap, HashSet}; @@ -2080,14 +2095,14 @@ mod tests { use settings::{DiffViewStyle, SettingsStore}; use ui::{VisualContext as _, div, px}; use util::rel_path::rel_path; - use workspace::MultiWorkspace; + use workspace::{Item, MultiWorkspace}; - use crate::SplittableEditor; use crate::display_map::{ BlockPlacement, BlockProperties, BlockStyle, Crease, FoldPlaceholder, }; use crate::inlays::Inlay; use crate::test::{editor_content_with_blocks_and_width, set_block_content_for_tests}; + use crate::{Editor, SplittableEditor}; use multi_buffer::MultiBufferOffset; async fn init_test( @@ -6025,4 +6040,17 @@ mod tests { cx.run_until_parked(); } + + #[gpui::test] + async fn test_act_as_type(cx: &mut gpui::TestAppContext) { + let (splittable_editor, cx) = init_test(cx, SoftWrap::None, DiffViewStyle::Split).await; + let editor = splittable_editor.read_with(cx, |editor, cx| { + editor.act_as_type(TypeId::of::(), &splittable_editor, cx) + }); + + assert!( + editor.is_some(), + "SplittableEditor should be able to act as Editor" + ); + } } diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index 4e9a0cb7915d8369c7989ca332a01ff12f86cefe..a621cb0dedb3f7cea512329829f7c99bc8803d41 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -429,7 +429,7 @@ pub fn init(cx: &mut App) -> Arc { let extension_host_proxy = ExtensionHostProxy::global(cx); debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(client.clone(), cx); + language_model::init(user_store.clone(), client.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git a/crates/eval_cli/.gitignore b/crates/eval_cli/.gitignore new 
file mode 100644 index 0000000000000000000000000000000000000000..083ef6e3d354cb335e59916071199149d11965be --- /dev/null +++ b/crates/eval_cli/.gitignore @@ -0,0 +1,3 @@ +**/jobs +**/*.egg-info +**/__pycache__ diff --git a/crates/eval_cli/Cargo.toml b/crates/eval_cli/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..d8f52992e2ae9512e694bb11c491fd8b60c0c947 --- /dev/null +++ b/crates/eval_cli/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "eval_cli" +version = "0.1.0" +publish.workspace = true +edition.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[[bin]] +name = "eval-cli" +path = "src/main.rs" + +[dependencies] +acp_thread.workspace = true +agent.workspace = true +agent-client-protocol.workspace = true +agent_ui.workspace = true +anyhow.workspace = true +clap.workspace = true +client.workspace = true +ctrlc = { version = "3.5", features = ["termination"] } +debug_adapter_extension.workspace = true +env_logger.workspace = true +extension.workspace = true +feature_flags.workspace = true +fs.workspace = true +futures.workspace = true +gpui.workspace = true +gpui_platform.workspace = true +gpui_tokio.workspace = true +language.workspace = true +language_extension.workspace = true +language_model.workspace = true +language_models.workspace = true +languages = { workspace = true, features = ["load-grammars"] } +node_runtime.workspace = true +paths.workspace = true +project.workspace = true +prompt_store.workspace = true +release_channel.workspace = true +reqwest_client.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +shellexpand.workspace = true +terminal_view.workspace = true +util.workspace = true +watch.workspace = true diff --git a/crates/eval_cli/Dockerfile b/crates/eval_cli/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..7b91a7adf991428670fac43ad745a6e9998c9c38 --- /dev/null +++ b/crates/eval_cli/Dockerfile @@ -0,0 +1,62 @@ 
+# Build eval-cli for Linux. +# +# Usage (from the zed repo root): +# docker build --platform linux/amd64 -f crates/eval_cli/Dockerfile -t eval-cli-builder . +# docker cp "$(docker create eval-cli-builder)":/eval-cli ./target/eval-cli +# +# Or use the helper script: +# crates/eval_cli/script/build-linux + +FROM rust:1.93.1-bookworm AS builder + +WORKDIR /app + +# Install build dependencies (subset of script/linux needed for headless GPUI). +RUN apt-get update && apt-get install -y --no-install-recommends \ + cmake \ + clang \ + g++ \ + libasound2-dev \ + libfontconfig-dev \ + libgit2-dev \ + libglib2.0-dev \ + libssl-dev \ + libwayland-dev \ + libx11-xcb-dev \ + libxkbcommon-x11-dev \ + libzstd-dev \ + libsqlite3-dev \ + build-essential \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Install wild linker for faster linking (built from source to match bookworm's glibc). +RUN cargo install --locked wild-linker --version 0.8.0 --root /usr/local + +# Download WASI SDK (needed by some dependencies). +ARG TARGETARCH +RUN mkdir -p /app/target && \ + WASI_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "arm64" || echo "x86_64") && \ + curl -L "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-${WASI_ARCH}-linux.tar.gz" \ + | tar -xz -C /app/target && \ + mv /app/target/wasi-sdk-25.0-${WASI_ARCH}-linux /app/target/wasi-sdk + +# Pre-install the toolchain specified in rust-toolchain.toml so it is cached. +RUN rustup toolchain install 1.93 --profile minimal \ + --component rustfmt --component clippy --component rust-analyzer --component rust-src \ + --target wasm32-wasip2 --target wasm32-unknown-unknown --target x86_64-unknown-linux-musl + +COPY . . 
+ +ENV CC=clang CXX=clang++ +ENV RUSTFLAGS="-C linker=clang -C link-arg=--ld-path=wild" + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/app/target \ + cargo build --release --package eval_cli && \ + cp /app/target/release/eval-cli /eval-cli && \ + strip /eval-cli + +FROM scratch +COPY --from=builder /eval-cli /eval-cli diff --git a/crates/eval_cli/Dockerfile.dockerignore b/crates/eval_cli/Dockerfile.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..419f92f9c9b6dad52f04c9ad39e031a7405f2a4b --- /dev/null +++ b/crates/eval_cli/Dockerfile.dockerignore @@ -0,0 +1,21 @@ +.git +.github +**/.gitignore +**/.gitkeep +.gitattributes +.mailmap +**/target +zed.xcworkspace +.DS_Store +compose.yml +plugins/bin +script/node_modules +styles/node_modules +crates/collab/static/styles.css +vendor/bin +assets/themes/ +**/jobs + +**/*.egg-info +**/__pycache__ +**/.venv diff --git a/crates/rich_text/LICENSE-GPL b/crates/eval_cli/LICENSE-GPL similarity index 100% rename from crates/rich_text/LICENSE-GPL rename to crates/eval_cli/LICENSE-GPL diff --git a/crates/eval_cli/README.md b/crates/eval_cli/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a9952bbf4fe1066a78acaad15bfab10d0cee098d --- /dev/null +++ b/crates/eval_cli/README.md @@ -0,0 +1,108 @@ +# eval-cli + +Headless CLI binary for running Zed's agent in evaluation/benchmark +environments. Designed to work inside containerized environments like +[Harbor](https://harborframework.com/) where the repository is already +checked out and API keys are provided via environment variables. + +Uses the same `NativeAgent` + `AcpThread` pipeline as the production Zed +editor — full agentic loop with tool calls, subagents, and retries, just +without a GUI. 
+ +## Building + +### Native (for local testing on the same OS) + +``` +cargo build --release -p eval_cli +``` + +### Cross-compile for Linux x86_64 (from macOS or other hosts) + +Harbor containers run Linux x86_64. Use the Docker-based build script: + +``` +crates/eval_cli/script/build-linux +``` + +This produces `target/eval-cli` (an x86_64 Linux ELF binary). You can +also specify a custom output path: + +``` +crates/eval_cli/script/build-linux --output ~/bin/eval-cli-linux +``` + +## Standalone usage + +``` +eval-cli \ + --workdir /testbed \ + --model anthropic/claude-sonnet-4-6-latest \ + --instruction "Fix the bug described in..." \ + --timeout 600 \ + --output-dir /logs/agent +``` + +Reads API keys from environment variables (`ANTHROPIC_API_KEY`, +`OPENAI_API_KEY`, etc.). Writes `result.json`, `thread.md`, and +`thread.json` to the output directory. + +### Exit codes + +| Code | Meaning | +| ---- | ---------------------------------- | +| 0 | Agent finished | +| 1 | Error (model/auth/runtime failure) | +| 2 | Timeout | +| 3 | Interrupted (SIGTERM/SIGINT) | + +## Harbor integration + +The `zed_eval/` directory contains a Python package that +implements Harbor's `BaseInstalledAgent` interface, allowing eval-cli to +be used with `--agent-import-path` without modifying Harbor's source code. + +### Setup + +``` +pip install -e crates/eval_cli/harbor/ +``` + +### Running with a local binary + +Build for Linux first, then pass the binary path: + +``` +crates/eval_cli/script/build-linux + +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ae binary_path=target/eval-cli \ + -m anthropic/claude-sonnet-4-6-latest +``` + +The agent uploads the binary into the container during setup — no +download URL needed during local iteration. 
+ +### Running with a download URL + +For CI or when the binary is hosted somewhere: + +``` +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak download_url=https://example.com/eval-cli \ + -m anthropic/claude-sonnet-4-6-latest +``` + +### Setting a timeout + +Pass `EVAL_CLI_TIMEOUT` via `--ae`: + +``` +harbor run -d "swebench_verified@latest" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ak binary_path=target/eval-cli \ + --ae EVAL_CLI_TIMEOUT=600 \ + -m anthropic/claude-sonnet-4-6-latest +``` diff --git a/crates/eval_cli/build.rs b/crates/eval_cli/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..0180e9036fbd049ba5a9e5b455ec1c017cd700e3 --- /dev/null +++ b/crates/eval_cli/build.rs @@ -0,0 +1,15 @@ +fn main() { + let cargo_toml = + std::fs::read_to_string("../zed/Cargo.toml").expect("Failed to read crates/zed/Cargo.toml"); + let version = cargo_toml + .lines() + .find(|line| line.starts_with("version = ")) + .expect("Version not found in crates/zed/Cargo.toml") + .split('=') + .nth(1) + .expect("Invalid version format") + .trim() + .trim_matches('"'); + println!("cargo:rerun-if-changed=../zed/Cargo.toml"); + println!("cargo:rustc-env=ZED_PKG_VERSION={}", version); +} diff --git a/crates/eval_cli/script/build-linux b/crates/eval_cli/script/build-linux new file mode 100755 index 0000000000000000000000000000000000000000..9c710668de2aa5e956efff727e6ef8eb2c5ed627 --- /dev/null +++ b/crates/eval_cli/script/build-linux @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# +# Build eval-cli for x86_64 Linux from any host (macOS, Linux, etc.) +# using Docker. The resulting binary is placed at the path printed on +# completion (default: target/eval-cli). +# +# Usage: +# crates/eval_cli/script/build-linux [--output PATH] +# +# Examples: +# crates/eval_cli/script/build-linux +# crates/eval_cli/script/build-linux --output ~/bin/eval-cli +# +# Prerequisites: Docker must be installed and running. 
+ +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" +OUTPUT="${REPO_ROOT}/target/eval-cli" + +while [[ $# -gt 0 ]]; do + case $1 in + --output) + OUTPUT="$2" + shift 2 + ;; + *) + echo "Unknown option: $1" >&2 + exit 1 + ;; + esac +done + +cd "$REPO_ROOT" + +IMAGE_TAG="eval-cli-builder" + +echo "Building eval-cli for x86_64-unknown-linux-gnu..." +echo " Repo root: $REPO_ROOT" +echo " Output: $OUTPUT" +echo "" + +docker build \ + --platform linux/amd64 \ + -f crates/eval_cli/Dockerfile \ + -t "$IMAGE_TAG" \ + . + +CONTAINER_ID=$(docker create "$IMAGE_TAG" /eval-cli) +mkdir -p "$(dirname "$OUTPUT")" +docker cp "$CONTAINER_ID":/eval-cli "$OUTPUT" +docker rm "$CONTAINER_ID" > /dev/null + +echo "" +echo "Built successfully: $OUTPUT" +echo " $(file "$OUTPUT")" diff --git a/crates/eval_cli/src/headless.rs b/crates/eval_cli/src/headless.rs new file mode 100644 index 0000000000000000000000000000000000000000..54f14ee1938d4b58bdc32acbd07eced8d8a86406 --- /dev/null +++ b/crates/eval_cli/src/headless.rs @@ -0,0 +1,131 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use client::{Client, ProxySettings, UserStore}; +use extension::ExtensionHostProxy; +use fs::RealFs; +use gpui::http_client::read_proxy_from_env; +use gpui::{App, AppContext as _, Entity}; +use gpui_tokio::Tokio; +use language::LanguageRegistry; +use language_extension::LspAccess; +use node_runtime::{NodeBinaryOptions, NodeRuntime}; +use project::project_settings::ProjectSettings; +use prompt_store::PromptBuilder; +use release_channel::{AppCommitSha, AppVersion}; +use reqwest_client::ReqwestClient; +use settings::{Settings, SettingsStore}; +use util::ResultExt as _; + +pub struct AgentCliAppState { + pub languages: Arc, + pub client: Arc, + pub user_store: Entity, + pub fs: Arc, + pub node_runtime: NodeRuntime, +} + +pub fn init(cx: &mut App) -> Arc { + let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned())); + + 
let app_version = AppVersion::load( + env!("ZED_PKG_VERSION"), + option_env!("ZED_BUILD_ID"), + app_commit_sha, + ); + + release_channel::init(app_version.clone(), cx); + gpui_tokio::init(cx); + + let settings_store = SettingsStore::new(cx, &settings::default_settings()); + cx.set_global(settings_store); + + let user_agent = format!( + "Zed Agent CLI/{} ({}; {})", + app_version, + std::env::consts::OS, + std::env::consts::ARCH + ); + let proxy_str = ProxySettings::get_global(cx).proxy.to_owned(); + let proxy_url = proxy_str + .as_ref() + .and_then(|input| input.parse().ok()) + .or_else(read_proxy_from_env); + let http = { + let _guard = Tokio::handle(cx).enter(); + ReqwestClient::proxy_and_user_agent(proxy_url, &user_agent) + .expect("could not start HTTP client") + }; + cx.set_http_client(Arc::new(http)); + + let client = Client::production(cx); + cx.set_http_client(client.http_client()); + + let git_binary_path = None; + let fs = Arc::new(RealFs::new( + git_binary_path, + cx.background_executor().clone(), + )); + + let mut languages = LanguageRegistry::new(cx.background_executor().clone()); + languages.set_language_server_download_dir(paths::languages_dir().clone()); + let languages = Arc::new(languages); + + let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + + extension::init(cx); + + let (mut node_options_tx, node_options_rx) = watch::channel(None); + cx.observe_global::(move |cx| { + let settings = &ProjectSettings::get_global(cx).node; + let options = NodeBinaryOptions { + allow_path_lookup: !settings.ignore_system_version, + allow_binary_download: true, + use_paths: settings.path.as_ref().map(|node_path| { + let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); + let npm_path = settings + .npm_path + .as_ref() + .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); + ( + node_path.clone(), + npm_path.unwrap_or_else(|| { + let base_path = PathBuf::new(); + node_path.parent().unwrap_or(&base_path).join("npm") + }), + 
) + }), + }; + node_options_tx.send(Some(options)).log_err(); + }) + .detach(); + let node_runtime = NodeRuntime::new(client.http_client(), None, node_options_rx); + + let extension_host_proxy = ExtensionHostProxy::global(cx); + debug_adapter_extension::init(extension_host_proxy.clone(), cx); + language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); + language_model::init(user_store.clone(), client.clone(), cx); + language_models::init(user_store.clone(), client.clone(), cx); + languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); + prompt_store::init(cx); + terminal_view::init(cx); + + let stdout_is_a_pty = false; + let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx); + agent_ui::init( + fs.clone(), + client.clone(), + prompt_builder, + languages.clone(), + true, + cx, + ); + + Arc::new(AgentCliAppState { + languages, + client, + user_store, + fs, + node_runtime, + }) +} diff --git a/crates/eval_cli/src/main.rs b/crates/eval_cli/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..0f8dbed7ba12cee934e7631dc7068c83db1dc293 --- /dev/null +++ b/crates/eval_cli/src/main.rs @@ -0,0 +1,550 @@ +//! Headless CLI binary for running Zed's agent in evaluation/benchmark environments. +//! +//! Designed to work inside containerized environments (like Harbor/termbench) where: +//! - The repository is already checked out at the working directory +//! - The model API key is provided via environment variables +//! - Results are written to an output directory (default: `/logs/agent/`) +//! +//! ## Usage +//! +//! ```text +//! eval-cli --workdir /testbed --model anthropic/claude-sonnet-4-6-latest \ +//! --instruction "Fix the bug described in..." --timeout 600 +//! ``` +//! +//! ## Output +//! +//! Writes to `--output-dir` (default `/logs/agent/`): +//! - `result.json` — structured result with status, timing, and token usage +//! - `thread.md` — full conversation as markdown +//! 
- `thread.json` — raw thread state as JSON +//! +//! ## Exit codes +//! +//! | Code | Meaning | +//! |------|---------| +//! | 0 | Agent finished | +//! | 1 | Error (model/auth/runtime failure) | +//! | 2 | Timeout | +//! | 3 | Interrupted (SIGTERM/SIGINT) | + +mod headless; + +use std::path::PathBuf; +use std::process; +use std::rc::Rc; +use std::str::FromStr; +use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::time::{Duration, Instant}; + +use acp_thread::AgentConnection as _; +use agent::{NativeAgent, NativeAgentConnection, Templates, ThreadStore}; +use agent_client_protocol as acp; +use anyhow::{Context, Result}; +use clap::Parser; +use feature_flags::FeatureFlagAppExt as _; + +use futures::{FutureExt, select_biased}; +use gpui::{AppContext as _, AsyncApp, Entity, UpdateGlobal}; +use language_model::{LanguageModelRegistry, SelectedModel}; +use project::Project; +use settings::SettingsStore; + +use crate::headless::AgentCliAppState; + +#[derive(Parser, Debug)] +#[command( + name = "eval-cli", + about = "Run Zed's agent headlessly in evaluation/benchmark environments" +)] +struct Args { + /// Output current environment variables as JSON to stdout. + /// Used internally by Zed's shell environment capture. + #[arg(long, hide = true)] + printenv: bool, + + /// Path to the repository working directory. Defaults to the current directory. + #[arg(long, default_value = ".")] + workdir: PathBuf, + + /// Instruction/prompt text. If omitted, read from --instruction-file or stdin. + #[arg(long)] + instruction: Option, + + /// Language model to use, in `provider/model` format. + #[arg(long, default_value = "anthropic/claude-sonnet-4-6-latest")] + model: String, + + /// Maximum wall-clock time in seconds for the agent run. + #[arg(long)] + timeout: Option, + + /// Directory for output artifacts (result.json, thread.md, thread.json). 
+ #[arg(long, default_value = "/logs/agent")] + output_dir: PathBuf, +} + +enum AgentOutcome { + Completed, + Timeout { seconds: u64 }, + Interrupted, +} + +#[derive(serde::Serialize)] +struct EvalResult { + status: String, + #[serde(skip_serializing_if = "Option::is_none")] + error: Option, + duration_secs: f64, + #[serde(skip_serializing_if = "Option::is_none")] + timeout_secs: Option, + model: String, + #[serde(skip_serializing_if = "Option::is_none")] + input_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + output_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + cache_creation_input_tokens: Option, + #[serde(skip_serializing_if = "Option::is_none")] + cache_read_input_tokens: Option, +} + +const EXIT_OK: i32 = 0; +const EXIT_ERROR: i32 = 1; +const EXIT_TIMEOUT: i32 = 2; +const EXIT_INTERRUPTED: i32 = 3; + +static TERMINATED: AtomicBool = AtomicBool::new(false); + +fn main() { + let args = Args::parse(); + + if args.printenv { + util::shell_env::print_env(); + return; + } + + env_logger::init(); + + ctrlc::set_handler(|| { + TERMINATED.store(true, Ordering::SeqCst); + }) + .expect("failed to set signal handler"); + + let instruction = read_instruction(&args).unwrap_or_else(|e| { + eprintln!("Error reading instruction: {e}"); + process::exit(EXIT_ERROR); + }); + + let workdir = args.workdir.canonicalize().unwrap_or_else(|e| { + eprintln!("Invalid --workdir {:?}: {e}", args.workdir); + process::exit(EXIT_ERROR); + }); + + let output_dir = args.output_dir.clone(); + if let Err(e) = std::fs::create_dir_all(&output_dir) { + eprintln!("Error creating output dir {}: {e}", output_dir.display()); + process::exit(EXIT_ERROR); + } + + let http_client = Arc::new(reqwest_client::ReqwestClient::new()); + let app = gpui_platform::headless().with_http_client(http_client); + + app.run(move |cx| { + let app_state = headless::init(cx); + cx.set_staff(true); + + let auth_tasks = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { 
+ registry + .providers() + .iter() + .map(|p| p.authenticate(cx)) + .collect::>() + }); + + let model_name = args.model.clone(); + let timeout = args.timeout; + + cx.spawn(async move |cx| { + futures::future::join_all(auth_tasks).await; + + let start = Instant::now(); + + let (outcome, token_usage) = run_agent( + &app_state, + &workdir, + &instruction, + &model_name, + timeout, + Some(&output_dir), + cx, + ) + .await; + + let duration = start.elapsed(); + + let (status, error, exit_code) = match &outcome { + Ok(AgentOutcome::Completed) => ("completed".to_string(), None, EXIT_OK), + Ok(AgentOutcome::Timeout { seconds }) => { + eprintln!("Timeout: agent exceeded {seconds}s time limit"); + ("timeout".to_string(), None, EXIT_TIMEOUT) + } + Ok(AgentOutcome::Interrupted) => { + eprintln!("Interrupted: received SIGTERM, saved partial output"); + ("interrupted".to_string(), None, EXIT_INTERRUPTED) + } + Err(e) => { + eprintln!("Error: {e:#}"); + ("error".to_string(), Some(format!("{e:#}")), EXIT_ERROR) + } + }; + + let result = EvalResult { + status, + error, + duration_secs: duration.as_secs_f64(), + timeout_secs: timeout, + model: model_name.clone(), + input_tokens: token_usage.as_ref().map(|u| u.input_tokens), + output_tokens: token_usage.as_ref().map(|u| u.output_tokens), + cache_creation_input_tokens: token_usage + .as_ref() + .filter(|u| u.cache_creation_input_tokens > 0) + .map(|u| u.cache_creation_input_tokens), + cache_read_input_tokens: token_usage + .as_ref() + .filter(|u| u.cache_read_input_tokens > 0) + .map(|u| u.cache_read_input_tokens), + }; + + match serde_json::to_string_pretty(&result) { + Ok(json) => { + if let Err(e) = std::fs::write(output_dir.join("result.json"), &json) { + eprintln!("Error writing result.json: {e:#}"); + } + eprintln!("[eval-cli] result: {json}"); + } + Err(e) => eprintln!("Error serializing result: {e:#}"), + } + + cx.update(|cx| cx.quit()); + process::exit(exit_code); + }) + .detach(); + }); +} + +fn read_instruction(args: &Args) 
-> Result { + let text = if let Some(text) = &args.instruction { + text.clone() + } else { + use std::io::Read; + let mut buf = String::new(); + std::io::stdin() + .read_to_string(&mut buf) + .context("reading instruction from stdin")?; + buf + }; + anyhow::ensure!(!text.trim().is_empty(), "instruction is empty"); + Ok(text) +} + +async fn run_agent( + app_state: &Arc, + workdir: &std::path::Path, + instruction: &str, + model_name: &str, + timeout: Option, + output_dir: Option<&std::path::Path>, + cx: &mut AsyncApp, +) -> (Result, Option) { + let setup_result: Result<()> = cx.update(|cx| { + let selected = SelectedModel::from_str(model_name).map_err(|e| anyhow::anyhow!("{e}"))?; + let registry = LanguageModelRegistry::global(cx); + let model = registry + .read(cx) + .available_models(cx) + .find(|m| m.id() == selected.model && m.provider_id() == selected.provider) + .ok_or_else(|| { + let available = registry + .read(cx) + .available_models(cx) + .map(|m| format!("{}/{}", m.provider_id().0, m.id().0)) + .collect::>() + .join(", "); + anyhow::anyhow!("Model {model_name} not found. 
Available: {available}") + })?; + + let supports_thinking = model.supports_thinking(); + + registry.update(cx, |registry, cx| { + registry.set_default_model( + Some(language_model::ConfiguredModel { + provider: registry + .provider(&model.provider_id()) + .context("Provider not found")?, + model, + }), + cx, + ); + anyhow::Ok(()) + })?; + + let (enable_thinking, effort) = if supports_thinking { + (true, "\"high\"") + } else { + (false, "null") + }; + let provider_id = selected.provider.0.to_string(); + let model_id = selected.model.0.to_string(); + SettingsStore::update_global(cx, |store, cx| { + let settings = format!( + r#"{{ + "agent": {{ + "tool_permissions": {{"default": "allow"}}, + "default_model": {{ + "provider": "{provider_id}", + "model": "{model_id}", + "enable_thinking": {enable_thinking}, + "effort": {effort} + }} + }}, + "autosave": "off", + "format_on_save": "off" + }}" + "# + ); + store.set_user_settings(&settings, cx).ok(); + }); + + anyhow::Ok(()) + }); + + if let Err(e) = setup_result { + return (Err(e), None); + } + + let project = cx.update(|cx| { + Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + let worktree = project.update(cx, |project, cx| project.create_worktree(workdir, true, cx)); + let worktree = match worktree.await { + Ok(w) => w, + Err(e) => return (Err(e).context("creating worktree"), None), + }; + + let scan_result = worktree.update(cx, |tree, _cx| { + tree.as_local() + .context("expected local worktree") + .map(|local| local.scan_complete()) + }); + match scan_result { + Ok(future) => future.await, + Err(e) => return (Err(e), None), + }; + + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = match NativeAgent::new( + project.clone(), + thread_store, + Templates::new(), + None, + 
app_state.fs.clone(), + cx, + ) + .await + { + Ok(a) => a, + Err(e) => return (Err(e).context("creating agent"), None), + }; + + let connection = Rc::new(NativeAgentConnection(agent.clone())); + let acp_thread = match cx + .update(|cx| connection.clone().new_session(project, workdir, cx)) + .await + { + Ok(t) => t, + Err(e) => return (Err(e).context("creating ACP session"), None), + }; + + let _subscription = cx.subscribe(&acp_thread, |acp_thread, event, cx| { + log_acp_thread_event(&acp_thread, event, cx); + }); + + let message = vec![acp::ContentBlock::Text(acp::TextContent::new( + instruction.to_string(), + ))]; + + let send_future = acp_thread.update(cx, |acp_thread: &mut acp_thread::AcpThread, cx| { + acp_thread.send(message, cx) + }); + + let timeout_future = if let Some(timeout_secs) = timeout { + futures::future::Either::Left( + cx.background_executor() + .timer(Duration::from_secs(timeout_secs)), + ) + } else { + futures::future::Either::Right(futures::future::pending::<()>()) + }; + + let sigterm_future = { + let executor = cx.background_executor().clone(); + async move { + while !TERMINATED.load(Ordering::Relaxed) { + executor.timer(Duration::from_millis(100)).await; + } + } + }; + + let outcome = select_biased! 
{ + result = send_future.fuse() => match result { + Ok(Some(response)) => { + eprintln!("[eval-cli] stopped: {:?}", response.stop_reason); + if response.stop_reason == acp::StopReason::MaxTokens { + Err(anyhow::anyhow!("Model hit maximum token limit")) + } else { + Ok(AgentOutcome::Completed) + } + } + Ok(None) => { + eprintln!("[eval-cli] completed (no response)"); + Ok(AgentOutcome::Completed) + } + Err(e) => Err(e).context("agent run failed"), + }, + _ = sigterm_future.fuse() => { + eprintln!("[eval-cli] received SIGTERM, cancelling..."); + acp_thread.update(cx, |t: &mut acp_thread::AcpThread, cx| t.cancel(cx)).await; + Ok(AgentOutcome::Interrupted) + }, + _ = timeout_future.fuse() => { + acp_thread.update(cx, |t: &mut acp_thread::AcpThread, cx| t.cancel(cx)).await; + Ok(AgentOutcome::Timeout { seconds: timeout.unwrap_or(0) }) + } + }; + + let thread = cx.update(|cx| { + let session_id = acp_thread.read(cx).session_id().clone(); + connection.thread(&session_id, cx) + }); + + let cumulative_usage = if let Some(thread) = &thread { + let db_thread = thread.read_with(cx, |thread, cx| thread.to_db(cx)); + let db_thread = db_thread.await; + let usage = db_thread.cumulative_token_usage; + if usage.input_tokens > 0 || usage.output_tokens > 0 { + Some(usage) + } else { + None + } + } else { + None + }; + + let acp_usage = cx.update(|cx| { + acp_thread + .read(cx) + .token_usage() + .map(|usage| language_model::TokenUsage { + input_tokens: usage.input_tokens, + output_tokens: usage.output_tokens, + ..Default::default() + }) + }); + + let final_usage = cumulative_usage.or(acp_usage); + + if let (Some(thread), Some(dir)) = (&thread, output_dir) { + let markdown = thread.read_with(cx, |thread, _cx| thread.to_markdown()); + if let Err(e) = std::fs::write(dir.join("thread.md"), markdown) { + eprintln!("Error writing thread.md: {e:#}"); + } + + let db_thread = thread.read_with(cx, |thread, cx| thread.to_db(cx)); + let db_thread = db_thread.await; + match 
serde_json::to_string_pretty(&db_thread) { + Ok(json) => { + if let Err(e) = std::fs::write(dir.join("thread.json"), json) { + eprintln!("Error writing thread.json: {e:#}"); + } + } + Err(e) => eprintln!("Error serializing thread.json: {e:#}"), + } + } + + (outcome, final_usage) +} + +fn log_acp_thread_event( + acp_thread: &Entity, + event: &acp_thread::AcpThreadEvent, + cx: &mut gpui::App, +) { + match event { + acp_thread::AcpThreadEvent::NewEntry => { + let entries = acp_thread.read(cx).entries(); + if let Some(acp_thread::AgentThreadEntry::AssistantMessage(message)) = entries.last() { + for chunk in &message.chunks { + if let acp_thread::AssistantMessageChunk::Message { block } = chunk { + if let acp_thread::ContentBlock::Markdown { markdown } = block { + let text = markdown.read(cx).source().to_string(); + if !text.is_empty() { + eprint!("{text}"); + } + } + } + } + } + } + acp_thread::AcpThreadEvent::EntryUpdated(index) => { + let entries = acp_thread.read(cx).entries(); + if let Some(acp_thread::AgentThreadEntry::ToolCall(tool_call)) = entries.get(*index) { + if let Some(name) = &tool_call.tool_name { + match &tool_call.status { + acp_thread::ToolCallStatus::Completed => { + eprintln!("[tool] {name} ✓"); + } + acp_thread::ToolCallStatus::Failed => { + eprintln!("[tool] {name} ✗"); + } + acp_thread::ToolCallStatus::Rejected => { + eprintln!("[tool] {name} rejected"); + } + acp_thread::ToolCallStatus::Canceled => { + eprintln!("[tool] {name} canceled"); + } + _ => {} + } + } + } + } + acp_thread::AcpThreadEvent::Stopped(reason) => { + eprintln!("\n[eval-cli] stopped: {reason:?}"); + } + acp_thread::AcpThreadEvent::Error => { + eprintln!("[eval-cli] error event"); + } + acp_thread::AcpThreadEvent::Retry(status) => { + eprintln!("[eval-cli] retry: {status:?}"); + } + acp_thread::AcpThreadEvent::SubagentSpawned(session_id) => { + eprintln!("[eval-cli] subagent spawned: {session_id}"); + } + _ => {} + } +} diff --git a/crates/eval_cli/zed_eval/__init__.py 
b/crates/eval_cli/zed_eval/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8cf07a06883a70660eb4bb3ca5a20ae304e6871b --- /dev/null +++ b/crates/eval_cli/zed_eval/__init__.py @@ -0,0 +1,3 @@ +from zed_eval.agent import ZedAgent + +__all__ = ["ZedAgent"] diff --git a/crates/eval_cli/zed_eval/agent.py b/crates/eval_cli/zed_eval/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..6214ff18d784dd9620f404a00ba1b48ce96b5707 --- /dev/null +++ b/crates/eval_cli/zed_eval/agent.py @@ -0,0 +1,161 @@ +"""Harbor agent wrapper for Zed's eval-cli binary. + +Usage: + # Build eval-cli locally first: + cargo build --release -p eval_cli + + # Run via Harbor with a local binary: + harbor run -d "dataset@version" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ae binary_path=/path/to/target/release/eval-cli \ + --agent-model anthropic/claude-sonnet-4-6-latest + + # Or with a download URL (for CI): + harbor run -d "dataset@version" \ + --agent-import-path zed_eval.agent:ZedAgent \ + --ae download_url=https://example.com/eval-cli \ + --agent-model anthropic/claude-sonnet-4-6-latest +""" + +import json +import os +import shlex +from pathlib import Path + +from harbor.agents.installed.base import BaseInstalledAgent, ExecInput +from harbor.environments.base import BaseEnvironment +from harbor.models.agent.context import AgentContext + + +class ZedAgent(BaseInstalledAgent): + """Runs Zed's headless AI agent (eval-cli) to solve tasks. + + The eval-cli binary boots a headless GPUI application and uses the same + NativeAgent + AcpThread pipeline as the production Zed editor, driving + the full agentic loop (tool calls, subagents, retries) without a GUI. 
+ """ + + def __init__( + self, + logs_dir: Path, + binary_path: str | None = None, + download_url: str | None = None, + *args, + **kwargs, + ): + super().__init__(logs_dir, *args, **kwargs) + self._binary_path = binary_path + self._download_url = download_url or os.environ.get("EVAL_CLI_DOWNLOAD_URL") + + @staticmethod + def name() -> str: + return "zed" + + @property + def _install_agent_template_path(self) -> Path: + return Path(__file__).parent / "install.sh.j2" + + async def setup(self, environment: BaseEnvironment) -> None: + await environment.exec(command="mkdir -p /installed-agent") + + if self._binary_path: + binary = Path(self._binary_path) + if not binary.exists(): + raise FileNotFoundError( + f"eval-cli binary not found at {binary}. " + "Build it with: cargo build --release -p eval_cli" + ) + await environment.upload_file( + source_path=binary, + target_path="/usr/local/bin/eval-cli", + ) + await environment.exec(command="chmod +x /usr/local/bin/eval-cli") + + await super().setup(environment) + + @property + def _template_variables(self) -> dict[str, str]: + variables = super()._template_variables + if self._binary_path: + variables["binary_uploaded"] = "true" + if self._download_url: + variables["download_url"] = self._download_url + return variables + + def populate_context_post_run(self, context: AgentContext) -> None: + result_data = None + for json_file in self.logs_dir.rglob("result.json"): + try: + result_data = json.loads(json_file.read_text()) + break + except (json.JSONDecodeError, OSError): + continue + + if result_data is None: + self.logger.warning("Could not find or parse result.json from eval-cli") + return + + if result_data.get("input_tokens") is not None: + context.n_input_tokens = result_data["input_tokens"] + if result_data.get("output_tokens") is not None: + context.n_output_tokens = result_data["output_tokens"] + if result_data.get("cache_read_input_tokens") is not None: + context.n_cache_tokens = 
result_data["cache_read_input_tokens"] + + context.metadata = { + "status": result_data.get("status"), + "duration_secs": result_data.get("duration_secs"), + "model": result_data.get("model"), + } + + def _get_api_env(self) -> dict[str, str]: + env: dict[str, str] = {} + if not self.model_name or "/" not in self.model_name: + return env + + provider = self.model_name.split("/", 1)[0] + provider_env_map = { + "anthropic": "ANTHROPIC_API_KEY", + "openai": "OPENAI_API_KEY", + "google": "GEMINI_API_KEY", + "gemini": "GEMINI_API_KEY", + "deepseek": "DEEPSEEK_API_KEY", + "mistral": "MISTRAL_API_KEY", + } + + env_var = provider_env_map.get(provider) + if env_var: + api_key = os.environ.get(env_var, "") + if api_key: + env[env_var] = api_key + + return env + + def create_run_agent_commands(self, instruction: str) -> list[ExecInput]: + escaped_instruction = shlex.quote(instruction) + env = self._get_api_env() + + parts = ["eval-cli", "--workdir /testbed", "--output-dir /logs/agent"] + + if self.model_name: + parts.append(f"--model {self.model_name}") + + timeout = self._extra_env.get("EVAL_CLI_TIMEOUT") + if timeout: + parts.append(f"--timeout {timeout}") + + parts.append(f"--instruction {escaped_instruction}") + + eval_cli_command = " ".join(parts) + " 2>&1 | stdbuf -oL tee /logs/agent/eval-cli.txt" + + patch_command = ( + "cd /testbed && " + "git add -A && " + "git diff --cached HEAD > /logs/agent/patch.diff && " + "echo \"Patch size: $(wc -c < /logs/agent/patch.diff) bytes\"" + ) + + return [ + ExecInput(command=eval_cli_command, env=env), + ExecInput(command=patch_command), + ] diff --git a/crates/eval_cli/zed_eval/install.sh.j2 b/crates/eval_cli/zed_eval/install.sh.j2 new file mode 100644 index 0000000000000000000000000000000000000000..f7ebbe028216a1a7a0fd606e50a2f707db34c5ce --- /dev/null +++ b/crates/eval_cli/zed_eval/install.sh.j2 @@ -0,0 +1,49 @@ +#!/bin/bash +set -euo pipefail + +# Install runtime dependencies needed by the eval-cli binary (dynamically linked +# 
against glibc + these shared libraries from its GPUI/terminal/language stacks). +apt-get update +apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + git \ + libasound2 \ + libfontconfig1 \ + libglib2.0-0 \ + libsqlite3-0 \ + libssl3 \ + libwayland-client0 \ + libx11-xcb1 \ + libxkbcommon-x11-0 \ + libzstd1 + +# Install Node.js 22 LTS (needed by language servers like basedpyright). +curl -fsSL https://deb.nodesource.com/setup_22.x | bash - +apt-get install -y --no-install-recommends nodejs + +# Install uv (needed for running Python tests in SWE-bench tasks). +curl -LsSf https://astral.sh/uv/install.sh | sh +. "$HOME/.local/bin/env" +ln -sf "$HOME/.local/bin/uv" /usr/local/bin/uv +ln -sf "$HOME/.local/bin/uvx" /usr/local/bin/uvx + +{% if binary_uploaded is defined %} +# Binary was uploaded directly via setup() — just verify it works. +eval-cli --help +{% elif download_url is defined %} +curl -fsSL "{{ download_url }}" -o /usr/local/bin/eval-cli +chmod +x /usr/local/bin/eval-cli +eval-cli --help +{% else %} +echo "ERROR: No eval-cli binary provided." 
+echo "" +echo "Either pass binary_path= to upload a local build:" +echo " --ae binary_path=/path/to/target/release/eval-cli" +echo "" +echo "Or set download_url= / EVAL_CLI_DOWNLOAD_URL:" +echo " --ae download_url=https://example.com/eval-cli" +exit 1 +{% endif %} + +echo "INSTALL_SUCCESS" diff --git a/crates/eval_cli/zed_eval/pyproject.toml b/crates/eval_cli/zed_eval/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..416c025826eaf99ad029c914b609aa28abd56f00 --- /dev/null +++ b/crates/eval_cli/zed_eval/pyproject.toml @@ -0,0 +1,10 @@ +[project] +name = "zed-eval" +version = "0.1.0" +description = "Harbor agent wrapper for Zed's eval-cli" +requires-python = ">=3.12" +dependencies = ["harbor"] + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index 88f2bea0c0c68480a2ad67f536ecf9d465a6a9ae..02db6befb72b53f4610cdfddea80d7c030e5d29a 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -80,6 +80,18 @@ pub trait Extension: Send + Sync + 'static { worktree: Arc, ) -> Result>; + async fn language_server_initialization_options_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result>; + + async fn language_server_workspace_configuration_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result>; + async fn language_server_additional_initialization_options( &self, language_server_id: LanguageServerName, diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index eae51846f164d4aa6baf2fac897d25a8961b4d6c..1c204398c34728cab6b05687050243b4a988902c 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -7,6 +7,7 @@ use anyhow::{Context as _, Result, bail}; use futures::{StreamExt, io}; use heck::ToSnakeCase; use http_client::{self, 
AsyncBody, HttpClient}; +use language::LanguageConfig; use serde::Deserialize; use std::{ env, fs, mem, @@ -583,7 +584,7 @@ async fn populate_defaults( while let Some(language_dir) = language_dir_entries.next().await { let language_dir = language_dir?; - let config_path = language_dir.join("config.toml"); + let config_path = language_dir.join(LanguageConfig::FILE_NAME); if fs.is_file(config_path.as_path()).await { let relative_language_dir = language_dir.strip_prefix(extension_path)?.to_path_buf(); diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index acd1cba47b0150b85ddec8baafa8b5f341460a39..6607cdc9697d017ac51818bb277a1392a8d67d01 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -100,6 +100,28 @@ pub trait Extension: Send + Sync { Ok(None) } + /// Returns the JSON schema for the initialization options. + /// + /// The schema must conform to the JSON Schema speification. + fn language_server_initialization_options_schema( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Option { + None + } + + /// Returns the JSON schema for the workspace configuration. + /// + /// The schema must conform to the JSON Schema specification. + fn language_server_workspace_configuration_schema( + &mut self, + _language_server_id: &LanguageServerId, + _worktree: &Worktree, + ) -> Option { + None + } + /// Returns the initialization options to pass to the other language server. 
fn language_server_additional_initialization_options( &mut self, @@ -370,6 +392,26 @@ impl wit::Guest for Component { .and_then(|value| serde_json::to_string(&value).ok())) } + fn language_server_initialization_options_schema( + language_server_id: String, + worktree: &Worktree, + ) -> Option { + let language_server_id = LanguageServerId(language_server_id); + extension() + .language_server_initialization_options_schema(&language_server_id, worktree) + .and_then(|value| serde_json::to_string(&value).ok()) + } + + fn language_server_workspace_configuration_schema( + language_server_id: String, + worktree: &Worktree, + ) -> Option { + let language_server_id = LanguageServerId(language_server_id); + extension() + .language_server_workspace_configuration_schema(&language_server_id, worktree) + .and_then(|value| serde_json::to_string(&value).ok()) + } + fn language_server_additional_initialization_options( language_server_id: String, target_language_server_id: String, diff --git a/crates/extension_api/wit/since_v0.8.0/extension.wit b/crates/extension_api/wit/since_v0.8.0/extension.wit index fc2735c72b463225feed0d371ae8274b56c78be1..052d670364b6958b51184def893c49f5b6abdc9e 100644 --- a/crates/extension_api/wit/since_v0.8.0/extension.wit +++ b/crates/extension_api/wit/since_v0.8.0/extension.wit @@ -101,6 +101,16 @@ world extension { /// Returns the workspace configuration options to pass to the language server. export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + /// Returns the JSON schema for the initialization options. + /// + /// The schema is represented as a JSON string conforming to the JSON Schema specification. + export language-server-initialization-options-schema: func(language-server-id: string, worktree: borrow) -> option; + + /// Returns the JSON schema for the workspace configuration. + /// + /// The schema is represented as a JSON string conforming to the JSON Schema specification. 
+ export language-server-workspace-configuration-schema: func(language-server-id: string, worktree: borrow) -> option; + /// Returns the initialization options to pass to the other language server. export language-server-additional-initialization-options: func(language-server-id: string, target-language-server-id: string, worktree: borrow) -> result, string>; diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 9795c13e75864184299fba026f499bbcbefee117..24ea9cfafadc61b2753f7b739fd4b7cbbd24dbfe 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -26,7 +26,9 @@ reqwest_client.workspace = true serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true +settings_content.workspace = true snippet_provider.workspace = true +task.workspace = true theme.workspace = true tokio = { workspace = true, features = ["full"] } toml.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index baefb72fe4bd986edbfaa866e50663b159eff3c9..d0a533bfeb331c196d802df9894e726201794ce7 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -11,8 +11,10 @@ use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder}; use extension::{ExtensionManifest, ExtensionSnippets}; use language::LanguageConfig; use reqwest_client::ReqwestClient; +use settings_content::SemanticTokenRules; use snippet_provider::file_to_snippets; use snippet_provider::format::VsSnippetsFile; +use task::TaskTemplates; use tokio::process::Command; use tree_sitter::{Language, Query, WasmStore}; @@ -323,9 +325,8 @@ fn test_languages( ) -> Result<()> { for relative_language_dir in &manifest.languages { let language_dir = extension_path.join(relative_language_dir); - let config_path = language_dir.join("config.toml"); - let config_content = fs::read_to_string(&config_path)?; - let config: LanguageConfig = toml::from_str(&config_content)?; + let 
config_path = language_dir.join(LanguageConfig::FILE_NAME); + let config = LanguageConfig::load(&config_path)?; let grammar = if let Some(name) = &config.grammar { Some( grammars @@ -339,18 +340,48 @@ fn test_languages( let query_entries = fs::read_dir(&language_dir)?; for entry in query_entries { let entry = entry?; - let query_path = entry.path(); - if query_path.extension() == Some("scm".as_ref()) { - let grammar = grammar.with_context(|| { - format! { - "language {} provides query {} but no grammar", - config.name, - query_path.display() - } - })?; - - let query_source = fs::read_to_string(&query_path)?; - let _query = Query::new(grammar, &query_source)?; + let file_path = entry.path(); + + let Some(file_name) = file_path.file_name().and_then(|name| name.to_str()) else { + continue; + }; + + match file_name { + LanguageConfig::FILE_NAME => { + // Loaded above + } + SemanticTokenRules::FILE_NAME => { + let _token_rules = SemanticTokenRules::load(&file_path)?; + } + TaskTemplates::FILE_NAME => { + let task_file_content = std::fs::read(&file_path).with_context(|| { + anyhow!( + "Failed to read tasks file at {path}", + path = file_path.display() + ) + })?; + let _task_templates = + serde_json_lenient::from_slice::(&task_file_content) + .with_context(|| { + anyhow!( + "Failed to parse tasks file at {path}", + path = file_path.display() + ) + })?; + } + _ if file_name.ends_with(".scm") => { + let grammar = grammar.with_context(|| { + format! 
{ + "language {} provides query {} but no grammar", + config.name, + file_path.display() + } + })?; + + let query_source = fs::read_to_string(&file_path)?; + let _query = Query::new(grammar, &query_source)?; + } + _ => {} } } diff --git a/crates/extension_host/Cargo.toml b/crates/extension_host/Cargo.toml index c4d1f6d98c82ee348f4a7453a3bb6e3255924b77..c6f4db47c97d69173242953926c6965c039a6397 100644 --- a/crates/extension_host/Cargo.toml +++ b/crates/extension_host/Cargo.toml @@ -65,7 +65,7 @@ language = { workspace = true, features = ["test-support"] } language_extension.workspace = true parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } -rand.workspace = true + reqwest_client.workspace = true theme = { workspace = true, features = ["test-support"] } theme_extension.workspace = true diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index c691296d61183c9bb0fcd41ff6c74eed6cb61149..5418f630537c1acd98edc8c6af753d9358b23e8f 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -55,6 +55,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; +use task::TaskTemplates; use url::Url; use util::{ResultExt, paths::RemotePathBuf}; use wasm_host::{ @@ -1285,19 +1286,11 @@ impl ExtensionStore { ]); // Load semantic token rules if present in the language directory. 
- let rules_path = language_path.join("semantic_token_rules.json"); - if let Ok(rules_json) = std::fs::read_to_string(&rules_path) { - match serde_json_lenient::from_str::(&rules_json) { - Ok(rules) => { - semantic_token_rules_to_add.push((language_name.clone(), rules)); - } - Err(err) => { - log::error!( - "Failed to parse semantic token rules from {}: {err:#}", - rules_path.display() - ); - } - } + let rules_path = language_path.join(SemanticTokenRules::FILE_NAME); + if std::fs::exists(&rules_path).is_ok_and(|exists| exists) + && let Some(rules) = SemanticTokenRules::load(&rules_path).log_err() + { + semantic_token_rules_to_add.push((language_name.clone(), rules)); } self.proxy.register_language( @@ -1306,11 +1299,11 @@ impl ExtensionStore { language.matcher.clone(), language.hidden, Arc::new(move || { - let config = std::fs::read_to_string(language_path.join("config.toml"))?; - let config: LanguageConfig = ::toml::from_str(&config)?; + let config = + LanguageConfig::load(language_path.join(LanguageConfig::FILE_NAME))?; let queries = load_plugin_queries(&language_path); let context_provider = - std::fs::read_to_string(language_path.join("tasks.json")) + std::fs::read_to_string(language_path.join(TaskTemplates::FILE_NAME)) .ok() .and_then(|contents| { let definitions = @@ -1580,7 +1573,7 @@ impl ExtensionStore { if !fs_metadata.is_dir { continue; } - let language_config_path = language_path.join("config.toml"); + let language_config_path = language_path.join(LanguageConfig::FILE_NAME); let config = fs.load(&language_config_path).await.with_context(|| { format!("loading language config from {language_config_path:?}") })?; @@ -1703,7 +1696,7 @@ impl ExtensionStore { cx.background_spawn(async move { const EXTENSION_TOML: &str = "extension.toml"; const EXTENSION_WASM: &str = "extension.wasm"; - const CONFIG_TOML: &str = "config.toml"; + const CONFIG_TOML: &str = LanguageConfig::FILE_NAME; if is_dev { let manifest_toml = toml::to_string(&loaded_extension.manifest)?; 
diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index 290dbb6fd40fc3c15dcb210c767b9102b7117544..0aff06fdddcf5c075bd669528b5c52137f745863 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -138,7 +138,9 @@ impl HeadlessExtensionStore { for language_path in &manifest.languages { let language_path = extension_dir.join(language_path); - let config = fs.load(&language_path.join("config.toml")).await?; + let config = fs + .load(&language_path.join(LanguageConfig::FILE_NAME)) + .await?; let mut config = ::toml::from_str::(&config)?; this.update(cx, |this, _cx| { diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index fe3c11de3ae78115b8e5db08884b7e07be152324..286639cdd67d716b1137290baf269670ecddebe7 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -159,6 +159,48 @@ impl extension::Extension for WasmExtension { .await? } + async fn language_server_initialization_options_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result> { + self.call(|extension, store| { + async move { + let resource = store.data_mut().table().push(worktree)?; + extension + .call_language_server_initialization_options_schema( + store, + &language_server_id, + resource, + ) + .await + } + .boxed() + }) + .await? + } + + async fn language_server_workspace_configuration_schema( + &self, + language_server_id: LanguageServerName, + worktree: Arc, + ) -> Result> { + self.call(|extension, store| { + async move { + let resource = store.data_mut().table().push(worktree)?; + extension + .call_language_server_workspace_configuration_schema( + store, + &language_server_id, + resource, + ) + .await + } + .boxed() + }) + .await? 
+ } + async fn language_server_additional_initialization_options( &self, language_server_id: LanguageServerName, diff --git a/crates/extension_host/src/wasm_host/wit.rs b/crates/extension_host/src/wasm_host/wit.rs index ddd3f604c991a43bc58f494410db1be22a93a772..9c4d3aa298c366ae91d0f8195ed090d74099c6d0 100644 --- a/crates/extension_host/src/wasm_host/wit.rs +++ b/crates/extension_host/src/wasm_host/wit.rs @@ -465,6 +465,60 @@ impl Extension { } } + pub async fn call_language_server_initialization_options_schema( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + resource: Resource>, + ) -> Result> { + match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_initialization_options_schema( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V0_6_0(_) + | Extension::V0_5_0(_) + | Extension::V0_4_0(_) + | Extension::V0_3_0(_) + | Extension::V0_2_0(_) + | Extension::V0_1_0(_) + | Extension::V0_0_6(_) + | Extension::V0_0_4(_) + | Extension::V0_0_1(_) => Ok(None), + } + } + + pub async fn call_language_server_workspace_configuration_schema( + &self, + store: &mut Store, + language_server_id: &LanguageServerName, + resource: Resource>, + ) -> Result> { + match self { + Extension::V0_8_0(ext) => { + ext.call_language_server_workspace_configuration_schema( + store, + &language_server_id.0, + resource, + ) + .await + } + Extension::V0_6_0(_) + | Extension::V0_5_0(_) + | Extension::V0_4_0(_) + | Extension::V0_3_0(_) + | Extension::V0_2_0(_) + | Extension::V0_1_0(_) + | Extension::V0_0_6(_) + | Extension::V0_0_4(_) + | Extension::V0_0_1(_) => Ok(None), + } + } + pub async fn call_language_server_additional_initialization_options( &self, store: &mut Store, diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 1458b2104f31f4d987319c87a41bfd5538b2727f..7343edcdef3851bfeb7a3aa80f3449ff06f55d9f 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ 
b/crates/extensions_ui/src/extensions_ui.rs @@ -870,9 +870,12 @@ impl ExtensionsPage { ) .child( h_flex() + .min_w_0() + .w_full() .justify_between() .child( h_flex() + .min_w_0() .gap_1() .child( Icon::new(IconName::Person) @@ -889,6 +892,7 @@ impl ExtensionsPage { .child( h_flex() .gap_1() + .flex_shrink_0() .child({ let repo_url_for_tooltip = repository_url.clone(); diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 77a98aae05572ac72b239db8bb3d4496bd1c0f4d..8cbacfd823400f2988738af03a05dfbfc0ed72d4 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -37,16 +37,6 @@ impl FeatureFlag for AgentSharingFeatureFlag { const NAME: &'static str = "agent-sharing"; } -pub struct AgentGitWorktreesFeatureFlag; - -impl FeatureFlag for AgentGitWorktreesFeatureFlag { - const NAME: &'static str = "agent-git-worktrees"; - - fn enabled_for_staff() -> bool { - false - } -} - pub struct DiffReviewFeatureFlag; impl FeatureFlag for DiffReviewFeatureFlag { diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 0a53a1b6f38d1af0a6b913d61969d4df105a6a10..c2279d778865cb819a5b0e2e494ad9d1e4470067 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -22,5 +22,3 @@ util.workspace = true workspace.workspace = true zed_actions.workspace = true -[dev-dependencies] -editor = { workspace = true, features = ["test-support"] } diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 8800c7cdcb86735e3b884bd7bd1fbbf5a0522174..113bf68d34f778f8fba9fdc62b586c31e689a380 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -38,7 +38,7 @@ project_panel.workspace = true ctor.workspace = true editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } + picker = { workspace = true, features = ["test-support"] } 
pretty_assertions.workspace = true serde_json.workspace = true diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 0fde444171042eda859edcac7915c456ab91e265..6c7074d2139068d2ea581ea6343de4d4c1f09030 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -15,10 +15,14 @@ use gpui::Global; use gpui::ReadGlobal as _; use gpui::SharedString; use std::borrow::Cow; +#[cfg(unix)] +use std::ffi::CString; use util::command::new_command; #[cfg(unix)] use std::os::fd::{AsFd, AsRawFd}; +#[cfg(unix)] +use std::os::unix::ffi::OsStrExt; #[cfg(unix)] use std::os::unix::fs::{FileTypeExt, MetadataExt}; @@ -506,6 +510,63 @@ impl RealFs { } } +#[cfg(any(target_os = "macos", target_os = "linux"))] +fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> { + let source = path_to_c_string(source)?; + let target = path_to_c_string(target)?; + + #[cfg(target_os = "macos")] + let result = unsafe { libc::renamex_np(source.as_ptr(), target.as_ptr(), libc::RENAME_EXCL) }; + + #[cfg(target_os = "linux")] + let result = unsafe { + libc::syscall( + libc::SYS_renameat2, + libc::AT_FDCWD, + source.as_ptr(), + libc::AT_FDCWD, + target.as_ptr(), + libc::RENAME_NOREPLACE, + ) + }; + + if result == 0 { + Ok(()) + } else { + Err(io::Error::last_os_error()) + } +} + +#[cfg(target_os = "windows")] +fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> { + use std::os::windows::ffi::OsStrExt; + + use windows::Win32::Storage::FileSystem::{MOVE_FILE_FLAGS, MoveFileExW}; + use windows::core::PCWSTR; + + let source: Vec = source.as_os_str().encode_wide().chain(Some(0)).collect(); + let target: Vec = target.as_os_str().encode_wide().chain(Some(0)).collect(); + + unsafe { + MoveFileExW( + PCWSTR(source.as_ptr()), + PCWSTR(target.as_ptr()), + MOVE_FILE_FLAGS::default(), + ) + } + .map_err(|_| io::Error::last_os_error()) +} + +#[cfg(any(target_os = "macos", target_os = "linux"))] +fn path_to_c_string(path: &Path) -> io::Result { + 
CString::new(path.as_os_str().as_bytes()).map_err(|_| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("path contains interior NUL: {}", path.display()), + ) + }) +} + #[async_trait::async_trait] impl Fs for RealFs { async fn create_dir(&self, path: &Path) -> Result<()> { @@ -588,7 +649,56 @@ impl Fs for RealFs { } async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> { - if !options.overwrite && smol::fs::metadata(target).await.is_ok() { + if options.create_parents { + if let Some(parent) = target.parent() { + self.create_dir(parent).await?; + } + } + + if options.overwrite { + smol::fs::rename(source, target).await?; + return Ok(()); + } + + let use_metadata_fallback = { + #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] + { + let source = source.to_path_buf(); + let target = target.to_path_buf(); + match self + .executor + .spawn(async move { rename_without_replace(&source, &target) }) + .await + { + Ok(()) => return Ok(()), + Err(error) if error.kind() == io::ErrorKind::AlreadyExists => { + if options.ignore_if_exists { + return Ok(()); + } + return Err(error.into()); + } + Err(error) + if error.raw_os_error().is_some_and(|code| { + code == libc::ENOSYS + || code == libc::ENOTSUP + || code == libc::EOPNOTSUPP + }) => + { + // For case when filesystem or kernel does not support atomic no-overwrite rename. + true + } + Err(error) => return Err(error.into()), + } + } + + #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))] + { + // For platforms which do not have an atomic no-overwrite rename yet. 
+ true + } + }; + + if use_metadata_fallback && smol::fs::metadata(target).await.is_ok() { if options.ignore_if_exists { return Ok(()); } else { @@ -596,12 +706,6 @@ impl Fs for RealFs { } } - if options.create_parents { - if let Some(parent) = target.parent() { - self.create_dir(parent).await?; - } - } - smol::fs::rename(source, target).await?; Ok(()) } diff --git a/crates/fs/tests/integration/fs.rs b/crates/fs/tests/integration/fs.rs index dd5e694e23c99716a81b27afd487e3a6ea648209..b688d5e2c243ede5eb3f499ad2956feaec01a965 100644 --- a/crates/fs/tests/integration/fs.rs +++ b/crates/fs/tests/integration/fs.rs @@ -523,6 +523,65 @@ async fn test_rename(executor: BackgroundExecutor) { ); } +#[gpui::test] +#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] +async fn test_realfs_parallel_rename_without_overwrite_preserves_losing_source( + executor: BackgroundExecutor, +) { + let temp_dir = TempDir::new().unwrap(); + let root = temp_dir.path(); + let source_a = root.join("dir_a/shared.txt"); + let source_b = root.join("dir_b/shared.txt"); + let target = root.join("shared.txt"); + + std::fs::create_dir_all(source_a.parent().unwrap()).unwrap(); + std::fs::create_dir_all(source_b.parent().unwrap()).unwrap(); + std::fs::write(&source_a, "from a").unwrap(); + std::fs::write(&source_b, "from b").unwrap(); + + let fs = RealFs::new(None, executor); + let (first_result, second_result) = futures::future::join( + fs.rename(&source_a, &target, RenameOptions::default()), + fs.rename(&source_b, &target, RenameOptions::default()), + ) + .await; + + assert_ne!(first_result.is_ok(), second_result.is_ok()); + assert!(target.exists()); + assert_eq!(source_a.exists() as u8 + source_b.exists() as u8, 1); +} + +#[gpui::test] +#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))] +async fn test_realfs_rename_ignore_if_exists_leaves_source_and_target_unchanged( + executor: BackgroundExecutor, +) { + let temp_dir = TempDir::new().unwrap(); + let 
root = temp_dir.path(); + let source = root.join("source.txt"); + let target = root.join("target.txt"); + + std::fs::write(&source, "from source").unwrap(); + std::fs::write(&target, "from target").unwrap(); + + let fs = RealFs::new(None, executor); + let result = fs + .rename( + &source, + &target, + RenameOptions { + ignore_if_exists: true, + ..Default::default() + }, + ) + .await; + + assert!(result.is_ok()); + + assert_eq!(std::fs::read_to_string(&source).unwrap(), "from source"); + assert_eq!(std::fs::read_to_string(&target).unwrap(), "from target"); +} + #[gpui::test] #[cfg(unix)] async fn test_realfs_broken_symlink_metadata(executor: BackgroundExecutor) { diff --git a/crates/git/Cargo.toml b/crates/git/Cargo.toml index 4d96312e274b3934e0d1ae8aa1f16f235d30a59f..23a937bf1fa17481eb5e130b3e083274dd3f1d16 100644 --- a/crates/git/Cargo.toml +++ b/crates/git/Cargo.toml @@ -48,7 +48,6 @@ ztracing.workspace = true pretty_assertions.workspace = true serde_json.workspace = true text = { workspace = true, features = ["test-support"] } -unindent.workspace = true gpui = { workspace = true, features = ["test-support"] } tempfile.workspace = true rand.workspace = true diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 805d8d181ab7a434b565d38bdb2f802a8a3cda1a..13745c1fdfc0523d850b95e45a81cae286a77a00 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -40,6 +40,9 @@ actions!( /// Restores the selected hunks to their original state. #[action(deprecated_aliases = ["editor::RevertSelectedHunks"])] Restore, + /// Restores the selected hunks to their original state and moves to the + /// next one. + RestoreAndNext, // per-file /// Shows git blame information for the current file. 
#[action(deprecated_aliases = ["editor::ToggleGitBlame"])] diff --git a/crates/git_graph/Cargo.toml b/crates/git_graph/Cargo.toml index 386d82389ca3370f071f8733b039f91fc3f21feb..4756c55ac9232631a46056e252021a704d4a25b6 100644 --- a/crates/git_graph/Cargo.toml +++ b/crates/git_graph/Cargo.toml @@ -43,7 +43,6 @@ git = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } rand.workspace = true -recent_projects = { workspace = true, features = ["test-support"] } serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index a25911d65eb87d176a0a987d996e159e2c43628c..4493cb58471aed9dcf4a259f5a82117992b1dedb 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -73,7 +73,6 @@ windows.workspace = true [dev-dependencies] ctor.workspace = true editor = { workspace = true, features = ["test-support"] } -git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true pretty_assertions.workspace = true diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 82571b541e692141f843a4c3ef6e082c72e55e48..67b39618eaaaa2f7704e100d98621f53b725ff43 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -182,7 +182,7 @@ fn conflicts_updated( let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx); let Some(buffer_snapshot) = excerpts .first() - .and_then(|(excerpt_id, _)| snapshot.buffer_for_excerpt(*excerpt_id)) + .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id)) else { return; }; @@ -221,7 +221,7 @@ fn conflicts_updated( let mut removed_highlighted_ranges = Vec::new(); let mut removed_block_ids = HashSet::default(); for (conflict_range, block_id) in 
old_conflicts { - let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| { + let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { let precedes_start = range .context .start @@ -263,7 +263,7 @@ fn conflicts_updated( let new_conflicts = &conflict_set.conflicts[event.new_range.clone()]; let mut blocks = Vec::new(); for conflict in new_conflicts { - let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| { + let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { let precedes_start = range .context .start diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 61d94b68a118525bd9b67217a929ce7462696dc7..8205f5ee7b6a9966a37a8406331d171d8ca57f1d 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1343,6 +1343,7 @@ impl GitPanel { &snapshot, language::Point::new(0, 0), Direction::Next, + true, window, cx, ); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index f62b08e4c0d99db7d2e60e6aac730a69b139cca3..ad7d6b86befd0b0f4a1ecf6386c030d4294cdf5e 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -517,7 +517,11 @@ impl ProjectDiff { fn move_to_beginning(&mut self, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.rhs_editor().update(cx, |editor, cx| { - editor.move_to_beginning(&Default::default(), window, cx); + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(vec![ + multi_buffer::Anchor::min()..multi_buffer::Anchor::min(), + ]); + }); }); }); } diff --git a/crates/go_to_line/Cargo.toml b/crates/go_to_line/Cargo.toml index 0260cd2d122f83f2c11505be9e6e8a84f69f8569..58c58dc389e37210063efb55337fc385cc0ad435 100644 --- a/crates/go_to_line/Cargo.toml +++ b/crates/go_to_line/Cargo.toml @@ -34,6 +34,4 @@ menu.workspace = true project = { workspace = true, features = ["test-support"] } rope.workspace = true serde_json.workspace = true 
-tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 662bf2a98d84ba434da98aeca71791c028f6018c..79c4e54700ccec7575c825ecae6a1bb05419b6fb 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -94,7 +94,9 @@ impl GoToLine { .read(cx) .excerpts_for_buffer(snapshot.remote_id(), cx) .into_iter() - .map(move |(_, range)| text::ToPoint::to_point(&range.context.end, &snapshot).row) + .map(move |(_, _, range)| { + text::ToPoint::to_point(&range.context.end, &snapshot).row + }) .max() .unwrap_or(0); diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index c80f97efb6dc8bf1450c08bfe85290096b44815b..61782fbe50e26a089eefe3c11e70a0016909f6b3 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -24,6 +24,7 @@ test-support = [ "http_client/test-support", "wayland", "x11", + "proptest", ] inspector = ["gpui_macros/inspector"] leak-detection = ["backtrace"] @@ -64,6 +65,7 @@ num_cpus = "1.13" parking = "2.0.0" parking_lot.workspace = true postage.workspace = true +proptest = { workspace = true, optional = true } chrono.workspace = true profiling.workspace = true rand.workspace = true @@ -144,11 +146,11 @@ collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true gpui_platform.workspace = true lyon = { version = "1.0", features = ["extra"] } -pretty_assertions.workspace = true rand.workspace = true scheduler = { workspace = true, features = ["test-support"] } unicode-segmentation.workspace = true gpui_util = { workspace = true } +proptest = { workspace = true } [target.'cfg(not(target_family = "wasm"))'.dev-dependencies] http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/gpui/examples/active_state_bug.rs b/crates/gpui/examples/active_state_bug.rs new file mode 100644 index 
0000000000000000000000000000000000000000..f767ed27e456ec65858b72a4df89fab65e7fd1f3 --- /dev/null +++ b/crates/gpui/examples/active_state_bug.rs @@ -0,0 +1,47 @@ +/// Click the button — the `.active()` background gets stuck on every other click. +use gpui::*; +use gpui_platform::application; + +struct Example; + +impl Render for Example { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + // Colors from Zed's default dark theme + let bg = hsla(215. / 360., 0.12, 0.15, 1.); + let text = hsla(221. / 360., 0.11, 0.86, 1.); + let hover = hsla(225. / 360., 0.118, 0.267, 1.); + let active = hsla(220. / 360., 0.118, 0.20, 1.); + + div().bg(bg).size_full().p_1().child( + div() + .id("button") + .px_2() + .py_0p5() + .rounded_md() + .text_sm() + .text_color(text) + .hover(|s| s.bg(hover)) + .active(|s| s.bg(active)) + .on_click(|_, _, _| {}) + .child("Click me"), + ) + } +} + +fn main() { + application().run(|cx: &mut App| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(Bounds::centered( + None, + size(px(200.), px(60.)), + cx, + ))), + ..Default::default() + }, + |_, cx| cx.new(|_| Example), + ) + .unwrap(); + cx.activate(true); + }); +} diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f1fe264f4ef4ccb09081a6672c7c4ddb1d24dc97..8af0a8923b38a6f711d701730996afca012fb48b 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -744,9 +744,11 @@ impl App { })); platform.on_quit(Box::new({ - let cx = app.clone(); + let cx = Rc::downgrade(&app); move || { - cx.borrow_mut().shutdown(); + if let Some(cx) = cx.upgrade() { + cx.borrow_mut().shutdown(); + } } })); @@ -2613,13 +2615,6 @@ impl<'a, T> Drop for GpuiBorrow<'a, T> { } } -impl Drop for App { - fn drop(&mut self) { - self.foreground_executor.close(); - self.background_executor.close(); - } -} - #[cfg(test)] mod test { use std::{cell::RefCell, rc::Rc}; diff --git a/crates/gpui/src/elements/div.rs 
b/crates/gpui/src/elements/div.rs index 58f11a7fa1fb876ef4b4ef80fedf1948423a24f5..3599affc3c792f3c93b3b94cfc44740d7c38caf7 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -2517,18 +2517,24 @@ impl Interactivity { ); } + // We unconditionally bind both the mouse up and mouse down active state handlers + // Because we might not get a chance to render a frame before the mouse up event arrives. let active_state = element_state .clicked_state .get_or_insert_with(Default::default) .clone(); - if active_state.borrow().is_clicked() { + + { + let active_state = active_state.clone(); window.on_mouse_event(move |_: &MouseUpEvent, phase, window, _cx| { - if phase == DispatchPhase::Capture { + if phase == DispatchPhase::Capture && active_state.borrow().is_clicked() { *active_state.borrow_mut() = ElementClickedState::default(); window.refresh(); } }); - } else { + } + + { let active_group_hitbox = self .group_active_style .as_ref() diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index ded0f596dcea2f6c992961906503adb6829e885f..49036abfec1cb3145ce72d2aabe7683e308f1ed0 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -246,7 +246,12 @@ impl StyledText { pub fn with_runs(mut self, runs: Vec) -> Self { let mut text = &**self.text; for run in &runs { - text = text.get(run.len..).expect("invalid text run"); + text = text.get(run.len..).unwrap_or_else(|| { + #[cfg(debug_assertions)] + panic!("invalid text run. 
Text: '{text}', run: {run:?}"); + #[cfg(not(debug_assertions))] + panic!("invalid text run"); + }); } assert!(text.is_empty(), "invalid text run"); self.runs = Some(runs); diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 31c1ed80b92efb5dfa9ead6dcaf9050fe68ea399..cb65f758d5a521f15f77e7be266b1b4ed0480d03 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -129,11 +129,6 @@ impl BackgroundExecutor { } } - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.inner.close(); - } - /// Enqueues the given future to be run to completion on a background thread. #[track_caller] pub fn spawn(&self, future: impl Future + Send + 'static) -> Task @@ -173,7 +168,6 @@ impl BackgroundExecutor { { use crate::RunnableMeta; use parking_lot::{Condvar, Mutex}; - use std::sync::{Arc, atomic::AtomicBool}; struct NotifyOnDrop<'a>(&'a (Condvar, Mutex)); @@ -197,14 +191,13 @@ impl BackgroundExecutor { let dispatcher = self.dispatcher.clone(); let location = core::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let pair = &(Condvar::new(), Mutex::new(false)); let _wait_guard = WaitOnDrop(pair); let (runnable, task) = unsafe { async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn_unchecked( move |_| async { let _notify_guard = NotifyOnDrop(pair); @@ -404,11 +397,6 @@ impl ForegroundExecutor { } } - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.inner.close(); - } - /// Enqueues the given Task to run on the main thread. 
#[track_caller] pub fn spawn(&self, future: impl Future + 'static) -> Task @@ -595,144 +583,4 @@ mod test { "Task should run normally when app is alive" ); } - - #[test] - fn test_task_cancelled_when_app_dropped() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task_ran = Rc::new(RefCell::new(false)); - let task_ran_clone = Rc::clone(&task_ran); - - foreground_executor - .spawn(async move { - *task_ran_clone.borrow_mut() = true; - }) - .detach(); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - // The task should have been cancelled, not run - assert!( - !*task_ran.borrow(), - "Task should have been cancelled when app was dropped, but it ran!" - ); - } - - #[test] - fn test_nested_tasks_both_cancel() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let outer_completed = Rc::new(RefCell::new(false)); - let inner_completed = Rc::new(RefCell::new(false)); - let reached_await = Rc::new(RefCell::new(false)); - - let outer_flag = Rc::clone(&outer_completed); - let inner_flag = Rc::clone(&inner_completed); - let await_flag = Rc::clone(&reached_await); - - // Channel to block the inner task until we're ready - let (tx, rx) = futures::channel::oneshot::channel::<()>(); - - let inner_executor = foreground_executor.clone(); - - foreground_executor - .spawn(async move { - let inner_task = inner_executor.spawn({ - let inner_flag = Rc::clone(&inner_flag); - async move { - rx.await.ok(); - *inner_flag.borrow_mut() = true; - } - }); - - *await_flag.borrow_mut() = true; - - inner_task.await; - - *outer_flag.borrow_mut() = true; - }) - .detach(); - - // Run dispatcher until outer task reaches the await point - // The inner task will be 
blocked on the channel - dispatcher.run_until_parked(); - - // Verify we actually reached the await point before dropping the app - assert!( - *reached_await.borrow(), - "Outer task should have reached the await point" - ); - - // Neither task should have completed yet - assert!( - !*outer_completed.borrow(), - "Outer task should not have completed yet" - ); - assert!( - !*inner_completed.borrow(), - "Inner task should not have completed yet" - ); - - // Drop the channel sender and app while outer is awaiting inner - drop(tx); - drop(app); - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - // Run dispatcher - both tasks should be cancelled - dispatcher.run_until_parked(); - - // Neither task should have completed (both were cancelled) - assert!( - !*outer_completed.borrow(), - "Outer task should have been cancelled, not completed" - ); - assert!( - !*inner_completed.borrow(), - "Inner task should have been cancelled, not completed" - ); - } - - #[test] - #[should_panic] - fn test_polling_cancelled_task_panics() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task = foreground_executor.spawn(async move { 42 }); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - foreground_executor.block_on(task); - } - - #[test] - fn test_polling_cancelled_task_returns_none_with_fallible() { - let (dispatcher, _background_executor, app) = create_test_app(); - let foreground_executor = app.borrow().foreground_executor.clone(); - let app_weak = Rc::downgrade(&app); - - let task = foreground_executor.spawn(async move { 42 }).fallible(); - - drop(app); - - assert!(app_weak.upgrade().is_none(), "App should have been dropped"); - - dispatcher.run_until_parked(); - - let result = foreground_executor.block_on(task); - assert_eq!(result, None, "Cancelled 
task should return None"); - } } diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index ff36dbce500b8e7472f3d7faa31d9e5cb17e087e..6d7d801cd42c3639d7892295a660319d21b05dfa 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -54,6 +54,9 @@ mod util; mod view; mod window; +#[cfg(any(test, feature = "test-support"))] +pub use proptest; + #[cfg(doc)] pub mod _ownership_and_data_flow; @@ -86,7 +89,9 @@ pub use elements::*; pub use executor::*; pub use geometry::*; pub use global::*; -pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test}; +pub use gpui_macros::{ + AppContext, IntoElement, Render, VisualContext, property_test, register_action, test, +}; pub use gpui_util::arc_cow::ArcCow; pub use http_client; pub use input::*; diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index a6714ff250f2f854c51d30bfea5e2e5911ce60ee..061a055e7ef23bc4a76b44eaadb90bc1660fdb42 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -1062,6 +1062,13 @@ impl PlatformInputHandler { pub fn accepts_text_input(&mut self, window: &mut Window, cx: &mut App) -> bool { self.handler.accepts_text_input(window, cx) } + + #[allow(dead_code)] + pub fn query_accepts_text_input(&mut self) -> bool { + self.cx + .update(|window, cx| self.handler.accepts_text_input(window, cx)) + .unwrap_or(true) + } } /// A struct representing a selection in a text buffer, in UTF16 characters. 
diff --git a/crates/gpui/src/platform_scheduler.rs b/crates/gpui/src/platform_scheduler.rs index 900cd6041d38380f4d9cb3ff9b87a3605b0ebd78..0087c588d8d6381fa1fe590a2366c2e35ffe0a7a 100644 --- a/crates/gpui/src/platform_scheduler.rs +++ b/crates/gpui/src/platform_scheduler.rs @@ -109,16 +109,13 @@ impl Scheduler for PlatformScheduler { #[track_caller] fn timer(&self, duration: Duration) -> Timer { - use std::sync::{Arc, atomic::AtomicBool}; - let (tx, rx) = oneshot::channel(); let dispatcher = self.dispatcher.clone(); // Create a runnable that will send the completion signal let location = std::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let (runnable, _task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| async move { let _ = tx.send(()); diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 9e76d97e97e941121417d872e8c6f596cf658e20..ddcc3d27bd04d2fd82b3367a2fee6930e86ef356 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -27,12 +27,43 @@ //! ``` use crate::{Entity, Subscription, TestAppContext, TestDispatcher}; use futures::StreamExt as _; +use proptest::prelude::{Just, Strategy, any}; use std::{ env, - panic::{self, RefUnwindSafe}, + panic::{self, RefUnwindSafe, UnwindSafe}, pin::Pin, }; +/// Strategy injected into `#[gpui::property_test]` tests to control the seed +/// given to the scheduler. Doesn't shrink, since all scheduler seeds are +/// equivalent in complexity. If `$SEED` is set, it always uses that value. +pub fn seed_strategy() -> impl Strategy { + match std::env::var("SEED") { + Ok(val) => Just(val.parse().unwrap()).boxed(), + Err(_) => any::().no_shrink().boxed(), + } +} + +/// Similar to [`run_test`], but only runs the callback once, allowing +/// [`FnOnce`] callbacks. This is intended for use with the +/// `gpui::property_test` macro and generally should not be used directly. 
+/// +/// Doesn't support many features of [`run_test`], since these are provided by +/// proptest. +pub fn run_test_once(seed: u64, test_fn: Box) { + let result = panic::catch_unwind(|| { + let dispatcher = TestDispatcher::new(seed); + let scheduler = dispatcher.scheduler().clone(); + test_fn(dispatcher); + scheduler.end_test(); + }); + + match result { + Ok(()) => {} + Err(e) => panic::resume_unwind(e), + } +} + /// Run the given test function with the configured parameters. /// This is intended for use with the `gpui::test` macro /// and generally should not be used directly. diff --git a/crates/gpui_linux/src/linux/dispatcher.rs b/crates/gpui_linux/src/linux/dispatcher.rs index ff17fd238ae2a4b40ebdf8e36133c05f3e41f9b3..a72276cc7658a399505fa62bd2d5fe7b41e43e14 100644 --- a/crates/gpui_linux/src/linux/dispatcher.rs +++ b/crates/gpui_linux/src/linux/dispatcher.rs @@ -44,11 +44,6 @@ impl LinuxDispatcher { .name(format!("Worker-{i}")) .spawn(move || { for runnable in receiver.iter() { - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - continue; - } - let start = Instant::now(); let location = runnable.metadata().location; @@ -94,11 +89,6 @@ impl LinuxDispatcher { calloop::timer::Timer::from_duration(timer.duration), move |_, _, _| { if let Some(runnable) = runnable.take() { - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - return TimeoutAction::Drop; - } - let start = Instant::now(); let location = runnable.metadata().location; let mut timing = TaskTiming { diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index b49e269a72459d52c13c21b8d1a474ab310dbffd..8dd48b878cc1ffcb87201e9b1b252966bfce5efb 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -95,7 +95,7 @@ use gpui::{ ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, 
WindowParams, point, profiler, px, size, }; -use gpui_wgpu::{CompositorGpuHint, WgpuContext}; +use gpui_wgpu::{CompositorGpuHint, GpuContext}; use wayland_protocols::wp::linux_dmabuf::zv1::client::{ zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1, }; @@ -204,7 +204,7 @@ pub struct Output { pub(crate) struct WaylandClientState { serial_tracker: SerialTracker, globals: Globals, - pub gpu_context: Option, + pub gpu_context: GpuContext, pub compositor_gpu: Option, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, @@ -221,6 +221,7 @@ pub(crate) struct WaylandClientState { // Output to scale mapping outputs: HashMap, in_progress_outputs: HashMap, + wl_outputs: HashMap, keyboard_layout: LinuxKeyboardLayout, keymap_state: Option, compose_state: Option, @@ -463,6 +464,8 @@ impl WaylandClient { let mut seat: Option = None; #[allow(clippy::mutable_key_type)] let mut in_progress_outputs = HashMap::default(); + #[allow(clippy::mutable_key_type)] + let mut wl_outputs: HashMap = HashMap::default(); globals.contents().with_list(|list| { for global in list { match &global.interface[..] 
{ @@ -482,6 +485,7 @@ impl WaylandClient { (), ); in_progress_outputs.insert(output.id(), InProgressOutput::default()); + wl_outputs.insert(output.id(), output); } _ => {} } @@ -520,7 +524,7 @@ impl WaylandClient { .unwrap(); let compositor_gpu = detect_compositor_gpu(); - let gpu_context = None; + let gpu_context = Rc::new(RefCell::new(None)); let seat = seat.unwrap(); let globals = Globals::new( @@ -589,6 +593,7 @@ impl WaylandClient { composing: false, outputs: HashMap::default(), in_progress_outputs, + wl_outputs, windows: HashMap::default(), common, keyboard_layout: LinuxKeyboardLayout::new(UNKNOWN_KEYBOARD_LAYOUT_NAME), @@ -720,17 +725,27 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); + let target_output = params.display_id.and_then(|display_id| { + let target_protocol_id: u32 = display_id.into(); + state + .wl_outputs + .iter() + .find(|(id, _)| id.protocol_id() == target_protocol_id) + .map(|(_, output)| output.clone()) + }); + let appearance = state.common.appearance; let compositor_gpu = state.compositor_gpu.take(); let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), - &mut state.gpu_context, + state.gpu_context.clone(), compositor_gpu, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, appearance, parent, + target_output, )?; state.windows.insert(surface_id, window.0.clone()); @@ -1020,6 +1035,7 @@ impl Dispatch for WaylandClientStat state .in_progress_outputs .insert(output.id(), InProgressOutput::default()); + state.wl_outputs.insert(output.id(), output); } _ => {} }, diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 4c0dbae530ee254f5232eaead187b93d10b0b8e3..71a4ee2ab5033a69c5872fab631fd13af6c82b0e 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -12,7 +12,10 @@ use futures::channel::oneshot::Receiver; use raw_window_handle as rwh; use 
wayland_backend::client::ObjectId; use wayland_client::WEnum; -use wayland_client::{Proxy, protocol::wl_surface}; +use wayland_client::{ + Proxy, + protocol::{wl_output, wl_surface}, +}; use wayland_protocols::wp::viewporter::client::wp_viewport; use wayland_protocols::xdg::decoration::zv1::client::zxdg_toplevel_decoration_v1; use wayland_protocols::xdg::shell::client::xdg_surface; @@ -34,7 +37,7 @@ use gpui::{ WindowDecorations, WindowKind, WindowParams, layer_shell::LayerShellNotSupportedError, px, size, }; -use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuRenderer, WgpuSurfaceConfig}; #[derive(Default)] pub(crate) struct Callbacks { @@ -129,6 +132,7 @@ impl WaylandSurfaceState { globals: &Globals, params: &WindowParams, parent: Option, + target_output: Option, ) -> anyhow::Result { // For layer_shell windows, create a layer surface instead of an xdg surface if let WindowKind::LayerShell(options) = ¶ms.kind { @@ -138,7 +142,7 @@ impl WaylandSurfaceState { let layer_surface = layer_shell.get_layer_surface( &surface, - None, + target_output.as_ref(), super::layer_shell::wayland_layer(options.layer), options.namespace.clone(), &globals.qh, @@ -317,7 +321,7 @@ impl WaylandWindowState { viewport: Option, client: WaylandClientStatePtr, globals: Globals, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, compositor_gpu: Option, options: WindowParams, parent: Option, @@ -488,15 +492,17 @@ impl WaylandWindow { pub fn new( handle: AnyWindowHandle, globals: Globals, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, compositor_gpu: Option, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, parent: Option, + target_output: Option, ) -> anyhow::Result<(Self, ObjectId)> { let surface = globals.compositor.create_surface(&globals.qh, ()); - let surface_state = WaylandSurfaceState::new(&surface, &globals, ¶ms, parent.clone())?; + let surface_state 
= + WaylandSurfaceState::new(&surface, &globals, ¶ms, parent.clone(), target_output)?; if let Some(fractional_scale_manager) = globals.fractional_scale_manager.as_ref() { fractional_scale_manager.get_fractional_scale(&surface, &globals.qh, surface.id()); @@ -1251,6 +1257,7 @@ impl PlatformWindow for WaylandWindow { let state = client.borrow(); state .gpu_context + .borrow() .as_ref() .is_some_and(|ctx| ctx.supports_dual_source_blending()) } @@ -1328,6 +1335,41 @@ impl PlatformWindow for WaylandWindow { fn draw(&self, scene: &Scene) { let mut state = self.borrow_mut(); + + if state.renderer.device_lost() { + let raw_window = RawWindow { + window: state.surface.id().as_ptr().cast::(), + display: state + .surface + .backend() + .upgrade() + .unwrap() + .display_ptr() + .cast::(), + }; + let display_handle = rwh::HasDisplayHandle::display_handle(&raw_window) + .unwrap() + .as_raw(); + let window_handle = rwh::HasWindowHandle::window_handle(&raw_window) + .unwrap() + .as_raw(); + + state + .renderer + .recover(display_handle, window_handle) + .unwrap_or_else(|err| { + panic!( + "GPU device lost and recovery failed. \ + This may happen after system suspend/resume. \ + Please restart the application.\n\nError: {err}" + ) + }); + + // The current scene references atlas textures that were cleared during recovery. + // Skip this frame and let the next frame rebuild the scene with fresh textures. 
+ return; + } + state.renderer.draw(scene); } diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 3a970d9f72e1dc82215fc0d11297d222835df431..1f8db390029d67d8cdc17da7800a0f8e1d5e1af9 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -64,7 +64,7 @@ use gpui::{ PlatformKeyboardLayout, PlatformWindow, Point, RequestFrameOptions, ScrollDelta, Size, TouchPhase, WindowParams, point, px, }; -use gpui_wgpu::{CompositorGpuHint, WgpuContext}; +use gpui_wgpu::{CompositorGpuHint, GpuContext}; /// Value for DeviceId parameters which selects all devices. pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; @@ -177,7 +177,7 @@ pub struct X11ClientState { pub(crate) last_location: Point, pub(crate) current_count: usize, - pub(crate) gpu_context: Option, + pub(crate) gpu_context: GpuContext, pub(crate) compositor_gpu: Option, pub(crate) scale_factor: f32, @@ -295,7 +295,7 @@ impl X11ClientStatePtr { } #[derive(Clone)] -pub(crate) struct X11Client(Rc>); +pub(crate) struct X11Client(pub(crate) Rc>); impl X11Client { pub(crate) fn new() -> anyhow::Result { @@ -493,7 +493,7 @@ impl X11Client { last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, - gpu_context: None, + gpu_context: Rc::new(RefCell::new(None)), compositor_gpu, scale_factor, @@ -1524,7 +1524,7 @@ impl LinuxClient for X11Client { handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), - &mut state.gpu_context, + state.gpu_context.clone(), compositor_gpu, params, &xcb_connection, diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index a7cdc67ecd908becd22f799767f482754527fa51..57600103ce9ec1a67abb4abc373b0ed4c26cb077 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -9,7 +9,7 @@ use gpui::{ Tiling, WindowAppearance, 
WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowDecorations, WindowKind, WindowParams, px, }; -use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuRenderer, WgpuSurfaceConfig}; use collections::FxHashSet; use raw_window_handle as rwh; @@ -259,6 +259,8 @@ pub struct X11WindowState { executor: ForegroundExecutor, atoms: XcbAtoms, x_root_window: xproto::Window, + x_screen_index: usize, + visual_id: u32, pub(crate) counter_id: sync::Counter, pub(crate) last_sync_counter: Option, bounds: Bounds, @@ -407,7 +409,7 @@ impl X11WindowState { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, compositor_gpu: Option, params: WindowParams, xcb: &Rc, @@ -727,6 +729,8 @@ impl X11WindowState { executor, display, x_root_window: visual_set.root, + x_screen_index, + visual_id: visual.id, bounds: bounds.to_pixels(scale_factor), scale_factor, renderer, @@ -819,7 +823,7 @@ impl X11Window { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &mut Option, + gpu_context: gpui_wgpu::GpuContext, compositor_gpu: Option, params: WindowParams, xcb: &Rc, @@ -1173,13 +1177,11 @@ impl X11WindowStatePtr { } pub fn set_bounds(&self, bounds: Bounds) -> anyhow::Result<()> { - let mut resize_args = None; - let is_resize; - { + let (is_resize, content_size, scale_factor) = { let mut state = self.state.borrow_mut(); let bounds = bounds.map(|f| px(f as f32 / state.scale_factor)); - is_resize = bounds.size.width != state.bounds.size.width + let is_resize = bounds.size.width != state.bounds.size.width || bounds.size.height != state.bounds.size.height; // If it's a resize event (only width/height changed), we ignore `bounds.origin` @@ -1191,22 +1193,19 @@ impl X11WindowStatePtr { } let gpu_size = query_render_extent(&self.xcb, self.x_window)?; - if true { - 
state.renderer.update_drawable_size(gpu_size); - resize_args = Some((state.content_size(), state.scale_factor)); - } + state.renderer.update_drawable_size(gpu_size); + let result = (is_resize, state.content_size(), state.scale_factor); if let Some(value) = state.last_sync_counter.take() { check_reply( || "X11 sync SetCounter failed.", sync::set_counter(&self.xcb, state.counter_id, value), )?; } - } + result + }; let mut callbacks = self.callbacks.borrow_mut(); - if let Some((content_size, scale_factor)) = resize_args - && let Some(ref mut fun) = callbacks.resize - { + if let Some(ref mut fun) = callbacks.resize { fun(content_size, scale_factor) } @@ -1499,6 +1498,7 @@ impl PlatformWindow for X11Window { let state = ref_cell.borrow(); state .gpu_context + .borrow() .as_ref() .is_some_and(|ctx| ctx.supports_dual_source_blending()) }) @@ -1593,6 +1593,39 @@ impl PlatformWindow for X11Window { fn draw(&self, scene: &Scene) { let mut inner = self.0.state.borrow_mut(); + + if inner.renderer.device_lost() { + let raw_window = RawWindow { + connection: as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection( + &*self.0.xcb, + ) as *mut _, + screen_id: inner.x_screen_index, + window_id: self.0.x_window, + visual_id: inner.visual_id, + }; + let display_handle = rwh::HasDisplayHandle::display_handle(&raw_window) + .unwrap() + .as_raw(); + let window_handle = rwh::HasWindowHandle::window_handle(&raw_window) + .unwrap() + .as_raw(); + + inner + .renderer + .recover(display_handle, window_handle) + .unwrap_or_else(|err| { + panic!( + "GPU device lost and recovery failed. \ + This may happen after system suspend/resume. \ + Please restart the application.\n\nError: {err}" + ) + }); + + // The current scene references atlas textures that were cleared during recovery. + // Skip this frame and let the next frame rebuild the scene with fresh textures. 
+ return; + } + inner.renderer.draw(scene); } diff --git a/crates/gpui_macos/src/dispatcher.rs b/crates/gpui_macos/src/dispatcher.rs index 07638639e4bf5d3f002c1babfc213bc330e63dce..dd6f546f68b88efe6babc13e2d923d634eff5825 100644 --- a/crates/gpui_macos/src/dispatcher.rs +++ b/crates/gpui_macos/src/dispatcher.rs @@ -201,14 +201,7 @@ extern "C" fn trampoline(context: *mut c_void) { let runnable = unsafe { Runnable::::from_raw(NonNull::new_unchecked(context as *mut ())) }; - let metadata = runnable.metadata(); - - // Check if the executor that spawned this task was closed - if metadata.is_closed() { - return; - } - - let location = metadata.location; + let location = runnable.metadata().location; let start = Instant::now(); let timing = TaskTiming { diff --git a/crates/gpui_macos/src/display_link.rs b/crates/gpui_macos/src/display_link.rs index bd1c21ca5c063b2ed9fa79d939f205698023f42b..86e9b4072bab3cfb7cf5d0d69bc6ca29ad15cbb1 100644 --- a/crates/gpui_macos/src/display_link.rs +++ b/crates/gpui_macos/src/display_link.rs @@ -41,6 +41,7 @@ impl DisplayLink { ); frame_requests.set_context(data); frame_requests.set_event_handler_f(callback); + frame_requests.resume(); let display_link = sys::DisplayLink::new( display_id, @@ -57,7 +58,6 @@ impl DisplayLink { pub fn start(&mut self) -> Result<()> { unsafe { - self.frame_requests.resume(); self.display_link.as_mut().unwrap().start()?; } Ok(()) @@ -65,7 +65,6 @@ impl DisplayLink { pub fn stop(&mut self) -> Result<()> { unsafe { - self.frame_requests.suspend(); self.display_link.as_mut().unwrap().stop()?; } Ok(()) @@ -84,8 +83,6 @@ impl Drop for DisplayLink { // We might also want to upgrade to CADisplayLink, but that requires dropping old macOS support. std::mem::forget(self.display_link.take()); self.frame_requests.cancel(); - // A suspended DispatchSource cannot be destroyed. 
- self.frame_requests.resume(); } } diff --git a/crates/gpui_macros/Cargo.toml b/crates/gpui_macros/Cargo.toml index 2ee8da52fb7a013cefdd5fe79520a5d18f1e5b3f..513dd61d7b1da83aae2ca4779fb187aece3d7278 100644 --- a/crates/gpui_macros/Cargo.toml +++ b/crates/gpui_macros/Cargo.toml @@ -24,4 +24,4 @@ quote.workspace = true syn.workspace = true [dev-dependencies] -gpui = { workspace = true, features = ["inspector"] } +gpui = { workspace = true, features = ["inspector"] } \ No newline at end of file diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index 0f1365be77ec221d9061f588f84ff6acab3c32ab..e30c85e6edbee8b5307a5139c00a222e9a83bc55 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -3,6 +3,7 @@ mod derive_app_context; mod derive_into_element; mod derive_render; mod derive_visual_context; +mod property_test; mod register_action; mod styles; mod test; @@ -188,6 +189,79 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream { test::test(args, function) } +/// A variant of `#[gpui::test]` that supports property-based testing. +/// +/// A property test, much like a standard GPUI randomized test, allows testing +/// claims of the form "for any possible X, Y should hold". For example: +/// ``` +/// #[gpui::property_test] +/// fn test_arithmetic(x: i32, y: i32) { +/// assert!(x == y || x < y || x > y); +/// } +/// ``` +/// Standard GPUI randomized tests provide you with an instance of `StdRng` to +/// generate random data in a controlled manner. Property-based tests have some +/// advantages, however: +/// - Shrinking - the harness also understands a notion of the "complexity" of a +/// particular value. This allows it to find the "simplest possible value that +/// causes the test to fail". +/// - Ergonomics/clarity - the property-testing harness will automatically +/// generate values, removing the need to fill the test body with generation +/// logic. 
+/// - Failure persistence - if a failing seed is identified, it is stored in a +/// file, which can be checked in, and future runs will check these cases before +/// future cases. +/// +/// Property tests work best when all inputs can be generated up-front and kept +/// in a simple data structure. Sometimes, this isn't possible - for example, if +/// a test needs to make a random decision based on the current state of some +/// structure. In this case, a standard GPUI randomized test may be more +/// suitable. +/// +/// ## Customizing random values +/// +/// This macro is based on the [`#[proptest::property_test]`] macro, but handles +/// some of the same GPUI-specific arguments as `#[gpui::test]`. Specifically, +/// `&{mut,} TestAppContext` and `BackgroundExecutor` work as normal. `StdRng` +/// arguments are **explicitly forbidden**, since they break shrinking, and are +/// a common footgun. +/// +/// All other arguments are forwarded to the underlying proptest macro. +/// +/// Note: much of the following is copied from the proptest docs, specifically the +/// [`#[proptest::property_test]`] macro docs. +/// +/// Random values of type `T` are generated by a `Strategy` object. +/// Some types have a canonical `Strategy` - these types also implement +/// `Arbitrary`. Parameters to a `#[gpui::property_test]`, by default, use a +/// type's `Arbitrary` implementation. If you'd like to provide a custom +/// strategy, you can use `#[strategy = ...]` on the argument: +/// ``` +/// #[gpui::property_test] +/// fn int_test(#[strategy = 1..10] x: i32, #[strategy = "[a-zA-Z0-9]{20}"] s: String) { +/// assert!(s.len() > (x as usize)); +/// } +/// ``` +/// +/// For more information on writing custom `Strategy` and `Arbitrary` +/// implementations, see [the proptest book][book], and the [`Strategy`] trait. +/// +/// ## Scheduler +/// +/// Similar to `#[gpui::test]`, this macro will choose random seeds for the test +/// scheduler. 
It uses `.no_shrink()` to tell proptest that all seeds are +/// roughly equivalent in terms of "complexity". If `$SEED` is set, it will +/// affect **ONLY** the seed passed to the scheduler. To control other values, +/// use custom `Strategy`s. +/// +/// [`#[proptest::property_test]`]: https://docs.rs/proptest/latest/proptest/attr.property_test.html +/// [book]: https://proptest-rs.github.io/proptest/intro.html +/// [`Strategy`]: https://docs.rs/proptest/latest/proptest/strategy/trait.Strategy.html +#[proc_macro_attribute] +pub fn property_test(args: TokenStream, function: TokenStream) -> TokenStream { + property_test::test(args.into(), function.into()).into() +} + /// When added to a trait, `#[derive_inspector_reflection]` generates a module which provides /// enumeration and lookup by name of all methods that have the shape `fn method(self) -> Self`. /// This is used by the inspector so that it can use the builder methods in `Styled` and diff --git a/crates/gpui_macros/src/property_test.rs b/crates/gpui_macros/src/property_test.rs new file mode 100644 index 0000000000000000000000000000000000000000..6bf60eca1b63a86bce22fbf4ae771230ee34726d --- /dev/null +++ b/crates/gpui_macros/src/property_test.rs @@ -0,0 +1,199 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote, quote_spanned}; +use syn::{ + FnArg, Ident, ItemFn, Type, parse2, punctuated::Punctuated, spanned::Spanned, token::Comma, +}; + +pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { + let item_span = item.span(); + let Ok(func) = parse2::(item) else { + return quote_spanned! 
{ item_span => + compile_error!("#[gpui::property_test] must be placed on a function"); + }; + }; + + let test_name = func.sig.ident.clone(); + let inner_fn_name = format_ident!("__{test_name}"); + + let parsed_args = parse_args(func.sig.inputs, &test_name); + + let inner_body = func.block; + let inner_arg_decls = parsed_args.inner_fn_decl_args; + let asyncness = func.sig.asyncness; + + let inner_fn = quote! { + let #inner_fn_name = #asyncness move |#inner_arg_decls| #inner_body; + }; + + let arg_errors = parsed_args.errors; + let proptest_args = parsed_args.proptest_args; + let inner_args = parsed_args.inner_fn_args; + let cx_vars = parsed_args.cx_vars; + let cx_teardowns = parsed_args.cx_teardowns; + + let proptest_args = quote! { + #[strategy = ::gpui::seed_strategy()] __seed: u64, + #proptest_args + }; + + let run_test_body = match &asyncness { + None => quote! { + #cx_vars + #inner_fn_name(#inner_args); + #cx_teardowns + }, + Some(_) => quote! { + let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); + #cx_vars + foreground_executor.block_test(#inner_fn_name(#inner_args)); + #cx_teardowns + }, + }; + + quote! 
{ + #arg_errors + + #[::gpui::proptest::property_test(proptest_path = "::gpui::proptest", #args)] + fn #test_name(#proptest_args) { + #inner_fn + + ::gpui::run_test_once( + __seed, + Box::new(move |dispatcher| { + #run_test_body + }), + ) + } + } +} + +#[derive(Default)] +struct ParsedArgs { + cx_vars: TokenStream, + cx_teardowns: TokenStream, + proptest_args: TokenStream, + errors: TokenStream, + + // exprs passed at the call-site + inner_fn_args: TokenStream, + // args in the declaration + inner_fn_decl_args: TokenStream, +} + +fn parse_args(args: Punctuated, test_name: &Ident) -> ParsedArgs { + let mut parsed = ParsedArgs::default(); + let mut args = args.into_iter().collect(); + + remove_cxs(&mut parsed, &mut args, test_name); + remove_std_rng(&mut parsed, &mut args); + remove_background_executor(&mut parsed, &mut args); + + // all remaining args forwarded to proptest's macro + parsed.proptest_args = quote!( #(#args),* ); + + parsed +} + +fn remove_cxs(parsed: &mut ParsedArgs, args: &mut Vec, test_name: &Ident) { + let mut ix = 0; + args.retain_mut(|arg| { + if !is_test_cx(arg) { + return true; + } + + let cx_varname = format_ident!("cx_{ix}"); + ix += 1; + + parsed.cx_vars.extend(quote!( + let mut #cx_varname = gpui::TestAppContext::build( + dispatcher.clone(), + Some(stringify!(#test_name)), + ); + )); + parsed.cx_teardowns.extend(quote!( + dispatcher.run_until_parked(); + #cx_varname.executor().forbid_parking(); + #cx_varname.quit(); + dispatcher.run_until_parked(); + )); + + parsed.inner_fn_decl_args.extend(quote!(#arg,)); + parsed.inner_fn_args.extend(quote!(&mut #cx_varname,)); + + false + }); +} + +fn remove_std_rng(parsed: &mut ParsedArgs, args: &mut Vec) { + args.retain_mut(|arg| { + if !is_std_rng(arg) { + return true; + } + + parsed.errors.extend(quote_spanned! { arg.span() => + compile_error!("`StdRng` is not allowed in a property test. Consider implementing `Arbitrary`, or implementing a custom `Strategy`. 
https://altsysrq.github.io/proptest-book/proptest/tutorial/strategy-basics.html"); + }); + + false + }); +} + +fn remove_background_executor(parsed: &mut ParsedArgs, args: &mut Vec) { + args.retain_mut(|arg| { + if !is_background_executor(arg) { + return true; + } + + parsed.inner_fn_decl_args.extend(quote!(#arg,)); + parsed + .inner_fn_args + .extend(quote!(gpui::BackgroundExecutor::new(std::sync::Arc::new( + dispatcher.clone() + )),)); + + false + }); +} + +// Matches `&TestAppContext` or `&foo::bar::baz::TestAppContext` +fn is_test_cx(arg: &FnArg) -> bool { + let FnArg::Typed(arg) = arg else { + return false; + }; + + let Type::Reference(ty) = &*arg.ty else { + return false; + }; + + let Type::Path(ty) = &*ty.elem else { + return false; + }; + + ty.path + .segments + .last() + .is_some_and(|seg| seg.ident == "TestAppContext") +} + +fn is_std_rng(arg: &FnArg) -> bool { + is_path_with_last_segment(arg, "StdRng") +} + +fn is_background_executor(arg: &FnArg) -> bool { + is_path_with_last_segment(arg, "BackgroundExecutor") +} + +fn is_path_with_last_segment(arg: &FnArg, last_segment: &str) -> bool { + let FnArg::Typed(arg) = arg else { + return false; + }; + + let Type::Path(ty) = &*arg.ty else { + return false; + }; + + ty.path + .segments + .last() + .is_some_and(|seg| seg.ident == last_segment) +} diff --git a/crates/gpui_web/src/dispatcher.rs b/crates/gpui_web/src/dispatcher.rs index d9419fb35353cfadd809b0bbc1cb9e7dbf124cda..5a0911f7ef1a33d1959de6d03f9f9797978b7a9b 100644 --- a/crates/gpui_web/src/dispatcher.rs +++ b/crates/gpui_web/src/dispatcher.rs @@ -184,10 +184,6 @@ impl WebDispatcher { } }; - if runnable.metadata().is_closed() { - continue; - } - runnable.run(); } }) @@ -263,9 +259,7 @@ impl PlatformDispatcher for WebDispatcher { let millis = duration.as_millis().min(i32::MAX as u128) as i32; if self.on_main_thread() { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); 
self.browser_window .set_timeout_with_callback_and_timeout_and_arguments_0( @@ -300,15 +294,11 @@ impl PlatformDispatcher for WebDispatcher { fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) { match item { MainThreadItem::Runnable(runnable) => { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); } MainThreadItem::Delayed { runnable, millis } => { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); window .set_timeout_with_callback_and_timeout_and_arguments_0( @@ -325,9 +315,7 @@ fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) { fn schedule_runnable(window: &web_sys::Window, runnable: RunnableVariant, priority: Priority) { let callback = Closure::once_into_js(move || { - if !runnable.metadata().is_closed() { - runnable.run(); - } + runnable.run(); }); let callback: &js_sys::Function = callback.unchecked_ref(); diff --git a/crates/gpui_wgpu/src/gpui_wgpu.rs b/crates/gpui_wgpu/src/gpui_wgpu.rs index a306a9d4cac2251a46cd1115462bdcbe4b368759..452c3c03f51282c34368527dd503b90b92193586 100644 --- a/crates/gpui_wgpu/src/gpui_wgpu.rs +++ b/crates/gpui_wgpu/src/gpui_wgpu.rs @@ -4,6 +4,7 @@ mod wgpu_context; mod wgpu_renderer; pub use cosmic_text_system::*; +pub use wgpu; pub use wgpu_atlas::*; pub use wgpu_context::*; -pub use wgpu_renderer::*; +pub use wgpu_renderer::{GpuContext, WgpuRenderer, WgpuSurfaceConfig}; diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index ffef3a65398c3f03639a8551506463f91a862c33..3eba5c533f80d727425cc87ae89b754afa8722b1 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -65,6 +65,17 @@ impl WgpuAtlas { view: texture.view.clone(), } } + + /// Handles device lost by clearing all textures and cached tiles. + /// The atlas will lazily recreate textures as needed on subsequent frames. 
+ pub fn handle_device_lost(&self, device: Arc, queue: Arc) { + let mut lock = self.0.lock(); + lock.device = device; + lock.queue = queue; + lock.storage = WgpuAtlasStorage::default(); + lock.tiles_by_key.clear(); + lock.pending_uploads.clear(); + } } impl PlatformAtlas for WgpuAtlas { diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index b7883a6910261da8dc3f1df6414c5e38e1c46cd2..6df2e6fa8aa9d7f529b500e4691c649c21c1fdb1 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -3,6 +3,7 @@ use anyhow::Context as _; #[cfg(not(target_family = "wasm"))] use gpui_util::ResultExt; use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; pub struct WgpuContext { pub instance: wgpu::Instance, @@ -10,9 +11,10 @@ pub struct WgpuContext { pub device: Arc, pub queue: Arc, dual_source_blending: bool, + device_lost: Arc, } -#[cfg(not(target_family = "wasm"))] +#[derive(Clone, Copy)] pub struct CompositorGpuHint { pub vendor_id: u32, pub device_id: u32, @@ -47,6 +49,17 @@ impl WgpuContext { compositor_gpu.as_ref(), ))?; + let device_lost = Arc::new(AtomicBool::new(false)); + device.set_device_lost_callback({ + let device_lost = Arc::clone(&device_lost); + move |reason, message| { + log::error!("wgpu device lost: reason={reason:?}, message={message}"); + if reason != wgpu::DeviceLostReason::Destroyed { + device_lost.store(true, Ordering::Relaxed); + } + } + }); + log::info!( "Selected GPU adapter: {:?} ({:?})", adapter.get_info().name, @@ -59,6 +72,7 @@ impl WgpuContext { device: Arc::new(device), queue: Arc::new(queue), dual_source_blending, + device_lost, }) } @@ -86,6 +100,7 @@ impl WgpuContext { adapter.get_info().backend ); + let device_lost = Arc::new(AtomicBool::new(false)); let (device, queue, dual_source_blending) = Self::create_device(&adapter).await?; Ok(Self { @@ -94,6 +109,7 @@ impl WgpuContext { device: Arc::new(device), queue: Arc::new(queue), dual_source_blending, + 
device_lost, }) } @@ -320,6 +336,17 @@ impl WgpuContext { pub fn supports_dual_source_blending(&self) -> bool { self.dual_source_blending } + + /// Returns true if the GPU device was lost (e.g., due to driver crash, suspend/resume). + /// When this returns true, the context should be recreated. + pub fn device_lost(&self) -> bool { + self.device_lost.load(Ordering::Relaxed) + } + + /// Returns a clone of the device_lost flag for sharing with renderers. + pub(crate) fn device_lost_flag(&self) -> Arc { + Arc::clone(&self.device_lost) + } } #[cfg(not(target_family = "wasm"))] diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 2fd83b7b065e7ce4fe0ba9ec017f39264a33bee3..da94747f3b4debcc65723c8a0ca031d59d9ae03c 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1,6 +1,4 @@ -#[cfg(not(target_family = "wasm"))] -use crate::CompositorGpuHint; -use crate::{WgpuAtlas, WgpuContext}; +use crate::{CompositorGpuHint, WgpuAtlas, WgpuContext}; use bytemuck::{Pod, Zeroable}; use gpui::{ AtlasTextureId, Background, Bounds, DevicePixels, GpuSpecs, MonochromeSprite, Path, Point, @@ -10,7 +8,9 @@ use gpui::{ use log::warn; #[cfg(not(target_family = "wasm"))] use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; +use std::cell::RefCell; use std::num::NonZeroU64; +use std::rc::Rc; use std::sync::{Arc, Mutex}; #[repr(C)] @@ -93,28 +93,42 @@ struct WgpuBindGroupLayouts { surfaces: wgpu::BindGroupLayout, } -pub struct WgpuRenderer { +/// Shared GPU context reference, used to coordinate device recovery across multiple windows. +pub type GpuContext = Rc>>; + +/// GPU resources that must be dropped together during device recovery. 
+struct WgpuResources { device: Arc, queue: Arc, surface: wgpu::Surface<'static>, - surface_config: wgpu::SurfaceConfiguration, pipelines: WgpuPipelines, bind_group_layouts: WgpuBindGroupLayouts, - atlas: Arc, atlas_sampler: wgpu::Sampler, globals_buffer: wgpu::Buffer, - path_globals_offset: u64, - gamma_offset: u64, globals_bind_group: wgpu::BindGroup, path_globals_bind_group: wgpu::BindGroup, instance_buffer: wgpu::Buffer, - instance_buffer_capacity: u64, - max_buffer_size: u64, - storage_buffer_alignment: u64, path_intermediate_texture: Option, path_intermediate_view: Option, path_msaa_texture: Option, path_msaa_view: Option, +} + +pub struct WgpuRenderer { + /// Shared GPU context for device recovery coordination (unused on WASM). + #[allow(dead_code)] + context: Option, + /// Compositor GPU hint for adapter selection (unused on WASM). + #[allow(dead_code)] + compositor_gpu: Option, + resources: Option, + surface_config: wgpu::SurfaceConfiguration, + atlas: Arc, + path_globals_offset: u64, + gamma_offset: u64, + instance_buffer_capacity: u64, + max_buffer_size: u64, + storage_buffer_alignment: u64, rendering_params: RenderingParameters, dual_source_blending: bool, adapter_info: wgpu::AdapterInfo, @@ -123,17 +137,34 @@ pub struct WgpuRenderer { max_texture_size: u32, last_error: Arc>>, failed_frame_count: u32, + device_lost: std::sync::Arc, } impl WgpuRenderer { + fn resources(&self) -> &WgpuResources { + self.resources + .as_ref() + .expect("GPU resources not available") + } + + fn resources_mut(&mut self) -> &mut WgpuResources { + self.resources + .as_mut() + .expect("GPU resources not available") + } + /// Creates a new WgpuRenderer from raw window handles. /// + /// The `gpu_context` is a shared reference that coordinates GPU context across + /// multiple windows. The first window to create a renderer will initialize the + /// context; subsequent windows will share it. 
+ /// /// # Safety /// The caller must ensure that the window handle remains valid for the lifetime /// of the returned renderer. #[cfg(not(target_family = "wasm"))] pub fn new( - gpu_context: &mut Option, + gpu_context: GpuContext, window: &W, config: WgpuSurfaceConfig, compositor_gpu: Option, @@ -154,6 +185,7 @@ impl WgpuRenderer { // The surface must be created with the same instance that will be used for // adapter selection, otherwise wgpu will panic. let instance = gpu_context + .borrow() .as_ref() .map(|ctx| ctx.instance.clone()) .unwrap_or_else(WgpuContext::instance); @@ -167,15 +199,28 @@ impl WgpuRenderer { .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))? }; - let context = match gpu_context { + let mut ctx_ref = gpu_context.borrow_mut(); + let context = match ctx_ref.as_mut() { Some(context) => { context.check_compatible_with_surface(&surface)?; context } - None => gpu_context.insert(WgpuContext::new(instance, &surface, compositor_gpu)?), + None => ctx_ref.insert(WgpuContext::new(instance, &surface, compositor_gpu)?), }; - Self::new_with_surface(context, surface, config) + let atlas = Arc::new(WgpuAtlas::new( + Arc::clone(&context.device), + Arc::clone(&context.queue), + )); + + Self::new_internal( + Some(Rc::clone(&gpu_context)), + context, + surface, + config, + compositor_gpu, + atlas, + ) } #[cfg(target_family = "wasm")] @@ -188,13 +233,22 @@ impl WgpuRenderer { .instance .create_surface(wgpu::SurfaceTarget::Canvas(canvas.clone())) .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))?; - Self::new_with_surface(context, surface, config) + + let atlas = Arc::new(WgpuAtlas::new( + Arc::clone(&context.device), + Arc::clone(&context.queue), + )); + + Self::new_internal(None, context, surface, config, None, atlas) } - fn new_with_surface( + fn new_internal( + gpu_context: Option, context: &WgpuContext, surface: wgpu::Surface<'static>, config: WgpuSurfaceConfig, + compositor_gpu: Option, + atlas: Arc, ) -> anyhow::Result { let 
surface_caps = surface.get_capabilities(&context.adapter); let preferred_formats = [ @@ -289,7 +343,6 @@ impl WgpuRenderer { dual_source_blending, ); - let atlas = Arc::new(WgpuAtlas::new(Arc::clone(&device), Arc::clone(&queue))); let atlas_sampler = device.create_sampler(&wgpu::SamplerDescriptor { label: Some("atlas_sampler"), mag_filter: wgpu::FilterMode::Linear, @@ -375,30 +428,36 @@ impl WgpuRenderer { *guard = Some(error.to_string()); })); - Ok(Self { + let resources = WgpuResources { device, queue, surface, - surface_config, pipelines, bind_group_layouts, - atlas, atlas_sampler, globals_buffer, - path_globals_offset, - gamma_offset, globals_bind_group, path_globals_bind_group, instance_buffer, - instance_buffer_capacity: initial_instance_buffer_capacity, - max_buffer_size, - storage_buffer_alignment, // Defer intermediate texture creation to first draw call via ensure_intermediate_textures(). // This avoids panics when the device/surface is in an invalid state during initialization. 
path_intermediate_texture: None, path_intermediate_view: None, path_msaa_texture: None, path_msaa_view: None, + }; + + Ok(Self { + context: gpu_context, + compositor_gpu, + resources: Some(resources), + surface_config, + atlas, + path_globals_offset, + gamma_offset, + instance_buffer_capacity: initial_instance_buffer_capacity, + max_buffer_size, + storage_buffer_alignment, rendering_params, dual_source_blending, adapter_info, @@ -407,6 +466,7 @@ impl WgpuRenderer { max_texture_size, last_error, failed_frame_count: 0, + device_lost: context.device_lost_flag(), }) } @@ -855,8 +915,14 @@ impl WgpuRenderer { ); } + self.surface_config.width = clamped_width.max(1); + self.surface_config.height = clamped_height.max(1); + let surface_config = self.surface_config.clone(); + + let resources = self.resources_mut(); + // Wait for any in-flight GPU work to complete before destroying textures - if let Err(e) = self.device.poll(wgpu::PollType::Wait { + if let Err(e) = resources.device.poll(wgpu::PollType::Wait { submission_index: None, timeout: None, }) { @@ -864,55 +930,53 @@ impl WgpuRenderer { } // Destroy old textures before allocating new ones to avoid GPU memory spikes - if let Some(ref texture) = self.path_intermediate_texture { + if let Some(ref texture) = resources.path_intermediate_texture { texture.destroy(); } - if let Some(ref texture) = self.path_msaa_texture { + if let Some(ref texture) = resources.path_msaa_texture { texture.destroy(); } - self.surface_config.width = clamped_width.max(1); - self.surface_config.height = clamped_height.max(1); - self.surface.configure(&self.device, &self.surface_config); + resources + .surface + .configure(&resources.device, &surface_config); // Invalidate intermediate textures - they will be lazily recreated // in draw() after we confirm the surface is healthy. This avoids // panics when the device/surface is in an invalid state during resize. 
- self.path_intermediate_texture = None; - self.path_intermediate_view = None; - self.path_msaa_texture = None; - self.path_msaa_view = None; + resources.path_intermediate_texture = None; + resources.path_intermediate_view = None; + resources.path_msaa_texture = None; + resources.path_msaa_view = None; } } fn ensure_intermediate_textures(&mut self) { - if self.path_intermediate_texture.is_some() { + if self.resources().path_intermediate_texture.is_some() { return; } - let (path_intermediate_texture, path_intermediate_view) = { - let (t, v) = Self::create_path_intermediate( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, - ); - (Some(t), Some(v)) - }; - self.path_intermediate_texture = path_intermediate_texture; - self.path_intermediate_view = path_intermediate_view; + let format = self.surface_config.format; + let width = self.surface_config.width; + let height = self.surface_config.height; + let path_sample_count = self.rendering_params.path_sample_count; + let resources = self.resources_mut(); + + let (t, v) = Self::create_path_intermediate(&resources.device, format, width, height); + resources.path_intermediate_texture = Some(t); + resources.path_intermediate_view = Some(v); let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, - self.rendering_params.path_sample_count, + &resources.device, + format, + width, + height, + path_sample_count, ) .map(|(t, v)| (Some(t), Some(v))) .unwrap_or((None, None)); - self.path_msaa_texture = path_msaa_texture; - self.path_msaa_view = path_msaa_view; + resources.path_msaa_texture = path_msaa_texture; + resources.path_msaa_view = path_msaa_view; } pub fn update_transparency(&mut self, transparent: bool) { @@ -924,14 +988,20 @@ impl WgpuRenderer { if new_alpha_mode != self.surface_config.alpha_mode { self.surface_config.alpha_mode = new_alpha_mode; - 
self.surface.configure(&self.device, &self.surface_config); - self.pipelines = Self::create_pipelines( - &self.device, - &self.bind_group_layouts, - self.surface_config.format, - self.surface_config.alpha_mode, - self.rendering_params.path_sample_count, - self.dual_source_blending, + let surface_config = self.surface_config.clone(); + let path_sample_count = self.rendering_params.path_sample_count; + let dual_source_blending = self.dual_source_blending; + let resources = self.resources_mut(); + resources + .surface + .configure(&resources.device, &surface_config); + resources.pipelines = Self::create_pipelines( + &resources.device, + &resources.bind_group_layouts, + surface_config.format, + surface_config.alpha_mode, + path_sample_count, + dual_source_blending, ); } } @@ -982,14 +1052,20 @@ impl WgpuRenderer { self.atlas.before_frame(); - let frame = match self.surface.get_current_texture() { + let texture_result = self.resources().surface.get_current_texture(); + let frame = match texture_result { Ok(frame) => frame, Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => { - self.surface.configure(&self.device, &self.surface_config); + let surface_config = self.surface_config.clone(); + let resources = self.resources_mut(); + resources + .surface + .configure(&resources.device, &surface_config); return; } Err(e) => { - log::error!("Failed to acquire surface texture: {e}"); + *self.last_error.lock().unwrap() = + Some(format!("Failed to acquire surface texture: {e}")); return; } }; @@ -1028,28 +1104,35 @@ impl WgpuRenderer { ..globals }; - self.queue - .write_buffer(&self.globals_buffer, 0, bytemuck::bytes_of(&globals)); - self.queue.write_buffer( - &self.globals_buffer, - self.path_globals_offset, - bytemuck::bytes_of(&path_globals), - ); - self.queue.write_buffer( - &self.globals_buffer, - self.gamma_offset, - bytemuck::bytes_of(&gamma_params), - ); + { + let resources = self.resources(); + resources.queue.write_buffer( + &resources.globals_buffer, + 0, + 
bytemuck::bytes_of(&globals), + ); + resources.queue.write_buffer( + &resources.globals_buffer, + self.path_globals_offset, + bytemuck::bytes_of(&path_globals), + ); + resources.queue.write_buffer( + &resources.globals_buffer, + self.gamma_offset, + bytemuck::bytes_of(&gamma_params), + ); + } loop { let mut instance_offset: u64 = 0; let mut overflow = false; - let mut encoder = self - .device - .create_command_encoder(&wgpu::CommandEncoderDescriptor { - label: Some("main_encoder"), - }); + let mut encoder = + self.resources() + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("main_encoder"), + }); { let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { @@ -1169,7 +1252,9 @@ impl WgpuRenderer { continue; } - self.queue.submit(std::iter::once(encoder.finish())); + self.resources() + .queue + .submit(std::iter::once(encoder.finish())); frame.present(); return; } @@ -1185,7 +1270,7 @@ impl WgpuRenderer { self.draw_instances( data, quads.len() as u32, - &self.pipelines.quads, + &self.resources().pipelines.quads, instance_offset, pass, ) @@ -1201,7 +1286,7 @@ impl WgpuRenderer { self.draw_instances( data, shadows.len() as u32, - &self.pipelines.shadows, + &self.resources().pipelines.shadows, instance_offset, pass, ) @@ -1217,7 +1302,7 @@ impl WgpuRenderer { self.draw_instances( data, underlines.len() as u32, - &self.pipelines.underlines, + &self.resources().pipelines.underlines, instance_offset, pass, ) @@ -1236,7 +1321,7 @@ impl WgpuRenderer { data, sprites.len() as u32, &tex_info.view, - &self.pipelines.mono_sprites, + &self.resources().pipelines.mono_sprites, instance_offset, pass, ) @@ -1251,11 +1336,12 @@ impl WgpuRenderer { ) -> bool { let tex_info = self.atlas.get_texture_info(texture_id); let data = unsafe { Self::instance_bytes(sprites) }; - let pipeline = self + let resources = self.resources(); + let pipeline = resources .pipelines .subpixel_sprites .as_ref() - .unwrap_or(&self.pipelines.mono_sprites); + 
.unwrap_or(&resources.pipelines.mono_sprites); self.draw_instances_with_texture( data, sprites.len() as u32, @@ -1279,7 +1365,7 @@ impl WgpuRenderer { data, sprites.len() as u32, &tex_info.view, - &self.pipelines.poly_sprites, + &self.resources().pipelines.poly_sprites, instance_offset, pass, ) @@ -1299,16 +1385,19 @@ impl WgpuRenderer { let Some((offset, size)) = self.write_to_instance_buffer(instance_offset, data) else { return false; }; - let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: None, - layout: &self.bind_group_layouts.instances, - entries: &[wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(offset, size), - }], - }); + let resources = self.resources(); + let bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: None, + layout: &resources.bind_group_layouts.instances, + entries: &[wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(offset, size), + }], + }); pass.set_pipeline(pipeline); - pass.set_bind_group(0, &self.globals_bind_group, &[]); + pass.set_bind_group(0, &resources.globals_bind_group, &[]); pass.set_bind_group(1, &bind_group, &[]); pass.draw(0..4, 0..instance_count); true @@ -1329,26 +1418,29 @@ impl WgpuRenderer { let Some((offset, size)) = self.write_to_instance_buffer(instance_offset, data) else { return false; }; - let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: None, - layout: &self.bind_group_layouts.instances_with_texture, - entries: &[ - wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(offset, size), - }, - wgpu::BindGroupEntry { - binding: 1, - resource: wgpu::BindingResource::TextureView(texture_view), - }, - wgpu::BindGroupEntry { - binding: 2, - resource: wgpu::BindingResource::Sampler(&self.atlas_sampler), - }, - ], - }); + let resources = self.resources(); + let bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: None, 
+ layout: &resources.bind_group_layouts.instances_with_texture, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(offset, size), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: wgpu::BindingResource::TextureView(texture_view), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: wgpu::BindingResource::Sampler(&resources.atlas_sampler), + }, + ], + }); pass.set_pipeline(pipeline); - pass.set_bind_group(0, &self.globals_bind_group, &[]); + pass.set_bind_group(0, &resources.globals_bind_group, &[]); pass.set_bind_group(1, &bind_group, &[]); pass.draw(0..4, 0..instance_count); true @@ -1386,7 +1478,8 @@ impl WgpuRenderer { vec![PathSprite { bounds }] }; - let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + let resources = self.resources(); + let Some(path_intermediate_view) = resources.path_intermediate_view.as_ref() else { return true; }; @@ -1395,7 +1488,7 @@ impl WgpuRenderer { sprite_data, sprites.len() as u32, path_intermediate_view, - &self.pipelines.paths, + &resources.pipelines.paths, instance_offset, pass, ) @@ -1429,20 +1522,23 @@ impl WgpuRenderer { return false; }; - let data_bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { - label: Some("path_rasterization_bind_group"), - layout: &self.bind_group_layouts.instances, - entries: &[wgpu::BindGroupEntry { - binding: 0, - resource: self.instance_binding(vertex_offset, vertex_size), - }], - }); + let resources = self.resources(); + let data_bind_group = resources + .device + .create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("path_rasterization_bind_group"), + layout: &resources.bind_group_layouts.instances, + entries: &[wgpu::BindGroupEntry { + binding: 0, + resource: self.instance_binding(vertex_offset, vertex_size), + }], + }); - let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + let Some(path_intermediate_view) = resources.path_intermediate_view.as_ref() else { return 
true; }; - let (target_view, resolve_target) = if let Some(ref msaa_view) = self.path_msaa_view { + let (target_view, resolve_target) = if let Some(ref msaa_view) = resources.path_msaa_view { (msaa_view, Some(path_intermediate_view)) } else { (path_intermediate_view, None) @@ -1464,8 +1560,8 @@ impl WgpuRenderer { ..Default::default() }); - pass.set_pipeline(&self.pipelines.path_rasterization); - pass.set_bind_group(0, &self.path_globals_bind_group, &[]); + pass.set_pipeline(&resources.pipelines.path_rasterization); + pass.set_bind_group(0, &resources.path_globals_bind_group, &[]); pass.set_bind_group(1, &data_bind_group, &[]); pass.draw(0..vertices.len() as u32, 0..1); } @@ -1476,7 +1572,8 @@ impl WgpuRenderer { fn grow_instance_buffer(&mut self) { let new_capacity = (self.instance_buffer_capacity * 2).min(self.max_buffer_size); log::info!("increased instance buffer size to {}", new_capacity); - self.instance_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { + let resources = self.resources_mut(); + resources.instance_buffer = resources.device.create_buffer(&wgpu::BufferDescriptor { label: Some("instance_buffer"), size: new_capacity, usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, @@ -1495,14 +1592,17 @@ impl WgpuRenderer { if offset + size > self.instance_buffer_capacity { return None; } - self.queue.write_buffer(&self.instance_buffer, offset, data); + let resources = self.resources(); + resources + .queue + .write_buffer(&resources.instance_buffer, offset, data); *instance_offset = offset + size; Some((offset, NonZeroU64::new(size).expect("size is at least 16"))) } fn instance_binding(&self, offset: u64, size: NonZeroU64) -> wgpu::BindingResource<'_> { wgpu::BindingResource::Buffer(wgpu::BufferBinding { - buffer: &self.instance_buffer, + buffer: &self.resources().instance_buffer, offset, size: Some(size), }) @@ -1511,6 +1611,97 @@ impl WgpuRenderer { pub fn destroy(&mut self) { // wgpu resources are automatically cleaned up when 
dropped } + + /// Returns true if the GPU device was lost and recovery is needed. + pub fn device_lost(&self) -> bool { + self.device_lost.load(std::sync::atomic::Ordering::SeqCst) + } + + /// Recovers from a lost GPU device by recreating the renderer with a new context. + /// + /// Call this after detecting `device_lost()` returns true. + /// + /// This method coordinates recovery across multiple windows: + /// - The first window to call this will recreate the shared context + /// - Subsequent windows will adopt the already-recovered context + #[cfg(not(target_family = "wasm"))] + pub fn recover( + &mut self, + raw_display_handle: raw_window_handle::RawDisplayHandle, + raw_window_handle: raw_window_handle::RawWindowHandle, + ) -> anyhow::Result<()> { + let gpu_context = self.context.as_ref().expect("recover requires gpu_context"); + + // Check if another window already recovered the context + let needs_new_context = gpu_context + .borrow() + .as_ref() + .is_none_or(|ctx| ctx.device_lost()); + + let surface = if needs_new_context { + log::warn!("GPU device lost, recreating context..."); + + // Drop old resources to release Arc/Arc and GPU resources + self.resources = None; + *gpu_context.borrow_mut() = None; + + // Wait for GPU driver to stabilize (350ms copied from windows :shrug:) + std::thread::sleep(std::time::Duration::from_millis(350)); + + let instance = WgpuContext::instance(); + let surface = create_surface(&instance, raw_display_handle, raw_window_handle)?; + let new_context = WgpuContext::new(instance, &surface, self.compositor_gpu)?; + *gpu_context.borrow_mut() = Some(new_context); + surface + } else { + let ctx_ref = gpu_context.borrow(); + let instance = &ctx_ref.as_ref().unwrap().instance; + create_surface(instance, raw_display_handle, raw_window_handle)? 
+ }; + + let config = WgpuSurfaceConfig { + size: gpui::Size { + width: gpui::DevicePixels(self.surface_config.width as i32), + height: gpui::DevicePixels(self.surface_config.height as i32), + }, + transparent: self.surface_config.alpha_mode != wgpu::CompositeAlphaMode::Opaque, + }; + let gpu_context = Rc::clone(gpu_context); + let ctx_ref = gpu_context.borrow(); + let context = ctx_ref.as_ref().expect("context should exist"); + + self.resources = None; + self.atlas + .handle_device_lost(Arc::clone(&context.device), Arc::clone(&context.queue)); + + *self = Self::new_internal( + Some(gpu_context.clone()), + context, + surface, + config, + self.compositor_gpu, + self.atlas.clone(), + )?; + + log::info!("GPU recovery complete"); + Ok(()) + } +} + +#[cfg(not(target_family = "wasm"))] +fn create_surface( + instance: &wgpu::Instance, + raw_display_handle: raw_window_handle::RawDisplayHandle, + raw_window_handle: raw_window_handle::RawWindowHandle, +) -> anyhow::Result> { + unsafe { + instance + .create_surface_unsafe(wgpu::SurfaceTargetUnsafe::RawHandle { + raw_display_handle, + raw_window_handle, + }) + .map_err(|e| anyhow::anyhow!("{e}")) + } } struct RenderingParameters { diff --git a/crates/gpui_windows/src/dispatcher.rs b/crates/gpui_windows/src/dispatcher.rs index 060cdb7ba626133b9c201980e54bd0479694faa6..a5cfd9dc10d9afcce9580565943c28cb83dc9dab 100644 --- a/crates/gpui_windows/src/dispatcher.rs +++ b/crates/gpui_windows/src/dispatcher.rs @@ -58,10 +58,6 @@ impl WindowsDispatcher { let mut task_wrapper = Some(runnable); WorkItemHandler::new(move |_| { let runnable = task_wrapper.take().unwrap(); - // Check if the executor that spawned this task was closed - if runnable.metadata().is_closed() { - return Ok(()); - } Self::execute_runnable(runnable); Ok(()) }) @@ -75,10 +71,6 @@ impl WindowsDispatcher { let mut task_wrapper = Some(runnable); TimerElapsedHandler::new(move |_| { let runnable = task_wrapper.take().unwrap(); - // Check if the executor that spawned this 
task was closed - if runnable.metadata().is_closed() { - return Ok(()); - } Self::execute_runnable(runnable); Ok(()) }) diff --git a/crates/gpui_windows/src/events.rs b/crates/gpui_windows/src/events.rs index 6bc7b73cc756b44b08ddf7abc5f668681c03dcb9..3506ae2a2cc22d57c4cefba1a4c5a1850c411453 100644 --- a/crates/gpui_windows/src/events.rs +++ b/crates/gpui_windows/src/events.rs @@ -593,33 +593,63 @@ impl WindowsWindowInner { } pub(crate) fn update_ime_position(&self, handle: HWND, caret_position: POINT) { + let Some(ctx) = ImeContext::get(handle) else { + return; + }; unsafe { - let ctx = ImmGetContext(handle); - if ctx.is_invalid() { - return; - } + ImmSetCompositionWindow( + *ctx, + &COMPOSITIONFORM { + dwStyle: CFS_POINT, + ptCurrentPos: caret_position, + ..Default::default() + }, + ) + .ok() + .log_err(); - let config = COMPOSITIONFORM { - dwStyle: CFS_POINT, - ptCurrentPos: caret_position, - ..Default::default() - }; - ImmSetCompositionWindow(ctx, &config).ok().log_err(); - let config = CANDIDATEFORM { - dwStyle: CFS_CANDIDATEPOS, - ptCurrentPos: caret_position, - ..Default::default() - }; - ImmSetCandidateWindow(ctx, &config).ok().log_err(); - ImmReleaseContext(handle, ctx).ok().log_err(); + ImmSetCandidateWindow( + *ctx, + &CANDIDATEFORM { + dwStyle: CFS_CANDIDATEPOS, + ptCurrentPos: caret_position, + ..Default::default() + }, + ) + .ok() + .log_err(); + } + } + + fn update_ime_enabled(&self, handle: HWND) { + let ime_enabled = self + .with_input_handler(|input_handler| input_handler.query_accepts_text_input()) + .unwrap_or(false); + if ime_enabled == self.state.ime_enabled.get() { + return; + } + self.state.ime_enabled.set(ime_enabled); + unsafe { + if ime_enabled { + ImmAssociateContextEx(handle, HIMC::default(), IACE_DEFAULT) + .ok() + .log_err(); + } else { + if let Some(ctx) = ImeContext::get(handle) { + ImmNotifyIME(*ctx, NI_COMPOSITIONSTR, CPS_COMPLETE, 0) + .ok() + .log_err(); + } + ImmAssociateContextEx(handle, HIMC::default(), 0) + .ok() + 
.log_err(); + } } } fn handle_ime_composition(&self, handle: HWND, lparam: LPARAM) -> Option { - let ctx = unsafe { ImmGetContext(handle) }; - let result = self.handle_ime_composition_inner(ctx, lparam); - unsafe { ImmReleaseContext(handle, ctx).ok().log_err() }; - result + let ctx = ImeContext::get(handle)?; + self.handle_ime_composition_inner(*ctx, lparam) } fn handle_ime_composition_inner(&self, ctx: HIMC, lparam: LPARAM) -> Option { @@ -1123,6 +1153,7 @@ impl WindowsWindowInner { }); self.state.callbacks.request_frame.set(Some(request_frame)); + self.update_ime_enabled(handle); unsafe { ValidateRect(Some(handle), None).ok().log_err() }; Some(0) @@ -1205,6 +1236,36 @@ impl WindowsWindowInner { } } +struct ImeContext { + hwnd: HWND, + himc: HIMC, +} + +impl ImeContext { + fn get(hwnd: HWND) -> Option { + let himc = unsafe { ImmGetContext(hwnd) }; + if himc.is_invalid() { + return None; + } + Some(Self { hwnd, himc }) + } +} + +impl std::ops::Deref for ImeContext { + type Target = HIMC; + fn deref(&self) -> &HIMC { + &self.himc + } +} + +impl Drop for ImeContext { + fn drop(&mut self) { + unsafe { + ImmReleaseContext(self.hwnd, self.himc).ok().log_err(); + } + } +} + fn handle_key_event( wparam: WPARAM, lparam: LPARAM, diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index 02653d7e53a4356979b81897b39ab0393bbf54a9..62e88c47dfc10fedf6d636e2c6d6cbdcdc2e37c5 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -52,6 +52,7 @@ pub struct WindowsWindowState { pub callbacks: Callbacks, pub input_handler: Cell>, + pub ime_enabled: Cell, pub pending_surrogate: Cell>, pub last_reported_modifiers: Cell>, pub last_reported_capslock: Cell>, @@ -142,6 +143,7 @@ impl WindowsWindowState { min_size, callbacks, input_handler: Cell::new(input_handler), + ime_enabled: Cell::new(true), pending_surrogate: Cell::new(pending_surrogate), last_reported_modifiers: Cell::new(last_reported_modifiers), 
last_reported_capslock: Cell::new(last_reported_capslock), diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 07204548ff5f2884bb4a5429267a02981ab3e78f..3536e73a9db6247a798145f186ae20d2efe29da5 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -176,7 +176,9 @@ pub enum IconName { Mic, MicMute, Minimize, + NewThread, Notepad, + OpenFolder, Option, PageDown, PageUp, diff --git a/crates/json_schema_store/src/json_schema_store.rs b/crates/json_schema_store/src/json_schema_store.rs index 756f64b2fb1bac13fc6d2868989504a3f8241281..c13f42f9bb7d92b7c136815f720abfe6ec6faac3 100644 --- a/crates/json_schema_store/src/json_schema_store.rs +++ b/crates/json_schema_store/src/json_schema_store.rs @@ -67,25 +67,22 @@ pub fn init(cx: &mut App) { .detach(); if let Some(extension_events) = extension::ExtensionEvents::try_global(cx) { - cx.subscribe(&extension_events, move |_, evt, cx| { - match evt { - extension::Event::ExtensionInstalled(_) - | extension::Event::ExtensionUninstalled(_) - | extension::Event::ConfigureExtensionRequested(_) => return, - extension::Event::ExtensionsInstalledChanged => {} + cx.subscribe(&extension_events, move |_, evt, cx| match evt { + extension::Event::ExtensionsInstalledChanged => { + cx.update_global::(|schema_store, cx| { + schema_store.notify_schema_changed(ChangedSchemas::Settings, cx); + }); } - cx.update_global::(|schema_store, cx| { - schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}settings"), cx); - schema_store - .notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}project_settings"), cx); - }); + extension::Event::ExtensionUninstalled(_) + | extension::Event::ExtensionInstalled(_) + | extension::Event::ConfigureExtensionRequested(_) => {} }) .detach(); } cx.observe_global::(move |cx| { cx.update_global::(|schema_store, cx| { - schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}debug_tasks"), cx); + schema_store.notify_schema_changed(ChangedSchemas::DebugTasks, cx); }); 
}) .detach(); @@ -98,18 +95,42 @@ pub struct SchemaStore { impl gpui::Global for SchemaStore {} +enum ChangedSchemas { + Settings, + DebugTasks, +} + impl SchemaStore { - fn notify_schema_changed(&mut self, uri: &str, cx: &mut App) { - DYNAMIC_SCHEMA_CACHE.write().remove(uri); + fn notify_schema_changed(&mut self, changed_schemas: ChangedSchemas, cx: &mut App) { + let uris_to_invalidate = match changed_schemas { + ChangedSchemas::Settings => { + let settings_uri_prefix = &format!("{SCHEMA_URI_PREFIX}settings"); + let project_settings_uri = &format!("{SCHEMA_URI_PREFIX}project_settings"); + DYNAMIC_SCHEMA_CACHE + .write() + .extract_if(|uri, _| { + uri == project_settings_uri || uri.starts_with(settings_uri_prefix) + }) + .map(|(url, _)| url) + .collect() + } + ChangedSchemas::DebugTasks => DYNAMIC_SCHEMA_CACHE + .write() + .remove_entry(&format!("{SCHEMA_URI_PREFIX}debug_tasks")) + .map_or_else(Vec::new, |(uri, _)| vec![uri]), + }; + + if uris_to_invalidate.is_empty() { + return; + } - let uri = uri.to_string(); self.lsp_stores.retain(|lsp_store| { let Some(lsp_store) = lsp_store.upgrade() else { return false; }; - project::lsp_store::json_language_server_ext::notify_schema_changed( + project::lsp_store::json_language_server_ext::notify_schemas_changed( lsp_store, - uri.clone(), + &uris_to_invalidate, cx, ); true @@ -238,7 +259,8 @@ async fn resolve_dynamic_schema( (adapter_name, LspSchemaKind::Settings) } else { anyhow::bail!( - "Invalid LSP schema path: expected '{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'", + "Invalid LSP schema path: \ + Expected '{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'", lsp_path ); }; @@ -484,7 +506,7 @@ pub fn all_schema_file_associations( let file_name = normalized_action_name_to_file_name(normalized_name.clone()); serde_json::json!({ "fileMatch": [file_name], - "url": format!("{}action/{normalized_name}", SCHEMA_URI_PREFIX) + "url": 
format!("{SCHEMA_URI_PREFIX}action/{normalized_name}") }) })); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 58db79afe59f0e6d27e23eceb9861ea493d853fd..37c19172f7c48743e1436ba41e30d0c7ebf99d1d 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -62,6 +62,7 @@ sum_tree.workspace = true task.workspace = true text.workspace = true theme.workspace = true +toml.workspace = true tracing.workspace = true tree-sitter-md = { workspace = true, optional = true } tree-sitter-python = { workspace = true, optional = true } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 29b569ba1aa68fe83f3456a2eaf9911b4c83677d..4e994a7e60f58b6e4ccd50c2cb0584f91bd351f2 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -961,6 +961,15 @@ pub struct LanguageConfig { pub import_path_strip_regex: Option, } +impl LanguageConfig { + pub const FILE_NAME: &str = "config.toml"; + + pub fn load(config_path: impl AsRef) -> Result { + let config = std::fs::read_to_string(config_path.as_ref())?; + toml::from_str(&config).map_err(Into::into) + } +} + #[derive(Clone, Debug, Deserialize, Default, JsonSchema)] pub struct DecreaseIndentConfig { #[serde(default, deserialize_with = "deserialize_regex")] diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 6f5300991fd8afbfaba710ed2bde068dd4d3a969..88401906fc28bb297fc2798346e110c9651b1387 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -350,6 +350,44 @@ impl LspAdapter for ExtensionLspAdapter { }) } + async fn initialization_options_schema( + self: Arc, + delegate: &Arc, + _cached_binary: OwnedMutexGuard>, + _cx: &mut AsyncApp, + ) -> Option { + let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; + let json_schema: Option = self + .extension + 
.language_server_initialization_options_schema( + self.language_server_id.clone(), + delegate, + ) + .await + .ok() + .flatten(); + json_schema.and_then(|s| serde_json::from_str(&s).ok()) + } + + async fn settings_schema( + self: Arc, + delegate: &Arc, + _cached_binary: OwnedMutexGuard>, + _cx: &mut AsyncApp, + ) -> Option { + let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; + let json_schema: Option = self + .extension + .language_server_workspace_configuration_schema( + self.language_server_id.clone(), + delegate, + ) + .await + .ok() + .flatten(); + json_schema.and_then(|s| serde_json::from_str(&s).ok()) + } + async fn additional_initialization_options( self: Arc, target_language_server_id: LanguageServerName, diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index c403774499c9dcb384e93cf19367dc28e336aa60..0452c494a2ae0ce43d59de5ef26a75231249c642 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -13,10 +13,11 @@ pub mod fake_provider; use anthropic::{AnthropicError, parse_prompt_too_long}; use anyhow::{Result, anyhow}; use client::Client; +use client::UserStore; use cloud_llm_client::CompletionRequestStatus; use futures::FutureExt; use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; use open_router::OpenRouterError; @@ -61,9 +62,9 @@ pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProvider pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Zed"); -pub fn init(client: Arc, cx: &mut App) { +pub fn init(user_store: Entity, client: Arc, cx: &mut App) { init_settings(cx); - RefreshLlmTokenListener::register(client, cx); + 
RefreshLlmTokenListener::register(client, user_store, cx); } pub fn init_settings(cx: &mut App) { diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index b2af80a3c295cab1cf40a330eb8d84f94a137eb7..e64cc43edd8eef6cfaf0c6c966365c81d37b611c 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -3,11 +3,14 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; use client::Client; +use client::UserStore; use cloud_api_client::ClientApiError; use cloud_api_types::OrganizationId; use cloud_api_types::websocket_protocol::MessageToClient; use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; -use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _}; +use gpui::{ + App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, +}; use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use thiserror::Error; @@ -101,13 +104,15 @@ impl Global for GlobalRefreshLlmTokenListener {} pub struct RefreshLlmTokenEvent; -pub struct RefreshLlmTokenListener; +pub struct RefreshLlmTokenListener { + _subscription: Subscription, +} impl EventEmitter for RefreshLlmTokenListener {} impl RefreshLlmTokenListener { - pub fn register(client: Arc, cx: &mut App) { - let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx)); + pub fn register(client: Arc, user_store: Entity, cx: &mut App) { + let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx)); cx.set_global(GlobalRefreshLlmTokenListener(listener)); } @@ -115,7 +120,7 @@ impl RefreshLlmTokenListener { GlobalRefreshLlmTokenListener::global(cx).0.clone() } - fn new(client: Arc, cx: &mut Context) -> Self { + fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { client.add_message_to_client_handler({ let this = cx.entity(); move |message, cx| { @@ -123,7 +128,15 @@ impl 
RefreshLlmTokenListener { } }); - Self + let subscription = cx.subscribe(&user_store, |_this, _user_store, event, cx| { + if matches!(event, client::user::Event::OrganizationChanged) { + cx.emit(RefreshLlmTokenEvent); + } + }); + + Self { + _subscription: subscription, + } } fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index ece0d68152a20cbf77d0c082746959684816f115..b37f783eb9213a3d1d4bb4cc1bb0011c24879b05 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -68,7 +68,7 @@ vercel = { workspace = true, features = ["schemars"] } x_ai = { workspace = true, features = ["schemars"] } [dev-dependencies] -editor = { workspace = true, features = ["test-support"] } + language_model = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } + diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index b84b19b038905ba9f3d9a0637c770acc95687976..4e705a8d62a5446b17bcc95a7dc75152b0c3269c 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -43,7 +43,6 @@ use std::task::Poll; use std::time::Duration; use thiserror::Error; use ui::{TintColor, prelude::*}; -use util::{ResultExt as _, maybe}; use crate::provider::anthropic::{ AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic, @@ -97,7 +96,7 @@ pub struct State { default_model: Option>, default_fast_model: Option>, recommended_models: Vec>, - _fetch_models_task: Task<()>, + _user_store_subscription: Subscription, _settings_subscription: Subscription, _llm_token_subscription: Subscription, } @@ -110,44 +109,41 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let mut current_user = 
user_store.read(cx).watch_current_user(); Self { client: client.clone(), llm_api_token: LlmApiToken::default(), - user_store, + user_store: user_store.clone(), status, models: Vec::new(), default_model: None, default_fast_model: None, recommended_models: Vec::new(), - _fetch_models_task: cx.spawn(async move |this, cx| { - maybe!(async move { - let (client, llm_api_token, organization_id) = - this.read_with(cx, |this, cx| { - ( - client.clone(), - this.llm_api_token.clone(), - this.user_store - .read(cx) - .current_organization() - .map(|o| o.id.clone()), - ) - })?; + _user_store_subscription: cx.subscribe( + &user_store, + move |this, _user_store, event, cx| match event { + client::user::Event::PrivateUserInfoUpdated => { + let status = *client.status().borrow(); + if status.is_signed_out() { + return; + } - while current_user.borrow().is_none() { - current_user.next().await; + let client = this.client.clone(); + let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { + let response = + Self::fetch_models(client, llm_api_token, organization_id).await?; + this.update(cx, |this, cx| this.update_models(response, cx)) + }) + .detach_and_log_err(cx); } - - let response = - Self::fetch_models(client.clone(), llm_api_token.clone(), organization_id) - .await?; - this.update(cx, |this, cx| this.update_models(response, cx))?; - anyhow::Ok(()) - }) - .await - .context("failed to fetch Zed models") - .log_err(); - }), + _ => {} + }, + ), _settings_subscription: cx.observe_global::(|_, cx| { cx.notify(); }), @@ -870,7 +866,10 @@ impl LanguageModel for CloudLanguageModel { ); if enable_thinking && let Some(effort) = effort { - request.reasoning = Some(open_ai::responses::ReasoningConfig { effort }); + request.reasoning = Some(open_ai::responses::ReasoningConfig { + effort, + summary: 
Some(open_ai::responses::ReasoningSummaryMode::Auto), + }); } let future = self.request_limiter.stream(async move { diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 7d714cd93a2a93dbb9fd02ec4d2b95149bb43330..47d1b316a581c8013843940ecb3e55ed29bc4500 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -2,15 +2,17 @@ use std::pin::Pin; use std::str::FromStr as _; use std::sync::Arc; +use anthropic::AnthropicModelMode; use anyhow::{Result, anyhow}; use cloud_llm_client::CompletionIntent; use collections::HashMap; use copilot::{GlobalCopilotAuth, Status}; use copilot_chat::responses as copilot_responses; use copilot_chat::{ - ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, CopilotChatConfiguration, - Function, FunctionContent, ImageUrl, Model as CopilotChatModel, ModelVendor, - Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent, ToolChoice, + ChatLocation, ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, + CopilotChatConfiguration, Function, FunctionContent, ImageUrl, Model as CopilotChatModel, + ModelVendor, Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent, + ToolChoice, }; use futures::future::BoxFuture; use futures::stream::BoxStream; @@ -20,8 +22,8 @@ use http_client::StatusCode; use language::language_settings::all_language_settings; use language_model::{ AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCompletionError, - LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelId, LanguageModelName, - LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelEffortLevel, LanguageModelId, + LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, 
LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage, @@ -30,6 +32,7 @@ use settings::SettingsStore; use ui::prelude::*; use util::debug_panic; +use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic}; use crate::provider::util::parse_tool_arguments; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat"); @@ -254,6 +257,33 @@ impl LanguageModel for CopilotChatLanguageModel { self.model.supports_vision() } + fn supports_thinking(&self) -> bool { + self.model.can_think() + } + + fn supported_effort_levels(&self) -> Vec { + let levels = self.model.reasoning_effort_levels(); + if levels.is_empty() { + return vec![]; + } + levels + .iter() + .map(|level| { + let name: SharedString = match level.as_str() { + "low" => "Low".into(), + "medium" => "Medium".into(), + "high" => "High".into(), + _ => SharedString::from(level.clone()), + }; + LanguageModelEffortLevel { + name, + value: SharedString::from(level.clone()), + is_default: level == "high", + } + }) + .collect() + } + fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { match self.model.vendor() { ModelVendor::OpenAI | ModelVendor::Anthropic => { @@ -333,12 +363,94 @@ impl LanguageModel for CopilotChatLanguageModel { | CompletionIntent::EditFile => false, }); + if self.model.supports_messages() { + let location = intent_to_chat_location(request.intent); + let model = self.model.clone(); + let request_limiter = self.request_limiter.clone(); + let future = cx.spawn(async move |cx| { + let effort = request + .thinking_effort + .as_ref() + .and_then(|e| anthropic::Effort::from_str(e).ok()); + + let mut anthropic_request = into_anthropic( + request, + model.id().to_string(), + 0.0, + model.max_output_tokens() as u64, + if model.supports_adaptive_thinking() { + AnthropicModelMode::Thinking { + budget_tokens: None, + } + } 
else if model.can_think() { + AnthropicModelMode::Thinking { + budget_tokens: compute_thinking_budget( + model.min_thinking_budget(), + model.max_thinking_budget(), + model.max_output_tokens() as u32, + ), + } + } else { + AnthropicModelMode::Default + }, + ); + + anthropic_request.temperature = None; + + // The Copilot proxy doesn't support eager_input_streaming on tools. + for tool in &mut anthropic_request.tools { + tool.eager_input_streaming = false; + } + + if model.supports_adaptive_thinking() { + if anthropic_request.thinking.is_some() { + anthropic_request.thinking = Some(anthropic::Thinking::Adaptive); + anthropic_request.output_config = Some(anthropic::OutputConfig { effort }); + } + } + + let anthropic_beta = if !model.supports_adaptive_thinking() && model.can_think() { + Some("interleaved-thinking-2025-05-14".to_string()) + } else { + None + }; + + let body = serde_json::to_string(&anthropic::StreamingRequest { + base: anthropic_request, + stream: true, + }) + .map_err(|e| anyhow::anyhow!(e))?; + + let stream = CopilotChat::stream_messages( + body, + location, + is_user_initiated, + anthropic_beta, + cx.clone(), + ); + + request_limiter + .stream(async move { + let events = stream.await?; + let mapper = AnthropicEventMapper::new(); + Ok(mapper.map_stream(events).boxed()) + }) + .await + }); + return async move { Ok(future.await?.boxed()) }.boxed(); + } + if self.model.supports_response() { + let location = intent_to_chat_location(request.intent); let responses_request = into_copilot_responses(&self.model, request); let request_limiter = self.request_limiter.clone(); let future = cx.spawn(async move |cx| { - let request = - CopilotChat::stream_response(responses_request, is_user_initiated, cx.clone()); + let request = CopilotChat::stream_response( + responses_request, + location, + is_user_initiated, + cx.clone(), + ); request_limiter .stream(async move { let stream = request.await?; @@ -350,6 +462,7 @@ impl LanguageModel for CopilotChatLanguageModel { 
return async move { Ok(future.await?.boxed()) }.boxed(); } + let location = intent_to_chat_location(request.intent); let copilot_request = match into_copilot_chat(&self.model, request) { Ok(request) => request, Err(err) => return futures::future::ready(Err(err.into())).boxed(), @@ -358,8 +471,12 @@ impl LanguageModel for CopilotChatLanguageModel { let request_limiter = self.request_limiter.clone(); let future = cx.spawn(async move |cx| { - let request = - CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone()); + let request = CopilotChat::stream_completion( + copilot_request, + location, + is_user_initiated, + cx.clone(), + ); request_limiter .stream(async move { let response = request.await?; @@ -748,7 +865,7 @@ impl CopilotResponsesEventMapper { } copilot_responses::StreamEvent::GenericError { error } => vec![Err( - LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))), + LanguageModelCompletionError::Other(anyhow!(error.message)), )], copilot_responses::StreamEvent::Created { .. } @@ -761,6 +878,9 @@ fn into_copilot_chat( model: &CopilotChatModel, request: LanguageModelRequest, ) -> Result { + let temperature = request.temperature; + let tool_choice = request.tool_choice; + let mut request_messages: Vec = Vec::new(); for message in request.messages { if let Some(last_message) = request_messages.last_mut() { @@ -859,10 +979,9 @@ fn into_copilot_chat( let text_content = { let mut buffer = String::new(); for string in message.content.iter().filter_map(|content| match content { - MessageContent::Text(text) | MessageContent::Thinking { text, .. } => { - Some(text.as_str()) - } - MessageContent::ToolUse(_) + MessageContent::Text(text) => Some(text.as_str()), + MessageContent::Thinking { .. 
} + | MessageContent::ToolUse(_) | MessageContent::RedactedThinking(_) | MessageContent::ToolResult(_) | MessageContent::Image(_) => None, @@ -919,21 +1038,52 @@ fn into_copilot_chat( .collect::>(); Ok(CopilotChatRequest { - intent: true, n: 1, stream: model.uses_streaming(), - temperature: 0.1, + temperature: temperature.unwrap_or(0.1), model: model.id().to_string(), messages, tools, - tool_choice: request.tool_choice.map(|choice| match choice { + tool_choice: tool_choice.map(|choice| match choice { LanguageModelToolChoice::Auto => ToolChoice::Auto, LanguageModelToolChoice::Any => ToolChoice::Any, LanguageModelToolChoice::None => ToolChoice::None, }), + thinking_budget: None, }) } +fn compute_thinking_budget( + min_budget: Option, + max_budget: Option, + max_output_tokens: u32, +) -> Option { + let configured_budget: u32 = 16000; + let min_budget = min_budget.unwrap_or(1024); + let max_budget = max_budget.unwrap_or(max_output_tokens.saturating_sub(1)); + let normalized = configured_budget.max(min_budget); + Some( + normalized + .min(max_budget) + .min(max_output_tokens.saturating_sub(1)), + ) +} + +fn intent_to_chat_location(intent: Option) -> ChatLocation { + match intent { + Some(CompletionIntent::UserPrompt) => ChatLocation::Agent, + Some(CompletionIntent::ToolResults) => ChatLocation::Agent, + Some(CompletionIntent::ThreadSummarization) => ChatLocation::Panel, + Some(CompletionIntent::ThreadContextSummarization) => ChatLocation::Panel, + Some(CompletionIntent::CreateFile) => ChatLocation::Agent, + Some(CompletionIntent::EditFile) => ChatLocation::Agent, + Some(CompletionIntent::InlineAssist) => ChatLocation::Editor, + Some(CompletionIntent::TerminalInlineAssist) => ChatLocation::Terminal, + Some(CompletionIntent::GenerateGitCommitMessage) => ChatLocation::Other, + None => ChatLocation::Panel, + } +} + fn into_copilot_responses( model: &CopilotChatModel, request: LanguageModelRequest, @@ -949,7 +1099,7 @@ fn into_copilot_responses( tool_choice, stop: _, 
temperature, - thinking_allowed: _, + thinking_allowed, thinking_effort: _, speed: _, } = request; @@ -1128,10 +1278,18 @@ fn into_copilot_responses( temperature, tools: converted_tools, tool_choice: mapped_tool_choice, - reasoning: None, // We would need to add support for setting from user settings. + reasoning: if thinking_allowed { + Some(copilot_responses::ReasoningConfig { + effort: copilot_responses::ReasoningEffort::Medium, + summary: Some(copilot_responses::ReasoningSummary::Detailed), + }) + } else { + None + }, include: Some(vec![ copilot_responses::ResponseIncludable::ReasoningEncryptedContent, ]), + store: false, } } diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 9af8559c722d1fe726f7f871c9863cd85a3d2678..ee08f1689aeea9cfa18346108cd2d314b2259583 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ b/crates/language_models/src/provider/lmstudio.rs @@ -1,26 +1,30 @@ use anyhow::{Result, anyhow}; use collections::HashMap; +use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task}; +use gpui::{AnyView, App, AsyncApp, Context, CursorStyle, Entity, Subscription, Task}; use http_client::HttpClient; use language_model::{ - AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, - StopReason, TokenUsage, + ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolResultContent, + LanguageModelToolUse, MessageContent, StopReason, TokenUsage, env_var, }; use language_model::{ - IconOrSvg, LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, 
LanguageModelProviderState, - LanguageModelRequest, RateLimiter, Role, + LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, + LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, }; -use lmstudio::{ModelType, get_models}; +use lmstudio::{LMSTUDIO_API_URL, ModelType, get_models}; + pub use settings::LmStudioAvailableModel as AvailableModel; -use settings::{Settings, SettingsStore}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::pin::Pin; +use std::sync::LazyLock; use std::{collections::BTreeMap, sync::Arc}; -use ui::{ButtonLike, Indicator, List, ListBulletItem, prelude::*}; -use util::ResultExt; +use ui::{ + ButtonLike, ConfiguredApiCard, ElevationIndex, List, ListBulletItem, Tooltip, prelude::*, +}; +use ui_input::InputField; use crate::AllLanguageModelSettings; use crate::provider::util::parse_tool_arguments; @@ -32,6 +36,9 @@ const LMSTUDIO_SITE: &str = "https://lmstudio.ai/"; const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("lmstudio"); const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("LM Studio"); +const API_KEY_ENV_VAR_NAME: &str = "LMSTUDIO_API_KEY"; +static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); + #[derive(Default, Debug, Clone, PartialEq)] pub struct LmStudioSettings { pub api_url: String, @@ -44,6 +51,7 @@ pub struct LmStudioLanguageModelProvider { } pub struct State { + api_key_state: ApiKeyState, http_client: Arc, available_models: Vec, fetch_model_task: Option>>, @@ -55,14 +63,25 @@ impl State { !self.available_models.is_empty() } + fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); + let task = self + .api_key_state + .store(api_url, api_key, |this| &mut this.api_key_state, cx); + self.restart_fetch_models_task(cx); + task + } + fn fetch_models(&mut self, cx: &mut Context) -> Task> { let 
settings = &AllLanguageModelSettings::get_global(cx).lmstudio; let http_client = self.http_client.clone(); let api_url = settings.api_url.clone(); + let api_key = self.api_key_state.key(&api_url); // As a proxy for the server being "authenticated", we'll check if its up by fetching the models cx.spawn(async move |this, cx| { - let models = get_models(http_client.as_ref(), &api_url, None).await?; + let models = + get_models(http_client.as_ref(), &api_url, api_key.as_deref(), None).await?; let mut models: Vec = models .into_iter() @@ -95,6 +114,11 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); + let _task = self + .api_key_state + .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + if self.is_authenticated() { return Task::ready(Ok(())); } @@ -145,6 +169,10 @@ impl LmStudioLanguageModelProvider { }); State { + api_key_state: ApiKeyState::new( + Self::api_url(cx).into(), + (*API_KEY_ENV_VAR).clone(), + ), http_client, available_models: Default::default(), fetch_model_task: None, @@ -156,6 +184,17 @@ impl LmStudioLanguageModelProvider { .update(cx, |state, cx| state.restart_fetch_models_task(cx)); this } + + fn api_url(cx: &App) -> String { + AllLanguageModelSettings::get_global(cx) + .lmstudio + .api_url + .clone() + } + + fn has_custom_url(cx: &App) -> bool { + Self::api_url(cx) != LMSTUDIO_API_URL + } } impl LanguageModelProviderState for LmStudioLanguageModelProvider { @@ -225,6 +264,7 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { model, http_client: self.http_client.clone(), request_limiter: RateLimiter::new(4), + state: self.state.clone(), }) as Arc }) .collect() @@ -244,12 +284,13 @@ impl LanguageModelProvider for LmStudioLanguageModelProvider { _window: &mut Window, cx: &mut App, ) -> AnyView { - let state = self.state.clone(); - cx.new(|cx| ConfigurationView::new(state, cx)).into() + cx.new(|cx| 
ConfigurationView::new(self.state.clone(), _window, cx)) + .into() } fn reset_credentials(&self, cx: &mut App) -> Task> { - self.state.update(cx, |state, cx| state.fetch_models(cx)) + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) } } @@ -258,6 +299,7 @@ pub struct LmStudioLanguageModel { model: lmstudio::Model, http_client: Arc, request_limiter: RateLimiter, + state: Entity, } impl LmStudioLanguageModel { @@ -376,15 +418,20 @@ impl LmStudioLanguageModel { Result>>, > { let http_client = self.http_client.clone(); - let api_url = cx.update(|cx| { - let settings = &AllLanguageModelSettings::get_global(cx).lmstudio; - settings.api_url.clone() + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { + let api_url = LmStudioLanguageModelProvider::api_url(cx); + (state.api_key_state.key(&api_url), api_url) }); let future = self.request_limiter.stream(async move { - let request = lmstudio::stream_chat_completion(http_client.as_ref(), &api_url, request); - let response = request.await?; - Ok(response) + let stream = lmstudio::stream_chat_completion( + http_client.as_ref(), + &api_url, + api_key.as_deref(), + request, + ) + .await?; + Ok(stream) }); async move { Ok(future.await?.boxed()) }.boxed() @@ -634,53 +681,212 @@ fn add_message_content_part( struct ConfigurationView { state: Entity, - loading_models_task: Option>, + api_key_editor: Entity, + api_url_editor: Entity, } impl ConfigurationView { - pub fn new(state: Entity, cx: &mut Context) -> Self { - let loading_models_task = Some(cx.spawn({ - let state = state.clone(); - async move |this, cx| { - state - .update(cx, |state, cx| state.authenticate(cx)) - .await - .log_err(); - - this.update(cx, |this, cx| { - this.loading_models_task = None; - cx.notify(); - }) - .log_err(); - } - })); + pub fn new(state: Entity, _window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| InputField::new(_window, cx, "sk-...").label("API key")); + + let api_url_editor = cx.new(|cx| { 
+ let input = InputField::new(_window, cx, LMSTUDIO_API_URL).label("API URL"); + input.set_text(&LmStudioLanguageModelProvider::api_url(cx), _window, cx); + input + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); Self { state, - loading_models_task, + api_key_editor, + api_url_editor, } } - fn retry_connection(&self, cx: &mut App) { + fn retry_connection(&mut self, _window: &mut Window, cx: &mut Context) { + let has_api_url = LmStudioLanguageModelProvider::has_custom_url(cx); + let has_api_key = self + .state + .read_with(cx, |state, _| state.api_key_state.has_key()); + if !has_api_url { + self.save_api_url(cx); + } + if !has_api_key { + self.save_api_key(&Default::default(), _window, cx); + } + + self.state.update(cx, |state, cx| { + state.restart_fetch_models_task(cx); + }); + } + + fn save_api_key(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); + if api_key.is_empty() { + return; + } + + self.api_key_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + let state = self.state.clone(); + cx.spawn_in(_window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn reset_api_key(&mut self, _window: &mut Window, cx: &mut Context) { + self.api_key_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + let state = self.state.clone(); + cx.spawn_in(_window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn save_api_url(&self, cx: &mut Context) { + let api_url = self.api_url_editor.read(cx).text(cx).trim().to_string(); + let current_url = LmStudioLanguageModelProvider::api_url(cx); + if !api_url.is_empty() && &api_url != ¤t_url { + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .detach_and_log_err(cx); + + 
let fs = ::global(cx); + update_settings_file(fs, cx, move |settings, _| { + settings + .language_models + .get_or_insert_default() + .lmstudio + .get_or_insert_default() + .api_url = Some(api_url); + }); + } + } + + fn reset_api_url(&mut self, _window: &mut Window, cx: &mut Context) { + self.api_url_editor + .update(cx, |input, cx| input.set_text("", _window, cx)); + + // Clear API key when URL changes since keys are URL-specific self.state - .update(cx, |state, cx| state.fetch_models(cx)) + .update(cx, |state, cx| state.set_api_key(None, cx)) .detach_and_log_err(cx); - } -} -impl Render for ConfigurationView { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - let is_authenticated = self.state.read(cx).is_authenticated(); + let fs = ::global(cx); + update_settings_file(fs, cx, |settings, _cx| { + if let Some(settings) = settings + .language_models + .as_mut() + .and_then(|models| models.lmstudio.as_mut()) + { + settings.api_url = Some(LMSTUDIO_API_URL.into()); + } + }); + cx.notify(); + } - let lmstudio_intro = "Run local LLMs like Llama, Phi, and Qwen."; + fn render_api_url_editor(&self, cx: &Context) -> impl IntoElement { + let api_url = LmStudioLanguageModelProvider::api_url(cx); + let custom_api_url_set = api_url != LMSTUDIO_API_URL; - if self.loading_models_task.is_some() { - div().child(Label::new("Loading models...")).into_any() + if custom_api_url_set { + h_flex() + .p_3() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().elevated_surface_background) + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(v_flex().gap_1().child(Label::new(api_url))), + ) + .child( + Button::new("reset-api-url", "Reset API URL") + .label_size(LabelSize::Small) + .icon(IconName::Undo) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .layer(ElevationIndex::ModalSurface) + .on_click( + cx.listener(|this, _, 
_window, cx| this.reset_api_url(_window, cx)), + ), + ) + .into_any_element() } else { v_flex() + .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| { + this.save_api_url(cx); + cx.notify(); + })) .gap_2() + .child(self.api_url_editor.clone()) + .into_any_element() + } + } + + fn render_api_key_editor(&self, cx: &Context) -> impl IntoElement { + let state = self.state.read(cx); + let env_var_set = state.api_key_state.is_from_env_var(); + let configured_card_label = if env_var_set { + format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable.") + } else { + "API key configured".to_string() + }; + + if !state.api_key_state.has_key() { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(self.api_key_editor.clone()) .child( - v_flex().gap_1().child(Label::new(lmstudio_intro)).child( + Label::new(format!( + "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed." + )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + ConfiguredApiCard::new(configured_card_label) + .disabled(env_var_set) + .on_click(cx.listener(|this, _, _window, cx| this.reset_api_key(_window, cx))) + .when(env_var_set, |this| { + this.tooltip_label(format!( + "To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable." 
+ )) + }) + .into_any_element() + } + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_authenticated = self.state.read(cx).is_authenticated(); + + v_flex() + .gap_2() + .child( + v_flex() + .gap_1() + .child(Label::new("Run local LLMs like Llama, Phi, and Qwen.")) + .child( List::new() .child(ListBulletItem::new( "LM Studio needs to be running with at least one model downloaded.", @@ -690,86 +896,100 @@ impl Render for ConfigurationView { .child(Label::new("To get your first model, try running")) .child(Label::new("lms get qwen2.5-coder-7b").inline_code(cx)), ), - ), - ) - .child( - h_flex() - .w_full() - .justify_between() - .gap_2() - .child( - h_flex() - .w_full() - .gap_2() - .map(|this| { - if is_authenticated { - this.child( - Button::new("lmstudio-site", "LM Studio") - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_SITE) - }) - .into_any_element(), - ) - } else { - this.child( - Button::new( - "download_lmstudio_button", - "Download LM Studio", - ) + ) + .child(Label::new( + "Alternatively, you can connect to an LM Studio server by specifying its \ + URL and API key (may not be required):", + )), + ) + .child(self.render_api_url_editor(cx)) + .child(self.render_api_key_editor(cx)) + .child( + h_flex() + .w_full() + .justify_between() + .gap_2() + .child( + h_flex() + .w_full() + .gap_2() + .map(|this| { + if is_authenticated { + this.child( + Button::new("lmstudio-site", "LM Studio") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_DOWNLOAD_URL) + cx.open_url(LMSTUDIO_SITE) }) .into_any_element(), + ) + } else { + this.child( + Button::new( + "download_lmstudio_button", + "Download LM Studio", ) - } - }) - .child( - 
Button::new("view-models", "Model Catalog") .style(ButtonStyle::Subtle) .icon(IconName::ArrowUpRight) .icon_size(IconSize::Small) .icon_color(Color::Muted) .on_click(move |_, _window, cx| { - cx.open_url(LMSTUDIO_CATALOG_URL) - }), - ), - ) - .map(|this| { - if is_authenticated { - this.child( - ButtonLike::new("connected") - .disabled(true) - .cursor_style(gpui::CursorStyle::Arrow) - .child( - h_flex() - .gap_2() - .child(Indicator::dot().color(Color::Success)) - .child(Label::new("Connected")) - .into_any_element(), - ), - ) - } else { - this.child( - Button::new("retry_lmstudio_models", "Connect") - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon(IconName::PlayFilled) - .on_click(cx.listener(move |this, _, _window, cx| { - this.retry_connection(cx) - })), - ) - } - }), - ) - .into_any() - } + cx.open_url(LMSTUDIO_DOWNLOAD_URL) + }) + .into_any_element(), + ) + } + }) + .child( + Button::new("view-models", "Model Catalog") + .style(ButtonStyle::Subtle) + .icon(IconName::ArrowUpRight) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .on_click(move |_, _window, cx| { + cx.open_url(LMSTUDIO_CATALOG_URL) + }), + ), + ) + .map(|this| { + if is_authenticated { + this.child( + ButtonLike::new("connected") + .disabled(true) + .cursor_style(CursorStyle::Arrow) + .child( + h_flex() + .gap_2() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new("Connected")) + .into_any_element(), + ) + .child( + IconButton::new("refresh-models", IconName::RotateCcw) + .tooltip(Tooltip::text("Refresh Models")) + .on_click(cx.listener(|this, _, _window, cx| { + this.state.update(cx, |state, _| { + state.available_models.clear(); + }); + this.retry_connection(_window, cx); + })), + ), + ) + } else { + this.child( + Button::new("retry_lmstudio_models", "Connect") + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon(IconName::PlayFilled) + .on_click(cx.listener(move |this, _, _window, cx| { + 
this.retry_connection(_window, cx) + })), + ) + } + }), + ) } } diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 6af66f4e9a9d257b385c84a6c0c6d989f04c013f..338931cf7ca902225e10a7d09c9e7528128f1491 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -512,6 +512,13 @@ pub fn into_mistral( model: model.id().to_string(), messages, stream, + stream_options: if stream { + Some(mistral::StreamOptions { + stream_tool_calls: Some(true), + }) + } else { + None + }, max_tokens: max_output_tokens, temperature: request.temperature, response_format: None, @@ -620,12 +627,16 @@ impl MistralEventMapper { for tool_call in tool_calls { let entry = self.tool_calls_by_index.entry(tool_call.index).or_default(); - if let Some(tool_id) = tool_call.id.clone() { + if let Some(tool_id) = tool_call.id.clone() + && !tool_id.is_empty() + { entry.id = tool_id; } if let Some(function) = tool_call.function.as_ref() { - if let Some(name) = function.name.clone() { + if let Some(name) = function.name.clone() + && !name.is_empty() + { entry.name = name; } diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 57b3a6b20a9712e7c4d99b3ccfc48719e632da9d..ce79de7cb2df22847a2666d7b4847e2c696fb12e 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -310,6 +310,8 @@ impl LanguageModel for OpenAiLanguageModel { | Model::FivePointTwo | Model::FivePointTwoCodex | Model::FivePointThreeCodex + | Model::FivePointFour + | Model::FivePointFourPro | Model::O1 | Model::O3 => true, Model::ThreePointFiveTurbo @@ -600,7 +602,10 @@ pub fn into_open_ai_response( } else { None }, - reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig { effort }), + reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig { + effort, + summary: 
Some(open_ai::responses::ReasoningSummaryMode::Auto), + }), } } @@ -961,10 +966,20 @@ impl OpenAiResponseEventMapper { self.function_calls_by_item.insert(item_id, entry); } } - ResponseOutputItem::Unknown => {} + ResponseOutputItem::Reasoning(_) | ResponseOutputItem::Unknown => {} } events } + ResponsesStreamEvent::ReasoningSummaryTextDelta { delta, .. } => { + if delta.is_empty() { + Vec::new() + } else { + vec![Ok(LanguageModelCompletionEvent::Thinking { + text: delta, + signature: None, + })] + } + } ResponsesStreamEvent::OutputTextDelta { delta, .. } => { if delta.is_empty() { Vec::new() @@ -1069,14 +1084,26 @@ impl OpenAiResponseEventMapper { } ResponsesStreamEvent::Error { error } | ResponsesStreamEvent::GenericError { error } => { - vec![Err(LanguageModelCompletionError::Other(anyhow!(format!( - "{error:?}" - ))))] + vec![Err(LanguageModelCompletionError::Other(anyhow!( + error.message + )))] } - ResponsesStreamEvent::OutputTextDone { .. } => Vec::new(), - ResponsesStreamEvent::OutputItemDone { .. } + ResponsesStreamEvent::ReasoningSummaryPartAdded { summary_index, .. } => { + if summary_index > 0 { + vec![Ok(LanguageModelCompletionEvent::Thinking { + text: "\n\n".to_string(), + signature: None, + })] + } else { + Vec::new() + } + } + ResponsesStreamEvent::OutputTextDone { .. } + | ResponsesStreamEvent::OutputItemDone { .. } | ResponsesStreamEvent::ContentPartAdded { .. } | ResponsesStreamEvent::ContentPartDone { .. } + | ResponsesStreamEvent::ReasoningSummaryTextDone { .. } + | ResponsesStreamEvent::ReasoningSummaryPartDone { .. } | ResponsesStreamEvent::Created { .. } | ResponsesStreamEvent::InProgress { .. 
} | ResponsesStreamEvent::Unknown => Vec::new(), @@ -1217,13 +1244,13 @@ pub fn count_open_ai_tokens( | Model::FiveCodex | Model::FiveMini | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), - // GPT-5.1, 5.2, 5.2-codex, and 5.3-codex don't have dedicated tiktoken support; use gpt-5 tokenizer + // GPT-5.1, 5.2, 5.2-codex, 5.3-codex, 5.4, and 5.4-pro don't have dedicated tiktoken support; use gpt-5 tokenizer Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex - | Model::FivePointThreeCodex => { - tiktoken_rs::num_tokens_from_messages("gpt-5", &messages) - } + | Model::FivePointThreeCodex + | Model::FivePointFour + | Model::FivePointFourPro => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages), } .map(|tokens| tokens as u64) }) @@ -1414,8 +1441,9 @@ mod tests { use gpui::TestAppContext; use language_model::{LanguageModelRequestMessage, LanguageModelRequestTool}; use open_ai::responses::{ - ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage, ResponseStatusDetails, - ResponseSummary, ResponseUsage, StreamEvent as ResponsesStreamEvent, + ReasoningSummaryPart, ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage, + ResponseReasoningItem, ResponseStatusDetails, ResponseSummary, ResponseUsage, + StreamEvent as ResponsesStreamEvent, }; use pretty_assertions::assert_eq; use serde_json::json; @@ -1673,7 +1701,7 @@ mod tests { } ], "prompt_cache_key": "thread-123", - "reasoning": { "effort": "low" } + "reasoning": { "effort": "low", "summary": "auto" } }); assert_eq!(serialized, expected); @@ -2112,4 +2140,166 @@ mod tests { }) )); } + + #[test] + fn responses_stream_maps_reasoning_summary_deltas() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_123".into()), + summary: vec![], + }), + }, + ResponsesStreamEvent::ReasoningSummaryPartAdded { + item_id: 
"rs_123".into(), + output_index: 0, + summary_index: 0, + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: "Thinking about".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: " the answer".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDone { + item_id: "rs_123".into(), + output_index: 0, + text: "Thinking about the answer".into(), + }, + ResponsesStreamEvent::ReasoningSummaryPartDone { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 0, + }, + ResponsesStreamEvent::ReasoningSummaryPartAdded { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 1, + }, + ResponsesStreamEvent::ReasoningSummaryTextDelta { + item_id: "rs_123".into(), + output_index: 0, + delta: "Second part".into(), + }, + ResponsesStreamEvent::ReasoningSummaryTextDone { + item_id: "rs_123".into(), + output_index: 0, + text: "Second part".into(), + }, + ResponsesStreamEvent::ReasoningSummaryPartDone { + item_id: "rs_123".into(), + output_index: 0, + summary_index: 1, + }, + ResponsesStreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_123".into()), + summary: vec![ + ReasoningSummaryPart::SummaryText { + text: "Thinking about the answer".into(), + }, + ReasoningSummaryPart::SummaryText { + text: "Second part".into(), + }, + ], + }), + }, + ResponsesStreamEvent::OutputItemAdded { + output_index: 1, + sequence_number: None, + item: response_item_message("msg_456"), + }, + ResponsesStreamEvent::OutputTextDelta { + item_id: "msg_456".into(), + output_index: 1, + content_index: Some(0), + delta: "The answer is 42".into(), + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + + let thinking_events: Vec<_> = mapped + .iter() + .filter(|e| matches!(e, 
LanguageModelCompletionEvent::Thinking { .. })) + .collect(); + assert_eq!( + thinking_events.len(), + 4, + "expected 4 thinking events (2 deltas + separator + second delta), got {:?}", + thinking_events, + ); + + assert!(matches!( + &thinking_events[0], + LanguageModelCompletionEvent::Thinking { text, .. } if text == "Thinking about" + )); + assert!(matches!( + &thinking_events[1], + LanguageModelCompletionEvent::Thinking { text, .. } if text == " the answer" + )); + assert!( + matches!( + &thinking_events[2], + LanguageModelCompletionEvent::Thinking { text, .. } if text == "\n\n" + ), + "expected separator between summary parts" + ); + assert!(matches!( + &thinking_events[3], + LanguageModelCompletionEvent::Thinking { text, .. } if text == "Second part" + )); + + assert!(mapped.iter().any(|e| matches!( + e, + LanguageModelCompletionEvent::Text(t) if t == "The answer is 42" + ))); + } + + #[test] + fn responses_stream_maps_reasoning_from_done_only() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_789".into()), + summary: vec![], + }), + }, + ResponsesStreamEvent::OutputItemDone { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::Reasoning(ResponseReasoningItem { + id: Some("rs_789".into()), + summary: vec![ReasoningSummaryPart::SummaryText { + text: "Summary without deltas".into(), + }], + }), + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + + assert!( + !mapped + .iter() + .any(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. 
})), + "OutputItemDone reasoning should not produce Thinking events (no delta/done text events)" + ); + } } diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 8529bdb82ace33d6f3c747ed707b9aac9d319627..b66f661b5e8782a7a072332141e4e2246ab1a2b9 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -98,7 +98,6 @@ util.workspace = true [dev-dependencies] pretty_assertions.workspace = true -text.workspace = true theme = { workspace = true, features = ["test-support"] } tree-sitter-bash.workspace = true tree-sitter-c.workspace = true @@ -109,4 +108,3 @@ tree-sitter-python.workspace = true tree-sitter-typescript.workspace = true tree-sitter.workspace = true unindent.workspace = true -workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 722f4bb795ea857a9d399ef5b291beb8503f1c92..95bfc798414f5d3629e1ea46f54d14a7ed58a8d4 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1378,12 +1378,9 @@ impl ToolchainLister for PythonToolchainProvider { match toolchain.environment.kind { Some(PythonEnvironmentKind::Conda) => { - let Some(manager_info) = &toolchain.environment.manager else { + if toolchain.environment.manager.is_none() { return vec![]; }; - if smol::fs::metadata(&manager_info.executable).await.is_err() { - return vec![]; - } let manager = match conda_manager { settings::CondaManager::Conda => "conda", diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index 66511da9daa943628e71000a2009b2026eeace6c..df1024aa99e15e322c7dff5ee7933db2a9df80b4 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -61,7 +61,6 @@ objc.workspace = true collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } gpui_platform.workspace = true -sha2.workspace = true simplelog.workspace = true 
[build-dependencies] diff --git a/crates/livekit_client/src/lib.rs b/crates/livekit_client/src/lib.rs index be008d8db5108fb087415edb9d2de91bad19ab97..352776cf6bbe02381957a197eca9a64fff094892 100644 --- a/crates/livekit_client/src/lib.rs +++ b/crates/livekit_client/src/lib.rs @@ -1,8 +1,8 @@ use anyhow::Context as _; use collections::HashMap; +use cpal::DeviceId; mod remote_video_track_view; -use cpal::traits::HostTrait as _; pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent}; use rodio::DeviceTrait as _; @@ -192,24 +192,18 @@ pub enum RoomEvent { pub(crate) fn default_device( input: bool, + device_id: Option<&DeviceId>, ) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> { - let device; - let config; - if input { - device = cpal::default_host() - .default_input_device() - .context("no audio input device available")?; - config = device + let device = audio::resolve_device(device_id, input)?; + let config = if input { + device .default_input_config() - .context("failed to get default input config")?; + .context("failed to get default input config")? } else { - device = cpal::default_host() - .default_output_device() - .context("no audio output device available")?; - config = device + device .default_output_config() - .context("failed to get default output config")?; - } + .context("failed to get default output config")? 
+ }; Ok((device, config)) } diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 1db9a12ef2b7f3b4f3de1cba6c61a30db12a5bd9..863cf0dc527300f1e85df6867d99e367b5c7fa15 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -150,7 +150,10 @@ impl Room { info!("Using experimental.rodio_audio audio pipeline for output"); playback::play_remote_audio_track(&track.0, speaker, cx) } else if speaker.sends_legacy_audio { - Ok(self.playback.play_remote_audio_track(&track.0)) + let output_audio_device = AudioSettings::get_global(cx).output_audio_device.clone(); + Ok(self + .playback + .play_remote_audio_track(&track.0, output_audio_device)) } else { Err(anyhow!("Client version too old to play audio in call")) } diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index df62479f022be5295a3de44f40fabf48aed515f2..0ebb282dd7ec494886fe1ffc90fe1f8688a762da 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -1,6 +1,7 @@ use anyhow::{Context as _, Result}; use audio::{AudioSettings, CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; +use cpal::DeviceId; use cpal::traits::{DeviceTrait, StreamTrait as _}; use futures::channel::mpsc::UnboundedSender; use futures::{Stream, StreamExt as _}; @@ -91,8 +92,9 @@ impl AudioStack { pub(crate) fn play_remote_audio_track( &self, track: &livekit::track::RemoteAudioTrack, + output_audio_device: Option, ) -> AudioStream { - let output_task = self.start_output(); + let output_task = self.start_output(output_audio_device); let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed); let source = AudioMixerSource { @@ -130,7 +132,7 @@ impl AudioStack { } } - fn start_output(&self) -> Arc> { + fn start_output(&self, output_audio_device: Option) -> Arc> { if let Some(task) = 
self._output_task.borrow().upgrade() { return task; } @@ -143,6 +145,7 @@ impl AudioStack { mixer, LEGACY_SAMPLE_RATE.get(), LEGACY_CHANNEL_COUNT.get().into(), + output_audio_device, ) .await .log_err(); @@ -219,12 +222,16 @@ impl AudioStack { Ok(()) }) } else { + let input_audio_device = + AudioSettings::try_read_global(cx, |settings| settings.input_audio_device.clone()) + .flatten(); self.executor.spawn(async move { Self::capture_input( apm, frame_tx, LEGACY_SAMPLE_RATE.get(), LEGACY_CHANNEL_COUNT.get().into(), + input_audio_device, ) .await }) @@ -247,6 +254,7 @@ impl AudioStack { mixer: Arc>, sample_rate: u32, num_channels: u32, + output_audio_device: Option, ) -> Result<()> { // Prevent App Nap from throttling audio playback on macOS. // This guard is held for the entire duration of audio output. @@ -255,7 +263,8 @@ impl AudioStack { loop { let mut device_change_listener = DeviceChangeListener::new(false)?; - let (output_device, output_config) = crate::default_device(false)?; + let (output_device, output_config) = + crate::default_device(false, output_audio_device.as_ref())?; let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let mixer = mixer.clone(); let apm = apm.clone(); @@ -327,10 +336,11 @@ impl AudioStack { frame_tx: UnboundedSender>, sample_rate: u32, num_channels: u32, + input_audio_device: Option, ) -> Result<()> { loop { let mut device_change_listener = DeviceChangeListener::new(true)?; - let (device, config) = crate::default_device(true)?; + let (device, config) = crate::default_device(true, input_audio_device.as_ref())?; let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>(); let apm = apm.clone(); let frame_tx = frame_tx.clone(); diff --git a/crates/livekit_client/src/record.rs b/crates/livekit_client/src/record.rs index c23ab2b938178e9b634f8e0d4d298f2c86450b51..c0fe9eb7218ad8550f7b63042d0e11c2cb53ee20 100644 --- a/crates/livekit_client/src/record.rs +++ b/crates/livekit_client/src/record.rs @@ -7,20 +7,22 
@@ use std::{ }; use anyhow::{Context, Result}; +use cpal::DeviceId; use cpal::traits::{DeviceTrait, StreamTrait}; use rodio::{buffer::SamplesBuffer, conversions::SampleTypeConverter}; use util::ResultExt; pub struct CaptureInput { pub name: String, + pub input_device: Option, config: cpal::SupportedStreamConfig, samples: Arc>>, _stream: cpal::Stream, } impl CaptureInput { - pub fn start() -> anyhow::Result { - let (device, config) = crate::default_device(true)?; + pub fn start(input_device: Option) -> anyhow::Result { + let (device, config) = crate::default_device(true, input_device.as_ref())?; let name = device .description() .map(|desc| desc.name().to_string()) @@ -32,6 +34,7 @@ impl CaptureInput { Ok(Self { name, + input_device, _stream: stream, config, samples, diff --git a/crates/lmstudio/src/lmstudio.rs b/crates/lmstudio/src/lmstudio.rs index ef2f7b6208f62e079609049b8eff83a80034741e..8a44b7fdefe5262d955606b0413b2b2425014296 100644 --- a/crates/lmstudio/src/lmstudio.rs +++ b/crates/lmstudio/src/lmstudio.rs @@ -354,14 +354,19 @@ pub struct ResponseMessageDelta { pub async fn complete( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, request: ChatCompletionRequest, ) -> Result { let uri = format!("{api_url}/chat/completions"); - let request_builder = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let serialized_request = serde_json::to_string(&request)?; let request = request_builder.body(AsyncBody::from(serialized_request))?; @@ -386,14 +391,19 @@ pub async fn complete( pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, request: ChatCompletionRequest, ) -> Result>> { let uri = format!("{api_url}/chat/completions"); - let request_builder = 
http::Request::builder() + let mut request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -434,14 +444,19 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, + api_key: Option<&str>, _: Option, ) -> Result> { let uri = format!("{api_url}/models"); - let request_builder = HttpRequest::builder() + let mut request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) .header("Accept", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 21738147eed1b5b02da1c85207736160bd37ceb3..f530b88908380be13de2005bb8b3ec2b7e6e31b5 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -10,7 +10,7 @@ use collections::HashSet; use crate::path_range::PathWithRange; -const PARSE_OPTIONS: Options = Options::ENABLE_TABLES +pub const PARSE_OPTIONS: Options = Options::ENABLE_TABLES .union(Options::ENABLE_FOOTNOTES) .union(Options::ENABLE_STRIKETHROUGH) .union(Options::ENABLE_TASKLISTS) diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 55912c66a017fa22902f9b05e5fa924230710d69..4baa308f1088341aada1eb2917c2133b8df8c143 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -25,6 +25,7 @@ html5ever.workspace = true language.workspace = true linkify.workspace = true log.workspace 
= true +markdown.workspace = true markup5ever_rcdom.workspace = true pretty_assertions.workspace = true pulldown-cmark.workspace = true diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 59f18647d3ca8ac4937b2e411c8b9bb8e33550b7..ffd697d0e1bafc2feeccf3a3a7836a224d983860 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -7,8 +7,9 @@ use collections::FxHashMap; use gpui::{DefiniteLength, FontWeight, px, relative}; use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink}; use language::LanguageRegistry; +use markdown::parser::PARSE_OPTIONS; use markup5ever_rcdom::RcDom; -use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; +use pulldown_cmark::{Alignment, Event, Parser, Tag, TagEnd}; use std::{ cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec, }; @@ -19,10 +20,7 @@ pub async fn parse_markdown( file_location_directory: Option, language_registry: Option>, ) -> ParsedMarkdown { - let mut options = Options::all(); - options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); - - let parser = Parser::new_ext(markdown_input, options); + let parser = Parser::new_ext(markdown_input, PARSE_OPTIONS); let parser = MarkdownParser::new( parser.into_offset_iter().collect(), file_location_directory, @@ -3076,6 +3074,26 @@ More text ); } + #[gpui::test] + async fn test_dollar_signs_are_plain_text() { + // Dollar signs should be preserved as plain text, not treated as math delimiters. 
+ // Regression test for https://github.com/zed-industries/zed/issues/50170 + let parsed = parse("$100$ per unit").await; + assert_eq!(parsed.children, vec![p("$100$ per unit", 0..14)]); + } + + #[gpui::test] + async fn test_dollar_signs_in_list_items() { + let parsed = parse("- $18,000 budget\n- $20,000 budget\n").await; + assert_eq!( + parsed.children, + vec![ + list_item(0..16, 1, Unordered, vec![p("$18,000 budget", 2..16)]), + list_item(17..33, 1, Unordered, vec![p("$20,000 budget", 19..33)]), + ] + ); + } + #[gpui::test] async fn test_code_block() { let parsed = parse( diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 79bd7f33290e0510df8dff908b09541717b41696..d6e4a78fd8a5366bb05ad88dcd95cc822eb86629 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -312,6 +312,10 @@ impl MarkdownPreviewView { cx: &mut Context, ) { if let Some(state) = &self.active_editor { + // if there is already a task to update the ui and the current task is also debounced (not high priority), do nothing + if wait_for_debounce && self.parsing_markdown_task.is_some() { + return; + } self.parsing_markdown_task = Some(self.parse_markdown_in_background( wait_for_debounce, state.editor.clone(), @@ -355,6 +359,7 @@ impl MarkdownPreviewView { let scroll_top = view.list_state.logical_scroll_top(); view.list_state.reset(markdown_blocks_count); view.list_state.scroll_to(scroll_top); + view.parsing_markdown_task = None; cx.notify(); }) }) diff --git a/crates/mistral/src/mistral.rs b/crates/mistral/src/mistral.rs index cc9f94304d989c69c3f5a4bd3763704314564a19..0244f904468a5eb3e03b520a2687b31a1168f52b 100644 --- a/crates/mistral/src/mistral.rs +++ b/crates/mistral/src/mistral.rs @@ -233,6 +233,8 @@ pub struct Request { pub messages: Vec, pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_options: Option, + 
#[serde(default, skip_serializing_if = "Option::is_none")] pub max_tokens: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub temperature: Option, @@ -246,6 +248,12 @@ pub struct Request { pub tools: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct StreamOptions { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_tool_calls: Option, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum ResponseFormat { diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index 524c916682f4d17b4e4b598a9af158e259b40ffc..66c23101ab26ac6be58d482c752f366522bb9305 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -52,7 +52,6 @@ gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -project = { workspace = true, features = ["test-support"] } rand.workspace = true settings = { workspace = true, features = ["test-support"] } text = { workspace = true, features = ["test-support"] } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 015c57656b2a21795ded75285571f199ff3a26aa..f08b0e368591ddffcbe9c382604b07ba5ee5c620 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1987,7 +1987,7 @@ impl MultiBuffer { &self, buffer_id: BufferId, cx: &App, - ) -> Vec<(ExcerptId, ExcerptRange)> { + ) -> Vec<(ExcerptId, Arc, ExcerptRange)> { let mut excerpts = Vec::new(); let snapshot = self.read(cx); let mut cursor = snapshot.excerpts.cursor::>(()); @@ -1997,7 +1997,7 @@ impl MultiBuffer { if let Some(excerpt) = cursor.item() && excerpt.locator == *locator { - excerpts.push((excerpt.id, excerpt.range.clone())); + excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone())); } } } @@ -2128,7 +2128,7 @@ impl MultiBuffer { ) -> 
Option { let mut found = None; let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { + for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { let start = range.context.start.to_point(&snapshot); let end = range.context.end.to_point(&snapshot); if start <= point && point < end { @@ -2157,7 +2157,7 @@ impl MultiBuffer { cx: &App, ) -> Option { let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { + for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { if range.context.start.cmp(&anchor, &snapshot).is_le() && range.context.end.cmp(&anchor, &snapshot).is_ge() { diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 7e27786a76a14783f54e42c73850a888e87a3ac7..41e475a554b99485a86ffb0d7147414f8b9ef46a 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1285,7 +1285,7 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { let mut ids = multibuffer .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx) .into_iter() - .map(|(id, _)| id); + .map(|(id, _, _)| id); (ids.next().unwrap(), ids.next().unwrap()) }); let snapshot_2 = multibuffer.read(cx).snapshot(cx); diff --git a/crates/notifications/Cargo.toml b/crates/notifications/Cargo.toml index 8304c788fdd1ca840d68dbb4eb24bf5e3e79abdc..e0640c67cc55b3c2ba742e762d0e7a1e9d414c40 100644 --- a/crates/notifications/Cargo.toml +++ b/crates/notifications/Cargo.toml @@ -15,7 +15,7 @@ doctest = false [features] test-support = [ "channel/test-support", - "collections/test-support", + "gpui/test-support", "rpc/test-support", ] @@ -37,8 +37,6 @@ zed_actions.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = 
["test-support"] } gpui = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index e6145e409058a3fe453c4557b2a32cccf6baf16c..25946591e320df4e2d58e8dd0341d7f27451cc89 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -90,6 +90,10 @@ pub enum Model { FivePointTwoCodex, #[serde(rename = "gpt-5.3-codex")] FivePointThreeCodex, + #[serde(rename = "gpt-5.4")] + FivePointFour, + #[serde(rename = "gpt-5.4-pro")] + FivePointFourPro, #[serde(rename = "custom")] Custom { name: String, @@ -131,6 +135,8 @@ impl Model { "gpt-5.2" => Ok(Self::FivePointTwo), "gpt-5.2-codex" => Ok(Self::FivePointTwoCodex), "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex), + "gpt-5.4" => Ok(Self::FivePointFour), + "gpt-5.4-pro" => Ok(Self::FivePointFourPro), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -153,6 +159,8 @@ impl Model { Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", Self::FivePointThreeCodex => "gpt-5.3-codex", + Self::FivePointFour => "gpt-5.4", + Self::FivePointFourPro => "gpt-5.4-pro", Self::Custom { name, .. } => name, } } @@ -175,6 +183,8 @@ impl Model { Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", Self::FivePointThreeCodex => "gpt-5.3-codex", + Self::FivePointFour => "gpt-5.4", + Self::FivePointFourPro => "gpt-5.4-pro", Self::Custom { display_name, .. 
} => display_name.as_deref().unwrap_or(&self.id()), } } @@ -191,12 +201,14 @@ impl Model { Self::O3 => 200_000, Self::Five => 272_000, Self::FiveCodex => 272_000, - Self::FiveMini => 272_000, - Self::FiveNano => 272_000, + Self::FiveMini => 400_000, + Self::FiveNano => 400_000, Self::FivePointOne => 400_000, Self::FivePointTwo => 400_000, Self::FivePointTwoCodex => 400_000, Self::FivePointThreeCodex => 400_000, + Self::FivePointFour => 1_050_000, + Self::FivePointFourPro => 1_050_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -222,6 +234,8 @@ impl Model { Self::FivePointTwo => Some(128_000), Self::FivePointTwoCodex => Some(128_000), Self::FivePointThreeCodex => Some(128_000), + Self::FivePointFour => Some(128_000), + Self::FivePointFourPro => Some(128_000), } } @@ -230,7 +244,7 @@ impl Model { Self::Custom { reasoning_effort, .. } => reasoning_effort.to_owned(), - Self::FivePointThreeCodex => Some(ReasoningEffort::Medium), + Self::FivePointThreeCodex | Self::FivePointFourPro => Some(ReasoningEffort::Medium), _ => None, } } @@ -241,7 +255,10 @@ impl Model { supports_chat_completions, .. } => *supports_chat_completions, - Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex => false, + Self::FiveCodex + | Self::FivePointTwoCodex + | Self::FivePointThreeCodex + | Self::FivePointFourPro => false, _ => true, } } @@ -263,6 +280,8 @@ impl Model { | Self::FivePointTwo | Self::FivePointTwoCodex | Self::FivePointThreeCodex + | Self::FivePointFour + | Self::FivePointFourPro | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. 
} => false, } diff --git a/crates/open_ai/src/responses.rs b/crates/open_ai/src/responses.rs index 9196b4a11fbaeeabb9ebe7e59cf106c4d260c267..fe97a438859e920313faa8cba0d335b7faeb75e0 100644 --- a/crates/open_ai/src/responses.rs +++ b/crates/open_ai/src/responses.rs @@ -78,6 +78,16 @@ pub enum ResponseInputContent { #[derive(Serialize, Debug)] pub struct ReasoningConfig { pub effort: ReasoningEffort, + #[serde(skip_serializing_if = "Option::is_none")] + pub summary: Option, +} + +#[derive(Serialize, Debug, Clone, Copy, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningSummaryMode { + Auto, + Concise, + Detailed, } #[derive(Serialize, Debug)] @@ -150,6 +160,30 @@ pub enum StreamEvent { content_index: Option, text: String, }, + #[serde(rename = "response.reasoning_summary_part.added")] + ReasoningSummaryPartAdded { + item_id: String, + output_index: usize, + summary_index: usize, + }, + #[serde(rename = "response.reasoning_summary_text.delta")] + ReasoningSummaryTextDelta { + item_id: String, + output_index: usize, + delta: String, + }, + #[serde(rename = "response.reasoning_summary_text.done")] + ReasoningSummaryTextDone { + item_id: String, + output_index: usize, + text: String, + }, + #[serde(rename = "response.reasoning_summary_part.done")] + ReasoningSummaryPartDone { + item_id: String, + output_index: usize, + summary_index: usize, + }, #[serde(rename = "response.function_call_arguments.delta")] FunctionCallArgumentsDelta { item_id: String, @@ -219,6 +253,25 @@ pub struct ResponseUsage { pub enum ResponseOutputItem { Message(ResponseOutputMessage), FunctionCall(ResponseFunctionToolCall), + Reasoning(ResponseReasoningItem), + #[serde(other)] + Unknown, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct ResponseReasoningItem { + #[serde(default)] + pub id: Option, + #[serde(default)] + pub summary: Vec, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ReasoningSummaryPart { + 
SummaryText { + text: String, + }, #[serde(other)] Unknown, } @@ -356,6 +409,21 @@ pub async fn stream_response( }); } } + ResponseOutputItem::Reasoning(reasoning) => { + if let Some(ref item_id) = reasoning.id { + for part in &reasoning.summary { + if let ReasoningSummaryPart::SummaryText { text } = part { + all_events.push( + StreamEvent::ReasoningSummaryTextDelta { + item_id: item_id.clone(), + output_index, + delta: text.clone(), + }, + ); + } + } + } + } ResponseOutputItem::Unknown => {} } diff --git a/crates/outline/Cargo.toml b/crates/outline/Cargo.toml index 905f323624437d988ff9a9eb3bde4f9a7becaa91..79559e03e8b2339fd8b4473d9e06ca6ff47b2b8c 100644 --- a/crates/outline/Cargo.toml +++ b/crates/outline/Cargo.toml @@ -38,6 +38,4 @@ project = { workspace = true, features = ["test-support"] } rope.workspace = true serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } -tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 445f63fa1cdc38cb358cf033cc49f404aa6e6d94..ec85fc14a2eefe280afd0d44ed92b4b8502f460c 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -1143,7 +1143,7 @@ impl OutlinePanel { .excerpts_for_buffer(buffer.read(cx).remote_id(), cx) }) .and_then(|excerpts| { - let (excerpt_id, excerpt_range) = excerpts.first()?; + let (excerpt_id, _, excerpt_range) = excerpts.first()?; multi_buffer_snapshot .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) }) diff --git a/crates/picker/Cargo.toml b/crates/picker/Cargo.toml index f85c55b9f27bcb8fd87101c341058e1a3962934e..8c76aa746453866755be322df576a519ba147b24 100644 --- a/crates/picker/Cargo.toml +++ b/crates/picker/Cargo.toml @@ -28,8 +28,6 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -ctor.workspace = 
true editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true gpui = { workspace = true, features = ["test-support"] } -serde_json.workspace = true +settings.workspace = true diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 716653d89642fe6d8f457f145ed15b8972432a09..e87ec3415cf6d70d840d8566accb94ac6de1547c 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -114,7 +114,7 @@ pub trait PickerDelegate: Sized + 'static { None } fn can_select( - &mut self, + &self, _ix: usize, _window: &mut Window, _cx: &mut Context>, @@ -619,6 +619,9 @@ impl Picker { ) { cx.stop_propagation(); window.prevent_default(); + if !self.delegate.can_select(ix, window, cx) { + return; + } self.set_selected_index(ix, None, false, window, cx); self.do_confirm(secondary, window, cx) } @@ -753,10 +756,11 @@ impl Picker { ix: usize, ) -> impl IntoElement + use { let item_bounds = self.item_bounds.clone(); + let selectable = self.delegate.can_select(ix, window, cx); div() .id(("item", ix)) - .cursor_pointer() + .when(selectable, |this| this.cursor_pointer()) .child( canvas( move |bounds, _window, _cx| { @@ -850,6 +854,175 @@ impl Picker { } } +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use std::cell::Cell; + + struct TestDelegate { + items: Vec, + selected_index: usize, + confirmed_index: Rc>>, + } + + impl TestDelegate { + fn new(items: Vec) -> Self { + Self { + items, + selected_index: 0, + confirmed_index: Rc::new(Cell::new(None)), + } + } + } + + impl PickerDelegate for TestDelegate { + type ListItem = ui::ListItem; + + fn match_count(&self) -> usize { + self.items.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.selected_index = ix; + } + + fn can_select( + &self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) -> bool { + 
self.items.get(ix).copied().unwrap_or(false) + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Test".into() + } + + fn update_matches( + &mut self, + _query: String, + _window: &mut Window, + _cx: &mut Context>, + ) -> Task<()> { + Task::ready(()) + } + + fn confirm( + &mut self, + _secondary: bool, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.confirmed_index.set(Some(self.selected_index)); + } + + fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + + fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + Some( + ui::ListItem::new(ix) + .inset(true) + .toggle_state(selected) + .child(ui::Label::new(format!("Item {ix}"))), + ) + } + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let store = settings::SettingsStore::test(cx); + cx.set_global(store); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + }); + } + + #[gpui::test] + async fn test_clicking_non_selectable_item_does_not_confirm(cx: &mut TestAppContext) { + init_test(cx); + + let confirmed_index = Rc::new(Cell::new(None)); + let (picker, cx) = cx.add_window_view(|window, cx| { + let mut delegate = TestDelegate::new(vec![true, false, true]); + delegate.confirmed_index = confirmed_index.clone(); + Picker::uniform_list(delegate, window, cx) + }); + + picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(1, false, window, cx); + }); + assert!( + confirmed_index.get().is_none(), + "clicking a non-selectable item should not confirm" + ); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(0, false, window, cx); + }); + assert_eq!( + confirmed_index.get(), + Some(0), + "clicking a selectable item should confirm" + ); + } + + #[gpui::test] + async fn test_keyboard_navigation_skips_non_selectable_items(cx: &mut TestAppContext) 
{ + init_test(cx); + + let (picker, cx) = cx.add_window_view(|window, cx| { + Picker::uniform_list(TestDelegate::new(vec![true, false, true]), window, cx) + }); + + picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_next(&menu::SelectNext, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 2, + "select_next should skip non-selectable item at index 1" + ); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_previous(&menu::SelectPrevious, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 0, + "select_previous should skip non-selectable item at index 1" + ); + }); + } +} + impl EventEmitter for Picker {} impl ModalView for Picker {} diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index cbcd5481ee3c48655fc78e17d5cf65d2ec978a09..dfcc8faf64a7e66cce7b9f07f2daa12eae984fa5 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -31,7 +31,6 @@ test-support = [ "worktree/test-support", "gpui/test-support", "dap/test-support", - "dap_adapters/test-support", ] [dependencies] @@ -105,12 +104,10 @@ tracing.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } encoding_rs.workspace = true -db = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } context_server = { workspace = true, features = ["test-support"] } buffer_diff = { workspace = true, features = ["test-support"] } dap = { workspace = true, features = ["test-support"] } -dap_adapters = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } git2.workspace = true gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/project/src/agent_registry_store.rs 
b/crates/project/src/agent_registry_store.rs index a6fc56b7dadaeb0e89443479c108d999d70b37bd..79d6e52097d17cadc0271cb09de4ab283c6d93b8 100644 --- a/crates/project/src/agent_registry_store.rs +++ b/crates/project/src/agent_registry_store.rs @@ -11,7 +11,7 @@ use http_client::{AsyncBody, HttpClient}; use serde::Deserialize; use settings::Settings as _; -use crate::agent_server_store::AllAgentServersSettings; +use crate::DisableAiSettings; const REGISTRY_URL: &str = "https://cdn.agentclientprotocol.com/registry/v1/latest/registry.json"; const REFRESH_THROTTLE_DURATION: Duration = Duration::from_secs(60 * 60); @@ -129,13 +129,11 @@ impl AgentRegistryStore { let store = cx.new(|cx| Self::new(fs, http_client, cx)); cx.set_global(GlobalAgentRegistryStore(store.clone())); - if AllAgentServersSettings::get_global(cx).has_registry_agents() { - store.update(cx, |store, cx| { - if store.agents.is_empty() { - store.refresh(cx); - } - }); - } + store.update(cx, |store, cx| { + if store.agents.is_empty() { + store.refresh(cx); + } + }); store } @@ -149,6 +147,22 @@ impl AgentRegistryStore { .map(|store| store.0.clone()) } + #[cfg(any(test, feature = "test-support"))] + pub fn init_test_global(cx: &mut App, agents: Vec) -> Entity { + let fs: Arc = fs::FakeFs::new(cx.background_executor().clone()); + let store = cx.new(|_cx| Self { + fs, + http_client: http_client::FakeHttpClient::with_404_response(), + agents, + is_fetching: false, + fetch_error: None, + pending_refresh: None, + last_refresh: None, + }); + cx.set_global(GlobalAgentRegistryStore(store.clone())); + store + } + pub fn agents(&self) -> &[RegistryAgent] { &self.agents } @@ -173,6 +187,10 @@ impl AgentRegistryStore { return; } + if DisableAiSettings::get_global(cx).disable_ai { + return; + } + self.is_fetching = true; self.fetch_error = None; self.last_refresh = Some(Instant::now()); @@ -249,6 +267,10 @@ impl AgentRegistryStore { http_client: Arc, cx: &mut Context, ) { + if DisableAiSettings::get_global(cx).disable_ai { 
+ return; + } + cx.spawn(async move |this, cx| -> Result<()> { let cache_path = registry_cache_path(); if !fs.is_file(&cache_path).await { diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 88dc64fcbe8795ae4826dcaa2813744f525b9258..ed8d31ea79cc8cb8537f8cff2edbf2a899794d19 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -222,6 +222,7 @@ pub struct ContextServerStore { update_servers_task: Option>>, context_server_factory: Option, needs_server_update: bool, + ai_disabled: bool, _subscriptions: Vec, } @@ -377,23 +378,42 @@ impl ContextServerStore { cx: &mut Context, ) -> Self { let mut subscriptions = vec![cx.observe_global::(move |this, cx| { + let ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let ai_was_disabled = this.ai_disabled; + this.ai_disabled = ai_disabled; + let settings = &Self::resolve_project_settings(&this.worktree_store, cx).context_servers; - if &this.context_server_settings == settings { + let settings_changed = &this.context_server_settings != settings; + + if settings_changed { + this.context_server_settings = settings.clone(); + } + + // When AI is disabled, stop all running servers + if ai_disabled { + let server_ids: Vec<_> = this.servers.keys().cloned().collect(); + for id in server_ids { + this.stop_server(&id, cx).log_err(); + } return; } - this.context_server_settings = settings.clone(); - if maintain_server_loop { + + // Trigger updates if AI was re-enabled or settings changed + if maintain_server_loop && (ai_was_disabled || settings_changed) { this.available_context_servers_changed(cx); } })]; if maintain_server_loop { subscriptions.push(cx.observe(®istry, |this, _registry, cx| { - this.available_context_servers_changed(cx); + if !DisableAiSettings::get_global(cx).disable_ai { + this.available_context_servers_changed(cx); + } })); } + let ai_disabled = DisableAiSettings::get_global(cx).disable_ai; let mut this 
= Self { state, _subscriptions: subscriptions, @@ -404,12 +424,13 @@ impl ContextServerStore { project: weak_project, registry, needs_server_update: false, + ai_disabled, servers: HashMap::default(), server_ids: Default::default(), update_servers_task: None, context_server_factory, }; - if maintain_server_loop { + if maintain_server_loop && !DisableAiSettings::get_global(cx).disable_ai { this.available_context_servers_changed(cx); } this diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 75f9702e12cf31ce4f555940d7d1918884bbc22a..97aa03cec730c61acfb129579c77f6a5b560ee32 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1778,9 +1778,10 @@ impl LocalLspStore { } }) } - settings::LanguageServerFormatterSpecifier::Current => { - adapters_and_servers.first().map(|e| e.1.clone()) - } + settings::LanguageServerFormatterSpecifier::Current => adapters_and_servers + .iter() + .find(|(_, server)| Self::server_supports_formatting(server)) + .map(|(_, server)| server.clone()), }; let Some(language_server) = language_server else { @@ -2285,6 +2286,14 @@ impl LocalLspStore { } } + fn server_supports_formatting(server: &Arc) -> bool { + let capabilities = server.capabilities(); + let formatting = capabilities.document_formatting_provider.as_ref(); + let range_formatting = capabilities.document_range_formatting_provider.as_ref(); + matches!(formatting, Some(p) if *p != OneOf::Left(false)) + || matches!(range_formatting, Some(p) if *p != OneOf::Left(false)) + } + async fn format_via_lsp( this: &WeakEntity, buffer: &Entity, @@ -4895,7 +4904,7 @@ impl LspStore { buffer: &Entity, mut check: F, cx: &App, - ) -> Vec + ) -> Vec<(lsp::LanguageServerId, lsp::LanguageServerName)> where F: FnMut(&lsp::LanguageServerName, &lsp::ServerCapabilities) -> bool, { @@ -4925,7 +4934,7 @@ impl LspStore { .map(|c| (server_id, server_name, c)) }) .filter(|(_, server_name, capabilities)| check(server_name, capabilities)) - 
.map(|(server_id, _, _)| *server_id) + .map(|(server_id, server_name, _)| (*server_id, server_name.clone())) .collect() } @@ -6123,23 +6132,13 @@ impl LspStore { let language = buffer.read(cx).language().cloned(); - // In the future, we should provide project guests with the names of LSP adapters, - // so that they can use the correct LSP adapter when computing labels. For now, - // guests just use the first LSP adapter associated with the buffer's language. - let lsp_adapter = language.as_ref().and_then(|language| { - language_registry - .lsp_adapters(&language.name()) - .first() - .cloned() - }); - let buffer = buffer.clone(); cx.spawn(async move |this, cx| { let requests = join_all( capable_lsps .into_iter() - .map(|id| { + .map(|(id, server_name)| { let request = GetCompletions { position, context: context.clone(), @@ -6147,7 +6146,14 @@ impl LspStore { }; let buffer = buffer.clone(); let language = language.clone(); - let lsp_adapter = lsp_adapter.clone(); + let lsp_adapter = language.as_ref().and_then(|language| { + let adapters = language_registry.lsp_adapters(&language.name()); + adapters + .iter() + .find(|adapter| adapter.name() == server_name) + .or_else(|| adapters.first()) + .cloned() + }); let upstream_client = upstream_client.clone(); let response = this .update(cx, |this, cx| { diff --git a/crates/project/src/lsp_store/json_language_server_ext.rs b/crates/project/src/lsp_store/json_language_server_ext.rs index 13c3aeb2b1ab2f4ab5f22a3cd065d4d0ff4bcb38..1f2fa0330b75deeb41342ae2401ddc8dbe05159c 100644 --- a/crates/project/src/lsp_store/json_language_server_ext.rs +++ b/crates/project/src/lsp_store/json_language_server_ext.rs @@ -42,8 +42,8 @@ impl lsp::notification::Notification for SchemaContentsChanged { type Params = String; } -pub fn notify_schema_changed(lsp_store: Entity, uri: String, cx: &App) { - zlog::trace!(LOGGER => "Notifying schema changed for URI: {:?}", uri); +pub fn notify_schemas_changed(lsp_store: Entity, uris: &[String], cx: &App) { 
+ zlog::trace!(LOGGER => "Notifying schema changes for URIs: {:?}", uris); let servers = lsp_store.read_with(cx, |lsp_store, _| { let mut servers = Vec::new(); let Some(local) = lsp_store.as_local() else { @@ -63,16 +63,18 @@ pub fn notify_schema_changed(lsp_store: Entity, uri: String, cx: &App) servers }); for server in servers { - zlog::trace!(LOGGER => "Notifying server {NAME} (id {ID:?}) of schema change for URI: {uri:?}", - NAME = server.name(), - ID = server.server_id() - ); - if let Err(error) = server.notify::(uri.clone()) { - zlog::error!( - LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}", - NAME = server.name(), - ID = server.server_id(), + for uri in uris { + zlog::trace!(LOGGER => "Notifying server {NAME} (id {ID:?}) of schema change for URI: {uri:?}", + NAME = server.name(), + ID = server.server_id() ); + if let Err(error) = server.notify::(uri.clone()) { + zlog::error!( + LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}", + NAME = server.name(), + ID = server.server_id(), + ); + } } } } diff --git a/crates/project/tests/integration/context_server_store.rs b/crates/project/tests/integration/context_server_store.rs index 56bdaed41cd77b665d316491e051582c7ccc078a..5b68e11bb95a8b9178a8febf91849ba3a65f76e6 100644 --- a/crates/project/tests/integration/context_server_store.rs +++ b/crates/project/tests/integration/context_server_store.rs @@ -8,10 +8,11 @@ use project::context_server_store::*; use project::project_settings::ContextServerSettings; use project::worktree_store::WorktreeStore; use project::{ - FakeFs, Project, context_server_store::registry::ContextServerDescriptor, + DisableAiSettings, FakeFs, Project, context_server_store::registry::ContextServerDescriptor, project_settings::ProjectSettings, }; use serde_json::json; +use settings::settings_content::SaturatingBool; use settings::{ContextServerCommand, Settings, SettingsStore}; use std::sync::Arc; 
use std::{cell::RefCell, path::PathBuf, rc::Rc}; @@ -553,6 +554,116 @@ async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) { } } +#[gpui::test] +async fn test_context_server_respects_disable_ai(cx: &mut TestAppContext) { + const SERVER_1_ID: &str = "mcp-1"; + + let server_1_id = ContextServerId(SERVER_1_ID.into()); + + // Set up SettingsStore with disable_ai: true in user settings BEFORE creating project + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + DisableAiSettings::register(cx); + // Set disable_ai via user settings (not override_global) so it persists through recompute_values + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = Some(SaturatingBool(true)); + }); + }); + }); + + // Now create the project (ContextServerStore will see disable_ai = true) + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/test"), json!({"code.rs": ""})).await; + let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await; + + let executor = cx.executor(); + let store = project.read_with(cx, |project, _| project.context_server_store()); + store.update(cx, |store, _| { + store.set_context_server_factory(Box::new(move |id, _| { + Arc::new(ContextServer::new( + id.clone(), + Arc::new(create_fake_transport(id.0.to_string(), executor.clone())), + )) + })); + }); + + set_context_server_configuration( + vec![( + server_1_id.0.clone(), + settings::ContextServerSettingsContent::Stdio { + enabled: true, + remote: false, + command: ContextServerCommand { + path: "somebinary".into(), + args: vec!["arg".to_string()], + env: None, + timeout: None, + }, + }, + )], + cx, + ); + + cx.run_until_parked(); + + // Verify that no server started because AI is disabled + cx.update(|cx| { + assert_eq!( + store.read(cx).status_for_server(&server_1_id), + None, + "Server should not start when disable_ai is true" + ); + }); + + // Enable 
AI and verify server starts + { + let _server_events = assert_server_events( + &store, + vec![ + (server_1_id.clone(), ContextServerStatus::Starting), + (server_1_id.clone(), ContextServerStatus::Running), + ], + cx, + ); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = Some(SaturatingBool(false)); + }); + }); + }); + cx.run_until_parked(); + } + + // Disable AI again and verify server stops + { + let _server_events = assert_server_events( + &store, + vec![(server_1_id.clone(), ContextServerStatus::Stopped)], + cx, + ); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |content| { + content.project.disable_ai = Some(SaturatingBool(true)); + }); + }); + }); + cx.run_until_parked(); + } + + // Verify server is stopped + cx.update(|cx| { + assert_eq!( + store.read(cx).status_for_server(&server_1_id), + Some(ContextServerStatus::Stopped), + "Server should be stopped when disable_ai is true" + ); + }); +} + #[gpui::test] async fn test_server_ids_includes_disabled_servers(cx: &mut TestAppContext) { const ENABLED_SERVER_ID: &str = "enabled-server"; diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 5149c6f7834474439bd6119511bb294b560fe4de..88d85c75f9e6452a72eb4181a94a8bf6395ba754 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -54,7 +54,6 @@ criterion.workspace = true editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } -remote_connection = { workspace = true, features = ["test-support"] } serde_json.workspace = true tempfile.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 
082086d6a0a946e610be4c96e50d626b7000bda4..55f440852ada15505831c78035d9362c91b4a204 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -4415,16 +4415,24 @@ impl ProjectPanel { return; } + let workspace = self.workspace.clone(); if folded_selection_info.is_empty() { for (_, task) in move_tasks { - task.detach_and_log_err(cx); + let workspace = workspace.clone(); + cx.spawn_in(window, async move |_, mut cx| { + task.await.notify_workspace_async_err(workspace, &mut cx); + }) + .detach(); } } else { - cx.spawn_in(window, async move |project_panel, cx| { + cx.spawn_in(window, async move |project_panel, mut cx| { // Await all move tasks and collect successful results let mut move_results: Vec<(ProjectEntryId, Entry)> = Vec::new(); for (entry_id, task) in move_tasks { - if let Some(CreatedEntry::Included(new_entry)) = task.await.log_err() { + if let Some(CreatedEntry::Included(new_entry)) = task + .await + .notify_workspace_async_err(workspace.clone(), &mut cx) + { move_results.push((entry_id, new_entry)); } } @@ -6879,14 +6887,17 @@ impl Render for ProjectPanel { Button::new("open_project", "Open Project") .full_width() .key_binding(KeyBinding::for_action_in( - &workspace::Open, + &workspace::Open::default(), &focus_handle, cx, )) .on_click(cx.listener(|this, _, window, cx| { this.workspace .update(cx, |_, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx); + window.dispatch_action( + workspace::Open::default().boxed_clone(), + cx, + ); }) .log_err(); })), diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index af84a7f522a60abf2608bf1f3435b367d24f6bdc..64e96fee700aea8277fe1b69121abf71599c4d30 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -4412,6 +4412,90 @@ async fn test_drag_marked_entries_in_folded_directories(cx: &mut gpui::TestAppCo ); } +#[gpui::test] +async fn 
test_dragging_same_named_files_preserves_one_source_on_conflict( + cx: &mut gpui::TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dir_a": { + "shared.txt": "from a" + }, + "dir_b": { + "shared.txt": "from b" + } + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone()) + .unwrap(); + let cx = &mut VisualTestContext::from_window(window.into(), cx); + let panel = workspace.update_in(cx, ProjectPanel::new); + cx.run_until_parked(); + + panel.update_in(cx, |panel, window, cx| { + let (root_entry_id, worktree_id, entry_a_id, entry_b_id) = { + let worktree = panel.project.read(cx).visible_worktrees(cx).next().unwrap(); + let worktree = worktree.read(cx); + let root_entry_id = worktree.root_entry().unwrap().id; + let worktree_id = worktree.id(); + let entry_a_id = worktree + .entry_for_path(rel_path("dir_a/shared.txt")) + .unwrap() + .id; + let entry_b_id = worktree + .entry_for_path(rel_path("dir_b/shared.txt")) + .unwrap() + .id; + (root_entry_id, worktree_id, entry_a_id, entry_b_id) + }; + + let drag = DraggedSelection { + active_selection: SelectedEntry { + worktree_id, + entry_id: entry_a_id, + }, + marked_selections: Arc::new([ + SelectedEntry { + worktree_id, + entry_id: entry_a_id, + }, + SelectedEntry { + worktree_id, + entry_id: entry_b_id, + }, + ]), + }; + + panel.drag_onto(&drag, root_entry_id, false, window, cx); + }); + cx.executor().run_until_parked(); + + let files = fs.files(); + assert!(files.contains(&PathBuf::from(path!("/root/shared.txt")))); + + let remaining_sources = [ + PathBuf::from(path!("/root/dir_a/shared.txt")), + PathBuf::from(path!("/root/dir_b/shared.txt")), + ] + .into_iter() + .filter(|path| files.contains(path)) + .count(); + + 
assert_eq!( + remaining_sources, 1, + "one conflicting source file should remain in place" + ); +} + #[gpui::test] async fn test_drag_entries_between_different_worktrees(cx: &mut gpui::TestAppContext) { init_test(cx); diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 5b5b8b985cbc102cc451050403cff2e3699f612f..dfa4166f2077aea60aa87084af4918c92882f2df 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -7,7 +7,7 @@ publish.workspace = true license = "GPL-3.0-or-later" [features] -test-support = ["collections/test-support"] +test-support = [] [lints] workspace = true @@ -25,5 +25,3 @@ serde.workspace = true prost-build.workspace = true [dev-dependencies] -collections = { workspace = true, features = ["test-support"] } -typed-path = "0.11" diff --git a/crates/recent_projects/Cargo.toml b/crates/recent_projects/Cargo.toml index 11daee79adc8099a8915b427394256eeed8b5e20..a2aa9f78a2a5edaf13a4f23f52f3695de636850f 100644 --- a/crates/recent_projects/Cargo.toml +++ b/crates/recent_projects/Cargo.toml @@ -59,7 +59,6 @@ indoc.workspace = true windows-registry = "0.6.0" [dev-dependencies] -dap.workspace = true editor = { workspace = true, features = ["test-support"] } extension.workspace = true fs.workspace = true diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 110a702437d463d6f296510c8f4a3a68d28d7d60..548e08eccb49c19551984e6acdd086d78927d614 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -750,12 +750,7 @@ impl PickerDelegate for RecentProjectsDelegate { self.selected_index = ix; } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { matches!( self.filtered_entries.get(ix), Some(ProjectPickerEntry::OpenFolder { .. 
} | ProjectPickerEntry::RecentProject(_)) @@ -1258,17 +1253,16 @@ impl PickerDelegate for RecentProjectsDelegate { .gap_1() .border_t_1() .border_color(cx.theme().colors().border_variant) - .child( + .child({ + let open_action = workspace::Open { + create_new_window: self.create_new_window, + }; Button::new("open_local_folder", "Open Local Project") - .key_binding(KeyBinding::for_action_in( - &workspace::Open, - &focus_handle, - cx, - )) - .on_click(|_, window, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx) - }), - ) + .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx) + }) + }) .child( Button::new("open_remote_folder", "Open Remote Project") .key_binding(KeyBinding::for_action( @@ -1359,6 +1353,7 @@ impl PickerDelegate for RecentProjectsDelegate { ) .menu({ let focus_handle = focus_handle.clone(); + let create_new_window = self.create_new_window; move |window, cx| { Some(ContextMenu::build(window, cx, { @@ -1367,7 +1362,7 @@ impl PickerDelegate for RecentProjectsDelegate { menu.context(focus_handle) .action( "Open Local Project", - workspace::Open.boxed_clone(), + workspace::Open { create_new_window }.boxed_clone(), ) .action( "Open Remote Project", diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index a94f7b1d57eaef8657fb0d448480f84c97ce7e70..b094ff6c5bc5499e7ed1f3e6c9e0b9331b6bb7c2 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1656,7 +1656,9 @@ impl RemoteServerProjects { fn delete_ssh_server(&mut self, server: SshServerIndex, cx: &mut Context) { self.update_settings_file(cx, move |setting, _| { - if let Some(connections) = setting.ssh_connections.as_mut() { + if let Some(connections) = setting.ssh_connections.as_mut() + && connections.get(server.0).is_some() + { connections.remove(server.0); } }); diff --git 
a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index ee729a80eaa9eff56eee7f3bcb8fe6eaf31f0c41..36944261cded68b564df8093d5b7a7621a644c11 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -89,9 +89,7 @@ action_log.workspace = true agent = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } clock = { workspace = true, features = ["test-support"] } -dap = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } @@ -103,7 +101,6 @@ remote = { workspace = true, features = ["test-support"] } theme = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } -prompt_store.workspace = true unindent.workspace = true serde_json.workspace = true zlog.workspace = true diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index c2d6f745d9272651bd90bcdfdc689263958b8b09..4329b29ada504cf536337c94b14790acea73ea11 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -62,7 +62,6 @@ zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } indoc.workspace = true diff --git a/crates/repl/src/components/kernel_options.rs b/crates/repl/src/components/kernel_options.rs index 3b9535767b64dd3e674020035778dffad1601fc6..b6d4f39c0ccb75619a7e4efd6a532202893c8722 100644 --- a/crates/repl/src/components/kernel_options.rs +++ b/crates/repl/src/components/kernel_options.rs 
@@ -27,6 +27,7 @@ fn build_grouped_entries(store: &ReplStore, worktree_id: WorktreeId) -> Vec Vec { + KernelSpecification::JupyterServer(_) | KernelSpecification::SshRemote(_) => { remote_kernels.push(KernelPickerEntry::Kernel { spec: spec.clone(), is_recommended, }); } + KernelSpecification::WslRemote(_) => { + wsl_kernels.push(KernelPickerEntry::Kernel { + spec: spec.clone(), + is_recommended, + }); + } } } @@ -105,6 +110,12 @@ fn build_grouped_entries(store: &ReplStore, worktree_id: WorktreeId) -> Vec None, + KernelSpecification::WslRemote(_) => Some(spec.path().to_string()), KernelSpecification::PythonEnv(_) | KernelSpecification::JupyterServer(_) - | KernelSpecification::SshRemote(_) - | KernelSpecification::WslRemote(_) => { + | KernelSpecification::SshRemote(_) => { let env_kind = spec.environment_kind_label(); let path = spec.path(); match env_kind { @@ -437,7 +448,9 @@ where TT: Fn(&mut Window, &mut App) -> AnyView + 'static, { fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement { - let store = ReplStore::global(cx).read(cx); + let store = ReplStore::global(cx); + store.update(cx, |store, cx| store.ensure_kernelspecs(cx)); + let store = store.read(cx); let all_entries = build_grouped_entries(store, self.worktree_id); let selected_kernelspec = store.active_kernelspec(self.worktree_id, None, cx); diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs index 9ec2ddb497f8c265b51dcfce58d0946d331d87d2..0f1ee9dabebe03b3735bfb95ab0e620a914de1e0 100644 --- a/crates/repl/src/kernels/mod.rs +++ b/crates/repl/src/kernels/mod.rs @@ -9,6 +9,7 @@ pub use native_kernel::*; mod remote_kernels; use project::{Project, ProjectPath, Toolchains, WorktreeId}; +use remote::RemoteConnectionOptions; pub use remote_kernels::*; mod ssh_kernel; @@ -238,7 +239,7 @@ impl KernelSpecification { Self::PythonEnv(spec) => spec.name.clone().into(), Self::JupyterServer(spec) => spec.name.clone().into(), Self::SshRemote(spec) => spec.name.clone().into(), 
- Self::WslRemote(spec) => spec.name.clone().into(), + Self::WslRemote(spec) => spec.kernelspec.display_name.clone().into(), } } @@ -262,7 +263,7 @@ impl KernelSpecification { Self::PythonEnv(spec) => spec.path.to_string_lossy().into_owned(), Self::JupyterServer(spec) => spec.url.to_string(), Self::SshRemote(spec) => spec.path.to_string(), - Self::WslRemote(_) => "WSL".to_string(), + Self::WslRemote(spec) => spec.distro.clone(), }) } @@ -348,7 +349,16 @@ pub fn python_env_kernel_specifications( ) -> impl Future>> + use<> { let python_language = LanguageName::new_static("Python"); let is_remote = project.read(cx).is_remote(); - log::info!("python_env_kernel_specifications: is_remote: {}", is_remote); + let wsl_distro = project + .read(cx) + .remote_connection_options(cx) + .and_then(|opts| { + if let RemoteConnectionOptions::Wsl(wsl) = opts { + Some(wsl.distro_name) + } else { + None + } + }); let toolchains = project.read(cx).available_toolchains( ProjectPath { @@ -383,6 +393,7 @@ pub fn python_env_kernel_specifications( .flatten() .chain(toolchains.toolchains) .map(|toolchain| { + let wsl_distro = wsl_distro.clone(); background_executor.spawn(async move { // For remote projects, we assume python is available assuming toolchain is reported. // We can skip the `ipykernel` check or run it remotely. @@ -390,10 +401,6 @@ pub fn python_env_kernel_specifications( // `new_smol_command` runs locally. We need to run remotely if `is_remote`. 
if is_remote { - log::info!( - "python_env_kernel_specifications: returning SshRemote for toolchain {}", - toolchain.name - ); let default_kernelspec = JupyterKernelspec { argv: vec![ toolchain.path.to_string(), @@ -409,6 +416,22 @@ pub fn python_env_kernel_specifications( env: None, }; + if let Some(distro) = wsl_distro { + log::debug!( + "python_env_kernel_specifications: returning WslRemote for toolchain {}", + toolchain.name + ); + return Some(KernelSpecification::WslRemote(WslKernelSpecification { + name: toolchain.name.to_string(), + kernelspec: default_kernelspec, + distro, + })); + } + + log::debug!( + "python_env_kernel_specifications: returning SshRemote for toolchain {}", + toolchain.name + ); return Some(KernelSpecification::SshRemote( SshRemoteKernelSpecification { name: format!("Remote {}", toolchain.name), diff --git a/crates/repl/src/kernels/native_kernel.rs b/crates/repl/src/kernels/native_kernel.rs index daefe99fef81b26f9bb9977a70075285fb4b4821..d7ee106cab6f1769b42e6958a69e39bffec44b3a 100644 --- a/crates/repl/src/kernels/native_kernel.rs +++ b/crates/repl/src/kernels/native_kernel.rs @@ -19,7 +19,7 @@ use std::{ path::PathBuf, sync::Arc, }; -use util::command::Command; + use uuid::Uuid; use super::{KernelSession, RunningKernel, start_kernel_tasks}; @@ -41,7 +41,7 @@ impl Eq for LocalKernelSpecification {} impl LocalKernelSpecification { #[must_use] - fn command(&self, connection_path: &PathBuf) -> Result { + fn command(&self, connection_path: &PathBuf) -> Result { let argv = &self.kernelspec.argv; anyhow::ensure!(!argv.is_empty(), "Empty argv in kernelspec {}", self.name); @@ -52,7 +52,7 @@ impl LocalKernelSpecification { self.name ); - let mut cmd = util::command::new_command(&argv[0]); + let mut cmd = util::command::new_std_command(&argv[0]); for arg in &argv[1..] 
{ if arg == "{connection_file}" { @@ -91,7 +91,7 @@ async fn peek_ports(ip: IpAddr) -> Result<[u16; 5]> { } pub struct NativeRunningKernel { - pub process: util::command::Child, + pub process: util::process::Child, connection_path: PathBuf, _process_status_task: Option>, pub working_directory: PathBuf, @@ -104,7 +104,7 @@ pub struct NativeRunningKernel { impl Debug for NativeRunningKernel { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RunningKernel") - .field("process", &self.process) + .field("process", &*self.process) .finish() } } @@ -146,15 +146,14 @@ impl NativeRunningKernel { fs.atomic_write(connection_path.clone(), content).await?; let mut cmd = kernel_specification.command(&connection_path)?; - - let mut process = cmd - .current_dir(&working_directory) - .stdout(util::command::Stdio::piped()) - .stderr(util::command::Stdio::piped()) - .stdin(util::command::Stdio::piped()) - .kill_on_drop(true) - .spawn() - .context("failed to start the kernel process")?; + cmd.current_dir(&working_directory); + + let mut process = util::process::Child::spawn( + cmd, + std::process::Stdio::piped(), + std::process::Stdio::piped(), + std::process::Stdio::piped(), + )?; let session_id = Uuid::new_v4().to_string(); diff --git a/crates/repl/src/kernels/wsl_kernel.rs b/crates/repl/src/kernels/wsl_kernel.rs index 34340c74feeb76cc4822a6ca5d669693cc448334..d9ac05c5fc8c2cb756898ff449d6714b78cb7997 100644 --- a/crates/repl/src/kernels/wsl_kernel.rs +++ b/crates/repl/src/kernels/wsl_kernel.rs @@ -274,7 +274,23 @@ impl WslRunningKernel { cd_command, set_env_command, arg_string, arg_string, arg_string, arg_string ) } else { - quote_posix_shell_arguments(&kernel_args)? 
+ let args_string = quote_posix_shell_arguments(&resolved_argv)?; + + let cd_command = if let Some(wd) = wsl_working_directory.as_ref() { + let quoted_wd = shlex::try_quote(wd) + .map(|quoted| quoted.into_owned())?; + format!("cd {quoted_wd} && ") + } else { + String::new() + }; + + let env_prefix_inline = if !env_assignments.is_empty() { + format!("env {} ", env_assignments.join(" ")) + } else { + String::new() + }; + + format!("{cd_command}exec {env_prefix_inline}{args_string}") }; cmd.arg("bash") @@ -578,8 +594,20 @@ pub async fn wsl_kernel_specifications( }) }) .collect::>(); + } else if let Err(e) = + serde_json::from_str::(&json_str) + { + log::error!( + "wsl_kernel_specifications parse error: {} \nJSON: {}", + e, + json_str + ); } + } else { + log::error!("wsl_kernel_specifications command failed"); } + } else if let Err(e) = output { + log::error!("wsl_kernel_specifications command execution failed: {}", e); } Vec::new() diff --git a/crates/repl/src/repl.rs b/crates/repl/src/repl.rs index f17cf8dfba5f5e0e950bd5f2967a6b20d2eebb51..8c3d15a2ad2dfdd18976d750c71e2b3cfb0393a4 100644 --- a/crates/repl/src/repl.rs +++ b/crates/repl/src/repl.rs @@ -46,11 +46,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher { impl Dispatcher for ZedDispatcher { #[track_caller] fn dispatch(&self, runnable: Runnable) { - use std::sync::{Arc, atomic::AtomicBool}; let location = core::panic::Location::caller(); - let closed = Arc::new(AtomicBool::new(false)); let (wrapper, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn(|_| async move { runnable.run() }, { let dispatcher = self.dispatcher.clone(); move |r| dispatcher.dispatch(r, Priority::default()) @@ -61,11 +59,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher { #[track_caller] fn dispatch_after(&self, duration: Duration, runnable: Runnable) { - use std::sync::{Arc, atomic::AtomicBool}; let location = core::panic::Location::caller(); - let 
closed = Arc::new(AtomicBool::new(false)); let (wrapper, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn(|_| async move { runnable.run() }, { let dispatcher = self.dispatcher.clone(); move |r| dispatcher.dispatch_after(duration, r) diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 56b79e20ffca74ab3f9f9c7948a7caeffc4ad4ce..cf1493000edb5881bff412224f7e44dbfbf88b25 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -191,6 +191,7 @@ pub fn run( if !store.read(cx).is_enabled() { return Ok(()); } + store.update(cx, |store, cx| store.ensure_kernelspecs(cx)); let editor = editor.upgrade().context("editor was dropped")?; let selected_range = editor diff --git a/crates/repl/src/repl_sessions_ui.rs b/crates/repl/src/repl_sessions_ui.rs index 1dc2107adde84d4625ffee489805570cd7e5f791..9781382fc85d5da549a65dce2ca06fef4a3bff15 100644 --- a/crates/repl/src/repl_sessions_ui.rs +++ b/crates/repl/src/repl_sessions_ui.rs @@ -204,7 +204,8 @@ impl Render for ReplSessionsPage { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { let store = ReplStore::global(cx); - let (kernel_specifications, sessions) = store.update(cx, |store, _cx| { + let (kernel_specifications, sessions) = store.update(cx, |store, cx| { + store.ensure_kernelspecs(cx); ( store .pure_jupyter_kernel_specifications() diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index 8da94eaa7fe40e28a1d6336a648d7eae5c6767ae..800bab030143de70f08ce2c020bd3095b6767e16 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -8,6 +8,7 @@ use gpui::{App, Context, Entity, EntityId, Global, SharedString, Subscription, T use jupyter_websocket_client::RemoteServer; use language::{Language, LanguageName}; use project::{Fs, Project, ProjectPath, WorktreeId}; +use remote::RemoteConnectionOptions; use settings::{Settings, 
SettingsStore}; use util::rel_path::RelPath; @@ -26,6 +27,7 @@ pub struct ReplStore { enabled: bool, sessions: HashMap>, kernel_specifications: Vec, + kernelspecs_initialized: bool, selected_kernel_for_worktree: HashMap, kernel_specifications_for_worktree: HashMap>, active_python_toolchain_for_worktree: HashMap, @@ -38,12 +40,6 @@ impl ReplStore { pub(crate) fn init(fs: Arc, cx: &mut App) { let store = cx.new(move |cx| Self::new(fs, cx)); - - #[cfg(not(feature = "test-support"))] - store - .update(cx, |store, cx| store.refresh_kernelspecs(cx)) - .detach_and_log_err(cx); - cx.set_global(GlobalReplStore(store)) } @@ -64,6 +60,7 @@ impl ReplStore { enabled: JupyterSettings::enabled(cx), sessions: HashMap::default(), kernel_specifications: Vec::new(), + kernelspecs_initialized: false, _subscriptions: subscriptions, kernel_specifications_for_worktree: HashMap::default(), selected_kernel_for_worktree: HashMap::default(), @@ -144,6 +141,14 @@ impl ReplStore { cx: &mut Context, ) -> Task> { let is_remote = project.read(cx).is_remote(); + // WSL does require access to global kernel specs, so we only exclude remote worktrees that aren't WSL. 
+ // TODO: a better way to handle WSL vs SSH/remote projects, + let is_wsl_remote = project + .read(cx) + .remote_connection_options(cx) + .map_or(false, |opts| { + matches!(opts, RemoteConnectionOptions::Wsl(_)) + }); let kernel_specifications = python_env_kernel_specifications(project, worktree_id, cx); let active_toolchain = project.read(cx).active_toolchain( ProjectPath { @@ -168,7 +173,7 @@ impl ReplStore { this.active_python_toolchain_for_worktree .insert(worktree_id, path); } - if is_remote { + if is_remote && !is_wsl_remote { this.remote_worktrees.insert(worktree_id); } else { this.remote_worktrees.remove(&worktree_id); @@ -207,10 +212,17 @@ impl ReplStore { } } + pub fn ensure_kernelspecs(&mut self, cx: &mut Context) { + if self.kernelspecs_initialized { + return; + } + self.kernelspecs_initialized = true; + self.refresh_kernelspecs(cx).detach_and_log_err(cx); + } + pub fn refresh_kernelspecs(&mut self, cx: &mut Context) -> Task> { let local_kernel_specifications = local_kernel_specifications(self.fs.clone()); let wsl_kernel_specifications = wsl_kernel_specifications(cx.background_executor().clone()); - let remote_kernel_specifications = self.get_remote_kernel_specifications(cx); let all_specs = cx.background_spawn(async move { diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index 41fcd1f5d2f8ca1c78b0a2261a7c48566999e0de..105a3e7df81be5e125477968cf8e8751dfbb9e78 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -31,4 +31,3 @@ gpui_util.workspace = true http_client_tls.workspace = true [dev-dependencies] -gpui.workspace = true diff --git a/crates/rich_text/Cargo.toml b/crates/rich_text/Cargo.toml deleted file mode 100644 index 17bd8d2a4b8977b2bf0079b84dc8f27a9999974b..0000000000000000000000000000000000000000 --- a/crates/rich_text/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "rich_text" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = 
"GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/rich_text.rs" -doctest = false - -[features] -test-support = [ - "gpui/test-support", - "util/test-support", -] - -[dependencies] -futures.workspace = true -gpui.workspace = true -language.workspace = true -linkify.workspace = true -pulldown-cmark.workspace = true -theme.workspace = true -ui.workspace = true -util.workspace = true diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs deleted file mode 100644 index 2af9988f032c5dc9651e1da6e8c3b52c6c668866..0000000000000000000000000000000000000000 --- a/crates/rich_text/src/rich_text.rs +++ /dev/null @@ -1,418 +0,0 @@ -use futures::FutureExt; -use gpui::{ - AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText, - IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window, -}; -use language::{HighlightId, Language, LanguageRegistry}; -use std::{ops::Range, sync::Arc}; -use theme::ActiveTheme; -use ui::LinkPreview; -use util::RangeExt; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Highlight { - Code, - Id(HighlightId), - InlineCode(bool), - Highlight(HighlightStyle), - Mention, - SelfMention, -} - -impl From for Highlight { - fn from(style: HighlightStyle) -> Self { - Self::Highlight(style) - } -} - -impl From for Highlight { - fn from(style: HighlightId) -> Self { - Self::Id(style) - } -} - -#[derive(Clone, Default)] -pub struct RichText { - pub text: SharedString, - pub highlights: Vec<(Range, Highlight)>, - pub link_ranges: Vec>, - pub link_urls: Arc<[String]>, - - pub custom_ranges: Vec>, - custom_ranges_tooltip_fn: - Option, &mut Window, &mut App) -> Option>>, -} - -/// Allows one to specify extra links to the rendered markdown, which can be used -/// for e.g. mentions. 
-#[derive(Debug)] -pub struct Mention { - pub range: Range, - pub is_self_mention: bool, -} - -impl RichText { - pub fn new( - block: String, - mentions: &[Mention], - language_registry: &Arc, - ) -> Self { - let mut text = String::new(); - let mut highlights = Vec::new(); - let mut link_ranges = Vec::new(); - let mut link_urls = Vec::new(); - render_markdown_mut( - &block, - mentions, - language_registry, - None, - &mut text, - &mut highlights, - &mut link_ranges, - &mut link_urls, - ); - text.truncate(text.trim_end().len()); - - RichText { - text: SharedString::from(text), - link_urls: link_urls.into(), - link_ranges, - highlights, - custom_ranges: Vec::new(), - custom_ranges_tooltip_fn: None, - } - } - - pub fn set_tooltip_builder_for_custom_ranges( - &mut self, - f: impl Fn(usize, Range, &mut Window, &mut App) -> Option + 'static, - ) { - self.custom_ranges_tooltip_fn = Some(Arc::new(f)); - } - - pub fn element(&self, id: ElementId, window: &mut Window, cx: &mut App) -> AnyElement { - let theme = cx.theme(); - let code_background = theme.colors().surface_background; - - InteractiveText::new( - id, - StyledText::new(self.text.clone()).with_default_highlights( - &window.text_style(), - self.highlights.iter().map(|(range, highlight)| { - ( - range.clone(), - match highlight { - Highlight::Code => HighlightStyle { - background_color: Some(code_background), - ..Default::default() - }, - Highlight::Id(id) => HighlightStyle { - background_color: Some(code_background), - ..id.style(theme.syntax()).unwrap_or_default() - }, - Highlight::InlineCode(link) => { - if *link { - HighlightStyle { - background_color: Some(code_background), - underline: Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }), - ..Default::default() - } - } else { - HighlightStyle { - background_color: Some(code_background), - ..Default::default() - } - } - } - Highlight::Highlight(highlight) => *highlight, - Highlight::Mention => HighlightStyle { - font_weight: 
Some(FontWeight::BOLD), - ..Default::default() - }, - Highlight::SelfMention => HighlightStyle { - font_weight: Some(FontWeight::BOLD), - ..Default::default() - }, - }, - ) - }), - ), - ) - .on_click(self.link_ranges.clone(), { - let link_urls = self.link_urls.clone(); - move |ix, _, cx| { - let url = &link_urls[ix]; - if url.starts_with("http") { - cx.open_url(url); - } - } - }) - .tooltip({ - let link_ranges = self.link_ranges.clone(); - let link_urls = self.link_urls.clone(); - let custom_tooltip_ranges = self.custom_ranges.clone(); - let custom_tooltip_fn = self.custom_ranges_tooltip_fn.clone(); - move |idx, window, cx| { - for (ix, range) in link_ranges.iter().enumerate() { - if range.contains(&idx) { - return Some(LinkPreview::new(&link_urls[ix], cx)); - } - } - for range in &custom_tooltip_ranges { - if range.contains(&idx) - && let Some(f) = &custom_tooltip_fn - { - return f(idx, range.clone(), window, cx); - } - } - None - } - }) - .into_any_element() - } -} - -pub fn render_markdown_mut( - block: &str, - mut mentions: &[Mention], - language_registry: &Arc, - language: Option<&Arc>, - text: &mut String, - highlights: &mut Vec<(Range, Highlight)>, - link_ranges: &mut Vec>, - link_urls: &mut Vec, -) { - use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd}; - - let mut bold_depth = 0; - let mut italic_depth = 0; - let mut strikethrough_depth = 0; - let mut link_url = None; - let mut current_language = None; - let mut list_stack = Vec::new(); - - let mut options = Options::all(); - options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); - - for (event, source_range) in Parser::new_ext(block, options).into_offset_iter() { - let prev_len = text.len(); - match event { - Event::Text(t) => { - if let Some(language) = ¤t_language { - render_code(text, highlights, t.as_ref(), language); - } else { - while let Some(mention) = mentions.first() { - if !source_range.contains_inclusive(&mention.range) { - break; - } - mentions = 
&mentions[1..]; - let range = (prev_len + mention.range.start - source_range.start) - ..(prev_len + mention.range.end - source_range.start); - highlights.push(( - range.clone(), - if mention.is_self_mention { - Highlight::SelfMention - } else { - Highlight::Mention - }, - )); - } - - text.push_str(t.as_ref()); - let mut style = HighlightStyle::default(); - if bold_depth > 0 { - style.font_weight = Some(FontWeight::BOLD); - } - if italic_depth > 0 { - style.font_style = Some(FontStyle::Italic); - } - if strikethrough_depth > 0 { - style.strikethrough = Some(StrikethroughStyle { - thickness: 1.0.into(), - ..Default::default() - }); - } - let last_run_len = if let Some(link_url) = link_url.clone() { - link_ranges.push(prev_len..text.len()); - link_urls.push(link_url); - style.underline = Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }); - prev_len - } else { - // Manually scan for links - let mut finder = linkify::LinkFinder::new(); - finder.kinds(&[linkify::LinkKind::Url]); - let mut last_link_len = prev_len; - for link in finder.links(&t) { - let start = link.start(); - let end = link.end(); - let range = (prev_len + start)..(prev_len + end); - link_ranges.push(range.clone()); - link_urls.push(link.as_str().to_string()); - - // If there is a style before we match a link, we have to add this to the highlighted ranges - if style != HighlightStyle::default() && last_link_len < link.start() { - highlights.push(( - last_link_len..link.start(), - Highlight::Highlight(style), - )); - } - - highlights.push(( - range, - Highlight::Highlight(HighlightStyle { - underline: Some(UnderlineStyle { - thickness: 1.0.into(), - ..Default::default() - }), - ..style - }), - )); - - last_link_len = end; - } - last_link_len - }; - - if style != HighlightStyle::default() && last_run_len < text.len() { - let mut new_highlight = true; - if let Some((last_range, last_style)) = highlights.last_mut() - && last_range.end == last_run_len - && last_style == 
&Highlight::Highlight(style) - { - last_range.end = text.len(); - new_highlight = false; - } - if new_highlight { - highlights - .push((last_run_len..text.len(), Highlight::Highlight(style))); - } - } - } - } - Event::Code(t) => { - text.push_str(t.as_ref()); - let is_link = link_url.is_some(); - - if let Some(link_url) = link_url.clone() { - link_ranges.push(prev_len..text.len()); - link_urls.push(link_url); - } - - highlights.push((prev_len..text.len(), Highlight::InlineCode(is_link))) - } - Event::Start(tag) => match tag { - Tag::Paragraph => new_paragraph(text, &mut list_stack), - Tag::Heading { .. } => { - new_paragraph(text, &mut list_stack); - bold_depth += 1; - } - Tag::CodeBlock(kind) => { - new_paragraph(text, &mut list_stack); - current_language = if let CodeBlockKind::Fenced(language) = kind { - language_registry - .language_for_name(language.as_ref()) - .now_or_never() - .and_then(Result::ok) - } else { - language.cloned() - } - } - Tag::Emphasis => italic_depth += 1, - Tag::Strong => bold_depth += 1, - Tag::Strikethrough => strikethrough_depth += 1, - Tag::Link { dest_url, .. } => link_url = Some(dest_url.to_string()), - Tag::List(number) => { - list_stack.push((number, false)); - } - Tag::Item => { - let len = list_stack.len(); - if let Some((list_number, has_content)) = list_stack.last_mut() { - *has_content = false; - if !text.is_empty() && !text.ends_with('\n') { - text.push('\n'); - } - for _ in 0..len - 1 { - text.push_str(" "); - } - if let Some(number) = list_number { - text.push_str(&format!("{}. 
", number)); - *number += 1; - *has_content = false; - } else { - text.push_str("- "); - } - } - } - _ => {} - }, - Event::End(tag) => match tag { - TagEnd::Heading(_) => bold_depth -= 1, - TagEnd::CodeBlock => current_language = None, - TagEnd::Emphasis => italic_depth -= 1, - TagEnd::Strong => bold_depth -= 1, - TagEnd::Strikethrough => strikethrough_depth -= 1, - TagEnd::Link => link_url = None, - TagEnd::List(_) => drop(list_stack.pop()), - _ => {} - }, - Event::HardBreak => text.push('\n'), - Event::SoftBreak => text.push('\n'), - _ => {} - } - } -} - -pub fn render_code( - text: &mut String, - highlights: &mut Vec<(Range, Highlight)>, - content: &str, - language: &Arc, -) { - let prev_len = text.len(); - text.push_str(content); - let mut offset = 0; - for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) { - if range.start > offset { - highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code)); - } - highlights.push(( - prev_len + range.start..prev_len + range.end, - Highlight::Id(highlight_id), - )); - offset = range.end; - } - if offset < content.len() { - highlights.push((prev_len + offset..prev_len + content.len(), Highlight::Code)); - } -} - -pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option, bool)>) { - let mut is_subsequent_paragraph_of_list = false; - if let Some((_, has_content)) = list_stack.last_mut() { - if *has_content { - is_subsequent_paragraph_of_list = true; - } else { - *has_content = true; - return; - } - } - - if !text.is_empty() { - if !text.ends_with('\n') { - text.push('\n'); - } - text.push('\n'); - } - for _ in 0..list_stack.len().saturating_sub(1) { - text.push_str(" "); - } - if is_subsequent_paragraph_of_list { - text.push_str(" "); - } -} diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 5b599bad51c2f571cca11625be0b290e7e748504..04a38168dfa32bcbf96a3ee5062fe6ab4c62521b 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ 
-693,16 +693,21 @@ impl<'a> Cursor<'a> { } pub fn seek_forward(&mut self, end_offset: usize) { - debug_assert!(end_offset >= self.offset); + assert!( + end_offset >= self.offset, + "cannot seek backward from {} to {}", + self.offset, + end_offset + ); self.chunks.seek_forward(&end_offset, Bias::Right); self.offset = end_offset; } pub fn slice(&mut self, end_offset: usize) -> Rope { - debug_assert!( + assert!( end_offset >= self.offset, - "cannot slice backwards from {} to {}", + "cannot slice backward from {} to {}", self.offset, end_offset ); @@ -730,7 +735,12 @@ impl<'a> Cursor<'a> { } pub fn summary(&mut self, end_offset: usize) -> D { - debug_assert!(end_offset >= self.offset); + assert!( + end_offset >= self.offset, + "cannot summarize backward from {} to {}", + self.offset, + end_offset + ); let mut summary = D::zero(()); if let Some(start_chunk) = self.chunks.item() { diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index a89657e29680ccfd759fe63efcc837d883ef7590..73bf5fdd8fcaaf1437013d300102a9e593823c7b 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -222,7 +222,7 @@ impl PickerDelegate for RulePickerDelegate { cx.notify(); } - fn can_select(&mut self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { + fn can_select(&self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(RulePickerEntry::Rule(_)) => true, Some(RulePickerEntry::Header(_)) | Some(RulePickerEntry::Separator) | None => false, diff --git a/crates/scheduler/src/executor.rs b/crates/scheduler/src/executor.rs index 76df2e69f66398e3709e1db58a847b1cd0079fc4..602404142a1f4d19bbce841b3b06996cc2a7427b 100644 --- a/crates/scheduler/src/executor.rs +++ b/crates/scheduler/src/executor.rs @@ -6,10 +6,7 @@ use std::{ panic::Location, pin::Pin, rc::Rc, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, + sync::Arc, task::{Context, Poll}, 
thread::{self, ThreadId}, time::Duration, @@ -19,7 +16,6 @@ use std::{ pub struct ForegroundExecutor { session_id: SessionId, scheduler: Arc, - closed: Arc, not_send: PhantomData>, } @@ -28,7 +24,6 @@ impl ForegroundExecutor { Self { session_id, scheduler, - closed: Arc::new(AtomicBool::new(false)), not_send: PhantomData, } } @@ -41,16 +36,6 @@ impl ForegroundExecutor { &self.scheduler } - /// Returns the closed flag for this executor. - pub fn closed(&self) -> &Arc { - &self.closed - } - - /// Close this executor. Tasks will not run after this is called. - pub fn close(&self) { - self.closed.store(true, Ordering::SeqCst); - } - #[track_caller] pub fn spawn(&self, future: F) -> Task where @@ -60,13 +45,12 @@ impl ForegroundExecutor { let session_id = self.session_id; let scheduler = Arc::clone(&self.scheduler); let location = Location::caller(); - let closed = self.closed.clone(); let (runnable, task) = spawn_local_with_source_location( future, move |runnable| { scheduler.schedule_foreground(session_id, runnable); }, - RunnableMeta { location, closed }, + RunnableMeta { location }, ); runnable.schedule(); Task(TaskState::Spawned(task)) @@ -129,25 +113,11 @@ impl ForegroundExecutor { #[derive(Clone)] pub struct BackgroundExecutor { scheduler: Arc, - closed: Arc, } impl BackgroundExecutor { pub fn new(scheduler: Arc) -> Self { - Self { - scheduler, - closed: Arc::new(AtomicBool::new(false)), - } - } - - /// Returns the closed flag for this executor. - pub fn closed(&self) -> &Arc { - &self.closed - } - - /// Close this executor. Tasks will not run after this is called. 
- pub fn close(&self) { - self.closed.store(true, Ordering::SeqCst); + Self { scheduler } } #[track_caller] @@ -167,9 +137,8 @@ impl BackgroundExecutor { { let scheduler = Arc::clone(&self.scheduler); let location = Location::caller(); - let closed = self.closed.clone(); let (runnable, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| future, move |runnable| { @@ -188,20 +157,16 @@ impl BackgroundExecutor { F::Output: Send + 'static, { let location = Location::caller(); - let closed = self.closed.clone(); let (tx, rx) = flume::bounded::>(1); self.scheduler.spawn_realtime(Box::new(move || { while let Ok(runnable) = rx.recv() { - if runnable.metadata().is_closed() { - continue; - } runnable.run(); } })); let (runnable, task) = async_task::Builder::new() - .metadata(RunnableMeta { location, closed }) + .metadata(RunnableMeta { location }) .spawn( move |_| future, move |runnable| { diff --git a/crates/scheduler/src/scheduler.rs b/crates/scheduler/src/scheduler.rs index 5b1fac258d088d3be7a2254bbf68431cdb507c70..05d285df8d9622ac901618f5543d2f219290ee0d 100644 --- a/crates/scheduler/src/scheduler.rs +++ b/crates/scheduler/src/scheduler.rs @@ -14,10 +14,7 @@ use std::{ future::Future, panic::Location, pin::Pin, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, + sync::Arc, task::{Context, Poll}, time::Duration, }; @@ -62,23 +59,12 @@ impl Priority { pub struct RunnableMeta { /// The source location where the task was spawned. pub location: &'static Location<'static>, - /// Shared flag indicating whether the scheduler has been closed. - /// When true, tasks should be dropped without running. - pub closed: Arc, -} - -impl RunnableMeta { - /// Returns true if the scheduler has been closed and this task should not run. 
- pub fn is_closed(&self) -> bool { - self.closed.load(Ordering::SeqCst) - } } impl std::fmt::Debug for RunnableMeta { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("RunnableMeta") .field("location", &self.location) - .field("closed", &self.is_closed()) .finish() } } diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs index e4c330dcd162ad6512da05c9e66449fd7da36083..5a14f9c335bfaaa16cbac2344a2d89dd585225a7 100644 --- a/crates/scheduler/src/test_scheduler.rs +++ b/crates/scheduler/src/test_scheduler.rs @@ -320,10 +320,6 @@ impl TestScheduler { }; if let Some(runnable) = runnable { - // Check if the executor that spawned this task was closed - if runnable.runnable.metadata().is_closed() { - return true; - } let is_foreground = runnable.session_id.is_some(); let was_main_thread = self.state.lock().is_main_thread; self.state.lock().is_main_thread = is_foreground; diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index 9613bd720919d77f2e7c9421ed51a0b18edf7355..dea69a9a02f3761cec2d953285b178d41dd76d56 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -7,7 +7,7 @@ license = "GPL-3.0-or-later" [features] test-support = [ - "client/test-support", + "editor/test-support", "gpui/test-support", "workspace/test-support", @@ -47,7 +47,6 @@ ztracing.workspace = true tracing.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 6af419119d819931f3ad826ff416f1b47c89824f..8ced6e0b487a673ff4dba34cae9c1e2c7ee45d13 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -148,6 +148,7 @@ impl 
Default for KeepAlive { #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] pub struct LmStudioSettingsContent { pub api_url: Option, + pub api_key: Option, pub available_models: Option>, } diff --git a/crates/settings_content/src/project.rs b/crates/settings_content/src/project.rs index 70544646b1878c163bf5c17d2364eeebd98f6908..85a39f389efc621e902154431278c2050c81a210 100644 --- a/crates/settings_content/src/project.rs +++ b/crates/settings_content/src/project.rs @@ -1,5 +1,9 @@ -use std::{path::PathBuf, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; +use anyhow::Context; use collections::{BTreeMap, HashMap}; use gpui::Rgba; use schemars::JsonSchema; @@ -233,6 +237,26 @@ pub struct SemanticTokenRules { pub rules: Vec, } +impl SemanticTokenRules { + pub const FILE_NAME: &'static str = "semantic_token_rules.json"; + + pub fn load(file_path: &Path) -> anyhow::Result { + let rules_content = std::fs::read(file_path).with_context(|| { + anyhow::anyhow!( + "Could not read semantic token rules from {}", + file_path.display() + ) + })?; + + serde_json_lenient::from_slice::(&rules_content).with_context(|| { + anyhow::anyhow!( + "Failed to parse semantic token rules from {}", + file_path.display() + ) + }) + } +} + impl crate::merge_from::MergeFrom for SemanticTokenRules { fn merge_from(&mut self, other: &Self) { self.rules.splice(0..0, other.rules.iter().cloned()); diff --git a/crates/settings_profile_selector/Cargo.toml b/crates/settings_profile_selector/Cargo.toml index 23ccac2e43dec6c1ab335eeb2ffb4d9159d85859..9fcce14b0434386068a9c94f47c9ed675210abbb 100644 --- a/crates/settings_profile_selector/Cargo.toml +++ b/crates/settings_profile_selector/Cargo.toml @@ -22,10 +22,8 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = 
["test-support"] } -language = { workspace = true, features = ["test-support"] } menu.workspace = true project = { workspace = true, features = ["test-support"] } serde_json.workspace = true diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index 399534b968dfba941d17e2f6ce76261ca4e71859..66fefed910cc85e22e731fe9470d2ee511364336 100644 --- a/crates/settings_ui/Cargo.toml +++ b/crates/settings_ui/Cargo.toml @@ -59,20 +59,13 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -assets.workspace = true -client.workspace = true fs = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -language.workspace = true -node_runtime.workspace = true paths.workspace = true pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } serde_json.workspace = true -session.workspace = true settings = { workspace = true, features = ["test-support"] } title_bar = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -zlog.workspace = true diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 6165a41c68894df9ad60110663562df713a24470..36a8d1cf085e544d38d903fe63f514539287dcc5 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -13,30 +13,38 @@ path = "src/sidebar.rs" [features] default = [] -test-support = [] [dependencies] acp_thread.workspace = true +agent.workspace = true +agent-client-protocol.workspace = true agent_ui.workspace = true chrono.workspace = true +editor.workspace = true +feature_flags.workspace = true fs.workspace = true -fuzzy.workspace = true gpui.workspace = true -picker.workspace = true +menu.workspace = true project.workspace = true recent_projects.workspace = true +settings.workspace = true theme.workspace = true ui.workspace = true 
-ui_input.workspace = true util.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] +acp_thread = { workspace = true, features = ["test-support"] } +agent = { workspace = true, features = ["test-support"] } +agent_ui = { workspace = true, features = ["test-support"] } +assistant_text_thread = { workspace = true, features = ["test-support"] } editor.workspace = true +language_model = { workspace = true, features = ["test-support"] } +serde_json.workspace = true feature_flags.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = ["test-support"] } +workspace = { workspace = true, features = ["test-support"] } \ No newline at end of file diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 24974512cda12276b5fcdc51ebd71d091782dff6..4dbc2f811a62c266bc34708cd3b8bd1377938d4d 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1,949 +1,1351 @@ use acp_thread::ThreadStatus; -use agent_ui::{AgentPanel, AgentPanelEvent}; -use chrono::{Datelike, Local, NaiveDate, TimeDelta}; - -use fs::Fs; -use fuzzy::StringMatchCandidate; +use agent::ThreadStore; +use agent_client_protocol as acp; +use agent_ui::{AgentPanel, AgentPanelEvent, NewThread}; +use chrono::Utc; +use editor::{Editor, EditorElement, EditorStyle}; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _}; use gpui::{ - App, Context, Entity, EventEmitter, FocusHandle, Focusable, Pixels, Render, SharedString, - Subscription, Task, Window, px, + AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, ListState, + Pixels, Render, SharedString, TextStyle, WeakEntity, Window, actions, 
list, prelude::*, px, + relative, rems, }; -use picker::{Picker, PickerDelegate}; +use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::Event as ProjectEvent; -use recent_projects::{RecentProjectEntry, get_recent_projects}; -use std::fmt::Display; - +use recent_projects::RecentProjects; +use settings::Settings; use std::collections::{HashMap, HashSet}; - -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use theme::ActiveTheme; +use std::mem; +use theme::{ActiveTheme, ThemeSettings}; use ui::utils::TRAFFIC_LIGHT_PADDING; use ui::{ - AgentThreadStatus, Divider, DividerColor, KeyBinding, ListSubHeader, Tab, ThreadItem, Tooltip, + AgentThreadStatus, ButtonStyle, GradientFade, HighlightedLabel, IconButtonShape, KeyBinding, + ListItem, PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, TintColor, Tooltip, WithScrollbar, prelude::*, }; -use ui_input::ErasedEditor; -use util::ResultExt as _; +use util::path_list::PathList; use workspace::{ - FocusWorkspaceSidebar, MultiWorkspace, NewWorkspaceInWindow, Sidebar as WorkspaceSidebar, + FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, Sidebar as WorkspaceSidebar, SidebarEvent, ToggleWorkspaceSidebar, Workspace, }; +use zed_actions::OpenRecent; +use zed_actions::editor::{MoveDown, MoveUp}; + +actions!( + agents_sidebar, + [ + /// Collapses the selected entry in the workspace sidebar. + CollapseSelectedEntry, + /// Expands the selected entry in the workspace sidebar. 
+ ExpandSelectedEntry, + ] +); + +const DEFAULT_WIDTH: Pixels = px(320.0); +const MIN_WIDTH: Pixels = px(200.0); +const MAX_WIDTH: Pixels = px(800.0); +const DEFAULT_THREADS_SHOWN: usize = 5; #[derive(Clone, Debug)] -struct AgentThreadInfo { +struct ActiveThreadInfo { + session_id: acp::SessionId, title: SharedString, status: AgentThreadStatus, icon: IconName, + icon_from_external_svg: Option, + is_background: bool, } -const DEFAULT_WIDTH: Pixels = px(320.0); -const MIN_WIDTH: Pixels = px(200.0); -const MAX_WIDTH: Pixels = px(800.0); -const MAX_MATCHES: usize = 100; +impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo { + fn from(info: &ActiveThreadInfo) -> Self { + Self { + session_id: info.session_id.clone(), + cwd: None, + title: Some(info.title.clone()), + updated_at: Some(Utc::now()), + meta: None, + } + } +} #[derive(Clone)] -struct WorkspaceThreadEntry { - index: usize, - worktree_label: SharedString, - full_path: SharedString, - thread_info: Option, +struct ThreadEntry { + session_info: acp_thread::AgentSessionInfo, + icon: IconName, + icon_from_external_svg: Option, + status: AgentThreadStatus, + workspace: Entity, + is_live: bool, + is_background: bool, + highlight_positions: Vec, } -impl WorkspaceThreadEntry { - fn new(index: usize, workspace: &Entity, cx: &App) -> Self { - let workspace_ref = workspace.read(cx); - - let worktrees: Vec<_> = workspace_ref - .worktrees(cx) - .filter(|worktree| worktree.read(cx).is_visible()) - .map(|worktree| worktree.read(cx).abs_path()) - .collect(); - - let worktree_names: Vec = worktrees - .iter() - .filter_map(|path| { - path.file_name() - .map(|name| name.to_string_lossy().to_string()) - }) - .collect(); - - let worktree_label: SharedString = if worktree_names.is_empty() { - format!("Workspace {}", index + 1).into() - } else { - worktree_names.join(", ").into() - }; - - let full_path: SharedString = worktrees - .iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>() - .join("\n") - 
.into(); - - let thread_info = Self::thread_info(workspace, cx); +#[derive(Clone)] +enum ListEntry { + ProjectHeader { + path_list: PathList, + label: SharedString, + workspace: Entity, + highlight_positions: Vec, + has_threads: bool, + }, + Thread(ThreadEntry), + ViewMore { + path_list: PathList, + remaining_count: usize, + is_fully_expanded: bool, + }, + NewThread { + path_list: PathList, + workspace: Entity, + }, +} - Self { - index, - worktree_label, - full_path, - thread_info, - } +impl From for ListEntry { + fn from(thread: ThreadEntry) -> Self { + ListEntry::Thread(thread) } +} - fn thread_info(workspace: &Entity, cx: &App) -> Option { - let agent_panel = workspace.read(cx).panel::(cx)?; - let agent_panel_ref = agent_panel.read(cx); +#[derive(Default)] +struct SidebarContents { + entries: Vec, + notified_threads: HashSet, +} - let thread_view = agent_panel_ref.as_active_thread_view(cx)?.read(cx); - let thread = thread_view.thread.read(cx); +impl SidebarContents { + fn is_thread_notified(&self, session_id: &acp::SessionId) -> bool { + self.notified_threads.contains(session_id) + } +} - let icon = thread_view.agent_icon; - let title = thread.title(); +fn fuzzy_match_positions(query: &str, candidate: &str) -> Option> { + let mut positions = Vec::new(); + let mut query_chars = query.chars().peekable(); - let status = if thread.is_waiting_for_confirmation() { - AgentThreadStatus::WaitingForConfirmation - } else if thread.had_error() { - AgentThreadStatus::Error - } else { - match thread.status() { - ThreadStatus::Generating => AgentThreadStatus::Running, - ThreadStatus::Idle => AgentThreadStatus::Completed, + for (byte_idx, candidate_char) in candidate.char_indices() { + if let Some(&query_char) = query_chars.peek() { + if candidate_char.eq_ignore_ascii_case(&query_char) { + positions.push(byte_idx); + query_chars.next(); } - }; - Some(AgentThreadInfo { - title, - status, - icon, - }) + } else { + break; + } } -} -#[derive(Clone)] -enum SidebarEntry { - 
Separator(SharedString), - WorkspaceThread(WorkspaceThreadEntry), - RecentProject(RecentProjectEntry), + if query_chars.peek().is_none() { + Some(positions) + } else { + None + } } -impl SidebarEntry { - fn searchable_text(&self) -> &str { - match self { - SidebarEntry::Separator(_) => "", - SidebarEntry::WorkspaceThread(entry) => entry.worktree_label.as_ref(), - SidebarEntry::RecentProject(entry) => entry.name.as_ref(), +fn workspace_path_list_and_label( + workspace: &Entity, + cx: &App, +) -> (PathList, SharedString) { + let workspace_ref = workspace.read(cx); + let mut paths = Vec::new(); + let mut names = Vec::new(); + + for worktree in workspace_ref.worktrees(cx) { + let worktree_ref = worktree.read(cx); + if !worktree_ref.is_visible() { + continue; + } + let abs_path = worktree_ref.abs_path(); + paths.push(abs_path.to_path_buf()); + if let Some(name) = abs_path.file_name() { + names.push(name.to_string_lossy().to_string()); } } -} -#[derive(Clone)] -struct SidebarMatch { - entry: SidebarEntry, - positions: Vec, + let label: SharedString = if names.is_empty() { + // TODO: Can we do something better in this case? 
+ "Empty Workspace".into() + } else { + names.join(", ").into() + }; + + (PathList::new(&paths), label) } -struct WorkspacePickerDelegate { - multi_workspace: Entity, - entries: Vec, - active_workspace_index: usize, - workspace_thread_count: usize, - /// All recent projects including what's filtered out of entries - /// used to add unopened projects to entries on rebuild - recent_projects: Vec, - recent_project_thread_titles: HashMap, - matches: Vec, - selected_index: usize, - query: String, - hovered_thread_item: Option, - notified_workspaces: HashSet, +pub struct Sidebar { + multi_workspace: WeakEntity, + width: Pixels, + focus_handle: FocusHandle, + filter_editor: Entity, + list_state: ListState, + contents: SidebarContents, + /// The index of the list item that currently has the keyboard focus + /// + /// Note: This is NOT the same as the active item. + selection: Option, + focused_thread: Option, + active_entry_index: Option, + collapsed_groups: HashSet, + expanded_groups: HashMap, + recent_projects_popover_handle: PopoverMenuHandle, } -impl WorkspacePickerDelegate { - fn new(multi_workspace: Entity) -> Self { +impl EventEmitter for Sidebar {} + +impl Sidebar { + pub fn new( + multi_workspace: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let focus_handle = cx.focus_handle(); + cx.on_focus_in(&focus_handle, window, Self::focus_in) + .detach(); + + let filter_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search…", window, cx); + editor + }); + + cx.subscribe_in( + &multi_workspace, + window, + |this, _multi_workspace, event: &MultiWorkspaceEvent, window, cx| match event { + MultiWorkspaceEvent::ActiveWorkspaceChanged => { + this.focused_thread = None; + this.update_entries(cx); + } + MultiWorkspaceEvent::WorkspaceAdded(workspace) => { + this.subscribe_to_workspace(workspace, window, cx); + this.update_entries(cx); + } + MultiWorkspaceEvent::WorkspaceRemoved(_) => { + 
this.update_entries(cx); + } + }, + ) + .detach(); + + cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::BufferEdited = event { + let query = this.filter_editor.read(cx).text(cx); + if !query.is_empty() { + this.selection.take(); + } + this.update_entries(cx); + if !query.is_empty() { + this.selection = this + .contents + .entries + .iter() + .position(|entry| matches!(entry, ListEntry::Thread(_))) + .or_else(|| { + if this.contents.entries.is_empty() { + None + } else { + Some(0) + } + }); + } + } + }) + .detach(); + + let thread_store = ThreadStore::global(cx); + cx.observe_in(&thread_store, window, |this, _, _window, cx| { + this.update_entries(cx); + }) + .detach(); + + cx.observe_flag::(window, |_is_enabled, this, _window, cx| { + this.update_entries(cx); + }) + .detach(); + + let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + cx.defer_in(window, move |this, window, cx| { + for workspace in &workspaces { + this.subscribe_to_workspace(workspace, window, cx); + } + this.update_entries(cx); + }); + Self { - multi_workspace, - entries: Vec::new(), - active_workspace_index: 0, - workspace_thread_count: 0, - recent_projects: Vec::new(), - recent_project_thread_titles: HashMap::new(), - matches: Vec::new(), - selected_index: 0, - query: String::new(), - hovered_thread_item: None, - notified_workspaces: HashSet::new(), + multi_workspace: multi_workspace.downgrade(), + width: DEFAULT_WIDTH, + focus_handle, + filter_editor, + list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), + contents: SidebarContents::default(), + selection: None, + focused_thread: None, + active_entry_index: None, + collapsed_groups: HashSet::new(), + expanded_groups: HashMap::new(), + recent_projects_popover_handle: PopoverMenuHandle::default(), } } - fn set_entries( - &mut self, - workspace_threads: Vec, - active_workspace_index: usize, - cx: &App, + fn subscribe_to_workspace( + &self, + workspace: &Entity, + window: &mut 
Window, + cx: &mut Context, ) { - if let Some(hovered_index) = self.hovered_thread_item { - let still_exists = workspace_threads - .iter() - .any(|thread| thread.index == hovered_index); - if !still_exists { - self.hovered_thread_item = None; - } - } - - let old_statuses: HashMap = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => thread - .thread_info - .as_ref() - .map(|info| (thread.index, info.status)), - _ => None, - }) - .collect(); + let project = workspace.read(cx).project().clone(); + cx.subscribe_in( + &project, + window, + |this, _project, event, _window, cx| match event { + ProjectEvent::WorktreeAdded(_) + | ProjectEvent::WorktreeRemoved(_) + | ProjectEvent::WorktreeOrderChanged => { + this.update_entries(cx); + } + _ => {} + }, + ) + .detach(); - for thread in &workspace_threads { - if let Some(info) = &thread.thread_info { - if info.status == AgentThreadStatus::Completed - && thread.index != active_workspace_index - { - if old_statuses.get(&thread.index) == Some(&AgentThreadStatus::Running) { - self.notified_workspaces.insert(thread.index); + cx.subscribe_in( + workspace, + window, + |this, _workspace, event: &workspace::Event, window, cx| { + if let workspace::Event::PanelAdded(view) = event { + if let Ok(agent_panel) = view.clone().downcast::() { + this.subscribe_to_agent_panel(&agent_panel, window, cx); } } - } - } + }, + ) + .detach(); - if self.active_workspace_index != active_workspace_index { - self.notified_workspaces.remove(&active_workspace_index); + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + self.subscribe_to_agent_panel(&agent_panel, window, cx); } - self.active_workspace_index = active_workspace_index; - self.workspace_thread_count = workspace_threads.len(); - self.rebuild_entries(workspace_threads, cx); } - fn set_recent_projects(&mut self, recent_projects: Vec, cx: &App) { - self.recent_project_thread_titles.clear(); + fn subscribe_to_agent_panel( + &self, + 
agent_panel: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + cx.subscribe_in( + agent_panel, + window, + |this, agent_panel, event: &AgentPanelEvent, _window, cx| match event { + AgentPanelEvent::ActiveViewChanged => { + match agent_panel.read(cx).active_connection_view() { + Some(thread) => { + if let Some(session_id) = thread.read(cx).parent_id(cx) { + this.focused_thread = Some(session_id); + } + } + None => { + this.focused_thread = None; + } + } + this.update_entries(cx); + } + AgentPanelEvent::ThreadFocused => { + let new_focused = agent_panel + .read(cx) + .active_connection_view() + .and_then(|thread| thread.read(cx).parent_id(cx)); + if new_focused.is_some() && new_focused != this.focused_thread { + this.focused_thread = new_focused; + this.update_entries(cx); + } + } + AgentPanelEvent::BackgroundThreadChanged => { + this.update_entries(cx); + } + }, + ) + .detach(); + } - self.recent_projects = recent_projects; + fn all_thread_infos_for_workspace( + workspace: &Entity, + cx: &App, + ) -> Vec { + let Some(agent_panel) = workspace.read(cx).panel::(cx) else { + return Vec::new(); + }; + let agent_panel_ref = agent_panel.read(cx); - let workspace_threads: Vec = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => Some(thread.clone()), - _ => None, - }) - .collect(); - self.rebuild_entries(workspace_threads, cx); - } + agent_panel_ref + .parent_threads(cx) + .into_iter() + .map(|thread_view| { + let thread_view_ref = thread_view.read(cx); + let thread = thread_view_ref.thread.read(cx); + + let icon = thread_view_ref.agent_icon; + let icon_from_external_svg = thread_view_ref.agent_icon_from_external_svg.clone(); + let title = thread.title(); + let session_id = thread.session_id().clone(); + let is_background = agent_panel_ref.is_background_thread(&session_id); + + let status = if thread.is_waiting_for_confirmation() { + AgentThreadStatus::WaitingForConfirmation + } else if thread.had_error() { + 
AgentThreadStatus::Error + } else { + match thread.status() { + ThreadStatus::Generating => AgentThreadStatus::Running, + ThreadStatus::Idle => AgentThreadStatus::Completed, + } + }; - fn open_workspace_path_sets(&self, cx: &App) -> Vec>> { - self.multi_workspace - .read(cx) - .workspaces() - .iter() - .map(|workspace| { - let mut paths = workspace.read(cx).root_paths(cx); - paths.sort(); - paths + ActiveThreadInfo { + session_id, + title, + status, + icon, + icon_from_external_svg, + is_background, + } }) .collect() } - fn rebuild_entries(&mut self, workspace_threads: Vec, cx: &App) { - let open_path_sets = self.open_workspace_path_sets(cx); + fn rebuild_contents(&mut self, cx: &App) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + let mw = multi_workspace.read(cx); + let workspaces = mw.workspaces().to_vec(); + let active_workspace = mw.workspaces().get(mw.active_workspace_index()).cloned(); - self.entries.clear(); + let thread_store = ThreadStore::try_global(cx); + let query = self.filter_editor.read(cx).text(cx); - if !workspace_threads.is_empty() { - self.entries - .push(SidebarEntry::Separator("Active Workspaces".into())); - for thread in workspace_threads { - self.entries.push(SidebarEntry::WorkspaceThread(thread)); - } - } + let previous = mem::take(&mut self.contents); - let recent: Vec<_> = self - .recent_projects + let old_statuses: HashMap = previous + .entries .iter() - .filter(|project| { - let mut project_paths: Vec<&Path> = - project.paths.iter().map(|p| p.as_path()).collect(); - project_paths.sort(); - !open_path_sets.iter().any(|open_paths| { - open_paths.len() == project_paths.len() - && open_paths - .iter() - .zip(&project_paths) - .all(|(a, b)| a.as_ref() == *b) - }) + .filter_map(|entry| match entry { + ListEntry::Thread(thread) if thread.is_live => { + Some((thread.session_info.session_id.clone(), thread.status)) + } + _ => None, }) - .cloned() .collect(); - if !recent.is_empty() { - let today = 
Local::now().naive_local().date(); - let mut current_bucket: Option = None; + let mut entries = Vec::new(); + let mut notified_threads = previous.notified_threads; + // Track all session IDs we add to entries so we can prune stale + // notifications without a separate pass at the end. + let mut current_session_ids: HashSet = HashSet::new(); + // Compute active_entry_index inline during the build pass. + let mut active_entry_index: Option = None; + + for workspace in workspaces.iter() { + let (path_list, label) = workspace_path_list_and_label(workspace, cx); + + let is_collapsed = self.collapsed_groups.contains(&path_list); + let should_load_threads = !is_collapsed || !query.is_empty(); + + let mut threads: Vec = Vec::new(); + + if should_load_threads { + if let Some(ref thread_store) = thread_store { + for meta in thread_store.read(cx).threads_for_paths(&path_list) { + threads.push(ThreadEntry { + session_info: meta.into(), + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::default(), + workspace: workspace.clone(), + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }); + } + } + + let live_infos = Self::all_thread_infos_for_workspace(workspace, cx); + + if !live_infos.is_empty() { + let thread_index_by_session: HashMap = threads + .iter() + .enumerate() + .map(|(i, t)| (t.session_info.session_id.clone(), i)) + .collect(); + + for info in &live_infos { + let Some(&idx) = thread_index_by_session.get(&info.session_id) else { + continue; + }; + + let thread = &mut threads[idx]; + thread.session_info.title = Some(info.title.clone()); + thread.status = info.status; + thread.icon = info.icon; + thread.icon_from_external_svg = info.icon_from_external_svg.clone(); + thread.is_live = true; + thread.is_background = info.is_background; + } + } - for project in recent { - let entry_date = project.timestamp.with_timezone(&Local).naive_local().date(); - let bucket = TimeBucket::from_dates(today, entry_date); + // 
Update notification state for live threads in the same pass. + let is_active_workspace = active_workspace + .as_ref() + .is_some_and(|active| active == workspace); + + for thread in &threads { + let session_id = &thread.session_info.session_id; + if thread.is_background && thread.status == AgentThreadStatus::Completed { + notified_threads.insert(session_id.clone()); + } else if thread.status == AgentThreadStatus::Completed + && !is_active_workspace + && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running) + { + notified_threads.insert(session_id.clone()); + } - if current_bucket != Some(bucket) { - current_bucket = Some(bucket); - self.entries - .push(SidebarEntry::Separator(bucket.to_string().into())); + if is_active_workspace && !thread.is_background { + notified_threads.remove(session_id); + } } - self.entries.push(SidebarEntry::RecentProject(project)); + threads.sort_by(|a, b| b.session_info.updated_at.cmp(&a.session_info.updated_at)); } - } - } -} -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -enum TimeBucket { - Today, - Yesterday, - ThisWeek, - PastWeek, - All, -} + if !query.is_empty() { + let has_threads = !threads.is_empty(); + + let workspace_highlight_positions = + fuzzy_match_positions(&query, &label).unwrap_or_default(); + let workspace_matched = !workspace_highlight_positions.is_empty(); + + let mut matched_threads: Vec = Vec::new(); + for mut thread in threads { + let title = thread + .session_info + .title + .as_ref() + .map(|s| s.as_ref()) + .unwrap_or(""); + if let Some(positions) = fuzzy_match_positions(&query, title) { + thread.highlight_positions = positions; + } + if workspace_matched || !thread.highlight_positions.is_empty() { + matched_threads.push(thread); + } + } -impl TimeBucket { - fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { - if date == reference { - return TimeBucket::Today; - } + if matched_threads.is_empty() && !workspace_matched { + continue; + } - if date == reference - TimeDelta::days(1) { - return 
TimeBucket::Yesterday; - } + if active_entry_index.is_none() + && self.focused_thread.is_none() + && active_workspace + .as_ref() + .is_some_and(|active| active == workspace) + { + active_entry_index = Some(entries.len()); + } - let week = date.iso_week(); + entries.push(ListEntry::ProjectHeader { + path_list: path_list.clone(), + label, + workspace: workspace.clone(), + highlight_positions: workspace_highlight_positions, + has_threads, + }); - if reference.iso_week() == week { - return TimeBucket::ThisWeek; - } + // Track session IDs and compute active_entry_index as we add + // thread entries. + for thread in matched_threads { + current_session_ids.insert(thread.session_info.session_id.clone()); + if active_entry_index.is_none() { + if let Some(focused) = &self.focused_thread { + if &thread.session_info.session_id == focused { + active_entry_index = Some(entries.len()); + } + } + } + entries.push(thread.into()); + } + } else { + let has_threads = !threads.is_empty(); + + // Check if this header is the active entry before pushing it. + if active_entry_index.is_none() + && self.focused_thread.is_none() + && active_workspace + .as_ref() + .is_some_and(|active| active == workspace) + { + active_entry_index = Some(entries.len()); + } + + entries.push(ListEntry::ProjectHeader { + path_list: path_list.clone(), + label, + workspace: workspace.clone(), + highlight_positions: Vec::new(), + has_threads, + }); + + if is_collapsed { + continue; + } + + let total = threads.len(); + + let extra_batches = self.expanded_groups.get(&path_list).copied().unwrap_or(0); + let threads_to_show = + DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN); + let count = threads_to_show.min(total); + let is_fully_expanded = count >= total; + + // Track session IDs and compute active_entry_index as we add + // thread entries. 
+ for thread in threads.into_iter().take(count) { + current_session_ids.insert(thread.session_info.session_id.clone()); + if active_entry_index.is_none() { + if let Some(focused) = &self.focused_thread { + if &thread.session_info.session_id == focused { + active_entry_index = Some(entries.len()); + } + } + } + entries.push(thread.into()); + } - let last_week = (reference - TimeDelta::days(7)).iso_week(); + if total > DEFAULT_THREADS_SHOWN { + entries.push(ListEntry::ViewMore { + path_list: path_list.clone(), + remaining_count: total.saturating_sub(count), + is_fully_expanded, + }); + } - if week == last_week { - return TimeBucket::PastWeek; + if total == 0 { + entries.push(ListEntry::NewThread { + path_list: path_list.clone(), + workspace: workspace.clone(), + }); + } + } } - TimeBucket::All + // Prune stale notifications using the session IDs we collected during + // the build pass (no extra scan needed). + notified_threads.retain(|id| current_session_ids.contains(id)); + + self.active_entry_index = active_entry_index; + self.contents = SidebarContents { + entries, + notified_threads, + }; } -} -impl Display for TimeBucket { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TimeBucket::Today => write!(f, "Today"), - TimeBucket::Yesterday => write!(f, "Yesterday"), - TimeBucket::ThisWeek => write!(f, "This Week"), - TimeBucket::PastWeek => write!(f, "Past Week"), - TimeBucket::All => write!(f, "All"), + fn update_entries(&mut self, cx: &mut Context) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + if !multi_workspace.read(cx).multi_workspace_enabled(cx) { + return; } - } -} -fn open_recent_project(paths: Vec, window: &mut Window, cx: &mut App) { - let Some(handle) = window.window_handle().downcast::() else { - return; - }; + let had_notifications = self.has_notifications(cx); - cx.defer(move |cx| { - if let Some(task) = handle - .update(cx, |multi_workspace, window, cx| { - 
multi_workspace.open_project(paths, window, cx) - }) - .log_err() - { - task.detach_and_log_err(cx); - } - }); -} + let scroll_position = self.list_state.logical_scroll_top(); -impl PickerDelegate for WorkspacePickerDelegate { - type ListItem = AnyElement; + self.rebuild_contents(cx); - fn match_count(&self) -> usize { - self.matches.len() - } + self.list_state.reset(self.contents.entries.len()); + self.list_state.scroll_to(scroll_position); - fn selected_index(&self) -> usize { - self.selected_index - } + if had_notifications != self.has_notifications(cx) { + multi_workspace.update(cx, |_, cx| { + cx.notify(); + }); + } - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; + cx.notify(); } - fn can_select( + fn render_list_entry( &mut self, ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { - match self.matches.get(ix) { - Some(SidebarMatch { - entry: SidebarEntry::Separator(_), - .. - }) => false, - _ => true, - } - } + window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let Some(entry) = self.contents.entries.get(ix) else { + return div().into_any_element(); + }; + let is_focused = self.focus_handle.is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window); + // is_selected means the keyboard selector is here. + let is_selected = is_focused && self.selection == Some(ix); + + let is_group_header_after_first = + ix > 0 && matches!(entry, ListEntry::ProjectHeader { .. 
}); + + let rendered = match entry { + ListEntry::ProjectHeader { + path_list, + label, + workspace, + highlight_positions, + has_threads, + } => self.render_project_header( + ix, + path_list, + label, + workspace, + highlight_positions, + *has_threads, + is_selected, + cx, + ), + ListEntry::Thread(thread) => self.render_thread(ix, thread, is_selected, cx), + ListEntry::ViewMore { + path_list, + remaining_count, + is_fully_expanded, + } => self.render_view_more( + ix, + path_list, + *remaining_count, + *is_fully_expanded, + is_selected, + cx, + ), + ListEntry::NewThread { + path_list, + workspace, + } => self.render_new_thread(ix, path_list, workspace, is_selected, cx), + }; - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search…".into() - } + // add the blue border here, not in the sub methods - fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { - if self.query.is_empty() { - None + if is_group_header_after_first { + v_flex() + .w_full() + .pt_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child(rendered) + .into_any_element() } else { - Some("No threads match your search.".into()) + rendered } } - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let query_changed = self.query != query; - self.query = query.clone(); - if query_changed { - self.hovered_thread_item = None; - } - let entries = self.entries.clone(); - - if query.is_empty() { - self.matches = entries - .into_iter() - .map(|entry| SidebarMatch { - entry, - positions: Vec::new(), - }) - .collect(); - - let separator_offset = if self.workspace_thread_count > 0 { - 1 - } else { - 0 - }; - self.selected_index = (self.active_workspace_index + separator_offset) - .min(self.matches.len().saturating_sub(1)); - return Task::ready(()); - } - - let executor = cx.background_executor().clone(); - cx.spawn_in(window, async move |picker, cx| { - let matches = cx - 
.background_spawn(async move { - let data_entries: Vec<(usize, &SidebarEntry)> = entries - .iter() - .enumerate() - .filter(|(_, entry)| !matches!(entry, SidebarEntry::Separator(_))) - .collect(); - - let candidates: Vec = data_entries - .iter() - .enumerate() - .map(|(candidate_index, (_, entry))| { - StringMatchCandidate::new(candidate_index, entry.searchable_text()) - }) - .collect(); + fn render_project_header( + &self, + ix: usize, + path_list: &PathList, + label: &SharedString, + workspace: &Entity, + highlight_positions: &[usize], + has_threads: bool, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let id = SharedString::from(format!("project-header-{}", ix)); + let group_name = SharedString::from(format!("header-group-{}", ix)); + let ib_id = SharedString::from(format!("project-header-new-thread-{}", ix)); + + let is_collapsed = self.collapsed_groups.contains(path_list); + let disclosure_icon = if is_collapsed { + IconName::ChevronRight + } else { + IconName::ChevronDown + }; + let workspace_for_new_thread = workspace.clone(); + let workspace_for_remove = workspace.clone(); + // let workspace_for_activate = workspace.clone(); + + let path_list_for_toggle = path_list.clone(); + let path_list_for_collapse = path_list.clone(); + let view_more_expanded = self.expanded_groups.contains_key(path_list); + + let multi_workspace = self.multi_workspace.upgrade(); + let workspace_count = multi_workspace + .as_ref() + .map_or(0, |mw| mw.read(cx).workspaces().len()); + let is_active_workspace = self.focused_thread.is_none() + && multi_workspace + .as_ref() + .is_some_and(|mw| mw.read(cx).workspace() == workspace); + + let label = if highlight_positions.is_empty() { + Label::new(label.clone()) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() + } else { + HighlightedLabel::new(label.clone(), highlight_positions.to_vec()) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() + }; - let search_matches = 
fuzzy::match_strings( - &candidates, - &query, - false, - true, - MAX_MATCHES, - &Default::default(), - executor, + let color = cx.theme().colors(); + let gradient_overlay = GradientFade::new( + color.panel_background, + color.element_hover, + color.element_active, + ) + .width(px(48.0)) + .group_name(group_name.clone()); + + ListItem::new(id) + .group_name(group_name) + .toggle_state(is_active_workspace) + .focused(is_selected) + .child( + h_flex() + .relative() + .min_w_0() + .w_full() + .p_1() + .gap_1p5() + .child( + Icon::new(disclosure_icon) + .size(IconSize::Small) + .color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.6))), ) - .await; + .child(label) + .child(gradient_overlay), + ) + .end_hover_slot( + h_flex() + .when(workspace_count > 1, |this| { + this.child( + IconButton::new( + SharedString::from(format!("project-header-remove-{}", ix)), + IconName::Close, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Remove Project")) + .on_click(cx.listener( + move |this, _, window, cx| { + this.remove_workspace(&workspace_for_remove, window, cx); + }, + )), + ) + }) + .when(view_more_expanded && !is_collapsed, |this| { + this.child( + IconButton::new( + SharedString::from(format!("project-header-collapse-{}", ix)), + IconName::ListCollapse, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Collapse Displayed Threads")) + .on_click(cx.listener({ + let path_list_for_collapse = path_list_for_collapse.clone(); + move |this, _, _window, cx| { + this.selection = None; + this.expanded_groups.remove(&path_list_for_collapse); + this.update_entries(cx); + } + })), + ) + }) + .when(has_threads, |this| { + this.child( + IconButton::new(ib_id, IconName::NewThread) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("New Thread")) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.create_new_thread(&workspace_for_new_thread, 
window, cx); + })), + ) + }), + ) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.toggle_collapse(&path_list_for_toggle, window, cx); + })) + // TODO: Decide if we really want the header to be activating different workspaces + // .on_click(cx.listener(move |this, _, window, cx| { + // this.selection = None; + // this.activate_workspace(&workspace_for_activate, window, cx); + // })) + .into_any_element() + } - let mut workspace_matches = Vec::new(); - let mut project_matches = Vec::new(); + fn activate_workspace( + &mut self, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; - for search_match in search_matches { - let (original_index, _) = data_entries[search_match.candidate_id]; - let entry = entries[original_index].clone(); - let sidebar_match = SidebarMatch { - positions: search_match.positions, - entry: entry.clone(), - }; - match entry { - SidebarEntry::WorkspaceThread(_) => { - workspace_matches.push(sidebar_match) - } - SidebarEntry::RecentProject(_) => project_matches.push(sidebar_match), - SidebarEntry::Separator(_) => {} - } - } + self.focused_thread = None; - let mut result = Vec::new(); - if !workspace_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Active Workspaces".into()), - positions: Vec::new(), - }); - result.extend(workspace_matches); - } - if !project_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Recent Projects".into()), - positions: Vec::new(), - }); - result.extend(project_matches); - } - result - }) - .await; + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), cx); + }); - picker - .update_in(cx, |picker, _window, _cx| { - picker.delegate.matches = matches; - if picker.delegate.matches.is_empty() { - picker.delegate.selected_index = 0; - } else { - let first_selectable = picker - 
.delegate - .matches - .iter() - .position(|m| !matches!(m.entry, SidebarEntry::Separator(_))) - .unwrap_or(0); - picker.delegate.selected_index = first_selectable; - } - }) - .log_err(); - }) + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.focus_active_workspace(window, cx); + }); } - fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - let Some(selected_match) = self.matches.get(self.selected_index) else { + fn remove_workspace( + &mut self, + workspace: &Entity, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { return; }; - match &selected_match.entry { - SidebarEntry::Separator(_) => {} - SidebarEntry::WorkspaceThread(thread_entry) => { - let target_index = thread_entry.index; - self.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate_index(target_index, window, cx); - }); - } - SidebarEntry::RecentProject(project_entry) => { - let paths = project_entry.paths.clone(); - open_recent_project(paths, window, cx); - } - } + multi_workspace.update(cx, |multi_workspace, cx| { + let Some(index) = multi_workspace + .workspaces() + .iter() + .position(|w| w == workspace) + else { + return; + }; + multi_workspace.remove_workspace(index, window, cx); + }); } - fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} - - fn render_match( - &self, - index: usize, - selected: bool, + fn toggle_collapse( + &mut self, + path_list: &PathList, _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let match_entry = self.matches.get(index)?; - let SidebarMatch { entry, positions } = match_entry; - - match entry { - SidebarEntry::Separator(title) => Some( - v_flex() - .when(index > 0, |this| { - this.mt_1() - .gap_2() - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - }) - .child(ListSubHeader::new(title.clone()).inset(true)) - .into_any_element(), - ), - SidebarEntry::WorkspaceThread(thread_entry) => { 
- let worktree_label = thread_entry.worktree_label.clone(); - let full_path = thread_entry.full_path.clone(); - let thread_info = thread_entry.thread_info.clone(); - let workspace_index = thread_entry.index; - let multi_workspace = self.multi_workspace.clone(); - let workspace_count = self.multi_workspace.read(cx).workspaces().len(); - let is_hovered = self.hovered_thread_item == Some(workspace_index); - - let remove_btn = IconButton::new( - format!("remove-workspace-{}", workspace_index), - IconName::Close, - ) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("Remove Workspace")) - .on_click({ - let multi_workspace = multi_workspace; - move |_, window, cx| { - multi_workspace.update(cx, |mw, cx| { - mw.remove_workspace(workspace_index, window, cx); - }); - } - }); + cx: &mut Context, + ) { + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); + } else { + self.collapsed_groups.insert(path_list.clone()); + } + self.update_entries(cx); + } - let has_notification = self.notified_workspaces.contains(&workspace_index); - let thread_subtitle = thread_info.as_ref().map(|info| info.title.clone()); - let status = thread_info - .as_ref() - .map_or(AgentThreadStatus::default(), |info| info.status); - let running = matches!( - status, - AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation - ); + fn focus_in(&mut self, _window: &mut Window, _cx: &mut Context) {} - Some( - ThreadItem::new( - ("workspace-item", thread_entry.index), - thread_subtitle.unwrap_or("New Thread".into()), - ) - .icon( - thread_info - .as_ref() - .map_or(IconName::ZedAgent, |info| info.icon), - ) - .running(running) - .generation_done(has_notification) - .status(status) - .selected(selected) - .worktree(worktree_label.clone()) - .worktree_highlight_positions(positions.clone()) - .when(workspace_count > 1, |item| item.action_slot(remove_btn)) - .hovered(is_hovered) - .on_hover(cx.listener(move |picker, is_hovered, 
_window, cx| { - let mut changed = false; - if *is_hovered { - if picker.delegate.hovered_thread_item != Some(workspace_index) { - picker.delegate.hovered_thread_item = Some(workspace_index); - changed = true; - } - } else if picker.delegate.hovered_thread_item == Some(workspace_index) { - picker.delegate.hovered_thread_item = None; - changed = true; - } - if changed { - cx.notify(); - } - })) - .when(!full_path.is_empty(), |this| { - this.tooltip(move |_, cx| { - Tooltip::with_meta(worktree_label.clone(), None, full_path.clone(), cx) - }) - }) - .into_any_element(), - ) - } - SidebarEntry::RecentProject(project_entry) => { - let name = project_entry.name.clone(); - let full_path = project_entry.full_path.clone(); - let item_id: SharedString = - format!("recent-project-{:?}", project_entry.workspace_id).into(); - - Some( - ThreadItem::new(item_id, name.clone()) - .icon(IconName::Folder) - .selected(selected) - .highlight_positions(positions.clone()) - .tooltip(move |_, cx| { - Tooltip::with_meta(name.clone(), None, full_path.clone(), cx) - }) - .into_any_element(), - ) - } + fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context) { + if self.reset_filter_editor_text(window, cx) { + self.update_entries(cx); + } else { + self.focus_handle.focus(window, cx); } } - fn render_editor( - &self, - editor: &Arc, - window: &mut Window, - cx: &mut Context>, - ) -> Div { - h_flex() - .h(Tab::container_height(cx)) - .w_full() - .px_2() - .gap_2() - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(editor.render(window, cx)) + fn reset_filter_editor_text(&mut self, window: &mut Window, cx: &mut Context) -> bool { + self.filter_editor.update(cx, |editor, cx| { + if editor.buffer().read(cx).len(cx).0 > 0 { + editor.set_text("", window, cx); + true + } else { + false + } + }) } -} - -pub struct Sidebar { - multi_workspace: 
Entity, - width: Pixels, - picker: Entity>, - _subscription: Subscription, - _project_subscriptions: Vec, - _agent_panel_subscriptions: Vec, - _thread_subscriptions: Vec, - #[cfg(any(test, feature = "test-support"))] - test_thread_infos: HashMap, - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap, - _fetch_recent_projects: Task<()>, -} - -impl EventEmitter for Sidebar {} -impl Sidebar { - pub fn new( - multi_workspace: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let delegate = WorkspacePickerDelegate::new(multi_workspace.clone()); - let picker = cx.new(|cx| { - Picker::list(delegate, window, cx) - .max_height(None) - .show_scrollbar(true) - .modal(false) - }); + fn has_filter_query(&self, cx: &App) -> bool { + self.filter_editor.read(cx).buffer().read(cx).is_empty() + } - let subscription = cx.observe_in( - &multi_workspace, - window, - |this, _multi_workspace, window, cx| { - this.update_entries(window, cx); - }, - ); + fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { + self.select_next(&SelectNext, window, cx); + } - let fetch_recent_projects = { - let picker = picker.downgrade(); - let fs = ::global(cx); - cx.spawn_in(window, async move |_this, cx| { - let projects = get_recent_projects(None, None, fs).await; - - cx.update(|window, cx| { - if let Some(picker) = picker.upgrade() { - picker.update(cx, |picker, cx| { - picker.delegate.set_recent_projects(projects, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); - }); - } - }) - .log_err(); - }) - }; + fn editor_move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { + self.select_previous(&SelectPrevious, window, cx); + } - let mut this = Self { - multi_workspace, - width: DEFAULT_WIDTH, - picker, - _subscription: subscription, - _project_subscriptions: Vec::new(), - _agent_panel_subscriptions: Vec::new(), - _thread_subscriptions: Vec::new(), - #[cfg(any(test, feature 
= "test-support"))] - test_thread_infos: HashMap::new(), - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap::new(), - _fetch_recent_projects: fetch_recent_projects, + fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context) { + let next = match self.selection { + Some(ix) if ix + 1 < self.contents.entries.len() => ix + 1, + None if !self.contents.entries.is_empty() => 0, + _ => return, }; - this.update_entries(window, cx); - this + self.selection = Some(next); + self.list_state.scroll_to_reveal_item(next); + cx.notify(); } - fn subscribe_to_projects( + fn select_previous( &mut self, - window: &mut Window, + _: &SelectPrevious, + _window: &mut Window, cx: &mut Context, - ) -> Vec { - let projects: Vec<_> = self - .multi_workspace - .read(cx) - .workspaces() - .iter() - .map(|w| w.read(cx).project().clone()) - .collect(); + ) { + let prev = match self.selection { + Some(ix) if ix > 0 => ix - 1, + None if !self.contents.entries.is_empty() => self.contents.entries.len() - 1, + _ => return, + }; + self.selection = Some(prev); + self.list_state.scroll_to_reveal_item(prev); + cx.notify(); + } - projects - .iter() - .map(|project| { - cx.subscribe_in( - project, - window, - |this, _project, event, window, cx| match event { - ProjectEvent::WorktreeAdded(_) - | ProjectEvent::WorktreeRemoved(_) - | ProjectEvent::WorktreeOrderChanged => { - this.update_entries(window, cx); - } - _ => {} - }, - ) - }) - .collect() + fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { + if !self.contents.entries.is_empty() { + self.selection = Some(0); + self.list_state.scroll_to_reveal_item(0); + cx.notify(); + } } - fn build_workspace_thread_entries( - &self, - multi_workspace: &MultiWorkspace, - cx: &App, - ) -> (Vec, usize) { - #[allow(unused_mut)] - let mut entries: Vec = multi_workspace - .workspaces() - .iter() - .enumerate() - .map(|(index, workspace)| WorkspaceThreadEntry::new(index, 
workspace, cx)) - .collect(); + fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + if let Some(last) = self.contents.entries.len().checked_sub(1) { + self.selection = Some(last); + self.list_state.scroll_to_reveal_item(last); + cx.notify(); + } + } + + fn confirm(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context) { + let Some(ix) = self.selection else { return }; + let Some(entry) = self.contents.entries.get(ix) else { + return; + }; - #[cfg(any(test, feature = "test-support"))] - for (index, info) in &self.test_thread_infos { - if let Some(entry) = entries.get_mut(*index) { - entry.thread_info = Some(info.clone()); + match entry { + ListEntry::ProjectHeader { workspace, .. } => { + let workspace = workspace.clone(); + self.activate_workspace(&workspace, window, cx); + } + ListEntry::Thread(thread) => { + let session_info = thread.session_info.clone(); + let workspace = thread.workspace.clone(); + self.activate_thread(session_info, &workspace, window, cx); + } + ListEntry::ViewMore { + path_list, + is_fully_expanded, + .. + } => { + let path_list = path_list.clone(); + if *is_fully_expanded { + self.expanded_groups.remove(&path_list); + } else { + let current = self.expanded_groups.get(&path_list).copied().unwrap_or(0); + self.expanded_groups.insert(path_list, current + 1); + } + self.update_entries(cx); + } + ListEntry::NewThread { workspace, .. 
} => { + let workspace = workspace.clone(); + self.create_new_thread(&workspace, window, cx); } } - - (entries, multi_workspace.active_workspace_index()) } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_projects( - &self, - projects: Vec, + fn activate_thread( + &mut self, + session_info: acp_thread::AgentSessionInfo, + workspace: &Entity, + window: &mut Window, cx: &mut Context, ) { - self.picker.update(cx, |picker, _cx| { - picker.delegate.recent_projects = projects; + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), cx); }); + + workspace.update(cx, |workspace, cx| { + workspace.open_panel::(window, cx); + }); + + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.load_agent_thread( + session_info.session_id, + session_info.cwd, + session_info.title, + window, + cx, + ); + }); + } } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_thread_info( + fn expand_selected_entry( &mut self, - index: usize, - title: SharedString, - status: AgentThreadStatus, + _: &ExpandSelectedEntry, + _window: &mut Window, + cx: &mut Context, ) { - self.test_thread_infos.insert( - index, - AgentThreadInfo { - title, - status, - icon: IconName::ZedAgent, - }, - ); + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { path_list, .. 
}) => { + if self.collapsed_groups.contains(path_list) { + let path_list = path_list.clone(); + self.collapsed_groups.remove(&path_list); + self.update_entries(cx); + } else if ix + 1 < self.contents.entries.len() { + self.selection = Some(ix + 1); + self.list_state.scroll_to_reveal_item(ix + 1); + cx.notify(); + } + } + _ => {} + } } - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_project_thread_title( + fn collapse_selected_entry( &mut self, - full_path: SharedString, - title: SharedString, + _: &CollapseSelectedEntry, + _window: &mut Window, cx: &mut Context, ) { - self.test_recent_project_thread_titles - .insert(full_path.clone(), title.clone()); - self.picker.update(cx, |picker, _cx| { - picker - .delegate - .recent_project_thread_titles - .insert(full_path, title); - }); + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { path_list, .. }) => { + if !self.collapsed_groups.contains(path_list) { + let path_list = path_list.clone(); + self.collapsed_groups.insert(path_list); + self.update_entries(cx); + } + } + Some( + ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. }, + ) => { + for i in (0..ix).rev() { + if let Some(ListEntry::ProjectHeader { path_list, .. 
}) = + self.contents.entries.get(i) + { + let path_list = path_list.clone(); + self.selection = Some(i); + self.collapsed_groups.insert(path_list); + self.update_entries(cx); + break; + } + } + } + None => {} + } } - fn subscribe_to_agent_panels( - &mut self, - window: &mut Window, + fn render_thread( + &self, + ix: usize, + thread: &ThreadEntry, + is_selected: bool, cx: &mut Context, - ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); + ) -> AnyElement { + let has_notification = self + .contents + .is_thread_notified(&thread.session_info.session_id); + + let title: SharedString = thread + .session_info + .title + .clone() + .unwrap_or_else(|| "Untitled".into()); + let session_info = thread.session_info.clone(); + let workspace = thread.workspace.clone(); + + let id = SharedString::from(format!("thread-entry-{}", ix)); + ThreadItem::new(id, title) + .icon(thread.icon) + .when_some(thread.icon_from_external_svg.clone(), |this, svg| { + this.custom_icon_from_external_svg(svg) + }) + .highlight_positions(thread.highlight_positions.to_vec()) + .status(thread.status) + .notified(has_notification) + .selected(self.focused_thread.as_ref() == Some(&session_info.session_id)) + .focused(is_selected) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.activate_thread(session_info.clone(), &workspace, window, cx); + })) + .into_any_element() + } - workspaces - .iter() - .map(|workspace| { - if let Some(agent_panel) = workspace.read(cx).panel::(cx) { - cx.subscribe_in( - &agent_panel, - window, - |this, _, _event: &AgentPanelEvent, window, cx| { - this.update_entries(window, cx); + fn render_recent_projects_button(&self, cx: &mut Context) -> impl IntoElement { + let workspace = self + .multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().downgrade()); + + let focus_handle = workspace + .as_ref() + .and_then(|ws| ws.upgrade()) + .map(|w| w.read(cx).focus_handle(cx)) + .unwrap_or_else(|| 
cx.focus_handle()); + + let popover_handle = self.recent_projects_popover_handle.clone(); + + PopoverMenu::new("sidebar-recent-projects-menu") + .with_handle(popover_handle) + .menu(move |window, cx| { + workspace.as_ref().map(|ws| { + RecentProjects::popover(ws.clone(), false, focus_handle.clone(), window, cx) + }) + }) + .trigger_with_tooltip( + IconButton::new("open-project", IconName::OpenFolder) + .icon_size(IconSize::Small) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)), + |_window, cx| { + Tooltip::for_action( + "Recent Projects", + &OpenRecent { + create_new_window: false, }, + cx, ) + }, + ) + .anchor(gpui::Corner::TopLeft) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), + }) + } + + fn render_filter_input(&self, cx: &mut Context) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_fallbacks: settings.ui_font.fallbacks.clone(), + font_size: rems(0.875).into(), + font_weight: settings.ui_font.weight, + font_style: FontStyle::Normal, + line_height: relative(1.3), + ..Default::default() + }; + + EditorElement::new( + &self.filter_editor, + EditorStyle { + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } + + fn render_view_more( + &self, + ix: usize, + path_list: &PathList, + remaining_count: usize, + is_fully_expanded: bool, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let path_list = path_list.clone(); + let id = SharedString::from(format!("view-more-{}", ix)); + + let (icon, label) = if is_fully_expanded { + (IconName::ListCollapse, "Collapse List") + } else { + (IconName::Plus, "View More") + }; + + ListItem::new(id) + .focused(is_selected) + .child( + h_flex() + .p_1() + .gap_1p5() + .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) + 
.child(Label::new(label).color(Color::Muted)) + .when(!is_fully_expanded, |this| { + this.child( + Label::new(format!("({})", remaining_count)) + .color(Color::Custom(cx.theme().colors().text_muted.opacity(0.5))), + ) + }), + ) + .on_click(cx.listener(move |this, _, _window, cx| { + this.selection = None; + if is_fully_expanded { + this.expanded_groups.remove(&path_list); } else { - // Panel hasn't loaded yet — observe the workspace so we - // re-subscribe once the panel appears on its dock. - cx.observe_in(workspace, window, |this, _, window, cx| { - this.update_entries(window, cx); - }) + let current = this.expanded_groups.get(&path_list).copied().unwrap_or(0); + this.expanded_groups.insert(path_list.clone(), current + 1); } - }) - .collect() + this.update_entries(cx); + })) + .into_any_element() } - fn subscribe_to_threads( + fn create_new_thread( &mut self, + workspace: &Entity, window: &mut Window, cx: &mut Context, - ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; - workspaces - .iter() - .filter_map(|workspace| { - let agent_panel = workspace.read(cx).panel::(cx)?; - let thread = agent_panel.read(cx).active_agent_thread(cx)?; - Some(cx.observe_in(&thread, window, |this, _, window, cx| { - this.update_entries(window, cx); - })) - }) - .collect() - } + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate(workspace.clone(), cx); + }); - /// Reconciles the sidebar's displayed entries with the current state of all - /// workspaces and their agent threads. 
- fn update_entries(&mut self, window: &mut Window, cx: &mut Context) { - let multi_workspace = self.multi_workspace.clone(); - cx.defer_in(window, move |this, window, cx| { - if !this.multi_workspace.read(cx).multi_workspace_enabled(cx) { - return; + workspace.update(cx, |workspace, cx| { + if let Some(agent_panel) = workspace.panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.new_thread(&NewThread, window, cx); + }); } + workspace.focus_panel::(window, cx); + }); + } - this._project_subscriptions = this.subscribe_to_projects(window, cx); - this._agent_panel_subscriptions = this.subscribe_to_agent_panels(window, cx); - this._thread_subscriptions = this.subscribe_to_threads(window, cx); - let (entries, active_index) = multi_workspace.read_with(cx, |multi_workspace, cx| { - this.build_workspace_thread_entries(multi_workspace, cx) - }); + fn render_new_thread( + &self, + ix: usize, + _path_list: &PathList, + workspace: &Entity, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let workspace = workspace.clone(); - let had_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - this.picker.update(cx, |picker, cx| { - picker.delegate.set_entries(entries, active_index, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); - }); - let has_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - if had_notifications != has_notifications { - multi_workspace.update(cx, |_, cx| cx.notify()); - } - }); + div() + .w_full() + .p_2() + .child( + Button::new( + SharedString::from(format!("new-thread-btn-{}", ix)), + "New Thread", + ) + .full_width() + .style(ButtonStyle::Outlined) + .icon(IconName::Plus) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .toggle_state(is_selected) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.create_new_thread(&workspace, window, cx); + })), + ) + 
.into_any_element() } } @@ -957,14 +1359,22 @@ impl WorkspaceSidebar for Sidebar { cx.notify(); } - fn has_notifications(&self, cx: &App) -> bool { - !self.picker.read(cx).delegate.notified_workspaces.is_empty() + fn has_notifications(&self, _cx: &App) -> bool { + !self.contents.notified_threads.is_empty() + } + + fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) { + self.recent_projects_popover_handle.toggle(window, cx); + } + + fn is_recent_projects_popover_deployed(&self) -> bool { + self.recent_projects_popover_handle.is_deployed() } } impl Focusable for Sidebar { fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.read(cx).focus_handle(cx) + self.filter_editor.focus_handle(cx) } } @@ -972,7 +1382,9 @@ impl Render for Sidebar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let titlebar_height = ui::utils::platform_title_bar_height(window); let ui_font = theme::setup_ui_font(window, cx); - let is_focused = self.focus_handle(cx).is_focused(window); + let is_focused = self.focus_handle.is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window); + let has_query = self.has_filter_query(cx); let focus_tooltip_label = if is_focused { "Focus Workspace" @@ -983,6 +1395,17 @@ impl Render for Sidebar { v_flex() .id("workspace-sidebar") .key_context("WorkspaceSidebar") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::editor_move_down)) + .on_action(cx.listener(Self::editor_move_up)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::expand_selected_entry)) + .on_action(cx.listener(Self::collapse_selected_entry)) + .on_action(cx.listener(Self::cancel)) .font(ui_font) .h_full() .w(self.width) @@ -1006,7 +1429,8 @@ impl Render for Sidebar { .border_b_1() 
.border_color(cx.theme().colors().border) .child({ - let focus_handle = cx.focus_handle(); + let focus_handle_toggle = self.focus_handle.clone(); + let focus_handle_focus = self.focus_handle.clone(); IconButton::new("close-sidebar", IconName::WorkspaceNavOpen) .icon_size(IconSize::Small) .tooltip(Tooltip::element(move |_, cx| { @@ -1019,7 +1443,7 @@ impl Render for Sidebar { .child(Label::new("Close Sidebar")) .child(KeyBinding::for_action_in( &ToggleWorkspaceSidebar, - &focus_handle, + &focus_handle_toggle, cx, )), ) @@ -1033,7 +1457,7 @@ impl Render for Sidebar { .child(Label::new(focus_tooltip_label)) .child(KeyBinding::for_action_in( &FocusWorkspaceSidebar, - &focus_handle, + &focus_handle_focus, cx, )), ) @@ -1043,30 +1467,65 @@ impl Render for Sidebar { cx.emit(SidebarEvent::Close); })) }) + .child(self.render_recent_projects_button(cx)), + ) + .child( + h_flex() + .flex_none() + .p_2() + .h(Tab::container_height(cx)) + .gap_1p5() + .border_b_1() + .border_color(cx.theme().colors().border) .child( - IconButton::new("new-workspace", IconName::Plus) - .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("New Workspace", &NewWorkspaceInWindow, cx) - }) - .on_click(cx.listener(|this, _, window, cx| { - this.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.create_workspace(window, cx); - }); - })), - ), + Icon::new(IconName::MagnifyingGlass) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(self.render_filter_input(cx)) + .when(has_query, |this| { + this.pr_1().child( + IconButton::new("clear_filter", IconName::Close) + .shape(IconButtonShape::Square) + .tooltip(Tooltip::text("Clear Search")) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_filter_editor_text(window, cx); + this.update_entries(cx); + })), + ) + }), + ) + .child( + v_flex() + .flex_1() + .overflow_hidden() + .child( + list( + self.list_state.clone(), + cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), + ) + 
.vertical_scrollbar_for(&self.list_state, window, cx), ) - .child(self.picker.clone()) } } #[cfg(test)] mod tests { use super::*; + use acp_thread::StubAgentConnection; + use agent::ThreadStore; + use agent_ui::test_support::{active_session_id, open_thread_with_connection, send_message}; + use assistant_text_thread::TextThreadStore; + use chrono::DateTime; use feature_flags::FeatureFlagAppExt as _; use fs::FakeFs; use gpui::TestAppContext; use settings::SettingsStore; + use std::sync::Arc; + use util::path_list::PathList; fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { @@ -1075,199 +1534,2100 @@ mod tests { theme::init(theme::LoadThemes::JustBase, cx); editor::init(cx); cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); }); } - fn set_thread_info_and_refresh( - sidebar: &Entity, - multi_workspace: &Entity, - index: usize, - title: &str, - status: AgentThreadStatus, - cx: &mut gpui::VisualTestContext, - ) { - sidebar.update_in(cx, |s, _window, _cx| { - s.set_test_thread_info(index, SharedString::from(title.to_string()), status); + fn make_test_thread(title: &str, updated_at: DateTime) -> agent::DbThread { + agent::DbThread { + title: title.to_string().into(), + messages: Vec::new(), + updated_at, + detailed_summary: None, + initial_project_snapshot: None, + cumulative_token_usage: Default::default(), + request_token_usage: Default::default(), + model: None, + profile: None, + imported: false, + subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, + } + } + + async fn init_test_project( + worktree_path: &str, + cx: &mut TestAppContext, + ) -> Entity { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await + } + + fn setup_sidebar( + multi_workspace: 
&Entity, + cx: &mut gpui::VisualTestContext, + ) -> Entity { + let multi_workspace = multi_workspace.clone(); + let sidebar = + cx.update(|window, cx| cx.new(|cx| Sidebar::new(multi_workspace.clone(), window, cx))); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.register_sidebar(sidebar.clone(), window, cx); }); - multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + sidebar + } + + async fn save_n_test_threads( + count: u32, + path_list: &PathList, + cx: &mut gpui::VisualTestContext, + ) { + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + for i in 0..count { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + make_test_thread( + &format!("Thread {}", i + 1), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } cx.run_until_parked(); } - fn has_notifications(sidebar: &Entity, cx: &mut gpui::VisualTestContext) -> bool { - sidebar.read_with(cx, |s, cx| s.has_notifications(cx)) + async fn save_thread_to_store( + session_id: &acp::SessionId, + path_list: &PathList, + cx: &mut gpui::VisualTestContext, + ) { + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + session_id.clone(), + make_test_thread( + "Test", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); } - #[gpui::test] - async fn test_notification_on_running_to_completed_transition(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + fn open_and_focus_sidebar( + sidebar: &Entity, + multi_workspace: &Entity, + cx: &mut 
gpui::VisualTestContext, + ) { + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); + } + + fn visible_entries_as_strings( + sidebar: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> Vec { + sidebar.read_with(cx, |sidebar, _cx| { + sidebar + .contents + .entries + .iter() + .enumerate() + .map(|(ix, entry)| { + let selected = if sidebar.selection == Some(ix) { + " <== selected" + } else { + "" + }; + match entry { + ListEntry::ProjectHeader { + label, + path_list, + highlight_positions: _, + .. + } => { + let icon = if sidebar.collapsed_groups.contains(path_list) { + ">" + } else { + "v" + }; + format!("{} [{}]{}", icon, label, selected) + } + ListEntry::Thread(thread) => { + let title = thread + .session_info + .title + .as_ref() + .map(|s| s.as_ref()) + .unwrap_or("Untitled"); + let active = if thread.is_live { " *" } else { "" }; + let status_str = match thread.status { + AgentThreadStatus::Running => " (running)", + AgentThreadStatus::Error => " (error)", + AgentThreadStatus::WaitingForConfirmation => " (waiting)", + _ => "", + }; + let notified = if sidebar + .contents + .is_thread_notified(&thread.session_info.session_id) + { + " (!)" + } else { + "" + }; + format!( + " {}{}{}{}{}", + title, active, status_str, notified, selected + ) + } + ListEntry::ViewMore { + remaining_count, + is_fully_expanded, + .. + } => { + if *is_fully_expanded { + format!(" - Collapse{}", selected) + } else { + format!(" + View More ({}){}", remaining_count, selected) + } + } + ListEntry::NewThread { .. 
} => { + format!(" [+ New Thread]{}", selected) + } + } + }) + .collect() + }) + } + #[gpui::test] + async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [+ New Thread]"] + ); + } - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) + #[gpui::test] + async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Fix crash in project panel", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + save_task.await.unwrap(); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-2")), + make_test_thread( + "Add inline diff view", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) }); + save_task.await.unwrap(); cx.run_until_parked(); - // Create a second workspace and switch to it so workspace 0 is background. 
+ multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + ] + ); + } + + #[gpui::test] + async fn test_workspace_lifecycle(cx: &mut TestAppContext) { + let project = init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Single workspace with a thread + let path_list = PathList::new(&[std::path::PathBuf::from("/project-a")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-a1")), + make_test_thread( + "Thread A1", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] + ); + + // Add a second workspace multi_workspace.update_in(cx, |mw, window, cx| { mw.create_workspace(window, cx); }); cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Thread A1", + "v [Empty Workspace]", + " [+ New Thread]" + ] + ); + + // Remove the second workspace multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(1, window, cx); + mw.remove_workspace(1, window, cx); }); cx.run_until_parked(); - assert!( - !has_notifications(&sidebar, cx), - "should have no notifications initially" + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] ); + } - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, 
- 0, - "Test Thread", - AgentThreadStatus::Running, - cx, - ); + #[gpui::test] + async fn test_view_more_pagination(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); - assert!( - !has_notifications(&sidebar, cx), - "Running status alone should not create a notification" - ); + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(12, &path_list, cx).await; - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, - ); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); - assert!( - has_notifications(&sidebar, cx), - "Running → Completed transition should create a notification" + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Thread 12", + " Thread 11", + " Thread 10", + " Thread 9", + " Thread 8", + " + View More (7)", + ] ); } #[gpui::test] - async fn test_no_notification_for_active_workspace(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + // Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse + save_n_test_threads(17, &path_list, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Initially 
shows 5 threads + View More (12 remaining) + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); // header + 5 threads + View More + assert!(entries.iter().any(|e| e.contains("View More (12)"))); + + // Focus and navigate to View More, then confirm to expand by one batch + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + for _ in 0..7 { + cx.dispatch_action(SelectNext); + } + cx.dispatch_action(Confirm); + cx.run_until_parked(); + // Now shows 10 threads + View More (7 remaining) + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 12); // header + 10 threads + View More + assert!(entries.iter().any(|e| e.contains("View More (7)"))); + + // Expand again by one batch + sidebar.update_in(cx, |s, _window, cx| { + let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0); + s.expanded_groups.insert(path_list.clone(), current + 1); + s.update_entries(cx); + }); + cx.run_until_parked(); + + // Now shows 15 threads + View More (2 remaining) + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 17); // header + 15 threads + View More + assert!(entries.iter().any(|e| e.contains("View More (2)"))); + + // Expand one more time - should show all 17 threads with Collapse button + sidebar.update_in(cx, |s, _window, cx| { + let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0); + s.expanded_groups.insert(path_list.clone(), current + 1); + s.update_entries(cx); + }); + cx.run_until_parked(); + + // All 17 threads shown with Collapse button + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 19); // header + 17 threads + Collapse + assert!(!entries.iter().any(|e| e.contains("View More"))); + assert!(entries.iter().any(|e| e.contains("Collapse"))); + + // Click collapse - should go back to showing 5 threads + sidebar.update_in(cx, |s, _window, cx| { + s.expanded_groups.remove(&path_list); + s.update_entries(cx); + 
}); + cx.run_until_parked(); + + // Back to initial state: 5 threads + View More (12 remaining) + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); // header + 5 threads + View More + assert!(entries.iter().any(|e| e.contains("View More (12)"))); + } + + #[gpui::test] + async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Collapse + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]"] + ); + + // Expand + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); }); cx.run_until_parked(); - // Workspace 0 is the active workspace — thread completes while - // the user is already looking at it. 
- set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, + } + + #[gpui::test] + async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let expanded_path = PathList::new(&[std::path::PathBuf::from("/expanded")]); + let collapsed_path = PathList::new(&[std::path::PathBuf::from("/collapsed")]); + + sidebar.update_in(cx, |s, _window, _cx| { + s.collapsed_groups.insert(collapsed_path.clone()); + s.contents + .notified_threads + .insert(acp::SessionId::new(Arc::from("t-5"))); + s.contents.entries = vec![ + // Expanded project header + ListEntry::ProjectHeader { + path_list: expanded_path.clone(), + label: "expanded-project".into(), + workspace: workspace.clone(), + highlight_positions: Vec::new(), + has_threads: true, + }, + // Thread with default (Completed) status, not active + ListEntry::Thread(ThreadEntry { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-1")), + cwd: None, + title: Some("Completed thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + workspace: workspace.clone(), + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }), + // Active thread with Running status + ListEntry::Thread(ThreadEntry { + session_info: acp_thread::AgentSessionInfo { + session_id: 
acp::SessionId::new(Arc::from("t-2")), + cwd: None, + title: Some("Running thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Running, + workspace: workspace.clone(), + is_live: true, + is_background: false, + highlight_positions: Vec::new(), + }), + // Active thread with Error status + ListEntry::Thread(ThreadEntry { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-3")), + cwd: None, + title: Some("Error thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Error, + workspace: workspace.clone(), + is_live: true, + is_background: false, + highlight_positions: Vec::new(), + }), + // Thread with WaitingForConfirmation status, not active + ListEntry::Thread(ThreadEntry { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-4")), + cwd: None, + title: Some("Waiting thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::WaitingForConfirmation, + workspace: workspace.clone(), + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }), + // Background thread that completed (should show notification) + ListEntry::Thread(ThreadEntry { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-5")), + cwd: None, + title: Some("Notified thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + workspace: workspace.clone(), + is_live: true, + is_background: true, + highlight_positions: Vec::new(), + }), + // View More entry + ListEntry::ViewMore { + path_list: expanded_path.clone(), + remaining_count: 42, + 
is_fully_expanded: false, + }, + // Collapsed project header + ListEntry::ProjectHeader { + path_list: collapsed_path.clone(), + label: "collapsed-project".into(), + workspace: workspace.clone(), + highlight_positions: Vec::new(), + has_threads: true, + }, + ]; + // Select the Running thread (index 2) + s.selection = Some(2); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [expanded-project]", + " Completed thread", + " Running thread * (running) <== selected", + " Error thread * (error)", + " Waiting thread (waiting)", + " Notified thread * (!)", + " + View More (42)", + "> [collapsed-project]", + ] ); - assert!( - !has_notifications(&sidebar, cx), - "should not notify for the workspace the user is already looking at" + // Move selection to the collapsed header + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = Some(7); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx).last().cloned(), + Some("> [collapsed-project] <== selected".to_string()), ); + + // Clear selection + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = None; + }); + + // No entry should have the selected marker + let entries = visible_entries_as_strings(&sidebar, cx); + for entry in &entries { + assert!( + !entry.contains("<== selected"), + "unexpected selection marker in: {}", + entry + ); + } } #[gpui::test] - async fn test_notification_cleared_on_workspace_activation(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + 
save_n_test_threads(3, &path_list, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Entries: [header, thread3, thread2, thread1] + // Focusing the sidebar does not set a selection; select_next/select_previous + // handle None gracefully by starting from the first or last entry. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // First SelectNext from None starts at index 0 + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Move down through remaining entries + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // At the end, selection stays on the last entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // Move back up + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // At the top, selection stays on the first entry + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = 
PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(3, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + + // SelectLast jumps to the end + cx.dispatch_action(SelectLast); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // SelectFirst jumps to the beginning + cx.dispatch_action(SelectFirst); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Initially no selection + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) + // Open the sidebar so it's rendered, then focus it to trigger focus_in. + // focus_in no longer sets a default selection. 
+ open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // Manually set a selection, blur, then refocus — selection should be preserved + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + + cx.update(|window, _cx| { + window.blur(); + }); + cx.run_until_parked(); + + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); }); cx.run_until_parked(); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_confirm_on_project_header_activates_workspace(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); - // Create a second workspace so we can switch away and back. multi_workspace.update_in(cx, |mw, window, cx| { mw.create_workspace(window, cx); }); cx.run_until_parked(); - // Switch to workspace 1 so workspace 0 becomes a background workspace. + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Thread 1", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + + // Switch to workspace 1 so we can verify confirm switches back. multi_workspace.update_in(cx, |mw, window, cx| { mw.activate_index(1, window, cx); }); cx.run_until_parked(); - - // Thread on workspace 0 transitions Running → Completed while - // the user is looking at workspace 1. 
- set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 1 ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, + + // Focus the sidebar and manually select the header (index 0) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + + // Press confirm on project header (workspace 0) to activate it. + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 0 ); - assert!( - has_notifications(&sidebar, cx), - "background workspace completion should create a notification" + // Focus should have moved out of the sidebar to the workspace center. + let workspace_0 = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].clone()); + workspace_0.update_in(cx, |workspace, window, cx| { + let pane_focus = workspace.active_pane().read(cx).focus_handle(cx); + assert!( + pane_focus.contains_focused(window, cx), + "Confirming a project header should focus the workspace center pane" + ); + }); + } + + #[gpui::test] + async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(8, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Should show header + 5 threads + "View More (3)" + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); + 
assert!(entries.iter().any(|e| e.contains("View More (3)"))); + + // Focus sidebar (selection starts at None), then navigate down to the "View More" entry (index 6) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + for _ in 0..7 { + cx.dispatch_action(SelectNext); + } + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(6)); + + // Confirm on "View More" to expand + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // All 8 threads should now be visible with a "Collapse" button + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 10); // header + 8 threads + Collapse button + assert!(!entries.iter().any(|e| e.contains("View More"))); + assert!(entries.iter().any(|e| e.contains("Collapse"))); + } + + #[gpui::test] + async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] ); - // Switching back to workspace 0 should clear the notification. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); + // Focus sidebar and manually select the header (index 0). Press left to collapse. 
+ open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); }); + + cx.dispatch_action(CollapseSelectedEntry); cx.run_until_parked(); - assert!( - !has_notifications(&sidebar, cx), - "notification should be cleared when workspace becomes active" + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // Press right to expand + cx.dispatch_action(ExpandSelectedEntry); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project] <== selected", " Thread 1",] ); + + // Press right again on already-expanded header moves selection down + cx.dispatch_action(ExpandSelectedEntry); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + } + + #[gpui::test] + async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selection starts at None), then navigate down to the thread (child) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + cx.dispatch_action(SelectNext); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1 <== selected",] + ); + + // Pressing left on a child collapses the parent group and selects it + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + assert_eq!( + 
visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + } + + #[gpui::test] + async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { + let project = init_test_project("/empty-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Even an empty project has the header and a new thread button + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [empty-project]", " [+ New Thread]"] + ); + + // Focus sidebar — focus_in does not set a selection + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // First SelectNext from None starts at index 0 (header) + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // SelectNext moves to the new thread button + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // At the end, selection stays on the last entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // SelectPrevious goes back to the header + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selection starts at None), navigate down to the thread 
(index 1) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + cx.dispatch_action(SelectNext); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // Collapse the group, which removes the thread from the list + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + // Selection should be clamped to the last valid index (0 = header) + let selection = sidebar.read_with(cx, |s, _| s.selection); + let entry_count = sidebar.read_with(cx, |s, _| s.contents.entries.len()); + assert!( + selection.unwrap_or(0) < entry_count, + "selection {} should be within bounds (entries: {})", + selection.unwrap_or(0), + entry_count, + ); + } + + async fn init_test_project_with_agent_panel( + worktree_path: &str, + cx: &mut TestAppContext, + ) -> Entity { + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await + } + + fn add_agent_panel( + workspace: &Entity, + project: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> Entity { + workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = cx.new(|cx| AgentPanel::test_new(workspace, text_thread_store, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }) + } + + fn setup_sidebar_with_agent_panel( + multi_workspace: &Entity, + project: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> (Entity, Entity) { + let sidebar = setup_sidebar(multi_workspace, cx); + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + let panel = add_agent_panel(&workspace, project, 
cx); + (sidebar, panel) + } + + #[gpui::test] + async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, &project, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + + // Open thread A and keep it generating. + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), cx); + send_message(&panel, cx); + + let session_id_a = active_session_id(&panel, cx); + save_thread_to_store(&session_id_a, &path_list, cx).await; + + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Open thread B (idle, default response) — thread A goes to background. 
+ let connection_b = StubAgentConnection::new(); + connection_b.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection_b, cx); + send_message(&panel, cx); + + let session_id_b = active_session_id(&panel, cx); + save_thread_to_store(&session_id_b, &path_list, cx).await; + + cx.run_until_parked(); + + let mut entries = visible_entries_as_strings(&sidebar, cx); + entries[1..].sort(); + assert_eq!( + entries, + vec!["v [my-project]", " Hello *", " Hello * (running)",] + ); + } + + #[gpui::test] + async fn test_background_thread_completion_triggers_notification(cx: &mut TestAppContext) { + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, &project_a, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); + + // Open thread on workspace A and keep it generating. + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel_a, connection_a.clone(), cx); + send_message(&panel_a, cx); + + let session_id_a = active_session_id(&panel_a, cx); + save_thread_to_store(&session_id_a, &path_list_a, cx).await; + + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Add a second workspace and activate it (making workspace A the background). + let fs = cx.update(|_, cx| ::global(cx)); + let project_b = project::Project::test(fs, [], cx).await; + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + cx.run_until_parked(); + + // Thread A is still running; no notification yet. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Hello * (running)", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + + // Complete thread A's turn (transition Running → Completed). + connection_a.end_turn(session_id_a.clone(), acp::StopReason::EndTurn); + cx.run_until_parked(); + + // The completed background thread shows a notification indicator. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Hello * (!)", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + } + + fn type_in_search(sidebar: &Entity, query: &str, cx: &mut gpui::VisualTestContext) { + sidebar.update_in(cx, |sidebar, window, cx| { + window.focus(&sidebar.filter_editor.focus_handle(cx), cx); + sidebar.filter_editor.update(cx, |editor, cx| { + editor.set_text(query, window, cx); + }); + }); + cx.run_until_parked(); + } + + #[gpui::test] + async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("t-1", "Fix crash in project panel", 3), + ("t-2", "Add inline diff view", 2), + ("t-3", "Refactor settings module", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + " 
Refactor settings module", + ] + ); + + // User types "diff" in the search box — only the matching thread remains, + // with its workspace header preserved for context. + type_in_search(&sidebar, "diff", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Add inline diff view <== selected",] + ); + + // User changes query to something with no matches — list is empty. + type_in_search(&sidebar, "nonexistent", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + Vec::::new() + ); + } + + #[gpui::test] + async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { + // Scenario: A user remembers a thread title but not the exact casing. + // Search should match case-insensitively so they can still find it. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Fix Crash In Project Panel", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + // Lowercase query matches mixed-case title. + type_in_search(&sidebar, "fix crash", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); + + // Uppercase query also matches the same title. 
+ type_in_search(&sidebar, "FIX CRASH", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); + } + + #[gpui::test] + async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContext) { + // Scenario: A user searches, finds what they need, then presses Escape + // to dismiss the filter and see the full list again. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + // Confirm the full list is showing. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread", " Beta thread",] + ); + + // User types a search query to filter down. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread <== selected",] + ); + + // User presses Escape — filter clears, full list is restored. 
+ cx.dispatch_action(Cancel); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Alpha thread <== selected", + " Beta thread", + ] + ); + } + + #[gpui::test] + async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) { + let project_a = init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_a.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + + // Add a second workspace. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + cx.run_until_parked(); + + let path_list_b = PathList::new::(&[]); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_b.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar", + " Add tests for editor", + "v [Empty Workspace]", + " Refactor sidebar layout", + " Fix typo in README", + ] + ); + + // "sidebar" matches a thread in each workspace — both headers stay visible. + type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Refactor sidebar layout", + ] + ); + + // "typo" only matches in the second workspace — the first header disappears. + type_in_search(&sidebar, "typo", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [Empty Workspace]", " Fix typo in README <== selected",] + ); + + // "project-a" matches the first workspace name — the header appears + // with all child threads included. 
+ type_in_search(&sidebar, "project-a", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + } + + #[gpui::test] + async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { + let project_a = init_test_project("/alpha-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/alpha-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_a.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + + // Add a second workspace. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + cx.run_until_parked(); + + let path_list_b = PathList::new::(&[]); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_b.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + // "alpha" matches the workspace name "alpha-project" but no thread titles. + // The workspace header should appear with all child threads included. 
+ type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + + // "sidebar" matches thread titles in both workspaces but not workspace names. + // Both headers appear with their matching threads. + type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Refactor sidebar layout", + ] + ); + + // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r + // doesn't match) — but does not match either workspace name or any thread. + // Actually let's test something simpler: a query that matches both a workspace + // name AND some threads in that workspace. Matching threads should still appear. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Fix typo in README", + ] + ); + + // A query that matches a workspace name AND a thread in that same workspace. + // Both the header (highlighted) and all child threads should appear. + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + + // Now search for something that matches only a workspace name when there + // are also threads with matching titles — the non-matching workspace's + // threads should still appear if their titles match. 
+ type_in_search(&sidebar, "alp", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + " Add tests for editor", + ] + ); + } + + #[gpui::test] + async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + // Create 8 threads. The oldest one has a unique name and will be + // behind View More (only 5 shown by default). + for i in 0..8u32 { + let title = if i == 0 { + "Hidden gem thread".to_string() + } else { + format!("Thread {}", i + 1) + }; + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + make_test_thread( + &title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + // Confirm the thread is not visible and View More is shown. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("View More")), + "should have View More button" + ); + assert!( + !entries.iter().any(|e| e.contains("Hidden gem")), + "Hidden gem should be behind View More" + ); + + // User searches for the hidden thread — it appears, and View More is gone. 
+ type_in_search(&sidebar, "hidden gem", cx); + let filtered = visible_entries_as_strings(&sidebar, cx); + assert_eq!( + filtered, + vec!["v [my-project]", " Hidden gem thread <== selected",] + ); + assert!( + !filtered.iter().any(|e| e.contains("View More")), + "View More should not appear when filtering" + ); + } + + #[gpui::test] + async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Important thread", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + // User focuses the sidebar and collapses the group using keyboard: + // manually select the header, then press CollapseSelectedEntry to collapse. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(0); + }); + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // User types a search — the thread appears even though its group is collapsed. 
+ type_in_search(&sidebar, "important", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]", " Important thread <== selected",] + ); + } + + #[gpui::test] + async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("t-1", "Fix crash in panel", 3), + ("t-2", "Fix lint warnings", 2), + ("t-3", "Add new feature", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + + // User types "fix" — two threads match. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); + + // Selection starts on the first matching thread. User presses + // SelectNext to move to the second match. + cx.dispatch_action(SelectNext); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel", + " Fix lint warnings <== selected", + ] + ); + + // User can also jump back with SelectPrevious. 
+ cx.dispatch_action(SelectPrevious); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); + } + + #[gpui::test] + async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + cx.run_until_parked(); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("hist-1")), + make_test_thread( + "Historical Thread", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Historical Thread", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + + // Switch to workspace 1 so we can verify the confirm switches back. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate_index(1, window, cx); + }); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 1 + ); + + // Confirm on the historical (non-live) thread at index 1. + // Before a previous fix, the workspace field was Option and + // historical threads had None, so activate_thread early-returned + // without switching the workspace. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 0 + ); + } + + #[gpui::test] + async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("t-1")), + make_test_thread( + "Thread A", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("t-2")), + make_test_thread( + "Thread B", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread A", " Thread B",] + ); + + // Keyboard confirm preserves selection. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + assert_eq!( + sidebar.read_with(cx, |sidebar, _| sidebar.selection), + Some(1) + ); + + // Click handlers clear selection to None so no highlight lingers + // after a click regardless of focus state. 
The hover style provides + // visual feedback during mouse interaction instead. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = None; + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + sidebar.toggle_collapse(&path_list, window, cx); + }); + assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None); + + // When the user tabs back into the sidebar, focus_in no longer + // restores selection — it stays None. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.focus_in(window, cx); + }); + assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None); + } + + #[gpui::test] + async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, &project, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Hi there!".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + + let session_id = active_session_id(&panel, cx); + save_thread_to_store(&session_id, &path_list, cx).await; + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Hello *"] + ); + + // Simulate the agent generating a title. The notification chain is: + // AcpThread::set_title emits TitleUpdated → + // ConnectionView::handle_thread_event calls cx.notify() → + // AgentPanel observer fires and emits AgentPanelEvent → + // Sidebar subscription calls update_entries / rebuild_contents. 
+ // + // Before the fix, handle_thread_event did NOT call cx.notify() for + // TitleUpdated, so the AgentPanel observer never fired and the + // sidebar kept showing the old title. + let thread = panel.read_with(cx, |panel, cx| panel.active_agent_thread(cx).unwrap()); + thread.update(cx, |thread, cx| { + thread + .set_title("Friendly Greeting with AI".into(), cx) + .detach(); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Friendly Greeting with AI *"] + ); + } + + #[gpui::test] + async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, &project_a, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); + + // Save a thread so it appears in the list. + let connection_a = StubAgentConnection::new(); + connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel_a, connection_a, cx); + send_message(&panel_a, cx); + let session_id_a = active_session_id(&panel_a, cx); + save_thread_to_store(&session_id_a, &path_list_a, cx).await; + + // Add a second workspace with its own agent panel. 
+ let fs = cx.update(|_, cx| ::global(cx)); + fs.as_fake() + .insert_tree("/project-b", serde_json::json!({ "src": {} })) + .await; + let project_b = project::Project::test(fs, ["/project-b".as_ref()], cx).await; + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + let panel_b = add_agent_panel(&workspace_b, &project_b, cx); + cx.run_until_parked(); + + let workspace_a = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].clone()); + + // ── 1. Initial state: no focused thread ────────────────────────────── + // Workspace B is active (just added), so its header is the active entry. + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread, None, + "Initially no thread should be focused" + ); + let active_entry = sidebar + .active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::ProjectHeader { .. })), + "Active entry should be the active workspace header" + ); + }); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_thread( + acp_thread::AgentSessionInfo { + session_id: session_id_a.clone(), + cwd: None, + title: Some("Test".into()), + updated_at: None, + meta: None, + }, + &workspace_a, + window, + cx, + ); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread.as_ref(), + Some(&session_id_a), + "After clicking a thread, it should be the focused thread" + ); + let active_entry = sidebar.active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::Thread(thread)) if thread.session_info.session_id == session_id_a), + "Active entry should be the clicked thread" + ); + }); + + workspace_a.read_with(cx, |workspace, cx| { + assert!( + workspace.panel::(cx).is_some(), + "Agent panel should exist" + ); + let dock = workspace.right_dock().read(cx); + assert!( + 
dock.is_open(), + "Clicking a thread should open the agent panel dock" + ); + }); + + let connection_b = StubAgentConnection::new(); + connection_b.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Thread B".into()), + )]); + open_thread_with_connection(&panel_b, connection_b, cx); + send_message(&panel_b, cx); + let session_id_b = active_session_id(&panel_b, cx); + let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]); + save_thread_to_store(&session_id_b, &path_list_b, cx).await; + cx.run_until_parked(); + + // Workspace A is currently active. Click a thread in workspace B, + // which also triggers a workspace switch. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_thread( + acp_thread::AgentSessionInfo { + session_id: session_id_b.clone(), + cwd: None, + title: Some("Thread B".into()), + updated_at: None, + meta: None, + }, + &workspace_b, + window, + cx, + ); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread.as_ref(), + Some(&session_id_b), + "Clicking a thread in another workspace should focus that thread" + ); + let active_entry = sidebar + .active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::Thread(thread)) if thread.session_info.session_id == session_id_b), + "Active entry should be the cross-workspace thread" + ); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate_next_workspace(window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread, None, + "External workspace switch should clear focused_thread" + ); + let active_entry = sidebar + .active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::ProjectHeader { .. 
})), + "Active entry should be the workspace header after external switch" + ); + }); + + let connection_b2 = StubAgentConnection::new(); + connection_b2.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("New thread".into()), + )]); + open_thread_with_connection(&panel_b, connection_b2, cx); + send_message(&panel_b, cx); + let session_id_b2 = active_session_id(&panel_b, cx); + save_thread_to_store(&session_id_b2, &path_list_b, cx).await; + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread.as_ref(), + Some(&session_id_b2), + "Opening a thread externally should set focused_thread" + ); + }); + + workspace_b.update_in(cx, |workspace, window, cx| { + workspace.focus_handle(cx).focus(window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread.as_ref(), + Some(&session_id_b2), + "Defocusing the sidebar should not clear focused_thread" + ); + }); + + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.activate_workspace(&workspace_b, window, cx); + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread, None, + "Clicking a workspace header should clear focused_thread" + ); + let active_entry = sidebar + .active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::ProjectHeader { .. })), + "Active entry should be the workspace header" + ); + }); + + // ── 8. Focusing the agent panel thread restores focused_thread ──── + // Workspace B still has session_id_b2 loaded in the agent panel. + // Clicking into the thread (simulated by focusing its view) should + // set focused_thread via the ThreadFocused event. 
+ panel_b.update_in(cx, |panel, window, cx| { + if let Some(thread_view) = panel.active_connection_view() { + thread_view.read(cx).focus_handle(cx).focus(window, cx); + } + }); + cx.run_until_parked(); + + sidebar.read_with(cx, |sidebar, _cx| { + assert_eq!( + sidebar.focused_thread.as_ref(), + Some(&session_id_b2), + "Focusing the agent panel thread should set focused_thread" + ); + let active_entry = sidebar + .active_entry_index + .and_then(|ix| sidebar.contents.entries.get(ix)); + assert!( + matches!(active_entry, Some(ListEntry::Thread(thread)) if thread.session_info.session_id == session_id_b2), + "Active entry should be the focused thread" + ); + }); } } diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index 3e06ede162dad37f94017207ccbd6ee5c38f26a5..e4cf78181aa43cce4a6692cc3c6c92e03b7bf9ad 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -19,11 +19,17 @@ rayon.workspace = true log.workspace = true ztracing.workspace = true tracing.workspace = true +proptest = { workspace = true, optional = true } [dev-dependencies] ctor.workspace = true rand.workspace = true +proptest.workspace = true zlog.workspace = true + [package.metadata.cargo-machete] ignored = ["tracing"] + +[features] +test-support = ["proptest"] \ No newline at end of file diff --git a/crates/sum_tree/src/property_test.rs b/crates/sum_tree/src/property_test.rs new file mode 100644 index 0000000000000000000000000000000000000000..d6c6bd76f94704c60dfc6919fa02ba66c19f349d --- /dev/null +++ b/crates/sum_tree/src/property_test.rs @@ -0,0 +1,32 @@ +use core::fmt::Debug; + +use proptest::{prelude::*, sample::SizeRange}; + +use crate::{Item, SumTree, Summary}; + +impl Arbitrary for SumTree +where + T: Debug + Arbitrary + Item + 'static, + T::Summary: Debug + Summary = ()>, +{ + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with((): Self::Parameters) -> Self::Strategy { + any::>() + .prop_map(|vec| SumTree::from_iter(vec, ())) + .boxed() 
+ } +} + +/// A strategy for producing a [`SumTree`] with a given size. +/// +/// Equivalent to [`proptest::collection::vec`]. +pub fn sum_tree(values: S, size: impl Into) -> impl Strategy> +where + T: Debug + Arbitrary + Item + 'static, + T::Summary: Debug + Summary = ()>, + S: Strategy, +{ + proptest::collection::vec(values, size).prop_map(|vec| SumTree::from_iter(vec, ())) +} diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 068bc4bce56816962a3b75d6f6497b033a9209a5..8ab9b5ccb1fdb3b28b3aa0dd93c7a732a21645cb 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,4 +1,6 @@ mod cursor; +#[cfg(any(test, feature = "test-support"))] +pub mod property_test; mod tree_map; use arrayvec::ArrayVec; diff --git a/crates/tab_switcher/Cargo.toml b/crates/tab_switcher/Cargo.toml index 36e4ba77342796ae5967e81cd34e01b8d41aecf6..e2855aa1696c3af0c3efeb2b927f968783978332 100644 --- a/crates/tab_switcher/Cargo.toml +++ b/crates/tab_switcher/Cargo.toml @@ -29,10 +29,8 @@ util.workspace = true workspace.workspace = true [dev-dependencies] -anyhow.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } serde_json.workspace = true theme = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index 539b2779cc85b5830af90aeb4ffd28596c2c29c3..a85c3565e2869e10f093a47f71024384e496fbd2 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -114,6 +114,7 @@ pub enum HideStrategy { pub struct TaskTemplates(pub Vec); impl TaskTemplates { + pub const FILE_NAME: &str = "tasks.json"; /// Generates JSON schema of Tasks JSON template format. 
pub fn generate_json_schema() -> serde_json::Value { let schema = schemars::generate::SchemaSettings::draft2019_09() diff --git a/crates/terminal/Cargo.toml b/crates/terminal/Cargo.toml index ee29546b81c32038e85805850bc07111fca81af7..fcb637f14b3785cf2d11b68b8cbf60934f055df4 100644 --- a/crates/terminal/Cargo.toml +++ b/crates/terminal/Cargo.toml @@ -49,6 +49,5 @@ windows.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } rand.workspace = true -serde_json.workspace = true settings = { workspace = true, features = ["test-support"] } util_macros.workspace = true diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 0fa3b37e1501ed6407d18b07e0b2188ce5e77cf7..56cca7cb40195298ed0479fc43c8b13b6c577249 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -415,6 +415,8 @@ impl TerminalBuilder { event_loop_task: Task::ready(Ok(())), background_executor: background_executor.clone(), path_style, + #[cfg(any(test, feature = "test-support"))] + input_log: Vec::new(), }; Ok(TerminalBuilder { @@ -646,6 +648,8 @@ impl TerminalBuilder { event_loop_task: Task::ready(Ok(())), background_executor, path_style, + #[cfg(any(test, feature = "test-support"))] + input_log: Vec::new(), }; if !activation_script.is_empty() && no_task { @@ -870,6 +874,8 @@ pub struct Terminal { event_loop_task: Task>, background_executor: BackgroundExecutor, path_style: PathStyle, + #[cfg(any(test, feature = "test-support"))] + input_log: Vec>, } struct CopyTemplate { @@ -1451,9 +1457,18 @@ impl Terminal { .push_back(InternalEvent::Scroll(AlacScroll::Bottom)); self.events.push_back(InternalEvent::SetSelection(None)); + let input = input.into(); + #[cfg(any(test, feature = "test-support"))] + self.input_log.push(input.to_vec()); + self.write_to_pty(input); } + #[cfg(any(test, feature = "test-support"))] + pub fn take_input_log(&mut self) -> Vec> { + std::mem::take(&mut self.input_log) + } + pub fn 
toggle_vi_mode(&mut self) { self.events.push_back(InternalEvent::ToggleViMode); } diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index ef31480341ddc873e00612b471217899836a3bd1..6fc1d4ae710a342b2d275b6dd5713d37a14b1da6 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -48,11 +48,10 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } -rand.workspace = true +terminal = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } [package.metadata.cargo-machete] diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 88bde3c771f72a0771a405cfbf123ac4e2286ad9..93b9e651191e791da8bbda35600c3db001b46d90 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -1,4 +1,4 @@ -use std::{cmp, ops::ControlFlow, path::PathBuf, process::ExitStatus, sync::Arc, time::Duration}; +use std::{cmp, path::PathBuf, process::ExitStatus, sync::Arc, time::Duration}; use crate::{ TerminalView, default_working_directory, @@ -12,11 +12,11 @@ use db::kvp::KEY_VALUE_STORE; use futures::{channel::oneshot, future::join_all}; use gpui::{ Action, AnyView, App, AsyncApp, AsyncWindowContext, Context, Corner, Entity, EventEmitter, - ExternalPaths, FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, - Task, WeakEntity, Window, actions, + FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled, Task, WeakEntity, + Window, actions, }; use itertools::Itertools; -use project::{Fs, Project, ProjectEntryId}; +use project::{Fs, Project}; use settings::{Settings, TerminalDockPosition}; use 
task::{RevealStrategy, RevealTarget, Shell, ShellBuilder, SpawnInTerminal, TaskId}; @@ -28,13 +28,13 @@ use ui::{ use util::{ResultExt, TryFutureExt}; use workspace::{ ActivateNextPane, ActivatePane, ActivatePaneDown, ActivatePaneLeft, ActivatePaneRight, - ActivatePaneUp, ActivatePreviousPane, DraggedSelection, DraggedTab, ItemId, MoveItemToPane, + ActivatePaneUp, ActivatePreviousPane, DraggedTab, ItemId, MoveItemToPane, MoveItemToPaneInDirection, MovePaneDown, MovePaneLeft, MovePaneRight, MovePaneUp, Pane, PaneGroup, SplitDirection, SplitDown, SplitLeft, SplitMode, SplitRight, SplitUp, SwapPaneDown, SwapPaneLeft, SwapPaneRight, SwapPaneUp, ToggleZoom, Workspace, dock::{DockPosition, Panel, PanelEvent, PanelHandle}, item::SerializableItem, - move_active_item, move_item, pane, + move_active_item, pane, }; use anyhow::{Result, anyhow}; @@ -133,7 +133,11 @@ impl TerminalPanel { } } - fn apply_tab_bar_buttons(&self, terminal_pane: &Entity, cx: &mut Context) { + pub(crate) fn apply_tab_bar_buttons( + &self, + terminal_pane: &Entity, + cx: &mut Context, + ) { let assistant_tab_bar_button = self.assistant_tab_bar_button.clone(); terminal_pane.update(cx, |pane, cx| { pane.set_render_tab_bar_buttons(cx, move |pane, window, cx| { @@ -1187,7 +1191,6 @@ pub fn new_terminal_pane( window: &mut Window, cx: &mut Context, ) -> Entity { - let is_local = project.read(cx).is_local(); let terminal_panel = cx.entity(); let pane = cx.new(|cx| { let mut pane = Pane::new( @@ -1245,113 +1248,6 @@ pub fn new_terminal_pane( toolbar.add_item(breadcrumbs, window, cx); }); - let drop_closure_project = project.downgrade(); - let drop_closure_terminal_panel = terminal_panel.downgrade(); - pane.set_custom_drop_handle(cx, move |pane, dropped_item, window, cx| { - let Some(project) = drop_closure_project.upgrade() else { - return ControlFlow::Break(()); - }; - if let Some(tab) = dropped_item.downcast_ref::() { - let this_pane = cx.entity(); - let item = if tab.pane == this_pane { - 
pane.item_for_index(tab.ix) - } else { - tab.pane.read(cx).item_for_index(tab.ix) - }; - if let Some(item) = item { - if item.downcast::().is_some() { - let source = tab.pane.clone(); - let item_id_to_move = item.item_id(); - - // If no split direction, let the regular pane drop handler take care of it - let Some(split_direction) = pane.drag_split_direction() else { - return ControlFlow::Continue(()); - }; - - // Gather data synchronously before deferring - let is_zoomed = drop_closure_terminal_panel - .upgrade() - .map(|terminal_panel| { - let terminal_panel = terminal_panel.read(cx); - if terminal_panel.active_pane == this_pane { - pane.is_zoomed() - } else { - terminal_panel.active_pane.read(cx).is_zoomed() - } - }) - .unwrap_or(false); - - let workspace = workspace.clone(); - let terminal_panel = drop_closure_terminal_panel.clone(); - - // Defer the split operation to avoid re-entrancy panic. - // The pane may be the one currently being updated, so we cannot - // call mark_positions (via split) synchronously. 
- cx.spawn_in(window, async move |_, cx| { - cx.update(|window, cx| { - let Ok(new_pane) = - terminal_panel.update(cx, |terminal_panel, cx| { - let new_pane = new_terminal_pane( - workspace, project, is_zoomed, window, cx, - ); - terminal_panel.apply_tab_bar_buttons(&new_pane, cx); - terminal_panel.center.split( - &this_pane, - &new_pane, - split_direction, - cx, - ); - new_pane - }) - else { - return; - }; - - move_item( - &source, - &new_pane, - item_id_to_move, - new_pane.read(cx).active_item_index(), - true, - window, - cx, - ); - }) - .ok(); - }) - .detach(); - } else if let Some(project_path) = item.project_path(cx) - && let Some(entry_path) = project.read(cx).absolute_path(&project_path, cx) - { - add_paths_to_terminal(pane, &[entry_path], window, cx); - } - } - } else if let Some(selection) = dropped_item.downcast_ref::() { - let project = project.read(cx); - let paths_to_add = selection - .items() - .map(|selected_entry| selected_entry.entry_id) - .filter_map(|entry_id| project.path_for_entry(entry_id, cx)) - .filter_map(|project_path| project.absolute_path(&project_path, cx)) - .collect::>(); - if !paths_to_add.is_empty() { - add_paths_to_terminal(pane, &paths_to_add, window, cx); - } - } else if let Some(&entry_id) = dropped_item.downcast_ref::() { - if let Some(entry_path) = project - .read(cx) - .path_for_entry(entry_id, cx) - .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)) - { - add_paths_to_terminal(pane, &[entry_path], window, cx); - } - } else if is_local && let Some(paths) = dropped_item.downcast_ref::() { - add_paths_to_terminal(pane, paths.paths(), window, cx); - } - - ControlFlow::Break(()) - }); - pane }); @@ -1376,27 +1272,6 @@ async fn wait_for_terminals_tasks( join_all(pending_tasks).await; } -fn add_paths_to_terminal( - pane: &mut Pane, - paths: &[PathBuf], - window: &mut Window, - cx: &mut Context, -) { - if let Some(terminal_view) = pane - .active_item() - .and_then(|item| item.downcast::()) - { - 
window.focus(&terminal_view.focus_handle(cx), cx); - let mut new_text = paths.iter().map(|path| format!(" {path:?}")).join(""); - new_text.push(' '); - terminal_view.update(cx, |terminal_view, cx| { - terminal_view.terminal().update(cx, |terminal, _| { - terminal.paste(&new_text); - }); - }); - } -} - struct FailedToSpawnTerminal { error: String, focus_handle: FocusHandle, diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index eaba1f22682a759d8cfce42e555ca692cee9ada6..e4ed410ef79897770d2a27aaef10017b1d284390 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -8,18 +8,20 @@ mod terminal_slash_command; use assistant_slash_command::SlashCommandRegistry; use editor::{Editor, EditorSettings, actions::SelectAll, blink_manager::BlinkManager}; use gpui::{ - Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, FocusHandle, - Focusable, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Point, - Render, ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, anchored, deferred, - div, + Action, AnyElement, App, ClipboardEntry, DismissEvent, Entity, EventEmitter, ExternalPaths, + FocusHandle, Focusable, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, + Pixels, Point, Render, ScrollWheelEvent, Styled, Subscription, Task, WeakEntity, actions, + anchored, deferred, div, }; +use itertools::Itertools; use menu; use persistence::TERMINAL_DB; -use project::{Project, search::SearchQuery}; +use project::{Project, ProjectEntryId, search::SearchQuery}; use schemars::JsonSchema; use serde::Deserialize; use settings::{Settings, SettingsStore, TerminalBlink, WorkingDirectory}; use std::{ + any::Any, cmp, ops::{Range, RangeInclusive}, path::{Path, PathBuf}, @@ -50,8 +52,8 @@ use ui::{ }; use util::ResultExt; use workspace::{ - CloseActiveItem, NewCenterTerminal, NewTerminal, ToolbarItemLocation, Workspace, 
WorkspaceId, - delete_unloaded_items, + CloseActiveItem, DraggedSelection, DraggedTab, NewCenterTerminal, NewTerminal, Pane, + ToolbarItemLocation, Workspace, WorkspaceId, delete_unloaded_items, item::{ BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent, }, @@ -833,6 +835,15 @@ impl TerminalView { }); } + fn add_paths_to_terminal(&self, paths: &[PathBuf], window: &mut Window, cx: &mut App) { + let mut text = paths.iter().map(|path| format!(" {path:?}")).join(""); + text.push(' '); + window.focus(&self.focus_handle(cx), cx); + self.terminal.update(cx, |terminal, _| { + terminal.paste(&text); + }); + } + fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context) { self.clear_bell(cx); self.terminal.update(cx, |term, _| { @@ -1412,6 +1423,154 @@ impl Item for TerminalView { None } + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool { + let Some(project) = self.project.upgrade() else { + return false; + }; + + if let Some(paths) = dropped.downcast_ref::() { + let is_local = project.read(cx).is_local(); + if is_local { + self.add_paths_to_terminal(paths.paths(), window, cx); + return true; + } + + return false; + } else if let Some(tab) = dropped.downcast_ref::() { + let Some(self_handle) = self.self_handle.upgrade() else { + return false; + }; + + let Some(workspace) = self.workspace.upgrade() else { + return false; + }; + + let Some(this_pane) = workspace.read(cx).pane_for(&self_handle) else { + return false; + }; + + let item = if tab.pane == this_pane { + active_pane.item_for_index(tab.ix) + } else { + tab.pane.read(cx).item_for_index(tab.ix) + }; + + let Some(item) = item else { + return false; + }; + + if item.downcast::().is_some() { + let Some(split_direction) = active_pane.drag_split_direction() else { + return false; + }; + + let Some(terminal_panel) = workspace.read(cx).panel::(cx) else { + return false; + }; + + if 
!terminal_panel.read(cx).center.panes().contains(&&this_pane) { + return false; + } + + let source = tab.pane.clone(); + let item_id_to_move = item.item_id(); + let is_zoomed = { + let terminal_panel = terminal_panel.read(cx); + if terminal_panel.active_pane == this_pane { + active_pane.is_zoomed() + } else { + terminal_panel.active_pane.read(cx).is_zoomed() + } + }; + + let workspace = workspace.downgrade(); + let terminal_panel = terminal_panel.downgrade(); + // Defer the split operation to avoid re-entrancy panic. + // The pane may be the one currently being updated, so we cannot + // call mark_positions (via split) synchronously. + window + .spawn(cx, async move |cx| { + cx.update(|window, cx| { + let Ok(new_pane) = terminal_panel.update(cx, |terminal_panel, cx| { + let new_pane = terminal_panel::new_terminal_pane( + workspace, project, is_zoomed, window, cx, + ); + terminal_panel.apply_tab_bar_buttons(&new_pane, cx); + terminal_panel.center.split( + &this_pane, + &new_pane, + split_direction, + cx, + ); + anyhow::Ok(new_pane) + }) else { + return; + }; + + let Some(new_pane) = new_pane.log_err() else { + return; + }; + + workspace::move_item( + &source, + &new_pane, + item_id_to_move, + new_pane.read(cx).active_item_index(), + true, + window, + cx, + ); + }) + .ok(); + }) + .detach(); + + return true; + } else { + if let Some(project_path) = item.project_path(cx) + && let Some(path) = project.read(cx).absolute_path(&project_path, cx) + { + self.add_paths_to_terminal(&[path], window, cx); + return true; + } + } + + return false; + } else if let Some(selection) = dropped.downcast_ref::() { + let project = project.read(cx); + let paths = selection + .items() + .map(|selected_entry| selected_entry.entry_id) + .filter_map(|entry_id| project.path_for_entry(entry_id, cx)) + .filter_map(|project_path| project.absolute_path(&project_path, cx)) + .collect::>(); + + if !paths.is_empty() { + self.add_paths_to_terminal(&paths, window, cx); + } + + return true; + } else if 
let Some(&entry_id) = dropped.downcast_ref::() { + let project = project.read(cx); + if let Some(path) = project + .path_for_entry(entry_id, cx) + .and_then(|project_path| project.absolute_path(&project_path, cx)) + { + self.add_paths_to_terminal(&[path], window, cx); + } + + return true; + } + + false + } + fn tab_extra_context_menu_actions( &self, _window: &mut Window, @@ -1840,10 +1999,46 @@ mod tests { use super::*; use gpui::TestAppContext; use project::{Entry, Project, ProjectPath, Worktree}; - use std::path::Path; + use std::path::{Path, PathBuf}; use util::paths::PathStyle; use util::rel_path::RelPath; - use workspace::{AppState, MultiWorkspace}; + use workspace::item::test::{TestItem, TestProjectItem}; + use workspace::{AppState, MultiWorkspace, SelectedEntry}; + + fn expected_drop_text(paths: &[PathBuf]) -> String { + let mut text = String::new(); + for path in paths { + text.push(' '); + text.push_str(&format!("{path:?}")); + } + text.push(' '); + text + } + + fn assert_drop_writes_to_terminal( + pane: &Entity, + terminal_view_index: usize, + terminal: &Entity, + dropped: &dyn Any, + expected_text: &str, + window: &mut Window, + cx: &mut Context, + ) { + let _ = terminal.update(cx, |terminal, _| terminal.take_input_log()); + + let handled = pane.update(cx, |pane, cx| { + pane.item_for_index(terminal_view_index) + .unwrap() + .handle_drop(pane, dropped, window, cx) + }); + assert!(handled, "handle_drop should return true for {:?}", dropped); + + let mut input_log = terminal.update(cx, |terminal, _| terminal.take_input_log()); + assert_eq!(input_log.len(), 1, "expected exactly one write to terminal"); + let written = + String::from_utf8(input_log.remove(0)).expect("terminal write should be valid UTF-8"); + assert_eq!(written, expected_text); + } // Working directory calculation tests @@ -1972,24 +2167,7 @@ mod tests { let (project, _workspace) = init_test(cx).await; let (wt, _entry) = create_folder_wt(project.clone(), "/root/", cx).await; - let entry = cx 
- .update(|cx| { - wt.update(cx, |wt, cx| { - wt.create_entry( - RelPath::new(Path::new("src/main.rs"), PathStyle::local()) - .unwrap() - .as_ref() - .into(), - false, - None, - cx, - ) - }) - }) - .await - .unwrap() - .into_included() - .unwrap(); + let entry = create_file_in_worktree(wt.clone(), "src/main.rs", cx).await; insert_active_entry_for(wt, entry, project.clone(), cx); cx.update(|cx| { @@ -2014,6 +2192,18 @@ mod tests { /// Creates a worktree with 1 file: /root.txt pub async fn init_test(cx: &mut TestAppContext) -> (Entity, Entity) { + let (project, workspace, _) = init_test_with_window(cx).await; + (project, workspace) + } + + /// Creates a worktree with 1 file /root.txt and returns the project, workspace, and window handle. + async fn init_test_with_window( + cx: &mut TestAppContext, + ) -> ( + Entity, + Entity, + gpui::WindowHandle, + ) { let params = cx.update(AppState::test); cx.update(|cx| { theme::init(theme::LoadThemes::JustBase, cx); @@ -2026,7 +2216,32 @@ mod tests { .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); - (project, workspace) + (project, workspace, window_handle) + } + + /// Creates a file in the given worktree and returns its entry. 
+ async fn create_file_in_worktree( + worktree: Entity, + relative_path: impl AsRef, + cx: &mut TestAppContext, + ) -> Entry { + cx.update(|cx| { + worktree.update(cx, |worktree, cx| { + worktree.create_entry( + RelPath::new(relative_path.as_ref(), PathStyle::local()) + .unwrap() + .as_ref() + .into(), + false, + None, + cx, + ) + }) + }) + .await + .unwrap() + .into_included() + .unwrap() } /// Creates a worktree with 1 folder: /root{suffix}/ @@ -2089,6 +2304,183 @@ mod tests { }); } + // Terminal drag/drop test + + #[gpui::test] + async fn test_handle_drop_writes_paths_for_all_drop_types(cx: &mut TestAppContext) { + let (project, _workspace, window_handle) = init_test_with_window(cx).await; + + let (worktree, _) = create_folder_wt(project.clone(), "/root/", cx).await; + let first_entry = create_file_in_worktree(worktree.clone(), "first.txt", cx).await; + let second_entry = create_file_in_worktree(worktree.clone(), "second.txt", cx).await; + + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); + let first_path = project + .read_with(cx, |project, cx| { + project.absolute_path( + &ProjectPath { + worktree_id, + path: first_entry.path.clone(), + }, + cx, + ) + }) + .unwrap(); + let second_path = project + .read_with(cx, |project, cx| { + project.absolute_path( + &ProjectPath { + worktree_id, + path: second_entry.path.clone(), + }, + cx, + ) + }) + .unwrap(); + + let (active_pane, terminal, terminal_view, tab_item) = window_handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspace().clone(); + let active_pane = workspace.read(cx).active_pane().clone(); + + let terminal = cx.new(|cx| { + terminal::TerminalBuilder::new_display_only( + CursorShape::default(), + terminal::terminal_settings::AlternateScroll::On, + None, + 0, + cx.background_executor(), + PathStyle::local(), + ) + .unwrap() + .subscribe(cx) + }); + let terminal_view = cx.new(|cx| { + TerminalView::new( + terminal.clone(), + workspace.downgrade(), + 
None, + project.downgrade(), + window, + cx, + ) + }); + + active_pane.update(cx, |pane, cx| { + pane.add_item( + Box::new(terminal_view.clone()), + true, + false, + None, + window, + cx, + ); + }); + + let tab_project_item = cx.new(|_| TestProjectItem { + entry_id: Some(second_entry.id), + project_path: Some(ProjectPath { + worktree_id, + path: second_entry.path.clone(), + }), + is_dirty: false, + }); + let tab_item = + cx.new(|cx| TestItem::new(cx).with_project_items(&[tab_project_item])); + active_pane.update(cx, |pane, cx| { + pane.add_item(Box::new(tab_item.clone()), true, false, None, window, cx); + }); + + (active_pane, terminal, terminal_view, tab_item) + }) + .unwrap(); + + cx.run_until_parked(); + + window_handle + .update(cx, |multi_workspace, window, cx| { + let workspace = multi_workspace.workspace().clone(); + let terminal_view_index = + active_pane.read(cx).index_for_item(&terminal_view).unwrap(); + let dragged_tab_index = active_pane.read(cx).index_for_item(&tab_item).unwrap(); + + assert!( + workspace.read(cx).pane_for(&terminal_view).is_some(), + "terminal view not registered with workspace after run_until_parked" + ); + + // Dragging an external file should write its path to the terminal + let external_paths = ExternalPaths(vec![first_path.clone()].into()); + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &external_paths, + &expected_drop_text(std::slice::from_ref(&first_path)), + window, + cx, + ); + + // Dragging a tab should write the path of the tab's item to the terminal + let dragged_tab = DraggedTab { + pane: active_pane.clone(), + item: Box::new(tab_item.clone()), + ix: dragged_tab_index, + detail: 0, + is_active: false, + }; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dragged_tab, + &expected_drop_text(std::slice::from_ref(&second_path)), + window, + cx, + ); + + // Dragging multiple selections should write both paths to the terminal + let 
dragged_selection = DraggedSelection { + active_selection: SelectedEntry { + worktree_id, + entry_id: first_entry.id, + }, + marked_selections: Arc::from([ + SelectedEntry { + worktree_id, + entry_id: first_entry.id, + }, + SelectedEntry { + worktree_id, + entry_id: second_entry.id, + }, + ]), + }; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dragged_selection, + &expected_drop_text(&[first_path.clone(), second_path.clone()]), + window, + cx, + ); + + // Dropping a project entry should write the entry's path to the terminal + let dropped_entry_id = first_entry.id; + assert_drop_writes_to_terminal( + &active_pane, + terminal_view_index, + &terminal, + &dropped_entry_id, + &expected_drop_text(&[first_path]), + window, + cx, + ); + }) + .unwrap(); + } + // Terminal rename tests #[gpui::test] diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index ed02381eb83db5daececd159171a90072244a340..4dc186b374719bdf0112243160d09c14e0bc5970 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -35,5 +35,4 @@ ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } rand.workspace = true util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 63e0570e91ef08dfce02fbbca25e97ee7519dc0a..5c4cce0f11d7db7b7593631e796c0f5e3d50adab 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -15,8 +15,8 @@ pub struct Anchor { // we store the replica id and sequence number of the timestamp inline // to avoid the alignment of our fields from increasing the size of this struct // This saves 8 bytes, by allowing replica id, value and bias to occupy the padding - timestamp_replica_id: clock::ReplicaId, - timestamp_value: clock::Seq, + pub(crate) timestamp_replica_id: clock::ReplicaId, + pub(crate) timestamp_value: clock::Seq, /// The 
byte offset into the text inserted in the operation /// at `timestamp`. diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a5bdbe443bbaa4723c8d3104bfed28e4c2fe8fdb..a991a72df40c502a90aa0b82191b37c54b3f8de2 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2379,13 +2379,22 @@ impl BufferSnapshot { anchor ); }; + // TODO verbose debug because we are seeing is_max return false unexpectedly, + // remove this once that is understood and fixed assert_eq!( insertion.timestamp, anchor.timestamp(), - "invalid insertion for buffer {}@{:?} and anchor {:?}", + "invalid insertion for buffer {}@{:?}. anchor: {:?}, {:?}, {:?}, {:?}, {:?}. timestamp: {:?}, offset: {:?}, bias: {:?}", self.remote_id(), self.version, - anchor + anchor.timestamp_replica_id, + anchor.timestamp_value, + anchor.offset, + anchor.bias, + anchor.buffer_id, + anchor.timestamp() == clock::Lamport::MAX, + anchor.offset == u32::MAX, + anchor.bias == Bias::Right, ); fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left); diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index a9988d498e463edb463175ec19867fa6624479e5..b5c10835c6bf85ea24db1ff9bad5abbbf3b517ee 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -18,9 +18,9 @@ stories = ["dep:story"] test-support = [ "call/test-support", "client/test-support", - "collections/test-support", + "gpui/test-support", - "http_client/test-support", + "project/test-support", "remote/test-support", "util/test-support", @@ -65,17 +65,13 @@ windows.workspace = true [dev-dependencies] call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } -collections = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } notifications = { workspace = true, features = ["test-support"] } -pretty_assertions.workspace = 
true project = { workspace = true, features = ["test-support"] } release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } semver.workspace = true settings = { workspace = true, features = ["test-support"] } -tree-sitter-md.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index f00a71a305e306ba9201e5a4976382012ae0059e..96cc929c06039c14a9ce4eaa05fd067fbd95b7d0 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -151,6 +151,7 @@ pub struct TitleBar { user_store: Entity, client: Arc, workspace: WeakEntity, + multi_workspace: Option>, application_menu: Option>, _subscriptions: Vec, banner: Entity, @@ -188,7 +189,7 @@ impl Render for TitleBar { .when(title_bar_settings.show_project_items, |title_bar| { title_bar .children(self.render_project_host(cx)) - .child(self.render_project_name(cx)) + .child(self.render_project_name(window, cx)) }) .when(title_bar_settings.show_branch_name, |title_bar| { title_bar.children(self.render_project_branch(cx)) @@ -389,6 +390,7 @@ impl TitleBar { if let Some(this) = this.upgrade() { this.update(cx, |this, _| { this._subscriptions.push(subscription); + this.multi_workspace = Some(multi_workspace.downgrade()); }); } }); @@ -400,6 +402,7 @@ impl TitleBar { platform_titlebar, application_menu, workspace: workspace.weak_handle(), + multi_workspace: None, project, user_store, client, @@ -709,7 +712,7 @@ impl TitleBar { .indicator_border_color(Some(cx.theme().colors().title_bar_background)) }) .tooltip(move |_, cx| { - Tooltip::for_action("Open Workspace Sidebar", &ToggleWorkspaceSidebar, cx) + Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx) }) .on_click(|_, window, cx| { 
window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); @@ -718,7 +721,11 @@ impl TitleBar { ) } - pub fn render_project_name(&self, cx: &mut Context) -> impl IntoElement { + pub fn render_project_name( + &self, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement { let workspace = self.workspace.clone(); let name = self.effective_active_worktree(cx).map(|worktree| { @@ -734,6 +741,19 @@ impl TitleBar { "Open Recent Project".to_string() }; + let is_sidebar_open = self.platform_titlebar.read(cx).is_workspace_sidebar_open(); + + if is_sidebar_open { + return self + .render_project_name_with_sidebar_popover( + window, + display_name, + is_project_selected, + cx, + ) + .into_any_element(); + } + let focus_handle = workspace .upgrade() .map(|w| w.read(cx).focus_handle(cx)) @@ -773,6 +793,49 @@ impl TitleBar { .into_any_element() } + fn render_project_name_with_sidebar_popover( + &self, + _window: &Window, + display_name: String, + is_project_selected: bool, + cx: &mut Context, + ) -> impl IntoElement { + let multi_workspace = self.multi_workspace.clone(); + + let is_popover_deployed = multi_workspace + .as_ref() + .and_then(|mw| mw.upgrade()) + .map(|mw| mw.read(cx).is_recent_projects_popover_deployed(cx)) + .unwrap_or(false); + + Button::new("project_name_trigger", display_name) + .label_size(LabelSize::Small) + .when(self.worktree_count(cx) > 1, |this| { + this.icon(IconName::ChevronDown) + .icon_color(Color::Muted) + .icon_size(IconSize::XSmall) + }) + .toggle_state(is_popover_deployed) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .when(!is_project_selected, |s| s.color(Color::Muted)) + .tooltip(move |_window, cx| { + Tooltip::for_action( + "Recent Projects", + &zed_actions::OpenRecent { + create_new_window: false, + }, + cx, + ) + }) + .on_click(move |_, window, cx| { + if let Some(mw) = multi_workspace.as_ref().and_then(|mw| mw.upgrade()) { + mw.update(cx, |mw, cx| { + mw.toggle_recent_projects_popover(window, cx); + }); + } + 
}) + } + pub fn render_project_branch(&self, cx: &mut Context) -> Option { let effective_worktree = self.effective_active_worktree(cx)?; let repository = self.get_repository_for_worktree(&effective_worktree, cx)?; @@ -1014,9 +1077,9 @@ impl TitleBar { let user_store = user_store.clone(); let organization = organization.clone(); move |_window, cx| { - user_store.update(cx, |user_store, _cx| { + user_store.update(cx, |user_store, cx| { user_store - .set_current_organization(organization.clone()); + .set_current_organization(organization.clone(), cx); }); } }, diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index cce736e237e2c2500b56f13ae579dee4426b5bfb..ef344529cd92efcbf8f57d192c44bbb53befc25e 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -12,6 +12,7 @@ mod disclosure; mod divider; mod dropdown_menu; mod facepile; +mod gradient_fade; mod group; mod icon; mod image; @@ -54,6 +55,7 @@ pub use disclosure::*; pub use divider::*; pub use dropdown_menu::*; pub use facepile::*; +pub use gradient_fade::*; pub use group::*; pub use icon::*; pub use image::*; diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 6cc710690ea0103bf2de4253bc405eb52be5af69..3c08bd946710f76ccf49f933b82091a3bcb06e08 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -1,9 +1,9 @@ use crate::{ - DecoratedIcon, DiffStat, HighlightedLabel, IconDecoration, IconDecorationKind, SpinnerLabel, - prelude::*, + DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, IconDecorationKind, + SpinnerLabel, prelude::*, }; -use gpui::{AnyView, ClickEvent, SharedString}; +use gpui::{AnyView, ClickEvent, Hsla, SharedString}; #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum AgentThreadStatus { @@ -18,12 +18,13 @@ pub enum AgentThreadStatus { pub struct ThreadItem { id: ElementId, icon: IconName, + custom_icon_from_external_svg: 
Option, title: SharedString, timestamp: SharedString, - running: bool, - generation_done: bool, + notified: bool, status: AgentThreadStatus, selected: bool, + focused: bool, hovered: bool, added: Option, removed: Option, @@ -41,12 +42,13 @@ impl ThreadItem { Self { id: id.into(), icon: IconName::ZedAgent, + custom_icon_from_external_svg: None, title: title.into(), timestamp: "".into(), - running: false, - generation_done: false, + notified: false, status: AgentThreadStatus::default(), selected: false, + focused: false, hovered: false, added: None, removed: None, @@ -70,13 +72,13 @@ impl ThreadItem { self } - pub fn running(mut self, running: bool) -> Self { - self.running = running; + pub fn custom_icon_from_external_svg(mut self, svg: impl Into) -> Self { + self.custom_icon_from_external_svg = Some(svg.into()); self } - pub fn generation_done(mut self, generation_done: bool) -> Self { - self.generation_done = generation_done; + pub fn notified(mut self, notified: bool) -> Self { + self.notified = notified; self } @@ -90,6 +92,11 @@ impl ThreadItem { self } + pub fn focused(mut self, focused: bool) -> Self { + self.focused = focused; + self + } + pub fn added(mut self, added: usize) -> Self { self.added = Some(added); self @@ -146,7 +153,7 @@ impl ThreadItem { impl RenderOnce for ThreadItem { fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { - let clr = cx.theme().colors(); + let color = cx.theme().colors(); // let dot_separator = || { // Label::new("•") // .size(LabelSize::Small) @@ -154,50 +161,35 @@ impl RenderOnce for ThreadItem { // .alpha(0.5) // }; - let icon_container = || h_flex().size_4().justify_center(); - let agent_icon = Icon::new(self.icon) - .color(Color::Muted) - .size(IconSize::Small); + let icon_container = || h_flex().size_4().flex_none().justify_center(); + let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg { + Icon::from_external_svg(custom_svg) + .color(Color::Muted) + .size(IconSize::Small) + } 
else { + Icon::new(self.icon) + .color(Color::Muted) + .size(IconSize::Small) + }; - let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation { - Some( - IconDecoration::new( - IconDecorationKind::Triangle, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().warning) + let decoration = |icon: IconDecorationKind, color: Hsla| { + IconDecoration::new(icon, cx.theme().colors().surface_background, cx) + .color(color) .position(gpui::Point { x: px(-2.), y: px(-2.), - }), - ) + }) + }; + + let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation { + Some(decoration( + IconDecorationKind::Triangle, + cx.theme().status().warning, + )) } else if self.status == AgentThreadStatus::Error { - Some( - IconDecoration::new( - IconDecorationKind::X, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().error) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), - ) - } else if self.generation_done { - Some( - IconDecoration::new( - IconDecorationKind::Dot, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().colors().text_accent) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), - ) + Some(decoration(IconDecorationKind::X, cx.theme().status().error)) + } else if self.notified { + Some(decoration(IconDecorationKind::Dot, color.text_accent)) } else { None }; @@ -208,23 +200,40 @@ impl RenderOnce for ThreadItem { icon_container().child(agent_icon) }; - let running_or_action = self.running || (self.hovered && self.action_slot.is_some()); - - // let has_no_changes = self.added.is_none() && self.removed.is_none(); + let is_running = matches!( + self.status, + AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation + ); + let running_or_action = is_running || (self.hovered && self.action_slot.is_some()); let title = self.title; let highlight_positions = self.highlight_positions; let title_label = if highlight_positions.is_empty() { - 
Label::new(title).truncate().into_any_element() + Label::new(title).into_any_element() + } else { + HighlightedLabel::new(title, highlight_positions).into_any_element() + }; + + let base_bg = if self.selected { + color.element_active } else { - HighlightedLabel::new(title, highlight_positions) - .truncate() - .into_any_element() + color.panel_background }; + let gradient_overlay = + GradientFade::new(base_bg, color.element_hover, color.element_active) + .width(px(32.0)) + .right(px(-10.0)) + .gradient_stop(0.8) + .group_name("thread-item"); + v_flex() .id(self.id.clone()) + .group("thread-item") + .relative() + .overflow_hidden() .cursor_pointer() + .w_full() .map(|this| { if self.worktree.is_some() { this.p_2() @@ -232,8 +241,11 @@ impl RenderOnce for ThreadItem { this.px_2().py_1() } }) - .when(self.selected, |s| s.bg(clr.element_active)) - .hover(|s| s.bg(clr.element_hover)) + .when(self.selected, |s| s.bg(color.element_active)) + .border_1() + .border_color(gpui::transparent_black()) + .when(self.focused, |s| s.border_color(color.panel_focused_border)) + .hover(|s| s.bg(color.element_hover)) .on_hover(self.on_hover) .child( h_flex() @@ -251,11 +263,12 @@ impl RenderOnce for ThreadItem { .child(title_label) .when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip)), ) + .child(gradient_overlay) .when(running_or_action, |this| { this.child( h_flex() .gap_1() - .when(self.running, |this| { + .when(is_running, |this| { this.child( icon_container() .child(SpinnerLabel::new().color(Color::Accent)), @@ -273,7 +286,6 @@ impl RenderOnce for ThreadItem { Label::new(worktree) .size(LabelSize::Small) .color(Color::Muted) - .truncate_start() .into_any_element() } else { HighlightedLabel::new(worktree, worktree_highlight_positions) @@ -347,12 +359,12 @@ impl Component for ThreadItem { .into_any_element(), ), single_example( - "Generation Done", + "Notified", container() .child( ThreadItem::new("ti-2", "Refine thread view scrolling behavior") .timestamp("12:12 AM") - 
.generation_done(true), + .notified(true), ) .into_any_element(), ), @@ -383,7 +395,7 @@ impl Component for ThreadItem { ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock") .icon(IconName::AiClaude) .timestamp("7:30 PM") - .running(true), + .status(AgentThreadStatus::Running), ) .into_any_element(), ), @@ -421,6 +433,29 @@ impl Component for ThreadItem { ) .into_any_element(), ), + single_example( + "Focused Item (Keyboard Selection)", + container() + .child( + ThreadItem::new("ti-7", "Implement keyboard navigation") + .icon(IconName::AiClaude) + .timestamp("4:00 PM") + .focused(true), + ) + .into_any_element(), + ), + single_example( + "Selected + Focused", + container() + .child( + ThreadItem::new("ti-8", "Active and keyboard-focused thread") + .icon(IconName::AiGemini) + .timestamp("5:00 PM") + .selected(true) + .focused(true), + ) + .into_any_element(), + ), ]; Some( diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 76ed64850c92e274bd8aeca483dd197cfbccbf52..3da30838ca8313b68608e432ce1e76870157c1fd 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -18,216 +18,9 @@ use crate::{ }; use itertools::intersperse_with; -pub mod table_row { - //! A newtype for a table row that enforces a fixed column count at runtime. - //! - //! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths. - //! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime. - //! By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec`, without requiring const generics. 
- - use std::{ - any::type_name, - ops::{ - Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, - }, - }; - - #[derive(Clone, Debug, PartialEq, Eq)] - pub struct TableRow(Vec); - - impl TableRow { - pub fn from_element(element: T, length: usize) -> Self - where - T: Clone, - { - Self::from_vec(vec![element; length], length) - } - - /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. - /// - /// Use this when you want to ensure at construction time that the row has the correct number of columns. - /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows. - /// - /// # Panics - /// Panics if `data.len() != expected_length`. - pub fn from_vec(data: Vec, expected_length: usize) -> Self { - Self::try_from_vec(data, expected_length).unwrap_or_else(|e| { - let name = type_name::>(); - panic!("Expected {name} to be created successfully: {e}"); - }) - } - - /// Attempts to construct a `TableRow` from a `Vec`, returning an error if the length does not match `expected_len`. - /// - /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully. - /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise. - pub fn try_from_vec(data: Vec, expected_len: usize) -> Result { - if data.len() != expected_len { - Err(format!( - "Row length {} does not match expected {}", - data.len(), - expected_len - )) - } else { - Ok(Self(data)) - } - } - - /// Returns reference to element by column index. - /// - /// # Panics - /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). 
- pub fn expect_get(&self, col: impl Into) -> &T { - let col = col.into(); - self.0.get(col).unwrap_or_else(|| { - panic!( - "Expected table row of `{}` to have {col:?}", - type_name::() - ) - }) - } - - pub fn get(&self, col: impl Into) -> Option<&T> { - self.0.get(col.into()) - } - - pub fn as_slice(&self) -> &[T] { - &self.0 - } - - pub fn into_vec(self) -> Vec { - self.0 - } - - /// Like [`map`], but borrows the row and clones each element before mapping. - /// - /// This is useful when you want to map over a borrowed row without consuming it, - /// but your mapping function requires ownership of each element. - /// - /// # Difference - /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`. - /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row. - /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element. - pub fn map_cloned(&self, f: F) -> TableRow - where - F: FnMut(T) -> U, - T: Clone, - { - self.clone().map(f) - } - - /// Consumes the row and transforms all elements within it in a length-safe way. - /// - /// # Difference - /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element. - /// - Use this when you want to transform and consume the row in one step. - /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references). - pub fn map(self, f: F) -> TableRow - where - F: FnMut(T) -> U, - { - TableRow(self.0.into_iter().map(f).collect()) - } - - /// Borrows the row and transforms all elements by reference in a length-safe way. - /// - /// # Difference - /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference. - /// - Use this when you want to map over a borrowed row without cloning or consuming it. - /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning). 
- pub fn map_ref(&self, f: F) -> TableRow - where - F: FnMut(&T) -> U, - { - TableRow(self.0.iter().map(f).collect()) - } - - /// Number of columns (alias to `len()` with more semantic meaning) - pub fn cols(&self) -> usize { - self.0.len() - } - } - - ///// Convenience traits ///// - pub trait IntoTableRow { - fn into_table_row(self, expected_length: usize) -> TableRow; - } - impl IntoTableRow for Vec { - fn into_table_row(self, expected_length: usize) -> TableRow { - TableRow::from_vec(self, expected_length) - } - } - - // Index implementations for convenient access - impl Index for TableRow { - type Output = T; - - fn index(&self, index: usize) -> &Self::Output { - &self.0[index] - } - } - - impl IndexMut for TableRow { - fn index_mut(&mut self, index: usize) -> &mut Self::Output { - &mut self.0[index] - } - } - - // Range indexing implementations for slice operations - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: Range) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeFrom) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeTo) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeToInclusive) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl Index for TableRow { - type Output = [T]; - - fn index(&self, index: RangeFull) -> &Self::Output { - as Index>::index(&self.0, index) - } - } - - impl Index> for TableRow { - type Output = [T]; - - fn index(&self, index: RangeInclusive) -> &Self::Output { - as Index>>::index(&self.0, index) - } - } - - impl IndexMut> for TableRow { - fn index_mut(&mut self, index: RangeInclusive) -> &mut Self::Output { - as IndexMut>>::index_mut(&mut self.0, index) - } - } -} +pub mod table_row; 
+#[cfg(test)] +mod tests; const RESIZE_COLUMN_WIDTH: f32 = 8.0; @@ -1445,330 +1238,3 @@ impl Component for Table { ) } } - -#[cfg(test)] -mod test { - use super::*; - - fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { - a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) - } - - fn cols_to_str(cols: &[f32], total_size: f32) -> String { - cols.iter() - .map(|f| "*".repeat(f32::round(f * total_size) as usize)) - .collect::>() - .join("|") - } - - fn parse_resize_behavior( - input: &str, - total_size: f32, - expected_cols: usize, - ) -> Vec { - let mut resize_behavior = Vec::with_capacity(expected_cols); - for col in input.split('|') { - if col.starts_with('X') || col.is_empty() { - resize_behavior.push(TableResizeBehavior::None); - } else if col.starts_with('*') { - resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / total_size)); - } else { - panic!("invalid test input: unrecognized resize behavior: {}", col); - } - } - - if resize_behavior.len() != expected_cols { - panic!( - "invalid test input: expected {} columns, got {}", - expected_cols, - resize_behavior.len() - ); - } - resize_behavior - } - - mod reset_column_size { - use super::*; - - fn parse(input: &str) -> (Vec, f32, Option) { - let mut widths = Vec::new(); - let mut column_index = None; - for (index, col) in input.split('|').enumerate() { - widths.push(col.len() as f32); - if col.starts_with('X') { - column_index = Some(index); - } - } - - for w in &widths { - assert!(w.is_finite(), "incorrect number of columns"); - } - let total = widths.iter().sum::(); - for width in &mut widths { - *width /= total; - } - (widths, total, column_index) - } - - #[track_caller] - fn check_reset_size( - initial_sizes: &str, - widths: &str, - expected: &str, - resize_behavior: &str, - ) { - let (initial_sizes, total_1, None) = parse(initial_sizes) else { - panic!("invalid test input: initial sizes should not be marked"); - }; - let (widths, total_2, Some(column_index)) = parse(widths) 
else { - panic!("invalid test input: widths should be marked"); - }; - assert_eq!( - total_1, total_2, - "invalid test input: total width not the same {total_1}, {total_2}" - ); - let (expected, total_3, None) = parse(expected) else { - panic!("invalid test input: expected should not be marked: {expected:?}"); - }; - assert_eq!( - total_2, total_3, - "invalid test input: total width not the same" - ); - let cols = initial_sizes.len(); - let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); - let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); - let result = TableColumnWidths::reset_to_initial_size( - column_index, - TableRow::from_vec(widths, cols), - TableRow::from_vec(initial_sizes, cols), - &resize_behavior, - ); - let result_slice = result.as_slice(); - let is_eq = is_almost_eq(result_slice, &expected); - if !is_eq { - let result_str = cols_to_str(result_slice, total_1); - let expected_str = cols_to_str(&expected, total_1); - panic!( - "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" - ); - } - } - - macro_rules! check_reset_size { - (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { - check_reset_size($initial, $current, $expected, $resizing); - }; - ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) 
=> { - #[test] - fn $name() { - check_reset_size($initial, $current, $expected, $resizing); - } - }; - } - - check_reset_size!( - basic_right, - columns: 5, - starting: "**|**|**|**|**", - snapshot: "**|**|X|***|**", - expected: "**|**|**|**|**", - minimums: "X|*|*|*|*", - ); - - check_reset_size!( - basic_left, - columns: 5, - starting: "**|**|**|**|**", - snapshot: "**|**|***|X|**", - expected: "**|**|**|**|**", - minimums: "X|*|*|*|**", - ); - - check_reset_size!( - squashed_left_reset_col2, - columns: 6, - starting: "*|***|**|**|****|*", - snapshot: "*|*|X|*|*|********", - expected: "*|*|**|*|*|*******", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - grow_cascading_right, - columns: 6, - starting: "*|***|****|**|***|*", - snapshot: "*|***|X|**|**|*****", - expected: "*|***|****|*|*|****", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - squashed_right_reset_col4, - columns: 6, - starting: "*|***|**|**|****|*", - snapshot: "*|********|*|*|X|*", - expected: "*|*****|*|*|****|*", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - reset_col6_right, - columns: 6, - starting: "*|***|**|***|***|**", - snapshot: "*|***|**|***|**|XXX", - expected: "*|***|**|***|***|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - reset_col6_left, - columns: 6, - starting: "*|***|**|***|***|**", - snapshot: "*|***|**|***|****|X", - expected: "*|***|**|***|***|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - last_column_grow_cascading, - columns: 6, - starting: "*|***|**|**|**|***", - snapshot: "*|*******|*|**|*|X", - expected: "*|******|*|*|*|***", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - goes_left_when_left_has_extreme_diff, - columns: 6, - starting: "*|***|****|**|**|***", - snapshot: "*|********|X|*|**|**", - expected: "*|*****|****|*|**|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - basic_shrink_right, - columns: 6, - starting: "**|**|**|**|**|**", - snapshot: "**|**|XXX|*|**|**", - expected: 
"**|**|**|**|**|**", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - shrink_should_go_left, - columns: 6, - starting: "*|***|**|*|*|*", - snapshot: "*|*|XXX|**|*|*", - expected: "*|**|**|**|*|*", - minimums: "X|*|*|*|*|*", - ); - - check_reset_size!( - shrink_should_go_right, - columns: 6, - starting: "*|***|**|**|**|*", - snapshot: "*|****|XXX|*|*|*", - expected: "*|****|**|**|*|*", - minimums: "X|*|*|*|*|*", - ); - } - - mod drag_handle { - use super::*; - - fn parse(input: &str) -> (Vec, f32, Option) { - let mut widths = Vec::new(); - let column_index = input.replace("*", "").find("I"); - for col in input.replace("I", "|").split('|') { - widths.push(col.len() as f32); - } - - for w in &widths { - assert!(w.is_finite(), "incorrect number of columns"); - } - let total = widths.iter().sum::(); - for width in &mut widths { - *width /= total; - } - (widths, total, column_index) - } - - #[track_caller] - fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) { - let (widths, total_1, Some(column_index)) = parse(widths) else { - panic!("invalid test input: widths should be marked"); - }; - let (expected, total_2, None) = parse(expected) else { - panic!("invalid test input: expected should not be marked: {expected:?}"); - }; - assert_eq!( - total_1, total_2, - "invalid test input: total width not the same" - ); - let cols = widths.len(); - let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); - let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); - - let distance = distance as f32 / total_1; - - let mut widths_table_row = TableRow::from_vec(widths, cols); - TableColumnWidths::drag_column_handle( - distance, - column_index, - &mut widths_table_row, - &resize_behavior, - ); - - let result_widths = widths_table_row.as_slice(); - let is_eq = is_almost_eq(result_widths, &expected); - if !is_eq { - let result_str = cols_to_str(result_widths, total_1); - let expected_str = cols_to_str(&expected, 
total_1); - panic!( - "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" - ); - } - } - - macro_rules! check { - (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { - check($dist, $current, $expected, $resizing); - }; - ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => { - #[test] - fn $name() { - check($dist, $current, $expected, $resizing); - } - }; - } - - check!( - basic_right_drag, - columns: 3, - distance: 1, - snapshot: "**|**I**", - expected: "**|***|*", - minimums: "X|*|*", - ); - - check!( - drag_left_against_mins, - columns: 5, - distance: -1, - snapshot: "*|*|*|*I*******", - expected: "*|*|*|*|*******", - minimums: "X|*|*|*|*", - ); - - check!( - drag_left, - columns: 5, - distance: -2, - snapshot: "*|*|*|*****I***", - expected: "*|*|*|***|*****", - minimums: "X|*|*|*|*", - ); - } -} diff --git a/crates/ui/src/components/data_table/table_row.rs b/crates/ui/src/components/data_table/table_row.rs new file mode 100644 index 0000000000000000000000000000000000000000..9ef75e4cbbb72755294ae5c34724a55fbc40f8b8 --- /dev/null +++ b/crates/ui/src/components/data_table/table_row.rs @@ -0,0 +1,208 @@ +//! A newtype for a table row that enforces a fixed column count at runtime. +//! +//! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths. +//! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime. +//! By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec`, without requiring const generics. 
+ +use std::{ + any::type_name, + ops::{ + Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, + }, +}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TableRow(Vec); + +impl TableRow { + pub fn from_element(element: T, length: usize) -> Self + where + T: Clone, + { + Self::from_vec(vec![element; length], length) + } + + /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. + /// + /// Use this when you want to ensure at construction time that the row has the correct number of columns. + /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows. + /// + /// # Panics + /// Panics if `data.len() != expected_length`. + pub fn from_vec(data: Vec, expected_length: usize) -> Self { + Self::try_from_vec(data, expected_length).unwrap_or_else(|e| { + let name = type_name::>(); + panic!("Expected {name} to be created successfully: {e}"); + }) + } + + /// Attempts to construct a `TableRow` from a `Vec`, returning an error if the length does not match `expected_len`. + /// + /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully. + /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise. + pub fn try_from_vec(data: Vec, expected_len: usize) -> Result { + if data.len() != expected_len { + Err(format!( + "Row length {} does not match expected {}", + data.len(), + expected_len + )) + } else { + Ok(Self(data)) + } + } + + /// Returns reference to element by column index. + /// + /// # Panics + /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). 
+ pub fn expect_get(&self, col: impl Into) -> &T { + let col = col.into(); + self.0.get(col).unwrap_or_else(|| { + panic!( + "Expected table row of `{}` to have {col:?}", + type_name::() + ) + }) + } + + pub fn get(&self, col: impl Into) -> Option<&T> { + self.0.get(col.into()) + } + + pub fn as_slice(&self) -> &[T] { + &self.0 + } + + pub fn into_vec(self) -> Vec { + self.0 + } + + /// Like [`map`], but borrows the row and clones each element before mapping. + /// + /// This is useful when you want to map over a borrowed row without consuming it, + /// but your mapping function requires ownership of each element. + /// + /// # Difference + /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`. + /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row. + /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element. + pub fn map_cloned(&self, f: F) -> TableRow + where + F: FnMut(T) -> U, + T: Clone, + { + self.clone().map(f) + } + + /// Consumes the row and transforms all elements within it in a length-safe way. + /// + /// # Difference + /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element. + /// - Use this when you want to transform and consume the row in one step. + /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references). + pub fn map(self, f: F) -> TableRow + where + F: FnMut(T) -> U, + { + TableRow(self.0.into_iter().map(f).collect()) + } + + /// Borrows the row and transforms all elements by reference in a length-safe way. + /// + /// # Difference + /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference. + /// - Use this when you want to map over a borrowed row without cloning or consuming it. + /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning). 
+ pub fn map_ref(&self, f: F) -> TableRow + where + F: FnMut(&T) -> U, + { + TableRow(self.0.iter().map(f).collect()) + } + + /// Number of columns (alias to `len()` with more semantic meaning) + pub fn cols(&self) -> usize { + self.0.len() + } +} + +///// Convenience traits ///// +pub trait IntoTableRow { + fn into_table_row(self, expected_length: usize) -> TableRow; +} +impl IntoTableRow for Vec { + fn into_table_row(self, expected_length: usize) -> TableRow { + TableRow::from_vec(self, expected_length) + } +} + +// Index implementations for convenient access +impl Index for TableRow { + type Output = T; + + fn index(&self, index: usize) -> &Self::Output { + &self.0[index] + } +} + +impl IndexMut for TableRow { + fn index_mut(&mut self, index: usize) -> &mut Self::Output { + &mut self.0[index] + } +} + +// Range indexing implementations for slice operations +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: Range) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeFrom) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeTo) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeToInclusive) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl Index for TableRow { + type Output = [T]; + + fn index(&self, index: RangeFull) -> &Self::Output { + as Index>::index(&self.0, index) + } +} + +impl Index> for TableRow { + type Output = [T]; + + fn index(&self, index: RangeInclusive) -> &Self::Output { + as Index>>::index(&self.0, index) + } +} + +impl IndexMut> for TableRow { + fn index_mut(&mut self, index: RangeInclusive) -> &mut Self::Output { + as IndexMut>>::index_mut(&mut self.0, index) + } +} diff --git a/crates/ui/src/components/data_table/tests.rs 
b/crates/ui/src/components/data_table/tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..f0982a8aa5abe5f5a9351ebaaaf4072ca17839e6 --- /dev/null +++ b/crates/ui/src/components/data_table/tests.rs @@ -0,0 +1,318 @@ +use super::*; + +fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { + a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) +} + +fn cols_to_str(cols: &[f32], total_size: f32) -> String { + cols.iter() + .map(|f| "*".repeat(f32::round(f * total_size) as usize)) + .collect::>() + .join("|") +} + +fn parse_resize_behavior( + input: &str, + total_size: f32, + expected_cols: usize, +) -> Vec { + let mut resize_behavior = Vec::with_capacity(expected_cols); + for col in input.split('|') { + if col.starts_with('X') || col.is_empty() { + resize_behavior.push(TableResizeBehavior::None); + } else if col.starts_with('*') { + resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / total_size)); + } else { + panic!("invalid test input: unrecognized resize behavior: {}", col); + } + } + + if resize_behavior.len() != expected_cols { + panic!( + "invalid test input: expected {} columns, got {}", + expected_cols, + resize_behavior.len() + ); + } + resize_behavior +} + +mod reset_column_size { + use super::*; + + fn parse(input: &str) -> (Vec, f32, Option) { + let mut widths = Vec::new(); + let mut column_index = None; + for (index, col) in input.split('|').enumerate() { + widths.push(col.len() as f32); + if col.starts_with('X') { + column_index = Some(index); + } + } + + for w in &widths { + assert!(w.is_finite(), "incorrect number of columns"); + } + let total = widths.iter().sum::(); + for width in &mut widths { + *width /= total; + } + (widths, total, column_index) + } + + #[track_caller] + fn check_reset_size(initial_sizes: &str, widths: &str, expected: &str, resize_behavior: &str) { + let (initial_sizes, total_1, None) = parse(initial_sizes) else { + panic!("invalid test input: initial sizes should not be 
marked"); + }; + let (widths, total_2, Some(column_index)) = parse(widths) else { + panic!("invalid test input: widths should be marked"); + }; + assert_eq!( + total_1, total_2, + "invalid test input: total width not the same {total_1}, {total_2}" + ); + let (expected, total_3, None) = parse(expected) else { + panic!("invalid test input: expected should not be marked: {expected:?}"); + }; + assert_eq!( + total_2, total_3, + "invalid test input: total width not the same" + ); + let cols = initial_sizes.len(); + let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); + let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); + let result = TableColumnWidths::reset_to_initial_size( + column_index, + TableRow::from_vec(widths, cols), + TableRow::from_vec(initial_sizes, cols), + &resize_behavior, + ); + let result_slice = result.as_slice(); + let is_eq = is_almost_eq(result_slice, &expected); + if !is_eq { + let result_str = cols_to_str(result_slice, total_1); + let expected_str = cols_to_str(&expected, total_1); + panic!( + "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" + ); + } + } + + macro_rules! check_reset_size { + (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { + check_reset_size($initial, $current, $expected, $resizing); + }; + ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) 
=> { + #[test] + fn $name() { + check_reset_size($initial, $current, $expected, $resizing); + } + }; + } + + check_reset_size!( + basic_right, + columns: 5, + starting: "**|**|**|**|**", + snapshot: "**|**|X|***|**", + expected: "**|**|**|**|**", + minimums: "X|*|*|*|*", + ); + + check_reset_size!( + basic_left, + columns: 5, + starting: "**|**|**|**|**", + snapshot: "**|**|***|X|**", + expected: "**|**|**|**|**", + minimums: "X|*|*|*|**", + ); + + check_reset_size!( + squashed_left_reset_col2, + columns: 6, + starting: "*|***|**|**|****|*", + snapshot: "*|*|X|*|*|********", + expected: "*|*|**|*|*|*******", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + grow_cascading_right, + columns: 6, + starting: "*|***|****|**|***|*", + snapshot: "*|***|X|**|**|*****", + expected: "*|***|****|*|*|****", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + squashed_right_reset_col4, + columns: 6, + starting: "*|***|**|**|****|*", + snapshot: "*|********|*|*|X|*", + expected: "*|*****|*|*|****|*", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + reset_col6_right, + columns: 6, + starting: "*|***|**|***|***|**", + snapshot: "*|***|**|***|**|XXX", + expected: "*|***|**|***|***|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + reset_col6_left, + columns: 6, + starting: "*|***|**|***|***|**", + snapshot: "*|***|**|***|****|X", + expected: "*|***|**|***|***|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + last_column_grow_cascading, + columns: 6, + starting: "*|***|**|**|**|***", + snapshot: "*|*******|*|**|*|X", + expected: "*|******|*|*|*|***", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + goes_left_when_left_has_extreme_diff, + columns: 6, + starting: "*|***|****|**|**|***", + snapshot: "*|********|X|*|**|**", + expected: "*|*****|****|*|**|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + basic_shrink_right, + columns: 6, + starting: "**|**|**|**|**|**", + snapshot: "**|**|XXX|*|**|**", + expected: 
"**|**|**|**|**|**", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + shrink_should_go_left, + columns: 6, + starting: "*|***|**|*|*|*", + snapshot: "*|*|XXX|**|*|*", + expected: "*|**|**|**|*|*", + minimums: "X|*|*|*|*|*", + ); + + check_reset_size!( + shrink_should_go_right, + columns: 6, + starting: "*|***|**|**|**|*", + snapshot: "*|****|XXX|*|*|*", + expected: "*|****|**|**|*|*", + minimums: "X|*|*|*|*|*", + ); +} + +mod drag_handle { + use super::*; + + fn parse(input: &str) -> (Vec, f32, Option) { + let mut widths = Vec::new(); + let column_index = input.replace("*", "").find("I"); + for col in input.replace("I", "|").split('|') { + widths.push(col.len() as f32); + } + + for w in &widths { + assert!(w.is_finite(), "incorrect number of columns"); + } + let total = widths.iter().sum::(); + for width in &mut widths { + *width /= total; + } + (widths, total, column_index) + } + + #[track_caller] + fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) { + let (widths, total_1, Some(column_index)) = parse(widths) else { + panic!("invalid test input: widths should be marked"); + }; + let (expected, total_2, None) = parse(expected) else { + panic!("invalid test input: expected should not be marked: {expected:?}"); + }; + assert_eq!( + total_1, total_2, + "invalid test input: total width not the same" + ); + let cols = widths.len(); + let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); + let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); + + let distance = distance as f32 / total_1; + + let mut widths_table_row = TableRow::from_vec(widths, cols); + TableColumnWidths::drag_column_handle( + distance, + column_index, + &mut widths_table_row, + &resize_behavior, + ); + + let result_widths = widths_table_row.as_slice(); + let is_eq = is_almost_eq(result_widths, &expected); + if !is_eq { + let result_str = cols_to_str(result_widths, total_1); + let expected_str = cols_to_str(&expected, total_1); 
+ panic!( + "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}" + ); + } + } + + macro_rules! check { + (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => { + check($dist, $current, $expected, $resizing); + }; + ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => { + #[test] + fn $name() { + check($dist, $current, $expected, $resizing); + } + }; + } + + check!( + basic_right_drag, + columns: 3, + distance: 1, + snapshot: "**|**I**", + expected: "**|***|*", + minimums: "X|*|*", + ); + + check!( + drag_left_against_mins, + columns: 5, + distance: -1, + snapshot: "*|*|*|*I*******", + expected: "*|*|*|*|*******", + minimums: "X|*|*|*|*", + ); + + check!( + drag_left, + columns: 5, + distance: -2, + snapshot: "*|*|*|*****I***", + expected: "*|*|*|***|*****", + minimums: "X|*|*|*|*", + ); +} diff --git a/crates/ui/src/components/gradient_fade.rs b/crates/ui/src/components/gradient_fade.rs new file mode 100644 index 0000000000000000000000000000000000000000..2173fdf06ea8c07c947f092066c2a12d716d4b44 --- /dev/null +++ b/crates/ui/src/components/gradient_fade.rs @@ -0,0 +1,88 @@ +use gpui::{Hsla, Pixels, SharedString, linear_color_stop, linear_gradient, px}; + +use crate::prelude::*; + +/// A gradient overlay that fades from a solid color to transparent. 
+#[derive(IntoElement)] +pub struct GradientFade { + base_bg: Hsla, + hover_bg: Hsla, + active_bg: Hsla, + width: Pixels, + right: Pixels, + gradient_stop: f32, + group_name: Option, +} + +impl GradientFade { + pub fn new(base_bg: Hsla, hover_bg: Hsla, active_bg: Hsla) -> Self { + Self { + base_bg, + hover_bg, + active_bg, + width: px(48.0), + right: px(0.0), + gradient_stop: 0.6, + group_name: None, + } + } + + pub fn width(mut self, width: Pixels) -> Self { + self.width = width; + self + } + + pub fn right(mut self, right: Pixels) -> Self { + self.right = right; + self + } + + pub fn gradient_stop(mut self, stop: f32) -> Self { + self.gradient_stop = stop; + self + } + + pub fn group_name(mut self, name: impl Into) -> Self { + self.group_name = Some(name.into()); + self + } +} + +impl RenderOnce for GradientFade { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let stop = self.gradient_stop; + let hover_bg = self.hover_bg; + let active_bg = self.active_bg; + + div() + .id("gradient_fade") + .absolute() + .top_0() + .right(self.right) + .w(self.width) + .h_full() + .bg(linear_gradient( + 90., + linear_color_stop(self.base_bg, stop), + linear_color_stop(self.base_bg.opacity(0.0), 0.), + )) + .when_some(self.group_name.clone(), |element, group_name| { + element.group_hover(group_name, move |s| { + s.bg(linear_gradient( + 90., + linear_color_stop(hover_bg, stop), + linear_color_stop(hover_bg.opacity(0.0), 0.), + )) + }) + }) + .when_some(self.group_name, |element, group_name| { + element.group_active(group_name, move |s| { + s.bg(linear_gradient( + 90., + linear_color_stop(active_bg, stop), + linear_color_stop(active_bg.opacity(0.0), 0.), + )) + }) + }) + } +} diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index d581fad9453d9812f17b7bc9e0297fb9927c8188..dc2fc76a06c29c72457d385effd06ea71e5f9625 100644 --- a/crates/ui/src/components/list/list_item.rs +++ 
b/crates/ui/src/components/list/list_item.rs @@ -4,7 +4,7 @@ use component::{Component, ComponentScope, example_group_with_title, single_exam use gpui::{AnyElement, AnyView, ClickEvent, MouseButton, MouseDownEvent, Pixels, px}; use smallvec::SmallVec; -use crate::{Disclosure, prelude::*}; +use crate::{Disclosure, GradientFade, prelude::*}; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)] pub enum ListItemSpacing { @@ -209,6 +209,21 @@ impl ParentElement for ListItem { impl RenderOnce for ListItem { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { + let color = cx.theme().colors(); + + let base_bg = if self.selected { + color.element_active + } else { + color.panel_background + }; + + let end_hover_gradient_overlay = + GradientFade::new(base_bg, color.element_hover, color.element_active) + .width(px(96.0)) + .when_some(self.group_name.clone(), |fade, group| { + fade.group_name(group) + }); + h_flex() .id(self.id) .when_some(self.group_name, |this, group| this.group(group)) @@ -220,25 +235,22 @@ impl RenderOnce for ListItem { .px(DynamicSpacing::Base04.rems(cx)) }) .when(!self.inset && !self.disabled, |this| { - this - // TODO: Add focus state - // .when(self.state == InteractionState::Focused, |this| { - .when_some(self.focused, |this, focused| { - if focused { - this.border_1() - .border_color(cx.theme().colors().border_focused) - } else { - this.border_1() - } - }) - .when(self.selectable, |this| { - this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover)) - .active(|style| style.bg(cx.theme().colors().ghost_element_active)) - .when(self.outlined, |this| this.rounded_sm()) - .when(self.selected, |this| { - this.bg(cx.theme().colors().ghost_element_selected) - }) - }) + this.when_some(self.focused, |this, focused| { + if focused { + this.border_1() + .border_color(cx.theme().colors().border_focused) + } else { + this.border_1() + } + }) + .when(self.selectable, |this| { + this.hover(|style| 
style.bg(cx.theme().colors().ghost_element_hover)) + .active(|style| style.bg(cx.theme().colors().ghost_element_active)) + .when(self.outlined, |this| this.rounded_sm()) + .when(self.selected, |this| { + this.bg(cx.theme().colors().ghost_element_selected) + }) + }) }) .when(self.rounded, |this| this.rounded_sm()) .when_some(self.on_hover, |this, on_hover| this.on_hover(on_hover)) @@ -350,6 +362,7 @@ impl RenderOnce for ListItem { .right(DynamicSpacing::Base06.rems(cx)) .top_0() .visible_on_hover("list_item") + .child(end_hover_gradient_overlay) .child(end_hover_slot), ) }), diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 21d6aa46d0f90a0d48e267e935b00d9f263a30c5..d0c720d5081d3ab7ad700df798b931933e03db28 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -1041,7 +1041,18 @@ impl ScrollbarLayout { impl PartialEq for ScrollbarLayout { fn eq(&self, other: &Self) -> bool { - self.axis == other.axis && self.thumb_bounds == other.thumb_bounds + if self.axis != other.axis { + return false; + } + + let axis = self.axis; + let thumb_offset = + self.thumb_bounds.origin.along(axis) - self.track_bounds.origin.along(axis); + let other_thumb_offset = + other.thumb_bounds.origin.along(axis) - other.track_bounds.origin.along(axis); + + thumb_offset == other_thumb_offset + && self.thumb_bounds.size.along(axis) == other.thumb_bounds.size.along(axis) } } diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 6a9b30d463af2d9407e8f4c9e3a81133a87c1bce..9f4c391ed01cc21e6e334d37407c8206ff1b3409 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -64,7 +64,6 @@ tendril = "0.4.3" [dev-dependencies] git2.workspace = true -indoc.workspace = true rand.workspace = true util_macros.workspace = true pretty_assertions.workspace = true diff --git a/crates/util/src/path_list.rs b/crates/util/src/path_list.rs index 
1f923769780de2ae7f1dc18d3334020960ff3bb6..7d605c7924a7d9c25a89634ca7339a457fb99ae4 100644 --- a/crates/util/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer}; /// other path lists without regard to the order of the paths. /// /// The paths can be retrieved in the original order using `ordered_paths()`. -#[derive(Default, PartialEq, Eq, Debug, Clone)] +#[derive(Default, PartialEq, Eq, Hash, Debug, Clone)] pub struct PathList { /// The paths, in lexicographic order. paths: Arc<[PathBuf]>, diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 39b4064a1bd9d3c4c240abf9665b17151066e9ef..3ff07c67a8d2def75e4e7f756c4a466ea2b68ed0 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -601,6 +601,7 @@ const ROW_COL_CAPTURE_REGEX: &str = r"(?xs) | \((\d+)\)() # filename(row) ) + \:*$ | (.+?)(?: \:+(\d+)\:(\d+)\:*$ # filename:row:column @@ -2097,6 +2098,15 @@ mod tests { column: Some(9), } ); + + assert_eq!( + PathWithPosition::parse_str("main (1).log"), + PathWithPosition { + path: PathBuf::from("main (1).log"), + row: None, + column: None + } + ); } #[perf] @@ -2175,6 +2185,15 @@ mod tests { column: None } ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\main (1).log"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\main (1).log"), + row: None, + column: None + } + ); } #[perf] diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 38bf9fed621aa3aa378cbcaa3479f7ecd7b60e11..7b4cff5ff9bdf37666076c403593c45131a63067 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -54,11 +54,9 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -assets.workspace = true command_palette = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } git_ui = { workspace = true, features = ["test-support"] } -title_bar = { workspace = true, 
features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index edbbca1c30fb1bda0bedc35d0de6666228b9ef5d..8c551bcd2768043ae416157c80d4d2f9faa19092 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -978,6 +978,7 @@ impl Vim { editor.set_clip_at_line_ends(false, cx); editor.set_collapse_matches(false); editor.set_input_enabled(true); + editor.set_expects_character_input(true); editor.set_autoindent(true); editor.selections.set_line_mode(false); editor.unregister_addon::(); @@ -1346,6 +1347,15 @@ impl Vim { } } + fn expects_character_input(&self) -> bool { + if let Some(operator) = self.operator_stack.last() { + if operator.is_waiting(self.mode) { + return true; + } + } + self.editor_input_enabled() + } + pub fn editor_input_enabled(&self) -> bool { match self.mode { Mode::Insert => { @@ -2058,6 +2068,7 @@ impl Vim { clip_at_line_ends: self.clip_at_line_ends(), collapse_matches: !HelixModeSetting::get_global(cx).0, input_enabled: self.editor_input_enabled(), + expects_character_input: self.expects_character_input(), autoindent: self.should_autoindent(), cursor_offset_on_selection: self.mode.is_visual(), line_mode: matches!(self.mode, Mode::VisualLine), @@ -2075,6 +2086,7 @@ impl Vim { editor.set_clip_at_line_ends(state.clip_at_line_ends, cx); editor.set_collapse_matches(state.collapse_matches); editor.set_input_enabled(state.input_enabled); + editor.set_expects_character_input(state.expects_character_input); editor.set_autoindent(state.autoindent); editor.set_cursor_offset_on_selection(state.cursor_offset_on_selection); editor.selections.set_line_mode(state.line_mode); @@ -2087,6 +2099,7 @@ struct VimEditorSettingsState { clip_at_line_ends: bool, collapse_matches: bool, input_enabled: bool, + expects_character_input: bool, autoindent: bool, cursor_offset_on_selection: bool, 
line_mode: bool, diff --git a/crates/watch/Cargo.toml b/crates/watch/Cargo.toml index 9d77eaeddec66a08dd2e9d5056249671c9b02670..aea8b0bbbda7d53d17400553407eceb7cb8253b2 100644 --- a/crates/watch/Cargo.toml +++ b/crates/watch/Cargo.toml @@ -19,5 +19,4 @@ parking_lot.workspace = true ctor.workspace = true futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -rand.workspace = true zlog.workspace = true diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 84fd10c8c03e4f7411fc8c813b70255f5e00031d..e884b834af1294a368ad67d72057561b42876ce2 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -72,7 +72,6 @@ windows.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } -dap = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index b29e02f05b367bab557403f3bb34f6ffa45caecc..09c99c230a0c7a9710e2976ac0673b639d8e36c4 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -366,6 +366,18 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { true } + /// Called when the containing pane receives a drop on the item or the item's tab. + /// Returns `true` to consume it and suppress the pane's default drop behavior. + fn handle_drop( + &self, + _active_pane: &Pane, + _dropped: &dyn Any, + _window: &mut Window, + _cx: &mut App, + ) -> bool { + false + } + /// Returns additional actions to add to the tab's context menu. /// Each entry is a label and an action to dispatch. 
fn tab_extra_context_menu_actions( @@ -545,6 +557,13 @@ pub trait ItemHandle: 'static + Send { fn preserve_preview(&self, cx: &App) -> bool; fn include_in_nav_history(&self) -> bool; fn relay_action(&self, action: Box, window: &mut Window, cx: &mut App); + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool; fn tab_extra_context_menu_actions( &self, window: &mut Window, @@ -925,10 +944,10 @@ impl ItemHandle for Entity { }, )); - cx.on_blur( + cx.on_focus_out( &self.read(cx).focus_handle(cx), window, - move |workspace, window, cx| { + move |workspace, _event, window, cx| { if let Some(item) = weak_item.upgrade() && item.workspace_settings(cx).autosave == AutosaveSetting::OnFocusChange { @@ -1110,6 +1129,20 @@ impl ItemHandle for Entity { }) } + /// Called when the containing pane receives a drop on the item or the item's tab. + /// Returns `true` if the item handled it and the pane should skip its default drop behavior. + fn handle_drop( + &self, + active_pane: &Pane, + dropped: &dyn Any, + window: &mut Window, + cx: &mut App, + ) -> bool { + self.update(cx, |this, cx| { + this.handle_drop(active_pane, dropped, window, cx) + }) + } + fn tab_extra_context_menu_actions( &self, window: &mut Window, @@ -1371,7 +1404,8 @@ pub mod test { }; use gpui::{ AnyElement, App, AppContext as _, Context, Entity, EntityId, EventEmitter, Focusable, - InteractiveElement, IntoElement, Render, SharedString, Task, WeakEntity, Window, + InteractiveElement, IntoElement, ParentElement, Render, SharedString, Task, WeakEntity, + Window, }; use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; use std::{any::Any, cell::Cell, sync::Arc}; @@ -1400,6 +1434,7 @@ pub mod test { pub tab_detail: Cell>, serialize: Option Option>>>>, focus_handle: gpui::FocusHandle, + pub child_focus_handles: Vec, } impl project::ProjectItem for TestProjectItem { @@ -1482,6 +1517,7 @@ pub mod test { workspace_id: Default::default(), 
focus_handle: cx.focus_handle(), serialize: None, + child_focus_handles: Vec::new(), } } @@ -1529,6 +1565,11 @@ pub mod test { self } + pub fn with_child_focus_handles(mut self, count: usize, cx: &mut Context) -> Self { + self.child_focus_handles = (0..count).map(|_| cx.focus_handle()).collect(); + self + } + pub fn set_state(&mut self, state: String, cx: &mut Context) { self.push_to_nav_history(cx); self.state = state; @@ -1543,7 +1584,12 @@ pub mod test { impl Render for TestItem { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - gpui::div().track_focus(&self.focus_handle(cx)) + let parent = gpui::div().track_focus(&self.focus_handle(cx)); + self.child_focus_handles + .iter() + .fold(parent, |parent, child_handle| { + parent.child(gpui::div().track_focus(child_handle)) + }) } } @@ -1641,23 +1687,30 @@ pub mod test { where Self: Sized, { - Task::ready(Some(cx.new(|cx| Self { - state: self.state.clone(), - label: self.label.clone(), - save_count: self.save_count, - save_as_count: self.save_as_count, - reload_count: self.reload_count, - is_dirty: self.is_dirty, - buffer_kind: self.buffer_kind, - has_conflict: self.has_conflict, - has_deleted_file: self.has_deleted_file, - project_items: self.project_items.clone(), - nav_history: None, - tab_descriptions: None, - tab_detail: Default::default(), - workspace_id: self.workspace_id, - focus_handle: cx.focus_handle(), - serialize: None, + Task::ready(Some(cx.new(|cx| { + Self { + state: self.state.clone(), + label: self.label.clone(), + save_count: self.save_count, + save_as_count: self.save_as_count, + reload_count: self.reload_count, + is_dirty: self.is_dirty, + buffer_kind: self.buffer_kind, + has_conflict: self.has_conflict, + has_deleted_file: self.has_deleted_file, + project_items: self.project_items.clone(), + nav_history: None, + tab_descriptions: None, + tab_detail: Default::default(), + workspace_id: self.workspace_id, + focus_handle: cx.focus_handle(), + serialize: None, + 
child_focus_handles: self + .child_focus_handles + .iter() + .map(|_| cx.focus_handle()) + .collect(), + } }))) } diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index cd77f4fe30461b5f726c3bcd2f5f78b561e4d415..26af1ce27ecc28b7b541625a16731d0d721a7fc9 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -35,6 +35,12 @@ actions!( ] ); +pub enum MultiWorkspaceEvent { + ActiveWorkspaceChanged, + WorkspaceAdded(Entity), + WorkspaceRemoved(EntityId), +} + pub enum SidebarEvent { Open, Close, @@ -44,6 +50,8 @@ pub trait Sidebar: EventEmitter + Focusable + Render + Sized { fn width(&self, cx: &App) -> Pixels; fn set_width(&mut self, width: Option, cx: &mut Context); fn has_notifications(&self, cx: &App) -> bool; + fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App); + fn is_recent_projects_popover_deployed(&self) -> bool; } pub trait SidebarHandle: 'static + Send + Sync { @@ -54,6 +62,8 @@ pub trait SidebarHandle: 'static + Send + Sync { fn has_notifications(&self, cx: &App) -> bool; fn to_any(&self) -> AnyView; fn entity_id(&self) -> EntityId; + fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App); + fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool; } #[derive(Clone)] @@ -94,6 +104,16 @@ impl SidebarHandle for Entity { fn entity_id(&self) -> EntityId { Entity::entity_id(self) } + + fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) { + self.update(cx, |this, cx| { + this.toggle_recent_projects_popover(window, cx); + }); + } + + fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool { + self.read(cx).is_recent_projects_popover_deployed() + } } pub struct MultiWorkspace { @@ -109,6 +129,8 @@ pub struct MultiWorkspace { _subscriptions: Vec, } +impl EventEmitter for MultiWorkspace {} + impl MultiWorkspace { pub fn new(workspace: Entity, window: &mut Window, cx: &mut Context) -> Self { 
let release_subscription = cx.on_release(|this: &mut MultiWorkspace, _cx| { @@ -179,6 +201,18 @@ impl MultiWorkspace { .map_or(false, |s| s.has_notifications(cx)) } + pub fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) { + if let Some(sidebar) = &self.sidebar { + sidebar.toggle_recent_projects_popover(window, cx); + } + } + + pub fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool { + self.sidebar + .as_ref() + .map_or(false, |s| s.is_recent_projects_popover_deployed(cx)) + } + pub fn multi_workspace_enabled(&self, cx: &App) -> bool { cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai } @@ -304,6 +338,7 @@ impl MultiWorkspace { if !self.multi_workspace_enabled(cx) { self.workspaces[0] = workspace; self.active_workspace_index = 0; + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); cx.notify(); return; } @@ -321,7 +356,11 @@ impl MultiWorkspace { cx: &mut Context, ) -> usize { let index = self.add_workspace(workspace, cx); + let changed = self.active_workspace_index != index; self.active_workspace_index = index; + if changed { + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + } cx.notify(); index } @@ -338,7 +377,8 @@ impl MultiWorkspace { }); } Self::subscribe_to_workspace(&workspace, cx); - self.workspaces.push(workspace); + self.workspaces.push(workspace.clone()); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); self.workspaces.len() - 1 } @@ -349,9 +389,13 @@ impl MultiWorkspace { index < self.workspaces.len(), "workspace index out of bounds" ); + let changed = self.active_workspace_index != index; self.active_workspace_index = index; self.serialize(cx); self.focus_active_workspace(window, cx); + if changed { + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + } cx.notify(); } @@ -406,7 +450,7 @@ impl MultiWorkspace { } } - fn focus_active_workspace(&self, window: &mut Window, cx: &mut App) { + pub fn focus_active_workspace(&self, window: &mut Window, cx: &mut App) { 
// If a dock panel is zoomed, focus it instead of the center pane. // Otherwise, focusing the center pane triggers dismiss_zoomed_items_to_reveal // which closes the zoomed dock. @@ -633,6 +677,10 @@ impl MultiWorkspace { self.serialize(cx); self.focus_active_workspace(window, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved( + removed_workspace.entity_id(), + )); + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); cx.notify(); } diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 81283427e83afb820b113250545d90f787030e25..5f1177e58d5dcb0e8617ac1eb6068b7a9858685c 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -34,7 +34,6 @@ use std::{ any::Any, cmp, fmt, mem, num::NonZeroUsize, - ops::ControlFlow, path::PathBuf, rc::Rc, sync::{ @@ -382,9 +381,6 @@ pub struct Pane { project: WeakEntity, pub drag_split_direction: Option, can_drop_predicate: Option bool>>, - custom_drop_handle: Option< - Arc) -> ControlFlow<(), ()>>, - >, can_split_predicate: Option) -> bool>>, can_toggle_zoom: bool, @@ -567,7 +563,6 @@ impl Pane { workspace, project: project.downgrade(), can_drop_predicate, - custom_drop_handle: None, can_split_predicate: None, can_toggle_zoom: true, should_display_tab_bar: Rc::new(|_, cx| TabBarSettings::get_global(cx).show), @@ -846,15 +841,6 @@ impl Pane { cx.notify(); } - pub fn set_custom_drop_handle(&mut self, cx: &mut Context, handle: F) - where - F: 'static - + Fn(&mut Pane, &dyn Any, &mut Window, &mut Context) -> ControlFlow<(), ()>, - { - self.custom_drop_handle = Some(Arc::new(handle)); - cx.notify(); - } - pub fn nav_history_for_item(&self, item: &Entity) -> ItemNavHistory { ItemNavHistory { history: self.nav_history.clone(), @@ -2901,7 +2887,7 @@ impl Pane { .on_drop( cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| { this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, ix, window, cx) + this.handle_tab_drop(dragged_tab, ix, false, window, cx) }), ) .on_drop( 
@@ -3550,7 +3536,7 @@ impl Pane { .on_drop( cx.listener(move |this, dragged_tab: &DraggedTab, window, cx| { this.drag_split_direction = None; - this.handle_tab_drop(dragged_tab, this.items.len(), window, cx) + this.handle_tab_drop(dragged_tab, this.items.len(), false, window, cx) }), ) .on_drop( @@ -3691,14 +3677,18 @@ impl Pane { &mut self, dragged_tab: &DraggedTab, ix: usize, + is_pane_target: bool, window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, dragged_tab, window, cx) + if is_pane_target + && ix == self.active_item_index + && let Some(active_item) = self.active_item() + && active_item.handle_drop(self, dragged_tab, window, cx) { return; } + let mut to_pane = cx.entity(); let split_direction = self.drag_split_direction; let item_id = dragged_tab.item.item_id(); @@ -3791,7 +3781,7 @@ impl Pane { let item_id = dragged_tab.item.item_id(); let pinned_count = self.pinned_tab_count; - self.handle_tab_drop(dragged_tab, pinned_count, window, cx); + self.handle_tab_drop(dragged_tab, pinned_count, false, window, cx); let to_pane = cx.entity(); @@ -3843,11 +3833,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, dragged_selection, window, cx) + if let Some(active_item) = self.active_item() + && active_item.handle_drop(self, dragged_selection, window, cx) { return; } + self.handle_project_entry_drop( &dragged_selection.active_selection.entry_id, dragged_onto, @@ -3863,11 +3854,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, project_entry_id, window, cx) + if let Some(active_item) = self.active_item() + && active_item.handle_drop(self, project_entry_id, window, cx) { return; } + let 
mut to_pane = cx.entity(); let split_direction = self.drag_split_direction; let project_entry_id = *project_entry_id; @@ -3939,11 +3931,12 @@ impl Pane { window: &mut Window, cx: &mut Context, ) { - if let Some(custom_drop_handle) = self.custom_drop_handle.clone() - && let ControlFlow::Break(()) = custom_drop_handle(self, paths, window, cx) + if let Some(active_item) = self.active_item() + && active_item.handle_drop(self, paths, window, cx) { return; } + let mut to_pane = cx.entity(); let mut split_direction = self.drag_split_direction; let paths = paths.paths().to_vec(); @@ -4424,6 +4417,7 @@ impl Render for Pane { this.handle_tab_drop( dragged_tab, this.active_item_index(), + true, window, cx, ) @@ -4826,7 +4820,7 @@ impl Render for DraggedTab { #[cfg(test)] mod tests { - use std::{iter::zip, num::NonZero}; + use std::{cell::Cell, iter::zip, num::NonZero}; use super::*; use crate::{ @@ -4839,6 +4833,65 @@ mod tests { use theme::LoadThemes; use util::TryFutureExt; + // drop_call_count is a Cell here because `handle_drop` takes &self, not &mut self. 
+ struct CustomDropHandlingItem { + focus_handle: gpui::FocusHandle, + drop_call_count: Cell, + } + + impl CustomDropHandlingItem { + fn new(cx: &mut Context) -> Self { + Self { + focus_handle: cx.focus_handle(), + drop_call_count: Cell::new(0), + } + } + + fn drop_call_count(&self) -> usize { + self.drop_call_count.get() + } + } + + impl EventEmitter<()> for CustomDropHandlingItem {} + + impl Focusable for CustomDropHandlingItem { + fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle { + self.focus_handle.clone() + } + } + + impl Render for CustomDropHandlingItem { + fn render( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> impl gpui::IntoElement { + gpui::Empty + } + } + + impl Item for CustomDropHandlingItem { + type Event = (); + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> gpui::SharedString { + "custom_drop_handling_item".into() + } + + fn handle_drop( + &self, + _active_pane: &Pane, + dropped: &dyn std::any::Any, + _window: &mut Window, + _cx: &mut App, + ) -> bool { + let is_dragged_tab = dropped.downcast_ref::().is_some(); + if is_dragged_tab { + self.drop_call_count.set(self.drop_call_count.get() + 1); + } + is_dragged_tab + } + } + #[gpui::test] async fn test_add_item_capped_to_max_tabs(cx: &mut TestAppContext) { init_test(cx); @@ -5664,6 +5717,83 @@ mod tests { assert_item_labels(&pane, ["C", "A", "B*"], cx); } + #[gpui::test] + async fn test_handle_tab_drop_respects_is_pane_target(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let source_pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + let item_a = add_labeled_item(&source_pane, "A", false, cx); + let item_b = add_labeled_item(&source_pane, "B", false, cx); + + let target_pane = workspace.update_in(cx, |workspace, window, cx| { + 
workspace.split_pane(source_pane.clone(), SplitDirection::Right, window, cx) + }); + + let custom_item = target_pane.update_in(cx, |pane, window, cx| { + let custom_item = Box::new(cx.new(CustomDropHandlingItem::new)); + pane.add_item(custom_item.clone(), true, true, None, window, cx); + custom_item + }); + + let moved_item_id = item_a.item_id(); + let other_item_id = item_b.item_id(); + let custom_item_id = custom_item.item_id(); + + let pane_item_ids = |pane: &Entity, cx: &mut VisualTestContext| { + pane.read_with(cx, |pane, _| { + pane.items().map(|item| item.item_id()).collect::>() + }) + }; + + let source_before_item_ids = pane_item_ids(&source_pane, cx); + assert_eq!(source_before_item_ids, vec![moved_item_id, other_item_id]); + + let target_before_item_ids = pane_item_ids(&target_pane, cx); + assert_eq!(target_before_item_ids, vec![custom_item_id]); + + let dragged_tab = DraggedTab { + pane: source_pane.clone(), + item: item_a.boxed_clone(), + ix: 0, + detail: 0, + is_active: true, + }; + + // Dropping item_a onto the target pane itself means the + // custom item handles the drop and no tab move should occur + target_pane.update_in(cx, |pane, window, cx| { + pane.handle_tab_drop(&dragged_tab, pane.active_item_index(), true, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + custom_item.read_with(cx, |item, _| item.drop_call_count()), + 1 + ); + assert_eq!(pane_item_ids(&source_pane, cx), source_before_item_ids); + assert_eq!(pane_item_ids(&target_pane, cx), target_before_item_ids); + + // Dropping item_a onto the tab target means the custom handler + // should be skipped and the pane's default tab drop behavior should run. 
+ target_pane.update_in(cx, |pane, window, cx| { + pane.handle_tab_drop(&dragged_tab, pane.active_item_index(), false, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + custom_item.read_with(cx, |item, _| item.drop_call_count()), + 1 + ); + assert_eq!(pane_item_ids(&source_pane, cx), vec![other_item_id]); + + let target_item_ids = pane_item_ids(&target_pane, cx); + assert_eq!(target_item_ids, vec![moved_item_id, custom_item_id]); + } + #[gpui::test] async fn test_drag_unpinned_tab_to_split_creates_pane_with_unpinned_tab( cx: &mut TestAppContext, @@ -5699,7 +5829,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, true, window, cx); }); // A should be moved to new pane. B should remain pinned, A should not be pinned @@ -5748,7 +5878,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, true, window, cx); }); // A should be moved to new pane. 
Both A and B should still be pinned @@ -5798,7 +5928,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should stay pinned @@ -5846,7 +5976,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should become pinned @@ -5890,7 +6020,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should stay pinned @@ -5952,7 +6082,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // E (unpinned) should be closed, leaving 3 pinned items @@ -5987,7 +6117,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should still be pinned and active @@ -6027,7 +6157,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A stays pinned @@ -6064,7 +6194,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // Neither are pinned @@ -6101,7 +6231,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A becomes unpinned @@ -6138,7 +6268,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A becomes unpinned @@ -6174,7 +6304,7 @@ mod tests { detail: 0, is_active: true, }; - 
pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A stays pinned, B and C remain unpinned @@ -6215,7 +6345,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should become pinned since it was dropped in the pinned region @@ -6257,7 +6387,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, true, window, cx); }); // A should remain unpinned since it was dropped outside the pinned region @@ -6304,7 +6434,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should be after B and all are pinned @@ -6319,7 +6449,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A should be after C and all are pinned @@ -6334,7 +6464,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 1, window, cx); + pane.handle_tab_drop(&dragged_tab, 1, false, window, cx); }); // A should be before C and all are pinned @@ -6349,7 +6479,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // A should be before B and all are pinned @@ -6381,7 +6511,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 2, window, cx); + pane.handle_tab_drop(&dragged_tab, 2, false, window, cx); }); // A should be at the end @@ -6413,7 +6543,7 @@ mod tests { detail: 0, is_active: true, }; - pane.handle_tab_drop(&dragged_tab, 0, window, cx); + pane.handle_tab_drop(&dragged_tab, 0, false, window, cx); }); // C should be at the beginning diff --git 
a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index 1caa5b56e5f38db00ad59a4aca3a2a830ee023b7..92f1cb4840731bedda5b0b6751f44bfdcdb8ea52 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -10,8 +10,10 @@ use gpui::{ ParentElement, Render, Styled, Task, Window, actions, }; use menu::{SelectNext, SelectPrevious}; +use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use settings::Settings; use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*}; use util::ResultExt; use zed_actions::{Extensions, OpenOnboarding, OpenSettings, agent, command_palette}; @@ -121,21 +123,43 @@ impl RenderOnce for SectionButton { } } +enum SectionVisibility { + Always, + Conditional(fn(&App) -> bool), +} + +impl SectionVisibility { + fn is_visible(&self, cx: &App) -> bool { + match self { + SectionVisibility::Always => true, + SectionVisibility::Conditional(f) => f(cx), + } + } +} + struct SectionEntry { icon: IconName, title: &'static str, action: &'static dyn Action, + visibility_guard: SectionVisibility, } impl SectionEntry { - fn render(&self, button_index: usize, focus: &FocusHandle, _cx: &App) -> impl IntoElement { - SectionButton::new( - self.title, - self.icon, - self.action, - button_index, - focus.clone(), - ) + fn render( + &self, + button_index: usize, + focus: &FocusHandle, + cx: &App, + ) -> Option { + self.visibility_guard.is_visible(cx).then(|| { + SectionButton::new( + self.title, + self.icon, + self.action, + button_index, + focus.clone(), + ) + }) } } @@ -147,21 +171,25 @@ const CONTENT: (Section<4>, Section<3>) = ( icon: IconName::Plus, title: "New File", action: &NewFile, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::FolderOpen, title: "Open Project", - action: &Open, + action: &Open::DEFAULT, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::CloudDownload, title: "Clone 
Repository", action: &GitClone, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::ListCollapse, title: "Open Command Palette", action: &command_palette::Toggle, + visibility_guard: SectionVisibility::Always, }, ], }, @@ -172,11 +200,15 @@ const CONTENT: (Section<4>, Section<3>) = ( icon: IconName::Settings, title: "Open Settings", action: &OpenSettings, + visibility_guard: SectionVisibility::Always, }, SectionEntry { icon: IconName::ZedAssistant, title: "View AI Settings", action: &agent::OpenSettings, + visibility_guard: SectionVisibility::Conditional(|cx| { + !DisableAiSettings::get_global(cx).disable_ai + }), }, SectionEntry { icon: IconName::Blocks, @@ -185,6 +217,7 @@ const CONTENT: (Section<4>, Section<3>) = ( category_filter: None, id: None, }, + visibility_guard: SectionVisibility::Always, }, ], }, @@ -204,7 +237,7 @@ impl Section { self.entries .iter() .enumerate() - .map(|(index, entry)| entry.render(index_offset + index, focus, cx)), + .filter_map(|(index, entry)| entry.render(index_offset + index, focus, cx)), ) } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 3839b4446e7399536a12e7951c004cce81d5c4e6..90f05d07a3a87a53ca25a1dc15da7663a95984a8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -27,9 +27,9 @@ mod workspace_settings; pub use crate::notifications::NotificationFrame; pub use dock::Panel; pub use multi_workspace::{ - DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, NewWorkspaceInWindow, - NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, SidebarHandle, - ToggleWorkspaceSidebar, + DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, + NewWorkspaceInWindow, NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, + SidebarHandle, ToggleWorkspaceSidebar, }; pub use path_list::{PathList, SerializedPathList}; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; @@ 
-209,6 +209,34 @@ pub trait DebuggerProvider { fn active_thread_state(&self, cx: &App) -> Option; } +/// Opens a file or directory. +#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] +#[action(namespace = workspace)] +pub struct Open { + /// When true, opens in a new window. When false, adds to the current + /// window as a new workspace (multi-workspace). + #[serde(default = "Open::default_create_new_window")] + pub create_new_window: bool, +} + +impl Open { + pub const DEFAULT: Self = Self { + create_new_window: true, + }; + + /// Used by `#[serde(default)]` on the `create_new_window` field so that + /// the serde default and `Open::DEFAULT` stay in sync. + fn default_create_new_window() -> bool { + Self::DEFAULT.create_new_window + } +} + +impl Default for Open { + fn default() -> Self { + Self::DEFAULT + } +} + actions!( workspace, [ @@ -254,8 +282,6 @@ actions!( NewSearch, /// Opens a new window. NewWindow, - /// Opens a file or directory. - Open, /// Opens multiple files. OpenFiles, /// Opens the current location in terminal. 
@@ -626,7 +652,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); }); }) .ok(); @@ -638,7 +664,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c window.activate_window(); let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); }); })?; anyhow::Ok(()) @@ -651,6 +677,7 @@ pub fn prompt_for_open_path_and_open( workspace: &mut Workspace, app_state: Arc, options: PathPromptOptions, + create_new_window: bool, window: &mut Window, cx: &mut Context, ) { @@ -660,10 +687,24 @@ pub fn prompt_for_open_path_and_open( window, cx, ); + let multi_workspace_handle = window.window_handle().downcast::(); cx.spawn_in(window, async move |this, cx| { let Some(paths) = paths.await.log_err().flatten() else { return; }; + if !create_new_window { + if let Some(handle) = multi_workspace_handle { + if let Some(task) = handle + .update(cx, |multi_workspace, window, cx| { + multi_workspace.open_project(paths, window, cx) + }) + .log_err() + { + task.await.log_err(); + } + return; + } + } if let Some(task) = this .update_in(cx, |this, window, cx| { this.open_workspace_for_paths(false, paths, window, cx) @@ -1189,6 +1230,7 @@ pub enum Event { ZoomChanged, ModalOpened, Activate, + PanelAdded(AnyView), } #[derive(Debug, Clone)] @@ -2088,10 +2130,13 @@ impl Workspace { let dock_position = panel.position(window, cx); let dock = self.dock_at_position(dock_position); + let any_panel = panel.to_any(); dock.update(cx, |dock, cx| { dock.add_panel(panel, self.weak_self.clone(), 
window, cx) }); + + cx.emit(Event::PanelAdded(any_panel)); } pub fn remove_panel( @@ -7040,7 +7085,17 @@ impl Workspace { } fn resize_left_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { - let size = new_size.min(self.bounds.right() - RESIZE_HANDLE_SIZE); + let workspace_width = self.bounds.size.width; + let mut size = new_size.min(workspace_width - RESIZE_HANDLE_SIZE); + + self.right_dock.read_with(cx, |right_dock, cx| { + let right_dock_size = right_dock + .active_panel_size(window, cx) + .unwrap_or(Pixels::ZERO); + if right_dock_size + size > workspace_width { + size = workspace_width - right_dock_size + } + }); self.left_dock.update(cx, |left_dock, cx| { if WorkspaceSettings::get_global(cx) @@ -7055,13 +7110,14 @@ impl Workspace { } fn resize_right_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) { - let mut size = new_size.max(self.bounds.left() - RESIZE_HANDLE_SIZE); + let workspace_width = self.bounds.size.width; + let mut size = new_size.min(workspace_width - RESIZE_HANDLE_SIZE); self.left_dock.read_with(cx, |left_dock, cx| { let left_dock_size = left_dock .active_panel_size(window, cx) .unwrap_or(Pixels::ZERO); - if left_dock_size + size > self.bounds.right() { - size = self.bounds.right() - left_dock_size + if left_dock_size + size > workspace_width { + size = workspace_width - left_dock_size } }); self.right_dock.update(cx, |right_dock, cx| { @@ -7622,6 +7678,7 @@ impl Render for Workspace { { workspace.previous_dock_drag_coordinates = Some(e.event.position); + match e.drag(cx).0 { DockPosition::Left => { workspace.resize_left_dock( @@ -10651,6 +10708,85 @@ mod tests { item.read_with(cx, |item, _| assert_eq!(item.save_count, 6)); } + #[gpui::test] + async fn test_autosave_on_focus_change_in_multibuffer(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| 
Workspace::test_new(project, window, cx)); + + // Create a multibuffer-like item with two child focus handles, + // simulating individual buffer editors within a multibuffer. + let item = cx.new(|cx| { + TestItem::new(cx) + .with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + .with_child_focus_handles(2, cx) + }); + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(item.clone()), None, true, window, cx); + }); + + // Set autosave to OnFocusChange and focus the first child handle, + // simulating the user's cursor being inside one of the multibuffer's excerpts. + item.update_in(cx, |item, window, cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.workspace.autosave = Some(AutosaveSetting::OnFocusChange); + }) + }); + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 0)); + + // Moving focus from one child to another within the same item should + // NOT trigger autosave — focus is still within the item's focus hierarchy. + item.update_in(cx, |item, window, cx| { + window.focus(&item.child_focus_handles[1], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 0, + "Switching focus between children within the same item should not autosave" + ); + }); + + // Blurring the item saves the file. This is the core regression scenario: + // with `on_blur`, this would NOT trigger because `on_blur` only fires when + // the item's own focus handle is the leaf that lost focus. In a multibuffer, + // the leaf is always a child focus handle, so `on_blur` never detected + // focus leaving the item. 
+ item.update_in(cx, |_, window, _| window.blur()); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 1, + "Blurring should trigger autosave when focus was on a child of the item" + ); + }); + + // Deactivating the window should also trigger autosave when a child of + // the multibuffer item currently owns focus. + item.update_in(cx, |item, window, cx| { + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 1)); + + cx.deactivate_window(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 2, + "Deactivating window should trigger autosave when focus was on a child" + ); + }); + } + #[gpui::test] async fn test_pane_navigation(cx: &mut gpui::TestAppContext) { init_test(cx); diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 788333b5e801f2a0bb22558945d2f142b50ef0a5..6d8faad3dc495a02e054f3fa652f5815f301cf3f 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -21,7 +21,7 @@ workspace = true [features] test-support = [ "gpui/test-support", - "http_client/test-support", + "language/test-support", "pretty_assertions", "settings/test-support", @@ -63,9 +63,7 @@ ztracing.workspace = true [dev-dependencies] clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } -git2.workspace = true gpui = { workspace = true, features = ["test-support"] } -http_client.workspace = true paths = { workspace = true, features = ["test-support"] } rand.workspace = true rpc = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 6ea308db5a32cf82e48439c477c8bb81f02ab777..9c0c892ad7105cc5be9b3dd548659aa1f12a7966 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -48,7 +48,6 @@ visual-tests = [ "language_model/test-support", 
"fs/test-support", "recent_projects/test-support", - "sidebar/test-support", "title_bar/test-support", ] @@ -244,7 +243,6 @@ pkg-config = "0.3.22" [dev-dependencies] call = { workspace = true, features = ["test-support"] } -dap = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } image_viewer = { workspace = true, features = ["test-support"] } @@ -254,8 +252,6 @@ pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } semver.workspace = true terminal_view = { workspace = true, features = ["test-support"] } -tree-sitter-md.workspace = true -tree-sitter-rust.workspace = true title_bar = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } image.workspace = true diff --git a/crates/zed/build.rs b/crates/zed/build.rs index e169760acf16d6caa44aeb2004cd823a355f36ee..9b9ed59bf4de65220f36c1fd53421fdf44c1e529 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -43,12 +43,28 @@ fn main() { "cargo:rustc-env=TARGET={}", std::env::var("TARGET").unwrap() ); - if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() - && output.status.success() - { - let git_sha = String::from_utf8_lossy(&output.stdout); - let git_sha = git_sha.trim(); + let git_sha = match std::env::var("ZED_COMMIT_SHA").ok() { + Some(git_sha) => { + // In deterministic build environments such as Nix, we inject the commit sha into the build script. 
+ Some(git_sha) + } + None => { + if let Some(output) = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + && output.status.success() + { + let git_sha = String::from_utf8_lossy(&output.stdout); + Some(git_sha.trim().to_string()) + } else { + None + } + } + }; + + if let Some(git_sha) = git_sha { println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}"); if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0d50339f6c9d42ffa653e5c7565ae6e22441bdca..f98d51061630fefba33f7703eac68670cde67502 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -48,7 +48,7 @@ use std::{ path::{Path, PathBuf}, process, rc::Rc, - sync::{Arc, OnceLock}, + sync::{Arc, LazyLock, OnceLock}, time::Instant, }; use theme::{ActiveTheme, GlobalTheme, ThemeRegistry}; @@ -657,7 +657,7 @@ fn main() { ); copilot_ui::init(&app_state, cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); zed::telemetry_log::init(cx); @@ -914,7 +914,9 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }) .detach_and_log_err(cx); } - OpenRequestKind::AgentPanel { initial_prompt } => { + OpenRequestKind::AgentPanel { + external_source_prompt, + } => { cx.spawn(async move |cx| { let multi_workspace = workspace::get_any_active_multi_workspace(app_state, cx.clone()).await?; @@ -923,7 +925,11 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut multi_workspace.workspace().update(cx, |workspace, cx| { if let Some(panel) = workspace.focus_panel::(window, cx) { panel.update(cx, |panel, cx| { - panel.new_external_thread_with_text(initial_prompt, window, cx); + panel.new_agent_thread_with_external_source_prompt( + external_source_prompt, + window, + cx, + ); }); } }); @@ -979,21 +985,19 @@ 
fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }) .await?; - let thread_metadata = acp_thread::AgentSessionInfo { - session_id, - cwd: None, - title: Some(format!("🔗 {}", response.title).into()), - updated_at: Some(chrono::Utc::now()), - meta: None, - }; - let sharer_username = response.sharer_username.clone(); multi_workspace.update(cx, |_, window, cx| { workspace.update(cx, |workspace, cx| { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.open_thread(thread_metadata, window, cx); + panel.open_thread( + session_id, + None, + Some(format!("🔗 {}", response.title).into()), + window, + cx, + ); }); panel.focus_handle(cx).focus(window, cx); } @@ -1573,8 +1577,14 @@ fn init_paths() -> HashMap> { }) } +pub(crate) static FORCE_CLI_MODE: LazyLock = LazyLock::new(|| { + let env_var = std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_some(); + unsafe { std::env::remove_var(FORCE_CLI_MODE_ENV_VAR_NAME) }; + env_var +}); + fn stdout_is_a_pty() -> bool { - std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && io::stdout().is_terminal() + !*FORCE_CLI_MODE && io::stdout().is_terminal() } #[derive(Parser, Debug)] diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 8f005fa68b6accb5cf5686157bbb065e33bb1b0c..ead16b911e3ccf9ebd1b9f54113cb01dca849e9d 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -42,6 +42,55 @@ fn main() { std::process::exit(1); } +#[cfg(target_os = "macos")] +fn main() { + // Set ZED_STATELESS early to prevent file system access to real config directories + // This must be done before any code accesses zed_env_vars::ZED_STATELESS + // SAFETY: We're at the start of main(), before any threads are spawned + unsafe { + std::env::set_var("ZED_STATELESS", "1"); + } + + env_logger::builder() + .filter_level(log::LevelFilter::Info) + .init(); + + let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); + + // 
Create a temporary directory for test files + // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var) + // which prevents "path does not exist" errors during worktree scanning + // Use keep() to prevent auto-cleanup - background worktree tasks may still be running + // when tests complete, so we let the OS clean up temp directories on process exit + let temp_dir = tempfile::tempdir().expect("Failed to create temp directory"); + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path + .canonicalize() + .expect("Failed to canonicalize temp directory"); + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); + + // Create test files in the real filesystem + create_test_files(&project_path); + + let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); + + // Note: We don't delete temp_path here because background worktree tasks may still + // be running. The directory will be cleaned up when the process exits or by the OS. 
+ + match test_result { + Ok(Ok(())) => {} + Ok(Err(e)) => { + eprintln!("Visual tests failed: {}", e); + std::process::exit(1); + } + Err(_) => { + eprintln!("Visual tests panicked"); + std::process::exit(1); + } + } +} + // All macOS-specific imports grouped together #[cfg(target_os = "macos")] use { @@ -50,7 +99,6 @@ use { agent_servers::{AgentServer, AgentServerDelegate}, anyhow::{Context as _, Result}, assets::Assets, - chrono::{Duration as ChronoDuration, Utc}, editor::display_map::DisplayRow, feature_flags::FeatureFlagAppExt as _, git_ui::project_diff::ProjectDiff, @@ -60,7 +108,6 @@ use { }, image::RgbaImage, project_panel::ProjectPanel, - recent_projects::RecentProjectEntry, settings::{NotifyWhenAgentWaiting, Settings as _}, settings_ui::SettingsWindow, std::{ @@ -71,7 +118,7 @@ use { time::Duration, }, util::ResultExt as _, - workspace::{AppState, MultiWorkspace, Panel as _, Workspace, WorkspaceId}, + workspace::{AppState, MultiWorkspace, Panel as _, Workspace}, zed_actions::OpenSettingsAt, }; @@ -97,55 +144,6 @@ mod constants { #[cfg(target_os = "macos")] use constants::*; -#[cfg(target_os = "macos")] -fn main() { - // Set ZED_STATELESS early to prevent file system access to real config directories - // This must be done before any code accesses zed_env_vars::ZED_STATELESS - // SAFETY: We're at the start of main(), before any threads are spawned - unsafe { - std::env::set_var("ZED_STATELESS", "1"); - } - - env_logger::builder() - .filter_level(log::LevelFilter::Info) - .init(); - - let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); - - // Create a temporary directory for test files - // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var) - // which prevents "path does not exist" errors during worktree scanning - // Use keep() to prevent auto-cleanup - background worktree tasks may still be running - // when tests complete, so we let the OS clean up temp directories on process exit - let temp_dir = 
tempfile::tempdir().expect("Failed to create temp directory"); - let temp_path = temp_dir.keep(); - let canonical_temp = temp_path - .canonicalize() - .expect("Failed to canonicalize temp directory"); - let project_path = canonical_temp.join("project"); - std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); - - // Create test files in the real filesystem - create_test_files(&project_path); - - let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); - - // Note: We don't delete temp_path here because background worktree tasks may still - // be running. The directory will be cleaned up when the process exits or by the OS. - - match test_result { - Ok(Ok(())) => {} - Ok(Err(e)) => { - eprintln!("Visual tests failed: {}", e); - std::process::exit(1); - } - Err(_) => { - eprintln!("Visual tests panicked"); - std::process::exit(1); - } - } -} - #[cfg(target_os = "macos")] fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> { // Create the visual test context with deterministic task scheduling @@ -202,7 +200,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> }); prompt_store::init(cx); let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); git_ui::init(cx); project::AgentRegistryStore::init_global( @@ -2528,16 +2526,6 @@ fn run_multi_workspace_sidebar_visual_tests( std::fs::create_dir_all(&workspace1_dir)?; std::fs::create_dir_all(&workspace2_dir)?; - // Create directories for recent projects (they must exist on disk for display) - let recent1_dir = canonical_temp.join("tiny-project"); - let recent2_dir = canonical_temp.join("font-kit"); - let recent3_dir = canonical_temp.join("ideas"); - 
let recent4_dir = canonical_temp.join("tmp"); - std::fs::create_dir_all(&recent1_dir)?; - std::fs::create_dir_all(&recent2_dir)?; - std::fs::create_dir_all(&recent3_dir)?; - std::fs::create_dir_all(&recent4_dir)?; - // Enable the agent-v2 feature flag so multi-workspace is active cx.update(|cx| { cx.update_flags(true, vec!["agent-v2".to_string()]); @@ -2677,83 +2665,78 @@ fn run_multi_workspace_sidebar_visual_tests( cx.run_until_parked(); - // Inject recent project entries into the sidebar. - // We update the sidebar entity directly (not through the MultiWorkspace window update) - // to avoid a re-entrant read panic: rebuild_entries reads MultiWorkspace, so we can't - // be inside a MultiWorkspace update when that happens. - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - let now = Utc::now(); - let today_timestamp = now; - let yesterday_timestamp = now - ChronoDuration::days(1); - let past_week_timestamp = now - ChronoDuration::days(10); - let all_timestamp = now - ChronoDuration::days(60); - - let recent_projects = vec![ - RecentProjectEntry { - name: "tiny-project".into(), - full_path: recent1_dir.to_string_lossy().to_string().into(), - paths: vec![recent1_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: today_timestamp, - }, - RecentProjectEntry { - name: "font-kit".into(), - full_path: recent2_dir.to_string_lossy().to_string().into(), - paths: vec![recent2_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: yesterday_timestamp, - }, - RecentProjectEntry { - name: "ideas".into(), - full_path: recent3_dir.to_string_lossy().to_string().into(), - paths: vec![recent3_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: past_week_timestamp, - }, - RecentProjectEntry { - name: "tmp".into(), - full_path: recent4_dir.to_string_lossy().to_string().into(), - paths: vec![recent4_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: all_timestamp, - }, - ]; - 
sidebar.set_test_recent_projects(recent_projects, cx); - }); - }); - - // Set thread info directly on the sidebar for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, _cx| { - sidebar.set_test_thread_info( - 0, - "Refine thread view scrolling behavior".into(), - ui::AgentThreadStatus::Completed, - ); - sidebar.set_test_thread_info( - 1, - "Add line numbers option to FileEditBlock".into(), - ui::AgentThreadStatus::Running, - ); - }); - }); + // Save test threads to the ThreadStore for each workspace + let save_tasks = multi_workspace_window + .update(cx, |multi_workspace, _window, cx| { + let thread_store = agent::ThreadStore::global(cx); + let workspaces = multi_workspace.workspaces().to_vec(); + let mut tasks = Vec::new(); + + for (index, workspace) in workspaces.iter().enumerate() { + let workspace_ref = workspace.read(cx); + let mut paths = Vec::new(); + for worktree in workspace_ref.worktrees(cx) { + let worktree_ref = worktree.read(cx); + if worktree_ref.is_visible() { + paths.push(worktree_ref.abs_path().to_path_buf()); + } + } + let path_list = util::path_list::PathList::new(&paths); + + let (session_id, title, updated_at) = match index { + 0 => ( + "visual-test-thread-0", + "Refine thread view scrolling behavior", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 10, 30, 0) + .unwrap(), + ), + 1 => ( + "visual-test-thread-1", + "Add line numbers option to FileEditBlock", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 11, 0, 0) + .unwrap(), + ), + _ => continue, + }; + + let task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(session_id)), + agent::DbThread { + title: title.to_string().into(), + messages: Vec::new(), + updated_at, + detailed_summary: None, + initial_project_snapshot: None, + cumulative_token_usage: Default::default(), + request_token_usage: Default::default(), + model: None, + profile: None, + imported: false, + subagent_context: None, + speed: None, 
+ thinking_enabled: false, + thinking_effort: None, + ui_scroll_position: None, + draft_prompt: None, + }, + path_list, + cx, + ) + }); + tasks.push(task); + } + tasks + }) + .context("Failed to create test threads")?; - // Set last-worked-on thread titles on some recent projects for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - sidebar.set_test_recent_project_thread_title( - recent1_dir.to_string_lossy().to_string().into(), - "Fix flaky test in CI pipeline".into(), - cx, - ); - sidebar.set_test_recent_project_thread_title( - recent2_dir.to_string_lossy().to_string().into(), - "Upgrade font rendering engine".into(), - cx, - ); - }); - }); + cx.background_executor.allow_parking(); + for task in save_tasks { + cx.foreground_executor + .block_test(task) + .context("Failed to save test thread")?; + } + cx.background_executor.forbid_parking(); cx.run_until_parked(); @@ -2909,12 +2892,12 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { container() .child(ThreadItem::new("ti-none", "Default idle thread").timestamp("1:00 AM")), ) - .child(section_label("Blue dot (generation done)")) + .child(section_label("Blue dot (notified)")) .child( container().child( ThreadItem::new("ti-done", "Generation completed successfully") .timestamp("1:05 AM") - .generation_done(true), + .notified(true), ), ) .child(section_label("Yellow triangle (waiting for confirmation)")) @@ -2939,18 +2922,17 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { ThreadItem::new("ti-running", "Generating response...") .icon(IconName::AiClaude) .timestamp("1:20 AM") - .running(true), + .status(ui::AgentThreadStatus::Running), ), ) .child(section_label( - "Spinner + yellow triangle (running + waiting)", + "Spinner + yellow triangle (waiting for confirmation)", )) .child( container().child( ThreadItem::new("ti-running-waiting", "Running but needs confirmation") .icon(IconName::AiClaude) .timestamp("1:25 AM") - .running(true) 
.status(ui::AgentThreadStatus::WaitingForConfirmation), ), ) @@ -3099,10 +3081,7 @@ fn run_start_thread_in_selector_visual_tests( // Enable feature flags so the thread target selector renders cx.update(|cx| { - cx.update_flags( - true, - vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], - ); + cx.update_flags(true, vec!["agent-v2".to_string()]); }); // Create a temp directory with a real git repo so "New Worktree" is enabled diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index aeb740c5ec05f5382e3b93527bb2191cb44f9d51..079a78225c248e341121f1980a368b37f85eea84 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -371,15 +371,12 @@ pub fn initialize_workspace( }) .detach(); - cx.observe_new(|multi_workspace: &mut MultiWorkspace, window, cx| { + cx.observe_new(|_multi_workspace: &mut MultiWorkspace, window, cx| { let Some(window) = window else { return; }; - let multi_workspace_handle = cx.entity(); - let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); - multi_workspace.register_sidebar(sidebar, window, cx); - let multi_workspace_handle = multi_workspace_handle.downgrade(); + let multi_workspace_handle = cx.entity().downgrade(); window.on_window_should_close(cx, move |window, cx| { multi_workspace_handle .update(cx, |multi_workspace, cx| { @@ -389,6 +386,20 @@ pub fn initialize_workspace( }) .unwrap_or(true) }); + + let window_handle = window.window_handle(); + let multi_workspace_handle = cx.entity(); + cx.defer(move |cx| { + window_handle + .update(cx, |_, window, cx| { + let sidebar = + cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); + multi_workspace_handle.update(cx, |multi_workspace, cx| { + multi_workspace.register_sidebar(sidebar, window, cx); + }); + }) + .ok(); + }); }) .detach(); @@ -491,7 +502,9 @@ pub fn initialize_workspace( workspace.set_panels_task(panels_task); register_actions(app_state.clone(), workspace, window, cx); - 
workspace.focus_handle(cx).focus(window, cx); + if !workspace.has_active_modal(window, cx) { + workspace.focus_handle(cx).focus(window, cx); + } }) .detach(); } @@ -785,7 +798,7 @@ fn register_actions( } } }) - .register_action(|workspace, _: &workspace::Open, window, cx| { + .register_action(|workspace, action: &workspace::Open, window, cx| { telemetry::event!("Project Opened"); workspace::prompt_for_open_path_and_open( workspace, @@ -796,6 +809,7 @@ fn register_actions( multiple: true, prompt: None, }, + action.create_new_window, window, cx, ); @@ -811,6 +825,7 @@ fn register_actions( multiple: true, prompt: None, }, + true, window, cx, ); @@ -1993,13 +2008,29 @@ fn open_local_file( } fn open_bundled_file( - workspace: &Workspace, + workspace: &mut Workspace, text: Cow<'static, str>, title: &'static str, language: &'static str, window: &mut Window, cx: &mut Context, ) { + let existing = workspace.items_of_type::(cx).find(|editor| { + editor.read_with(cx, |editor, cx| { + editor.read_only(cx) + && editor.title(cx).as_ref() == title + && editor + .buffer() + .read(cx) + .as_singleton() + .is_some_and(|buffer| buffer.read(cx).file().is_none()) + }) + }); + if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + return; + } + let language = workspace.app_state().languages.language_for_name(language); cx.spawn_in(window, async move |workspace, cx| { let language = language.await.log_err(); @@ -4783,6 +4814,7 @@ mod tests { "action", "activity_indicator", "agent", + "agents_sidebar", "app_menu", "assistant", "assistant2", @@ -4949,6 +4981,54 @@ mod tests { ); } + #[gpui::test] + async fn test_bundled_files_reuse_existing_editor(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(init); + + let project = Project::test(app_state.fs.clone(), [], cx).await; + let _window = cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + cx.update(|cx| { + cx.dispatch_action(&OpenDefaultSettings); 
+ }); + cx.run_until_parked(); + + let multi_workspace = cx.windows()[0].downcast::().unwrap(); + let first_item_id = multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + workspace + .active_item(cx) + .expect("default settings should be open") + .item_id() + }) + }) + .unwrap(); + + cx.update(|cx| { + cx.dispatch_action(&OpenDefaultSettings); + }); + cx.run_until_parked(); + + let (second_item_id, item_count) = multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.workspace().update(cx, |workspace, cx| { + let pane = workspace.active_pane().read(cx); + ( + pane.active_item() + .expect("default settings should still be open") + .item_id(), + pane.items_len(), + ) + }) + }) + .unwrap(); + + assert_eq!(first_item_id, second_item_id); + assert_eq!(item_count, 1); + } + #[gpui::test] async fn test_bundled_languages(cx: &mut TestAppContext) { let fs = fs::FakeFs::new(cx.background_executor.clone()); @@ -5008,7 +5088,7 @@ mod tests { cx, ); image_viewer::init(cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index debcb605f222dc7c983b9d061803720df5ff727c..f73d703557f8f73ad380c0b7a2cb995b29f92cf1 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -125,7 +125,7 @@ pub fn app_menus(cx: &mut App) -> Vec { } else { "Open…" }, - workspace::Open, + workspace::Open::default(), ), MenuItem::action( "Open Recent...", diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 9f05c5795e6f16cab231df8a5586106ed25b03ee..952c840d4abe0cb99be170e27f66a2ba188c08ca 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs 
+++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -316,7 +316,7 @@ mod tests { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); client::init(&app_state.client, cx); - language_model::init(app_state.client.clone(), cx); + language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); editor::init(cx); app_state }); diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index a7d1da663b3da6848d3552707f261fe02beba56b..e8f8554482680c4a51fc182c58369de19184bcb0 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -1,5 +1,6 @@ use crate::handle_open_request; use crate::restore_or_create_workspace; +use agent_ui::ExternalSourcePrompt; use anyhow::{Context as _, Result, anyhow}; use cli::{CliRequest, CliResponse, ipc::IpcSender}; use cli::{IpcHandshake, ipc}; @@ -48,7 +49,7 @@ pub enum OpenRequestKind { extension_id: String, }, AgentPanel { - initial_prompt: Option, + external_source_prompt: Option, }, SharedAgentThread { session_id: String, @@ -110,8 +111,6 @@ impl OpenRequest { this.kind = Some(OpenRequestKind::Extension { extension_id: extension_id.to_string(), }); - } else if let Some(agent_path) = url.strip_prefix("zed://agent") { - this.parse_agent_url(agent_path) } else if let Some(session_id_str) = url.strip_prefix("zed://agent/shared/") { if uuid::Uuid::parse_str(session_id_str).is_ok() { this.kind = Some(OpenRequestKind::SharedAgentThread { @@ -120,6 +119,8 @@ impl OpenRequest { } else { log::error!("Invalid session ID in URL: {}", session_id_str); } + } else if let Some(agent_path) = url.strip_prefix("zed://agent") { + this.parse_agent_url(agent_path) } else if let Some(schema_path) = url.strip_prefix("zed://schemas/") { this.kind = Some(OpenRequestKind::BuiltinJsonSchema { schema_path: schema_path.to_string(), @@ -164,13 +165,14 @@ impl OpenRequest { fn parse_agent_url(&mut self, agent_path: &str) { // Format: "" or "?prompt=" - let 
initial_prompt = agent_path.strip_prefix('?').and_then(|query| { + let external_source_prompt = agent_path.strip_prefix('?').and_then(|query| { url::form_urlencoded::parse(query.as_bytes()) .find_map(|(key, value)| (key == "prompt").then_some(value)) - .filter(|s| !s.is_empty()) - .map(|s| s.into_owned()) + .and_then(|prompt| ExternalSourcePrompt::new(prompt.as_ref())) + }); + self.kind = Some(OpenRequestKind::AgentPanel { + external_source_prompt, }); - self.kind = Some(OpenRequestKind::AgentPanel { initial_prompt }); } fn parse_git_clone_url(&mut self, clone_path: &str) -> Result<()> { @@ -772,6 +774,137 @@ mod tests { assert_eq!(request.open_paths, vec!["/"]); } + #[gpui::test] + fn test_parse_agent_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://agent".into()], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!(external_source_prompt, None); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + fn agent_url_with_prompt(prompt: &str) -> String { + let mut serializer = url::form_urlencoded::Serializer::new("zed://agent?".to_string()); + serializer.append_pair("prompt", prompt); + serializer.finish() + } + + #[gpui::test] + fn test_parse_agent_url_with_prompt(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + let prompt = "Write me a script\nThanks"; + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![agent_url_with_prompt(prompt)], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!( + external_source_prompt + .as_ref() + .map(ExternalSourcePrompt::as_str), + Some("Write me a script\nThanks") + ); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + #[gpui::test] + 
fn test_parse_agent_url_with_empty_prompt(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![agent_url_with_prompt("")], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::AgentPanel { + external_source_prompt, + }) => { + assert_eq!(external_source_prompt, None); + } + _ => panic!("Expected AgentPanel kind"), + } + } + + #[gpui::test] + fn test_parse_shared_agent_thread_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + let session_id = "123e4567-e89b-12d3-a456-426614174000"; + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![format!("zed://agent/shared/{session_id}")], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::SharedAgentThread { + session_id: parsed_session_id, + }) => { + assert_eq!(parsed_session_id, session_id); + } + _ => panic!("Expected SharedAgentThread kind"), + } + } + + #[gpui::test] + fn test_parse_shared_agent_thread_url_with_invalid_uuid(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec!["zed://agent/shared/not-a-uuid".into()], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + assert!(request.kind.is_none()); + } + #[gpui::test] fn test_parse_git_commit_url(cx: &mut TestAppContext) { let _app_state = init_test(cx); diff --git a/crates/zeta_prompt/src/excerpt_ranges.rs b/crates/zeta_prompt/src/excerpt_ranges.rs new file mode 100644 index 0000000000000000000000000000000000000000..40621fe98a13bfa9195293ad29ba549240532a2e --- /dev/null +++ b/crates/zeta_prompt/src/excerpt_ranges.rs @@ -0,0 +1,443 @@ +use std::ops::Range; + +use serde::{Deserialize, Serialize}; + +use crate::estimate_tokens; + +/// Pre-computed byte offset ranges within `cursor_excerpt` for different +/// editable 
and context token budgets. Allows the server to select the
+/// appropriate ranges for whichever model it uses.
+#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)]
+pub struct ExcerptRanges {
+    /// Editable region computed with a 150-token budget.
+    pub editable_150: Range<usize>,
+    /// Editable region computed with a 180-token budget.
+    pub editable_180: Range<usize>,
+    /// Editable region computed with a 350-token budget.
+    pub editable_350: Range<usize>,
+    /// Editable region computed with a 512-token budget.
+    pub editable_512: Option<Range<usize>>,
+    /// Context boundary when using editable_150 with 350 tokens of additional context.
+    pub editable_150_context_350: Range<usize>,
+    /// Context boundary when using editable_180 with 350 tokens of additional context.
+    pub editable_180_context_350: Range<usize>,
+    /// Context boundary when using editable_350 with 150 tokens of additional context.
+    pub editable_350_context_150: Range<usize>,
+    pub editable_350_context_512: Option<Range<usize>>,
+    pub editable_350_context_1024: Option<Range<usize>>,
+    pub context_4096: Option<Range<usize>>,
+    pub context_8192: Option<Range<usize>>,
+}
+
+/// Builds an `ExcerptRanges` by computing editable and context ranges for each
+/// budget combination, using the syntax-aware logic in
+/// `compute_editable_and_context_ranges`. 
+pub fn compute_legacy_excerpt_ranges( + cursor_excerpt: &str, + cursor_offset: usize, + syntax_ranges: &[Range], +) -> ExcerptRanges { + let compute = |editable_tokens, context_tokens| { + compute_editable_and_context_ranges( + cursor_excerpt, + cursor_offset, + syntax_ranges, + editable_tokens, + context_tokens, + ) + }; + + let (editable_150, editable_150_context_350) = compute(150, 350); + let (editable_180, editable_180_context_350) = compute(180, 350); + let (editable_350, editable_350_context_150) = compute(350, 150); + let (editable_512, _) = compute(512, 0); + let (_, editable_350_context_512) = compute(350, 512); + let (_, editable_350_context_1024) = compute(350, 1024); + let (_, context_4096) = compute(350, 4096); + let (_, context_8192) = compute(350, 8192); + + ExcerptRanges { + editable_150, + editable_180, + editable_350, + editable_512: Some(editable_512), + editable_150_context_350, + editable_180_context_350, + editable_350_context_150, + editable_350_context_512: Some(editable_350_context_512), + editable_350_context_1024: Some(editable_350_context_1024), + context_4096: Some(context_4096), + context_8192: Some(context_8192), + } +} + +/// Given the cursor excerpt text, cursor offset, and the syntax node ranges +/// containing the cursor (innermost to outermost), compute the editable range +/// and context range as byte offset ranges within `cursor_excerpt`. +/// +/// This is the server-side equivalent of `compute_excerpt_ranges` in +/// `edit_prediction::cursor_excerpt`, but operates on plain text with +/// pre-computed syntax boundaries instead of a `BufferSnapshot`. 
+pub fn compute_editable_and_context_ranges( + cursor_excerpt: &str, + cursor_offset: usize, + syntax_ranges: &[Range], + editable_token_limit: usize, + context_token_limit: usize, +) -> (Range, Range) { + let line_starts = compute_line_starts(cursor_excerpt); + let cursor_row = offset_to_row(&line_starts, cursor_offset); + let max_row = line_starts.len().saturating_sub(1) as u32; + + let editable_range = compute_editable_range_from_text( + cursor_excerpt, + &line_starts, + cursor_row, + max_row, + syntax_ranges, + editable_token_limit, + ); + + let context_range = expand_context_from_text( + cursor_excerpt, + &line_starts, + max_row, + &editable_range, + syntax_ranges, + context_token_limit, + ); + + (editable_range, context_range) +} + +fn compute_line_starts(text: &str) -> Vec { + let mut starts = vec![0]; + for (index, byte) in text.bytes().enumerate() { + if byte == b'\n' { + starts.push(index + 1); + } + } + starts +} + +fn offset_to_row(line_starts: &[usize], offset: usize) -> u32 { + match line_starts.binary_search(&offset) { + Ok(row) => row as u32, + Err(row) => (row.saturating_sub(1)) as u32, + } +} + +fn row_start_offset(line_starts: &[usize], row: u32) -> usize { + line_starts.get(row as usize).copied().unwrap_or(0) +} + +fn row_end_offset(text: &str, line_starts: &[usize], row: u32) -> usize { + if let Some(&next_start) = line_starts.get(row as usize + 1) { + // End before the newline of this row. 
+ next_start.saturating_sub(1).min(text.len()) + } else { + text.len() + } +} + +fn row_range_to_byte_range( + text: &str, + line_starts: &[usize], + start_row: u32, + end_row: u32, +) -> Range { + let start = row_start_offset(line_starts, start_row); + let end = row_end_offset(text, line_starts, end_row); + start..end +} + +fn estimate_tokens_for_row_range( + text: &str, + line_starts: &[usize], + start_row: u32, + end_row: u32, +) -> usize { + let mut tokens = 0; + for row in start_row..end_row { + let row_len = row_end_offset(text, line_starts, row) + .saturating_sub(row_start_offset(line_starts, row)); + tokens += estimate_tokens(row_len).max(1); + } + tokens +} + +fn line_token_count_from_text(text: &str, line_starts: &[usize], row: u32) -> usize { + let row_len = + row_end_offset(text, line_starts, row).saturating_sub(row_start_offset(line_starts, row)); + estimate_tokens(row_len).max(1) +} + +/// Returns syntax boundaries (as row ranges) that contain the given row range +/// and extend beyond it, ordered from smallest to largest. +fn containing_syntax_boundaries_from_ranges( + line_starts: &[usize], + syntax_ranges: &[Range], + start_row: u32, + end_row: u32, +) -> Vec<(u32, u32)> { + let mut boundaries = Vec::new(); + let mut last: Option<(u32, u32)> = None; + + // syntax_ranges is innermost to outermost, so iterate in order. + for range in syntax_ranges { + let node_start_row = offset_to_row(line_starts, range.start); + let node_end_row = offset_to_row(line_starts, range.end); + + // Skip nodes that don't extend beyond the current range. 
+ if node_start_row >= start_row && node_end_row <= end_row { + continue; + } + + let rows = (node_start_row, node_end_row); + if last == Some(rows) { + continue; + } + + last = Some(rows); + boundaries.push(rows); + } + + boundaries +} + +fn compute_editable_range_from_text( + text: &str, + line_starts: &[usize], + cursor_row: u32, + max_row: u32, + syntax_ranges: &[Range], + token_limit: usize, +) -> Range { + // Phase 1: Expand symmetrically from cursor using 75% of budget. + let initial_budget = (token_limit * 3) / 4; + let (mut start_row, mut end_row, mut remaining_tokens) = + expand_symmetric(text, line_starts, cursor_row, max_row, initial_budget); + + remaining_tokens += token_limit.saturating_sub(initial_budget); + + let original_start = start_row; + let original_end = end_row; + + // Phase 2: Expand to syntax boundaries that fit within budget. + let boundaries = + containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row); + for (boundary_start, boundary_end) in &boundaries { + let tokens_for_start = if *boundary_start < start_row { + estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row) + } else { + 0 + }; + let tokens_for_end = if *boundary_end > end_row { + estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1) + } else { + 0 + }; + + let total_needed = tokens_for_start + tokens_for_end; + if total_needed <= remaining_tokens { + if *boundary_start < start_row { + start_row = *boundary_start; + } + if *boundary_end > end_row { + end_row = *boundary_end; + } + remaining_tokens = remaining_tokens.saturating_sub(total_needed); + } else { + break; + } + } + + // Phase 3: Continue line-wise in the direction we expanded least. 
+ let expanded_up = original_start.saturating_sub(start_row); + let expanded_down = end_row.saturating_sub(original_end); + let prefer_up = expanded_up <= expanded_down; + + (start_row, end_row, _) = expand_linewise( + text, + line_starts, + start_row, + end_row, + max_row, + remaining_tokens, + prefer_up, + ); + + row_range_to_byte_range(text, line_starts, start_row, end_row) +} + +fn expand_context_from_text( + text: &str, + line_starts: &[usize], + max_row: u32, + editable_range: &Range, + syntax_ranges: &[Range], + context_token_limit: usize, +) -> Range { + let mut start_row = offset_to_row(line_starts, editable_range.start); + let mut end_row = offset_to_row(line_starts, editable_range.end); + let mut remaining_tokens = context_token_limit; + let mut did_syntax_expand = false; + + let boundaries = + containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row); + for (boundary_start, boundary_end) in &boundaries { + let tokens_for_start = if *boundary_start < start_row { + estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row) + } else { + 0 + }; + let tokens_for_end = if *boundary_end > end_row { + estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1) + } else { + 0 + }; + + let total_needed = tokens_for_start + tokens_for_end; + if total_needed <= remaining_tokens { + if *boundary_start < start_row { + start_row = *boundary_start; + } + if *boundary_end > end_row { + end_row = *boundary_end; + } + remaining_tokens = remaining_tokens.saturating_sub(total_needed); + did_syntax_expand = true; + } else { + break; + } + } + + // Only expand line-wise if no syntax expansion occurred. 
+ if !did_syntax_expand { + (start_row, end_row, _) = expand_linewise( + text, + line_starts, + start_row, + end_row, + max_row, + remaining_tokens, + true, + ); + } + + row_range_to_byte_range(text, line_starts, start_row, end_row) +} + +fn expand_symmetric( + text: &str, + line_starts: &[usize], + cursor_row: u32, + max_row: u32, + mut token_budget: usize, +) -> (u32, u32, usize) { + let mut start_row = cursor_row; + let mut end_row = cursor_row; + + let cursor_line_tokens = line_token_count_from_text(text, line_starts, cursor_row); + token_budget = token_budget.saturating_sub(cursor_line_tokens); + + loop { + let can_expand_up = start_row > 0; + let can_expand_down = end_row < max_row; + + if token_budget == 0 || (!can_expand_up && !can_expand_down) { + break; + } + + if can_expand_down { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= token_budget { + end_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } + + if can_expand_up && token_budget > 0 { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= token_budget { + start_row = next_row; + token_budget = token_budget.saturating_sub(line_tokens); + } else { + break; + } + } + } + + (start_row, end_row, token_budget) +} + +fn expand_linewise( + text: &str, + line_starts: &[usize], + mut start_row: u32, + mut end_row: u32, + max_row: u32, + mut remaining_tokens: usize, + prefer_up: bool, +) -> (u32, u32, usize) { + loop { + let can_expand_up = start_row > 0; + let can_expand_down = end_row < max_row; + + if remaining_tokens == 0 || (!can_expand_up && !can_expand_down) { + break; + } + + let mut expanded = false; + + if prefer_up { + if can_expand_up { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + start_row = 
next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + if can_expand_down && remaining_tokens > 0 { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + end_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + } else { + if can_expand_down { + let next_row = end_row + 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + end_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + if can_expand_up && remaining_tokens > 0 { + let next_row = start_row - 1; + let line_tokens = line_token_count_from_text(text, line_starts, next_row); + if line_tokens <= remaining_tokens { + start_row = next_row; + remaining_tokens = remaining_tokens.saturating_sub(line_tokens); + expanded = true; + } + } + } + + if !expanded { + break; + } + } + + (start_row, end_row, remaining_tokens) +} diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index d1cb24a8c83710e06d04e0c006a1963882982f59..1dd675e8b39ccab8403682beb040a075381aaf1d 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -1,4 +1,6 @@ -use anyhow::Result; +pub mod excerpt_ranges; + +use anyhow::{Result, anyhow}; use serde::{Deserialize, Serialize}; use std::fmt::Write; use std::ops::Range; @@ -6,6 +8,10 @@ use std::path::Path; use std::sync::Arc; use strum::{EnumIter, IntoEnumIterator as _, IntoStaticStr}; +pub use crate::excerpt_ranges::{ + ExcerptRanges, compute_editable_and_context_ranges, compute_legacy_excerpt_ranges, +}; + pub const CURSOR_MARKER: &str = "<|user_cursor|>"; pub const MAX_PROMPT_TOKENS: usize = 4096; @@ -18,31 +24,6 @@ fn estimate_tokens(bytes: usize) -> usize { bytes / 3 } -/// Pre-computed byte offset 
ranges within `cursor_excerpt` for different -/// editable and context token budgets. Allows the server to select the -/// appropriate ranges for whichever model it uses. -#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)] -pub struct ExcerptRanges { - /// Editable region computed with a 150-token budget. - pub editable_150: Range, - /// Editable region computed with a 180-token budget. - pub editable_180: Range, - /// Editable region computed with a 350-token budget. - pub editable_350: Range, - /// Editable region computed with a 350-token budget. - pub editable_512: Option>, - /// Context boundary when using editable_150 with 350 tokens of additional context. - pub editable_150_context_350: Range, - /// Context boundary when using editable_180 with 350 tokens of additional context. - pub editable_180_context_350: Range, - /// Context boundary when using editable_350 with 150 tokens of additional context. - pub editable_350_context_150: Range, - pub editable_350_context_512: Option>, - pub editable_350_context_1024: Option>, - pub context_4096: Option>, - pub context_8192: Option>, -} - #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ZetaPromptInput { pub cursor_path: Arc, @@ -51,9 +32,18 @@ pub struct ZetaPromptInput { #[serde(default, skip_serializing_if = "Option::is_none")] pub excerpt_start_row: Option, pub events: Vec>, - pub related_files: Vec, + #[serde(default)] + pub related_files: Option>, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub active_buffer_diagnostics: Vec, /// These ranges let the server select model-appropriate subsets. pub excerpt_ranges: ExcerptRanges, + /// Byte offset ranges within `cursor_excerpt` for all syntax nodes that + /// contain `cursor_offset_in_excerpt`, ordered from innermost to outermost. + /// When present, the server uses these to compute editable/context ranges + /// instead of `excerpt_ranges`. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub syntax_ranges: Option>>, /// The name of the edit prediction model experiment to use. #[serde(default, skip_serializing_if = "Option::is_none")] pub experiment: Option, @@ -89,6 +79,8 @@ pub enum ZetaFormat { V0211Prefill, V0211SeedCoder, v0226Hashline, + V0304VariableEdit, + V0304SeedNoEdits, } impl std::fmt::Display for ZetaFormat { @@ -178,6 +170,15 @@ pub fn write_event(prompt: &mut String, event: &Event) { } } +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] +pub struct ActiveBufferDiagnostic { + pub severity: Option, + pub message: String, + pub snippet: String, + pub snippet_buffer_row_range: Range, + pub diagnostic_range_in_snippet: Range, +} + #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct RelatedFile { pub path: Arc, @@ -215,6 +216,38 @@ pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] ZetaFormat::V0211Prefill => v0211_prefill::special_tokens(), ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), ZetaFormat::v0226Hashline => hashline::special_tokens(), + ZetaFormat::V0304VariableEdit => v0304_variable_edit::special_tokens(), + ZetaFormat::V0304SeedNoEdits => seed_coder::special_tokens(), + } +} + +/// Returns the (editable_token_limit, context_token_limit) for a given format. 
+pub fn token_limits_for_format(format: ZetaFormat) -> (usize, usize) { + match format { + ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => (150, 350), + ZetaFormat::V0114180EditableRegion => (180, 350), + ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits => (350, 150), + ZetaFormat::V0304VariableEdit => (1024, 0), + } +} + +pub fn stop_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { + match format { + ZetaFormat::v0226Hashline => &[hashline::NO_EDITS_COMMAND_MARKER], + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::V0304VariableEdit + | ZetaFormat::V0304SeedNoEdits => &[], } } @@ -235,10 +268,19 @@ pub fn excerpt_ranges_for_format( | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill | ZetaFormat::V0211SeedCoder - | ZetaFormat::v0226Hashline => ( + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits => ( ranges.editable_350.clone(), ranges.editable_350_context_150.clone(), ), + ZetaFormat::V0304VariableEdit => { + let context = ranges + .editable_350_context_1024 + .clone() + .or(ranges.editable_350_context_512.clone()) + .unwrap_or_else(|| ranges.editable_350_context_150.clone()); + (context.clone(), context) + } } } @@ -283,13 +325,15 @@ pub fn write_cursor_excerpt_section_for_format( cursor_offset, ) } - ZetaFormat::V0211SeedCoder => seed_coder::write_cursor_excerpt_section( - prompt, - path, - context, - editable_range, - cursor_offset, - ), + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + seed_coder::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ) + } ZetaFormat::v0226Hashline => 
hashline::write_cursor_excerpt_section( prompt, path, @@ -297,7 +341,19 @@ pub fn write_cursor_excerpt_section_for_format( editable_range, cursor_offset, ), + ZetaFormat::V0304VariableEdit => { + v0304_variable_edit::write_cursor_excerpt_section(prompt, path, context, cursor_offset) + } + } +} + +fn offset_range_to_row_range(text: &str, range: Range) -> Range { + let start_row = text[0..range.start].matches('\n').count() as u32; + let mut end_row = start_row + text[range.clone()].matches('\n').count() as u32; + if !text[..range.end].ends_with('\n') { + end_row += 1; } + return start_row..end_row; } pub fn format_prompt_with_budget_for_format( @@ -305,19 +361,37 @@ pub fn format_prompt_with_budget_for_format( format: ZetaFormat, max_tokens: usize, ) -> String { - let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); + let (context, editable_range, context_range, cursor_offset) = + resolve_cursor_region(input, format); let path = &*input.cursor_path; + let empty_files = Vec::new(); + let input_related_files = input.related_files.as_deref().unwrap_or(&empty_files); + let related_files = if let Some(cursor_excerpt_start_row) = input.excerpt_start_row { + let relative_row_range = offset_range_to_row_range(&input.cursor_excerpt, context_range); + let row_range = relative_row_range.start + cursor_excerpt_start_row + ..relative_row_range.end + cursor_excerpt_start_row; + &filter_redundant_excerpts( + input_related_files.to_vec(), + input.cursor_path.as_ref(), + row_range, + ) + } else { + input_related_files + }; + match format { - ZetaFormat::V0211SeedCoder => seed_coder::format_prompt_with_budget( - path, - context, - &editable_range, - cursor_offset, - &input.events, - &input.related_files, - max_tokens, - ), + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + seed_coder::format_prompt_with_budget( + path, + context, + &editable_range, + cursor_offset, + &input.events, + related_files, + max_tokens, + ) + } _ => { let mut 
cursor_section = String::new(); write_cursor_excerpt_section_for_format( @@ -342,7 +416,7 @@ pub fn format_prompt_with_budget_for_format( let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); let related_files_section = format_related_files_within_budget( - &input.related_files, + &related_files, "<|file_sep|>", "", budget_after_edit_history, @@ -357,6 +431,23 @@ pub fn format_prompt_with_budget_for_format( } } +pub fn filter_redundant_excerpts( + mut related_files: Vec, + cursor_path: &Path, + cursor_row_range: Range, +) -> Vec { + for file in &mut related_files { + if file.path.as_ref() == cursor_path { + file.excerpts.retain(|excerpt| { + excerpt.row_range.start < cursor_row_range.start + || excerpt.row_range.end > cursor_row_range.end + }); + } + } + related_files.retain(|file| !file.excerpts.is_empty()); + related_files +} + pub fn get_prefill_for_format( format: ZetaFormat, context: &str, @@ -370,7 +461,9 @@ pub fn get_prefill_for_format( | ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211SeedCoder - | ZetaFormat::v0226Hashline => String::new(), + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304VariableEdit => String::new(), + ZetaFormat::V0304SeedNoEdits => String::new(), } } @@ -379,34 +472,12 @@ pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> ZetaFormat::V0120GitMergeMarkers => Some(v0120_git_merge_markers::END_MARKER), ZetaFormat::V0131GitMergeMarkersPrefix => Some(v0131_git_merge_markers_prefix::END_MARKER), ZetaFormat::V0211Prefill => Some(v0131_git_merge_markers_prefix::END_MARKER), - ZetaFormat::V0211SeedCoder => Some(seed_coder::END_MARKER), + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => Some(seed_coder::END_MARKER), ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion - | ZetaFormat::v0226Hashline => None, - } -} - -pub fn current_region_markers_for_format(format: ZetaFormat) -> 
(&'static str, &'static str) { - match format { - ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"), - ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion - | ZetaFormat::v0226Hashline => ("<|fim_middle|>current\n", "<|fim_suffix|>"), - ZetaFormat::V0120GitMergeMarkers - | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211Prefill => ( - v0120_git_merge_markers::START_MARKER, - v0120_git_merge_markers::SEPARATOR, - ), - ZetaFormat::V0211SeedCoder => (seed_coder::START_MARKER, seed_coder::SEPARATOR), - } -} - -pub fn clean_extracted_region_for_format(format: ZetaFormat, region: &str) -> String { - match format { - ZetaFormat::v0226Hashline => hashline::strip_hashline_prefixes(region), - _ => region.to_string(), + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304VariableEdit => None, } } @@ -420,36 +491,68 @@ pub fn encode_patch_as_output_for_format( ZetaFormat::v0226Hashline => { hashline::patch_to_edit_commands(old_editable_region, patch, cursor_offset).map(Some) } + ZetaFormat::V0304VariableEdit => v0304_variable_edit::patch_to_variable_edit_output( + old_editable_region, + patch, + cursor_offset, + ) + .map(Some), + ZetaFormat::V0304SeedNoEdits => Ok(seed_coder::no_edits(patch)), _ => Ok(None), } } -pub fn output_with_context_for_format( - format: ZetaFormat, - old_editable_region: &str, - output: &str, -) -> Result> { - match format { - ZetaFormat::v0226Hashline => { - if hashline::output_has_edit_commands(output) { - Ok(Some(hashline::apply_edit_commands( - old_editable_region, - output, - ))) - } else { - Ok(None) - } - } - _ => Ok(None), - } +pub struct ParsedOutput { + /// Text that should replace the editable region + pub new_editable_region: String, + /// The byte range within `cursor_excerpt` that this replacement applies to + pub range_in_excerpt: Range, } -/// Post-processes model output for the given zeta format by stripping format-specific suffixes. 
-pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { - match output_end_marker_for_format(format) { +/// Parse model output for the given zeta format +pub fn parse_zeta2_model_output( + output: &str, + format: ZetaFormat, + prompt_inputs: &ZetaPromptInput, +) -> Result { + let output = match output_end_marker_for_format(format) { Some(marker) => output.strip_suffix(marker).unwrap_or(output), None => output, - } + }; + + let (context, editable_range_in_context, context_range, _) = + resolve_cursor_region(prompt_inputs, format); + let context_start = context_range.start; + let old_editable_region = &context[editable_range_in_context.clone()]; + + let (range_in_context, output) = match format { + ZetaFormat::v0226Hashline => ( + editable_range_in_context, + if hashline::output_has_edit_commands(output) { + hashline::apply_edit_commands(old_editable_region, output) + } else { + output.to_string() + }, + ), + ZetaFormat::V0304VariableEdit => v0304_variable_edit::apply_variable_edit(context, output)?, + ZetaFormat::V0304SeedNoEdits => ( + editable_range_in_context, + if output.starts_with(seed_coder::NO_EDITS) { + old_editable_region.to_string() + } else { + output.to_string() + }, + ), + _ => (editable_range_in_context, output.to_string()), + }; + + let range_in_excerpt = + range_in_context.start + context_start..range_in_context.end + context_start; + + Ok(ParsedOutput { + new_editable_region: output, + range_in_excerpt, + }) } pub fn excerpt_range_for_format( @@ -462,19 +565,35 @@ pub fn excerpt_range_for_format( pub fn resolve_cursor_region( input: &ZetaPromptInput, format: ZetaFormat, -) -> (&str, Range, usize) { - let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges); +) -> (&str, Range, Range, usize) { + let (editable_range, context_range) = if let Some(syntax_ranges) = &input.syntax_ranges { + let (editable_tokens, context_tokens) = token_limits_for_format(format); + 
compute_editable_and_context_ranges( + &input.cursor_excerpt, + input.cursor_offset_in_excerpt, + syntax_ranges, + editable_tokens, + context_tokens, + ) + } else { + excerpt_range_for_format(format, &input.excerpt_ranges) + }; let context_start = context_range.start; - let context_text = &input.cursor_excerpt[context_range]; + let context_text = &input.cursor_excerpt[context_range.clone()]; let adjusted_editable = (editable_range.start - context_start)..(editable_range.end - context_start); let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; - (context_text, adjusted_editable, adjusted_cursor) + ( + context_text, + adjusted_editable, + context_range, + adjusted_cursor, + ) } pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { - let (context, editable_range, _) = resolve_cursor_region(input, format); + let (context, editable_range, _, _) = resolve_cursor_region(input, format); get_prefill_for_format(format, context, &editable_range) } @@ -934,12 +1053,14 @@ pub mod hashline { const SET_COMMAND_MARKER: &str = "<|set|>"; const INSERT_COMMAND_MARKER: &str = "<|insert|>"; + pub const NO_EDITS_COMMAND_MARKER: &str = "<|no_edits|>"; pub fn special_tokens() -> &'static [&'static str] { return &[ SET_COMMAND_MARKER, "<|set_range|>", INSERT_COMMAND_MARKER, + NO_EDITS_COMMAND_MARKER, CURSOR_MARKER, "<|file_sep|>", "<|fim_prefix|>", @@ -1033,6 +1154,7 @@ pub mod hashline { } prompt.push_str(END_MARKER); + prompt.push('\n'); } /// A single edit command parsed from the model output. 
@@ -1158,7 +1280,9 @@ pub mod hashline { } pub fn output_has_edit_commands(model_output: &str) -> bool { - model_output.contains(SET_COMMAND_MARKER) || model_output.contains(INSERT_COMMAND_MARKER) + model_output.contains(SET_COMMAND_MARKER) + || model_output.contains(INSERT_COMMAND_MARKER) + || model_output.contains(NO_EDITS_COMMAND_MARKER) } /// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the @@ -1169,6 +1293,13 @@ pub mod hashline { /// /// Returns the full replacement text for the editable region. pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String { + if model_output + .trim_start() + .starts_with(NO_EDITS_COMMAND_MARKER) + { + return editable_region.to_string(); + } + let original_lines: Vec<&str> = editable_region.lines().collect(); let old_hashes: Vec = original_lines .iter() @@ -1473,6 +1604,10 @@ pub mod hashline { result.pop(); } + if result.is_empty() { + return Ok(NO_EDITS_COMMAND_MARKER.to_string()); + } + Ok(result) } @@ -1503,7 +1638,8 @@ pub mod hashline { <|fim_middle|>current 0:5c|hello<|user_cursor|> world <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "multiline_cursor_on_second_line", @@ -1518,7 +1654,8 @@ pub mod hashline { 1:26|b<|user_cursor|>bb 2:29|ccc <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "no_trailing_newline_in_context", @@ -1532,7 +1669,8 @@ pub mod hashline { 0:d9|lin<|user_cursor|>e1 1:da|line2 <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "leading_newline_in_editable_region", @@ -1546,7 +1684,8 @@ pub mod hashline { 0:00| 1:26|a<|user_cursor|>bc <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "with_suffix", @@ -1560,7 +1699,8 @@ pub mod hashline { 0:26|ab<|user_cursor|>c <|fim_suffix|> def - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "unicode_two_byte_chars", @@ -1573,7 +1713,8 @@ 
pub mod hashline { <|fim_middle|>current 0:1b|hé<|user_cursor|>llo <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "unicode_three_byte_chars", @@ -1586,7 +1727,8 @@ pub mod hashline { <|fim_middle|>current 0:80|日本<|user_cursor|>語 <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "unicode_four_byte_chars", @@ -1599,7 +1741,8 @@ pub mod hashline { <|fim_middle|>current 0:6b|a🌍<|user_cursor|>b <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "cursor_at_start_of_region_not_placed", @@ -1612,7 +1755,8 @@ pub mod hashline { <|fim_middle|>current 0:26|abc <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "cursor_at_end_of_line_not_placed", @@ -1626,7 +1770,8 @@ pub mod hashline { 0:26|abc 1:2f|def <|fim_suffix|> - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, Case { name: "cursor_offset_relative_to_context_not_editable_region", @@ -1645,7 +1790,8 @@ pub mod hashline { 1:26|b<|user_cursor|>bb <|fim_suffix|> suf - <|fim_middle|>updated"}, + <|fim_middle|>updated + "}, }, ]; @@ -1818,6 +1964,18 @@ pub mod hashline { world "}, }, + Case { + name: "no_edits_command_returns_original", + original: indoc! {" + hello + world + "}, + model_output: "<|no_edits|>", + expected: indoc! {" + hello + world + "}, + }, Case { name: "wrong_hash_set_ignored", original: indoc! {" @@ -2037,6 +2195,7 @@ pub mod hashline { ))); assert!(!hashline::output_has_edit_commands("just plain text")); assert!(!hashline::output_has_edit_commands("NO_EDITS")); + assert!(hashline::output_has_edit_commands("<|no_edits|>")); } // ---- hashline::patch_to_edit_commands round-trip tests ---- @@ -2274,35 +2433,47 @@ pub mod hashline { } "#}, patch: indoc! {r#" - @@ -1,3 +1,3 @@ - fn main() { - - println!(); - + eprintln!(""); - } - "#}, + @@ -1,3 +1,3 @@ + fn main() { + - println!(); + + eprintln!(""); + } + "#}, expected_new: indoc! 
{r#" - fn main() { - eprintln!("<|user_cursor|>"); - } - "#}, + fn main() { + eprintln!("<|user_cursor|>"); + } + "#}, }, Case { name: "non_local_hunk_header_pure_insertion_repro", old: indoc! {" - aaa - bbb - "}, + aaa + bbb + "}, patch: indoc! {" - @@ -20,2 +20,3 @@ - aaa - +xxx - bbb - "}, + @@ -20,2 +20,3 @@ + aaa + +xxx + bbb + "}, expected_new: indoc! {" - aaa - xxx - bbb - "}, + aaa + xxx + bbb + "}, + }, + Case { + name: "empty_patch_produces_no_edits_marker", + old: indoc! {" + aaa + bbb + "}, + patch: "@@ -20,2 +20,3 @@\n", + expected_new: indoc! {" + aaa + bbb + "}, }, ]; @@ -2381,6 +2552,8 @@ pub mod seed_coder { pub const SEPARATOR: &str = "=======\n"; pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + pub const NO_EDITS: &str = "NO_EDITS\n"; + pub fn special_tokens() -> &'static [&'static str] { &[ FIM_SUFFIX, @@ -2485,6 +2658,1020 @@ pub mod seed_coder { section.push_str(SEPARATOR); section } + + /// Format patch as containing no changes if it's empty; otherwise return None. + pub(crate) fn no_edits(patch: &str) -> Option { + // Count lines in the patch + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + Some(format!("{NO_EDITS}{END_MARKER}")) + } else { + None + } + } +} + +pub mod v0304_variable_edit { + //! A prompt format with no fixed editable region. The entire context is shown + //! to the model, and it chooses which text to replace by outputting surrounding + //! context lines with `<|fim_middle|>` and `<|fim_suffix|>` delimiting the new + //! text. + //! + //! Example prompt: + //! + //! <|file_sep|>path/to/file.py + //! zero + //! one + //! two + //! three<|user_cursor|> + //! four + //! five + //! <|fim_prefix|> + // + //! Expected output (model generates): + //! + //! two + //! <|fim_middle|> + //! THREE + //! <|fim_suffix|> + //! four + //! + //! The output means: find "two\n...\nfour" in the context, and replace + //! everything between "two\n" and "four" with "THREE\n". 
+ + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + cursor_offset: usize, + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str(&context[..cursor_offset]); + prompt.push_str(CURSOR_MARKER); + prompt.push_str(&context[cursor_offset..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + prompt.push_str("<|fim_prefix|>\n") + } + + /// Apply a variable-edit model output to the original context text. + /// + /// The model output has the form: + /// + /// - prefix context lines + /// - `<|fim_middle|>` + /// - new text + /// - `<|fim_suffix|>` + /// - suffix context lines + /// + /// We locate the prefix/suffix context lines in the original text and replace + /// everything between them with the new text. + pub fn apply_variable_edit( + context: &str, + model_output: &str, + ) -> Result<(Range, String)> { + let (prefix_context, rest) = model_output + .split_once("<|fim_middle|>\n") + .or_else(|| model_output.split_once("<|fim_middle|>")) + .ok_or_else(|| anyhow::anyhow!("missing <|fim_middle|> in model output"))?; + + let (new_text, suffix_context) = rest + .split_once("<|fim_suffix|>\n") + .or_else(|| rest.split_once("<|fim_suffix|>")) + .unwrap_or((rest, "")); + + let suffix_context = if prefix_context.is_empty() && !suffix_context.is_empty() { + suffix_context.strip_prefix('\n').unwrap_or(suffix_context) + } else { + suffix_context + }; + + let prefix_offset = find_substring_at_line_boundary(context, prefix_context) + .ok_or_else(|| anyhow!("could not locate prefix lines"))? 
+ + prefix_context.len(); + let suffix_offset = if suffix_context.is_empty() { + context.len() + } else { + find_substring_at_line_boundary(&context[prefix_offset..], suffix_context) + .ok_or_else(|| anyhow!("could not locate suffix lines"))? + + prefix_offset + }; + + let edit_range = prefix_offset..suffix_offset; + return Ok((edit_range, new_text.to_string())); + } + + fn find_substring_at_line_boundary(haystack: &str, needle: &str) -> Option { + if needle.is_empty() { + return Some(0); + } + + haystack.match_indices(needle).find_map(|(offset, _)| { + let matched_line_start = offset == 0 || haystack[..offset].ends_with('\n'); + matched_line_start.then_some(offset) + }) + } + + /// Convert a unified diff patch into the variable-edit output format. + /// + /// Parses `patch` as a unified diff against `old_text` and produces model + /// output with context lines surrounding `<|fim_middle|>` / `<|fim_suffix|>` + /// delimiters. The diff is resolved by content matching rather than line + /// numbers. + pub fn patch_to_variable_edit_output( + old_text: &str, + patch: &str, + cursor_offset: Option, + ) -> Result { + // Parse the unified diff into hunks. Each hunk has an `old_context` + // string (context + deleted lines interleaved in order) and a list of + // edits expressed as byte ranges within that context plus replacement + // text. + let hunks = parse_hunks(patch); + if hunks.is_empty() { + return Ok(String::new()); + } + + // Apply each hunk by finding its old_context in the text and + // performing the edits. We search forward from where the previous + // hunk ended so that hunks are applied in order. + let mut new_text = old_text.to_string(); + let mut search_from: usize = 0; + let mut first_hunk_pos: Option = None; + + for hunk in &hunks { + let context_pos = new_text[search_from..] 
+ .find(&hunk.old_context) + .map(|pos| pos + search_from) + .ok_or_else(|| anyhow::anyhow!("could not locate hunk context in text"))?; + + if first_hunk_pos.is_none() { + first_hunk_pos = Some(context_pos); + } + + // Apply edits in reverse order so byte offsets remain valid. + for edit in hunk.edits.iter().rev() { + let abs_start = context_pos + edit.range.start; + let abs_end = context_pos + edit.range.end; + new_text.replace_range(abs_start..abs_end, &edit.text); + } + + // Advance past this hunk's region in the (now modified) text. + let new_region_len: usize = + hunk.edits.iter().fold(hunk.old_context.len(), |len, edit| { + len + edit.text.len() - (edit.range.end - edit.range.start) + }); + search_from = context_pos + new_region_len; + } + + // Now we have old_text and new_text. Find the changed line range by + // comparing them. + let old_lines: Vec<&str> = old_text.lines().collect(); + let new_lines: Vec<&str> = new_text.lines().collect(); + + // Find first differing line. + let first_changed_row = old_lines + .iter() + .zip(new_lines.iter()) + .position(|(a, b)| a != b) + .unwrap_or_else(|| old_lines.len().min(new_lines.len())); + + // Find last differing line (from the end). + let max_suffix = old_lines.len().min(new_lines.len()) - first_changed_row; + let common_suffix = old_lines + .iter() + .rev() + .zip(new_lines.iter().rev()) + .take(max_suffix) + .take_while(|(a, b)| a == b) + .count(); + + let old_end = old_lines.len() - common_suffix; + let new_end = new_lines.len() - common_suffix; + + if first_changed_row == old_end && first_changed_row == new_end { + return Ok(String::new()); + } + + // Build the replacement text from new_lines[first_diff..new_end]. + let mut merged_new_text = String::new(); + for line in &new_lines[first_changed_row..new_end] { + merged_new_text.push_str(line); + merged_new_text.push('\n'); + } + + // cursor_offset is relative to the first hunk's new content in + // new_text. 
Translate it to an offset within merged_new_text, which + // only contains lines first_diff..new_end of new_text. + if let Some(hunk_offset) = cursor_offset { + let hunk_start = first_hunk_pos.unwrap_or(0); + let absolute_pos = hunk_start + hunk_offset; + + // Byte offset where first_diff starts in new_text. + let merged_start: usize = new_lines[..first_changed_row] + .iter() + .map(|line| line.len() + 1) + .sum(); + + if absolute_pos >= merged_start { + let relative_offset = absolute_pos - merged_start; + if relative_offset <= merged_new_text.len() { + merged_new_text.insert_str(relative_offset, CURSOR_MARKER); + } + } + } + + // Build output with 2 lines of context above and below. + let context_lines_count = 2; + let mut prefix_start = first_changed_row.saturating_sub(context_lines_count); + let mut suffix_end = (old_end + context_lines_count).min(old_lines.len()); + + fn count_matches(line_range: Range, lines: &[&str]) -> usize { + let pattern = &lines[line_range]; + let pattern_len = pattern.len(); + + let mut count = 0; + for offset in 0..=lines.len() - pattern_len { + if &lines[offset..offset + pattern_len] == pattern { + count += 1; + } + } + count + } + + // Expand prefix and suffix until they are unique + while prefix_start > 0 { + if count_matches(prefix_start..first_changed_row, &old_lines) > 1 { + prefix_start -= 1; + } else { + break; + } + } + while suffix_end < old_lines.len() { + if count_matches(old_end..suffix_end, &old_lines) > 1 { + suffix_end += 1; + } else { + break; + } + } + + let mut output = String::new(); + for line in &old_lines[prefix_start..first_changed_row] { + output.push_str(line); + output.push('\n'); + } + output.push_str("<|fim_middle|>\n"); + output.push_str(&merged_new_text); + output.push_str("<|fim_suffix|>\n"); + for line in &old_lines[old_end..suffix_end] { + output.push_str(line); + output.push('\n'); + } + + Ok(output) + } + + struct ParsedHunk { + old_context: String, + edits: Vec, + } + + struct ParsedEdit { + range: 
Range, + text: String, + } + + /// Parse a unified diff into content-based hunks. Each hunk contains an + /// `old_context` string (context lines + deleted lines, which together + /// form the text that should be found in the original) and a list of edits + /// expressed as byte ranges within that context. + fn parse_hunks(patch: &str) -> Vec { + let mut hunks = Vec::new(); + let mut current: Option = None; + + for line in patch.lines() { + if line.starts_with("@@") { + if let Some(hunk) = current.take() { + if !hunk.old_context.is_empty() || !hunk.edits.is_empty() { + hunks.push(hunk); + } + } + current = Some(ParsedHunk { + old_context: String::new(), + edits: Vec::new(), + }); + } else if line.starts_with("---") || line.starts_with("+++") { + continue; + } else if let Some(hunk) = &mut current { + if let Some(added) = line.strip_prefix('+') { + let pos = hunk.old_context.len(); + if let Some(last_edit) = hunk.edits.last_mut() { + if last_edit.range.end == pos { + writeln!(&mut last_edit.text, "{added}").ok(); + continue; + } + } + hunk.edits.push(ParsedEdit { + range: pos..pos, + text: format!("{added}\n"), + }); + } else if let Some(removed) = line.strip_prefix('-') { + let start = hunk.old_context.len(); + writeln!(&mut hunk.old_context, "{removed}").ok(); + let end = hunk.old_context.len(); + if let Some(last_edit) = hunk.edits.last_mut() { + if last_edit.range.end == start { + last_edit.range.end = end; + continue; + } + } + hunk.edits.push(ParsedEdit { + range: start..end, + text: String::new(), + }); + } else { + let ctx = line.strip_prefix(' ').unwrap_or(line); + writeln!(&mut hunk.old_context, "{ctx}").ok(); + } + } + } + + if let Some(hunk) = current { + if !hunk.old_context.is_empty() || !hunk.edits.is_empty() { + hunks.push(hunk); + } + } + + hunks + } + + #[cfg(test)] + mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn test_apply_variable_edit() { + struct Case { + name: &'static str, + original: &'static str, + model_output: &'static 
str, + expected: &'static str, + } + + let cases = [ + Case { + name: "simple_single_line_replacement", + original: indoc! {" + zero + one + two + three + four + five + "}, + model_output: indoc! {" + two + <|fim_middle|> + THREE + <|fim_suffix|> + four + "}, + expected: indoc! {" + zero + one + two + THREE + four + five + "}, + }, + Case { + name: "multi_line_replacement", + original: indoc! {" + a + b + c + d + e + "}, + model_output: indoc! {" + a + <|fim_middle|> + B + C + D + <|fim_suffix|> + e + "}, + expected: indoc! {" + a + B + C + D + e + "}, + }, + Case { + name: "insertion_between_existing_lines", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + X + <|fim_suffix|> + b + "}, + expected: indoc! {" + a + X + b + c + "}, + }, + Case { + name: "deletion", + original: indoc! {" + a + b + c + d + "}, + model_output: indoc! {" + a + <|fim_middle|> + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + c + d + "}, + }, + Case { + name: "replacement_at_start_no_prefix_context", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + <|fim_middle|> + X + <|fim_suffix|> + b + "}, + expected: indoc! {" + X + b + c + "}, + }, + Case { + name: "replacement_at_end_no_suffix_context", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + b + <|fim_middle|> + Z + <|fim_suffix|> + "}, + expected: indoc! {" + a + b + Z + "}, + }, + Case { + name: "context_with_trailing_newline_is_preserved", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + B + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + B + c + "}, + }, + Case { + name: "cursor_marker_passes_through_untouched", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + a + <|fim_middle|> + B<|user_cursor|>B + <|fim_suffix|> + c + "}, + expected: indoc! {" + a + B<|user_cursor|>B + c + "}, + }, + Case { + name: "multiple_prefix_context_lines", + original: indoc! 
{" + a + b + c + d + e + "}, + model_output: indoc! {" + b + c + <|fim_middle|> + D + <|fim_suffix|> + e + "}, + expected: indoc! {" + a + b + c + D + e + "}, + }, + ]; + + for case in cases { + let (edit_range, replacement) = + apply_variable_edit(case.original, case.model_output).unwrap(); + let mut edited = case.original.to_string(); + edited.replace_range(edit_range, &replacement); + assert_eq!(edited, case.expected, "{}", case.name); + } + } + + #[test] + fn test_patch_to_variable_edit() { + struct Case { + name: &'static str, + old: &'static str, + patch: &'static str, + cursor_offset: Option, + expected_variable_edit: &'static str, + expected_after_apply: &'static str, + } + + let cases = [ + Case { + name: "simple_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + THREE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + THREE + four + five + "}, + }, + Case { + name: "insertion", + old: indoc! {" + a + b + c + d + e + "}, + patch: indoc! {" + @@ -2,0 +3,1 @@ + b + +X + c + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + a + b + <|fim_middle|> + X + <|fim_suffix|> + c + d + "}, + expected_after_apply: indoc! {" + a + b + X + c + d + e + "}, + }, + Case { + name: "deletion", + old: indoc! {" + a + b + c + d + e + "}, + patch: indoc! {" + @@ -2,3 +2,2 @@ + b + -c + d + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + a + b + <|fim_middle|> + <|fim_suffix|> + d + e + "}, + expected_after_apply: indoc! {" + a + b + d + e + "}, + }, + Case { + name: "edit_near_start", + old: indoc! {" + first + second + third + fourth + "}, + patch: indoc! {" + @@ -1,1 +1,1 @@ + -first + +FIRST + "}, + cursor_offset: None, + expected_variable_edit: indoc! 
{" + <|fim_middle|> + FIRST + <|fim_suffix|> + second + third + "}, + expected_after_apply: indoc! {" + FIRST + second + third + fourth + "}, + }, + Case { + name: "edit_near_end", + old: indoc! {" + first + second + third + fourth + "}, + patch: indoc! {" + @@ -4,1 +4,1 @@ + -fourth + +FOURTH + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + second + third + <|fim_middle|> + FOURTH + <|fim_suffix|> + "}, + expected_after_apply: indoc! {" + first + second + third + FOURTH + "}, + }, + Case { + name: "cursor_at_start_of_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: Some(4), + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + <|user_cursor|>THREE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + <|user_cursor|>THREE + four + five + "}, + }, + Case { + name: "cursor_in_middle_of_replacement", + old: indoc! {" + zero + one + two + three + four + five + "}, + patch: indoc! {" + @@ -3,3 +3,3 @@ + two + -three + +THREE + four + "}, + cursor_offset: Some(6), + expected_variable_edit: indoc! {" + one + two + <|fim_middle|> + TH<|user_cursor|>REE + <|fim_suffix|> + four + five + "}, + expected_after_apply: indoc! {" + zero + one + two + TH<|user_cursor|>REE + four + five + "}, + }, + Case { + name: "expands_context_when_two_lines_not_unique_before_and_after", + old: indoc! {" + one + a + b + c + d + two + a + b + c + d + three + a + b + c + d + four + "}, + patch: indoc! {" + @@ -4,5 +4,5 @@ + two + a + b + -c + +C + d + three + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + two + a + b + <|fim_middle|> + C + <|fim_suffix|> + d + three + "}, + expected_after_apply: indoc! {" + one + a + b + c + d + two + a + b + C + d + three + a + b + c + d + four + "}, + }, + Case { + name: "expands_context_when_two_lines_not_unique_before_and_after", + old: indoc! 
{" + { + { + one(); + } + } + { + { + two(); + } + } + { + { + three(); + } + } + { + { + four(); + } + } + "}, + patch: indoc! {" + @@ -4,5 +4,5 @@ + { + - two(); + + TWO(); + } + "}, + cursor_offset: None, + expected_variable_edit: indoc! {" + one(); + } + } + { + { + <|fim_middle|> + TWO(); + <|fim_suffix|> + } + } + { + { + three(); + "}, + expected_after_apply: indoc! {" + { + { + one(); + } + } + { + { + TWO(); + } + } + { + { + three(); + } + } + { + { + four(); + } + } + "}, + }, + ]; + + for case in cases { + let output = + patch_to_variable_edit_output(case.old, case.patch, case.cursor_offset) + .unwrap_or_else(|error| { + panic!("failed converting patch for {}: {error}", case.name) + }); + assert_eq!( + output, case.expected_variable_edit, + "patch->variable_edit mismatch for {}", + case.name + ); + + let (edit_range, replacement) = apply_variable_edit(case.old, &output) + .unwrap_or_else(|error| { + panic!("failed applying variable_edit for {}: {error}", case.name) + }); + let mut edited_by_variable_edit = case.old.to_string(); + edited_by_variable_edit.replace_range(edit_range, &replacement); + assert_eq!( + edited_by_variable_edit, case.expected_after_apply, + "variable_edit apply mismatch for {}", + case.name + ); + + let (expected_edit_range, expected_replacement) = + apply_variable_edit(case.old, case.expected_variable_edit).unwrap_or_else( + |error| { + panic!( + "failed applying expected variable_edit for {}: {error}", + case.name + ) + }, + ); + let mut edited_by_expected_variable_edit = case.old.to_string(); + edited_by_expected_variable_edit + .replace_range(expected_edit_range, &expected_replacement); + assert_eq!( + edited_by_expected_variable_edit, case.expected_after_apply, + "expected variable_edit apply mismatch for {}", + case.name + ); + } + } + + #[test] + fn test_write_cursor_excerpt_section() { + let path = Path::new("test.rs"); + let context = "fn main() {\n hello();\n}\n"; + let cursor_offset = 17; + let mut prompt = 
String::new(); + write_cursor_excerpt_section(&mut prompt, path, context, cursor_offset); + assert_eq!( + prompt, + "<|file_sep|>test.rs\nfn main() {\n h<|user_cursor|>ello();\n}\n<|fim_prefix|>\n" + ); + } + } } /// The zeta1 prompt format @@ -2704,7 +3891,39 @@ mod tests { cursor_offset_in_excerpt: cursor_offset, excerpt_start_row: None, events: events.into_iter().map(Arc::new).collect(), - related_files, + related_files: Some(related_files), + active_buffer_diagnostics: vec![], + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range, + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range, + ..Default::default() + }, + syntax_ranges: None, + experiment: None, + in_open_source_repo: false, + can_collect_data: false, + repo_url: None, + } + } + + fn make_input_with_context_range( + excerpt: &str, + editable_range: Range, + context_range: Range, + cursor_offset: usize, + ) -> ZetaPromptInput { + ZetaPromptInput { + cursor_path: Path::new("test.rs").into(), + cursor_excerpt: excerpt.into(), + cursor_offset_in_excerpt: cursor_offset, + excerpt_start_row: None, + events: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], excerpt_ranges: ExcerptRanges { editable_150: editable_range.clone(), editable_180: editable_range.clone(), @@ -2714,6 +3933,7 @@ mod tests { editable_350_context_150: context_range, ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, @@ -3278,21 +4498,6 @@ mod tests { ); } - #[test] - fn test_seed_coder_clean_output() { - let output_with_marker = "new code\n>>>>>>> UPDATED\n"; - let output_without_marker = "new code\n"; - - assert_eq!( - clean_zeta2_model_output(output_with_marker, ZetaFormat::V0211SeedCoder), - "new code\n" - ); - assert_eq!( - 
clean_zeta2_model_output(output_without_marker, ZetaFormat::V0211SeedCoder), - "new code\n" - ); - } - #[test] fn test_format_zeta1_from_input_basic() { let excerpt = "fn before() {}\nfn foo() {\n let x = 1;\n}\nfn after() {}\n"; @@ -3302,7 +4507,8 @@ mod tests { cursor_offset_in_excerpt: 30, excerpt_start_row: Some(0), events: vec![Arc::new(make_event("other.rs", "-old\n+new\n"))], - related_files: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], excerpt_ranges: ExcerptRanges { editable_150: 15..41, editable_180: 15..41, @@ -3312,6 +4518,7 @@ mod tests { editable_350_context_150: 0..excerpt.len(), ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, @@ -3365,7 +4572,8 @@ mod tests { cursor_offset_in_excerpt: 15, excerpt_start_row: Some(10), events: vec![], - related_files: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], excerpt_ranges: ExcerptRanges { editable_150: 0..28, editable_180: 0..28, @@ -3375,6 +4583,7 @@ mod tests { editable_350_context_150: 0..28, ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, @@ -3423,7 +4632,8 @@ mod tests { cursor_offset_in_excerpt: 25, excerpt_start_row: Some(0), events: vec![], - related_files: vec![], + related_files: Some(vec![]), + active_buffer_diagnostics: vec![], excerpt_ranges: ExcerptRanges { editable_150: editable_range.clone(), editable_180: editable_range.clone(), @@ -3433,6 +4643,7 @@ mod tests { editable_350_context_150: context_range.clone(), ..Default::default() }, + syntax_ranges: None, experiment: None, in_open_source_repo: false, can_collect_data: false, @@ -3516,4 +4727,73 @@ mod tests { let cleaned = zeta1::clean_zeta1_model_output(output).unwrap(); assert_eq!(cleaned, ""); } + + fn apply_edit(excerpt: &str, parsed_output: &ParsedOutput) -> String { + let mut result = excerpt.to_string(); + result.replace_range( + 
parsed_output.range_in_excerpt.clone(), + &parsed_output.new_editable_region, + ); + result + } + + #[test] + fn test_parse_zeta2_model_output() { + let excerpt = "before ctx\nctx start\neditable old\nctx end\nafter ctx\n"; + let context_start = excerpt.find("ctx start").unwrap(); + let context_end = excerpt.find("after ctx").unwrap(); + let editable_start = excerpt.find("editable old").unwrap(); + let editable_end = editable_start + "editable old\n".len(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + context_start..context_end, + editable_start, + ); + + let output = parse_zeta2_model_output( + "editable new\n>>>>>>> UPDATED\n", + ZetaFormat::V0131GitMergeMarkersPrefix, + &input, + ) + .unwrap(); + + assert_eq!( + apply_edit(excerpt, &output), + "before ctx\nctx start\neditable new\nctx end\nafter ctx\n" + ); + } + + #[test] + fn test_parse_zeta2_model_output_identity() { + let excerpt = "aaa\nbbb\nccc\nddd\neee\n"; + let editable_start = excerpt.find("bbb").unwrap(); + let editable_end = excerpt.find("ddd").unwrap(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + 0..excerpt.len(), + editable_start, + ); + + let format = ZetaFormat::V0131GitMergeMarkersPrefix; + let output = + parse_zeta2_model_output("bbb\nccc\n>>>>>>> UPDATED\n", format, &input).unwrap(); + + assert_eq!(apply_edit(excerpt, &output), excerpt); + } + + #[test] + fn test_parse_zeta2_model_output_strips_end_marker() { + let excerpt = "hello\nworld\n"; + let input = make_input_with_context_range(excerpt, 0..excerpt.len(), 0..excerpt.len(), 0); + + let format = ZetaFormat::V0131GitMergeMarkersPrefix; + let output1 = + parse_zeta2_model_output("new content\n>>>>>>> UPDATED\n", format, &input).unwrap(); + let output2 = parse_zeta2_model_output("new content\n", format, &input).unwrap(); + + assert_eq!(apply_edit(excerpt, &output1), apply_edit(excerpt, &output2)); + assert_eq!(apply_edit(excerpt, &output1), "new 
content\n"); + } } diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index 0547f19c9ca0e58cb5d63d7ae1c5231d091a6503..3e152fc5671225abef4a6477b3f73be5d054a365 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -1,6 +1,6 @@ --- title: AI Agent Settings - Zed -description: Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options. +description: "Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options." --- # Agent Settings diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md index 4aada3dff47ba8d0eca8f1056e326d6060451306..828953cca74868b097490dfafcb318b8245a2ef8 100644 --- a/docs/src/ai/privacy-and-security.md +++ b/docs/src/ai/privacy-and-security.md @@ -1,6 +1,6 @@ --- title: AI Privacy and Security - Zed -description: Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency. +description: "Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency." --- # Privacy and Security diff --git a/docs/src/development/feature-process.md b/docs/src/development/feature-process.md new file mode 100644 index 0000000000000000000000000000000000000000..811e1a4fd6130fdf0abc687f6943f58b24e81b08 --- /dev/null +++ b/docs/src/development/feature-process.md @@ -0,0 +1,51 @@ +# Zed's Feature Development Process + +This is for moderate-to-large features — new UI, behavior changes, or work that cuts across multiple parts of Zed. Small keybindings or settings tweaks don't need all of this. + +> **Before you start:** If you're an external contributor, make sure the feature is something the team wants before investing significant effort. 
That said, coming prepared with background research makes it much easier for the team to understand and approve the proposal. Read the [Contributing guide](../../../CONTRIBUTING.md#sending-changes) — if there isn't already a GitHub issue with staff confirmation, start with a GitHub Discussion or a Discord message rather than a PR.
+
+## 1. Why does this matter?
+
+Every feature starts as an idea. Before writing any code, ground it:
+
+- **What problem does this solve?**
+- **What's the evidence?** GitHub issues, Discord requests, thumbs-up counts, blog posts.
+- **Is there prior art?** If it's in VS Code, JetBrains, Neovim, or a wildly popular plugin, that's a strong signal. If the idea is more novel, name what it's based on — "This is X, adapted for Zed's multi-buffers" is far more useful than "I think this would be cool."
+
+## 2. What is it?
+
+Write a short, concrete feature statement, then back it up with the context gathered above. If you can't describe the feature in a few sentences, it might be too big or too vague.
+
+Here's an example format, though adapt it to whatever your feature needs:
+
+> **Feature:** Inline Git Blame
+> **Purpose:** Show the last commit author and message for each line directly after the editor text, so developers can understand code history without opening the git blame view.
+> **Background:**
+> This is standard across all major code editors
+> \[screenshot of VS Code]
+> \[screenshot of IntelliJ]
+> \[screenshot of Neovim]
+> and has 146 thumbs up on the [GitHub issue](https://github.com).
+> **Decisions:**
+> We have to decide whether to use the git CLI or a git library. Zed uses a git library but its blame implementation is too slow for a code editor, so we should use the CLI's porcelain interface.
+
+## 3. What else does this affect?
+
+Walk through this list before you start building. Not everything will apply:
+
+- **Actions & keybindings.** What actions does your feature define? 
Do the default keybindings conflict with existing ones? +- **Settings.** Is any behavior configurable? Per-user vs. per-project vs. per-language? Don't forget to add new settings to the Settings UI. +- **Themes & styling.** Does this need a new semantic token? Does it look right in both light and dark mode? +- **Vim mode.** Vim users might have different expectations for this feature. +- **Remote development.** Does your feature work with remote projects? File paths, shell commands, and environment variables all might behave differently. +- **Persistence across restarts.** Should your feature's state persist across restarts? +- **Accessibility.** Is it keyboard-navigable? Are focus states clear? +- **Platform differences.** Does behavior differ on macOS, Linux, or Windows? +- **Performance.** How does it behave with large files or big projects? Are interactions instant? +- **Security.** How does this feature interact with Workspace Trust? Does it open new attack surfaces in Zed? + +If your feature touches the **editor** specifically: the editor has a lot of coexisting features — gutter elements, inline blocks, multiple cursors, folding, edit predictions, code intelligence popovers, the minimap. Test your changes with different combinations of them active. Features that work in a normal buffer might need to be disabled in a multi-buffer. + +## 4. Ship it + +Use this as the basis for your GitHub Discussion, issue, or PR description. Good product research gets everyone aligned on goals, the state of the art, and any tradeoffs we might need to consider. diff --git a/docs/src/development/glossary.md b/docs/src/development/glossary.md index 720c20c3bd42074b3e2b4863b879a54001d27e73..ed3b9fdde00a605ec04e3efc25271b57691a45af 100644 --- a/docs/src/development/glossary.md +++ b/docs/src/development/glossary.md @@ -1,5 +1,5 @@ --- -title: Zed Development: Glossary +title: "Zed Development: Glossary" description: "Guide to zed development: glossary for Zed development." 
--- diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 84e57df49fca95adb6c5c4fb5d9aad3b8c771383..c5b4b1079066ba3f7b5e4149778c8e369d03d9cd 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -126,9 +126,11 @@ The following licenses are accepted: - [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) - [BSD 2-Clause](https://opensource.org/license/bsd-2-clause) - [BSD 3-Clause](https://opensource.org/license/bsd-3-clause) +- [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) - [GNU GPLv3](https://www.gnu.org/licenses/gpl-3.0.en.html) - [GNU LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.en.html) - [MIT](https://opensource.org/license/mit) +- [Unlicense](https://unlicense.org) - [zlib](https://opensource.org/license/zlib) This allows us to distribute the resulting binary produced from your extension code to our users. diff --git a/docs/theme/analytics.js b/docs/theme/analytics.js index 6e9df27f30fc6d38ba6fb322f9888fda089bb20c..84d9705bfea223655270f5fb2541426b50a98365 100644 --- a/docs/theme/analytics.js +++ b/docs/theme/analytics.js @@ -55,7 +55,7 @@ document.addEventListener("DOMContentLoaded", () => { consentStore.subscribe((state) => { const hideBanner = state.activeUI === "none" || - (state.activeUI === "banner" && state.mode === "opt-out"); + (state.activeUI === "banner" && state.model === "opt-out"); banner.style.display = hideBanner ? "none" : "block"; if (state.activeUI === "dialog" && previousActiveUI !== "dialog") { diff --git a/nix/build.nix b/nix/build.nix index 68f8a4acdbe83f7e8981659dd0376ec87ef52dfe..d96a7e51ca08d23572b01f0c387d6ef9e4f2dd70 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -52,6 +52,7 @@ withGLES ? false, profile ? "release", + commitSha ? 
null, }: assert withGLES -> stdenv.hostPlatform.isLinux; let @@ -84,7 +85,10 @@ let in rec { pname = "zed-editor"; - version = zedCargoLock.package.version + "-nightly"; + version = + zedCargoLock.package.version + + "-nightly" + + lib.optionalString (commitSha != null) "+${builtins.substring 0 7 commitSha}"; src = builtins.path { path = ../.; filter = mkIncludeFilter ../.; @@ -220,6 +224,7 @@ let }; ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled."; RELEASE_VERSION = version; + ZED_COMMIT_SHA = commitSha; LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { }; PROTOC = "${protobuf}/bin/protoc"; diff --git a/nix/modules/devshells.nix b/nix/modules/devshells.nix index cfc0e48b871e71d87f9f794b35c16fed714ed4a9..ab58d37fff2dcaa64885effa5526db7bd365586b 100644 --- a/nix/modules/devshells.nix +++ b/nix/modules/devshells.nix @@ -22,10 +22,14 @@ # Cargo build timings wrapper script wrappedCargo = pkgs.writeShellApplication { name = "cargo"; - runtimeInputs = [pkgs.nodejs]; - text = '' - NIX_WRAPPER=1 CARGO=${rustToolchain}/bin/cargo ./script/cargo "$@" - ''; + runtimeInputs = [ pkgs.nodejs ]; + text = + let + pathToCargoScript = ./. 
+ "/../../script/cargo"; + in + '' + NIX_WRAPPER=1 CARGO=${rustToolchain}/bin/cargo ${pathToCargoScript} "$@" + ''; }; in { @@ -34,7 +38,7 @@ inputsFrom = [ zed-editor ]; packages = with pkgs; [ - wrappedCargo # must be first, to shadow the `cargo` provided by `rustToolchain` + wrappedCargo # must be first, to shadow the `cargo` provided by `rustToolchain` rustToolchain # cargo, rustc, and rust-toolchain.toml components included cargo-nextest cargo-hakari diff --git a/nix/toolchain.nix b/nix/toolchain.nix index 6ef22e2a6b06882940c553b2a774f4c6f73e9ea0..2e32f00f6b56570ab9863ab0b5975e603b68f5fa 100644 --- a/nix/toolchain.nix +++ b/nix/toolchain.nix @@ -6,4 +6,5 @@ in pkgs.callPackage ./build.nix { crane = inputs.crane.mkLib pkgs; rustToolchain = rustBin.fromRustupToolchainFile ../rust-toolchain.toml; + commitSha = inputs.self.rev or null; } diff --git a/script/linux b/script/linux index 706fa63b037e290cd7991d3adfa42fac0c0cfe25..c7922355342a7776202f81abf9e471cf32854085 100755 --- a/script/linux +++ b/script/linux @@ -60,12 +60,21 @@ if [[ -n $apt ]]; then # Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20 # support for building webrtc-sys (requires -std=c++20, lambdas in # unevaluated contexts from clang 17+, and working std::ranges in the - # stdlib). clang-18 is available in focal-security/universe as an official - # backport, and libstdc++-11-dev from the ubuntu-toolchain-r PPA provides - # headers with working pointer_traits/contiguous_range. + # stdlib). # Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must # use libstdc++ (not libc++) to avoid ABI mismatches at link time. - $maysudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + + # libstdc++-11-dev (headers with working pointer_traits/contiguous_range) + # is only available from the ubuntu-toolchain-r PPA. Add the source list + # and GPG key manually instead of using add-apt-repository, whose HKP + # keyserver lookups (port 11371) frequently time out in CI. 
+ $maysudo "$apt" install -y curl gnupg + codename=$(lsb_release -cs) + echo "deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu $codename main" | \ + $maysudo tee /etc/apt/sources.list.d/ubuntu-toolchain-r-test.list > /dev/null + curl -fsSL 'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x1E9377A2BA9EF27F' | \ + sed -n '/-----BEGIN PGP PUBLIC KEY BLOCK-----/,/-----END PGP PUBLIC KEY BLOCK-----/p' | \ + $maysudo gpg --dearmor -o /etc/apt/trusted.gpg.d/ubuntu-toolchain-r-test.gpg deps+=( clang-18 libstdc++-11-dev ) fi