Detailed changes
@@ -0,0 +1,81 @@
+# Assign Reviewers — Smart team assignment based on diff weight
+#
+# Triggers on PR open and ready_for_review events. Checks out the coordinator
+# repo (zed-industries/codeowner-coordinator) to access the assignment script and rules,
+# then assigns the 1-2 most relevant teams as reviewers.
+#
+# NOTE: This file is stored in the codeowner-coordinator repo but must be deployed to
+# the zed repo at .github/workflows/assign-reviewers.yml. See INSTALL.md.
+#
+# AUTH NOTE: Uses a GitHub App (COORDINATOR_APP_ID + COORDINATOR_APP_PRIVATE_KEY)
+# for all API operations: cloning the private coordinator repo, requesting team
+# reviewers, and setting PR assignees. GITHUB_TOKEN is not used.
+
+name: Assign Reviewers
+
+on:
+ pull_request:
+ types: [opened, ready_for_review]
+
+# GITHUB_TOKEN is not used — all operations use the GitHub App token.
+# Declare minimal permissions so the default token has no write access.
+permissions: {}
+
+# Only run for PRs from within the org (not forks) — fork PRs don't have
+# write access to request team reviewers.
+jobs:
+ assign-reviewers:
+ if: >-
+ github.event.pull_request.head.repo.full_name == github.repository &&
+ github.event.pull_request.draft == false &&
+ contains(fromJSON('["MEMBER", "OWNER"]'), github.event.pull_request.author_association)
+ runs-on: ubuntu-latest
+ steps:
+ - name: Generate app token
+ id: app-token
+ uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
+ with:
+ app-id: ${{ vars.COORDINATOR_APP_ID }}
+ private-key: ${{ secrets.COORDINATOR_APP_PRIVATE_KEY }}
+ repositories: codeowner-coordinator,zed
+
+ - name: Checkout coordinator repo
+ uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
+ with:
+ repository: zed-industries/codeowner-coordinator
+ ref: main
+ path: codeowner-coordinator
+ token: ${{ steps.app-token.outputs.token }}
+ persist-credentials: false
+
+ - name: Setup Python
+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+ with:
+ python-version: "3.11"
+
+ - name: Install dependencies
+ run: pip install pyyaml==6.0.3
+
+ - name: Assign reviewers
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ PR_URL: ${{ github.event.pull_request.html_url }}
+ TARGET_REPO: ${{ github.repository }}
+ run: |
+        set -o pipefail; cd codeowner-coordinator
+ python .github/scripts/assign-reviewers.py \
+ --pr "$PR_URL" \
+ --apply \
+ --rules-file team-membership-rules.yml \
+ --repo "$TARGET_REPO" \
+ --org zed-industries \
+ --min-association member \
+ 2>&1 | tee /tmp/assign-reviewers-output.txt
+
+ - name: Upload output
+ if: always()
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+ with:
+ name: assign-reviewers-output
+ path: /tmp/assign-reviewers-output.txt
+ retention-days: 30
@@ -38,7 +38,6 @@ dependencies = [
"smol",
"task",
"telemetry",
- "tempfile",
"terminal",
"text",
"ui",
@@ -47,7 +46,6 @@ dependencies = [
"util",
"uuid",
"watch",
- "zlog",
]
[[package]]
@@ -81,7 +79,6 @@ dependencies = [
"fs",
"futures 0.3.31",
"gpui",
- "indoc",
"language",
"log",
"pretty_assertions",
@@ -110,7 +107,6 @@ dependencies = [
"language",
"project",
"proto",
- "release_channel",
"smallvec",
"ui",
"util",
@@ -216,11 +212,9 @@ dependencies = [
"task",
"telemetry",
"tempfile",
- "terminal",
"text",
"theme",
"thiserror 2.0.17",
- "tree-sitter-rust",
"ui",
"unindent",
"url",
@@ -228,7 +222,6 @@ dependencies = [
"uuid",
"watch",
"web_search",
- "worktree",
"zed_env_vars",
"zlog",
"zstd",
@@ -287,7 +280,6 @@ dependencies = [
"gpui_tokio",
"http_client",
"indoc",
- "language",
"language_model",
"libc",
"log",
@@ -321,7 +313,6 @@ dependencies = [
"gpui",
"language_model",
"log",
- "paths",
"project",
"regex",
"schemars",
@@ -354,7 +345,6 @@ dependencies = [
"buffer_diff",
"chrono",
"client",
- "clock",
"cloud_api_types",
"cloud_llm_client",
"collections",
@@ -400,9 +390,7 @@ dependencies = [
"prompt_store",
"proto",
"rand 0.9.2",
- "recent_projects",
"release_channel",
- "remote_connection",
"reqwest_client",
"rope",
"rules_library",
@@ -417,14 +405,12 @@ dependencies = [
"streaming_diff",
"task",
"telemetry",
- "tempfile",
"terminal",
"terminal_view",
"text",
"theme",
"time",
"time_format",
- "title_bar",
"tree-sitter-md",
"ui",
"ui_input",
@@ -673,17 +659,13 @@ dependencies = [
"anyhow",
"chrono",
"futures 0.3.31",
- "gpui",
- "gpui_tokio",
"http_client",
- "reqwest_client",
"schemars",
"serde",
"serde_json",
"settings",
"strum 0.27.2",
"thiserror 2.0.17",
- "tokio",
]
[[package]]
@@ -895,7 +877,6 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
- "indoc",
"itertools 0.14.0",
"language",
"language_model",
@@ -2322,7 +2303,6 @@ dependencies = [
"pretty_assertions",
"rand 0.9.2",
"rope",
- "serde_json",
"settings",
"sum_tree",
"text",
@@ -2506,7 +2486,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"gpui_tokio",
- "http_client",
"language",
"livekit_client",
"log",
@@ -3101,8 +3080,6 @@ name = "cloud_llm_client"
version = "0.1.0"
dependencies = [
"anyhow",
- "indoc",
- "pretty_assertions",
"serde",
"serde_json",
"strum 0.27.2",
@@ -3227,6 +3204,7 @@ dependencies = [
"serde",
"serde_json",
"text",
+ "zeta_prompt",
]
[[package]]
@@ -3234,15 +3212,11 @@ name = "collab"
version = "0.44.0"
dependencies = [
"agent",
- "agent-client-protocol",
- "agent_settings",
- "agent_ui",
"anyhow",
"assistant_slash_command",
"assistant_text_thread",
"async-trait",
"async-tungstenite",
- "audio",
"aws-config",
"aws-sdk-kinesis",
"aws-sdk-s3",
@@ -3258,10 +3232,8 @@ dependencies = [
"collab_ui",
"collections",
"command_palette_hooks",
- "context_server",
"ctor",
"dap",
- "dap-types",
"dap_adapters",
"dashmap",
"debugger_ui",
@@ -3278,7 +3250,6 @@ dependencies = [
"gpui_tokio",
"hex",
"http_client",
- "hyper 0.14.32",
"indoc",
"language",
"language_model",
@@ -3320,7 +3291,6 @@ dependencies = [
"text",
"theme",
"time",
- "title_bar",
"tokio",
"toml 0.8.23",
"tower 0.4.13",
@@ -3351,12 +3321,10 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
- "http_client",
"log",
"menu",
"notifications",
"picker",
- "pretty_assertions",
"project",
"release_channel",
"rpc",
@@ -3369,7 +3337,6 @@ dependencies = [
"time",
"time_format",
"title_bar",
- "tree-sitter-md",
"ui",
"util",
"workspace",
@@ -3423,10 +3390,8 @@ dependencies = [
"client",
"collections",
"command_palette_hooks",
- "ctor",
"db",
"editor",
- "env_logger 0.11.8",
"fuzzy",
"go_to_line",
"gpui",
@@ -3437,7 +3402,6 @@ dependencies = [
"postage",
"project",
"serde",
- "serde_json",
"settings",
"telemetry",
"theme",
@@ -3660,18 +3624,14 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-std",
- "client",
- "clock",
"collections",
"command_palette_hooks",
"copilot_chat",
- "ctor",
"edit_prediction_types",
"editor",
"fs",
"futures 0.3.31",
"gpui",
- "http_client",
"icons",
"indoc",
"language",
@@ -4509,8 +4469,6 @@ dependencies = [
"smol",
"task",
"telemetry",
- "tree-sitter",
- "tree-sitter-go",
"util",
"zlog",
]
@@ -4881,7 +4839,6 @@ dependencies = [
"serde_json",
"settings",
"smol",
- "theme",
"ui",
"util",
"workspace",
@@ -4893,7 +4850,6 @@ name = "diagnostics"
version = "0.1.0"
dependencies = [
"anyhow",
- "client",
"collections",
"component",
"ctor",
@@ -5286,7 +5242,6 @@ dependencies = [
"thiserror 2.0.17",
"time",
"toml 0.8.23",
- "tree-sitter-rust",
"ui",
"util",
"uuid",
@@ -5384,7 +5339,6 @@ dependencies = [
"tree-sitter",
"util",
"zeta_prompt",
- "zlog",
]
[[package]]
@@ -5405,7 +5359,6 @@ dependencies = [
"anyhow",
"buffer_diff",
"client",
- "clock",
"cloud_llm_client",
"codestral",
"collections",
@@ -5422,18 +5375,12 @@ dependencies = [
"gpui",
"indoc",
"language",
- "language_model",
- "lsp",
"markdown",
"menu",
"multi_buffer",
"paths",
- "pretty_assertions",
"project",
"regex",
- "release_channel",
- "semver",
- "serde_json",
"settings",
"telemetry",
"text",
@@ -5444,7 +5391,6 @@ dependencies = [
"workspace",
"zed_actions",
"zeta_prompt",
- "zlog",
]
[[package]]
@@ -5473,7 +5419,6 @@ dependencies = [
"fuzzy",
"git",
"gpui",
- "http_client",
"indoc",
"itertools 0.14.0",
"language",
@@ -5506,7 +5451,6 @@ dependencies = [
"sum_tree",
"task",
"telemetry",
- "tempfile",
"text",
"theme",
"time",
@@ -6084,7 +6028,9 @@ dependencies = [
"serde",
"serde_json",
"serde_json_lenient",
+ "settings_content",
"snippet_provider",
+ "task",
"theme",
"tokio",
"toml 0.8.23",
@@ -6121,7 +6067,6 @@ dependencies = [
"parking_lot",
"paths",
"project",
- "rand 0.9.2",
"release_channel",
"remote",
"reqwest_client",
@@ -6277,7 +6222,6 @@ dependencies = [
name = "feedback"
version = "0.1.0"
dependencies = [
- "editor",
"gpui",
"system_specs",
"urlencoding",
@@ -6308,7 +6252,6 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
- "language",
"menu",
"open_path_prompt",
"picker",
@@ -7294,7 +7237,6 @@ dependencies = [
"text",
"thiserror 2.0.17",
"time",
- "unindent",
"url",
"urlencoding",
"util",
@@ -7331,7 +7273,6 @@ dependencies = [
"menu",
"project",
"rand 0.9.2",
- "recent_projects",
"serde_json",
"settings",
"smallvec",
@@ -7382,7 +7323,6 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"git",
- "git_hosting_providers",
"gpui",
"indoc",
"itertools 0.14.0",
@@ -7551,8 +7491,6 @@ dependencies = [
"settings",
"text",
"theme",
- "tree-sitter-rust",
- "tree-sitter-typescript",
"ui",
"util",
"workspace",
@@ -7683,7 +7621,6 @@ dependencies = [
"pin-project",
"pollster 0.4.0",
"postage",
- "pretty_assertions",
"profiling",
"proptest",
"rand 0.9.2",
@@ -9495,7 +9432,6 @@ dependencies = [
"copilot_ui",
"credentials_provider",
"deepseek",
- "editor",
"extension",
"extension_host",
"fs",
@@ -9515,7 +9451,6 @@ dependencies = [
"open_router",
"partial-json-fixer",
"pretty_assertions",
- "project",
"release_channel",
"schemars",
"semver",
@@ -9643,7 +9578,6 @@ dependencies = [
"snippet",
"task",
"terminal",
- "text",
"theme",
"toml 0.8.23",
"tree-sitter",
@@ -9667,7 +9601,6 @@ dependencies = [
"unindent",
"url",
"util",
- "workspace",
]
[[package]]
@@ -10021,7 +9954,6 @@ dependencies = [
"serde_json",
"serde_urlencoded",
"settings",
- "sha2",
"simplelog",
"smallvec",
"ui",
@@ -10766,7 +10698,6 @@ dependencies = [
"log",
"parking_lot",
"pretty_assertions",
- "project",
"rand 0.9.2",
"rope",
"serde",
@@ -11044,12 +10975,10 @@ dependencies = [
"anyhow",
"channel",
"client",
- "collections",
"component",
"db",
"gpui",
"rpc",
- "settings",
"sum_tree",
"time",
"ui",
@@ -11800,8 +11729,6 @@ dependencies = [
"settings",
"smol",
"theme",
- "tree-sitter-rust",
- "tree-sitter-typescript",
"ui",
"util",
"workspace",
@@ -13164,8 +13091,6 @@ dependencies = [
"collections",
"context_server",
"dap",
- "dap_adapters",
- "db",
"encoding_rs",
"extension",
"fancy-regex",
@@ -13274,7 +13199,6 @@ dependencies = [
"pretty_assertions",
"project",
"rayon",
- "remote_connection",
"schemars",
"search",
"serde",
@@ -13508,11 +13432,9 @@ name = "proto"
version = "0.1.0"
dependencies = [
"anyhow",
- "collections",
"prost 0.9.0",
"prost-build 0.9.0",
"serde",
- "typed-path",
]
[[package]]
@@ -14065,7 +13987,6 @@ dependencies = [
"anyhow",
"askpass",
"chrono",
- "dap",
"db",
"dev_container",
"editor",
@@ -14314,7 +14235,6 @@ dependencies = [
"collections",
"crash-handler",
"crashes",
- "dap",
"dap_adapters",
"debug_adapter_extension",
"editor",
@@ -14346,7 +14266,6 @@ dependencies = [
"paths",
"pretty_assertions",
"project",
- "prompt_store",
"proto",
"rayon",
"release_channel",
@@ -14370,7 +14289,6 @@ dependencies = [
"uuid",
"watch",
"windows 0.61.3",
- "workspace",
"worktree",
"zlog",
]
@@ -14404,7 +14322,6 @@ dependencies = [
"collections",
"command_palette_hooks",
"editor",
- "env_logger 0.11.8",
"feature_flags",
"file_icons",
"futures 0.3.31",
@@ -14532,7 +14449,6 @@ dependencies = [
"anyhow",
"bytes 1.11.1",
"futures 0.3.31",
- "gpui",
"gpui_util",
"http_client",
"http_client_tls",
@@ -14577,20 +14493,6 @@ dependencies = [
"bytemuck",
]
-[[package]]
-name = "rich_text"
-version = "0.1.0"
-dependencies = [
- "futures 0.3.31",
- "gpui",
- "language",
- "linkify",
- "pulldown-cmark 0.13.0",
- "theme",
- "ui",
- "util",
-]
-
[[package]]
name = "ring"
version = "0.17.14"
@@ -15420,7 +15322,6 @@ dependencies = [
"any_vec",
"anyhow",
"bitflags 2.10.0",
- "client",
"collections",
"editor",
"fs",
@@ -15772,11 +15673,9 @@ dependencies = [
name = "settings_profile_selector"
version = "0.1.0"
dependencies = [
- "client",
"editor",
"fuzzy",
"gpui",
- "language",
"menu",
"picker",
"project",
@@ -15795,9 +15694,7 @@ dependencies = [
"agent",
"agent_settings",
"anyhow",
- "assets",
"audio",
- "client",
"codestral",
"component",
"copilot",
@@ -15815,13 +15712,11 @@ dependencies = [
"language",
"log",
"menu",
- "node_runtime",
"paths",
"picker",
"platform_title_bar",
"pretty_assertions",
"project",
- "recent_projects",
"regex",
"release_channel",
"rodio",
@@ -15829,7 +15724,6 @@ dependencies = [
"search",
"serde",
"serde_json",
- "session",
"settings",
"shell_command_parser",
"strum 0.27.2",
@@ -15840,7 +15734,6 @@ dependencies = [
"util",
"workspace",
"zed_actions",
- "zlog",
]
[[package]]
@@ -15961,6 +15854,7 @@ dependencies = [
"language_model",
"menu",
"project",
+ "recent_projects",
"serde_json",
"settings",
"theme",
@@ -17233,13 +17127,11 @@ dependencies = [
name = "tab_switcher"
version = "0.1.0"
dependencies = [
- "anyhow",
"collections",
"ctor",
"editor",
"fuzzy",
"gpui",
- "language",
"menu",
"picker",
"project",
@@ -17429,7 +17321,6 @@ dependencies = [
"sandbox",
"schemars",
"serde",
- "serde_json",
"settings",
"smol",
"sysinfo 0.37.2",
@@ -17462,7 +17353,6 @@ dependencies = [
"assistant_slash_command",
"async-recursion",
"breadcrumbs",
- "client",
"collections",
"db",
"dirs 4.0.0",
@@ -17475,7 +17365,6 @@ dependencies = [
"menu",
"pretty_assertions",
"project",
- "rand 0.9.2",
"regex",
"schemars",
"serde",
@@ -17500,11 +17389,9 @@ dependencies = [
"collections",
"ctor",
"gpui",
- "http_client",
"log",
"parking_lot",
"postage",
- "proptest",
"rand 0.9.2",
"regex",
"rope",
@@ -17804,15 +17691,12 @@ dependencies = [
"chrono",
"client",
"cloud_api_types",
- "collections",
"db",
"feature_flags",
"git_ui",
"gpui",
- "http_client",
"notifications",
"platform_title_bar",
- "pretty_assertions",
"project",
"recent_projects",
"release_channel",
@@ -17826,7 +17710,6 @@ dependencies = [
"story",
"telemetry",
"theme",
- "tree-sitter-md",
"ui",
"util",
"windows 0.61.3",
@@ -18656,12 +18539,6 @@ dependencies = [
"utf-8",
]
-[[package]]
-name = "typed-path"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c462d18470a2857aa657d338af5fa67170bb48bcc80a296710ce3b0802a32566"
-
[[package]]
name = "typeid"
version = "1.0.3"
@@ -18988,7 +18865,6 @@ dependencies = [
"git2",
"globset",
"gpui_util",
- "indoc",
"itertools 0.14.0",
"libc",
"log",
@@ -19133,7 +19009,6 @@ name = "vim"
version = "0.1.0"
dependencies = [
"anyhow",
- "assets",
"async-compat",
"async-trait",
"collections",
@@ -19173,7 +19048,6 @@ dependencies = [
"task",
"text",
"theme",
- "title_bar",
"tokio",
"ui",
"util",
@@ -19881,7 +19755,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"parking_lot",
- "rand 0.9.2",
"zlog",
]
@@ -21473,7 +21346,6 @@ dependencies = [
"clock",
"collections",
"component",
- "dap",
"db",
"feature_flags",
"fs",
@@ -21526,9 +21398,7 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"git",
- "git2",
"gpui",
- "http_client",
"ignore",
"language",
"log",
@@ -21962,7 +21832,6 @@ dependencies = [
"copilot_ui",
"crashes",
"csv_preview",
- "dap",
"dap_adapters",
"db",
"debug_adapter_extension",
@@ -22072,8 +21941,6 @@ dependencies = [
"title_bar",
"toolchain_selector",
"tracing",
- "tree-sitter-md",
- "tree-sitter-rust",
"ui",
"ui_prompt",
"url",
@@ -159,7 +159,6 @@ members = [
"crates/remote_server",
"crates/repl",
"crates/reqwest_client",
- "crates/rich_text",
"crates/rope",
"crates/rpc",
"crates/rules_library",
@@ -1 +1,7 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2.857 6.857 4.286 5.43 2.857 4M2.857 12l1.429-1.429-1.429-1.428M6.857 4.571h6.286M6.857 8h6.286M6.857 11.428h6.286"/></svg>
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M2 4H7.33333" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M2 8H7.33333" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M2 12H7.33333" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10 4L12 6L14 4" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10 12L12 10L14 12" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -258,6 +258,7 @@
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-alt-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
+ "ctrl-alt-shift-t": "agent::ToggleStartThreadInSelector",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl->": "agent::AddSelectionToThread",
"ctrl-shift-e": "project_panel::ToggleFocus",
@@ -818,7 +819,7 @@
},
},
{
- "context": "!ContextEditor > Editor && mode == full",
+ "context": "!ContextEditor && !AcpThread > Editor && mode == full",
"bindings": {
"alt-enter": "editor::OpenExcerpts",
"shift-enter": "editor::ExpandExcerpts",
@@ -982,6 +983,7 @@
"ctrl-shift-enter": "git::Amend",
"ctrl-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll",
+ "ctrl-k ctrl-r": "git::RestoreAndNext",
},
},
{
@@ -297,6 +297,7 @@
"cmd-shift-j": "agent::ToggleNavigationMenu",
"cmd-alt-m": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
+ "cmd-alt-shift-t": "agent::ToggleStartThreadInSelector",
"shift-alt-escape": "agent::ExpandMessageEditor",
"cmd->": "agent::AddSelectionToThread",
"cmd-shift-e": "project_panel::ToggleFocus",
@@ -881,7 +882,7 @@
},
},
{
- "context": "!ContextEditor > Editor && mode == full",
+ "context": "!ContextEditor && !AcpThread > Editor && mode == full",
"use_key_equivalents": true,
"bindings": {
"alt-enter": "editor::OpenExcerpts",
@@ -1033,6 +1034,7 @@
"cmd-shift-enter": "git::Amend",
"cmd-ctrl-y": "git::StageAll",
"cmd-ctrl-shift-y": "git::UnstageAll",
+ "cmd-alt-z": "git::RestoreAndNext",
},
},
{
@@ -259,6 +259,7 @@
"shift-alt-j": "agent::ToggleNavigationMenu",
"shift-alt-i": "agent::ToggleOptionsMenu",
"ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
+ "ctrl-shift-alt-t": "agent::ToggleStartThreadInSelector",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl-shift-.": "agent::AddSelectionToThread",
"ctrl-shift-e": "project_panel::ToggleFocus",
@@ -820,7 +821,7 @@
},
},
{
- "context": "!ContextEditor > Editor && mode == full",
+ "context": "!ContextEditor && !AcpThread > Editor && mode == full",
"use_key_equivalents": true,
"bindings": {
"alt-enter": "editor::OpenExcerpts",
@@ -983,6 +984,7 @@
"ctrl-shift-enter": "git::Amend",
"ctrl-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll",
+ "ctrl-k ctrl-r": "git::RestoreAndNext",
},
},
{
@@ -61,7 +61,5 @@ indoc.workspace = true
parking_lot.workspace = true
project = { workspace = true, "features" = ["test-support"] }
rand.workspace = true
-tempfile.workspace = true
util.workspace = true
settings.workspace = true
-zlog.workspace = true
@@ -37,7 +37,7 @@ collections = { workspace = true, features = ["test-support"] }
clock = { workspace = true, features = ["test-support"] }
ctor.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-indoc.workspace = true
+
language = { workspace = true, features = ["test-support"] }
log.workspace = true
pretty_assertions.workspace = true
@@ -30,4 +30,4 @@ workspace.workspace = true
[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }
-release_channel.workspace = true
+
@@ -100,9 +100,9 @@ rand.workspace = true
reqwest_client.workspace = true
settings = { workspace = true, "features" = ["test-support"] }
tempfile.workspace = true
-terminal = { workspace = true, "features" = ["test-support"] }
+
theme = { workspace = true, "features" = ["test-support"] }
-tree-sitter-rust.workspace = true
+
unindent = { workspace = true }
-worktree = { workspace = true, "features" = ["test-support"] }
+
zlog.workspace = true
@@ -2,6 +2,7 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase};
use agent_client_protocol as acp;
use anyhow::{Result, anyhow};
use gpui::{App, Context, Entity, Global, Task, prelude::*};
+use std::collections::HashMap;
use util::path_list::PathList;
struct GlobalThreadStore(Entity<ThreadStore>);
@@ -10,6 +11,7 @@ impl Global for GlobalThreadStore {}
pub struct ThreadStore {
threads: Vec<DbThreadMetadata>,
+ threads_by_paths: HashMap<PathList, Vec<usize>>,
}
impl ThreadStore {
@@ -29,6 +31,7 @@ impl ThreadStore {
pub fn new(cx: &mut Context<Self>) -> Self {
let this = Self {
threads: Vec::new(),
+ threads_by_paths: HashMap::default(),
};
this.reload(cx);
this
@@ -91,14 +94,21 @@ impl ThreadStore {
let database_connection = ThreadsDatabase::connect(cx);
cx.spawn(async move |this, cx| {
let database = database_connection.await.map_err(|err| anyhow!(err))?;
- let threads = database
- .list_threads()
- .await?
- .into_iter()
- .filter(|thread| thread.parent_session_id.is_none())
- .collect::<Vec<_>>();
+ let all_threads = database.list_threads().await?;
this.update(cx, |this, cx| {
- this.threads = threads;
+ this.threads.clear();
+ this.threads_by_paths.clear();
+ for thread in all_threads {
+ if thread.parent_session_id.is_some() {
+ continue;
+ }
+ let index = this.threads.len();
+ this.threads_by_paths
+ .entry(thread.folder_paths.clone())
+ .or_default()
+ .push(index);
+ this.threads.push(thread);
+ }
cx.notify();
})
})
@@ -114,10 +124,12 @@ impl ThreadStore {
}
/// Returns threads whose folder_paths match the given paths exactly.
+ /// Uses a cached index for O(1) lookup per path list.
pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator<Item = &DbThreadMetadata> {
- self.threads
- .iter()
- .filter(move |thread| &thread.folder_paths == paths)
+ self.threads_by_paths
+ .get(paths)
+ .into_iter()
+ .flat_map(|indices| indices.iter().map(|&index| &self.threads[index]))
}
}
@@ -61,7 +61,7 @@ nix.workspace = true
client = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
fs.workspace = true
-language.workspace = true
+
indoc.workspace = true
acp_thread = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
@@ -84,19 +84,12 @@ impl AgentServer for CustomAgentServer {
let config_id = config_id.to_string();
let value_id = value_id.to_string();
- update_settings_file(fs, cx, move |settings, _| {
+ update_settings_file(fs, cx, move |settings, cx| {
let settings = settings
.agent_servers
.get_or_insert_default()
.entry(name.to_string())
- .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
- default_model: None,
- default_mode: None,
- env: Default::default(),
- favorite_models: Vec::new(),
- default_config_options: Default::default(),
- favorite_config_option_values: Default::default(),
- });
+ .or_insert_with(|| default_settings_for_agent(&name, cx));
match settings {
settings::CustomAgentServerSettings::Custom {
@@ -132,19 +125,12 @@ impl AgentServer for CustomAgentServer {
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
let name = self.name();
- update_settings_file(fs, cx, move |settings, _| {
+ update_settings_file(fs, cx, move |settings, cx| {
let settings = settings
.agent_servers
.get_or_insert_default()
.entry(name.to_string())
- .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
- default_model: None,
- default_mode: None,
- env: Default::default(),
- favorite_models: Vec::new(),
- default_config_options: Default::default(),
- favorite_config_option_values: Default::default(),
- });
+ .or_insert_with(|| default_settings_for_agent(&name, cx));
match settings {
settings::CustomAgentServerSettings::Custom { default_mode, .. }
@@ -171,19 +157,12 @@ impl AgentServer for CustomAgentServer {
fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
let name = self.name();
- update_settings_file(fs, cx, move |settings, _| {
+ update_settings_file(fs, cx, move |settings, cx| {
let settings = settings
.agent_servers
.get_or_insert_default()
.entry(name.to_string())
- .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
- default_model: None,
- default_mode: None,
- env: Default::default(),
- favorite_models: Vec::new(),
- default_config_options: Default::default(),
- favorite_config_option_values: Default::default(),
- });
+ .or_insert_with(|| default_settings_for_agent(&name, cx));
match settings {
settings::CustomAgentServerSettings::Custom { default_model, .. }
@@ -222,19 +201,12 @@ impl AgentServer for CustomAgentServer {
cx: &App,
) {
let name = self.name();
- update_settings_file(fs, cx, move |settings, _| {
+ update_settings_file(fs, cx, move |settings, cx| {
let settings = settings
.agent_servers
.get_or_insert_default()
.entry(name.to_string())
- .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
- default_model: None,
- default_mode: None,
- env: Default::default(),
- favorite_models: Vec::new(),
- default_config_options: Default::default(),
- favorite_config_option_values: Default::default(),
- });
+ .or_insert_with(|| default_settings_for_agent(&name, cx));
let favorite_models = match settings {
settings::CustomAgentServerSettings::Custom {
@@ -282,19 +254,12 @@ impl AgentServer for CustomAgentServer {
let name = self.name();
let config_id = config_id.to_string();
let value_id = value_id.map(|s| s.to_string());
- update_settings_file(fs, cx, move |settings, _| {
+ update_settings_file(fs, cx, move |settings, cx| {
let settings = settings
.agent_servers
.get_or_insert_default()
.entry(name.to_string())
- .or_insert_with(|| settings::CustomAgentServerSettings::Extension {
- default_model: None,
- default_mode: None,
- env: Default::default(),
- favorite_models: Vec::new(),
- default_config_options: Default::default(),
- favorite_config_option_values: Default::default(),
- });
+ .or_insert_with(|| default_settings_for_agent(&name, cx));
match settings {
settings::CustomAgentServerSettings::Custom {
@@ -332,45 +297,27 @@ impl AgentServer for CustomAgentServer {
.unwrap_or_else(|| name.clone());
let default_mode = self.default_mode(cx);
let default_model = self.default_model(cx);
- let is_previous_built_in =
- matches!(name.as_ref(), CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME);
- let (default_config_options, is_registry_agent) =
- cx.read_global(|settings: &SettingsStore, _| {
- let agent_settings = settings
- .get::<AllAgentServersSettings>(None)
- .get(self.name().as_ref());
-
- let is_registry = agent_settings
- .map(|s| {
- matches!(
- s,
- project::agent_server_store::CustomAgentServerSettings::Registry { .. }
- )
- })
- .unwrap_or(false);
-
- let config_options = agent_settings
- .map(|s| match s {
- project::agent_server_store::CustomAgentServerSettings::Custom {
- default_config_options,
- ..
- }
- | project::agent_server_store::CustomAgentServerSettings::Extension {
- default_config_options,
- ..
- }
- | project::agent_server_store::CustomAgentServerSettings::Registry {
- default_config_options,
- ..
- } => default_config_options.clone(),
- })
- .unwrap_or_default();
-
- (config_options, is_registry)
- });
-
- // Intermediate step to allow for previous built-ins to also be triggered if they aren't in settings yet.
- let is_registry_agent = is_registry_agent || is_previous_built_in;
+ let is_registry_agent = is_registry_agent(&name, cx);
+ let default_config_options = cx.read_global(|settings: &SettingsStore, _| {
+ settings
+ .get::<AllAgentServersSettings>(None)
+ .get(self.name().as_ref())
+ .map(|s| match s {
+ project::agent_server_store::CustomAgentServerSettings::Custom {
+ default_config_options,
+ ..
+ }
+ | project::agent_server_store::CustomAgentServerSettings::Extension {
+ default_config_options,
+ ..
+ }
+ | project::agent_server_store::CustomAgentServerSettings::Registry {
+ default_config_options,
+ ..
+ } => default_config_options.clone(),
+ })
+ .unwrap_or_default()
+ });
if is_registry_agent {
if let Some(registry_store) = project::AgentRegistryStore::try_global(cx) {
@@ -458,3 +405,222 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task<Result<String>> {
)
})
}
+
+fn is_registry_agent(name: &str, cx: &App) -> bool {
+ let is_previous_built_in = matches!(name, CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME);
+ let is_in_registry = project::AgentRegistryStore::try_global(cx)
+ .map(|store| store.read(cx).agent(name).is_some())
+ .unwrap_or(false);
+ let is_settings_registry = cx.read_global(|settings: &SettingsStore, _| {
+ settings
+ .get::<AllAgentServersSettings>(None)
+ .get(name)
+ .is_some_and(|s| {
+ matches!(
+ s,
+ project::agent_server_store::CustomAgentServerSettings::Registry { .. }
+ )
+ })
+ });
+ is_previous_built_in || is_in_registry || is_settings_registry
+}
+
+fn default_settings_for_agent(name: &str, cx: &App) -> settings::CustomAgentServerSettings {
+ if is_registry_agent(name, cx) {
+ settings::CustomAgentServerSettings::Registry {
+ default_model: None,
+ default_mode: None,
+ env: Default::default(),
+ favorite_models: Vec::new(),
+ default_config_options: Default::default(),
+ favorite_config_option_values: Default::default(),
+ }
+ } else {
+ settings::CustomAgentServerSettings::Extension {
+ default_model: None,
+ default_mode: None,
+ env: Default::default(),
+ favorite_models: Vec::new(),
+ default_config_options: Default::default(),
+ favorite_config_option_values: Default::default(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use collections::HashMap;
+ use gpui::TestAppContext;
+ use project::agent_registry_store::{
+ AgentRegistryStore, RegistryAgent, RegistryAgentMetadata, RegistryNpxAgent,
+ };
+ use settings::Settings as _;
+
+ fn init_test(cx: &mut TestAppContext) {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ });
+ }
+
+ fn init_registry_with_agents(cx: &mut TestAppContext, agent_ids: &[&str]) {
+ let agents: Vec<RegistryAgent> = agent_ids
+ .iter()
+ .map(|id| {
+ let id = SharedString::from(id.to_string());
+ RegistryAgent::Npx(RegistryNpxAgent {
+ metadata: RegistryAgentMetadata {
+ id: id.clone(),
+ name: id.clone(),
+ description: SharedString::from(""),
+ version: SharedString::from("1.0.0"),
+ repository: None,
+ icon_path: None,
+ },
+ package: id,
+ args: Vec::new(),
+ env: HashMap::default(),
+ })
+ })
+ .collect();
+ cx.update(|cx| {
+ AgentRegistryStore::init_test_global(cx, agents);
+ });
+ }
+
+ fn set_agent_server_settings(
+ cx: &mut TestAppContext,
+ entries: Vec<(&str, settings::CustomAgentServerSettings)>,
+ ) {
+ cx.update(|cx| {
+ AllAgentServersSettings::override_global(
+ project::agent_server_store::AllAgentServersSettings(
+ entries
+ .into_iter()
+ .map(|(name, settings)| (name.to_string(), settings.into()))
+ .collect(),
+ ),
+ cx,
+ );
+ });
+ }
+
+ #[gpui::test]
+ fn test_previous_builtins_are_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ assert!(is_registry_agent(CLAUDE_AGENT_NAME, cx));
+ assert!(is_registry_agent(CODEX_NAME, cx));
+ assert!(is_registry_agent(GEMINI_NAME, cx));
+ });
+ }
+
+ #[gpui::test]
+ fn test_unknown_agent_is_not_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ assert!(!is_registry_agent("my-custom-agent", cx));
+ });
+ }
+
+ #[gpui::test]
+ fn test_agent_in_registry_store_is_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ init_registry_with_agents(cx, &["some-new-registry-agent"]);
+ cx.update(|cx| {
+ assert!(is_registry_agent("some-new-registry-agent", cx));
+ assert!(!is_registry_agent("not-in-registry", cx));
+ });
+ }
+
+ #[gpui::test]
+ fn test_agent_with_registry_settings_type_is_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ set_agent_server_settings(
+ cx,
+ vec![(
+ "agent-from-settings",
+ settings::CustomAgentServerSettings::Registry {
+ env: HashMap::default(),
+ default_mode: None,
+ default_model: None,
+ favorite_models: Vec::new(),
+ default_config_options: HashMap::default(),
+ favorite_config_option_values: HashMap::default(),
+ },
+ )],
+ );
+ cx.update(|cx| {
+ assert!(is_registry_agent("agent-from-settings", cx));
+ });
+ }
+
+ #[gpui::test]
+ fn test_agent_with_extension_settings_type_is_not_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ set_agent_server_settings(
+ cx,
+ vec![(
+ "my-extension-agent",
+ settings::CustomAgentServerSettings::Extension {
+ env: HashMap::default(),
+ default_mode: None,
+ default_model: None,
+ favorite_models: Vec::new(),
+ default_config_options: HashMap::default(),
+ favorite_config_option_values: HashMap::default(),
+ },
+ )],
+ );
+ cx.update(|cx| {
+ assert!(!is_registry_agent("my-extension-agent", cx));
+ });
+ }
+
+ #[gpui::test]
+ fn test_default_settings_for_builtin_agent(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ assert!(matches!(
+ default_settings_for_agent(CODEX_NAME, cx),
+ settings::CustomAgentServerSettings::Registry { .. }
+ ));
+ assert!(matches!(
+ default_settings_for_agent(CLAUDE_AGENT_NAME, cx),
+ settings::CustomAgentServerSettings::Registry { .. }
+ ));
+ assert!(matches!(
+ default_settings_for_agent(GEMINI_NAME, cx),
+ settings::CustomAgentServerSettings::Registry { .. }
+ ));
+ });
+ }
+
+ #[gpui::test]
+ fn test_default_settings_for_extension_agent(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ assert!(matches!(
+ default_settings_for_agent("some-extension-agent", cx),
+ settings::CustomAgentServerSettings::Extension { .. }
+ ));
+ });
+ }
+
+ #[gpui::test]
+ fn test_default_settings_for_agent_in_registry(cx: &mut TestAppContext) {
+ init_test(cx);
+ init_registry_with_agents(cx, &["new-registry-agent"]);
+ cx.update(|cx| {
+ assert!(matches!(
+ default_settings_for_agent("new-registry-agent", cx),
+ settings::CustomAgentServerSettings::Registry { .. }
+ ));
+ assert!(matches!(
+ default_settings_for_agent("not-in-registry", cx),
+ settings::CustomAgentServerSettings::Extension { .. }
+ ));
+ });
+ }
+}
@@ -30,7 +30,7 @@ util.workspace = true
[dev-dependencies]
fs.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-paths.workspace = true
+
serde_json_lenient.workspace = true
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
@@ -121,7 +121,7 @@ acp_thread = { workspace = true, features = ["test-support"] }
agent = { workspace = true, features = ["test-support"] }
assistant_text_thread = { workspace = true, features = ["test-support"] }
buffer_diff = { workspace = true, features = ["test-support"] }
-clock.workspace = true
+
db = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
eval_utils.workspace = true
@@ -132,11 +132,9 @@ languages = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
-recent_projects = { workspace = true, features = ["test-support"] }
-remote_connection = { workspace = true, features = ["test-support"] }
-title_bar = { workspace = true, features = ["test-support"] }
+
semver.workspace = true
reqwest_client.workspace = true
-tempfile.workspace = true
+
tree-sitter-md.workspace = true
unindent.workspace = true
@@ -831,6 +831,7 @@ fn render_diff_hunk_controls(
&snapshot,
position,
Direction::Next,
+ true,
window,
cx,
);
@@ -866,6 +867,7 @@ fn render_diff_hunk_controls(
&snapshot,
point,
Direction::Prev,
+ true,
window,
cx,
);
@@ -31,7 +31,7 @@ use crate::{
AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow,
InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown,
OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn,
- ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
+ ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, ToggleStartThreadInSelector,
agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
connection_view::{AcpThreadViewEvent, ThreadView},
slash_command::SlashCommandCompletionProvider,
@@ -255,6 +255,18 @@ pub fn init(cx: &mut App) {
});
}
})
+ .register_action(|workspace, _: &ToggleStartThreadInSelector, window, cx| {
+ if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
+ workspace.focus_panel::<AgentPanel>(window, cx);
+ panel.update(cx, |panel, cx| {
+ panel.toggle_start_thread_in_selector(
+ &ToggleStartThreadInSelector,
+ window,
+ cx,
+ );
+ });
+ }
+ })
.register_action(|workspace, _: &OpenAcpOnboardingModal, window, cx| {
AcpOnboardingModal::toggle(workspace, window, cx)
})
@@ -388,7 +400,7 @@ enum WhichFontSize {
}
// TODO unify this with ExternalAgent
-#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, PartialEq, Serialize)]
pub enum AgentType {
#[default]
NativeAgent,
@@ -398,6 +410,63 @@ pub enum AgentType {
},
}
+// Custom impl handles legacy variant names from before the built-in agents were moved to
+// the registry: "ClaudeAgent" -> Custom { name: "claude-acp" }, "Codex" -> Custom { name:
+// "codex-acp" }, "Gemini" -> Custom { name: "gemini" }.
+// Can be removed at some point in the future and go back to #[derive(Deserialize)].
+impl<'de> Deserialize<'de> for AgentType {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ let value = serde_json::Value::deserialize(deserializer)?;
+
+ if let Some(s) = value.as_str() {
+ return match s {
+ "NativeAgent" => Ok(Self::NativeAgent),
+ "TextThread" => Ok(Self::TextThread),
+ "ClaudeAgent" | "ClaudeCode" => Ok(Self::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ }),
+ "Codex" => Ok(Self::Custom {
+ name: CODEX_NAME.into(),
+ }),
+ "Gemini" => Ok(Self::Custom {
+ name: GEMINI_NAME.into(),
+ }),
+ other => Err(serde::de::Error::unknown_variant(
+ other,
+ &[
+ "NativeAgent",
+ "TextThread",
+ "Custom",
+ "ClaudeAgent",
+ "ClaudeCode",
+ "Codex",
+ "Gemini",
+ ],
+ )),
+ };
+ }
+
+ if let Some(obj) = value.as_object() {
+ if let Some(inner) = obj.get("Custom") {
+ #[derive(Deserialize)]
+ struct CustomFields {
+ name: SharedString,
+ }
+ let fields: CustomFields =
+ serde_json::from_value(inner.clone()).map_err(serde::de::Error::custom)?;
+ return Ok(Self::Custom { name: fields.name });
+ }
+ }
+
+ Err(serde::de::Error::custom(
+ "expected a string variant or {\"Custom\": {\"name\": ...}}",
+ ))
+ }
+}
+
impl AgentType {
pub fn is_native(&self) -> bool {
matches!(self, Self::NativeAgent)
@@ -1347,6 +1416,15 @@ impl AgentPanel {
self.new_thread_menu_handle.toggle(window, cx);
}
+ pub fn toggle_start_thread_in_selector(
+ &mut self,
+ _: &ToggleStartThreadInSelector,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.start_thread_in_menu_handle.toggle(window, cx);
+ }
+
pub fn increase_font_size(
&mut self,
action: &IncreaseBufferFontSize,
@@ -3179,6 +3257,7 @@ impl AgentPanel {
}
fn render_start_thread_in_selector(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ let focus_handle = self.focus_handle(cx);
let has_git_repo = self.project_has_git_repository(cx);
let is_via_collab = self.project.read(cx).is_via_collab();
@@ -3213,7 +3292,16 @@ impl AgentPanel {
};
PopoverMenu::new("thread-target-selector")
- .trigger(trigger_button)
+ .trigger_with_tooltip(trigger_button, {
+ move |_window, cx| {
+ Tooltip::for_action_in(
+ "Start Thread In…",
+ &ToggleStartThreadInSelector,
+ &focus_handle,
+ cx,
+ )
+ }
+ })
.menu(move |window, cx| {
let is_local_selected = current_target == StartThreadIn::LocalProject;
let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree;
@@ -3694,7 +3782,16 @@ impl AgentPanel {
);
let agent_selector_menu = PopoverMenu::new("new_thread_menu")
- .trigger(agent_selector_button)
+ .trigger_with_tooltip(agent_selector_button, {
+ move |_window, cx| {
+ Tooltip::for_action_in(
+ "New Thread\u{2026}",
+ &ToggleNewThreadMenu,
+ &focus_handle,
+ cx,
+ )
+ }
+ })
.menu({
let builder = new_thread_menu_builder.clone();
move |window, cx| builder(window, cx)
@@ -4269,6 +4366,7 @@ impl Render for AgentPanel {
.on_action(cx.listener(Self::go_back))
.on_action(cx.listener(Self::toggle_navigation_menu))
.on_action(cx.listener(Self::toggle_options_menu))
+ .on_action(cx.listener(Self::toggle_start_thread_in_selector))
.on_action(cx.listener(Self::increase_font_size))
.on_action(cx.listener(Self::decrease_font_size))
.on_action(cx.listener(Self::reset_font_size))
@@ -5269,4 +5367,77 @@ mod tests {
);
});
}
+
+ #[test]
+ fn test_deserialize_legacy_agent_type_variants() {
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""ClaudeAgent""#).unwrap(),
+ AgentType::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ },
+ );
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""ClaudeCode""#).unwrap(),
+ AgentType::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ },
+ );
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""Codex""#).unwrap(),
+ AgentType::Custom {
+ name: CODEX_NAME.into(),
+ },
+ );
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""Gemini""#).unwrap(),
+ AgentType::Custom {
+ name: GEMINI_NAME.into(),
+ },
+ );
+ }
+
+ #[test]
+ fn test_deserialize_current_agent_type_variants() {
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""NativeAgent""#).unwrap(),
+ AgentType::NativeAgent,
+ );
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#""TextThread""#).unwrap(),
+ AgentType::TextThread,
+ );
+ assert_eq!(
+ serde_json::from_str::<AgentType>(r#"{"Custom":{"name":"my-agent"}}"#).unwrap(),
+ AgentType::Custom {
+ name: "my-agent".into(),
+ },
+ );
+ }
+
+ #[test]
+ fn test_deserialize_legacy_serialized_panel() {
+ let json = serde_json::json!({
+ "width": 300.0,
+ "selected_agent": "ClaudeAgent",
+ "last_active_thread": {
+ "session_id": "test-session",
+ "agent_type": "Codex",
+ },
+ });
+
+ let panel: SerializedAgentPanel = serde_json::from_value(json).unwrap();
+ assert_eq!(
+ panel.selected_agent,
+ Some(AgentType::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ }),
+ );
+ let thread = panel.last_active_thread.unwrap();
+ assert_eq!(
+ thread.agent_type,
+ AgentType::Custom {
+ name: CODEX_NAME.into(),
+ },
+ );
+ }
}
@@ -82,6 +82,8 @@ actions!(
NewTextThread,
/// Toggles the menu to create new agent threads.
ToggleNewThreadMenu,
+ /// Toggles the selector for choosing where new threads start (current project or new worktree).
+ ToggleStartThreadInSelector,
/// Toggles the navigation menu for switching between threads and views.
ToggleNavigationMenu,
/// Toggles the options menu for agent settings and preferences.
@@ -210,13 +212,70 @@ pub struct NewNativeAgentThreadFromSummary {
}
// TODO unify this with AgentType
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, PartialEq, Serialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ExternalAgent {
NativeAgent,
Custom { name: SharedString },
}
+// Custom impl handles legacy variant names from before the built-in agents were moved to
+// the registry: "claude_code" -> Custom { name: "claude-acp" }, "codex" -> Custom { name:
+// "codex-acp" }, "gemini" -> Custom { name: "gemini" }.
+// Can be removed at some point in the future and go back to #[derive(Deserialize)].
+impl<'de> serde::Deserialize<'de> for ExternalAgent {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ use project::agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME};
+
+ let value = serde_json::Value::deserialize(deserializer)?;
+
+ if let Some(s) = value.as_str() {
+ return match s {
+ "native_agent" => Ok(Self::NativeAgent),
+ "claude_code" | "claude_agent" => Ok(Self::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ }),
+ "codex" => Ok(Self::Custom {
+ name: CODEX_NAME.into(),
+ }),
+ "gemini" => Ok(Self::Custom {
+ name: GEMINI_NAME.into(),
+ }),
+ other => Err(serde::de::Error::unknown_variant(
+ other,
+ &[
+ "native_agent",
+ "custom",
+ "claude_agent",
+ "claude_code",
+ "codex",
+ "gemini",
+ ],
+ )),
+ };
+ }
+
+ if let Some(obj) = value.as_object() {
+ if let Some(inner) = obj.get("custom") {
+ #[derive(serde::Deserialize)]
+ struct CustomFields {
+ name: SharedString,
+ }
+ let fields: CustomFields =
+ serde_json::from_value(inner.clone()).map_err(serde::de::Error::custom)?;
+ return Ok(Self::Custom { name: fields.name });
+ }
+ }
+
+ Err(serde::de::Error::custom(
+ "expected a string variant or {\"custom\": {\"name\": ...}}",
+ ))
+ }
+}
+
impl ExternalAgent {
pub fn server(
&self,
@@ -683,4 +742,42 @@ mod tests {
);
});
}
+
+ #[test]
+ fn test_deserialize_legacy_external_agent_variants() {
+ use project::agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME};
+
+ assert_eq!(
+ serde_json::from_str::<ExternalAgent>(r#""claude_code""#).unwrap(),
+ ExternalAgent::Custom {
+ name: CLAUDE_AGENT_NAME.into(),
+ },
+ );
+ assert_eq!(
+ serde_json::from_str::<ExternalAgent>(r#""codex""#).unwrap(),
+ ExternalAgent::Custom {
+ name: CODEX_NAME.into(),
+ },
+ );
+ assert_eq!(
+ serde_json::from_str::<ExternalAgent>(r#""gemini""#).unwrap(),
+ ExternalAgent::Custom {
+ name: GEMINI_NAME.into(),
+ },
+ );
+ }
+
+ #[test]
+ fn test_deserialize_current_external_agent_variants() {
+ assert_eq!(
+ serde_json::from_str::<ExternalAgent>(r#""native_agent""#).unwrap(),
+ ExternalAgent::NativeAgent,
+ );
+ assert_eq!(
+ serde_json::from_str::<ExternalAgent>(r#"{"custom":{"name":"my-agent"}}"#).unwrap(),
+ ExternalAgent::Custom {
+ name: "my-agent".into(),
+ },
+ );
+ }
}
@@ -27,8 +27,4 @@ settings.workspace = true
strum.workspace = true
thiserror.workspace = true
-[dev-dependencies]
-reqwest_client.workspace = true
-gpui_tokio.workspace = true
-gpui.workspace = true
-tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
+
@@ -55,7 +55,7 @@ zed_env_vars.workspace = true
[dev-dependencies]
assistant_slash_commands.workspace = true
-indoc.workspace = true
+
language_model = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
rand.workspace = true
@@ -384,17 +384,29 @@ pub fn open_input_stream(
Ok(stream)
}
-pub fn open_output_stream(device_id: Option<DeviceId>) -> anyhow::Result<MixerDeviceSink> {
- let output_handle = if let Some(id) = device_id {
- if let Some(device) = default_host().device_by_id(&id) {
- DeviceSinkBuilder::from_device(device)?.open_stream()
- } else {
- DeviceSinkBuilder::open_default_sink()
+pub fn resolve_device(device_id: Option<&DeviceId>, input: bool) -> anyhow::Result<cpal::Device> {
+ if let Some(id) = device_id {
+ if let Some(device) = default_host().device_by_id(id) {
+ return Ok(device);
}
+ log::warn!("Selected audio device not found, falling back to default");
+ }
+ if input {
+ default_host()
+ .default_input_device()
+ .context("no audio input device available")
} else {
- DeviceSinkBuilder::open_default_sink()
- };
- let mut output_handle = output_handle.context("Could not open output stream")?;
+ default_host()
+ .default_output_device()
+ .context("no audio output device available")
+ }
+}
+
+pub fn open_output_stream(device_id: Option<DeviceId>) -> anyhow::Result<MixerDeviceSink> {
+ let device = resolve_device(device_id.as_ref(), false)?;
+ let mut output_handle = DeviceSinkBuilder::from_device(device)?
+ .open_stream()
+ .context("Could not open output stream")?;
output_handle.log_on_drop(false);
log::info!("Output stream: {:?}", output_handle);
Ok(output_handle)
@@ -42,12 +42,8 @@ pub struct AudioSettings {
///
/// You need to rejoin a call for this setting to apply
pub legacy_audio_compatible: bool,
- /// Requires 'rodio_audio: true'
- ///
/// Select specific output audio device.
pub output_audio_device: Option<DeviceId>,
- /// Requires 'rodio_audio: true'
- ///
/// Select specific input audio device.
pub input_audio_device: Option<DeviceId>,
}
@@ -212,18 +212,10 @@ pub fn init(client: Arc<Client>, cx: &mut App) {
}
pub fn check(_: &Check, window: &mut Window, cx: &mut App) {
- if let Some(message) = option_env!("ZED_UPDATE_EXPLANATION") {
- drop(window.prompt(
- gpui::PromptLevel::Info,
- "Zed was installed via a package manager.",
- Some(message),
- &["Ok"],
- cx,
- ));
- return;
- }
-
- if let Ok(message) = env::var("ZED_UPDATE_EXPLANATION") {
+ if let Some(message) = option_env!("ZED_UPDATE_EXPLANATION")
+ .map(ToOwned::to_owned)
+ .or_else(|| env::var("ZED_UPDATE_EXPLANATION").ok())
+ {
drop(window.prompt(
gpui::PromptLevel::Info,
"Zed was installed via a package manager.",
@@ -388,6 +380,10 @@ impl AutoUpdater {
pub fn poll(&mut self, check_type: UpdateCheckType, cx: &mut Context<Self>) {
if self.pending_poll.is_some() {
+ if self.update_check_type == UpdateCheckType::Automatic {
+ self.update_check_type = check_type;
+ cx.notify();
+ }
return;
}
self.update_check_type = check_type;
@@ -557,7 +553,7 @@ impl AutoUpdater {
asset,
metrics_id: metrics_id.as_deref(),
system_id: system_id.as_deref(),
- is_staff: is_staff,
+ is_staff,
},
)?;
@@ -34,7 +34,7 @@ ztracing.workspace = true
ctor.workspace = true
gpui = { workspace = true, features = ["test-support"] }
rand.workspace = true
-serde_json.workspace = true
+
settings.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true
@@ -51,5 +51,5 @@ gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
+
livekit_client = { workspace = true, features = ["test-support"] }
@@ -22,6 +22,4 @@ strum = { workspace = true, features = ["derive"] }
uuid = { workspace = true, features = ["serde"] }
zeta_prompt.workspace = true
-[dev-dependencies]
-pretty_assertions.workspace = true
-indoc.workspace = true
+
@@ -22,5 +22,6 @@ log.workspace = true
serde.workspace = true
serde_json.workspace = true
text.workspace = true
+zeta_prompt.workspace = true
[dev-dependencies]
@@ -8,7 +8,7 @@ use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task};
use http_client::HttpClient;
use icons::IconName;
use language::{
- Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint, language_settings::all_language_settings,
+ Anchor, Buffer, BufferSnapshot, EditPreview, language_settings::all_language_settings,
};
use language_model::{ApiKeyState, AuthenticateError, EnvVar, env_var};
use serde::{Deserialize, Serialize};
@@ -18,7 +18,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
-use text::{OffsetRangeExt as _, ToOffset};
+use text::ToOffset;
pub const CODESTRAL_API_URL: &str = "https://codestral.mistral.ai";
pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);
@@ -259,28 +259,31 @@ impl EditPredictionDelegate for CodestralEditPredictionDelegate {
}
let cursor_offset = cursor_position.to_offset(&snapshot);
- let cursor_point = cursor_offset.to_point(&snapshot);
+ const MAX_EDITABLE_TOKENS: usize = 350;
const MAX_CONTEXT_TOKENS: usize = 150;
- const MAX_REWRITE_TOKENS: usize = 350;
-
- let (_, context_range) =
- cursor_excerpt::editable_and_context_ranges_for_cursor_position(
- cursor_point,
- &snapshot,
- MAX_REWRITE_TOKENS,
- MAX_CONTEXT_TOKENS,
- );
-
- let context_range = context_range.to_offset(&snapshot);
- let excerpt_text = snapshot
- .text_for_range(context_range.clone())
- .collect::<String>();
- let cursor_within_excerpt = cursor_offset
+
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset);
+ let syntax_ranges = cursor_excerpt::compute_syntax_ranges(
+ &snapshot,
+ cursor_offset,
+ &excerpt_offset_range,
+ );
+ let excerpt_text: String = snapshot.text_for_range(excerpt_point_range).collect();
+ let (_, context_range) = zeta_prompt::compute_editable_and_context_ranges(
+ &excerpt_text,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ MAX_EDITABLE_TOKENS,
+ MAX_CONTEXT_TOKENS,
+ );
+ let context_text = &excerpt_text[context_range.clone()];
+ let cursor_within_excerpt = cursor_offset_in_excerpt
.saturating_sub(context_range.start)
- .min(excerpt_text.len());
- let prompt = excerpt_text[..cursor_within_excerpt].to_string();
- let suffix = excerpt_text[cursor_within_excerpt..].to_string();
+ .min(context_text.len());
+ let prompt = context_text[..cursor_within_excerpt].to_string();
+ let suffix = context_text[cursor_within_excerpt..].to_string();
let completion_text = match Self::fetch_completion(
http_client,
@@ -75,13 +75,13 @@ uuid.workspace = true
[dev-dependencies]
agent = { workspace = true, features = ["test-support"] }
-agent-client-protocol.workspace = true
-agent_settings.workspace = true
-agent_ui = { workspace = true, features = ["test-support"] }
+
+
+
assistant_text_thread.workspace = true
assistant_slash_command.workspace = true
async-trait.workspace = true
-audio.workspace = true
+
buffer_diff.workspace = true
call = { workspace = true, features = ["test-support"] }
channel.workspace = true
@@ -90,11 +90,11 @@ collab = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
command_palette_hooks.workspace = true
-context_server.workspace = true
+
ctor.workspace = true
dap = { workspace = true, features = ["test-support"] }
dap_adapters = { workspace = true, features = ["test-support"] }
-dap-types.workspace = true
+
debugger_ui = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
extension.workspace = true
@@ -105,7 +105,7 @@ git_hosting_providers.workspace = true
git_ui = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
gpui_tokio.workspace = true
-hyper.workspace = true
+
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
@@ -131,7 +131,7 @@ smol.workspace = true
sqlx = { version = "0.8", features = ["sqlite"] }
task.workspace = true
theme.workspace = true
-title_bar = { workspace = true, features = ["test-support"] }
+
unindent.workspace = true
util.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -24,7 +24,7 @@ test-support = [
"settings/test-support",
"util/test-support",
"workspace/test-support",
- "http_client/test-support",
+
"title_bar/test-support",
]
@@ -67,11 +67,11 @@ collections = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
notifications = { workspace = true, features = ["test-support"] }
-pretty_assertions.workspace = true
+
project = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
-tree-sitter-md.workspace = true
+
util = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
+
workspace = { workspace = true, features = ["test-support"] }
@@ -38,14 +38,14 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-ctor.workspace = true
+
db = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
-env_logger.workspace = true
+
go_to_line.workspace = true
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
menu.workspace = true
project = { workspace = true, features = ["test-support"] }
-serde_json.workspace = true
+
workspace = { workspace = true, features = ["test-support"] }
@@ -52,14 +52,10 @@ workspace.workspace = true
async-std = { version = "1.12.0", features = ["unstable"] }
[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
-clock = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
-ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
@@ -58,7 +58,6 @@ async-pipe.workspace = true
gpui = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
task = { workspace = true, features = ["test-support"] }
-tree-sitter.workspace = true
-tree-sitter-go.workspace = true
+
util = { workspace = true, features = ["test-support"] }
zlog.workspace = true
@@ -29,7 +29,7 @@ gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
-theme.workspace = true
+
workspace = { workspace = true, features = ["test-support"] }
worktree = { workspace = true, features = ["test-support"] }
@@ -38,7 +38,7 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
+
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
@@ -297,7 +297,7 @@ impl DiagnosticBlock {
return;
};
- for (excerpt_id, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) {
+ for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) {
if range.context.overlaps(&diagnostic.range, &snapshot) {
Self::jump_to(
editor,
@@ -583,7 +583,7 @@ impl ProjectDiagnosticsEditor {
RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer
.excerpts_for_buffer(buffer_id, cx)
.into_iter()
- .map(|(_, range)| range)
+ .map(|(_, _, range)| range)
.sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b))
.collect(),
}
@@ -82,5 +82,5 @@ parking_lot.workspace = true
project = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
-tree-sitter-rust.workspace = true
+
zlog.workspace = true
@@ -1,12 +1,9 @@
-use crate::{
- StoredEvent, cursor_excerpt::editable_and_context_ranges_for_cursor_position,
- example_spec::ExampleSpec,
-};
+use crate::{StoredEvent, example_spec::ExampleSpec};
use anyhow::Result;
use buffer_diff::BufferDiffSnapshot;
use collections::HashMap;
use gpui::{App, Entity, Task};
-use language::{Buffer, ToPoint as _};
+use language::Buffer;
use project::{Project, WorktreeId};
use std::{collections::hash_map, fmt::Write as _, ops::Range, path::Path, sync::Arc};
use text::{BufferSnapshot as TextBufferSnapshot, Point};
@@ -157,17 +154,34 @@ fn compute_cursor_excerpt(
cursor_anchor: language::Anchor,
) -> (String, usize, Range<Point>) {
use text::ToOffset as _;
+ use text::ToPoint as _;
- let cursor_point = cursor_anchor.to_point(snapshot);
- let (_editable_range, context_range) =
- editable_and_context_ranges_for_cursor_position(cursor_point, snapshot, 100, 50);
- let context_start_offset = context_range.start.to_offset(snapshot);
let cursor_offset = cursor_anchor.to_offset(snapshot);
- let cursor_offset_in_excerpt = cursor_offset.saturating_sub(context_start_offset);
- let excerpt = snapshot
- .text_for_range(context_range.clone())
- .collect::<String>();
- (excerpt, cursor_offset_in_excerpt, context_range)
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ crate::cursor_excerpt::compute_cursor_excerpt(snapshot, cursor_offset);
+ let syntax_ranges = crate::cursor_excerpt::compute_syntax_ranges(
+ snapshot,
+ cursor_offset,
+ &excerpt_offset_range,
+ );
+ let excerpt_text: String = snapshot.text_for_range(excerpt_point_range).collect();
+ let (_, context_range) = zeta_prompt::compute_editable_and_context_ranges(
+ &excerpt_text,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ 100,
+ 50,
+ );
+ let context_text = excerpt_text[context_range.clone()].to_string();
+ let cursor_in_context = cursor_offset_in_excerpt.saturating_sub(context_range.start);
+ let context_buffer_start =
+ (excerpt_offset_range.start + context_range.start).to_point(snapshot);
+ let context_buffer_end = (excerpt_offset_range.start + context_range.end).to_point(snapshot);
+ (
+ context_text,
+ cursor_in_context,
+ context_buffer_start..context_buffer_end,
+ )
}
async fn collect_snapshots(
@@ -1,107 +1,140 @@
-use language::{BufferSnapshot, Point};
+use language::{BufferSnapshot, Point, ToPoint as _};
use std::ops::Range;
use text::OffsetRangeExt as _;
-use zeta_prompt::ExcerptRanges;
-/// Computes all range variants for a cursor position: editable ranges at 150, 180, and 350
-/// token budgets, plus their corresponding context expansions. Returns the full excerpt range
-/// (union of all context ranges) and the individual sub-ranges as Points.
-pub fn compute_excerpt_ranges(
- position: Point,
+const CURSOR_EXCERPT_TOKEN_BUDGET: usize = 8192;
+
+/// Computes a cursor excerpt as the largest linewise symmetric region around
+/// the cursor that fits within an 8192-token budget. Returns the point range,
+/// byte offset range, and the cursor offset relative to the excerpt start.
+pub fn compute_cursor_excerpt(
snapshot: &BufferSnapshot,
-) -> (Range<Point>, Range<usize>, ExcerptRanges) {
- let editable_150 = compute_editable_range(snapshot, position, 150);
- let editable_180 = compute_editable_range(snapshot, position, 180);
- let editable_350 = compute_editable_range(snapshot, position, 350);
- let editable_512 = compute_editable_range(snapshot, position, 512);
-
- let editable_150_context_350 =
- expand_context_syntactically_then_linewise(snapshot, editable_150.clone(), 350);
- let editable_180_context_350 =
- expand_context_syntactically_then_linewise(snapshot, editable_180.clone(), 350);
- let editable_350_context_150 =
- expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 150);
- let editable_350_context_512 =
- expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 512);
- let editable_350_context_1024 =
- expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 1024);
- let context_4096 = expand_context_syntactically_then_linewise(
- snapshot,
- editable_350_context_1024.clone(),
- 4096 - 1024,
- );
- let context_8192 =
- expand_context_syntactically_then_linewise(snapshot, context_4096.clone(), 8192 - 4096);
-
- let full_start_row = context_8192.start.row;
- let full_end_row = context_8192.end.row;
-
- let full_context =
- Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row));
-
- let full_context_offset_range = full_context.to_offset(snapshot);
-
- let to_offset = |range: &Range<Point>| -> Range<usize> {
- let start = range.start.to_offset(snapshot);
- let end = range.end.to_offset(snapshot);
- (start - full_context_offset_range.start)..(end - full_context_offset_range.start)
- };
-
- let ranges = ExcerptRanges {
- editable_150: to_offset(&editable_150),
- editable_180: to_offset(&editable_180),
- editable_350: to_offset(&editable_350),
- editable_512: Some(to_offset(&editable_512)),
- editable_150_context_350: to_offset(&editable_150_context_350),
- editable_180_context_350: to_offset(&editable_180_context_350),
- editable_350_context_150: to_offset(&editable_350_context_150),
- editable_350_context_512: Some(to_offset(&editable_350_context_512)),
- editable_350_context_1024: Some(to_offset(&editable_350_context_1024)),
- context_4096: Some(to_offset(&context_4096)),
- context_8192: Some(to_offset(&context_8192)),
- };
-
- (full_context, full_context_offset_range, ranges)
+ cursor_offset: usize,
+) -> (Range<Point>, Range<usize>, usize) {
+ let cursor_point = cursor_offset.to_point(snapshot);
+ let cursor_row = cursor_point.row;
+ let (start_row, end_row, _) =
+ expand_symmetric_from_cursor(snapshot, cursor_row, CURSOR_EXCERPT_TOKEN_BUDGET);
+
+ let excerpt_range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row));
+ let excerpt_offset_range = excerpt_range.to_offset(snapshot);
+ let cursor_offset_in_excerpt = cursor_offset - excerpt_offset_range.start;
+
+ (
+ excerpt_range,
+ excerpt_offset_range,
+ cursor_offset_in_excerpt,
+ )
}
-pub fn editable_and_context_ranges_for_cursor_position(
- position: Point,
+/// Expands symmetrically from cursor, one line at a time, alternating down then up.
+/// Returns (start_row, end_row, remaining_tokens).
+fn expand_symmetric_from_cursor(
snapshot: &BufferSnapshot,
- editable_region_token_limit: usize,
- context_token_limit: usize,
-) -> (Range<Point>, Range<Point>) {
- let editable_range = compute_editable_range(snapshot, position, editable_region_token_limit);
+ cursor_row: u32,
+ mut token_budget: usize,
+) -> (u32, u32, usize) {
+ let mut start_row = cursor_row;
+ let mut end_row = cursor_row;
+
+ let cursor_line_tokens = line_token_count(snapshot, cursor_row);
+ token_budget = token_budget.saturating_sub(cursor_line_tokens);
+
+ loop {
+ let can_expand_up = start_row > 0;
+ let can_expand_down = end_row < snapshot.max_point().row;
+
+ if token_budget == 0 || (!can_expand_up && !can_expand_down) {
+ break;
+ }
- let context_range = expand_context_syntactically_then_linewise(
- snapshot,
- editable_range.clone(),
- context_token_limit,
- );
+ if can_expand_down {
+ let next_row = end_row + 1;
+ let line_tokens = line_token_count(snapshot, next_row);
+ if line_tokens <= token_budget {
+ end_row = next_row;
+ token_budget = token_budget.saturating_sub(line_tokens);
+ } else {
+ break;
+ }
+ }
- (editable_range, context_range)
+ if can_expand_up && token_budget > 0 {
+ let next_row = start_row - 1;
+ let line_tokens = line_token_count(snapshot, next_row);
+ if line_tokens <= token_budget {
+ start_row = next_row;
+ token_budget = token_budget.saturating_sub(line_tokens);
+ } else {
+ break;
+ }
+ }
+ }
+
+ (start_row, end_row, token_budget)
+}
+
+/// Typical number of string bytes per token for the purposes of limiting model input. This is
+/// intentionally low to err on the side of underestimating limits.
+pub(crate) const BYTES_PER_TOKEN_GUESS: usize = 3;
+
+pub fn guess_token_count(bytes: usize) -> usize {
+ bytes / BYTES_PER_TOKEN_GUESS
}
-/// Computes the editable range using a three-phase approach:
-/// 1. Expand symmetrically from cursor (75% of budget)
-/// 2. Expand to syntax boundaries
-/// 3. Continue line-wise in the least-expanded direction
-fn compute_editable_range(
+fn line_token_count(snapshot: &BufferSnapshot, row: u32) -> usize {
+ guess_token_count(snapshot.line_len(row) as usize).max(1)
+}
+
+/// Computes the byte offset ranges of all syntax nodes containing the cursor,
+/// ordered from innermost to outermost. The offsets are relative to
+/// `excerpt_offset_range.start`.
+pub fn compute_syntax_ranges(
snapshot: &BufferSnapshot,
- cursor: Point,
- token_limit: usize,
-) -> Range<Point> {
- // Phase 1: Expand symmetrically from cursor using 75% of budget.
- let initial_budget = (token_limit * 3) / 4;
- let (mut start_row, mut end_row, mut remaining_tokens) =
- expand_symmetric_from_cursor(snapshot, cursor.row, initial_budget);
+ cursor_offset: usize,
+ excerpt_offset_range: &Range<usize>,
+) -> Vec<Range<usize>> {
+ let cursor_point = cursor_offset.to_point(snapshot);
+ let range = cursor_point..cursor_point;
+ let mut current = snapshot.syntax_ancestor(range);
+ let mut ranges = Vec::new();
+ let mut last_range: Option<(usize, usize)> = None;
- // Add remaining budget from phase 1.
- remaining_tokens += token_limit.saturating_sub(initial_budget);
+ while let Some(node) = current.take() {
+ let node_start = node.start_byte();
+ let node_end = node.end_byte();
+ let key = (node_start, node_end);
- let original_start = start_row;
- let original_end = end_row;
+ current = node.parent();
- // Phase 2: Expand to syntax boundaries that fit within budget.
+ if last_range == Some(key) {
+ continue;
+ }
+ last_range = Some(key);
+
+ let start = node_start.saturating_sub(excerpt_offset_range.start);
+ let end = node_end
+ .min(excerpt_offset_range.end)
+ .saturating_sub(excerpt_offset_range.start);
+ ranges.push(start..end);
+ }
+
+ ranges
+}
+
+/// Expands context by first trying to reach syntax boundaries,
+/// then expanding line-wise only if no syntax expansion occurred.
+pub fn expand_context_syntactically_then_linewise(
+ snapshot: &BufferSnapshot,
+ editable_range: Range<Point>,
+ context_token_limit: usize,
+) -> Range<Point> {
+ let mut start_row = editable_range.start.row;
+ let mut end_row = editable_range.end.row;
+ let mut remaining_tokens = context_token_limit;
+ let mut did_syntax_expand = false;
+
+ // Phase 1: Try to expand to containing syntax boundaries, picking the largest that fits.
for (boundary_start, boundary_end) in containing_syntax_boundaries(snapshot, start_row, end_row)
{
let tokens_for_start = if boundary_start < start_row {
@@ -125,76 +158,57 @@ fn compute_editable_range(
end_row = boundary_end;
}
remaining_tokens = remaining_tokens.saturating_sub(total_needed);
+ did_syntax_expand = true;
} else {
break;
}
}
- // Phase 3: Continue line-wise in the direction we expanded least during syntax phase.
- let expanded_up = original_start.saturating_sub(start_row);
- let expanded_down = end_row.saturating_sub(original_end);
-
- (start_row, end_row, _) = expand_linewise_biased(
- snapshot,
- start_row,
- end_row,
- remaining_tokens,
- expanded_up <= expanded_down, // prefer_up if we expanded less upward
- );
+ // Phase 2: Only expand line-wise if no syntax expansion occurred.
+ if !did_syntax_expand {
+ (start_row, end_row, _) =
+ expand_linewise_biased(snapshot, start_row, end_row, remaining_tokens, true);
+ }
let start = Point::new(start_row, 0);
let end = Point::new(end_row, snapshot.line_len(end_row));
start..end
}
-/// Expands symmetrically from cursor, one line at a time, alternating down then up.
-/// Returns (start_row, end_row, remaining_tokens).
-fn expand_symmetric_from_cursor(
+/// Returns an iterator of (start_row, end_row) for successively larger syntax nodes
+/// containing the given row range. Smallest containing node first.
+fn containing_syntax_boundaries(
snapshot: &BufferSnapshot,
- cursor_row: u32,
- mut token_budget: usize,
-) -> (u32, u32, usize) {
- let mut start_row = cursor_row;
- let mut end_row = cursor_row;
-
- // Account for the cursor's line.
- let cursor_line_tokens = line_token_count(snapshot, cursor_row);
- token_budget = token_budget.saturating_sub(cursor_line_tokens);
+ start_row: u32,
+ end_row: u32,
+) -> impl Iterator<Item = (u32, u32)> {
+ let range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row));
+ let mut current = snapshot.syntax_ancestor(range);
+ let mut last_rows: Option<(u32, u32)> = None;
- loop {
- let can_expand_up = start_row > 0;
- let can_expand_down = end_row < snapshot.max_point().row;
+ std::iter::from_fn(move || {
+ while let Some(node) = current.take() {
+ let node_start_row = node.start_position().row as u32;
+ let node_end_row = node.end_position().row as u32;
+ let rows = (node_start_row, node_end_row);
- if token_budget == 0 || (!can_expand_up && !can_expand_down) {
- break;
- }
+ current = node.parent();
- // Expand down first (slight forward bias for edit prediction).
- if can_expand_down {
- let next_row = end_row + 1;
- let line_tokens = line_token_count(snapshot, next_row);
- if line_tokens <= token_budget {
- end_row = next_row;
- token_budget = token_budget.saturating_sub(line_tokens);
- } else {
- break;
+ // Skip nodes that don't extend beyond our range.
+ if node_start_row >= start_row && node_end_row <= end_row {
+ continue;
}
- }
- // Then expand up.
- if can_expand_up && token_budget > 0 {
- let next_row = start_row - 1;
- let line_tokens = line_token_count(snapshot, next_row);
- if line_tokens <= token_budget {
- start_row = next_row;
- token_budget = token_budget.saturating_sub(line_tokens);
- } else {
- break;
+ // Skip if same as last returned (some nodes have same span).
+ if last_rows == Some(rows) {
+ continue;
}
- }
- }
- (start_row, end_row, token_budget)
+ last_rows = Some(rows);
+ return Some(rows);
+ }
+ None
+ })
}
/// Expands line-wise with a bias toward one direction.
@@ -265,18 +279,6 @@ fn expand_linewise_biased(
(start_row, end_row, remaining_tokens)
}
-/// Typical number of string bytes per token for the purposes of limiting model input. This is
-/// intentionally low to err on the side of underestimating limits.
-pub(crate) const BYTES_PER_TOKEN_GUESS: usize = 3;
-
-pub fn guess_token_count(bytes: usize) -> usize {
- bytes / BYTES_PER_TOKEN_GUESS
-}
-
-fn line_token_count(snapshot: &BufferSnapshot, row: u32) -> usize {
- guess_token_count(snapshot.line_len(row) as usize).max(1)
-}
-
/// Estimates token count for rows in range [start_row, end_row).
fn estimate_tokens_for_rows(snapshot: &BufferSnapshot, start_row: u32, end_row: u32) -> usize {
let mut tokens = 0;
@@ -286,104 +288,14 @@ fn estimate_tokens_for_rows(snapshot: &BufferSnapshot, start_row: u32, end_row:
tokens
}
-/// Returns an iterator of (start_row, end_row) for successively larger syntax nodes
-/// containing the given row range. Smallest containing node first.
-fn containing_syntax_boundaries(
- snapshot: &BufferSnapshot,
- start_row: u32,
- end_row: u32,
-) -> impl Iterator<Item = (u32, u32)> {
- let range = Point::new(start_row, 0)..Point::new(end_row, snapshot.line_len(end_row));
- let mut current = snapshot.syntax_ancestor(range);
- let mut last_rows: Option<(u32, u32)> = None;
-
- std::iter::from_fn(move || {
- while let Some(node) = current.take() {
- let node_start_row = node.start_position().row as u32;
- let node_end_row = node.end_position().row as u32;
- let rows = (node_start_row, node_end_row);
-
- current = node.parent();
-
- // Skip nodes that don't extend beyond our range.
- if node_start_row >= start_row && node_end_row <= end_row {
- continue;
- }
-
- // Skip if same as last returned (some nodes have same span).
- if last_rows == Some(rows) {
- continue;
- }
-
- last_rows = Some(rows);
- return Some(rows);
- }
- None
- })
-}
-
-/// Expands context by first trying to reach syntax boundaries,
-/// then expanding line-wise only if no syntax expansion occurred.
-fn expand_context_syntactically_then_linewise(
- snapshot: &BufferSnapshot,
- editable_range: Range<Point>,
- context_token_limit: usize,
-) -> Range<Point> {
- let mut start_row = editable_range.start.row;
- let mut end_row = editable_range.end.row;
- let mut remaining_tokens = context_token_limit;
- let mut did_syntax_expand = false;
-
- // Phase 1: Try to expand to containing syntax boundaries, picking the largest that fits.
- for (boundary_start, boundary_end) in containing_syntax_boundaries(snapshot, start_row, end_row)
- {
- let tokens_for_start = if boundary_start < start_row {
- estimate_tokens_for_rows(snapshot, boundary_start, start_row)
- } else {
- 0
- };
- let tokens_for_end = if boundary_end > end_row {
- estimate_tokens_for_rows(snapshot, end_row + 1, boundary_end + 1)
- } else {
- 0
- };
-
- let total_needed = tokens_for_start + tokens_for_end;
-
- if total_needed <= remaining_tokens {
- if boundary_start < start_row {
- start_row = boundary_start;
- }
- if boundary_end > end_row {
- end_row = boundary_end;
- }
- remaining_tokens = remaining_tokens.saturating_sub(total_needed);
- did_syntax_expand = true;
- } else {
- break;
- }
- }
-
- // Phase 2: Only expand line-wise if no syntax expansion occurred.
- if !did_syntax_expand {
- (start_row, end_row, _) =
- expand_linewise_biased(snapshot, start_row, end_row, remaining_tokens, true);
- }
-
- let start = Point::new(start_row, 0);
- let end = Point::new(end_row, snapshot.line_len(end_row));
- start..end
-}
-
-use language::ToOffset as _;
-
#[cfg(test)]
mod tests {
use super::*;
- use gpui::{App, AppContext};
+ use gpui::{App, AppContext as _};
use indoc::indoc;
use language::{Buffer, rust_lang};
use util::test::{TextRangeMarker, marked_text_ranges_by};
+ use zeta_prompt::compute_editable_and_context_ranges;
struct TestCase {
name: &'static str,
@@ -400,7 +312,18 @@ mod tests {
// [ ] = expected context range
let test_cases = vec![
TestCase {
- name: "cursor near end of function - expands to syntax boundaries",
+ name: "small function fits entirely in editable and context",
+ marked_text: indoc! {r#"
+ [«fn foo() {
+ let x = 1;ˇ
+ let y = 2;
+ }»]
+ "#},
+ editable_token_limit: 30,
+ context_token_limit: 60,
+ },
+ TestCase {
+ name: "cursor near end of function - editable expands to syntax boundaries",
marked_text: indoc! {r#"
[fn first() {
let a = 1;
@@ -413,12 +336,11 @@ mod tests {
println!("{}", x + y);ˇ
}»]
"#},
- // 18 tokens - expands symmetrically then to syntax boundaries
editable_token_limit: 18,
context_token_limit: 35,
},
TestCase {
- name: "cursor at function start - expands to syntax boundaries",
+ name: "cursor at function start - editable expands to syntax boundaries",
marked_text: indoc! {r#"
[fn before() {
« let a = 1;
@@ -434,12 +356,11 @@ mod tests {
let b = 2;
}]
"#},
- // 25 tokens - expands symmetrically then to syntax boundaries
editable_token_limit: 25,
context_token_limit: 50,
},
TestCase {
- name: "tiny budget - just lines around cursor",
+ name: "tiny budget - just lines around cursor, no syntax expansion",
marked_text: indoc! {r#"
fn outer() {
[ let line1 = 1;
@@ -451,22 +372,9 @@ mod tests {
let line7 = 7;
}
"#},
- // 12 tokens (~36 bytes) = just the cursor line with tiny budget
editable_token_limit: 12,
context_token_limit: 24,
},
- TestCase {
- name: "small function fits entirely",
- marked_text: indoc! {r#"
- [«fn foo() {
- let x = 1;ˇ
- let y = 2;
- }»]
- "#},
- // Plenty of budget for this small function
- editable_token_limit: 30,
- context_token_limit: 60,
- },
TestCase {
name: "context extends beyond editable",
marked_text: indoc! {r#"
@@ -476,13 +384,11 @@ mod tests {
fn fourth() { let d = 4; }»
fn fifth() { let e = 5; }]
"#},
- // Small editable, larger context
editable_token_limit: 25,
context_token_limit: 45,
},
- // Tests for syntax-aware editable and context expansion
TestCase {
- name: "cursor in first if-statement - expands to syntax boundaries",
+ name: "cursor in first if-block - editable expands to syntax boundaries",
marked_text: indoc! {r#"
[«fn before() { }
@@ -503,13 +409,11 @@ mod tests {
fn after() { }]
"#},
- // 35 tokens allows expansion to include function header and first two if blocks
editable_token_limit: 35,
- // 60 tokens allows context to include the whole file
context_token_limit: 60,
},
TestCase {
- name: "cursor in middle if-statement - expands to syntax boundaries",
+ name: "cursor in middle if-block - editable spans surrounding blocks",
marked_text: indoc! {r#"
[fn before() { }
@@ -530,13 +434,11 @@ mod tests {
fn after() { }]
"#},
- // 40 tokens allows expansion to surrounding if blocks
editable_token_limit: 40,
- // 60 tokens allows context to include the whole file
context_token_limit: 60,
},
TestCase {
- name: "cursor near bottom of long function - editable expands toward syntax, context reaches function",
+ name: "cursor near bottom of long function - context reaches function boundary",
marked_text: indoc! {r#"
[fn other() { }
@@ -556,11 +458,30 @@ mod tests {
fn another() { }»]
"#},
- // 40 tokens for editable - allows several lines plus syntax expansion
editable_token_limit: 40,
- // 55 tokens - enough for function but not whole file
context_token_limit: 55,
},
+ TestCase {
+ name: "zero context budget - context equals editable",
+ marked_text: indoc! {r#"
+ fn before() {
+ let p = 1;
+ let q = 2;
+ [«}
+
+ fn foo() {
+ let x = 1;ˇ
+ let y = 2;
+ }
+ »]
+ fn after() {
+ let r = 3;
+ let s = 4;
+ }
+ "#},
+ editable_token_limit: 15,
+ context_token_limit: 0,
+ },
];
for test_case in test_cases {
@@ -580,75 +501,63 @@ mod tests {
let cursor_ranges = ranges.remove(&cursor_marker).unwrap_or_default();
let expected_editable = ranges.remove(&editable_marker).unwrap_or_default();
let expected_context = ranges.remove(&context_marker).unwrap_or_default();
- assert_eq!(expected_editable.len(), 1);
- assert_eq!(expected_context.len(), 1);
+ assert_eq!(expected_editable.len(), 1, "{}", test_case.name);
+ assert_eq!(expected_context.len(), 1, "{}", test_case.name);
- cx.new(|cx| {
+ cx.new(|cx: &mut gpui::Context<Buffer>| {
let text = text.trim_end_matches('\n');
let buffer = Buffer::local(text, cx).with_language(rust_lang(), cx);
let snapshot = buffer.snapshot();
let cursor_offset = cursor_ranges[0].start;
- let cursor_point = snapshot.offset_to_point(cursor_offset);
- let expected_editable_start = snapshot.offset_to_point(expected_editable[0].start);
- let expected_editable_end = snapshot.offset_to_point(expected_editable[0].end);
- let expected_context_start = snapshot.offset_to_point(expected_context[0].start);
- let expected_context_end = snapshot.offset_to_point(expected_context[0].end);
-
- let (actual_editable, actual_context) =
- editable_and_context_ranges_for_cursor_position(
- cursor_point,
- &snapshot,
- test_case.editable_token_limit,
- test_case.context_token_limit,
- );
-
- let range_text = |start: Point, end: Point| -> String {
- snapshot.text_for_range(start..end).collect()
+
+ let (_, excerpt_offset_range, cursor_offset_in_excerpt) =
+ compute_cursor_excerpt(&snapshot, cursor_offset);
+ let excerpt_text: String = snapshot
+ .text_for_range(excerpt_offset_range.clone())
+ .collect();
+ let syntax_ranges =
+ compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range);
+
+ let (actual_editable, actual_context) = compute_editable_and_context_ranges(
+ &excerpt_text,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ test_case.editable_token_limit,
+ test_case.context_token_limit,
+ );
+
+ let to_buffer_range = |range: Range<usize>| -> Range<usize> {
+ (excerpt_offset_range.start + range.start)
+ ..(excerpt_offset_range.start + range.end)
};
- let editable_match = actual_editable.start == expected_editable_start
- && actual_editable.end == expected_editable_end;
- let context_match = actual_context.start == expected_context_start
- && actual_context.end == expected_context_end;
+ let actual_editable = to_buffer_range(actual_editable);
+ let actual_context = to_buffer_range(actual_context);
+
+ let expected_editable_range = expected_editable[0].clone();
+ let expected_context_range = expected_context[0].clone();
+
+ let editable_match = actual_editable == expected_editable_range;
+ let context_match = actual_context == expected_context_range;
if !editable_match || !context_match {
+ let range_text = |range: &Range<usize>| {
+ snapshot.text_for_range(range.clone()).collect::<String>()
+ };
+
println!("\n=== FAILED: {} ===", test_case.name);
if !editable_match {
- println!(
- "\nExpected editable ({:?}..{:?}):",
- expected_editable_start, expected_editable_end
- );
- println!(
- "---\n{}---",
- range_text(expected_editable_start, expected_editable_end)
- );
- println!(
- "\nActual editable ({:?}..{:?}):",
- actual_editable.start, actual_editable.end
- );
- println!(
- "---\n{}---",
- range_text(actual_editable.start, actual_editable.end)
- );
+ println!("\nExpected editable ({:?}):", expected_editable_range);
+ println!("---\n{}---", range_text(&expected_editable_range));
+ println!("\nActual editable ({:?}):", actual_editable);
+ println!("---\n{}---", range_text(&actual_editable));
}
if !context_match {
- println!(
- "\nExpected context ({:?}..{:?}):",
- expected_context_start, expected_context_end
- );
- println!(
- "---\n{}---",
- range_text(expected_context_start, expected_context_end)
- );
- println!(
- "\nActual context ({:?}..{:?}):",
- actual_context.start, actual_context.end
- );
- println!(
- "---\n{}---",
- range_text(actual_context.start, actual_context.end)
- );
+ println!("\nExpected context ({:?}):", expected_context_range);
+ println!("---\n{}---", range_text(&expected_context_range));
+ println!("\nActual context ({:?}):", actual_context);
+ println!("---\n{}---", range_text(&actual_context));
}
panic!("Test '{}' failed - see output above", test_case.name);
}
@@ -17,7 +17,10 @@ use gpui::{
http_client::{FakeHttpClient, Response},
};
use indoc::indoc;
-use language::{Anchor, Buffer, CursorShape, Operation, Point, Selection, SelectionGoal};
+use language::{
+ Anchor, Buffer, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity,
+ Operation, Point, Selection, SelectionGoal,
+};
use lsp::LanguageServerId;
use parking_lot::Mutex;
use pretty_assertions::{assert_eq, assert_matches};
@@ -25,7 +28,10 @@ use project::{FakeFs, Project};
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc, time::Duration};
-use util::path;
+use util::{
+ path,
+ test::{TextRangeMarker, marked_text_ranges_by},
+};
use uuid::Uuid;
use zeta_prompt::ZetaPromptInput;
@@ -1656,97 +1662,172 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) {
assert_eq!(reject_request.rejections[1].request_id, "retry-2");
}
-// Skipped until we start including diagnostics in prompt
-// #[gpui::test]
-// async fn test_request_diagnostics(cx: &mut TestAppContext) {
-// let (ep_store, mut req_rx) = init_test_with_fake_client(cx);
-// let fs = FakeFs::new(cx.executor());
-// fs.insert_tree(
-// "/root",
-// json!({
-// "foo.md": "Hello!\nBye"
-// }),
-// )
-// .await;
-// let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
-
-// let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap();
-// let diagnostic = lsp::Diagnostic {
-// range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
-// severity: Some(lsp::DiagnosticSeverity::ERROR),
-// message: "\"Hello\" deprecated. Use \"Hi\" instead".to_string(),
-// ..Default::default()
-// };
-
-// project.update(cx, |project, cx| {
-// project.lsp_store().update(cx, |lsp_store, cx| {
-// // Create some diagnostics
-// lsp_store
-// .update_diagnostics(
-// LanguageServerId(0),
-// lsp::PublishDiagnosticsParams {
-// uri: path_to_buffer_uri.clone(),
-// diagnostics: vec![diagnostic],
-// version: None,
-// },
-// None,
-// language::DiagnosticSourceKind::Pushed,
-// &[],
-// cx,
-// )
-// .unwrap();
-// });
-// });
-
-// let buffer = project
-// .update(cx, |project, cx| {
-// let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
-// project.open_buffer(path, cx)
-// })
-// .await
-// .unwrap();
-
-// let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
-// let position = snapshot.anchor_before(language::Point::new(0, 0));
-
-// let _prediction_task = ep_store.update(cx, |ep_store, cx| {
-// ep_store.request_prediction(&project, &buffer, position, cx)
-// });
-
-// let (request, _respond_tx) = req_rx.next().await.unwrap();
-
-// assert_eq!(request.diagnostic_groups.len(), 1);
-// let value = serde_json::from_str::<serde_json::Value>(request.diagnostic_groups[0].0.get())
-// .unwrap();
-// // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3
-// assert_eq!(
-// value,
-// json!({
-// "entries": [{
-// "range": {
-// "start": 8,
-// "end": 10
-// },
-// "diagnostic": {
-// "source": null,
-// "code": null,
-// "code_description": null,
-// "severity": 1,
-// "message": "\"Hello\" deprecated. Use \"Hi\" instead",
-// "markdown": null,
-// "group_id": 0,
-// "is_primary": true,
-// "is_disk_based": false,
-// "is_unnecessary": false,
-// "source_kind": "Pushed",
-// "data": null,
-// "underline": true
-// }
-// }],
-// "primary_ix": 0
-// })
-// );
-// }
+#[gpui::test]
+fn test_active_buffer_diagnostics_fetching(cx: &mut TestAppContext) {
+ let diagnostic_marker: TextRangeMarker = ('«', '»').into();
+ let search_range_marker: TextRangeMarker = ('[', ']').into();
+
+ let (text, mut ranges) = marked_text_ranges_by(
+ indoc! {r#"
+ fn alpha() {
+ let «first_value» = 1;
+ }
+
+ [fn beta() {
+ let «second_value» = 2;
+ let third_value = second_value + missing_symbol;
+ }ˇ]
+
+ fn gamma() {
+ let «fourth_value» = missing_other_symbol;
+ }
+ "#},
+ vec![diagnostic_marker.clone(), search_range_marker.clone()],
+ );
+
+ let diagnostic_ranges = ranges.remove(&diagnostic_marker).unwrap_or_default();
+ let search_ranges = ranges.remove(&search_range_marker).unwrap_or_default();
+
+ let buffer = cx.new(|cx| Buffer::local(&text, cx));
+
+ buffer.update(cx, |buffer, cx| {
+ let snapshot = buffer.snapshot();
+ let diagnostics = DiagnosticSet::new(
+ diagnostic_ranges
+ .iter()
+ .enumerate()
+ .map(|(index, range)| DiagnosticEntry {
+ range: snapshot.offset_to_point_utf16(range.start)
+ ..snapshot.offset_to_point_utf16(range.end),
+ diagnostic: Diagnostic {
+ severity: match index {
+ 0 => DiagnosticSeverity::WARNING,
+ 1 => DiagnosticSeverity::ERROR,
+ _ => DiagnosticSeverity::HINT,
+ },
+ message: match index {
+ 0 => "first warning".to_string(),
+ 1 => "second error".to_string(),
+ _ => "third hint".to_string(),
+ },
+ group_id: index + 1,
+ is_primary: true,
+ source_kind: language::DiagnosticSourceKind::Pushed,
+ ..Diagnostic::default()
+ },
+ }),
+ &snapshot,
+ );
+ buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
+ });
+
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ let search_range = snapshot.offset_to_point(search_ranges[0].start)
+ ..snapshot.offset_to_point(search_ranges[0].end);
+
+ let active_buffer_diagnostics = zeta::active_buffer_diagnostics(&snapshot, search_range, 100);
+
+ assert_eq!(
+ active_buffer_diagnostics,
+ vec![zeta_prompt::ActiveBufferDiagnostic {
+ severity: Some(1),
+ message: "second error".to_string(),
+ snippet: text,
+ snippet_buffer_row_range: 5..5,
+ diagnostic_range_in_snippet: 61..73,
+ }]
+ );
+
+ let buffer = cx.new(|cx| {
+ Buffer::local(
+ indoc! {"
+ one
+ two
+ three
+ four
+ five
+ "},
+ cx,
+ )
+ });
+
+ buffer.update(cx, |buffer, cx| {
+ let snapshot = buffer.snapshot();
+ let diagnostics = DiagnosticSet::new(
+ vec![
+ DiagnosticEntry {
+ range: text::PointUtf16::new(0, 0)..text::PointUtf16::new(0, 3),
+ diagnostic: Diagnostic {
+ severity: DiagnosticSeverity::ERROR,
+ message: "row zero".to_string(),
+ group_id: 1,
+ is_primary: true,
+ source_kind: language::DiagnosticSourceKind::Pushed,
+ ..Diagnostic::default()
+ },
+ },
+ DiagnosticEntry {
+ range: text::PointUtf16::new(2, 0)..text::PointUtf16::new(2, 5),
+ diagnostic: Diagnostic {
+ severity: DiagnosticSeverity::WARNING,
+ message: "row two".to_string(),
+ group_id: 2,
+ is_primary: true,
+ source_kind: language::DiagnosticSourceKind::Pushed,
+ ..Diagnostic::default()
+ },
+ },
+ DiagnosticEntry {
+ range: text::PointUtf16::new(4, 0)..text::PointUtf16::new(4, 4),
+ diagnostic: Diagnostic {
+ severity: DiagnosticSeverity::INFORMATION,
+ message: "row four".to_string(),
+ group_id: 3,
+ is_primary: true,
+ source_kind: language::DiagnosticSourceKind::Pushed,
+ ..Diagnostic::default()
+ },
+ },
+ ],
+ &snapshot,
+ );
+ buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
+ });
+
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+
+ let active_buffer_diagnostics =
+ zeta::active_buffer_diagnostics(&snapshot, Point::new(2, 0)..Point::new(4, 0), 100);
+
+ assert_eq!(
+ active_buffer_diagnostics
+ .iter()
+ .map(|diagnostic| (
+ diagnostic.severity,
+ diagnostic.message.clone(),
+ diagnostic.snippet.clone(),
+ diagnostic.snippet_buffer_row_range.clone(),
+ diagnostic.diagnostic_range_in_snippet.clone(),
+ ))
+ .collect::<Vec<_>>(),
+ vec![
+ (
+ Some(2),
+ "row two".to_string(),
+ "one\ntwo\nthree\nfour\nfive\n".to_string(),
+ 2..2,
+ 8..13,
+ ),
+ (
+ Some(3),
+ "row four".to_string(),
+ "one\ntwo\nthree\nfour\nfive\n".to_string(),
+ 4..4,
+ 19..23,
+ ),
+ ]
+ );
+}
// Generate a model response that would apply the given diff to the active file.
fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response {
@@ -1885,11 +1966,13 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
inputs: ZetaPromptInput {
events: Default::default(),
related_files: Default::default(),
+ active_buffer_diagnostics: vec![],
cursor_path: Path::new("").into(),
cursor_excerpt: "".into(),
cursor_offset_in_excerpt: 0,
excerpt_start_row: None,
excerpt_ranges: Default::default(),
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -6,12 +6,12 @@ use crate::{
use anyhow::{Context as _, Result, anyhow};
use gpui::{App, AppContext as _, Entity, Task};
use language::{
- Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, ToOffset, ToPoint as _,
+ Anchor, Buffer, BufferSnapshot, ToOffset, ToPoint as _,
language_settings::all_language_settings,
};
use settings::EditPredictionPromptFormat;
use std::{path::Path, sync::Arc, time::Instant};
-use zeta_prompt::ZetaPromptInput;
+use zeta_prompt::{ZetaPromptInput, compute_editable_and_context_ranges};
const FIM_CONTEXT_TOKENS: usize = 512;
@@ -62,34 +62,43 @@ pub fn request_prediction(
let api_key = load_open_ai_compatible_api_key_if_needed(provider, cx);
let result = cx.background_spawn(async move {
- let (excerpt_range, _) = cursor_excerpt::editable_and_context_ranges_for_cursor_position(
- cursor_point,
- &snapshot,
+ let cursor_offset = cursor_point.to_offset(&snapshot);
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset);
+ let cursor_excerpt: Arc<str> = snapshot
+ .text_for_range(excerpt_point_range.clone())
+ .collect::<String>()
+ .into();
+ let syntax_ranges =
+ cursor_excerpt::compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range);
+ let (editable_range, _) = compute_editable_and_context_ranges(
+ &cursor_excerpt,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
FIM_CONTEXT_TOKENS,
0,
);
- let excerpt_offset_range = excerpt_range.to_offset(&snapshot);
- let cursor_offset = cursor_point.to_offset(&snapshot);
let inputs = ZetaPromptInput {
events,
- related_files: Vec::new(),
+ related_files: Some(Vec::new()),
+ active_buffer_diagnostics: Vec::new(),
cursor_offset_in_excerpt: cursor_offset - excerpt_offset_range.start,
cursor_path: full_path.clone(),
- excerpt_start_row: Some(excerpt_range.start.row),
- cursor_excerpt: snapshot
- .text_for_range(excerpt_range)
- .collect::<String>()
- .into(),
+ excerpt_start_row: Some(excerpt_point_range.start.row),
+ cursor_excerpt,
excerpt_ranges: Default::default(),
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
repo_url: None,
};
- let prefix = inputs.cursor_excerpt[..inputs.cursor_offset_in_excerpt].to_string();
- let suffix = inputs.cursor_excerpt[inputs.cursor_offset_in_excerpt..].to_string();
+ let editable_text = &inputs.cursor_excerpt[editable_range.clone()];
+ let cursor_in_editable = cursor_offset_in_excerpt.saturating_sub(editable_range.start);
+ let prefix = editable_text[..cursor_in_editable].to_string();
+ let suffix = editable_text[cursor_in_editable..].to_string();
let prompt = format_fim_prompt(prompt_format, &prefix, &suffix);
let stop_tokens = get_fim_stop_tokens();
@@ -10,17 +10,14 @@ use gpui::{
App, AppContext as _, Entity, Global, SharedString, Task,
http_client::{self, AsyncBody, HttpClient, Method},
};
-use language::{OffsetRangeExt as _, ToOffset, ToPoint as _};
+use language::{ToOffset, ToPoint as _};
use language_model::{ApiKeyState, EnvVar, env_var};
use release_channel::AppVersion;
use serde::Serialize;
use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant};
-
-use zeta_prompt::{ExcerptRanges, ZetaPromptInput};
+use zeta_prompt::ZetaPromptInput;
const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions";
-const MAX_REWRITE_TOKENS: usize = 150;
-const MAX_CONTEXT_TOKENS: usize = 350;
pub struct Mercury {
pub api_token: Entity<ApiKeyState>,
@@ -64,52 +61,47 @@ impl Mercury {
let active_buffer = buffer.clone();
let result = cx.background_spawn(async move {
- let (editable_range, context_range) =
- crate::cursor_excerpt::editable_and_context_ranges_for_cursor_position(
- cursor_point,
- &snapshot,
- MAX_CONTEXT_TOKENS,
- MAX_REWRITE_TOKENS,
- );
+ let cursor_offset = cursor_point.to_offset(&snapshot);
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ crate::cursor_excerpt::compute_cursor_excerpt(&snapshot, cursor_offset);
let related_files = zeta_prompt::filter_redundant_excerpts(
related_files,
full_path.as_ref(),
- context_range.start.row..context_range.end.row,
+ excerpt_point_range.start.row..excerpt_point_range.end.row,
);
- let context_offset_range = context_range.to_offset(&snapshot);
- let context_start_row = context_range.start.row;
-
- let editable_offset_range = editable_range.to_offset(&snapshot);
+ let cursor_excerpt: Arc<str> = snapshot
+ .text_for_range(excerpt_point_range.clone())
+ .collect::<String>()
+ .into();
+ let syntax_ranges = crate::cursor_excerpt::compute_syntax_ranges(
+ &snapshot,
+ cursor_offset,
+ &excerpt_offset_range,
+ );
+ let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges(
+ &cursor_excerpt,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ );
- let editable_range_in_excerpt = (editable_offset_range.start
- - context_offset_range.start)
- ..(editable_offset_range.end - context_offset_range.start);
- let context_range_in_excerpt =
- 0..(context_offset_range.end - context_offset_range.start);
+ let editable_offset_range = (excerpt_offset_range.start
+ + excerpt_ranges.editable_350.start)
+ ..(excerpt_offset_range.start + excerpt_ranges.editable_350.end);
let inputs = zeta_prompt::ZetaPromptInput {
events,
- related_files,
+ related_files: Some(related_files),
cursor_offset_in_excerpt: cursor_point.to_offset(&snapshot)
- - context_offset_range.start,
+ - excerpt_offset_range.start,
cursor_path: full_path.clone(),
- cursor_excerpt: snapshot
- .text_for_range(context_range)
- .collect::<String>()
- .into(),
+ cursor_excerpt,
experiment: None,
- excerpt_start_row: Some(context_start_row),
- excerpt_ranges: ExcerptRanges {
- editable_150: editable_range_in_excerpt.clone(),
- editable_180: editable_range_in_excerpt.clone(),
- editable_350: editable_range_in_excerpt.clone(),
- editable_150_context_350: context_range_in_excerpt.clone(),
- editable_180_context_350: context_range_in_excerpt.clone(),
- editable_350_context_150: context_range_in_excerpt.clone(),
- ..Default::default()
- },
+ excerpt_start_row: Some(excerpt_point_range.start.row),
+ excerpt_ranges,
+ syntax_ranges: Some(syntax_ranges),
+ active_buffer_diagnostics: vec![],
in_open_source_repo: false,
can_collect_data: false,
repo_url: None,
@@ -260,7 +252,7 @@ fn build_prompt(inputs: &ZetaPromptInput) -> String {
&mut prompt,
RECENTLY_VIEWED_SNIPPETS_START..RECENTLY_VIEWED_SNIPPETS_END,
|prompt| {
- for related_file in inputs.related_files.iter() {
+ for related_file in inputs.related_files.as_deref().unwrap_or_default().iter() {
for related_excerpt in &related_file.excerpts {
push_delimited(
prompt,
@@ -156,12 +156,14 @@ mod tests {
model_version: None,
inputs: ZetaPromptInput {
events: vec![],
- related_files: vec![],
+ related_files: Some(vec![]),
+ active_buffer_diagnostics: vec![],
cursor_path: Path::new("path.txt").into(),
cursor_offset_in_excerpt: 0,
cursor_excerpt: "".into(),
excerpt_start_row: None,
excerpt_ranges: Default::default(),
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -212,7 +212,8 @@ impl SweepAi {
let ep_inputs = zeta_prompt::ZetaPromptInput {
events: inputs.events,
- related_files: inputs.related_files.clone(),
+ related_files: Some(inputs.related_files.clone()),
+ active_buffer_diagnostics: vec![],
cursor_path: full_path.clone(),
cursor_excerpt: request_body.file_contents.clone().into(),
cursor_offset_in_excerpt: request_body.cursor_position,
@@ -226,6 +227,7 @@ impl SweepAi {
editable_350_context_150: 0..inputs.snapshot.len(),
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -1,7 +1,8 @@
use crate::{
CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId,
EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent,
- ZedUpdateRequiredError, cursor_excerpt::compute_excerpt_ranges,
+ ZedUpdateRequiredError,
+ cursor_excerpt::{self, compute_cursor_excerpt, compute_syntax_ranges},
prediction::EditPredictionResult,
};
use anyhow::Result;
@@ -11,12 +12,12 @@ use cloud_llm_client::{
use edit_prediction_types::PredictedCursorPosition;
use gpui::{App, AppContext as _, Entity, Task, WeakEntity, prelude::*};
use language::{
- Buffer, BufferSnapshot, ToOffset as _, ToPoint, language_settings::all_language_settings,
- text_diff,
+ Buffer, BufferSnapshot, DiagnosticSeverity, OffsetRangeExt as _, ToOffset as _,
+ language_settings::all_language_settings, text_diff,
};
use release_channel::AppVersion;
use settings::EditPredictionPromptFormat;
-use text::{Anchor, Bias};
+use text::{Anchor, Bias, Point};
use ui::SharedString;
use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification};
use zeta_prompt::{ParsedOutput, ZetaPromptInput};
@@ -24,7 +25,7 @@ use zeta_prompt::{ParsedOutput, ZetaPromptInput};
use std::{env, ops::Range, path::Path, sync::Arc, time::Instant};
use zeta_prompt::{
CURSOR_MARKER, ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output,
- prompt_input_contains_special_tokens,
+ prompt_input_contains_special_tokens, stop_tokens_for_format,
zeta1::{self, EDITABLE_REGION_END_MARKER},
};
@@ -43,6 +44,7 @@ pub fn request_prediction_with_zeta(
debug_tx,
trigger,
project,
+ diagnostic_search_range,
can_collect_data,
is_open_source,
..
@@ -115,6 +117,7 @@ pub fn request_prediction_with_zeta(
&snapshot,
related_files,
events,
+ diagnostic_search_range,
excerpt_path,
cursor_offset,
preferred_experiment,
@@ -192,7 +195,10 @@ pub fn request_prediction_with_zeta(
custom_settings,
prompt,
max_tokens,
- vec![],
+ stop_tokens_for_format(zeta_version)
+ .iter()
+ .map(|token| token.to_string())
+ .collect(),
open_ai_compatible_api_key.clone(),
&http_client,
)
@@ -226,7 +232,10 @@ pub fn request_prediction_with_zeta(
model: config.model_id.clone().unwrap_or_default(),
prompt,
temperature: None,
- stop: vec![],
+ stop: stop_tokens_for_format(config.format)
+ .iter()
+ .map(|token| std::borrow::Cow::Borrowed(*token))
+ .collect(),
max_tokens: Some(2048),
environment,
};
@@ -473,10 +482,50 @@ fn handle_api_response<T>(
}
}
+pub(crate) fn active_buffer_diagnostics(
+ snapshot: &language::BufferSnapshot,
+ diagnostic_search_range: Range<Point>,
+ additional_context_token_count: usize,
+) -> Vec<zeta_prompt::ActiveBufferDiagnostic> {
+ snapshot
+ .diagnostics_in_range::<Point, Point>(diagnostic_search_range, false)
+ .map(|entry| {
+ let severity = match entry.diagnostic.severity {
+ DiagnosticSeverity::ERROR => Some(1),
+ DiagnosticSeverity::WARNING => Some(2),
+ DiagnosticSeverity::INFORMATION => Some(3),
+ DiagnosticSeverity::HINT => Some(4),
+ _ => None,
+ };
+ let diagnostic_point_range = entry.range.clone();
+ let snippet_point_range = cursor_excerpt::expand_context_syntactically_then_linewise(
+ snapshot,
+ diagnostic_point_range.clone(),
+ additional_context_token_count,
+ );
+ let snippet = snapshot
+ .text_for_range(snippet_point_range.clone())
+ .collect::<String>();
+ let snippet_start_offset = snippet_point_range.start.to_offset(snapshot);
+ let diagnostic_offset_range = diagnostic_point_range.to_offset(snapshot);
+ zeta_prompt::ActiveBufferDiagnostic {
+ severity,
+ message: entry.diagnostic.message.clone(),
+ snippet,
+ snippet_buffer_row_range: diagnostic_point_range.start.row
+ ..diagnostic_point_range.end.row,
+ diagnostic_range_in_snippet: diagnostic_offset_range.start - snippet_start_offset
+ ..diagnostic_offset_range.end - snippet_start_offset,
+ }
+ })
+ .collect()
+}
+
pub fn zeta2_prompt_input(
snapshot: &language::BufferSnapshot,
related_files: Vec<zeta_prompt::RelatedFile>,
events: Vec<Arc<zeta_prompt::Event>>,
+ diagnostic_search_range: Range<Point>,
excerpt_path: Arc<Path>,
cursor_offset: usize,
preferred_experiment: Option<String>,
@@ -484,33 +533,39 @@ pub fn zeta2_prompt_input(
can_collect_data: bool,
repo_url: Option<String>,
) -> (Range<usize>, zeta_prompt::ZetaPromptInput) {
- let cursor_point = cursor_offset.to_point(snapshot);
-
- let (full_context, full_context_offset_range, excerpt_ranges) =
- compute_excerpt_ranges(cursor_point, snapshot);
-
- let full_context_start_offset = full_context_offset_range.start;
- let full_context_start_row = full_context.start.row;
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ compute_cursor_excerpt(snapshot, cursor_offset);
+
+ let cursor_excerpt: Arc<str> = snapshot
+ .text_for_range(excerpt_point_range.clone())
+ .collect::<String>()
+ .into();
+ let syntax_ranges = compute_syntax_ranges(snapshot, cursor_offset, &excerpt_offset_range);
+ let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges(
+ &cursor_excerpt,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ );
- let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset;
+ let active_buffer_diagnostics =
+ active_buffer_diagnostics(snapshot, diagnostic_search_range, 100);
let prompt_input = zeta_prompt::ZetaPromptInput {
cursor_path: excerpt_path,
- cursor_excerpt: snapshot
- .text_for_range(full_context)
- .collect::<String>()
- .into(),
+ cursor_excerpt,
cursor_offset_in_excerpt,
- excerpt_start_row: Some(full_context_start_row),
+ excerpt_start_row: Some(excerpt_point_range.start.row),
events,
- related_files,
+ related_files: Some(related_files),
+ active_buffer_diagnostics,
excerpt_ranges,
+ syntax_ranges: Some(syntax_ranges),
experiment: preferred_experiment,
in_open_source_repo: is_open_source,
can_collect_data,
repo_url,
};
- (full_context_offset_range, prompt_input)
+ (excerpt_offset_range, prompt_input)
}
pub(crate) fn edit_prediction_accepted(
@@ -259,7 +259,10 @@ impl TeacherPrompt {
}
pub fn format_context(example: &Example) -> String {
- let related_files = example.prompt_inputs.as_ref().map(|pi| &pi.related_files);
+ let related_files = example
+ .prompt_inputs
+ .as_ref()
+ .and_then(|pi| pi.related_files.as_deref());
let Some(related_files) = related_files else {
return "(No context)".to_string();
};
@@ -7,12 +7,12 @@ use crate::{
use anyhow::{Context as _, Result};
use edit_prediction::{
EditPredictionStore,
- cursor_excerpt::compute_excerpt_ranges,
+ cursor_excerpt::{compute_cursor_excerpt, compute_syntax_ranges},
udiff::{OpenedBuffers, refresh_worktree_entries, strip_diff_path_prefix},
};
use futures::AsyncWriteExt as _;
use gpui::{AsyncApp, Entity};
-use language::{Anchor, Buffer, LanguageNotFound, ToOffset, ToPoint};
+use language::{Anchor, Buffer, LanguageNotFound, ToOffset};
use project::{Project, ProjectPath, buffer_store::BufferStoreEvent};
use std::{fs, path::PathBuf, sync::Arc};
use zeta_prompt::ZetaPromptInput;
@@ -71,37 +71,41 @@ pub async fn run_load_project(
let existing_related_files = example
.prompt_inputs
.take()
- .map(|inputs| inputs.related_files)
- .unwrap_or_default();
+ .and_then(|inputs| inputs.related_files);
let (prompt_inputs, language_name) = buffer.read_with(&cx, |buffer, _cx| {
let snapshot = buffer.snapshot();
- let cursor_point = cursor_position.to_point(&snapshot);
let cursor_offset = cursor_position.to_offset(&snapshot);
let language_name = buffer
.language()
.map(|l| l.name().to_string())
.unwrap_or_else(|| "Unknown".to_string());
- let (full_context_point_range, full_context_offset_range, excerpt_ranges) =
- compute_excerpt_ranges(cursor_point, &snapshot);
+ let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
+ compute_cursor_excerpt(&snapshot, cursor_offset);
let cursor_excerpt: Arc<str> = buffer
- .text_for_range(full_context_offset_range.clone())
+ .text_for_range(excerpt_offset_range.clone())
.collect::<String>()
.into();
- let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start;
- let excerpt_start_row = Some(full_context_point_range.start.row);
+ let syntax_ranges = compute_syntax_ranges(&snapshot, cursor_offset, &excerpt_offset_range);
+ let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges(
+ &cursor_excerpt,
+ cursor_offset_in_excerpt,
+ &syntax_ranges,
+ );
(
ZetaPromptInput {
cursor_path: example.spec.cursor_path.clone(),
cursor_excerpt,
cursor_offset_in_excerpt,
- excerpt_start_row,
+ excerpt_start_row: Some(excerpt_point_range.start.row),
events,
related_files: existing_related_files,
+ active_buffer_diagnostics: vec![],
excerpt_ranges,
+ syntax_ranges: Some(syntax_ranges),
in_open_source_repo: false,
can_collect_data: false,
experiment: None,
@@ -20,18 +20,13 @@ pub async fn run_context_retrieval(
example_progress: &ExampleProgress,
mut cx: AsyncApp,
) -> anyhow::Result<()> {
- if example.prompt_inputs.is_some() {
- if example.spec.repository_url.is_empty() {
- return Ok(());
- }
-
- if example
- .prompt_inputs
- .as_ref()
- .is_some_and(|inputs| !inputs.related_files.is_empty())
- {
- return Ok(());
- }
+ if example
+ .prompt_inputs
+ .as_ref()
+ .is_some_and(|inputs| inputs.related_files.is_some())
+ || example.spec.repository_url.is_empty()
+ {
+ return Ok(());
}
run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?;
@@ -72,7 +67,7 @@ pub async fn run_context_retrieval(
step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal);
if let Some(prompt_inputs) = example.prompt_inputs.as_mut() {
- prompt_inputs.related_files = context_files;
+ prompt_inputs.related_files = Some(context_files);
}
Ok(())
}
@@ -668,7 +668,8 @@ mod tests {
cursor_offset_in_excerpt: 0,
excerpt_start_row,
events,
- related_files: Vec::new(),
+ related_files: Some(Vec::new()),
+ active_buffer_diagnostics: Vec::new(),
excerpt_ranges: ExcerptRanges {
editable_150: 0..content.len(),
editable_180: 0..content.len(),
@@ -678,6 +679,7 @@ mod tests {
editable_350_context_150: 0..content.len(),
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -42,4 +42,4 @@ serde_json.workspace = true
settings = {workspace= true, features = ["test-support"]}
text = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
-zlog.workspace = true
+
@@ -50,18 +50,12 @@ zed_actions.workspace = true
zeta_prompt.workspace = true
[dev-dependencies]
-clock.workspace = true
copilot = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
futures.workspace = true
indoc.workspace = true
-language_model.workspace = true
-lsp = { workspace = true, features = ["test-support"] }
-pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
-release_channel.workspace = true
-semver.workspace = true
-serde_json.workspace = true
theme = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
-zlog.workspace = true
+
+
@@ -402,7 +402,13 @@ impl RatePredictionsModal {
write!(&mut formatted_inputs, "## Related files\n\n").unwrap();
- for included_file in prediction.inputs.related_files.iter() {
+ for included_file in prediction
+ .inputs
+ .related_files
+ .as_deref()
+ .unwrap_or_default()
+ .iter()
+ {
write!(
&mut formatted_inputs,
"### {}\n\n",
@@ -119,7 +119,7 @@ release_channel.workspace = true
rand.workspace = true
semver.workspace = true
settings = { workspace = true, features = ["test-support"] }
-tempfile.workspace = true
+
text = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
tree-sitter-c.workspace = true
@@ -133,7 +133,7 @@ unicode-width.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
+
zlog.workspace = true
@@ -107,7 +107,7 @@ use project::{InlayId, lsp_store::LspFoldingRange, lsp_store::TokenType};
use serde::Deserialize;
use smallvec::SmallVec;
use sum_tree::{Bias, TreeMap};
-use text::{BufferId, LineIndent, Patch, ToOffset as _};
+use text::{BufferId, LineIndent, Patch};
use ui::{SharedString, px};
use unicode_segmentation::UnicodeSegmentation;
use ztracing::instrument;
@@ -1977,57 +1977,11 @@ impl DisplaySnapshot {
/// Returned ranges are 0-based relative to `buffer_range.start`.
pub(super) fn combined_highlights(
&self,
- buffer_id: BufferId,
- buffer_range: Range<usize>,
+ multibuffer_range: Range<MultiBufferOffset>,
syntax_theme: &theme::SyntaxTheme,
) -> Vec<(Range<usize>, HighlightStyle)> {
let multibuffer = self.buffer_snapshot();
- let multibuffer_range = multibuffer
- .excerpts()
- .find_map(|(excerpt_id, buffer, range)| {
- if buffer.remote_id() != buffer_id {
- return None;
- }
- let context_start = range.context.start.to_offset(buffer);
- let context_end = range.context.end.to_offset(buffer);
- if buffer_range.start < context_start || buffer_range.end > context_end {
- return None;
- }
- let start_anchor = buffer.anchor_before(buffer_range.start);
- let end_anchor = buffer.anchor_after(buffer_range.end);
- let mb_range =
- multibuffer.anchor_range_in_excerpt(excerpt_id, start_anchor..end_anchor)?;
- Some(mb_range.start.to_offset(multibuffer)..mb_range.end.to_offset(multibuffer))
- });
-
- let Some(multibuffer_range) = multibuffer_range else {
- // Range is outside all excerpts (e.g. symbol name not in a
- // multi-buffer excerpt). Fall back to buffer-level syntax highlights.
- let buffer_snapshot = multibuffer.excerpts().find_map(|(_, buffer, _)| {
- (buffer.remote_id() == buffer_id).then(|| buffer.clone())
- });
- let Some(buffer_snapshot) = buffer_snapshot else {
- return Vec::new();
- };
- let mut highlights = Vec::new();
- let mut offset = 0usize;
- for chunk in buffer_snapshot.chunks(buffer_range, true) {
- let chunk_len = chunk.text.len();
- if chunk_len == 0 {
- continue;
- }
- if let Some(style) = chunk
- .syntax_highlight_id
- .and_then(|id| id.style(syntax_theme))
- {
- highlights.push((offset..offset + chunk_len, style));
- }
- offset += chunk_len;
- }
- return highlights;
- };
-
let chunks = custom_highlights::CustomHighlightsChunks::new(
multibuffer_range,
true,
@@ -1091,23 +1091,29 @@ impl BlockMap {
};
let rows_before_block;
- match block_placement {
- BlockPlacement::Above(position) => {
- rows_before_block = position - new_transforms.summary().input_rows;
+ let input_rows = new_transforms.summary().input_rows;
+ match &block_placement {
+ &BlockPlacement::Above(position) => {
+ let Some(delta) = position.checked_sub(input_rows) else {
+ continue;
+ };
+ rows_before_block = delta;
just_processed_folded_buffer = false;
}
- BlockPlacement::Near(position) | BlockPlacement::Below(position) => {
+ &BlockPlacement::Near(position) | &BlockPlacement::Below(position) => {
if just_processed_folded_buffer {
continue;
}
- if position + RowDelta(1) < new_transforms.summary().input_rows {
+ let Some(delta) = (position + RowDelta(1)).checked_sub(input_rows) else {
continue;
- }
- rows_before_block =
- (position + RowDelta(1)) - new_transforms.summary().input_rows;
+ };
+ rows_before_block = delta;
}
- BlockPlacement::Replace(ref range) => {
- rows_before_block = *range.start() - new_transforms.summary().input_rows;
+ BlockPlacement::Replace(range) => {
+ let Some(delta) = range.start().checked_sub(input_rows) else {
+ continue;
+ };
+ rows_before_block = delta;
summary.input_rows = WrapRow(1) + (*range.end() - *range.start());
just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. });
}
@@ -41,6 +41,10 @@ macro_rules! impl_for_row_types {
pub fn saturating_sub(self, other: $row_delta) -> $row {
$row(self.0.saturating_sub(other.0))
}
+
+ pub fn checked_sub(self, other: $row) -> Option<$row_delta> {
+ self.0.checked_sub(other.0).map($row_delta)
+ }
}
impl ::std::ops::Add for $row {
@@ -1,4 +1,4 @@
-use std::{cmp, ops::Range};
+use std::ops::Range;
use collections::HashMap;
use futures::FutureExt;
@@ -6,10 +6,15 @@ use futures::future::join_all;
use gpui::{App, Context, HighlightStyle, Task};
use itertools::Itertools as _;
use language::language_settings::language_settings;
-use language::{Buffer, BufferSnapshot, OutlineItem};
-use multi_buffer::{Anchor, MultiBufferSnapshot};
-use text::{Bias, BufferId, OffsetRangeExt as _, ToOffset as _};
+use language::{Buffer, OutlineItem};
+use multi_buffer::{
+ Anchor, AnchorRangeExt as _, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot,
+ ToOffset as _,
+};
+use text::BufferId;
use theme::{ActiveTheme as _, SyntaxTheme};
+use unicode_segmentation::UnicodeSegmentation as _;
+use util::maybe;
use crate::display_map::DisplaySnapshot;
use crate::{Editor, LSP_REQUEST_DEBOUNCE_TIMEOUT};
@@ -77,6 +82,9 @@ impl Editor {
let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?;
let excerpt_id = excerpt.id();
let buffer_id = excerpt.buffer_id();
+ if Some(buffer_id) != cursor.text_anchor.buffer_id {
+ return None;
+ }
let buffer = self.buffer.read(cx).buffer(buffer_id)?;
let buffer_snapshot = buffer.read(cx).snapshot();
let cursor_text_anchor = cursor.text_anchor;
@@ -212,16 +220,13 @@ impl Editor {
let display_snapshot =
editor.display_map.update(cx, |map, cx| map.snapshot(cx));
let mut highlighted_results = results;
- for (buffer_id, items) in &mut highlighted_results {
- if let Some(buffer) = editor.buffer.read(cx).buffer(*buffer_id) {
- let snapshot = buffer.read(cx).snapshot();
- apply_highlights(
- items,
- *buffer_id,
- &snapshot,
- &display_snapshot,
- &syntax,
- );
+ for items in highlighted_results.values_mut() {
+ for item in items {
+ if let Some(highlights) =
+ highlights_from_buffer(&display_snapshot, &item, &syntax)
+ {
+ item.highlight_ranges = highlights;
+ }
}
}
editor.lsp_document_symbols.extend(highlighted_results);
@@ -239,34 +244,6 @@ fn lsp_symbols_enabled(buffer: &Buffer, cx: &App) -> bool {
.lsp_enabled()
}
-/// Applies combined syntax + semantic token highlights to LSP document symbol
-/// outline items that were built without highlights by the project layer.
-fn apply_highlights(
- items: &mut [OutlineItem<text::Anchor>],
- buffer_id: BufferId,
- buffer_snapshot: &BufferSnapshot,
- display_snapshot: &DisplaySnapshot,
- syntax_theme: &SyntaxTheme,
-) {
- for item in items {
- let symbol_range = item.range.to_offset(buffer_snapshot);
- let selection_start = item.source_range_for_text.start.to_offset(buffer_snapshot);
-
- if let Some(highlights) = highlights_from_buffer(
- &item.text,
- 0,
- buffer_id,
- buffer_snapshot,
- display_snapshot,
- symbol_range,
- selection_start,
- syntax_theme,
- ) {
- item.highlight_ranges = highlights;
- }
- }
-}
-
/// Finds where the symbol name appears in the buffer and returns combined
/// (tree-sitter + semantic token) highlights for those positions.
///
@@ -275,117 +252,78 @@ fn apply_highlights(
/// to word-by-word matching for cases like `impl<T> Trait<T> for Type`
/// where the LSP name doesn't appear verbatim in the buffer.
fn highlights_from_buffer(
- name: &str,
- name_offset_in_text: usize,
- buffer_id: BufferId,
- buffer_snapshot: &BufferSnapshot,
display_snapshot: &DisplaySnapshot,
- symbol_range: Range<usize>,
- selection_start_offset: usize,
+ item: &OutlineItem<text::Anchor>,
syntax_theme: &SyntaxTheme,
) -> Option<Vec<(Range<usize>, HighlightStyle)>> {
- if name.is_empty() {
+ let outline_text = &item.text;
+ if outline_text.is_empty() {
return None;
}
- let range_start_offset = symbol_range.start;
- let range_end_offset = symbol_range.end;
-
- // Try to find the name verbatim in the buffer near the selection range.
- let search_start = buffer_snapshot.clip_offset(
- selection_start_offset
- .saturating_sub(name.len())
- .max(range_start_offset),
- Bias::Right,
- );
- let search_end = buffer_snapshot.clip_offset(
- cmp::min(selection_start_offset + name.len() * 2, range_end_offset),
- Bias::Left,
- );
-
- if search_start < search_end {
- let buffer_text: String = buffer_snapshot
- .text_for_range(search_start..search_end)
- .collect();
- if let Some(found_at) = buffer_text.find(name) {
- let name_start_offset = search_start + found_at;
- let name_end_offset = name_start_offset + name.len();
- let result = highlights_for_buffer_range(
- name_offset_in_text,
- name_start_offset..name_end_offset,
- buffer_id,
- display_snapshot,
- syntax_theme,
+ let multi_buffer_snapshot = display_snapshot.buffer();
+ let multi_buffer_source_range_anchors =
+ multi_buffer_snapshot.text_anchors_to_visible_anchors([
+ item.source_range_for_text.start,
+ item.source_range_for_text.end,
+ ]);
+ let Some(anchor_range) = maybe!({
+ Some(
+ (*multi_buffer_source_range_anchors.get(0)?)?
+ ..(*multi_buffer_source_range_anchors.get(1)?)?,
+ )
+ }) else {
+ return None;
+ };
+
+ let selection_point_range = anchor_range.to_point(multi_buffer_snapshot);
+ let mut search_start = selection_point_range.start;
+ search_start.column = 0;
+ let search_start_offset = search_start.to_offset(&multi_buffer_snapshot);
+ let mut search_end = selection_point_range.end;
+ search_end.column = multi_buffer_snapshot.line_len(MultiBufferRow(search_end.row));
+
+ let search_text = multi_buffer_snapshot
+ .text_for_range(search_start..search_end)
+ .collect::<String>();
+
+ let mut outline_text_highlights = Vec::new();
+ match search_text.find(outline_text) {
+ Some(start_index) => {
+ let multibuffer_start = search_start_offset + MultiBufferOffset(start_index);
+ let multibuffer_end = multibuffer_start + MultiBufferOffset(outline_text.len());
+ outline_text_highlights.extend(
+ display_snapshot
+ .combined_highlights(multibuffer_start..multibuffer_end, syntax_theme),
);
- if result.is_some() {
- return result;
- }
}
- }
-
- // Fallback: match word-by-word. Split the name on whitespace and find
- // each word sequentially in the buffer's symbol range.
- let range_start_offset = buffer_snapshot.clip_offset(range_start_offset, Bias::Right);
- let range_end_offset = buffer_snapshot.clip_offset(range_end_offset, Bias::Left);
-
- let mut highlights = Vec::new();
- let mut got_any = false;
- let buffer_text: String = buffer_snapshot
- .text_for_range(range_start_offset..range_end_offset)
- .collect();
- let mut buf_search_from = 0usize;
- let mut name_search_from = 0usize;
- for word in name.split_whitespace() {
- let name_word_start = name[name_search_from..]
- .find(word)
- .map(|pos| name_search_from + pos)
- .unwrap_or(name_search_from);
- if let Some(found_in_buf) = buffer_text[buf_search_from..].find(word) {
- let buf_word_start = range_start_offset + buf_search_from + found_in_buf;
- let buf_word_end = buf_word_start + word.len();
- let text_cursor = name_offset_in_text + name_word_start;
- if let Some(mut word_highlights) = highlights_for_buffer_range(
- text_cursor,
- buf_word_start..buf_word_end,
- buffer_id,
- display_snapshot,
- syntax_theme,
- ) {
- got_any = true;
- highlights.append(&mut word_highlights);
+ None => {
+ for (outline_text_word_start, outline_word) in outline_text.split_word_bound_indices() {
+ if let Some(start_index) = search_text.find(outline_word) {
+ let multibuffer_start = search_start_offset + MultiBufferOffset(start_index);
+ let multibuffer_end = multibuffer_start + MultiBufferOffset(outline_word.len());
+ outline_text_highlights.extend(
+ display_snapshot
+ .combined_highlights(multibuffer_start..multibuffer_end, syntax_theme)
+ .into_iter()
+ .map(|(range_in_word, style)| {
+ (
+ outline_text_word_start + range_in_word.start
+ ..outline_text_word_start + range_in_word.end,
+ style,
+ )
+ }),
+ );
+ }
}
- buf_search_from = buf_search_from + found_in_buf + word.len();
}
- name_search_from = name_word_start + word.len();
}
- got_any.then_some(highlights)
-}
-
-/// Gets combined (tree-sitter + semantic token) highlights for a buffer byte
-/// range via the editor's display snapshot, then shifts the returned ranges
-/// so they start at `text_cursor_start` (the position in the outline item text).
-fn highlights_for_buffer_range(
- text_cursor_start: usize,
- buffer_range: Range<usize>,
- buffer_id: BufferId,
- display_snapshot: &DisplaySnapshot,
- syntax_theme: &SyntaxTheme,
-) -> Option<Vec<(Range<usize>, HighlightStyle)>> {
- let raw = display_snapshot.combined_highlights(buffer_id, buffer_range, syntax_theme);
- if raw.is_empty() {
- return None;
+ if outline_text_highlights.is_empty() {
+ None
+ } else {
+ Some(outline_text_highlights)
}
- Some(
- raw.into_iter()
- .map(|(range, style)| {
- (
- range.start + text_cursor_start..range.end + text_cursor_start,
- style,
- )
- })
- .collect(),
- )
}
#[cfg(test)]
@@ -7500,7 +7500,8 @@ impl Editor {
let mut read_ranges = Vec::new();
for highlight in highlights {
let buffer_id = cursor_buffer.read(cx).remote_id();
- for (excerpt_id, excerpt_range) in buffer.excerpts_for_buffer(buffer_id, cx)
+ for (excerpt_id, _, excerpt_range) in
+ buffer.excerpts_for_buffer(buffer_id, cx)
{
let start = highlight
.range
@@ -11683,6 +11684,43 @@ impl Editor {
self.restore_hunks_in_ranges(selections, window, cx);
}
+ /// Restores the diff hunks in the editor's selections and moves the cursor
+ /// to the next diff hunk. Wraps around to the beginning of the buffer if
+ /// not all diff hunks are expanded.
+ pub fn restore_and_next(
+ &mut self,
+ _: &::git::RestoreAndNext,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let selections = self
+ .selections
+ .all(&self.display_snapshot(cx))
+ .into_iter()
+ .map(|selection| selection.range())
+ .collect();
+
+ self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+ self.restore_hunks_in_ranges(selections, window, cx);
+
+ let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded();
+ let wrap_around = !all_diff_hunks_expanded;
+ let snapshot = self.snapshot(window, cx);
+ let position = self
+ .selections
+ .newest::<Point>(&snapshot.display_snapshot)
+ .head();
+
+ self.go_to_hunk_before_or_after_position(
+ &snapshot,
+ position,
+ Direction::Next,
+ wrap_around,
+ window,
+ cx,
+ );
+ }
+
pub fn restore_hunks_in_ranges(
&mut self,
ranges: Vec<Range<Point>>,
@@ -17735,6 +17773,7 @@ impl Editor {
&snapshot,
selection.head(),
Direction::Next,
+ true,
window,
cx,
);
@@ -17745,14 +17784,15 @@ impl Editor {
snapshot: &EditorSnapshot,
position: Point,
direction: Direction,
+ wrap_around: bool,
window: &mut Window,
cx: &mut Context<Editor>,
) {
let row = if direction == Direction::Next {
- self.hunk_after_position(snapshot, position)
+ self.hunk_after_position(snapshot, position, wrap_around)
.map(|hunk| hunk.row_range.start)
} else {
- self.hunk_before_position(snapshot, position)
+ self.hunk_before_position(snapshot, position, wrap_around)
};
if let Some(row) = row {
@@ -17770,17 +17810,23 @@ impl Editor {
&mut self,
snapshot: &EditorSnapshot,
position: Point,
+ wrap_around: bool,
) -> Option<MultiBufferDiffHunk> {
- snapshot
+ let result = snapshot
.buffer_snapshot()
.diff_hunks_in_range(position..snapshot.buffer_snapshot().max_point())
- .find(|hunk| hunk.row_range.start.0 > position.row)
- .or_else(|| {
+ .find(|hunk| hunk.row_range.start.0 > position.row);
+
+ if wrap_around {
+ result.or_else(|| {
snapshot
.buffer_snapshot()
.diff_hunks_in_range(Point::zero()..position)
.find(|hunk| hunk.row_range.end.0 < position.row)
})
+ } else {
+ result
+ }
}
fn go_to_prev_hunk(
@@ -17796,6 +17842,7 @@ impl Editor {
&snapshot,
selection.head(),
Direction::Prev,
+ true,
window,
cx,
);
@@ -17805,11 +17852,15 @@ impl Editor {
&mut self,
snapshot: &EditorSnapshot,
position: Point,
+ wrap_around: bool,
) -> Option<MultiBufferRow> {
- snapshot
- .buffer_snapshot()
- .diff_hunk_before(position)
- .or_else(|| snapshot.buffer_snapshot().diff_hunk_before(Point::MAX))
+ let result = snapshot.buffer_snapshot().diff_hunk_before(position);
+
+ if wrap_around {
+ result.or_else(|| snapshot.buffer_snapshot().diff_hunk_before(Point::MAX))
+ } else {
+ result
+ }
}
fn go_to_next_change(
@@ -20489,7 +20540,7 @@ impl Editor {
let mut all_folded_excerpt_ids = Vec::new();
for buffer_id in &ids_to_fold {
let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(*buffer_id, cx);
- all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _)| id));
+ all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _, _)| id));
}
self.display_map.update(cx, |display_map, cx| {
@@ -20519,7 +20570,7 @@ impl Editor {
display_map.unfold_buffers([buffer_id], cx);
});
cx.emit(EditorEvent::BufferFoldToggled {
- ids: unfolded_excerpts.iter().map(|&(id, _)| id).collect(),
+ ids: unfolded_excerpts.iter().map(|&(id, _, _)| id).collect(),
folded: false,
});
cx.notify();
@@ -20793,38 +20844,23 @@ impl Editor {
}
self.stage_or_unstage_diff_hunks(stage, ranges, cx);
+
+ let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded();
+ let wrap_around = !all_diff_hunks_expanded;
let snapshot = self.snapshot(window, cx);
let position = self
.selections
.newest::<Point>(&snapshot.display_snapshot)
.head();
- let mut row = snapshot
- .buffer_snapshot()
- .diff_hunks_in_range(position..snapshot.buffer_snapshot().max_point())
- .find(|hunk| hunk.row_range.start.0 > position.row)
- .map(|hunk| hunk.row_range.start);
-
- let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded();
- // Outside of the project diff editor, wrap around to the beginning.
- if !all_diff_hunks_expanded {
- row = row.or_else(|| {
- snapshot
- .buffer_snapshot()
- .diff_hunks_in_range(Point::zero()..position)
- .find(|hunk| hunk.row_range.end.0 < position.row)
- .map(|hunk| hunk.row_range.start)
- });
- }
- if let Some(row) = row {
- let destination = Point::new(row.0, 0);
- let autoscroll = Autoscroll::center();
-
- self.unfold_ranges(&[destination..destination], false, false, cx);
- self.change_selections(SelectionEffects::scroll(autoscroll), window, cx, |s| {
- s.select_ranges([destination..destination]);
- });
- }
+ self.go_to_hunk_before_or_after_position(
+ &snapshot,
+ position,
+ Direction::Next,
+ wrap_around,
+ window,
+ cx,
+ );
}
pub(crate) fn do_stage_or_unstage(
@@ -22906,7 +22942,7 @@ impl Editor {
.snapshot();
let mut handled = false;
- for (id, ExcerptRange { context, .. }) in
+ for (id, _, ExcerptRange { context, .. }) in
self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx)
{
if context.start.cmp(&position, &snapshot).is_ge()
@@ -29249,6 +29285,7 @@ fn render_diff_hunk_controls(
&snapshot,
position,
Direction::Next,
+ true,
window,
cx,
);
@@ -29284,6 +29321,7 @@ fn render_diff_hunk_controls(
&snapshot,
point,
Direction::Prev,
+ true,
window,
cx,
);
@@ -33557,3 +33557,66 @@ comment */ˇ»;"#},
assert_text_with_selections(editor, indoc! {r#"let arr = [«1, 2, 3]ˇ»;"#}, cx);
});
}
+
+#[gpui::test]
+async fn test_restore_and_next(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let diff_base = r#"
+ one
+ two
+ three
+ four
+ five
+ "#
+ .unindent();
+
+ cx.set_state(
+ &r#"
+ ONE
+ two
+ ˇTHREE
+ four
+ FIVE
+ "#
+ .unindent(),
+ );
+ cx.set_head_text(&diff_base);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.set_expand_all_diff_hunks(cx);
+ editor.restore_and_next(&Default::default(), window, cx);
+ });
+ cx.run_until_parked();
+
+ cx.assert_state_with_diff(
+ r#"
+ - one
+ + ONE
+ two
+ three
+ four
+ - ˇfive
+ + FIVE
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, window, cx| {
+ editor.restore_and_next(&Default::default(), window, cx);
+ });
+ cx.run_until_parked();
+
+ cx.assert_state_with_diff(
+ r#"
+ - one
+ + ONE
+ two
+ three
+ four
+ ˇfive
+ "#
+ .unindent(),
+ );
+}
@@ -637,6 +637,7 @@ impl EditorElement {
register_action(editor, window, Editor::accept_edit_prediction);
register_action(editor, window, Editor::restore_file);
register_action(editor, window, Editor::git_restore);
+ register_action(editor, window, Editor::restore_and_next);
register_action(editor, window, Editor::apply_all_diff_hunks);
register_action(editor, window, Editor::apply_selected_diff_hunks);
register_action(editor, window, Editor::open_active_item_in_terminal);
@@ -1165,8 +1165,8 @@ impl SplittableEditor {
let lhs_ranges: Vec<ExcerptRange<Point>> = rhs_multibuffer
.excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx)
.into_iter()
- .filter(|(id, _)| rhs_excerpt_ids.contains(id))
- .map(|(_, excerpt_range)| {
+ .filter(|(id, _, _)| rhs_excerpt_ids.contains(id))
+ .map(|(_, _, excerpt_range)| {
let to_base_text = |range: Range<Point>| {
let start = diff_snapshot
.buffer_point_to_base_text_range(
@@ -1857,6 +1857,21 @@ impl Item for SplittableEditor {
fn pixel_position_of_cursor(&self, cx: &App) -> Option<gpui::Point<gpui::Pixels>> {
self.focused_editor().read(cx).pixel_position_of_cursor(cx)
}
+
+ fn act_as_type<'a>(
+ &'a self,
+ type_id: std::any::TypeId,
+ self_handle: &'a Entity<Self>,
+ _: &'a App,
+ ) -> Option<gpui::AnyEntity> {
+ if type_id == std::any::TypeId::of::<Self>() {
+ Some(self_handle.clone().into())
+ } else if type_id == std::any::TypeId::of::<Editor>() {
+ Some(self.rhs_editor.clone().into())
+ } else {
+ None
+ }
+ }
}
impl SearchableItem for SplittableEditor {
@@ -2064,7 +2079,7 @@ impl Render for SplittableEditor {
#[cfg(test)]
mod tests {
- use std::sync::Arc;
+ use std::{any::TypeId, sync::Arc};
use buffer_diff::BufferDiff;
use collections::{HashMap, HashSet};
@@ -2080,14 +2095,14 @@ mod tests {
use settings::{DiffViewStyle, SettingsStore};
use ui::{VisualContext as _, div, px};
use util::rel_path::rel_path;
- use workspace::MultiWorkspace;
+ use workspace::{Item, MultiWorkspace};
- use crate::SplittableEditor;
use crate::display_map::{
BlockPlacement, BlockProperties, BlockStyle, Crease, FoldPlaceholder,
};
use crate::inlays::Inlay;
use crate::test::{editor_content_with_blocks_and_width, set_block_content_for_tests};
+ use crate::{Editor, SplittableEditor};
use multi_buffer::MultiBufferOffset;
async fn init_test(
@@ -6025,4 +6040,17 @@ mod tests {
cx.run_until_parked();
}
+
+ #[gpui::test]
+ async fn test_act_as_type(cx: &mut gpui::TestAppContext) {
+ let (splittable_editor, cx) = init_test(cx, SoftWrap::None, DiffViewStyle::Split).await;
+ let editor = splittable_editor.read_with(cx, |editor, cx| {
+ editor.act_as_type(TypeId::of::<Editor>(), &splittable_editor, cx)
+ });
+
+ assert!(
+ editor.is_some(),
+ "SplittableEditor should be able to act as Editor"
+ );
+ }
}
@@ -7,6 +7,7 @@ use anyhow::{Context as _, Result, bail};
use futures::{StreamExt, io};
use heck::ToSnakeCase;
use http_client::{self, AsyncBody, HttpClient};
+use language::LanguageConfig;
use serde::Deserialize;
use std::{
env, fs, mem,
@@ -583,7 +584,7 @@ async fn populate_defaults(
while let Some(language_dir) = language_dir_entries.next().await {
let language_dir = language_dir?;
- let config_path = language_dir.join("config.toml");
+ let config_path = language_dir.join(LanguageConfig::FILE_NAME);
if fs.is_file(config_path.as_path()).await {
let relative_language_dir =
language_dir.strip_prefix(extension_path)?.to_path_buf();
@@ -26,7 +26,9 @@ reqwest_client.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
+settings_content.workspace = true
snippet_provider.workspace = true
+task.workspace = true
theme.workspace = true
tokio = { workspace = true, features = ["full"] }
toml.workspace = true
@@ -11,8 +11,10 @@ use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
use extension::{ExtensionManifest, ExtensionSnippets};
use language::LanguageConfig;
use reqwest_client::ReqwestClient;
+use settings_content::SemanticTokenRules;
use snippet_provider::file_to_snippets;
use snippet_provider::format::VsSnippetsFile;
+use task::TaskTemplates;
use tokio::process::Command;
use tree_sitter::{Language, Query, WasmStore};
@@ -323,9 +325,8 @@ fn test_languages(
) -> Result<()> {
for relative_language_dir in &manifest.languages {
let language_dir = extension_path.join(relative_language_dir);
- let config_path = language_dir.join("config.toml");
- let config_content = fs::read_to_string(&config_path)?;
- let config: LanguageConfig = toml::from_str(&config_content)?;
+ let config_path = language_dir.join(LanguageConfig::FILE_NAME);
+ let config = LanguageConfig::load(&config_path)?;
let grammar = if let Some(name) = &config.grammar {
Some(
grammars
@@ -339,18 +340,48 @@ fn test_languages(
let query_entries = fs::read_dir(&language_dir)?;
for entry in query_entries {
let entry = entry?;
- let query_path = entry.path();
- if query_path.extension() == Some("scm".as_ref()) {
- let grammar = grammar.with_context(|| {
- format! {
- "language {} provides query {} but no grammar",
- config.name,
- query_path.display()
- }
- })?;
-
- let query_source = fs::read_to_string(&query_path)?;
- let _query = Query::new(grammar, &query_source)?;
+ let file_path = entry.path();
+
+ let Some(file_name) = file_path.file_name().and_then(|name| name.to_str()) else {
+ continue;
+ };
+
+ match file_name {
+ LanguageConfig::FILE_NAME => {
+ // Loaded above
+ }
+ SemanticTokenRules::FILE_NAME => {
+ let _token_rules = SemanticTokenRules::load(&file_path)?;
+ }
+ TaskTemplates::FILE_NAME => {
+ let task_file_content = std::fs::read(&file_path).with_context(|| {
+ anyhow!(
+ "Failed to read tasks file at {path}",
+ path = file_path.display()
+ )
+ })?;
+ let _task_templates =
+ serde_json_lenient::from_slice::<TaskTemplates>(&task_file_content)
+ .with_context(|| {
+ anyhow!(
+ "Failed to parse tasks file at {path}",
+ path = file_path.display()
+ )
+ })?;
+ }
+ _ if file_name.ends_with(".scm") => {
+ let grammar = grammar.with_context(|| {
+ format! {
+ "language {} provides query {} but no grammar",
+ config.name,
+ file_path.display()
+ }
+ })?;
+
+ let query_source = fs::read_to_string(&file_path)?;
+ let _query = Query::new(grammar, &query_source)?;
+ }
+ _ => {}
}
}
@@ -65,7 +65,7 @@ language = { workspace = true, features = ["test-support"] }
language_extension.workspace = true
parking_lot.workspace = true
project = { workspace = true, features = ["test-support"] }
-rand.workspace = true
+
reqwest_client.workspace = true
theme = { workspace = true, features = ["test-support"] }
theme_extension.workspace = true
@@ -55,6 +55,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
+use task::TaskTemplates;
use url::Url;
use util::{ResultExt, paths::RemotePathBuf};
use wasm_host::{
@@ -1285,19 +1286,11 @@ impl ExtensionStore {
]);
// Load semantic token rules if present in the language directory.
- let rules_path = language_path.join("semantic_token_rules.json");
- if let Ok(rules_json) = std::fs::read_to_string(&rules_path) {
- match serde_json_lenient::from_str::<SemanticTokenRules>(&rules_json) {
- Ok(rules) => {
- semantic_token_rules_to_add.push((language_name.clone(), rules));
- }
- Err(err) => {
- log::error!(
- "Failed to parse semantic token rules from {}: {err:#}",
- rules_path.display()
- );
- }
- }
+ let rules_path = language_path.join(SemanticTokenRules::FILE_NAME);
+ if std::fs::exists(&rules_path).is_ok_and(|exists| exists)
+ && let Some(rules) = SemanticTokenRules::load(&rules_path).log_err()
+ {
+ semantic_token_rules_to_add.push((language_name.clone(), rules));
}
self.proxy.register_language(
@@ -1306,11 +1299,11 @@ impl ExtensionStore {
language.matcher.clone(),
language.hidden,
Arc::new(move || {
- let config = std::fs::read_to_string(language_path.join("config.toml"))?;
- let config: LanguageConfig = ::toml::from_str(&config)?;
+ let config =
+ LanguageConfig::load(language_path.join(LanguageConfig::FILE_NAME))?;
let queries = load_plugin_queries(&language_path);
let context_provider =
- std::fs::read_to_string(language_path.join("tasks.json"))
+ std::fs::read_to_string(language_path.join(TaskTemplates::FILE_NAME))
.ok()
.and_then(|contents| {
let definitions =
@@ -1580,7 +1573,7 @@ impl ExtensionStore {
if !fs_metadata.is_dir {
continue;
}
- let language_config_path = language_path.join("config.toml");
+ let language_config_path = language_path.join(LanguageConfig::FILE_NAME);
let config = fs.load(&language_config_path).await.with_context(|| {
format!("loading language config from {language_config_path:?}")
})?;
@@ -1703,7 +1696,7 @@ impl ExtensionStore {
cx.background_spawn(async move {
const EXTENSION_TOML: &str = "extension.toml";
const EXTENSION_WASM: &str = "extension.wasm";
- const CONFIG_TOML: &str = "config.toml";
+ const CONFIG_TOML: &str = LanguageConfig::FILE_NAME;
if is_dev {
let manifest_toml = toml::to_string(&loaded_extension.manifest)?;
@@ -138,7 +138,9 @@ impl HeadlessExtensionStore {
for language_path in &manifest.languages {
let language_path = extension_dir.join(language_path);
- let config = fs.load(&language_path.join("config.toml")).await?;
+ let config = fs
+ .load(&language_path.join(LanguageConfig::FILE_NAME))
+ .await?;
let mut config = ::toml::from_str::<LanguageConfig>(&config)?;
this.update(cx, |this, _cx| {
@@ -870,9 +870,12 @@ impl ExtensionsPage {
)
.child(
h_flex()
+ .min_w_0()
+ .w_full()
.justify_between()
.child(
h_flex()
+ .min_w_0()
.gap_1()
.child(
Icon::new(IconName::Person)
@@ -889,6 +892,7 @@ impl ExtensionsPage {
.child(
h_flex()
.gap_1()
+ .flex_shrink_0()
.child({
let repo_url_for_tooltip = repository_url.clone();
@@ -22,5 +22,3 @@ util.workspace = true
workspace.workspace = true
zed_actions.workspace = true
-[dev-dependencies]
-editor = { workspace = true, features = ["test-support"] }
@@ -38,7 +38,7 @@ project_panel.workspace = true
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
-language = { workspace = true, features = ["test-support"] }
+
picker = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
serde_json.workspace = true
@@ -15,10 +15,14 @@ use gpui::Global;
use gpui::ReadGlobal as _;
use gpui::SharedString;
use std::borrow::Cow;
+#[cfg(unix)]
+use std::ffi::CString;
use util::command::new_command;
#[cfg(unix)]
use std::os::fd::{AsFd, AsRawFd};
+#[cfg(unix)]
+use std::os::unix::ffi::OsStrExt;
#[cfg(unix)]
use std::os::unix::fs::{FileTypeExt, MetadataExt};
@@ -506,6 +510,63 @@ impl RealFs {
}
}
+#[cfg(any(target_os = "macos", target_os = "linux"))]
+fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> {
+ let source = path_to_c_string(source)?;
+ let target = path_to_c_string(target)?;
+
+ #[cfg(target_os = "macos")]
+ let result = unsafe { libc::renamex_np(source.as_ptr(), target.as_ptr(), libc::RENAME_EXCL) };
+
+ #[cfg(target_os = "linux")]
+ let result = unsafe {
+ libc::syscall(
+ libc::SYS_renameat2,
+ libc::AT_FDCWD,
+ source.as_ptr(),
+ libc::AT_FDCWD,
+ target.as_ptr(),
+ libc::RENAME_NOREPLACE,
+ )
+ };
+
+ if result == 0 {
+ Ok(())
+ } else {
+ Err(io::Error::last_os_error())
+ }
+}
+
+#[cfg(target_os = "windows")]
+fn rename_without_replace(source: &Path, target: &Path) -> io::Result<()> {
+ use std::os::windows::ffi::OsStrExt;
+
+ use windows::Win32::Storage::FileSystem::{MOVE_FILE_FLAGS, MoveFileExW};
+ use windows::core::PCWSTR;
+
+ let source: Vec<u16> = source.as_os_str().encode_wide().chain(Some(0)).collect();
+ let target: Vec<u16> = target.as_os_str().encode_wide().chain(Some(0)).collect();
+
+ unsafe {
+ MoveFileExW(
+ PCWSTR(source.as_ptr()),
+ PCWSTR(target.as_ptr()),
+ MOVE_FILE_FLAGS::default(),
+ )
+ }
+ .map_err(|_| io::Error::last_os_error())
+}
+
+#[cfg(any(target_os = "macos", target_os = "linux"))]
+fn path_to_c_string(path: &Path) -> io::Result<CString> {
+ CString::new(path.as_os_str().as_bytes()).map_err(|_| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("path contains interior NUL: {}", path.display()),
+ )
+ })
+}
+
#[async_trait::async_trait]
impl Fs for RealFs {
async fn create_dir(&self, path: &Path) -> Result<()> {
@@ -588,7 +649,56 @@ impl Fs for RealFs {
}
async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> {
- if !options.overwrite && smol::fs::metadata(target).await.is_ok() {
+ if options.create_parents {
+ if let Some(parent) = target.parent() {
+ self.create_dir(parent).await?;
+ }
+ }
+
+ if options.overwrite {
+ smol::fs::rename(source, target).await?;
+ return Ok(());
+ }
+
+ let use_metadata_fallback = {
+ #[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
+ {
+ let source = source.to_path_buf();
+ let target = target.to_path_buf();
+ match self
+ .executor
+ .spawn(async move { rename_without_replace(&source, &target) })
+ .await
+ {
+ Ok(()) => return Ok(()),
+ Err(error) if error.kind() == io::ErrorKind::AlreadyExists => {
+ if options.ignore_if_exists {
+ return Ok(());
+ }
+ return Err(error.into());
+ }
+ Err(error)
+ if error.raw_os_error().is_some_and(|code| {
+ code == libc::ENOSYS
+ || code == libc::ENOTSUP
+ || code == libc::EOPNOTSUPP
+ }) =>
+ {
+ // For case when filesystem or kernel does not support atomic no-overwrite rename.
+ true
+ }
+ Err(error) => return Err(error.into()),
+ }
+ }
+
+ #[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
+ {
+ // For platforms which do not have an atomic no-overwrite rename yet.
+ true
+ }
+ };
+
+ if use_metadata_fallback && smol::fs::metadata(target).await.is_ok() {
if options.ignore_if_exists {
return Ok(());
} else {
@@ -596,12 +706,6 @@ impl Fs for RealFs {
}
}
- if options.create_parents {
- if let Some(parent) = target.parent() {
- self.create_dir(parent).await?;
- }
- }
-
smol::fs::rename(source, target).await?;
Ok(())
}
@@ -523,6 +523,65 @@ async fn test_rename(executor: BackgroundExecutor) {
);
}
+#[gpui::test]
+#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
+async fn test_realfs_parallel_rename_without_overwrite_preserves_losing_source(
+ executor: BackgroundExecutor,
+) {
+ let temp_dir = TempDir::new().unwrap();
+ let root = temp_dir.path();
+ let source_a = root.join("dir_a/shared.txt");
+ let source_b = root.join("dir_b/shared.txt");
+ let target = root.join("shared.txt");
+
+ std::fs::create_dir_all(source_a.parent().unwrap()).unwrap();
+ std::fs::create_dir_all(source_b.parent().unwrap()).unwrap();
+ std::fs::write(&source_a, "from a").unwrap();
+ std::fs::write(&source_b, "from b").unwrap();
+
+ let fs = RealFs::new(None, executor);
+ let (first_result, second_result) = futures::future::join(
+ fs.rename(&source_a, &target, RenameOptions::default()),
+ fs.rename(&source_b, &target, RenameOptions::default()),
+ )
+ .await;
+
+ assert_ne!(first_result.is_ok(), second_result.is_ok());
+ assert!(target.exists());
+ assert_eq!(source_a.exists() as u8 + source_b.exists() as u8, 1);
+}
+
+#[gpui::test]
+#[cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))]
+async fn test_realfs_rename_ignore_if_exists_leaves_source_and_target_unchanged(
+ executor: BackgroundExecutor,
+) {
+ let temp_dir = TempDir::new().unwrap();
+ let root = temp_dir.path();
+ let source = root.join("source.txt");
+ let target = root.join("target.txt");
+
+ std::fs::write(&source, "from source").unwrap();
+ std::fs::write(&target, "from target").unwrap();
+
+ let fs = RealFs::new(None, executor);
+ let result = fs
+ .rename(
+ &source,
+ &target,
+ RenameOptions {
+ ignore_if_exists: true,
+ ..Default::default()
+ },
+ )
+ .await;
+
+ assert!(result.is_ok());
+
+ assert_eq!(std::fs::read_to_string(&source).unwrap(), "from source");
+ assert_eq!(std::fs::read_to_string(&target).unwrap(), "from target");
+}
+
#[gpui::test]
#[cfg(unix)]
async fn test_realfs_broken_symlink_metadata(executor: BackgroundExecutor) {
@@ -48,7 +48,6 @@ ztracing.workspace = true
pretty_assertions.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }
-unindent.workspace = true
gpui = { workspace = true, features = ["test-support"] }
tempfile.workspace = true
rand.workspace = true
@@ -40,6 +40,9 @@ actions!(
/// Restores the selected hunks to their original state.
#[action(deprecated_aliases = ["editor::RevertSelectedHunks"])]
Restore,
+ /// Restores the selected hunks to their original state and moves to the
+ /// next one.
+ RestoreAndNext,
// per-file
/// Shows git blame information for the current file.
#[action(deprecated_aliases = ["editor::ToggleGitBlame"])]
@@ -43,7 +43,6 @@ git = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
-recent_projects = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -73,7 +73,6 @@ windows.workspace = true
[dev-dependencies]
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
-git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
indoc.workspace = true
pretty_assertions.workspace = true
@@ -182,7 +182,7 @@ fn conflicts_updated(
let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx);
let Some(buffer_snapshot) = excerpts
.first()
- .and_then(|(excerpt_id, _)| snapshot.buffer_for_excerpt(*excerpt_id))
+ .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id))
else {
return;
};
@@ -221,7 +221,7 @@ fn conflicts_updated(
let mut removed_highlighted_ranges = Vec::new();
let mut removed_block_ids = HashSet::default();
for (conflict_range, block_id) in old_conflicts {
- let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| {
+ let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
let precedes_start = range
.context
.start
@@ -263,7 +263,7 @@ fn conflicts_updated(
let new_conflicts = &conflict_set.conflicts[event.new_range.clone()];
let mut blocks = Vec::new();
for conflict in new_conflicts {
- let Some((excerpt_id, _)) = excerpts.iter().find(|(_, range)| {
+ let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
let precedes_start = range
.context
.start
@@ -1343,6 +1343,7 @@ impl GitPanel {
&snapshot,
language::Point::new(0, 0),
Direction::Next,
+ true,
window,
cx,
);
@@ -34,6 +34,4 @@ menu.workspace = true
project = { workspace = true, features = ["test-support"] }
rope.workspace = true
serde_json.workspace = true
-tree-sitter-rust.workspace = true
-tree-sitter-typescript.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -94,7 +94,9 @@ impl GoToLine {
.read(cx)
.excerpts_for_buffer(snapshot.remote_id(), cx)
.into_iter()
- .map(move |(_, range)| text::ToPoint::to_point(&range.context.end, &snapshot).row)
+ .map(move |(_, _, range)| {
+ text::ToPoint::to_point(&range.context.end, &snapshot).row
+ })
.max()
.unwrap_or(0);
@@ -146,7 +146,6 @@ collections = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
gpui_platform.workspace = true
lyon = { version = "1.0", features = ["extra"] }
-pretty_assertions.workspace = true
rand.workspace = true
scheduler = { workspace = true, features = ["test-support"] }
unicode-segmentation.workspace = true
@@ -246,7 +246,12 @@ impl StyledText {
pub fn with_runs(mut self, runs: Vec<TextRun>) -> Self {
let mut text = &**self.text;
for run in &runs {
- text = text.get(run.len..).expect("invalid text run");
+ text = text.get(run.len..).unwrap_or_else(|| {
+ #[cfg(debug_assertions)]
+ panic!("invalid text run. Text: '{text}', run: {run:?}");
+ #[cfg(not(debug_assertions))]
+ panic!("invalid text run");
+ });
}
assert!(text.is_empty(), "invalid text run");
self.runs = Some(runs);
@@ -62,6 +62,7 @@ sum_tree.workspace = true
task.workspace = true
text.workspace = true
theme.workspace = true
+toml.workspace = true
tracing.workspace = true
tree-sitter-md = { workspace = true, optional = true }
tree-sitter-python = { workspace = true, optional = true }
@@ -961,6 +961,15 @@ pub struct LanguageConfig {
pub import_path_strip_regex: Option<Regex>,
}
+impl LanguageConfig {
+ pub const FILE_NAME: &str = "config.toml";
+
+ pub fn load(config_path: impl AsRef<Path>) -> Result<Self> {
+ let config = std::fs::read_to_string(config_path.as_ref())?;
+ toml::from_str(&config).map_err(Into::into)
+ }
+}
+
#[derive(Clone, Debug, Deserialize, Default, JsonSchema)]
pub struct DecreaseIndentConfig {
#[serde(default, deserialize_with = "deserialize_regex")]
@@ -68,7 +68,7 @@ vercel = { workspace = true, features = ["schemars"] }
x_ai = { workspace = true, features = ["schemars"] }
[dev-dependencies]
-editor = { workspace = true, features = ["test-support"] }
+
language_model = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
-project = { workspace = true, features = ["test-support"] }
+
@@ -866,7 +866,10 @@ impl LanguageModel for CloudLanguageModel {
);
if enable_thinking && let Some(effort) = effort {
- request.reasoning = Some(open_ai::responses::ReasoningConfig { effort });
+ request.reasoning = Some(open_ai::responses::ReasoningConfig {
+ effort,
+ summary: Some(open_ai::responses::ReasoningSummaryMode::Auto),
+ });
}
let future = self.request_limiter.stream(async move {
@@ -602,7 +602,10 @@ pub fn into_open_ai_response(
} else {
None
},
- reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig { effort }),
+ reasoning: reasoning_effort.map(|effort| open_ai::responses::ReasoningConfig {
+ effort,
+ summary: Some(open_ai::responses::ReasoningSummaryMode::Auto),
+ }),
}
}
@@ -963,10 +966,20 @@ impl OpenAiResponseEventMapper {
self.function_calls_by_item.insert(item_id, entry);
}
}
- ResponseOutputItem::Unknown => {}
+ ResponseOutputItem::Reasoning(_) | ResponseOutputItem::Unknown => {}
}
events
}
+ ResponsesStreamEvent::ReasoningSummaryTextDelta { delta, .. } => {
+ if delta.is_empty() {
+ Vec::new()
+ } else {
+ vec![Ok(LanguageModelCompletionEvent::Thinking {
+ text: delta,
+ signature: None,
+ })]
+ }
+ }
ResponsesStreamEvent::OutputTextDelta { delta, .. } => {
if delta.is_empty() {
Vec::new()
@@ -1075,10 +1088,22 @@ impl OpenAiResponseEventMapper {
error.message
)))]
}
- ResponsesStreamEvent::OutputTextDone { .. } => Vec::new(),
- ResponsesStreamEvent::OutputItemDone { .. }
+ ResponsesStreamEvent::ReasoningSummaryPartAdded { summary_index, .. } => {
+ if summary_index > 0 {
+ vec![Ok(LanguageModelCompletionEvent::Thinking {
+ text: "\n\n".to_string(),
+ signature: None,
+ })]
+ } else {
+ Vec::new()
+ }
+ }
+ ResponsesStreamEvent::OutputTextDone { .. }
+ | ResponsesStreamEvent::OutputItemDone { .. }
| ResponsesStreamEvent::ContentPartAdded { .. }
| ResponsesStreamEvent::ContentPartDone { .. }
+ | ResponsesStreamEvent::ReasoningSummaryTextDone { .. }
+ | ResponsesStreamEvent::ReasoningSummaryPartDone { .. }
| ResponsesStreamEvent::Created { .. }
| ResponsesStreamEvent::InProgress { .. }
| ResponsesStreamEvent::Unknown => Vec::new(),
@@ -1416,8 +1441,9 @@ mod tests {
use gpui::TestAppContext;
use language_model::{LanguageModelRequestMessage, LanguageModelRequestTool};
use open_ai::responses::{
- ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage, ResponseStatusDetails,
- ResponseSummary, ResponseUsage, StreamEvent as ResponsesStreamEvent,
+ ReasoningSummaryPart, ResponseFunctionToolCall, ResponseOutputItem, ResponseOutputMessage,
+ ResponseReasoningItem, ResponseStatusDetails, ResponseSummary, ResponseUsage,
+ StreamEvent as ResponsesStreamEvent,
};
use pretty_assertions::assert_eq;
use serde_json::json;
@@ -1675,7 +1701,7 @@ mod tests {
}
],
"prompt_cache_key": "thread-123",
- "reasoning": { "effort": "low" }
+ "reasoning": { "effort": "low", "summary": "auto" }
});
assert_eq!(serialized, expected);
@@ -2114,4 +2140,166 @@ mod tests {
})
));
}
+
+ #[test]
+ fn responses_stream_maps_reasoning_summary_deltas() {
+ let events = vec![
+ ResponsesStreamEvent::OutputItemAdded {
+ output_index: 0,
+ sequence_number: None,
+ item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+ id: Some("rs_123".into()),
+ summary: vec![],
+ }),
+ },
+ ResponsesStreamEvent::ReasoningSummaryPartAdded {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ summary_index: 0,
+ },
+ ResponsesStreamEvent::ReasoningSummaryTextDelta {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ delta: "Thinking about".into(),
+ },
+ ResponsesStreamEvent::ReasoningSummaryTextDelta {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ delta: " the answer".into(),
+ },
+ ResponsesStreamEvent::ReasoningSummaryTextDone {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ text: "Thinking about the answer".into(),
+ },
+ ResponsesStreamEvent::ReasoningSummaryPartDone {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ summary_index: 0,
+ },
+ ResponsesStreamEvent::ReasoningSummaryPartAdded {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ summary_index: 1,
+ },
+ ResponsesStreamEvent::ReasoningSummaryTextDelta {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ delta: "Second part".into(),
+ },
+ ResponsesStreamEvent::ReasoningSummaryTextDone {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ text: "Second part".into(),
+ },
+ ResponsesStreamEvent::ReasoningSummaryPartDone {
+ item_id: "rs_123".into(),
+ output_index: 0,
+ summary_index: 1,
+ },
+ ResponsesStreamEvent::OutputItemDone {
+ output_index: 0,
+ sequence_number: None,
+ item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+ id: Some("rs_123".into()),
+ summary: vec![
+ ReasoningSummaryPart::SummaryText {
+ text: "Thinking about the answer".into(),
+ },
+ ReasoningSummaryPart::SummaryText {
+ text: "Second part".into(),
+ },
+ ],
+ }),
+ },
+ ResponsesStreamEvent::OutputItemAdded {
+ output_index: 1,
+ sequence_number: None,
+ item: response_item_message("msg_456"),
+ },
+ ResponsesStreamEvent::OutputTextDelta {
+ item_id: "msg_456".into(),
+ output_index: 1,
+ content_index: Some(0),
+ delta: "The answer is 42".into(),
+ },
+ ResponsesStreamEvent::Completed {
+ response: ResponseSummary::default(),
+ },
+ ];
+
+ let mapped = map_response_events(events);
+
+ let thinking_events: Vec<_> = mapped
+ .iter()
+ .filter(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. }))
+ .collect();
+ assert_eq!(
+ thinking_events.len(),
+ 4,
+ "expected 4 thinking events (2 deltas + separator + second delta), got {:?}",
+ thinking_events,
+ );
+
+ assert!(matches!(
+ &thinking_events[0],
+ LanguageModelCompletionEvent::Thinking { text, .. } if text == "Thinking about"
+ ));
+ assert!(matches!(
+ &thinking_events[1],
+ LanguageModelCompletionEvent::Thinking { text, .. } if text == " the answer"
+ ));
+ assert!(
+ matches!(
+ &thinking_events[2],
+ LanguageModelCompletionEvent::Thinking { text, .. } if text == "\n\n"
+ ),
+ "expected separator between summary parts"
+ );
+ assert!(matches!(
+ &thinking_events[3],
+ LanguageModelCompletionEvent::Thinking { text, .. } if text == "Second part"
+ ));
+
+ assert!(mapped.iter().any(|e| matches!(
+ e,
+ LanguageModelCompletionEvent::Text(t) if t == "The answer is 42"
+ )));
+ }
+
+ #[test]
+ fn responses_stream_maps_reasoning_from_done_only() {
+ let events = vec![
+ ResponsesStreamEvent::OutputItemAdded {
+ output_index: 0,
+ sequence_number: None,
+ item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+ id: Some("rs_789".into()),
+ summary: vec![],
+ }),
+ },
+ ResponsesStreamEvent::OutputItemDone {
+ output_index: 0,
+ sequence_number: None,
+ item: ResponseOutputItem::Reasoning(ResponseReasoningItem {
+ id: Some("rs_789".into()),
+ summary: vec![ReasoningSummaryPart::SummaryText {
+ text: "Summary without deltas".into(),
+ }],
+ }),
+ },
+ ResponsesStreamEvent::Completed {
+ response: ResponseSummary::default(),
+ },
+ ];
+
+ let mapped = map_response_events(events);
+
+ assert!(
+ !mapped
+ .iter()
+ .any(|e| matches!(e, LanguageModelCompletionEvent::Thinking { .. })),
+ "OutputItemDone reasoning should not produce Thinking events (no delta/done text events)"
+ );
+ }
}
@@ -98,7 +98,6 @@ util.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
-text.workspace = true
theme = { workspace = true, features = ["test-support"] }
tree-sitter-bash.workspace = true
tree-sitter-c.workspace = true
@@ -109,4 +108,3 @@ tree-sitter-python.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter.workspace = true
unindent.workspace = true
-workspace = { workspace = true, features = ["test-support"] }
@@ -61,7 +61,6 @@ objc.workspace = true
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
gpui_platform.workspace = true
-sha2.workspace = true
simplelog.workspace = true
[build-dependencies]
@@ -1,8 +1,8 @@
use anyhow::Context as _;
use collections::HashMap;
+use cpal::DeviceId;
mod remote_video_track_view;
-use cpal::traits::HostTrait as _;
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
use rodio::DeviceTrait as _;
@@ -192,24 +192,18 @@ pub enum RoomEvent {
pub(crate) fn default_device(
input: bool,
+ device_id: Option<&DeviceId>,
) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> {
- let device;
- let config;
- if input {
- device = cpal::default_host()
- .default_input_device()
- .context("no audio input device available")?;
- config = device
+ let device = audio::resolve_device(device_id, input)?;
+ let config = if input {
+ device
.default_input_config()
- .context("failed to get default input config")?;
+ .context("failed to get default input config")?
} else {
- device = cpal::default_host()
- .default_output_device()
- .context("no audio output device available")?;
- config = device
+ device
.default_output_config()
- .context("failed to get default output config")?;
- }
+ .context("failed to get default output config")?
+ };
Ok((device, config))
}
@@ -150,7 +150,10 @@ impl Room {
info!("Using experimental.rodio_audio audio pipeline for output");
playback::play_remote_audio_track(&track.0, speaker, cx)
} else if speaker.sends_legacy_audio {
- Ok(self.playback.play_remote_audio_track(&track.0))
+ let output_audio_device = AudioSettings::get_global(cx).output_audio_device.clone();
+ Ok(self
+ .playback
+ .play_remote_audio_track(&track.0, output_audio_device))
} else {
Err(anyhow!("Client version too old to play audio in call"))
}
@@ -1,6 +1,7 @@
use anyhow::{Context as _, Result};
use audio::{AudioSettings, CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE};
+use cpal::DeviceId;
use cpal::traits::{DeviceTrait, StreamTrait as _};
use futures::channel::mpsc::UnboundedSender;
use futures::{Stream, StreamExt as _};
@@ -91,8 +92,9 @@ impl AudioStack {
pub(crate) fn play_remote_audio_track(
&self,
track: &livekit::track::RemoteAudioTrack,
+ output_audio_device: Option<DeviceId>,
) -> AudioStream {
- let output_task = self.start_output();
+ let output_task = self.start_output(output_audio_device);
let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed);
let source = AudioMixerSource {
@@ -130,7 +132,7 @@ impl AudioStack {
}
}
- fn start_output(&self) -> Arc<Task<()>> {
+ fn start_output(&self, output_audio_device: Option<DeviceId>) -> Arc<Task<()>> {
if let Some(task) = self._output_task.borrow().upgrade() {
return task;
}
@@ -143,6 +145,7 @@ impl AudioStack {
mixer,
LEGACY_SAMPLE_RATE.get(),
LEGACY_CHANNEL_COUNT.get().into(),
+ output_audio_device,
)
.await
.log_err();
@@ -219,12 +222,16 @@ impl AudioStack {
Ok(())
})
} else {
+ let input_audio_device =
+ AudioSettings::try_read_global(cx, |settings| settings.input_audio_device.clone())
+ .flatten();
self.executor.spawn(async move {
Self::capture_input(
apm,
frame_tx,
LEGACY_SAMPLE_RATE.get(),
LEGACY_CHANNEL_COUNT.get().into(),
+ input_audio_device,
)
.await
})
@@ -247,6 +254,7 @@ impl AudioStack {
mixer: Arc<Mutex<audio_mixer::AudioMixer>>,
sample_rate: u32,
num_channels: u32,
+ output_audio_device: Option<DeviceId>,
) -> Result<()> {
// Prevent App Nap from throttling audio playback on macOS.
// This guard is held for the entire duration of audio output.
@@ -255,7 +263,8 @@ impl AudioStack {
loop {
let mut device_change_listener = DeviceChangeListener::new(false)?;
- let (output_device, output_config) = crate::default_device(false)?;
+ let (output_device, output_config) =
+ crate::default_device(false, output_audio_device.as_ref())?;
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
let mixer = mixer.clone();
let apm = apm.clone();
@@ -327,10 +336,11 @@ impl AudioStack {
frame_tx: UnboundedSender<AudioFrame<'static>>,
sample_rate: u32,
num_channels: u32,
+ input_audio_device: Option<DeviceId>,
) -> Result<()> {
loop {
let mut device_change_listener = DeviceChangeListener::new(true)?;
- let (device, config) = crate::default_device(true)?;
+ let (device, config) = crate::default_device(true, input_audio_device.as_ref())?;
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
let apm = apm.clone();
let frame_tx = frame_tx.clone();
@@ -7,20 +7,22 @@ use std::{
};
use anyhow::{Context, Result};
+use cpal::DeviceId;
use cpal::traits::{DeviceTrait, StreamTrait};
use rodio::{buffer::SamplesBuffer, conversions::SampleTypeConverter};
use util::ResultExt;
pub struct CaptureInput {
pub name: String,
+ pub input_device: Option<DeviceId>,
config: cpal::SupportedStreamConfig,
samples: Arc<Mutex<Vec<i16>>>,
_stream: cpal::Stream,
}
impl CaptureInput {
- pub fn start() -> anyhow::Result<Self> {
- let (device, config) = crate::default_device(true)?;
+ pub fn start(input_device: Option<DeviceId>) -> anyhow::Result<Self> {
+ let (device, config) = crate::default_device(true, input_device.as_ref())?;
let name = device
.description()
.map(|desc| desc.name().to_string())
@@ -32,6 +34,7 @@ impl CaptureInput {
Ok(Self {
name,
+ input_device,
_stream: stream,
config,
samples,
@@ -52,7 +52,6 @@ gpui = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
-project = { workspace = true, features = ["test-support"] }
rand.workspace = true
settings = { workspace = true, features = ["test-support"] }
text = { workspace = true, features = ["test-support"] }
@@ -1987,7 +1987,7 @@ impl MultiBuffer {
&self,
buffer_id: BufferId,
cx: &App,
- ) -> Vec<(ExcerptId, ExcerptRange<text::Anchor>)> {
+ ) -> Vec<(ExcerptId, Arc<BufferSnapshot>, ExcerptRange<text::Anchor>)> {
let mut excerpts = Vec::new();
let snapshot = self.read(cx);
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
@@ -1997,7 +1997,7 @@ impl MultiBuffer {
if let Some(excerpt) = cursor.item()
&& excerpt.locator == *locator
{
- excerpts.push((excerpt.id, excerpt.range.clone()));
+ excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone()));
}
}
}
@@ -2128,7 +2128,7 @@ impl MultiBuffer {
) -> Option<Anchor> {
let mut found = None;
let snapshot = buffer.read(cx).snapshot();
- for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) {
+ for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) {
let start = range.context.start.to_point(&snapshot);
let end = range.context.end.to_point(&snapshot);
if start <= point && point < end {
@@ -2157,7 +2157,7 @@ impl MultiBuffer {
cx: &App,
) -> Option<Anchor> {
let snapshot = buffer.read(cx).snapshot();
- for (excerpt_id, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) {
+ for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) {
if range.context.start.cmp(&anchor, &snapshot).is_le()
&& range.context.end.cmp(&anchor, &snapshot).is_ge()
{
@@ -1285,7 +1285,7 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) {
let mut ids = multibuffer
.excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)
.into_iter()
- .map(|(id, _)| id);
+ .map(|(id, _, _)| id);
(ids.next().unwrap(), ids.next().unwrap())
});
let snapshot_2 = multibuffer.read(cx).snapshot(cx);
@@ -15,7 +15,7 @@ doctest = false
[features]
test-support = [
"channel/test-support",
- "collections/test-support",
+
"gpui/test-support",
"rpc/test-support",
]
@@ -37,8 +37,6 @@ zed_actions.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
-collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
-settings = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
@@ -78,6 +78,16 @@ pub enum ResponseInputContent {
#[derive(Serialize, Debug)]
pub struct ReasoningConfig {
pub effort: ReasoningEffort,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub summary: Option<ReasoningSummaryMode>,
+}
+
+#[derive(Serialize, Debug, Clone, Copy, PartialEq, Eq)]
+#[serde(rename_all = "lowercase")]
+pub enum ReasoningSummaryMode {
+ Auto,
+ Concise,
+ Detailed,
}
#[derive(Serialize, Debug)]
@@ -150,6 +160,30 @@ pub enum StreamEvent {
content_index: Option<usize>,
text: String,
},
+ #[serde(rename = "response.reasoning_summary_part.added")]
+ ReasoningSummaryPartAdded {
+ item_id: String,
+ output_index: usize,
+ summary_index: usize,
+ },
+ #[serde(rename = "response.reasoning_summary_text.delta")]
+ ReasoningSummaryTextDelta {
+ item_id: String,
+ output_index: usize,
+ delta: String,
+ },
+ #[serde(rename = "response.reasoning_summary_text.done")]
+ ReasoningSummaryTextDone {
+ item_id: String,
+ output_index: usize,
+ text: String,
+ },
+ #[serde(rename = "response.reasoning_summary_part.done")]
+ ReasoningSummaryPartDone {
+ item_id: String,
+ output_index: usize,
+ summary_index: usize,
+ },
#[serde(rename = "response.function_call_arguments.delta")]
FunctionCallArgumentsDelta {
item_id: String,
@@ -219,6 +253,25 @@ pub struct ResponseUsage {
pub enum ResponseOutputItem {
Message(ResponseOutputMessage),
FunctionCall(ResponseFunctionToolCall),
+ Reasoning(ResponseReasoningItem),
+ #[serde(other)]
+ Unknown,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+pub struct ResponseReasoningItem {
+ #[serde(default)]
+ pub id: Option<String>,
+ #[serde(default)]
+ pub summary: Vec<ReasoningSummaryPart>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(tag = "type", rename_all = "snake_case")]
+pub enum ReasoningSummaryPart {
+ SummaryText {
+ text: String,
+ },
#[serde(other)]
Unknown,
}
@@ -356,6 +409,21 @@ pub async fn stream_response(
});
}
}
+ ResponseOutputItem::Reasoning(reasoning) => {
+ if let Some(ref item_id) = reasoning.id {
+ for part in &reasoning.summary {
+ if let ReasoningSummaryPart::SummaryText { text } = part {
+ all_events.push(
+ StreamEvent::ReasoningSummaryTextDelta {
+ item_id: item_id.clone(),
+ output_index,
+ delta: text.clone(),
+ },
+ );
+ }
+ }
+ }
+ }
ResponseOutputItem::Unknown => {}
}
@@ -38,6 +38,4 @@ project = { workspace = true, features = ["test-support"] }
rope.workspace = true
serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
-tree-sitter-rust.workspace = true
-tree-sitter-typescript.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -1143,7 +1143,7 @@ impl OutlinePanel {
.excerpts_for_buffer(buffer.read(cx).remote_id(), cx)
})
.and_then(|excerpts| {
- let (excerpt_id, excerpt_range) = excerpts.first()?;
+ let (excerpt_id, _, excerpt_range) = excerpts.first()?;
multi_buffer_snapshot
.anchor_in_excerpt(*excerpt_id, excerpt_range.context.start)
})
@@ -31,7 +31,6 @@ test-support = [
"worktree/test-support",
"gpui/test-support",
"dap/test-support",
- "dap_adapters/test-support",
]
[dependencies]
@@ -107,12 +106,10 @@ tracing.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
encoding_rs.workspace = true
-db = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
context_server = { workspace = true, features = ["test-support"] }
buffer_diff = { workspace = true, features = ["test-support"] }
dap = { workspace = true, features = ["test-support"] }
-dap_adapters = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
git2.workspace = true
gpui = { workspace = true, features = ["test-support"] }
@@ -147,6 +147,22 @@ impl AgentRegistryStore {
.map(|store| store.0.clone())
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn init_test_global(cx: &mut App, agents: Vec<RegistryAgent>) -> Entity<Self> {
+ let fs: Arc<dyn Fs> = fs::FakeFs::new(cx.background_executor().clone());
+ let store = cx.new(|_cx| Self {
+ fs,
+ http_client: http_client::FakeHttpClient::with_404_response(),
+ agents,
+ is_fetching: false,
+ fetch_error: None,
+ pending_refresh: None,
+ last_refresh: None,
+ });
+ cx.set_global(GlobalAgentRegistryStore(store.clone()));
+ store
+ }
+
pub fn agents(&self) -> &[RegistryAgent] {
&self.agents
}
@@ -54,7 +54,6 @@ criterion.workspace = true
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
-remote_connection = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
tempfile.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -4415,16 +4415,24 @@ impl ProjectPanel {
return;
}
+ let workspace = self.workspace.clone();
if folded_selection_info.is_empty() {
for (_, task) in move_tasks {
- task.detach_and_log_err(cx);
+ let workspace = workspace.clone();
+ cx.spawn_in(window, async move |_, mut cx| {
+ task.await.notify_workspace_async_err(workspace, &mut cx);
+ })
+ .detach();
}
} else {
- cx.spawn_in(window, async move |project_panel, cx| {
+ cx.spawn_in(window, async move |project_panel, mut cx| {
// Await all move tasks and collect successful results
let mut move_results: Vec<(ProjectEntryId, Entry)> = Vec::new();
for (entry_id, task) in move_tasks {
- if let Some(CreatedEntry::Included(new_entry)) = task.await.log_err() {
+ if let Some(CreatedEntry::Included(new_entry)) = task
+ .await
+ .notify_workspace_async_err(workspace.clone(), &mut cx)
+ {
move_results.push((entry_id, new_entry));
}
}
@@ -4412,6 +4412,90 @@ async fn test_drag_marked_entries_in_folded_directories(cx: &mut gpui::TestAppCo
);
}
+#[gpui::test]
+async fn test_dragging_same_named_files_preserves_one_source_on_conflict(
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "dir_a": {
+ "shared.txt": "from a"
+ },
+ "dir_b": {
+ "shared.txt": "from b"
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+ let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = window
+ .read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone())
+ .unwrap();
+ let cx = &mut VisualTestContext::from_window(window.into(), cx);
+ let panel = workspace.update_in(cx, ProjectPanel::new);
+ cx.run_until_parked();
+
+ panel.update_in(cx, |panel, window, cx| {
+ let (root_entry_id, worktree_id, entry_a_id, entry_b_id) = {
+ let worktree = panel.project.read(cx).visible_worktrees(cx).next().unwrap();
+ let worktree = worktree.read(cx);
+ let root_entry_id = worktree.root_entry().unwrap().id;
+ let worktree_id = worktree.id();
+ let entry_a_id = worktree
+ .entry_for_path(rel_path("dir_a/shared.txt"))
+ .unwrap()
+ .id;
+ let entry_b_id = worktree
+ .entry_for_path(rel_path("dir_b/shared.txt"))
+ .unwrap()
+ .id;
+ (root_entry_id, worktree_id, entry_a_id, entry_b_id)
+ };
+
+ let drag = DraggedSelection {
+ active_selection: SelectedEntry {
+ worktree_id,
+ entry_id: entry_a_id,
+ },
+ marked_selections: Arc::new([
+ SelectedEntry {
+ worktree_id,
+ entry_id: entry_a_id,
+ },
+ SelectedEntry {
+ worktree_id,
+ entry_id: entry_b_id,
+ },
+ ]),
+ };
+
+ panel.drag_onto(&drag, root_entry_id, false, window, cx);
+ });
+ cx.executor().run_until_parked();
+
+ let files = fs.files();
+ assert!(files.contains(&PathBuf::from(path!("/root/shared.txt"))));
+
+ let remaining_sources = [
+ PathBuf::from(path!("/root/dir_a/shared.txt")),
+ PathBuf::from(path!("/root/dir_b/shared.txt")),
+ ]
+ .into_iter()
+ .filter(|path| files.contains(path))
+ .count();
+
+ assert_eq!(
+ remaining_sources, 1,
+ "one conflicting source file should remain in place"
+ );
+}
+
#[gpui::test]
async fn test_drag_entries_between_different_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -7,7 +7,7 @@ publish.workspace = true
license = "GPL-3.0-or-later"
[features]
-test-support = ["collections/test-support"]
+test-support = []
[lints]
workspace = true
@@ -25,5 +25,3 @@ serde.workspace = true
prost-build.workspace = true
[dev-dependencies]
-collections = { workspace = true, features = ["test-support"] }
-typed-path = "0.11"
@@ -59,7 +59,6 @@ indoc.workspace = true
windows-registry = "0.6.0"
[dev-dependencies]
-dap.workspace = true
editor = { workspace = true, features = ["test-support"] }
extension.workspace = true
fs.workspace = true
@@ -1656,7 +1656,9 @@ impl RemoteServerProjects {
fn delete_ssh_server(&mut self, server: SshServerIndex, cx: &mut Context<Self>) {
self.update_settings_file(cx, move |setting, _| {
- if let Some(connections) = setting.ssh_connections.as_mut() {
+ if let Some(connections) = setting.ssh_connections.as_mut()
+ && connections.get(server.0).is_some()
+ {
connections.remove(server.0);
}
});
@@ -89,9 +89,7 @@ action_log.workspace = true
agent = { workspace = true, features = ["test-support"] }
client = { workspace = true, features = ["test-support"] }
clock = { workspace = true, features = ["test-support"] }
-dap = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
-workspace = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
@@ -103,7 +101,6 @@ remote = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
-prompt_store.workspace = true
unindent.workspace = true
serde_json.workspace = true
zlog.workspace = true
@@ -62,7 +62,6 @@ zed_actions.workspace = true
[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }
-env_logger.workspace = true
gpui = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
@@ -31,4 +31,3 @@ gpui_util.workspace = true
http_client_tls.workspace = true
[dev-dependencies]
-gpui.workspace = true
@@ -1,29 +0,0 @@
-[package]
-name = "rich_text"
-version = "0.1.0"
-edition.workspace = true
-publish.workspace = true
-license = "GPL-3.0-or-later"
-
-[lints]
-workspace = true
-
-[lib]
-path = "src/rich_text.rs"
-doctest = false
-
-[features]
-test-support = [
- "gpui/test-support",
- "util/test-support",
-]
-
-[dependencies]
-futures.workspace = true
-gpui.workspace = true
-language.workspace = true
-linkify.workspace = true
-pulldown-cmark.workspace = true
-theme.workspace = true
-ui.workspace = true
-util.workspace = true
@@ -1 +0,0 @@
-../../LICENSE-GPL
@@ -1,418 +0,0 @@
-use futures::FutureExt;
-use gpui::{
- AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText,
- IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window,
-};
-use language::{HighlightId, Language, LanguageRegistry};
-use std::{ops::Range, sync::Arc};
-use theme::ActiveTheme;
-use ui::LinkPreview;
-use util::RangeExt;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum Highlight {
- Code,
- Id(HighlightId),
- InlineCode(bool),
- Highlight(HighlightStyle),
- Mention,
- SelfMention,
-}
-
-impl From<HighlightStyle> for Highlight {
- fn from(style: HighlightStyle) -> Self {
- Self::Highlight(style)
- }
-}
-
-impl From<HighlightId> for Highlight {
- fn from(style: HighlightId) -> Self {
- Self::Id(style)
- }
-}
-
-#[derive(Clone, Default)]
-pub struct RichText {
- pub text: SharedString,
- pub highlights: Vec<(Range<usize>, Highlight)>,
- pub link_ranges: Vec<Range<usize>>,
- pub link_urls: Arc<[String]>,
-
- pub custom_ranges: Vec<Range<usize>>,
- custom_ranges_tooltip_fn:
- Option<Arc<dyn Fn(usize, Range<usize>, &mut Window, &mut App) -> Option<AnyView>>>,
-}
-
-/// Allows one to specify extra links to the rendered markdown, which can be used
-/// for e.g. mentions.
-#[derive(Debug)]
-pub struct Mention {
- pub range: Range<usize>,
- pub is_self_mention: bool,
-}
-
-impl RichText {
- pub fn new(
- block: String,
- mentions: &[Mention],
- language_registry: &Arc<LanguageRegistry>,
- ) -> Self {
- let mut text = String::new();
- let mut highlights = Vec::new();
- let mut link_ranges = Vec::new();
- let mut link_urls = Vec::new();
- render_markdown_mut(
- &block,
- mentions,
- language_registry,
- None,
- &mut text,
- &mut highlights,
- &mut link_ranges,
- &mut link_urls,
- );
- text.truncate(text.trim_end().len());
-
- RichText {
- text: SharedString::from(text),
- link_urls: link_urls.into(),
- link_ranges,
- highlights,
- custom_ranges: Vec::new(),
- custom_ranges_tooltip_fn: None,
- }
- }
-
- pub fn set_tooltip_builder_for_custom_ranges(
- &mut self,
- f: impl Fn(usize, Range<usize>, &mut Window, &mut App) -> Option<AnyView> + 'static,
- ) {
- self.custom_ranges_tooltip_fn = Some(Arc::new(f));
- }
-
- pub fn element(&self, id: ElementId, window: &mut Window, cx: &mut App) -> AnyElement {
- let theme = cx.theme();
- let code_background = theme.colors().surface_background;
-
- InteractiveText::new(
- id,
- StyledText::new(self.text.clone()).with_default_highlights(
- &window.text_style(),
- self.highlights.iter().map(|(range, highlight)| {
- (
- range.clone(),
- match highlight {
- Highlight::Code => HighlightStyle {
- background_color: Some(code_background),
- ..Default::default()
- },
- Highlight::Id(id) => HighlightStyle {
- background_color: Some(code_background),
- ..id.style(theme.syntax()).unwrap_or_default()
- },
- Highlight::InlineCode(link) => {
- if *link {
- HighlightStyle {
- background_color: Some(code_background),
- underline: Some(UnderlineStyle {
- thickness: 1.0.into(),
- ..Default::default()
- }),
- ..Default::default()
- }
- } else {
- HighlightStyle {
- background_color: Some(code_background),
- ..Default::default()
- }
- }
- }
- Highlight::Highlight(highlight) => *highlight,
- Highlight::Mention => HighlightStyle {
- font_weight: Some(FontWeight::BOLD),
- ..Default::default()
- },
- Highlight::SelfMention => HighlightStyle {
- font_weight: Some(FontWeight::BOLD),
- ..Default::default()
- },
- },
- )
- }),
- ),
- )
- .on_click(self.link_ranges.clone(), {
- let link_urls = self.link_urls.clone();
- move |ix, _, cx| {
- let url = &link_urls[ix];
- if url.starts_with("http") {
- cx.open_url(url);
- }
- }
- })
- .tooltip({
- let link_ranges = self.link_ranges.clone();
- let link_urls = self.link_urls.clone();
- let custom_tooltip_ranges = self.custom_ranges.clone();
- let custom_tooltip_fn = self.custom_ranges_tooltip_fn.clone();
- move |idx, window, cx| {
- for (ix, range) in link_ranges.iter().enumerate() {
- if range.contains(&idx) {
- return Some(LinkPreview::new(&link_urls[ix], cx));
- }
- }
- for range in &custom_tooltip_ranges {
- if range.contains(&idx)
- && let Some(f) = &custom_tooltip_fn
- {
- return f(idx, range.clone(), window, cx);
- }
- }
- None
- }
- })
- .into_any_element()
- }
-}
-
-pub fn render_markdown_mut(
- block: &str,
- mut mentions: &[Mention],
- language_registry: &Arc<LanguageRegistry>,
- language: Option<&Arc<Language>>,
- text: &mut String,
- highlights: &mut Vec<(Range<usize>, Highlight)>,
- link_ranges: &mut Vec<Range<usize>>,
- link_urls: &mut Vec<String>,
-) {
- use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd};
-
- let mut bold_depth = 0;
- let mut italic_depth = 0;
- let mut strikethrough_depth = 0;
- let mut link_url = None;
- let mut current_language = None;
- let mut list_stack = Vec::new();
-
- let mut options = Options::all();
- options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST);
-
- for (event, source_range) in Parser::new_ext(block, options).into_offset_iter() {
- let prev_len = text.len();
- match event {
- Event::Text(t) => {
- if let Some(language) = ¤t_language {
- render_code(text, highlights, t.as_ref(), language);
- } else {
- while let Some(mention) = mentions.first() {
- if !source_range.contains_inclusive(&mention.range) {
- break;
- }
- mentions = &mentions[1..];
- let range = (prev_len + mention.range.start - source_range.start)
- ..(prev_len + mention.range.end - source_range.start);
- highlights.push((
- range.clone(),
- if mention.is_self_mention {
- Highlight::SelfMention
- } else {
- Highlight::Mention
- },
- ));
- }
-
- text.push_str(t.as_ref());
- let mut style = HighlightStyle::default();
- if bold_depth > 0 {
- style.font_weight = Some(FontWeight::BOLD);
- }
- if italic_depth > 0 {
- style.font_style = Some(FontStyle::Italic);
- }
- if strikethrough_depth > 0 {
- style.strikethrough = Some(StrikethroughStyle {
- thickness: 1.0.into(),
- ..Default::default()
- });
- }
- let last_run_len = if let Some(link_url) = link_url.clone() {
- link_ranges.push(prev_len..text.len());
- link_urls.push(link_url);
- style.underline = Some(UnderlineStyle {
- thickness: 1.0.into(),
- ..Default::default()
- });
- prev_len
- } else {
- // Manually scan for links
- let mut finder = linkify::LinkFinder::new();
- finder.kinds(&[linkify::LinkKind::Url]);
- let mut last_link_len = prev_len;
- for link in finder.links(&t) {
- let start = link.start();
- let end = link.end();
- let range = (prev_len + start)..(prev_len + end);
- link_ranges.push(range.clone());
- link_urls.push(link.as_str().to_string());
-
- // If there is a style before we match a link, we have to add this to the highlighted ranges
- if style != HighlightStyle::default() && last_link_len < link.start() {
- highlights.push((
- last_link_len..link.start(),
- Highlight::Highlight(style),
- ));
- }
-
- highlights.push((
- range,
- Highlight::Highlight(HighlightStyle {
- underline: Some(UnderlineStyle {
- thickness: 1.0.into(),
- ..Default::default()
- }),
- ..style
- }),
- ));
-
- last_link_len = end;
- }
- last_link_len
- };
-
- if style != HighlightStyle::default() && last_run_len < text.len() {
- let mut new_highlight = true;
- if let Some((last_range, last_style)) = highlights.last_mut()
- && last_range.end == last_run_len
- && last_style == &Highlight::Highlight(style)
- {
- last_range.end = text.len();
- new_highlight = false;
- }
- if new_highlight {
- highlights
- .push((last_run_len..text.len(), Highlight::Highlight(style)));
- }
- }
- }
- }
- Event::Code(t) => {
- text.push_str(t.as_ref());
- let is_link = link_url.is_some();
-
- if let Some(link_url) = link_url.clone() {
- link_ranges.push(prev_len..text.len());
- link_urls.push(link_url);
- }
-
- highlights.push((prev_len..text.len(), Highlight::InlineCode(is_link)))
- }
- Event::Start(tag) => match tag {
- Tag::Paragraph => new_paragraph(text, &mut list_stack),
- Tag::Heading { .. } => {
- new_paragraph(text, &mut list_stack);
- bold_depth += 1;
- }
- Tag::CodeBlock(kind) => {
- new_paragraph(text, &mut list_stack);
- current_language = if let CodeBlockKind::Fenced(language) = kind {
- language_registry
- .language_for_name(language.as_ref())
- .now_or_never()
- .and_then(Result::ok)
- } else {
- language.cloned()
- }
- }
- Tag::Emphasis => italic_depth += 1,
- Tag::Strong => bold_depth += 1,
- Tag::Strikethrough => strikethrough_depth += 1,
- Tag::Link { dest_url, .. } => link_url = Some(dest_url.to_string()),
- Tag::List(number) => {
- list_stack.push((number, false));
- }
- Tag::Item => {
- let len = list_stack.len();
- if let Some((list_number, has_content)) = list_stack.last_mut() {
- *has_content = false;
- if !text.is_empty() && !text.ends_with('\n') {
- text.push('\n');
- }
- for _ in 0..len - 1 {
- text.push_str(" ");
- }
- if let Some(number) = list_number {
- text.push_str(&format!("{}. ", number));
- *number += 1;
- *has_content = false;
- } else {
- text.push_str("- ");
- }
- }
- }
- _ => {}
- },
- Event::End(tag) => match tag {
- TagEnd::Heading(_) => bold_depth -= 1,
- TagEnd::CodeBlock => current_language = None,
- TagEnd::Emphasis => italic_depth -= 1,
- TagEnd::Strong => bold_depth -= 1,
- TagEnd::Strikethrough => strikethrough_depth -= 1,
- TagEnd::Link => link_url = None,
- TagEnd::List(_) => drop(list_stack.pop()),
- _ => {}
- },
- Event::HardBreak => text.push('\n'),
- Event::SoftBreak => text.push('\n'),
- _ => {}
- }
- }
-}
-
-pub fn render_code(
- text: &mut String,
- highlights: &mut Vec<(Range<usize>, Highlight)>,
- content: &str,
- language: &Arc<Language>,
-) {
- let prev_len = text.len();
- text.push_str(content);
- let mut offset = 0;
- for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
- if range.start > offset {
- highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code));
- }
- highlights.push((
- prev_len + range.start..prev_len + range.end,
- Highlight::Id(highlight_id),
- ));
- offset = range.end;
- }
- if offset < content.len() {
- highlights.push((prev_len + offset..prev_len + content.len(), Highlight::Code));
- }
-}
-
-pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
- let mut is_subsequent_paragraph_of_list = false;
- if let Some((_, has_content)) = list_stack.last_mut() {
- if *has_content {
- is_subsequent_paragraph_of_list = true;
- } else {
- *has_content = true;
- return;
- }
- }
-
- if !text.is_empty() {
- if !text.ends_with('\n') {
- text.push('\n');
- }
- text.push('\n');
- }
- for _ in 0..list_stack.len().saturating_sub(1) {
- text.push_str(" ");
- }
- if is_subsequent_paragraph_of_list {
- text.push_str(" ");
- }
-}
@@ -693,16 +693,21 @@ impl<'a> Cursor<'a> {
}
pub fn seek_forward(&mut self, end_offset: usize) {
- debug_assert!(end_offset >= self.offset);
+ assert!(
+ end_offset >= self.offset,
+ "cannot seek backward from {} to {}",
+ self.offset,
+ end_offset
+ );
self.chunks.seek_forward(&end_offset, Bias::Right);
self.offset = end_offset;
}
pub fn slice(&mut self, end_offset: usize) -> Rope {
- debug_assert!(
+ assert!(
end_offset >= self.offset,
- "cannot slice backwards from {} to {}",
+ "cannot slice backward from {} to {}",
self.offset,
end_offset
);
@@ -730,7 +735,12 @@ impl<'a> Cursor<'a> {
}
pub fn summary<D: TextDimension>(&mut self, end_offset: usize) -> D {
- debug_assert!(end_offset >= self.offset);
+ assert!(
+ end_offset >= self.offset,
+ "cannot summarize backward from {} to {}",
+ self.offset,
+ end_offset
+ );
let mut summary = D::zero(());
if let Some(start_chunk) = self.chunks.item() {
@@ -7,7 +7,7 @@ license = "GPL-3.0-or-later"
[features]
test-support = [
- "client/test-support",
+
"editor/test-support",
"gpui/test-support",
"workspace/test-support",
@@ -47,7 +47,6 @@ ztracing.workspace = true
tracing.workspace = true
[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
@@ -1,5 +1,9 @@
-use std::{path::PathBuf, sync::Arc};
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+use anyhow::Context;
use collections::{BTreeMap, HashMap};
use gpui::Rgba;
use schemars::JsonSchema;
@@ -233,6 +237,26 @@ pub struct SemanticTokenRules {
pub rules: Vec<SemanticTokenRule>,
}
+impl SemanticTokenRules {
+ pub const FILE_NAME: &'static str = "semantic_token_rules.json";
+
+ pub fn load(file_path: &Path) -> anyhow::Result<Self> {
+ let rules_content = std::fs::read(file_path).with_context(|| {
+ anyhow::anyhow!(
+ "Could not read semantic token rules from {}",
+ file_path.display()
+ )
+ })?;
+
+ serde_json_lenient::from_slice::<SemanticTokenRules>(&rules_content).with_context(|| {
+ anyhow::anyhow!(
+ "Failed to parse semantic token rules from {}",
+ file_path.display()
+ )
+ })
+ }
+}
+
impl crate::merge_from::MergeFrom for SemanticTokenRules {
fn merge_from(&mut self, other: &Self) {
self.rules.splice(0..0, other.rules.iter().cloned());
@@ -22,10 +22,8 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
-language = { workspace = true, features = ["test-support"] }
menu.workspace = true
project = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
@@ -59,20 +59,13 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-assets.workspace = true
-client.workspace = true
fs = { workspace = true, features = ["test-support"] }
futures.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-language.workspace = true
-node_runtime.workspace = true
paths.workspace = true
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
-recent_projects = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
-session.workspace = true
settings = { workspace = true, features = ["test-support"] }
title_bar = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
-zlog.workspace = true
@@ -26,6 +26,7 @@ fs.workspace = true
gpui.workspace = true
menu.workspace = true
project.workspace = true
+recent_projects.workspace = true
settings.workspace = true
theme.workspace = true
ui.workspace = true
@@ -12,20 +12,23 @@ use gpui::{
};
use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
use project::Event as ProjectEvent;
+use recent_projects::RecentProjects;
use settings::Settings;
use std::collections::{HashMap, HashSet};
use std::mem;
use theme::{ActiveTheme, ThemeSettings};
use ui::utils::TRAFFIC_LIGHT_PADDING;
use ui::{
- AgentThreadStatus, HighlightedLabel, IconButtonShape, KeyBinding, ListItem, Tab, ThreadItem,
- Tooltip, WithScrollbar, prelude::*,
+ AgentThreadStatus, ButtonStyle, GradientFade, HighlightedLabel, IconButtonShape, KeyBinding,
+ ListItem, PopoverMenu, PopoverMenuHandle, Tab, ThreadItem, TintColor, Tooltip, WithScrollbar,
+ prelude::*,
};
use util::path_list::PathList;
use workspace::{
FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, Sidebar as WorkspaceSidebar,
SidebarEvent, ToggleWorkspaceSidebar, Workspace,
};
+use zed_actions::OpenRecent;
use zed_actions::editor::{MoveDown, MoveUp};
actions!(
@@ -65,8 +68,19 @@ impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo {
}
}
-#[derive(Clone, Debug)]
-#[allow(dead_code)]
+#[derive(Clone)]
+struct ThreadEntry {
+ session_info: acp_thread::AgentSessionInfo,
+ icon: IconName,
+ icon_from_external_svg: Option<SharedString>,
+ status: AgentThreadStatus,
+ workspace: Entity<Workspace>,
+ is_live: bool,
+ is_background: bool,
+ highlight_positions: Vec<usize>,
+}
+
+#[derive(Clone)]
enum ListEntry {
ProjectHeader {
path_list: PathList,
@@ -75,20 +89,11 @@ enum ListEntry {
highlight_positions: Vec<usize>,
has_threads: bool,
},
- Thread {
- session_info: acp_thread::AgentSessionInfo,
- icon: IconName,
- icon_from_external_svg: Option<SharedString>,
- status: AgentThreadStatus,
- diff_stats: Option<(usize, usize)>,
- workspace: Entity<Workspace>,
- is_live: bool,
- is_background: bool,
- highlight_positions: Vec<usize>,
- },
+ Thread(ThreadEntry),
ViewMore {
path_list: PathList,
remaining_count: usize,
+ is_fully_expanded: bool,
},
NewThread {
path_list: PathList,
@@ -96,6 +101,12 @@ enum ListEntry {
},
}
+impl From<ThreadEntry> for ListEntry {
+ fn from(thread: ThreadEntry) -> Self {
+ ListEntry::Thread(thread)
+ }
+}
+
#[derive(Default)]
struct SidebarContents {
entries: Vec<ListEntry>,
@@ -174,7 +185,8 @@ pub struct Sidebar {
focused_thread: Option<acp::SessionId>,
active_entry_index: Option<usize>,
collapsed_groups: HashSet<PathList>,
- expanded_groups: HashSet<PathList>,
+ expanded_groups: HashMap<PathList, usize>,
+ recent_projects_popover_handle: PopoverMenuHandle<RecentProjects>,
}
impl EventEmitter<SidebarEvent> for Sidebar {}
@@ -226,7 +238,7 @@ impl Sidebar {
.contents
.entries
.iter()
- .position(|entry| matches!(entry, ListEntry::Thread { .. }))
+ .position(|entry| matches!(entry, ListEntry::Thread(_)))
.or_else(|| {
if this.contents.entries.is_empty() {
None
@@ -269,7 +281,8 @@ impl Sidebar {
focused_thread: None,
active_entry_index: None,
collapsed_groups: HashSet::new(),
- expanded_groups: HashSet::new(),
+ expanded_groups: HashMap::new(),
+ recent_projects_popover_handle: PopoverMenuHandle::default(),
}
}
@@ -415,18 +428,20 @@ impl Sidebar {
.entries
.iter()
.filter_map(|entry| match entry {
- ListEntry::Thread {
- session_info,
- status,
- is_live: true,
- ..
- } => Some((session_info.session_id.clone(), *status)),
+ ListEntry::Thread(thread) if thread.is_live => {
+ Some((thread.session_info.session_id.clone(), thread.status))
+ }
_ => None,
})
.collect();
let mut entries = Vec::new();
let mut notified_threads = previous.notified_threads;
+ // Track all session IDs we add to entries so we can prune stale
+ // notifications without a separate pass at the end.
+ let mut current_session_ids: HashSet<acp::SessionId> = HashSet::new();
+ // Compute active_entry_index inline during the build pass.
+ let mut active_entry_index: Option<usize> = None;
for workspace in workspaces.iter() {
let (path_list, label) = workspace_path_list_and_label(workspace, cx);
@@ -434,17 +449,16 @@ impl Sidebar {
let is_collapsed = self.collapsed_groups.contains(&path_list);
let should_load_threads = !is_collapsed || !query.is_empty();
- let mut threads: Vec<ListEntry> = Vec::new();
+ let mut threads: Vec<ThreadEntry> = Vec::new();
if should_load_threads {
if let Some(ref thread_store) = thread_store {
for meta in thread_store.read(cx).threads_for_paths(&path_list) {
- threads.push(ListEntry::Thread {
+ threads.push(ThreadEntry {
session_info: meta.into(),
icon: IconName::ZedAgent,
icon_from_external_svg: None,
status: AgentThreadStatus::default(),
- diff_stats: None,
workspace: workspace.clone(),
is_live: false,
is_background: false,
@@ -455,107 +469,88 @@ impl Sidebar {
let live_infos = Self::all_thread_infos_for_workspace(workspace, cx);
- for info in &live_infos {
- let Some(existing) = threads.iter_mut().find(|t| {
- matches!(t, ListEntry::Thread { session_info, .. } if session_info.session_id == info.session_id)
- }) else {
- continue;
- };
-
- if let ListEntry::Thread {
- session_info,
- status,
- icon,
- icon_from_external_svg,
- workspace: _,
- is_live,
- is_background,
- ..
- } = existing
- {
- session_info.title = Some(info.title.clone());
- *status = info.status;
- *icon = info.icon;
- *icon_from_external_svg = info.icon_from_external_svg.clone();
- *is_live = true;
- *is_background = info.is_background;
+ if !live_infos.is_empty() {
+ let thread_index_by_session: HashMap<acp::SessionId, usize> = threads
+ .iter()
+ .enumerate()
+ .map(|(i, t)| (t.session_info.session_id.clone(), i))
+ .collect();
+
+ for info in &live_infos {
+ let Some(&idx) = thread_index_by_session.get(&info.session_id) else {
+ continue;
+ };
+
+ let thread = &mut threads[idx];
+ thread.session_info.title = Some(info.title.clone());
+ thread.status = info.status;
+ thread.icon = info.icon;
+ thread.icon_from_external_svg = info.icon_from_external_svg.clone();
+ thread.is_live = true;
+ thread.is_background = info.is_background;
}
}
- // Update notification state for live threads.
+ // Update notification state for live threads in the same pass.
+ let is_active_workspace = active_workspace
+ .as_ref()
+ .is_some_and(|active| active == workspace);
+
for thread in &threads {
- if let ListEntry::Thread {
- workspace: thread_workspace,
- session_info,
- status,
- is_background,
- ..
- } = thread
+ let session_id = &thread.session_info.session_id;
+ if thread.is_background && thread.status == AgentThreadStatus::Completed {
+ notified_threads.insert(session_id.clone());
+ } else if thread.status == AgentThreadStatus::Completed
+ && !is_active_workspace
+ && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running)
{
- let session_id = &session_info.session_id;
- if *is_background && *status == AgentThreadStatus::Completed {
- notified_threads.insert(session_id.clone());
- } else if *status == AgentThreadStatus::Completed
- && active_workspace
- .as_ref()
- .is_none_or(|active| active != thread_workspace)
- && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running)
- {
- notified_threads.insert(session_id.clone());
- }
+ notified_threads.insert(session_id.clone());
+ }
- if active_workspace
- .as_ref()
- .is_some_and(|active| active == thread_workspace)
- && !*is_background
- {
- notified_threads.remove(session_id);
- }
+ if is_active_workspace && !thread.is_background {
+ notified_threads.remove(session_id);
}
}
- threads.sort_by(|a, b| {
- let a_time = match a {
- ListEntry::Thread { session_info, .. } => session_info.updated_at,
- _ => unreachable!(),
- };
- let b_time = match b {
- ListEntry::Thread { session_info, .. } => session_info.updated_at,
- _ => unreachable!(),
- };
- b_time.cmp(&a_time)
- });
+ threads.sort_by(|a, b| b.session_info.updated_at.cmp(&a.session_info.updated_at));
}
if !query.is_empty() {
let has_threads = !threads.is_empty();
- let mut matched_threads = Vec::new();
- for mut thread in threads {
- if let ListEntry::Thread {
- session_info,
- highlight_positions,
- ..
- } = &mut thread
- {
- let title = session_info
- .title
- .as_ref()
- .map(|s| s.as_ref())
- .unwrap_or("");
- if let Some(positions) = fuzzy_match_positions(&query, title) {
- *highlight_positions = positions;
- matched_threads.push(thread);
- }
- }
- }
let workspace_highlight_positions =
fuzzy_match_positions(&query, &label).unwrap_or_default();
+ let workspace_matched = !workspace_highlight_positions.is_empty();
+
+ let mut matched_threads: Vec<ThreadEntry> = Vec::new();
+ for mut thread in threads {
+ let title = thread
+ .session_info
+ .title
+ .as_ref()
+ .map(|s| s.as_ref())
+ .unwrap_or("");
+ if let Some(positions) = fuzzy_match_positions(&query, title) {
+ thread.highlight_positions = positions;
+ }
+ if workspace_matched || !thread.highlight_positions.is_empty() {
+ matched_threads.push(thread);
+ }
+ }
- if matched_threads.is_empty() && workspace_highlight_positions.is_empty() {
+ if matched_threads.is_empty() && !workspace_matched {
continue;
}
+ if active_entry_index.is_none()
+ && self.focused_thread.is_none()
+ && active_workspace
+ .as_ref()
+ .is_some_and(|active| active == workspace)
+ {
+ active_entry_index = Some(entries.len());
+ }
+
entries.push(ListEntry::ProjectHeader {
path_list: path_list.clone(),
label,
@@ -563,9 +558,33 @@ impl Sidebar {
highlight_positions: workspace_highlight_positions,
has_threads,
});
- entries.extend(matched_threads);
+
+ // Track session IDs and compute active_entry_index as we add
+ // thread entries.
+ for thread in matched_threads {
+ current_session_ids.insert(thread.session_info.session_id.clone());
+ if active_entry_index.is_none() {
+ if let Some(focused) = &self.focused_thread {
+ if &thread.session_info.session_id == focused {
+ active_entry_index = Some(entries.len());
+ }
+ }
+ }
+ entries.push(thread.into());
+ }
} else {
let has_threads = !threads.is_empty();
+
+ // Check if this header is the active entry before pushing it.
+ if active_entry_index.is_none()
+ && self.focused_thread.is_none()
+ && active_workspace
+ .as_ref()
+ .is_some_and(|active| active == workspace)
+ {
+ active_entry_index = Some(entries.len());
+ }
+
entries.push(ListEntry::ProjectHeader {
path_list: path_list.clone(),
label,
@@ -579,21 +598,32 @@ impl Sidebar {
}
let total = threads.len();
- let show_view_more =
- total > DEFAULT_THREADS_SHOWN && !self.expanded_groups.contains(&path_list);
-
- let count = if show_view_more {
- DEFAULT_THREADS_SHOWN
- } else {
- total
- };
- entries.extend(threads.into_iter().take(count));
+ let extra_batches = self.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ let threads_to_show =
+ DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN);
+ let count = threads_to_show.min(total);
+ let is_fully_expanded = count >= total;
+
+ // Track session IDs and compute active_entry_index as we add
+ // thread entries.
+ for thread in threads.into_iter().take(count) {
+ current_session_ids.insert(thread.session_info.session_id.clone());
+ if active_entry_index.is_none() {
+ if let Some(focused) = &self.focused_thread {
+ if &thread.session_info.session_id == focused {
+ active_entry_index = Some(entries.len());
+ }
+ }
+ }
+ entries.push(thread.into());
+ }
- if show_view_more {
+ if total > DEFAULT_THREADS_SHOWN {
entries.push(ListEntry::ViewMore {
path_list: path_list.clone(),
- remaining_count: total - DEFAULT_THREADS_SHOWN,
+ remaining_count: total.saturating_sub(count),
+ is_fully_expanded,
});
}
@@ -606,16 +636,11 @@ impl Sidebar {
}
}
- // Prune stale entries from notified_threads.
- let current_session_ids: HashSet<&acp::SessionId> = entries
- .iter()
- .filter_map(|e| match e {
- ListEntry::Thread { session_info, .. } => Some(&session_info.session_id),
- _ => None,
- })
- .collect();
+ // Prune stale notifications using the session IDs we collected during
+ // the build pass (no extra scan needed).
notified_threads.retain(|id| current_session_ids.contains(id));
+ self.active_entry_index = active_entry_index;
self.contents = SidebarContents {
entries,
notified_threads,
@@ -632,10 +657,12 @@ impl Sidebar {
let had_notifications = self.has_notifications(cx);
+ let scroll_position = self.list_state.logical_scroll_top();
+
self.rebuild_contents(cx);
- self.recompute_active_entry_index(cx);
self.list_state.reset(self.contents.entries.len());
+ self.list_state.scroll_to(scroll_position);
if had_notifications != self.has_notifications(cx) {
multi_workspace.update(cx, |_, cx| {
@@ -646,24 +673,6 @@ impl Sidebar {
cx.notify();
}
- fn recompute_active_entry_index(&mut self, cx: &App) {
- self.active_entry_index = if let Some(session_id) = &self.focused_thread {
- self.contents.entries.iter().position(|entry| {
- matches!(entry, ListEntry::Thread { session_info, .. } if &session_info.session_id == session_id)
- })
- } else {
- let active_workspace = self
- .multi_workspace
- .upgrade()
- .map(|mw| mw.read(cx).workspace().clone());
- active_workspace.and_then(|active| {
- self.contents.entries.iter().position(|entry| {
- matches!(entry, ListEntry::ProjectHeader { workspace, .. } if workspace == &active)
- })
- })
- };
- }
-
fn render_list_entry(
&mut self,
ix: usize,
@@ -698,29 +707,19 @@ impl Sidebar {
is_selected,
cx,
),
- ListEntry::Thread {
- session_info,
- icon,
- icon_from_external_svg,
- status,
- workspace,
- highlight_positions,
- ..
- } => self.render_thread(
+ ListEntry::Thread(thread) => self.render_thread(ix, thread, is_selected, cx),
+ ListEntry::ViewMore {
+ path_list,
+ remaining_count,
+ is_fully_expanded,
+ } => self.render_view_more(
ix,
- session_info,
- *icon,
- icon_from_external_svg.clone(),
- *status,
- workspace,
- highlight_positions,
+ path_list,
+ *remaining_count,
+ *is_fully_expanded,
is_selected,
cx,
),
- ListEntry::ViewMore {
- path_list,
- remaining_count,
- } => self.render_view_more(ix, path_list, *remaining_count, is_selected, cx),
ListEntry::NewThread {
path_list,
workspace,
@@ -732,6 +731,7 @@ impl Sidebar {
if is_group_header_after_first {
v_flex()
.w_full()
+ .pt_2()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.child(rendered)
@@ -753,6 +753,7 @@ impl Sidebar {
cx: &mut Context<Self>,
) -> AnyElement {
let id = SharedString::from(format!("project-header-{}", ix));
+ let group_name = SharedString::from(format!("header-group-{}", ix));
let ib_id = SharedString::from(format!("project-header-new-thread-{}", ix));
let is_collapsed = self.collapsed_groups.contains(path_list);
@@ -764,7 +765,11 @@ impl Sidebar {
let workspace_for_new_thread = workspace.clone();
let workspace_for_remove = workspace.clone();
// let workspace_for_activate = workspace.clone();
+
let path_list_for_toggle = path_list.clone();
+ let path_list_for_collapse = path_list.clone();
+ let view_more_expanded = self.expanded_groups.contains_key(path_list);
+
let multi_workspace = self.multi_workspace.upgrade();
let workspace_count = multi_workspace
.as_ref()
@@ -786,11 +791,24 @@ impl Sidebar {
.into_any_element()
};
+ let color = cx.theme().colors();
+ let gradient_overlay = GradientFade::new(
+ color.panel_background,
+ color.element_hover,
+ color.element_active,
+ )
+ .width(px(48.0))
+ .group_name(group_name.clone());
+
ListItem::new(id)
+ .group_name(group_name)
.toggle_state(is_active_workspace)
.focused(is_selected)
.child(
h_flex()
+ .relative()
+ .min_w_0()
+ .w_full()
.p_1()
.gap_1p5()
.child(
@@ -798,11 +816,11 @@ impl Sidebar {
.size(IconSize::Small)
.color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.6))),
)
- .child(label),
+ .child(label)
+ .child(gradient_overlay),
)
.end_hover_slot(
h_flex()
- .gap_0p5()
.when(workspace_count > 1, |this| {
this.child(
IconButton::new(
@@ -819,6 +837,25 @@ impl Sidebar {
)),
)
})
+ .when(view_more_expanded && !is_collapsed, |this| {
+ this.child(
+ IconButton::new(
+ SharedString::from(format!("project-header-collapse-{}", ix)),
+ IconName::ListCollapse,
+ )
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip(Tooltip::text("Collapse Displayed Threads"))
+ .on_click(cx.listener({
+ let path_list_for_collapse = path_list_for_collapse.clone();
+ move |this, _, _window, cx| {
+ this.selection = None;
+ this.expanded_groups.remove(&path_list_for_collapse);
+ this.update_entries(cx);
+ }
+ })),
+ )
+ })
.when(has_threads, |this| {
this.child(
IconButton::new(ib_id, IconName::NewThread)
@@ -922,8 +959,8 @@ impl Sidebar {
})
}
- fn filter_query(&self, cx: &App) -> String {
- self.filter_editor.read(cx).text(cx)
+ fn has_filter_query(&self, cx: &App) -> bool {
+ !self.filter_editor.read(cx).buffer().read(cx).is_empty()
}
fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context<Self>) {
@@ -988,18 +1025,23 @@ impl Sidebar {
let workspace = workspace.clone();
self.activate_workspace(&workspace, window, cx);
}
- ListEntry::Thread {
- session_info,
- workspace,
- ..
- } => {
- let session_info = session_info.clone();
- let workspace = workspace.clone();
+ ListEntry::Thread(thread) => {
+ let session_info = thread.session_info.clone();
+ let workspace = thread.workspace.clone();
self.activate_thread(session_info, &workspace, window, cx);
}
- ListEntry::ViewMore { path_list, .. } => {
+ ListEntry::ViewMore {
+ path_list,
+ is_fully_expanded,
+ ..
+ } => {
let path_list = path_list.clone();
- self.expanded_groups.insert(path_list);
+ if *is_fully_expanded {
+ self.expanded_groups.remove(&path_list);
+ } else {
+ let current = self.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ self.expanded_groups.insert(path_list, current + 1);
+ }
self.update_entries(cx);
}
ListEntry::NewThread { workspace, .. } => {
@@ -1082,7 +1124,7 @@ impl Sidebar {
}
}
Some(
- ListEntry::Thread { .. } | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. },
+ ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. },
) => {
for i in (0..ix).rev() {
if let Some(ListEntry::ProjectHeader { path_list, .. }) =
@@ -1103,32 +1145,30 @@ impl Sidebar {
fn render_thread(
&self,
ix: usize,
- session_info: &acp_thread::AgentSessionInfo,
- icon: IconName,
- icon_from_external_svg: Option<SharedString>,
- status: AgentThreadStatus,
- workspace: &Entity<Workspace>,
- highlight_positions: &[usize],
+ thread: &ThreadEntry,
is_selected: bool,
cx: &mut Context<Self>,
) -> AnyElement {
- let has_notification = self.contents.is_thread_notified(&session_info.session_id);
+ let has_notification = self
+ .contents
+ .is_thread_notified(&thread.session_info.session_id);
- let title: SharedString = session_info
+ let title: SharedString = thread
+ .session_info
.title
.clone()
.unwrap_or_else(|| "Untitled".into());
- let session_info = session_info.clone();
- let workspace = workspace.clone();
+ let session_info = thread.session_info.clone();
+ let workspace = thread.workspace.clone();
let id = SharedString::from(format!("thread-entry-{}", ix));
ThreadItem::new(id, title)
- .icon(icon)
- .when_some(icon_from_external_svg, |this, svg| {
+ .icon(thread.icon)
+ .when_some(thread.icon_from_external_svg.clone(), |this, svg| {
this.custom_icon_from_external_svg(svg)
})
- .highlight_positions(highlight_positions.to_vec())
- .status(status)
+ .highlight_positions(thread.highlight_positions.to_vec())
+ .status(thread.status)
.notified(has_notification)
.selected(self.focused_thread.as_ref() == Some(&session_info.session_id))
.focused(is_selected)
@@ -1139,6 +1179,48 @@ impl Sidebar {
.into_any_element()
}
+ fn render_recent_projects_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ let workspace = self
+ .multi_workspace
+ .upgrade()
+ .map(|mw| mw.read(cx).workspace().downgrade());
+
+ let focus_handle = workspace
+ .as_ref()
+ .and_then(|ws| ws.upgrade())
+ .map(|w| w.read(cx).focus_handle(cx))
+ .unwrap_or_else(|| cx.focus_handle());
+
+ let popover_handle = self.recent_projects_popover_handle.clone();
+
+ PopoverMenu::new("sidebar-recent-projects-menu")
+ .with_handle(popover_handle)
+ .menu(move |window, cx| {
+ workspace.as_ref().map(|ws| {
+ RecentProjects::popover(ws.clone(), false, focus_handle.clone(), window, cx)
+ })
+ })
+ .trigger_with_tooltip(
+ IconButton::new("open-project", IconName::OpenFolder)
+ .icon_size(IconSize::Small)
+ .selected_style(ButtonStyle::Tinted(TintColor::Accent)),
+ |_window, cx| {
+ Tooltip::for_action(
+ "Recent Projects",
+ &OpenRecent {
+ create_new_window: false,
+ },
+ cx,
+ )
+ },
+ )
+ .anchor(gpui::Corner::TopLeft)
+ .offset(gpui::Point {
+ x: px(0.0),
+ y: px(2.0),
+ })
+ }
+
fn render_filter_input(&self, cx: &mut Context<Self>) -> impl IntoElement {
let settings = ThemeSettings::get_global(cx);
let text_style = TextStyle {
@@ -1168,32 +1250,42 @@ impl Sidebar {
ix: usize,
path_list: &PathList,
remaining_count: usize,
+ is_fully_expanded: bool,
is_selected: bool,
cx: &mut Context<Self>,
) -> AnyElement {
let path_list = path_list.clone();
let id = SharedString::from(format!("view-more-{}", ix));
- let count = format!("({})", remaining_count);
+ let (icon, label) = if is_fully_expanded {
+ (IconName::ListCollapse, "Collapse List")
+ } else {
+ (IconName::Plus, "View More")
+ };
ListItem::new(id)
.focused(is_selected)
.child(
h_flex()
- .px_1()
- .py_1p5()
+ .p_1()
.gap_1p5()
- .child(
- Icon::new(IconName::Plus)
- .size(IconSize::Small)
- .color(Color::Muted),
- )
- .child(Label::new("View More").color(Color::Muted))
- .child(Label::new(count).color(Color::Muted).size(LabelSize::Small)),
+ .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
+ .child(Label::new(label).color(Color::Muted))
+ .when(!is_fully_expanded, |this| {
+ this.child(
+ Label::new(format!("({})", remaining_count))
+ .color(Color::Custom(cx.theme().colors().text_muted.opacity(0.5))),
+ )
+ }),
)
.on_click(cx.listener(move |this, _, _window, cx| {
this.selection = None;
- this.expanded_groups.insert(path_list.clone());
+ if is_fully_expanded {
+ this.expanded_groups.remove(&path_list);
+ } else {
+ let current = this.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ this.expanded_groups.insert(path_list.clone(), current + 1);
+ }
this.update_entries(cx);
}))
.into_any_element()
@@ -1270,6 +1362,14 @@ impl WorkspaceSidebar for Sidebar {
fn has_notifications(&self, _cx: &App) -> bool {
!self.contents.notified_threads.is_empty()
}
+
+ fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) {
+ self.recent_projects_popover_handle.toggle(window, cx);
+ }
+
+ fn is_recent_projects_popover_deployed(&self) -> bool {
+ self.recent_projects_popover_handle.is_deployed()
+ }
}
impl Focusable for Sidebar {
@@ -1284,7 +1384,7 @@ impl Render for Sidebar {
let ui_font = theme::setup_ui_font(window, cx);
let is_focused = self.focus_handle.is_focused(window)
|| self.filter_editor.focus_handle(cx).is_focused(window);
- let has_query = !self.filter_query(cx).is_empty();
+ let has_query = self.has_filter_query(cx);
let focus_tooltip_label = if is_focused {
"Focus Workspace"
@@ -1367,27 +1467,7 @@ impl Render for Sidebar {
cx.emit(SidebarEvent::Close);
}))
})
- .child(
- IconButton::new("open-project", IconName::OpenFolder)
- .icon_size(IconSize::Small)
- .tooltip(|_window, cx| {
- Tooltip::for_action(
- "Open Project",
- &workspace::Open {
- create_new_window: false,
- },
- cx,
- )
- })
- .on_click(|_event, window, cx| {
- window.dispatch_action(
- Box::new(workspace::Open {
- create_new_window: false,
- }),
- cx,
- );
- }),
- ),
+ .child(self.render_recent_projects_button(cx)),
)
.child(
h_flex()
@@ -1594,19 +1674,15 @@ mod tests {
};
format!("{} [{}]{}", icon, label, selected)
}
- ListEntry::Thread {
- session_info,
- status,
- is_live,
- ..
- } => {
- let title = session_info
+ ListEntry::Thread(thread) => {
+ let title = thread
+ .session_info
.title
.as_ref()
.map(|s| s.as_ref())
.unwrap_or("Untitled");
- let active = if *is_live { " *" } else { "" };
- let status_str = match status {
+ let active = if thread.is_live { " *" } else { "" };
+ let status_str = match thread.status {
AgentThreadStatus::Running => " (running)",
AgentThreadStatus::Error => " (error)",
AgentThreadStatus::WaitingForConfirmation => " (waiting)",
@@ -1614,7 +1690,7 @@ mod tests {
};
let notified = if sidebar
.contents
- .is_thread_notified(&session_info.session_id)
+ .is_thread_notified(&thread.session_info.session_id)
{
" (!)"
} else {
@@ -1626,9 +1702,15 @@ mod tests {
)
}
ListEntry::ViewMore {
- remaining_count, ..
+ remaining_count,
+ is_fully_expanded,
+ ..
} => {
- format!(" + View More ({}){}", remaining_count, selected)
+ if *is_fully_expanded {
+ format!(" - Collapse{}", selected)
+ } else {
+ format!(" + View More ({}){}", remaining_count, selected)
+ }
}
ListEntry::NewThread { .. } => {
format!(" [+ New Thread]{}", selected)
@@ -1790,6 +1872,78 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
+ let project = init_test_project("/my-project", cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
+ // Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse
+ save_n_test_threads(17, &path_list, cx).await;
+
+ multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+ cx.run_until_parked();
+
+ // Initially shows 5 threads + View More (12 remaining)
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert_eq!(entries.len(), 7); // header + 5 threads + View More
+ assert!(entries.iter().any(|e| e.contains("View More (12)")));
+
+ // Focus and navigate to View More, then confirm to expand by one batch
+ open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
+ for _ in 0..7 {
+ cx.dispatch_action(SelectNext);
+ }
+ cx.dispatch_action(Confirm);
+ cx.run_until_parked();
+
+ // Now shows 10 threads + View More (7 remaining)
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert_eq!(entries.len(), 12); // header + 10 threads + View More
+ assert!(entries.iter().any(|e| e.contains("View More (7)")));
+
+ // Expand again by one batch
+ sidebar.update_in(cx, |s, _window, cx| {
+ let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ s.expanded_groups.insert(path_list.clone(), current + 1);
+ s.update_entries(cx);
+ });
+ cx.run_until_parked();
+
+ // Now shows 15 threads + View More (2 remaining)
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert_eq!(entries.len(), 17); // header + 15 threads + View More
+ assert!(entries.iter().any(|e| e.contains("View More (2)")));
+
+ // Expand one more time - should show all 17 threads with Collapse button
+ sidebar.update_in(cx, |s, _window, cx| {
+ let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ s.expanded_groups.insert(path_list.clone(), current + 1);
+ s.update_entries(cx);
+ });
+ cx.run_until_parked();
+
+ // All 17 threads shown with Collapse button
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert_eq!(entries.len(), 19); // header + 17 threads + Collapse
+ assert!(!entries.iter().any(|e| e.contains("View More")));
+ assert!(entries.iter().any(|e| e.contains("Collapse")));
+
+ // Click collapse - should go back to showing 5 threads
+ sidebar.update_in(cx, |s, _window, cx| {
+ s.expanded_groups.remove(&path_list);
+ s.update_entries(cx);
+ });
+ cx.run_until_parked();
+
+ // Back to initial state: 5 threads + View More (12 remaining)
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert_eq!(entries.len(), 7); // header + 5 threads + View More
+ assert!(entries.iter().any(|e| e.contains("View More (12)")));
+ }
+
#[gpui::test]
async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
@@ -1857,7 +2011,7 @@ mod tests {
has_threads: true,
},
// Thread with default (Completed) status, not active
- ListEntry::Thread {
+ ListEntry::Thread(ThreadEntry {
session_info: acp_thread::AgentSessionInfo {
session_id: acp::SessionId::new(Arc::from("t-1")),
cwd: None,
@@ -1868,14 +2022,13 @@ mod tests {
icon: IconName::ZedAgent,
icon_from_external_svg: None,
status: AgentThreadStatus::Completed,
- diff_stats: None,
workspace: workspace.clone(),
is_live: false,
is_background: false,
highlight_positions: Vec::new(),
- },
+ }),
// Active thread with Running status
- ListEntry::Thread {
+ ListEntry::Thread(ThreadEntry {
session_info: acp_thread::AgentSessionInfo {
session_id: acp::SessionId::new(Arc::from("t-2")),
cwd: None,
@@ -29,10 +29,8 @@ util.workspace = true
workspace.workspace = true
[dev-dependencies]
-anyhow.workspace = true
ctor.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-language = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
theme = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -114,6 +114,7 @@ pub enum HideStrategy {
pub struct TaskTemplates(pub Vec<TaskTemplate>);
impl TaskTemplates {
+ pub const FILE_NAME: &str = "tasks.json";
/// Generates JSON schema of Tasks JSON template format.
pub fn generate_json_schema() -> serde_json::Value {
let schema = schemars::generate::SchemaSettings::draft2019_09()
@@ -51,7 +51,6 @@ windows.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
rand.workspace = true
-serde_json.workspace = true
settings = { workspace = true, features = ["test-support"] }
tempfile.workspace = true
util_macros.workspace = true
@@ -48,11 +48,9 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-client = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
-rand.workspace = true
terminal = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -35,6 +35,4 @@ ctor.workspace = true
gpui = { workspace = true, features = ["test-support"] }
rand.workspace = true
util = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
zlog.workspace = true
-proptest.workspace = true
@@ -18,9 +18,7 @@ stories = ["dep:story"]
test-support = [
"call/test-support",
"client/test-support",
- "collections/test-support",
"gpui/test-support",
- "http_client/test-support",
"project/test-support",
"remote/test-support",
"util/test-support",
@@ -65,17 +65,13 @@ windows.workspace = true
[dev-dependencies]
call = { workspace = true, features = ["test-support"] }
client = { workspace = true, features = ["test-support"] }
-collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
-http_client = { workspace = true, features = ["test-support"] }
notifications = { workspace = true, features = ["test-support"] }
-pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
release_channel.workspace = true
remote = { workspace = true, features = ["test-support"] }
rpc = { workspace = true, features = ["test-support"] }
semver.workspace = true
settings = { workspace = true, features = ["test-support"] }
-tree-sitter-md.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -151,6 +151,7 @@ pub struct TitleBar {
user_store: Entity<UserStore>,
client: Arc<Client>,
workspace: WeakEntity<Workspace>,
+ multi_workspace: Option<WeakEntity<MultiWorkspace>>,
application_menu: Option<Entity<ApplicationMenu>>,
_subscriptions: Vec<Subscription>,
banner: Entity<OnboardingBanner>,
@@ -188,7 +189,7 @@ impl Render for TitleBar {
.when(title_bar_settings.show_project_items, |title_bar| {
title_bar
.children(self.render_project_host(cx))
- .child(self.render_project_name(cx))
+ .child(self.render_project_name(window, cx))
})
.when(title_bar_settings.show_branch_name, |title_bar| {
title_bar.children(self.render_project_branch(cx))
@@ -389,6 +390,7 @@ impl TitleBar {
if let Some(this) = this.upgrade() {
this.update(cx, |this, _| {
this._subscriptions.push(subscription);
+ this.multi_workspace = Some(multi_workspace.downgrade());
});
}
});
@@ -400,6 +402,7 @@ impl TitleBar {
platform_titlebar,
application_menu,
workspace: workspace.weak_handle(),
+ multi_workspace: None,
project,
user_store,
client,
@@ -718,7 +721,11 @@ impl TitleBar {
)
}
- pub fn render_project_name(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ pub fn render_project_name(
+ &self,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
let workspace = self.workspace.clone();
let name = self.effective_active_worktree(cx).map(|worktree| {
@@ -734,6 +741,19 @@ impl TitleBar {
"Open Recent Project".to_string()
};
+ let is_sidebar_open = self.platform_titlebar.read(cx).is_workspace_sidebar_open();
+
+ if is_sidebar_open {
+ return self
+ .render_project_name_with_sidebar_popover(
+ window,
+ display_name,
+ is_project_selected,
+ cx,
+ )
+ .into_any_element();
+ }
+
let focus_handle = workspace
.upgrade()
.map(|w| w.read(cx).focus_handle(cx))
@@ -773,6 +793,49 @@ impl TitleBar {
.into_any_element()
}
+ fn render_project_name_with_sidebar_popover(
+ &self,
+ _window: &Window,
+ display_name: String,
+ is_project_selected: bool,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
+ let multi_workspace = self.multi_workspace.clone();
+
+ let is_popover_deployed = multi_workspace
+ .as_ref()
+ .and_then(|mw| mw.upgrade())
+ .map(|mw| mw.read(cx).is_recent_projects_popover_deployed(cx))
+ .unwrap_or(false);
+
+ Button::new("project_name_trigger", display_name)
+ .label_size(LabelSize::Small)
+ .when(self.worktree_count(cx) > 1, |this| {
+ this.icon(IconName::ChevronDown)
+ .icon_color(Color::Muted)
+ .icon_size(IconSize::XSmall)
+ })
+ .toggle_state(is_popover_deployed)
+ .selected_style(ButtonStyle::Tinted(TintColor::Accent))
+ .when(!is_project_selected, |s| s.color(Color::Muted))
+ .tooltip(move |_window, cx| {
+ Tooltip::for_action(
+ "Recent Projects",
+ &zed_actions::OpenRecent {
+ create_new_window: false,
+ },
+ cx,
+ )
+ })
+ .on_click(move |_, window, cx| {
+ if let Some(mw) = multi_workspace.as_ref().and_then(|mw| mw.upgrade()) {
+ mw.update(cx, |mw, cx| {
+ mw.toggle_recent_projects_popover(window, cx);
+ });
+ }
+ })
+ }
+
pub fn render_project_branch(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> {
let effective_worktree = self.effective_active_worktree(cx)?;
let repository = self.get_repository_for_worktree(&effective_worktree, cx)?;
@@ -12,6 +12,7 @@ mod disclosure;
mod divider;
mod dropdown_menu;
mod facepile;
+mod gradient_fade;
mod group;
mod icon;
mod image;
@@ -54,6 +55,7 @@ pub use disclosure::*;
pub use divider::*;
pub use dropdown_menu::*;
pub use facepile::*;
+pub use gradient_fade::*;
pub use group::*;
pub use icon::*;
pub use image::*;
@@ -1,9 +1,9 @@
use crate::{
- DecoratedIcon, DiffStat, HighlightedLabel, IconDecoration, IconDecorationKind, SpinnerLabel,
- prelude::*,
+ DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, IconDecorationKind,
+ SpinnerLabel, prelude::*,
};
-use gpui::{AnyView, ClickEvent, Hsla, SharedString, linear_color_stop, linear_gradient};
+use gpui::{AnyView, ClickEvent, Hsla, SharedString};
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum AgentThreadStatus {
@@ -220,24 +220,12 @@ impl RenderOnce for ThreadItem {
color.panel_background
};
- let gradient_overlay = div()
- .absolute()
- .top_0()
- .right(px(-10.0))
- .w_12()
- .h_full()
- .bg(linear_gradient(
- 90.,
- linear_color_stop(base_bg, 0.6),
- linear_color_stop(base_bg.opacity(0.0), 0.),
- ))
- .group_hover("thread-item", |s| {
- s.bg(linear_gradient(
- 90.,
- linear_color_stop(color.element_hover, 0.6),
- linear_color_stop(color.element_hover.opacity(0.0), 0.),
- ))
- });
+ let gradient_overlay =
+ GradientFade::new(base_bg, color.element_hover, color.element_active)
+ .width(px(32.0))
+ .right(px(-10.0))
+ .gradient_stop(0.8)
+ .group_name("thread-item");
v_flex()
.id(self.id.clone())
@@ -18,216 +18,9 @@ use crate::{
};
use itertools::intersperse_with;
-pub mod table_row {
- //! A newtype for a table row that enforces a fixed column count at runtime.
- //!
- //! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths.
- //! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime.
- //! By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec<T>`, without requiring const generics.
-
- use std::{
- any::type_name,
- ops::{
- Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive,
- },
- };
-
- #[derive(Clone, Debug, PartialEq, Eq)]
- pub struct TableRow<T>(Vec<T>);
-
- impl<T> TableRow<T> {
- pub fn from_element(element: T, length: usize) -> Self
- where
- T: Clone,
- {
- Self::from_vec(vec![element; length], length)
- }
-
- /// Constructs a `TableRow` from a `Vec<T>`, panicking if the length does not match `expected_length`.
- ///
- /// Use this when you want to ensure at construction time that the row has the correct number of columns.
- /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows.
- ///
- /// # Panics
- /// Panics if `data.len() != expected_length`.
- pub fn from_vec(data: Vec<T>, expected_length: usize) -> Self {
- Self::try_from_vec(data, expected_length).unwrap_or_else(|e| {
- let name = type_name::<Vec<T>>();
- panic!("Expected {name} to be created successfully: {e}");
- })
- }
-
- /// Attempts to construct a `TableRow` from a `Vec<T>`, returning an error if the length does not match `expected_len`.
- ///
- /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully.
- /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise.
- pub fn try_from_vec(data: Vec<T>, expected_len: usize) -> Result<Self, String> {
- if data.len() != expected_len {
- Err(format!(
- "Row length {} does not match expected {}",
- data.len(),
- expected_len
- ))
- } else {
- Ok(Self(data))
- }
- }
-
- /// Returns reference to element by column index.
- ///
- /// # Panics
- /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`).
- pub fn expect_get(&self, col: impl Into<usize>) -> &T {
- let col = col.into();
- self.0.get(col).unwrap_or_else(|| {
- panic!(
- "Expected table row of `{}` to have {col:?}",
- type_name::<T>()
- )
- })
- }
-
- pub fn get(&self, col: impl Into<usize>) -> Option<&T> {
- self.0.get(col.into())
- }
-
- pub fn as_slice(&self) -> &[T] {
- &self.0
- }
-
- pub fn into_vec(self) -> Vec<T> {
- self.0
- }
-
- /// Like [`map`], but borrows the row and clones each element before mapping.
- ///
- /// This is useful when you want to map over a borrowed row without consuming it,
- /// but your mapping function requires ownership of each element.
- ///
- /// # Difference
- /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`.
- /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row.
- /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element.
- pub fn map_cloned<F, U>(&self, f: F) -> TableRow<U>
- where
- F: FnMut(T) -> U,
- T: Clone,
- {
- self.clone().map(f)
- }
-
- /// Consumes the row and transforms all elements within it in a length-safe way.
- ///
- /// # Difference
- /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element.
- /// - Use this when you want to transform and consume the row in one step.
- /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references).
- pub fn map<F, U>(self, f: F) -> TableRow<U>
- where
- F: FnMut(T) -> U,
- {
- TableRow(self.0.into_iter().map(f).collect())
- }
-
- /// Borrows the row and transforms all elements by reference in a length-safe way.
- ///
- /// # Difference
- /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference.
- /// - Use this when you want to map over a borrowed row without cloning or consuming it.
- /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning).
- pub fn map_ref<F, U>(&self, f: F) -> TableRow<U>
- where
- F: FnMut(&T) -> U,
- {
- TableRow(self.0.iter().map(f).collect())
- }
-
- /// Number of columns (alias to `len()` with more semantic meaning)
- pub fn cols(&self) -> usize {
- self.0.len()
- }
- }
-
- ///// Convenience traits /////
- pub trait IntoTableRow<T> {
- fn into_table_row(self, expected_length: usize) -> TableRow<T>;
- }
- impl<T> IntoTableRow<T> for Vec<T> {
- fn into_table_row(self, expected_length: usize) -> TableRow<T> {
- TableRow::from_vec(self, expected_length)
- }
- }
-
- // Index implementations for convenient access
- impl<T> Index<usize> for TableRow<T> {
- type Output = T;
-
- fn index(&self, index: usize) -> &Self::Output {
- &self.0[index]
- }
- }
-
- impl<T> IndexMut<usize> for TableRow<T> {
- fn index_mut(&mut self, index: usize) -> &mut Self::Output {
- &mut self.0[index]
- }
- }
-
- // Range indexing implementations for slice operations
- impl<T> Index<Range<usize>> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: Range<usize>) -> &Self::Output {
- <Vec<T> as Index<Range<usize>>>::index(&self.0, index)
- }
- }
-
- impl<T> Index<RangeFrom<usize>> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: RangeFrom<usize>) -> &Self::Output {
- <Vec<T> as Index<RangeFrom<usize>>>::index(&self.0, index)
- }
- }
-
- impl<T> Index<RangeTo<usize>> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: RangeTo<usize>) -> &Self::Output {
- <Vec<T> as Index<RangeTo<usize>>>::index(&self.0, index)
- }
- }
-
- impl<T> Index<RangeToInclusive<usize>> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: RangeToInclusive<usize>) -> &Self::Output {
- <Vec<T> as Index<RangeToInclusive<usize>>>::index(&self.0, index)
- }
- }
-
- impl<T> Index<RangeFull> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: RangeFull) -> &Self::Output {
- <Vec<T> as Index<RangeFull>>::index(&self.0, index)
- }
- }
-
- impl<T> Index<RangeInclusive<usize>> for TableRow<T> {
- type Output = [T];
-
- fn index(&self, index: RangeInclusive<usize>) -> &Self::Output {
- <Vec<T> as Index<RangeInclusive<usize>>>::index(&self.0, index)
- }
- }
-
- impl<T> IndexMut<RangeInclusive<usize>> for TableRow<T> {
- fn index_mut(&mut self, index: RangeInclusive<usize>) -> &mut Self::Output {
- <Vec<T> as IndexMut<RangeInclusive<usize>>>::index_mut(&mut self.0, index)
- }
- }
-}
+pub mod table_row;
+#[cfg(test)]
+mod tests;
const RESIZE_COLUMN_WIDTH: f32 = 8.0;
@@ -1445,330 +1238,3 @@ impl Component for Table {
)
}
}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- fn is_almost_eq(a: &[f32], b: &[f32]) -> bool {
- a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6)
- }
-
- fn cols_to_str(cols: &[f32], total_size: f32) -> String {
- cols.iter()
- .map(|f| "*".repeat(f32::round(f * total_size) as usize))
- .collect::<Vec<String>>()
- .join("|")
- }
-
- fn parse_resize_behavior(
- input: &str,
- total_size: f32,
- expected_cols: usize,
- ) -> Vec<TableResizeBehavior> {
- let mut resize_behavior = Vec::with_capacity(expected_cols);
- for col in input.split('|') {
- if col.starts_with('X') || col.is_empty() {
- resize_behavior.push(TableResizeBehavior::None);
- } else if col.starts_with('*') {
- resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / total_size));
- } else {
- panic!("invalid test input: unrecognized resize behavior: {}", col);
- }
- }
-
- if resize_behavior.len() != expected_cols {
- panic!(
- "invalid test input: expected {} columns, got {}",
- expected_cols,
- resize_behavior.len()
- );
- }
- resize_behavior
- }
-
- mod reset_column_size {
- use super::*;
-
- fn parse(input: &str) -> (Vec<f32>, f32, Option<usize>) {
- let mut widths = Vec::new();
- let mut column_index = None;
- for (index, col) in input.split('|').enumerate() {
- widths.push(col.len() as f32);
- if col.starts_with('X') {
- column_index = Some(index);
- }
- }
-
- for w in &widths {
- assert!(w.is_finite(), "incorrect number of columns");
- }
- let total = widths.iter().sum::<f32>();
- for width in &mut widths {
- *width /= total;
- }
- (widths, total, column_index)
- }
-
- #[track_caller]
- fn check_reset_size(
- initial_sizes: &str,
- widths: &str,
- expected: &str,
- resize_behavior: &str,
- ) {
- let (initial_sizes, total_1, None) = parse(initial_sizes) else {
- panic!("invalid test input: initial sizes should not be marked");
- };
- let (widths, total_2, Some(column_index)) = parse(widths) else {
- panic!("invalid test input: widths should be marked");
- };
- assert_eq!(
- total_1, total_2,
- "invalid test input: total width not the same {total_1}, {total_2}"
- );
- let (expected, total_3, None) = parse(expected) else {
- panic!("invalid test input: expected should not be marked: {expected:?}");
- };
- assert_eq!(
- total_2, total_3,
- "invalid test input: total width not the same"
- );
- let cols = initial_sizes.len();
- let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols);
- let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols);
- let result = TableColumnWidths::reset_to_initial_size(
- column_index,
- TableRow::from_vec(widths, cols),
- TableRow::from_vec(initial_sizes, cols),
- &resize_behavior,
- );
- let result_slice = result.as_slice();
- let is_eq = is_almost_eq(result_slice, &expected);
- if !is_eq {
- let result_str = cols_to_str(result_slice, total_1);
- let expected_str = cols_to_str(&expected, total_1);
- panic!(
- "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}"
- );
- }
- }
-
- macro_rules! check_reset_size {
- (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => {
- check_reset_size($initial, $current, $expected, $resizing);
- };
- ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => {
- #[test]
- fn $name() {
- check_reset_size($initial, $current, $expected, $resizing);
- }
- };
- }
-
- check_reset_size!(
- basic_right,
- columns: 5,
- starting: "**|**|**|**|**",
- snapshot: "**|**|X|***|**",
- expected: "**|**|**|**|**",
- minimums: "X|*|*|*|*",
- );
-
- check_reset_size!(
- basic_left,
- columns: 5,
- starting: "**|**|**|**|**",
- snapshot: "**|**|***|X|**",
- expected: "**|**|**|**|**",
- minimums: "X|*|*|*|**",
- );
-
- check_reset_size!(
- squashed_left_reset_col2,
- columns: 6,
- starting: "*|***|**|**|****|*",
- snapshot: "*|*|X|*|*|********",
- expected: "*|*|**|*|*|*******",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- grow_cascading_right,
- columns: 6,
- starting: "*|***|****|**|***|*",
- snapshot: "*|***|X|**|**|*****",
- expected: "*|***|****|*|*|****",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- squashed_right_reset_col4,
- columns: 6,
- starting: "*|***|**|**|****|*",
- snapshot: "*|********|*|*|X|*",
- expected: "*|*****|*|*|****|*",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- reset_col6_right,
- columns: 6,
- starting: "*|***|**|***|***|**",
- snapshot: "*|***|**|***|**|XXX",
- expected: "*|***|**|***|***|**",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- reset_col6_left,
- columns: 6,
- starting: "*|***|**|***|***|**",
- snapshot: "*|***|**|***|****|X",
- expected: "*|***|**|***|***|**",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- last_column_grow_cascading,
- columns: 6,
- starting: "*|***|**|**|**|***",
- snapshot: "*|*******|*|**|*|X",
- expected: "*|******|*|*|*|***",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- goes_left_when_left_has_extreme_diff,
- columns: 6,
- starting: "*|***|****|**|**|***",
- snapshot: "*|********|X|*|**|**",
- expected: "*|*****|****|*|**|**",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- basic_shrink_right,
- columns: 6,
- starting: "**|**|**|**|**|**",
- snapshot: "**|**|XXX|*|**|**",
- expected: "**|**|**|**|**|**",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- shrink_should_go_left,
- columns: 6,
- starting: "*|***|**|*|*|*",
- snapshot: "*|*|XXX|**|*|*",
- expected: "*|**|**|**|*|*",
- minimums: "X|*|*|*|*|*",
- );
-
- check_reset_size!(
- shrink_should_go_right,
- columns: 6,
- starting: "*|***|**|**|**|*",
- snapshot: "*|****|XXX|*|*|*",
- expected: "*|****|**|**|*|*",
- minimums: "X|*|*|*|*|*",
- );
- }
-
- mod drag_handle {
- use super::*;
-
- fn parse(input: &str) -> (Vec<f32>, f32, Option<usize>) {
- let mut widths = Vec::new();
- let column_index = input.replace("*", "").find("I");
- for col in input.replace("I", "|").split('|') {
- widths.push(col.len() as f32);
- }
-
- for w in &widths {
- assert!(w.is_finite(), "incorrect number of columns");
- }
- let total = widths.iter().sum::<f32>();
- for width in &mut widths {
- *width /= total;
- }
- (widths, total, column_index)
- }
-
- #[track_caller]
- fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) {
- let (widths, total_1, Some(column_index)) = parse(widths) else {
- panic!("invalid test input: widths should be marked");
- };
- let (expected, total_2, None) = parse(expected) else {
- panic!("invalid test input: expected should not be marked: {expected:?}");
- };
- assert_eq!(
- total_1, total_2,
- "invalid test input: total width not the same"
- );
- let cols = widths.len();
- let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols);
- let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols);
-
- let distance = distance as f32 / total_1;
-
- let mut widths_table_row = TableRow::from_vec(widths, cols);
- TableColumnWidths::drag_column_handle(
- distance,
- column_index,
- &mut widths_table_row,
- &resize_behavior,
- );
-
- let result_widths = widths_table_row.as_slice();
- let is_eq = is_almost_eq(result_widths, &expected);
- if !is_eq {
- let result_str = cols_to_str(result_widths, total_1);
- let expected_str = cols_to_str(&expected, total_1);
- panic!(
- "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}"
- );
- }
- }
-
- macro_rules! check {
- (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => {
- check($dist, $current, $expected, $resizing);
- };
- ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => {
- #[test]
- fn $name() {
- check($dist, $current, $expected, $resizing);
- }
- };
- }
-
- check!(
- basic_right_drag,
- columns: 3,
- distance: 1,
- snapshot: "**|**I**",
- expected: "**|***|*",
- minimums: "X|*|*",
- );
-
- check!(
- drag_left_against_mins,
- columns: 5,
- distance: -1,
- snapshot: "*|*|*|*I*******",
- expected: "*|*|*|*|*******",
- minimums: "X|*|*|*|*",
- );
-
- check!(
- drag_left,
- columns: 5,
- distance: -2,
- snapshot: "*|*|*|*****I***",
- expected: "*|*|*|***|*****",
- minimums: "X|*|*|*|*",
- );
- }
-}
@@ -0,0 +1,208 @@
+//! A newtype for a table row that enforces a fixed column count at runtime.
+//!
+//! This type ensures that all rows in a table have the same width, preventing accidental creation or mutation of rows with inconsistent lengths.
+//! It is especially useful for CSV or tabular data where rectangular invariants must be maintained, but the number of columns is only known at runtime.
+//! By using `TableRow`, we gain stronger guarantees and safer APIs compared to a bare `Vec<T>`, without requiring const generics.
+
+use std::{
+ any::type_name,
+ ops::{
+ Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive,
+ },
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct TableRow<T>(Vec<T>);
+
+impl<T> TableRow<T> {
+ pub fn from_element(element: T, length: usize) -> Self
+ where
+ T: Clone,
+ {
+ Self::from_vec(vec![element; length], length)
+ }
+
+ /// Constructs a `TableRow` from a `Vec<T>`, panicking if the length does not match `expected_length`.
+ ///
+ /// Use this when you want to ensure at construction time that the row has the correct number of columns.
+ /// This enforces the rectangular invariant for table data, preventing accidental creation of malformed rows.
+ ///
+ /// # Panics
+ /// Panics if `data.len() != expected_length`.
+ pub fn from_vec(data: Vec<T>, expected_length: usize) -> Self {
+ Self::try_from_vec(data, expected_length).unwrap_or_else(|e| {
+ let name = type_name::<Vec<T>>();
+ panic!("Expected {name} to be created successfully: {e}");
+ })
+ }
+
+ /// Attempts to construct a `TableRow` from a `Vec<T>`, returning an error if the length does not match `expected_len`.
+ ///
+ /// This is a fallible alternative to `from_vec`, allowing you to handle inconsistent row lengths gracefully.
+ /// Returns `Ok(TableRow)` if the length matches, or an `Err` with a descriptive message otherwise.
+ pub fn try_from_vec(data: Vec<T>, expected_len: usize) -> Result<Self, String> {
+ if data.len() != expected_len {
+ Err(format!(
+ "Row length {} does not match expected {}",
+ data.len(),
+ expected_len
+ ))
+ } else {
+ Ok(Self(data))
+ }
+ }
+
+ /// Returns reference to element by column index.
+ ///
+ /// # Panics
+ /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`).
+ pub fn expect_get(&self, col: impl Into<usize>) -> &T {
+ let col = col.into();
+ self.0.get(col).unwrap_or_else(|| {
+ panic!(
+ "Expected table row of `{}` to have {col:?}",
+ type_name::<T>()
+ )
+ })
+ }
+
+ pub fn get(&self, col: impl Into<usize>) -> Option<&T> {
+ self.0.get(col.into())
+ }
+
+ pub fn as_slice(&self) -> &[T] {
+ &self.0
+ }
+
+ pub fn into_vec(self) -> Vec<T> {
+ self.0
+ }
+
+ /// Like [`map`], but borrows the row and clones each element before mapping.
+ ///
+ /// This is useful when you want to map over a borrowed row without consuming it,
+ /// but your mapping function requires ownership of each element.
+ ///
+ /// # Difference
+ /// - `map_cloned` takes `&self`, clones each element, and applies `f(T) -> U`.
+ /// - [`map`] takes `self` by value and applies `f(T) -> U` directly, consuming the row.
+ /// - [`map_ref`] takes `&self` and applies `f(&T) -> U` to references of each element.
+ pub fn map_cloned<F, U>(&self, f: F) -> TableRow<U>
+ where
+ F: FnMut(T) -> U,
+ T: Clone,
+ {
+ self.clone().map(f)
+ }
+
+ /// Consumes the row and transforms all elements within it in a length-safe way.
+ ///
+ /// # Difference
+ /// - `map` takes ownership of the row (`self`) and applies `f(T) -> U` to each element.
+ /// - Use this when you want to transform and consume the row in one step.
+ /// - See also [`map_cloned`] (for mapping over a borrowed row with cloning) and [`map_ref`] (for mapping over references).
+ pub fn map<F, U>(self, f: F) -> TableRow<U>
+ where
+ F: FnMut(T) -> U,
+ {
+ TableRow(self.0.into_iter().map(f).collect())
+ }
+
+ /// Borrows the row and transforms all elements by reference in a length-safe way.
+ ///
+ /// # Difference
+ /// - `map_ref` takes `&self` and applies `f(&T) -> U` to each element by reference.
+ /// - Use this when you want to map over a borrowed row without cloning or consuming it.
+ /// - See also [`map`] (for consuming the row) and [`map_cloned`] (for mapping with cloning).
+ pub fn map_ref<F, U>(&self, f: F) -> TableRow<U>
+ where
+ F: FnMut(&T) -> U,
+ {
+ TableRow(self.0.iter().map(f).collect())
+ }
+
+ /// Number of columns (alias to `len()` with more semantic meaning)
+ pub fn cols(&self) -> usize {
+ self.0.len()
+ }
+}
+
+///// Convenience traits /////
+pub trait IntoTableRow<T> {
+ fn into_table_row(self, expected_length: usize) -> TableRow<T>;
+}
+impl<T> IntoTableRow<T> for Vec<T> {
+ fn into_table_row(self, expected_length: usize) -> TableRow<T> {
+ TableRow::from_vec(self, expected_length)
+ }
+}
+
+// Index implementations for convenient access
+impl<T> Index<usize> for TableRow<T> {
+ type Output = T;
+
+ fn index(&self, index: usize) -> &Self::Output {
+ &self.0[index]
+ }
+}
+
+impl<T> IndexMut<usize> for TableRow<T> {
+ fn index_mut(&mut self, index: usize) -> &mut Self::Output {
+ &mut self.0[index]
+ }
+}
+
+// Range indexing implementations for slice operations
+impl<T> Index<Range<usize>> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: Range<usize>) -> &Self::Output {
+ <Vec<T> as Index<Range<usize>>>::index(&self.0, index)
+ }
+}
+
+impl<T> Index<RangeFrom<usize>> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: RangeFrom<usize>) -> &Self::Output {
+ <Vec<T> as Index<RangeFrom<usize>>>::index(&self.0, index)
+ }
+}
+
+impl<T> Index<RangeTo<usize>> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: RangeTo<usize>) -> &Self::Output {
+ <Vec<T> as Index<RangeTo<usize>>>::index(&self.0, index)
+ }
+}
+
+impl<T> Index<RangeToInclusive<usize>> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: RangeToInclusive<usize>) -> &Self::Output {
+ <Vec<T> as Index<RangeToInclusive<usize>>>::index(&self.0, index)
+ }
+}
+
+impl<T> Index<RangeFull> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: RangeFull) -> &Self::Output {
+ <Vec<T> as Index<RangeFull>>::index(&self.0, index)
+ }
+}
+
+impl<T> Index<RangeInclusive<usize>> for TableRow<T> {
+ type Output = [T];
+
+ fn index(&self, index: RangeInclusive<usize>) -> &Self::Output {
+ <Vec<T> as Index<RangeInclusive<usize>>>::index(&self.0, index)
+ }
+}
+
+impl<T> IndexMut<RangeInclusive<usize>> for TableRow<T> {
+ fn index_mut(&mut self, index: RangeInclusive<usize>) -> &mut Self::Output {
+ <Vec<T> as IndexMut<RangeInclusive<usize>>>::index_mut(&mut self.0, index)
+ }
+}
@@ -0,0 +1,318 @@
+use super::*;
+
+fn is_almost_eq(a: &[f32], b: &[f32]) -> bool {
+ a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6)
+}
+
+fn cols_to_str(cols: &[f32], total_size: f32) -> String {
+ cols.iter()
+ .map(|f| "*".repeat(f32::round(f * total_size) as usize))
+ .collect::<Vec<String>>()
+ .join("|")
+}
+
+fn parse_resize_behavior(
+ input: &str,
+ total_size: f32,
+ expected_cols: usize,
+) -> Vec<TableResizeBehavior> {
+ let mut resize_behavior = Vec::with_capacity(expected_cols);
+ for col in input.split('|') {
+ if col.starts_with('X') || col.is_empty() {
+ resize_behavior.push(TableResizeBehavior::None);
+ } else if col.starts_with('*') {
+ resize_behavior.push(TableResizeBehavior::MinSize(col.len() as f32 / total_size));
+ } else {
+ panic!("invalid test input: unrecognized resize behavior: {}", col);
+ }
+ }
+
+ if resize_behavior.len() != expected_cols {
+ panic!(
+ "invalid test input: expected {} columns, got {}",
+ expected_cols,
+ resize_behavior.len()
+ );
+ }
+ resize_behavior
+}
+
+mod reset_column_size {
+ use super::*;
+
+ fn parse(input: &str) -> (Vec<f32>, f32, Option<usize>) {
+ let mut widths = Vec::new();
+ let mut column_index = None;
+ for (index, col) in input.split('|').enumerate() {
+ widths.push(col.len() as f32);
+ if col.starts_with('X') {
+ column_index = Some(index);
+ }
+ }
+
+ for w in &widths {
+ assert!(w.is_finite(), "incorrect number of columns");
+ }
+ let total = widths.iter().sum::<f32>();
+ for width in &mut widths {
+ *width /= total;
+ }
+ (widths, total, column_index)
+ }
+
+ #[track_caller]
+ fn check_reset_size(initial_sizes: &str, widths: &str, expected: &str, resize_behavior: &str) {
+ let (initial_sizes, total_1, None) = parse(initial_sizes) else {
+ panic!("invalid test input: initial sizes should not be marked");
+ };
+ let (widths, total_2, Some(column_index)) = parse(widths) else {
+ panic!("invalid test input: widths should be marked");
+ };
+ assert_eq!(
+ total_1, total_2,
+ "invalid test input: total width not the same {total_1}, {total_2}"
+ );
+ let (expected, total_3, None) = parse(expected) else {
+ panic!("invalid test input: expected should not be marked: {expected:?}");
+ };
+ assert_eq!(
+ total_2, total_3,
+ "invalid test input: total width not the same"
+ );
+ let cols = initial_sizes.len();
+ let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols);
+ let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols);
+ let result = TableColumnWidths::reset_to_initial_size(
+ column_index,
+ TableRow::from_vec(widths, cols),
+ TableRow::from_vec(initial_sizes, cols),
+ &resize_behavior,
+ );
+ let result_slice = result.as_slice();
+ let is_eq = is_almost_eq(result_slice, &expected);
+ if !is_eq {
+ let result_str = cols_to_str(result_slice, total_1);
+ let expected_str = cols_to_str(&expected, total_1);
+ panic!(
+ "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_slice:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}"
+ );
+ }
+ }
+
+ macro_rules! check_reset_size {
+ (columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => {
+ check_reset_size($initial, $current, $expected, $resizing);
+ };
+ ($name:ident, columns: $cols:expr, starting: $initial:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => {
+ #[test]
+ fn $name() {
+ check_reset_size($initial, $current, $expected, $resizing);
+ }
+ };
+ }
+
+ check_reset_size!(
+ basic_right,
+ columns: 5,
+ starting: "**|**|**|**|**",
+ snapshot: "**|**|X|***|**",
+ expected: "**|**|**|**|**",
+ minimums: "X|*|*|*|*",
+ );
+
+ check_reset_size!(
+ basic_left,
+ columns: 5,
+ starting: "**|**|**|**|**",
+ snapshot: "**|**|***|X|**",
+ expected: "**|**|**|**|**",
+ minimums: "X|*|*|*|**",
+ );
+
+ check_reset_size!(
+ squashed_left_reset_col2,
+ columns: 6,
+ starting: "*|***|**|**|****|*",
+ snapshot: "*|*|X|*|*|********",
+ expected: "*|*|**|*|*|*******",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ grow_cascading_right,
+ columns: 6,
+ starting: "*|***|****|**|***|*",
+ snapshot: "*|***|X|**|**|*****",
+ expected: "*|***|****|*|*|****",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ squashed_right_reset_col4,
+ columns: 6,
+ starting: "*|***|**|**|****|*",
+ snapshot: "*|********|*|*|X|*",
+ expected: "*|*****|*|*|****|*",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ reset_col6_right,
+ columns: 6,
+ starting: "*|***|**|***|***|**",
+ snapshot: "*|***|**|***|**|XXX",
+ expected: "*|***|**|***|***|**",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ reset_col6_left,
+ columns: 6,
+ starting: "*|***|**|***|***|**",
+ snapshot: "*|***|**|***|****|X",
+ expected: "*|***|**|***|***|**",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ last_column_grow_cascading,
+ columns: 6,
+ starting: "*|***|**|**|**|***",
+ snapshot: "*|*******|*|**|*|X",
+ expected: "*|******|*|*|*|***",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ goes_left_when_left_has_extreme_diff,
+ columns: 6,
+ starting: "*|***|****|**|**|***",
+ snapshot: "*|********|X|*|**|**",
+ expected: "*|*****|****|*|**|**",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ basic_shrink_right,
+ columns: 6,
+ starting: "**|**|**|**|**|**",
+ snapshot: "**|**|XXX|*|**|**",
+ expected: "**|**|**|**|**|**",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ shrink_should_go_left,
+ columns: 6,
+ starting: "*|***|**|*|*|*",
+ snapshot: "*|*|XXX|**|*|*",
+ expected: "*|**|**|**|*|*",
+ minimums: "X|*|*|*|*|*",
+ );
+
+ check_reset_size!(
+ shrink_should_go_right,
+ columns: 6,
+ starting: "*|***|**|**|**|*",
+ snapshot: "*|****|XXX|*|*|*",
+ expected: "*|****|**|**|*|*",
+ minimums: "X|*|*|*|*|*",
+ );
+}
+
+mod drag_handle {
+ use super::*;
+
+ fn parse(input: &str) -> (Vec<f32>, f32, Option<usize>) {
+ let mut widths = Vec::new();
+ let column_index = input.replace("*", "").find("I");
+ for col in input.replace("I", "|").split('|') {
+ widths.push(col.len() as f32);
+ }
+
+ for w in &widths {
+ assert!(w.is_finite(), "incorrect number of columns");
+ }
+ let total = widths.iter().sum::<f32>();
+ for width in &mut widths {
+ *width /= total;
+ }
+ (widths, total, column_index)
+ }
+
+ #[track_caller]
+ fn check(distance: i32, widths: &str, expected: &str, resize_behavior: &str) {
+ let (widths, total_1, Some(column_index)) = parse(widths) else {
+ panic!("invalid test input: widths should be marked");
+ };
+ let (expected, total_2, None) = parse(expected) else {
+ panic!("invalid test input: expected should not be marked: {expected:?}");
+ };
+ assert_eq!(
+ total_1, total_2,
+ "invalid test input: total width not the same"
+ );
+ let cols = widths.len();
+ let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols);
+ let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols);
+
+ let distance = distance as f32 / total_1;
+
+ let mut widths_table_row = TableRow::from_vec(widths, cols);
+ TableColumnWidths::drag_column_handle(
+ distance,
+ column_index,
+ &mut widths_table_row,
+ &resize_behavior,
+ );
+
+ let result_widths = widths_table_row.as_slice();
+ let is_eq = is_almost_eq(result_widths, &expected);
+ if !is_eq {
+ let result_str = cols_to_str(result_widths, total_1);
+ let expected_str = cols_to_str(&expected, total_1);
+ panic!(
+ "resize failed\ncomputed: {result_str}\nexpected: {expected_str}\n\ncomputed values: {result_widths:?}\nexpected values: {expected:?}\n:minimum widths: {resize_behavior:?}"
+ );
+ }
+ }
+
+ macro_rules! check {
+ (columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, resizing: $resizing:expr $(,)?) => {
+ check($dist, $current, $expected, $resizing);
+ };
+ ($name:ident, columns: $cols:expr, distance: $dist:expr, snapshot: $current:expr, expected: $expected:expr, minimums: $resizing:expr $(,)?) => {
+ #[test]
+ fn $name() {
+ check($dist, $current, $expected, $resizing);
+ }
+ };
+ }
+
+ check!(
+ basic_right_drag,
+ columns: 3,
+ distance: 1,
+ snapshot: "**|**I**",
+ expected: "**|***|*",
+ minimums: "X|*|*",
+ );
+
+ check!(
+ drag_left_against_mins,
+ columns: 5,
+ distance: -1,
+ snapshot: "*|*|*|*I*******",
+ expected: "*|*|*|*|*******",
+ minimums: "X|*|*|*|*",
+ );
+
+ check!(
+ drag_left,
+ columns: 5,
+ distance: -2,
+ snapshot: "*|*|*|*****I***",
+ expected: "*|*|*|***|*****",
+ minimums: "X|*|*|*|*",
+ );
+}
@@ -0,0 +1,88 @@
+use gpui::{Hsla, Pixels, SharedString, linear_color_stop, linear_gradient, px};
+
+use crate::prelude::*;
+
+/// A gradient overlay that fades from a solid color to transparent.
+#[derive(IntoElement)]
+pub struct GradientFade {
+ base_bg: Hsla,
+ hover_bg: Hsla,
+ active_bg: Hsla,
+ width: Pixels,
+ right: Pixels,
+ gradient_stop: f32,
+ group_name: Option<SharedString>,
+}
+
+impl GradientFade {
+ pub fn new(base_bg: Hsla, hover_bg: Hsla, active_bg: Hsla) -> Self {
+ Self {
+ base_bg,
+ hover_bg,
+ active_bg,
+ width: px(48.0),
+ right: px(0.0),
+ gradient_stop: 0.6,
+ group_name: None,
+ }
+ }
+
+ pub fn width(mut self, width: Pixels) -> Self {
+ self.width = width;
+ self
+ }
+
+ pub fn right(mut self, right: Pixels) -> Self {
+ self.right = right;
+ self
+ }
+
+ pub fn gradient_stop(mut self, stop: f32) -> Self {
+ self.gradient_stop = stop;
+ self
+ }
+
+ pub fn group_name(mut self, name: impl Into<SharedString>) -> Self {
+ self.group_name = Some(name.into());
+ self
+ }
+}
+
+impl RenderOnce for GradientFade {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ let stop = self.gradient_stop;
+ let hover_bg = self.hover_bg;
+ let active_bg = self.active_bg;
+
+ div()
+ .id("gradient_fade")
+ .absolute()
+ .top_0()
+ .right(self.right)
+ .w(self.width)
+ .h_full()
+ .bg(linear_gradient(
+ 90.,
+ linear_color_stop(self.base_bg, stop),
+ linear_color_stop(self.base_bg.opacity(0.0), 0.),
+ ))
+ .when_some(self.group_name.clone(), |element, group_name| {
+ element.group_hover(group_name, move |s| {
+ s.bg(linear_gradient(
+ 90.,
+ linear_color_stop(hover_bg, stop),
+ linear_color_stop(hover_bg.opacity(0.0), 0.),
+ ))
+ })
+ })
+ .when_some(self.group_name, |element, group_name| {
+ element.group_active(group_name, move |s| {
+ s.bg(linear_gradient(
+ 90.,
+ linear_color_stop(active_bg, stop),
+ linear_color_stop(active_bg.opacity(0.0), 0.),
+ ))
+ })
+ })
+ }
+}
@@ -4,7 +4,7 @@ use component::{Component, ComponentScope, example_group_with_title, single_exam
use gpui::{AnyElement, AnyView, ClickEvent, MouseButton, MouseDownEvent, Pixels, px};
use smallvec::SmallVec;
-use crate::{Disclosure, prelude::*};
+use crate::{Disclosure, GradientFade, prelude::*};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)]
pub enum ListItemSpacing {
@@ -209,6 +209,21 @@ impl ParentElement for ListItem {
impl RenderOnce for ListItem {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let color = cx.theme().colors();
+
+ let base_bg = if self.selected {
+ color.element_active
+ } else {
+ color.panel_background
+ };
+
+ let end_hover_gradient_overlay =
+ GradientFade::new(base_bg, color.element_hover, color.element_active)
+ .width(px(96.0))
+ .when_some(self.group_name.clone(), |fade, group| {
+ fade.group_name(group)
+ });
+
h_flex()
.id(self.id)
.when_some(self.group_name, |this, group| this.group(group))
@@ -220,25 +235,22 @@ impl RenderOnce for ListItem {
.px(DynamicSpacing::Base04.rems(cx))
})
.when(!self.inset && !self.disabled, |this| {
- this
- // TODO: Add focus state
- // .when(self.state == InteractionState::Focused, |this| {
- .when_some(self.focused, |this, focused| {
- if focused {
- this.border_1()
- .border_color(cx.theme().colors().border_focused)
- } else {
- this.border_1()
- }
- })
- .when(self.selectable, |this| {
- this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover))
- .active(|style| style.bg(cx.theme().colors().ghost_element_active))
- .when(self.outlined, |this| this.rounded_sm())
- .when(self.selected, |this| {
- this.bg(cx.theme().colors().ghost_element_selected)
- })
- })
+ this.when_some(self.focused, |this, focused| {
+ if focused {
+ this.border_1()
+ .border_color(cx.theme().colors().border_focused)
+ } else {
+ this.border_1()
+ }
+ })
+ .when(self.selectable, |this| {
+ this.hover(|style| style.bg(cx.theme().colors().ghost_element_hover))
+ .active(|style| style.bg(cx.theme().colors().ghost_element_active))
+ .when(self.outlined, |this| this.rounded_sm())
+ .when(self.selected, |this| {
+ this.bg(cx.theme().colors().ghost_element_selected)
+ })
+ })
})
.when(self.rounded, |this| this.rounded_sm())
.when_some(self.on_hover, |this, on_hover| this.on_hover(on_hover))
@@ -350,6 +362,7 @@ impl RenderOnce for ListItem {
.right(DynamicSpacing::Base06.rems(cx))
.top_0()
.visible_on_hover("list_item")
+ .child(end_hover_gradient_overlay)
.child(end_hover_slot),
)
}),
@@ -1041,7 +1041,18 @@ impl ScrollbarLayout {
impl PartialEq for ScrollbarLayout {
fn eq(&self, other: &Self) -> bool {
- self.axis == other.axis && self.thumb_bounds == other.thumb_bounds
+ if self.axis != other.axis {
+ return false;
+ }
+
+ let axis = self.axis;
+ let thumb_offset =
+ self.thumb_bounds.origin.along(axis) - self.track_bounds.origin.along(axis);
+ let other_thumb_offset =
+ other.thumb_bounds.origin.along(axis) - other.track_bounds.origin.along(axis);
+
+ thumb_offset == other_thumb_offset
+ && self.thumb_bounds.size.along(axis) == other.thumb_bounds.size.along(axis)
}
}
@@ -64,7 +64,6 @@ tendril = "0.4.3"
[dev-dependencies]
git2.workspace = true
-indoc.workspace = true
rand.workspace = true
util_macros.workspace = true
pretty_assertions.workspace = true
@@ -54,11 +54,9 @@ workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
-assets.workspace = true
command_palette = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
git_ui = { workspace = true, features = ["test-support"] }
-title_bar = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
@@ -19,5 +19,4 @@ parking_lot.workspace = true
ctor.workspace = true
futures.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-rand.workspace = true
zlog.workspace = true
@@ -72,7 +72,6 @@ windows.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
-dap = { workspace = true, features = ["test-support"] }
db = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
@@ -50,6 +50,8 @@ pub trait Sidebar: EventEmitter<SidebarEvent> + Focusable + Render + Sized {
fn width(&self, cx: &App) -> Pixels;
fn set_width(&mut self, width: Option<Pixels>, cx: &mut Context<Self>);
fn has_notifications(&self, cx: &App) -> bool;
+ fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App);
+ fn is_recent_projects_popover_deployed(&self) -> bool;
}
pub trait SidebarHandle: 'static + Send + Sync {
@@ -60,6 +62,8 @@ pub trait SidebarHandle: 'static + Send + Sync {
fn has_notifications(&self, cx: &App) -> bool;
fn to_any(&self) -> AnyView;
fn entity_id(&self) -> EntityId;
+ fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App);
+ fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool;
}
#[derive(Clone)]
@@ -100,6 +104,16 @@ impl<T: Sidebar> SidebarHandle for Entity<T> {
fn entity_id(&self) -> EntityId {
Entity::entity_id(self)
}
+
+ fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) {
+ self.update(cx, |this, cx| {
+ this.toggle_recent_projects_popover(window, cx);
+ });
+ }
+
+ fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool {
+ self.read(cx).is_recent_projects_popover_deployed()
+ }
}
pub struct MultiWorkspace {
@@ -187,6 +201,18 @@ impl MultiWorkspace {
.map_or(false, |s| s.has_notifications(cx))
}
+ pub fn toggle_recent_projects_popover(&self, window: &mut Window, cx: &mut App) {
+ if let Some(sidebar) = &self.sidebar {
+ sidebar.toggle_recent_projects_popover(window, cx);
+ }
+ }
+
+ pub fn is_recent_projects_popover_deployed(&self, cx: &App) -> bool {
+ self.sidebar
+ .as_ref()
+ .map_or(false, |s| s.is_recent_projects_popover_deployed(cx))
+ }
+
pub fn multi_workspace_enabled(&self, cx: &App) -> bool {
cx.has_flag::<AgentV2FeatureFlag>() && !DisableAiSettings::get_global(cx).disable_ai
}
@@ -10,8 +10,10 @@ use gpui::{
ParentElement, Render, Styled, Task, Window, actions,
};
use menu::{SelectNext, SelectPrevious};
+use project::DisableAiSettings;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use settings::Settings;
use ui::{ButtonLike, Divider, DividerColor, KeyBinding, Vector, VectorName, prelude::*};
use util::ResultExt;
use zed_actions::{Extensions, OpenOnboarding, OpenSettings, agent, command_palette};
@@ -121,21 +123,43 @@ impl RenderOnce for SectionButton {
}
}
+enum SectionVisibility {
+ Always,
+ Conditional(fn(&App) -> bool),
+}
+
+impl SectionVisibility {
+ fn is_visible(&self, cx: &App) -> bool {
+ match self {
+ SectionVisibility::Always => true,
+ SectionVisibility::Conditional(f) => f(cx),
+ }
+ }
+}
+
struct SectionEntry {
icon: IconName,
title: &'static str,
action: &'static dyn Action,
+ visibility_guard: SectionVisibility,
}
impl SectionEntry {
- fn render(&self, button_index: usize, focus: &FocusHandle, _cx: &App) -> impl IntoElement {
- SectionButton::new(
- self.title,
- self.icon,
- self.action,
- button_index,
- focus.clone(),
- )
+ fn render(
+ &self,
+ button_index: usize,
+ focus: &FocusHandle,
+ cx: &App,
+ ) -> Option<impl IntoElement> {
+ self.visibility_guard.is_visible(cx).then(|| {
+ SectionButton::new(
+ self.title,
+ self.icon,
+ self.action,
+ button_index,
+ focus.clone(),
+ )
+ })
}
}
@@ -147,21 +171,25 @@ const CONTENT: (Section<4>, Section<3>) = (
icon: IconName::Plus,
title: "New File",
action: &NewFile,
+ visibility_guard: SectionVisibility::Always,
},
SectionEntry {
icon: IconName::FolderOpen,
title: "Open Project",
action: &Open::DEFAULT,
+ visibility_guard: SectionVisibility::Always,
},
SectionEntry {
icon: IconName::CloudDownload,
title: "Clone Repository",
action: &GitClone,
+ visibility_guard: SectionVisibility::Always,
},
SectionEntry {
icon: IconName::ListCollapse,
title: "Open Command Palette",
action: &command_palette::Toggle,
+ visibility_guard: SectionVisibility::Always,
},
],
},
@@ -172,11 +200,15 @@ const CONTENT: (Section<4>, Section<3>) = (
icon: IconName::Settings,
title: "Open Settings",
action: &OpenSettings,
+ visibility_guard: SectionVisibility::Always,
},
SectionEntry {
icon: IconName::ZedAssistant,
title: "View AI Settings",
action: &agent::OpenSettings,
+ visibility_guard: SectionVisibility::Conditional(|cx| {
+ !DisableAiSettings::get_global(cx).disable_ai
+ }),
},
SectionEntry {
icon: IconName::Blocks,
@@ -185,6 +217,7 @@ const CONTENT: (Section<4>, Section<3>) = (
category_filter: None,
id: None,
},
+ visibility_guard: SectionVisibility::Always,
},
],
},
@@ -204,7 +237,7 @@ impl<const COLS: usize> Section<COLS> {
self.entries
.iter()
.enumerate()
- .map(|(index, entry)| entry.render(index_offset + index, focus, cx)),
+ .filter_map(|(index, entry)| entry.render(index_offset + index, focus, cx)),
)
}
}
@@ -21,7 +21,7 @@ workspace = true
[features]
test-support = [
"gpui/test-support",
- "http_client/test-support",
+
"language/test-support",
"pretty_assertions",
"settings/test-support",
@@ -63,9 +63,7 @@ ztracing.workspace = true
[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
-git2.workspace = true
gpui = { workspace = true, features = ["test-support"] }
-http_client.workspace = true
paths = { workspace = true, features = ["test-support"] }
rand.workspace = true
rpc = { workspace = true, features = ["test-support"] }
@@ -243,7 +243,6 @@ pkg-config = "0.3.22"
[dev-dependencies]
call = { workspace = true, features = ["test-support"] }
-dap = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
image_viewer = { workspace = true, features = ["test-support"] }
@@ -253,8 +252,6 @@ pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
semver.workspace = true
terminal_view = { workspace = true, features = ["test-support"] }
-tree-sitter-md.workspace = true
-tree-sitter-rust.workspace = true
title_bar = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
image.workspace = true
@@ -43,12 +43,28 @@ fn main() {
"cargo:rustc-env=TARGET={}",
std::env::var("TARGET").unwrap()
);
- if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output()
- && output.status.success()
- {
- let git_sha = String::from_utf8_lossy(&output.stdout);
- let git_sha = git_sha.trim();
+ let git_sha = match std::env::var("ZED_COMMIT_SHA").ok() {
+ Some(git_sha) => {
+ // In deterministic build environments such as Nix, we inject the commit sha into the build script.
+ Some(git_sha)
+ }
+ None => {
+ if let Some(output) = Command::new("git")
+ .args(["rev-parse", "HEAD"])
+ .output()
+ .ok()
+ && output.status.success()
+ {
+ let git_sha = String::from_utf8_lossy(&output.stdout);
+ Some(git_sha.trim().to_string())
+ } else {
+ None
+ }
+ }
+ };
+
+ if let Some(git_sha) = git_sha {
println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
if let Some(build_identifier) = option_env!("GITHUB_RUN_NUMBER") {
@@ -285,7 +285,7 @@ fn main() {
zlog::init();
- if true {
+ if stdout_is_a_pty() {
zlog::init_output_stdout();
} else {
let result = zlog::init_output_file(paths::log_file(), Some(paths::old_log_file()));
@@ -0,0 +1,443 @@
+use std::ops::Range;
+
+use serde::{Deserialize, Serialize};
+
+use crate::estimate_tokens;
+
+/// Pre-computed byte offset ranges within `cursor_excerpt` for different
+/// editable and context token budgets. Allows the server to select the
+/// appropriate ranges for whichever model it uses.
+#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)]
+pub struct ExcerptRanges {
+ /// Editable region computed with a 150-token budget.
+ pub editable_150: Range<usize>,
+ /// Editable region computed with a 180-token budget.
+ pub editable_180: Range<usize>,
+ /// Editable region computed with a 350-token budget.
+ pub editable_350: Range<usize>,
+    /// Editable region computed with a 512-token budget.
+ pub editable_512: Option<Range<usize>>,
+ /// Context boundary when using editable_150 with 350 tokens of additional context.
+ pub editable_150_context_350: Range<usize>,
+ /// Context boundary when using editable_180 with 350 tokens of additional context.
+ pub editable_180_context_350: Range<usize>,
+ /// Context boundary when using editable_350 with 150 tokens of additional context.
+ pub editable_350_context_150: Range<usize>,
+ pub editable_350_context_512: Option<Range<usize>>,
+ pub editable_350_context_1024: Option<Range<usize>>,
+ pub context_4096: Option<Range<usize>>,
+ pub context_8192: Option<Range<usize>>,
+}
+
+/// Builds an `ExcerptRanges` by computing editable and context ranges for each
+/// budget combination, using the syntax-aware logic in
+/// `compute_editable_and_context_ranges`.
+pub fn compute_legacy_excerpt_ranges(
+ cursor_excerpt: &str,
+ cursor_offset: usize,
+ syntax_ranges: &[Range<usize>],
+) -> ExcerptRanges {
+ let compute = |editable_tokens, context_tokens| {
+ compute_editable_and_context_ranges(
+ cursor_excerpt,
+ cursor_offset,
+ syntax_ranges,
+ editable_tokens,
+ context_tokens,
+ )
+ };
+
+ let (editable_150, editable_150_context_350) = compute(150, 350);
+ let (editable_180, editable_180_context_350) = compute(180, 350);
+ let (editable_350, editable_350_context_150) = compute(350, 150);
+ let (editable_512, _) = compute(512, 0);
+ let (_, editable_350_context_512) = compute(350, 512);
+ let (_, editable_350_context_1024) = compute(350, 1024);
+ let (_, context_4096) = compute(350, 4096);
+ let (_, context_8192) = compute(350, 8192);
+
+ ExcerptRanges {
+ editable_150,
+ editable_180,
+ editable_350,
+ editable_512: Some(editable_512),
+ editable_150_context_350,
+ editable_180_context_350,
+ editable_350_context_150,
+ editable_350_context_512: Some(editable_350_context_512),
+ editable_350_context_1024: Some(editable_350_context_1024),
+ context_4096: Some(context_4096),
+ context_8192: Some(context_8192),
+ }
+}
+
+/// Given the cursor excerpt text, cursor offset, and the syntax node ranges
+/// containing the cursor (innermost to outermost), compute the editable range
+/// and context range as byte offset ranges within `cursor_excerpt`.
+///
+/// This is the server-side equivalent of `compute_excerpt_ranges` in
+/// `edit_prediction::cursor_excerpt`, but operates on plain text with
+/// pre-computed syntax boundaries instead of a `BufferSnapshot`.
+pub fn compute_editable_and_context_ranges(
+ cursor_excerpt: &str,
+ cursor_offset: usize,
+ syntax_ranges: &[Range<usize>],
+ editable_token_limit: usize,
+ context_token_limit: usize,
+) -> (Range<usize>, Range<usize>) {
+ let line_starts = compute_line_starts(cursor_excerpt);
+ let cursor_row = offset_to_row(&line_starts, cursor_offset);
+ let max_row = line_starts.len().saturating_sub(1) as u32;
+
+ let editable_range = compute_editable_range_from_text(
+ cursor_excerpt,
+ &line_starts,
+ cursor_row,
+ max_row,
+ syntax_ranges,
+ editable_token_limit,
+ );
+
+ let context_range = expand_context_from_text(
+ cursor_excerpt,
+ &line_starts,
+ max_row,
+ &editable_range,
+ syntax_ranges,
+ context_token_limit,
+ );
+
+ (editable_range, context_range)
+}
+
+fn compute_line_starts(text: &str) -> Vec<usize> {
+ let mut starts = vec![0];
+ for (index, byte) in text.bytes().enumerate() {
+ if byte == b'\n' {
+ starts.push(index + 1);
+ }
+ }
+ starts
+}
+
+fn offset_to_row(line_starts: &[usize], offset: usize) -> u32 {
+ match line_starts.binary_search(&offset) {
+ Ok(row) => row as u32,
+ Err(row) => (row.saturating_sub(1)) as u32,
+ }
+}
+
+fn row_start_offset(line_starts: &[usize], row: u32) -> usize {
+ line_starts.get(row as usize).copied().unwrap_or(0)
+}
+
+fn row_end_offset(text: &str, line_starts: &[usize], row: u32) -> usize {
+ if let Some(&next_start) = line_starts.get(row as usize + 1) {
+ // End before the newline of this row.
+ next_start.saturating_sub(1).min(text.len())
+ } else {
+ text.len()
+ }
+}
+
+fn row_range_to_byte_range(
+ text: &str,
+ line_starts: &[usize],
+ start_row: u32,
+ end_row: u32,
+) -> Range<usize> {
+ let start = row_start_offset(line_starts, start_row);
+ let end = row_end_offset(text, line_starts, end_row);
+ start..end
+}
+
+fn estimate_tokens_for_row_range(
+ text: &str,
+ line_starts: &[usize],
+ start_row: u32,
+ end_row: u32,
+) -> usize {
+ let mut tokens = 0;
+ for row in start_row..end_row {
+ let row_len = row_end_offset(text, line_starts, row)
+ .saturating_sub(row_start_offset(line_starts, row));
+ tokens += estimate_tokens(row_len).max(1);
+ }
+ tokens
+}
+
+fn line_token_count_from_text(text: &str, line_starts: &[usize], row: u32) -> usize {
+ let row_len =
+ row_end_offset(text, line_starts, row).saturating_sub(row_start_offset(line_starts, row));
+ estimate_tokens(row_len).max(1)
+}
+
+/// Returns syntax boundaries (as row ranges) that contain the given row range
+/// and extend beyond it, ordered from smallest to largest.
+fn containing_syntax_boundaries_from_ranges(
+ line_starts: &[usize],
+ syntax_ranges: &[Range<usize>],
+ start_row: u32,
+ end_row: u32,
+) -> Vec<(u32, u32)> {
+ let mut boundaries = Vec::new();
+ let mut last: Option<(u32, u32)> = None;
+
+ // syntax_ranges is innermost to outermost, so iterate in order.
+ for range in syntax_ranges {
+ let node_start_row = offset_to_row(line_starts, range.start);
+ let node_end_row = offset_to_row(line_starts, range.end);
+
+ // Skip nodes that don't extend beyond the current range.
+ if node_start_row >= start_row && node_end_row <= end_row {
+ continue;
+ }
+
+ let rows = (node_start_row, node_end_row);
+ if last == Some(rows) {
+ continue;
+ }
+
+ last = Some(rows);
+ boundaries.push(rows);
+ }
+
+ boundaries
+}
+
+fn compute_editable_range_from_text(
+ text: &str,
+ line_starts: &[usize],
+ cursor_row: u32,
+ max_row: u32,
+ syntax_ranges: &[Range<usize>],
+ token_limit: usize,
+) -> Range<usize> {
+ // Phase 1: Expand symmetrically from cursor using 75% of budget.
+ let initial_budget = (token_limit * 3) / 4;
+ let (mut start_row, mut end_row, mut remaining_tokens) =
+ expand_symmetric(text, line_starts, cursor_row, max_row, initial_budget);
+
+ remaining_tokens += token_limit.saturating_sub(initial_budget);
+
+ let original_start = start_row;
+ let original_end = end_row;
+
+ // Phase 2: Expand to syntax boundaries that fit within budget.
+ let boundaries =
+ containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row);
+ for (boundary_start, boundary_end) in &boundaries {
+ let tokens_for_start = if *boundary_start < start_row {
+ estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row)
+ } else {
+ 0
+ };
+ let tokens_for_end = if *boundary_end > end_row {
+ estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1)
+ } else {
+ 0
+ };
+
+ let total_needed = tokens_for_start + tokens_for_end;
+ if total_needed <= remaining_tokens {
+ if *boundary_start < start_row {
+ start_row = *boundary_start;
+ }
+ if *boundary_end > end_row {
+ end_row = *boundary_end;
+ }
+ remaining_tokens = remaining_tokens.saturating_sub(total_needed);
+ } else {
+ break;
+ }
+ }
+
+ // Phase 3: Continue line-wise in the direction we expanded least.
+ let expanded_up = original_start.saturating_sub(start_row);
+ let expanded_down = end_row.saturating_sub(original_end);
+ let prefer_up = expanded_up <= expanded_down;
+
+ (start_row, end_row, _) = expand_linewise(
+ text,
+ line_starts,
+ start_row,
+ end_row,
+ max_row,
+ remaining_tokens,
+ prefer_up,
+ );
+
+ row_range_to_byte_range(text, line_starts, start_row, end_row)
+}
+
+fn expand_context_from_text(
+ text: &str,
+ line_starts: &[usize],
+ max_row: u32,
+ editable_range: &Range<usize>,
+ syntax_ranges: &[Range<usize>],
+ context_token_limit: usize,
+) -> Range<usize> {
+ let mut start_row = offset_to_row(line_starts, editable_range.start);
+ let mut end_row = offset_to_row(line_starts, editable_range.end);
+ let mut remaining_tokens = context_token_limit;
+ let mut did_syntax_expand = false;
+
+ let boundaries =
+ containing_syntax_boundaries_from_ranges(line_starts, syntax_ranges, start_row, end_row);
+ for (boundary_start, boundary_end) in &boundaries {
+ let tokens_for_start = if *boundary_start < start_row {
+ estimate_tokens_for_row_range(text, line_starts, *boundary_start, start_row)
+ } else {
+ 0
+ };
+ let tokens_for_end = if *boundary_end > end_row {
+ estimate_tokens_for_row_range(text, line_starts, end_row + 1, *boundary_end + 1)
+ } else {
+ 0
+ };
+
+ let total_needed = tokens_for_start + tokens_for_end;
+ if total_needed <= remaining_tokens {
+ if *boundary_start < start_row {
+ start_row = *boundary_start;
+ }
+ if *boundary_end > end_row {
+ end_row = *boundary_end;
+ }
+ remaining_tokens = remaining_tokens.saturating_sub(total_needed);
+ did_syntax_expand = true;
+ } else {
+ break;
+ }
+ }
+
+ // Only expand line-wise if no syntax expansion occurred.
+ if !did_syntax_expand {
+ (start_row, end_row, _) = expand_linewise(
+ text,
+ line_starts,
+ start_row,
+ end_row,
+ max_row,
+ remaining_tokens,
+ true,
+ );
+ }
+
+ row_range_to_byte_range(text, line_starts, start_row, end_row)
+}
+
+fn expand_symmetric(
+ text: &str,
+ line_starts: &[usize],
+ cursor_row: u32,
+ max_row: u32,
+ mut token_budget: usize,
+) -> (u32, u32, usize) {
+ let mut start_row = cursor_row;
+ let mut end_row = cursor_row;
+
+ let cursor_line_tokens = line_token_count_from_text(text, line_starts, cursor_row);
+ token_budget = token_budget.saturating_sub(cursor_line_tokens);
+
+ loop {
+ let can_expand_up = start_row > 0;
+ let can_expand_down = end_row < max_row;
+
+ if token_budget == 0 || (!can_expand_up && !can_expand_down) {
+ break;
+ }
+
+ if can_expand_down {
+ let next_row = end_row + 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= token_budget {
+ end_row = next_row;
+ token_budget = token_budget.saturating_sub(line_tokens);
+ } else {
+ break;
+ }
+ }
+
+ if can_expand_up && token_budget > 0 {
+ let next_row = start_row - 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= token_budget {
+ start_row = next_row;
+ token_budget = token_budget.saturating_sub(line_tokens);
+ } else {
+ break;
+ }
+ }
+ }
+
+ (start_row, end_row, token_budget)
+}
+
+fn expand_linewise(
+ text: &str,
+ line_starts: &[usize],
+ mut start_row: u32,
+ mut end_row: u32,
+ max_row: u32,
+ mut remaining_tokens: usize,
+ prefer_up: bool,
+) -> (u32, u32, usize) {
+ loop {
+ let can_expand_up = start_row > 0;
+ let can_expand_down = end_row < max_row;
+
+ if remaining_tokens == 0 || (!can_expand_up && !can_expand_down) {
+ break;
+ }
+
+ let mut expanded = false;
+
+ if prefer_up {
+ if can_expand_up {
+ let next_row = start_row - 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= remaining_tokens {
+ start_row = next_row;
+ remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
+ expanded = true;
+ }
+ }
+ if can_expand_down && remaining_tokens > 0 {
+ let next_row = end_row + 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= remaining_tokens {
+ end_row = next_row;
+ remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
+ expanded = true;
+ }
+ }
+ } else {
+ if can_expand_down {
+ let next_row = end_row + 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= remaining_tokens {
+ end_row = next_row;
+ remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
+ expanded = true;
+ }
+ }
+ if can_expand_up && remaining_tokens > 0 {
+ let next_row = start_row - 1;
+ let line_tokens = line_token_count_from_text(text, line_starts, next_row);
+ if line_tokens <= remaining_tokens {
+ start_row = next_row;
+ remaining_tokens = remaining_tokens.saturating_sub(line_tokens);
+ expanded = true;
+ }
+ }
+ }
+
+ if !expanded {
+ break;
+ }
+ }
+
+ (start_row, end_row, remaining_tokens)
+}
@@ -1,3 +1,5 @@
+pub mod excerpt_ranges;
+
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use std::fmt::Write;
@@ -6,6 +8,10 @@ use std::path::Path;
use std::sync::Arc;
use strum::{EnumIter, IntoEnumIterator as _, IntoStaticStr};
+pub use crate::excerpt_ranges::{
+ ExcerptRanges, compute_editable_and_context_ranges, compute_legacy_excerpt_ranges,
+};
+
pub const CURSOR_MARKER: &str = "<|user_cursor|>";
pub const MAX_PROMPT_TOKENS: usize = 4096;
@@ -18,31 +24,6 @@ fn estimate_tokens(bytes: usize) -> usize {
bytes / 3
}
-/// Pre-computed byte offset ranges within `cursor_excerpt` for different
-/// editable and context token budgets. Allows the server to select the
-/// appropriate ranges for whichever model it uses.
-#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)]
-pub struct ExcerptRanges {
- /// Editable region computed with a 150-token budget.
- pub editable_150: Range<usize>,
- /// Editable region computed with a 180-token budget.
- pub editable_180: Range<usize>,
- /// Editable region computed with a 350-token budget.
- pub editable_350: Range<usize>,
- /// Editable region computed with a 350-token budget.
- pub editable_512: Option<Range<usize>>,
- /// Context boundary when using editable_150 with 350 tokens of additional context.
- pub editable_150_context_350: Range<usize>,
- /// Context boundary when using editable_180 with 350 tokens of additional context.
- pub editable_180_context_350: Range<usize>,
- /// Context boundary when using editable_350 with 150 tokens of additional context.
- pub editable_350_context_150: Range<usize>,
- pub editable_350_context_512: Option<Range<usize>>,
- pub editable_350_context_1024: Option<Range<usize>>,
- pub context_4096: Option<Range<usize>>,
- pub context_8192: Option<Range<usize>>,
-}
-
#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
pub struct ZetaPromptInput {
pub cursor_path: Arc<Path>,
@@ -51,9 +32,18 @@ pub struct ZetaPromptInput {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub excerpt_start_row: Option<u32>,
pub events: Vec<Arc<Event>>,
- pub related_files: Vec<RelatedFile>,
+ #[serde(default)]
+ pub related_files: Option<Vec<RelatedFile>>,
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
+ pub active_buffer_diagnostics: Vec<ActiveBufferDiagnostic>,
/// These ranges let the server select model-appropriate subsets.
pub excerpt_ranges: ExcerptRanges,
+ /// Byte offset ranges within `cursor_excerpt` for all syntax nodes that
+ /// contain `cursor_offset_in_excerpt`, ordered from innermost to outermost.
+ /// When present, the server uses these to compute editable/context ranges
+ /// instead of `excerpt_ranges`.
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub syntax_ranges: Option<Vec<Range<usize>>>,
/// The name of the edit prediction model experiment to use.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub experiment: Option<String>,
@@ -180,6 +170,15 @@ pub fn write_event(prompt: &mut String, event: &Event) {
}
}
+#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
+pub struct ActiveBufferDiagnostic {
+ pub severity: Option<i32>,
+ pub message: String,
+ pub snippet: String,
+ pub snippet_buffer_row_range: Range<u32>,
+ pub diagnostic_range_in_snippet: Range<usize>,
+}
+
#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
pub struct RelatedFile {
pub path: Arc<Path>,
@@ -222,6 +221,36 @@ pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str]
}
}
+/// Returns the (editable_token_limit, context_token_limit) for a given format.
+pub fn token_limits_for_format(format: ZetaFormat) -> (usize, usize) {
+ match format {
+ ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => (150, 350),
+ ZetaFormat::V0114180EditableRegion => (180, 350),
+ ZetaFormat::V0120GitMergeMarkers
+ | ZetaFormat::V0131GitMergeMarkersPrefix
+ | ZetaFormat::V0211Prefill
+ | ZetaFormat::V0211SeedCoder
+ | ZetaFormat::v0226Hashline
+ | ZetaFormat::V0304SeedNoEdits => (350, 150),
+ ZetaFormat::V0304VariableEdit => (1024, 0),
+ }
+}
+
+pub fn stop_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] {
+ match format {
+ ZetaFormat::v0226Hashline => &[hashline::NO_EDITS_COMMAND_MARKER],
+ ZetaFormat::V0112MiddleAtEnd
+ | ZetaFormat::V0113Ordered
+ | ZetaFormat::V0114180EditableRegion
+ | ZetaFormat::V0120GitMergeMarkers
+ | ZetaFormat::V0131GitMergeMarkersPrefix
+ | ZetaFormat::V0211Prefill
+ | ZetaFormat::V0211SeedCoder
+ | ZetaFormat::V0304VariableEdit
+ | ZetaFormat::V0304SeedNoEdits => &[],
+ }
+}
+
pub fn excerpt_ranges_for_format(
format: ZetaFormat,
ranges: &ExcerptRanges,
@@ -246,8 +275,9 @@ pub fn excerpt_ranges_for_format(
),
ZetaFormat::V0304VariableEdit => {
let context = ranges
- .context_8192
+ .editable_350_context_1024
.clone()
+ .or(ranges.editable_350_context_512.clone())
.unwrap_or_else(|| ranges.editable_350_context_150.clone());
(context.clone(), context)
}
@@ -335,17 +365,19 @@ pub fn format_prompt_with_budget_for_format(
resolve_cursor_region(input, format);
let path = &*input.cursor_path;
+ let empty_files = Vec::new();
+ let input_related_files = input.related_files.as_deref().unwrap_or(&empty_files);
let related_files = if let Some(cursor_excerpt_start_row) = input.excerpt_start_row {
let relative_row_range = offset_range_to_row_range(&input.cursor_excerpt, context_range);
let row_range = relative_row_range.start + cursor_excerpt_start_row
..relative_row_range.end + cursor_excerpt_start_row;
&filter_redundant_excerpts(
- input.related_files.clone(),
+ input_related_files.to_vec(),
input.cursor_path.as_ref(),
row_range,
)
} else {
- &input.related_files
+ input_related_files
};
match format {
@@ -534,7 +566,18 @@ pub fn resolve_cursor_region(
input: &ZetaPromptInput,
format: ZetaFormat,
) -> (&str, Range<usize>, Range<usize>, usize) {
- let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges);
+ let (editable_range, context_range) = if let Some(syntax_ranges) = &input.syntax_ranges {
+ let (editable_tokens, context_tokens) = token_limits_for_format(format);
+ compute_editable_and_context_ranges(
+ &input.cursor_excerpt,
+ input.cursor_offset_in_excerpt,
+ syntax_ranges,
+ editable_tokens,
+ context_tokens,
+ )
+ } else {
+ excerpt_range_for_format(format, &input.excerpt_ranges)
+ };
let context_start = context_range.start;
let context_text = &input.cursor_excerpt[context_range.clone()];
let adjusted_editable =
@@ -1010,12 +1053,14 @@ pub mod hashline {
const SET_COMMAND_MARKER: &str = "<|set|>";
const INSERT_COMMAND_MARKER: &str = "<|insert|>";
+ pub const NO_EDITS_COMMAND_MARKER: &str = "<|no_edits|>";
pub fn special_tokens() -> &'static [&'static str] {
return &[
SET_COMMAND_MARKER,
"<|set_range|>",
INSERT_COMMAND_MARKER,
+ NO_EDITS_COMMAND_MARKER,
CURSOR_MARKER,
"<|file_sep|>",
"<|fim_prefix|>",
@@ -1109,6 +1154,7 @@ pub mod hashline {
}
prompt.push_str(END_MARKER);
+ prompt.push('\n');
}
/// A single edit command parsed from the model output.
@@ -1234,7 +1280,9 @@ pub mod hashline {
}
pub fn output_has_edit_commands(model_output: &str) -> bool {
- model_output.contains(SET_COMMAND_MARKER) || model_output.contains(INSERT_COMMAND_MARKER)
+ model_output.contains(SET_COMMAND_MARKER)
+ || model_output.contains(INSERT_COMMAND_MARKER)
+ || model_output.contains(NO_EDITS_COMMAND_MARKER)
}
/// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the
@@ -1245,6 +1293,13 @@ pub mod hashline {
///
/// Returns the full replacement text for the editable region.
pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String {
+ if model_output
+ .trim_start()
+ .starts_with(NO_EDITS_COMMAND_MARKER)
+ {
+ return editable_region.to_string();
+ }
+
let original_lines: Vec<&str> = editable_region.lines().collect();
let old_hashes: Vec<u8> = original_lines
.iter()
@@ -1549,6 +1604,10 @@ pub mod hashline {
result.pop();
}
+ if result.is_empty() {
+ return Ok(NO_EDITS_COMMAND_MARKER.to_string());
+ }
+
Ok(result)
}
@@ -1579,7 +1638,8 @@ pub mod hashline {
<|fim_middle|>current
0:5c|hello<|user_cursor|> world
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "multiline_cursor_on_second_line",
@@ -1594,7 +1654,8 @@ pub mod hashline {
1:26|b<|user_cursor|>bb
2:29|ccc
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "no_trailing_newline_in_context",
@@ -1608,7 +1669,8 @@ pub mod hashline {
0:d9|lin<|user_cursor|>e1
1:da|line2
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "leading_newline_in_editable_region",
@@ -1622,7 +1684,8 @@ pub mod hashline {
0:00|
1:26|a<|user_cursor|>bc
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "with_suffix",
@@ -1636,7 +1699,8 @@ pub mod hashline {
0:26|ab<|user_cursor|>c
<|fim_suffix|>
def
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "unicode_two_byte_chars",
@@ -1649,7 +1713,8 @@ pub mod hashline {
<|fim_middle|>current
0:1b|hé<|user_cursor|>llo
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "unicode_three_byte_chars",
@@ -1662,7 +1727,8 @@ pub mod hashline {
<|fim_middle|>current
0:80|日本<|user_cursor|>語
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "unicode_four_byte_chars",
@@ -1675,7 +1741,8 @@ pub mod hashline {
<|fim_middle|>current
0:6b|a🌍<|user_cursor|>b
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "cursor_at_start_of_region_not_placed",
@@ -1688,7 +1755,8 @@ pub mod hashline {
<|fim_middle|>current
0:26|abc
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "cursor_at_end_of_line_not_placed",
@@ -1702,7 +1770,8 @@ pub mod hashline {
0:26|abc
1:2f|def
<|fim_suffix|>
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
Case {
name: "cursor_offset_relative_to_context_not_editable_region",
@@ -1721,7 +1790,8 @@ pub mod hashline {
1:26|b<|user_cursor|>bb
<|fim_suffix|>
suf
- <|fim_middle|>updated"},
+ <|fim_middle|>updated
+ "},
},
];
@@ -1894,6 +1964,18 @@ pub mod hashline {
world
"},
},
+ Case {
+ name: "no_edits_command_returns_original",
+ original: indoc! {"
+ hello
+ world
+ "},
+ model_output: "<|no_edits|>",
+ expected: indoc! {"
+ hello
+ world
+ "},
+ },
Case {
name: "wrong_hash_set_ignored",
original: indoc! {"
@@ -2113,6 +2195,7 @@ pub mod hashline {
)));
assert!(!hashline::output_has_edit_commands("just plain text"));
assert!(!hashline::output_has_edit_commands("NO_EDITS"));
+ assert!(hashline::output_has_edit_commands("<|no_edits|>"));
}
// ---- hashline::patch_to_edit_commands round-trip tests ----
@@ -2350,35 +2433,47 @@ pub mod hashline {
}
"#},
patch: indoc! {r#"
- @@ -1,3 +1,3 @@
- fn main() {
- - println!();
- + eprintln!("");
- }
- "#},
+ @@ -1,3 +1,3 @@
+ fn main() {
+ - println!();
+ + eprintln!("");
+ }
+ "#},
expected_new: indoc! {r#"
- fn main() {
- eprintln!("<|user_cursor|>");
- }
- "#},
+ fn main() {
+ eprintln!("<|user_cursor|>");
+ }
+ "#},
},
Case {
name: "non_local_hunk_header_pure_insertion_repro",
old: indoc! {"
- aaa
- bbb
- "},
+ aaa
+ bbb
+ "},
patch: indoc! {"
- @@ -20,2 +20,3 @@
- aaa
- +xxx
- bbb
- "},
+ @@ -20,2 +20,3 @@
+ aaa
+ +xxx
+ bbb
+ "},
expected_new: indoc! {"
- aaa
- xxx
- bbb
- "},
+ aaa
+ xxx
+ bbb
+ "},
+ },
+ Case {
+ name: "empty_patch_produces_no_edits_marker",
+ old: indoc! {"
+ aaa
+ bbb
+ "},
+ patch: "@@ -20,2 +20,3 @@\n",
+ expected_new: indoc! {"
+ aaa
+ bbb
+ "},
},
];
@@ -3796,7 +3891,8 @@ mod tests {
cursor_offset_in_excerpt: cursor_offset,
excerpt_start_row: None,
events: events.into_iter().map(Arc::new).collect(),
- related_files,
+ related_files: Some(related_files),
+ active_buffer_diagnostics: vec![],
excerpt_ranges: ExcerptRanges {
editable_150: editable_range.clone(),
editable_180: editable_range.clone(),
@@ -3806,6 +3902,7 @@ mod tests {
editable_350_context_150: context_range,
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -3825,7 +3922,8 @@ mod tests {
cursor_offset_in_excerpt: cursor_offset,
excerpt_start_row: None,
events: vec![],
- related_files: vec![],
+ related_files: Some(vec![]),
+ active_buffer_diagnostics: vec![],
excerpt_ranges: ExcerptRanges {
editable_150: editable_range.clone(),
editable_180: editable_range.clone(),
@@ -3835,6 +3933,7 @@ mod tests {
editable_350_context_150: context_range,
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -4408,7 +4507,8 @@ mod tests {
cursor_offset_in_excerpt: 30,
excerpt_start_row: Some(0),
events: vec![Arc::new(make_event("other.rs", "-old\n+new\n"))],
- related_files: vec![],
+ related_files: Some(vec![]),
+ active_buffer_diagnostics: vec![],
excerpt_ranges: ExcerptRanges {
editable_150: 15..41,
editable_180: 15..41,
@@ -4418,6 +4518,7 @@ mod tests {
editable_350_context_150: 0..excerpt.len(),
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -4471,7 +4572,8 @@ mod tests {
cursor_offset_in_excerpt: 15,
excerpt_start_row: Some(10),
events: vec![],
- related_files: vec![],
+ related_files: Some(vec![]),
+ active_buffer_diagnostics: vec![],
excerpt_ranges: ExcerptRanges {
editable_150: 0..28,
editable_180: 0..28,
@@ -4481,6 +4583,7 @@ mod tests {
editable_350_context_150: 0..28,
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -4529,7 +4632,8 @@ mod tests {
cursor_offset_in_excerpt: 25,
excerpt_start_row: Some(0),
events: vec![],
- related_files: vec![],
+ related_files: Some(vec![]),
+ active_buffer_diagnostics: vec![],
excerpt_ranges: ExcerptRanges {
editable_150: editable_range.clone(),
editable_180: editable_range.clone(),
@@ -4539,6 +4643,7 @@ mod tests {
editable_350_context_150: context_range.clone(),
..Default::default()
},
+ syntax_ranges: None,
experiment: None,
in_open_source_repo: false,
can_collect_data: false,
@@ -126,9 +126,11 @@ The following licenses are accepted:
- [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0)
- [BSD 2-Clause](https://opensource.org/license/bsd-2-clause)
- [BSD 3-Clause](https://opensource.org/license/bsd-3-clause)
+- [CC BY 4.0](https://creativecommons.org/licenses/by/4.0)
- [GNU GPLv3](https://www.gnu.org/licenses/gpl-3.0.en.html)
- [GNU LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.en.html)
- [MIT](https://opensource.org/license/mit)
+- [Unlicense](https://unlicense.org)
- [zlib](https://opensource.org/license/zlib)
This allows us to distribute the resulting binary produced from your extension code to our users.
@@ -52,6 +52,7 @@
withGLES ? false,
profile ? "release",
+ commitSha ? null,
}:
assert withGLES -> stdenv.hostPlatform.isLinux;
let
@@ -84,7 +85,10 @@ let
in
rec {
pname = "zed-editor";
- version = zedCargoLock.package.version + "-nightly";
+ version =
+ zedCargoLock.package.version
+ + "-nightly"
+ + lib.optionalString (commitSha != null) "+${builtins.substring 0 7 commitSha}";
src = builtins.path {
path = ../.;
filter = mkIncludeFilter ../.;
@@ -220,6 +224,7 @@ let
};
ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled.";
RELEASE_VERSION = version;
+ ZED_COMMIT_SHA = commitSha;
LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { };
PROTOC = "${protobuf}/bin/protoc";
@@ -6,4 +6,5 @@ in
pkgs.callPackage ./build.nix {
crane = inputs.crane.mkLib pkgs;
rustToolchain = rustBin.fromRustupToolchainFile ../rust-toolchain.toml;
+ commitSha = inputs.self.rev or null;
}
@@ -60,12 +60,21 @@ if [[ -n $apt ]]; then
# Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20
# support for building webrtc-sys (requires -std=c++20, lambdas in
# unevaluated contexts from clang 17+, and working std::ranges in the
- # stdlib). clang-18 is available in focal-security/universe as an official
- # backport, and libstdc++-11-dev from the ubuntu-toolchain-r PPA provides
- # headers with working pointer_traits/contiguous_range.
+ # stdlib).
# Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must
# use libstdc++ (not libc++) to avoid ABI mismatches at link time.
- $maysudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
+
+ # libstdc++-11-dev (headers with working pointer_traits/contiguous_range)
+ # is only available from the ubuntu-toolchain-r PPA. Add the source list
+ # and GPG key manually instead of using add-apt-repository, whose HKP
+ # keyserver lookups (port 11371) frequently time out in CI.
+ $maysudo "$apt" install -y curl gnupg
+ codename=$(lsb_release -cs)
+ echo "deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu $codename main" | \
+ $maysudo tee /etc/apt/sources.list.d/ubuntu-toolchain-r-test.list > /dev/null
+ curl -fsSL 'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x1E9377A2BA9EF27F' | \
+ sed -n '/-----BEGIN PGP PUBLIC KEY BLOCK-----/,/-----END PGP PUBLIC KEY BLOCK-----/p' | \
+ $maysudo gpg --dearmor -o /etc/apt/trusted.gpg.d/ubuntu-toolchain-r-test.gpg
deps+=( clang-18 libstdc++-11-dev )
fi